diff --git a/pyatlan/client/admin.py b/pyatlan/client/admin.py index a3720fb2b..dcdabcf5a 100644 --- a/pyatlan/client/admin.py +++ b/pyatlan/client/admin.py @@ -1,7 +1,7 @@ # SPDX-License-Identifier: Apache-2.0 # Copyright 2022 Atlan Pte. Ltd. -from pydantic import ValidationError, parse_obj_as, validate_arguments +from pydantic.v1 import ValidationError, parse_obj_as, validate_arguments from pyatlan.client.common import ApiCaller from pyatlan.client.constants import ADMIN_EVENTS, KEYCLOAK_EVENTS diff --git a/pyatlan/client/asset.py b/pyatlan/client/asset.py index 584940709..e8ea4c5c2 100644 --- a/pyatlan/client/asset.py +++ b/pyatlan/client/asset.py @@ -11,7 +11,7 @@ from warnings import warn import requests -from pydantic import ( +from pydantic.v1 import ( StrictStr, ValidationError, constr, diff --git a/pyatlan/client/atlan.py b/pyatlan/client/atlan.py index 35e9056ef..aa1c0e110 100644 --- a/pyatlan/client/atlan.py +++ b/pyatlan/client/atlan.py @@ -16,7 +16,7 @@ from warnings import warn import requests -from pydantic import ( +from pydantic.v1 import ( BaseSettings, HttpUrl, PrivateAttr, diff --git a/pyatlan/client/audit.py b/pyatlan/client/audit.py index b84e33335..be7ef8fcb 100644 --- a/pyatlan/client/audit.py +++ b/pyatlan/client/audit.py @@ -1,6 +1,6 @@ # SPDX-License-Identifier: Apache-2.0 # Copyright 2022 Atlan Pte. Ltd. -from pydantic import ValidationError, parse_obj_as, validate_arguments +from pydantic.v1 import ValidationError, parse_obj_as, validate_arguments from pyatlan.client.common import ApiCaller from pyatlan.client.constants import AUDIT_SEARCH diff --git a/pyatlan/client/credential.py b/pyatlan/client/credential.py index 71e49a5c3..6b81fe0e7 100644 --- a/pyatlan/client/credential.py +++ b/pyatlan/client/credential.py @@ -1,4 +1,4 @@ -from pydantic import validate_arguments +from pydantic.v1 import validate_arguments from pyatlan.client.common import ApiCaller from pyatlan.client.constants import ( diff --git a/pyatlan/client/group.py b/pyatlan/client/group.py index a7a07e031..3354573b9 100644 --- a/pyatlan/client/group.py +++ b/pyatlan/client/group.py @@ -2,7 +2,7 @@ # Copyright 2022 Atlan Pte. Ltd. from typing import Optional -from pydantic import validate_arguments +from pydantic.v1 import validate_arguments from pyatlan.client.common import ApiCaller from pyatlan.client.constants import ( diff --git a/pyatlan/client/query.py b/pyatlan/client/query.py index fa7895e81..ed984516f 100644 --- a/pyatlan/client/query.py +++ b/pyatlan/client/query.py @@ -1,4 +1,4 @@ -from pydantic import validate_arguments +from pydantic.v1 import validate_arguments from pyatlan.client.common import ApiCaller from pyatlan.client.constants import RUN_QUERY diff --git a/pyatlan/client/role.py b/pyatlan/client/role.py index 3530247b3..3608b1910 100644 --- a/pyatlan/client/role.py +++ b/pyatlan/client/role.py @@ -2,7 +2,7 @@ # Copyright 2022 Atlan Pte. Ltd. 
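# The changes above (and those that follow) swap every `from pydantic import ...` for
# `from pydantic.v1 import ...`. Under pydantic 2.x that namespace is the bundled v1
# compatibility layer, so v1-only helpers such as `validate_arguments` keep working
# unchanged. A minimal, self-contained sketch -- `ExampleUser` and `greet` are
# hypothetical names, not part of this change:
from pydantic.v1 import BaseModel, ValidationError, validate_arguments


class ExampleUser(BaseModel):
    username: str
    email: str


@validate_arguments
def greet(user: ExampleUser) -> str:
    # v1-style call validation: the dict passed below is coerced into ExampleUser
    return f"Hello, {user.username}!"


try:
    greet(user={"username": "jsmith"})  # missing "email" raises a v1 ValidationError
except ValidationError as err:
    print(err)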
from typing import Optional -from pydantic import validate_arguments +from pydantic.v1 import validate_arguments from pyatlan.client.common import ApiCaller from pyatlan.client.constants import GET_ROLES diff --git a/pyatlan/client/search_log.py b/pyatlan/client/search_log.py index e1bd7a478..5f7ce1bf9 100644 --- a/pyatlan/client/search_log.py +++ b/pyatlan/client/search_log.py @@ -1,6 +1,6 @@ from typing import Union -from pydantic import ValidationError, parse_obj_as, validate_arguments +from pydantic.v1 import ValidationError, parse_obj_as, validate_arguments from pyatlan.client.common import ApiCaller from pyatlan.client.constants import SEARCH_LOG diff --git a/pyatlan/client/token.py b/pyatlan/client/token.py index e4daa7344..7e890e4a1 100644 --- a/pyatlan/client/token.py +++ b/pyatlan/client/token.py @@ -4,7 +4,7 @@ from typing import Optional -from pydantic import validate_arguments +from pydantic.v1 import validate_arguments from pyatlan.client.common import ApiCaller from pyatlan.client.constants import DELETE_API_TOKEN, GET_API_TOKENS, UPSERT_API_TOKEN diff --git a/pyatlan/client/typedef.py b/pyatlan/client/typedef.py index 1a89c6afc..5be8252d5 100644 --- a/pyatlan/client/typedef.py +++ b/pyatlan/client/typedef.py @@ -2,7 +2,7 @@ # Copyright 2022 Atlan Pte. Ltd. from typing import Union -from pydantic import validate_arguments +from pydantic.v1 import validate_arguments from pyatlan.client.common import ApiCaller from pyatlan.client.constants import ( @@ -32,7 +32,7 @@ def _build_typedef_request(typedef: TypeDef) -> TypeDefResponse: entity_defs=[], relationship_defs=[], custom_metadata_defs=[], - ) + ) # type: ignore[call-arg] elif isinstance(typedef, CustomMetadataDef): # Set up the request payload... payload = TypeDefResponse( @@ -42,7 +42,7 @@ def _build_typedef_request(typedef: TypeDef) -> TypeDefResponse: entity_defs=[], relationship_defs=[], custom_metadata_defs=[typedef], - ) + ) # type: ignore[call-arg] elif isinstance(typedef, EnumDef): # Set up the request payload... 
payload = TypeDefResponse( @@ -52,7 +52,7 @@ def _build_typedef_request(typedef: TypeDef) -> TypeDefResponse: entity_defs=[], relationship_defs=[], custom_metadata_defs=[], - ) + ) # type: ignore[call-arg] else: raise ErrorCode.UNABLE_TO_UPDATE_TYPEDEF_CATEGORY.exception_with_parameters( typedef.category.value diff --git a/pyatlan/client/user.py b/pyatlan/client/user.py index 894120a06..fa4e0b2f6 100644 --- a/pyatlan/client/user.py +++ b/pyatlan/client/user.py @@ -4,7 +4,7 @@ from typing import Any, Optional -from pydantic import validate_arguments +from pydantic.v1 import validate_arguments from pyatlan.client.common import ApiCaller from pyatlan.client.constants import ( @@ -59,7 +59,7 @@ def create( cur = CreateUserRequest(users=[]) for user in users: role_name = str(user.workspace_role) - if role_id := RoleCache.get_id_for_name(role_name): + if (role_id := RoleCache.get_id_for_name(role_name)) and user.email: to_create = CreateUserRequest.CreateUser( email=user.email, role_name=role_name, @@ -342,7 +342,7 @@ def _add_as( :raises NotFoundError: if the asset to which to add the API token as a viewer cannot be found """ from pyatlan.client.atlan import client_connection - from pyatlan.model.assets.asset00 import Asset + from pyatlan.model.assets import Asset from pyatlan.model.fluent_search import FluentSearch if keyword_field not in [Asset.ADMIN_USERS, Asset.VIEWER_USERS]: diff --git a/pyatlan/client/workflow.py b/pyatlan/client/workflow.py index 255aaeac5..5570a183d 100644 --- a/pyatlan/client/workflow.py +++ b/pyatlan/client/workflow.py @@ -4,7 +4,7 @@ from time import sleep from typing import Optional, Union, overload -from pydantic import validate_arguments +from pydantic.v1 import validate_arguments from pyatlan.client.common import ApiCaller from pyatlan.client.constants import ( diff --git a/pyatlan/generator/class_generator.py b/pyatlan/generator/class_generator.py index 3401bf0f5..94c0adb56 100644 --- a/pyatlan/generator/class_generator.py +++ b/pyatlan/generator/class_generator.py @@ -112,6 +112,7 @@ class ModuleInfo: count: int = 0 modules: set["ModuleInfo"] = set() modules_by_asset_name: dict[str, str] = {} + assets: dict[str, "AssetInfo"] = {} @classmethod def check_for_circular_module_dependencies(cls): @@ -210,6 +211,8 @@ def __init__(self, name: str, entity_def: EntityDef): self.required_asset_infos: set["AssetInfo"] = set() self.circular_dependencies: set["AssetInfo"] = set() self.order: int = 0 + self.module_name = to_snake_case(name) + self.super_type: Optional[AssetInfo] = None def __hash__(self): return hash(self._name) @@ -228,6 +231,45 @@ def super_class(self): else: return self.entity_def.super_types[0] + @property + def import_super_class(self): + if self._name == REFERENCEABLE: + return "" + super_type = AssetInfo.asset_info_by_name[self.entity_def.super_types[0]] + return f"from .{super_type.module_name} import {super_type.name}" + + @property + def get_update_forward_refs(self): + import_set = set() + asset_name = self.name + current_name = self.name + while current_name != "Referenceable" and self.hierarchy_graph.predecessors( + current_name ): + parent_asset_name = next( + iter(self.hierarchy_graph.predecessors(current_name)) + ) + if parent_asset_name == "Referenceable": + break + parent_asset = self.asset_info_by_name[ + parent_asset_name + ].required_asset_infos + + for parent_module in parent_asset: + if asset_name != parent_module.name: + import_set.add(parent_module.name) + + current_name = parent_asset_name + + return {key: key for key in import_set}
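# Why the parentheses matter in the `user.create` guard above: `:=` binds more loosely
# than `and`, so `role_id := X and Y` stores the value of `X and Y` (the e-mail address
# when both are truthy) rather than the role GUID. A self-contained sketch with
# hypothetical stand-ins for RoleCache and the user record:
def get_id_for_name(name: str) -> str:
    return "role-guid-123"  # stand-in for RoleCache.get_id_for_name


email = "jsmith@example.com"

if role_id := get_id_for_name("$admin") and email:
    print(role_id)  # jsmith@example.com -- the wrong value lands in role_id

if (role_id := get_id_for_name("$admin")) and email:
    print(role_id)  # role-guid-123 -- the intended role identifier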
+ + @property + def imports_for_referenced_assets(self): + return [ + f"from .{required_asset.module_name} import {required_asset.name} # noqa" + for required_asset in self.required_asset_infos + ] + def update_attribute_defs(self): def get_ancestor_relationship_defs( ancestor_name: str, ancestor_relationship_defs @@ -284,10 +326,6 @@ def update_required_asset_names(self) -> None: for a in relationship_attribute_defs if a["name"] not in attributes_to_remove ] - if self.entity_def.super_types: - self.required_asset_infos.add( - AssetInfo.asset_info_by_name[self.entity_def.super_types[0]] - ) def merge_attributes(self, entity_def): def merge_them(s, a): @@ -357,10 +395,8 @@ def create_modules(cls): asset_info = cls.asset_info_by_name[asset_name] asset_info.order = order order += 1 - if asset_info.module_info is None: - ModuleInfo(asset_info=asset_info) - else: - asset_info.module_info.add_asset_info(asset_info=asset_info) + + ModuleInfo.assets[asset_name] = asset_info class AttributeType(Enum): @@ -612,40 +648,32 @@ def get_ancestor_relationship_defs( ) def render_modules(self, modules: list[ModuleInfo]): - for module in modules: - self.render_module(module) - self.render_init(modules) + self.render_init(modules) # type: ignore - def render_module(self, module: ModuleInfo): + def render_module(self, asset_info: AssetInfo): template = self.environment.get_template("module.jinja2") content = template.render( { - "module": module, + "asset_info": asset_info, "existz": os.path.exists, - "module_name": module.name, - "modules_by_asset_name": ModuleInfo.modules_by_asset_name, } ) - with (ASSETS_DIR / f"{module.name}.py").open("w") as script: + with (ASSETS_DIR / f"{asset_info.module_name}.py").open("w") as script: script.write(content) - def render_init(self, modules: list[ModuleInfo]): - imports = sorted( - { - f"from .{asset_info.module_info.name} import {asset_info.name}" - for module in modules - if module.status == ModuleStatus.ACTIVE - for asset_info in module.asset_infos - if asset_info.module_info - } - ) + def render_init(self, assets: list[AssetInfo]): + asset_names = [asset.name for asset in assets] + asset_imports = [ + f"from .{asset.module_name} import {asset.name}" for asset in assets + ] + template = self.environment.get_template("init.jinja2") content = template.render( - { - "imports": imports, - } + {"asset_imports": asset_imports, "asset_names": asset_names} ) - with (ASSETS_DIR / "__init__.py").open("w") as script: + + init_path = ASSETS_DIR / "__init__.py" + with init_path.open("w") as script: script.write(content) def render_structs(self, struct_defs): @@ -788,13 +816,12 @@ def create(cls, enum_defs): AssetInfo.set_entity_defs(type_defs.entity_defs) AssetInfo.update_all_circular_dependencies() AssetInfo.create_modules() - ModuleInfo.check_for_circular_module_dependencies() for file in (ASSETS_DIR).glob("*.py"): file.unlink() generator = Generator() - generator.render_modules( - [m for m in ModuleInfo.modules if m.status == ModuleStatus.ACTIVE] - ) + for asset_info in ModuleInfo.assets.values(): + generator.render_module(asset_info) + generator.render_init(ModuleInfo.assets.values()) # type: ignore generator.render_structs(type_defs.struct_defs) EnumDefInfo.create(type_defs.enum_defs) generator.render_enums(EnumDefInfo.enum_def_info) diff --git a/pyatlan/generator/templates/imports.jinja2 b/pyatlan/generator/templates/imports.jinja2 index 4f39237b6..a20f22b01 100644 --- a/pyatlan/generator/templates/imports.jinja2 +++ b/pyatlan/generator/templates/imports.jinja2 @@ -8,7 +8,7 
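# Rough sketch of the new rendering flow in the generator hunks above: one
# `<module_name>.py` per asset (snake_case of the class name) plus a single
# `__init__.py` that re-exports everything and resolves forward references.
# `AssetStub` and this simplified `to_snake_case` are illustrative stand-ins;
# pyatlan.utils.to_snake_case splits acronyms differently (e.g. "ADLS" -> "a_d_l_s").
import re
from dataclasses import dataclass


def to_snake_case(name: str) -> str:
    return re.sub(r"(?<!^)(?=[A-Z])", "_", name).lower()


@dataclass(frozen=True)
class AssetStub:
    name: str

    @property
    def module_name(self) -> str:
        return to_snake_case(self.name)


def render_init(assets: list[AssetStub]) -> str:
    lines = [f"from .{a.module_name} import {a.name}" for a in assets]
    lines += ["", "localns = locals()"]
    lines += [f"{a.name}.Attributes.update_forward_refs(**localns)" for a in assets]
    return "\n".join(lines)


print(render_init([AssetStub("Referenceable"), AssetStub("Asset"), AssetStub("DataSet")]))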
@@ from io import StringIO from typing import Any, ClassVar, Optional, Set, Type, TypeVar, TYPE_CHECKING, cast, overload from urllib.parse import quote, unquote -from pydantic import Field, PrivateAttr, StrictStr, root_validator, validator +from pydantic.v1 import Field, PrivateAttr, StrictStr, root_validator, validator from pyatlan.errors import ErrorCode from pyatlan.model.core import Announcement, AtlanObject, AtlanTag, AtlanTagName, Meaning @@ -103,5 +103,6 @@ from pyatlan.utils import ( next_id, to_camel_case, validate_required_fields, + validate_single_required_field, get_parent_qualified_name, ) diff --git a/pyatlan/generator/templates/init.jinja2 b/pyatlan/generator/templates/init.jinja2 index 7128380b1..f7bf0c8fd 100644 --- a/pyatlan/generator/templates/init.jinja2 +++ b/pyatlan/generator/templates/init.jinja2 @@ -1,5 +1,11 @@ # Copyright 2022 Atlan Pte. Ltd. -from .asset00 import validate_single_required_field -{% for import in imports -%} -{{ import }} +# isort: skip_file +{% for asset_import in asset_imports -%} +{{ asset_import }} {% endfor %} + +# Update asset forward references: +localns = locals() +{%- for asset_name in asset_names %} +{{ asset_name }}.Attributes.update_forward_refs(**localns) +{%- endfor %} diff --git a/pyatlan/generator/templates/methods/asset/asset.jinja2 b/pyatlan/generator/templates/methods/asset/asset.jinja2 index 70e5db6ff..739fe6b9b 100644 --- a/pyatlan/generator/templates/methods/asset/asset.jinja2 +++ b/pyatlan/generator/templates/methods/asset/asset.jinja2 @@ -1,4 +1,5 @@ + _subtypes_:dict[str, type] = dict() def __init_subclass__(cls, type_name=None): diff --git a/pyatlan/generator/templates/methods/asset/purpose.jinja2 b/pyatlan/generator/templates/methods/asset/purpose.jinja2 index fa35f0c04..381724156 100644 --- a/pyatlan/generator/templates/methods/asset/purpose.jinja2 +++ b/pyatlan/generator/templates/methods/asset/purpose.jinja2 @@ -2,7 +2,7 @@ @classmethod # @validate_arguments() @init_guid - def create(cls, *, name: str, atlan_tags: list[str]) -> {{ entity_def.name }}: + def create(cls, *, name: str, atlan_tags: list[AtlanTagName]) -> {{ entity_def.name }}: validate_required_fields(["name", "atlan_tags"], [name, atlan_tags]) attributes = Purpose.Attributes.create(name=name, atlan_tags=atlan_tags) return cls(attributes=attributes) diff --git a/pyatlan/generator/templates/methods/attribute/a_p_i_path.jinja2 b/pyatlan/generator/templates/methods/attribute/a_p_i_path.jinja2 index cbbcc713b..225a109e3 100644 --- a/pyatlan/generator/templates/methods/attribute/a_p_i_path.jinja2 +++ b/pyatlan/generator/templates/methods/attribute/a_p_i_path.jinja2 @@ -24,5 +24,5 @@ connection_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}", qualified_name=f"{spec_qualified_name}{path_raw_uri}", connector_name=connector_type.value, - apiSpec=APISpec.ref_by_qualified_name(spec_qualified_name), + api_spec=APISpec.ref_by_qualified_name(spec_qualified_name), ) diff --git a/pyatlan/generator/templates/methods/attribute/atlas_glossary.jinja2 b/pyatlan/generator/templates/methods/attribute/atlas_glossary.jinja2 index fe16016d1..f19d91269 100644 --- a/pyatlan/generator/templates/methods/attribute/atlas_glossary.jinja2 +++ b/pyatlan/generator/templates/methods/attribute/atlas_glossary.jinja2 @@ -5,4 +5,4 @@ def create(cls, *, name: StrictStr, icon: Optional[AtlanIcon] = None)->{{ entity_def.name }}.Attributes: validate_required_fields(["name"], [name]) icon_str = icon.value if icon is not None else None - return AtlasGlossary.Attributes(name=name, 
qualified_name=next_id(), icon=icon_str) + return AtlasGlossary.Attributes(name=name, qualified_name=next_id(), asset_icon=icon_str) diff --git a/pyatlan/generator/templates/methods/attribute/data_domain.jinja2 b/pyatlan/generator/templates/methods/attribute/data_domain.jinja2 index 65cdc4b76..d45ca32c3 100644 --- a/pyatlan/generator/templates/methods/attribute/data_domain.jinja2 +++ b/pyatlan/generator/templates/methods/attribute/data_domain.jinja2 @@ -24,5 +24,5 @@ name=name, parent_domain=parent_domain, qualified_name=qualified_name, - icon=icon_str, + asset_icon=icon_str, ) diff --git a/pyatlan/generator/templates/methods/attribute/data_product.jinja2 b/pyatlan/generator/templates/methods/attribute/data_product.jinja2 index 6bc8c2f86..0997a3632 100644 --- a/pyatlan/generator/templates/methods/attribute/data_product.jinja2 +++ b/pyatlan/generator/templates/methods/attribute/data_product.jinja2 @@ -25,5 +25,5 @@ data_product_assets_d_s_l=assets_dsl, data_domain=domain, qualified_name=f"default/product/{camel_case_name}", - icon=icon_str, + asset_icon=icon_str, ) diff --git a/pyatlan/generator/templates/methods/attribute/purpose.jinja2 b/pyatlan/generator/templates/methods/attribute/purpose.jinja2 index 58560f5d7..f3b48fcef 100644 --- a/pyatlan/generator/templates/methods/attribute/purpose.jinja2 +++ b/pyatlan/generator/templates/methods/attribute/purpose.jinja2 @@ -2,7 +2,7 @@ @classmethod # @validate_arguments() @init_guid - def create(cls, name: str, atlan_tags: list[str]) -> {{ entity_def.name }}.Attributes: + def create(cls, name: str, atlan_tags: list[AtlanTagName]) -> {{ entity_def.name }}.Attributes: validate_required_fields(["name", "atlan_tags"], [name, atlan_tags]) return Purpose.Attributes( qualified_name=name, diff --git a/pyatlan/generator/templates/methods/imports/auth_policy.jinja2 b/pyatlan/generator/templates/methods/imports/auth_policy.jinja2 index 604a16e0b..20b1a6e2b 100644 --- a/pyatlan/generator/templates/methods/imports/auth_policy.jinja2 +++ b/pyatlan/generator/templates/methods/imports/auth_policy.jinja2 @@ -1 +1 @@ -from .{{ modules_by_asset_name['Asset'] }} import SelfAsset +from .asset import SelfAsset diff --git a/pyatlan/generator/templates/methods/imports/persona.jinja2 b/pyatlan/generator/templates/methods/imports/persona.jinja2 index 8ff9c691c..7384ec4f0 100644 --- a/pyatlan/generator/templates/methods/imports/persona.jinja2 +++ b/pyatlan/generator/templates/methods/imports/persona.jinja2 @@ -1,2 +1,2 @@ -from .{{ modules_by_asset_name['AuthPolicy'] }} import AuthPolicy -from .{{ modules_by_asset_name['Asset'] }} import SelfAsset +from .auth_policy import AuthPolicy +from .asset import SelfAsset diff --git a/pyatlan/generator/templates/methods/imports/purpose.jinja2 b/pyatlan/generator/templates/methods/imports/purpose.jinja2 index 8ff9c691c..7384ec4f0 100644 --- a/pyatlan/generator/templates/methods/imports/purpose.jinja2 +++ b/pyatlan/generator/templates/methods/imports/purpose.jinja2 @@ -1,2 +1,2 @@ -from .{{ modules_by_asset_name['AuthPolicy'] }} import AuthPolicy -from .{{ modules_by_asset_name['Asset'] }} import SelfAsset +from .auth_policy import AuthPolicy +from .asset import SelfAsset diff --git a/pyatlan/generator/templates/module.jinja2 b/pyatlan/generator/templates/module.jinja2 index 0737a6625..a53c80d93 100644 --- a/pyatlan/generator/templates/module.jinja2 +++ b/pyatlan/generator/templates/module.jinja2 @@ -5,24 +5,27 @@ {% from 'macros.jinja2' import gen_property_relationship_class_vars %} {% include 'imports.jinja2' %} -{% for import in 
module.imports%} -{{ import }} -{% endfor %} -{% for asset_info in module.ordered_asset_infos %} - {% set entity_def = asset_info.entity_def %} - {% set file_name = 'methods/imports/' + entity_def.name | to_snake_case + '.jinja2' %} - {% if existz('templates/' + file_name) %} + +{% if asset_info.name != 'Referenceable' and asset_info.name != 'Asset' %} +from .asset import SelfAsset +{% endif %} + +{% set entity_def = asset_info.entity_def %} + +{% set file_name = 'methods/imports/' + entity_def.name | to_snake_case + '.jinja2' %} +{% if existz('templates/' + file_name) %} {% include file_name %} - {% endif %} -{% endfor %} -{% if module_name == 'asset00' %} -{% include 'globals.jinja2' %} {% endif %} -{% for asset_info in module.ordered_asset_infos %} - {% set entity_def = asset_info.entity_def %} +{% set entity_def = asset_info.entity_def %} {%- set super_classes = ['AtlanObject'] if not entity_def.super_types else entity_def.super_types %} +{{ asset_info.import_super_class }} + +{% if asset_info.name == 'Asset' %} +SelfAsset = TypeVar("SelfAsset", bound="Asset") +{% endif %} + class {{ entity_def.name }}({{super_classes[0]}} {%- if "Asset" in super_classes %}, type_name='{{ entity_def.name }}'{% endif %}): """Description""" {% if entity_def.name == "Referenceable" %} @@ -65,9 +68,24 @@ class {{ entity_def.name }}({{super_classes[0]}} {%- if "Asset" in super_classes ) {%- endif %} {% endif %} -{% endfor %} -{% for asset_info in module.ordered_asset_infos %} - {% set entity_def = asset_info.entity_def %} -{{entity_def.name}}.Attributes.update_forward_refs() +{% if asset_info.module_name == 'referenceable' %} +# Imports required for fixing circular dependencies: +from .asset import Asset # noqa # isort:skip +{% endif %} + +{% if asset_info.module_name == 'metric' %} +# Imports required for fixing circular dependencies: +from .asset import Asset # noqa # isort:skip +from .catalog import Catalog # noqa # isort:skip +from .s_q_l import SQL # noqa # isort:skip +{% endif %} + +{% for import in asset_info.imports_for_referenced_assets %} +{{ import }} {% endfor %} + +{% if asset_info.entity_def.name in asset_info.eligible_assets %} +{% set entity_name = asset_info.name %} +{{ entity_name }}.Attributes.update_forward_refs() +{% endif %} diff --git a/pyatlan/generator/templates/modules.jinja2 b/pyatlan/generator/templates/modules.jinja2 new file mode 100644 index 000000000..07313e224 --- /dev/null +++ b/pyatlan/generator/templates/modules.jinja2 @@ -0,0 +1,73 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
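# What the `SelfAsset = TypeVar("SelfAsset", bound="Asset")` line in the template above
# enables: a classmethod annotated with a bound TypeVar reports the *subclass* it was
# called on, so calls made on a generated subclass type-check as returning that subclass
# rather than a bare Asset. A minimal sketch -- `AssetBase`, `TableStub` and the
# qualified name are hypothetical, not pyatlan's real classes:
from typing import TypeVar

SelfAsset = TypeVar("SelfAsset", bound="AssetBase")


class AssetBase:
    qualified_name: str = ""

    @classmethod
    def create_for_modification(cls: type[SelfAsset], qualified_name: str) -> SelfAsset:
        obj = cls()
        obj.qualified_name = qualified_name
        return obj


class TableStub(AssetBase):
    pass


table = TableStub.create_for_modification("default/snowflake/123/DB/SCHEMA/TBL")
print(type(table).__name__)  # TableStub -- the bound TypeVar preserves the subclass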
+{% from 'macros.jinja2' import gen_properties %} +{% from 'macros.jinja2' import gen_property_class_vars %} +{% from 'macros.jinja2' import gen_property_relationship_class_vars %} + +{% include 'imports.jinja2' %} +{% for import in module.imports%} +{{ import }} +{% endfor %} +{% for asset_info in module.ordered_asset_infos %} + {% set entity_def = asset_info.entity_def %} + {% set file_name = 'methods/imports/' + entity_def.name | to_snake_case + '.jinja2' %} + {% if existz('templates/' + file_name) %} +{% include file_name %} + {% endif %} +{% endfor %} +{% if module_name == 'asset00' %} +{% include 'globals.jinja2' %} +{% endif %} + +{% for asset_info in module.ordered_asset_infos %} + {% set entity_def = asset_info.entity_def %} +{%- set super_classes = ['AtlanObject'] if not entity_def.super_types else entity_def.super_types %} + +class {{ entity_def.name }}({{super_classes[0]}} {%- if "Asset" in super_classes %}, type_name='{{ entity_def.name }}'{% endif %}): + """Description""" +{% if entity_def.name == "Referenceable" %} + {% include 'referenceable_methods.jinja2' %} + {% include 'properties.jinja2' %} + {% include 'referenceable_attributes.jinja2' %} +{%- else %} + {% set file_name = 'methods/asset/' + entity_def.name | to_snake_case + '.jinja2' %} + {% if existz('templates/' + file_name) %} + {% include file_name %} + {% endif %} + + type_name: str = Field(default="{{ entity_def.name }}", allow_mutation=False) + + @validator('type_name') + def validate_type_name(cls, v): + if v != "{{ entity_def.name }}": + raise ValueError('must be {{ entity_def.name }}') + return v + + {% include 'properties.jinja2' %} + {%- if entity_def.attribute_defs or entity_def.relationship_attribute_defs %} + class Attributes({{super_classes[0]}}.Attributes): + {%- for attribute_def in entity_def.attribute_defs %} + {%- set type = attribute_def.typeName | get_type %} + {{attribute_def.name | to_snake_case }}: Optional[{{type}}] = Field(default=None, description='') + {%- endfor %} + {%- for attribute_def in entity_def.relationship_attribute_defs %} + {%- set type = attribute_def.typeName | get_type %} + {{attribute_def.name | to_snake_case }}: Optional[{{type}}]= Field(default=None, description='') # relationship + {%- endfor %} + {% set file_name = 'methods/attribute/' + entity_def.name | to_snake_case + '.jinja2' %} + {% if existz('templates/' + file_name) %} + {% include file_name %} + {% endif %} + attributes: '{{entity_def.name}}.Attributes' = Field( + default_factory = lambda: {{entity_def.name}}.Attributes(), + description='Map of attributes in the instance and their values. 
The specific keys of this map will vary by ' + 'type, so are described in the sub-types of this schema.\n', + ) + {%- endif %} +{% endif %} +{% endfor %} + +{% for asset_info in module.ordered_asset_infos %} + {% set entity_def = asset_info.entity_def %} +{{entity_def.name}}.Attributes.update_forward_refs() +{% endfor %} diff --git a/pyatlan/generator/templates/referenceable_attributes.jinja2 b/pyatlan/generator/templates/referenceable_attributes.jinja2 index 219eec12a..db5a1a46b 100644 --- a/pyatlan/generator/templates/referenceable_attributes.jinja2 +++ b/pyatlan/generator/templates/referenceable_attributes.jinja2 @@ -3,11 +3,11 @@ {%- for attribute_def in entity_def.attribute_defs %} {%- set type = attribute_def.typeName | get_type %} {%- set default_value = "''" if attribute_def.name == "qualifiedName" else "None" %} - {{attribute_def.name | to_snake_case }}: Optional[{{type}}]= Field({{ default_value }}, description='' , alias='{{attribute_def.name}}') + {{attribute_def.name | to_snake_case }}: Optional[{{type}}]= Field(default={{ default_value }}, description='') {%- endfor %} {%- for attribute_def in entity_def.relationship_attribute_defs %} {%- set type = attribute_def.typeName | get_type %} - {{attribute_def.name | to_snake_case }}: {% if attribute_def.isOptional %}Optional[{% endif %}{{type}}{% if attribute_def.isOptional %}]{% endif %} = Field({% if attribute_def.isOptional %}None,{% endif %} description='', alias='{{attribute_def.name}}') # relationship + {{attribute_def.name | to_snake_case }}: {% if attribute_def.isOptional %}Optional[{% endif %}{{type}}{% if attribute_def.isOptional %}]{% endif %} = Field({% if attribute_def.isOptional %}default=None,{% endif %} description='') # relationship {%- endfor %} def validate_required(self): @@ -79,115 +79,109 @@ ) """Unique fully-qualified name of the asset in Atlan.""" - type_name: str = Field("Referenceable", description='Name of the type definition that defines this instance.\n' + type_name: str = Field(default="Referenceable", description='Name of the type definition that defines this instance.' ) _metadata_proxy: CustomMetadataProxy = PrivateAttr() attributes: {{entity_def.name}}.Attributes = Field( default_factory = lambda : {{entity_def.name}}.Attributes(), description='Map of attributes in the instance and their values. 
The specific keys of this map will vary ' - 'by type, so are described in the sub-types of this schema.\n', + 'by type, so are described in the sub-types of this schema.', ) business_attributes: Optional[dict[str, Any]] = Field( - None, - description='Map of custom metadata attributes and values defined on the entity.\n', - alias='businessAttributes' + default=None, + description='Map of custom metadata attributes and values defined on the entity.', ) created_by: Optional[str] = Field( - None, - description='Username of the user who created the object.\n', + default=None, + description='Username of the user who created the object.', example='jsmith', ) create_time: Optional[int] = Field( - None, - description='Time (epoch) at which this object was created, in milliseconds.\n', + default=None, + description='Time (epoch) at which this object was created, in milliseconds.', example=1648852296555, ) delete_handler: Optional[str] = Field( - None, + default=None, description="Details on the handler used for deletion of the asset.", example="Hard", ) guid: str = Field( - "", - description='Unique identifier of the entity instance.\n', + default="", + description='Unique identifier of the entity instance.', example='917ffec9-fa84-4c59-8e6c-c7b114d04be3', ) - is_incomplete: Optional[bool] = Field(True, description='', example=True) - labels: Optional[list[str]] = Field(None, description='Internal use only.') + is_incomplete: Optional[bool] = Field(default=True, description='', example=True) + labels: Optional[list[str]] = Field(default=None, description='Internal use only.') relationship_attributes: Optional[dict[str, Any]] = Field( - None, + default=None, description='Map of relationships for the entity. The specific keys of this map will vary by type, ' - 'so are described in the sub-types of this schema.\n', + 'so are described in the sub-types of this schema.', ) status: Optional[EntityStatus] = Field( - None, + default=None, description="Status of the entity", example=EntityStatus.ACTIVE ) updated_by: Optional[str] = Field( - None, - description='Username of the user who last assets_updated the object.\n', + default=None, + description='Username of the user who last assets_updated the object.', example='jsmith', ) update_time: Optional[int] = Field( - None, - description='Time (epoch) at which this object was last assets_updated, in milliseconds.\n', + default=None, + description='Time (epoch) at which this object was last assets_updated, in milliseconds.', example=1649172284333, ) version: Optional[int] = Field( - None, description='Version of this object.\n', example=2 + default=None, description='Version of this object.', example=2 ) atlan_tags: Optional[list[AtlanTag]] = Field( - None, description="Atlan tags", alias="classifications" + default=None, description="Atlan tags", ) classification_names: Optional[list[str]] = Field( - None, description="The names of the classifications that exist on the asset." + default=None, description="The names of the classifications that exist on the asset." 
) display_text: Optional[str] = Field( - None, - description="Human-readable name of the entity..\n", + default=None, + description="Human-readable name of the entity..", ) entity_status: Optional[str] = Field( - None, - description="Status of the entity (if this is a related entity).\n", + default=None, + description="Status of the entity (if this is a related entity).", ) relationship_guid: Optional[str] = Field( - None, - description="Unique identifier of the relationship (when this is a related entity).\n", + default=None, + description="Unique identifier of the relationship (when this is a related entity).", ) relationship_status: Optional[str] = Field( - None, - description="Status of the relationship (when this is a related entity).\n", + default=None, + description="Status of the relationship (when this is a related entity).", ) relationship_type: Optional[str] = Field( - None, - description="Status of the relationship (when this is a related entity).\n", + default=None, + description="Status of the relationship (when this is a related entity).", ) meaning_names: Optional[list[str]] = Field( - None, description="Names of assigned_terms that have been linked to this asset." + default=None, description="Names of assigned_terms that have been linked to this asset." ) - meanings: Optional[list[Meaning]] = Field( - None, description="", alias="meanings" - ) - custom_attributes: Optional[dict[str, Any]] = Field(None, description="", alias="customAttributes") - scrubbed: Optional[bool] = Field( - None, description="", alias="fields removed from results" - ) - pending_tasks: Optional[list[str]] = Field(None) + meanings: Optional[list[Meaning]] = Field(default=None, description="") + custom_attributes: Optional[dict[str, Any]] = Field(default=None, description="") + scrubbed: Optional[bool] = Field(default=None, description="") + pending_tasks: Optional[list[str]] = Field(default=None) - unique_attributes: Optional[dict[str, Any]] = Field(None) + unique_attributes: Optional[dict[str, Any]] = Field(default=None) append_relationship_attributes: Optional[dict[str, Any]] = Field( - None, - alias="appendRelationshipAttributes", + default=None, description="Map of append relationship attributes.", ) remove_relationship_attributes: Optional[dict[str, Any]] = Field( - None, - alias="removeRelationshipAttributes", + default=None, description="Map of remove relationship attributes.", ) semantic: Optional[SaveSemantic] = Field( + default=None, exclude=True, description=( "Semantic for how this relationship should be saved, " diff --git a/pyatlan/generator/templates/structs.jinja2 b/pyatlan/generator/templates/structs.jinja2 index 8cb570eb3..dfe4e7171 100644 --- a/pyatlan/generator/templates/structs.jinja2 +++ b/pyatlan/generator/templates/structs.jinja2 @@ -3,7 +3,7 @@ from __future__ import annotations from datetime import datetime from typing import Optional, Union -from pydantic import BaseModel, Extra, Field +from pydantic.v1 import BaseModel, Extra, Field from pyatlan.model.enums import ( BadgeComparisonOperator, @@ -53,7 +53,7 @@ class {{struct.name}}(AtlanObject): {% endif %} {%- for attribute_def in struct.attribute_defs %} {%- set type = attribute_def.type_name | get_type %} - {{attribute_def.name | to_snake_case }}: {% if attribute_def.is_optional %}Optional[{% endif %}{{type}}{% if attribute_def.is_optional %}]{% endif %} = Field({% if attribute_def.is_optional %}None,{% endif %} description='' ) + {{attribute_def.name | to_snake_case }}: {% if attribute_def.is_optional %}Optional[{% endif 
%}{{type}}{% if attribute_def.is_optional %}]{% endif %} = Field({% if attribute_def.is_optional %}default=None,{% endif %} description='' ) {%- endfor %} {% endfor %} diff --git a/pyatlan/model/api_tokens.py b/pyatlan/model/api_tokens.py index 2bc594dea..915176b4c 100644 --- a/pyatlan/model/api_tokens.py +++ b/pyatlan/model/api_tokens.py @@ -3,7 +3,7 @@ import json from typing import Any, Optional -from pydantic import Field, root_validator +from pydantic.v1 import Field, root_validator from pyatlan.model.core import AtlanObject @@ -13,11 +13,15 @@ class Config: frozen = True guid: Optional[str] = Field( - description="Unique identifier (GUID) of the linked persona.", alias="id" + default=None, + description="Unique identifier (GUID) of the linked persona.", + alias="id", + ) + persona: Optional[str] = Field( + default=None, description="Unique name of the linked persona." ) - persona: Optional[str] = Field(description="Unique name of the linked persona.") persona_qualified_name: Optional[str] = Field( - description="Unique qualified_name of the persona" + default=None, description="Unique qualified_name of the persona" ) @@ -28,18 +32,25 @@ class ApiTokenAttributes(AtlanObject): alias="access.token.lifespan", ) access_token: Optional[str] = Field( - description="The actual API token that can be used as a bearer token." + default=None, + description="The actual API token that can be used as a bearer token.", ) client_id: Optional[str] = Field( - description="Unique client identifier (GUID) of the API token." + default=None, + description="Unique client identifier (GUID) of the API token.", ) created_at: Optional[int] = Field( description="Epoch time, in milliseconds, at which the API token was created." ) - created_by: Optional[str] = Field(description="User who created the API token.") - description: Optional[str] = Field(description="Explanation of the API token.") + created_by: Optional[str] = Field( + default=None, description="User who created the API token." + ) + description: Optional[str] = Field( + default=None, description="Explanation of the API token." + ) display_name: Optional[str] = Field( - description="Human-readable name provided when creating the token." + default=None, + description="Human-readable name provided when creating the token.", ) personas: Optional[list[Any]] = Field( default_factory=list, @@ -49,7 +60,8 @@ class ApiTokenAttributes(AtlanObject): default_factory=set, description="Personas associated with the API token." ) purposes: Optional[Any] = Field( - description="Possible future placeholder for purposes associated with the token." + default=None, + description="Possible future placeholder for purposes associated with the token.", ) workspace_permissions: Optional[set[str]] = Field( default_factory=set, @@ -78,18 +90,22 @@ def check_embedded_objects(cls, values): return values guid: Optional[str] = Field( - description="Unique identifier (GUID) of the API token.", alias="id" + default=None, + description="Unique identifier (GUID) of the API token.", + alias="id", ) client_id: Optional[str] = Field( + default=None, description="Unique client identifier (GUID) of the API token.", alias="clientId", ) display_name: Optional[str] = Field( + default=None, description="Human-readable name provided when creating the token.", alias="displayName", ) attributes: Optional[ApiTokenAttributes] = Field( - description="Detailed characteristics of the API token." + default=None, description="Detailed characteristics of the API token." 
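# The Field changes above follow one pattern: defaults move to an explicit `default=...`
# and the per-field `alias="..."` arguments disappear. The sketch below shows how a
# camelCase alias generator on the model config makes those explicit aliases redundant;
# `ExampleEntity` and its Config are assumptions for illustration, not a copy of
# pyatlan.model.core.AtlanObject:
from typing import Any, Optional

from pydantic.v1 import BaseModel, Field


def to_camel(snake: str) -> str:
    first, *rest = snake.split("_")
    return first + "".join(part.capitalize() for part in rest)


class ExampleEntity(BaseModel):
    class Config:
        alias_generator = to_camel
        allow_population_by_field_name = True

    business_attributes: Optional[dict[str, Any]] = Field(default=None, description="")
    created_by: Optional[str] = Field(default=None, description="")


entity = ExampleEntity.parse_obj({"businessAttributes": {"owner": "jsmith"}})
print(entity.business_attributes)                     # {'owner': 'jsmith'}
print(entity.dict(by_alias=True, exclude_none=True))  # {'businessAttributes': {'owner': 'jsmith'}}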
) @root_validator(pre=True) @@ -107,17 +123,21 @@ def copy_values(cls, values): class ApiTokenRequest(AtlanObject): display_name: Optional[str] = Field( - description="Human-readable name provided when creating the token." + default=None, + description="Human-readable name provided when creating the token.", ) description: str = Field(default="", description="Explanation of the token.") personas: Optional[set[str]] = Field( + default=None, description="Deprecated (now unused): GUIDs of personas that are associated with the token.", ) persona_qualified_names: Optional[set[str]] = Field( + default=None, description="Unique qualified_names of personas that are associated with the token.", ) validity_seconds: Optional[int] = Field( - description="Length of time, in seconds, after which the token will expire and no longer be usable." + default=None, + description="Length of time, in seconds, after which the token will expire and no longer be usable.", ) @root_validator(pre=True) @@ -133,10 +153,14 @@ def set_max_validity(cls, values): class ApiTokenResponse(AtlanObject): - total_record: Optional[int] = Field(description="Total number of API tokens.") + total_record: Optional[int] = Field( + default=None, description="Total number of API tokens." + ) filter_record: Optional[int] = Field( - description="Number of API records that matched the specified filters." + default=None, + description="Number of API records that matched the specified filters.", ) records: Optional[list[ApiToken]] = Field( - description="Actual API tokens that matched the specified filters." + default=None, + description="Actual API tokens that matched the specified filters.", ) diff --git a/pyatlan/model/assets/__init__.py b/pyatlan/model/assets/__init__.py index 55a857b41..e26edbfb6 100644 --- a/pyatlan/model/assets/__init__.py +++ b/pyatlan/model/assets/__init__.py @@ -1,205 +1,430 @@ # Copyright 2022 Atlan Pte. Ltd. 
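# The rewritten `pyatlan/model/assets/__init__.py` below imports modules parent-first
# (Referenceable, then Asset, then Catalog, ...) so every superclass already exists when
# a child module loads, and `# isort: skip_file` keeps isort from re-alphabetising that
# order. The same ordering idea, sketched with the standard library's graphlib rather
# than the generator's own hierarchy graph (the four-class chain is just a sample):
from graphlib import TopologicalSorter

hierarchy = {  # child -> {parents}
    "Asset": {"Referenceable"},
    "Catalog": {"Asset"},
    "SQL": {"Catalog"},
    "Table": {"SQL"},
}
print(list(TopologicalSorter(hierarchy).static_order()))
# ['Referenceable', 'Asset', 'Catalog', 'SQL', 'Table']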
-from .asset00 import ( - SQL, - Airflow, - AirflowDag, - AirflowTask, - Asset, - AtlasGlossary, - AtlasGlossaryCategory, - AtlasGlossaryTerm, - Catalog, - Column, - ColumnProcess, - Database, - DataDomain, - DataMesh, - DataProduct, - DataQuality, - Dbt, - DbtMetric, - DbtModel, - DbtModelColumn, - DbtSource, - DbtTest, - File, - Folder, - Function, - Link, - MaterialisedView, - Matillion, - MatillionComponent, - MatillionGroup, - MatillionJob, - MatillionProject, - MCIncident, - MCMonitor, - Metric, - MonteCarlo, - Namespace, - Procedure, - Process, - Query, - Readme, - Referenceable, - Resource, - Schema, - SchemaRegistry, - SchemaRegistrySubject, - SnowflakeDynamicTable, - SnowflakePipe, - SnowflakeStream, - SnowflakeTag, - Soda, - SodaCheck, - Table, - TablePartition, - Tag, - View, - validate_single_required_field, -) -from .asset01 import DataSet -from .asset02 import TagAttachment -from .asset03 import Connection -from .asset05 import Badge -from .asset06 import AccessControl, AuthPolicy -from .asset07 import ProcessExecution -from .asset08 import AuthService -from .asset09 import Cloud -from .asset10 import Infrastructure -from .asset11 import BIProcess -from .asset12 import DbtProcess -from .asset13 import Persona -from .asset14 import Purpose -from .asset15 import Collection -from .asset17 import ObjectStore -from .asset19 import BI -from .asset20 import SaaS -from .asset23 import EventStore -from .asset24 import NoSQL -from .asset27 import Insight -from .asset28 import API -from .asset31 import Google -from .asset32 import Azure -from .asset33 import AWS -from .asset34 import DbtColumnProcess -from .asset35 import S3 -from .asset36 import ADLS -from .asset37 import GCS -from .asset40 import Preset -from .asset41 import Mode -from .asset42 import Sigma -from .asset43 import Tableau -from .asset44 import Looker -from .asset45 import Redash -from .asset46 import Sisense -from .asset47 import DataStudio -from .asset48 import Metabase -from .asset49 import QuickSight -from .asset50 import Thoughtspot -from .asset51 import PowerBI -from .asset52 import MicroStrategy -from .asset53 import Qlik -from .asset54 import Salesforce -from .asset55 import ReadmeTemplate -from .asset56 import Kafka -from .asset57 import DynamoDB -from .asset58 import MongoDB -from .asset59 import DbtTag -from .asset60 import APIPath, APISpec -from .asset61 import DataStudioAsset -from .asset62 import S3Bucket, S3Object -from .asset63 import ADLSAccount, ADLSContainer, ADLSObject -from .asset64 import GCSBucket, GCSObject -from .asset65 import PresetChart, PresetDashboard, PresetDataset, PresetWorkspace -from .asset66 import ModeChart, ModeCollection, ModeQuery, ModeReport, ModeWorkspace -from .asset67 import SigmaDataset, SigmaDatasetColumn -from .asset68 import SigmaDataElement, SigmaDataElementField, SigmaPage, SigmaWorkbook -from .asset69 import ( - TableauCalculatedField, - TableauDashboard, - TableauDatasource, - TableauDatasourceField, - TableauFlow, - TableauProject, - TableauSite, - TableauWorkbook, - TableauWorksheet, -) -from .asset70 import TableauMetric -from .asset71 import ( - LookerDashboard, - LookerExplore, - LookerField, - LookerFolder, - LookerLook, - LookerModel, - LookerProject, - LookerQuery, - LookerTile, - LookerView, -) -from .asset72 import RedashDashboard -from .asset73 import RedashQuery, RedashVisualization -from .asset74 import ( - SisenseDashboard, - SisenseDatamodel, - SisenseDatamodelTable, - SisenseFolder, - SisenseWidget, -) -from .asset75 import MetabaseCollection, 
MetabaseDashboard, MetabaseQuestion -from .asset76 import ( - QuickSightAnalysis, - QuickSightAnalysisVisual, - QuickSightDashboard, - QuickSightDashboardVisual, - QuickSightDataset, - QuickSightDatasetField, - QuickSightFolder, -) -from .asset77 import ThoughtspotDashlet, ThoughtspotLiveboard -from .asset78 import ThoughtspotAnswer -from .asset79 import ( - PowerBIColumn, - PowerBIDashboard, - PowerBIDataflow, - PowerBIDataset, - PowerBIDatasource, - PowerBIMeasure, - PowerBIPage, - PowerBIReport, - PowerBITable, - PowerBITile, - PowerBIWorkspace, -) -from .asset80 import ( - MicroStrategyAttribute, - MicroStrategyCube, - MicroStrategyDocument, - MicroStrategyDossier, - MicroStrategyFact, - MicroStrategyMetric, - MicroStrategyProject, - MicroStrategyReport, - MicroStrategyVisualization, -) -from .asset81 import QlikApp, QlikChart, QlikDataset, QlikSheet, QlikSpace -from .asset82 import ( - SalesforceDashboard, - SalesforceField, - SalesforceObject, - SalesforceOrganization, - SalesforceReport, -) -from .asset84 import MongoDBCollection, MongoDBDatabase -from .asset85 import DynamoDBSecondaryIndex -from .asset86 import ( - DynamoDBGlobalSecondaryIndex, - DynamoDBLocalSecondaryIndex, - DynamoDBTable, -) -from .asset87 import KafkaConsumerGroup, KafkaTopic -from .asset88 import QlikStream -from .asset89 import AzureEventHub -from .asset90 import AzureEventHubConsumerGroup +# isort: skip_file +from .referenceable import Referenceable +from .asset import Asset +from .data_set import DataSet +from .tag_attachment import TagAttachment +from .connection import Connection +from .process import Process +from .atlas_glossary_category import AtlasGlossaryCategory +from .badge import Badge +from .access_control import AccessControl +from .namespace import Namespace +from .catalog import Catalog +from .atlas_glossary import AtlasGlossary +from .auth_policy import AuthPolicy +from .process_execution import ProcessExecution +from .atlas_glossary_term import AtlasGlossaryTerm +from .auth_service import AuthService +from .cloud import Cloud +from .infrastructure import Infrastructure +from .b_i_process import BIProcess +from .dbt_process import DbtProcess +from .column_process import ColumnProcess +from .persona import Persona +from .purpose import Purpose +from .collection import Collection +from .folder import Folder +from .airflow import Airflow +from .object_store import ObjectStore +from .data_quality import DataQuality +from .b_i import BI +from .saa_s import SaaS +from .resource import Resource +from .data_mesh import DataMesh +from .s_q_l import SQL +from .event_store import EventStore +from .no_s_q_l import NoSQL +from .matillion import Matillion +from .dbt import Dbt +from .insight import Insight +from .a_p_i import API +from .tag import Tag +from .schema_registry import SchemaRegistry +from .google import Google +from .azure import Azure +from .a_w_s import AWS +from .dbt_column_process import DbtColumnProcess +from .airflow_dag import AirflowDag +from .airflow_task import AirflowTask +from .s3 import S3 +from .a_d_l_s import ADLS +from .g_c_s import GCS +from .monte_carlo import MonteCarlo +from .metric import Metric +from .soda import Soda +from .preset import Preset +from .mode import Mode +from .sigma import Sigma +from .tableau import Tableau +from .looker import Looker +from .redash import Redash +from .sisense import Sisense +from .data_studio import DataStudio +from .metabase import Metabase +from .quick_sight import QuickSight +from .thoughtspot import Thoughtspot +from .power_b_i 
import PowerBI +from .micro_strategy import MicroStrategy +from .qlik import Qlik +from .salesforce import Salesforce +from .readme_template import ReadmeTemplate +from .readme import Readme +from .file import File +from .link import Link +from .data_domain import DataDomain +from .data_product import DataProduct +from .table import Table +from .query import Query +from .schema import Schema +from .snowflake_pipe import SnowflakePipe +from .view import View +from .materialised_view import MaterialisedView +from .function import Function +from .table_partition import TablePartition +from .column import Column +from .snowflake_stream import SnowflakeStream +from .database import Database +from .procedure import Procedure +from .snowflake_tag import SnowflakeTag +from .kafka import Kafka +from .dynamo_d_b import DynamoDB +from .mongo_d_b import MongoDB +from .matillion_group import MatillionGroup +from .matillion_job import MatillionJob +from .matillion_project import MatillionProject +from .matillion_component import MatillionComponent +from .dbt_model_column import DbtModelColumn +from .dbt_tag import DbtTag +from .dbt_test import DbtTest +from .dbt_model import DbtModel +from .dbt_metric import DbtMetric +from .dbt_source import DbtSource +from .a_p_i_spec import APISpec +from .a_p_i_path import APIPath +from .schema_registry_subject import SchemaRegistrySubject +from .data_studio_asset import DataStudioAsset +from .s3_bucket import S3Bucket +from .s3_object import S3Object +from .a_d_l_s_account import ADLSAccount +from .a_d_l_s_container import ADLSContainer +from .a_d_l_s_object import ADLSObject +from .g_c_s_object import GCSObject +from .g_c_s_bucket import GCSBucket +from .m_c_incident import MCIncident +from .m_c_monitor import MCMonitor +from .soda_check import SodaCheck +from .preset_chart import PresetChart +from .preset_dataset import PresetDataset +from .preset_dashboard import PresetDashboard +from .preset_workspace import PresetWorkspace +from .mode_report import ModeReport +from .mode_query import ModeQuery +from .mode_chart import ModeChart +from .mode_workspace import ModeWorkspace +from .mode_collection import ModeCollection +from .sigma_dataset_column import SigmaDatasetColumn +from .sigma_dataset import SigmaDataset +from .sigma_workbook import SigmaWorkbook +from .sigma_data_element_field import SigmaDataElementField +from .sigma_page import SigmaPage +from .sigma_data_element import SigmaDataElement +from .tableau_workbook import TableauWorkbook +from .tableau_datasource_field import TableauDatasourceField +from .tableau_calculated_field import TableauCalculatedField +from .tableau_project import TableauProject +from .tableau_metric import TableauMetric +from .tableau_site import TableauSite +from .tableau_datasource import TableauDatasource +from .tableau_dashboard import TableauDashboard +from .tableau_flow import TableauFlow +from .tableau_worksheet import TableauWorksheet +from .looker_look import LookerLook +from .looker_dashboard import LookerDashboard +from .looker_folder import LookerFolder +from .looker_tile import LookerTile +from .looker_model import LookerModel +from .looker_explore import LookerExplore +from .looker_project import LookerProject +from .looker_query import LookerQuery +from .looker_field import LookerField +from .looker_view import LookerView +from .redash_dashboard import RedashDashboard +from .redash_query import RedashQuery +from .redash_visualization import RedashVisualization +from .sisense_folder import SisenseFolder +from 
.sisense_widget import SisenseWidget +from .sisense_datamodel import SisenseDatamodel +from .sisense_datamodel_table import SisenseDatamodelTable +from .sisense_dashboard import SisenseDashboard +from .metabase_question import MetabaseQuestion +from .metabase_collection import MetabaseCollection +from .metabase_dashboard import MetabaseDashboard +from .quick_sight_folder import QuickSightFolder +from .quick_sight_dashboard_visual import QuickSightDashboardVisual +from .quick_sight_analysis_visual import QuickSightAnalysisVisual +from .quick_sight_dataset_field import QuickSightDatasetField +from .quick_sight_analysis import QuickSightAnalysis +from .quick_sight_dashboard import QuickSightDashboard +from .quick_sight_dataset import QuickSightDataset +from .thoughtspot_liveboard import ThoughtspotLiveboard +from .thoughtspot_dashlet import ThoughtspotDashlet +from .thoughtspot_answer import ThoughtspotAnswer +from .power_b_i_report import PowerBIReport +from .power_b_i_measure import PowerBIMeasure +from .power_b_i_column import PowerBIColumn +from .power_b_i_table import PowerBITable +from .power_b_i_tile import PowerBITile +from .power_b_i_datasource import PowerBIDatasource +from .power_b_i_workspace import PowerBIWorkspace +from .power_b_i_dataset import PowerBIDataset +from .power_b_i_dashboard import PowerBIDashboard +from .power_b_i_dataflow import PowerBIDataflow +from .power_b_i_page import PowerBIPage +from .micro_strategy_report import MicroStrategyReport +from .micro_strategy_project import MicroStrategyProject +from .micro_strategy_metric import MicroStrategyMetric +from .micro_strategy_cube import MicroStrategyCube +from .micro_strategy_dossier import MicroStrategyDossier +from .micro_strategy_fact import MicroStrategyFact +from .micro_strategy_document import MicroStrategyDocument +from .micro_strategy_attribute import MicroStrategyAttribute +from .micro_strategy_visualization import MicroStrategyVisualization +from .qlik_space import QlikSpace +from .qlik_app import QlikApp +from .qlik_chart import QlikChart +from .qlik_dataset import QlikDataset +from .qlik_sheet import QlikSheet +from .salesforce_object import SalesforceObject +from .salesforce_field import SalesforceField +from .salesforce_organization import SalesforceOrganization +from .salesforce_dashboard import SalesforceDashboard +from .salesforce_report import SalesforceReport +from .snowflake_dynamic_table import SnowflakeDynamicTable +from .mongo_d_b_collection import MongoDBCollection +from .dynamo_d_b_secondary_index import DynamoDBSecondaryIndex +from .dynamo_dbtable import DynamoDBTable +from .mongo_d_b_database import MongoDBDatabase +from .kafka_topic import KafkaTopic +from .kafka_consumer_group import KafkaConsumerGroup +from .qlik_stream import QlikStream +from .dynamo_d_b_local_secondary_index import DynamoDBLocalSecondaryIndex +from .dynamo_d_b_global_secondary_index import DynamoDBGlobalSecondaryIndex +from .azure_event_hub import AzureEventHub +from .azure_event_hub_consumer_group import AzureEventHubConsumerGroup + + +# Update asset forward references: +localns = locals() +Referenceable.Attributes.update_forward_refs(**localns) +Asset.Attributes.update_forward_refs(**localns) +DataSet.Attributes.update_forward_refs(**localns) +TagAttachment.Attributes.update_forward_refs(**localns) +Connection.Attributes.update_forward_refs(**localns) +Process.Attributes.update_forward_refs(**localns) +AtlasGlossaryCategory.Attributes.update_forward_refs(**localns) +Badge.Attributes.update_forward_refs(**localns) 
+AccessControl.Attributes.update_forward_refs(**localns) +Namespace.Attributes.update_forward_refs(**localns) +Catalog.Attributes.update_forward_refs(**localns) +AtlasGlossary.Attributes.update_forward_refs(**localns) +AuthPolicy.Attributes.update_forward_refs(**localns) +ProcessExecution.Attributes.update_forward_refs(**localns) +AtlasGlossaryTerm.Attributes.update_forward_refs(**localns) +AuthService.Attributes.update_forward_refs(**localns) +Cloud.Attributes.update_forward_refs(**localns) +Infrastructure.Attributes.update_forward_refs(**localns) +BIProcess.Attributes.update_forward_refs(**localns) +DbtProcess.Attributes.update_forward_refs(**localns) +ColumnProcess.Attributes.update_forward_refs(**localns) +Persona.Attributes.update_forward_refs(**localns) +Purpose.Attributes.update_forward_refs(**localns) +Collection.Attributes.update_forward_refs(**localns) +Folder.Attributes.update_forward_refs(**localns) +Airflow.Attributes.update_forward_refs(**localns) +ObjectStore.Attributes.update_forward_refs(**localns) +DataQuality.Attributes.update_forward_refs(**localns) +BI.Attributes.update_forward_refs(**localns) +SaaS.Attributes.update_forward_refs(**localns) +Resource.Attributes.update_forward_refs(**localns) +DataMesh.Attributes.update_forward_refs(**localns) +SQL.Attributes.update_forward_refs(**localns) +EventStore.Attributes.update_forward_refs(**localns) +NoSQL.Attributes.update_forward_refs(**localns) +Matillion.Attributes.update_forward_refs(**localns) +Dbt.Attributes.update_forward_refs(**localns) +Insight.Attributes.update_forward_refs(**localns) +API.Attributes.update_forward_refs(**localns) +Tag.Attributes.update_forward_refs(**localns) +SchemaRegistry.Attributes.update_forward_refs(**localns) +Google.Attributes.update_forward_refs(**localns) +Azure.Attributes.update_forward_refs(**localns) +AWS.Attributes.update_forward_refs(**localns) +DbtColumnProcess.Attributes.update_forward_refs(**localns) +AirflowDag.Attributes.update_forward_refs(**localns) +AirflowTask.Attributes.update_forward_refs(**localns) +S3.Attributes.update_forward_refs(**localns) +ADLS.Attributes.update_forward_refs(**localns) +GCS.Attributes.update_forward_refs(**localns) +MonteCarlo.Attributes.update_forward_refs(**localns) +Metric.Attributes.update_forward_refs(**localns) +Soda.Attributes.update_forward_refs(**localns) +Preset.Attributes.update_forward_refs(**localns) +Mode.Attributes.update_forward_refs(**localns) +Sigma.Attributes.update_forward_refs(**localns) +Tableau.Attributes.update_forward_refs(**localns) +Looker.Attributes.update_forward_refs(**localns) +Redash.Attributes.update_forward_refs(**localns) +Sisense.Attributes.update_forward_refs(**localns) +DataStudio.Attributes.update_forward_refs(**localns) +Metabase.Attributes.update_forward_refs(**localns) +QuickSight.Attributes.update_forward_refs(**localns) +Thoughtspot.Attributes.update_forward_refs(**localns) +PowerBI.Attributes.update_forward_refs(**localns) +MicroStrategy.Attributes.update_forward_refs(**localns) +Qlik.Attributes.update_forward_refs(**localns) +Salesforce.Attributes.update_forward_refs(**localns) +ReadmeTemplate.Attributes.update_forward_refs(**localns) +Readme.Attributes.update_forward_refs(**localns) +File.Attributes.update_forward_refs(**localns) +Link.Attributes.update_forward_refs(**localns) +DataDomain.Attributes.update_forward_refs(**localns) +DataProduct.Attributes.update_forward_refs(**localns) +Table.Attributes.update_forward_refs(**localns) +Query.Attributes.update_forward_refs(**localns) 
+Schema.Attributes.update_forward_refs(**localns) +SnowflakePipe.Attributes.update_forward_refs(**localns) +View.Attributes.update_forward_refs(**localns) +MaterialisedView.Attributes.update_forward_refs(**localns) +Function.Attributes.update_forward_refs(**localns) +TablePartition.Attributes.update_forward_refs(**localns) +Column.Attributes.update_forward_refs(**localns) +SnowflakeStream.Attributes.update_forward_refs(**localns) +Database.Attributes.update_forward_refs(**localns) +Procedure.Attributes.update_forward_refs(**localns) +SnowflakeTag.Attributes.update_forward_refs(**localns) +Kafka.Attributes.update_forward_refs(**localns) +DynamoDB.Attributes.update_forward_refs(**localns) +MongoDB.Attributes.update_forward_refs(**localns) +MatillionGroup.Attributes.update_forward_refs(**localns) +MatillionJob.Attributes.update_forward_refs(**localns) +MatillionProject.Attributes.update_forward_refs(**localns) +MatillionComponent.Attributes.update_forward_refs(**localns) +DbtModelColumn.Attributes.update_forward_refs(**localns) +DbtTag.Attributes.update_forward_refs(**localns) +DbtTest.Attributes.update_forward_refs(**localns) +DbtModel.Attributes.update_forward_refs(**localns) +DbtMetric.Attributes.update_forward_refs(**localns) +DbtSource.Attributes.update_forward_refs(**localns) +APISpec.Attributes.update_forward_refs(**localns) +APIPath.Attributes.update_forward_refs(**localns) +SchemaRegistrySubject.Attributes.update_forward_refs(**localns) +DataStudioAsset.Attributes.update_forward_refs(**localns) +S3Bucket.Attributes.update_forward_refs(**localns) +S3Object.Attributes.update_forward_refs(**localns) +ADLSAccount.Attributes.update_forward_refs(**localns) +ADLSContainer.Attributes.update_forward_refs(**localns) +ADLSObject.Attributes.update_forward_refs(**localns) +GCSObject.Attributes.update_forward_refs(**localns) +GCSBucket.Attributes.update_forward_refs(**localns) +MCIncident.Attributes.update_forward_refs(**localns) +MCMonitor.Attributes.update_forward_refs(**localns) +SodaCheck.Attributes.update_forward_refs(**localns) +PresetChart.Attributes.update_forward_refs(**localns) +PresetDataset.Attributes.update_forward_refs(**localns) +PresetDashboard.Attributes.update_forward_refs(**localns) +PresetWorkspace.Attributes.update_forward_refs(**localns) +ModeReport.Attributes.update_forward_refs(**localns) +ModeQuery.Attributes.update_forward_refs(**localns) +ModeChart.Attributes.update_forward_refs(**localns) +ModeWorkspace.Attributes.update_forward_refs(**localns) +ModeCollection.Attributes.update_forward_refs(**localns) +SigmaDatasetColumn.Attributes.update_forward_refs(**localns) +SigmaDataset.Attributes.update_forward_refs(**localns) +SigmaWorkbook.Attributes.update_forward_refs(**localns) +SigmaDataElementField.Attributes.update_forward_refs(**localns) +SigmaPage.Attributes.update_forward_refs(**localns) +SigmaDataElement.Attributes.update_forward_refs(**localns) +TableauWorkbook.Attributes.update_forward_refs(**localns) +TableauDatasourceField.Attributes.update_forward_refs(**localns) +TableauCalculatedField.Attributes.update_forward_refs(**localns) +TableauProject.Attributes.update_forward_refs(**localns) +TableauMetric.Attributes.update_forward_refs(**localns) +TableauSite.Attributes.update_forward_refs(**localns) +TableauDatasource.Attributes.update_forward_refs(**localns) +TableauDashboard.Attributes.update_forward_refs(**localns) +TableauFlow.Attributes.update_forward_refs(**localns) +TableauWorksheet.Attributes.update_forward_refs(**localns) 
+LookerLook.Attributes.update_forward_refs(**localns) +LookerDashboard.Attributes.update_forward_refs(**localns) +LookerFolder.Attributes.update_forward_refs(**localns) +LookerTile.Attributes.update_forward_refs(**localns) +LookerModel.Attributes.update_forward_refs(**localns) +LookerExplore.Attributes.update_forward_refs(**localns) +LookerProject.Attributes.update_forward_refs(**localns) +LookerQuery.Attributes.update_forward_refs(**localns) +LookerField.Attributes.update_forward_refs(**localns) +LookerView.Attributes.update_forward_refs(**localns) +RedashDashboard.Attributes.update_forward_refs(**localns) +RedashQuery.Attributes.update_forward_refs(**localns) +RedashVisualization.Attributes.update_forward_refs(**localns) +SisenseFolder.Attributes.update_forward_refs(**localns) +SisenseWidget.Attributes.update_forward_refs(**localns) +SisenseDatamodel.Attributes.update_forward_refs(**localns) +SisenseDatamodelTable.Attributes.update_forward_refs(**localns) +SisenseDashboard.Attributes.update_forward_refs(**localns) +MetabaseQuestion.Attributes.update_forward_refs(**localns) +MetabaseCollection.Attributes.update_forward_refs(**localns) +MetabaseDashboard.Attributes.update_forward_refs(**localns) +QuickSightFolder.Attributes.update_forward_refs(**localns) +QuickSightDashboardVisual.Attributes.update_forward_refs(**localns) +QuickSightAnalysisVisual.Attributes.update_forward_refs(**localns) +QuickSightDatasetField.Attributes.update_forward_refs(**localns) +QuickSightAnalysis.Attributes.update_forward_refs(**localns) +QuickSightDashboard.Attributes.update_forward_refs(**localns) +QuickSightDataset.Attributes.update_forward_refs(**localns) +ThoughtspotLiveboard.Attributes.update_forward_refs(**localns) +ThoughtspotDashlet.Attributes.update_forward_refs(**localns) +ThoughtspotAnswer.Attributes.update_forward_refs(**localns) +PowerBIReport.Attributes.update_forward_refs(**localns) +PowerBIMeasure.Attributes.update_forward_refs(**localns) +PowerBIColumn.Attributes.update_forward_refs(**localns) +PowerBITable.Attributes.update_forward_refs(**localns) +PowerBITile.Attributes.update_forward_refs(**localns) +PowerBIDatasource.Attributes.update_forward_refs(**localns) +PowerBIWorkspace.Attributes.update_forward_refs(**localns) +PowerBIDataset.Attributes.update_forward_refs(**localns) +PowerBIDashboard.Attributes.update_forward_refs(**localns) +PowerBIDataflow.Attributes.update_forward_refs(**localns) +PowerBIPage.Attributes.update_forward_refs(**localns) +MicroStrategyReport.Attributes.update_forward_refs(**localns) +MicroStrategyProject.Attributes.update_forward_refs(**localns) +MicroStrategyMetric.Attributes.update_forward_refs(**localns) +MicroStrategyCube.Attributes.update_forward_refs(**localns) +MicroStrategyDossier.Attributes.update_forward_refs(**localns) +MicroStrategyFact.Attributes.update_forward_refs(**localns) +MicroStrategyDocument.Attributes.update_forward_refs(**localns) +MicroStrategyAttribute.Attributes.update_forward_refs(**localns) +MicroStrategyVisualization.Attributes.update_forward_refs(**localns) +QlikSpace.Attributes.update_forward_refs(**localns) +QlikApp.Attributes.update_forward_refs(**localns) +QlikChart.Attributes.update_forward_refs(**localns) +QlikDataset.Attributes.update_forward_refs(**localns) +QlikSheet.Attributes.update_forward_refs(**localns) +SalesforceObject.Attributes.update_forward_refs(**localns) +SalesforceField.Attributes.update_forward_refs(**localns) +SalesforceOrganization.Attributes.update_forward_refs(**localns) 
+SalesforceDashboard.Attributes.update_forward_refs(**localns) +SalesforceReport.Attributes.update_forward_refs(**localns) +SnowflakeDynamicTable.Attributes.update_forward_refs(**localns) +MongoDBCollection.Attributes.update_forward_refs(**localns) +DynamoDBSecondaryIndex.Attributes.update_forward_refs(**localns) +DynamoDBTable.Attributes.update_forward_refs(**localns) +MongoDBDatabase.Attributes.update_forward_refs(**localns) +KafkaTopic.Attributes.update_forward_refs(**localns) +KafkaConsumerGroup.Attributes.update_forward_refs(**localns) +QlikStream.Attributes.update_forward_refs(**localns) +DynamoDBLocalSecondaryIndex.Attributes.update_forward_refs(**localns) +DynamoDBGlobalSecondaryIndex.Attributes.update_forward_refs(**localns) +AzureEventHub.Attributes.update_forward_refs(**localns) +AzureEventHubConsumerGroup.Attributes.update_forward_refs(**localns) diff --git a/pyatlan/model/assets/asset36.py b/pyatlan/model/assets/a_d_l_s.py similarity index 86% rename from pyatlan/model/assets/asset36.py rename to pyatlan/model/assets/a_d_l_s.py index 4c7e1275a..235e92fd7 100644 --- a/pyatlan/model/assets/asset36.py +++ b/pyatlan/model/assets/a_d_l_s.py @@ -6,18 +6,18 @@ from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.fields.atlan_fields import KeywordField, KeywordTextField from pyatlan.model.structs import AzureTag -from .asset17 import ObjectStore +from .object_store import ObjectStore class ADLS(ObjectStore): """Description""" - type_name: str = Field("ADLS", allow_mutation=False) + type_name: str = Field(default="ADLS", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -132,27 +132,16 @@ def azure_tags(self, azure_tags: Optional[list[AzureTag]]): self.attributes.azure_tags = azure_tags class Attributes(ObjectStore.Attributes): - adls_account_qualified_name: Optional[str] = Field( - None, description="", alias="adlsAccountQualifiedName" - ) - azure_resource_id: Optional[str] = Field( - None, description="", alias="azureResourceId" - ) - azure_location: Optional[str] = Field( - None, description="", alias="azureLocation" - ) + adls_account_qualified_name: Optional[str] = Field(default=None, description="") + azure_resource_id: Optional[str] = Field(default=None, description="") + azure_location: Optional[str] = Field(default=None, description="") adls_account_secondary_location: Optional[str] = Field( - None, description="", alias="adlsAccountSecondaryLocation" - ) - azure_tags: Optional[list[AzureTag]] = Field( - None, description="", alias="azureTags" + default=None, description="" ) + azure_tags: Optional[list[AzureTag]] = Field(default=None, description="") attributes: "ADLS.Attributes" = Field( default_factory=lambda: ADLS.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) - - -ADLS.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/a_d_l_s_account.py b/pyatlan/model/assets/a_d_l_s_account.py new file mode 100644 index 000000000..d1a2d1085 --- /dev/null +++ b/pyatlan/model/assets/a_d_l_s_account.py @@ -0,0 +1,350 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
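A note on the long run of update_forward_refs(**localns) calls above: pydantic v1 models whose fields refer to other asset classes only by string name cannot resolve those names until every class has been defined, so the generated module finishes by passing one shared local namespace to each Attributes class. A minimal, self-contained sketch of the same mechanism follows (illustrative names only, assuming pydantic v2 is installed so that pydantic.v1 is importable):

from typing import Optional

from pydantic.v1 import BaseModel


class Parent(BaseModel):
    # "Child" is only a string here; it cannot be resolved at class-creation time.
    child: Optional["Child"] = None


class Child(BaseModel):
    name: Optional[str] = None


# Resolve the forward reference once both classes exist, mirroring the shared
# localns passed to every generated Attributes class above.
localns = {"Parent": Parent, "Child": Child}
Parent.update_forward_refs(**localns)

print(Parent(child=Child(name="example")))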
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.enums import ( + ADLSAccessTier, + ADLSAccountStatus, + ADLSEncryptionTypes, + ADLSPerformance, + ADLSProvisionState, + ADLSReplicationType, + ADLSStorageKind, + AtlanConnectorType, +) +from pyatlan.model.fields.atlan_fields import ( + KeywordField, + KeywordTextField, + RelationField, +) +from pyatlan.utils import init_guid, validate_required_fields + +from .a_d_l_s import ADLS + + +class ADLSAccount(ADLS): + """Description""" + + @classmethod + # @validate_arguments() + @init_guid + def create(cls, *, name: str, connection_qualified_name: str) -> ADLSAccount: + validate_required_fields( + ["name", "connection_qualified_name"], [name, connection_qualified_name] + ) + attributes = ADLSAccount.Attributes.create( + name=name, connection_qualified_name=connection_qualified_name + ) + return cls(attributes=attributes) + + type_name: str = Field(default="ADLSAccount", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "ADLSAccount": + raise ValueError("must be ADLSAccount") + return v + + def __setattr__(self, name, value): + if name in ADLSAccount._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + ADLS_E_TAG: ClassVar[KeywordField] = KeywordField("adlsETag", "adlsETag") + """ + Entity tag for the asset. An entity tag is a hash of the object and represents changes to the contents of an object only, not its metadata. + """ # noqa: E501 + ADLS_ENCRYPTION_TYPE: ClassVar[KeywordField] = KeywordField( + "adlsEncryptionType", "adlsEncryptionType" + ) + """ + Type of encryption for this account. + """ + ADLS_ACCOUNT_RESOURCE_GROUP: ClassVar[KeywordTextField] = KeywordTextField( + "adlsAccountResourceGroup", + "adlsAccountResourceGroup.keyword", + "adlsAccountResourceGroup", + ) + """ + Resource group for this account. + """ + ADLS_ACCOUNT_SUBSCRIPTION: ClassVar[KeywordTextField] = KeywordTextField( + "adlsAccountSubscription", + "adlsAccountSubscription.keyword", + "adlsAccountSubscription", + ) + """ + Subscription for this account. + """ + ADLS_ACCOUNT_PERFORMANCE: ClassVar[KeywordField] = KeywordField( + "adlsAccountPerformance", "adlsAccountPerformance" + ) + """ + Performance of this account. + """ + ADLS_ACCOUNT_REPLICATION: ClassVar[KeywordField] = KeywordField( + "adlsAccountReplication", "adlsAccountReplication" + ) + """ + Replication of this account. + """ + ADLS_ACCOUNT_KIND: ClassVar[KeywordField] = KeywordField( + "adlsAccountKind", "adlsAccountKind" + ) + """ + Kind of this account. + """ + ADLS_PRIMARY_DISK_STATE: ClassVar[KeywordField] = KeywordField( + "adlsPrimaryDiskState", "adlsPrimaryDiskState" + ) + """ + Primary disk state of this account. + """ + ADLS_ACCOUNT_PROVISION_STATE: ClassVar[KeywordField] = KeywordField( + "adlsAccountProvisionState", "adlsAccountProvisionState" + ) + """ + Provision state of this account. + """ + ADLS_ACCOUNT_ACCESS_TIER: ClassVar[KeywordField] = KeywordField( + "adlsAccountAccessTier", "adlsAccountAccessTier" + ) + """ + Access tier of this account. 
+ """ + + ADLS_CONTAINERS: ClassVar[RelationField] = RelationField("adlsContainers") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "adls_e_tag", + "adls_encryption_type", + "adls_account_resource_group", + "adls_account_subscription", + "adls_account_performance", + "adls_account_replication", + "adls_account_kind", + "adls_primary_disk_state", + "adls_account_provision_state", + "adls_account_access_tier", + "adls_containers", + ] + + @property + def adls_e_tag(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.adls_e_tag + + @adls_e_tag.setter + def adls_e_tag(self, adls_e_tag: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_e_tag = adls_e_tag + + @property + def adls_encryption_type(self) -> Optional[ADLSEncryptionTypes]: + return None if self.attributes is None else self.attributes.adls_encryption_type + + @adls_encryption_type.setter + def adls_encryption_type(self, adls_encryption_type: Optional[ADLSEncryptionTypes]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_encryption_type = adls_encryption_type + + @property + def adls_account_resource_group(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.adls_account_resource_group + ) + + @adls_account_resource_group.setter + def adls_account_resource_group(self, adls_account_resource_group: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_account_resource_group = adls_account_resource_group + + @property + def adls_account_subscription(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.adls_account_subscription + ) + + @adls_account_subscription.setter + def adls_account_subscription(self, adls_account_subscription: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_account_subscription = adls_account_subscription + + @property + def adls_account_performance(self) -> Optional[ADLSPerformance]: + return ( + None + if self.attributes is None + else self.attributes.adls_account_performance + ) + + @adls_account_performance.setter + def adls_account_performance( + self, adls_account_performance: Optional[ADLSPerformance] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_account_performance = adls_account_performance + + @property + def adls_account_replication(self) -> Optional[ADLSReplicationType]: + return ( + None + if self.attributes is None + else self.attributes.adls_account_replication + ) + + @adls_account_replication.setter + def adls_account_replication( + self, adls_account_replication: Optional[ADLSReplicationType] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_account_replication = adls_account_replication + + @property + def adls_account_kind(self) -> Optional[ADLSStorageKind]: + return None if self.attributes is None else self.attributes.adls_account_kind + + @adls_account_kind.setter + def adls_account_kind(self, adls_account_kind: Optional[ADLSStorageKind]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_account_kind = adls_account_kind + + @property + def adls_primary_disk_state(self) -> Optional[ADLSAccountStatus]: + return ( + None if self.attributes is None else self.attributes.adls_primary_disk_state + ) + + 
@adls_primary_disk_state.setter + def adls_primary_disk_state( + self, adls_primary_disk_state: Optional[ADLSAccountStatus] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_primary_disk_state = adls_primary_disk_state + + @property + def adls_account_provision_state(self) -> Optional[ADLSProvisionState]: + return ( + None + if self.attributes is None + else self.attributes.adls_account_provision_state + ) + + @adls_account_provision_state.setter + def adls_account_provision_state( + self, adls_account_provision_state: Optional[ADLSProvisionState] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_account_provision_state = adls_account_provision_state + + @property + def adls_account_access_tier(self) -> Optional[ADLSAccessTier]: + return ( + None + if self.attributes is None + else self.attributes.adls_account_access_tier + ) + + @adls_account_access_tier.setter + def adls_account_access_tier( + self, adls_account_access_tier: Optional[ADLSAccessTier] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_account_access_tier = adls_account_access_tier + + @property + def adls_containers(self) -> Optional[list[ADLSContainer]]: + return None if self.attributes is None else self.attributes.adls_containers + + @adls_containers.setter + def adls_containers(self, adls_containers: Optional[list[ADLSContainer]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_containers = adls_containers + + class Attributes(ADLS.Attributes): + adls_e_tag: Optional[str] = Field(default=None, description="") + adls_encryption_type: Optional[ADLSEncryptionTypes] = Field( + default=None, description="" + ) + adls_account_resource_group: Optional[str] = Field(default=None, description="") + adls_account_subscription: Optional[str] = Field(default=None, description="") + adls_account_performance: Optional[ADLSPerformance] = Field( + default=None, description="" + ) + adls_account_replication: Optional[ADLSReplicationType] = Field( + default=None, description="" + ) + adls_account_kind: Optional[ADLSStorageKind] = Field( + default=None, description="" + ) + adls_primary_disk_state: Optional[ADLSAccountStatus] = Field( + default=None, description="" + ) + adls_account_provision_state: Optional[ADLSProvisionState] = Field( + default=None, description="" + ) + adls_account_access_tier: Optional[ADLSAccessTier] = Field( + default=None, description="" + ) + adls_containers: Optional[list[ADLSContainer]] = Field( + default=None, description="" + ) # relationship + + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, *, name: str, connection_qualified_name: str + ) -> ADLSAccount.Attributes: + validate_required_fields( + ["name", "connection_qualified_name"], [name, connection_qualified_name] + ) + + # Split the connection_qualified_name to extract necessary information + fields = connection_qualified_name.split("/") + if len(fields) != 3: + raise ValueError("Invalid connection_qualified_name") + + try: + connector_type = AtlanConnectorType(fields[1]) # type:ignore + except ValueError as e: + raise ValueError("Invalid connection_qualified_name") from e + + return ADLSAccount.Attributes( + name=name, + qualified_name=f"{connection_qualified_name}/{name}", + connection_qualified_name=connection_qualified_name, + connector_name=connector_type.value, + ) + + attributes: "ADLSAccount.Attributes" = Field( + default_factory=lambda: 
ADLSAccount.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .a_d_l_s_container import ADLSContainer # noqa diff --git a/pyatlan/model/assets/a_d_l_s_container.py b/pyatlan/model/assets/a_d_l_s_container.py new file mode 100644 index 000000000..fbc1526b6 --- /dev/null +++ b/pyatlan/model/assets/a_d_l_s_container.py @@ -0,0 +1,282 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.enums import ADLSLeaseState, ADLSLeaseStatus, AtlanConnectorType +from pyatlan.model.fields.atlan_fields import ( + BooleanField, + KeywordField, + KeywordTextField, + NumericField, + RelationField, +) +from pyatlan.utils import init_guid, validate_required_fields + +from .a_d_l_s import ADLS + + +class ADLSContainer(ADLS): + """Description""" + + @classmethod + # @validate_arguments() + @init_guid + def create(cls, *, name: str, adls_account_qualified_name: str) -> ADLSContainer: + validate_required_fields( + ["name", "adls_account_qualified_name"], [name, adls_account_qualified_name] + ) + attributes = ADLSContainer.Attributes.create( + name=name, adls_account_qualified_name=adls_account_qualified_name + ) + return cls(attributes=attributes) + + type_name: str = Field(default="ADLSContainer", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "ADLSContainer": + raise ValueError("must be ADLSContainer") + return v + + def __setattr__(self, name, value): + if name in ADLSContainer._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + ADLS_CONTAINER_URL: ClassVar[KeywordTextField] = KeywordTextField( + "adlsContainerUrl", "adlsContainerUrl.keyword", "adlsContainerUrl" + ) + """ + URL of this container. + """ + ADLS_CONTAINER_LEASE_STATE: ClassVar[KeywordField] = KeywordField( + "adlsContainerLeaseState", "adlsContainerLeaseState" + ) + """ + Lease state of this container. + """ + ADLS_CONTAINER_LEASE_STATUS: ClassVar[KeywordField] = KeywordField( + "adlsContainerLeaseStatus", "adlsContainerLeaseStatus" + ) + """ + Lease status of this container. + """ + ADLS_CONTAINER_ENCRYPTION_SCOPE: ClassVar[KeywordField] = KeywordField( + "adlsContainerEncryptionScope", "adlsContainerEncryptionScope" + ) + """ + Encryption scope of this container. + """ + ADLS_CONTAINER_VERSION_LEVEL_IMMUTABILITY_SUPPORT: ClassVar[ + BooleanField + ] = BooleanField( + "adlsContainerVersionLevelImmutabilitySupport", + "adlsContainerVersionLevelImmutabilitySupport", + ) + """ + Whether this container supports version-level immutability (true) or not (false). + """ + ADLS_OBJECT_COUNT: ClassVar[NumericField] = NumericField( + "adlsObjectCount", "adlsObjectCount" + ) + """ + Number of objects that exist within this container. 
+ """ + + ADLS_OBJECTS: ClassVar[RelationField] = RelationField("adlsObjects") + """ + TBC + """ + ADLS_ACCOUNT: ClassVar[RelationField] = RelationField("adlsAccount") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "adls_container_url", + "adls_container_lease_state", + "adls_container_lease_status", + "adls_container_encryption_scope", + "adls_container_version_level_immutability_support", + "adls_object_count", + "adls_objects", + "adls_account", + ] + + @property + def adls_container_url(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.adls_container_url + + @adls_container_url.setter + def adls_container_url(self, adls_container_url: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_container_url = adls_container_url + + @property + def adls_container_lease_state(self) -> Optional[ADLSLeaseState]: + return ( + None + if self.attributes is None + else self.attributes.adls_container_lease_state + ) + + @adls_container_lease_state.setter + def adls_container_lease_state( + self, adls_container_lease_state: Optional[ADLSLeaseState] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_container_lease_state = adls_container_lease_state + + @property + def adls_container_lease_status(self) -> Optional[ADLSLeaseStatus]: + return ( + None + if self.attributes is None + else self.attributes.adls_container_lease_status + ) + + @adls_container_lease_status.setter + def adls_container_lease_status( + self, adls_container_lease_status: Optional[ADLSLeaseStatus] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_container_lease_status = adls_container_lease_status + + @property + def adls_container_encryption_scope(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.adls_container_encryption_scope + ) + + @adls_container_encryption_scope.setter + def adls_container_encryption_scope( + self, adls_container_encryption_scope: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_container_encryption_scope = ( + adls_container_encryption_scope + ) + + @property + def adls_container_version_level_immutability_support(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.adls_container_version_level_immutability_support + ) + + @adls_container_version_level_immutability_support.setter + def adls_container_version_level_immutability_support( + self, adls_container_version_level_immutability_support: Optional[bool] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_container_version_level_immutability_support = ( + adls_container_version_level_immutability_support + ) + + @property + def adls_object_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.adls_object_count + + @adls_object_count.setter + def adls_object_count(self, adls_object_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_object_count = adls_object_count + + @property + def adls_objects(self) -> Optional[list[ADLSObject]]: + return None if self.attributes is None else self.attributes.adls_objects + + @adls_objects.setter + def adls_objects(self, adls_objects: Optional[list[ADLSObject]]): + if self.attributes is None: + self.attributes = self.Attributes() + 
self.attributes.adls_objects = adls_objects + + @property + def adls_account(self) -> Optional[ADLSAccount]: + return None if self.attributes is None else self.attributes.adls_account + + @adls_account.setter + def adls_account(self, adls_account: Optional[ADLSAccount]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_account = adls_account + + class Attributes(ADLS.Attributes): + adls_container_url: Optional[str] = Field(default=None, description="") + adls_container_lease_state: Optional[ADLSLeaseState] = Field( + default=None, description="" + ) + adls_container_lease_status: Optional[ADLSLeaseStatus] = Field( + default=None, description="" + ) + adls_container_encryption_scope: Optional[str] = Field( + default=None, description="" + ) + adls_container_version_level_immutability_support: Optional[bool] = Field( + default=None, description="" + ) + adls_object_count: Optional[int] = Field(default=None, description="") + adls_objects: Optional[list[ADLSObject]] = Field( + default=None, description="" + ) # relationship + adls_account: Optional[ADLSAccount] = Field( + default=None, description="" + ) # relationship + + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, *, name: str, adls_account_qualified_name: str + ) -> ADLSContainer.Attributes: + validate_required_fields( + ["name", "adls_account_qualified_name"], + [name, adls_account_qualified_name], + ) + + # Split the adls_account_qualified_name to extract necessary information + fields = adls_account_qualified_name.split("/") + if len(fields) != 4: + raise ValueError("Invalid adls_account_qualified_name") + + try: + connector_type = AtlanConnectorType(fields[1]) # type:ignore + except ValueError as e: + raise ValueError("Invalid adls_account_qualified_name") from e + + return ADLSContainer.Attributes( + name=name, + adls_account_qualified_name=adls_account_qualified_name, + connection_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}", + qualified_name=f"{adls_account_qualified_name}/{name}", + connector_name=connector_type.value, + adls_account=ADLSAccount.ref_by_qualified_name( + adls_account_qualified_name + ), + ) + + attributes: "ADLSContainer.Attributes" = Field( + default_factory=lambda: ADLSContainer.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .a_d_l_s_account import ADLSAccount # noqa +from .a_d_l_s_object import ADLSObject # noqa diff --git a/pyatlan/model/assets/a_d_l_s_object.py b/pyatlan/model/assets/a_d_l_s_object.py new file mode 100644 index 000000000..4effa7ab3 --- /dev/null +++ b/pyatlan/model/assets/a_d_l_s_object.py @@ -0,0 +1,538 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
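As a hedged usage sketch of the ADLSAccount and ADLSContainer create() helpers shown above (the qualified names are invented, "adls" is assumed to be a valid AtlanConnectorType value, and the classes are assumed to be re-exported from pyatlan.model.assets):

from pyatlan.model.assets import ADLSAccount, ADLSContainer

# A connection qualified name has three segments: default/<connector>/<epoch>.
account = ADLSAccount.create(
    name="my-storage-account",
    connection_qualified_name="default/adls/1234567890",
)
print(account.qualified_name)  # default/adls/1234567890/my-storage-account

# The container derives its connection from the first three segments of the
# account's qualified name and links back to the account by reference.
container = ADLSContainer.create(
    name="my-container",
    adls_account_qualified_name=account.qualified_name,
)
print(container.qualified_name)  # .../my-storage-account/my-container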
+ + +from __future__ import annotations + +from datetime import datetime +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.enums import ( + ADLSAccessTier, + ADLSLeaseState, + ADLSLeaseStatus, + ADLSObjectArchiveStatus, + ADLSObjectType, + AtlanConnectorType, +) +from pyatlan.model.fields.atlan_fields import ( + BooleanField, + KeywordField, + KeywordTextField, + NumericField, + RelationField, + TextField, +) +from pyatlan.utils import get_parent_qualified_name, init_guid, validate_required_fields + +from .a_d_l_s import ADLS + + +class ADLSObject(ADLS): + """Description""" + + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, + *, + name: str, + adls_container_qualified_name: str, + ) -> ADLSObject: + validate_required_fields( + ["name", "adls_container_qualified_name"], + [name, adls_container_qualified_name], + ) + attributes = ADLSObject.Attributes.create( + name=name, adls_container_qualified_name=adls_container_qualified_name + ) + return cls(attributes=attributes) + + type_name: str = Field(default="ADLSObject", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "ADLSObject": + raise ValueError("must be ADLSObject") + return v + + def __setattr__(self, name, value): + if name in ADLSObject._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + ADLS_OBJECT_URL: ClassVar[KeywordTextField] = KeywordTextField( + "adlsObjectUrl", "adlsObjectUrl.keyword", "adlsObjectUrl" + ) + """ + URL of this object. + """ + ADLS_OBJECT_VERSION_ID: ClassVar[KeywordField] = KeywordField( + "adlsObjectVersionId", "adlsObjectVersionId" + ) + """ + Identifier of the version of this object, from ADLS. + """ + ADLS_OBJECT_TYPE: ClassVar[KeywordField] = KeywordField( + "adlsObjectType", "adlsObjectType" + ) + """ + Type of this object. + """ + ADLS_OBJECT_SIZE: ClassVar[NumericField] = NumericField( + "adlsObjectSize", "adlsObjectSize" + ) + """ + Size of this object. + """ + ADLS_OBJECT_ACCESS_TIER: ClassVar[KeywordField] = KeywordField( + "adlsObjectAccessTier", "adlsObjectAccessTier" + ) + """ + Access tier of this object. + """ + ADLS_OBJECT_ACCESS_TIER_LAST_MODIFIED_TIME: ClassVar[NumericField] = NumericField( + "adlsObjectAccessTierLastModifiedTime", "adlsObjectAccessTierLastModifiedTime" + ) + """ + Time (epoch) when the acccess tier for this object was last modified, in milliseconds. + """ + ADLS_OBJECT_ARCHIVE_STATUS: ClassVar[KeywordField] = KeywordField( + "adlsObjectArchiveStatus", "adlsObjectArchiveStatus" + ) + """ + Archive status of this object. + """ + ADLS_OBJECT_SERVER_ENCRYPTED: ClassVar[BooleanField] = BooleanField( + "adlsObjectServerEncrypted", "adlsObjectServerEncrypted" + ) + """ + Whether this object is server encrypted (true) or not (false). + """ + ADLS_OBJECT_VERSION_LEVEL_IMMUTABILITY_SUPPORT: ClassVar[ + BooleanField + ] = BooleanField( + "adlsObjectVersionLevelImmutabilitySupport", + "adlsObjectVersionLevelImmutabilitySupport", + ) + """ + Whether this object supports version-level immutability (true) or not (false). + """ + ADLS_OBJECT_CACHE_CONTROL: ClassVar[TextField] = TextField( + "adlsObjectCacheControl", "adlsObjectCacheControl" + ) + """ + Cache control of this object. + """ + ADLS_OBJECT_CONTENT_TYPE: ClassVar[TextField] = TextField( + "adlsObjectContentType", "adlsObjectContentType" + ) + """ + Content type of this object. 
+ """ + ADLS_OBJECT_CONTENT_MD5HASH: ClassVar[KeywordField] = KeywordField( + "adlsObjectContentMD5Hash", "adlsObjectContentMD5Hash" + ) + """ + MD5 hash of this object's contents. + """ + ADLS_OBJECT_CONTENT_LANGUAGE: ClassVar[KeywordTextField] = KeywordTextField( + "adlsObjectContentLanguage", + "adlsObjectContentLanguage.keyword", + "adlsObjectContentLanguage", + ) + """ + Language of this object's contents. + """ + ADLS_OBJECT_LEASE_STATUS: ClassVar[KeywordField] = KeywordField( + "adlsObjectLeaseStatus", "adlsObjectLeaseStatus" + ) + """ + Status of this object's lease. + """ + ADLS_OBJECT_LEASE_STATE: ClassVar[KeywordField] = KeywordField( + "adlsObjectLeaseState", "adlsObjectLeaseState" + ) + """ + State of this object's lease. + """ + ADLS_OBJECT_METADATA: ClassVar[KeywordField] = KeywordField( + "adlsObjectMetadata", "adlsObjectMetadata" + ) + """ + Metadata associated with this object, from ADLS. + """ + ADLS_CONTAINER_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "adlsContainerQualifiedName", + "adlsContainerQualifiedName", + "adlsContainerQualifiedName.text", + ) + """ + Unique name of the container this object exists within. + """ + + ADLS_CONTAINER: ClassVar[RelationField] = RelationField("adlsContainer") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "adls_object_url", + "adls_object_version_id", + "adls_object_type", + "adls_object_size", + "adls_object_access_tier", + "adls_object_access_tier_last_modified_time", + "adls_object_archive_status", + "adls_object_server_encrypted", + "adls_object_version_level_immutability_support", + "adls_object_cache_control", + "adls_object_content_type", + "adls_object_content_m_d5_hash", + "adls_object_content_language", + "adls_object_lease_status", + "adls_object_lease_state", + "adls_object_metadata", + "adls_container_qualified_name", + "adls_container", + ] + + @property + def adls_object_url(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.adls_object_url + + @adls_object_url.setter + def adls_object_url(self, adls_object_url: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_object_url = adls_object_url + + @property + def adls_object_version_id(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.adls_object_version_id + ) + + @adls_object_version_id.setter + def adls_object_version_id(self, adls_object_version_id: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_object_version_id = adls_object_version_id + + @property + def adls_object_type(self) -> Optional[ADLSObjectType]: + return None if self.attributes is None else self.attributes.adls_object_type + + @adls_object_type.setter + def adls_object_type(self, adls_object_type: Optional[ADLSObjectType]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_object_type = adls_object_type + + @property + def adls_object_size(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.adls_object_size + + @adls_object_size.setter + def adls_object_size(self, adls_object_size: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_object_size = adls_object_size + + @property + def adls_object_access_tier(self) -> Optional[ADLSAccessTier]: + return ( + None if self.attributes is None else self.attributes.adls_object_access_tier + ) + + 
@adls_object_access_tier.setter + def adls_object_access_tier( + self, adls_object_access_tier: Optional[ADLSAccessTier] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_object_access_tier = adls_object_access_tier + + @property + def adls_object_access_tier_last_modified_time(self) -> Optional[datetime]: + return ( + None + if self.attributes is None + else self.attributes.adls_object_access_tier_last_modified_time + ) + + @adls_object_access_tier_last_modified_time.setter + def adls_object_access_tier_last_modified_time( + self, adls_object_access_tier_last_modified_time: Optional[datetime] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_object_access_tier_last_modified_time = ( + adls_object_access_tier_last_modified_time + ) + + @property + def adls_object_archive_status(self) -> Optional[ADLSObjectArchiveStatus]: + return ( + None + if self.attributes is None + else self.attributes.adls_object_archive_status + ) + + @adls_object_archive_status.setter + def adls_object_archive_status( + self, adls_object_archive_status: Optional[ADLSObjectArchiveStatus] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_object_archive_status = adls_object_archive_status + + @property + def adls_object_server_encrypted(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.adls_object_server_encrypted + ) + + @adls_object_server_encrypted.setter + def adls_object_server_encrypted( + self, adls_object_server_encrypted: Optional[bool] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_object_server_encrypted = adls_object_server_encrypted + + @property + def adls_object_version_level_immutability_support(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.adls_object_version_level_immutability_support + ) + + @adls_object_version_level_immutability_support.setter + def adls_object_version_level_immutability_support( + self, adls_object_version_level_immutability_support: Optional[bool] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_object_version_level_immutability_support = ( + adls_object_version_level_immutability_support + ) + + @property + def adls_object_cache_control(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.adls_object_cache_control + ) + + @adls_object_cache_control.setter + def adls_object_cache_control(self, adls_object_cache_control: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_object_cache_control = adls_object_cache_control + + @property + def adls_object_content_type(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.adls_object_content_type + ) + + @adls_object_content_type.setter + def adls_object_content_type(self, adls_object_content_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_object_content_type = adls_object_content_type + + @property + def adls_object_content_m_d5_hash(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.adls_object_content_m_d5_hash + ) + + @adls_object_content_m_d5_hash.setter + def adls_object_content_m_d5_hash( + self, adls_object_content_m_d5_hash: Optional[str] + ): + if self.attributes 
is None: + self.attributes = self.Attributes() + self.attributes.adls_object_content_m_d5_hash = adls_object_content_m_d5_hash + + @property + def adls_object_content_language(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.adls_object_content_language + ) + + @adls_object_content_language.setter + def adls_object_content_language(self, adls_object_content_language: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_object_content_language = adls_object_content_language + + @property + def adls_object_lease_status(self) -> Optional[ADLSLeaseStatus]: + return ( + None + if self.attributes is None + else self.attributes.adls_object_lease_status + ) + + @adls_object_lease_status.setter + def adls_object_lease_status( + self, adls_object_lease_status: Optional[ADLSLeaseStatus] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_object_lease_status = adls_object_lease_status + + @property + def adls_object_lease_state(self) -> Optional[ADLSLeaseState]: + return ( + None if self.attributes is None else self.attributes.adls_object_lease_state + ) + + @adls_object_lease_state.setter + def adls_object_lease_state( + self, adls_object_lease_state: Optional[ADLSLeaseState] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_object_lease_state = adls_object_lease_state + + @property + def adls_object_metadata(self) -> Optional[dict[str, str]]: + return None if self.attributes is None else self.attributes.adls_object_metadata + + @adls_object_metadata.setter + def adls_object_metadata(self, adls_object_metadata: Optional[dict[str, str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_object_metadata = adls_object_metadata + + @property + def adls_container_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.adls_container_qualified_name + ) + + @adls_container_qualified_name.setter + def adls_container_qualified_name( + self, adls_container_qualified_name: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_container_qualified_name = adls_container_qualified_name + + @property + def adls_container(self) -> Optional[ADLSContainer]: + return None if self.attributes is None else self.attributes.adls_container + + @adls_container.setter + def adls_container(self, adls_container: Optional[ADLSContainer]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.adls_container = adls_container + + class Attributes(ADLS.Attributes): + adls_object_url: Optional[str] = Field(default=None, description="") + adls_object_version_id: Optional[str] = Field(default=None, description="") + adls_object_type: Optional[ADLSObjectType] = Field(default=None, description="") + adls_object_size: Optional[int] = Field(default=None, description="") + adls_object_access_tier: Optional[ADLSAccessTier] = Field( + default=None, description="" + ) + adls_object_access_tier_last_modified_time: Optional[datetime] = Field( + default=None, description="" + ) + adls_object_archive_status: Optional[ADLSObjectArchiveStatus] = Field( + default=None, description="" + ) + adls_object_server_encrypted: Optional[bool] = Field( + default=None, description="" + ) + adls_object_version_level_immutability_support: Optional[bool] = Field( + default=None, description="" + ) + 
adls_object_cache_control: Optional[str] = Field(default=None, description="") + adls_object_content_type: Optional[str] = Field(default=None, description="") + adls_object_content_m_d5_hash: Optional[str] = Field( + default=None, description="" + ) + adls_object_content_language: Optional[str] = Field( + default=None, description="" + ) + adls_object_lease_status: Optional[ADLSLeaseStatus] = Field( + default=None, description="" + ) + adls_object_lease_state: Optional[ADLSLeaseState] = Field( + default=None, description="" + ) + adls_object_metadata: Optional[dict[str, str]] = Field( + default=None, description="" + ) + adls_container_qualified_name: Optional[str] = Field( + default=None, description="" + ) + adls_container: Optional[ADLSContainer] = Field( + default=None, description="" + ) # relationship + + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, *, name: str, adls_container_qualified_name: str + ) -> ADLSObject.Attributes: + validate_required_fields( + ["name", "adls_container_qualified_name"], + [name, adls_container_qualified_name], + ) + + # Split the qualified_name to extract necessary information + fields = adls_container_qualified_name.split("/") + if len(fields) != 5: + raise ValueError("Invalid qualified_name") + + try: + connector_type = AtlanConnectorType(fields[1]) # type:ignore + except ValueError as e: + raise ValueError("Invalid qualified_name") from e + adls_account_qualified_name = get_parent_qualified_name( + adls_container_qualified_name + ) + + return ADLSObject.Attributes( + name=name, + adls_container_qualified_name=adls_container_qualified_name, + qualified_name=f"{adls_container_qualified_name}/{name}", + connection_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}", + connector_name=connector_type.value, + adls_container=ADLSContainer.ref_by_qualified_name( + adls_container_qualified_name + ), + adls_account_qualified_name=adls_account_qualified_name, + ) + + attributes: "ADLSObject.Attributes" = Field( + default_factory=lambda: ADLSObject.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .a_d_l_s_container import ADLSContainer # noqa diff --git a/pyatlan/model/assets/asset28.py b/pyatlan/model/assets/a_p_i.py similarity index 86% rename from pyatlan/model/assets/asset28.py rename to pyatlan/model/assets/a_p_i.py index 1cf00ef9e..e0c6d2bae 100644 --- a/pyatlan/model/assets/asset28.py +++ b/pyatlan/model/assets/a_p_i.py @@ -6,7 +6,7 @@ from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.fields.atlan_fields import ( BooleanField, @@ -14,13 +14,13 @@ KeywordTextField, ) -from .asset00 import Catalog +from .catalog import Catalog class API(Catalog): """Description""" - type_name: str = Field("API", allow_mutation=False) + type_name: str = Field(default="API", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -140,26 +140,17 @@ def api_is_auth_optional(self, api_is_auth_optional: Optional[bool]): self.attributes.api_is_auth_optional = api_is_auth_optional class Attributes(Catalog.Attributes): - api_spec_type: Optional[str] = Field(None, description="", alias="apiSpecType") - api_spec_version: Optional[str] = Field( - None, description="", alias="apiSpecVersion" - ) - api_spec_name: Optional[str] = Field(None, description="", alias="apiSpecName") - api_spec_qualified_name: Optional[str] = Field( - None, description="", alias="apiSpecQualifiedName" - ) + api_spec_type: Optional[str] = Field(default=None, description="") + api_spec_version: Optional[str] = Field(default=None, description="") + api_spec_name: Optional[str] = Field(default=None, description="") + api_spec_qualified_name: Optional[str] = Field(default=None, description="") api_external_docs: Optional[dict[str, str]] = Field( - None, description="", alias="apiExternalDocs" - ) - api_is_auth_optional: Optional[bool] = Field( - None, description="", alias="apiIsAuthOptional" + default=None, description="" ) + api_is_auth_optional: Optional[bool] = Field(default=None, description="") attributes: "API.Attributes" = Field( default_factory=lambda: API.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) - - -API.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/a_p_i_path.py b/pyatlan/model/assets/a_p_i_path.py new file mode 100644 index 000000000..8f5347bea --- /dev/null +++ b/pyatlan/model/assets/a_p_i_path.py @@ -0,0 +1,249 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
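The ADLSObject.Attributes.create method above reduces to a small amount of qualified-name arithmetic. The following standalone sketch (illustrative values only) shows the same derivation without any pyatlan imports:

# A container qualified name has five segments:
# default/<connector>/<epoch>/<account>/<container>
adls_container_qualified_name = "default/adls/1234567890/my-account/my-container"

fields = adls_container_qualified_name.split("/")
assert len(fields) == 5, "Invalid qualified_name"

# The connection is the first three segments; the parent account is everything
# up to the last "/" (what get_parent_qualified_name returns).
connection_qualified_name = "/".join(fields[:3])
adls_account_qualified_name = "/".join(fields[:-1])

# The object's own qualified name simply appends its name to the container's.
object_qualified_name = f"{adls_container_qualified_name}/data.csv"

print(connection_qualified_name)    # default/adls/1234567890
print(adls_account_qualified_name)  # default/adls/1234567890/my-account
print(object_qualified_name)        # .../my-container/data.csv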
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.enums import AtlanConnectorType +from pyatlan.model.fields.atlan_fields import ( + BooleanField, + KeywordField, + KeywordTextField, + RelationField, + TextField, +) +from pyatlan.utils import init_guid, validate_required_fields + +from .a_p_i import API + + +class APIPath(API): + """Description""" + + @classmethod + # @validate_arguments() + @init_guid + def create(cls, *, path_raw_uri: str, spec_qualified_name: str) -> APIPath: + validate_required_fields( + ["path_raw_uri", "spec_qualified_name"], [path_raw_uri, spec_qualified_name] + ) + attributes = APIPath.Attributes.create( + path_raw_uri=path_raw_uri, spec_qualified_name=spec_qualified_name + ) + return cls(attributes=attributes) + + type_name: str = Field(default="APIPath", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "APIPath": + raise ValueError("must be APIPath") + return v + + def __setattr__(self, name, value): + if name in APIPath._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + API_PATH_SUMMARY: ClassVar[TextField] = TextField( + "apiPathSummary", "apiPathSummary" + ) + """ + Descriptive summary intended to apply to all operations in this path. + """ + API_PATH_RAW_URI: ClassVar[KeywordTextField] = KeywordTextField( + "apiPathRawURI", "apiPathRawURI", "apiPathRawURI.text" + ) + """ + Absolute path to an individual endpoint. + """ + API_PATH_IS_TEMPLATED: ClassVar[BooleanField] = BooleanField( + "apiPathIsTemplated", "apiPathIsTemplated" + ) + """ + Whether the endpoint's path contains replaceable parameters (true) or not (false). + """ + API_PATH_AVAILABLE_OPERATIONS: ClassVar[KeywordField] = KeywordField( + "apiPathAvailableOperations", "apiPathAvailableOperations" + ) + """ + List of the operations available on the endpoint. + """ + API_PATH_AVAILABLE_RESPONSE_CODES: ClassVar[KeywordField] = KeywordField( + "apiPathAvailableResponseCodes", "apiPathAvailableResponseCodes" + ) + """ + Response codes available on the path across all operations. + """ + API_PATH_IS_INGRESS_EXPOSED: ClassVar[BooleanField] = BooleanField( + "apiPathIsIngressExposed", "apiPathIsIngressExposed" + ) + """ + Whether the path is exposed as an ingress (true) or not (false). 
+ """ + + API_SPEC: ClassVar[RelationField] = RelationField("apiSpec") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "api_path_summary", + "api_path_raw_u_r_i", + "api_path_is_templated", + "api_path_available_operations", + "api_path_available_response_codes", + "api_path_is_ingress_exposed", + "api_spec", + ] + + @property + def api_path_summary(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.api_path_summary + + @api_path_summary.setter + def api_path_summary(self, api_path_summary: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.api_path_summary = api_path_summary + + @property + def api_path_raw_u_r_i(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.api_path_raw_u_r_i + + @api_path_raw_u_r_i.setter + def api_path_raw_u_r_i(self, api_path_raw_u_r_i: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.api_path_raw_u_r_i = api_path_raw_u_r_i + + @property + def api_path_is_templated(self) -> Optional[bool]: + return ( + None if self.attributes is None else self.attributes.api_path_is_templated + ) + + @api_path_is_templated.setter + def api_path_is_templated(self, api_path_is_templated: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.api_path_is_templated = api_path_is_templated + + @property + def api_path_available_operations(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.api_path_available_operations + ) + + @api_path_available_operations.setter + def api_path_available_operations( + self, api_path_available_operations: Optional[set[str]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.api_path_available_operations = api_path_available_operations + + @property + def api_path_available_response_codes(self) -> Optional[dict[str, str]]: + return ( + None + if self.attributes is None + else self.attributes.api_path_available_response_codes + ) + + @api_path_available_response_codes.setter + def api_path_available_response_codes( + self, api_path_available_response_codes: Optional[dict[str, str]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.api_path_available_response_codes = ( + api_path_available_response_codes + ) + + @property + def api_path_is_ingress_exposed(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.api_path_is_ingress_exposed + ) + + @api_path_is_ingress_exposed.setter + def api_path_is_ingress_exposed(self, api_path_is_ingress_exposed: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.api_path_is_ingress_exposed = api_path_is_ingress_exposed + + @property + def api_spec(self) -> Optional[APISpec]: + return None if self.attributes is None else self.attributes.api_spec + + @api_spec.setter + def api_spec(self, api_spec: Optional[APISpec]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.api_spec = api_spec + + class Attributes(API.Attributes): + api_path_summary: Optional[str] = Field(default=None, description="") + api_path_raw_u_r_i: Optional[str] = Field(default=None, description="") + api_path_is_templated: Optional[bool] = Field(default=None, description="") + api_path_available_operations: Optional[set[str]] = Field( + default=None, 
description="" + ) + api_path_available_response_codes: Optional[dict[str, str]] = Field( + default=None, description="" + ) + api_path_is_ingress_exposed: Optional[bool] = Field( + default=None, description="" + ) + api_spec: Optional[APISpec] = Field( + default=None, description="" + ) # relationship + + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, *, path_raw_uri: str, spec_qualified_name: str + ) -> APIPath.Attributes: + validate_required_fields( + ["path_raw_uri", "spec_qualified_name"], + [path_raw_uri, spec_qualified_name], + ) + + # Split the spec_qualified_name to extract necessary information + fields = spec_qualified_name.split("/") + if len(fields) != 4: + raise ValueError("Invalid spec_qualified_name") + + try: + connector_type = AtlanConnectorType(fields[1]) # type:ignore + except ValueError as e: + raise ValueError("Invalid spec_qualified_name") from e + + return APIPath.Attributes( + api_path_raw_u_r_i=path_raw_uri, + name=path_raw_uri, + api_spec_qualified_name=spec_qualified_name, + connection_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}", + qualified_name=f"{spec_qualified_name}{path_raw_uri}", + connector_name=connector_type.value, + api_spec=APISpec.ref_by_qualified_name(spec_qualified_name), + ) + + attributes: "APIPath.Attributes" = Field( + default_factory=lambda: APIPath.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .a_p_i_spec import APISpec # noqa diff --git a/pyatlan/model/assets/a_p_i_spec.py b/pyatlan/model/assets/a_p_i_spec.py new file mode 100644 index 000000000..e4246f84c --- /dev/null +++ b/pyatlan/model/assets/a_p_i_spec.py @@ -0,0 +1,275 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.enums import AtlanConnectorType +from pyatlan.model.fields.atlan_fields import ( + KeywordField, + KeywordTextField, + RelationField, +) +from pyatlan.utils import init_guid, validate_required_fields + +from .a_p_i import API + + +class APISpec(API): + """Description""" + + @classmethod + # @validate_arguments() + @init_guid + def create(cls, *, name: str, connection_qualified_name: str) -> APISpec: + validate_required_fields( + ["name", "connection_qualified_name"], [name, connection_qualified_name] + ) + attributes = APISpec.Attributes.create( + name=name, connection_qualified_name=connection_qualified_name + ) + return cls(attributes=attributes) + + type_name: str = Field(default="APISpec", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "APISpec": + raise ValueError("must be APISpec") + return v + + def __setattr__(self, name, value): + if name in APISpec._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + API_SPEC_TERMS_OF_SERVICE_URL: ClassVar[KeywordTextField] = KeywordTextField( + "apiSpecTermsOfServiceURL", + "apiSpecTermsOfServiceURL", + "apiSpecTermsOfServiceURL.text", + ) + """ + URL to the terms of service for the API specification. + """ + API_SPEC_CONTACT_EMAIL: ClassVar[KeywordTextField] = KeywordTextField( + "apiSpecContactEmail", "apiSpecContactEmail", "apiSpecContactEmail.text" + ) + """ + Email address for a contact responsible for the API specification. 
+ """ + API_SPEC_CONTACT_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "apiSpecContactName", "apiSpecContactName.keyword", "apiSpecContactName" + ) + """ + Name of the contact responsible for the API specification. + """ + API_SPEC_CONTACT_URL: ClassVar[KeywordTextField] = KeywordTextField( + "apiSpecContactURL", "apiSpecContactURL", "apiSpecContactURL.text" + ) + """ + URL pointing to the contact information. + """ + API_SPEC_LICENSE_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "apiSpecLicenseName", "apiSpecLicenseName.keyword", "apiSpecLicenseName" + ) + """ + Name of the license under which the API specification is available. + """ + API_SPEC_LICENSE_URL: ClassVar[KeywordTextField] = KeywordTextField( + "apiSpecLicenseURL", "apiSpecLicenseURL", "apiSpecLicenseURL.text" + ) + """ + URL to the license under which the API specification is available. + """ + API_SPEC_CONTRACT_VERSION: ClassVar[KeywordField] = KeywordField( + "apiSpecContractVersion", "apiSpecContractVersion" + ) + """ + Version of the contract for the API specification. + """ + API_SPEC_SERVICE_ALIAS: ClassVar[KeywordTextField] = KeywordTextField( + "apiSpecServiceAlias", "apiSpecServiceAlias", "apiSpecServiceAlias.text" + ) + """ + Service alias for the API specification. + """ + + API_PATHS: ClassVar[RelationField] = RelationField("apiPaths") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "api_spec_terms_of_service_url", + "api_spec_contact_email", + "api_spec_contact_name", + "api_spec_contact_url", + "api_spec_license_name", + "api_spec_license_url", + "api_spec_contract_version", + "api_spec_service_alias", + "api_paths", + ] + + @property + def api_spec_terms_of_service_url(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.api_spec_terms_of_service_url + ) + + @api_spec_terms_of_service_url.setter + def api_spec_terms_of_service_url( + self, api_spec_terms_of_service_url: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.api_spec_terms_of_service_url = api_spec_terms_of_service_url + + @property + def api_spec_contact_email(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.api_spec_contact_email + ) + + @api_spec_contact_email.setter + def api_spec_contact_email(self, api_spec_contact_email: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.api_spec_contact_email = api_spec_contact_email + + @property + def api_spec_contact_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.api_spec_contact_name + ) + + @api_spec_contact_name.setter + def api_spec_contact_name(self, api_spec_contact_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.api_spec_contact_name = api_spec_contact_name + + @property + def api_spec_contact_url(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.api_spec_contact_url + + @api_spec_contact_url.setter + def api_spec_contact_url(self, api_spec_contact_url: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.api_spec_contact_url = api_spec_contact_url + + @property + def api_spec_license_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.api_spec_license_name + ) + + @api_spec_license_name.setter + def api_spec_license_name(self, 
api_spec_license_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.api_spec_license_name = api_spec_license_name + + @property + def api_spec_license_url(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.api_spec_license_url + + @api_spec_license_url.setter + def api_spec_license_url(self, api_spec_license_url: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.api_spec_license_url = api_spec_license_url + + @property + def api_spec_contract_version(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.api_spec_contract_version + ) + + @api_spec_contract_version.setter + def api_spec_contract_version(self, api_spec_contract_version: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.api_spec_contract_version = api_spec_contract_version + + @property + def api_spec_service_alias(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.api_spec_service_alias + ) + + @api_spec_service_alias.setter + def api_spec_service_alias(self, api_spec_service_alias: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.api_spec_service_alias = api_spec_service_alias + + @property + def api_paths(self) -> Optional[list[APIPath]]: + return None if self.attributes is None else self.attributes.api_paths + + @api_paths.setter + def api_paths(self, api_paths: Optional[list[APIPath]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.api_paths = api_paths + + class Attributes(API.Attributes): + api_spec_terms_of_service_url: Optional[str] = Field( + default=None, description="" + ) + api_spec_contact_email: Optional[str] = Field(default=None, description="") + api_spec_contact_name: Optional[str] = Field(default=None, description="") + api_spec_contact_url: Optional[str] = Field(default=None, description="") + api_spec_license_name: Optional[str] = Field(default=None, description="") + api_spec_license_url: Optional[str] = Field(default=None, description="") + api_spec_contract_version: Optional[str] = Field(default=None, description="") + api_spec_service_alias: Optional[str] = Field(default=None, description="") + api_paths: Optional[list[APIPath]] = Field( + default=None, description="" + ) # relationship + + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, *, name: str, connection_qualified_name: str + ) -> APISpec.Attributes: + validate_required_fields( + ["name", "connection_qualified_name"], [name, connection_qualified_name] + ) + + # Split the connection_qualified_name to extract necessary information + fields = connection_qualified_name.split("/") + if len(fields) != 3: + raise ValueError("Invalid connection_qualified_name") + + try: + connector_type = AtlanConnectorType(fields[1]) # type:ignore + except ValueError as e: + raise ValueError("Invalid connection_qualified_name") from e + + return APISpec.Attributes( + name=name, + qualified_name=f"{connection_qualified_name}/{name}", + connection_qualified_name=connection_qualified_name, + connector_name=connector_type.value, + ) + + attributes: "APISpec.Attributes" = Field( + default_factory=lambda: APISpec.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .a_p_i_path import APIPath # noqa diff --git a/pyatlan/model/assets/asset33.py b/pyatlan/model/assets/a_w_s.py similarity index 85% rename from pyatlan/model/assets/asset33.py rename to pyatlan/model/assets/a_w_s.py index aa6c347d6..51de44f14 100644 --- a/pyatlan/model/assets/asset33.py +++ b/pyatlan/model/assets/a_w_s.py @@ -6,18 +6,18 @@ from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.fields.atlan_fields import KeywordField, KeywordTextField from pyatlan.model.structs import AwsTag -from .asset09 import Cloud +from .cloud import Cloud class AWS(Cloud): """Description""" - type_name: str = Field("AWS", allow_mutation=False) + type_name: str = Field(default="AWS", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -178,27 +178,18 @@ def aws_tags(self, aws_tags: Optional[list[AwsTag]]): self.attributes.aws_tags = aws_tags class Attributes(Cloud.Attributes): - aws_arn: Optional[str] = Field(None, description="", alias="awsArn") - aws_partition: Optional[str] = Field(None, description="", alias="awsPartition") - aws_service: Optional[str] = Field(None, description="", alias="awsService") - aws_region: Optional[str] = Field(None, description="", alias="awsRegion") - aws_account_id: Optional[str] = Field( - None, description="", alias="awsAccountId" - ) - aws_resource_id: Optional[str] = Field( - None, description="", alias="awsResourceId" - ) - aws_owner_name: Optional[str] = Field( - None, description="", alias="awsOwnerName" - ) - aws_owner_id: Optional[str] = Field(None, description="", alias="awsOwnerId") - aws_tags: Optional[list[AwsTag]] = Field(None, description="", alias="awsTags") + aws_arn: Optional[str] = Field(default=None, description="") + aws_partition: Optional[str] = Field(default=None, description="") + aws_service: Optional[str] = Field(default=None, description="") + aws_region: Optional[str] = Field(default=None, description="") + aws_account_id: Optional[str] = Field(default=None, description="") + aws_resource_id: Optional[str] = Field(default=None, description="") + aws_owner_name: Optional[str] = Field(default=None, description="") + aws_owner_id: Optional[str] = Field(default=None, description="") + aws_tags: Optional[list[AwsTag]] = Field(default=None, description="") attributes: "AWS.Attributes" = Field( default_factory=lambda: AWS.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) - - -AWS.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/access_control.py b/pyatlan/model/assets/access_control.py new file mode 100644 index 000000000..ce1bab9ae --- /dev/null +++ b/pyatlan/model/assets/access_control.py @@ -0,0 +1,238 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
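A rough usage sketch for the create() helpers above (the "api" connector type, IDs, and names below are illustrative assumptions rather than values from this changeset, and the import assumes both classes remain re-exported from pyatlan.model.assets):

from pyatlan.model.assets import APIPath, APISpec

# connection_qualified_name must have exactly three "/"-separated parts, with a valid
# AtlanConnectorType (assumed here to be "api") as the second part
spec = APISpec.create(
    name="petstore",
    connection_qualified_name="default/api/1234567890",
)
# spec_qualified_name must have exactly four "/"-separated parts; the resulting
# qualified_name is the spec's qualified name with the raw URI appended
path = APIPath.create(
    path_raw_uri="/pets",
    spec_qualified_name="default/api/1234567890/petstore",
)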
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import BooleanField, KeywordField, RelationField + +from .asset import Asset + + +class AccessControl(Asset, type_name="AccessControl"): + """Description""" + + type_name: str = Field(default="AccessControl", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "AccessControl": + raise ValueError("must be AccessControl") + return v + + def __setattr__(self, name, value): + if name in AccessControl._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + IS_ACCESS_CONTROL_ENABLED: ClassVar[BooleanField] = BooleanField( + "isAccessControlEnabled", "isAccessControlEnabled" + ) + """ + TBC + """ + DENY_CUSTOM_METADATA_GUIDS: ClassVar[KeywordField] = KeywordField( + "denyCustomMetadataGuids", "denyCustomMetadataGuids" + ) + """ + TBC + """ + DENY_ASSET_TABS: ClassVar[KeywordField] = KeywordField( + "denyAssetTabs", "denyAssetTabs" + ) + """ + TBC + """ + DENY_ASSET_FILTERS: ClassVar[KeywordField] = KeywordField( + "denyAssetFilters", "denyAssetFilters" + ) + """ + TBC + """ + CHANNEL_LINK: ClassVar[KeywordField] = KeywordField("channelLink", "channelLink") + """ + TBC + """ + DENY_ASSET_TYPES: ClassVar[KeywordField] = KeywordField( + "denyAssetTypes", "denyAssetTypes" + ) + """ + TBC + """ + DENY_NAVIGATION_PAGES: ClassVar[KeywordField] = KeywordField( + "denyNavigationPages", "denyNavigationPages" + ) + """ + TBC + """ + DEFAULT_NAVIGATION: ClassVar[KeywordField] = KeywordField( + "defaultNavigation", "defaultNavigation" + ) + """ + TBC + """ + DISPLAY_PREFERENCES: ClassVar[KeywordField] = KeywordField( + "displayPreferences", "displayPreferences" + ) + """ + TBC + """ + + POLICIES: ClassVar[RelationField] = RelationField("policies") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "is_access_control_enabled", + "deny_custom_metadata_guids", + "deny_asset_tabs", + "deny_asset_filters", + "channel_link", + "deny_asset_types", + "deny_navigation_pages", + "default_navigation", + "display_preferences", + "policies", + ] + + @property + def is_access_control_enabled(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.is_access_control_enabled + ) + + @is_access_control_enabled.setter + def is_access_control_enabled(self, is_access_control_enabled: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_access_control_enabled = is_access_control_enabled + + @property + def deny_custom_metadata_guids(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.deny_custom_metadata_guids + ) + + @deny_custom_metadata_guids.setter + def deny_custom_metadata_guids( + self, deny_custom_metadata_guids: Optional[set[str]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.deny_custom_metadata_guids = deny_custom_metadata_guids + + @property + def deny_asset_tabs(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.deny_asset_tabs + + @deny_asset_tabs.setter + def deny_asset_tabs(self, deny_asset_tabs: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.deny_asset_tabs = deny_asset_tabs + + @property + def deny_asset_filters(self) -> Optional[set[str]]: + return None 
if self.attributes is None else self.attributes.deny_asset_filters + + @deny_asset_filters.setter + def deny_asset_filters(self, deny_asset_filters: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.deny_asset_filters = deny_asset_filters + + @property + def channel_link(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.channel_link + + @channel_link.setter + def channel_link(self, channel_link: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.channel_link = channel_link + + @property + def deny_asset_types(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.deny_asset_types + + @deny_asset_types.setter + def deny_asset_types(self, deny_asset_types: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.deny_asset_types = deny_asset_types + + @property + def deny_navigation_pages(self) -> Optional[set[str]]: + return ( + None if self.attributes is None else self.attributes.deny_navigation_pages + ) + + @deny_navigation_pages.setter + def deny_navigation_pages(self, deny_navigation_pages: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.deny_navigation_pages = deny_navigation_pages + + @property + def default_navigation(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.default_navigation + + @default_navigation.setter + def default_navigation(self, default_navigation: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.default_navigation = default_navigation + + @property + def display_preferences(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.display_preferences + + @display_preferences.setter + def display_preferences(self, display_preferences: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.display_preferences = display_preferences + + @property + def policies(self) -> Optional[list[AuthPolicy]]: + return None if self.attributes is None else self.attributes.policies + + @policies.setter + def policies(self, policies: Optional[list[AuthPolicy]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.policies = policies + + class Attributes(Asset.Attributes): + is_access_control_enabled: Optional[bool] = Field(default=None, description="") + deny_custom_metadata_guids: Optional[set[str]] = Field( + default=None, description="" + ) + deny_asset_tabs: Optional[set[str]] = Field(default=None, description="") + deny_asset_filters: Optional[set[str]] = Field(default=None, description="") + channel_link: Optional[str] = Field(default=None, description="") + deny_asset_types: Optional[set[str]] = Field(default=None, description="") + deny_navigation_pages: Optional[set[str]] = Field(default=None, description="") + default_navigation: Optional[str] = Field(default=None, description="") + display_preferences: Optional[set[str]] = Field(default=None, description="") + policies: Optional[list[AuthPolicy]] = Field( + default=None, description="" + ) # relationship + + attributes: "AccessControl.Attributes" = Field( + default_factory=lambda: AccessControl.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .auth_policy import AuthPolicy # noqa diff --git a/pyatlan/model/assets/airflow.py b/pyatlan/model/assets/airflow.py new file mode 100644 index 000000000..4e7500e62 --- /dev/null +++ b/pyatlan/model/assets/airflow.py @@ -0,0 +1,206 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from datetime import datetime +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.enums import OpenLineageRunState +from pyatlan.model.fields.atlan_fields import KeywordField, NumericField + +from .catalog import Catalog + + +class Airflow(Catalog): + """Description""" + + type_name: str = Field(default="Airflow", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "Airflow": + raise ValueError("must be Airflow") + return v + + def __setattr__(self, name, value): + if name in Airflow._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + AIRFLOW_TAGS: ClassVar[KeywordField] = KeywordField("airflowTags", "airflowTags") + """ + Tags assigned to the asset in Airflow. + """ + AIRFLOW_RUN_VERSION: ClassVar[KeywordField] = KeywordField( + "airflowRunVersion", "airflowRunVersion" + ) + """ + Version of the run in Airflow. + """ + AIRFLOW_RUN_OPEN_LINEAGE_VERSION: ClassVar[KeywordField] = KeywordField( + "airflowRunOpenLineageVersion", "airflowRunOpenLineageVersion" + ) + """ + Version of the run in OpenLineage. + """ + AIRFLOW_RUN_NAME: ClassVar[KeywordField] = KeywordField( + "airflowRunName", "airflowRunName" + ) + """ + Name of the run. + """ + AIRFLOW_RUN_TYPE: ClassVar[KeywordField] = KeywordField( + "airflowRunType", "airflowRunType" + ) + """ + Type of the run. + """ + AIRFLOW_RUN_START_TIME: ClassVar[NumericField] = NumericField( + "airflowRunStartTime", "airflowRunStartTime" + ) + """ + Start time of the run. + """ + AIRFLOW_RUN_END_TIME: ClassVar[NumericField] = NumericField( + "airflowRunEndTime", "airflowRunEndTime" + ) + """ + End time of the run. + """ + AIRFLOW_RUN_OPEN_LINEAGE_STATE: ClassVar[KeywordField] = KeywordField( + "airflowRunOpenLineageState", "airflowRunOpenLineageState" + ) + """ + State of the run in OpenLineage. 
+ """ + + _convenience_properties: ClassVar[list[str]] = [ + "airflow_tags", + "airflow_run_version", + "airflow_run_open_lineage_version", + "airflow_run_name", + "airflow_run_type", + "airflow_run_start_time", + "airflow_run_end_time", + "airflow_run_open_lineage_state", + ] + + @property + def airflow_tags(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.airflow_tags + + @airflow_tags.setter + def airflow_tags(self, airflow_tags: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.airflow_tags = airflow_tags + + @property + def airflow_run_version(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.airflow_run_version + + @airflow_run_version.setter + def airflow_run_version(self, airflow_run_version: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.airflow_run_version = airflow_run_version + + @property + def airflow_run_open_lineage_version(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.airflow_run_open_lineage_version + ) + + @airflow_run_open_lineage_version.setter + def airflow_run_open_lineage_version( + self, airflow_run_open_lineage_version: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.airflow_run_open_lineage_version = ( + airflow_run_open_lineage_version + ) + + @property + def airflow_run_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.airflow_run_name + + @airflow_run_name.setter + def airflow_run_name(self, airflow_run_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.airflow_run_name = airflow_run_name + + @property + def airflow_run_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.airflow_run_type + + @airflow_run_type.setter + def airflow_run_type(self, airflow_run_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.airflow_run_type = airflow_run_type + + @property + def airflow_run_start_time(self) -> Optional[datetime]: + return ( + None if self.attributes is None else self.attributes.airflow_run_start_time + ) + + @airflow_run_start_time.setter + def airflow_run_start_time(self, airflow_run_start_time: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.airflow_run_start_time = airflow_run_start_time + + @property + def airflow_run_end_time(self) -> Optional[datetime]: + return None if self.attributes is None else self.attributes.airflow_run_end_time + + @airflow_run_end_time.setter + def airflow_run_end_time(self, airflow_run_end_time: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.airflow_run_end_time = airflow_run_end_time + + @property + def airflow_run_open_lineage_state(self) -> Optional[OpenLineageRunState]: + return ( + None + if self.attributes is None + else self.attributes.airflow_run_open_lineage_state + ) + + @airflow_run_open_lineage_state.setter + def airflow_run_open_lineage_state( + self, airflow_run_open_lineage_state: Optional[OpenLineageRunState] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.airflow_run_open_lineage_state = airflow_run_open_lineage_state + + class Attributes(Catalog.Attributes): + airflow_tags: 
Optional[set[str]] = Field(default=None, description="") + airflow_run_version: Optional[str] = Field(default=None, description="") + airflow_run_open_lineage_version: Optional[str] = Field( + default=None, description="" + ) + airflow_run_name: Optional[str] = Field(default=None, description="") + airflow_run_type: Optional[str] = Field(default=None, description="") + airflow_run_start_time: Optional[datetime] = Field(default=None, description="") + airflow_run_end_time: Optional[datetime] = Field(default=None, description="") + airflow_run_open_lineage_state: Optional[OpenLineageRunState] = Field( + default=None, description="" + ) + + attributes: "Airflow.Attributes" = Field( + default_factory=lambda: Airflow.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) diff --git a/pyatlan/model/assets/airflow_dag.py b/pyatlan/model/assets/airflow_dag.py new file mode 100644 index 000000000..4be8aa352 --- /dev/null +++ b/pyatlan/model/assets/airflow_dag.py @@ -0,0 +1,104 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, NumericField, RelationField + +from .airflow import Airflow + + +class AirflowDag(Airflow): + """Description""" + + type_name: str = Field(default="AirflowDag", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "AirflowDag": + raise ValueError("must be AirflowDag") + return v + + def __setattr__(self, name, value): + if name in AirflowDag._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + AIRFLOW_DAG_SCHEDULE: ClassVar[KeywordField] = KeywordField( + "airflowDagSchedule", "airflowDagSchedule" + ) + """ + Schedule for the DAG. + """ + AIRFLOW_DAG_SCHEDULE_DELTA: ClassVar[NumericField] = NumericField( + "airflowDagScheduleDelta", "airflowDagScheduleDelta" + ) + """ + Duration between scheduled runs, in seconds. 
+ """ + + AIRFLOW_TASKS: ClassVar[RelationField] = RelationField("airflowTasks") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "airflow_dag_schedule", + "airflow_dag_schedule_delta", + "airflow_tasks", + ] + + @property + def airflow_dag_schedule(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.airflow_dag_schedule + + @airflow_dag_schedule.setter + def airflow_dag_schedule(self, airflow_dag_schedule: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.airflow_dag_schedule = airflow_dag_schedule + + @property + def airflow_dag_schedule_delta(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.airflow_dag_schedule_delta + ) + + @airflow_dag_schedule_delta.setter + def airflow_dag_schedule_delta(self, airflow_dag_schedule_delta: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.airflow_dag_schedule_delta = airflow_dag_schedule_delta + + @property + def airflow_tasks(self) -> Optional[list[AirflowTask]]: + return None if self.attributes is None else self.attributes.airflow_tasks + + @airflow_tasks.setter + def airflow_tasks(self, airflow_tasks: Optional[list[AirflowTask]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.airflow_tasks = airflow_tasks + + class Attributes(Airflow.Attributes): + airflow_dag_schedule: Optional[str] = Field(default=None, description="") + airflow_dag_schedule_delta: Optional[int] = Field(default=None, description="") + airflow_tasks: Optional[list[AirflowTask]] = Field( + default=None, description="" + ) # relationship + + attributes: "AirflowDag.Attributes" = Field( + default_factory=lambda: AirflowDag.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .airflow_task import AirflowTask # noqa diff --git a/pyatlan/model/assets/airflow_task.py b/pyatlan/model/assets/airflow_task.py new file mode 100644 index 000000000..c3ce66b78 --- /dev/null +++ b/pyatlan/model/assets/airflow_task.py @@ -0,0 +1,353 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import ( + KeywordField, + KeywordTextField, + NumericField, + RelationField, +) + +from .airflow import Airflow + + +class AirflowTask(Airflow): + """Description""" + + type_name: str = Field(default="AirflowTask", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "AirflowTask": + raise ValueError("must be AirflowTask") + return v + + def __setattr__(self, name, value): + if name in AirflowTask._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + AIRFLOW_TASK_OPERATOR_CLASS: ClassVar[KeywordTextField] = KeywordTextField( + "airflowTaskOperatorClass", + "airflowTaskOperatorClass.keyword", + "airflowTaskOperatorClass", + ) + """ + Class name for the operator this task uses. + """ + AIRFLOW_DAG_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "airflowDagName", "airflowDagName.keyword", "airflowDagName" + ) + """ + Simple name of the DAG this task is contained within. 
+ """ + AIRFLOW_DAG_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "airflowDagQualifiedName", "airflowDagQualifiedName" + ) + """ + Unique name of the DAG this task is contained within. + """ + AIRFLOW_TASK_CONNECTION_ID: ClassVar[KeywordTextField] = KeywordTextField( + "airflowTaskConnectionId", + "airflowTaskConnectionId.keyword", + "airflowTaskConnectionId", + ) + """ + Identifier for the connection this task accesses. + """ + AIRFLOW_TASK_SQL: ClassVar[KeywordField] = KeywordField( + "airflowTaskSql", "airflowTaskSql" + ) + """ + SQL code that executes through this task. + """ + AIRFLOW_TASK_RETRY_NUMBER: ClassVar[NumericField] = NumericField( + "airflowTaskRetryNumber", "airflowTaskRetryNumber" + ) + """ + Retry count for this task running. + """ + AIRFLOW_TASK_POOL: ClassVar[KeywordField] = KeywordField( + "airflowTaskPool", "airflowTaskPool" + ) + """ + Pool on which this run happened. + """ + AIRFLOW_TASK_POOL_SLOTS: ClassVar[NumericField] = NumericField( + "airflowTaskPoolSlots", "airflowTaskPoolSlots" + ) + """ + Pool slots used for the run. + """ + AIRFLOW_TASK_QUEUE: ClassVar[KeywordField] = KeywordField( + "airflowTaskQueue", "airflowTaskQueue" + ) + """ + Queue on which this run happened. + """ + AIRFLOW_TASK_PRIORITY_WEIGHT: ClassVar[NumericField] = NumericField( + "airflowTaskPriorityWeight", "airflowTaskPriorityWeight" + ) + """ + Priority of the run. + """ + AIRFLOW_TASK_TRIGGER_RULE: ClassVar[KeywordField] = KeywordField( + "airflowTaskTriggerRule", "airflowTaskTriggerRule" + ) + """ + Trigger for the run. + """ + + OUTPUTS: ClassVar[RelationField] = RelationField("outputs") + """ + TBC + """ + PROCESS: ClassVar[RelationField] = RelationField("process") + """ + TBC + """ + INPUTS: ClassVar[RelationField] = RelationField("inputs") + """ + TBC + """ + AIRFLOW_DAG: ClassVar[RelationField] = RelationField("airflowDag") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "airflow_task_operator_class", + "airflow_dag_name", + "airflow_dag_qualified_name", + "airflow_task_connection_id", + "airflow_task_sql", + "airflow_task_retry_number", + "airflow_task_pool", + "airflow_task_pool_slots", + "airflow_task_queue", + "airflow_task_priority_weight", + "airflow_task_trigger_rule", + "outputs", + "process", + "inputs", + "airflow_dag", + ] + + @property + def airflow_task_operator_class(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.airflow_task_operator_class + ) + + @airflow_task_operator_class.setter + def airflow_task_operator_class(self, airflow_task_operator_class: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.airflow_task_operator_class = airflow_task_operator_class + + @property + def airflow_dag_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.airflow_dag_name + + @airflow_dag_name.setter + def airflow_dag_name(self, airflow_dag_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.airflow_dag_name = airflow_dag_name + + @property + def airflow_dag_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.airflow_dag_qualified_name + ) + + @airflow_dag_qualified_name.setter + def airflow_dag_qualified_name(self, airflow_dag_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.airflow_dag_qualified_name = airflow_dag_qualified_name + 
+ @property + def airflow_task_connection_id(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.airflow_task_connection_id + ) + + @airflow_task_connection_id.setter + def airflow_task_connection_id(self, airflow_task_connection_id: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.airflow_task_connection_id = airflow_task_connection_id + + @property + def airflow_task_sql(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.airflow_task_sql + + @airflow_task_sql.setter + def airflow_task_sql(self, airflow_task_sql: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.airflow_task_sql = airflow_task_sql + + @property + def airflow_task_retry_number(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.airflow_task_retry_number + ) + + @airflow_task_retry_number.setter + def airflow_task_retry_number(self, airflow_task_retry_number: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.airflow_task_retry_number = airflow_task_retry_number + + @property + def airflow_task_pool(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.airflow_task_pool + + @airflow_task_pool.setter + def airflow_task_pool(self, airflow_task_pool: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.airflow_task_pool = airflow_task_pool + + @property + def airflow_task_pool_slots(self) -> Optional[int]: + return ( + None if self.attributes is None else self.attributes.airflow_task_pool_slots + ) + + @airflow_task_pool_slots.setter + def airflow_task_pool_slots(self, airflow_task_pool_slots: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.airflow_task_pool_slots = airflow_task_pool_slots + + @property + def airflow_task_queue(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.airflow_task_queue + + @airflow_task_queue.setter + def airflow_task_queue(self, airflow_task_queue: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.airflow_task_queue = airflow_task_queue + + @property + def airflow_task_priority_weight(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.airflow_task_priority_weight + ) + + @airflow_task_priority_weight.setter + def airflow_task_priority_weight(self, airflow_task_priority_weight: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.airflow_task_priority_weight = airflow_task_priority_weight + + @property + def airflow_task_trigger_rule(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.airflow_task_trigger_rule + ) + + @airflow_task_trigger_rule.setter + def airflow_task_trigger_rule(self, airflow_task_trigger_rule: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.airflow_task_trigger_rule = airflow_task_trigger_rule + + @property + def outputs(self) -> Optional[list[Catalog]]: + return None if self.attributes is None else self.attributes.outputs + + @outputs.setter + def outputs(self, outputs: Optional[list[Catalog]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.outputs = outputs + + 
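A minimal sketch of how the convenience setters and relationship fields on these generated classes might be used; the GUID and pool name are made up, and it assumes an AirflowTask can be constructed with no arguments, as the generated defaults suggest:

task = AirflowTask()
task.airflow_task_pool = "default_pool"  # the property setter lazily creates task.attributes first
# relationship attributes take references rather than full assets
task.airflow_dag = AirflowDag.ref_by_guid("b4113341-251b-4adc-81fb-2420501c30e6")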
@property + def process(self) -> Optional[Process]: + return None if self.attributes is None else self.attributes.process + + @process.setter + def process(self, process: Optional[Process]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.process = process + + @property + def inputs(self) -> Optional[list[Catalog]]: + return None if self.attributes is None else self.attributes.inputs + + @inputs.setter + def inputs(self, inputs: Optional[list[Catalog]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.inputs = inputs + + @property + def airflow_dag(self) -> Optional[AirflowDag]: + return None if self.attributes is None else self.attributes.airflow_dag + + @airflow_dag.setter + def airflow_dag(self, airflow_dag: Optional[AirflowDag]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.airflow_dag = airflow_dag + + class Attributes(Airflow.Attributes): + airflow_task_operator_class: Optional[str] = Field(default=None, description="") + airflow_dag_name: Optional[str] = Field(default=None, description="") + airflow_dag_qualified_name: Optional[str] = Field(default=None, description="") + airflow_task_connection_id: Optional[str] = Field(default=None, description="") + airflow_task_sql: Optional[str] = Field(default=None, description="") + airflow_task_retry_number: Optional[int] = Field(default=None, description="") + airflow_task_pool: Optional[str] = Field(default=None, description="") + airflow_task_pool_slots: Optional[int] = Field(default=None, description="") + airflow_task_queue: Optional[str] = Field(default=None, description="") + airflow_task_priority_weight: Optional[int] = Field( + default=None, description="" + ) + airflow_task_trigger_rule: Optional[str] = Field(default=None, description="") + outputs: Optional[list[Catalog]] = Field( + default=None, description="" + ) # relationship + process: Optional[Process] = Field(default=None, description="") # relationship + inputs: Optional[list[Catalog]] = Field( + default=None, description="" + ) # relationship + airflow_dag: Optional[AirflowDag] = Field( + default=None, description="" + ) # relationship + + attributes: "AirflowTask.Attributes" = Field( + default_factory=lambda: AirflowTask.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .airflow_dag import AirflowDag # noqa +from .catalog import Catalog # noqa +from .process import Process # noqa diff --git a/pyatlan/model/assets/asset.py b/pyatlan/model/assets/asset.py new file mode 100644 index 000000000..d196a0cab --- /dev/null +++ b/pyatlan/model/assets/asset.py @@ -0,0 +1,3180 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
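The Asset base class introduced in this file keeps a registry of its subclasses so that API payloads can be re-hydrated into the right concrete type. A rough sketch of that lookup with an illustrative payload (in normal use pydantic invokes this hook during validation rather than user code, and the fallback lookup assumes the class names stay importable from pyatlan.model.assets):

payload = {"typeName": "AirflowDag", "attributes": {"name": "my_dag"}}
asset = Asset._convert_to_real_type_(payload)  # expected to resolve to an AirflowDag instance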
+ + +from __future__ import annotations + +import sys +from datetime import datetime +from typing import TYPE_CHECKING, ClassVar, Optional, Type, TypeVar + +from pydantic.v1 import Field, validator + +from pyatlan.errors import ErrorCode +from pyatlan.model.core import Announcement +from pyatlan.model.enums import ( + AnnouncementType, + CertificateStatus, + SaveSemantic, + SourceCostUnitType, +) +from pyatlan.model.fields.atlan_fields import ( + BooleanField, + KeywordField, + KeywordTextField, + KeywordTextStemmedField, + NumericField, + NumericRankField, + RelationField, + TextField, +) +from pyatlan.model.structs import PopularityInsights, StarredDetails +from pyatlan.utils import init_guid, move_struct, validate_required_fields + +from .referenceable import Referenceable + +SelfAsset = TypeVar("SelfAsset", bound="Asset") + + +class Asset(Referenceable): + """Description""" + + _subtypes_: dict[str, type] = dict() + + def __init_subclass__(cls, type_name=None): + cls._subtypes_[type_name or cls.__name__.lower()] = cls + + def trim_to_required(self: SelfAsset) -> SelfAsset: + return self.create_for_modification( + qualified_name=self.qualified_name or "", name=self.name or "" + ) + + def trim_to_reference(self: SelfAsset) -> SelfAsset: + if self.guid and self.guid.strip(): + return self.ref_by_guid(self.guid) + if self.qualified_name and self.qualified_name.strip(): + return self.ref_by_qualified_name(self.qualified_name) + if ( + self.unique_attributes + and (qualified_name := self.unique_attributes.get("qualified_name")) + and qualified_name.strip() + ): + return self.ref_by_qualified_name(qualified_name) + raise ErrorCode.MISSING_REQUIRED_RELATIONSHIP_PARAM.exception_with_parameters( + self.type_name, "guid, qualifiedName" + ) + + @classmethod + @init_guid + def create(cls: Type[SelfAsset], *args, **kwargs) -> SelfAsset: + raise NotImplementedError( + "Create has not been implemented for this class. Please submit an enhancement " + "request if you need it implemented."
+ ) + + @classmethod + def create_for_modification( + cls: type[SelfAsset], qualified_name: str = "", name: str = "" + ) -> SelfAsset: + if cls.__name__ == "Asset": + raise ErrorCode.METHOD_CAN_NOT_BE_INVOKED_ON_ASSET.exception_with_parameters() + validate_required_fields( + ["name", "qualified_name"], + [name, qualified_name], + ) + return cls(attributes=cls.Attributes(qualified_name=qualified_name, name=name)) + + @classmethod + def ref_by_guid( + cls: type[SelfAsset], guid: str, semantic: SaveSemantic = SaveSemantic.REPLACE + ) -> SelfAsset: + retval: SelfAsset = cls(attributes=cls.Attributes()) + retval.guid = guid + retval.semantic = semantic + return retval + + @classmethod + def ref_by_qualified_name( + cls: type[SelfAsset], + qualified_name: str, + semantic: SaveSemantic = SaveSemantic.REPLACE, + ) -> SelfAsset: + ret_value: SelfAsset = cls( + attributes=cls.Attributes(name="", qualified_name=qualified_name) + ) + ret_value.unique_attributes = {"qualifiedName": qualified_name} + ret_value.semantic = semantic + return ret_value + + @classmethod + def __get_validators__(cls): + yield cls._convert_to_real_type_ + + @classmethod + def _convert_to_real_type_(cls, data): + if isinstance(data, Asset): + return data + + # Handle the case where asset data is a list + if isinstance(data, list): + return [cls._convert_to_real_type_(item) for item in data] + + data_type = ( + data.get("type_name") if "type_name" in data else data.get("typeName") + ) + + if data_type is None: + if issubclass(cls, Asset): + return cls(**data) + raise ValueError("Missing 'type' in Asset") + + sub = cls._subtypes_.get(data_type) + if sub is None: + sub = getattr(sys.modules["pyatlan.model.assets"], data_type) + + if sub is None: + raise TypeError(f"Unsupported sub-type: {data_type}") + + move_struct(data) + return sub(**data) + + if TYPE_CHECKING: + from pyatlan.model.lineage import FluentLineage + + @classmethod + def lineage(cls, guid: str, include_archived: bool = False) -> "FluentLineage": + """ + Start a FluentLineage that can be used to get a LineageListRequest that can be used to retrieve all downstream + assets. Additional conditions can be chained onto the returned FluentLineage before any asset retrieval is + attempted, ensuring all conditions are pushed-down for optimal retrieval. 
(To change the default direction from + downstream, chain a .direction() call.) + + :param guid: unique identifier (GUID) for the starting point of lineage + :param include_archived: when True, archived (soft-deleted) assets in lineage will be included + :returns: a FluentLineage that can be used to get a LineageListRequest that can be used to retrieve all + downstream assets + """ + from pyatlan.model.lineage import FluentLineage + + if not include_archived: + return FluentLineage( + starting_guid=guid, + where_assets=FluentLineage.ACTIVE, + where_relationships=FluentLineage.ACTIVE, + includes_in_results=FluentLineage.ACTIVE, + ) + return FluentLineage(starting_guid=guid) + + def has_announcement(self) -> bool: + return bool( + self.attributes + and ( + self.attributes.announcement_title or self.attributes.announcement_type + ) + ) + + def set_announcement(self, announcement: Announcement) -> None: + self.attributes.announcement_type = announcement.announcement_type.value + self.attributes.announcement_title = announcement.announcement_title + self.attributes.announcement_message = announcement.announcement_message + + def get_announcment(self) -> Optional[Announcement]: + if self.attributes.announcement_type and self.attributes.announcement_title: + return Announcement( + announcement_type=AnnouncementType[ + self.attributes.announcement_type.upper() + ], + announcement_title=self.attributes.announcement_title, + announcement_message=self.attributes.announcement_message, + ) + return None + + def remove_announcement(self): + self.attributes.remove_announcement() + + def remove_description(self): + self.attributes.remove_description() + + def remove_user_description(self): + self.attributes.remove_user_description() + + def remove_owners(self): + self.attributes.remove_owners() + + def remove_certificate(self): + self.attributes.remove_certificate() + + type_name: str = Field(default="Asset", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "Asset": + raise ValueError("must be Asset") + return v + + def __setattr__(self, name, value): + if name in Asset._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + NAME: ClassVar[KeywordTextStemmedField] = KeywordTextStemmedField( + "name", "name.keyword", "name", "name.stemmed" + ) + """ + Name of this asset. Fallback for display purposes, if displayName is empty. + """ + DISPLAY_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "displayName", "displayName.keyword", "displayName" + ) + """ + Human-readable name of this asset used for display purposes (in user interface). + """ + DESCRIPTION: ClassVar[KeywordTextField] = KeywordTextField( + "description", "description.keyword", "description" + ) + """ + Description of this asset, for example as crawled from a source. Fallback for display purposes, if userDescription is empty. + """ # noqa: E501 + USER_DESCRIPTION: ClassVar[KeywordTextField] = KeywordTextField( + "userDescription", "userDescription.keyword", "userDescription" + ) + """ + Description of this asset, as provided by a user. If present, this will be used for the description in user interface. + """ # noqa: E501 + TENANT_ID: ClassVar[KeywordField] = KeywordField("tenantId", "tenantId") + """ + Name of the Atlan workspace in which this asset exists. 
+ """ + CERTIFICATE_STATUS: ClassVar[KeywordTextField] = KeywordTextField( + "certificateStatus", "certificateStatus", "certificateStatus.text" + ) + """ + Status of this asset's certification. + """ + CERTIFICATE_STATUS_MESSAGE: ClassVar[KeywordField] = KeywordField( + "certificateStatusMessage", "certificateStatusMessage" + ) + """ + Human-readable descriptive message used to provide further detail to certificateStatus. + """ + CERTIFICATE_UPDATED_BY: ClassVar[KeywordField] = KeywordField( + "certificateUpdatedBy", "certificateUpdatedBy" + ) + """ + Name of the user who last updated the certification of this asset. + """ + CERTIFICATE_UPDATED_AT: ClassVar[NumericField] = NumericField( + "certificateUpdatedAt", "certificateUpdatedAt" + ) + """ + Time (epoch) at which the certification was last updated, in milliseconds. + """ + ANNOUNCEMENT_TITLE: ClassVar[KeywordField] = KeywordField( + "announcementTitle", "announcementTitle" + ) + """ + Brief title for the announcement on this asset. Required when announcementType is specified. + """ + ANNOUNCEMENT_MESSAGE: ClassVar[KeywordField] = KeywordField( + "announcementMessage", "announcementMessage" + ) + """ + Detailed message to include in the announcement on this asset. + """ + ANNOUNCEMENT_TYPE: ClassVar[KeywordField] = KeywordField( + "announcementType", "announcementType" + ) + """ + Type of announcement on this asset. + """ + ANNOUNCEMENT_UPDATED_AT: ClassVar[NumericField] = NumericField( + "announcementUpdatedAt", "announcementUpdatedAt" + ) + """ + Time (epoch) at which the announcement was last updated, in milliseconds. + """ + ANNOUNCEMENT_UPDATED_BY: ClassVar[KeywordField] = KeywordField( + "announcementUpdatedBy", "announcementUpdatedBy" + ) + """ + Name of the user who last updated the announcement. + """ + OWNER_USERS: ClassVar[KeywordField] = KeywordField("ownerUsers", "ownerUsers") + """ + List of users who own this asset. + """ + OWNER_GROUPS: ClassVar[KeywordField] = KeywordField("ownerGroups", "ownerGroups") + """ + List of groups who own this asset. + """ + ADMIN_USERS: ClassVar[KeywordField] = KeywordField("adminUsers", "adminUsers") + """ + List of users who administer this asset. (This is only used for certain asset types.) + """ + ADMIN_GROUPS: ClassVar[KeywordField] = KeywordField("adminGroups", "adminGroups") + """ + List of groups who administer this asset. (This is only used for certain asset types.) + """ + VIEWER_USERS: ClassVar[KeywordField] = KeywordField("viewerUsers", "viewerUsers") + """ + List of users who can view assets contained in a collection. (This is only used for certain asset types.) + """ + VIEWER_GROUPS: ClassVar[KeywordField] = KeywordField("viewerGroups", "viewerGroups") + """ + List of groups who can view assets contained in a collection. (This is only used for certain asset types.) + """ + CONNECTOR_NAME: ClassVar[KeywordField] = KeywordField( + "connectorName", "connectorName" + ) + """ + Type of the connector through which this asset is accessible. + """ + CONNECTION_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "connectionName", "connectionName", "connectionName.text" + ) + """ + Simple name of the connection through which this asset is accessible. + """ + CONNECTION_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "connectionQualifiedName", + "connectionQualifiedName", + "connectionQualifiedName.text", + ) + """ + Unique name of the connection through which this asset is accessible. 
+ """ + HAS_LINEAGE: ClassVar[BooleanField] = BooleanField("__hasLineage", "__hasLineage") + """ + Whether this asset has lineage (true) or not (false). + """ + IS_DISCOVERABLE: ClassVar[BooleanField] = BooleanField( + "isDiscoverable", "isDiscoverable" + ) + """ + Whether this asset is discoverable through the UI (true) or not (false). + """ + IS_EDITABLE: ClassVar[BooleanField] = BooleanField("isEditable", "isEditable") + """ + Whether this asset can be edited in the UI (true) or not (false). + """ + SUB_TYPE: ClassVar[KeywordField] = KeywordField("subType", "subType") + """ + Subtype of this asset. + """ + VIEW_SCORE: ClassVar[NumericRankField] = NumericRankField( + "viewScore", "viewScore", "viewScore.rank_feature" + ) + """ + View score for this asset. + """ + POPULARITY_SCORE: ClassVar[NumericRankField] = NumericRankField( + "popularityScore", "popularityScore", "popularityScore.rank_feature" + ) + """ + Popularity score for this asset. + """ + SOURCE_OWNERS: ClassVar[KeywordField] = KeywordField("sourceOwners", "sourceOwners") + """ + List of owners of this asset, in the source system. + """ + SOURCE_CREATED_BY: ClassVar[KeywordField] = KeywordField( + "sourceCreatedBy", "sourceCreatedBy" + ) + """ + Name of the user who created this asset, in the source system. + """ + SOURCE_CREATED_AT: ClassVar[NumericField] = NumericField( + "sourceCreatedAt", "sourceCreatedAt" + ) + """ + Time (epoch) at which this asset was created in the source system, in milliseconds. + """ + SOURCE_UPDATED_AT: ClassVar[NumericField] = NumericField( + "sourceUpdatedAt", "sourceUpdatedAt" + ) + """ + Time (epoch) at which this asset was last updated in the source system, in milliseconds. + """ + SOURCE_UPDATED_BY: ClassVar[KeywordField] = KeywordField( + "sourceUpdatedBy", "sourceUpdatedBy" + ) + """ + Name of the user who last updated this asset, in the source system. + """ + SOURCE_URL: ClassVar[KeywordField] = KeywordField("sourceURL", "sourceURL") + """ + URL to the resource within the source application, used to create a button to view this asset in the source application. + """ # noqa: E501 + SOURCE_EMBED_URL: ClassVar[KeywordField] = KeywordField( + "sourceEmbedURL", "sourceEmbedURL" + ) + """ + URL to create an embed for a resource (for example, an image of a dashboard) within Atlan. + """ + LAST_SYNC_WORKFLOW_NAME: ClassVar[KeywordField] = KeywordField( + "lastSyncWorkflowName", "lastSyncWorkflowName" + ) + """ + Name of the crawler that last synchronized this asset. + """ + LAST_SYNC_RUN_AT: ClassVar[NumericField] = NumericField( + "lastSyncRunAt", "lastSyncRunAt" + ) + """ + Time (epoch) at which this asset was last crawled, in milliseconds. + """ + LAST_SYNC_RUN: ClassVar[KeywordField] = KeywordField("lastSyncRun", "lastSyncRun") + """ + Name of the last run of the crawler that last synchronized this asset. + """ + ADMIN_ROLES: ClassVar[KeywordField] = KeywordField("adminRoles", "adminRoles") + """ + List of roles who administer this asset. (This is only used for Connection assets.) + """ + SOURCE_READ_COUNT: ClassVar[NumericField] = NumericField( + "sourceReadCount", "sourceReadCount" + ) + """ + Total count of all read operations at source. + """ + SOURCE_READ_USER_COUNT: ClassVar[NumericField] = NumericField( + "sourceReadUserCount", "sourceReadUserCount" + ) + """ + Total number of unique users that read data from asset. + """ + SOURCE_LAST_READ_AT: ClassVar[NumericField] = NumericField( + "sourceLastReadAt", "sourceLastReadAt" + ) + """ + Timestamp of most recent read operation. 
+ """ + LAST_ROW_CHANGED_AT: ClassVar[NumericField] = NumericField( + "lastRowChangedAt", "lastRowChangedAt" + ) + """ + Time (epoch) of the last operation that inserted, updated, or deleted rows, in milliseconds. + """ + SOURCE_TOTAL_COST: ClassVar[NumericField] = NumericField( + "sourceTotalCost", "sourceTotalCost" + ) + """ + Total cost of all operations at source. + """ + SOURCE_COST_UNIT: ClassVar[KeywordField] = KeywordField( + "sourceCostUnit", "sourceCostUnit" + ) + """ + The unit of measure for sourceTotalCost. + """ + SOURCE_READ_QUERY_COST: ClassVar[NumericField] = NumericField( + "sourceReadQueryCost", "sourceReadQueryCost" + ) + """ + Total cost of read queries at source. + """ + SOURCE_READ_RECENT_USER_LIST: ClassVar[KeywordField] = KeywordField( + "sourceReadRecentUserList", "sourceReadRecentUserList" + ) + """ + List of usernames of the most recent users who read this asset. + """ + SOURCE_READ_RECENT_USER_RECORD_LIST: ClassVar[KeywordField] = KeywordField( + "sourceReadRecentUserRecordList", "sourceReadRecentUserRecordList" + ) + """ + List of usernames with extra insights for the most recent users who read this asset. + """ + SOURCE_READ_TOP_USER_LIST: ClassVar[KeywordField] = KeywordField( + "sourceReadTopUserList", "sourceReadTopUserList" + ) + """ + List of usernames of the users who read this asset the most. + """ + SOURCE_READ_TOP_USER_RECORD_LIST: ClassVar[KeywordField] = KeywordField( + "sourceReadTopUserRecordList", "sourceReadTopUserRecordList" + ) + """ + List of usernames with extra insights for the users who read this asset the most. + """ + SOURCE_READ_POPULAR_QUERY_RECORD_LIST: ClassVar[KeywordField] = KeywordField( + "sourceReadPopularQueryRecordList", "sourceReadPopularQueryRecordList" + ) + """ + List of the most popular queries that accessed this asset. + """ + SOURCE_READ_EXPENSIVE_QUERY_RECORD_LIST: ClassVar[KeywordField] = KeywordField( + "sourceReadExpensiveQueryRecordList", "sourceReadExpensiveQueryRecordList" + ) + """ + List of the most expensive queries that accessed this asset. + """ + SOURCE_READ_SLOW_QUERY_RECORD_LIST: ClassVar[KeywordField] = KeywordField( + "sourceReadSlowQueryRecordList", "sourceReadSlowQueryRecordList" + ) + """ + List of the slowest queries that accessed this asset. + """ + SOURCE_QUERY_COMPUTE_COST_LIST: ClassVar[KeywordField] = KeywordField( + "sourceQueryComputeCostList", "sourceQueryComputeCostList" + ) + """ + List of most expensive warehouse names. + """ + SOURCE_QUERY_COMPUTE_COST_RECORD_LIST: ClassVar[KeywordField] = KeywordField( + "sourceQueryComputeCostRecordList", "sourceQueryComputeCostRecordList" + ) + """ + List of most expensive warehouses with extra insights. + """ + DBT_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "dbtQualifiedName", "dbtQualifiedName", "dbtQualifiedName.text" + ) + """ + Unique name of this asset in dbt. + """ + ASSET_DBT_ALIAS: ClassVar[KeywordTextField] = KeywordTextField( + "assetDbtAlias", "assetDbtAlias.keyword", "assetDbtAlias" + ) + """ + Alias of this asset in dbt. + """ + ASSET_DBT_META: ClassVar[KeywordField] = KeywordField( + "assetDbtMeta", "assetDbtMeta" + ) + """ + Metadata for this asset in dbt, specifically everything under the 'meta' key in the dbt object. + """ + ASSET_DBT_UNIQUE_ID: ClassVar[KeywordTextField] = KeywordTextField( + "assetDbtUniqueId", "assetDbtUniqueId.keyword", "assetDbtUniqueId" + ) + """ + Unique identifier of this asset in dbt. 
+ """ + ASSET_DBT_ACCOUNT_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "assetDbtAccountName", "assetDbtAccountName.keyword", "assetDbtAccountName" + ) + """ + Name of the account in which this asset exists in dbt. + """ + ASSET_DBT_PROJECT_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "assetDbtProjectName", "assetDbtProjectName.keyword", "assetDbtProjectName" + ) + """ + Name of the project in which this asset exists in dbt. + """ + ASSET_DBT_PACKAGE_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "assetDbtPackageName", "assetDbtPackageName.keyword", "assetDbtPackageName" + ) + """ + Name of the package in which this asset exists in dbt. + """ + ASSET_DBT_JOB_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "assetDbtJobName", "assetDbtJobName.keyword", "assetDbtJobName" + ) + """ + Name of the job that materialized this asset in dbt. + """ + ASSET_DBT_JOB_SCHEDULE: ClassVar[KeywordField] = KeywordField( + "assetDbtJobSchedule", "assetDbtJobSchedule" + ) + """ + Schedule of the job that materialized this asset in dbt. + """ + ASSET_DBT_JOB_STATUS: ClassVar[KeywordField] = KeywordField( + "assetDbtJobStatus", "assetDbtJobStatus" + ) + """ + Status of the job that materialized this asset in dbt. + """ + ASSET_DBT_TEST_STATUS: ClassVar[KeywordField] = KeywordField( + "assetDbtTestStatus", "assetDbtTestStatus" + ) + """ + All associated dbt test statuses. + """ + ASSET_DBT_JOB_SCHEDULE_CRON_HUMANIZED: ClassVar[TextField] = TextField( + "assetDbtJobScheduleCronHumanized", "assetDbtJobScheduleCronHumanized" + ) + """ + Human-readable cron schedule of the job that materialized this asset in dbt. + """ + ASSET_DBT_JOB_LAST_RUN: ClassVar[NumericField] = NumericField( + "assetDbtJobLastRun", "assetDbtJobLastRun" + ) + """ + Time (epoch) at which the job that materialized this asset in dbt last ran, in milliseconds. + """ + ASSET_DBT_JOB_LAST_RUN_URL: ClassVar[KeywordField] = KeywordField( + "assetDbtJobLastRunUrl", "assetDbtJobLastRunUrl" + ) + """ + URL of the last run of the job that materialized this asset in dbt. + """ + ASSET_DBT_JOB_LAST_RUN_CREATED_AT: ClassVar[NumericField] = NumericField( + "assetDbtJobLastRunCreatedAt", "assetDbtJobLastRunCreatedAt" + ) + """ + Time (epoch) at which the job that materialized this asset in dbt was last created, in milliseconds. + """ + ASSET_DBT_JOB_LAST_RUN_UPDATED_AT: ClassVar[NumericField] = NumericField( + "assetDbtJobLastRunUpdatedAt", "assetDbtJobLastRunUpdatedAt" + ) + """ + Time (epoch) at which the job that materialized this asset in dbt was last updated, in milliseconds. + """ + ASSET_DBT_JOB_LAST_RUN_DEQUED_AT: ClassVar[NumericField] = NumericField( + "assetDbtJobLastRunDequedAt", "assetDbtJobLastRunDequedAt" + ) + """ + Time (epoch) at which the job that materialized this asset in dbt was dequeued, in milliseconds. + """ + ASSET_DBT_JOB_LAST_RUN_STARTED_AT: ClassVar[NumericField] = NumericField( + "assetDbtJobLastRunStartedAt", "assetDbtJobLastRunStartedAt" + ) + """ + Time (epoch) at which the job that materialized this asset in dbt was started running, in milliseconds. + """ + ASSET_DBT_JOB_LAST_RUN_TOTAL_DURATION: ClassVar[KeywordField] = KeywordField( + "assetDbtJobLastRunTotalDuration", "assetDbtJobLastRunTotalDuration" + ) + """ + Total duration of the last run of the job that materialized this asset in dbt. 
+ """ + ASSET_DBT_JOB_LAST_RUN_TOTAL_DURATION_HUMANIZED: ClassVar[ + KeywordField + ] = KeywordField( + "assetDbtJobLastRunTotalDurationHumanized", + "assetDbtJobLastRunTotalDurationHumanized", + ) + """ + Human-readable total duration of the last run of the job that materialized this asset in dbt. + """ + ASSET_DBT_JOB_LAST_RUN_QUEUED_DURATION: ClassVar[KeywordField] = KeywordField( + "assetDbtJobLastRunQueuedDuration", "assetDbtJobLastRunQueuedDuration" + ) + """ + Total duration the job that materialized this asset in dbt spent being queued. + """ + ASSET_DBT_JOB_LAST_RUN_QUEUED_DURATION_HUMANIZED: ClassVar[ + KeywordField + ] = KeywordField( + "assetDbtJobLastRunQueuedDurationHumanized", + "assetDbtJobLastRunQueuedDurationHumanized", + ) + """ + Human-readable total duration of the last run of the job that materialized this asset in dbt spend being queued. + """ + ASSET_DBT_JOB_LAST_RUN_RUN_DURATION: ClassVar[KeywordField] = KeywordField( + "assetDbtJobLastRunRunDuration", "assetDbtJobLastRunRunDuration" + ) + """ + Run duration of the last run of the job that materialized this asset in dbt. + """ + ASSET_DBT_JOB_LAST_RUN_RUN_DURATION_HUMANIZED: ClassVar[ + KeywordField + ] = KeywordField( + "assetDbtJobLastRunRunDurationHumanized", + "assetDbtJobLastRunRunDurationHumanized", + ) + """ + Human-readable run duration of the last run of the job that materialized this asset in dbt. + """ + ASSET_DBT_JOB_LAST_RUN_GIT_BRANCH: ClassVar[KeywordTextField] = KeywordTextField( + "assetDbtJobLastRunGitBranch", + "assetDbtJobLastRunGitBranch", + "assetDbtJobLastRunGitBranch.text", + ) + """ + Branch in git from which the last run of the job that materialized this asset in dbt ran. + """ + ASSET_DBT_JOB_LAST_RUN_GIT_SHA: ClassVar[KeywordField] = KeywordField( + "assetDbtJobLastRunGitSha", "assetDbtJobLastRunGitSha" + ) + """ + SHA hash in git for the last run of the job that materialized this asset in dbt. + """ + ASSET_DBT_JOB_LAST_RUN_STATUS_MESSAGE: ClassVar[ + KeywordTextField + ] = KeywordTextField( + "assetDbtJobLastRunStatusMessage", + "assetDbtJobLastRunStatusMessage.keyword", + "assetDbtJobLastRunStatusMessage", + ) + """ + Status message of the last run of the job that materialized this asset in dbt. + """ + ASSET_DBT_JOB_LAST_RUN_OWNER_THREAD_ID: ClassVar[KeywordField] = KeywordField( + "assetDbtJobLastRunOwnerThreadId", "assetDbtJobLastRunOwnerThreadId" + ) + """ + Thread ID of the owner of the last run of the job that materialized this asset in dbt. + """ + ASSET_DBT_JOB_LAST_RUN_EXECUTED_BY_THREAD_ID: ClassVar[KeywordField] = KeywordField( + "assetDbtJobLastRunExecutedByThreadId", "assetDbtJobLastRunExecutedByThreadId" + ) + """ + Thread ID of the user who executed the last run of the job that materialized this asset in dbt. + """ + ASSET_DBT_JOB_LAST_RUN_ARTIFACTS_SAVED: ClassVar[BooleanField] = BooleanField( + "assetDbtJobLastRunArtifactsSaved", "assetDbtJobLastRunArtifactsSaved" + ) + """ + Whether artifacts were saved from the last run of the job that materialized this asset in dbt (true) or not (false). + """ + ASSET_DBT_JOB_LAST_RUN_ARTIFACT_S3PATH: ClassVar[KeywordField] = KeywordField( + "assetDbtJobLastRunArtifactS3Path", "assetDbtJobLastRunArtifactS3Path" + ) + """ + Path in S3 to the artifacts saved from the last run of the job that materialized this asset in dbt. 
+ """ + ASSET_DBT_JOB_LAST_RUN_HAS_DOCS_GENERATED: ClassVar[BooleanField] = BooleanField( + "assetDbtJobLastRunHasDocsGenerated", "assetDbtJobLastRunHasDocsGenerated" + ) + """ + Whether docs were generated from the last run of the job that materialized this asset in dbt (true) or not (false). + """ + ASSET_DBT_JOB_LAST_RUN_HAS_SOURCES_GENERATED: ClassVar[BooleanField] = BooleanField( + "assetDbtJobLastRunHasSourcesGenerated", "assetDbtJobLastRunHasSourcesGenerated" + ) + """ + Whether sources were generated from the last run of the job that materialized this asset in dbt (true) or not (false). + """ # noqa: E501 + ASSET_DBT_JOB_LAST_RUN_NOTIFICATIONS_SENT: ClassVar[BooleanField] = BooleanField( + "assetDbtJobLastRunNotificationsSent", "assetDbtJobLastRunNotificationsSent" + ) + """ + Whether notifications were sent from the last run of the job that materialized this asset in dbt (true) or not (false). + """ # noqa: E501 + ASSET_DBT_JOB_NEXT_RUN: ClassVar[NumericField] = NumericField( + "assetDbtJobNextRun", "assetDbtJobNextRun" + ) + """ + Time (epoch) when the next run of the job that materializes this asset in dbt is scheduled. + """ + ASSET_DBT_JOB_NEXT_RUN_HUMANIZED: ClassVar[KeywordTextField] = KeywordTextField( + "assetDbtJobNextRunHumanized", + "assetDbtJobNextRunHumanized.keyword", + "assetDbtJobNextRunHumanized", + ) + """ + Human-readable time when the next run of the job that materializes this asset in dbt is scheduled. + """ + ASSET_DBT_ENVIRONMENT_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "assetDbtEnvironmentName", + "assetDbtEnvironmentName.keyword", + "assetDbtEnvironmentName", + ) + """ + Name of the environment in which this asset is materialized in dbt. + """ + ASSET_DBT_ENVIRONMENT_DBT_VERSION: ClassVar[KeywordField] = KeywordField( + "assetDbtEnvironmentDbtVersion", "assetDbtEnvironmentDbtVersion" + ) + """ + Version of the environment in which this asset is materialized in dbt. + """ + ASSET_DBT_TAGS: ClassVar[KeywordTextField] = KeywordTextField( + "assetDbtTags", "assetDbtTags", "assetDbtTags.text" + ) + """ + List of tags attached to this asset in dbt. + """ + ASSET_DBT_SEMANTIC_LAYER_PROXY_URL: ClassVar[KeywordField] = KeywordField( + "assetDbtSemanticLayerProxyUrl", "assetDbtSemanticLayerProxyUrl" + ) + """ + URL of the semantic layer proxy for this asset in dbt. + """ + ASSET_DBT_SOURCE_FRESHNESS_CRITERIA: ClassVar[KeywordField] = KeywordField( + "assetDbtSourceFreshnessCriteria", "assetDbtSourceFreshnessCriteria" + ) + """ + Freshness criteria for the source of this asset in dbt. + """ + SAMPLE_DATA_URL: ClassVar[KeywordTextField] = KeywordTextField( + "sampleDataUrl", "sampleDataUrl", "sampleDataUrl.text" + ) + """ + URL for sample data for this asset. + """ + ASSET_TAGS: ClassVar[KeywordTextField] = KeywordTextField( + "assetTags", "assetTags", "assetTags.text" + ) + """ + List of tags attached to this asset. + """ + ASSET_MC_INCIDENT_NAMES: ClassVar[KeywordTextField] = KeywordTextField( + "assetMcIncidentNames", "assetMcIncidentNames.keyword", "assetMcIncidentNames" + ) + """ + List of Monte Carlo incident names attached to this asset. + """ + ASSET_MC_INCIDENT_QUALIFIED_NAMES: ClassVar[KeywordTextField] = KeywordTextField( + "assetMcIncidentQualifiedNames", + "assetMcIncidentQualifiedNames", + "assetMcIncidentQualifiedNames.text", + ) + """ + List of unique Monte Carlo incident names attached to this asset. 
+ """ + ASSET_MC_MONITOR_NAMES: ClassVar[KeywordTextField] = KeywordTextField( + "assetMcMonitorNames", "assetMcMonitorNames.keyword", "assetMcMonitorNames" + ) + """ + List of Monte Carlo monitor names attached to this asset. + """ + ASSET_MC_MONITOR_QUALIFIED_NAMES: ClassVar[KeywordTextField] = KeywordTextField( + "assetMcMonitorQualifiedNames", + "assetMcMonitorQualifiedNames", + "assetMcMonitorQualifiedNames.text", + ) + """ + List of unique Monte Carlo monitor names attached to this asset. + """ + ASSET_MC_MONITOR_STATUSES: ClassVar[KeywordField] = KeywordField( + "assetMcMonitorStatuses", "assetMcMonitorStatuses" + ) + """ + Statuses of all associated Monte Carlo monitors. + """ + ASSET_MC_MONITOR_TYPES: ClassVar[KeywordField] = KeywordField( + "assetMcMonitorTypes", "assetMcMonitorTypes" + ) + """ + Types of all associated Monte Carlo monitors. + """ + ASSET_MC_MONITOR_SCHEDULE_TYPES: ClassVar[KeywordField] = KeywordField( + "assetMcMonitorScheduleTypes", "assetMcMonitorScheduleTypes" + ) + """ + Schedules of all associated Monte Carlo monitors. + """ + ASSET_MC_INCIDENT_TYPES: ClassVar[KeywordField] = KeywordField( + "assetMcIncidentTypes", "assetMcIncidentTypes" + ) + """ + List of Monte Carlo incident types associated with this asset. + """ + ASSET_MC_INCIDENT_SUB_TYPES: ClassVar[KeywordField] = KeywordField( + "assetMcIncidentSubTypes", "assetMcIncidentSubTypes" + ) + """ + List of Monte Carlo incident sub-types associated with this asset. + """ + ASSET_MC_INCIDENT_SEVERITIES: ClassVar[KeywordField] = KeywordField( + "assetMcIncidentSeverities", "assetMcIncidentSeverities" + ) + """ + List of Monte Carlo incident severities associated with this asset. + """ + ASSET_MC_INCIDENT_STATES: ClassVar[KeywordField] = KeywordField( + "assetMcIncidentStates", "assetMcIncidentStates" + ) + """ + List of Monte Carlo incident states associated with this asset. + """ + ASSET_MC_LAST_SYNC_RUN_AT: ClassVar[NumericField] = NumericField( + "assetMcLastSyncRunAt", "assetMcLastSyncRunAt" + ) + """ + Time (epoch) at which this asset was last synced from Monte Carlo. + """ + STARRED_BY: ClassVar[KeywordField] = KeywordField("starredBy", "starredBy") + """ + Users who have starred this asset. + """ + STARRED_DETAILS_LIST: ClassVar[KeywordField] = KeywordField( + "starredDetailsList", "starredDetailsList" + ) + """ + List of usernames with extra information of the users who have starred an asset. + """ + STARRED_COUNT: ClassVar[NumericField] = NumericField("starredCount", "starredCount") + """ + Number of users who have starred this asset. + """ + ASSET_SODA_DQ_STATUS: ClassVar[KeywordField] = KeywordField( + "assetSodaDQStatus", "assetSodaDQStatus" + ) + """ + Status of data quality from Soda. + """ + ASSET_SODA_CHECK_COUNT: ClassVar[NumericField] = NumericField( + "assetSodaCheckCount", "assetSodaCheckCount" + ) + """ + Number of checks done via Soda. + """ + ASSET_SODA_LAST_SYNC_RUN_AT: ClassVar[NumericField] = NumericField( + "assetSodaLastSyncRunAt", "assetSodaLastSyncRunAt" + ) + """ + + """ + ASSET_SODA_LAST_SCAN_AT: ClassVar[NumericField] = NumericField( + "assetSodaLastScanAt", "assetSodaLastScanAt" + ) + """ + + """ + ASSET_SODA_CHECK_STATUSES: ClassVar[KeywordField] = KeywordField( + "assetSodaCheckStatuses", "assetSodaCheckStatuses" + ) + """ + All associated Soda check statuses. 
+ """ + ASSET_SODA_SOURCE_URL: ClassVar[KeywordField] = KeywordField( + "assetSodaSourceURL", "assetSodaSourceURL" + ) + """ + + """ + ASSET_ICON: ClassVar[KeywordField] = KeywordField("assetIcon", "assetIcon") + """ + Name of the icon to use for this asset. (Only applies to glossaries, currently.) + """ + IS_PARTIAL: ClassVar[BooleanField] = BooleanField("isPartial", "isPartial") + """ + TBC + """ + IS_AI_GENERATED: ClassVar[BooleanField] = BooleanField( + "isAIGenerated", "isAIGenerated" + ) + """ + + """ + ASSET_COVER_IMAGE: ClassVar[KeywordField] = KeywordField( + "assetCoverImage", "assetCoverImage" + ) + """ + TBC + """ + ASSET_THEME_HEX: ClassVar[KeywordField] = KeywordField( + "assetThemeHex", "assetThemeHex" + ) + """ + Color (in hexadecimal RGB) to use to represent this asset. + """ + + SCHEMA_REGISTRY_SUBJECTS: ClassVar[RelationField] = RelationField( + "schemaRegistrySubjects" + ) + """ + TBC + """ + MC_MONITORS: ClassVar[RelationField] = RelationField("mcMonitors") + """ + TBC + """ + OUTPUT_PORT_DATA_PRODUCTS: ClassVar[RelationField] = RelationField( + "outputPortDataProducts" + ) + """ + TBC + """ + FILES: ClassVar[RelationField] = RelationField("files") + """ + TBC + """ + MC_INCIDENTS: ClassVar[RelationField] = RelationField("mcIncidents") + """ + TBC + """ + LINKS: ClassVar[RelationField] = RelationField("links") + """ + TBC + """ + METRICS: ClassVar[RelationField] = RelationField("metrics") + """ + TBC + """ + README: ClassVar[RelationField] = RelationField("readme") + """ + TBC + """ + SODA_CHECKS: ClassVar[RelationField] = RelationField("sodaChecks") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "name", + "display_name", + "description", + "user_description", + "tenant_id", + "certificate_status", + "certificate_status_message", + "certificate_updated_by", + "certificate_updated_at", + "announcement_title", + "announcement_message", + "announcement_type", + "announcement_updated_at", + "announcement_updated_by", + "owner_users", + "owner_groups", + "admin_users", + "admin_groups", + "viewer_users", + "viewer_groups", + "connector_name", + "connection_name", + "connection_qualified_name", + "has_lineage", + "is_discoverable", + "is_editable", + "sub_type", + "view_score", + "popularity_score", + "source_owners", + "source_created_by", + "source_created_at", + "source_updated_at", + "source_updated_by", + "source_url", + "source_embed_url", + "last_sync_workflow_name", + "last_sync_run_at", + "last_sync_run", + "admin_roles", + "source_read_count", + "source_read_user_count", + "source_last_read_at", + "last_row_changed_at", + "source_total_cost", + "source_cost_unit", + "source_read_query_cost", + "source_read_recent_user_list", + "source_read_recent_user_record_list", + "source_read_top_user_list", + "source_read_top_user_record_list", + "source_read_popular_query_record_list", + "source_read_expensive_query_record_list", + "source_read_slow_query_record_list", + "source_query_compute_cost_list", + "source_query_compute_cost_record_list", + "dbt_qualified_name", + "asset_dbt_alias", + "asset_dbt_meta", + "asset_dbt_unique_id", + "asset_dbt_account_name", + "asset_dbt_project_name", + "asset_dbt_package_name", + "asset_dbt_job_name", + "asset_dbt_job_schedule", + "asset_dbt_job_status", + "asset_dbt_test_status", + "asset_dbt_job_schedule_cron_humanized", + "asset_dbt_job_last_run", + "asset_dbt_job_last_run_url", + "asset_dbt_job_last_run_created_at", + "asset_dbt_job_last_run_updated_at", + "asset_dbt_job_last_run_dequed_at", + 
"asset_dbt_job_last_run_started_at", + "asset_dbt_job_last_run_total_duration", + "asset_dbt_job_last_run_total_duration_humanized", + "asset_dbt_job_last_run_queued_duration", + "asset_dbt_job_last_run_queued_duration_humanized", + "asset_dbt_job_last_run_run_duration", + "asset_dbt_job_last_run_run_duration_humanized", + "asset_dbt_job_last_run_git_branch", + "asset_dbt_job_last_run_git_sha", + "asset_dbt_job_last_run_status_message", + "asset_dbt_job_last_run_owner_thread_id", + "asset_dbt_job_last_run_executed_by_thread_id", + "asset_dbt_job_last_run_artifacts_saved", + "asset_dbt_job_last_run_artifact_s3_path", + "asset_dbt_job_last_run_has_docs_generated", + "asset_dbt_job_last_run_has_sources_generated", + "asset_dbt_job_last_run_notifications_sent", + "asset_dbt_job_next_run", + "asset_dbt_job_next_run_humanized", + "asset_dbt_environment_name", + "asset_dbt_environment_dbt_version", + "asset_dbt_tags", + "asset_dbt_semantic_layer_proxy_url", + "asset_dbt_source_freshness_criteria", + "sample_data_url", + "asset_tags", + "asset_mc_incident_names", + "asset_mc_incident_qualified_names", + "asset_mc_monitor_names", + "asset_mc_monitor_qualified_names", + "asset_mc_monitor_statuses", + "asset_mc_monitor_types", + "asset_mc_monitor_schedule_types", + "asset_mc_incident_types", + "asset_mc_incident_sub_types", + "asset_mc_incident_severities", + "asset_mc_incident_states", + "asset_mc_last_sync_run_at", + "starred_by", + "starred_details_list", + "starred_count", + "asset_soda_d_q_status", + "asset_soda_check_count", + "asset_soda_last_sync_run_at", + "asset_soda_last_scan_at", + "asset_soda_check_statuses", + "asset_soda_source_url", + "asset_icon", + "is_partial", + "is_a_i_generated", + "asset_cover_image", + "asset_theme_hex", + "schema_registry_subjects", + "mc_monitors", + "output_port_data_products", + "files", + "mc_incidents", + "links", + "metrics", + "readme", + "soda_checks", + "assigned_terms", + ] + + @property + def name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.name + + @name.setter + def name(self, name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.name = name + + @property + def display_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.display_name + + @display_name.setter + def display_name(self, display_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.display_name = display_name + + @property + def description(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.description + + @description.setter + def description(self, description: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.description = description + + @property + def user_description(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.user_description + + @user_description.setter + def user_description(self, user_description: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.user_description = user_description + + @property + def tenant_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.tenant_id + + @tenant_id.setter + def tenant_id(self, tenant_id: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.tenant_id = tenant_id + + @property + def 
certificate_status(self) -> Optional[CertificateStatus]: + return None if self.attributes is None else self.attributes.certificate_status + + @certificate_status.setter + def certificate_status(self, certificate_status: Optional[CertificateStatus]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.certificate_status = certificate_status + + @property + def certificate_status_message(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.certificate_status_message + ) + + @certificate_status_message.setter + def certificate_status_message(self, certificate_status_message: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.certificate_status_message = certificate_status_message + + @property + def certificate_updated_by(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.certificate_updated_by + ) + + @certificate_updated_by.setter + def certificate_updated_by(self, certificate_updated_by: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.certificate_updated_by = certificate_updated_by + + @property + def certificate_updated_at(self) -> Optional[datetime]: + return ( + None if self.attributes is None else self.attributes.certificate_updated_at + ) + + @certificate_updated_at.setter + def certificate_updated_at(self, certificate_updated_at: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.certificate_updated_at = certificate_updated_at + + @property + def announcement_title(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.announcement_title + + @announcement_title.setter + def announcement_title(self, announcement_title: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.announcement_title = announcement_title + + @property + def announcement_message(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.announcement_message + + @announcement_message.setter + def announcement_message(self, announcement_message: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.announcement_message = announcement_message + + @property + def announcement_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.announcement_type + + @announcement_type.setter + def announcement_type(self, announcement_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.announcement_type = announcement_type + + @property + def announcement_updated_at(self) -> Optional[datetime]: + return ( + None if self.attributes is None else self.attributes.announcement_updated_at + ) + + @announcement_updated_at.setter + def announcement_updated_at(self, announcement_updated_at: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.announcement_updated_at = announcement_updated_at + + @property + def announcement_updated_by(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.announcement_updated_by + ) + + @announcement_updated_by.setter + def announcement_updated_by(self, announcement_updated_by: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.announcement_updated_by = 
announcement_updated_by + + @property + def owner_users(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.owner_users + + @owner_users.setter + def owner_users(self, owner_users: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.owner_users = owner_users + + @property + def owner_groups(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.owner_groups + + @owner_groups.setter + def owner_groups(self, owner_groups: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.owner_groups = owner_groups + + @property + def admin_users(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.admin_users + + @admin_users.setter + def admin_users(self, admin_users: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.admin_users = admin_users + + @property + def admin_groups(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.admin_groups + + @admin_groups.setter + def admin_groups(self, admin_groups: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.admin_groups = admin_groups + + @property + def viewer_users(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.viewer_users + + @viewer_users.setter + def viewer_users(self, viewer_users: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.viewer_users = viewer_users + + @property + def viewer_groups(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.viewer_groups + + @viewer_groups.setter + def viewer_groups(self, viewer_groups: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.viewer_groups = viewer_groups + + @property + def connector_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.connector_name + + @connector_name.setter + def connector_name(self, connector_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.connector_name = connector_name + + @property + def connection_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.connection_name + + @connection_name.setter + def connection_name(self, connection_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.connection_name = connection_name + + @property + def connection_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.connection_qualified_name + ) + + @connection_qualified_name.setter + def connection_qualified_name(self, connection_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.connection_qualified_name = connection_qualified_name + + @property + def has_lineage(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.has_lineage + + @has_lineage.setter + def has_lineage(self, has_lineage: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.has_lineage = has_lineage + + @property + def is_discoverable(self) -> Optional[bool]: + return None if 
self.attributes is None else self.attributes.is_discoverable + + @is_discoverable.setter + def is_discoverable(self, is_discoverable: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_discoverable = is_discoverable + + @property + def is_editable(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_editable + + @is_editable.setter + def is_editable(self, is_editable: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_editable = is_editable + + @property + def sub_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.sub_type + + @sub_type.setter + def sub_type(self, sub_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sub_type = sub_type + + @property + def view_score(self) -> Optional[float]: + return None if self.attributes is None else self.attributes.view_score + + @view_score.setter + def view_score(self, view_score: Optional[float]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.view_score = view_score + + @property + def popularity_score(self) -> Optional[float]: + return None if self.attributes is None else self.attributes.popularity_score + + @popularity_score.setter + def popularity_score(self, popularity_score: Optional[float]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.popularity_score = popularity_score + + @property + def source_owners(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.source_owners + + @source_owners.setter + def source_owners(self, source_owners: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_owners = source_owners + + @property + def source_created_by(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.source_created_by + + @source_created_by.setter + def source_created_by(self, source_created_by: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_created_by = source_created_by + + @property + def source_created_at(self) -> Optional[datetime]: + return None if self.attributes is None else self.attributes.source_created_at + + @source_created_at.setter + def source_created_at(self, source_created_at: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_created_at = source_created_at + + @property + def source_updated_at(self) -> Optional[datetime]: + return None if self.attributes is None else self.attributes.source_updated_at + + @source_updated_at.setter + def source_updated_at(self, source_updated_at: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_updated_at = source_updated_at + + @property + def source_updated_by(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.source_updated_by + + @source_updated_by.setter + def source_updated_by(self, source_updated_by: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_updated_by = source_updated_by + + @property + def source_url(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.source_url + + @source_url.setter + def source_url(self, source_url: 
Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_url = source_url + + @property + def source_embed_url(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.source_embed_url + + @source_embed_url.setter + def source_embed_url(self, source_embed_url: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_embed_url = source_embed_url + + @property + def last_sync_workflow_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.last_sync_workflow_name + ) + + @last_sync_workflow_name.setter + def last_sync_workflow_name(self, last_sync_workflow_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.last_sync_workflow_name = last_sync_workflow_name + + @property + def last_sync_run_at(self) -> Optional[datetime]: + return None if self.attributes is None else self.attributes.last_sync_run_at + + @last_sync_run_at.setter + def last_sync_run_at(self, last_sync_run_at: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.last_sync_run_at = last_sync_run_at + + @property + def last_sync_run(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.last_sync_run + + @last_sync_run.setter + def last_sync_run(self, last_sync_run: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.last_sync_run = last_sync_run + + @property + def admin_roles(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.admin_roles + + @admin_roles.setter + def admin_roles(self, admin_roles: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.admin_roles = admin_roles + + @property + def source_read_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.source_read_count + + @source_read_count.setter + def source_read_count(self, source_read_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_read_count = source_read_count + + @property + def source_read_user_count(self) -> Optional[int]: + return ( + None if self.attributes is None else self.attributes.source_read_user_count + ) + + @source_read_user_count.setter + def source_read_user_count(self, source_read_user_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_read_user_count = source_read_user_count + + @property + def source_last_read_at(self) -> Optional[datetime]: + return None if self.attributes is None else self.attributes.source_last_read_at + + @source_last_read_at.setter + def source_last_read_at(self, source_last_read_at: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_last_read_at = source_last_read_at + + @property + def last_row_changed_at(self) -> Optional[datetime]: + return None if self.attributes is None else self.attributes.last_row_changed_at + + @last_row_changed_at.setter + def last_row_changed_at(self, last_row_changed_at: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.last_row_changed_at = last_row_changed_at + + @property + def source_total_cost(self) -> Optional[float]: + return None if self.attributes is 
None else self.attributes.source_total_cost + + @source_total_cost.setter + def source_total_cost(self, source_total_cost: Optional[float]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_total_cost = source_total_cost + + @property + def source_cost_unit(self) -> Optional[SourceCostUnitType]: + return None if self.attributes is None else self.attributes.source_cost_unit + + @source_cost_unit.setter + def source_cost_unit(self, source_cost_unit: Optional[SourceCostUnitType]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_cost_unit = source_cost_unit + + @property + def source_read_query_cost(self) -> Optional[float]: + return ( + None if self.attributes is None else self.attributes.source_read_query_cost + ) + + @source_read_query_cost.setter + def source_read_query_cost(self, source_read_query_cost: Optional[float]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_read_query_cost = source_read_query_cost + + @property + def source_read_recent_user_list(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.source_read_recent_user_list + ) + + @source_read_recent_user_list.setter + def source_read_recent_user_list( + self, source_read_recent_user_list: Optional[set[str]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_read_recent_user_list = source_read_recent_user_list + + @property + def source_read_recent_user_record_list(self) -> Optional[list[PopularityInsights]]: + return ( + None + if self.attributes is None + else self.attributes.source_read_recent_user_record_list + ) + + @source_read_recent_user_record_list.setter + def source_read_recent_user_record_list( + self, source_read_recent_user_record_list: Optional[list[PopularityInsights]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_read_recent_user_record_list = ( + source_read_recent_user_record_list + ) + + @property + def source_read_top_user_list(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.source_read_top_user_list + ) + + @source_read_top_user_list.setter + def source_read_top_user_list(self, source_read_top_user_list: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_read_top_user_list = source_read_top_user_list + + @property + def source_read_top_user_record_list(self) -> Optional[list[PopularityInsights]]: + return ( + None + if self.attributes is None + else self.attributes.source_read_top_user_record_list + ) + + @source_read_top_user_record_list.setter + def source_read_top_user_record_list( + self, source_read_top_user_record_list: Optional[list[PopularityInsights]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_read_top_user_record_list = ( + source_read_top_user_record_list + ) + + @property + def source_read_popular_query_record_list( + self, + ) -> Optional[list[PopularityInsights]]: + return ( + None + if self.attributes is None + else self.attributes.source_read_popular_query_record_list + ) + + @source_read_popular_query_record_list.setter + def source_read_popular_query_record_list( + self, source_read_popular_query_record_list: Optional[list[PopularityInsights]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + 
self.attributes.source_read_popular_query_record_list = ( + source_read_popular_query_record_list + ) + + @property + def source_read_expensive_query_record_list( + self, + ) -> Optional[list[PopularityInsights]]: + return ( + None + if self.attributes is None + else self.attributes.source_read_expensive_query_record_list + ) + + @source_read_expensive_query_record_list.setter + def source_read_expensive_query_record_list( + self, + source_read_expensive_query_record_list: Optional[list[PopularityInsights]], + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_read_expensive_query_record_list = ( + source_read_expensive_query_record_list + ) + + @property + def source_read_slow_query_record_list(self) -> Optional[list[PopularityInsights]]: + return ( + None + if self.attributes is None + else self.attributes.source_read_slow_query_record_list + ) + + @source_read_slow_query_record_list.setter + def source_read_slow_query_record_list( + self, source_read_slow_query_record_list: Optional[list[PopularityInsights]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_read_slow_query_record_list = ( + source_read_slow_query_record_list + ) + + @property + def source_query_compute_cost_list(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.source_query_compute_cost_list + ) + + @source_query_compute_cost_list.setter + def source_query_compute_cost_list( + self, source_query_compute_cost_list: Optional[set[str]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_query_compute_cost_list = source_query_compute_cost_list + + @property + def source_query_compute_cost_record_list( + self, + ) -> Optional[list[PopularityInsights]]: + return ( + None + if self.attributes is None + else self.attributes.source_query_compute_cost_record_list + ) + + @source_query_compute_cost_record_list.setter + def source_query_compute_cost_record_list( + self, source_query_compute_cost_record_list: Optional[list[PopularityInsights]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_query_compute_cost_record_list = ( + source_query_compute_cost_record_list + ) + + @property + def dbt_qualified_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_qualified_name + + @dbt_qualified_name.setter + def dbt_qualified_name(self, dbt_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_qualified_name = dbt_qualified_name + + @property + def asset_dbt_alias(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.asset_dbt_alias + + @asset_dbt_alias.setter + def asset_dbt_alias(self, asset_dbt_alias: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_alias = asset_dbt_alias + + @property + def asset_dbt_meta(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.asset_dbt_meta + + @asset_dbt_meta.setter + def asset_dbt_meta(self, asset_dbt_meta: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_meta = asset_dbt_meta + + @property + def asset_dbt_unique_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.asset_dbt_unique_id + + @asset_dbt_unique_id.setter + def 
asset_dbt_unique_id(self, asset_dbt_unique_id: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_unique_id = asset_dbt_unique_id + + @property + def asset_dbt_account_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.asset_dbt_account_name + ) + + @asset_dbt_account_name.setter + def asset_dbt_account_name(self, asset_dbt_account_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_account_name = asset_dbt_account_name + + @property + def asset_dbt_project_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.asset_dbt_project_name + ) + + @asset_dbt_project_name.setter + def asset_dbt_project_name(self, asset_dbt_project_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_project_name = asset_dbt_project_name + + @property + def asset_dbt_package_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.asset_dbt_package_name + ) + + @asset_dbt_package_name.setter + def asset_dbt_package_name(self, asset_dbt_package_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_package_name = asset_dbt_package_name + + @property + def asset_dbt_job_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.asset_dbt_job_name + + @asset_dbt_job_name.setter + def asset_dbt_job_name(self, asset_dbt_job_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_job_name = asset_dbt_job_name + + @property + def asset_dbt_job_schedule(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.asset_dbt_job_schedule + ) + + @asset_dbt_job_schedule.setter + def asset_dbt_job_schedule(self, asset_dbt_job_schedule: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_job_schedule = asset_dbt_job_schedule + + @property + def asset_dbt_job_status(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.asset_dbt_job_status + + @asset_dbt_job_status.setter + def asset_dbt_job_status(self, asset_dbt_job_status: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_job_status = asset_dbt_job_status + + @property + def asset_dbt_test_status(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.asset_dbt_test_status + ) + + @asset_dbt_test_status.setter + def asset_dbt_test_status(self, asset_dbt_test_status: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_test_status = asset_dbt_test_status + + @property + def asset_dbt_job_schedule_cron_humanized(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.asset_dbt_job_schedule_cron_humanized + ) + + @asset_dbt_job_schedule_cron_humanized.setter + def asset_dbt_job_schedule_cron_humanized( + self, asset_dbt_job_schedule_cron_humanized: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_job_schedule_cron_humanized = ( + asset_dbt_job_schedule_cron_humanized + ) + + @property + def asset_dbt_job_last_run(self) -> Optional[datetime]: + 
return ( + None if self.attributes is None else self.attributes.asset_dbt_job_last_run + ) + + @asset_dbt_job_last_run.setter + def asset_dbt_job_last_run(self, asset_dbt_job_last_run: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_job_last_run = asset_dbt_job_last_run + + @property + def asset_dbt_job_last_run_url(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.asset_dbt_job_last_run_url + ) + + @asset_dbt_job_last_run_url.setter + def asset_dbt_job_last_run_url(self, asset_dbt_job_last_run_url: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_job_last_run_url = asset_dbt_job_last_run_url + + @property + def asset_dbt_job_last_run_created_at(self) -> Optional[datetime]: + return ( + None + if self.attributes is None + else self.attributes.asset_dbt_job_last_run_created_at + ) + + @asset_dbt_job_last_run_created_at.setter + def asset_dbt_job_last_run_created_at( + self, asset_dbt_job_last_run_created_at: Optional[datetime] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_job_last_run_created_at = ( + asset_dbt_job_last_run_created_at + ) + + @property + def asset_dbt_job_last_run_updated_at(self) -> Optional[datetime]: + return ( + None + if self.attributes is None + else self.attributes.asset_dbt_job_last_run_updated_at + ) + + @asset_dbt_job_last_run_updated_at.setter + def asset_dbt_job_last_run_updated_at( + self, asset_dbt_job_last_run_updated_at: Optional[datetime] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_job_last_run_updated_at = ( + asset_dbt_job_last_run_updated_at + ) + + @property + def asset_dbt_job_last_run_dequed_at(self) -> Optional[datetime]: + return ( + None + if self.attributes is None + else self.attributes.asset_dbt_job_last_run_dequed_at + ) + + @asset_dbt_job_last_run_dequed_at.setter + def asset_dbt_job_last_run_dequed_at( + self, asset_dbt_job_last_run_dequed_at: Optional[datetime] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_job_last_run_dequed_at = ( + asset_dbt_job_last_run_dequed_at + ) + + @property + def asset_dbt_job_last_run_started_at(self) -> Optional[datetime]: + return ( + None + if self.attributes is None + else self.attributes.asset_dbt_job_last_run_started_at + ) + + @asset_dbt_job_last_run_started_at.setter + def asset_dbt_job_last_run_started_at( + self, asset_dbt_job_last_run_started_at: Optional[datetime] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_job_last_run_started_at = ( + asset_dbt_job_last_run_started_at + ) + + @property + def asset_dbt_job_last_run_total_duration(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.asset_dbt_job_last_run_total_duration + ) + + @asset_dbt_job_last_run_total_duration.setter + def asset_dbt_job_last_run_total_duration( + self, asset_dbt_job_last_run_total_duration: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_job_last_run_total_duration = ( + asset_dbt_job_last_run_total_duration + ) + + @property + def asset_dbt_job_last_run_total_duration_humanized(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.asset_dbt_job_last_run_total_duration_humanized + ) + + 
@asset_dbt_job_last_run_total_duration_humanized.setter + def asset_dbt_job_last_run_total_duration_humanized( + self, asset_dbt_job_last_run_total_duration_humanized: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_job_last_run_total_duration_humanized = ( + asset_dbt_job_last_run_total_duration_humanized + ) + + @property + def asset_dbt_job_last_run_queued_duration(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.asset_dbt_job_last_run_queued_duration + ) + + @asset_dbt_job_last_run_queued_duration.setter + def asset_dbt_job_last_run_queued_duration( + self, asset_dbt_job_last_run_queued_duration: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_job_last_run_queued_duration = ( + asset_dbt_job_last_run_queued_duration + ) + + @property + def asset_dbt_job_last_run_queued_duration_humanized(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.asset_dbt_job_last_run_queued_duration_humanized + ) + + @asset_dbt_job_last_run_queued_duration_humanized.setter + def asset_dbt_job_last_run_queued_duration_humanized( + self, asset_dbt_job_last_run_queued_duration_humanized: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_job_last_run_queued_duration_humanized = ( + asset_dbt_job_last_run_queued_duration_humanized + ) + + @property + def asset_dbt_job_last_run_run_duration(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.asset_dbt_job_last_run_run_duration + ) + + @asset_dbt_job_last_run_run_duration.setter + def asset_dbt_job_last_run_run_duration( + self, asset_dbt_job_last_run_run_duration: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_job_last_run_run_duration = ( + asset_dbt_job_last_run_run_duration + ) + + @property + def asset_dbt_job_last_run_run_duration_humanized(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.asset_dbt_job_last_run_run_duration_humanized + ) + + @asset_dbt_job_last_run_run_duration_humanized.setter + def asset_dbt_job_last_run_run_duration_humanized( + self, asset_dbt_job_last_run_run_duration_humanized: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_job_last_run_run_duration_humanized = ( + asset_dbt_job_last_run_run_duration_humanized + ) + + @property + def asset_dbt_job_last_run_git_branch(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.asset_dbt_job_last_run_git_branch + ) + + @asset_dbt_job_last_run_git_branch.setter + def asset_dbt_job_last_run_git_branch( + self, asset_dbt_job_last_run_git_branch: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_job_last_run_git_branch = ( + asset_dbt_job_last_run_git_branch + ) + + @property + def asset_dbt_job_last_run_git_sha(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.asset_dbt_job_last_run_git_sha + ) + + @asset_dbt_job_last_run_git_sha.setter + def asset_dbt_job_last_run_git_sha( + self, asset_dbt_job_last_run_git_sha: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_job_last_run_git_sha = 
asset_dbt_job_last_run_git_sha + + @property + def asset_dbt_job_last_run_status_message(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.asset_dbt_job_last_run_status_message + ) + + @asset_dbt_job_last_run_status_message.setter + def asset_dbt_job_last_run_status_message( + self, asset_dbt_job_last_run_status_message: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_job_last_run_status_message = ( + asset_dbt_job_last_run_status_message + ) + + @property + def asset_dbt_job_last_run_owner_thread_id(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.asset_dbt_job_last_run_owner_thread_id + ) + + @asset_dbt_job_last_run_owner_thread_id.setter + def asset_dbt_job_last_run_owner_thread_id( + self, asset_dbt_job_last_run_owner_thread_id: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_job_last_run_owner_thread_id = ( + asset_dbt_job_last_run_owner_thread_id + ) + + @property + def asset_dbt_job_last_run_executed_by_thread_id(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.asset_dbt_job_last_run_executed_by_thread_id + ) + + @asset_dbt_job_last_run_executed_by_thread_id.setter + def asset_dbt_job_last_run_executed_by_thread_id( + self, asset_dbt_job_last_run_executed_by_thread_id: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_job_last_run_executed_by_thread_id = ( + asset_dbt_job_last_run_executed_by_thread_id + ) + + @property + def asset_dbt_job_last_run_artifacts_saved(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.asset_dbt_job_last_run_artifacts_saved + ) + + @asset_dbt_job_last_run_artifacts_saved.setter + def asset_dbt_job_last_run_artifacts_saved( + self, asset_dbt_job_last_run_artifacts_saved: Optional[bool] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_job_last_run_artifacts_saved = ( + asset_dbt_job_last_run_artifacts_saved + ) + + @property + def asset_dbt_job_last_run_artifact_s3_path(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.asset_dbt_job_last_run_artifact_s3_path + ) + + @asset_dbt_job_last_run_artifact_s3_path.setter + def asset_dbt_job_last_run_artifact_s3_path( + self, asset_dbt_job_last_run_artifact_s3_path: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_job_last_run_artifact_s3_path = ( + asset_dbt_job_last_run_artifact_s3_path + ) + + @property + def asset_dbt_job_last_run_has_docs_generated(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.asset_dbt_job_last_run_has_docs_generated + ) + + @asset_dbt_job_last_run_has_docs_generated.setter + def asset_dbt_job_last_run_has_docs_generated( + self, asset_dbt_job_last_run_has_docs_generated: Optional[bool] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_job_last_run_has_docs_generated = ( + asset_dbt_job_last_run_has_docs_generated + ) + + @property + def asset_dbt_job_last_run_has_sources_generated(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.asset_dbt_job_last_run_has_sources_generated + ) + + 
@asset_dbt_job_last_run_has_sources_generated.setter + def asset_dbt_job_last_run_has_sources_generated( + self, asset_dbt_job_last_run_has_sources_generated: Optional[bool] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_job_last_run_has_sources_generated = ( + asset_dbt_job_last_run_has_sources_generated + ) + + @property + def asset_dbt_job_last_run_notifications_sent(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.asset_dbt_job_last_run_notifications_sent + ) + + @asset_dbt_job_last_run_notifications_sent.setter + def asset_dbt_job_last_run_notifications_sent( + self, asset_dbt_job_last_run_notifications_sent: Optional[bool] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_job_last_run_notifications_sent = ( + asset_dbt_job_last_run_notifications_sent + ) + + @property + def asset_dbt_job_next_run(self) -> Optional[datetime]: + return ( + None if self.attributes is None else self.attributes.asset_dbt_job_next_run + ) + + @asset_dbt_job_next_run.setter + def asset_dbt_job_next_run(self, asset_dbt_job_next_run: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_job_next_run = asset_dbt_job_next_run + + @property + def asset_dbt_job_next_run_humanized(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.asset_dbt_job_next_run_humanized + ) + + @asset_dbt_job_next_run_humanized.setter + def asset_dbt_job_next_run_humanized( + self, asset_dbt_job_next_run_humanized: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_job_next_run_humanized = ( + asset_dbt_job_next_run_humanized + ) + + @property + def asset_dbt_environment_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.asset_dbt_environment_name + ) + + @asset_dbt_environment_name.setter + def asset_dbt_environment_name(self, asset_dbt_environment_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_environment_name = asset_dbt_environment_name + + @property + def asset_dbt_environment_dbt_version(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.asset_dbt_environment_dbt_version + ) + + @asset_dbt_environment_dbt_version.setter + def asset_dbt_environment_dbt_version( + self, asset_dbt_environment_dbt_version: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_environment_dbt_version = ( + asset_dbt_environment_dbt_version + ) + + @property + def asset_dbt_tags(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.asset_dbt_tags + + @asset_dbt_tags.setter + def asset_dbt_tags(self, asset_dbt_tags: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_tags = asset_dbt_tags + + @property + def asset_dbt_semantic_layer_proxy_url(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.asset_dbt_semantic_layer_proxy_url + ) + + @asset_dbt_semantic_layer_proxy_url.setter + def asset_dbt_semantic_layer_proxy_url( + self, asset_dbt_semantic_layer_proxy_url: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + 
self.attributes.asset_dbt_semantic_layer_proxy_url = ( + asset_dbt_semantic_layer_proxy_url + ) + + @property + def asset_dbt_source_freshness_criteria(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.asset_dbt_source_freshness_criteria + ) + + @asset_dbt_source_freshness_criteria.setter + def asset_dbt_source_freshness_criteria( + self, asset_dbt_source_freshness_criteria: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_dbt_source_freshness_criteria = ( + asset_dbt_source_freshness_criteria + ) + + @property + def sample_data_url(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.sample_data_url + + @sample_data_url.setter + def sample_data_url(self, sample_data_url: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sample_data_url = sample_data_url + + @property + def asset_tags(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.asset_tags + + @asset_tags.setter + def asset_tags(self, asset_tags: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_tags = asset_tags + + @property + def asset_mc_incident_names(self) -> Optional[set[str]]: + return ( + None if self.attributes is None else self.attributes.asset_mc_incident_names + ) + + @asset_mc_incident_names.setter + def asset_mc_incident_names(self, asset_mc_incident_names: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_mc_incident_names = asset_mc_incident_names + + @property + def asset_mc_incident_qualified_names(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.asset_mc_incident_qualified_names + ) + + @asset_mc_incident_qualified_names.setter + def asset_mc_incident_qualified_names( + self, asset_mc_incident_qualified_names: Optional[set[str]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_mc_incident_qualified_names = ( + asset_mc_incident_qualified_names + ) + + @property + def asset_mc_monitor_names(self) -> Optional[set[str]]: + return ( + None if self.attributes is None else self.attributes.asset_mc_monitor_names + ) + + @asset_mc_monitor_names.setter + def asset_mc_monitor_names(self, asset_mc_monitor_names: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_mc_monitor_names = asset_mc_monitor_names + + @property + def asset_mc_monitor_qualified_names(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.asset_mc_monitor_qualified_names + ) + + @asset_mc_monitor_qualified_names.setter + def asset_mc_monitor_qualified_names( + self, asset_mc_monitor_qualified_names: Optional[set[str]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_mc_monitor_qualified_names = ( + asset_mc_monitor_qualified_names + ) + + @property + def asset_mc_monitor_statuses(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.asset_mc_monitor_statuses + ) + + @asset_mc_monitor_statuses.setter + def asset_mc_monitor_statuses(self, asset_mc_monitor_statuses: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_mc_monitor_statuses = 
asset_mc_monitor_statuses + + @property + def asset_mc_monitor_types(self) -> Optional[set[str]]: + return ( + None if self.attributes is None else self.attributes.asset_mc_monitor_types + ) + + @asset_mc_monitor_types.setter + def asset_mc_monitor_types(self, asset_mc_monitor_types: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_mc_monitor_types = asset_mc_monitor_types + + @property + def asset_mc_monitor_schedule_types(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.asset_mc_monitor_schedule_types + ) + + @asset_mc_monitor_schedule_types.setter + def asset_mc_monitor_schedule_types( + self, asset_mc_monitor_schedule_types: Optional[set[str]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_mc_monitor_schedule_types = ( + asset_mc_monitor_schedule_types + ) + + @property + def asset_mc_incident_types(self) -> Optional[set[str]]: + return ( + None if self.attributes is None else self.attributes.asset_mc_incident_types + ) + + @asset_mc_incident_types.setter + def asset_mc_incident_types(self, asset_mc_incident_types: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_mc_incident_types = asset_mc_incident_types + + @property + def asset_mc_incident_sub_types(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.asset_mc_incident_sub_types + ) + + @asset_mc_incident_sub_types.setter + def asset_mc_incident_sub_types( + self, asset_mc_incident_sub_types: Optional[set[str]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_mc_incident_sub_types = asset_mc_incident_sub_types + + @property + def asset_mc_incident_severities(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.asset_mc_incident_severities + ) + + @asset_mc_incident_severities.setter + def asset_mc_incident_severities( + self, asset_mc_incident_severities: Optional[set[str]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_mc_incident_severities = asset_mc_incident_severities + + @property + def asset_mc_incident_states(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.asset_mc_incident_states + ) + + @asset_mc_incident_states.setter + def asset_mc_incident_states(self, asset_mc_incident_states: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_mc_incident_states = asset_mc_incident_states + + @property + def asset_mc_last_sync_run_at(self) -> Optional[datetime]: + return ( + None + if self.attributes is None + else self.attributes.asset_mc_last_sync_run_at + ) + + @asset_mc_last_sync_run_at.setter + def asset_mc_last_sync_run_at(self, asset_mc_last_sync_run_at: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_mc_last_sync_run_at = asset_mc_last_sync_run_at + + @property + def starred_by(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.starred_by + + @starred_by.setter + def starred_by(self, starred_by: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.starred_by = starred_by + + @property + def starred_details_list(self) -> 
Optional[list[StarredDetails]]: + return None if self.attributes is None else self.attributes.starred_details_list + + @starred_details_list.setter + def starred_details_list( + self, starred_details_list: Optional[list[StarredDetails]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.starred_details_list = starred_details_list + + @property + def starred_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.starred_count + + @starred_count.setter + def starred_count(self, starred_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.starred_count = starred_count + + @property + def asset_soda_d_q_status(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.asset_soda_d_q_status + ) + + @asset_soda_d_q_status.setter + def asset_soda_d_q_status(self, asset_soda_d_q_status: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_soda_d_q_status = asset_soda_d_q_status + + @property + def asset_soda_check_count(self) -> Optional[int]: + return ( + None if self.attributes is None else self.attributes.asset_soda_check_count + ) + + @asset_soda_check_count.setter + def asset_soda_check_count(self, asset_soda_check_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_soda_check_count = asset_soda_check_count + + @property + def asset_soda_last_sync_run_at(self) -> Optional[datetime]: + return ( + None + if self.attributes is None + else self.attributes.asset_soda_last_sync_run_at + ) + + @asset_soda_last_sync_run_at.setter + def asset_soda_last_sync_run_at( + self, asset_soda_last_sync_run_at: Optional[datetime] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_soda_last_sync_run_at = asset_soda_last_sync_run_at + + @property + def asset_soda_last_scan_at(self) -> Optional[datetime]: + return ( + None if self.attributes is None else self.attributes.asset_soda_last_scan_at + ) + + @asset_soda_last_scan_at.setter + def asset_soda_last_scan_at(self, asset_soda_last_scan_at: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_soda_last_scan_at = asset_soda_last_scan_at + + @property + def asset_soda_check_statuses(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.asset_soda_check_statuses + ) + + @asset_soda_check_statuses.setter + def asset_soda_check_statuses(self, asset_soda_check_statuses: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_soda_check_statuses = asset_soda_check_statuses + + @property + def asset_soda_source_url(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.asset_soda_source_url + ) + + @asset_soda_source_url.setter + def asset_soda_source_url(self, asset_soda_source_url: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_soda_source_url = asset_soda_source_url + + @property + def asset_icon(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.asset_icon + + @asset_icon.setter + def asset_icon(self, asset_icon: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_icon = asset_icon + + @property + def 
is_partial(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_partial + + @is_partial.setter + def is_partial(self, is_partial: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_partial = is_partial + + @property + def is_a_i_generated(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_a_i_generated + + @is_a_i_generated.setter + def is_a_i_generated(self, is_a_i_generated: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_a_i_generated = is_a_i_generated + + @property + def asset_cover_image(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.asset_cover_image + + @asset_cover_image.setter + def asset_cover_image(self, asset_cover_image: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_cover_image = asset_cover_image + + @property + def asset_theme_hex(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.asset_theme_hex + + @asset_theme_hex.setter + def asset_theme_hex(self, asset_theme_hex: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_theme_hex = asset_theme_hex + + @property + def schema_registry_subjects(self) -> Optional[list[SchemaRegistrySubject]]: + return ( + None + if self.attributes is None + else self.attributes.schema_registry_subjects + ) + + @schema_registry_subjects.setter + def schema_registry_subjects( + self, schema_registry_subjects: Optional[list[SchemaRegistrySubject]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.schema_registry_subjects = schema_registry_subjects + + @property + def mc_monitors(self) -> Optional[list[MCMonitor]]: + return None if self.attributes is None else self.attributes.mc_monitors + + @mc_monitors.setter + def mc_monitors(self, mc_monitors: Optional[list[MCMonitor]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mc_monitors = mc_monitors + + @property + def output_port_data_products(self) -> Optional[list[DataProduct]]: + return ( + None + if self.attributes is None + else self.attributes.output_port_data_products + ) + + @output_port_data_products.setter + def output_port_data_products( + self, output_port_data_products: Optional[list[DataProduct]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.output_port_data_products = output_port_data_products + + @property + def files(self) -> Optional[list[File]]: + return None if self.attributes is None else self.attributes.files + + @files.setter + def files(self, files: Optional[list[File]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.files = files + + @property + def mc_incidents(self) -> Optional[list[MCIncident]]: + return None if self.attributes is None else self.attributes.mc_incidents + + @mc_incidents.setter + def mc_incidents(self, mc_incidents: Optional[list[MCIncident]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mc_incidents = mc_incidents + + @property + def links(self) -> Optional[list[Link]]: + return None if self.attributes is None else self.attributes.links + + @links.setter + def links(self, links: Optional[list[Link]]): + if self.attributes is None: + self.attributes = self.Attributes() + 
self.attributes.links = links + + @property + def metrics(self) -> Optional[list[Metric]]: + return None if self.attributes is None else self.attributes.metrics + + @metrics.setter + def metrics(self, metrics: Optional[list[Metric]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metrics = metrics + + @property + def readme(self) -> Optional[Readme]: + return None if self.attributes is None else self.attributes.readme + + @readme.setter + def readme(self, readme: Optional[Readme]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.readme = readme + + @property + def soda_checks(self) -> Optional[list[SodaCheck]]: + return None if self.attributes is None else self.attributes.soda_checks + + @soda_checks.setter + def soda_checks(self, soda_checks: Optional[list[SodaCheck]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.soda_checks = soda_checks + + @property + def assigned_terms(self) -> Optional[list[AtlasGlossaryTerm]]: + return None if self.attributes is None else self.attributes.meanings + + @assigned_terms.setter + def assigned_terms(self, assigned_terms: Optional[list[AtlasGlossaryTerm]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.meanings = assigned_terms + + class Attributes(Referenceable.Attributes): + name: Optional[str] = Field(default=None, description="") + display_name: Optional[str] = Field(default=None, description="") + description: Optional[str] = Field(default=None, description="") + user_description: Optional[str] = Field(default=None, description="") + tenant_id: Optional[str] = Field(default=None, description="") + certificate_status: Optional[CertificateStatus] = Field( + default=None, description="" + ) + certificate_status_message: Optional[str] = Field(default=None, description="") + certificate_updated_by: Optional[str] = Field(default=None, description="") + certificate_updated_at: Optional[datetime] = Field(default=None, description="") + announcement_title: Optional[str] = Field(default=None, description="") + announcement_message: Optional[str] = Field(default=None, description="") + announcement_type: Optional[str] = Field(default=None, description="") + announcement_updated_at: Optional[datetime] = Field( + default=None, description="" + ) + announcement_updated_by: Optional[str] = Field(default=None, description="") + owner_users: Optional[set[str]] = Field(default=None, description="") + owner_groups: Optional[set[str]] = Field(default=None, description="") + admin_users: Optional[set[str]] = Field(default=None, description="") + admin_groups: Optional[set[str]] = Field(default=None, description="") + viewer_users: Optional[set[str]] = Field(default=None, description="") + viewer_groups: Optional[set[str]] = Field(default=None, description="") + connector_name: Optional[str] = Field(default=None, description="") + connection_name: Optional[str] = Field(default=None, description="") + connection_qualified_name: Optional[str] = Field(default=None, description="") + has_lineage: Optional[bool] = Field(default=None, description="") + is_discoverable: Optional[bool] = Field(default=None, description="") + is_editable: Optional[bool] = Field(default=None, description="") + sub_type: Optional[str] = Field(default=None, description="") + view_score: Optional[float] = Field(default=None, description="") + popularity_score: Optional[float] = Field(default=None, description="") + source_owners: Optional[str] = 
Field(default=None, description="") + source_created_by: Optional[str] = Field(default=None, description="") + source_created_at: Optional[datetime] = Field(default=None, description="") + source_updated_at: Optional[datetime] = Field(default=None, description="") + source_updated_by: Optional[str] = Field(default=None, description="") + source_url: Optional[str] = Field(default=None, description="") + source_embed_url: Optional[str] = Field(default=None, description="") + last_sync_workflow_name: Optional[str] = Field(default=None, description="") + last_sync_run_at: Optional[datetime] = Field(default=None, description="") + last_sync_run: Optional[str] = Field(default=None, description="") + admin_roles: Optional[set[str]] = Field(default=None, description="") + source_read_count: Optional[int] = Field(default=None, description="") + source_read_user_count: Optional[int] = Field(default=None, description="") + source_last_read_at: Optional[datetime] = Field(default=None, description="") + last_row_changed_at: Optional[datetime] = Field(default=None, description="") + source_total_cost: Optional[float] = Field(default=None, description="") + source_cost_unit: Optional[SourceCostUnitType] = Field( + default=None, description="" + ) + source_read_query_cost: Optional[float] = Field(default=None, description="") + source_read_recent_user_list: Optional[set[str]] = Field( + default=None, description="" + ) + source_read_recent_user_record_list: Optional[list[PopularityInsights]] = Field( + default=None, description="" + ) + source_read_top_user_list: Optional[set[str]] = Field( + default=None, description="" + ) + source_read_top_user_record_list: Optional[list[PopularityInsights]] = Field( + default=None, description="" + ) + source_read_popular_query_record_list: Optional[ + list[PopularityInsights] + ] = Field(default=None, description="") + source_read_expensive_query_record_list: Optional[ + list[PopularityInsights] + ] = Field(default=None, description="") + source_read_slow_query_record_list: Optional[list[PopularityInsights]] = Field( + default=None, description="" + ) + source_query_compute_cost_list: Optional[set[str]] = Field( + default=None, description="" + ) + source_query_compute_cost_record_list: Optional[ + list[PopularityInsights] + ] = Field(default=None, description="") + dbt_qualified_name: Optional[str] = Field(default=None, description="") + asset_dbt_alias: Optional[str] = Field(default=None, description="") + asset_dbt_meta: Optional[str] = Field(default=None, description="") + asset_dbt_unique_id: Optional[str] = Field(default=None, description="") + asset_dbt_account_name: Optional[str] = Field(default=None, description="") + asset_dbt_project_name: Optional[str] = Field(default=None, description="") + asset_dbt_package_name: Optional[str] = Field(default=None, description="") + asset_dbt_job_name: Optional[str] = Field(default=None, description="") + asset_dbt_job_schedule: Optional[str] = Field(default=None, description="") + asset_dbt_job_status: Optional[str] = Field(default=None, description="") + asset_dbt_test_status: Optional[str] = Field(default=None, description="") + asset_dbt_job_schedule_cron_humanized: Optional[str] = Field( + default=None, description="" + ) + asset_dbt_job_last_run: Optional[datetime] = Field(default=None, description="") + asset_dbt_job_last_run_url: Optional[str] = Field(default=None, description="") + asset_dbt_job_last_run_created_at: Optional[datetime] = Field( + default=None, description="" + ) + 
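Aside: every convenience property above follows the same lazy pattern, reads return None while no Attributes instance exists, and writes create one on demand before assigning, with `__setattr__` routing those names to their property setters. A minimal self-contained sketch of that pattern (the Demo class and its single field are invented for illustration and are not part of pyatlan; it uses the `pydantic.v1` shim this PR migrates to, so pydantic 2.x must be installed):

```python
from typing import ClassVar, Optional

from pydantic.v1 import BaseModel, Field


class Demo(BaseModel):
    """Toy stand-in for an Asset subclass; not part of pyatlan."""

    _convenience_properties: ClassVar[list[str]] = ["sample_data_url"]

    class Attributes(BaseModel):
        sample_data_url: Optional[str] = Field(default=None, description="")

    attributes: Optional[Attributes] = None

    def __setattr__(self, name, value):
        # Route convenience properties to their setters instead of pydantic's
        # field handling (mirrors Asset.__setattr__ elsewhere in this diff).
        if name in Demo._convenience_properties:
            return object.__setattr__(self, name, value)
        super().__setattr__(name, value)

    @property
    def sample_data_url(self) -> Optional[str]:
        # Reads never raise, even before any attribute has been set.
        return None if self.attributes is None else self.attributes.sample_data_url

    @sample_data_url.setter
    def sample_data_url(self, sample_data_url: Optional[str]):
        # Writes lazily create the nested Attributes container first.
        if self.attributes is None:
            self.attributes = self.Attributes()
        self.attributes.sample_data_url = sample_data_url


d = Demo()
assert d.sample_data_url is None
d.sample_data_url = "https://example.com/sample.csv"
assert d.attributes is not None
assert d.sample_data_url == "https://example.com/sample.csv"
```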
asset_dbt_job_last_run_updated_at: Optional[datetime] = Field( + default=None, description="" + ) + asset_dbt_job_last_run_dequed_at: Optional[datetime] = Field( + default=None, description="" + ) + asset_dbt_job_last_run_started_at: Optional[datetime] = Field( + default=None, description="" + ) + asset_dbt_job_last_run_total_duration: Optional[str] = Field( + default=None, description="" + ) + asset_dbt_job_last_run_total_duration_humanized: Optional[str] = Field( + default=None, description="" + ) + asset_dbt_job_last_run_queued_duration: Optional[str] = Field( + default=None, description="" + ) + asset_dbt_job_last_run_queued_duration_humanized: Optional[str] = Field( + default=None, description="" + ) + asset_dbt_job_last_run_run_duration: Optional[str] = Field( + default=None, description="" + ) + asset_dbt_job_last_run_run_duration_humanized: Optional[str] = Field( + default=None, description="" + ) + asset_dbt_job_last_run_git_branch: Optional[str] = Field( + default=None, description="" + ) + asset_dbt_job_last_run_git_sha: Optional[str] = Field( + default=None, description="" + ) + asset_dbt_job_last_run_status_message: Optional[str] = Field( + default=None, description="" + ) + asset_dbt_job_last_run_owner_thread_id: Optional[str] = Field( + default=None, description="" + ) + asset_dbt_job_last_run_executed_by_thread_id: Optional[str] = Field( + default=None, description="" + ) + asset_dbt_job_last_run_artifacts_saved: Optional[bool] = Field( + default=None, description="" + ) + asset_dbt_job_last_run_artifact_s3_path: Optional[str] = Field( + default=None, description="" + ) + asset_dbt_job_last_run_has_docs_generated: Optional[bool] = Field( + default=None, description="" + ) + asset_dbt_job_last_run_has_sources_generated: Optional[bool] = Field( + default=None, description="" + ) + asset_dbt_job_last_run_notifications_sent: Optional[bool] = Field( + default=None, description="" + ) + asset_dbt_job_next_run: Optional[datetime] = Field(default=None, description="") + asset_dbt_job_next_run_humanized: Optional[str] = Field( + default=None, description="" + ) + asset_dbt_environment_name: Optional[str] = Field(default=None, description="") + asset_dbt_environment_dbt_version: Optional[str] = Field( + default=None, description="" + ) + asset_dbt_tags: Optional[set[str]] = Field(default=None, description="") + asset_dbt_semantic_layer_proxy_url: Optional[str] = Field( + default=None, description="" + ) + asset_dbt_source_freshness_criteria: Optional[str] = Field( + default=None, description="" + ) + sample_data_url: Optional[str] = Field(default=None, description="") + asset_tags: Optional[set[str]] = Field(default=None, description="") + asset_mc_incident_names: Optional[set[str]] = Field( + default=None, description="" + ) + asset_mc_incident_qualified_names: Optional[set[str]] = Field( + default=None, description="" + ) + asset_mc_monitor_names: Optional[set[str]] = Field(default=None, description="") + asset_mc_monitor_qualified_names: Optional[set[str]] = Field( + default=None, description="" + ) + asset_mc_monitor_statuses: Optional[set[str]] = Field( + default=None, description="" + ) + asset_mc_monitor_types: Optional[set[str]] = Field(default=None, description="") + asset_mc_monitor_schedule_types: Optional[set[str]] = Field( + default=None, description="" + ) + asset_mc_incident_types: Optional[set[str]] = Field( + default=None, description="" + ) + asset_mc_incident_sub_types: Optional[set[str]] = Field( + default=None, description="" + ) + asset_mc_incident_severities: 
Optional[set[str]] = Field( + default=None, description="" + ) + asset_mc_incident_states: Optional[set[str]] = Field( + default=None, description="" + ) + asset_mc_last_sync_run_at: Optional[datetime] = Field( + default=None, description="" + ) + starred_by: Optional[set[str]] = Field(default=None, description="") + starred_details_list: Optional[list[StarredDetails]] = Field( + default=None, description="" + ) + starred_count: Optional[int] = Field(default=None, description="") + asset_soda_d_q_status: Optional[str] = Field(default=None, description="") + asset_soda_check_count: Optional[int] = Field(default=None, description="") + asset_soda_last_sync_run_at: Optional[datetime] = Field( + default=None, description="" + ) + asset_soda_last_scan_at: Optional[datetime] = Field( + default=None, description="" + ) + asset_soda_check_statuses: Optional[str] = Field(default=None, description="") + asset_soda_source_url: Optional[str] = Field(default=None, description="") + asset_icon: Optional[str] = Field(default=None, description="") + is_partial: Optional[bool] = Field(default=None, description="") + is_a_i_generated: Optional[bool] = Field(default=None, description="") + asset_cover_image: Optional[str] = Field(default=None, description="") + asset_theme_hex: Optional[str] = Field(default=None, description="") + schema_registry_subjects: Optional[list[SchemaRegistrySubject]] = Field( + default=None, description="" + ) # relationship + mc_monitors: Optional[list[MCMonitor]] = Field( + default=None, description="" + ) # relationship + output_port_data_products: Optional[list[DataProduct]] = Field( + default=None, description="" + ) # relationship + files: Optional[list[File]] = Field( + default=None, description="" + ) # relationship + mc_incidents: Optional[list[MCIncident]] = Field( + default=None, description="" + ) # relationship + links: Optional[list[Link]] = Field( + default=None, description="" + ) # relationship + metrics: Optional[list[Metric]] = Field( + default=None, description="" + ) # relationship + readme: Optional[Readme] = Field(default=None, description="") # relationship + soda_checks: Optional[list[SodaCheck]] = Field( + default=None, description="" + ) # relationship + meanings: Optional[list[AtlasGlossaryTerm]] = Field( + default=None, description="" + ) # relationship + + def remove_description(self): + self.description = None + + def remove_user_description(self): + self.user_description = None + + def remove_owners(self): + self.owner_groups = None + self.owner_users = None + + def remove_certificate(self): + self.certificate_status = None + self.certificate_status_message = None + + def remove_announcement(self): + self.announcement_message = None + self.announcement_title = None + self.announcement_type = None + + attributes: "Asset.Attributes" = Field( + default_factory=lambda: Asset.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .atlas_glossary_term import AtlasGlossaryTerm # noqa +from .data_product import DataProduct # noqa +from .file import File # noqa +from .link import Link # noqa +from .m_c_incident import MCIncident # noqa +from .m_c_monitor import MCMonitor # noqa +from .metric import Metric # noqa +from .readme import Readme # noqa +from .schema_registry_subject import SchemaRegistrySubject # noqa +from .soda_check import SodaCheck # noqa diff --git a/pyatlan/model/assets/asset00.py b/pyatlan/model/assets/asset00.py deleted file mode 100644 index 16cac7af9..000000000 --- a/pyatlan/model/assets/asset00.py +++ /dev/null @@ -1,16317 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 Atlan Pte. Ltd. - - -from __future__ import annotations - -import hashlib -import sys -import uuid -from datetime import datetime -from io import StringIO -from typing import TYPE_CHECKING, Any, ClassVar, Optional, Type, TypeVar -from urllib.parse import quote, unquote - -from pydantic import Field, PrivateAttr, StrictStr, root_validator, validator - -from pyatlan.errors import ErrorCode -from pyatlan.model.core import Announcement, AtlanObject, AtlanTag, Meaning -from pyatlan.model.custom_metadata import CustomMetadataDict, CustomMetadataProxy -from pyatlan.model.enums import ( - AnnouncementType, - AtlanConnectorType, - AtlanIcon, - AtlasGlossaryCategoryType, - AtlasGlossaryTermType, - AtlasGlossaryType, - CertificateStatus, - DataProductCriticality, - DataProductSensitivity, - DataProductStatus, - EntityStatus, - FileType, - IconType, - MatillionJobType, - OpenLineageRunState, - SaveSemantic, - SchemaRegistrySchemaCompatibility, - SchemaRegistrySchemaType, - SourceCostUnitType, -) -from pyatlan.model.fields.atlan_fields import ( - BooleanField, - InternalKeywordField, - InternalKeywordTextField, - InternalNumericField, - KeywordField, - KeywordTextField, - KeywordTextStemmedField, - NumericField, - NumericRankField, - RelationField, - TextField, -) -from pyatlan.model.search import IndexSearchRequest -from pyatlan.model.structs import ( - ColumnValueFrequencyMap, - DbtMetricFilter, - Histogram, - MCRuleComparison, - MCRuleSchedule, - PopularityInsights, - SourceTagAttribute, - StarredDetails, -) -from pyatlan.utils import ( - init_guid, - move_struct, - next_id, - to_camel_case, - validate_required_fields, -) - - -def validate_single_required_field(field_names: list[str], values: list[Any]): - indexes = [idx for idx, value in enumerate(values) if value is not None] - if not indexes: - raise ValueError( - f"One of the following parameters are required: {', '.join(field_names)}" - ) - if len(indexes) > 1: - names = [field_names[idx] for idx in indexes] - raise ValueError( - f"Only one of the following parameters are allowed: {', '.join(names)}" - ) - - -SelfAsset = TypeVar("SelfAsset", bound="Asset") - - -class Referenceable(AtlanObject): - """Description""" - - def __init__(__pydantic_self__, **data: Any) -> None: - super().__init__(**data) - __pydantic_self__.__fields_set__.update(["attributes", "type_name"]) - __pydantic_self__._metadata_proxy = CustomMetadataProxy( - __pydantic_self__.business_attributes - ) - - def json(self, *args, **kwargs) -> str: - self.business_attributes = self._metadata_proxy.business_attributes - return super().json(**kwargs) - - def validate_required(self): - if not self.create_time or self.created_by: - self.attributes.validate_required() - - def 
get_custom_metadata(self, name: str) -> CustomMetadataDict: - return self._metadata_proxy.get_custom_metadata(name=name) - - def set_custom_metadata(self, custom_metadata: CustomMetadataDict): - return self._metadata_proxy.set_custom_metadata(custom_metadata=custom_metadata) - - def flush_custom_metadata(self): - self.business_attributes = self._metadata_proxy.business_attributes - - @classmethod - def can_be_archived(self) -> bool: - """ - Indicates if an asset can be archived via the asset.delete_by_guid method. - :returns: True if archiving is supported - """ - return True - - @property - def atlan_tag_names(self) -> list[str]: - from pyatlan.cache.atlan_tag_cache import AtlanTagCache - from pyatlan.model.constants import DELETED_ - - if self.classification_names: - return [ - AtlanTagCache.get_name_for_id(tag_id) or DELETED_ - for tag_id in self.classification_names - ] - return [] - - def __setattr__(self, name, value): - if name in Referenceable._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - _convenience_properties: ClassVar[list[str]] = [ - "qualified_name", - "assigned_terms", - ] - - @property - def qualified_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.qualified_name - - @qualified_name.setter - def qualified_name(self, qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.qualified_name = qualified_name - - @property - def assigned_terms(self) -> Optional[list[AtlasGlossaryTerm]]: - return None if self.attributes is None else self.attributes.meanings - - @assigned_terms.setter - def assigned_terms(self, assigned_terms: Optional[list[AtlasGlossaryTerm]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.meanings = assigned_terms - - class Attributes(AtlanObject): - qualified_name: Optional[str] = Field("", description="", alias="qualifiedName") - meanings: Optional[list[AtlasGlossaryTerm]] = Field( - None, description="", alias="meanings" - ) # relationship - - def validate_required(self): - pass - - TYPE_NAME: ClassVar[KeywordTextField] = InternalKeywordTextField( - "typeName", "__typeName.keyword", "__typeName", "__typeName" - ) - """Type of the asset. For example Table, Column, and so on.""" - - GUID: ClassVar[KeywordField] = InternalKeywordField("guid", "__guid", "__guid") - """Globally unique identifier (GUID) of any object in Atlan.""" - - CREATED_BY: ClassVar[KeywordField] = InternalKeywordField( - "createdBy", "__createdBy", "__createdBy" - ) - """Atlan user who created this asset.""" - - UPDATED_BY: ClassVar[KeywordField] = InternalKeywordField( - "updatedBy", "__modifiedBy", "__modifiedBy" - ) - """Atlan user who last updated the asset.""" - - STATUS: ClassVar[KeywordField] = InternalKeywordField( - "status", "__state", "__state" - ) - """Asset status in Atlan (active vs deleted).""" - - ATLAN_TAGS: ClassVar[KeywordTextField] = InternalKeywordTextField( - "classificationNames", - "__traitNames", - "__classificationsText", - "__classificationNames", - ) - """ - All directly-assigned Atlan tags that exist on an asset, searchable by internal hashed-string ID of the Atlan tag. 
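For context on the get_custom_metadata / set_custom_metadata / flush_custom_metadata helpers shown just above, a rough usage sketch. The custom metadata set name, attribute name, and GUID below are invented, the dict-style assignment on CustomMetadataDict is assumed, and a live Atlan connection is needed at runtime because the proxy resolves names through the custom-metadata cache:

```python
# Rough usage sketch of the custom-metadata helpers shown above; names and
# GUID are placeholders, and a live Atlan connection is required at runtime.
from pyatlan.model.assets import Table

table = Table.ref_by_guid("917ffec9-fa84-4c59-8e6c-c7b114d04be3")

cm = table.get_custom_metadata(name="Data Quality")  # -> CustomMetadataDict
cm["Passed"] = True                                  # dict-style assignment (assumed)
table.set_custom_metadata(custom_metadata=cm)

# Before serialising the asset for an API call, the proxy's state is copied
# back onto business_attributes (json() above does this implicitly).
table.flush_custom_metadata()
```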
- """ - - PROPAGATED_ATLAN_TAGS: ClassVar[KeywordTextField] = InternalKeywordTextField( - "classificationNames", - "__propagatedTraitNames", - "__classificationsText", - "__propagatedClassificationNames", - ) - """All propagated Atlan tags that exist on an asset, searchable by internal hashed-string ID of the Atlan tag.""" - - ASSIGNED_TERMS: ClassVar[KeywordTextField] = InternalKeywordTextField( - "meanings", "__meanings", "__meaningsText", "__meanings" - ) - """All terms attached to an asset, searchable by the term's qualifiedName.""" - - SUPER_TYPE_NAMES: ClassVar[KeywordTextField] = InternalKeywordTextField( - "typeName", "__superTypeNames.keyword", "__superTypeNames", "__superTypeNames" - ) - """All super types of an asset.""" - - CREATE_TIME: ClassVar[NumericField] = InternalNumericField( - "createTime", "__timestamp", "__timestamp" - ) - """Time (in milliseconds) when the asset was created.""" - - UPDATE_TIME: ClassVar[NumericField] = InternalNumericField( - "updateTime", "__modificationTimestamp", "__modificationTimestamp" - ) - """Time (in milliseconds) when the asset was last updated.""" - - QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "qualifiedName", "qualifiedName", "qualifiedName.text" - ) - """Unique fully-qualified name of the asset in Atlan.""" - - type_name: str = Field( - "Referenceable", - description="Name of the type definition that defines this instance.\n", - ) - _metadata_proxy: CustomMetadataProxy = PrivateAttr() - attributes: Referenceable.Attributes = Field( - default_factory=lambda: Referenceable.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary " - "by type, so are described in the sub-types of this schema.\n", - ) - business_attributes: Optional[dict[str, Any]] = Field( - None, - description="Map of custom metadata attributes and values defined on the entity.\n", - alias="businessAttributes", - ) - created_by: Optional[str] = Field( - None, - description="Username of the user who created the object.\n", - example="jsmith", - ) - create_time: Optional[int] = Field( - None, - description="Time (epoch) at which this object was created, in milliseconds.\n", - example=1648852296555, - ) - delete_handler: Optional[str] = Field( - None, - description="Details on the handler used for deletion of the asset.", - example="Hard", - ) - guid: str = Field( - "", - description="Unique identifier of the entity instance.\n", - example="917ffec9-fa84-4c59-8e6c-c7b114d04be3", - ) - is_incomplete: Optional[bool] = Field(True, description="", example=True) - labels: Optional[list[str]] = Field(None, description="Internal use only.") - relationship_attributes: Optional[dict[str, Any]] = Field( - None, - description="Map of relationships for the entity. 
The specific keys of this map will vary by type, " - "so are described in the sub-types of this schema.\n", - ) - status: Optional[EntityStatus] = Field( - None, description="Status of the entity", example=EntityStatus.ACTIVE - ) - updated_by: Optional[str] = Field( - None, - description="Username of the user who last assets_updated the object.\n", - example="jsmith", - ) - update_time: Optional[int] = Field( - None, - description="Time (epoch) at which this object was last assets_updated, in milliseconds.\n", - example=1649172284333, - ) - version: Optional[int] = Field( - None, description="Version of this object.\n", example=2 - ) - atlan_tags: Optional[list[AtlanTag]] = Field( - None, description="Atlan tags", alias="classifications" - ) - classification_names: Optional[list[str]] = Field( - None, description="The names of the classifications that exist on the asset." - ) - display_text: Optional[str] = Field( - None, - description="Human-readable name of the entity..\n", - ) - entity_status: Optional[str] = Field( - None, - description="Status of the entity (if this is a related entity).\n", - ) - relationship_guid: Optional[str] = Field( - None, - description="Unique identifier of the relationship (when this is a related entity).\n", - ) - relationship_status: Optional[str] = Field( - None, - description="Status of the relationship (when this is a related entity).\n", - ) - relationship_type: Optional[str] = Field( - None, - description="Status of the relationship (when this is a related entity).\n", - ) - meaning_names: Optional[list[str]] = Field( - None, description="Names of assigned_terms that have been linked to this asset." - ) - meanings: Optional[list[Meaning]] = Field(None, description="", alias="meanings") - custom_attributes: Optional[dict[str, Any]] = Field( - None, description="", alias="customAttributes" - ) - scrubbed: Optional[bool] = Field( - None, description="", alias="fields removed from results" - ) - pending_tasks: Optional[list[str]] = Field(None) - - unique_attributes: Optional[dict[str, Any]] = Field(None) - - append_relationship_attributes: Optional[dict[str, Any]] = Field( - None, - alias="appendRelationshipAttributes", - description="Map of append relationship attributes.", - ) - remove_relationship_attributes: Optional[dict[str, Any]] = Field( - None, - alias="removeRelationshipAttributes", - description="Map of remove relationship attributes.", - ) - semantic: Optional[SaveSemantic] = Field( - exclude=True, - description=( - "Semantic for how this relationship should be saved, " - "if used in an asset request on which `.save()` is called." 
- ), - ) - - -class Asset(Referenceable): - """Description""" - - _subtypes_: dict[str, type] = dict() - - def __init_subclass__(cls, type_name=None): - cls._subtypes_[type_name or cls.__name__.lower()] = cls - - def trim_to_required(self: SelfAsset) -> SelfAsset: - return self.create_for_modification( - qualified_name=self.qualified_name or "", name=self.name or "" - ) - - def trim_to_reference(self: SelfAsset) -> SelfAsset: - if self.guid and self.guid.strip(): - return self.ref_by_guid(self.guid) - if self.qualified_name and self.qualified_name.strip(): - return self.ref_by_qualified_name(self.qualified_name) - if ( - self.unique_attributes - and (qualified_name := self.unique_attributes.get("qualified_name")) - and qualified_name.strip() - ): - return self.ref_by_qualified_name(qualified_name) - raise ErrorCode.MISSING_REQUIRED_RELATIONSHIP_PARAM.exception_with_parameters( - self.type_name, "guid, qualifiedName" - ) - - @classmethod - @init_guid - def create(cls: Type[SelfAsset], *args, **kwargs) -> SelfAsset: - raise NotImplementedError( - "Create has not been implemented for this class. Please submit an enhancement" - "request if you need it implemented." - ) - - @classmethod - def create_for_modification( - cls: type[SelfAsset], qualified_name: str = "", name: str = "" - ) -> SelfAsset: - if cls.__name__ == "Asset": - raise ErrorCode.METHOD_CAN_NOT_BE_INVOKED_ON_ASSET.exception_with_parameters() - validate_required_fields( - ["name", "qualified_name"], - [name, qualified_name], - ) - return cls(attributes=cls.Attributes(qualified_name=qualified_name, name=name)) - - @classmethod - def ref_by_guid( - cls: type[SelfAsset], guid: str, semantic: SaveSemantic = SaveSemantic.REPLACE - ) -> SelfAsset: - retval: SelfAsset = cls(attributes=cls.Attributes()) - retval.guid = guid - retval.semantic = semantic - return retval - - @classmethod - def ref_by_qualified_name( - cls: type[SelfAsset], - qualified_name: str, - semantic: SaveSemantic = SaveSemantic.REPLACE, - ) -> SelfAsset: - ret_value: SelfAsset = cls( - attributes=cls.Attributes(name="", qualified_name=qualified_name) - ) - ret_value.unique_attributes = {"qualifiedName": qualified_name} - ret_value.semantic = semantic - return ret_value - - @classmethod - def __get_validators__(cls): - yield cls._convert_to_real_type_ - - @classmethod - def _convert_to_real_type_(cls, data): - if isinstance(data, Asset): - return data - - # Handle the case where asset data is a list - if isinstance(data, list): - return [cls._convert_to_real_type_(item) for item in data] - - data_type = ( - data.get("type_name") if "type_name" in data else data.get("typeName") - ) - - if data_type is None: - if issubclass(cls, Asset): - return cls(**data) - raise ValueError("Missing 'type' in Asset") - - sub = cls._subtypes_.get(data_type) - if sub is None: - sub = getattr(sys.modules["pyatlan.model.assets"], data_type) - - if sub is None: - raise TypeError(f"Unsupport sub-type: {data_type}") - - move_struct(data) - return sub(**data) - - if TYPE_CHECKING: - from pyatlan.model.lineage import FluentLineage - - @classmethod - def lineage(cls, guid: str, include_archived: bool = False) -> "FluentLineage": - """ - Start a FluentLineage that can be used to get a LineageListRequest that can be used to retrieve all downstream - assets. Additional conditions can be chained onto the returned FluentLineage before any asset retrieval is - attempted, ensuring all conditions are pushed-down for optimal retrieval. 
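A quick sketch of how the reference builders just above (ref_by_guid and ref_by_qualified_name) combine with the assigned_terms convenience property. The GUID and qualified name are placeholders, the client call that would persist the change is omitted, and SaveSemantic.APPEND is assumed to exist alongside the REPLACE default visible in this diff:

```python
# Sketch only: the GUID and qualified name below are placeholders, and the
# client call that would actually save this change is not shown.
from pyatlan.model.assets import AtlasGlossaryTerm, Table
from pyatlan.model.enums import SaveSemantic

# Reference an existing glossary term purely by GUID. APPEND (assumed member
# of SaveSemantic) keeps any terms already linked instead of replacing them.
term = AtlasGlossaryTerm.ref_by_guid(
    "b4113341-251b-4adc-81fb-2420501c30e6",
    semantic=SaveSemantic.APPEND,
)

# Reference the table by its unique qualifiedName and attach the term.
table = Table.ref_by_qualified_name("default/snowflake/1234567890/DB/SCHEMA/TBL")
table.assigned_terms = [term]
```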
(To change the default direction of - downstream chain a .direction() call - - :param guid: unique identifier (GUID) for the starting point of lineage - :param include_archived: when True, archived (soft-deleted) assets in lineage will be included - :returns: a FluentLineage that can be used to get a LineageListRequest that can be used to retrieve all - downstream assets - """ - from pyatlan.model.lineage import FluentLineage - - if not include_archived: - return FluentLineage( - starting_guid=guid, - where_assets=FluentLineage.ACTIVE, - where_relationships=FluentLineage.ACTIVE, - includes_in_results=FluentLineage.ACTIVE, - ) - return FluentLineage(starting_guid=guid) - - def has_announcement(self) -> bool: - return bool( - self.attributes - and ( - self.attributes.announcement_title or self.attributes.announcement_type - ) - ) - - def set_announcement(self, announcement: Announcement) -> None: - self.attributes.announcement_type = announcement.announcement_type.value - self.attributes.announcement_title = announcement.announcement_title - self.attributes.announcement_message = announcement.announcement_message - - def get_announcment(self) -> Optional[Announcement]: - if self.attributes.announcement_type and self.attributes.announcement_title: - return Announcement( - announcement_type=AnnouncementType[ - self.attributes.announcement_type.upper() - ], - announcement_title=self.attributes.announcement_title, - announcement_message=self.attributes.announcement_message, - ) - return None - - def remove_announcement(self): - self.attributes.remove_announcement() - - def remove_description(self): - self.attributes.remove_description() - - def remove_user_description(self): - self.attributes.remove_user_description() - - def remove_owners(self): - self.attributes.remove_owners() - - def remove_certificate(self): - self.attributes.remove_certificate() - - type_name: str = Field("Asset", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "Asset": - raise ValueError("must be Asset") - return v - - def __setattr__(self, name, value): - if name in Asset._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - NAME: ClassVar[KeywordTextStemmedField] = KeywordTextStemmedField( - "name", "name.keyword", "name", "name.stemmed" - ) - """ - Name of this asset. Fallback for display purposes, if displayName is empty. - """ - DISPLAY_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "displayName", "displayName.keyword", "displayName" - ) - """ - Human-readable name of this asset used for display purposes (in user interface). - """ - DESCRIPTION: ClassVar[KeywordTextField] = KeywordTextField( - "description", "description.keyword", "description" - ) - """ - Description of this asset, for example as crawled from a source. Fallback for display purposes, if userDescription is empty. - """ # noqa: E501 - USER_DESCRIPTION: ClassVar[KeywordTextField] = KeywordTextField( - "userDescription", "userDescription.keyword", "userDescription" - ) - """ - Description of this asset, as provided by a user. If present, this will be used for the description in user interface. - """ # noqa: E501 - TENANT_ID: ClassVar[KeywordField] = KeywordField("tenantId", "tenantId") - """ - Name of the Atlan workspace in which this asset exists. - """ - CERTIFICATE_STATUS: ClassVar[KeywordTextField] = KeywordTextField( - "certificateStatus", "certificateStatus", "certificateStatus.text" - ) - """ - Status of this asset's certification. 
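The lineage() classmethod above only pre-configures a FluentLineage, so the two spellings below are equivalent; a minimal sketch (the GUID is a placeholder, and executing the resulting LineageListRequest against a client is not shown):

```python
# Mirrors the defaults applied by Asset.lineage() above; the GUID is made up
# and executing the resulting request against an AtlanClient is not shown.
from pyatlan.model.assets import Asset
from pyatlan.model.lineage import FluentLineage

guid = "917ffec9-fa84-4c59-8e6c-c7b114d04be3"

# Default behaviour: only active (non-archived) assets and relationships are
# traversed and included in results.
downstream = Asset.lineage(guid)

# Equivalent manual construction, matching the method body:
downstream_explicit = FluentLineage(
    starting_guid=guid,
    where_assets=FluentLineage.ACTIVE,
    where_relationships=FluentLineage.ACTIVE,
    includes_in_results=FluentLineage.ACTIVE,
)

# Passing include_archived=True skips those filters entirely.
everything = Asset.lineage(guid, include_archived=True)
```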
- """ - CERTIFICATE_STATUS_MESSAGE: ClassVar[KeywordField] = KeywordField( - "certificateStatusMessage", "certificateStatusMessage" - ) - """ - Human-readable descriptive message used to provide further detail to certificateStatus. - """ - CERTIFICATE_UPDATED_BY: ClassVar[KeywordField] = KeywordField( - "certificateUpdatedBy", "certificateUpdatedBy" - ) - """ - Name of the user who last updated the certification of this asset. - """ - CERTIFICATE_UPDATED_AT: ClassVar[NumericField] = NumericField( - "certificateUpdatedAt", "certificateUpdatedAt" - ) - """ - Time (epoch) at which the certification was last updated, in milliseconds. - """ - ANNOUNCEMENT_TITLE: ClassVar[KeywordField] = KeywordField( - "announcementTitle", "announcementTitle" - ) - """ - Brief title for the announcement on this asset. Required when announcementType is specified. - """ - ANNOUNCEMENT_MESSAGE: ClassVar[KeywordField] = KeywordField( - "announcementMessage", "announcementMessage" - ) - """ - Detailed message to include in the announcement on this asset. - """ - ANNOUNCEMENT_TYPE: ClassVar[KeywordField] = KeywordField( - "announcementType", "announcementType" - ) - """ - Type of announcement on this asset. - """ - ANNOUNCEMENT_UPDATED_AT: ClassVar[NumericField] = NumericField( - "announcementUpdatedAt", "announcementUpdatedAt" - ) - """ - Time (epoch) at which the announcement was last updated, in milliseconds. - """ - ANNOUNCEMENT_UPDATED_BY: ClassVar[KeywordField] = KeywordField( - "announcementUpdatedBy", "announcementUpdatedBy" - ) - """ - Name of the user who last updated the announcement. - """ - OWNER_USERS: ClassVar[KeywordField] = KeywordField("ownerUsers", "ownerUsers") - """ - List of users who own this asset. - """ - OWNER_GROUPS: ClassVar[KeywordField] = KeywordField("ownerGroups", "ownerGroups") - """ - List of groups who own this asset. - """ - ADMIN_USERS: ClassVar[KeywordField] = KeywordField("adminUsers", "adminUsers") - """ - List of users who administer this asset. (This is only used for certain asset types.) - """ - ADMIN_GROUPS: ClassVar[KeywordField] = KeywordField("adminGroups", "adminGroups") - """ - List of groups who administer this asset. (This is only used for certain asset types.) - """ - VIEWER_USERS: ClassVar[KeywordField] = KeywordField("viewerUsers", "viewerUsers") - """ - List of users who can view assets contained in a collection. (This is only used for certain asset types.) - """ - VIEWER_GROUPS: ClassVar[KeywordField] = KeywordField("viewerGroups", "viewerGroups") - """ - List of groups who can view assets contained in a collection. (This is only used for certain asset types.) - """ - CONNECTOR_NAME: ClassVar[KeywordField] = KeywordField( - "connectorName", "connectorName" - ) - """ - Type of the connector through which this asset is accessible. - """ - CONNECTION_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "connectionName", "connectionName", "connectionName.text" - ) - """ - Simple name of the connection through which this asset is accessible. - """ - CONNECTION_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "connectionQualifiedName", - "connectionQualifiedName", - "connectionQualifiedName.text", - ) - """ - Unique name of the connection through which this asset is accessible. - """ - HAS_LINEAGE: ClassVar[BooleanField] = BooleanField("__hasLineage", "__hasLineage") - """ - Whether this asset has lineage (true) or not (false). 
- """ - IS_DISCOVERABLE: ClassVar[BooleanField] = BooleanField( - "isDiscoverable", "isDiscoverable" - ) - """ - Whether this asset is discoverable through the UI (true) or not (false). - """ - IS_EDITABLE: ClassVar[BooleanField] = BooleanField("isEditable", "isEditable") - """ - Whether this asset can be edited in the UI (true) or not (false). - """ - SUB_TYPE: ClassVar[KeywordField] = KeywordField("subType", "subType") - """ - Subtype of this asset. - """ - VIEW_SCORE: ClassVar[NumericRankField] = NumericRankField( - "viewScore", "viewScore", "viewScore.rank_feature" - ) - """ - View score for this asset. - """ - POPULARITY_SCORE: ClassVar[NumericRankField] = NumericRankField( - "popularityScore", "popularityScore", "popularityScore.rank_feature" - ) - """ - Popularity score for this asset. - """ - SOURCE_OWNERS: ClassVar[KeywordField] = KeywordField("sourceOwners", "sourceOwners") - """ - List of owners of this asset, in the source system. - """ - SOURCE_CREATED_BY: ClassVar[KeywordField] = KeywordField( - "sourceCreatedBy", "sourceCreatedBy" - ) - """ - Name of the user who created this asset, in the source system. - """ - SOURCE_CREATED_AT: ClassVar[NumericField] = NumericField( - "sourceCreatedAt", "sourceCreatedAt" - ) - """ - Time (epoch) at which this asset was created in the source system, in milliseconds. - """ - SOURCE_UPDATED_AT: ClassVar[NumericField] = NumericField( - "sourceUpdatedAt", "sourceUpdatedAt" - ) - """ - Time (epoch) at which this asset was last updated in the source system, in milliseconds. - """ - SOURCE_UPDATED_BY: ClassVar[KeywordField] = KeywordField( - "sourceUpdatedBy", "sourceUpdatedBy" - ) - """ - Name of the user who last updated this asset, in the source system. - """ - SOURCE_URL: ClassVar[KeywordField] = KeywordField("sourceURL", "sourceURL") - """ - URL to the resource within the source application, used to create a button to view this asset in the source application. - """ # noqa: E501 - SOURCE_EMBED_URL: ClassVar[KeywordField] = KeywordField( - "sourceEmbedURL", "sourceEmbedURL" - ) - """ - URL to create an embed for a resource (for example, an image of a dashboard) within Atlan. - """ - LAST_SYNC_WORKFLOW_NAME: ClassVar[KeywordField] = KeywordField( - "lastSyncWorkflowName", "lastSyncWorkflowName" - ) - """ - Name of the crawler that last synchronized this asset. - """ - LAST_SYNC_RUN_AT: ClassVar[NumericField] = NumericField( - "lastSyncRunAt", "lastSyncRunAt" - ) - """ - Time (epoch) at which this asset was last crawled, in milliseconds. - """ - LAST_SYNC_RUN: ClassVar[KeywordField] = KeywordField("lastSyncRun", "lastSyncRun") - """ - Name of the last run of the crawler that last synchronized this asset. - """ - ADMIN_ROLES: ClassVar[KeywordField] = KeywordField("adminRoles", "adminRoles") - """ - List of roles who administer this asset. (This is only used for Connection assets.) - """ - SOURCE_READ_COUNT: ClassVar[NumericField] = NumericField( - "sourceReadCount", "sourceReadCount" - ) - """ - Total count of all read operations at source. - """ - SOURCE_READ_USER_COUNT: ClassVar[NumericField] = NumericField( - "sourceReadUserCount", "sourceReadUserCount" - ) - """ - Total number of unique users that read data from asset. - """ - SOURCE_LAST_READ_AT: ClassVar[NumericField] = NumericField( - "sourceLastReadAt", "sourceLastReadAt" - ) - """ - Timestamp of most recent read operation. 
- """ - LAST_ROW_CHANGED_AT: ClassVar[NumericField] = NumericField( - "lastRowChangedAt", "lastRowChangedAt" - ) - """ - Time (epoch) of the last operation that inserted, updated, or deleted rows, in milliseconds. - """ - SOURCE_TOTAL_COST: ClassVar[NumericField] = NumericField( - "sourceTotalCost", "sourceTotalCost" - ) - """ - Total cost of all operations at source. - """ - SOURCE_COST_UNIT: ClassVar[KeywordField] = KeywordField( - "sourceCostUnit", "sourceCostUnit" - ) - """ - The unit of measure for sourceTotalCost. - """ - SOURCE_READ_QUERY_COST: ClassVar[NumericField] = NumericField( - "sourceReadQueryCost", "sourceReadQueryCost" - ) - """ - Total cost of read queries at source. - """ - SOURCE_READ_RECENT_USER_LIST: ClassVar[KeywordField] = KeywordField( - "sourceReadRecentUserList", "sourceReadRecentUserList" - ) - """ - List of usernames of the most recent users who read this asset. - """ - SOURCE_READ_RECENT_USER_RECORD_LIST: ClassVar[KeywordField] = KeywordField( - "sourceReadRecentUserRecordList", "sourceReadRecentUserRecordList" - ) - """ - List of usernames with extra insights for the most recent users who read this asset. - """ - SOURCE_READ_TOP_USER_LIST: ClassVar[KeywordField] = KeywordField( - "sourceReadTopUserList", "sourceReadTopUserList" - ) - """ - List of usernames of the users who read this asset the most. - """ - SOURCE_READ_TOP_USER_RECORD_LIST: ClassVar[KeywordField] = KeywordField( - "sourceReadTopUserRecordList", "sourceReadTopUserRecordList" - ) - """ - List of usernames with extra insights for the users who read this asset the most. - """ - SOURCE_READ_POPULAR_QUERY_RECORD_LIST: ClassVar[KeywordField] = KeywordField( - "sourceReadPopularQueryRecordList", "sourceReadPopularQueryRecordList" - ) - """ - List of the most popular queries that accessed this asset. - """ - SOURCE_READ_EXPENSIVE_QUERY_RECORD_LIST: ClassVar[KeywordField] = KeywordField( - "sourceReadExpensiveQueryRecordList", "sourceReadExpensiveQueryRecordList" - ) - """ - List of the most expensive queries that accessed this asset. - """ - SOURCE_READ_SLOW_QUERY_RECORD_LIST: ClassVar[KeywordField] = KeywordField( - "sourceReadSlowQueryRecordList", "sourceReadSlowQueryRecordList" - ) - """ - List of the slowest queries that accessed this asset. - """ - SOURCE_QUERY_COMPUTE_COST_LIST: ClassVar[KeywordField] = KeywordField( - "sourceQueryComputeCostList", "sourceQueryComputeCostList" - ) - """ - List of most expensive warehouse names. - """ - SOURCE_QUERY_COMPUTE_COST_RECORD_LIST: ClassVar[KeywordField] = KeywordField( - "sourceQueryComputeCostRecordList", "sourceQueryComputeCostRecordList" - ) - """ - List of most expensive warehouses with extra insights. - """ - DBT_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "dbtQualifiedName", "dbtQualifiedName", "dbtQualifiedName.text" - ) - """ - Unique name of this asset in dbt. - """ - ASSET_DBT_ALIAS: ClassVar[KeywordTextField] = KeywordTextField( - "assetDbtAlias", "assetDbtAlias.keyword", "assetDbtAlias" - ) - """ - Alias of this asset in dbt. - """ - ASSET_DBT_META: ClassVar[KeywordField] = KeywordField( - "assetDbtMeta", "assetDbtMeta" - ) - """ - Metadata for this asset in dbt, specifically everything under the 'meta' key in the dbt object. - """ - ASSET_DBT_UNIQUE_ID: ClassVar[KeywordTextField] = KeywordTextField( - "assetDbtUniqueId", "assetDbtUniqueId.keyword", "assetDbtUniqueId" - ) - """ - Unique identifier of this asset in dbt. 
- """ - ASSET_DBT_ACCOUNT_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "assetDbtAccountName", "assetDbtAccountName.keyword", "assetDbtAccountName" - ) - """ - Name of the account in which this asset exists in dbt. - """ - ASSET_DBT_PROJECT_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "assetDbtProjectName", "assetDbtProjectName.keyword", "assetDbtProjectName" - ) - """ - Name of the project in which this asset exists in dbt. - """ - ASSET_DBT_PACKAGE_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "assetDbtPackageName", "assetDbtPackageName.keyword", "assetDbtPackageName" - ) - """ - Name of the package in which this asset exists in dbt. - """ - ASSET_DBT_JOB_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "assetDbtJobName", "assetDbtJobName.keyword", "assetDbtJobName" - ) - """ - Name of the job that materialized this asset in dbt. - """ - ASSET_DBT_JOB_SCHEDULE: ClassVar[KeywordField] = KeywordField( - "assetDbtJobSchedule", "assetDbtJobSchedule" - ) - """ - Schedule of the job that materialized this asset in dbt. - """ - ASSET_DBT_JOB_STATUS: ClassVar[KeywordField] = KeywordField( - "assetDbtJobStatus", "assetDbtJobStatus" - ) - """ - Status of the job that materialized this asset in dbt. - """ - ASSET_DBT_TEST_STATUS: ClassVar[KeywordField] = KeywordField( - "assetDbtTestStatus", "assetDbtTestStatus" - ) - """ - All associated dbt test statuses. - """ - ASSET_DBT_JOB_SCHEDULE_CRON_HUMANIZED: ClassVar[TextField] = TextField( - "assetDbtJobScheduleCronHumanized", "assetDbtJobScheduleCronHumanized" - ) - """ - Human-readable cron schedule of the job that materialized this asset in dbt. - """ - ASSET_DBT_JOB_LAST_RUN: ClassVar[NumericField] = NumericField( - "assetDbtJobLastRun", "assetDbtJobLastRun" - ) - """ - Time (epoch) at which the job that materialized this asset in dbt last ran, in milliseconds. - """ - ASSET_DBT_JOB_LAST_RUN_URL: ClassVar[KeywordField] = KeywordField( - "assetDbtJobLastRunUrl", "assetDbtJobLastRunUrl" - ) - """ - URL of the last run of the job that materialized this asset in dbt. - """ - ASSET_DBT_JOB_LAST_RUN_CREATED_AT: ClassVar[NumericField] = NumericField( - "assetDbtJobLastRunCreatedAt", "assetDbtJobLastRunCreatedAt" - ) - """ - Time (epoch) at which the job that materialized this asset in dbt was last created, in milliseconds. - """ - ASSET_DBT_JOB_LAST_RUN_UPDATED_AT: ClassVar[NumericField] = NumericField( - "assetDbtJobLastRunUpdatedAt", "assetDbtJobLastRunUpdatedAt" - ) - """ - Time (epoch) at which the job that materialized this asset in dbt was last updated, in milliseconds. - """ - ASSET_DBT_JOB_LAST_RUN_DEQUED_AT: ClassVar[NumericField] = NumericField( - "assetDbtJobLastRunDequedAt", "assetDbtJobLastRunDequedAt" - ) - """ - Time (epoch) at which the job that materialized this asset in dbt was dequeued, in milliseconds. - """ - ASSET_DBT_JOB_LAST_RUN_STARTED_AT: ClassVar[NumericField] = NumericField( - "assetDbtJobLastRunStartedAt", "assetDbtJobLastRunStartedAt" - ) - """ - Time (epoch) at which the job that materialized this asset in dbt was started running, in milliseconds. - """ - ASSET_DBT_JOB_LAST_RUN_TOTAL_DURATION: ClassVar[KeywordField] = KeywordField( - "assetDbtJobLastRunTotalDuration", "assetDbtJobLastRunTotalDuration" - ) - """ - Total duration of the last run of the job that materialized this asset in dbt. 
- """ - ASSET_DBT_JOB_LAST_RUN_TOTAL_DURATION_HUMANIZED: ClassVar[ - KeywordField - ] = KeywordField( - "assetDbtJobLastRunTotalDurationHumanized", - "assetDbtJobLastRunTotalDurationHumanized", - ) - """ - Human-readable total duration of the last run of the job that materialized this asset in dbt. - """ - ASSET_DBT_JOB_LAST_RUN_QUEUED_DURATION: ClassVar[KeywordField] = KeywordField( - "assetDbtJobLastRunQueuedDuration", "assetDbtJobLastRunQueuedDuration" - ) - """ - Total duration the job that materialized this asset in dbt spent being queued. - """ - ASSET_DBT_JOB_LAST_RUN_QUEUED_DURATION_HUMANIZED: ClassVar[ - KeywordField - ] = KeywordField( - "assetDbtJobLastRunQueuedDurationHumanized", - "assetDbtJobLastRunQueuedDurationHumanized", - ) - """ - Human-readable total duration of the last run of the job that materialized this asset in dbt spend being queued. - """ - ASSET_DBT_JOB_LAST_RUN_RUN_DURATION: ClassVar[KeywordField] = KeywordField( - "assetDbtJobLastRunRunDuration", "assetDbtJobLastRunRunDuration" - ) - """ - Run duration of the last run of the job that materialized this asset in dbt. - """ - ASSET_DBT_JOB_LAST_RUN_RUN_DURATION_HUMANIZED: ClassVar[ - KeywordField - ] = KeywordField( - "assetDbtJobLastRunRunDurationHumanized", - "assetDbtJobLastRunRunDurationHumanized", - ) - """ - Human-readable run duration of the last run of the job that materialized this asset in dbt. - """ - ASSET_DBT_JOB_LAST_RUN_GIT_BRANCH: ClassVar[KeywordTextField] = KeywordTextField( - "assetDbtJobLastRunGitBranch", - "assetDbtJobLastRunGitBranch", - "assetDbtJobLastRunGitBranch.text", - ) - """ - Branch in git from which the last run of the job that materialized this asset in dbt ran. - """ - ASSET_DBT_JOB_LAST_RUN_GIT_SHA: ClassVar[KeywordField] = KeywordField( - "assetDbtJobLastRunGitSha", "assetDbtJobLastRunGitSha" - ) - """ - SHA hash in git for the last run of the job that materialized this asset in dbt. - """ - ASSET_DBT_JOB_LAST_RUN_STATUS_MESSAGE: ClassVar[ - KeywordTextField - ] = KeywordTextField( - "assetDbtJobLastRunStatusMessage", - "assetDbtJobLastRunStatusMessage.keyword", - "assetDbtJobLastRunStatusMessage", - ) - """ - Status message of the last run of the job that materialized this asset in dbt. - """ - ASSET_DBT_JOB_LAST_RUN_OWNER_THREAD_ID: ClassVar[KeywordField] = KeywordField( - "assetDbtJobLastRunOwnerThreadId", "assetDbtJobLastRunOwnerThreadId" - ) - """ - Thread ID of the owner of the last run of the job that materialized this asset in dbt. - """ - ASSET_DBT_JOB_LAST_RUN_EXECUTED_BY_THREAD_ID: ClassVar[KeywordField] = KeywordField( - "assetDbtJobLastRunExecutedByThreadId", "assetDbtJobLastRunExecutedByThreadId" - ) - """ - Thread ID of the user who executed the last run of the job that materialized this asset in dbt. - """ - ASSET_DBT_JOB_LAST_RUN_ARTIFACTS_SAVED: ClassVar[BooleanField] = BooleanField( - "assetDbtJobLastRunArtifactsSaved", "assetDbtJobLastRunArtifactsSaved" - ) - """ - Whether artifacts were saved from the last run of the job that materialized this asset in dbt (true) or not (false). - """ - ASSET_DBT_JOB_LAST_RUN_ARTIFACT_S3PATH: ClassVar[KeywordField] = KeywordField( - "assetDbtJobLastRunArtifactS3Path", "assetDbtJobLastRunArtifactS3Path" - ) - """ - Path in S3 to the artifacts saved from the last run of the job that materialized this asset in dbt. 
- """ - ASSET_DBT_JOB_LAST_RUN_HAS_DOCS_GENERATED: ClassVar[BooleanField] = BooleanField( - "assetDbtJobLastRunHasDocsGenerated", "assetDbtJobLastRunHasDocsGenerated" - ) - """ - Whether docs were generated from the last run of the job that materialized this asset in dbt (true) or not (false). - """ - ASSET_DBT_JOB_LAST_RUN_HAS_SOURCES_GENERATED: ClassVar[BooleanField] = BooleanField( - "assetDbtJobLastRunHasSourcesGenerated", "assetDbtJobLastRunHasSourcesGenerated" - ) - """ - Whether sources were generated from the last run of the job that materialized this asset in dbt (true) or not (false). - """ # noqa: E501 - ASSET_DBT_JOB_LAST_RUN_NOTIFICATIONS_SENT: ClassVar[BooleanField] = BooleanField( - "assetDbtJobLastRunNotificationsSent", "assetDbtJobLastRunNotificationsSent" - ) - """ - Whether notifications were sent from the last run of the job that materialized this asset in dbt (true) or not (false). - """ # noqa: E501 - ASSET_DBT_JOB_NEXT_RUN: ClassVar[NumericField] = NumericField( - "assetDbtJobNextRun", "assetDbtJobNextRun" - ) - """ - Time (epoch) when the next run of the job that materializes this asset in dbt is scheduled. - """ - ASSET_DBT_JOB_NEXT_RUN_HUMANIZED: ClassVar[KeywordTextField] = KeywordTextField( - "assetDbtJobNextRunHumanized", - "assetDbtJobNextRunHumanized.keyword", - "assetDbtJobNextRunHumanized", - ) - """ - Human-readable time when the next run of the job that materializes this asset in dbt is scheduled. - """ - ASSET_DBT_ENVIRONMENT_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "assetDbtEnvironmentName", - "assetDbtEnvironmentName.keyword", - "assetDbtEnvironmentName", - ) - """ - Name of the environment in which this asset is materialized in dbt. - """ - ASSET_DBT_ENVIRONMENT_DBT_VERSION: ClassVar[KeywordField] = KeywordField( - "assetDbtEnvironmentDbtVersion", "assetDbtEnvironmentDbtVersion" - ) - """ - Version of the environment in which this asset is materialized in dbt. - """ - ASSET_DBT_TAGS: ClassVar[KeywordTextField] = KeywordTextField( - "assetDbtTags", "assetDbtTags", "assetDbtTags.text" - ) - """ - List of tags attached to this asset in dbt. - """ - ASSET_DBT_SEMANTIC_LAYER_PROXY_URL: ClassVar[KeywordField] = KeywordField( - "assetDbtSemanticLayerProxyUrl", "assetDbtSemanticLayerProxyUrl" - ) - """ - URL of the semantic layer proxy for this asset in dbt. - """ - ASSET_DBT_SOURCE_FRESHNESS_CRITERIA: ClassVar[KeywordField] = KeywordField( - "assetDbtSourceFreshnessCriteria", "assetDbtSourceFreshnessCriteria" - ) - """ - Freshness criteria for the source of this asset in dbt. - """ - SAMPLE_DATA_URL: ClassVar[KeywordTextField] = KeywordTextField( - "sampleDataUrl", "sampleDataUrl", "sampleDataUrl.text" - ) - """ - URL for sample data for this asset. - """ - ASSET_TAGS: ClassVar[KeywordTextField] = KeywordTextField( - "assetTags", "assetTags", "assetTags.text" - ) - """ - List of tags attached to this asset. - """ - ASSET_MC_INCIDENT_NAMES: ClassVar[KeywordTextField] = KeywordTextField( - "assetMcIncidentNames", "assetMcIncidentNames.keyword", "assetMcIncidentNames" - ) - """ - List of Monte Carlo incident names attached to this asset. - """ - ASSET_MC_INCIDENT_QUALIFIED_NAMES: ClassVar[KeywordTextField] = KeywordTextField( - "assetMcIncidentQualifiedNames", - "assetMcIncidentQualifiedNames", - "assetMcIncidentQualifiedNames.text", - ) - """ - List of unique Monte Carlo incident names attached to this asset. 
- """ - ASSET_MC_MONITOR_NAMES: ClassVar[KeywordTextField] = KeywordTextField( - "assetMcMonitorNames", "assetMcMonitorNames.keyword", "assetMcMonitorNames" - ) - """ - List of Monte Carlo monitor names attached to this asset. - """ - ASSET_MC_MONITOR_QUALIFIED_NAMES: ClassVar[KeywordTextField] = KeywordTextField( - "assetMcMonitorQualifiedNames", - "assetMcMonitorQualifiedNames", - "assetMcMonitorQualifiedNames.text", - ) - """ - List of unique Monte Carlo monitor names attached to this asset. - """ - ASSET_MC_MONITOR_STATUSES: ClassVar[KeywordField] = KeywordField( - "assetMcMonitorStatuses", "assetMcMonitorStatuses" - ) - """ - Statuses of all associated Monte Carlo monitors. - """ - ASSET_MC_MONITOR_TYPES: ClassVar[KeywordField] = KeywordField( - "assetMcMonitorTypes", "assetMcMonitorTypes" - ) - """ - Types of all associated Monte Carlo monitors. - """ - ASSET_MC_MONITOR_SCHEDULE_TYPES: ClassVar[KeywordField] = KeywordField( - "assetMcMonitorScheduleTypes", "assetMcMonitorScheduleTypes" - ) - """ - Schedules of all associated Monte Carlo monitors. - """ - ASSET_MC_INCIDENT_TYPES: ClassVar[KeywordField] = KeywordField( - "assetMcIncidentTypes", "assetMcIncidentTypes" - ) - """ - List of Monte Carlo incident types associated with this asset. - """ - ASSET_MC_INCIDENT_SUB_TYPES: ClassVar[KeywordField] = KeywordField( - "assetMcIncidentSubTypes", "assetMcIncidentSubTypes" - ) - """ - List of Monte Carlo incident sub-types associated with this asset. - """ - ASSET_MC_INCIDENT_SEVERITIES: ClassVar[KeywordField] = KeywordField( - "assetMcIncidentSeverities", "assetMcIncidentSeverities" - ) - """ - List of Monte Carlo incident severities associated with this asset. - """ - ASSET_MC_INCIDENT_STATES: ClassVar[KeywordField] = KeywordField( - "assetMcIncidentStates", "assetMcIncidentStates" - ) - """ - List of Monte Carlo incident states associated with this asset. - """ - ASSET_MC_LAST_SYNC_RUN_AT: ClassVar[NumericField] = NumericField( - "assetMcLastSyncRunAt", "assetMcLastSyncRunAt" - ) - """ - Time (epoch) at which this asset was last synced from Monte Carlo. - """ - STARRED_BY: ClassVar[KeywordField] = KeywordField("starredBy", "starredBy") - """ - Users who have starred this asset. - """ - STARRED_DETAILS_LIST: ClassVar[KeywordField] = KeywordField( - "starredDetailsList", "starredDetailsList" - ) - """ - List of usernames with extra information of the users who have starred an asset. - """ - STARRED_COUNT: ClassVar[NumericField] = NumericField("starredCount", "starredCount") - """ - Number of users who have starred this asset. - """ - ASSET_SODA_DQ_STATUS: ClassVar[KeywordField] = KeywordField( - "assetSodaDQStatus", "assetSodaDQStatus" - ) - """ - Status of data quality from Soda. - """ - ASSET_SODA_CHECK_COUNT: ClassVar[NumericField] = NumericField( - "assetSodaCheckCount", "assetSodaCheckCount" - ) - """ - Number of checks done via Soda. - """ - ASSET_SODA_LAST_SYNC_RUN_AT: ClassVar[NumericField] = NumericField( - "assetSodaLastSyncRunAt", "assetSodaLastSyncRunAt" - ) - """ - - """ - ASSET_SODA_LAST_SCAN_AT: ClassVar[NumericField] = NumericField( - "assetSodaLastScanAt", "assetSodaLastScanAt" - ) - """ - - """ - ASSET_SODA_CHECK_STATUSES: ClassVar[KeywordField] = KeywordField( - "assetSodaCheckStatuses", "assetSodaCheckStatuses" - ) - """ - All associated Soda check statuses. 
- """ - ASSET_SODA_SOURCE_URL: ClassVar[KeywordField] = KeywordField( - "assetSodaSourceURL", "assetSodaSourceURL" - ) - """ - - """ - ASSET_ICON: ClassVar[KeywordField] = KeywordField("assetIcon", "assetIcon") - """ - Name of the icon to use for this asset. (Only applies to glossaries, currently.) - """ - IS_PARTIAL: ClassVar[BooleanField] = BooleanField("isPartial", "isPartial") - """ - TBC - """ - IS_AI_GENERATED: ClassVar[BooleanField] = BooleanField( - "isAIGenerated", "isAIGenerated" - ) - """ - - """ - ASSET_COVER_IMAGE: ClassVar[KeywordField] = KeywordField( - "assetCoverImage", "assetCoverImage" - ) - """ - TBC - """ - ASSET_THEME_HEX: ClassVar[KeywordField] = KeywordField( - "assetThemeHex", "assetThemeHex" - ) - """ - Color (in hexadecimal RGB) to use to represent this asset. - """ - - SCHEMA_REGISTRY_SUBJECTS: ClassVar[RelationField] = RelationField( - "schemaRegistrySubjects" - ) - """ - TBC - """ - MC_MONITORS: ClassVar[RelationField] = RelationField("mcMonitors") - """ - TBC - """ - OUTPUT_PORT_DATA_PRODUCTS: ClassVar[RelationField] = RelationField( - "outputPortDataProducts" - ) - """ - TBC - """ - FILES: ClassVar[RelationField] = RelationField("files") - """ - TBC - """ - MC_INCIDENTS: ClassVar[RelationField] = RelationField("mcIncidents") - """ - TBC - """ - LINKS: ClassVar[RelationField] = RelationField("links") - """ - TBC - """ - METRICS: ClassVar[RelationField] = RelationField("metrics") - """ - TBC - """ - README: ClassVar[RelationField] = RelationField("readme") - """ - TBC - """ - SODA_CHECKS: ClassVar[RelationField] = RelationField("sodaChecks") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "name", - "display_name", - "description", - "user_description", - "tenant_id", - "certificate_status", - "certificate_status_message", - "certificate_updated_by", - "certificate_updated_at", - "announcement_title", - "announcement_message", - "announcement_type", - "announcement_updated_at", - "announcement_updated_by", - "owner_users", - "owner_groups", - "admin_users", - "admin_groups", - "viewer_users", - "viewer_groups", - "connector_name", - "connection_name", - "connection_qualified_name", - "has_lineage", - "is_discoverable", - "is_editable", - "sub_type", - "view_score", - "popularity_score", - "source_owners", - "source_created_by", - "source_created_at", - "source_updated_at", - "source_updated_by", - "source_url", - "source_embed_url", - "last_sync_workflow_name", - "last_sync_run_at", - "last_sync_run", - "admin_roles", - "source_read_count", - "source_read_user_count", - "source_last_read_at", - "last_row_changed_at", - "source_total_cost", - "source_cost_unit", - "source_read_query_cost", - "source_read_recent_user_list", - "source_read_recent_user_record_list", - "source_read_top_user_list", - "source_read_top_user_record_list", - "source_read_popular_query_record_list", - "source_read_expensive_query_record_list", - "source_read_slow_query_record_list", - "source_query_compute_cost_list", - "source_query_compute_cost_record_list", - "dbt_qualified_name", - "asset_dbt_alias", - "asset_dbt_meta", - "asset_dbt_unique_id", - "asset_dbt_account_name", - "asset_dbt_project_name", - "asset_dbt_package_name", - "asset_dbt_job_name", - "asset_dbt_job_schedule", - "asset_dbt_job_status", - "asset_dbt_test_status", - "asset_dbt_job_schedule_cron_humanized", - "asset_dbt_job_last_run", - "asset_dbt_job_last_run_url", - "asset_dbt_job_last_run_created_at", - "asset_dbt_job_last_run_updated_at", - "asset_dbt_job_last_run_dequed_at", - 
"asset_dbt_job_last_run_started_at", - "asset_dbt_job_last_run_total_duration", - "asset_dbt_job_last_run_total_duration_humanized", - "asset_dbt_job_last_run_queued_duration", - "asset_dbt_job_last_run_queued_duration_humanized", - "asset_dbt_job_last_run_run_duration", - "asset_dbt_job_last_run_run_duration_humanized", - "asset_dbt_job_last_run_git_branch", - "asset_dbt_job_last_run_git_sha", - "asset_dbt_job_last_run_status_message", - "asset_dbt_job_last_run_owner_thread_id", - "asset_dbt_job_last_run_executed_by_thread_id", - "asset_dbt_job_last_run_artifacts_saved", - "asset_dbt_job_last_run_artifact_s3_path", - "asset_dbt_job_last_run_has_docs_generated", - "asset_dbt_job_last_run_has_sources_generated", - "asset_dbt_job_last_run_notifications_sent", - "asset_dbt_job_next_run", - "asset_dbt_job_next_run_humanized", - "asset_dbt_environment_name", - "asset_dbt_environment_dbt_version", - "asset_dbt_tags", - "asset_dbt_semantic_layer_proxy_url", - "asset_dbt_source_freshness_criteria", - "sample_data_url", - "asset_tags", - "asset_mc_incident_names", - "asset_mc_incident_qualified_names", - "asset_mc_monitor_names", - "asset_mc_monitor_qualified_names", - "asset_mc_monitor_statuses", - "asset_mc_monitor_types", - "asset_mc_monitor_schedule_types", - "asset_mc_incident_types", - "asset_mc_incident_sub_types", - "asset_mc_incident_severities", - "asset_mc_incident_states", - "asset_mc_last_sync_run_at", - "starred_by", - "starred_details_list", - "starred_count", - "asset_soda_d_q_status", - "asset_soda_check_count", - "asset_soda_last_sync_run_at", - "asset_soda_last_scan_at", - "asset_soda_check_statuses", - "asset_soda_source_url", - "asset_icon", - "is_partial", - "is_a_i_generated", - "asset_cover_image", - "asset_theme_hex", - "schema_registry_subjects", - "mc_monitors", - "output_port_data_products", - "files", - "mc_incidents", - "links", - "metrics", - "readme", - "soda_checks", - "assigned_terms", - ] - - @property - def name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.name - - @name.setter - def name(self, name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.name = name - - @property - def display_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.display_name - - @display_name.setter - def display_name(self, display_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.display_name = display_name - - @property - def description(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.description - - @description.setter - def description(self, description: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.description = description - - @property - def user_description(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.user_description - - @user_description.setter - def user_description(self, user_description: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.user_description = user_description - - @property - def tenant_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.tenant_id - - @tenant_id.setter - def tenant_id(self, tenant_id: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.tenant_id = tenant_id - - @property - def 
certificate_status(self) -> Optional[CertificateStatus]: - return None if self.attributes is None else self.attributes.certificate_status - - @certificate_status.setter - def certificate_status(self, certificate_status: Optional[CertificateStatus]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.certificate_status = certificate_status - - @property - def certificate_status_message(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.certificate_status_message - ) - - @certificate_status_message.setter - def certificate_status_message(self, certificate_status_message: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.certificate_status_message = certificate_status_message - - @property - def certificate_updated_by(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.certificate_updated_by - ) - - @certificate_updated_by.setter - def certificate_updated_by(self, certificate_updated_by: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.certificate_updated_by = certificate_updated_by - - @property - def certificate_updated_at(self) -> Optional[datetime]: - return ( - None if self.attributes is None else self.attributes.certificate_updated_at - ) - - @certificate_updated_at.setter - def certificate_updated_at(self, certificate_updated_at: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.certificate_updated_at = certificate_updated_at - - @property - def announcement_title(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.announcement_title - - @announcement_title.setter - def announcement_title(self, announcement_title: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.announcement_title = announcement_title - - @property - def announcement_message(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.announcement_message - - @announcement_message.setter - def announcement_message(self, announcement_message: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.announcement_message = announcement_message - - @property - def announcement_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.announcement_type - - @announcement_type.setter - def announcement_type(self, announcement_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.announcement_type = announcement_type - - @property - def announcement_updated_at(self) -> Optional[datetime]: - return ( - None if self.attributes is None else self.attributes.announcement_updated_at - ) - - @announcement_updated_at.setter - def announcement_updated_at(self, announcement_updated_at: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.announcement_updated_at = announcement_updated_at - - @property - def announcement_updated_by(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.announcement_updated_by - ) - - @announcement_updated_by.setter - def announcement_updated_by(self, announcement_updated_by: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.announcement_updated_by = 
announcement_updated_by - - @property - def owner_users(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.owner_users - - @owner_users.setter - def owner_users(self, owner_users: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.owner_users = owner_users - - @property - def owner_groups(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.owner_groups - - @owner_groups.setter - def owner_groups(self, owner_groups: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.owner_groups = owner_groups - - @property - def admin_users(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.admin_users - - @admin_users.setter - def admin_users(self, admin_users: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.admin_users = admin_users - - @property - def admin_groups(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.admin_groups - - @admin_groups.setter - def admin_groups(self, admin_groups: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.admin_groups = admin_groups - - @property - def viewer_users(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.viewer_users - - @viewer_users.setter - def viewer_users(self, viewer_users: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.viewer_users = viewer_users - - @property - def viewer_groups(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.viewer_groups - - @viewer_groups.setter - def viewer_groups(self, viewer_groups: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.viewer_groups = viewer_groups - - @property - def connector_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.connector_name - - @connector_name.setter - def connector_name(self, connector_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.connector_name = connector_name - - @property - def connection_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.connection_name - - @connection_name.setter - def connection_name(self, connection_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.connection_name = connection_name - - @property - def connection_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.connection_qualified_name - ) - - @connection_qualified_name.setter - def connection_qualified_name(self, connection_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.connection_qualified_name = connection_qualified_name - - @property - def has_lineage(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.has_lineage - - @has_lineage.setter - def has_lineage(self, has_lineage: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.has_lineage = has_lineage - - @property - def is_discoverable(self) -> Optional[bool]: - return None if 
self.attributes is None else self.attributes.is_discoverable - - @is_discoverable.setter - def is_discoverable(self, is_discoverable: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_discoverable = is_discoverable - - @property - def is_editable(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_editable - - @is_editable.setter - def is_editable(self, is_editable: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_editable = is_editable - - @property - def sub_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.sub_type - - @sub_type.setter - def sub_type(self, sub_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sub_type = sub_type - - @property - def view_score(self) -> Optional[float]: - return None if self.attributes is None else self.attributes.view_score - - @view_score.setter - def view_score(self, view_score: Optional[float]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.view_score = view_score - - @property - def popularity_score(self) -> Optional[float]: - return None if self.attributes is None else self.attributes.popularity_score - - @popularity_score.setter - def popularity_score(self, popularity_score: Optional[float]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.popularity_score = popularity_score - - @property - def source_owners(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.source_owners - - @source_owners.setter - def source_owners(self, source_owners: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_owners = source_owners - - @property - def source_created_by(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.source_created_by - - @source_created_by.setter - def source_created_by(self, source_created_by: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_created_by = source_created_by - - @property - def source_created_at(self) -> Optional[datetime]: - return None if self.attributes is None else self.attributes.source_created_at - - @source_created_at.setter - def source_created_at(self, source_created_at: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_created_at = source_created_at - - @property - def source_updated_at(self) -> Optional[datetime]: - return None if self.attributes is None else self.attributes.source_updated_at - - @source_updated_at.setter - def source_updated_at(self, source_updated_at: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_updated_at = source_updated_at - - @property - def source_updated_by(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.source_updated_by - - @source_updated_by.setter - def source_updated_by(self, source_updated_by: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_updated_by = source_updated_by - - @property - def source_url(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.source_url - - @source_url.setter - def source_url(self, source_url: 
Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_url = source_url - - @property - def source_embed_url(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.source_embed_url - - @source_embed_url.setter - def source_embed_url(self, source_embed_url: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_embed_url = source_embed_url - - @property - def last_sync_workflow_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.last_sync_workflow_name - ) - - @last_sync_workflow_name.setter - def last_sync_workflow_name(self, last_sync_workflow_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.last_sync_workflow_name = last_sync_workflow_name - - @property - def last_sync_run_at(self) -> Optional[datetime]: - return None if self.attributes is None else self.attributes.last_sync_run_at - - @last_sync_run_at.setter - def last_sync_run_at(self, last_sync_run_at: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.last_sync_run_at = last_sync_run_at - - @property - def last_sync_run(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.last_sync_run - - @last_sync_run.setter - def last_sync_run(self, last_sync_run: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.last_sync_run = last_sync_run - - @property - def admin_roles(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.admin_roles - - @admin_roles.setter - def admin_roles(self, admin_roles: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.admin_roles = admin_roles - - @property - def source_read_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.source_read_count - - @source_read_count.setter - def source_read_count(self, source_read_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_read_count = source_read_count - - @property - def source_read_user_count(self) -> Optional[int]: - return ( - None if self.attributes is None else self.attributes.source_read_user_count - ) - - @source_read_user_count.setter - def source_read_user_count(self, source_read_user_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_read_user_count = source_read_user_count - - @property - def source_last_read_at(self) -> Optional[datetime]: - return None if self.attributes is None else self.attributes.source_last_read_at - - @source_last_read_at.setter - def source_last_read_at(self, source_last_read_at: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_last_read_at = source_last_read_at - - @property - def last_row_changed_at(self) -> Optional[datetime]: - return None if self.attributes is None else self.attributes.last_row_changed_at - - @last_row_changed_at.setter - def last_row_changed_at(self, last_row_changed_at: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.last_row_changed_at = last_row_changed_at - - @property - def source_total_cost(self) -> Optional[float]: - return None if self.attributes is 
None else self.attributes.source_total_cost - - @source_total_cost.setter - def source_total_cost(self, source_total_cost: Optional[float]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_total_cost = source_total_cost - - @property - def source_cost_unit(self) -> Optional[SourceCostUnitType]: - return None if self.attributes is None else self.attributes.source_cost_unit - - @source_cost_unit.setter - def source_cost_unit(self, source_cost_unit: Optional[SourceCostUnitType]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_cost_unit = source_cost_unit - - @property - def source_read_query_cost(self) -> Optional[float]: - return ( - None if self.attributes is None else self.attributes.source_read_query_cost - ) - - @source_read_query_cost.setter - def source_read_query_cost(self, source_read_query_cost: Optional[float]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_read_query_cost = source_read_query_cost - - @property - def source_read_recent_user_list(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.source_read_recent_user_list - ) - - @source_read_recent_user_list.setter - def source_read_recent_user_list( - self, source_read_recent_user_list: Optional[set[str]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_read_recent_user_list = source_read_recent_user_list - - @property - def source_read_recent_user_record_list(self) -> Optional[list[PopularityInsights]]: - return ( - None - if self.attributes is None - else self.attributes.source_read_recent_user_record_list - ) - - @source_read_recent_user_record_list.setter - def source_read_recent_user_record_list( - self, source_read_recent_user_record_list: Optional[list[PopularityInsights]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_read_recent_user_record_list = ( - source_read_recent_user_record_list - ) - - @property - def source_read_top_user_list(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.source_read_top_user_list - ) - - @source_read_top_user_list.setter - def source_read_top_user_list(self, source_read_top_user_list: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_read_top_user_list = source_read_top_user_list - - @property - def source_read_top_user_record_list(self) -> Optional[list[PopularityInsights]]: - return ( - None - if self.attributes is None - else self.attributes.source_read_top_user_record_list - ) - - @source_read_top_user_record_list.setter - def source_read_top_user_record_list( - self, source_read_top_user_record_list: Optional[list[PopularityInsights]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_read_top_user_record_list = ( - source_read_top_user_record_list - ) - - @property - def source_read_popular_query_record_list( - self, - ) -> Optional[list[PopularityInsights]]: - return ( - None - if self.attributes is None - else self.attributes.source_read_popular_query_record_list - ) - - @source_read_popular_query_record_list.setter - def source_read_popular_query_record_list( - self, source_read_popular_query_record_list: Optional[list[PopularityInsights]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - 
self.attributes.source_read_popular_query_record_list = ( - source_read_popular_query_record_list - ) - - @property - def source_read_expensive_query_record_list( - self, - ) -> Optional[list[PopularityInsights]]: - return ( - None - if self.attributes is None - else self.attributes.source_read_expensive_query_record_list - ) - - @source_read_expensive_query_record_list.setter - def source_read_expensive_query_record_list( - self, - source_read_expensive_query_record_list: Optional[list[PopularityInsights]], - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_read_expensive_query_record_list = ( - source_read_expensive_query_record_list - ) - - @property - def source_read_slow_query_record_list(self) -> Optional[list[PopularityInsights]]: - return ( - None - if self.attributes is None - else self.attributes.source_read_slow_query_record_list - ) - - @source_read_slow_query_record_list.setter - def source_read_slow_query_record_list( - self, source_read_slow_query_record_list: Optional[list[PopularityInsights]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_read_slow_query_record_list = ( - source_read_slow_query_record_list - ) - - @property - def source_query_compute_cost_list(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.source_query_compute_cost_list - ) - - @source_query_compute_cost_list.setter - def source_query_compute_cost_list( - self, source_query_compute_cost_list: Optional[set[str]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_query_compute_cost_list = source_query_compute_cost_list - - @property - def source_query_compute_cost_record_list( - self, - ) -> Optional[list[PopularityInsights]]: - return ( - None - if self.attributes is None - else self.attributes.source_query_compute_cost_record_list - ) - - @source_query_compute_cost_record_list.setter - def source_query_compute_cost_record_list( - self, source_query_compute_cost_record_list: Optional[list[PopularityInsights]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_query_compute_cost_record_list = ( - source_query_compute_cost_record_list - ) - - @property - def dbt_qualified_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_qualified_name - - @dbt_qualified_name.setter - def dbt_qualified_name(self, dbt_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_qualified_name = dbt_qualified_name - - @property - def asset_dbt_alias(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.asset_dbt_alias - - @asset_dbt_alias.setter - def asset_dbt_alias(self, asset_dbt_alias: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_alias = asset_dbt_alias - - @property - def asset_dbt_meta(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.asset_dbt_meta - - @asset_dbt_meta.setter - def asset_dbt_meta(self, asset_dbt_meta: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_meta = asset_dbt_meta - - @property - def asset_dbt_unique_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.asset_dbt_unique_id - - @asset_dbt_unique_id.setter - def 
asset_dbt_unique_id(self, asset_dbt_unique_id: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_unique_id = asset_dbt_unique_id - - @property - def asset_dbt_account_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.asset_dbt_account_name - ) - - @asset_dbt_account_name.setter - def asset_dbt_account_name(self, asset_dbt_account_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_account_name = asset_dbt_account_name - - @property - def asset_dbt_project_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.asset_dbt_project_name - ) - - @asset_dbt_project_name.setter - def asset_dbt_project_name(self, asset_dbt_project_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_project_name = asset_dbt_project_name - - @property - def asset_dbt_package_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.asset_dbt_package_name - ) - - @asset_dbt_package_name.setter - def asset_dbt_package_name(self, asset_dbt_package_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_package_name = asset_dbt_package_name - - @property - def asset_dbt_job_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.asset_dbt_job_name - - @asset_dbt_job_name.setter - def asset_dbt_job_name(self, asset_dbt_job_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_job_name = asset_dbt_job_name - - @property - def asset_dbt_job_schedule(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.asset_dbt_job_schedule - ) - - @asset_dbt_job_schedule.setter - def asset_dbt_job_schedule(self, asset_dbt_job_schedule: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_job_schedule = asset_dbt_job_schedule - - @property - def asset_dbt_job_status(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.asset_dbt_job_status - - @asset_dbt_job_status.setter - def asset_dbt_job_status(self, asset_dbt_job_status: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_job_status = asset_dbt_job_status - - @property - def asset_dbt_test_status(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.asset_dbt_test_status - ) - - @asset_dbt_test_status.setter - def asset_dbt_test_status(self, asset_dbt_test_status: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_test_status = asset_dbt_test_status - - @property - def asset_dbt_job_schedule_cron_humanized(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_dbt_job_schedule_cron_humanized - ) - - @asset_dbt_job_schedule_cron_humanized.setter - def asset_dbt_job_schedule_cron_humanized( - self, asset_dbt_job_schedule_cron_humanized: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_job_schedule_cron_humanized = ( - asset_dbt_job_schedule_cron_humanized - ) - - @property - def asset_dbt_job_last_run(self) -> Optional[datetime]: - 
return ( - None if self.attributes is None else self.attributes.asset_dbt_job_last_run - ) - - @asset_dbt_job_last_run.setter - def asset_dbt_job_last_run(self, asset_dbt_job_last_run: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_job_last_run = asset_dbt_job_last_run - - @property - def asset_dbt_job_last_run_url(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_dbt_job_last_run_url - ) - - @asset_dbt_job_last_run_url.setter - def asset_dbt_job_last_run_url(self, asset_dbt_job_last_run_url: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_job_last_run_url = asset_dbt_job_last_run_url - - @property - def asset_dbt_job_last_run_created_at(self) -> Optional[datetime]: - return ( - None - if self.attributes is None - else self.attributes.asset_dbt_job_last_run_created_at - ) - - @asset_dbt_job_last_run_created_at.setter - def asset_dbt_job_last_run_created_at( - self, asset_dbt_job_last_run_created_at: Optional[datetime] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_job_last_run_created_at = ( - asset_dbt_job_last_run_created_at - ) - - @property - def asset_dbt_job_last_run_updated_at(self) -> Optional[datetime]: - return ( - None - if self.attributes is None - else self.attributes.asset_dbt_job_last_run_updated_at - ) - - @asset_dbt_job_last_run_updated_at.setter - def asset_dbt_job_last_run_updated_at( - self, asset_dbt_job_last_run_updated_at: Optional[datetime] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_job_last_run_updated_at = ( - asset_dbt_job_last_run_updated_at - ) - - @property - def asset_dbt_job_last_run_dequed_at(self) -> Optional[datetime]: - return ( - None - if self.attributes is None - else self.attributes.asset_dbt_job_last_run_dequed_at - ) - - @asset_dbt_job_last_run_dequed_at.setter - def asset_dbt_job_last_run_dequed_at( - self, asset_dbt_job_last_run_dequed_at: Optional[datetime] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_job_last_run_dequed_at = ( - asset_dbt_job_last_run_dequed_at - ) - - @property - def asset_dbt_job_last_run_started_at(self) -> Optional[datetime]: - return ( - None - if self.attributes is None - else self.attributes.asset_dbt_job_last_run_started_at - ) - - @asset_dbt_job_last_run_started_at.setter - def asset_dbt_job_last_run_started_at( - self, asset_dbt_job_last_run_started_at: Optional[datetime] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_job_last_run_started_at = ( - asset_dbt_job_last_run_started_at - ) - - @property - def asset_dbt_job_last_run_total_duration(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_dbt_job_last_run_total_duration - ) - - @asset_dbt_job_last_run_total_duration.setter - def asset_dbt_job_last_run_total_duration( - self, asset_dbt_job_last_run_total_duration: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_job_last_run_total_duration = ( - asset_dbt_job_last_run_total_duration - ) - - @property - def asset_dbt_job_last_run_total_duration_humanized(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_dbt_job_last_run_total_duration_humanized - ) - - 
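# --- Editor's sketch (not part of the diff): the accessor pattern repeated in the
# --- removed lines above. Every generated property proxies a field on the nested
# --- Attributes model: reads return None instead of raising while `attributes` is
# --- unset, and writes lazily allocate the container. `_AccessorSketch` and its
# --- single field are hypothetical stand-ins, not names from the SDK itself.
from typing import Optional


class _AccessorSketch:
    class Attributes:
        def __init__(self) -> None:
            self.asset_dbt_job_last_run_url: Optional[str] = None

    def __init__(self) -> None:
        self.attributes: Optional["_AccessorSketch.Attributes"] = None

    @property
    def asset_dbt_job_last_run_url(self) -> Optional[str]:
        # Read path: fall back to None when no Attributes container exists yet.
        return (
            None
            if self.attributes is None
            else self.attributes.asset_dbt_job_last_run_url
        )

    @asset_dbt_job_last_run_url.setter
    def asset_dbt_job_last_run_url(self, value: Optional[str]) -> None:
        # Write path: create the Attributes container on first assignment.
        if self.attributes is None:
            self.attributes = self.Attributes()
        self.attributes.asset_dbt_job_last_run_url = value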
@asset_dbt_job_last_run_total_duration_humanized.setter - def asset_dbt_job_last_run_total_duration_humanized( - self, asset_dbt_job_last_run_total_duration_humanized: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_job_last_run_total_duration_humanized = ( - asset_dbt_job_last_run_total_duration_humanized - ) - - @property - def asset_dbt_job_last_run_queued_duration(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_dbt_job_last_run_queued_duration - ) - - @asset_dbt_job_last_run_queued_duration.setter - def asset_dbt_job_last_run_queued_duration( - self, asset_dbt_job_last_run_queued_duration: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_job_last_run_queued_duration = ( - asset_dbt_job_last_run_queued_duration - ) - - @property - def asset_dbt_job_last_run_queued_duration_humanized(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_dbt_job_last_run_queued_duration_humanized - ) - - @asset_dbt_job_last_run_queued_duration_humanized.setter - def asset_dbt_job_last_run_queued_duration_humanized( - self, asset_dbt_job_last_run_queued_duration_humanized: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_job_last_run_queued_duration_humanized = ( - asset_dbt_job_last_run_queued_duration_humanized - ) - - @property - def asset_dbt_job_last_run_run_duration(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_dbt_job_last_run_run_duration - ) - - @asset_dbt_job_last_run_run_duration.setter - def asset_dbt_job_last_run_run_duration( - self, asset_dbt_job_last_run_run_duration: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_job_last_run_run_duration = ( - asset_dbt_job_last_run_run_duration - ) - - @property - def asset_dbt_job_last_run_run_duration_humanized(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_dbt_job_last_run_run_duration_humanized - ) - - @asset_dbt_job_last_run_run_duration_humanized.setter - def asset_dbt_job_last_run_run_duration_humanized( - self, asset_dbt_job_last_run_run_duration_humanized: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_job_last_run_run_duration_humanized = ( - asset_dbt_job_last_run_run_duration_humanized - ) - - @property - def asset_dbt_job_last_run_git_branch(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_dbt_job_last_run_git_branch - ) - - @asset_dbt_job_last_run_git_branch.setter - def asset_dbt_job_last_run_git_branch( - self, asset_dbt_job_last_run_git_branch: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_job_last_run_git_branch = ( - asset_dbt_job_last_run_git_branch - ) - - @property - def asset_dbt_job_last_run_git_sha(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_dbt_job_last_run_git_sha - ) - - @asset_dbt_job_last_run_git_sha.setter - def asset_dbt_job_last_run_git_sha( - self, asset_dbt_job_last_run_git_sha: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_job_last_run_git_sha = 
asset_dbt_job_last_run_git_sha - - @property - def asset_dbt_job_last_run_status_message(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_dbt_job_last_run_status_message - ) - - @asset_dbt_job_last_run_status_message.setter - def asset_dbt_job_last_run_status_message( - self, asset_dbt_job_last_run_status_message: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_job_last_run_status_message = ( - asset_dbt_job_last_run_status_message - ) - - @property - def asset_dbt_job_last_run_owner_thread_id(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_dbt_job_last_run_owner_thread_id - ) - - @asset_dbt_job_last_run_owner_thread_id.setter - def asset_dbt_job_last_run_owner_thread_id( - self, asset_dbt_job_last_run_owner_thread_id: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_job_last_run_owner_thread_id = ( - asset_dbt_job_last_run_owner_thread_id - ) - - @property - def asset_dbt_job_last_run_executed_by_thread_id(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_dbt_job_last_run_executed_by_thread_id - ) - - @asset_dbt_job_last_run_executed_by_thread_id.setter - def asset_dbt_job_last_run_executed_by_thread_id( - self, asset_dbt_job_last_run_executed_by_thread_id: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_job_last_run_executed_by_thread_id = ( - asset_dbt_job_last_run_executed_by_thread_id - ) - - @property - def asset_dbt_job_last_run_artifacts_saved(self) -> Optional[bool]: - return ( - None - if self.attributes is None - else self.attributes.asset_dbt_job_last_run_artifacts_saved - ) - - @asset_dbt_job_last_run_artifacts_saved.setter - def asset_dbt_job_last_run_artifacts_saved( - self, asset_dbt_job_last_run_artifacts_saved: Optional[bool] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_job_last_run_artifacts_saved = ( - asset_dbt_job_last_run_artifacts_saved - ) - - @property - def asset_dbt_job_last_run_artifact_s3_path(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_dbt_job_last_run_artifact_s3_path - ) - - @asset_dbt_job_last_run_artifact_s3_path.setter - def asset_dbt_job_last_run_artifact_s3_path( - self, asset_dbt_job_last_run_artifact_s3_path: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_job_last_run_artifact_s3_path = ( - asset_dbt_job_last_run_artifact_s3_path - ) - - @property - def asset_dbt_job_last_run_has_docs_generated(self) -> Optional[bool]: - return ( - None - if self.attributes is None - else self.attributes.asset_dbt_job_last_run_has_docs_generated - ) - - @asset_dbt_job_last_run_has_docs_generated.setter - def asset_dbt_job_last_run_has_docs_generated( - self, asset_dbt_job_last_run_has_docs_generated: Optional[bool] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_job_last_run_has_docs_generated = ( - asset_dbt_job_last_run_has_docs_generated - ) - - @property - def asset_dbt_job_last_run_has_sources_generated(self) -> Optional[bool]: - return ( - None - if self.attributes is None - else self.attributes.asset_dbt_job_last_run_has_sources_generated - ) - - 
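# --- Editor's sketch (not part of the diff): what the ClassVar field constants in the
# --- removed lines encode. Each constant pairs one Atlan attribute name with the
# --- search index field(s) backing it; the constructor shapes mirror the calls above.
# --- The import path, the local names, and the commented FluentSearch line are
# --- assumptions about the wider SDK, not something this diff shows.
from pyatlan.model.fields.atlan_fields import BooleanField, KeywordTextField

# Two-argument form: attribute name plus the single index field used for filtering.
ARTIFACTS_SAVED = BooleanField(
    "assetDbtJobLastRunArtifactsSaved", "assetDbtJobLastRunArtifactsSaved"
)

# Three-argument form: attribute name, keyword (exact-match) field, analyzed text field.
ACCOUNT_NAME = KeywordTextField(
    "assetDbtAccountName", "assetDbtAccountName.keyword", "assetDbtAccountName"
)

# Typical (assumed) consumption via the fluent search DSL, kept commented because the
# DSL is not part of this diff:
# request = FluentSearch().where(Asset.ASSET_DBT_JOB_STATUS.eq("success")).to_request()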
@asset_dbt_job_last_run_has_sources_generated.setter - def asset_dbt_job_last_run_has_sources_generated( - self, asset_dbt_job_last_run_has_sources_generated: Optional[bool] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_job_last_run_has_sources_generated = ( - asset_dbt_job_last_run_has_sources_generated - ) - - @property - def asset_dbt_job_last_run_notifications_sent(self) -> Optional[bool]: - return ( - None - if self.attributes is None - else self.attributes.asset_dbt_job_last_run_notifications_sent - ) - - @asset_dbt_job_last_run_notifications_sent.setter - def asset_dbt_job_last_run_notifications_sent( - self, asset_dbt_job_last_run_notifications_sent: Optional[bool] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_job_last_run_notifications_sent = ( - asset_dbt_job_last_run_notifications_sent - ) - - @property - def asset_dbt_job_next_run(self) -> Optional[datetime]: - return ( - None if self.attributes is None else self.attributes.asset_dbt_job_next_run - ) - - @asset_dbt_job_next_run.setter - def asset_dbt_job_next_run(self, asset_dbt_job_next_run: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_job_next_run = asset_dbt_job_next_run - - @property - def asset_dbt_job_next_run_humanized(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_dbt_job_next_run_humanized - ) - - @asset_dbt_job_next_run_humanized.setter - def asset_dbt_job_next_run_humanized( - self, asset_dbt_job_next_run_humanized: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_job_next_run_humanized = ( - asset_dbt_job_next_run_humanized - ) - - @property - def asset_dbt_environment_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_dbt_environment_name - ) - - @asset_dbt_environment_name.setter - def asset_dbt_environment_name(self, asset_dbt_environment_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_environment_name = asset_dbt_environment_name - - @property - def asset_dbt_environment_dbt_version(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_dbt_environment_dbt_version - ) - - @asset_dbt_environment_dbt_version.setter - def asset_dbt_environment_dbt_version( - self, asset_dbt_environment_dbt_version: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_environment_dbt_version = ( - asset_dbt_environment_dbt_version - ) - - @property - def asset_dbt_tags(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.asset_dbt_tags - - @asset_dbt_tags.setter - def asset_dbt_tags(self, asset_dbt_tags: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_tags = asset_dbt_tags - - @property - def asset_dbt_semantic_layer_proxy_url(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_dbt_semantic_layer_proxy_url - ) - - @asset_dbt_semantic_layer_proxy_url.setter - def asset_dbt_semantic_layer_proxy_url( - self, asset_dbt_semantic_layer_proxy_url: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - 
self.attributes.asset_dbt_semantic_layer_proxy_url = ( - asset_dbt_semantic_layer_proxy_url - ) - - @property - def asset_dbt_source_freshness_criteria(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_dbt_source_freshness_criteria - ) - - @asset_dbt_source_freshness_criteria.setter - def asset_dbt_source_freshness_criteria( - self, asset_dbt_source_freshness_criteria: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_dbt_source_freshness_criteria = ( - asset_dbt_source_freshness_criteria - ) - - @property - def sample_data_url(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.sample_data_url - - @sample_data_url.setter - def sample_data_url(self, sample_data_url: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sample_data_url = sample_data_url - - @property - def asset_tags(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.asset_tags - - @asset_tags.setter - def asset_tags(self, asset_tags: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_tags = asset_tags - - @property - def asset_mc_incident_names(self) -> Optional[set[str]]: - return ( - None if self.attributes is None else self.attributes.asset_mc_incident_names - ) - - @asset_mc_incident_names.setter - def asset_mc_incident_names(self, asset_mc_incident_names: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_mc_incident_names = asset_mc_incident_names - - @property - def asset_mc_incident_qualified_names(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.asset_mc_incident_qualified_names - ) - - @asset_mc_incident_qualified_names.setter - def asset_mc_incident_qualified_names( - self, asset_mc_incident_qualified_names: Optional[set[str]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_mc_incident_qualified_names = ( - asset_mc_incident_qualified_names - ) - - @property - def asset_mc_monitor_names(self) -> Optional[set[str]]: - return ( - None if self.attributes is None else self.attributes.asset_mc_monitor_names - ) - - @asset_mc_monitor_names.setter - def asset_mc_monitor_names(self, asset_mc_monitor_names: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_mc_monitor_names = asset_mc_monitor_names - - @property - def asset_mc_monitor_qualified_names(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.asset_mc_monitor_qualified_names - ) - - @asset_mc_monitor_qualified_names.setter - def asset_mc_monitor_qualified_names( - self, asset_mc_monitor_qualified_names: Optional[set[str]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_mc_monitor_qualified_names = ( - asset_mc_monitor_qualified_names - ) - - @property - def asset_mc_monitor_statuses(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.asset_mc_monitor_statuses - ) - - @asset_mc_monitor_statuses.setter - def asset_mc_monitor_statuses(self, asset_mc_monitor_statuses: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_mc_monitor_statuses = 
asset_mc_monitor_statuses - - @property - def asset_mc_monitor_types(self) -> Optional[set[str]]: - return ( - None if self.attributes is None else self.attributes.asset_mc_monitor_types - ) - - @asset_mc_monitor_types.setter - def asset_mc_monitor_types(self, asset_mc_monitor_types: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_mc_monitor_types = asset_mc_monitor_types - - @property - def asset_mc_monitor_schedule_types(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.asset_mc_monitor_schedule_types - ) - - @asset_mc_monitor_schedule_types.setter - def asset_mc_monitor_schedule_types( - self, asset_mc_monitor_schedule_types: Optional[set[str]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_mc_monitor_schedule_types = ( - asset_mc_monitor_schedule_types - ) - - @property - def asset_mc_incident_types(self) -> Optional[set[str]]: - return ( - None if self.attributes is None else self.attributes.asset_mc_incident_types - ) - - @asset_mc_incident_types.setter - def asset_mc_incident_types(self, asset_mc_incident_types: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_mc_incident_types = asset_mc_incident_types - - @property - def asset_mc_incident_sub_types(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.asset_mc_incident_sub_types - ) - - @asset_mc_incident_sub_types.setter - def asset_mc_incident_sub_types( - self, asset_mc_incident_sub_types: Optional[set[str]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_mc_incident_sub_types = asset_mc_incident_sub_types - - @property - def asset_mc_incident_severities(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.asset_mc_incident_severities - ) - - @asset_mc_incident_severities.setter - def asset_mc_incident_severities( - self, asset_mc_incident_severities: Optional[set[str]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_mc_incident_severities = asset_mc_incident_severities - - @property - def asset_mc_incident_states(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.asset_mc_incident_states - ) - - @asset_mc_incident_states.setter - def asset_mc_incident_states(self, asset_mc_incident_states: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_mc_incident_states = asset_mc_incident_states - - @property - def asset_mc_last_sync_run_at(self) -> Optional[datetime]: - return ( - None - if self.attributes is None - else self.attributes.asset_mc_last_sync_run_at - ) - - @asset_mc_last_sync_run_at.setter - def asset_mc_last_sync_run_at(self, asset_mc_last_sync_run_at: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_mc_last_sync_run_at = asset_mc_last_sync_run_at - - @property - def starred_by(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.starred_by - - @starred_by.setter - def starred_by(self, starred_by: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.starred_by = starred_by - - @property - def starred_details_list(self) -> 
Optional[list[StarredDetails]]: - return None if self.attributes is None else self.attributes.starred_details_list - - @starred_details_list.setter - def starred_details_list( - self, starred_details_list: Optional[list[StarredDetails]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.starred_details_list = starred_details_list - - @property - def starred_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.starred_count - - @starred_count.setter - def starred_count(self, starred_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.starred_count = starred_count - - @property - def asset_soda_d_q_status(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.asset_soda_d_q_status - ) - - @asset_soda_d_q_status.setter - def asset_soda_d_q_status(self, asset_soda_d_q_status: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_soda_d_q_status = asset_soda_d_q_status - - @property - def asset_soda_check_count(self) -> Optional[int]: - return ( - None if self.attributes is None else self.attributes.asset_soda_check_count - ) - - @asset_soda_check_count.setter - def asset_soda_check_count(self, asset_soda_check_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_soda_check_count = asset_soda_check_count - - @property - def asset_soda_last_sync_run_at(self) -> Optional[datetime]: - return ( - None - if self.attributes is None - else self.attributes.asset_soda_last_sync_run_at - ) - - @asset_soda_last_sync_run_at.setter - def asset_soda_last_sync_run_at( - self, asset_soda_last_sync_run_at: Optional[datetime] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_soda_last_sync_run_at = asset_soda_last_sync_run_at - - @property - def asset_soda_last_scan_at(self) -> Optional[datetime]: - return ( - None if self.attributes is None else self.attributes.asset_soda_last_scan_at - ) - - @asset_soda_last_scan_at.setter - def asset_soda_last_scan_at(self, asset_soda_last_scan_at: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_soda_last_scan_at = asset_soda_last_scan_at - - @property - def asset_soda_check_statuses(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_soda_check_statuses - ) - - @asset_soda_check_statuses.setter - def asset_soda_check_statuses(self, asset_soda_check_statuses: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_soda_check_statuses = asset_soda_check_statuses - - @property - def asset_soda_source_url(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.asset_soda_source_url - ) - - @asset_soda_source_url.setter - def asset_soda_source_url(self, asset_soda_source_url: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_soda_source_url = asset_soda_source_url - - @property - def asset_icon(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.asset_icon - - @asset_icon.setter - def asset_icon(self, asset_icon: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_icon = asset_icon - - @property - def 
is_partial(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_partial - - @is_partial.setter - def is_partial(self, is_partial: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_partial = is_partial - - @property - def is_a_i_generated(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_a_i_generated - - @is_a_i_generated.setter - def is_a_i_generated(self, is_a_i_generated: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_a_i_generated = is_a_i_generated - - @property - def asset_cover_image(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.asset_cover_image - - @asset_cover_image.setter - def asset_cover_image(self, asset_cover_image: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_cover_image = asset_cover_image - - @property - def asset_theme_hex(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.asset_theme_hex - - @asset_theme_hex.setter - def asset_theme_hex(self, asset_theme_hex: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_theme_hex = asset_theme_hex - - @property - def schema_registry_subjects(self) -> Optional[list[SchemaRegistrySubject]]: - return ( - None - if self.attributes is None - else self.attributes.schema_registry_subjects - ) - - @schema_registry_subjects.setter - def schema_registry_subjects( - self, schema_registry_subjects: Optional[list[SchemaRegistrySubject]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.schema_registry_subjects = schema_registry_subjects - - @property - def mc_monitors(self) -> Optional[list[MCMonitor]]: - return None if self.attributes is None else self.attributes.mc_monitors - - @mc_monitors.setter - def mc_monitors(self, mc_monitors: Optional[list[MCMonitor]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mc_monitors = mc_monitors - - @property - def output_port_data_products(self) -> Optional[list[DataProduct]]: - return ( - None - if self.attributes is None - else self.attributes.output_port_data_products - ) - - @output_port_data_products.setter - def output_port_data_products( - self, output_port_data_products: Optional[list[DataProduct]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.output_port_data_products = output_port_data_products - - @property - def files(self) -> Optional[list[File]]: - return None if self.attributes is None else self.attributes.files - - @files.setter - def files(self, files: Optional[list[File]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.files = files - - @property - def mc_incidents(self) -> Optional[list[MCIncident]]: - return None if self.attributes is None else self.attributes.mc_incidents - - @mc_incidents.setter - def mc_incidents(self, mc_incidents: Optional[list[MCIncident]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mc_incidents = mc_incidents - - @property - def links(self) -> Optional[list[Link]]: - return None if self.attributes is None else self.attributes.links - - @links.setter - def links(self, links: Optional[list[Link]]): - if self.attributes is None: - self.attributes = self.Attributes() - 
self.attributes.links = links - - @property - def metrics(self) -> Optional[list[Metric]]: - return None if self.attributes is None else self.attributes.metrics - - @metrics.setter - def metrics(self, metrics: Optional[list[Metric]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.metrics = metrics - - @property - def readme(self) -> Optional[Readme]: - return None if self.attributes is None else self.attributes.readme - - @readme.setter - def readme(self, readme: Optional[Readme]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.readme = readme - - @property - def soda_checks(self) -> Optional[list[SodaCheck]]: - return None if self.attributes is None else self.attributes.soda_checks - - @soda_checks.setter - def soda_checks(self, soda_checks: Optional[list[SodaCheck]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.soda_checks = soda_checks - - @property - def assigned_terms(self) -> Optional[list[AtlasGlossaryTerm]]: - return None if self.attributes is None else self.attributes.meanings - - @assigned_terms.setter - def assigned_terms(self, assigned_terms: Optional[list[AtlasGlossaryTerm]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.meanings = assigned_terms - - class Attributes(Referenceable.Attributes): - name: Optional[str] = Field(None, description="", alias="name") - display_name: Optional[str] = Field(None, description="", alias="displayName") - description: Optional[str] = Field(None, description="", alias="description") - user_description: Optional[str] = Field( - None, description="", alias="userDescription" - ) - tenant_id: Optional[str] = Field(None, description="", alias="tenantId") - certificate_status: Optional[CertificateStatus] = Field( - None, description="", alias="certificateStatus" - ) - certificate_status_message: Optional[str] = Field( - None, description="", alias="certificateStatusMessage" - ) - certificate_updated_by: Optional[str] = Field( - None, description="", alias="certificateUpdatedBy" - ) - certificate_updated_at: Optional[datetime] = Field( - None, description="", alias="certificateUpdatedAt" - ) - announcement_title: Optional[str] = Field( - None, description="", alias="announcementTitle" - ) - announcement_message: Optional[str] = Field( - None, description="", alias="announcementMessage" - ) - announcement_type: Optional[str] = Field( - None, description="", alias="announcementType" - ) - announcement_updated_at: Optional[datetime] = Field( - None, description="", alias="announcementUpdatedAt" - ) - announcement_updated_by: Optional[str] = Field( - None, description="", alias="announcementUpdatedBy" - ) - owner_users: Optional[set[str]] = Field( - None, description="", alias="ownerUsers" - ) - owner_groups: Optional[set[str]] = Field( - None, description="", alias="ownerGroups" - ) - admin_users: Optional[set[str]] = Field( - None, description="", alias="adminUsers" - ) - admin_groups: Optional[set[str]] = Field( - None, description="", alias="adminGroups" - ) - viewer_users: Optional[set[str]] = Field( - None, description="", alias="viewerUsers" - ) - viewer_groups: Optional[set[str]] = Field( - None, description="", alias="viewerGroups" - ) - connector_name: Optional[str] = Field( - None, description="", alias="connectorName" - ) - connection_name: Optional[str] = Field( - None, description="", alias="connectionName" - ) - connection_qualified_name: Optional[str] = Field( - None, 
description="", alias="connectionQualifiedName" - ) - has_lineage: Optional[bool] = Field(None, description="", alias="__hasLineage") - is_discoverable: Optional[bool] = Field( - None, description="", alias="isDiscoverable" - ) - is_editable: Optional[bool] = Field(None, description="", alias="isEditable") - sub_type: Optional[str] = Field(None, description="", alias="subType") - view_score: Optional[float] = Field(None, description="", alias="viewScore") - popularity_score: Optional[float] = Field( - None, description="", alias="popularityScore" - ) - source_owners: Optional[str] = Field(None, description="", alias="sourceOwners") - source_created_by: Optional[str] = Field( - None, description="", alias="sourceCreatedBy" - ) - source_created_at: Optional[datetime] = Field( - None, description="", alias="sourceCreatedAt" - ) - source_updated_at: Optional[datetime] = Field( - None, description="", alias="sourceUpdatedAt" - ) - source_updated_by: Optional[str] = Field( - None, description="", alias="sourceUpdatedBy" - ) - source_url: Optional[str] = Field(None, description="", alias="sourceURL") - source_embed_url: Optional[str] = Field( - None, description="", alias="sourceEmbedURL" - ) - last_sync_workflow_name: Optional[str] = Field( - None, description="", alias="lastSyncWorkflowName" - ) - last_sync_run_at: Optional[datetime] = Field( - None, description="", alias="lastSyncRunAt" - ) - last_sync_run: Optional[str] = Field(None, description="", alias="lastSyncRun") - admin_roles: Optional[set[str]] = Field( - None, description="", alias="adminRoles" - ) - source_read_count: Optional[int] = Field( - None, description="", alias="sourceReadCount" - ) - source_read_user_count: Optional[int] = Field( - None, description="", alias="sourceReadUserCount" - ) - source_last_read_at: Optional[datetime] = Field( - None, description="", alias="sourceLastReadAt" - ) - last_row_changed_at: Optional[datetime] = Field( - None, description="", alias="lastRowChangedAt" - ) - source_total_cost: Optional[float] = Field( - None, description="", alias="sourceTotalCost" - ) - source_cost_unit: Optional[SourceCostUnitType] = Field( - None, description="", alias="sourceCostUnit" - ) - source_read_query_cost: Optional[float] = Field( - None, description="", alias="sourceReadQueryCost" - ) - source_read_recent_user_list: Optional[set[str]] = Field( - None, description="", alias="sourceReadRecentUserList" - ) - source_read_recent_user_record_list: Optional[list[PopularityInsights]] = Field( - None, description="", alias="sourceReadRecentUserRecordList" - ) - source_read_top_user_list: Optional[set[str]] = Field( - None, description="", alias="sourceReadTopUserList" - ) - source_read_top_user_record_list: Optional[list[PopularityInsights]] = Field( - None, description="", alias="sourceReadTopUserRecordList" - ) - source_read_popular_query_record_list: Optional[ - list[PopularityInsights] - ] = Field(None, description="", alias="sourceReadPopularQueryRecordList") - source_read_expensive_query_record_list: Optional[ - list[PopularityInsights] - ] = Field(None, description="", alias="sourceReadExpensiveQueryRecordList") - source_read_slow_query_record_list: Optional[list[PopularityInsights]] = Field( - None, description="", alias="sourceReadSlowQueryRecordList" - ) - source_query_compute_cost_list: Optional[set[str]] = Field( - None, description="", alias="sourceQueryComputeCostList" - ) - source_query_compute_cost_record_list: Optional[ - list[PopularityInsights] - ] = Field(None, description="", 
alias="sourceQueryComputeCostRecordList") - dbt_qualified_name: Optional[str] = Field( - None, description="", alias="dbtQualifiedName" - ) - asset_dbt_alias: Optional[str] = Field( - None, description="", alias="assetDbtAlias" - ) - asset_dbt_meta: Optional[str] = Field( - None, description="", alias="assetDbtMeta" - ) - asset_dbt_unique_id: Optional[str] = Field( - None, description="", alias="assetDbtUniqueId" - ) - asset_dbt_account_name: Optional[str] = Field( - None, description="", alias="assetDbtAccountName" - ) - asset_dbt_project_name: Optional[str] = Field( - None, description="", alias="assetDbtProjectName" - ) - asset_dbt_package_name: Optional[str] = Field( - None, description="", alias="assetDbtPackageName" - ) - asset_dbt_job_name: Optional[str] = Field( - None, description="", alias="assetDbtJobName" - ) - asset_dbt_job_schedule: Optional[str] = Field( - None, description="", alias="assetDbtJobSchedule" - ) - asset_dbt_job_status: Optional[str] = Field( - None, description="", alias="assetDbtJobStatus" - ) - asset_dbt_test_status: Optional[str] = Field( - None, description="", alias="assetDbtTestStatus" - ) - asset_dbt_job_schedule_cron_humanized: Optional[str] = Field( - None, description="", alias="assetDbtJobScheduleCronHumanized" - ) - asset_dbt_job_last_run: Optional[datetime] = Field( - None, description="", alias="assetDbtJobLastRun" - ) - asset_dbt_job_last_run_url: Optional[str] = Field( - None, description="", alias="assetDbtJobLastRunUrl" - ) - asset_dbt_job_last_run_created_at: Optional[datetime] = Field( - None, description="", alias="assetDbtJobLastRunCreatedAt" - ) - asset_dbt_job_last_run_updated_at: Optional[datetime] = Field( - None, description="", alias="assetDbtJobLastRunUpdatedAt" - ) - asset_dbt_job_last_run_dequed_at: Optional[datetime] = Field( - None, description="", alias="assetDbtJobLastRunDequedAt" - ) - asset_dbt_job_last_run_started_at: Optional[datetime] = Field( - None, description="", alias="assetDbtJobLastRunStartedAt" - ) - asset_dbt_job_last_run_total_duration: Optional[str] = Field( - None, description="", alias="assetDbtJobLastRunTotalDuration" - ) - asset_dbt_job_last_run_total_duration_humanized: Optional[str] = Field( - None, description="", alias="assetDbtJobLastRunTotalDurationHumanized" - ) - asset_dbt_job_last_run_queued_duration: Optional[str] = Field( - None, description="", alias="assetDbtJobLastRunQueuedDuration" - ) - asset_dbt_job_last_run_queued_duration_humanized: Optional[str] = Field( - None, description="", alias="assetDbtJobLastRunQueuedDurationHumanized" - ) - asset_dbt_job_last_run_run_duration: Optional[str] = Field( - None, description="", alias="assetDbtJobLastRunRunDuration" - ) - asset_dbt_job_last_run_run_duration_humanized: Optional[str] = Field( - None, description="", alias="assetDbtJobLastRunRunDurationHumanized" - ) - asset_dbt_job_last_run_git_branch: Optional[str] = Field( - None, description="", alias="assetDbtJobLastRunGitBranch" - ) - asset_dbt_job_last_run_git_sha: Optional[str] = Field( - None, description="", alias="assetDbtJobLastRunGitSha" - ) - asset_dbt_job_last_run_status_message: Optional[str] = Field( - None, description="", alias="assetDbtJobLastRunStatusMessage" - ) - asset_dbt_job_last_run_owner_thread_id: Optional[str] = Field( - None, description="", alias="assetDbtJobLastRunOwnerThreadId" - ) - asset_dbt_job_last_run_executed_by_thread_id: Optional[str] = Field( - None, description="", alias="assetDbtJobLastRunExecutedByThreadId" - ) - asset_dbt_job_last_run_artifacts_saved: 
Optional[bool] = Field( - None, description="", alias="assetDbtJobLastRunArtifactsSaved" - ) - asset_dbt_job_last_run_artifact_s3_path: Optional[str] = Field( - None, description="", alias="assetDbtJobLastRunArtifactS3Path" - ) - asset_dbt_job_last_run_has_docs_generated: Optional[bool] = Field( - None, description="", alias="assetDbtJobLastRunHasDocsGenerated" - ) - asset_dbt_job_last_run_has_sources_generated: Optional[bool] = Field( - None, description="", alias="assetDbtJobLastRunHasSourcesGenerated" - ) - asset_dbt_job_last_run_notifications_sent: Optional[bool] = Field( - None, description="", alias="assetDbtJobLastRunNotificationsSent" - ) - asset_dbt_job_next_run: Optional[datetime] = Field( - None, description="", alias="assetDbtJobNextRun" - ) - asset_dbt_job_next_run_humanized: Optional[str] = Field( - None, description="", alias="assetDbtJobNextRunHumanized" - ) - asset_dbt_environment_name: Optional[str] = Field( - None, description="", alias="assetDbtEnvironmentName" - ) - asset_dbt_environment_dbt_version: Optional[str] = Field( - None, description="", alias="assetDbtEnvironmentDbtVersion" - ) - asset_dbt_tags: Optional[set[str]] = Field( - None, description="", alias="assetDbtTags" - ) - asset_dbt_semantic_layer_proxy_url: Optional[str] = Field( - None, description="", alias="assetDbtSemanticLayerProxyUrl" - ) - asset_dbt_source_freshness_criteria: Optional[str] = Field( - None, description="", alias="assetDbtSourceFreshnessCriteria" - ) - sample_data_url: Optional[str] = Field( - None, description="", alias="sampleDataUrl" - ) - asset_tags: Optional[set[str]] = Field(None, description="", alias="assetTags") - asset_mc_incident_names: Optional[set[str]] = Field( - None, description="", alias="assetMcIncidentNames" - ) - asset_mc_incident_qualified_names: Optional[set[str]] = Field( - None, description="", alias="assetMcIncidentQualifiedNames" - ) - asset_mc_monitor_names: Optional[set[str]] = Field( - None, description="", alias="assetMcMonitorNames" - ) - asset_mc_monitor_qualified_names: Optional[set[str]] = Field( - None, description="", alias="assetMcMonitorQualifiedNames" - ) - asset_mc_monitor_statuses: Optional[set[str]] = Field( - None, description="", alias="assetMcMonitorStatuses" - ) - asset_mc_monitor_types: Optional[set[str]] = Field( - None, description="", alias="assetMcMonitorTypes" - ) - asset_mc_monitor_schedule_types: Optional[set[str]] = Field( - None, description="", alias="assetMcMonitorScheduleTypes" - ) - asset_mc_incident_types: Optional[set[str]] = Field( - None, description="", alias="assetMcIncidentTypes" - ) - asset_mc_incident_sub_types: Optional[set[str]] = Field( - None, description="", alias="assetMcIncidentSubTypes" - ) - asset_mc_incident_severities: Optional[set[str]] = Field( - None, description="", alias="assetMcIncidentSeverities" - ) - asset_mc_incident_states: Optional[set[str]] = Field( - None, description="", alias="assetMcIncidentStates" - ) - asset_mc_last_sync_run_at: Optional[datetime] = Field( - None, description="", alias="assetMcLastSyncRunAt" - ) - starred_by: Optional[set[str]] = Field(None, description="", alias="starredBy") - starred_details_list: Optional[list[StarredDetails]] = Field( - None, description="", alias="starredDetailsList" - ) - starred_count: Optional[int] = Field(None, description="", alias="starredCount") - asset_soda_d_q_status: Optional[str] = Field( - None, description="", alias="assetSodaDQStatus" - ) - asset_soda_check_count: Optional[int] = Field( - None, description="", alias="assetSodaCheckCount" 
- ) - asset_soda_last_sync_run_at: Optional[datetime] = Field( - None, description="", alias="assetSodaLastSyncRunAt" - ) - asset_soda_last_scan_at: Optional[datetime] = Field( - None, description="", alias="assetSodaLastScanAt" - ) - asset_soda_check_statuses: Optional[str] = Field( - None, description="", alias="assetSodaCheckStatuses" - ) - asset_soda_source_url: Optional[str] = Field( - None, description="", alias="assetSodaSourceURL" - ) - asset_icon: Optional[str] = Field(None, description="", alias="assetIcon") - is_partial: Optional[bool] = Field(None, description="", alias="isPartial") - is_a_i_generated: Optional[bool] = Field( - None, description="", alias="isAIGenerated" - ) - asset_cover_image: Optional[str] = Field( - None, description="", alias="assetCoverImage" - ) - asset_theme_hex: Optional[str] = Field( - None, description="", alias="assetThemeHex" - ) - schema_registry_subjects: Optional[list[SchemaRegistrySubject]] = Field( - None, description="", alias="schemaRegistrySubjects" - ) # relationship - mc_monitors: Optional[list[MCMonitor]] = Field( - None, description="", alias="mcMonitors" - ) # relationship - output_port_data_products: Optional[list[DataProduct]] = Field( - None, description="", alias="outputPortDataProducts" - ) # relationship - files: Optional[list[File]] = Field( - None, description="", alias="files" - ) # relationship - mc_incidents: Optional[list[MCIncident]] = Field( - None, description="", alias="mcIncidents" - ) # relationship - links: Optional[list[Link]] = Field( - None, description="", alias="links" - ) # relationship - metrics: Optional[list[Metric]] = Field( - None, description="", alias="metrics" - ) # relationship - readme: Optional[Readme] = Field( - None, description="", alias="readme" - ) # relationship - soda_checks: Optional[list[SodaCheck]] = Field( - None, description="", alias="sodaChecks" - ) # relationship - meanings: Optional[list[AtlasGlossaryTerm]] = Field( - None, description="", alias="meanings" - ) # relationship - - def remove_description(self): - self.description = None - - def remove_user_description(self): - self.user_description = None - - def remove_owners(self): - self.owner_groups = None - self.owner_users = None - - def remove_certificate(self): - self.certificate_status = None - self.certificate_status_message = None - - def remove_announcement(self): - self.announcement_message = None - self.announcement_title = None - self.announcement_type = None - - attributes: "Asset.Attributes" = Field( - default_factory=lambda: Asset.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class AtlasGlossaryCategory(Asset, type_name="AtlasGlossaryCategory"): - """Description""" - - @classmethod - def can_be_archived(self) -> bool: - """ - Indicates if an asset can be archived via the asset.delete_by_guid method. 
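# A minimal standalone sketch of the pydantic.v1 alias pattern used throughout the
# Attributes declarations above: snake_case Python fields mapped onto the camelCase
# keys of the Atlan API payload. ExampleAttributes and its fields are hypothetical,
# not the full pyatlan model; the Config flag is an assumption for this example only.
from typing import Optional, Set
from pydantic.v1 import BaseModel, Field

class ExampleAttributes(BaseModel):
    class Config:
        allow_population_by_field_name = True  # accept snake_case names as well as the aliases

    asset_icon: Optional[str] = Field(None, description="", alias="assetIcon")
    asset_tags: Optional[Set[str]] = Field(None, description="", alias="assetTags")

# Parsing reads the camelCase aliases; serialising with by_alias=True emits them again.
attrs = ExampleAttributes.parse_obj({"assetIcon": "table", "assetTags": ["gold"]})
payload = attrs.dict(by_alias=True, exclude_none=True)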
- :returns: True if archiving is supported - """ - return False - - @root_validator() - def _set_qualified_name_fallback(cls, values): - if ( - "attributes" in values - and values["attributes"] - and not values["attributes"].qualified_name - ): - values["attributes"].qualified_name = values["guid"] - return values - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, - *, - name: StrictStr, - anchor: AtlasGlossary, - parent_category: Optional[AtlasGlossaryCategory] = None, - ) -> AtlasGlossaryCategory: - validate_required_fields(["name", "anchor"], [name, anchor]) - return cls( - attributes=AtlasGlossaryCategory.Attributes.create( - name=name, anchor=anchor, parent_category=parent_category - ) - ) - - def trim_to_required(self) -> AtlasGlossaryCategory: - if self.anchor is None or not self.anchor.guid: - raise ValueError("anchor.guid must be available") - return self.create_for_modification( - qualified_name=self.qualified_name or "", - name=self.name or "", - glossary_guid=self.anchor.guid, - ) - - @classmethod - def create_for_modification( - cls: type[SelfAsset], - qualified_name: str = "", - name: str = "", - glossary_guid: str = "", - ) -> SelfAsset: - validate_required_fields( - ["name", "qualified_name", "glossary_guid"], - [name, qualified_name, glossary_guid], - ) - glossary = AtlasGlossary() - glossary.guid = glossary_guid - return cls( - attributes=cls.Attributes( - qualified_name=qualified_name, name=name, anchor=glossary - ) - ) - - ANCHOR: ClassVar[KeywordField] = KeywordField("anchor", "__glossary") - """Glossary in which the category is contained, searchable by the qualifiedName of the glossary.""" - - PARENT_CATEGORY: ClassVar[KeywordField] = KeywordField( - "parentCategory", "__parentCategory" - ) - """Parent category in which a subcategory is contained, searchable by the qualifiedName of the category.""" - - type_name: str = Field("AtlasGlossaryCategory", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "AtlasGlossaryCategory": - raise ValueError("must be AtlasGlossaryCategory") - return v - - def __setattr__(self, name, value): - if name in AtlasGlossaryCategory._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SHORT_DESCRIPTION: ClassVar[KeywordField] = KeywordField( - "shortDescription", "shortDescription" - ) - """ - Unused. Brief summary of the category. See 'description' and 'userDescription' instead. - """ - LONG_DESCRIPTION: ClassVar[KeywordField] = KeywordField( - "longDescription", "longDescription" - ) - """ - Unused. Detailed description of the category. See 'readme' instead. - """ - ADDITIONAL_ATTRIBUTES: ClassVar[KeywordField] = KeywordField( - "additionalAttributes", "additionalAttributes" - ) - """ - Unused. Arbitrary set of additional attributes associated with the category. 
- """ - CATEGORY_TYPE: ClassVar[KeywordField] = KeywordField("categoryType", "categoryType") - """ - TBC - """ - - TERMS: ClassVar[RelationField] = RelationField("terms") - """ - TBC - """ - CHILDREN_CATEGORIES: ClassVar[RelationField] = RelationField("childrenCategories") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "short_description", - "long_description", - "additional_attributes", - "category_type", - "terms", - "anchor", - "parent_category", - "children_categories", - ] - - @property - def short_description(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.short_description - - @short_description.setter - def short_description(self, short_description: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.short_description = short_description - - @property - def long_description(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.long_description - - @long_description.setter - def long_description(self, long_description: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.long_description = long_description - - @property - def additional_attributes(self) -> Optional[dict[str, str]]: - return ( - None if self.attributes is None else self.attributes.additional_attributes - ) - - @additional_attributes.setter - def additional_attributes(self, additional_attributes: Optional[dict[str, str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.additional_attributes = additional_attributes - - @property - def category_type(self) -> Optional[AtlasGlossaryCategoryType]: - return None if self.attributes is None else self.attributes.category_type - - @category_type.setter - def category_type(self, category_type: Optional[AtlasGlossaryCategoryType]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.category_type = category_type - - @property - def terms(self) -> Optional[list[AtlasGlossaryTerm]]: - return None if self.attributes is None else self.attributes.terms - - @terms.setter - def terms(self, terms: Optional[list[AtlasGlossaryTerm]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.terms = terms - - @property - def anchor(self) -> Optional[AtlasGlossary]: - return None if self.attributes is None else self.attributes.anchor - - @anchor.setter - def anchor(self, anchor: Optional[AtlasGlossary]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.anchor = anchor - - @property - def parent_category(self) -> Optional[AtlasGlossaryCategory]: - return None if self.attributes is None else self.attributes.parent_category - - @parent_category.setter - def parent_category(self, parent_category: Optional[AtlasGlossaryCategory]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.parent_category = parent_category - - @property - def children_categories(self) -> Optional[list[AtlasGlossaryCategory]]: - return None if self.attributes is None else self.attributes.children_categories - - @children_categories.setter - def children_categories( - self, children_categories: Optional[list[AtlasGlossaryCategory]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.children_categories = children_categories - - class Attributes(Asset.Attributes): - short_description: Optional[str] = Field( - None, 
description="", alias="shortDescription" - ) - long_description: Optional[str] = Field( - None, description="", alias="longDescription" - ) - additional_attributes: Optional[dict[str, str]] = Field( - None, description="", alias="additionalAttributes" - ) - category_type: Optional[AtlasGlossaryCategoryType] = Field( - None, description="", alias="categoryType" - ) - terms: Optional[list[AtlasGlossaryTerm]] = Field( - None, description="", alias="terms" - ) # relationship - anchor: Optional[AtlasGlossary] = Field( - None, description="", alias="anchor" - ) # relationship - parent_category: Optional[AtlasGlossaryCategory] = Field( - None, description="", alias="parentCategory" - ) # relationship - children_categories: Optional[list[AtlasGlossaryCategory]] = Field( - None, description="", alias="childrenCategories" - ) # relationship - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, - *, - name: StrictStr, - anchor: AtlasGlossary, - parent_category: Optional[AtlasGlossaryCategory] = None, - ) -> AtlasGlossaryCategory.Attributes: - validate_required_fields(["name", "anchor"], [name, anchor]) - return AtlasGlossaryCategory.Attributes( - name=name, - anchor=anchor, - parent_category=parent_category, - qualified_name=next_id(), - ) - - attributes: "AtlasGlossaryCategory.Attributes" = Field( - default_factory=lambda: AtlasGlossaryCategory.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class AtlasGlossary(Asset, type_name="AtlasGlossary"): - """Description""" - - @root_validator() - def _set_qualified_name_fallback(cls, values): - if ( - "attributes" in values - and values["attributes"] - and not values["attributes"].qualified_name - ): - values["attributes"].qualified_name = values["guid"] - return values - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, *, name: StrictStr, icon: Optional[AtlanIcon] = None - ) -> AtlasGlossary: - validate_required_fields(["name"], [name]) - return AtlasGlossary( - attributes=AtlasGlossary.Attributes.create(name=name, icon=icon) - ) - - type_name: str = Field("AtlasGlossary", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "AtlasGlossary": - raise ValueError("must be AtlasGlossary") - return v - - def __setattr__(self, name, value): - if name in AtlasGlossary._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SHORT_DESCRIPTION: ClassVar[KeywordField] = KeywordField( - "shortDescription", "shortDescription" - ) - """ - Unused. A short definition of the glossary. See 'description' and 'userDescription' instead. - """ - LONG_DESCRIPTION: ClassVar[KeywordField] = KeywordField( - "longDescription", "longDescription" - ) - """ - Unused. A longer description of the glossary. See 'readme' instead. - """ - LANGUAGE: ClassVar[KeywordField] = KeywordField("language", "language") - """ - Unused. Language of the glossary's contents. - """ - USAGE: ClassVar[KeywordField] = KeywordField("usage", "usage") - """ - Unused. Inteded usage for the glossary. - """ - ADDITIONAL_ATTRIBUTES: ClassVar[KeywordField] = KeywordField( - "additionalAttributes", "additionalAttributes" - ) - """ - Unused. Arbitrary set of additional attributes associated with this glossary. 
- """ - GLOSSARY_TYPE: ClassVar[KeywordField] = KeywordField("glossaryType", "glossaryType") - """ - TBC - """ - - TERMS: ClassVar[RelationField] = RelationField("terms") - """ - TBC - """ - CATEGORIES: ClassVar[RelationField] = RelationField("categories") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "short_description", - "long_description", - "language", - "usage", - "additional_attributes", - "glossary_type", - "terms", - "categories", - ] - - @property - def short_description(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.short_description - - @short_description.setter - def short_description(self, short_description: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.short_description = short_description - - @property - def long_description(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.long_description - - @long_description.setter - def long_description(self, long_description: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.long_description = long_description - - @property - def language(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.language - - @language.setter - def language(self, language: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.language = language - - @property - def usage(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.usage - - @usage.setter - def usage(self, usage: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.usage = usage - - @property - def additional_attributes(self) -> Optional[dict[str, str]]: - return ( - None if self.attributes is None else self.attributes.additional_attributes - ) - - @additional_attributes.setter - def additional_attributes(self, additional_attributes: Optional[dict[str, str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.additional_attributes = additional_attributes - - @property - def glossary_type(self) -> Optional[AtlasGlossaryType]: - return None if self.attributes is None else self.attributes.glossary_type - - @glossary_type.setter - def glossary_type(self, glossary_type: Optional[AtlasGlossaryType]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.glossary_type = glossary_type - - @property - def terms(self) -> Optional[list[AtlasGlossaryTerm]]: - return None if self.attributes is None else self.attributes.terms - - @terms.setter - def terms(self, terms: Optional[list[AtlasGlossaryTerm]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.terms = terms - - @property - def categories(self) -> Optional[list[AtlasGlossaryCategory]]: - return None if self.attributes is None else self.attributes.categories - - @categories.setter - def categories(self, categories: Optional[list[AtlasGlossaryCategory]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.categories = categories - - class Attributes(Asset.Attributes): - short_description: Optional[str] = Field( - None, description="", alias="shortDescription" - ) - long_description: Optional[str] = Field( - None, description="", alias="longDescription" - ) - language: Optional[str] = Field(None, description="", alias="language") - usage: Optional[str] = 
Field(None, description="", alias="usage") - additional_attributes: Optional[dict[str, str]] = Field( - None, description="", alias="additionalAttributes" - ) - glossary_type: Optional[AtlasGlossaryType] = Field( - None, description="", alias="glossaryType" - ) - terms: Optional[list[AtlasGlossaryTerm]] = Field( - None, description="", alias="terms" - ) # relationship - categories: Optional[list[AtlasGlossaryCategory]] = Field( - None, description="", alias="categories" - ) # relationship - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, *, name: StrictStr, icon: Optional[AtlanIcon] = None - ) -> AtlasGlossary.Attributes: - validate_required_fields(["name"], [name]) - icon_str = icon.value if icon is not None else None - return AtlasGlossary.Attributes( - name=name, qualified_name=next_id(), icon=icon_str - ) - - attributes: "AtlasGlossary.Attributes" = Field( - default_factory=lambda: AtlasGlossary.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class AtlasGlossaryTerm(Asset, type_name="AtlasGlossaryTerm"): - """Description""" - - @root_validator() - def _set_qualified_name_fallback(cls, values): - if ( - "attributes" in values - and values["attributes"] - and not values["attributes"].qualified_name - ): - values["attributes"].qualified_name = values["guid"] - return values - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, - *, - name: StrictStr, - anchor: Optional[AtlasGlossary] = None, - glossary_qualified_name: Optional[StrictStr] = None, - glossary_guid: Optional[StrictStr] = None, - categories: Optional[list[AtlasGlossaryCategory]] = None, - ) -> AtlasGlossaryTerm: - validate_required_fields(["name"], [name]) - return cls( - attributes=AtlasGlossaryTerm.Attributes.create( - name=name, - anchor=anchor, - glossary_qualified_name=glossary_qualified_name, - glossary_guid=glossary_guid, - categories=categories, - ) - ) - - def trim_to_required(self) -> AtlasGlossaryTerm: - if self.anchor is None or not self.anchor.guid: - raise ValueError("anchor.guid must be available") - return self.create_for_modification( - qualified_name=self.qualified_name or "", - name=self.name or "", - glossary_guid=self.anchor.guid, - ) - - @classmethod - def create_for_modification( - cls: type[SelfAsset], - qualified_name: str = "", - name: str = "", - glossary_guid: str = "", - ) -> SelfAsset: - validate_required_fields( - ["name", "qualified_name", "glossary_guid"], - [name, qualified_name, glossary_guid], - ) - glossary = AtlasGlossary() - glossary.guid = glossary_guid - return cls( - attributes=cls.Attributes( - qualified_name=qualified_name, name=name, anchor=glossary - ) - ) - - ANCHOR: ClassVar[KeywordField] = KeywordField("anchor", "__glossary") - """Glossary in which the term is contained, searchable by the qualifiedName of the glossary.""" - - CATEGORIES: ClassVar[KeywordField] = KeywordField("categories", "__categories") - """Categories in which the term is organized, searchable by the qualifiedName of the category.""" - - type_name: str = Field("AtlasGlossaryTerm", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "AtlasGlossaryTerm": - raise ValueError("must be AtlasGlossaryTerm") - return v - - def __setattr__(self, name, value): - if name in AtlasGlossaryTerm._convenience_properties: - return object.__setattr__(self, name, value) - 
super().__setattr__(name, value) - - SHORT_DESCRIPTION: ClassVar[KeywordField] = KeywordField( - "shortDescription", "shortDescription" - ) - """ - Unused. Brief summary of the term. See 'description' and 'userDescription' instead. - """ - LONG_DESCRIPTION: ClassVar[KeywordField] = KeywordField( - "longDescription", "longDescription" - ) - """ - Unused. Detailed definition of the term. See 'readme' instead. - """ - EXAMPLES: ClassVar[KeywordField] = KeywordField("examples", "examples") - """ - Unused. Exmaples of the term. - """ - ABBREVIATION: ClassVar[KeywordField] = KeywordField("abbreviation", "abbreviation") - """ - Unused. Abbreviation of the term. - """ - USAGE: ClassVar[KeywordField] = KeywordField("usage", "usage") - """ - Unused. Intended usage for the term. - """ - ADDITIONAL_ATTRIBUTES: ClassVar[KeywordField] = KeywordField( - "additionalAttributes", "additionalAttributes" - ) - """ - Unused. Arbitrary set of additional attributes for the terrm. - """ - TERM_TYPE: ClassVar[KeywordField] = KeywordField("termType", "termType") - """ - TBC - """ - - VALID_VALUES_FOR: ClassVar[RelationField] = RelationField("validValuesFor") - """ - TBC - """ - VALID_VALUES: ClassVar[RelationField] = RelationField("validValues") - """ - TBC - """ - SEE_ALSO: ClassVar[RelationField] = RelationField("seeAlso") - """ - TBC - """ - IS_A: ClassVar[RelationField] = RelationField("isA") - """ - TBC - """ - ANTONYMS: ClassVar[RelationField] = RelationField("antonyms") - """ - TBC - """ - ASSIGNED_ENTITIES: ClassVar[RelationField] = RelationField("assignedEntities") - """ - TBC - """ - CLASSIFIES: ClassVar[RelationField] = RelationField("classifies") - """ - TBC - """ - PREFERRED_TO_TERMS: ClassVar[RelationField] = RelationField("preferredToTerms") - """ - TBC - """ - PREFERRED_TERMS: ClassVar[RelationField] = RelationField("preferredTerms") - """ - TBC - """ - TRANSLATION_TERMS: ClassVar[RelationField] = RelationField("translationTerms") - """ - TBC - """ - SYNONYMS: ClassVar[RelationField] = RelationField("synonyms") - """ - TBC - """ - REPLACED_BY: ClassVar[RelationField] = RelationField("replacedBy") - """ - TBC - """ - REPLACEMENT_TERMS: ClassVar[RelationField] = RelationField("replacementTerms") - """ - TBC - """ - TRANSLATED_TERMS: ClassVar[RelationField] = RelationField("translatedTerms") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "short_description", - "long_description", - "examples", - "abbreviation", - "usage", - "additional_attributes", - "term_type", - "valid_values_for", - "valid_values", - "see_also", - "is_a", - "antonyms", - "assigned_entities", - "classifies", - "categories", - "preferred_to_terms", - "preferred_terms", - "translation_terms", - "synonyms", - "replaced_by", - "replacement_terms", - "translated_terms", - "anchor", - ] - - @property - def short_description(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.short_description - - @short_description.setter - def short_description(self, short_description: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.short_description = short_description - - @property - def long_description(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.long_description - - @long_description.setter - def long_description(self, long_description: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.long_description = long_description - - @property - def 
examples(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.examples - - @examples.setter - def examples(self, examples: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.examples = examples - - @property - def abbreviation(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.abbreviation - - @abbreviation.setter - def abbreviation(self, abbreviation: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.abbreviation = abbreviation - - @property - def usage(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.usage - - @usage.setter - def usage(self, usage: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.usage = usage - - @property - def additional_attributes(self) -> Optional[dict[str, str]]: - return ( - None if self.attributes is None else self.attributes.additional_attributes - ) - - @additional_attributes.setter - def additional_attributes(self, additional_attributes: Optional[dict[str, str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.additional_attributes = additional_attributes - - @property - def term_type(self) -> Optional[AtlasGlossaryTermType]: - return None if self.attributes is None else self.attributes.term_type - - @term_type.setter - def term_type(self, term_type: Optional[AtlasGlossaryTermType]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.term_type = term_type - - @property - def valid_values_for(self) -> Optional[list[AtlasGlossaryTerm]]: - return None if self.attributes is None else self.attributes.valid_values_for - - @valid_values_for.setter - def valid_values_for(self, valid_values_for: Optional[list[AtlasGlossaryTerm]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.valid_values_for = valid_values_for - - @property - def valid_values(self) -> Optional[list[AtlasGlossaryTerm]]: - return None if self.attributes is None else self.attributes.valid_values - - @valid_values.setter - def valid_values(self, valid_values: Optional[list[AtlasGlossaryTerm]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.valid_values = valid_values - - @property - def see_also(self) -> Optional[list[AtlasGlossaryTerm]]: - return None if self.attributes is None else self.attributes.see_also - - @see_also.setter - def see_also(self, see_also: Optional[list[AtlasGlossaryTerm]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.see_also = see_also - - @property - def is_a(self) -> Optional[list[AtlasGlossaryTerm]]: - return None if self.attributes is None else self.attributes.is_a - - @is_a.setter - def is_a(self, is_a: Optional[list[AtlasGlossaryTerm]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_a = is_a - - @property - def antonyms(self) -> Optional[list[AtlasGlossaryTerm]]: - return None if self.attributes is None else self.attributes.antonyms - - @antonyms.setter - def antonyms(self, antonyms: Optional[list[AtlasGlossaryTerm]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.antonyms = antonyms - - @property - def assigned_entities(self) -> Optional[list[Referenceable]]: - return None if self.attributes is None else self.attributes.assigned_entities 
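# A minimal standalone sketch of the convenience-property pattern repeated above:
# every getter reads through self.attributes, every setter lazily creates the nested
# Attributes model, and __setattr__ routes those names through object.__setattr__ so
# the property setters run. ExampleAsset is hypothetical, not pyatlan's model.
from typing import Optional
from pydantic.v1 import BaseModel, Field

class ExampleAsset(BaseModel):
    class Attributes(BaseModel):
        name: Optional[str] = Field(None, alias="name")

    attributes: Optional[Attributes] = None

    @property
    def name(self) -> Optional[str]:
        # Mirror the getters above: never raise when no attributes payload exists yet.
        return None if self.attributes is None else self.attributes.name

    @name.setter
    def name(self, name: Optional[str]):
        if self.attributes is None:
            self.attributes = self.Attributes()
        self.attributes.name = name

    def __setattr__(self, attr_name, value):
        # Route the convenience property through object.__setattr__ so its setter runs
        # instead of pydantic's field assignment (the role of _convenience_properties).
        if attr_name == "name":
            return object.__setattr__(self, attr_name, value)
        super().__setattr__(attr_name, value)

# asset = ExampleAsset()
# asset.name = "orders"   # lazily creates asset.attributes, then sets attributes.name
# asset.name == "orders"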
- - @assigned_entities.setter - def assigned_entities(self, assigned_entities: Optional[list[Referenceable]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.assigned_entities = assigned_entities - - @property - def classifies(self) -> Optional[list[AtlasGlossaryTerm]]: - return None if self.attributes is None else self.attributes.classifies - - @classifies.setter - def classifies(self, classifies: Optional[list[AtlasGlossaryTerm]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.classifies = classifies - - @property - def categories(self) -> Optional[list[AtlasGlossaryCategory]]: - return None if self.attributes is None else self.attributes.categories - - @categories.setter - def categories(self, categories: Optional[list[AtlasGlossaryCategory]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.categories = categories - - @property - def preferred_to_terms(self) -> Optional[list[AtlasGlossaryTerm]]: - return None if self.attributes is None else self.attributes.preferred_to_terms - - @preferred_to_terms.setter - def preferred_to_terms(self, preferred_to_terms: Optional[list[AtlasGlossaryTerm]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.preferred_to_terms = preferred_to_terms - - @property - def preferred_terms(self) -> Optional[list[AtlasGlossaryTerm]]: - return None if self.attributes is None else self.attributes.preferred_terms - - @preferred_terms.setter - def preferred_terms(self, preferred_terms: Optional[list[AtlasGlossaryTerm]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.preferred_terms = preferred_terms - - @property - def translation_terms(self) -> Optional[list[AtlasGlossaryTerm]]: - return None if self.attributes is None else self.attributes.translation_terms - - @translation_terms.setter - def translation_terms(self, translation_terms: Optional[list[AtlasGlossaryTerm]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.translation_terms = translation_terms - - @property - def synonyms(self) -> Optional[list[AtlasGlossaryTerm]]: - return None if self.attributes is None else self.attributes.synonyms - - @synonyms.setter - def synonyms(self, synonyms: Optional[list[AtlasGlossaryTerm]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.synonyms = synonyms - - @property - def replaced_by(self) -> Optional[list[AtlasGlossaryTerm]]: - return None if self.attributes is None else self.attributes.replaced_by - - @replaced_by.setter - def replaced_by(self, replaced_by: Optional[list[AtlasGlossaryTerm]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.replaced_by = replaced_by - - @property - def replacement_terms(self) -> Optional[list[AtlasGlossaryTerm]]: - return None if self.attributes is None else self.attributes.replacement_terms - - @replacement_terms.setter - def replacement_terms(self, replacement_terms: Optional[list[AtlasGlossaryTerm]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.replacement_terms = replacement_terms - - @property - def translated_terms(self) -> Optional[list[AtlasGlossaryTerm]]: - return None if self.attributes is None else self.attributes.translated_terms - - @translated_terms.setter - def translated_terms(self, translated_terms: Optional[list[AtlasGlossaryTerm]]): - if self.attributes is None: - 
self.attributes = self.Attributes() - self.attributes.translated_terms = translated_terms - - @property - def anchor(self) -> Optional[AtlasGlossary]: - return None if self.attributes is None else self.attributes.anchor - - @anchor.setter - def anchor(self, anchor: Optional[AtlasGlossary]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.anchor = anchor - - class Attributes(Asset.Attributes): - short_description: Optional[str] = Field( - None, description="", alias="shortDescription" - ) - long_description: Optional[str] = Field( - None, description="", alias="longDescription" - ) - examples: Optional[set[str]] = Field(None, description="", alias="examples") - abbreviation: Optional[str] = Field(None, description="", alias="abbreviation") - usage: Optional[str] = Field(None, description="", alias="usage") - additional_attributes: Optional[dict[str, str]] = Field( - None, description="", alias="additionalAttributes" - ) - term_type: Optional[AtlasGlossaryTermType] = Field( - None, description="", alias="termType" - ) - valid_values_for: Optional[list[AtlasGlossaryTerm]] = Field( - None, description="", alias="validValuesFor" - ) # relationship - valid_values: Optional[list[AtlasGlossaryTerm]] = Field( - None, description="", alias="validValues" - ) # relationship - see_also: Optional[list[AtlasGlossaryTerm]] = Field( - None, description="", alias="seeAlso" - ) # relationship - is_a: Optional[list[AtlasGlossaryTerm]] = Field( - None, description="", alias="isA" - ) # relationship - antonyms: Optional[list[AtlasGlossaryTerm]] = Field( - None, description="", alias="antonyms" - ) # relationship - assigned_entities: Optional[list[Referenceable]] = Field( - None, description="", alias="assignedEntities" - ) # relationship - classifies: Optional[list[AtlasGlossaryTerm]] = Field( - None, description="", alias="classifies" - ) # relationship - categories: Optional[list[AtlasGlossaryCategory]] = Field( - None, description="", alias="categories" - ) # relationship - preferred_to_terms: Optional[list[AtlasGlossaryTerm]] = Field( - None, description="", alias="preferredToTerms" - ) # relationship - preferred_terms: Optional[list[AtlasGlossaryTerm]] = Field( - None, description="", alias="preferredTerms" - ) # relationship - translation_terms: Optional[list[AtlasGlossaryTerm]] = Field( - None, description="", alias="translationTerms" - ) # relationship - synonyms: Optional[list[AtlasGlossaryTerm]] = Field( - None, description="", alias="synonyms" - ) # relationship - replaced_by: Optional[list[AtlasGlossaryTerm]] = Field( - None, description="", alias="replacedBy" - ) # relationship - replacement_terms: Optional[list[AtlasGlossaryTerm]] = Field( - None, description="", alias="replacementTerms" - ) # relationship - translated_terms: Optional[list[AtlasGlossaryTerm]] = Field( - None, description="", alias="translatedTerms" - ) # relationship - anchor: Optional[AtlasGlossary] = Field( - None, description="", alias="anchor" - ) # relationship - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, - *, - name: StrictStr, - anchor: Optional[AtlasGlossary] = None, - glossary_qualified_name: Optional[StrictStr] = None, - glossary_guid: Optional[StrictStr] = None, - categories: Optional[list[AtlasGlossaryCategory]] = None, - ) -> AtlasGlossaryTerm.Attributes: - validate_required_fields(["name"], [name]) - validate_single_required_field( - ["anchor", "glossary_qualified_name", "glossary_guid"], - [anchor, glossary_qualified_name, glossary_guid], - ) - if 
glossary_qualified_name: - anchor = AtlasGlossary() - anchor.unique_attributes = {"qualifiedName": glossary_qualified_name} - if glossary_guid: - anchor = AtlasGlossary() - anchor.guid = glossary_guid - return AtlasGlossaryTerm.Attributes( - name=name, - anchor=anchor, - categories=categories, - qualified_name=next_id(), - ) - - attributes: "AtlasGlossaryTerm.Attributes" = Field( - default_factory=lambda: AtlasGlossaryTerm.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class Process(Asset, type_name="Process"): - """Description""" - - @classmethod - @init_guid - def create( - cls, - name: str, - connection_qualified_name: str, - inputs: list["Catalog"], - outputs: list["Catalog"], - process_id: Optional[str] = None, - parent: Optional[Process] = None, - ) -> Process: - return Process( - attributes=Process.Attributes.create( - name=name, - connection_qualified_name=connection_qualified_name, - process_id=process_id, - inputs=inputs, - outputs=outputs, - parent=parent, - ) - ) - - type_name: str = Field("Process", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "Process": - raise ValueError("must be Process") - return v - - def __setattr__(self, name, value): - if name in Process._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - CODE: ClassVar[KeywordField] = KeywordField("code", "code") - """ - Code that ran within the process. - """ - SQL: ClassVar[KeywordField] = KeywordField("sql", "sql") - """ - SQL query that ran to produce the outputs. - """ - AST: ClassVar[KeywordField] = KeywordField("ast", "ast") - """ - Parsed AST of the code or SQL statements that describe the logic of this process. 
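# A minimal standalone sketch of the anchor resolution performed by
# AtlasGlossaryTerm.Attributes.create above: exactly one of anchor,
# glossary_qualified_name or glossary_guid must be supplied, and the latter two are
# turned into a glossary reference. ExampleGlossaryRef and resolve_anchor are
# hypothetical helpers, not pyatlan APIs.
from typing import Optional

class ExampleGlossaryRef:
    def __init__(self) -> None:
        self.guid: Optional[str] = None
        self.unique_attributes: Optional[dict] = None

def resolve_anchor(
    anchor: Optional[ExampleGlossaryRef] = None,
    glossary_qualified_name: Optional[str] = None,
    glossary_guid: Optional[str] = None,
) -> ExampleGlossaryRef:
    provided = [value for value in (anchor, glossary_qualified_name, glossary_guid) if value]
    if len(provided) != 1:
        raise ValueError("exactly one of anchor, glossary_qualified_name, glossary_guid is required")
    if glossary_qualified_name:
        ref = ExampleGlossaryRef()
        ref.unique_attributes = {"qualifiedName": glossary_qualified_name}
        return ref
    if glossary_guid:
        ref = ExampleGlossaryRef()
        ref.guid = glossary_guid
        return ref
    return anchor  # a fully-formed reference was passed directly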
- """ - - MATILLION_COMPONENT: ClassVar[RelationField] = RelationField("matillionComponent") - """ - TBC - """ - AIRFLOW_TASKS: ClassVar[RelationField] = RelationField("airflowTasks") - """ - TBC - """ - COLUMN_PROCESSES: ClassVar[RelationField] = RelationField("columnProcesses") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "inputs", - "outputs", - "code", - "sql", - "ast", - "matillion_component", - "airflow_tasks", - "column_processes", - ] - - @property - def inputs(self) -> Optional[list[Catalog]]: - return None if self.attributes is None else self.attributes.inputs - - @inputs.setter - def inputs(self, inputs: Optional[list[Catalog]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.inputs = inputs - - @property - def outputs(self) -> Optional[list[Catalog]]: - return None if self.attributes is None else self.attributes.outputs - - @outputs.setter - def outputs(self, outputs: Optional[list[Catalog]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.outputs = outputs - - @property - def code(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.code - - @code.setter - def code(self, code: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.code = code - - @property - def sql(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.sql - - @sql.setter - def sql(self, sql: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sql = sql - - @property - def ast(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.ast - - @ast.setter - def ast(self, ast: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.ast = ast - - @property - def matillion_component(self) -> Optional[MatillionComponent]: - return None if self.attributes is None else self.attributes.matillion_component - - @matillion_component.setter - def matillion_component(self, matillion_component: Optional[MatillionComponent]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.matillion_component = matillion_component - - @property - def airflow_tasks(self) -> Optional[list[AirflowTask]]: - return None if self.attributes is None else self.attributes.airflow_tasks - - @airflow_tasks.setter - def airflow_tasks(self, airflow_tasks: Optional[list[AirflowTask]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.airflow_tasks = airflow_tasks - - @property - def column_processes(self) -> Optional[list[ColumnProcess]]: - return None if self.attributes is None else self.attributes.column_processes - - @column_processes.setter - def column_processes(self, column_processes: Optional[list[ColumnProcess]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_processes = column_processes - - class Attributes(Asset.Attributes): - inputs: Optional[list[Catalog]] = Field(None, description="", alias="inputs") - outputs: Optional[list[Catalog]] = Field(None, description="", alias="outputs") - code: Optional[str] = Field(None, description="", alias="code") - sql: Optional[str] = Field(None, description="", alias="sql") - ast: Optional[str] = Field(None, description="", alias="ast") - matillion_component: Optional[MatillionComponent] = Field( - None, description="", alias="matillionComponent" - ) # 
relationship - airflow_tasks: Optional[list[AirflowTask]] = Field( - None, description="", alias="airflowTasks" - ) # relationship - column_processes: Optional[list[ColumnProcess]] = Field( - None, description="", alias="columnProcesses" - ) # relationship - - @staticmethod - def generate_qualified_name( - name: str, - connection_qualified_name: str, - inputs: list["Catalog"], - outputs: list["Catalog"], - parent: Optional["Process"] = None, - process_id: Optional[str] = None, - ) -> str: - def append_relationship(output: StringIO, relationship: Asset): - if relationship.guid: - output.write(relationship.guid) - - def append_relationships(output: StringIO, relationships: list["Catalog"]): - for catalog in relationships: - append_relationship(output, catalog) - - validate_required_fields( - ["name", "connection_qualified_name", "inputs", "outputs"], - [name, connection_qualified_name, inputs, outputs], - ) - if process_id and process_id.strip(): - return f"{connection_qualified_name}/{process_id}" - buffer = StringIO() - buffer.write(name) - buffer.write(connection_qualified_name) - if parent: - append_relationship(buffer, parent) - append_relationships(buffer, inputs) - append_relationships(buffer, outputs) - ret_value = hashlib.md5( - buffer.getvalue().encode(), usedforsecurity=False - ).hexdigest() - buffer.close() - return ret_value - - @classmethod - @init_guid - def create( - cls, - name: str, - connection_qualified_name: str, - inputs: list["Catalog"], - outputs: list["Catalog"], - process_id: Optional[str] = None, - parent: Optional[Process] = None, - ) -> Process.Attributes: - qualified_name = Process.Attributes.generate_qualified_name( - name=name, - connection_qualified_name=connection_qualified_name, - process_id=process_id, - inputs=inputs, - outputs=outputs, - parent=parent, - ) - connector_name = connection_qualified_name.split("/")[1] - return Process.Attributes( - name=name, - qualified_name=qualified_name, - connector_name=connector_name, - connection_qualified_name=connection_qualified_name, - inputs=inputs, - outputs=outputs, - ) - - attributes: "Process.Attributes" = Field( - default_factory=lambda: Process.Attributes(), - description="Map of attributes in the instance and their values. 
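# Example (sketch): table-level lineage via Process.create(). When no
# process_id is given, generate_qualified_name() above hashes the name, the
# connection qualified name and the GUIDs of the inputs/outputs with MD5, so
# re-creating the same process yields the same qualified_name. Table and the
# qualified names below are illustrative placeholders; any Catalog subtype can
# appear as an input or output.
from pyatlan.model.assets import Process, Table

process = Process.create(
    name="daily load: RAW_ORDERS -> ORDERS",
    connection_qualified_name="default/snowflake/1234567890",
    inputs=[Table.ref_by_qualified_name("default/snowflake/1234567890/DB/RAW/RAW_ORDERS")],
    outputs=[Table.ref_by_qualified_name("default/snowflake/1234567890/DB/ANALYTICS/ORDERS")],
)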
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class Namespace(Asset, type_name="Namespace"): - """Description""" - - type_name: str = Field("Namespace", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "Namespace": - raise ValueError("must be Namespace") - return v - - def __setattr__(self, name, value): - if name in Namespace._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - CHILDREN_QUERIES: ClassVar[RelationField] = RelationField("childrenQueries") - """ - TBC - """ - CHILDREN_FOLDERS: ClassVar[RelationField] = RelationField("childrenFolders") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "children_queries", - "children_folders", - ] - - @property - def children_queries(self) -> Optional[list[Query]]: - return None if self.attributes is None else self.attributes.children_queries - - @children_queries.setter - def children_queries(self, children_queries: Optional[list[Query]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.children_queries = children_queries - - @property - def children_folders(self) -> Optional[list[Folder]]: - return None if self.attributes is None else self.attributes.children_folders - - @children_folders.setter - def children_folders(self, children_folders: Optional[list[Folder]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.children_folders = children_folders - - class Attributes(Asset.Attributes): - children_queries: Optional[list[Query]] = Field( - None, description="", alias="childrenQueries" - ) # relationship - children_folders: Optional[list[Folder]] = Field( - None, description="", alias="childrenFolders" - ) # relationship - - attributes: "Namespace.Attributes" = Field( - default_factory=lambda: Namespace.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class Folder(Namespace): - """Description""" - - type_name: str = Field("Folder", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "Folder": - raise ValueError("must be Folder") - return v - - def __setattr__(self, name, value): - if name in Folder._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - PARENT_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "parentQualifiedName", "parentQualifiedName", "parentQualifiedName.text" - ) - """ - Unique name of the parent folder or collection in which this folder exists. - """ - COLLECTION_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "collectionQualifiedName", - "collectionQualifiedName", - "collectionQualifiedName.text", - ) - """ - Unique name of the collection in which this folder exists. 
- """ - - PARENT: ClassVar[RelationField] = RelationField("parent") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "parent_qualified_name", - "collection_qualified_name", - "parent", - ] - - @property - def parent_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.parent_qualified_name - ) - - @parent_qualified_name.setter - def parent_qualified_name(self, parent_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.parent_qualified_name = parent_qualified_name - - @property - def collection_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.collection_qualified_name - ) - - @collection_qualified_name.setter - def collection_qualified_name(self, collection_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.collection_qualified_name = collection_qualified_name - - @property - def parent(self) -> Optional[Namespace]: - return None if self.attributes is None else self.attributes.parent - - @parent.setter - def parent(self, parent: Optional[Namespace]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.parent = parent - - class Attributes(Namespace.Attributes): - parent_qualified_name: Optional[str] = Field( - None, description="", alias="parentQualifiedName" - ) - collection_qualified_name: Optional[str] = Field( - None, description="", alias="collectionQualifiedName" - ) - parent: Optional[Namespace] = Field( - None, description="", alias="parent" - ) # relationship - - attributes: "Folder.Attributes" = Field( - default_factory=lambda: Folder.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class Catalog(Asset, type_name="Catalog"): - """Description""" - - type_name: str = Field("Catalog", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "Catalog": - raise ValueError("must be Catalog") - return v - - def __setattr__(self, name, value): - if name in Catalog._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - INPUT_TO_PROCESSES: ClassVar[RelationField] = RelationField("inputToProcesses") - """ - TBC - """ - OUTPUT_FROM_AIRFLOW_TASKS: ClassVar[RelationField] = RelationField( - "outputFromAirflowTasks" - ) - """ - TBC - """ - INPUT_TO_AIRFLOW_TASKS: ClassVar[RelationField] = RelationField( - "inputToAirflowTasks" - ) - """ - TBC - """ - OUTPUT_FROM_PROCESSES: ClassVar[RelationField] = RelationField( - "outputFromProcesses" - ) - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "input_to_processes", - "output_from_airflow_tasks", - "input_to_airflow_tasks", - "output_from_processes", - ] - - @property - def input_to_processes(self) -> Optional[list[Process]]: - return None if self.attributes is None else self.attributes.input_to_processes - - @input_to_processes.setter - def input_to_processes(self, input_to_processes: Optional[list[Process]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.input_to_processes = input_to_processes - - @property - def output_from_airflow_tasks(self) -> Optional[list[AirflowTask]]: - return ( - None - if self.attributes is None - else self.attributes.output_from_airflow_tasks - ) - - @output_from_airflow_tasks.setter - def output_from_airflow_tasks( - self, output_from_airflow_tasks: Optional[list[AirflowTask]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.output_from_airflow_tasks = output_from_airflow_tasks - - @property - def input_to_airflow_tasks(self) -> Optional[list[AirflowTask]]: - return ( - None if self.attributes is None else self.attributes.input_to_airflow_tasks - ) - - @input_to_airflow_tasks.setter - def input_to_airflow_tasks( - self, input_to_airflow_tasks: Optional[list[AirflowTask]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.input_to_airflow_tasks = input_to_airflow_tasks - - @property - def output_from_processes(self) -> Optional[list[Process]]: - return ( - None if self.attributes is None else self.attributes.output_from_processes - ) - - @output_from_processes.setter - def output_from_processes(self, output_from_processes: Optional[list[Process]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.output_from_processes = output_from_processes - - class Attributes(Asset.Attributes): - input_to_processes: Optional[list[Process]] = Field( - None, description="", alias="inputToProcesses" - ) # relationship - output_from_airflow_tasks: Optional[list[AirflowTask]] = Field( - None, description="", alias="outputFromAirflowTasks" - ) # relationship - input_to_airflow_tasks: Optional[list[AirflowTask]] = Field( - None, description="", alias="inputToAirflowTasks" - ) # relationship - output_from_processes: Optional[list[Process]] = Field( - None, description="", alias="outputFromProcesses" - ) # relationship - - attributes: "Catalog.Attributes" = Field( - default_factory=lambda: Catalog.Attributes(), - description="Map of attributes in 
the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class Tag(Catalog): - """Description""" - - type_name: str = Field("Tag", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "Tag": - raise ValueError("must be Tag") - return v - - def __setattr__(self, name, value): - if name in Tag._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - TAG_ID: ClassVar[KeywordField] = KeywordField("tagId", "tagId") - """ - Unique identifier of the tag in the source system. - """ - TAG_ATTRIBUTES: ClassVar[KeywordField] = KeywordField( - "tagAttributes", "tagAttributes" - ) - """ - Attributes associated with the tag in the source system. - """ - TAG_ALLOWED_VALUES: ClassVar[KeywordTextField] = KeywordTextField( - "tagAllowedValues", "tagAllowedValues", "tagAllowedValues.text" - ) - """ - Allowed values for the tag in the source system. These are denormalized from tagAttributes for ease of querying. - """ - MAPPED_CLASSIFICATION_NAME: ClassVar[KeywordField] = KeywordField( - "mappedClassificationName", "mappedClassificationName" - ) - """ - Name of the classification in Atlan that is mapped to this tag. - """ - - _convenience_properties: ClassVar[list[str]] = [ - "tag_id", - "tag_attributes", - "tag_allowed_values", - "mapped_atlan_tag_name", - ] - - @property - def tag_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.tag_id - - @tag_id.setter - def tag_id(self, tag_id: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.tag_id = tag_id - - @property - def tag_attributes(self) -> Optional[list[SourceTagAttribute]]: - return None if self.attributes is None else self.attributes.tag_attributes - - @tag_attributes.setter - def tag_attributes(self, tag_attributes: Optional[list[SourceTagAttribute]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.tag_attributes = tag_attributes - - @property - def tag_allowed_values(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.tag_allowed_values - - @tag_allowed_values.setter - def tag_allowed_values(self, tag_allowed_values: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.tag_allowed_values = tag_allowed_values - - @property - def mapped_atlan_tag_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.mapped_atlan_tag_name - ) - - @mapped_atlan_tag_name.setter - def mapped_atlan_tag_name(self, mapped_atlan_tag_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mapped_atlan_tag_name = mapped_atlan_tag_name - - class Attributes(Catalog.Attributes): - tag_id: Optional[str] = Field(None, description="", alias="tagId") - tag_attributes: Optional[list[SourceTagAttribute]] = Field( - None, description="", alias="tagAttributes" - ) - tag_allowed_values: Optional[set[str]] = Field( - None, description="", alias="tagAllowedValues" - ) - mapped_atlan_tag_name: Optional[str] = Field( - None, description="", alias="mappedClassificationName" - ) - - attributes: "Tag.Attributes" = Field( - default_factory=lambda: Tag.Attributes(), - description="Map of attributes in the instance and their values. 
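# Example (sketch): every Catalog subtype exposes the lineage relationships
# declared above as plain properties. `asset` is assumed to be a Catalog
# instance retrieved with its relationship attributes populated (for example
# via client.asset.get_by_guid); otherwise these properties are simply None.
if asset.input_to_processes:
    for process in asset.input_to_processes:
        print(f"{asset.name} is an input to process {process.guid}")
if asset.output_from_processes:
    for process in asset.output_from_processes:
        print(f"{asset.name} is produced by process {process.guid}")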
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class ColumnProcess(Process): - """Description""" - - @classmethod - @init_guid - def create( - cls, - name: str, - connection_qualified_name: str, - inputs: list["Catalog"], - outputs: list["Catalog"], - parent: Process, - process_id: Optional[str] = None, - ) -> ColumnProcess: - return ColumnProcess( - attributes=ColumnProcess.Attributes.create( - name=name, - connection_qualified_name=connection_qualified_name, - process_id=process_id, - inputs=inputs, - outputs=outputs, - parent=parent, - ) - ) - - type_name: str = Field("ColumnProcess", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "ColumnProcess": - raise ValueError("must be ColumnProcess") - return v - - def __setattr__(self, name, value): - if name in ColumnProcess._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - OUTPUTS: ClassVar[RelationField] = RelationField("outputs") - """ - Assets that are outputs from this process. - """ - PROCESS: ClassVar[RelationField] = RelationField("process") - """ - TBC - """ - INPUTS: ClassVar[RelationField] = RelationField("inputs") - """ - Assets that are inputs to this process. - """ - - _convenience_properties: ClassVar[list[str]] = [ - "outputs", - "process", - "inputs", - ] - - @property - def outputs(self) -> Optional[list[Catalog]]: - return None if self.attributes is None else self.attributes.outputs - - @outputs.setter - def outputs(self, outputs: Optional[list[Catalog]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.outputs = outputs - - @property - def process(self) -> Optional[Process]: - return None if self.attributes is None else self.attributes.process - - @process.setter - def process(self, process: Optional[Process]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.process = process - - @property - def inputs(self) -> Optional[list[Catalog]]: - return None if self.attributes is None else self.attributes.inputs - - @inputs.setter - def inputs(self, inputs: Optional[list[Catalog]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.inputs = inputs - - class Attributes(Process.Attributes): - outputs: Optional[list[Catalog]] = Field( - None, description="", alias="outputs" - ) # relationship - process: Optional[Process] = Field( - None, description="", alias="process" - ) # relationship - inputs: Optional[list[Catalog]] = Field( - None, description="", alias="inputs" - ) # relationship - - @classmethod - @init_guid - def create( - cls, - name: str, - connection_qualified_name: str, - inputs: list["Catalog"], - outputs: list["Catalog"], - parent: Process, - process_id: Optional[str] = None, - ) -> ColumnProcess.Attributes: - validate_required_fields(["parent"], [parent]) - qualified_name = Process.Attributes.generate_qualified_name( - name=name, - connection_qualified_name=connection_qualified_name, - process_id=process_id, - inputs=inputs, - outputs=outputs, - parent=parent, - ) - connector_name = connection_qualified_name.split("/")[1] - return ColumnProcess.Attributes( - name=name, - qualified_name=qualified_name, - connector_name=connector_name, - connection_qualified_name=connection_qualified_name, - inputs=inputs, - outputs=outputs, - process=parent, - ) - - attributes: "ColumnProcess.Attributes" = Field( - default_factory=lambda: 
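# Example (sketch): the Tag convenience properties above map snake_case names
# onto their source-system aliases; note that mapped_atlan_tag_name serializes
# as mappedClassificationName. The bare Tag() construction and the values are
# purely illustrative.
from pyatlan.model.assets import Tag

tag = Tag()
tag.tag_id = "SNOWFLAKE_PII"
tag.tag_allowed_values = {"email", "phone"}
tag.mapped_atlan_tag_name = "PII"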
ColumnProcess.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class Airflow(Catalog): - """Description""" - - type_name: str = Field("Airflow", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "Airflow": - raise ValueError("must be Airflow") - return v - - def __setattr__(self, name, value): - if name in Airflow._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - AIRFLOW_TAGS: ClassVar[KeywordField] = KeywordField("airflowTags", "airflowTags") - """ - Tags assigned to the asset in Airflow. - """ - AIRFLOW_RUN_VERSION: ClassVar[KeywordField] = KeywordField( - "airflowRunVersion", "airflowRunVersion" - ) - """ - Version of the run in Airflow. - """ - AIRFLOW_RUN_OPEN_LINEAGE_VERSION: ClassVar[KeywordField] = KeywordField( - "airflowRunOpenLineageVersion", "airflowRunOpenLineageVersion" - ) - """ - Version of the run in OpenLineage. - """ - AIRFLOW_RUN_NAME: ClassVar[KeywordField] = KeywordField( - "airflowRunName", "airflowRunName" - ) - """ - Name of the run. - """ - AIRFLOW_RUN_TYPE: ClassVar[KeywordField] = KeywordField( - "airflowRunType", "airflowRunType" - ) - """ - Type of the run. - """ - AIRFLOW_RUN_START_TIME: ClassVar[NumericField] = NumericField( - "airflowRunStartTime", "airflowRunStartTime" - ) - """ - Start time of the run. - """ - AIRFLOW_RUN_END_TIME: ClassVar[NumericField] = NumericField( - "airflowRunEndTime", "airflowRunEndTime" - ) - """ - End time of the run. - """ - AIRFLOW_RUN_OPEN_LINEAGE_STATE: ClassVar[KeywordField] = KeywordField( - "airflowRunOpenLineageState", "airflowRunOpenLineageState" - ) - """ - State of the run in OpenLineage. 
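# Example (sketch): column-level lineage hangs off an existing table-level
# Process through the required `parent` argument, which becomes the `process`
# relationship on the resulting ColumnProcess. The GUIDs are placeholders.
from pyatlan.model.assets import Column, ColumnProcess, Process

column_process = ColumnProcess.create(
    name="RAW_ORDERS.AMOUNT -> ORDERS.AMOUNT",
    connection_qualified_name="default/snowflake/1234567890",
    inputs=[Column.ref_by_guid("6d2e1f3a-0000-0000-0000-000000000001")],   # placeholder
    outputs=[Column.ref_by_guid("6d2e1f3a-0000-0000-0000-000000000002")],  # placeholder
    parent=Process.ref_by_guid("6d2e1f3a-0000-0000-0000-000000000003"),    # table-level process
)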
- """ - - _convenience_properties: ClassVar[list[str]] = [ - "airflow_tags", - "airflow_run_version", - "airflow_run_open_lineage_version", - "airflow_run_name", - "airflow_run_type", - "airflow_run_start_time", - "airflow_run_end_time", - "airflow_run_open_lineage_state", - ] - - @property - def airflow_tags(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.airflow_tags - - @airflow_tags.setter - def airflow_tags(self, airflow_tags: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.airflow_tags = airflow_tags - - @property - def airflow_run_version(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.airflow_run_version - - @airflow_run_version.setter - def airflow_run_version(self, airflow_run_version: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.airflow_run_version = airflow_run_version - - @property - def airflow_run_open_lineage_version(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.airflow_run_open_lineage_version - ) - - @airflow_run_open_lineage_version.setter - def airflow_run_open_lineage_version( - self, airflow_run_open_lineage_version: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.airflow_run_open_lineage_version = ( - airflow_run_open_lineage_version - ) - - @property - def airflow_run_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.airflow_run_name - - @airflow_run_name.setter - def airflow_run_name(self, airflow_run_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.airflow_run_name = airflow_run_name - - @property - def airflow_run_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.airflow_run_type - - @airflow_run_type.setter - def airflow_run_type(self, airflow_run_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.airflow_run_type = airflow_run_type - - @property - def airflow_run_start_time(self) -> Optional[datetime]: - return ( - None if self.attributes is None else self.attributes.airflow_run_start_time - ) - - @airflow_run_start_time.setter - def airflow_run_start_time(self, airflow_run_start_time: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.airflow_run_start_time = airflow_run_start_time - - @property - def airflow_run_end_time(self) -> Optional[datetime]: - return None if self.attributes is None else self.attributes.airflow_run_end_time - - @airflow_run_end_time.setter - def airflow_run_end_time(self, airflow_run_end_time: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.airflow_run_end_time = airflow_run_end_time - - @property - def airflow_run_open_lineage_state(self) -> Optional[OpenLineageRunState]: - return ( - None - if self.attributes is None - else self.attributes.airflow_run_open_lineage_state - ) - - @airflow_run_open_lineage_state.setter - def airflow_run_open_lineage_state( - self, airflow_run_open_lineage_state: Optional[OpenLineageRunState] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.airflow_run_open_lineage_state = airflow_run_open_lineage_state - - class Attributes(Catalog.Attributes): - airflow_tags: 
Optional[set[str]] = Field( - None, description="", alias="airflowTags" - ) - airflow_run_version: Optional[str] = Field( - None, description="", alias="airflowRunVersion" - ) - airflow_run_open_lineage_version: Optional[str] = Field( - None, description="", alias="airflowRunOpenLineageVersion" - ) - airflow_run_name: Optional[str] = Field( - None, description="", alias="airflowRunName" - ) - airflow_run_type: Optional[str] = Field( - None, description="", alias="airflowRunType" - ) - airflow_run_start_time: Optional[datetime] = Field( - None, description="", alias="airflowRunStartTime" - ) - airflow_run_end_time: Optional[datetime] = Field( - None, description="", alias="airflowRunEndTime" - ) - airflow_run_open_lineage_state: Optional[OpenLineageRunState] = Field( - None, description="", alias="airflowRunOpenLineageState" - ) - - attributes: "Airflow.Attributes" = Field( - default_factory=lambda: Airflow.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class AirflowDag(Airflow): - """Description""" - - type_name: str = Field("AirflowDag", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "AirflowDag": - raise ValueError("must be AirflowDag") - return v - - def __setattr__(self, name, value): - if name in AirflowDag._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - AIRFLOW_DAG_SCHEDULE: ClassVar[KeywordField] = KeywordField( - "airflowDagSchedule", "airflowDagSchedule" - ) - """ - Schedule for the DAG. - """ - AIRFLOW_DAG_SCHEDULE_DELTA: ClassVar[NumericField] = NumericField( - "airflowDagScheduleDelta", "airflowDagScheduleDelta" - ) - """ - Duration between scheduled runs, in seconds. 
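# Example (sketch): the run metadata declared on Airflow above is inherited by
# AirflowDag and AirflowTask, so a task can carry details of the run that
# produced it. OpenLineageRunState.COMPLETE is assumed to be one of the enum's
# members; the other values are illustrative.
from datetime import datetime, timezone

from pyatlan.model.assets import AirflowTask
from pyatlan.model.enums import OpenLineageRunState

task = AirflowTask()
task.airflow_run_name = "manual__2024-01-01T00:00:00+00:00"
task.airflow_run_start_time = datetime(2024, 1, 1, tzinfo=timezone.utc)
task.airflow_run_open_lineage_state = OpenLineageRunState.COMPLETE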
- """ - - AIRFLOW_TASKS: ClassVar[RelationField] = RelationField("airflowTasks") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "airflow_dag_schedule", - "airflow_dag_schedule_delta", - "airflow_tasks", - ] - - @property - def airflow_dag_schedule(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.airflow_dag_schedule - - @airflow_dag_schedule.setter - def airflow_dag_schedule(self, airflow_dag_schedule: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.airflow_dag_schedule = airflow_dag_schedule - - @property - def airflow_dag_schedule_delta(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.airflow_dag_schedule_delta - ) - - @airflow_dag_schedule_delta.setter - def airflow_dag_schedule_delta(self, airflow_dag_schedule_delta: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.airflow_dag_schedule_delta = airflow_dag_schedule_delta - - @property - def airflow_tasks(self) -> Optional[list[AirflowTask]]: - return None if self.attributes is None else self.attributes.airflow_tasks - - @airflow_tasks.setter - def airflow_tasks(self, airflow_tasks: Optional[list[AirflowTask]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.airflow_tasks = airflow_tasks - - class Attributes(Airflow.Attributes): - airflow_dag_schedule: Optional[str] = Field( - None, description="", alias="airflowDagSchedule" - ) - airflow_dag_schedule_delta: Optional[int] = Field( - None, description="", alias="airflowDagScheduleDelta" - ) - airflow_tasks: Optional[list[AirflowTask]] = Field( - None, description="", alias="airflowTasks" - ) # relationship - - attributes: "AirflowDag.Attributes" = Field( - default_factory=lambda: AirflowDag.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class AirflowTask(Airflow): - """Description""" - - type_name: str = Field("AirflowTask", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "AirflowTask": - raise ValueError("must be AirflowTask") - return v - - def __setattr__(self, name, value): - if name in AirflowTask._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - AIRFLOW_TASK_OPERATOR_CLASS: ClassVar[KeywordTextField] = KeywordTextField( - "airflowTaskOperatorClass", - "airflowTaskOperatorClass.keyword", - "airflowTaskOperatorClass", - ) - """ - Class name for the operator this task uses. - """ - AIRFLOW_DAG_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "airflowDagName", "airflowDagName.keyword", "airflowDagName" - ) - """ - Simple name of the DAG this task is contained within. - """ - AIRFLOW_DAG_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "airflowDagQualifiedName", "airflowDagQualifiedName" - ) - """ - Unique name of the DAG this task is contained within. - """ - AIRFLOW_TASK_CONNECTION_ID: ClassVar[KeywordTextField] = KeywordTextField( - "airflowTaskConnectionId", - "airflowTaskConnectionId.keyword", - "airflowTaskConnectionId", - ) - """ - Identifier for the connection this task accesses. - """ - AIRFLOW_TASK_SQL: ClassVar[KeywordField] = KeywordField( - "airflowTaskSql", "airflowTaskSql" - ) - """ - SQL code that executes through this task. 
- """ - AIRFLOW_TASK_RETRY_NUMBER: ClassVar[NumericField] = NumericField( - "airflowTaskRetryNumber", "airflowTaskRetryNumber" - ) - """ - Retry count for this task running. - """ - AIRFLOW_TASK_POOL: ClassVar[KeywordField] = KeywordField( - "airflowTaskPool", "airflowTaskPool" - ) - """ - Pool on which this run happened. - """ - AIRFLOW_TASK_POOL_SLOTS: ClassVar[NumericField] = NumericField( - "airflowTaskPoolSlots", "airflowTaskPoolSlots" - ) - """ - Pool slots used for the run. - """ - AIRFLOW_TASK_QUEUE: ClassVar[KeywordField] = KeywordField( - "airflowTaskQueue", "airflowTaskQueue" - ) - """ - Queue on which this run happened. - """ - AIRFLOW_TASK_PRIORITY_WEIGHT: ClassVar[NumericField] = NumericField( - "airflowTaskPriorityWeight", "airflowTaskPriorityWeight" - ) - """ - Priority of the run. - """ - AIRFLOW_TASK_TRIGGER_RULE: ClassVar[KeywordField] = KeywordField( - "airflowTaskTriggerRule", "airflowTaskTriggerRule" - ) - """ - Trigger for the run. - """ - - OUTPUTS: ClassVar[RelationField] = RelationField("outputs") - """ - TBC - """ - PROCESS: ClassVar[RelationField] = RelationField("process") - """ - TBC - """ - INPUTS: ClassVar[RelationField] = RelationField("inputs") - """ - TBC - """ - AIRFLOW_DAG: ClassVar[RelationField] = RelationField("airflowDag") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "airflow_task_operator_class", - "airflow_dag_name", - "airflow_dag_qualified_name", - "airflow_task_connection_id", - "airflow_task_sql", - "airflow_task_retry_number", - "airflow_task_pool", - "airflow_task_pool_slots", - "airflow_task_queue", - "airflow_task_priority_weight", - "airflow_task_trigger_rule", - "outputs", - "process", - "inputs", - "airflow_dag", - ] - - @property - def airflow_task_operator_class(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.airflow_task_operator_class - ) - - @airflow_task_operator_class.setter - def airflow_task_operator_class(self, airflow_task_operator_class: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.airflow_task_operator_class = airflow_task_operator_class - - @property - def airflow_dag_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.airflow_dag_name - - @airflow_dag_name.setter - def airflow_dag_name(self, airflow_dag_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.airflow_dag_name = airflow_dag_name - - @property - def airflow_dag_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.airflow_dag_qualified_name - ) - - @airflow_dag_qualified_name.setter - def airflow_dag_qualified_name(self, airflow_dag_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.airflow_dag_qualified_name = airflow_dag_qualified_name - - @property - def airflow_task_connection_id(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.airflow_task_connection_id - ) - - @airflow_task_connection_id.setter - def airflow_task_connection_id(self, airflow_task_connection_id: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.airflow_task_connection_id = airflow_task_connection_id - - @property - def airflow_task_sql(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.airflow_task_sql - - 
@airflow_task_sql.setter - def airflow_task_sql(self, airflow_task_sql: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.airflow_task_sql = airflow_task_sql - - @property - def airflow_task_retry_number(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.airflow_task_retry_number - ) - - @airflow_task_retry_number.setter - def airflow_task_retry_number(self, airflow_task_retry_number: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.airflow_task_retry_number = airflow_task_retry_number - - @property - def airflow_task_pool(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.airflow_task_pool - - @airflow_task_pool.setter - def airflow_task_pool(self, airflow_task_pool: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.airflow_task_pool = airflow_task_pool - - @property - def airflow_task_pool_slots(self) -> Optional[int]: - return ( - None if self.attributes is None else self.attributes.airflow_task_pool_slots - ) - - @airflow_task_pool_slots.setter - def airflow_task_pool_slots(self, airflow_task_pool_slots: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.airflow_task_pool_slots = airflow_task_pool_slots - - @property - def airflow_task_queue(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.airflow_task_queue - - @airflow_task_queue.setter - def airflow_task_queue(self, airflow_task_queue: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.airflow_task_queue = airflow_task_queue - - @property - def airflow_task_priority_weight(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.airflow_task_priority_weight - ) - - @airflow_task_priority_weight.setter - def airflow_task_priority_weight(self, airflow_task_priority_weight: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.airflow_task_priority_weight = airflow_task_priority_weight - - @property - def airflow_task_trigger_rule(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.airflow_task_trigger_rule - ) - - @airflow_task_trigger_rule.setter - def airflow_task_trigger_rule(self, airflow_task_trigger_rule: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.airflow_task_trigger_rule = airflow_task_trigger_rule - - @property - def outputs(self) -> Optional[list[Catalog]]: - return None if self.attributes is None else self.attributes.outputs - - @outputs.setter - def outputs(self, outputs: Optional[list[Catalog]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.outputs = outputs - - @property - def process(self) -> Optional[Process]: - return None if self.attributes is None else self.attributes.process - - @process.setter - def process(self, process: Optional[Process]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.process = process - - @property - def inputs(self) -> Optional[list[Catalog]]: - return None if self.attributes is None else self.attributes.inputs - - @inputs.setter - def inputs(self, inputs: Optional[list[Catalog]]): - if self.attributes is None: - self.attributes = self.Attributes() - 
self.attributes.inputs = inputs - - @property - def airflow_dag(self) -> Optional[AirflowDag]: - return None if self.attributes is None else self.attributes.airflow_dag - - @airflow_dag.setter - def airflow_dag(self, airflow_dag: Optional[AirflowDag]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.airflow_dag = airflow_dag - - class Attributes(Airflow.Attributes): - airflow_task_operator_class: Optional[str] = Field( - None, description="", alias="airflowTaskOperatorClass" - ) - airflow_dag_name: Optional[str] = Field( - None, description="", alias="airflowDagName" - ) - airflow_dag_qualified_name: Optional[str] = Field( - None, description="", alias="airflowDagQualifiedName" - ) - airflow_task_connection_id: Optional[str] = Field( - None, description="", alias="airflowTaskConnectionId" - ) - airflow_task_sql: Optional[str] = Field( - None, description="", alias="airflowTaskSql" - ) - airflow_task_retry_number: Optional[int] = Field( - None, description="", alias="airflowTaskRetryNumber" - ) - airflow_task_pool: Optional[str] = Field( - None, description="", alias="airflowTaskPool" - ) - airflow_task_pool_slots: Optional[int] = Field( - None, description="", alias="airflowTaskPoolSlots" - ) - airflow_task_queue: Optional[str] = Field( - None, description="", alias="airflowTaskQueue" - ) - airflow_task_priority_weight: Optional[int] = Field( - None, description="", alias="airflowTaskPriorityWeight" - ) - airflow_task_trigger_rule: Optional[str] = Field( - None, description="", alias="airflowTaskTriggerRule" - ) - outputs: Optional[list[Catalog]] = Field( - None, description="", alias="outputs" - ) # relationship - process: Optional[Process] = Field( - None, description="", alias="process" - ) # relationship - inputs: Optional[list[Catalog]] = Field( - None, description="", alias="inputs" - ) # relationship - airflow_dag: Optional[AirflowDag] = Field( - None, description="", alias="airflowDag" - ) # relationship - - attributes: "AirflowTask.Attributes" = Field( - default_factory=lambda: AirflowTask.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class DataQuality(Catalog): - """Description""" - - type_name: str = Field("DataQuality", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "DataQuality": - raise ValueError("must be DataQuality") - return v - - def __setattr__(self, name, value): - if name in DataQuality._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - _convenience_properties: ClassVar[list[str]] = [] - - -class Metric(DataQuality): - """Description""" - - type_name: str = Field("Metric", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "Metric": - raise ValueError("must be Metric") - return v - - def __setattr__(self, name, value): - if name in Metric._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - METRIC_TYPE: ClassVar[KeywordField] = KeywordField("metricType", "metricType") - """ - Type of the metric. - """ - METRIC_SQL: ClassVar[KeywordField] = KeywordField("metricSQL", "metricSQL") - """ - SQL query used to compute the metric. - """ - METRIC_FILTERS: ClassVar[TextField] = TextField("metricFilters", "metricFilters") - """ - Filters to be applied to the metric query. 
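# Example (sketch): the ClassVar field constants declared on these classes
# (KeywordField, KeywordTextField, NumericField, RelationField) are intended
# for building searches. A minimal fluent search for the tasks of one DAG,
# assuming the usual FluentSearch/CompoundQuery helpers and an AtlanClient
# `client`; the DAG qualified name is a placeholder.
from pyatlan.model.assets import AirflowTask
from pyatlan.model.fluent_search import CompoundQuery, FluentSearch

request = (
    FluentSearch()
    .where(CompoundQuery.asset_type(AirflowTask))
    .where(AirflowTask.AIRFLOW_DAG_QUALIFIED_NAME.eq("default/airflow/1234567890/daily_etl"))
    .to_request()
)
for result in client.asset.search(request):
    print(result.name, result.airflow_task_operator_class)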
- """ - METRIC_TIME_GRAINS: ClassVar[TextField] = TextField( - "metricTimeGrains", "metricTimeGrains" - ) - """ - List of time grains to be applied to the metric query. - """ - - METRIC_TIMESTAMP_COLUMN: ClassVar[RelationField] = RelationField( - "metricTimestampColumn" - ) - """ - TBC - """ - ASSETS: ClassVar[RelationField] = RelationField("assets") - """ - TBC - """ - METRIC_DIMENSION_COLUMNS: ClassVar[RelationField] = RelationField( - "metricDimensionColumns" - ) - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "metric_type", - "metric_s_q_l", - "metric_filters", - "metric_time_grains", - "metric_timestamp_column", - "assets", - "metric_dimension_columns", - ] - - @property - def metric_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.metric_type - - @metric_type.setter - def metric_type(self, metric_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.metric_type = metric_type - - @property - def metric_s_q_l(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.metric_s_q_l - - @metric_s_q_l.setter - def metric_s_q_l(self, metric_s_q_l: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.metric_s_q_l = metric_s_q_l - - @property - def metric_filters(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.metric_filters - - @metric_filters.setter - def metric_filters(self, metric_filters: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.metric_filters = metric_filters - - @property - def metric_time_grains(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.metric_time_grains - - @metric_time_grains.setter - def metric_time_grains(self, metric_time_grains: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.metric_time_grains = metric_time_grains - - @property - def metric_timestamp_column(self) -> Optional[Column]: - return ( - None if self.attributes is None else self.attributes.metric_timestamp_column - ) - - @metric_timestamp_column.setter - def metric_timestamp_column(self, metric_timestamp_column: Optional[Column]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.metric_timestamp_column = metric_timestamp_column - - @property - def assets(self) -> Optional[list[Asset]]: - return None if self.attributes is None else self.attributes.assets - - @assets.setter - def assets(self, assets: Optional[list[Asset]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.assets = assets - - @property - def metric_dimension_columns(self) -> Optional[list[Column]]: - return ( - None - if self.attributes is None - else self.attributes.metric_dimension_columns - ) - - @metric_dimension_columns.setter - def metric_dimension_columns( - self, metric_dimension_columns: Optional[list[Column]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.metric_dimension_columns = metric_dimension_columns - - class Attributes(DataQuality.Attributes): - metric_type: Optional[str] = Field(None, description="", alias="metricType") - metric_s_q_l: Optional[str] = Field(None, description="", alias="metricSQL") - metric_filters: Optional[str] = Field( - None, description="", alias="metricFilters" - ) - metric_time_grains: 
Optional[set[str]] = Field( - None, description="", alias="metricTimeGrains" - ) - metric_timestamp_column: Optional[Column] = Field( - None, description="", alias="metricTimestampColumn" - ) # relationship - assets: Optional[list[Asset]] = Field( - None, description="", alias="assets" - ) # relationship - metric_dimension_columns: Optional[list[Column]] = Field( - None, description="", alias="metricDimensionColumns" - ) # relationship - - attributes: "Metric.Attributes" = Field( - default_factory=lambda: Metric.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class Resource(Catalog): - """Description""" - - type_name: str = Field("Resource", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "Resource": - raise ValueError("must be Resource") - return v - - def __setattr__(self, name, value): - if name in Resource._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - LINK: ClassVar[KeywordField] = KeywordField("link", "link") - """ - URL to the resource. - """ - IS_GLOBAL: ClassVar[BooleanField] = BooleanField("isGlobal", "isGlobal") - """ - Whether the resource is global (true) or not (false). - """ - REFERENCE: ClassVar[KeywordField] = KeywordField("reference", "reference") - """ - Reference to the resource. - """ - RESOURCE_METADATA: ClassVar[KeywordField] = KeywordField( - "resourceMetadata", "resourceMetadata" - ) - """ - Metadata of the resource. - """ - - _convenience_properties: ClassVar[list[str]] = [ - "link", - "is_global", - "reference", - "resource_metadata", - ] - - @property - def link(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.link - - @link.setter - def link(self, link: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.link = link - - @property - def is_global(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_global - - @is_global.setter - def is_global(self, is_global: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_global = is_global - - @property - def reference(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.reference - - @reference.setter - def reference(self, reference: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.reference = reference - - @property - def resource_metadata(self) -> Optional[dict[str, str]]: - return None if self.attributes is None else self.attributes.resource_metadata - - @resource_metadata.setter - def resource_metadata(self, resource_metadata: Optional[dict[str, str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.resource_metadata = resource_metadata - - class Attributes(Catalog.Attributes): - link: Optional[str] = Field(None, description="", alias="link") - is_global: Optional[bool] = Field(None, description="", alias="isGlobal") - reference: Optional[str] = Field(None, description="", alias="reference") - resource_metadata: Optional[dict[str, str]] = Field( - None, description="", alias="resourceMetadata" - ) - - attributes: "Resource.Attributes" = Field( - default_factory=lambda: Resource.Attributes(), - description="Map of attributes in the instance and their values. 
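# Example (sketch): wiring a data-quality Metric to the assets it measures via
# the convenience properties defined above. The bare Metric() construction and
# the GUIDs are illustrative placeholders.
from pyatlan.model.assets import Column, Metric, Table

metric = Metric()
metric.name = "orders_row_count"
metric.metric_type = "row_count"
metric.metric_s_q_l = "SELECT COUNT(*) FROM analytics.orders"
metric.assets = [Table.ref_by_guid("6d2e1f3a-0000-0000-0000-000000000004")]  # placeholder
metric.metric_timestamp_column = Column.ref_by_guid(
    "6d2e1f3a-0000-0000-0000-000000000005"  # placeholder
)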
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class Readme(Resource): - """Description""" - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, *, asset: Asset, content: str, asset_name: Optional[str] = None - ) -> Readme: - return Readme( - attributes=Readme.Attributes.create( - asset=asset, content=content, asset_name=asset_name - ) - ) - - @property - def description(self) -> Optional[str]: - ret_value = self.attributes.description - return unquote(ret_value) if ret_value is not None else ret_value - - @description.setter - def description(self, description: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.description = ( - quote(description) if description is not None else description - ) - - type_name: str = Field("Readme", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "Readme": - raise ValueError("must be Readme") - return v - - def __setattr__(self, name, value): - if name in Readme._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SEE_ALSO: ClassVar[RelationField] = RelationField("seeAlso") - """ - TBC - """ - ASSET: ClassVar[RelationField] = RelationField("asset") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "see_also", - "asset", - ] - - @property - def see_also(self) -> Optional[list[Readme]]: - return None if self.attributes is None else self.attributes.see_also - - @see_also.setter - def see_also(self, see_also: Optional[list[Readme]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.see_also = see_also - - @property - def asset(self) -> Optional[Asset]: - return None if self.attributes is None else self.attributes.asset - - @asset.setter - def asset(self, asset: Optional[Asset]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset = asset - - class Attributes(Resource.Attributes): - see_also: Optional[list[Readme]] = Field( - None, description="", alias="seeAlso" - ) # relationship - asset: Optional[Asset] = Field( - None, description="", alias="asset" - ) # relationship - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, *, asset: Asset, content: str, asset_name: Optional[str] = None - ) -> Readme.Attributes: - validate_required_fields(["asset", "content"], [asset, content]) - if not asset.name or len(asset.name) < 1: - if not asset_name: - raise ValueError( - "asset_name is required when name is not available from asset" - ) - elif asset_name: - raise ValueError( - "asset_name can not be given when name is available from asset" - ) - else: - asset_name = asset.name - return Readme.Attributes( - qualified_name=f"{asset.guid}/readme", - name=f"{asset_name} Readme", - asset=asset, - description=quote(content), - ) - - attributes: "Readme.Attributes" = Field( - default_factory=lambda: Readme.Attributes(), - description="Map of attributes in the instance and their values. 
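# Example (sketch): attaching a README to an existing asset. The HTML content
# is URL-quoted on the way in (see the description setter above) and unquoted
# again when read back. The GUID is a placeholder; asset_name is required here
# because a bare reference carries no name of its own.
from pyatlan.model.assets import Readme, Table

readme = Readme.create(
    asset=Table.ref_by_guid("6d2e1f3a-0000-0000-0000-000000000006"),  # placeholder
    content="<h1>Orders</h1><p>One row per customer order.</p>",
    asset_name="ORDERS",
)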
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class File(Resource): - """Description""" - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, *, name: str, connection_qualified_name: str, file_type: FileType - ) -> File: - return File( - attributes=File.Attributes.create( - name=name, - connection_qualified_name=connection_qualified_name, - file_type=file_type, - ) - ) - - type_name: str = Field("File", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "File": - raise ValueError("must be File") - return v - - def __setattr__(self, name, value): - if name in File._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - FILE_TYPE: ClassVar[KeywordField] = KeywordField("fileType", "fileType") - """ - Type (extension) of the file. - """ - FILE_PATH: ClassVar[KeywordField] = KeywordField("filePath", "filePath") - """ - URL giving the online location where the file can be accessed. - """ - - FILE_ASSETS: ClassVar[RelationField] = RelationField("fileAssets") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "file_type", - "file_path", - "file_assets", - ] - - @property - def file_type(self) -> Optional[FileType]: - return None if self.attributes is None else self.attributes.file_type - - @file_type.setter - def file_type(self, file_type: Optional[FileType]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.file_type = file_type - - @property - def file_path(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.file_path - - @file_path.setter - def file_path(self, file_path: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.file_path = file_path - - @property - def file_assets(self) -> Optional[Asset]: - return None if self.attributes is None else self.attributes.file_assets - - @file_assets.setter - def file_assets(self, file_assets: Optional[Asset]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.file_assets = file_assets - - class Attributes(Resource.Attributes): - file_type: Optional[FileType] = Field(None, description="", alias="fileType") - file_path: Optional[str] = Field(None, description="", alias="filePath") - file_assets: Optional[Asset] = Field( - None, description="", alias="fileAssets" - ) # relationship - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, *, name: str, connection_qualified_name: str, file_type: FileType - ) -> File.Attributes: - validate_required_fields( - ["name", "connection_qualified_name", "file_type"], - [name, connection_qualified_name, file_type], - ) - return File.Attributes( - name=name, - qualified_name=f"{connection_qualified_name}/{name}", - connection_qualified_name=connection_qualified_name, - file_type=file_type, - ) - - attributes: "File.Attributes" = Field( - default_factory=lambda: File.Attributes(), - description="Map of attributes in the instance and their values. 
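# Example (sketch): registering a file asset. Its qualified name becomes
# "<connection_qualified_name>/<name>", as shown in Attributes.create above.
# FileType.PDF is assumed to be one of the enum's members, and the URL and
# qualified names are placeholders.
from pyatlan.model.assets import File
from pyatlan.model.enums import FileType

file = File.create(
    name="data-dictionary.pdf",
    connection_qualified_name="default/api/1234567890",
    file_type=FileType.PDF,
)
file.file_path = "https://example.com/files/data-dictionary.pdf"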
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class Link(Resource): - """Description""" - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, *, asset: Asset, name: str, link: str, idempotent: bool = False - ) -> Link: - return Link( - attributes=Link.Attributes.create( - asset=asset, name=name, link=link, idempotent=idempotent - ) - ) - - type_name: str = Field("Link", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "Link": - raise ValueError("must be Link") - return v - - def __setattr__(self, name, value): - if name in Link._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - ICON: ClassVar[KeywordField] = KeywordField("icon", "icon") - """ - Icon for the link. - """ - ICON_TYPE: ClassVar[KeywordField] = KeywordField("iconType", "iconType") - """ - Type of icon for the link, for example: image or emoji. - """ - - ASSET: ClassVar[RelationField] = RelationField("asset") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "icon", - "icon_type", - "asset", - ] - - @property - def icon(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.icon - - @icon.setter - def icon(self, icon: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.icon = icon - - @property - def icon_type(self) -> Optional[IconType]: - return None if self.attributes is None else self.attributes.icon_type - - @icon_type.setter - def icon_type(self, icon_type: Optional[IconType]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.icon_type = icon_type - - @property - def asset(self) -> Optional[Asset]: - return None if self.attributes is None else self.attributes.asset - - @asset.setter - def asset(self, asset: Optional[Asset]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset = asset - - class Attributes(Resource.Attributes): - icon: Optional[str] = Field(None, description="", alias="icon") - icon_type: Optional[IconType] = Field(None, description="", alias="iconType") - asset: Optional[Asset] = Field( - None, description="", alias="asset" - ) # relationship - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, *, asset: Asset, name: str, link: str, idempotent: bool - ) -> Link.Attributes: - validate_required_fields(["asset", "name", "link"], [asset, name, link]) - qn = f"{asset.qualified_name}/{name}" if idempotent else str(uuid.uuid4()) - return Link.Attributes( - qualified_name=qn, - name=name, - link=link, - asset=asset.trim_to_reference(), - ) - - attributes: "Link.Attributes" = Field( - default_factory=lambda: Link.Attributes(), - description="Map of attributes in the instance and their values. 
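# Example (sketch): adding a link to an asset. With idempotent=True the
# qualified name is derived from the asset's qualified name plus the link's
# name (see Attributes.create above), so saving the same link twice updates it
# rather than duplicating it. The table reference is a placeholder and is
# assumed to already carry its qualified_name.
from pyatlan.model.assets import Link, Table

table = Table.ref_by_qualified_name("default/snowflake/1234567890/DB/ANALYTICS/ORDERS")
dashboard_link = Link.create(
    asset=table,
    name="Revenue dashboard",
    link="https://bi.example.com/dashboards/revenue",
    idempotent=True,
)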
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class DataMesh(Catalog): - """Description""" - - type_name: str = Field("DataMesh", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "DataMesh": - raise ValueError("must be DataMesh") - return v - - def __setattr__(self, name, value): - if name in DataMesh._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - PARENT_DOMAIN_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "parentDomainQualifiedName", - "parentDomainQualifiedName", - "parentDomainQualifiedName.text", - ) - """ - Unique name of the parent domain in which this asset exists. - """ - SUPER_DOMAIN_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "superDomainQualifiedName", - "superDomainQualifiedName", - "superDomainQualifiedName.text", - ) - """ - Unique name of the top-level domain in which this asset exists. - """ - - _convenience_properties: ClassVar[list[str]] = [ - "parent_domain_qualified_name", - "super_domain_qualified_name", - ] - - @property - def parent_domain_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.parent_domain_qualified_name - ) - - @parent_domain_qualified_name.setter - def parent_domain_qualified_name(self, parent_domain_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.parent_domain_qualified_name = parent_domain_qualified_name - - @property - def super_domain_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.super_domain_qualified_name - ) - - @super_domain_qualified_name.setter - def super_domain_qualified_name(self, super_domain_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.super_domain_qualified_name = super_domain_qualified_name - - class Attributes(Catalog.Attributes): - parent_domain_qualified_name: Optional[str] = Field( - None, description="", alias="parentDomainQualifiedName" - ) - super_domain_qualified_name: Optional[str] = Field( - None, description="", alias="superDomainQualifiedName" - ) - - attributes: "DataMesh.Attributes" = Field( - default_factory=lambda: DataMesh.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class DataDomain(DataMesh): - """Description""" - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, - *, - name: StrictStr, - icon: Optional[AtlanIcon] = None, - parent_domain: Optional[DataDomain] = None, - parent_domain_qualified_name: Optional[StrictStr] = None, - ) -> DataDomain: - validate_required_fields(["name"], [name]) - attributes = DataDomain.Attributes.create( - name=name, - icon=icon, - parent_domain=parent_domain, - parent_domain_qualified_name=parent_domain_qualified_name, - ) - return cls(attributes=attributes) - - @classmethod - def create_for_modification( - cls: type[SelfAsset], - qualified_name: str = "", - name: str = "", - ) -> SelfAsset: - validate_required_fields(["name", "qualified_name"], [name, qualified_name]) - # Split the data domain qualified_name to extract data mesh info - fields = qualified_name.split("/") - # for domain and subdomain - if len(fields) not in (3, 5): - raise ValueError(f"Invalid data domain qualified_name: {qualified_name}") - return cls( - attributes=cls.Attributes( - qualified_name=qualified_name, - name=name, - ) - ) - - type_name: str = Field("DataDomain", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "DataDomain": - raise ValueError("must be DataDomain") - return v - - def __setattr__(self, name, value): - if name in DataDomain._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - DATA_PRODUCTS: ClassVar[RelationField] = RelationField("dataProducts") - """ - TBC - """ - PARENT_DOMAIN: ClassVar[RelationField] = RelationField("parentDomain") - """ - TBC - """ - SUB_DOMAINS: ClassVar[RelationField] = RelationField("subDomains") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "data_products", - "parent_domain", - "sub_domains", - ] - - @property - def data_products(self) -> Optional[list[DataProduct]]: - return None if self.attributes is None else self.attributes.data_products - - @data_products.setter - def data_products(self, data_products: Optional[list[DataProduct]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.data_products = data_products - - @property - def parent_domain(self) -> Optional[DataDomain]: - return None if self.attributes is None else self.attributes.parent_domain - - @parent_domain.setter - def parent_domain(self, parent_domain: Optional[DataDomain]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.parent_domain = parent_domain - - @property - def sub_domains(self) -> Optional[list[DataDomain]]: - return None if self.attributes is None else self.attributes.sub_domains - - @sub_domains.setter - def sub_domains(self, sub_domains: Optional[list[DataDomain]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sub_domains = sub_domains - - class Attributes(DataMesh.Attributes): - data_products: Optional[list[DataProduct]] = Field( - None, description="", alias="dataProducts" - ) # relationship - parent_domain: Optional[DataDomain] = Field( - None, description="", alias="parentDomain" - ) # relationship - sub_domains: Optional[list[DataDomain]] = Field( - None, description="", alias="subDomains" - ) # relationship - - @classmethod - @init_guid - def create( - cls, - *, - name: StrictStr, - icon: Optional[AtlanIcon] = None, - parent_domain: 
Optional[DataDomain] = None, - parent_domain_qualified_name: Optional[StrictStr] = None, - ) -> DataDomain.Attributes: - validate_required_fields(["name"], [name]) - mesh_name = to_camel_case(name) - qualified_name = f"default/domain/{mesh_name}" - # If "qualified name" of the parent domain is specified - if parent_domain_qualified_name: - parent_domain = DataDomain() - parent_domain.unique_attributes = { - "qualifiedName": parent_domain_qualified_name - } - qualified_name = f"{parent_domain_qualified_name}/domain/{mesh_name}" - icon_str = icon.value if icon is not None else None - return DataDomain.Attributes( - name=name, - parent_domain=parent_domain, - qualified_name=qualified_name, - icon=icon_str, - ) - - attributes: "DataDomain.Attributes" = Field( - default_factory=lambda: DataDomain.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class DataProduct(DataMesh): - """Description""" - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, - *, - name: StrictStr, - assets: IndexSearchRequest, - icon: Optional[AtlanIcon] = None, - domain: Optional[DataDomain] = None, - domain_qualified_name: Optional[StrictStr] = None, - ) -> DataProduct: - validate_required_fields(["name", "assets"], [name, assets]) - assets_dsl = assets.get_dsl_str() - attributes = DataProduct.Attributes.create( - name=name, - assets_dsl=assets_dsl, - icon=icon, - domain=domain, - domain_qualified_name=domain_qualified_name, - ) - return cls(attributes=attributes) - - @classmethod - def create_for_modification( - cls: type[SelfAsset], - qualified_name: str = "", - name: str = "", - ) -> SelfAsset: - validate_required_fields( - ["name", "qualified_name"], - [name, qualified_name], - ) - # Split the data product qualified_name to extract data mesh info - fields = qualified_name.split("/") - if len(fields) != 3: - raise ValueError(f"Invalid data product qualified_name: {qualified_name}") - return cls( - attributes=cls.Attributes( - qualified_name=qualified_name, - name=name, - ) - ) - - type_name: str = Field("DataProduct", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "DataProduct": - raise ValueError("must be DataProduct") - return v - - def __setattr__(self, name, value): - if name in DataProduct._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - DATA_PRODUCT_STATUS: ClassVar[KeywordField] = KeywordField( - "dataProductStatus", "dataProductStatus" - ) - """ - Status of this data product. - """ - DATA_PRODUCT_CRITICALITY: ClassVar[KeywordField] = KeywordField( - "dataProductCriticality", "dataProductCriticality" - ) - """ - Criticality of this data product. - """ - DATA_PRODUCT_SENSITIVITY: ClassVar[KeywordField] = KeywordField( - "dataProductSensitivity", "dataProductSensitivity" - ) - """ - Information sensitivity of this data product. - """ - DATA_PRODUCT_ASSETS_DSL: ClassVar[KeywordField] = KeywordField( - "dataProductAssetsDSL", "dataProductAssetsDSL" - ) - """ - Search DSL used to define which assets are part of this data product. - """ - DATA_PRODUCT_ASSETS_PLAYBOOK_FILTER: ClassVar[KeywordField] = KeywordField( - "dataProductAssetsPlaybookFilter", "dataProductAssetsPlaybookFilter" - ) - """ - Playbook filter to define which assets are part of this data product. 
- """ - - DATA_DOMAIN: ClassVar[RelationField] = RelationField("dataDomain") - """ - TBC - """ - OUTPUT_PORTS: ClassVar[RelationField] = RelationField("outputPorts") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "data_product_status", - "data_product_criticality", - "data_product_sensitivity", - "data_product_assets_d_s_l", - "data_product_assets_playbook_filter", - "data_domain", - "output_ports", - ] - - @property - def data_product_status(self) -> Optional[DataProductStatus]: - return None if self.attributes is None else self.attributes.data_product_status - - @data_product_status.setter - def data_product_status(self, data_product_status: Optional[DataProductStatus]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.data_product_status = data_product_status - - @property - def data_product_criticality(self) -> Optional[DataProductCriticality]: - return ( - None - if self.attributes is None - else self.attributes.data_product_criticality - ) - - @data_product_criticality.setter - def data_product_criticality( - self, data_product_criticality: Optional[DataProductCriticality] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.data_product_criticality = data_product_criticality - - @property - def data_product_sensitivity(self) -> Optional[DataProductSensitivity]: - return ( - None - if self.attributes is None - else self.attributes.data_product_sensitivity - ) - - @data_product_sensitivity.setter - def data_product_sensitivity( - self, data_product_sensitivity: Optional[DataProductSensitivity] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.data_product_sensitivity = data_product_sensitivity - - @property - def data_product_assets_d_s_l(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.data_product_assets_d_s_l - ) - - @data_product_assets_d_s_l.setter - def data_product_assets_d_s_l(self, data_product_assets_d_s_l: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.data_product_assets_d_s_l = data_product_assets_d_s_l - - @property - def data_product_assets_playbook_filter(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.data_product_assets_playbook_filter - ) - - @data_product_assets_playbook_filter.setter - def data_product_assets_playbook_filter( - self, data_product_assets_playbook_filter: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.data_product_assets_playbook_filter = ( - data_product_assets_playbook_filter - ) - - @property - def data_domain(self) -> Optional[DataDomain]: - return None if self.attributes is None else self.attributes.data_domain - - @data_domain.setter - def data_domain(self, data_domain: Optional[DataDomain]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.data_domain = data_domain - - @property - def output_ports(self) -> Optional[list[Asset]]: - return None if self.attributes is None else self.attributes.output_ports - - @output_ports.setter - def output_ports(self, output_ports: Optional[list[Asset]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.output_ports = output_ports - - class Attributes(DataMesh.Attributes): - data_product_status: Optional[DataProductStatus] = Field( - None, description="", alias="dataProductStatus" - ) - 
data_product_criticality: Optional[DataProductCriticality] = Field( - None, description="", alias="dataProductCriticality" - ) - data_product_sensitivity: Optional[DataProductSensitivity] = Field( - None, description="", alias="dataProductSensitivity" - ) - data_product_assets_d_s_l: Optional[str] = Field( - None, description="", alias="dataProductAssetsDSL" - ) - data_product_assets_playbook_filter: Optional[str] = Field( - None, description="", alias="dataProductAssetsPlaybookFilter" - ) - data_domain: Optional[DataDomain] = Field( - None, description="", alias="dataDomain" - ) # relationship - output_ports: Optional[list[Asset]] = Field( - None, description="", alias="outputPorts" - ) # relationship - - @classmethod - @init_guid - def create( - cls, - *, - name: StrictStr, - assets_dsl: StrictStr, - icon: Optional[AtlanIcon] = None, - domain: Optional[DataDomain] = None, - domain_qualified_name: Optional[StrictStr] = None, - ) -> DataProduct.Attributes: - validate_required_fields(["name"], [name]) - validate_single_required_field( - ["domain", "domain_qualified_name"], - [domain, domain_qualified_name], - ) - if domain_qualified_name: - domain = DataDomain() - domain.unique_attributes = {"qualifiedName": domain_qualified_name} - icon_str = icon.value if icon is not None else None - camel_case_name = to_camel_case(name) - return DataProduct.Attributes( - name=name, - data_product_assets_d_s_l=assets_dsl, - data_domain=domain, - qualified_name=f"default/product/{camel_case_name}", - icon=icon_str, - ) - - attributes: "DataProduct.Attributes" = Field( - default_factory=lambda: DataProduct.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class SQL(Catalog): - """Description""" - - type_name: str = Field("SQL", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "SQL": - raise ValueError("must be SQL") - return v - - def __setattr__(self, name, value): - if name in SQL._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - QUERY_COUNT: ClassVar[NumericField] = NumericField("queryCount", "queryCount") - """ - Number of times this asset has been queried. - """ - QUERY_USER_COUNT: ClassVar[NumericField] = NumericField( - "queryUserCount", "queryUserCount" - ) - """ - Number of unique users who have queried this asset. - """ - QUERY_USER_MAP: ClassVar[KeywordField] = KeywordField( - "queryUserMap", "queryUserMap" - ) - """ - Map of unique users who have queried this asset to the number of times they have queried it. - """ - QUERY_COUNT_UPDATED_AT: ClassVar[NumericField] = NumericField( - "queryCountUpdatedAt", "queryCountUpdatedAt" - ) - """ - Time (epoch) at which the query count was last updated, in milliseconds. - """ - DATABASE_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "databaseName", "databaseName.keyword", "databaseName" - ) - """ - Simple name of the database in which this SQL asset exists, or empty if it does not exist within a database. - """ - DATABASE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "databaseQualifiedName", "databaseQualifiedName" - ) - """ - Unique name of the database in which this SQL asset exists, or empty if it does not exist within a database. 
- """ - SCHEMA_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "schemaName", "schemaName.keyword", "schemaName" - ) - """ - Simple name of the schema in which this SQL asset exists, or empty if it does not exist within a schema. - """ - SCHEMA_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "schemaQualifiedName", "schemaQualifiedName" - ) - """ - Unique name of the schema in which this SQL asset exists, or empty if it does not exist within a schema. - """ - TABLE_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "tableName", "tableName.keyword", "tableName" - ) - """ - Simple name of the table in which this SQL asset exists, or empty if it does not exist within a table. - """ - TABLE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "tableQualifiedName", "tableQualifiedName" - ) - """ - Unique name of the table in which this SQL asset exists, or empty if it does not exist within a table. - """ - VIEW_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "viewName", "viewName.keyword", "viewName" - ) - """ - Simple name of the view in which this SQL asset exists, or empty if it does not exist within a view. - """ - VIEW_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "viewQualifiedName", "viewQualifiedName" - ) - """ - Unique name of the view in which this SQL asset exists, or empty if it does not exist within a view. - """ - IS_PROFILED: ClassVar[BooleanField] = BooleanField("isProfiled", "isProfiled") - """ - Whether this asset has been profiled (true) or not (false). - """ - LAST_PROFILED_AT: ClassVar[NumericField] = NumericField( - "lastProfiledAt", "lastProfiledAt" - ) - """ - Time (epoch) at which this asset was last profiled, in milliseconds. - """ - - DBT_SOURCES: ClassVar[RelationField] = RelationField("dbtSources") - """ - TBC - """ - SQL_DBT_MODELS: ClassVar[RelationField] = RelationField("sqlDbtModels") - """ - TBC - """ - SQL_DBT_SOURCES: ClassVar[RelationField] = RelationField("sqlDBTSources") - """ - TBC - """ - DBT_MODELS: ClassVar[RelationField] = RelationField("dbtModels") - """ - TBC - """ - DBT_TESTS: ClassVar[RelationField] = RelationField("dbtTests") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "query_count", - "query_user_count", - "query_user_map", - "query_count_updated_at", - "database_name", - "database_qualified_name", - "schema_name", - "schema_qualified_name", - "table_name", - "table_qualified_name", - "view_name", - "view_qualified_name", - "is_profiled", - "last_profiled_at", - "dbt_sources", - "sql_dbt_models", - "sql_dbt_sources", - "dbt_models", - "dbt_tests", - ] - - @property - def query_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.query_count - - @query_count.setter - def query_count(self, query_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.query_count = query_count - - @property - def query_user_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.query_user_count - - @query_user_count.setter - def query_user_count(self, query_user_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.query_user_count = query_user_count - - @property - def query_user_map(self) -> Optional[dict[str, int]]: - return None if self.attributes is None else self.attributes.query_user_map - - @query_user_map.setter - def query_user_map(self, query_user_map: Optional[dict[str, int]]): - if self.attributes is 
None: - self.attributes = self.Attributes() - self.attributes.query_user_map = query_user_map - - @property - def query_count_updated_at(self) -> Optional[datetime]: - return ( - None if self.attributes is None else self.attributes.query_count_updated_at - ) - - @query_count_updated_at.setter - def query_count_updated_at(self, query_count_updated_at: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.query_count_updated_at = query_count_updated_at - - @property - def database_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.database_name - - @database_name.setter - def database_name(self, database_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.database_name = database_name - - @property - def database_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.database_qualified_name - ) - - @database_qualified_name.setter - def database_qualified_name(self, database_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.database_qualified_name = database_qualified_name - - @property - def schema_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.schema_name - - @schema_name.setter - def schema_name(self, schema_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.schema_name = schema_name - - @property - def schema_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.schema_qualified_name - ) - - @schema_qualified_name.setter - def schema_qualified_name(self, schema_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.schema_qualified_name = schema_qualified_name - - @property - def table_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.table_name - - @table_name.setter - def table_name(self, table_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.table_name = table_name - - @property - def table_qualified_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.table_qualified_name - - @table_qualified_name.setter - def table_qualified_name(self, table_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.table_qualified_name = table_qualified_name - - @property - def view_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.view_name - - @view_name.setter - def view_name(self, view_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.view_name = view_name - - @property - def view_qualified_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.view_qualified_name - - @view_qualified_name.setter - def view_qualified_name(self, view_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.view_qualified_name = view_qualified_name - - @property - def is_profiled(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_profiled - - @is_profiled.setter - def is_profiled(self, is_profiled: Optional[bool]): - 
if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_profiled = is_profiled - - @property - def last_profiled_at(self) -> Optional[datetime]: - return None if self.attributes is None else self.attributes.last_profiled_at - - @last_profiled_at.setter - def last_profiled_at(self, last_profiled_at: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.last_profiled_at = last_profiled_at - - @property - def dbt_sources(self) -> Optional[list[DbtSource]]: - return None if self.attributes is None else self.attributes.dbt_sources - - @dbt_sources.setter - def dbt_sources(self, dbt_sources: Optional[list[DbtSource]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_sources = dbt_sources - - @property - def sql_dbt_models(self) -> Optional[list[DbtModel]]: - return None if self.attributes is None else self.attributes.sql_dbt_models - - @sql_dbt_models.setter - def sql_dbt_models(self, sql_dbt_models: Optional[list[DbtModel]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sql_dbt_models = sql_dbt_models - - @property - def sql_dbt_sources(self) -> Optional[list[DbtSource]]: - return None if self.attributes is None else self.attributes.sql_dbt_sources - - @sql_dbt_sources.setter - def sql_dbt_sources(self, sql_dbt_sources: Optional[list[DbtSource]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sql_dbt_sources = sql_dbt_sources - - @property - def dbt_models(self) -> Optional[list[DbtModel]]: - return None if self.attributes is None else self.attributes.dbt_models - - @dbt_models.setter - def dbt_models(self, dbt_models: Optional[list[DbtModel]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_models = dbt_models - - @property - def dbt_tests(self) -> Optional[list[DbtTest]]: - return None if self.attributes is None else self.attributes.dbt_tests - - @dbt_tests.setter - def dbt_tests(self, dbt_tests: Optional[list[DbtTest]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_tests = dbt_tests - - class Attributes(Catalog.Attributes): - query_count: Optional[int] = Field(None, description="", alias="queryCount") - query_user_count: Optional[int] = Field( - None, description="", alias="queryUserCount" - ) - query_user_map: Optional[dict[str, int]] = Field( - None, description="", alias="queryUserMap" - ) - query_count_updated_at: Optional[datetime] = Field( - None, description="", alias="queryCountUpdatedAt" - ) - database_name: Optional[str] = Field(None, description="", alias="databaseName") - database_qualified_name: Optional[str] = Field( - None, description="", alias="databaseQualifiedName" - ) - schema_name: Optional[str] = Field(None, description="", alias="schemaName") - schema_qualified_name: Optional[str] = Field( - None, description="", alias="schemaQualifiedName" - ) - table_name: Optional[str] = Field(None, description="", alias="tableName") - table_qualified_name: Optional[str] = Field( - None, description="", alias="tableQualifiedName" - ) - view_name: Optional[str] = Field(None, description="", alias="viewName") - view_qualified_name: Optional[str] = Field( - None, description="", alias="viewQualifiedName" - ) - is_profiled: Optional[bool] = Field(None, description="", alias="isProfiled") - last_profiled_at: Optional[datetime] = Field( - None, description="", alias="lastProfiledAt" - ) - 
dbt_sources: Optional[list[DbtSource]] = Field( - None, description="", alias="dbtSources" - ) # relationship - sql_dbt_models: Optional[list[DbtModel]] = Field( - None, description="", alias="sqlDbtModels" - ) # relationship - sql_dbt_sources: Optional[list[DbtSource]] = Field( - None, description="", alias="sqlDBTSources" - ) # relationship - dbt_models: Optional[list[DbtModel]] = Field( - None, description="", alias="dbtModels" - ) # relationship - dbt_tests: Optional[list[DbtTest]] = Field( - None, description="", alias="dbtTests" - ) # relationship - - attributes: "SQL.Attributes" = Field( - default_factory=lambda: SQL.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class Query(SQL): - """Description""" - - type_name: str = Field("Query", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "Query": - raise ValueError("must be Query") - return v - - def __setattr__(self, name, value): - if name in Query._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - RAW_QUERY: ClassVar[KeywordField] = KeywordField("rawQuery", "rawQuery") - """ - Deprecated. See 'longRawQuery' instead. - """ - LONG_RAW_QUERY: ClassVar[KeywordField] = KeywordField( - "longRawQuery", "longRawQuery" - ) - """ - Raw SQL query string. - """ - RAW_QUERY_TEXT: ClassVar[RelationField] = RelationField("rawQueryText") - """ - - """ - DEFAULT_SCHEMA_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "defaultSchemaQualifiedName", - "defaultSchemaQualifiedName", - "defaultSchemaQualifiedName.text", - ) - """ - Unique name of the default schema to use for this query. - """ - DEFAULT_DATABASE_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "defaultDatabaseQualifiedName", - "defaultDatabaseQualifiedName", - "defaultDatabaseQualifiedName.text", - ) - """ - Unique name of the default database to use for this query. - """ - VARIABLES_SCHEMA_BASE64: ClassVar[KeywordField] = KeywordField( - "variablesSchemaBase64", "variablesSchemaBase64" - ) - """ - Base64-encoded string of the variables to use in this query. - """ - IS_PRIVATE: ClassVar[BooleanField] = BooleanField("isPrivate", "isPrivate") - """ - Whether this query is private (true) or shared (false). - """ - IS_SQL_SNIPPET: ClassVar[BooleanField] = BooleanField( - "isSqlSnippet", "isSqlSnippet" - ) - """ - Whether this query is a SQL snippet (true) or not (false). - """ - PARENT_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "parentQualifiedName", "parentQualifiedName", "parentQualifiedName.text" - ) - """ - Unique name of the parent collection or folder in which this query exists. - """ - COLLECTION_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "collectionQualifiedName", - "collectionQualifiedName", - "collectionQualifiedName.text", - ) - """ - Unique name of the collection in which this query exists. - """ - IS_VISUAL_QUERY: ClassVar[BooleanField] = BooleanField( - "isVisualQuery", "isVisualQuery" - ) - """ - Whether this query is a visual query (true) or not (false). - """ - VISUAL_BUILDER_SCHEMA_BASE64: ClassVar[KeywordField] = KeywordField( - "visualBuilderSchemaBase64", "visualBuilderSchemaBase64" - ) - """ - Base64-encoded string for the visual query builder. 
- """ - - PARENT: ClassVar[RelationField] = RelationField("parent") - """ - TBC - """ - COLUMNS: ClassVar[RelationField] = RelationField("columns") - """ - TBC - """ - TABLES: ClassVar[RelationField] = RelationField("tables") - """ - TBC - """ - VIEWS: ClassVar[RelationField] = RelationField("views") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "raw_query", - "long_raw_query", - "raw_query_text", - "default_schema_qualified_name", - "default_database_qualified_name", - "variables_schema_base64", - "is_private", - "is_sql_snippet", - "parent_qualified_name", - "collection_qualified_name", - "is_visual_query", - "visual_builder_schema_base64", - "parent", - "columns", - "tables", - "views", - ] - - @property - def raw_query(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.raw_query - - @raw_query.setter - def raw_query(self, raw_query: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.raw_query = raw_query - - @property - def long_raw_query(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.long_raw_query - - @long_raw_query.setter - def long_raw_query(self, long_raw_query: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.long_raw_query = long_raw_query - - @property - def raw_query_text(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.raw_query_text - - @raw_query_text.setter - def raw_query_text(self, raw_query_text: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.raw_query_text = raw_query_text - - @property - def default_schema_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.default_schema_qualified_name - ) - - @default_schema_qualified_name.setter - def default_schema_qualified_name( - self, default_schema_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.default_schema_qualified_name = default_schema_qualified_name - - @property - def default_database_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.default_database_qualified_name - ) - - @default_database_qualified_name.setter - def default_database_qualified_name( - self, default_database_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.default_database_qualified_name = ( - default_database_qualified_name - ) - - @property - def variables_schema_base64(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.variables_schema_base64 - ) - - @variables_schema_base64.setter - def variables_schema_base64(self, variables_schema_base64: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.variables_schema_base64 = variables_schema_base64 - - @property - def is_private(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_private - - @is_private.setter - def is_private(self, is_private: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_private = is_private - - @property - def is_sql_snippet(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_sql_snippet - - @is_sql_snippet.setter - 
def is_sql_snippet(self, is_sql_snippet: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_sql_snippet = is_sql_snippet - - @property - def parent_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.parent_qualified_name - ) - - @parent_qualified_name.setter - def parent_qualified_name(self, parent_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.parent_qualified_name = parent_qualified_name - - @property - def collection_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.collection_qualified_name - ) - - @collection_qualified_name.setter - def collection_qualified_name(self, collection_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.collection_qualified_name = collection_qualified_name - - @property - def is_visual_query(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_visual_query - - @is_visual_query.setter - def is_visual_query(self, is_visual_query: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_visual_query = is_visual_query - - @property - def visual_builder_schema_base64(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.visual_builder_schema_base64 - ) - - @visual_builder_schema_base64.setter - def visual_builder_schema_base64(self, visual_builder_schema_base64: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.visual_builder_schema_base64 = visual_builder_schema_base64 - - @property - def parent(self) -> Optional[Namespace]: - return None if self.attributes is None else self.attributes.parent - - @parent.setter - def parent(self, parent: Optional[Namespace]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.parent = parent - - @property - def columns(self) -> Optional[list[Column]]: - return None if self.attributes is None else self.attributes.columns - - @columns.setter - def columns(self, columns: Optional[list[Column]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.columns = columns - - @property - def tables(self) -> Optional[list[Table]]: - return None if self.attributes is None else self.attributes.tables - - @tables.setter - def tables(self, tables: Optional[list[Table]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.tables = tables - - @property - def views(self) -> Optional[list[View]]: - return None if self.attributes is None else self.attributes.views - - @views.setter - def views(self, views: Optional[list[View]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.views = views - - class Attributes(SQL.Attributes): - raw_query: Optional[str] = Field(None, description="", alias="rawQuery") - long_raw_query: Optional[str] = Field( - None, description="", alias="longRawQuery" - ) - raw_query_text: Optional[str] = Field( - None, description="", alias="rawQueryText" - ) - default_schema_qualified_name: Optional[str] = Field( - None, description="", alias="defaultSchemaQualifiedName" - ) - default_database_qualified_name: Optional[str] = Field( - None, description="", alias="defaultDatabaseQualifiedName" - ) - 
variables_schema_base64: Optional[str] = Field( - None, description="", alias="variablesSchemaBase64" - ) - is_private: Optional[bool] = Field(None, description="", alias="isPrivate") - is_sql_snippet: Optional[bool] = Field( - None, description="", alias="isSqlSnippet" - ) - parent_qualified_name: Optional[str] = Field( - None, description="", alias="parentQualifiedName" - ) - collection_qualified_name: Optional[str] = Field( - None, description="", alias="collectionQualifiedName" - ) - is_visual_query: Optional[bool] = Field( - None, description="", alias="isVisualQuery" - ) - visual_builder_schema_base64: Optional[str] = Field( - None, description="", alias="visualBuilderSchemaBase64" - ) - parent: Optional[Namespace] = Field( - None, description="", alias="parent" - ) # relationship - columns: Optional[list[Column]] = Field( - None, description="", alias="columns" - ) # relationship - tables: Optional[list[Table]] = Field( - None, description="", alias="tables" - ) # relationship - views: Optional[list[View]] = Field( - None, description="", alias="views" - ) # relationship - - attributes: "Query.Attributes" = Field( - default_factory=lambda: Query.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class Schema(SQL): - """Description""" - - @classmethod - # @validate_arguments() - @init_guid - def create(cls, *, name: str, database_qualified_name: str) -> Schema: - validate_required_fields( - ["name", "database_qualified_name"], [name, database_qualified_name] - ) - attributes = Schema.Attributes.create( - name=name, database_qualified_name=database_qualified_name - ) - return cls(attributes=attributes) - - type_name: str = Field("Schema", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "Schema": - raise ValueError("must be Schema") - return v - - def __setattr__(self, name, value): - if name in Schema._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - TABLE_COUNT: ClassVar[NumericField] = NumericField("tableCount", "tableCount") - """ - Number of tables in this schema. - """ - VIEWS_COUNT: ClassVar[NumericField] = NumericField("viewsCount", "viewsCount") - """ - Number of views in this schema. 
- """ - - SNOWFLAKE_TAGS: ClassVar[RelationField] = RelationField("snowflakeTags") - """ - TBC - """ - FUNCTIONS: ClassVar[RelationField] = RelationField("functions") - """ - TBC - """ - TABLES: ClassVar[RelationField] = RelationField("tables") - """ - TBC - """ - DATABASE: ClassVar[RelationField] = RelationField("database") - """ - TBC - """ - PROCEDURES: ClassVar[RelationField] = RelationField("procedures") - """ - TBC - """ - VIEWS: ClassVar[RelationField] = RelationField("views") - """ - TBC - """ - MATERIALISED_VIEWS: ClassVar[RelationField] = RelationField("materialisedViews") - """ - TBC - """ - SNOWFLAKE_DYNAMIC_TABLES: ClassVar[RelationField] = RelationField( - "snowflakeDynamicTables" - ) - """ - TBC - """ - SNOWFLAKE_PIPES: ClassVar[RelationField] = RelationField("snowflakePipes") - """ - TBC - """ - SNOWFLAKE_STREAMS: ClassVar[RelationField] = RelationField("snowflakeStreams") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "table_count", - "views_count", - "snowflake_tags", - "functions", - "tables", - "database", - "procedures", - "views", - "materialised_views", - "snowflake_dynamic_tables", - "snowflake_pipes", - "snowflake_streams", - ] - - @property - def table_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.table_count - - @table_count.setter - def table_count(self, table_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.table_count = table_count - - @property - def views_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.views_count - - @views_count.setter - def views_count(self, views_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.views_count = views_count - - @property - def snowflake_tags(self) -> Optional[list[SnowflakeTag]]: - return None if self.attributes is None else self.attributes.snowflake_tags - - @snowflake_tags.setter - def snowflake_tags(self, snowflake_tags: Optional[list[SnowflakeTag]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.snowflake_tags = snowflake_tags - - @property - def functions(self) -> Optional[list[Function]]: - return None if self.attributes is None else self.attributes.functions - - @functions.setter - def functions(self, functions: Optional[list[Function]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.functions = functions - - @property - def tables(self) -> Optional[list[Table]]: - return None if self.attributes is None else self.attributes.tables - - @tables.setter - def tables(self, tables: Optional[list[Table]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.tables = tables - - @property - def database(self) -> Optional[Database]: - return None if self.attributes is None else self.attributes.database - - @database.setter - def database(self, database: Optional[Database]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.database = database - - @property - def procedures(self) -> Optional[list[Procedure]]: - return None if self.attributes is None else self.attributes.procedures - - @procedures.setter - def procedures(self, procedures: Optional[list[Procedure]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.procedures = procedures - - @property - def views(self) -> Optional[list[View]]: - return None if 
self.attributes is None else self.attributes.views - - @views.setter - def views(self, views: Optional[list[View]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.views = views - - @property - def materialised_views(self) -> Optional[list[MaterialisedView]]: - return None if self.attributes is None else self.attributes.materialised_views - - @materialised_views.setter - def materialised_views(self, materialised_views: Optional[list[MaterialisedView]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.materialised_views = materialised_views - - @property - def snowflake_dynamic_tables(self) -> Optional[list[SnowflakeDynamicTable]]: - return ( - None - if self.attributes is None - else self.attributes.snowflake_dynamic_tables - ) - - @snowflake_dynamic_tables.setter - def snowflake_dynamic_tables( - self, snowflake_dynamic_tables: Optional[list[SnowflakeDynamicTable]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.snowflake_dynamic_tables = snowflake_dynamic_tables - - @property - def snowflake_pipes(self) -> Optional[list[SnowflakePipe]]: - return None if self.attributes is None else self.attributes.snowflake_pipes - - @snowflake_pipes.setter - def snowflake_pipes(self, snowflake_pipes: Optional[list[SnowflakePipe]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.snowflake_pipes = snowflake_pipes - - @property - def snowflake_streams(self) -> Optional[list[SnowflakeStream]]: - return None if self.attributes is None else self.attributes.snowflake_streams - - @snowflake_streams.setter - def snowflake_streams(self, snowflake_streams: Optional[list[SnowflakeStream]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.snowflake_streams = snowflake_streams - - class Attributes(SQL.Attributes): - table_count: Optional[int] = Field(None, description="", alias="tableCount") - views_count: Optional[int] = Field(None, description="", alias="viewsCount") - snowflake_tags: Optional[list[SnowflakeTag]] = Field( - None, description="", alias="snowflakeTags" - ) # relationship - functions: Optional[list[Function]] = Field( - None, description="", alias="functions" - ) # relationship - tables: Optional[list[Table]] = Field( - None, description="", alias="tables" - ) # relationship - database: Optional[Database] = Field( - None, description="", alias="database" - ) # relationship - procedures: Optional[list[Procedure]] = Field( - None, description="", alias="procedures" - ) # relationship - views: Optional[list[View]] = Field( - None, description="", alias="views" - ) # relationship - materialised_views: Optional[list[MaterialisedView]] = Field( - None, description="", alias="materialisedViews" - ) # relationship - snowflake_dynamic_tables: Optional[list[SnowflakeDynamicTable]] = Field( - None, description="", alias="snowflakeDynamicTables" - ) # relationship - snowflake_pipes: Optional[list[SnowflakePipe]] = Field( - None, description="", alias="snowflakePipes" - ) # relationship - snowflake_streams: Optional[list[SnowflakeStream]] = Field( - None, description="", alias="snowflakeStreams" - ) # relationship - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, *, name: str, database_qualified_name: str - ) -> Schema.Attributes: - if not name: - raise ValueError("name cannot be blank") - validate_required_fields( - ["database_qualified_name"], [database_qualified_name] - ) - fields = 
database_qualified_name.split("/") - if len(fields) != 4: - raise ValueError("Invalid database_qualified_name") - try: - connector_type = AtlanConnectorType(fields[1]) # type:ignore - except ValueError as e: - raise ValueError("Invalid database_qualified_name") from e - return Schema.Attributes( - name=name, - database_name=fields[3], - connection_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}", - database_qualified_name=database_qualified_name, - qualified_name=f"{database_qualified_name}/{name}", - connector_name=connector_type.value, - database=Database.ref_by_qualified_name(database_qualified_name), - ) - - attributes: "Schema.Attributes" = Field( - default_factory=lambda: Schema.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class SnowflakePipe(SQL): - """Description""" - - type_name: str = Field("SnowflakePipe", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "SnowflakePipe": - raise ValueError("must be SnowflakePipe") - return v - - def __setattr__(self, name, value): - if name in SnowflakePipe._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - DEFINITION: ClassVar[KeywordField] = KeywordField("definition", "definition") - """ - SQL definition of this pipe. - """ - SNOWFLAKE_PIPE_IS_AUTO_INGEST_ENABLED: ClassVar[BooleanField] = BooleanField( - "snowflakePipeIsAutoIngestEnabled", "snowflakePipeIsAutoIngestEnabled" - ) - """ - Whether auto-ingest is enabled for this pipe (true) or not (false). - """ - SNOWFLAKE_PIPE_NOTIFICATION_CHANNEL_NAME: ClassVar[ - KeywordTextField - ] = KeywordTextField( - "snowflakePipeNotificationChannelName", - "snowflakePipeNotificationChannelName", - "snowflakePipeNotificationChannelName.text", - ) - """ - Name of the notification channel for this pipe. 
- """ - - ATLAN_SCHEMA: ClassVar[RelationField] = RelationField("atlanSchema") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "definition", - "snowflake_pipe_is_auto_ingest_enabled", - "snowflake_pipe_notification_channel_name", - "atlan_schema", - ] - - @property - def definition(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.definition - - @definition.setter - def definition(self, definition: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.definition = definition - - @property - def snowflake_pipe_is_auto_ingest_enabled(self) -> Optional[bool]: - return ( - None - if self.attributes is None - else self.attributes.snowflake_pipe_is_auto_ingest_enabled - ) - - @snowflake_pipe_is_auto_ingest_enabled.setter - def snowflake_pipe_is_auto_ingest_enabled( - self, snowflake_pipe_is_auto_ingest_enabled: Optional[bool] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.snowflake_pipe_is_auto_ingest_enabled = ( - snowflake_pipe_is_auto_ingest_enabled - ) - - @property - def snowflake_pipe_notification_channel_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.snowflake_pipe_notification_channel_name - ) - - @snowflake_pipe_notification_channel_name.setter - def snowflake_pipe_notification_channel_name( - self, snowflake_pipe_notification_channel_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.snowflake_pipe_notification_channel_name = ( - snowflake_pipe_notification_channel_name - ) - - @property - def atlan_schema(self) -> Optional[Schema]: - return None if self.attributes is None else self.attributes.atlan_schema - - @atlan_schema.setter - def atlan_schema(self, atlan_schema: Optional[Schema]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.atlan_schema = atlan_schema - - class Attributes(SQL.Attributes): - definition: Optional[str] = Field(None, description="", alias="definition") - snowflake_pipe_is_auto_ingest_enabled: Optional[bool] = Field( - None, description="", alias="snowflakePipeIsAutoIngestEnabled" - ) - snowflake_pipe_notification_channel_name: Optional[str] = Field( - None, description="", alias="snowflakePipeNotificationChannelName" - ) - atlan_schema: Optional[Schema] = Field( - None, description="", alias="atlanSchema" - ) # relationship - - attributes: "SnowflakePipe.Attributes" = Field( - default_factory=lambda: SnowflakePipe.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class View(SQL): - """Description""" - - @classmethod - # @validate_arguments() - @init_guid - def create(cls, *, name: str, schema_qualified_name: str) -> View: - validate_required_fields( - ["name", "schema_qualified_name"], [name, schema_qualified_name] - ) - attributes = View.Attributes.create( - name=name, schema_qualified_name=schema_qualified_name - ) - return cls(attributes=attributes) - - type_name: str = Field("View", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "View": - raise ValueError("must be View") - return v - - def __setattr__(self, name, value): - if name in View._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - COLUMN_COUNT: ClassVar[NumericField] = NumericField("columnCount", "columnCount") - """ - Number of columns in this view. - """ - ROW_COUNT: ClassVar[NumericField] = NumericField("rowCount", "rowCount") - """ - Number of rows in this view. - """ - SIZE_BYTES: ClassVar[NumericField] = NumericField("sizeBytes", "sizeBytes") - """ - Size of this view, in bytes. - """ - IS_QUERY_PREVIEW: ClassVar[BooleanField] = BooleanField( - "isQueryPreview", "isQueryPreview" - ) - """ - Whether preview queries are allowed on this view (true) or not (false). - """ - QUERY_PREVIEW_CONFIG: ClassVar[KeywordField] = KeywordField( - "queryPreviewConfig", "queryPreviewConfig" - ) - """ - Configuration for preview queries on this view. - """ - ALIAS: ClassVar[KeywordField] = KeywordField("alias", "alias") - """ - Alias for this view. - """ - IS_TEMPORARY: ClassVar[BooleanField] = BooleanField("isTemporary", "isTemporary") - """ - Whether this view is temporary (true) or not (false). - """ - DEFINITION: ClassVar[KeywordField] = KeywordField("definition", "definition") - """ - SQL definition of this view. 
- """ - - COLUMNS: ClassVar[RelationField] = RelationField("columns") - """ - TBC - """ - QUERIES: ClassVar[RelationField] = RelationField("queries") - """ - TBC - """ - ATLAN_SCHEMA: ClassVar[RelationField] = RelationField("atlanSchema") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "column_count", - "row_count", - "size_bytes", - "is_query_preview", - "query_preview_config", - "alias", - "is_temporary", - "definition", - "columns", - "queries", - "atlan_schema", - ] - - @property - def column_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.column_count - - @column_count.setter - def column_count(self, column_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_count = column_count - - @property - def row_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.row_count - - @row_count.setter - def row_count(self, row_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.row_count = row_count - - @property - def size_bytes(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.size_bytes - - @size_bytes.setter - def size_bytes(self, size_bytes: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.size_bytes = size_bytes - - @property - def is_query_preview(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_query_preview - - @is_query_preview.setter - def is_query_preview(self, is_query_preview: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_query_preview = is_query_preview - - @property - def query_preview_config(self) -> Optional[dict[str, str]]: - return None if self.attributes is None else self.attributes.query_preview_config - - @query_preview_config.setter - def query_preview_config(self, query_preview_config: Optional[dict[str, str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.query_preview_config = query_preview_config - - @property - def alias(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.alias - - @alias.setter - def alias(self, alias: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.alias = alias - - @property - def is_temporary(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_temporary - - @is_temporary.setter - def is_temporary(self, is_temporary: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_temporary = is_temporary - - @property - def definition(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.definition - - @definition.setter - def definition(self, definition: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.definition = definition - - @property - def columns(self) -> Optional[list[Column]]: - return None if self.attributes is None else self.attributes.columns - - @columns.setter - def columns(self, columns: Optional[list[Column]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.columns = columns - - @property - def queries(self) -> Optional[list[Query]]: - return None if self.attributes is None else 
self.attributes.queries - - @queries.setter - def queries(self, queries: Optional[list[Query]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.queries = queries - - @property - def atlan_schema(self) -> Optional[Schema]: - return None if self.attributes is None else self.attributes.atlan_schema - - @atlan_schema.setter - def atlan_schema(self, atlan_schema: Optional[Schema]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.atlan_schema = atlan_schema - - class Attributes(SQL.Attributes): - column_count: Optional[int] = Field(None, description="", alias="columnCount") - row_count: Optional[int] = Field(None, description="", alias="rowCount") - size_bytes: Optional[int] = Field(None, description="", alias="sizeBytes") - is_query_preview: Optional[bool] = Field( - None, description="", alias="isQueryPreview" - ) - query_preview_config: Optional[dict[str, str]] = Field( - None, description="", alias="queryPreviewConfig" - ) - alias: Optional[str] = Field(None, description="", alias="alias") - is_temporary: Optional[bool] = Field(None, description="", alias="isTemporary") - definition: Optional[str] = Field(None, description="", alias="definition") - columns: Optional[list[Column]] = Field( - None, description="", alias="columns" - ) # relationship - queries: Optional[list[Query]] = Field( - None, description="", alias="queries" - ) # relationship - atlan_schema: Optional[Schema] = Field( - None, description="", alias="atlanSchema" - ) # relationship - - @classmethod - # @validate_arguments() - @init_guid - def create(cls, *, name: str, schema_qualified_name: str) -> View.Attributes: - if not name: - raise ValueError("name cannot be blank") - validate_required_fields(["schema_qualified_name"], [schema_qualified_name]) - fields = schema_qualified_name.split("/") - if len(fields) != 5: - raise ValueError("Invalid schema_qualified_name") - try: - connector_type = AtlanConnectorType(fields[1]) # type:ignore - except ValueError as e: - raise ValueError("Invalid schema_qualified_name") from e - return View.Attributes( - name=name, - database_name=fields[3], - connection_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}", - database_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}/{fields[3]}", - qualified_name=f"{schema_qualified_name}/{name}", - schema_qualified_name=schema_qualified_name, - schema_name=fields[4], - connector_name=connector_type.value, - atlan_schema=Schema.ref_by_qualified_name(schema_qualified_name), - ) - - attributes: "View.Attributes" = Field( - default_factory=lambda: View.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class MaterialisedView(SQL): - """Description""" - - @classmethod - # @validate_arguments() - @init_guid - def create(cls, *, name: str, schema_qualified_name: str) -> MaterialisedView: - validate_required_fields( - ["name", "schema_qualified_name"], [name, schema_qualified_name] - ) - attributes = MaterialisedView.Attributes.create( - name=name, schema_qualified_name=schema_qualified_name - ) - return cls(attributes=attributes) - - type_name: str = Field("MaterialisedView", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "MaterialisedView": - raise ValueError("must be MaterialisedView") - return v - - def __setattr__(self, name, value): - if name in MaterialisedView._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - REFRESH_MODE: ClassVar[KeywordField] = KeywordField("refreshMode", "refreshMode") - """ - Refresh mode for this materialized view. - """ - REFRESH_METHOD: ClassVar[KeywordField] = KeywordField( - "refreshMethod", "refreshMethod" - ) - """ - Refresh method for this materialized view. - """ - STALENESS: ClassVar[KeywordField] = KeywordField("staleness", "staleness") - """ - Staleness of this materialized view. - """ - STALE_SINCE_DATE: ClassVar[NumericField] = NumericField( - "staleSinceDate", "staleSinceDate" - ) - """ - Time (epoch) from which this materialized view is stale, in milliseconds. - """ - COLUMN_COUNT: ClassVar[NumericField] = NumericField("columnCount", "columnCount") - """ - Number of columns in this materialized view. - """ - ROW_COUNT: ClassVar[NumericField] = NumericField("rowCount", "rowCount") - """ - Number of rows in this materialized view. - """ - SIZE_BYTES: ClassVar[NumericField] = NumericField("sizeBytes", "sizeBytes") - """ - Size of this materialized view, in bytes. - """ - IS_QUERY_PREVIEW: ClassVar[BooleanField] = BooleanField( - "isQueryPreview", "isQueryPreview" - ) - """ - Whether it's possible to run a preview query on this materialized view (true) or not (false). - """ - QUERY_PREVIEW_CONFIG: ClassVar[KeywordField] = KeywordField( - "queryPreviewConfig", "queryPreviewConfig" - ) - """ - Configuration for the query preview of this materialized view. - """ - ALIAS: ClassVar[KeywordField] = KeywordField("alias", "alias") - """ - Alias for this materialized view. - """ - IS_TEMPORARY: ClassVar[BooleanField] = BooleanField("isTemporary", "isTemporary") - """ - Whether this materialized view is temporary (true) or not (false). - """ - DEFINITION: ClassVar[KeywordField] = KeywordField("definition", "definition") - """ - SQL definition of this materialized view. 
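Both View.Attributes.create above and MaterialisedView.Attributes.create below derive every other name from the five slash-separated parts of schema_qualified_name. A standalone sketch of that derivation, using a hypothetical Snowflake-style qualified name:

# Mirrors the parsing done in the Attributes.create factories (illustrative values only).
schema_qualified_name = "default/snowflake/1658945299/MY_DB/MY_SCHEMA"  # hypothetical
name = "MY_VIEW"  # hypothetical

fields = schema_qualified_name.split("/")
assert len(fields) == 5  # otherwise the factories raise "Invalid schema_qualified_name"

connection_qualified_name = "/".join(fields[:3])    # "default/snowflake/1658945299"
database_qualified_name = "/".join(fields[:4])      # ".../MY_DB"
database_name, schema_name = fields[3], fields[4]   # "MY_DB", "MY_SCHEMA"
qualified_name = f"{schema_qualified_name}/{name}"  # the new asset's own qualified name
connector_name = fields[1]                          # must map to a valid AtlanConnectorType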
- """ - - COLUMNS: ClassVar[RelationField] = RelationField("columns") - """ - TBC - """ - ATLAN_SCHEMA: ClassVar[RelationField] = RelationField("atlanSchema") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "refresh_mode", - "refresh_method", - "staleness", - "stale_since_date", - "column_count", - "row_count", - "size_bytes", - "is_query_preview", - "query_preview_config", - "alias", - "is_temporary", - "definition", - "columns", - "atlan_schema", - ] - - @property - def refresh_mode(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.refresh_mode - - @refresh_mode.setter - def refresh_mode(self, refresh_mode: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.refresh_mode = refresh_mode - - @property - def refresh_method(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.refresh_method - - @refresh_method.setter - def refresh_method(self, refresh_method: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.refresh_method = refresh_method - - @property - def staleness(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.staleness - - @staleness.setter - def staleness(self, staleness: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.staleness = staleness - - @property - def stale_since_date(self) -> Optional[datetime]: - return None if self.attributes is None else self.attributes.stale_since_date - - @stale_since_date.setter - def stale_since_date(self, stale_since_date: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.stale_since_date = stale_since_date - - @property - def column_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.column_count - - @column_count.setter - def column_count(self, column_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_count = column_count - - @property - def row_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.row_count - - @row_count.setter - def row_count(self, row_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.row_count = row_count - - @property - def size_bytes(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.size_bytes - - @size_bytes.setter - def size_bytes(self, size_bytes: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.size_bytes = size_bytes - - @property - def is_query_preview(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_query_preview - - @is_query_preview.setter - def is_query_preview(self, is_query_preview: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_query_preview = is_query_preview - - @property - def query_preview_config(self) -> Optional[dict[str, str]]: - return None if self.attributes is None else self.attributes.query_preview_config - - @query_preview_config.setter - def query_preview_config(self, query_preview_config: Optional[dict[str, str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.query_preview_config = query_preview_config - - @property - def alias(self) -> 
Optional[str]: - return None if self.attributes is None else self.attributes.alias - - @alias.setter - def alias(self, alias: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.alias = alias - - @property - def is_temporary(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_temporary - - @is_temporary.setter - def is_temporary(self, is_temporary: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_temporary = is_temporary - - @property - def definition(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.definition - - @definition.setter - def definition(self, definition: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.definition = definition - - @property - def columns(self) -> Optional[list[Column]]: - return None if self.attributes is None else self.attributes.columns - - @columns.setter - def columns(self, columns: Optional[list[Column]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.columns = columns - - @property - def atlan_schema(self) -> Optional[Schema]: - return None if self.attributes is None else self.attributes.atlan_schema - - @atlan_schema.setter - def atlan_schema(self, atlan_schema: Optional[Schema]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.atlan_schema = atlan_schema - - class Attributes(SQL.Attributes): - refresh_mode: Optional[str] = Field(None, description="", alias="refreshMode") - refresh_method: Optional[str] = Field( - None, description="", alias="refreshMethod" - ) - staleness: Optional[str] = Field(None, description="", alias="staleness") - stale_since_date: Optional[datetime] = Field( - None, description="", alias="staleSinceDate" - ) - column_count: Optional[int] = Field(None, description="", alias="columnCount") - row_count: Optional[int] = Field(None, description="", alias="rowCount") - size_bytes: Optional[int] = Field(None, description="", alias="sizeBytes") - is_query_preview: Optional[bool] = Field( - None, description="", alias="isQueryPreview" - ) - query_preview_config: Optional[dict[str, str]] = Field( - None, description="", alias="queryPreviewConfig" - ) - alias: Optional[str] = Field(None, description="", alias="alias") - is_temporary: Optional[bool] = Field(None, description="", alias="isTemporary") - definition: Optional[str] = Field(None, description="", alias="definition") - columns: Optional[list[Column]] = Field( - None, description="", alias="columns" - ) # relationship - atlan_schema: Optional[Schema] = Field( - None, description="", alias="atlanSchema" - ) # relationship - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, *, name: str, schema_qualified_name: str - ) -> MaterialisedView.Attributes: - if not name: - raise ValueError("name cannot be blank") - validate_required_fields(["schema_qualified_name"], [schema_qualified_name]) - fields = schema_qualified_name.split("/") - if len(fields) != 5: - raise ValueError("Invalid schema_qualified_name") - try: - connector_type = AtlanConnectorType(fields[1]) # type:ignore - except ValueError as e: - raise ValueError("Invalid schema_qualified_name") from e - return MaterialisedView.Attributes( - name=name, - database_name=fields[3], - connection_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}", - 
database_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}/{fields[3]}", - qualified_name=f"{schema_qualified_name}/{name}", - schema_qualified_name=schema_qualified_name, - schema_name=fields[4], - connector_name=connector_type.value, - atlan_schema=Schema.ref_by_qualified_name(schema_qualified_name), - ) - - attributes: "MaterialisedView.Attributes" = Field( - default_factory=lambda: MaterialisedView.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class Function(SQL): - """Description""" - - type_name: str = Field("Function", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "Function": - raise ValueError("must be Function") - return v - - def __setattr__(self, name, value): - if name in Function._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - FUNCTION_DEFINITION: ClassVar[KeywordField] = KeywordField( - "functionDefinition", "functionDefinition" - ) - """ - Code or set of statements that determine the output of the function. - """ - FUNCTION_RETURN_TYPE: ClassVar[KeywordField] = KeywordField( - "functionReturnType", "functionReturnType" - ) - """ - Data type of the value returned by the function. - """ - FUNCTION_ARGUMENTS: ClassVar[KeywordField] = KeywordField( - "functionArguments", "functionArguments" - ) - """ - Arguments that are passed in to the function. - """ - FUNCTION_LANGUAGE: ClassVar[KeywordField] = KeywordField( - "functionLanguage", "functionLanguage" - ) - """ - Programming language in which the function is written. - """ - FUNCTION_TYPE: ClassVar[KeywordField] = KeywordField("functionType", "functionType") - """ - Type of function. - """ - FUNCTION_IS_EXTERNAL: ClassVar[BooleanField] = BooleanField( - "functionIsExternal", "functionIsExternal" - ) - """ - Whether the function is stored or executed externally (true) or internally (false). - """ - FUNCTION_IS_SECURE: ClassVar[BooleanField] = BooleanField( - "functionIsSecure", "functionIsSecure" - ) - """ - Whether sensitive information of the function is omitted for unauthorized users (true) or not (false). - """ - FUNCTION_IS_MEMOIZABLE: ClassVar[BooleanField] = BooleanField( - "functionIsMemoizable", "functionIsMemoizable" - ) - """ - Whether the function must re-compute if there are no underlying changes in the values (false) or not (true). 
- """ - - FUNCTION_SCHEMA: ClassVar[RelationField] = RelationField("functionSchema") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "function_definition", - "function_return_type", - "function_arguments", - "function_language", - "function_type", - "function_is_external", - "function_is_secure", - "function_is_memoizable", - "function_schema", - ] - - @property - def function_definition(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.function_definition - - @function_definition.setter - def function_definition(self, function_definition: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.function_definition = function_definition - - @property - def function_return_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.function_return_type - - @function_return_type.setter - def function_return_type(self, function_return_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.function_return_type = function_return_type - - @property - def function_arguments(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.function_arguments - - @function_arguments.setter - def function_arguments(self, function_arguments: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.function_arguments = function_arguments - - @property - def function_language(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.function_language - - @function_language.setter - def function_language(self, function_language: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.function_language = function_language - - @property - def function_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.function_type - - @function_type.setter - def function_type(self, function_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.function_type = function_type - - @property - def function_is_external(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.function_is_external - - @function_is_external.setter - def function_is_external(self, function_is_external: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.function_is_external = function_is_external - - @property - def function_is_secure(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.function_is_secure - - @function_is_secure.setter - def function_is_secure(self, function_is_secure: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.function_is_secure = function_is_secure - - @property - def function_is_memoizable(self) -> Optional[bool]: - return ( - None if self.attributes is None else self.attributes.function_is_memoizable - ) - - @function_is_memoizable.setter - def function_is_memoizable(self, function_is_memoizable: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.function_is_memoizable = function_is_memoizable - - @property - def function_schema(self) -> Optional[Schema]: - return None if self.attributes is None else self.attributes.function_schema - - @function_schema.setter - def 
function_schema(self, function_schema: Optional[Schema]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.function_schema = function_schema - - class Attributes(SQL.Attributes): - function_definition: Optional[str] = Field( - None, description="", alias="functionDefinition" - ) - function_return_type: Optional[str] = Field( - None, description="", alias="functionReturnType" - ) - function_arguments: Optional[set[str]] = Field( - None, description="", alias="functionArguments" - ) - function_language: Optional[str] = Field( - None, description="", alias="functionLanguage" - ) - function_type: Optional[str] = Field(None, description="", alias="functionType") - function_is_external: Optional[bool] = Field( - None, description="", alias="functionIsExternal" - ) - function_is_secure: Optional[bool] = Field( - None, description="", alias="functionIsSecure" - ) - function_is_memoizable: Optional[bool] = Field( - None, description="", alias="functionIsMemoizable" - ) - function_schema: Optional[Schema] = Field( - None, description="", alias="functionSchema" - ) # relationship - - attributes: "Function.Attributes" = Field( - default_factory=lambda: Function.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class TablePartition(SQL): - """Description""" - - type_name: str = Field("TablePartition", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "TablePartition": - raise ValueError("must be TablePartition") - return v - - def __setattr__(self, name, value): - if name in TablePartition._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - CONSTRAINT: ClassVar[KeywordField] = KeywordField("constraint", "constraint") - """ - Constraint that defines this table partition. - """ - COLUMN_COUNT: ClassVar[NumericField] = NumericField("columnCount", "columnCount") - """ - Number of columns in this partition. - """ - ROW_COUNT: ClassVar[NumericField] = NumericField("rowCount", "rowCount") - """ - Number of rows in this partition. - """ - SIZE_BYTES: ClassVar[NumericField] = NumericField("sizeBytes", "sizeBytes") - """ - Size of this partition, in bytes. - """ - ALIAS: ClassVar[KeywordField] = KeywordField("alias", "alias") - """ - Alias for this partition. - """ - IS_TEMPORARY: ClassVar[BooleanField] = BooleanField("isTemporary", "isTemporary") - """ - Whether this partition is temporary (true) or not (false). - """ - IS_QUERY_PREVIEW: ClassVar[BooleanField] = BooleanField( - "isQueryPreview", "isQueryPreview" - ) - """ - Whether preview queries for this partition are allowed (true) or not (false). - """ - QUERY_PREVIEW_CONFIG: ClassVar[KeywordField] = KeywordField( - "queryPreviewConfig", "queryPreviewConfig" - ) - """ - Configuration for the preview queries. - """ - EXTERNAL_LOCATION: ClassVar[KeywordField] = KeywordField( - "externalLocation", "externalLocation" - ) - """ - External location of this partition, for example: an S3 object location. - """ - EXTERNAL_LOCATION_REGION: ClassVar[KeywordField] = KeywordField( - "externalLocationRegion", "externalLocationRegion" - ) - """ - Region of the external location of this partition, for example: S3 region. 
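TablePartition repeats the same type_name pinning used by View, MaterialisedView and Function above: the field is declared with allow_mutation=False and re-checked by a validator. A minimal standalone pydantic.v1 sketch of that pattern (the Example class is hypothetical, not the SDK's real base model):

from pydantic.v1 import BaseModel, Field, ValidationError, validator

class Example(BaseModel):
    class Config:
        validate_assignment = True  # needed for allow_mutation=False to be enforced

    type_name: str = Field("TablePartition", allow_mutation=False)

    @validator("type_name")
    def validate_type_name(cls, v):
        if v != "TablePartition":
            raise ValueError("must be TablePartition")
        return v

try:
    Example(type_name="Table")  # rejected by the validator
except ValidationError as err:
    print(err)

# Example().type_name = "Other"  # would raise TypeError, because allow_mutation is False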
- """ - EXTERNAL_LOCATION_FORMAT: ClassVar[KeywordField] = KeywordField( - "externalLocationFormat", "externalLocationFormat" - ) - """ - Format of the external location of this partition, for example: JSON, CSV, PARQUET, etc. - """ - IS_PARTITIONED: ClassVar[BooleanField] = BooleanField( - "isPartitioned", "isPartitioned" - ) - """ - Whether this partition is further partitioned (true) or not (false). - """ - PARTITION_STRATEGY: ClassVar[KeywordField] = KeywordField( - "partitionStrategy", "partitionStrategy" - ) - """ - Partition strategy of this partition. - """ - PARTITION_COUNT: ClassVar[NumericField] = NumericField( - "partitionCount", "partitionCount" - ) - """ - Number of sub-partitions of this partition. - """ - PARTITION_LIST: ClassVar[KeywordField] = KeywordField( - "partitionList", "partitionList" - ) - """ - List of sub-partitions in this partition. - """ - - CHILD_TABLE_PARTITIONS: ClassVar[RelationField] = RelationField( - "childTablePartitions" - ) - """ - TBC - """ - COLUMNS: ClassVar[RelationField] = RelationField("columns") - """ - TBC - """ - PARENT_TABLE_PARTITION: ClassVar[RelationField] = RelationField( - "parentTablePartition" - ) - """ - TBC - """ - PARENT_TABLE: ClassVar[RelationField] = RelationField("parentTable") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "constraint", - "column_count", - "row_count", - "size_bytes", - "alias", - "is_temporary", - "is_query_preview", - "query_preview_config", - "external_location", - "external_location_region", - "external_location_format", - "is_partitioned", - "partition_strategy", - "partition_count", - "partition_list", - "child_table_partitions", - "columns", - "parent_table_partition", - "parent_table", - ] - - @property - def constraint(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.constraint - - @constraint.setter - def constraint(self, constraint: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.constraint = constraint - - @property - def column_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.column_count - - @column_count.setter - def column_count(self, column_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_count = column_count - - @property - def row_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.row_count - - @row_count.setter - def row_count(self, row_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.row_count = row_count - - @property - def size_bytes(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.size_bytes - - @size_bytes.setter - def size_bytes(self, size_bytes: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.size_bytes = size_bytes - - @property - def alias(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.alias - - @alias.setter - def alias(self, alias: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.alias = alias - - @property - def is_temporary(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_temporary - - @is_temporary.setter - def is_temporary(self, is_temporary: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - 
self.attributes.is_temporary = is_temporary - - @property - def is_query_preview(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_query_preview - - @is_query_preview.setter - def is_query_preview(self, is_query_preview: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_query_preview = is_query_preview - - @property - def query_preview_config(self) -> Optional[dict[str, str]]: - return None if self.attributes is None else self.attributes.query_preview_config - - @query_preview_config.setter - def query_preview_config(self, query_preview_config: Optional[dict[str, str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.query_preview_config = query_preview_config - - @property - def external_location(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.external_location - - @external_location.setter - def external_location(self, external_location: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.external_location = external_location - - @property - def external_location_region(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.external_location_region - ) - - @external_location_region.setter - def external_location_region(self, external_location_region: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.external_location_region = external_location_region - - @property - def external_location_format(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.external_location_format - ) - - @external_location_format.setter - def external_location_format(self, external_location_format: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.external_location_format = external_location_format - - @property - def is_partitioned(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_partitioned - - @is_partitioned.setter - def is_partitioned(self, is_partitioned: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_partitioned = is_partitioned - - @property - def partition_strategy(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.partition_strategy - - @partition_strategy.setter - def partition_strategy(self, partition_strategy: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.partition_strategy = partition_strategy - - @property - def partition_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.partition_count - - @partition_count.setter - def partition_count(self, partition_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.partition_count = partition_count - - @property - def partition_list(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.partition_list - - @partition_list.setter - def partition_list(self, partition_list: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.partition_list = partition_list - - @property - def child_table_partitions(self) -> Optional[list[TablePartition]]: - return ( - None if self.attributes is None else 
self.attributes.child_table_partitions - ) - - @child_table_partitions.setter - def child_table_partitions( - self, child_table_partitions: Optional[list[TablePartition]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.child_table_partitions = child_table_partitions - - @property - def columns(self) -> Optional[list[Column]]: - return None if self.attributes is None else self.attributes.columns - - @columns.setter - def columns(self, columns: Optional[list[Column]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.columns = columns - - @property - def parent_table_partition(self) -> Optional[TablePartition]: - return ( - None if self.attributes is None else self.attributes.parent_table_partition - ) - - @parent_table_partition.setter - def parent_table_partition(self, parent_table_partition: Optional[TablePartition]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.parent_table_partition = parent_table_partition - - @property - def parent_table(self) -> Optional[Table]: - return None if self.attributes is None else self.attributes.parent_table - - @parent_table.setter - def parent_table(self, parent_table: Optional[Table]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.parent_table = parent_table - - class Attributes(SQL.Attributes): - constraint: Optional[str] = Field(None, description="", alias="constraint") - column_count: Optional[int] = Field(None, description="", alias="columnCount") - row_count: Optional[int] = Field(None, description="", alias="rowCount") - size_bytes: Optional[int] = Field(None, description="", alias="sizeBytes") - alias: Optional[str] = Field(None, description="", alias="alias") - is_temporary: Optional[bool] = Field(None, description="", alias="isTemporary") - is_query_preview: Optional[bool] = Field( - None, description="", alias="isQueryPreview" - ) - query_preview_config: Optional[dict[str, str]] = Field( - None, description="", alias="queryPreviewConfig" - ) - external_location: Optional[str] = Field( - None, description="", alias="externalLocation" - ) - external_location_region: Optional[str] = Field( - None, description="", alias="externalLocationRegion" - ) - external_location_format: Optional[str] = Field( - None, description="", alias="externalLocationFormat" - ) - is_partitioned: Optional[bool] = Field( - None, description="", alias="isPartitioned" - ) - partition_strategy: Optional[str] = Field( - None, description="", alias="partitionStrategy" - ) - partition_count: Optional[int] = Field( - None, description="", alias="partitionCount" - ) - partition_list: Optional[str] = Field( - None, description="", alias="partitionList" - ) - child_table_partitions: Optional[list[TablePartition]] = Field( - None, description="", alias="childTablePartitions" - ) # relationship - columns: Optional[list[Column]] = Field( - None, description="", alias="columns" - ) # relationship - parent_table_partition: Optional[TablePartition] = Field( - None, description="", alias="parentTablePartition" - ) # relationship - parent_table: Optional[Table] = Field( - None, description="", alias="parentTable" - ) # relationship - - attributes: "TablePartition.Attributes" = Field( - default_factory=lambda: TablePartition.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class Column(SQL): - """Description""" - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, *, name: str, parent_qualified_name: str, parent_type: type, order: int - ) -> Column: - return Column( - attributes=Column.Attributes.create( - name=name, - parent_qualified_name=parent_qualified_name, - parent_type=parent_type, - order=order, - ) - ) - - type_name: str = Field("Column", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "Column": - raise ValueError("must be Column") - return v - - def __setattr__(self, name, value): - if name in Column._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - DATA_TYPE: ClassVar[KeywordTextField] = KeywordTextField( - "dataType", "dataType", "dataType.text" - ) - """ - Data type of values in this column. - """ - SUB_DATA_TYPE: ClassVar[KeywordField] = KeywordField("subDataType", "subDataType") - """ - Sub-data type of this column. - """ - RAW_DATA_TYPE_DEFINITION: ClassVar[KeywordField] = KeywordField( - "rawDataTypeDefinition", "rawDataTypeDefinition" - ) - """ - - """ - ORDER: ClassVar[NumericField] = NumericField("order", "order") - """ - Order (position) in which this column appears in the table (starting at 1). - """ - NESTED_COLUMN_COUNT: ClassVar[NumericField] = NumericField( - "nestedColumnCount", "nestedColumnCount" - ) - """ - Number of columns nested within this (STRUCT or NESTED) column. - """ - IS_PARTITION: ClassVar[BooleanField] = BooleanField("isPartition", "isPartition") - """ - Whether this column is a partition column (true) or not (false). - """ - PARTITION_ORDER: ClassVar[NumericField] = NumericField( - "partitionOrder", "partitionOrder" - ) - """ - Order (position) of this partition column in the table. - """ - IS_CLUSTERED: ClassVar[BooleanField] = BooleanField("isClustered", "isClustered") - """ - Whether this column is a clustered column (true) or not (false). - """ - IS_PRIMARY: ClassVar[BooleanField] = BooleanField("isPrimary", "isPrimary") - """ - When true, this column is the primary key for the table. - """ - IS_FOREIGN: ClassVar[BooleanField] = BooleanField("isForeign", "isForeign") - """ - When true, this column is a foreign key to another table. NOTE: this must be true when using the foreignKeyTo relationship to specify columns that refer to this column as a foreign key. - """ # noqa: E501 - IS_INDEXED: ClassVar[BooleanField] = BooleanField("isIndexed", "isIndexed") - """ - When true, this column is indexed in the database. - """ - IS_SORT: ClassVar[BooleanField] = BooleanField("isSort", "isSort") - """ - Whether this column is a sort column (true) or not (false). - """ - IS_DIST: ClassVar[BooleanField] = BooleanField("isDist", "isDist") - """ - Whether this column is a distribution column (true) or not (false). - """ - IS_PINNED: ClassVar[BooleanField] = BooleanField("isPinned", "isPinned") - """ - Whether this column is pinned (true) or not (false). - """ - PINNED_BY: ClassVar[KeywordField] = KeywordField("pinnedBy", "pinnedBy") - """ - User who pinned this column. - """ - PINNED_AT: ClassVar[NumericField] = NumericField("pinnedAt", "pinnedAt") - """ - Time (epoch) at which this column was pinned, in milliseconds. - """ - PRECISION: ClassVar[NumericField] = NumericField("precision", "precision") - """ - Total number of digits allowed, when the dataType is numeric. 
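The Column.create factory above also needs the parent container and the column's position. A hedged usage sketch (asset names hypothetical; parent_type is assumed to accept the SQL container classes such as Table, View or MaterialisedView):

from pyatlan.model.assets import Column, Table

column = Column.create(
    name="CUSTOMER_ID",  # hypothetical
    parent_qualified_name="default/snowflake/1658945299/MY_DB/MY_SCHEMA/MY_TABLE",
    parent_type=Table,   # assumption: the class of the containing asset
    order=1,             # position within the parent, starting at 1
)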
- """ - DEFAULT_VALUE: ClassVar[KeywordField] = KeywordField("defaultValue", "defaultValue") - """ - Default value for this column. - """ - IS_NULLABLE: ClassVar[BooleanField] = BooleanField("isNullable", "isNullable") - """ - When true, the values in this column can be null. - """ - NUMERIC_SCALE: ClassVar[NumericField] = NumericField("numericScale", "numericScale") - """ - Number of digits allowed to the right of the decimal point. - """ - MAX_LENGTH: ClassVar[NumericField] = NumericField("maxLength", "maxLength") - """ - Maximum length of a value in this column. - """ - VALIDATIONS: ClassVar[KeywordField] = KeywordField("validations", "validations") - """ - Validations for this column. - """ - PARENT_COLUMN_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "parentColumnQualifiedName", - "parentColumnQualifiedName", - "parentColumnQualifiedName.text", - ) - """ - Unique name of the column this column is nested within, for STRUCT and NESTED columns. - """ - PARENT_COLUMN_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "parentColumnName", "parentColumnName.keyword", "parentColumnName" - ) - """ - Simple name of the column this column is nested within, for STRUCT and NESTED columns. - """ - COLUMN_DISTINCT_VALUES_COUNT: ClassVar[NumericField] = NumericField( - "columnDistinctValuesCount", "columnDistinctValuesCount" - ) - """ - Number of rows that contain distinct values. - """ - COLUMN_DISTINCT_VALUES_COUNT_LONG: ClassVar[NumericField] = NumericField( - "columnDistinctValuesCountLong", "columnDistinctValuesCountLong" - ) - """ - Number of rows that contain distinct values. - """ - COLUMN_HISTOGRAM: ClassVar[KeywordField] = KeywordField( - "columnHistogram", "columnHistogram" - ) - """ - List of values in a histogram that represents the contents of this column. - """ - COLUMN_MAX: ClassVar[NumericField] = NumericField("columnMax", "columnMax") - """ - Greatest value in a numeric column. - """ - COLUMN_MIN: ClassVar[NumericField] = NumericField("columnMin", "columnMin") - """ - Least value in a numeric column. - """ - COLUMN_MEAN: ClassVar[NumericField] = NumericField("columnMean", "columnMean") - """ - Arithmetic mean of the values in a numeric column. - """ - COLUMN_SUM: ClassVar[NumericField] = NumericField("columnSum", "columnSum") - """ - Calculated sum of the values in a numeric column. - """ - COLUMN_MEDIAN: ClassVar[NumericField] = NumericField("columnMedian", "columnMedian") - """ - Calculated median of the values in a numeric column. - """ - COLUMN_STANDARD_DEVIATION: ClassVar[NumericField] = NumericField( - "columnStandardDeviation", "columnStandardDeviation" - ) - """ - Calculated standard deviation of the values in a numeric column. - """ - COLUMN_UNIQUE_VALUES_COUNT: ClassVar[NumericField] = NumericField( - "columnUniqueValuesCount", "columnUniqueValuesCount" - ) - """ - Number of rows in which a value in this column appears only once. - """ - COLUMN_UNIQUE_VALUES_COUNT_LONG: ClassVar[NumericField] = NumericField( - "columnUniqueValuesCountLong", "columnUniqueValuesCountLong" - ) - """ - Number of rows in which a value in this column appears only once. - """ - COLUMN_AVERAGE: ClassVar[NumericField] = NumericField( - "columnAverage", "columnAverage" - ) - """ - Average value in this column. - """ - COLUMN_AVERAGE_LENGTH: ClassVar[NumericField] = NumericField( - "columnAverageLength", "columnAverageLength" - ) - """ - Average length of values in a string column. 
- """ - COLUMN_DUPLICATE_VALUES_COUNT: ClassVar[NumericField] = NumericField( - "columnDuplicateValuesCount", "columnDuplicateValuesCount" - ) - """ - Number of rows that contain duplicate values. - """ - COLUMN_DUPLICATE_VALUES_COUNT_LONG: ClassVar[NumericField] = NumericField( - "columnDuplicateValuesCountLong", "columnDuplicateValuesCountLong" - ) - """ - Number of rows that contain duplicate values. - """ - COLUMN_MAXIMUM_STRING_LENGTH: ClassVar[NumericField] = NumericField( - "columnMaximumStringLength", "columnMaximumStringLength" - ) - """ - Length of the longest value in a string column. - """ - COLUMN_MAXS: ClassVar[KeywordField] = KeywordField("columnMaxs", "columnMaxs") - """ - List of the greatest values in a column. - """ - COLUMN_MINIMUM_STRING_LENGTH: ClassVar[NumericField] = NumericField( - "columnMinimumStringLength", "columnMinimumStringLength" - ) - """ - Length of the shortest value in a string column. - """ - COLUMN_MINS: ClassVar[KeywordField] = KeywordField("columnMins", "columnMins") - """ - List of the least values in a column. - """ - COLUMN_MISSING_VALUES_COUNT: ClassVar[NumericField] = NumericField( - "columnMissingValuesCount", "columnMissingValuesCount" - ) - """ - Number of rows in a column that do not contain content. - """ - COLUMN_MISSING_VALUES_COUNT_LONG: ClassVar[NumericField] = NumericField( - "columnMissingValuesCountLong", "columnMissingValuesCountLong" - ) - """ - Number of rows in a column that do not contain content. - """ - COLUMN_MISSING_VALUES_PERCENTAGE: ClassVar[NumericField] = NumericField( - "columnMissingValuesPercentage", "columnMissingValuesPercentage" - ) - """ - Percentage of rows in a column that do not contain content. - """ - COLUMN_UNIQUENESS_PERCENTAGE: ClassVar[NumericField] = NumericField( - "columnUniquenessPercentage", "columnUniquenessPercentage" - ) - """ - Ratio indicating how unique data in this column is: 0 indicates that all values are the same, 100 indicates that all values in this column are unique. - """ # noqa: E501 - COLUMN_VARIANCE: ClassVar[NumericField] = NumericField( - "columnVariance", "columnVariance" - ) - """ - Calculated variance of the values in a numeric column. - """ - COLUMN_TOP_VALUES: ClassVar[KeywordField] = KeywordField( - "columnTopValues", "columnTopValues" - ) - """ - List of top values in this column. - """ - COLUMN_DEPTH_LEVEL: ClassVar[NumericField] = NumericField( - "columnDepthLevel", "columnDepthLevel" - ) - """ - Level of nesting of this column, used for STRUCT and NESTED columns. 
- """ - - SNOWFLAKE_DYNAMIC_TABLE: ClassVar[RelationField] = RelationField( - "snowflakeDynamicTable" - ) - """ - TBC - """ - VIEW: ClassVar[RelationField] = RelationField("view") - """ - TBC - """ - NESTED_COLUMNS: ClassVar[RelationField] = RelationField("nestedColumns") - """ - TBC - """ - DATA_QUALITY_METRIC_DIMENSIONS: ClassVar[RelationField] = RelationField( - "dataQualityMetricDimensions" - ) - """ - TBC - """ - DBT_MODEL_COLUMNS: ClassVar[RelationField] = RelationField("dbtModelColumns") - """ - TBC - """ - TABLE: ClassVar[RelationField] = RelationField("table") - """ - TBC - """ - COLUMN_DBT_MODEL_COLUMNS: ClassVar[RelationField] = RelationField( - "columnDbtModelColumns" - ) - """ - TBC - """ - MATERIALISED_VIEW: ClassVar[RelationField] = RelationField("materialisedView") - """ - TBC - """ - PARENT_COLUMN: ClassVar[RelationField] = RelationField("parentColumn") - """ - TBC - """ - QUERIES: ClassVar[RelationField] = RelationField("queries") - """ - TBC - """ - METRIC_TIMESTAMPS: ClassVar[RelationField] = RelationField("metricTimestamps") - """ - TBC - """ - FOREIGN_KEY_TO: ClassVar[RelationField] = RelationField("foreignKeyTo") - """ - TBC - """ - FOREIGN_KEY_FROM: ClassVar[RelationField] = RelationField("foreignKeyFrom") - """ - TBC - """ - DBT_METRICS: ClassVar[RelationField] = RelationField("dbtMetrics") - """ - TBC - """ - TABLE_PARTITION: ClassVar[RelationField] = RelationField("tablePartition") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "data_type", - "sub_data_type", - "raw_data_type_definition", - "order", - "nested_column_count", - "is_partition", - "partition_order", - "is_clustered", - "is_primary", - "is_foreign", - "is_indexed", - "is_sort", - "is_dist", - "is_pinned", - "pinned_by", - "pinned_at", - "precision", - "default_value", - "is_nullable", - "numeric_scale", - "max_length", - "validations", - "parent_column_qualified_name", - "parent_column_name", - "column_distinct_values_count", - "column_distinct_values_count_long", - "column_histogram", - "column_max", - "column_min", - "column_mean", - "column_sum", - "column_median", - "column_standard_deviation", - "column_unique_values_count", - "column_unique_values_count_long", - "column_average", - "column_average_length", - "column_duplicate_values_count", - "column_duplicate_values_count_long", - "column_maximum_string_length", - "column_maxs", - "column_minimum_string_length", - "column_mins", - "column_missing_values_count", - "column_missing_values_count_long", - "column_missing_values_percentage", - "column_uniqueness_percentage", - "column_variance", - "column_top_values", - "column_depth_level", - "snowflake_dynamic_table", - "view", - "nested_columns", - "data_quality_metric_dimensions", - "dbt_model_columns", - "table", - "column_dbt_model_columns", - "materialised_view", - "parent_column", - "queries", - "metric_timestamps", - "foreign_key_to", - "foreign_key_from", - "dbt_metrics", - "table_partition", - ] - - @property - def data_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.data_type - - @data_type.setter - def data_type(self, data_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.data_type = data_type - - @property - def sub_data_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.sub_data_type - - @sub_data_type.setter - def sub_data_type(self, sub_data_type: Optional[str]): - if self.attributes is None: - self.attributes = 
self.Attributes() - self.attributes.sub_data_type = sub_data_type - - @property - def raw_data_type_definition(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.raw_data_type_definition - ) - - @raw_data_type_definition.setter - def raw_data_type_definition(self, raw_data_type_definition: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.raw_data_type_definition = raw_data_type_definition - - @property - def order(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.order - - @order.setter - def order(self, order: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.order = order - - @property - def nested_column_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.nested_column_count - - @nested_column_count.setter - def nested_column_count(self, nested_column_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.nested_column_count = nested_column_count - - @property - def is_partition(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_partition - - @is_partition.setter - def is_partition(self, is_partition: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_partition = is_partition - - @property - def partition_order(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.partition_order - - @partition_order.setter - def partition_order(self, partition_order: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.partition_order = partition_order - - @property - def is_clustered(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_clustered - - @is_clustered.setter - def is_clustered(self, is_clustered: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_clustered = is_clustered - - @property - def is_primary(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_primary - - @is_primary.setter - def is_primary(self, is_primary: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_primary = is_primary - - @property - def is_foreign(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_foreign - - @is_foreign.setter - def is_foreign(self, is_foreign: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_foreign = is_foreign - - @property - def is_indexed(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_indexed - - @is_indexed.setter - def is_indexed(self, is_indexed: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_indexed = is_indexed - - @property - def is_sort(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_sort - - @is_sort.setter - def is_sort(self, is_sort: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_sort = is_sort - - @property - def is_dist(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_dist - - @is_dist.setter - def is_dist(self, is_dist: 
Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_dist = is_dist - - @property - def is_pinned(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_pinned - - @is_pinned.setter - def is_pinned(self, is_pinned: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_pinned = is_pinned - - @property - def pinned_by(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.pinned_by - - @pinned_by.setter - def pinned_by(self, pinned_by: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.pinned_by = pinned_by - - @property - def pinned_at(self) -> Optional[datetime]: - return None if self.attributes is None else self.attributes.pinned_at - - @pinned_at.setter - def pinned_at(self, pinned_at: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.pinned_at = pinned_at - - @property - def precision(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.precision - - @precision.setter - def precision(self, precision: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.precision = precision - - @property - def default_value(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.default_value - - @default_value.setter - def default_value(self, default_value: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.default_value = default_value - - @property - def is_nullable(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_nullable - - @is_nullable.setter - def is_nullable(self, is_nullable: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_nullable = is_nullable - - @property - def numeric_scale(self) -> Optional[float]: - return None if self.attributes is None else self.attributes.numeric_scale - - @numeric_scale.setter - def numeric_scale(self, numeric_scale: Optional[float]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.numeric_scale = numeric_scale - - @property - def max_length(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.max_length - - @max_length.setter - def max_length(self, max_length: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.max_length = max_length - - @property - def validations(self) -> Optional[dict[str, str]]: - return None if self.attributes is None else self.attributes.validations - - @validations.setter - def validations(self, validations: Optional[dict[str, str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.validations = validations - - @property - def parent_column_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.parent_column_qualified_name - ) - - @parent_column_qualified_name.setter - def parent_column_qualified_name(self, parent_column_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.parent_column_qualified_name = parent_column_qualified_name - - @property - def parent_column_name(self) -> Optional[str]: - return None if self.attributes is None 
else self.attributes.parent_column_name - - @parent_column_name.setter - def parent_column_name(self, parent_column_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.parent_column_name = parent_column_name - - @property - def column_distinct_values_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.column_distinct_values_count - ) - - @column_distinct_values_count.setter - def column_distinct_values_count(self, column_distinct_values_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_distinct_values_count = column_distinct_values_count - - @property - def column_distinct_values_count_long(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.column_distinct_values_count_long - ) - - @column_distinct_values_count_long.setter - def column_distinct_values_count_long( - self, column_distinct_values_count_long: Optional[int] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_distinct_values_count_long = ( - column_distinct_values_count_long - ) - - @property - def column_histogram(self) -> Optional[Histogram]: - return None if self.attributes is None else self.attributes.column_histogram - - @column_histogram.setter - def column_histogram(self, column_histogram: Optional[Histogram]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_histogram = column_histogram - - @property - def column_max(self) -> Optional[float]: - return None if self.attributes is None else self.attributes.column_max - - @column_max.setter - def column_max(self, column_max: Optional[float]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_max = column_max - - @property - def column_min(self) -> Optional[float]: - return None if self.attributes is None else self.attributes.column_min - - @column_min.setter - def column_min(self, column_min: Optional[float]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_min = column_min - - @property - def column_mean(self) -> Optional[float]: - return None if self.attributes is None else self.attributes.column_mean - - @column_mean.setter - def column_mean(self, column_mean: Optional[float]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_mean = column_mean - - @property - def column_sum(self) -> Optional[float]: - return None if self.attributes is None else self.attributes.column_sum - - @column_sum.setter - def column_sum(self, column_sum: Optional[float]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_sum = column_sum - - @property - def column_median(self) -> Optional[float]: - return None if self.attributes is None else self.attributes.column_median - - @column_median.setter - def column_median(self, column_median: Optional[float]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_median = column_median - - @property - def column_standard_deviation(self) -> Optional[float]: - return ( - None - if self.attributes is None - else self.attributes.column_standard_deviation - ) - - @column_standard_deviation.setter - def column_standard_deviation(self, column_standard_deviation: Optional[float]): - if self.attributes is None: - self.attributes = self.Attributes() - 
self.attributes.column_standard_deviation = column_standard_deviation - - @property - def column_unique_values_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.column_unique_values_count - ) - - @column_unique_values_count.setter - def column_unique_values_count(self, column_unique_values_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_unique_values_count = column_unique_values_count - - @property - def column_unique_values_count_long(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.column_unique_values_count_long - ) - - @column_unique_values_count_long.setter - def column_unique_values_count_long( - self, column_unique_values_count_long: Optional[int] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_unique_values_count_long = ( - column_unique_values_count_long - ) - - @property - def column_average(self) -> Optional[float]: - return None if self.attributes is None else self.attributes.column_average - - @column_average.setter - def column_average(self, column_average: Optional[float]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_average = column_average - - @property - def column_average_length(self) -> Optional[float]: - return ( - None if self.attributes is None else self.attributes.column_average_length - ) - - @column_average_length.setter - def column_average_length(self, column_average_length: Optional[float]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_average_length = column_average_length - - @property - def column_duplicate_values_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.column_duplicate_values_count - ) - - @column_duplicate_values_count.setter - def column_duplicate_values_count( - self, column_duplicate_values_count: Optional[int] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_duplicate_values_count = column_duplicate_values_count - - @property - def column_duplicate_values_count_long(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.column_duplicate_values_count_long - ) - - @column_duplicate_values_count_long.setter - def column_duplicate_values_count_long( - self, column_duplicate_values_count_long: Optional[int] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_duplicate_values_count_long = ( - column_duplicate_values_count_long - ) - - @property - def column_maximum_string_length(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.column_maximum_string_length - ) - - @column_maximum_string_length.setter - def column_maximum_string_length(self, column_maximum_string_length: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_maximum_string_length = column_maximum_string_length - - @property - def column_maxs(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.column_maxs - - @column_maxs.setter - def column_maxs(self, column_maxs: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_maxs = column_maxs - - @property - def column_minimum_string_length(self) -> 
Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.column_minimum_string_length - ) - - @column_minimum_string_length.setter - def column_minimum_string_length(self, column_minimum_string_length: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_minimum_string_length = column_minimum_string_length - - @property - def column_mins(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.column_mins - - @column_mins.setter - def column_mins(self, column_mins: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_mins = column_mins - - @property - def column_missing_values_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.column_missing_values_count - ) - - @column_missing_values_count.setter - def column_missing_values_count(self, column_missing_values_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_missing_values_count = column_missing_values_count - - @property - def column_missing_values_count_long(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.column_missing_values_count_long - ) - - @column_missing_values_count_long.setter - def column_missing_values_count_long( - self, column_missing_values_count_long: Optional[int] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_missing_values_count_long = ( - column_missing_values_count_long - ) - - @property - def column_missing_values_percentage(self) -> Optional[float]: - return ( - None - if self.attributes is None - else self.attributes.column_missing_values_percentage - ) - - @column_missing_values_percentage.setter - def column_missing_values_percentage( - self, column_missing_values_percentage: Optional[float] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_missing_values_percentage = ( - column_missing_values_percentage - ) - - @property - def column_uniqueness_percentage(self) -> Optional[float]: - return ( - None - if self.attributes is None - else self.attributes.column_uniqueness_percentage - ) - - @column_uniqueness_percentage.setter - def column_uniqueness_percentage( - self, column_uniqueness_percentage: Optional[float] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_uniqueness_percentage = column_uniqueness_percentage - - @property - def column_variance(self) -> Optional[float]: - return None if self.attributes is None else self.attributes.column_variance - - @column_variance.setter - def column_variance(self, column_variance: Optional[float]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_variance = column_variance - - @property - def column_top_values(self) -> Optional[list[ColumnValueFrequencyMap]]: - return None if self.attributes is None else self.attributes.column_top_values - - @column_top_values.setter - def column_top_values( - self, column_top_values: Optional[list[ColumnValueFrequencyMap]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_top_values = column_top_values - - @property - def column_depth_level(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.column_depth_level - - 
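# --- Editorial note (not part of the original diff) ---------------------------------
# Every convenience property in these generated asset classes follows the same shape
# seen above: the getter returns None while the nested `attributes` model is unset, and
# the setter lazily instantiates `Attributes` before assigning, with `__setattr__`
# routing convenience-property names through `object.__setattr__` so the property
# setter actually runs. Below is a minimal, self-contained sketch of that pattern,
# using the hypothetical names `Widget` and `size` (these are NOT pyatlan classes or
# fields; the sketch only illustrates the pattern, it is not the library's API).

from typing import Optional

from pydantic.v1 import BaseModel, Field


class Widget(BaseModel):
    """Illustrative stand-in for a generated asset class; not part of pyatlan."""

    class Attributes(BaseModel):
        # Hypothetical attribute, mirroring the Field(None, ..., alias=...) style above.
        size: Optional[int] = Field(None, alias="size")

    attributes: Optional[Attributes] = None

    def __setattr__(self, name, value):
        # Route the convenience property through its setter instead of pydantic's
        # field handling (object.__setattr__ honours the property descriptor).
        if name == "size":
            return object.__setattr__(self, name, value)
        super().__setattr__(name, value)

    @property
    def size(self) -> Optional[int]:
        # Getter: tolerate a missing nested Attributes model.
        return None if self.attributes is None else self.attributes.size

    @size.setter
    def size(self, size: Optional[int]):
        # Setter: lazily create Attributes, then assign onto it.
        if self.attributes is None:
            self.attributes = self.Attributes()
        self.attributes.size = size


# Usage sketch: Widget().size is None until assigned; assigning `w.size = 5` lazily
# creates the nested Attributes instance and stores the value there.
# -------------------------------------------------------------------------------------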
@column_depth_level.setter - def column_depth_level(self, column_depth_level: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_depth_level = column_depth_level - - @property - def snowflake_dynamic_table(self) -> Optional[SnowflakeDynamicTable]: - return ( - None if self.attributes is None else self.attributes.snowflake_dynamic_table - ) - - @snowflake_dynamic_table.setter - def snowflake_dynamic_table( - self, snowflake_dynamic_table: Optional[SnowflakeDynamicTable] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.snowflake_dynamic_table = snowflake_dynamic_table - - @property - def view(self) -> Optional[View]: - return None if self.attributes is None else self.attributes.view - - @view.setter - def view(self, view: Optional[View]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.view = view - - @property - def nested_columns(self) -> Optional[list[Column]]: - return None if self.attributes is None else self.attributes.nested_columns - - @nested_columns.setter - def nested_columns(self, nested_columns: Optional[list[Column]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.nested_columns = nested_columns - - @property - def data_quality_metric_dimensions(self) -> Optional[list[Metric]]: - return ( - None - if self.attributes is None - else self.attributes.data_quality_metric_dimensions - ) - - @data_quality_metric_dimensions.setter - def data_quality_metric_dimensions( - self, data_quality_metric_dimensions: Optional[list[Metric]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.data_quality_metric_dimensions = data_quality_metric_dimensions - - @property - def dbt_model_columns(self) -> Optional[list[DbtModelColumn]]: - return None if self.attributes is None else self.attributes.dbt_model_columns - - @dbt_model_columns.setter - def dbt_model_columns(self, dbt_model_columns: Optional[list[DbtModelColumn]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_model_columns = dbt_model_columns - - @property - def table(self) -> Optional[Table]: - return None if self.attributes is None else self.attributes.table - - @table.setter - def table(self, table: Optional[Table]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.table = table - - @property - def column_dbt_model_columns(self) -> Optional[list[DbtModelColumn]]: - return ( - None - if self.attributes is None - else self.attributes.column_dbt_model_columns - ) - - @column_dbt_model_columns.setter - def column_dbt_model_columns( - self, column_dbt_model_columns: Optional[list[DbtModelColumn]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_dbt_model_columns = column_dbt_model_columns - - @property - def materialised_view(self) -> Optional[MaterialisedView]: - return None if self.attributes is None else self.attributes.materialised_view - - @materialised_view.setter - def materialised_view(self, materialised_view: Optional[MaterialisedView]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.materialised_view = materialised_view - - @property - def parent_column(self) -> Optional[Column]: - return None if self.attributes is None else self.attributes.parent_column - - @parent_column.setter - def parent_column(self, parent_column: Optional[Column]): - if 
self.attributes is None: - self.attributes = self.Attributes() - self.attributes.parent_column = parent_column - - @property - def queries(self) -> Optional[list[Query]]: - return None if self.attributes is None else self.attributes.queries - - @queries.setter - def queries(self, queries: Optional[list[Query]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.queries = queries - - @property - def metric_timestamps(self) -> Optional[list[Metric]]: - return None if self.attributes is None else self.attributes.metric_timestamps - - @metric_timestamps.setter - def metric_timestamps(self, metric_timestamps: Optional[list[Metric]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.metric_timestamps = metric_timestamps - - @property - def foreign_key_to(self) -> Optional[list[Column]]: - return None if self.attributes is None else self.attributes.foreign_key_to - - @foreign_key_to.setter - def foreign_key_to(self, foreign_key_to: Optional[list[Column]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.foreign_key_to = foreign_key_to - - @property - def foreign_key_from(self) -> Optional[Column]: - return None if self.attributes is None else self.attributes.foreign_key_from - - @foreign_key_from.setter - def foreign_key_from(self, foreign_key_from: Optional[Column]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.foreign_key_from = foreign_key_from - - @property - def dbt_metrics(self) -> Optional[list[DbtMetric]]: - return None if self.attributes is None else self.attributes.dbt_metrics - - @dbt_metrics.setter - def dbt_metrics(self, dbt_metrics: Optional[list[DbtMetric]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_metrics = dbt_metrics - - @property - def table_partition(self) -> Optional[TablePartition]: - return None if self.attributes is None else self.attributes.table_partition - - @table_partition.setter - def table_partition(self, table_partition: Optional[TablePartition]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.table_partition = table_partition - - class Attributes(SQL.Attributes): - data_type: Optional[str] = Field(None, description="", alias="dataType") - sub_data_type: Optional[str] = Field(None, description="", alias="subDataType") - raw_data_type_definition: Optional[str] = Field( - None, description="", alias="rawDataTypeDefinition" - ) - order: Optional[int] = Field(None, description="", alias="order") - nested_column_count: Optional[int] = Field( - None, description="", alias="nestedColumnCount" - ) - is_partition: Optional[bool] = Field(None, description="", alias="isPartition") - partition_order: Optional[int] = Field( - None, description="", alias="partitionOrder" - ) - is_clustered: Optional[bool] = Field(None, description="", alias="isClustered") - is_primary: Optional[bool] = Field(None, description="", alias="isPrimary") - is_foreign: Optional[bool] = Field(None, description="", alias="isForeign") - is_indexed: Optional[bool] = Field(None, description="", alias="isIndexed") - is_sort: Optional[bool] = Field(None, description="", alias="isSort") - is_dist: Optional[bool] = Field(None, description="", alias="isDist") - is_pinned: Optional[bool] = Field(None, description="", alias="isPinned") - pinned_by: Optional[str] = Field(None, description="", alias="pinnedBy") - pinned_at: Optional[datetime] = Field(None, description="", 
alias="pinnedAt") - precision: Optional[int] = Field(None, description="", alias="precision") - default_value: Optional[str] = Field(None, description="", alias="defaultValue") - is_nullable: Optional[bool] = Field(None, description="", alias="isNullable") - numeric_scale: Optional[float] = Field( - None, description="", alias="numericScale" - ) - max_length: Optional[int] = Field(None, description="", alias="maxLength") - validations: Optional[dict[str, str]] = Field( - None, description="", alias="validations" - ) - parent_column_qualified_name: Optional[str] = Field( - None, description="", alias="parentColumnQualifiedName" - ) - parent_column_name: Optional[str] = Field( - None, description="", alias="parentColumnName" - ) - column_distinct_values_count: Optional[int] = Field( - None, description="", alias="columnDistinctValuesCount" - ) - column_distinct_values_count_long: Optional[int] = Field( - None, description="", alias="columnDistinctValuesCountLong" - ) - column_histogram: Optional[Histogram] = Field( - None, description="", alias="columnHistogram" - ) - column_max: Optional[float] = Field(None, description="", alias="columnMax") - column_min: Optional[float] = Field(None, description="", alias="columnMin") - column_mean: Optional[float] = Field(None, description="", alias="columnMean") - column_sum: Optional[float] = Field(None, description="", alias="columnSum") - column_median: Optional[float] = Field( - None, description="", alias="columnMedian" - ) - column_standard_deviation: Optional[float] = Field( - None, description="", alias="columnStandardDeviation" - ) - column_unique_values_count: Optional[int] = Field( - None, description="", alias="columnUniqueValuesCount" - ) - column_unique_values_count_long: Optional[int] = Field( - None, description="", alias="columnUniqueValuesCountLong" - ) - column_average: Optional[float] = Field( - None, description="", alias="columnAverage" - ) - column_average_length: Optional[float] = Field( - None, description="", alias="columnAverageLength" - ) - column_duplicate_values_count: Optional[int] = Field( - None, description="", alias="columnDuplicateValuesCount" - ) - column_duplicate_values_count_long: Optional[int] = Field( - None, description="", alias="columnDuplicateValuesCountLong" - ) - column_maximum_string_length: Optional[int] = Field( - None, description="", alias="columnMaximumStringLength" - ) - column_maxs: Optional[set[str]] = Field( - None, description="", alias="columnMaxs" - ) - column_minimum_string_length: Optional[int] = Field( - None, description="", alias="columnMinimumStringLength" - ) - column_mins: Optional[set[str]] = Field( - None, description="", alias="columnMins" - ) - column_missing_values_count: Optional[int] = Field( - None, description="", alias="columnMissingValuesCount" - ) - column_missing_values_count_long: Optional[int] = Field( - None, description="", alias="columnMissingValuesCountLong" - ) - column_missing_values_percentage: Optional[float] = Field( - None, description="", alias="columnMissingValuesPercentage" - ) - column_uniqueness_percentage: Optional[float] = Field( - None, description="", alias="columnUniquenessPercentage" - ) - column_variance: Optional[float] = Field( - None, description="", alias="columnVariance" - ) - column_top_values: Optional[list[ColumnValueFrequencyMap]] = Field( - None, description="", alias="columnTopValues" - ) - column_depth_level: Optional[int] = Field( - None, description="", alias="columnDepthLevel" - ) - snowflake_dynamic_table: 
Optional[SnowflakeDynamicTable] = Field( - None, description="", alias="snowflakeDynamicTable" - ) # relationship - view: Optional[View] = Field(None, description="", alias="view") # relationship - nested_columns: Optional[list[Column]] = Field( - None, description="", alias="nestedColumns" - ) # relationship - data_quality_metric_dimensions: Optional[list[Metric]] = Field( - None, description="", alias="dataQualityMetricDimensions" - ) # relationship - dbt_model_columns: Optional[list[DbtModelColumn]] = Field( - None, description="", alias="dbtModelColumns" - ) # relationship - table: Optional[Table] = Field( - None, description="", alias="table" - ) # relationship - column_dbt_model_columns: Optional[list[DbtModelColumn]] = Field( - None, description="", alias="columnDbtModelColumns" - ) # relationship - materialised_view: Optional[MaterialisedView] = Field( - None, description="", alias="materialisedView" - ) # relationship - parent_column: Optional[Column] = Field( - None, description="", alias="parentColumn" - ) # relationship - queries: Optional[list[Query]] = Field( - None, description="", alias="queries" - ) # relationship - metric_timestamps: Optional[list[Metric]] = Field( - None, description="", alias="metricTimestamps" - ) # relationship - foreign_key_to: Optional[list[Column]] = Field( - None, description="", alias="foreignKeyTo" - ) # relationship - foreign_key_from: Optional[Column] = Field( - None, description="", alias="foreignKeyFrom" - ) # relationship - dbt_metrics: Optional[list[DbtMetric]] = Field( - None, description="", alias="dbtMetrics" - ) # relationship - table_partition: Optional[TablePartition] = Field( - None, description="", alias="tablePartition" - ) # relationship - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, *, name: str, parent_qualified_name: str, parent_type: type, order: int - ) -> Column.Attributes: - validate_required_fields( - ["name", "parent_qualified_name", "parent_type", "order"], - [name, parent_qualified_name, parent_type, order], - ) - fields = parent_qualified_name.split("/") - if len(fields) != 6: - raise ValueError("Invalid parent_qualified_name") - try: - connector_type = AtlanConnectorType(fields[1]) # type:ignore - except ValueError as e: - raise ValueError("Invalid parent_qualified_name") from e - if order < 0: - raise ValueError("Order must be be a positive integer") - ret_value = Column.Attributes( - name=name, - qualified_name=f"{parent_qualified_name}/{name}", - connector_name=connector_type.value, - schema_name=fields[4], - schema_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}/{fields[3]}/{fields[4]}", - database_name=fields[3], - database_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}/{fields[3]}", - connection_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}", - order=order, - ) - if parent_type == Table: - ret_value.table_qualified_name = parent_qualified_name - ret_value.table = Table.ref_by_qualified_name(parent_qualified_name) - ret_value.table_name = fields[5] - elif parent_type == View: - ret_value.view_qualified_name = parent_qualified_name - ret_value.view = View.ref_by_qualified_name(parent_qualified_name) - ret_value.view_name = fields[5] - elif parent_type == MaterialisedView: - ret_value.view_qualified_name = parent_qualified_name - ret_value.materialised_view = MaterialisedView.ref_by_qualified_name( - parent_qualified_name - ) - ret_value.view_name = fields[5] - else: - raise ValueError( - "parent_type must be either Table, View or MaterializeView" - ) - return 
ret_value - - attributes: "Column.Attributes" = Field( - default_factory=lambda: Column.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class SnowflakeStream(SQL): - """Description""" - - type_name: str = Field("SnowflakeStream", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "SnowflakeStream": - raise ValueError("must be SnowflakeStream") - return v - - def __setattr__(self, name, value): - if name in SnowflakeStream._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SNOWFLAKE_STREAM_TYPE: ClassVar[KeywordField] = KeywordField( - "snowflakeStreamType", "snowflakeStreamType" - ) - """ - Type of this stream, for example: standard, append-only, insert-only, etc. - """ - SNOWFLAKE_STREAM_SOURCE_TYPE: ClassVar[KeywordField] = KeywordField( - "snowflakeStreamSourceType", "snowflakeStreamSourceType" - ) - """ - Type of the source of this stream. - """ - SNOWFLAKE_STREAM_MODE: ClassVar[KeywordField] = KeywordField( - "snowflakeStreamMode", "snowflakeStreamMode" - ) - """ - Mode of this stream. - """ - SNOWFLAKE_STREAM_IS_STALE: ClassVar[BooleanField] = BooleanField( - "snowflakeStreamIsStale", "snowflakeStreamIsStale" - ) - """ - Whether this stream is stale (true) or not (false). - """ - SNOWFLAKE_STREAM_STALE_AFTER: ClassVar[NumericField] = NumericField( - "snowflakeStreamStaleAfter", "snowflakeStreamStaleAfter" - ) - """ - Time (epoch) after which this stream will be stale, in milliseconds. - """ - - ATLAN_SCHEMA: ClassVar[RelationField] = RelationField("atlanSchema") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "snowflake_stream_type", - "snowflake_stream_source_type", - "snowflake_stream_mode", - "snowflake_stream_is_stale", - "snowflake_stream_stale_after", - "atlan_schema", - ] - - @property - def snowflake_stream_type(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.snowflake_stream_type - ) - - @snowflake_stream_type.setter - def snowflake_stream_type(self, snowflake_stream_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.snowflake_stream_type = snowflake_stream_type - - @property - def snowflake_stream_source_type(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.snowflake_stream_source_type - ) - - @snowflake_stream_source_type.setter - def snowflake_stream_source_type(self, snowflake_stream_source_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.snowflake_stream_source_type = snowflake_stream_source_type - - @property - def snowflake_stream_mode(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.snowflake_stream_mode - ) - - @snowflake_stream_mode.setter - def snowflake_stream_mode(self, snowflake_stream_mode: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.snowflake_stream_mode = snowflake_stream_mode - - @property - def snowflake_stream_is_stale(self) -> Optional[bool]: - return ( - None - if self.attributes is None - else self.attributes.snowflake_stream_is_stale - ) - - @snowflake_stream_is_stale.setter - def snowflake_stream_is_stale(self, snowflake_stream_is_stale: Optional[bool]): - if self.attributes is 
None: - self.attributes = self.Attributes() - self.attributes.snowflake_stream_is_stale = snowflake_stream_is_stale - - @property - def snowflake_stream_stale_after(self) -> Optional[datetime]: - return ( - None - if self.attributes is None - else self.attributes.snowflake_stream_stale_after - ) - - @snowflake_stream_stale_after.setter - def snowflake_stream_stale_after( - self, snowflake_stream_stale_after: Optional[datetime] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.snowflake_stream_stale_after = snowflake_stream_stale_after - - @property - def atlan_schema(self) -> Optional[Schema]: - return None if self.attributes is None else self.attributes.atlan_schema - - @atlan_schema.setter - def atlan_schema(self, atlan_schema: Optional[Schema]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.atlan_schema = atlan_schema - - class Attributes(SQL.Attributes): - snowflake_stream_type: Optional[str] = Field( - None, description="", alias="snowflakeStreamType" - ) - snowflake_stream_source_type: Optional[str] = Field( - None, description="", alias="snowflakeStreamSourceType" - ) - snowflake_stream_mode: Optional[str] = Field( - None, description="", alias="snowflakeStreamMode" - ) - snowflake_stream_is_stale: Optional[bool] = Field( - None, description="", alias="snowflakeStreamIsStale" - ) - snowflake_stream_stale_after: Optional[datetime] = Field( - None, description="", alias="snowflakeStreamStaleAfter" - ) - atlan_schema: Optional[Schema] = Field( - None, description="", alias="atlanSchema" - ) # relationship - - attributes: "SnowflakeStream.Attributes" = Field( - default_factory=lambda: SnowflakeStream.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class Procedure(SQL): - """Description""" - - type_name: str = Field("Procedure", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "Procedure": - raise ValueError("must be Procedure") - return v - - def __setattr__(self, name, value): - if name in Procedure._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - DEFINITION: ClassVar[KeywordField] = KeywordField("definition", "definition") - """ - SQL definition of the procedure. 
- """ - - ATLAN_SCHEMA: ClassVar[RelationField] = RelationField("atlanSchema") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "definition", - "atlan_schema", - ] - - @property - def definition(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.definition - - @definition.setter - def definition(self, definition: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.definition = definition - - @property - def atlan_schema(self) -> Optional[Schema]: - return None if self.attributes is None else self.attributes.atlan_schema - - @atlan_schema.setter - def atlan_schema(self, atlan_schema: Optional[Schema]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.atlan_schema = atlan_schema - - class Attributes(SQL.Attributes): - definition: Optional[str] = Field(None, description="", alias="definition") - atlan_schema: Optional[Schema] = Field( - None, description="", alias="atlanSchema" - ) # relationship - - attributes: "Procedure.Attributes" = Field( - default_factory=lambda: Procedure.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class SnowflakeTag(Tag): - """Description""" - - type_name: str = Field("SnowflakeTag", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "SnowflakeTag": - raise ValueError("must be SnowflakeTag") - return v - - def __setattr__(self, name, value): - if name in SnowflakeTag._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - TAG_ID: ClassVar[KeywordField] = KeywordField("tagId", "tagId") - """ - Unique identifier of the tag in the source system. - """ - TAG_ATTRIBUTES: ClassVar[KeywordField] = KeywordField( - "tagAttributes", "tagAttributes" - ) - """ - Attributes associated with the tag in the source system. - """ - TAG_ALLOWED_VALUES: ClassVar[KeywordTextField] = KeywordTextField( - "tagAllowedValues", "tagAllowedValues", "tagAllowedValues.text" - ) - """ - Allowed values for the tag in the source system. These are denormalized from tagAttributes for ease of querying. - """ - MAPPED_CLASSIFICATION_NAME: ClassVar[KeywordField] = KeywordField( - "mappedClassificationName", "mappedClassificationName" - ) - """ - Name of the classification in Atlan that is mapped to this tag. - """ - QUERY_COUNT: ClassVar[NumericField] = NumericField("queryCount", "queryCount") - """ - Number of times this asset has been queried. - """ - QUERY_USER_COUNT: ClassVar[NumericField] = NumericField( - "queryUserCount", "queryUserCount" - ) - """ - Number of unique users who have queried this asset. - """ - QUERY_USER_MAP: ClassVar[KeywordField] = KeywordField( - "queryUserMap", "queryUserMap" - ) - """ - Map of unique users who have queried this asset to the number of times they have queried it. - """ - QUERY_COUNT_UPDATED_AT: ClassVar[NumericField] = NumericField( - "queryCountUpdatedAt", "queryCountUpdatedAt" - ) - """ - Time (epoch) at which the query count was last updated, in milliseconds. - """ - DATABASE_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "databaseName", "databaseName.keyword", "databaseName" - ) - """ - Simple name of the database in which this SQL asset exists, or empty if it does not exist within a database. 
- """ - DATABASE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "databaseQualifiedName", "databaseQualifiedName" - ) - """ - Unique name of the database in which this SQL asset exists, or empty if it does not exist within a database. - """ - SCHEMA_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "schemaName", "schemaName.keyword", "schemaName" - ) - """ - Simple name of the schema in which this SQL asset exists, or empty if it does not exist within a schema. - """ - SCHEMA_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "schemaQualifiedName", "schemaQualifiedName" - ) - """ - Unique name of the schema in which this SQL asset exists, or empty if it does not exist within a schema. - """ - TABLE_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "tableName", "tableName.keyword", "tableName" - ) - """ - Simple name of the table in which this SQL asset exists, or empty if it does not exist within a table. - """ - TABLE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "tableQualifiedName", "tableQualifiedName" - ) - """ - Unique name of the table in which this SQL asset exists, or empty if it does not exist within a table. - """ - VIEW_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "viewName", "viewName.keyword", "viewName" - ) - """ - Simple name of the view in which this SQL asset exists, or empty if it does not exist within a view. - """ - VIEW_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "viewQualifiedName", "viewQualifiedName" - ) - """ - Unique name of the view in which this SQL asset exists, or empty if it does not exist within a view. - """ - IS_PROFILED: ClassVar[BooleanField] = BooleanField("isProfiled", "isProfiled") - """ - Whether this asset has been profiled (true) or not (false). - """ - LAST_PROFILED_AT: ClassVar[NumericField] = NumericField( - "lastProfiledAt", "lastProfiledAt" - ) - """ - Time (epoch) at which this asset was last profiled, in milliseconds. 
- """ - - DBT_SOURCES: ClassVar[RelationField] = RelationField("dbtSources") - """ - TBC - """ - SQL_DBT_MODELS: ClassVar[RelationField] = RelationField("sqlDbtModels") - """ - TBC - """ - SQL_DBT_SOURCES: ClassVar[RelationField] = RelationField("sqlDBTSources") - """ - TBC - """ - DBT_MODELS: ClassVar[RelationField] = RelationField("dbtModels") - """ - TBC - """ - DBT_TESTS: ClassVar[RelationField] = RelationField("dbtTests") - """ - TBC - """ - ATLAN_SCHEMA: ClassVar[RelationField] = RelationField("atlanSchema") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "tag_id", - "tag_attributes", - "tag_allowed_values", - "mapped_atlan_tag_name", - "query_count", - "query_user_count", - "query_user_map", - "query_count_updated_at", - "database_name", - "database_qualified_name", - "schema_name", - "schema_qualified_name", - "table_name", - "table_qualified_name", - "view_name", - "view_qualified_name", - "is_profiled", - "last_profiled_at", - "dbt_sources", - "sql_dbt_models", - "sql_dbt_sources", - "dbt_models", - "dbt_tests", - "atlan_schema", - ] - - @property - def tag_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.tag_id - - @tag_id.setter - def tag_id(self, tag_id: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.tag_id = tag_id - - @property - def tag_attributes(self) -> Optional[list[SourceTagAttribute]]: - return None if self.attributes is None else self.attributes.tag_attributes - - @tag_attributes.setter - def tag_attributes(self, tag_attributes: Optional[list[SourceTagAttribute]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.tag_attributes = tag_attributes - - @property - def tag_allowed_values(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.tag_allowed_values - - @tag_allowed_values.setter - def tag_allowed_values(self, tag_allowed_values: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.tag_allowed_values = tag_allowed_values - - @property - def mapped_atlan_tag_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.mapped_atlan_tag_name - ) - - @mapped_atlan_tag_name.setter - def mapped_atlan_tag_name(self, mapped_atlan_tag_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mapped_atlan_tag_name = mapped_atlan_tag_name - - @property - def query_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.query_count - - @query_count.setter - def query_count(self, query_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.query_count = query_count - - @property - def query_user_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.query_user_count - - @query_user_count.setter - def query_user_count(self, query_user_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.query_user_count = query_user_count - - @property - def query_user_map(self) -> Optional[dict[str, int]]: - return None if self.attributes is None else self.attributes.query_user_map - - @query_user_map.setter - def query_user_map(self, query_user_map: Optional[dict[str, int]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.query_user_map = 
query_user_map - - @property - def query_count_updated_at(self) -> Optional[datetime]: - return ( - None if self.attributes is None else self.attributes.query_count_updated_at - ) - - @query_count_updated_at.setter - def query_count_updated_at(self, query_count_updated_at: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.query_count_updated_at = query_count_updated_at - - @property - def database_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.database_name - - @database_name.setter - def database_name(self, database_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.database_name = database_name - - @property - def database_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.database_qualified_name - ) - - @database_qualified_name.setter - def database_qualified_name(self, database_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.database_qualified_name = database_qualified_name - - @property - def schema_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.schema_name - - @schema_name.setter - def schema_name(self, schema_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.schema_name = schema_name - - @property - def schema_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.schema_qualified_name - ) - - @schema_qualified_name.setter - def schema_qualified_name(self, schema_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.schema_qualified_name = schema_qualified_name - - @property - def table_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.table_name - - @table_name.setter - def table_name(self, table_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.table_name = table_name - - @property - def table_qualified_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.table_qualified_name - - @table_qualified_name.setter - def table_qualified_name(self, table_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.table_qualified_name = table_qualified_name - - @property - def view_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.view_name - - @view_name.setter - def view_name(self, view_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.view_name = view_name - - @property - def view_qualified_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.view_qualified_name - - @view_qualified_name.setter - def view_qualified_name(self, view_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.view_qualified_name = view_qualified_name - - @property - def is_profiled(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_profiled - - @is_profiled.setter - def is_profiled(self, is_profiled: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - 
self.attributes.is_profiled = is_profiled - - @property - def last_profiled_at(self) -> Optional[datetime]: - return None if self.attributes is None else self.attributes.last_profiled_at - - @last_profiled_at.setter - def last_profiled_at(self, last_profiled_at: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.last_profiled_at = last_profiled_at - - @property - def dbt_sources(self) -> Optional[list[DbtSource]]: - return None if self.attributes is None else self.attributes.dbt_sources - - @dbt_sources.setter - def dbt_sources(self, dbt_sources: Optional[list[DbtSource]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_sources = dbt_sources - - @property - def sql_dbt_models(self) -> Optional[list[DbtModel]]: - return None if self.attributes is None else self.attributes.sql_dbt_models - - @sql_dbt_models.setter - def sql_dbt_models(self, sql_dbt_models: Optional[list[DbtModel]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sql_dbt_models = sql_dbt_models - - @property - def sql_dbt_sources(self) -> Optional[list[DbtSource]]: - return None if self.attributes is None else self.attributes.sql_dbt_sources - - @sql_dbt_sources.setter - def sql_dbt_sources(self, sql_dbt_sources: Optional[list[DbtSource]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sql_dbt_sources = sql_dbt_sources - - @property - def dbt_models(self) -> Optional[list[DbtModel]]: - return None if self.attributes is None else self.attributes.dbt_models - - @dbt_models.setter - def dbt_models(self, dbt_models: Optional[list[DbtModel]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_models = dbt_models - - @property - def dbt_tests(self) -> Optional[list[DbtTest]]: - return None if self.attributes is None else self.attributes.dbt_tests - - @dbt_tests.setter - def dbt_tests(self, dbt_tests: Optional[list[DbtTest]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_tests = dbt_tests - - @property - def atlan_schema(self) -> Optional[Schema]: - return None if self.attributes is None else self.attributes.atlan_schema - - @atlan_schema.setter - def atlan_schema(self, atlan_schema: Optional[Schema]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.atlan_schema = atlan_schema - - class Attributes(Tag.Attributes): - tag_id: Optional[str] = Field(None, description="", alias="tagId") - tag_attributes: Optional[list[SourceTagAttribute]] = Field( - None, description="", alias="tagAttributes" - ) - tag_allowed_values: Optional[set[str]] = Field( - None, description="", alias="tagAllowedValues" - ) - mapped_atlan_tag_name: Optional[str] = Field( - None, description="", alias="mappedClassificationName" - ) - query_count: Optional[int] = Field(None, description="", alias="queryCount") - query_user_count: Optional[int] = Field( - None, description="", alias="queryUserCount" - ) - query_user_map: Optional[dict[str, int]] = Field( - None, description="", alias="queryUserMap" - ) - query_count_updated_at: Optional[datetime] = Field( - None, description="", alias="queryCountUpdatedAt" - ) - database_name: Optional[str] = Field(None, description="", alias="databaseName") - database_qualified_name: Optional[str] = Field( - None, description="", alias="databaseQualifiedName" - ) - schema_name: Optional[str] = Field(None, description="", 
alias="schemaName") - schema_qualified_name: Optional[str] = Field( - None, description="", alias="schemaQualifiedName" - ) - table_name: Optional[str] = Field(None, description="", alias="tableName") - table_qualified_name: Optional[str] = Field( - None, description="", alias="tableQualifiedName" - ) - view_name: Optional[str] = Field(None, description="", alias="viewName") - view_qualified_name: Optional[str] = Field( - None, description="", alias="viewQualifiedName" - ) - is_profiled: Optional[bool] = Field(None, description="", alias="isProfiled") - last_profiled_at: Optional[datetime] = Field( - None, description="", alias="lastProfiledAt" - ) - dbt_sources: Optional[list[DbtSource]] = Field( - None, description="", alias="dbtSources" - ) # relationship - sql_dbt_models: Optional[list[DbtModel]] = Field( - None, description="", alias="sqlDbtModels" - ) # relationship - sql_dbt_sources: Optional[list[DbtSource]] = Field( - None, description="", alias="sqlDBTSources" - ) # relationship - dbt_models: Optional[list[DbtModel]] = Field( - None, description="", alias="dbtModels" - ) # relationship - dbt_tests: Optional[list[DbtTest]] = Field( - None, description="", alias="dbtTests" - ) # relationship - atlan_schema: Optional[Schema] = Field( - None, description="", alias="atlanSchema" - ) # relationship - - attributes: "SnowflakeTag.Attributes" = Field( - default_factory=lambda: SnowflakeTag.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class Matillion(Catalog): - """Description""" - - type_name: str = Field("Matillion", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "Matillion": - raise ValueError("must be Matillion") - return v - - def __setattr__(self, name, value): - if name in Matillion._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - MATILLION_VERSION: ClassVar[KeywordField] = KeywordField( - "matillionVersion", "matillionVersion" - ) - """ - Current point in time state of a project. - """ - - _convenience_properties: ClassVar[list[str]] = [ - "matillion_version", - ] - - @property - def matillion_version(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.matillion_version - - @matillion_version.setter - def matillion_version(self, matillion_version: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.matillion_version = matillion_version - - class Attributes(Catalog.Attributes): - matillion_version: Optional[str] = Field( - None, description="", alias="matillionVersion" - ) - - attributes: "Matillion.Attributes" = Field( - default_factory=lambda: Matillion.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class MatillionGroup(Matillion): - """Description""" - - type_name: str = Field("MatillionGroup", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "MatillionGroup": - raise ValueError("must be MatillionGroup") - return v - - def __setattr__(self, name, value): - if name in MatillionGroup._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - MATILLION_PROJECT_COUNT: ClassVar[NumericField] = NumericField( - "matillionProjectCount", "matillionProjectCount" - ) - """ - Number of projects within the group. - """ - - MATILLION_PROJECTS: ClassVar[RelationField] = RelationField("matillionProjects") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "matillion_project_count", - "matillion_projects", - ] - - @property - def matillion_project_count(self) -> Optional[int]: - return ( - None if self.attributes is None else self.attributes.matillion_project_count - ) - - @matillion_project_count.setter - def matillion_project_count(self, matillion_project_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.matillion_project_count = matillion_project_count - - @property - def matillion_projects(self) -> Optional[list[MatillionProject]]: - return None if self.attributes is None else self.attributes.matillion_projects - - @matillion_projects.setter - def matillion_projects(self, matillion_projects: Optional[list[MatillionProject]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.matillion_projects = matillion_projects - - class Attributes(Matillion.Attributes): - matillion_project_count: Optional[int] = Field( - None, description="", alias="matillionProjectCount" - ) - matillion_projects: Optional[list[MatillionProject]] = Field( - None, description="", alias="matillionProjects" - ) # relationship - - attributes: "MatillionGroup.Attributes" = Field( - default_factory=lambda: MatillionGroup.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class MatillionJob(Matillion): - """Description""" - - type_name: str = Field("MatillionJob", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "MatillionJob": - raise ValueError("must be MatillionJob") - return v - - def __setattr__(self, name, value): - if name in MatillionJob._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - MATILLION_JOB_TYPE: ClassVar[KeywordField] = KeywordField( - "matillionJobType", "matillionJobType" - ) - """ - Type of the job, for example: orchestration or transformation. - """ - MATILLION_JOB_PATH: ClassVar[KeywordTextField] = KeywordTextField( - "matillionJobPath", "matillionJobPath", "matillionJobPath.text" - ) - """ - Path of the job within the project. Jobs can be managed at multiple folder levels within a project. - """ - MATILLION_JOB_COMPONENT_COUNT: ClassVar[NumericField] = NumericField( - "matillionJobComponentCount", "matillionJobComponentCount" - ) - """ - Number of components within the job. 
- """ - MATILLION_JOB_SCHEDULE: ClassVar[KeywordField] = KeywordField( - "matillionJobSchedule", "matillionJobSchedule" - ) - """ - How the job is scheduled, for example: weekly or monthly. - """ - MATILLION_PROJECT_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "matillionProjectName", "matillionProjectName.keyword", "matillionProjectName" - ) - """ - Simple name of the project to which the job belongs. - """ - MATILLION_PROJECT_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "matillionProjectQualifiedName", - "matillionProjectQualifiedName", - "matillionProjectQualifiedName.text", - ) - """ - Unique name of the project to which the job belongs. - """ - - MATILLION_PROJECT: ClassVar[RelationField] = RelationField("matillionProject") - """ - TBC - """ - MATILLION_COMPONENTS: ClassVar[RelationField] = RelationField("matillionComponents") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "matillion_job_type", - "matillion_job_path", - "matillion_job_component_count", - "matillion_job_schedule", - "matillion_project_name", - "matillion_project_qualified_name", - "matillion_project", - "matillion_components", - ] - - @property - def matillion_job_type(self) -> Optional[MatillionJobType]: - return None if self.attributes is None else self.attributes.matillion_job_type - - @matillion_job_type.setter - def matillion_job_type(self, matillion_job_type: Optional[MatillionJobType]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.matillion_job_type = matillion_job_type - - @property - def matillion_job_path(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.matillion_job_path - - @matillion_job_path.setter - def matillion_job_path(self, matillion_job_path: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.matillion_job_path = matillion_job_path - - @property - def matillion_job_component_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.matillion_job_component_count - ) - - @matillion_job_component_count.setter - def matillion_job_component_count( - self, matillion_job_component_count: Optional[int] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.matillion_job_component_count = matillion_job_component_count - - @property - def matillion_job_schedule(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.matillion_job_schedule - ) - - @matillion_job_schedule.setter - def matillion_job_schedule(self, matillion_job_schedule: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.matillion_job_schedule = matillion_job_schedule - - @property - def matillion_project_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.matillion_project_name - ) - - @matillion_project_name.setter - def matillion_project_name(self, matillion_project_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.matillion_project_name = matillion_project_name - - @property - def matillion_project_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.matillion_project_qualified_name - ) - - @matillion_project_qualified_name.setter - def matillion_project_qualified_name( - self, matillion_project_qualified_name: Optional[str] - ): - if 
self.attributes is None: - self.attributes = self.Attributes() - self.attributes.matillion_project_qualified_name = ( - matillion_project_qualified_name - ) - - @property - def matillion_project(self) -> Optional[MatillionProject]: - return None if self.attributes is None else self.attributes.matillion_project - - @matillion_project.setter - def matillion_project(self, matillion_project: Optional[MatillionProject]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.matillion_project = matillion_project - - @property - def matillion_components(self) -> Optional[list[MatillionComponent]]: - return None if self.attributes is None else self.attributes.matillion_components - - @matillion_components.setter - def matillion_components( - self, matillion_components: Optional[list[MatillionComponent]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.matillion_components = matillion_components - - class Attributes(Matillion.Attributes): - matillion_job_type: Optional[MatillionJobType] = Field( - None, description="", alias="matillionJobType" - ) - matillion_job_path: Optional[str] = Field( - None, description="", alias="matillionJobPath" - ) - matillion_job_component_count: Optional[int] = Field( - None, description="", alias="matillionJobComponentCount" - ) - matillion_job_schedule: Optional[str] = Field( - None, description="", alias="matillionJobSchedule" - ) - matillion_project_name: Optional[str] = Field( - None, description="", alias="matillionProjectName" - ) - matillion_project_qualified_name: Optional[str] = Field( - None, description="", alias="matillionProjectQualifiedName" - ) - matillion_project: Optional[MatillionProject] = Field( - None, description="", alias="matillionProject" - ) # relationship - matillion_components: Optional[list[MatillionComponent]] = Field( - None, description="", alias="matillionComponents" - ) # relationship - - attributes: "MatillionJob.Attributes" = Field( - default_factory=lambda: MatillionJob.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class MatillionProject(Matillion): - """Description""" - - type_name: str = Field("MatillionProject", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "MatillionProject": - raise ValueError("must be MatillionProject") - return v - - def __setattr__(self, name, value): - if name in MatillionProject._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - MATILLION_VERSIONS: ClassVar[KeywordField] = KeywordField( - "matillionVersions", "matillionVersions" - ) - """ - List of versions in the project. - """ - MATILLION_ENVIRONMENTS: ClassVar[KeywordField] = KeywordField( - "matillionEnvironments", "matillionEnvironments" - ) - """ - List of environments in the project. - """ - MATILLION_PROJECT_JOB_COUNT: ClassVar[NumericField] = NumericField( - "matillionProjectJobCount", "matillionProjectJobCount" - ) - """ - Number of jobs in the project. - """ - MATILLION_GROUP_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "matillionGroupName", "matillionGroupName.keyword", "matillionGroupName" - ) - """ - Simple name of the Matillion group to which the project belongs. 
- """ - MATILLION_GROUP_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "matillionGroupQualifiedName", - "matillionGroupQualifiedName", - "matillionGroupQualifiedName.text", - ) - """ - Unique name of the Matillion group to which the project belongs. - """ - - MATILLION_JOBS: ClassVar[RelationField] = RelationField("matillionJobs") - """ - TBC - """ - MATILLION_GROUP: ClassVar[RelationField] = RelationField("matillionGroup") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "matillion_versions", - "matillion_environments", - "matillion_project_job_count", - "matillion_group_name", - "matillion_group_qualified_name", - "matillion_jobs", - "matillion_group", - ] - - @property - def matillion_versions(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.matillion_versions - - @matillion_versions.setter - def matillion_versions(self, matillion_versions: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.matillion_versions = matillion_versions - - @property - def matillion_environments(self) -> Optional[set[str]]: - return ( - None if self.attributes is None else self.attributes.matillion_environments - ) - - @matillion_environments.setter - def matillion_environments(self, matillion_environments: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.matillion_environments = matillion_environments - - @property - def matillion_project_job_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.matillion_project_job_count - ) - - @matillion_project_job_count.setter - def matillion_project_job_count(self, matillion_project_job_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.matillion_project_job_count = matillion_project_job_count - - @property - def matillion_group_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.matillion_group_name - - @matillion_group_name.setter - def matillion_group_name(self, matillion_group_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.matillion_group_name = matillion_group_name - - @property - def matillion_group_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.matillion_group_qualified_name - ) - - @matillion_group_qualified_name.setter - def matillion_group_qualified_name( - self, matillion_group_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.matillion_group_qualified_name = matillion_group_qualified_name - - @property - def matillion_jobs(self) -> Optional[list[MatillionJob]]: - return None if self.attributes is None else self.attributes.matillion_jobs - - @matillion_jobs.setter - def matillion_jobs(self, matillion_jobs: Optional[list[MatillionJob]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.matillion_jobs = matillion_jobs - - @property - def matillion_group(self) -> Optional[MatillionGroup]: - return None if self.attributes is None else self.attributes.matillion_group - - @matillion_group.setter - def matillion_group(self, matillion_group: Optional[MatillionGroup]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.matillion_group = matillion_group - - class 
Attributes(Matillion.Attributes): - matillion_versions: Optional[set[str]] = Field( - None, description="", alias="matillionVersions" - ) - matillion_environments: Optional[set[str]] = Field( - None, description="", alias="matillionEnvironments" - ) - matillion_project_job_count: Optional[int] = Field( - None, description="", alias="matillionProjectJobCount" - ) - matillion_group_name: Optional[str] = Field( - None, description="", alias="matillionGroupName" - ) - matillion_group_qualified_name: Optional[str] = Field( - None, description="", alias="matillionGroupQualifiedName" - ) - matillion_jobs: Optional[list[MatillionJob]] = Field( - None, description="", alias="matillionJobs" - ) # relationship - matillion_group: Optional[MatillionGroup] = Field( - None, description="", alias="matillionGroup" - ) # relationship - - attributes: "MatillionProject.Attributes" = Field( - default_factory=lambda: MatillionProject.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class MatillionComponent(Matillion): - """Description""" - - type_name: str = Field("MatillionComponent", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "MatillionComponent": - raise ValueError("must be MatillionComponent") - return v - - def __setattr__(self, name, value): - if name in MatillionComponent._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - MATILLION_COMPONENT_ID: ClassVar[KeywordField] = KeywordField( - "matillionComponentId", "matillionComponentId" - ) - """ - Unique identifier of the component in Matillion. - """ - MATILLION_COMPONENT_IMPLEMENTATION_ID: ClassVar[KeywordField] = KeywordField( - "matillionComponentImplementationId", "matillionComponentImplementationId" - ) - """ - Unique identifier for the type of the component in Matillion. - """ - MATILLION_COMPONENT_LINKED_JOB: ClassVar[KeywordField] = KeywordField( - "matillionComponentLinkedJob", "matillionComponentLinkedJob" - ) - """ - Job details of the job to which the component internally links. - """ - MATILLION_COMPONENT_LAST_RUN_STATUS: ClassVar[KeywordField] = KeywordField( - "matillionComponentLastRunStatus", "matillionComponentLastRunStatus" - ) - """ - Latest run status of the component within a job. - """ - MATILLION_COMPONENT_LAST_FIVE_RUN_STATUS: ClassVar[KeywordField] = KeywordField( - "matillionComponentLastFiveRunStatus", "matillionComponentLastFiveRunStatus" - ) - """ - Last five run statuses of the component within a job. - """ - MATILLION_COMPONENT_SQLS: ClassVar[KeywordField] = KeywordField( - "matillionComponentSqls", "matillionComponentSqls" - ) - """ - SQL queries used by the component. - """ - MATILLION_JOB_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "matillionJobName", "matillionJobName.keyword", "matillionJobName" - ) - """ - Simple name of the job to which the component belongs. - """ - MATILLION_JOB_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "matillionJobQualifiedName", - "matillionJobQualifiedName", - "matillionJobQualifiedName.text", - ) - """ - Unique name of the job to which the component belongs. 
- """ - - MATILLION_PROCESS: ClassVar[RelationField] = RelationField("matillionProcess") - """ - TBC - """ - MATILLION_JOB: ClassVar[RelationField] = RelationField("matillionJob") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "matillion_component_id", - "matillion_component_implementation_id", - "matillion_component_linked_job", - "matillion_component_last_run_status", - "matillion_component_last_five_run_status", - "matillion_component_sqls", - "matillion_job_name", - "matillion_job_qualified_name", - "matillion_process", - "matillion_job", - ] - - @property - def matillion_component_id(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.matillion_component_id - ) - - @matillion_component_id.setter - def matillion_component_id(self, matillion_component_id: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.matillion_component_id = matillion_component_id - - @property - def matillion_component_implementation_id(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.matillion_component_implementation_id - ) - - @matillion_component_implementation_id.setter - def matillion_component_implementation_id( - self, matillion_component_implementation_id: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.matillion_component_implementation_id = ( - matillion_component_implementation_id - ) - - @property - def matillion_component_linked_job(self) -> Optional[dict[str, str]]: - return ( - None - if self.attributes is None - else self.attributes.matillion_component_linked_job - ) - - @matillion_component_linked_job.setter - def matillion_component_linked_job( - self, matillion_component_linked_job: Optional[dict[str, str]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.matillion_component_linked_job = matillion_component_linked_job - - @property - def matillion_component_last_run_status(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.matillion_component_last_run_status - ) - - @matillion_component_last_run_status.setter - def matillion_component_last_run_status( - self, matillion_component_last_run_status: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.matillion_component_last_run_status = ( - matillion_component_last_run_status - ) - - @property - def matillion_component_last_five_run_status(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.matillion_component_last_five_run_status - ) - - @matillion_component_last_five_run_status.setter - def matillion_component_last_five_run_status( - self, matillion_component_last_five_run_status: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.matillion_component_last_five_run_status = ( - matillion_component_last_five_run_status - ) - - @property - def matillion_component_sqls(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.matillion_component_sqls - ) - - @matillion_component_sqls.setter - def matillion_component_sqls(self, matillion_component_sqls: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.matillion_component_sqls = matillion_component_sqls - - @property - def matillion_job_name(self) -> 
Optional[str]: - return None if self.attributes is None else self.attributes.matillion_job_name - - @matillion_job_name.setter - def matillion_job_name(self, matillion_job_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.matillion_job_name = matillion_job_name - - @property - def matillion_job_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.matillion_job_qualified_name - ) - - @matillion_job_qualified_name.setter - def matillion_job_qualified_name(self, matillion_job_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.matillion_job_qualified_name = matillion_job_qualified_name - - @property - def matillion_process(self) -> Optional[Process]: - return None if self.attributes is None else self.attributes.matillion_process - - @matillion_process.setter - def matillion_process(self, matillion_process: Optional[Process]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.matillion_process = matillion_process - - @property - def matillion_job(self) -> Optional[MatillionJob]: - return None if self.attributes is None else self.attributes.matillion_job - - @matillion_job.setter - def matillion_job(self, matillion_job: Optional[MatillionJob]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.matillion_job = matillion_job - - class Attributes(Matillion.Attributes): - matillion_component_id: Optional[str] = Field( - None, description="", alias="matillionComponentId" - ) - matillion_component_implementation_id: Optional[str] = Field( - None, description="", alias="matillionComponentImplementationId" - ) - matillion_component_linked_job: Optional[dict[str, str]] = Field( - None, description="", alias="matillionComponentLinkedJob" - ) - matillion_component_last_run_status: Optional[str] = Field( - None, description="", alias="matillionComponentLastRunStatus" - ) - matillion_component_last_five_run_status: Optional[str] = Field( - None, description="", alias="matillionComponentLastFiveRunStatus" - ) - matillion_component_sqls: Optional[set[str]] = Field( - None, description="", alias="matillionComponentSqls" - ) - matillion_job_name: Optional[str] = Field( - None, description="", alias="matillionJobName" - ) - matillion_job_qualified_name: Optional[str] = Field( - None, description="", alias="matillionJobQualifiedName" - ) - matillion_process: Optional[Process] = Field( - None, description="", alias="matillionProcess" - ) # relationship - matillion_job: Optional[MatillionJob] = Field( - None, description="", alias="matillionJob" - ) # relationship - - attributes: "MatillionComponent.Attributes" = Field( - default_factory=lambda: MatillionComponent.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class Dbt(Catalog): - """Description""" - - type_name: str = Field("Dbt", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "Dbt": - raise ValueError("must be Dbt") - return v - - def __setattr__(self, name, value): - if name in Dbt._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - DBT_ALIAS: ClassVar[KeywordTextField] = KeywordTextField( - "dbtAlias", "dbtAlias.keyword", "dbtAlias" - ) - """ - - """ - DBT_META: ClassVar[KeywordField] = KeywordField("dbtMeta", "dbtMeta") - """ - - """ - DBT_UNIQUE_ID: ClassVar[KeywordTextField] = KeywordTextField( - "dbtUniqueId", "dbtUniqueId.keyword", "dbtUniqueId" - ) - """ - - """ - DBT_ACCOUNT_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "dbtAccountName", "dbtAccountName.keyword", "dbtAccountName" - ) - """ - - """ - DBT_PROJECT_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "dbtProjectName", "dbtProjectName.keyword", "dbtProjectName" - ) - """ - - """ - DBT_PACKAGE_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "dbtPackageName", "dbtPackageName.keyword", "dbtPackageName" - ) - """ - - """ - DBT_JOB_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "dbtJobName", "dbtJobName.keyword", "dbtJobName" - ) - """ - - """ - DBT_JOB_SCHEDULE: ClassVar[KeywordField] = KeywordField( - "dbtJobSchedule", "dbtJobSchedule" - ) - """ - - """ - DBT_JOB_STATUS: ClassVar[KeywordField] = KeywordField( - "dbtJobStatus", "dbtJobStatus" - ) - """ - - """ - DBT_JOB_SCHEDULE_CRON_HUMANIZED: ClassVar[KeywordTextField] = KeywordTextField( - "dbtJobScheduleCronHumanized", - "dbtJobScheduleCronHumanized.keyword", - "dbtJobScheduleCronHumanized", - ) - """ - - """ - DBT_JOB_LAST_RUN: ClassVar[NumericField] = NumericField( - "dbtJobLastRun", "dbtJobLastRun" - ) - """ - - """ - DBT_JOB_NEXT_RUN: ClassVar[NumericField] = NumericField( - "dbtJobNextRun", "dbtJobNextRun" - ) - """ - - """ - DBT_JOB_NEXT_RUN_HUMANIZED: ClassVar[KeywordTextField] = KeywordTextField( - "dbtJobNextRunHumanized", - "dbtJobNextRunHumanized.keyword", - "dbtJobNextRunHumanized", - ) - """ - - """ - DBT_ENVIRONMENT_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "dbtEnvironmentName", "dbtEnvironmentName.keyword", "dbtEnvironmentName" - ) - """ - - """ - DBT_ENVIRONMENT_DBT_VERSION: ClassVar[KeywordTextField] = KeywordTextField( - "dbtEnvironmentDbtVersion", - "dbtEnvironmentDbtVersion.keyword", - "dbtEnvironmentDbtVersion", - ) - """ - - """ - DBT_TAGS: ClassVar[KeywordField] = KeywordField("dbtTags", "dbtTags") - """ - - """ - DBT_CONNECTION_CONTEXT: ClassVar[KeywordField] = KeywordField( - "dbtConnectionContext", "dbtConnectionContext" - ) - """ - - """ - DBT_SEMANTIC_LAYER_PROXY_URL: ClassVar[KeywordField] = KeywordField( - "dbtSemanticLayerProxyUrl", "dbtSemanticLayerProxyUrl" - ) - """ - - """ - - _convenience_properties: ClassVar[list[str]] = [ - "dbt_alias", - "dbt_meta", - "dbt_unique_id", - "dbt_account_name", - "dbt_project_name", - "dbt_package_name", - "dbt_job_name", - "dbt_job_schedule", - "dbt_job_status", - "dbt_job_schedule_cron_humanized", - "dbt_job_last_run", - "dbt_job_next_run", - "dbt_job_next_run_humanized", - "dbt_environment_name", - "dbt_environment_dbt_version", - "dbt_tags", - "dbt_connection_context", - "dbt_semantic_layer_proxy_url", - ] - - @property - def dbt_alias(self) -> Optional[str]: - return None if self.attributes is 
None else self.attributes.dbt_alias - - @dbt_alias.setter - def dbt_alias(self, dbt_alias: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_alias = dbt_alias - - @property - def dbt_meta(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_meta - - @dbt_meta.setter - def dbt_meta(self, dbt_meta: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_meta = dbt_meta - - @property - def dbt_unique_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_unique_id - - @dbt_unique_id.setter - def dbt_unique_id(self, dbt_unique_id: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_unique_id = dbt_unique_id - - @property - def dbt_account_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_account_name - - @dbt_account_name.setter - def dbt_account_name(self, dbt_account_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_account_name = dbt_account_name - - @property - def dbt_project_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_project_name - - @dbt_project_name.setter - def dbt_project_name(self, dbt_project_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_project_name = dbt_project_name - - @property - def dbt_package_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_package_name - - @dbt_package_name.setter - def dbt_package_name(self, dbt_package_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_package_name = dbt_package_name - - @property - def dbt_job_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_job_name - - @dbt_job_name.setter - def dbt_job_name(self, dbt_job_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_job_name = dbt_job_name - - @property - def dbt_job_schedule(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_job_schedule - - @dbt_job_schedule.setter - def dbt_job_schedule(self, dbt_job_schedule: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_job_schedule = dbt_job_schedule - - @property - def dbt_job_status(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_job_status - - @dbt_job_status.setter - def dbt_job_status(self, dbt_job_status: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_job_status = dbt_job_status - - @property - def dbt_job_schedule_cron_humanized(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.dbt_job_schedule_cron_humanized - ) - - @dbt_job_schedule_cron_humanized.setter - def dbt_job_schedule_cron_humanized( - self, dbt_job_schedule_cron_humanized: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_job_schedule_cron_humanized = ( - dbt_job_schedule_cron_humanized - ) - - @property - def dbt_job_last_run(self) -> Optional[datetime]: - return None if self.attributes is None else 
self.attributes.dbt_job_last_run - - @dbt_job_last_run.setter - def dbt_job_last_run(self, dbt_job_last_run: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_job_last_run = dbt_job_last_run - - @property - def dbt_job_next_run(self) -> Optional[datetime]: - return None if self.attributes is None else self.attributes.dbt_job_next_run - - @dbt_job_next_run.setter - def dbt_job_next_run(self, dbt_job_next_run: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_job_next_run = dbt_job_next_run - - @property - def dbt_job_next_run_humanized(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.dbt_job_next_run_humanized - ) - - @dbt_job_next_run_humanized.setter - def dbt_job_next_run_humanized(self, dbt_job_next_run_humanized: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_job_next_run_humanized = dbt_job_next_run_humanized - - @property - def dbt_environment_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_environment_name - - @dbt_environment_name.setter - def dbt_environment_name(self, dbt_environment_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_environment_name = dbt_environment_name - - @property - def dbt_environment_dbt_version(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.dbt_environment_dbt_version - ) - - @dbt_environment_dbt_version.setter - def dbt_environment_dbt_version(self, dbt_environment_dbt_version: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_environment_dbt_version = dbt_environment_dbt_version - - @property - def dbt_tags(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.dbt_tags - - @dbt_tags.setter - def dbt_tags(self, dbt_tags: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_tags = dbt_tags - - @property - def dbt_connection_context(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.dbt_connection_context - ) - - @dbt_connection_context.setter - def dbt_connection_context(self, dbt_connection_context: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_connection_context = dbt_connection_context - - @property - def dbt_semantic_layer_proxy_url(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.dbt_semantic_layer_proxy_url - ) - - @dbt_semantic_layer_proxy_url.setter - def dbt_semantic_layer_proxy_url(self, dbt_semantic_layer_proxy_url: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_semantic_layer_proxy_url = dbt_semantic_layer_proxy_url - - class Attributes(Catalog.Attributes): - dbt_alias: Optional[str] = Field(None, description="", alias="dbtAlias") - dbt_meta: Optional[str] = Field(None, description="", alias="dbtMeta") - dbt_unique_id: Optional[str] = Field(None, description="", alias="dbtUniqueId") - dbt_account_name: Optional[str] = Field( - None, description="", alias="dbtAccountName" - ) - dbt_project_name: Optional[str] = Field( - None, description="", alias="dbtProjectName" - ) - dbt_package_name: Optional[str] = Field( - None, 
description="", alias="dbtPackageName" - ) - dbt_job_name: Optional[str] = Field(None, description="", alias="dbtJobName") - dbt_job_schedule: Optional[str] = Field( - None, description="", alias="dbtJobSchedule" - ) - dbt_job_status: Optional[str] = Field( - None, description="", alias="dbtJobStatus" - ) - dbt_job_schedule_cron_humanized: Optional[str] = Field( - None, description="", alias="dbtJobScheduleCronHumanized" - ) - dbt_job_last_run: Optional[datetime] = Field( - None, description="", alias="dbtJobLastRun" - ) - dbt_job_next_run: Optional[datetime] = Field( - None, description="", alias="dbtJobNextRun" - ) - dbt_job_next_run_humanized: Optional[str] = Field( - None, description="", alias="dbtJobNextRunHumanized" - ) - dbt_environment_name: Optional[str] = Field( - None, description="", alias="dbtEnvironmentName" - ) - dbt_environment_dbt_version: Optional[str] = Field( - None, description="", alias="dbtEnvironmentDbtVersion" - ) - dbt_tags: Optional[set[str]] = Field(None, description="", alias="dbtTags") - dbt_connection_context: Optional[str] = Field( - None, description="", alias="dbtConnectionContext" - ) - dbt_semantic_layer_proxy_url: Optional[str] = Field( - None, description="", alias="dbtSemanticLayerProxyUrl" - ) - - attributes: "Dbt.Attributes" = Field( - default_factory=lambda: Dbt.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class DbtModelColumn(Dbt): - """Description""" - - type_name: str = Field("DbtModelColumn", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "DbtModelColumn": - raise ValueError("must be DbtModelColumn") - return v - - def __setattr__(self, name, value): - if name in DbtModelColumn._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - DBT_MODEL_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "dbtModelQualifiedName", "dbtModelQualifiedName", "dbtModelQualifiedName.text" - ) - """ - - """ - DBT_MODEL_COLUMN_DATA_TYPE: ClassVar[KeywordField] = KeywordField( - "dbtModelColumnDataType", "dbtModelColumnDataType" - ) - """ - - """ - DBT_MODEL_COLUMN_ORDER: ClassVar[NumericField] = NumericField( - "dbtModelColumnOrder", "dbtModelColumnOrder" - ) - """ - - """ - - SQL_COLUMN: ClassVar[RelationField] = RelationField("sqlColumn") - """ - TBC - """ - DBT_MODEL: ClassVar[RelationField] = RelationField("dbtModel") - """ - TBC - """ - DBT_MODEL_COLUMN_SQL_COLUMNS: ClassVar[RelationField] = RelationField( - "dbtModelColumnSqlColumns" - ) - """ - TBC - """ - DBT_TESTS: ClassVar[RelationField] = RelationField("dbtTests") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "dbt_model_qualified_name", - "dbt_model_column_data_type", - "dbt_model_column_order", - "sql_column", - "dbt_model", - "dbt_model_column_sql_columns", - "dbt_tests", - ] - - @property - def dbt_model_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.dbt_model_qualified_name - ) - - @dbt_model_qualified_name.setter - def dbt_model_qualified_name(self, dbt_model_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_model_qualified_name = dbt_model_qualified_name - - @property - def dbt_model_column_data_type(self) -> Optional[str]: - return ( - None - if self.attributes is None - 
else self.attributes.dbt_model_column_data_type - ) - - @dbt_model_column_data_type.setter - def dbt_model_column_data_type(self, dbt_model_column_data_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_model_column_data_type = dbt_model_column_data_type - - @property - def dbt_model_column_order(self) -> Optional[int]: - return ( - None if self.attributes is None else self.attributes.dbt_model_column_order - ) - - @dbt_model_column_order.setter - def dbt_model_column_order(self, dbt_model_column_order: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_model_column_order = dbt_model_column_order - - @property - def sql_column(self) -> Optional[Column]: - return None if self.attributes is None else self.attributes.sql_column - - @sql_column.setter - def sql_column(self, sql_column: Optional[Column]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sql_column = sql_column - - @property - def dbt_model(self) -> Optional[DbtModel]: - return None if self.attributes is None else self.attributes.dbt_model - - @dbt_model.setter - def dbt_model(self, dbt_model: Optional[DbtModel]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_model = dbt_model - - @property - def dbt_model_column_sql_columns(self) -> Optional[list[Column]]: - return ( - None - if self.attributes is None - else self.attributes.dbt_model_column_sql_columns - ) - - @dbt_model_column_sql_columns.setter - def dbt_model_column_sql_columns( - self, dbt_model_column_sql_columns: Optional[list[Column]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_model_column_sql_columns = dbt_model_column_sql_columns - - @property - def dbt_tests(self) -> Optional[list[DbtTest]]: - return None if self.attributes is None else self.attributes.dbt_tests - - @dbt_tests.setter - def dbt_tests(self, dbt_tests: Optional[list[DbtTest]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_tests = dbt_tests - - class Attributes(Dbt.Attributes): - dbt_model_qualified_name: Optional[str] = Field( - None, description="", alias="dbtModelQualifiedName" - ) - dbt_model_column_data_type: Optional[str] = Field( - None, description="", alias="dbtModelColumnDataType" - ) - dbt_model_column_order: Optional[int] = Field( - None, description="", alias="dbtModelColumnOrder" - ) - sql_column: Optional[Column] = Field( - None, description="", alias="sqlColumn" - ) # relationship - dbt_model: Optional[DbtModel] = Field( - None, description="", alias="dbtModel" - ) # relationship - dbt_model_column_sql_columns: Optional[list[Column]] = Field( - None, description="", alias="dbtModelColumnSqlColumns" - ) # relationship - dbt_tests: Optional[list[DbtTest]] = Field( - None, description="", alias="dbtTests" - ) # relationship - - attributes: "DbtModelColumn.Attributes" = Field( - default_factory=lambda: DbtModelColumn.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class DbtTest(Dbt): - """Description""" - - type_name: str = Field("DbtTest", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "DbtTest": - raise ValueError("must be DbtTest") - return v - - def __setattr__(self, name, value): - if name in DbtTest._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - DBT_TEST_STATUS: ClassVar[KeywordField] = KeywordField( - "dbtTestStatus", "dbtTestStatus" - ) - """ - Details of the results of the test. For errors, it reads "ERROR". - """ - DBT_TEST_STATE: ClassVar[KeywordField] = KeywordField( - "dbtTestState", "dbtTestState" - ) - """ - Test results. Can be one of, in order of severity, "error", "fail", "warn", "pass". - """ - DBT_TEST_ERROR: ClassVar[KeywordField] = KeywordField( - "dbtTestError", "dbtTestError" - ) - """ - Error message in the case of state being "error". - """ - DBT_TEST_RAW_SQL: ClassVar[KeywordTextField] = KeywordTextField( - "dbtTestRawSQL", "dbtTestRawSQL", "dbtTestRawSQL.text" - ) - """ - Raw SQL of the test. - """ - DBT_TEST_COMPILED_SQL: ClassVar[KeywordField] = KeywordField( - "dbtTestCompiledSQL", "dbtTestCompiledSQL" - ) - """ - Compiled SQL of the test. - """ - DBT_TEST_RAW_CODE: ClassVar[KeywordTextField] = KeywordTextField( - "dbtTestRawCode", "dbtTestRawCode", "dbtTestRawCode.text" - ) - """ - Raw code of the test (when the test is defined using Python). - """ - DBT_TEST_COMPILED_CODE: ClassVar[KeywordField] = KeywordField( - "dbtTestCompiledCode", "dbtTestCompiledCode" - ) - """ - Compiled code of the test (when the test is defined using Python). - """ - DBT_TEST_LANGUAGE: ClassVar[KeywordField] = KeywordField( - "dbtTestLanguage", "dbtTestLanguage" - ) - """ - Language in which the test is written, for example: SQL or Python. 
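# Hedged sketch, not part of the patch: consuming the DbtTest convenience properties
# documented above. `test` is assumed to be a DbtTest obtained elsewhere (e.g. from a
# search result); per the docstrings above, dbt_test_state is one of "error", "fail",
# "warn", "pass", and dbt_test_error carries the message when the state is "error".
from typing import Optional

def summarize_dbt_test(test) -> str:
    state: Optional[str] = test.dbt_test_state
    line = f"{test.name or test.guid}: {state or 'unknown'}"
    if state == "error" and test.dbt_test_error:
        line += f" ({test.dbt_test_error})"
    return line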
- """ - - DBT_SOURCES: ClassVar[RelationField] = RelationField("dbtSources") - """ - TBC - """ - SQL_ASSETS: ClassVar[RelationField] = RelationField("sqlAssets") - """ - TBC - """ - DBT_MODELS: ClassVar[RelationField] = RelationField("dbtModels") - """ - TBC - """ - DBT_MODEL_COLUMNS: ClassVar[RelationField] = RelationField("dbtModelColumns") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "dbt_test_status", - "dbt_test_state", - "dbt_test_error", - "dbt_test_raw_s_q_l", - "dbt_test_compiled_s_q_l", - "dbt_test_raw_code", - "dbt_test_compiled_code", - "dbt_test_language", - "dbt_sources", - "sql_assets", - "dbt_models", - "dbt_model_columns", - ] - - @property - def dbt_test_status(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_test_status - - @dbt_test_status.setter - def dbt_test_status(self, dbt_test_status: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_test_status = dbt_test_status - - @property - def dbt_test_state(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_test_state - - @dbt_test_state.setter - def dbt_test_state(self, dbt_test_state: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_test_state = dbt_test_state - - @property - def dbt_test_error(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_test_error - - @dbt_test_error.setter - def dbt_test_error(self, dbt_test_error: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_test_error = dbt_test_error - - @property - def dbt_test_raw_s_q_l(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_test_raw_s_q_l - - @dbt_test_raw_s_q_l.setter - def dbt_test_raw_s_q_l(self, dbt_test_raw_s_q_l: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_test_raw_s_q_l = dbt_test_raw_s_q_l - - @property - def dbt_test_compiled_s_q_l(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.dbt_test_compiled_s_q_l - ) - - @dbt_test_compiled_s_q_l.setter - def dbt_test_compiled_s_q_l(self, dbt_test_compiled_s_q_l: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_test_compiled_s_q_l = dbt_test_compiled_s_q_l - - @property - def dbt_test_raw_code(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_test_raw_code - - @dbt_test_raw_code.setter - def dbt_test_raw_code(self, dbt_test_raw_code: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_test_raw_code = dbt_test_raw_code - - @property - def dbt_test_compiled_code(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.dbt_test_compiled_code - ) - - @dbt_test_compiled_code.setter - def dbt_test_compiled_code(self, dbt_test_compiled_code: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_test_compiled_code = dbt_test_compiled_code - - @property - def dbt_test_language(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_test_language - - @dbt_test_language.setter - def dbt_test_language(self, dbt_test_language: Optional[str]): - if self.attributes is None: - self.attributes = 
self.Attributes() - self.attributes.dbt_test_language = dbt_test_language - - @property - def dbt_sources(self) -> Optional[list[DbtSource]]: - return None if self.attributes is None else self.attributes.dbt_sources - - @dbt_sources.setter - def dbt_sources(self, dbt_sources: Optional[list[DbtSource]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_sources = dbt_sources - - @property - def sql_assets(self) -> Optional[list[SQL]]: - return None if self.attributes is None else self.attributes.sql_assets - - @sql_assets.setter - def sql_assets(self, sql_assets: Optional[list[SQL]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sql_assets = sql_assets - - @property - def dbt_models(self) -> Optional[list[DbtModel]]: - return None if self.attributes is None else self.attributes.dbt_models - - @dbt_models.setter - def dbt_models(self, dbt_models: Optional[list[DbtModel]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_models = dbt_models - - @property - def dbt_model_columns(self) -> Optional[list[DbtModelColumn]]: - return None if self.attributes is None else self.attributes.dbt_model_columns - - @dbt_model_columns.setter - def dbt_model_columns(self, dbt_model_columns: Optional[list[DbtModelColumn]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_model_columns = dbt_model_columns - - class Attributes(Dbt.Attributes): - dbt_test_status: Optional[str] = Field( - None, description="", alias="dbtTestStatus" - ) - dbt_test_state: Optional[str] = Field( - None, description="", alias="dbtTestState" - ) - dbt_test_error: Optional[str] = Field( - None, description="", alias="dbtTestError" - ) - dbt_test_raw_s_q_l: Optional[str] = Field( - None, description="", alias="dbtTestRawSQL" - ) - dbt_test_compiled_s_q_l: Optional[str] = Field( - None, description="", alias="dbtTestCompiledSQL" - ) - dbt_test_raw_code: Optional[str] = Field( - None, description="", alias="dbtTestRawCode" - ) - dbt_test_compiled_code: Optional[str] = Field( - None, description="", alias="dbtTestCompiledCode" - ) - dbt_test_language: Optional[str] = Field( - None, description="", alias="dbtTestLanguage" - ) - dbt_sources: Optional[list[DbtSource]] = Field( - None, description="", alias="dbtSources" - ) # relationship - sql_assets: Optional[list[SQL]] = Field( - None, description="", alias="sqlAssets" - ) # relationship - dbt_models: Optional[list[DbtModel]] = Field( - None, description="", alias="dbtModels" - ) # relationship - dbt_model_columns: Optional[list[DbtModelColumn]] = Field( - None, description="", alias="dbtModelColumns" - ) # relationship - - attributes: "DbtTest.Attributes" = Field( - default_factory=lambda: DbtTest.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class DbtModel(Dbt): - """Description""" - - type_name: str = Field("DbtModel", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "DbtModel": - raise ValueError("must be DbtModel") - return v - - def __setattr__(self, name, value): - if name in DbtModel._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - DBT_STATUS: ClassVar[KeywordField] = KeywordField("dbtStatus", "dbtStatus") - """ - - """ - DBT_ERROR: ClassVar[KeywordField] = KeywordField("dbtError", "dbtError") - """ - - """ - DBT_RAW_SQL: ClassVar[KeywordField] = KeywordField("dbtRawSQL", "dbtRawSQL") - """ - - """ - DBT_COMPILED_SQL: ClassVar[KeywordField] = KeywordField( - "dbtCompiledSQL", "dbtCompiledSQL" - ) - """ - - """ - DBT_STATS: ClassVar[KeywordField] = KeywordField("dbtStats", "dbtStats") - """ - - """ - DBT_MATERIALIZATION_TYPE: ClassVar[KeywordField] = KeywordField( - "dbtMaterializationType", "dbtMaterializationType" - ) - """ - - """ - DBT_MODEL_COMPILE_STARTED_AT: ClassVar[NumericField] = NumericField( - "dbtModelCompileStartedAt", "dbtModelCompileStartedAt" - ) - """ - - """ - DBT_MODEL_COMPILE_COMPLETED_AT: ClassVar[NumericField] = NumericField( - "dbtModelCompileCompletedAt", "dbtModelCompileCompletedAt" - ) - """ - - """ - DBT_MODEL_EXECUTE_STARTED_AT: ClassVar[NumericField] = NumericField( - "dbtModelExecuteStartedAt", "dbtModelExecuteStartedAt" - ) - """ - - """ - DBT_MODEL_EXECUTE_COMPLETED_AT: ClassVar[NumericField] = NumericField( - "dbtModelExecuteCompletedAt", "dbtModelExecuteCompletedAt" - ) - """ - - """ - DBT_MODEL_EXECUTION_TIME: ClassVar[NumericField] = NumericField( - "dbtModelExecutionTime", "dbtModelExecutionTime" - ) - """ - - """ - DBT_MODEL_RUN_GENERATED_AT: ClassVar[NumericField] = NumericField( - "dbtModelRunGeneratedAt", "dbtModelRunGeneratedAt" - ) - """ - - """ - DBT_MODEL_RUN_ELAPSED_TIME: ClassVar[NumericField] = NumericField( - "dbtModelRunElapsedTime", "dbtModelRunElapsedTime" - ) - """ - - """ - - DBT_METRICS: ClassVar[RelationField] = RelationField("dbtMetrics") - """ - TBC - """ - DBT_TESTS: ClassVar[RelationField] = RelationField("dbtTests") - """ - TBC - """ - DBT_MODEL_SQL_ASSETS: ClassVar[RelationField] = RelationField("dbtModelSqlAssets") - """ - TBC - """ - DBT_MODEL_COLUMNS: ClassVar[RelationField] = RelationField("dbtModelColumns") - """ - TBC - """ - SQL_ASSET: ClassVar[RelationField] = RelationField("sqlAsset") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "dbt_status", - "dbt_error", - "dbt_raw_s_q_l", - "dbt_compiled_s_q_l", - "dbt_stats", - "dbt_materialization_type", - "dbt_model_compile_started_at", - "dbt_model_compile_completed_at", - "dbt_model_execute_started_at", - "dbt_model_execute_completed_at", - "dbt_model_execution_time", - "dbt_model_run_generated_at", - "dbt_model_run_elapsed_time", - "dbt_metrics", - "dbt_tests", - "dbt_model_sql_assets", - "dbt_model_columns", - "sql_asset", - ] - - @property - def dbt_status(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_status - - @dbt_status.setter - def dbt_status(self, dbt_status: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_status = dbt_status - - @property - def dbt_error(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_error - - 
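# Hedged sketch, not part of the patch: deriving a duration from the DbtModel timing
# properties listed in _convenience_properties above (compile started/completed at are
# exposed as Optional[datetime]). `model` is assumed to be a populated DbtModel.
from datetime import datetime
from typing import Optional

def compile_duration_seconds(model) -> Optional[float]:
    start: Optional[datetime] = model.dbt_model_compile_started_at
    end: Optional[datetime] = model.dbt_model_compile_completed_at
    return (end - start).total_seconds() if start and end else None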
@dbt_error.setter - def dbt_error(self, dbt_error: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_error = dbt_error - - @property - def dbt_raw_s_q_l(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_raw_s_q_l - - @dbt_raw_s_q_l.setter - def dbt_raw_s_q_l(self, dbt_raw_s_q_l: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_raw_s_q_l = dbt_raw_s_q_l - - @property - def dbt_compiled_s_q_l(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_compiled_s_q_l - - @dbt_compiled_s_q_l.setter - def dbt_compiled_s_q_l(self, dbt_compiled_s_q_l: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_compiled_s_q_l = dbt_compiled_s_q_l - - @property - def dbt_stats(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_stats - - @dbt_stats.setter - def dbt_stats(self, dbt_stats: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_stats = dbt_stats - - @property - def dbt_materialization_type(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.dbt_materialization_type - ) - - @dbt_materialization_type.setter - def dbt_materialization_type(self, dbt_materialization_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_materialization_type = dbt_materialization_type - - @property - def dbt_model_compile_started_at(self) -> Optional[datetime]: - return ( - None - if self.attributes is None - else self.attributes.dbt_model_compile_started_at - ) - - @dbt_model_compile_started_at.setter - def dbt_model_compile_started_at( - self, dbt_model_compile_started_at: Optional[datetime] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_model_compile_started_at = dbt_model_compile_started_at - - @property - def dbt_model_compile_completed_at(self) -> Optional[datetime]: - return ( - None - if self.attributes is None - else self.attributes.dbt_model_compile_completed_at - ) - - @dbt_model_compile_completed_at.setter - def dbt_model_compile_completed_at( - self, dbt_model_compile_completed_at: Optional[datetime] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_model_compile_completed_at = dbt_model_compile_completed_at - - @property - def dbt_model_execute_started_at(self) -> Optional[datetime]: - return ( - None - if self.attributes is None - else self.attributes.dbt_model_execute_started_at - ) - - @dbt_model_execute_started_at.setter - def dbt_model_execute_started_at( - self, dbt_model_execute_started_at: Optional[datetime] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_model_execute_started_at = dbt_model_execute_started_at - - @property - def dbt_model_execute_completed_at(self) -> Optional[datetime]: - return ( - None - if self.attributes is None - else self.attributes.dbt_model_execute_completed_at - ) - - @dbt_model_execute_completed_at.setter - def dbt_model_execute_completed_at( - self, dbt_model_execute_completed_at: Optional[datetime] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_model_execute_completed_at = dbt_model_execute_completed_at - - @property - def dbt_model_execution_time(self) -> 
Optional[float]: - return ( - None - if self.attributes is None - else self.attributes.dbt_model_execution_time - ) - - @dbt_model_execution_time.setter - def dbt_model_execution_time(self, dbt_model_execution_time: Optional[float]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_model_execution_time = dbt_model_execution_time - - @property - def dbt_model_run_generated_at(self) -> Optional[datetime]: - return ( - None - if self.attributes is None - else self.attributes.dbt_model_run_generated_at - ) - - @dbt_model_run_generated_at.setter - def dbt_model_run_generated_at( - self, dbt_model_run_generated_at: Optional[datetime] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_model_run_generated_at = dbt_model_run_generated_at - - @property - def dbt_model_run_elapsed_time(self) -> Optional[float]: - return ( - None - if self.attributes is None - else self.attributes.dbt_model_run_elapsed_time - ) - - @dbt_model_run_elapsed_time.setter - def dbt_model_run_elapsed_time(self, dbt_model_run_elapsed_time: Optional[float]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_model_run_elapsed_time = dbt_model_run_elapsed_time - - @property - def dbt_metrics(self) -> Optional[list[DbtMetric]]: - return None if self.attributes is None else self.attributes.dbt_metrics - - @dbt_metrics.setter - def dbt_metrics(self, dbt_metrics: Optional[list[DbtMetric]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_metrics = dbt_metrics - - @property - def dbt_tests(self) -> Optional[list[DbtTest]]: - return None if self.attributes is None else self.attributes.dbt_tests - - @dbt_tests.setter - def dbt_tests(self, dbt_tests: Optional[list[DbtTest]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_tests = dbt_tests - - @property - def dbt_model_sql_assets(self) -> Optional[list[SQL]]: - return None if self.attributes is None else self.attributes.dbt_model_sql_assets - - @dbt_model_sql_assets.setter - def dbt_model_sql_assets(self, dbt_model_sql_assets: Optional[list[SQL]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_model_sql_assets = dbt_model_sql_assets - - @property - def dbt_model_columns(self) -> Optional[list[DbtModelColumn]]: - return None if self.attributes is None else self.attributes.dbt_model_columns - - @dbt_model_columns.setter - def dbt_model_columns(self, dbt_model_columns: Optional[list[DbtModelColumn]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_model_columns = dbt_model_columns - - @property - def sql_asset(self) -> Optional[SQL]: - return None if self.attributes is None else self.attributes.sql_asset - - @sql_asset.setter - def sql_asset(self, sql_asset: Optional[SQL]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sql_asset = sql_asset - - class Attributes(Dbt.Attributes): - dbt_status: Optional[str] = Field(None, description="", alias="dbtStatus") - dbt_error: Optional[str] = Field(None, description="", alias="dbtError") - dbt_raw_s_q_l: Optional[str] = Field(None, description="", alias="dbtRawSQL") - dbt_compiled_s_q_l: Optional[str] = Field( - None, description="", alias="dbtCompiledSQL" - ) - dbt_stats: Optional[str] = Field(None, description="", alias="dbtStats") - dbt_materialization_type: Optional[str] = Field( - None, description="", 
alias="dbtMaterializationType" - ) - dbt_model_compile_started_at: Optional[datetime] = Field( - None, description="", alias="dbtModelCompileStartedAt" - ) - dbt_model_compile_completed_at: Optional[datetime] = Field( - None, description="", alias="dbtModelCompileCompletedAt" - ) - dbt_model_execute_started_at: Optional[datetime] = Field( - None, description="", alias="dbtModelExecuteStartedAt" - ) - dbt_model_execute_completed_at: Optional[datetime] = Field( - None, description="", alias="dbtModelExecuteCompletedAt" - ) - dbt_model_execution_time: Optional[float] = Field( - None, description="", alias="dbtModelExecutionTime" - ) - dbt_model_run_generated_at: Optional[datetime] = Field( - None, description="", alias="dbtModelRunGeneratedAt" - ) - dbt_model_run_elapsed_time: Optional[float] = Field( - None, description="", alias="dbtModelRunElapsedTime" - ) - dbt_metrics: Optional[list[DbtMetric]] = Field( - None, description="", alias="dbtMetrics" - ) # relationship - dbt_tests: Optional[list[DbtTest]] = Field( - None, description="", alias="dbtTests" - ) # relationship - dbt_model_sql_assets: Optional[list[SQL]] = Field( - None, description="", alias="dbtModelSqlAssets" - ) # relationship - dbt_model_columns: Optional[list[DbtModelColumn]] = Field( - None, description="", alias="dbtModelColumns" - ) # relationship - sql_asset: Optional[SQL] = Field( - None, description="", alias="sqlAsset" - ) # relationship - - attributes: "DbtModel.Attributes" = Field( - default_factory=lambda: DbtModel.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class DbtMetric(Dbt): - """Description""" - - type_name: str = Field("DbtMetric", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "DbtMetric": - raise ValueError("must be DbtMetric") - return v - - def __setattr__(self, name, value): - if name in DbtMetric._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - DBT_METRIC_FILTERS: ClassVar[KeywordField] = KeywordField( - "dbtMetricFilters", "dbtMetricFilters" - ) - """ - - """ - DBT_ALIAS: ClassVar[KeywordTextField] = KeywordTextField( - "dbtAlias", "dbtAlias.keyword", "dbtAlias" - ) - """ - - """ - DBT_META: ClassVar[KeywordField] = KeywordField("dbtMeta", "dbtMeta") - """ - - """ - DBT_UNIQUE_ID: ClassVar[KeywordTextField] = KeywordTextField( - "dbtUniqueId", "dbtUniqueId.keyword", "dbtUniqueId" - ) - """ - - """ - DBT_ACCOUNT_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "dbtAccountName", "dbtAccountName.keyword", "dbtAccountName" - ) - """ - - """ - DBT_PROJECT_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "dbtProjectName", "dbtProjectName.keyword", "dbtProjectName" - ) - """ - - """ - DBT_PACKAGE_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "dbtPackageName", "dbtPackageName.keyword", "dbtPackageName" - ) - """ - - """ - DBT_JOB_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "dbtJobName", "dbtJobName.keyword", "dbtJobName" - ) - """ - - """ - DBT_JOB_SCHEDULE: ClassVar[KeywordField] = KeywordField( - "dbtJobSchedule", "dbtJobSchedule" - ) - """ - - """ - DBT_JOB_STATUS: ClassVar[KeywordField] = KeywordField( - "dbtJobStatus", "dbtJobStatus" - ) - """ - - """ - DBT_JOB_SCHEDULE_CRON_HUMANIZED: ClassVar[KeywordTextField] = KeywordTextField( - "dbtJobScheduleCronHumanized", - "dbtJobScheduleCronHumanized.keyword", - 
"dbtJobScheduleCronHumanized", - ) - """ - - """ - DBT_JOB_LAST_RUN: ClassVar[NumericField] = NumericField( - "dbtJobLastRun", "dbtJobLastRun" - ) - """ - - """ - DBT_JOB_NEXT_RUN: ClassVar[NumericField] = NumericField( - "dbtJobNextRun", "dbtJobNextRun" - ) - """ - - """ - DBT_JOB_NEXT_RUN_HUMANIZED: ClassVar[KeywordTextField] = KeywordTextField( - "dbtJobNextRunHumanized", - "dbtJobNextRunHumanized.keyword", - "dbtJobNextRunHumanized", - ) - """ - - """ - DBT_ENVIRONMENT_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "dbtEnvironmentName", "dbtEnvironmentName.keyword", "dbtEnvironmentName" - ) - """ - - """ - DBT_ENVIRONMENT_DBT_VERSION: ClassVar[KeywordTextField] = KeywordTextField( - "dbtEnvironmentDbtVersion", - "dbtEnvironmentDbtVersion.keyword", - "dbtEnvironmentDbtVersion", - ) - """ - - """ - DBT_TAGS: ClassVar[KeywordField] = KeywordField("dbtTags", "dbtTags") - """ - - """ - DBT_CONNECTION_CONTEXT: ClassVar[KeywordField] = KeywordField( - "dbtConnectionContext", "dbtConnectionContext" - ) - """ - - """ - DBT_SEMANTIC_LAYER_PROXY_URL: ClassVar[KeywordField] = KeywordField( - "dbtSemanticLayerProxyUrl", "dbtSemanticLayerProxyUrl" - ) - """ - - """ - METRIC_TYPE: ClassVar[KeywordField] = KeywordField("metricType", "metricType") - """ - Type of the metric. - """ - METRIC_SQL: ClassVar[KeywordField] = KeywordField("metricSQL", "metricSQL") - """ - SQL query used to compute the metric. - """ - METRIC_FILTERS: ClassVar[TextField] = TextField("metricFilters", "metricFilters") - """ - Filters to be applied to the metric query. - """ - METRIC_TIME_GRAINS: ClassVar[TextField] = TextField( - "metricTimeGrains", "metricTimeGrains" - ) - """ - List of time grains to be applied to the metric query. - """ - - METRIC_TIMESTAMP_COLUMN: ClassVar[RelationField] = RelationField( - "metricTimestampColumn" - ) - """ - TBC - """ - DBT_MODEL: ClassVar[RelationField] = RelationField("dbtModel") - """ - TBC - """ - ASSETS: ClassVar[RelationField] = RelationField("assets") - """ - TBC - """ - METRIC_DIMENSION_COLUMNS: ClassVar[RelationField] = RelationField( - "metricDimensionColumns" - ) - """ - TBC - """ - DBT_METRIC_FILTER_COLUMNS: ClassVar[RelationField] = RelationField( - "dbtMetricFilterColumns" - ) - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "dbt_metric_filters", - "dbt_alias", - "dbt_meta", - "dbt_unique_id", - "dbt_account_name", - "dbt_project_name", - "dbt_package_name", - "dbt_job_name", - "dbt_job_schedule", - "dbt_job_status", - "dbt_job_schedule_cron_humanized", - "dbt_job_last_run", - "dbt_job_next_run", - "dbt_job_next_run_humanized", - "dbt_environment_name", - "dbt_environment_dbt_version", - "dbt_tags", - "dbt_connection_context", - "dbt_semantic_layer_proxy_url", - "metric_type", - "metric_s_q_l", - "metric_filters", - "metric_time_grains", - "metric_timestamp_column", - "dbt_model", - "assets", - "metric_dimension_columns", - "dbt_metric_filter_columns", - ] - - @property - def dbt_metric_filters(self) -> Optional[list[DbtMetricFilter]]: - return None if self.attributes is None else self.attributes.dbt_metric_filters - - @dbt_metric_filters.setter - def dbt_metric_filters(self, dbt_metric_filters: Optional[list[DbtMetricFilter]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_metric_filters = dbt_metric_filters - - @property - def dbt_alias(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_alias - - @dbt_alias.setter - def dbt_alias(self, dbt_alias: Optional[str]): - 
if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_alias = dbt_alias - - @property - def dbt_meta(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_meta - - @dbt_meta.setter - def dbt_meta(self, dbt_meta: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_meta = dbt_meta - - @property - def dbt_unique_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_unique_id - - @dbt_unique_id.setter - def dbt_unique_id(self, dbt_unique_id: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_unique_id = dbt_unique_id - - @property - def dbt_account_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_account_name - - @dbt_account_name.setter - def dbt_account_name(self, dbt_account_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_account_name = dbt_account_name - - @property - def dbt_project_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_project_name - - @dbt_project_name.setter - def dbt_project_name(self, dbt_project_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_project_name = dbt_project_name - - @property - def dbt_package_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_package_name - - @dbt_package_name.setter - def dbt_package_name(self, dbt_package_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_package_name = dbt_package_name - - @property - def dbt_job_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_job_name - - @dbt_job_name.setter - def dbt_job_name(self, dbt_job_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_job_name = dbt_job_name - - @property - def dbt_job_schedule(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_job_schedule - - @dbt_job_schedule.setter - def dbt_job_schedule(self, dbt_job_schedule: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_job_schedule = dbt_job_schedule - - @property - def dbt_job_status(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_job_status - - @dbt_job_status.setter - def dbt_job_status(self, dbt_job_status: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_job_status = dbt_job_status - - @property - def dbt_job_schedule_cron_humanized(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.dbt_job_schedule_cron_humanized - ) - - @dbt_job_schedule_cron_humanized.setter - def dbt_job_schedule_cron_humanized( - self, dbt_job_schedule_cron_humanized: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_job_schedule_cron_humanized = ( - dbt_job_schedule_cron_humanized - ) - - @property - def dbt_job_last_run(self) -> Optional[datetime]: - return None if self.attributes is None else self.attributes.dbt_job_last_run - - @dbt_job_last_run.setter - def dbt_job_last_run(self, dbt_job_last_run: Optional[datetime]): - if 
self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_job_last_run = dbt_job_last_run - - @property - def dbt_job_next_run(self) -> Optional[datetime]: - return None if self.attributes is None else self.attributes.dbt_job_next_run - - @dbt_job_next_run.setter - def dbt_job_next_run(self, dbt_job_next_run: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_job_next_run = dbt_job_next_run - - @property - def dbt_job_next_run_humanized(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.dbt_job_next_run_humanized - ) - - @dbt_job_next_run_humanized.setter - def dbt_job_next_run_humanized(self, dbt_job_next_run_humanized: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_job_next_run_humanized = dbt_job_next_run_humanized - - @property - def dbt_environment_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_environment_name - - @dbt_environment_name.setter - def dbt_environment_name(self, dbt_environment_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_environment_name = dbt_environment_name - - @property - def dbt_environment_dbt_version(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.dbt_environment_dbt_version - ) - - @dbt_environment_dbt_version.setter - def dbt_environment_dbt_version(self, dbt_environment_dbt_version: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_environment_dbt_version = dbt_environment_dbt_version - - @property - def dbt_tags(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.dbt_tags - - @dbt_tags.setter - def dbt_tags(self, dbt_tags: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_tags = dbt_tags - - @property - def dbt_connection_context(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.dbt_connection_context - ) - - @dbt_connection_context.setter - def dbt_connection_context(self, dbt_connection_context: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_connection_context = dbt_connection_context - - @property - def dbt_semantic_layer_proxy_url(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.dbt_semantic_layer_proxy_url - ) - - @dbt_semantic_layer_proxy_url.setter - def dbt_semantic_layer_proxy_url(self, dbt_semantic_layer_proxy_url: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_semantic_layer_proxy_url = dbt_semantic_layer_proxy_url - - @property - def metric_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.metric_type - - @metric_type.setter - def metric_type(self, metric_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.metric_type = metric_type - - @property - def metric_s_q_l(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.metric_s_q_l - - @metric_s_q_l.setter - def metric_s_q_l(self, metric_s_q_l: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.metric_s_q_l = metric_s_q_l - - 
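# Hedged sketch, not part of the patch: the nested Attributes models above declare
# camelCase aliases, so API payload keys round-trip through pydantic v1's by_alias
# serialization. Assumes DbtMetric.Attributes keeps the "metricSQL" alias shown in
# this diff; the SQL string is illustrative only.
from pyatlan.model.assets import DbtMetric

attrs = DbtMetric.Attributes(metricSQL="select count(*) from orders")  # populate via alias
print(attrs.metric_s_q_l)                             # -> "select count(*) from orders"
print(attrs.json(by_alias=True, exclude_none=True))   # emits the "metricSQL" key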
@property - def metric_filters(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.metric_filters - - @metric_filters.setter - def metric_filters(self, metric_filters: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.metric_filters = metric_filters - - @property - def metric_time_grains(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.metric_time_grains - - @metric_time_grains.setter - def metric_time_grains(self, metric_time_grains: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.metric_time_grains = metric_time_grains - - @property - def metric_timestamp_column(self) -> Optional[Column]: - return ( - None if self.attributes is None else self.attributes.metric_timestamp_column - ) - - @metric_timestamp_column.setter - def metric_timestamp_column(self, metric_timestamp_column: Optional[Column]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.metric_timestamp_column = metric_timestamp_column - - @property - def dbt_model(self) -> Optional[DbtModel]: - return None if self.attributes is None else self.attributes.dbt_model - - @dbt_model.setter - def dbt_model(self, dbt_model: Optional[DbtModel]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_model = dbt_model - - @property - def assets(self) -> Optional[list[Asset]]: - return None if self.attributes is None else self.attributes.assets - - @assets.setter - def assets(self, assets: Optional[list[Asset]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.assets = assets - - @property - def metric_dimension_columns(self) -> Optional[list[Column]]: - return ( - None - if self.attributes is None - else self.attributes.metric_dimension_columns - ) - - @metric_dimension_columns.setter - def metric_dimension_columns( - self, metric_dimension_columns: Optional[list[Column]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.metric_dimension_columns = metric_dimension_columns - - @property - def dbt_metric_filter_columns(self) -> Optional[list[Column]]: - return ( - None - if self.attributes is None - else self.attributes.dbt_metric_filter_columns - ) - - @dbt_metric_filter_columns.setter - def dbt_metric_filter_columns( - self, dbt_metric_filter_columns: Optional[list[Column]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_metric_filter_columns = dbt_metric_filter_columns - - class Attributes(Dbt.Attributes): - dbt_metric_filters: Optional[list[DbtMetricFilter]] = Field( - None, description="", alias="dbtMetricFilters" - ) - dbt_alias: Optional[str] = Field(None, description="", alias="dbtAlias") - dbt_meta: Optional[str] = Field(None, description="", alias="dbtMeta") - dbt_unique_id: Optional[str] = Field(None, description="", alias="dbtUniqueId") - dbt_account_name: Optional[str] = Field( - None, description="", alias="dbtAccountName" - ) - dbt_project_name: Optional[str] = Field( - None, description="", alias="dbtProjectName" - ) - dbt_package_name: Optional[str] = Field( - None, description="", alias="dbtPackageName" - ) - dbt_job_name: Optional[str] = Field(None, description="", alias="dbtJobName") - dbt_job_schedule: Optional[str] = Field( - None, description="", alias="dbtJobSchedule" - ) - dbt_job_status: Optional[str] = Field( - None, description="", 
alias="dbtJobStatus" - ) - dbt_job_schedule_cron_humanized: Optional[str] = Field( - None, description="", alias="dbtJobScheduleCronHumanized" - ) - dbt_job_last_run: Optional[datetime] = Field( - None, description="", alias="dbtJobLastRun" - ) - dbt_job_next_run: Optional[datetime] = Field( - None, description="", alias="dbtJobNextRun" - ) - dbt_job_next_run_humanized: Optional[str] = Field( - None, description="", alias="dbtJobNextRunHumanized" - ) - dbt_environment_name: Optional[str] = Field( - None, description="", alias="dbtEnvironmentName" - ) - dbt_environment_dbt_version: Optional[str] = Field( - None, description="", alias="dbtEnvironmentDbtVersion" - ) - dbt_tags: Optional[set[str]] = Field(None, description="", alias="dbtTags") - dbt_connection_context: Optional[str] = Field( - None, description="", alias="dbtConnectionContext" - ) - dbt_semantic_layer_proxy_url: Optional[str] = Field( - None, description="", alias="dbtSemanticLayerProxyUrl" - ) - metric_type: Optional[str] = Field(None, description="", alias="metricType") - metric_s_q_l: Optional[str] = Field(None, description="", alias="metricSQL") - metric_filters: Optional[str] = Field( - None, description="", alias="metricFilters" - ) - metric_time_grains: Optional[set[str]] = Field( - None, description="", alias="metricTimeGrains" - ) - metric_timestamp_column: Optional[Column] = Field( - None, description="", alias="metricTimestampColumn" - ) # relationship - dbt_model: Optional[DbtModel] = Field( - None, description="", alias="dbtModel" - ) # relationship - assets: Optional[list[Asset]] = Field( - None, description="", alias="assets" - ) # relationship - metric_dimension_columns: Optional[list[Column]] = Field( - None, description="", alias="metricDimensionColumns" - ) # relationship - dbt_metric_filter_columns: Optional[list[Column]] = Field( - None, description="", alias="dbtMetricFilterColumns" - ) # relationship - - attributes: "DbtMetric.Attributes" = Field( - default_factory=lambda: DbtMetric.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class DbtSource(Dbt): - """Description""" - - type_name: str = Field("DbtSource", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "DbtSource": - raise ValueError("must be DbtSource") - return v - - def __setattr__(self, name, value): - if name in DbtSource._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - DBT_STATE: ClassVar[KeywordField] = KeywordField("dbtState", "dbtState") - """ - - """ - DBT_FRESHNESS_CRITERIA: ClassVar[KeywordField] = KeywordField( - "dbtFreshnessCriteria", "dbtFreshnessCriteria" - ) - """ - - """ - - SQL_ASSETS: ClassVar[RelationField] = RelationField("sqlAssets") - """ - TBC - """ - DBT_TESTS: ClassVar[RelationField] = RelationField("dbtTests") - """ - TBC - """ - SQL_ASSET: ClassVar[RelationField] = RelationField("sqlAsset") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "dbt_state", - "dbt_freshness_criteria", - "sql_assets", - "dbt_tests", - "sql_asset", - ] - - @property - def dbt_state(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dbt_state - - @dbt_state.setter - def dbt_state(self, dbt_state: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_state = dbt_state - - @property - def dbt_freshness_criteria(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.dbt_freshness_criteria - ) - - @dbt_freshness_criteria.setter - def dbt_freshness_criteria(self, dbt_freshness_criteria: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_freshness_criteria = dbt_freshness_criteria - - @property - def sql_assets(self) -> Optional[list[SQL]]: - return None if self.attributes is None else self.attributes.sql_assets - - @sql_assets.setter - def sql_assets(self, sql_assets: Optional[list[SQL]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sql_assets = sql_assets - - @property - def dbt_tests(self) -> Optional[list[DbtTest]]: - return None if self.attributes is None else self.attributes.dbt_tests - - @dbt_tests.setter - def dbt_tests(self, dbt_tests: Optional[list[DbtTest]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dbt_tests = dbt_tests - - @property - def sql_asset(self) -> Optional[SQL]: - return None if self.attributes is None else self.attributes.sql_asset - - @sql_asset.setter - def sql_asset(self, sql_asset: Optional[SQL]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sql_asset = sql_asset - - class Attributes(Dbt.Attributes): - dbt_state: Optional[str] = Field(None, description="", alias="dbtState") - dbt_freshness_criteria: Optional[str] = Field( - None, description="", alias="dbtFreshnessCriteria" - ) - sql_assets: Optional[list[SQL]] = Field( - None, description="", alias="sqlAssets" - ) # relationship - dbt_tests: Optional[list[DbtTest]] = Field( - None, description="", alias="dbtTests" - ) # relationship - sql_asset: Optional[SQL] = Field( - None, description="", alias="sqlAsset" - ) # relationship - - attributes: "DbtSource.Attributes" = Field( - default_factory=lambda: DbtSource.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class SchemaRegistry(Catalog): - """Description""" - - type_name: str = Field("SchemaRegistry", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "SchemaRegistry": - raise ValueError("must be SchemaRegistry") - return v - - def __setattr__(self, name, value): - if name in SchemaRegistry._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SCHEMA_REGISTRY_SCHEMA_TYPE: ClassVar[KeywordField] = KeywordField( - "schemaRegistrySchemaType", "schemaRegistrySchemaType" - ) - """ - Type of language or specification used to define the schema, for example: JSON, Protobuf, etc. - """ - SCHEMA_REGISTRY_SCHEMA_ID: ClassVar[KeywordField] = KeywordField( - "schemaRegistrySchemaId", "schemaRegistrySchemaId" - ) - """ - Unique identifier for schema definition set by the schema registry. - """ - - _convenience_properties: ClassVar[list[str]] = [ - "schema_registry_schema_type", - "schema_registry_schema_id", - ] - - @property - def schema_registry_schema_type(self) -> Optional[SchemaRegistrySchemaType]: - return ( - None - if self.attributes is None - else self.attributes.schema_registry_schema_type - ) - - @schema_registry_schema_type.setter - def schema_registry_schema_type( - self, schema_registry_schema_type: Optional[SchemaRegistrySchemaType] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.schema_registry_schema_type = schema_registry_schema_type - - @property - def schema_registry_schema_id(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.schema_registry_schema_id - ) - - @schema_registry_schema_id.setter - def schema_registry_schema_id(self, schema_registry_schema_id: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.schema_registry_schema_id = schema_registry_schema_id - - class Attributes(Catalog.Attributes): - schema_registry_schema_type: Optional[SchemaRegistrySchemaType] = Field( - None, description="", alias="schemaRegistrySchemaType" - ) - schema_registry_schema_id: Optional[str] = Field( - None, description="", alias="schemaRegistrySchemaId" - ) - - attributes: "SchemaRegistry.Attributes" = Field( - default_factory=lambda: SchemaRegistry.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class SchemaRegistrySubject(SchemaRegistry): - """Description""" - - type_name: str = Field("SchemaRegistrySubject", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "SchemaRegistrySubject": - raise ValueError("must be SchemaRegistrySubject") - return v - - def __setattr__(self, name, value): - if name in SchemaRegistrySubject._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SCHEMA_REGISTRY_SUBJECT_BASE_NAME: ClassVar[KeywordField] = KeywordField( - "schemaRegistrySubjectBaseName", "schemaRegistrySubjectBaseName" - ) - """ - Base name of the subject, without -key, -value prefixes. 
- """ - SCHEMA_REGISTRY_SUBJECT_IS_KEY_SCHEMA: ClassVar[BooleanField] = BooleanField( - "schemaRegistrySubjectIsKeySchema", "schemaRegistrySubjectIsKeySchema" - ) - """ - Whether the subject is a schema for the keys of the messages (true) or not (false). - """ - SCHEMA_REGISTRY_SUBJECT_SCHEMA_COMPATIBILITY: ClassVar[KeywordField] = KeywordField( - "schemaRegistrySubjectSchemaCompatibility", - "schemaRegistrySubjectSchemaCompatibility", - ) - """ - Compatibility of the schema across versions. - """ - SCHEMA_REGISTRY_SUBJECT_LATEST_SCHEMA_VERSION: ClassVar[ - KeywordField - ] = KeywordField( - "schemaRegistrySubjectLatestSchemaVersion", - "schemaRegistrySubjectLatestSchemaVersion", - ) - """ - Latest schema version of the subject. - """ - SCHEMA_REGISTRY_SUBJECT_LATEST_SCHEMA_DEFINITION: ClassVar[TextField] = TextField( - "schemaRegistrySubjectLatestSchemaDefinition", - "schemaRegistrySubjectLatestSchemaDefinition", - ) - """ - Definition of the latest schema in the subject. - """ - SCHEMA_REGISTRY_SUBJECT_GOVERNING_ASSET_QUALIFIED_NAMES: ClassVar[ - KeywordField - ] = KeywordField( - "schemaRegistrySubjectGoverningAssetQualifiedNames", - "schemaRegistrySubjectGoverningAssetQualifiedNames", - ) - """ - List of asset qualified names that this subject is governing/validating. - """ - - ASSETS: ClassVar[RelationField] = RelationField("assets") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "schema_registry_subject_base_name", - "schema_registry_subject_is_key_schema", - "schema_registry_subject_schema_compatibility", - "schema_registry_subject_latest_schema_version", - "schema_registry_subject_latest_schema_definition", - "schema_registry_subject_governing_asset_qualified_names", - "assets", - ] - - @property - def schema_registry_subject_base_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.schema_registry_subject_base_name - ) - - @schema_registry_subject_base_name.setter - def schema_registry_subject_base_name( - self, schema_registry_subject_base_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.schema_registry_subject_base_name = ( - schema_registry_subject_base_name - ) - - @property - def schema_registry_subject_is_key_schema(self) -> Optional[bool]: - return ( - None - if self.attributes is None - else self.attributes.schema_registry_subject_is_key_schema - ) - - @schema_registry_subject_is_key_schema.setter - def schema_registry_subject_is_key_schema( - self, schema_registry_subject_is_key_schema: Optional[bool] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.schema_registry_subject_is_key_schema = ( - schema_registry_subject_is_key_schema - ) - - @property - def schema_registry_subject_schema_compatibility( - self, - ) -> Optional[SchemaRegistrySchemaCompatibility]: - return ( - None - if self.attributes is None - else self.attributes.schema_registry_subject_schema_compatibility - ) - - @schema_registry_subject_schema_compatibility.setter - def schema_registry_subject_schema_compatibility( - self, - schema_registry_subject_schema_compatibility: Optional[ - SchemaRegistrySchemaCompatibility - ], - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.schema_registry_subject_schema_compatibility = ( - schema_registry_subject_schema_compatibility - ) - - @property - def schema_registry_subject_latest_schema_version(self) -> Optional[str]: - return ( - None - if self.attributes 
is None - else self.attributes.schema_registry_subject_latest_schema_version - ) - - @schema_registry_subject_latest_schema_version.setter - def schema_registry_subject_latest_schema_version( - self, schema_registry_subject_latest_schema_version: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.schema_registry_subject_latest_schema_version = ( - schema_registry_subject_latest_schema_version - ) - - @property - def schema_registry_subject_latest_schema_definition(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.schema_registry_subject_latest_schema_definition - ) - - @schema_registry_subject_latest_schema_definition.setter - def schema_registry_subject_latest_schema_definition( - self, schema_registry_subject_latest_schema_definition: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.schema_registry_subject_latest_schema_definition = ( - schema_registry_subject_latest_schema_definition - ) - - @property - def schema_registry_subject_governing_asset_qualified_names( - self, - ) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.schema_registry_subject_governing_asset_qualified_names - ) - - @schema_registry_subject_governing_asset_qualified_names.setter - def schema_registry_subject_governing_asset_qualified_names( - self, - schema_registry_subject_governing_asset_qualified_names: Optional[set[str]], - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.schema_registry_subject_governing_asset_qualified_names = ( - schema_registry_subject_governing_asset_qualified_names - ) - - @property - def assets(self) -> Optional[list[Asset]]: - return None if self.attributes is None else self.attributes.assets - - @assets.setter - def assets(self, assets: Optional[list[Asset]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.assets = assets - - class Attributes(SchemaRegistry.Attributes): - schema_registry_subject_base_name: Optional[str] = Field( - None, description="", alias="schemaRegistrySubjectBaseName" - ) - schema_registry_subject_is_key_schema: Optional[bool] = Field( - None, description="", alias="schemaRegistrySubjectIsKeySchema" - ) - schema_registry_subject_schema_compatibility: Optional[ - SchemaRegistrySchemaCompatibility - ] = Field( - None, description="", alias="schemaRegistrySubjectSchemaCompatibility" - ) - schema_registry_subject_latest_schema_version: Optional[str] = Field( - None, description="", alias="schemaRegistrySubjectLatestSchemaVersion" - ) - schema_registry_subject_latest_schema_definition: Optional[str] = Field( - None, description="", alias="schemaRegistrySubjectLatestSchemaDefinition" - ) - schema_registry_subject_governing_asset_qualified_names: Optional[ - set[str] - ] = Field( - None, - description="", - alias="schemaRegistrySubjectGoverningAssetQualifiedNames", - ) - assets: Optional[list[Asset]] = Field( - None, description="", alias="assets" - ) # relationship - - attributes: "SchemaRegistrySubject.Attributes" = Field( - default_factory=lambda: SchemaRegistrySubject.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class MonteCarlo(DataQuality): - """Description""" - - type_name: str = Field("MonteCarlo", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "MonteCarlo": - raise ValueError("must be MonteCarlo") - return v - - def __setattr__(self, name, value): - if name in MonteCarlo._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - MC_LABELS: ClassVar[KeywordField] = KeywordField("mcLabels", "mcLabels") - """ - List of labels for this Monte Carlo asset. - """ - MC_ASSET_QUALIFIED_NAMES: ClassVar[KeywordField] = KeywordField( - "mcAssetQualifiedNames", "mcAssetQualifiedNames" - ) - """ - List of unique names of assets that are part of this Monte Carlo asset. - """ - - _convenience_properties: ClassVar[list[str]] = [ - "mc_labels", - "mc_asset_qualified_names", - ] - - @property - def mc_labels(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.mc_labels - - @mc_labels.setter - def mc_labels(self, mc_labels: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mc_labels = mc_labels - - @property - def mc_asset_qualified_names(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.mc_asset_qualified_names - ) - - @mc_asset_qualified_names.setter - def mc_asset_qualified_names(self, mc_asset_qualified_names: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mc_asset_qualified_names = mc_asset_qualified_names - - class Attributes(DataQuality.Attributes): - mc_labels: Optional[set[str]] = Field(None, description="", alias="mcLabels") - mc_asset_qualified_names: Optional[set[str]] = Field( - None, description="", alias="mcAssetQualifiedNames" - ) - - attributes: "MonteCarlo.Attributes" = Field( - default_factory=lambda: MonteCarlo.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class MCIncident(MonteCarlo): - """Description""" - - type_name: str = Field("MCIncident", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "MCIncident": - raise ValueError("must be MCIncident") - return v - - def __setattr__(self, name, value): - if name in MCIncident._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - MC_INCIDENT_ID: ClassVar[KeywordField] = KeywordField( - "mcIncidentId", "mcIncidentId" - ) - """ - Identifier of this incident, from Monte Carlo. - """ - MC_INCIDENT_TYPE: ClassVar[KeywordField] = KeywordField( - "mcIncidentType", "mcIncidentType" - ) - """ - Type of this incident. - """ - MC_INCIDENT_SUB_TYPES: ClassVar[KeywordField] = KeywordField( - "mcIncidentSubTypes", "mcIncidentSubTypes" - ) - """ - Subtypes of this incident. - """ - MC_INCIDENT_SEVERITY: ClassVar[KeywordField] = KeywordField( - "mcIncidentSeverity", "mcIncidentSeverity" - ) - """ - Severity of this incident. - """ - MC_INCIDENT_STATE: ClassVar[KeywordField] = KeywordField( - "mcIncidentState", "mcIncidentState" - ) - """ - State of this incident. 
- """ - MC_INCIDENT_WAREHOUSE: ClassVar[KeywordField] = KeywordField( - "mcIncidentWarehouse", "mcIncidentWarehouse" - ) - """ - Name of this incident's warehouse. - """ - - MC_MONITOR: ClassVar[RelationField] = RelationField("mcMonitor") - """ - TBC - """ - MC_INCIDENT_ASSETS: ClassVar[RelationField] = RelationField("mcIncidentAssets") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "mc_incident_id", - "mc_incident_type", - "mc_incident_sub_types", - "mc_incident_severity", - "mc_incident_state", - "mc_incident_warehouse", - "mc_monitor", - "mc_incident_assets", - ] - - @property - def mc_incident_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.mc_incident_id - - @mc_incident_id.setter - def mc_incident_id(self, mc_incident_id: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mc_incident_id = mc_incident_id - - @property - def mc_incident_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.mc_incident_type - - @mc_incident_type.setter - def mc_incident_type(self, mc_incident_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mc_incident_type = mc_incident_type - - @property - def mc_incident_sub_types(self) -> Optional[set[str]]: - return ( - None if self.attributes is None else self.attributes.mc_incident_sub_types - ) - - @mc_incident_sub_types.setter - def mc_incident_sub_types(self, mc_incident_sub_types: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mc_incident_sub_types = mc_incident_sub_types - - @property - def mc_incident_severity(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.mc_incident_severity - - @mc_incident_severity.setter - def mc_incident_severity(self, mc_incident_severity: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mc_incident_severity = mc_incident_severity - - @property - def mc_incident_state(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.mc_incident_state - - @mc_incident_state.setter - def mc_incident_state(self, mc_incident_state: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mc_incident_state = mc_incident_state - - @property - def mc_incident_warehouse(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.mc_incident_warehouse - ) - - @mc_incident_warehouse.setter - def mc_incident_warehouse(self, mc_incident_warehouse: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mc_incident_warehouse = mc_incident_warehouse - - @property - def mc_monitor(self) -> Optional[MCMonitor]: - return None if self.attributes is None else self.attributes.mc_monitor - - @mc_monitor.setter - def mc_monitor(self, mc_monitor: Optional[MCMonitor]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mc_monitor = mc_monitor - - @property - def mc_incident_assets(self) -> Optional[list[Asset]]: - return None if self.attributes is None else self.attributes.mc_incident_assets - - @mc_incident_assets.setter - def mc_incident_assets(self, mc_incident_assets: Optional[list[Asset]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mc_incident_assets = mc_incident_assets - - 
class Attributes(MonteCarlo.Attributes): - mc_incident_id: Optional[str] = Field( - None, description="", alias="mcIncidentId" - ) - mc_incident_type: Optional[str] = Field( - None, description="", alias="mcIncidentType" - ) - mc_incident_sub_types: Optional[set[str]] = Field( - None, description="", alias="mcIncidentSubTypes" - ) - mc_incident_severity: Optional[str] = Field( - None, description="", alias="mcIncidentSeverity" - ) - mc_incident_state: Optional[str] = Field( - None, description="", alias="mcIncidentState" - ) - mc_incident_warehouse: Optional[str] = Field( - None, description="", alias="mcIncidentWarehouse" - ) - mc_monitor: Optional[MCMonitor] = Field( - None, description="", alias="mcMonitor" - ) # relationship - mc_incident_assets: Optional[list[Asset]] = Field( - None, description="", alias="mcIncidentAssets" - ) # relationship - - attributes: "MCIncident.Attributes" = Field( - default_factory=lambda: MCIncident.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class MCMonitor(MonteCarlo): - """Description""" - - type_name: str = Field("MCMonitor", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "MCMonitor": - raise ValueError("must be MCMonitor") - return v - - def __setattr__(self, name, value): - if name in MCMonitor._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - MC_MONITOR_ID: ClassVar[KeywordField] = KeywordField("mcMonitorId", "mcMonitorId") - """ - Unique identifier for this monitor, from Monte Carlo. - """ - MC_MONITOR_STATUS: ClassVar[KeywordField] = KeywordField( - "mcMonitorStatus", "mcMonitorStatus" - ) - """ - Status of this monitor. - """ - MC_MONITOR_TYPE: ClassVar[KeywordField] = KeywordField( - "mcMonitorType", "mcMonitorType" - ) - """ - Type of this monitor, for example: field health (stats) or dimension tracking (categories). - """ - MC_MONITOR_WAREHOUSE: ClassVar[KeywordField] = KeywordField( - "mcMonitorWarehouse", "mcMonitorWarehouse" - ) - """ - Name of the warehouse for this monitor. - """ - MC_MONITOR_SCHEDULE_TYPE: ClassVar[KeywordField] = KeywordField( - "mcMonitorScheduleType", "mcMonitorScheduleType" - ) - """ - Type of schedule for this monitor, for example: fixed or dynamic. - """ - MC_MONITOR_NAMESPACE: ClassVar[KeywordTextField] = KeywordTextField( - "mcMonitorNamespace", "mcMonitorNamespace.keyword", "mcMonitorNamespace" - ) - """ - Namespace of this monitor. - """ - MC_MONITOR_RULE_TYPE: ClassVar[KeywordField] = KeywordField( - "mcMonitorRuleType", "mcMonitorRuleType" - ) - """ - Type of rule for this monitor. - """ - MC_MONITOR_RULE_CUSTOM_SQL: ClassVar[KeywordField] = KeywordField( - "mcMonitorRuleCustomSql", "mcMonitorRuleCustomSql" - ) - """ - SQL code for custom SQL rules. - """ - MC_MONITOR_RULE_SCHEDULE_CONFIG: ClassVar[KeywordField] = KeywordField( - "mcMonitorRuleScheduleConfig", "mcMonitorRuleScheduleConfig" - ) - """ - Schedule details for the rule. - """ - MC_MONITOR_RULE_SCHEDULE_CONFIG_HUMANIZED: ClassVar[TextField] = TextField( - "mcMonitorRuleScheduleConfigHumanized", "mcMonitorRuleScheduleConfigHumanized" - ) - """ - Readable description of the schedule for the rule. - """ - MC_MONITOR_ALERT_CONDITION: ClassVar[TextField] = TextField( - "mcMonitorAlertCondition", "mcMonitorAlertCondition" - ) - """ - Condition on which the monitor produces an alert. 
- """ - MC_MONITOR_RULE_NEXT_EXECUTION_TIME: ClassVar[NumericField] = NumericField( - "mcMonitorRuleNextExecutionTime", "mcMonitorRuleNextExecutionTime" - ) - """ - Time at which the next execution of the rule should occur. - """ - MC_MONITOR_RULE_PREVIOUS_EXECUTION_TIME: ClassVar[NumericField] = NumericField( - "mcMonitorRulePreviousExecutionTime", "mcMonitorRulePreviousExecutionTime" - ) - """ - Time at which the previous execution of the rule occurred. - """ - MC_MONITOR_RULE_COMPARISONS: ClassVar[KeywordField] = KeywordField( - "mcMonitorRuleComparisons", "mcMonitorRuleComparisons" - ) - """ - Comparison logic used for the rule. - """ - MC_MONITOR_RULE_IS_SNOOZED: ClassVar[BooleanField] = BooleanField( - "mcMonitorRuleIsSnoozed", "mcMonitorRuleIsSnoozed" - ) - """ - Whether the rule is currently snoozed (true) or not (false). - """ - MC_MONITOR_BREACH_RATE: ClassVar[NumericField] = NumericField( - "mcMonitorBreachRate", "mcMonitorBreachRate" - ) - """ - Rate at which this monitor is breached. - """ - MC_MONITOR_INCIDENT_COUNT: ClassVar[NumericField] = NumericField( - "mcMonitorIncidentCount", "mcMonitorIncidentCount" - ) - """ - Number of incidents associated with this monitor. - """ - - MC_MONITOR_ASSETS: ClassVar[RelationField] = RelationField("mcMonitorAssets") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "mc_monitor_id", - "mc_monitor_status", - "mc_monitor_type", - "mc_monitor_warehouse", - "mc_monitor_schedule_type", - "mc_monitor_namespace", - "mc_monitor_rule_type", - "mc_monitor_rule_custom_sql", - "mc_monitor_rule_schedule_config", - "mc_monitor_rule_schedule_config_humanized", - "mc_monitor_alert_condition", - "mc_monitor_rule_next_execution_time", - "mc_monitor_rule_previous_execution_time", - "mc_monitor_rule_comparisons", - "mc_monitor_rule_is_snoozed", - "mc_monitor_breach_rate", - "mc_monitor_incident_count", - "mc_monitor_assets", - ] - - @property - def mc_monitor_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.mc_monitor_id - - @mc_monitor_id.setter - def mc_monitor_id(self, mc_monitor_id: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mc_monitor_id = mc_monitor_id - - @property - def mc_monitor_status(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.mc_monitor_status - - @mc_monitor_status.setter - def mc_monitor_status(self, mc_monitor_status: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mc_monitor_status = mc_monitor_status - - @property - def mc_monitor_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.mc_monitor_type - - @mc_monitor_type.setter - def mc_monitor_type(self, mc_monitor_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mc_monitor_type = mc_monitor_type - - @property - def mc_monitor_warehouse(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.mc_monitor_warehouse - - @mc_monitor_warehouse.setter - def mc_monitor_warehouse(self, mc_monitor_warehouse: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mc_monitor_warehouse = mc_monitor_warehouse - - @property - def mc_monitor_schedule_type(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.mc_monitor_schedule_type - ) - - @mc_monitor_schedule_type.setter - def 
mc_monitor_schedule_type(self, mc_monitor_schedule_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mc_monitor_schedule_type = mc_monitor_schedule_type - - @property - def mc_monitor_namespace(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.mc_monitor_namespace - - @mc_monitor_namespace.setter - def mc_monitor_namespace(self, mc_monitor_namespace: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mc_monitor_namespace = mc_monitor_namespace - - @property - def mc_monitor_rule_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.mc_monitor_rule_type - - @mc_monitor_rule_type.setter - def mc_monitor_rule_type(self, mc_monitor_rule_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mc_monitor_rule_type = mc_monitor_rule_type - - @property - def mc_monitor_rule_custom_sql(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.mc_monitor_rule_custom_sql - ) - - @mc_monitor_rule_custom_sql.setter - def mc_monitor_rule_custom_sql(self, mc_monitor_rule_custom_sql: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mc_monitor_rule_custom_sql = mc_monitor_rule_custom_sql - - @property - def mc_monitor_rule_schedule_config(self) -> Optional[MCRuleSchedule]: - return ( - None - if self.attributes is None - else self.attributes.mc_monitor_rule_schedule_config - ) - - @mc_monitor_rule_schedule_config.setter - def mc_monitor_rule_schedule_config( - self, mc_monitor_rule_schedule_config: Optional[MCRuleSchedule] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mc_monitor_rule_schedule_config = ( - mc_monitor_rule_schedule_config - ) - - @property - def mc_monitor_rule_schedule_config_humanized(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.mc_monitor_rule_schedule_config_humanized - ) - - @mc_monitor_rule_schedule_config_humanized.setter - def mc_monitor_rule_schedule_config_humanized( - self, mc_monitor_rule_schedule_config_humanized: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mc_monitor_rule_schedule_config_humanized = ( - mc_monitor_rule_schedule_config_humanized - ) - - @property - def mc_monitor_alert_condition(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.mc_monitor_alert_condition - ) - - @mc_monitor_alert_condition.setter - def mc_monitor_alert_condition(self, mc_monitor_alert_condition: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mc_monitor_alert_condition = mc_monitor_alert_condition - - @property - def mc_monitor_rule_next_execution_time(self) -> Optional[datetime]: - return ( - None - if self.attributes is None - else self.attributes.mc_monitor_rule_next_execution_time - ) - - @mc_monitor_rule_next_execution_time.setter - def mc_monitor_rule_next_execution_time( - self, mc_monitor_rule_next_execution_time: Optional[datetime] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mc_monitor_rule_next_execution_time = ( - mc_monitor_rule_next_execution_time - ) - - @property - def mc_monitor_rule_previous_execution_time(self) -> Optional[datetime]: - return ( - None - if 
self.attributes is None - else self.attributes.mc_monitor_rule_previous_execution_time - ) - - @mc_monitor_rule_previous_execution_time.setter - def mc_monitor_rule_previous_execution_time( - self, mc_monitor_rule_previous_execution_time: Optional[datetime] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mc_monitor_rule_previous_execution_time = ( - mc_monitor_rule_previous_execution_time - ) - - @property - def mc_monitor_rule_comparisons(self) -> Optional[list[MCRuleComparison]]: - return ( - None - if self.attributes is None - else self.attributes.mc_monitor_rule_comparisons - ) - - @mc_monitor_rule_comparisons.setter - def mc_monitor_rule_comparisons( - self, mc_monitor_rule_comparisons: Optional[list[MCRuleComparison]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mc_monitor_rule_comparisons = mc_monitor_rule_comparisons - - @property - def mc_monitor_rule_is_snoozed(self) -> Optional[bool]: - return ( - None - if self.attributes is None - else self.attributes.mc_monitor_rule_is_snoozed - ) - - @mc_monitor_rule_is_snoozed.setter - def mc_monitor_rule_is_snoozed(self, mc_monitor_rule_is_snoozed: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mc_monitor_rule_is_snoozed = mc_monitor_rule_is_snoozed - - @property - def mc_monitor_breach_rate(self) -> Optional[float]: - return ( - None if self.attributes is None else self.attributes.mc_monitor_breach_rate - ) - - @mc_monitor_breach_rate.setter - def mc_monitor_breach_rate(self, mc_monitor_breach_rate: Optional[float]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mc_monitor_breach_rate = mc_monitor_breach_rate - - @property - def mc_monitor_incident_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.mc_monitor_incident_count - ) - - @mc_monitor_incident_count.setter - def mc_monitor_incident_count(self, mc_monitor_incident_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mc_monitor_incident_count = mc_monitor_incident_count - - @property - def mc_monitor_assets(self) -> Optional[list[Asset]]: - return None if self.attributes is None else self.attributes.mc_monitor_assets - - @mc_monitor_assets.setter - def mc_monitor_assets(self, mc_monitor_assets: Optional[list[Asset]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mc_monitor_assets = mc_monitor_assets - - class Attributes(MonteCarlo.Attributes): - mc_monitor_id: Optional[str] = Field(None, description="", alias="mcMonitorId") - mc_monitor_status: Optional[str] = Field( - None, description="", alias="mcMonitorStatus" - ) - mc_monitor_type: Optional[str] = Field( - None, description="", alias="mcMonitorType" - ) - mc_monitor_warehouse: Optional[str] = Field( - None, description="", alias="mcMonitorWarehouse" - ) - mc_monitor_schedule_type: Optional[str] = Field( - None, description="", alias="mcMonitorScheduleType" - ) - mc_monitor_namespace: Optional[str] = Field( - None, description="", alias="mcMonitorNamespace" - ) - mc_monitor_rule_type: Optional[str] = Field( - None, description="", alias="mcMonitorRuleType" - ) - mc_monitor_rule_custom_sql: Optional[str] = Field( - None, description="", alias="mcMonitorRuleCustomSql" - ) - mc_monitor_rule_schedule_config: Optional[MCRuleSchedule] = Field( - None, description="", alias="mcMonitorRuleScheduleConfig" - ) - 
mc_monitor_rule_schedule_config_humanized: Optional[str] = Field( - None, description="", alias="mcMonitorRuleScheduleConfigHumanized" - ) - mc_monitor_alert_condition: Optional[str] = Field( - None, description="", alias="mcMonitorAlertCondition" - ) - mc_monitor_rule_next_execution_time: Optional[datetime] = Field( - None, description="", alias="mcMonitorRuleNextExecutionTime" - ) - mc_monitor_rule_previous_execution_time: Optional[datetime] = Field( - None, description="", alias="mcMonitorRulePreviousExecutionTime" - ) - mc_monitor_rule_comparisons: Optional[list[MCRuleComparison]] = Field( - None, description="", alias="mcMonitorRuleComparisons" - ) - mc_monitor_rule_is_snoozed: Optional[bool] = Field( - None, description="", alias="mcMonitorRuleIsSnoozed" - ) - mc_monitor_breach_rate: Optional[float] = Field( - None, description="", alias="mcMonitorBreachRate" - ) - mc_monitor_incident_count: Optional[int] = Field( - None, description="", alias="mcMonitorIncidentCount" - ) - mc_monitor_assets: Optional[list[Asset]] = Field( - None, description="", alias="mcMonitorAssets" - ) # relationship - - attributes: "MCMonitor.Attributes" = Field( - default_factory=lambda: MCMonitor.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class Soda(DataQuality): - """Description""" - - type_name: str = Field("Soda", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "Soda": - raise ValueError("must be Soda") - return v - - def __setattr__(self, name, value): - if name in Soda._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - _convenience_properties: ClassVar[list[str]] = [] - - -class SodaCheck(Soda): - """Description""" - - type_name: str = Field("SodaCheck", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "SodaCheck": - raise ValueError("must be SodaCheck") - return v - - def __setattr__(self, name, value): - if name in SodaCheck._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SODA_CHECK_ID: ClassVar[KeywordField] = KeywordField("sodaCheckId", "sodaCheckId") - """ - Identifier of the check in Soda. - """ - SODA_CHECK_EVALUATION_STATUS: ClassVar[KeywordField] = KeywordField( - "sodaCheckEvaluationStatus", "sodaCheckEvaluationStatus" - ) - """ - Status of the check in Soda. - """ - SODA_CHECK_DEFINITION: ClassVar[KeywordField] = KeywordField( - "sodaCheckDefinition", "sodaCheckDefinition" - ) - """ - Definition of the check in Soda. 
- """ - SODA_CHECK_LAST_SCAN_AT: ClassVar[NumericField] = NumericField( - "sodaCheckLastScanAt", "sodaCheckLastScanAt" - ) - """ - - """ - SODA_CHECK_INCIDENT_COUNT: ClassVar[NumericField] = NumericField( - "sodaCheckIncidentCount", "sodaCheckIncidentCount" - ) - """ - - """ - - SODA_CHECK_COLUMNS: ClassVar[RelationField] = RelationField("sodaCheckColumns") - """ - TBC - """ - SODA_CHECK_ASSETS: ClassVar[RelationField] = RelationField("sodaCheckAssets") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "soda_check_id", - "soda_check_evaluation_status", - "soda_check_definition", - "soda_check_last_scan_at", - "soda_check_incident_count", - "soda_check_columns", - "soda_check_assets", - ] - - @property - def soda_check_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.soda_check_id - - @soda_check_id.setter - def soda_check_id(self, soda_check_id: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.soda_check_id = soda_check_id - - @property - def soda_check_evaluation_status(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.soda_check_evaluation_status - ) - - @soda_check_evaluation_status.setter - def soda_check_evaluation_status(self, soda_check_evaluation_status: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.soda_check_evaluation_status = soda_check_evaluation_status - - @property - def soda_check_definition(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.soda_check_definition - ) - - @soda_check_definition.setter - def soda_check_definition(self, soda_check_definition: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.soda_check_definition = soda_check_definition - - @property - def soda_check_last_scan_at(self) -> Optional[datetime]: - return ( - None if self.attributes is None else self.attributes.soda_check_last_scan_at - ) - - @soda_check_last_scan_at.setter - def soda_check_last_scan_at(self, soda_check_last_scan_at: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.soda_check_last_scan_at = soda_check_last_scan_at - - @property - def soda_check_incident_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.soda_check_incident_count - ) - - @soda_check_incident_count.setter - def soda_check_incident_count(self, soda_check_incident_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.soda_check_incident_count = soda_check_incident_count - - @property - def soda_check_columns(self) -> Optional[list[Column]]: - return None if self.attributes is None else self.attributes.soda_check_columns - - @soda_check_columns.setter - def soda_check_columns(self, soda_check_columns: Optional[list[Column]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.soda_check_columns = soda_check_columns - - @property - def soda_check_assets(self) -> Optional[list[Asset]]: - return None if self.attributes is None else self.attributes.soda_check_assets - - @soda_check_assets.setter - def soda_check_assets(self, soda_check_assets: Optional[list[Asset]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.soda_check_assets = soda_check_assets - - class Attributes(Soda.Attributes): - 
soda_check_id: Optional[str] = Field(None, description="", alias="sodaCheckId") - soda_check_evaluation_status: Optional[str] = Field( - None, description="", alias="sodaCheckEvaluationStatus" - ) - soda_check_definition: Optional[str] = Field( - None, description="", alias="sodaCheckDefinition" - ) - soda_check_last_scan_at: Optional[datetime] = Field( - None, description="", alias="sodaCheckLastScanAt" - ) - soda_check_incident_count: Optional[int] = Field( - None, description="", alias="sodaCheckIncidentCount" - ) - soda_check_columns: Optional[list[Column]] = Field( - None, description="", alias="sodaCheckColumns" - ) # relationship - soda_check_assets: Optional[list[Asset]] = Field( - None, description="", alias="sodaCheckAssets" - ) # relationship - - attributes: "SodaCheck.Attributes" = Field( - default_factory=lambda: SodaCheck.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class Table(SQL): - """Description""" - - @classmethod - # @validate_arguments() - @init_guid - def create(cls, *, name: str, schema_qualified_name: str) -> Table: - validate_required_fields( - ["name", "schema_qualified_name"], [name, schema_qualified_name] - ) - attributes = Table.Attributes.create( - name=name, schema_qualified_name=schema_qualified_name - ) - return cls(attributes=attributes) - - type_name: str = Field("Table", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "Table": - raise ValueError("must be Table") - return v - - def __setattr__(self, name, value): - if name in Table._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - COLUMN_COUNT: ClassVar[NumericField] = NumericField("columnCount", "columnCount") - """ - Number of columns in this table. - """ - ROW_COUNT: ClassVar[NumericField] = NumericField("rowCount", "rowCount") - """ - Number of rows in this table. - """ - SIZE_BYTES: ClassVar[NumericField] = NumericField("sizeBytes", "sizeBytes") - """ - Size of this table, in bytes. - """ - ALIAS: ClassVar[KeywordField] = KeywordField("alias", "alias") - """ - Alias for this table. - """ - IS_TEMPORARY: ClassVar[BooleanField] = BooleanField("isTemporary", "isTemporary") - """ - Whether this table is temporary (true) or not (false). - """ - IS_QUERY_PREVIEW: ClassVar[BooleanField] = BooleanField( - "isQueryPreview", "isQueryPreview" - ) - """ - Whether preview queries are allowed for this table (true) or not (false). - """ - QUERY_PREVIEW_CONFIG: ClassVar[KeywordField] = KeywordField( - "queryPreviewConfig", "queryPreviewConfig" - ) - """ - Configuration for preview queries. - """ - EXTERNAL_LOCATION: ClassVar[KeywordField] = KeywordField( - "externalLocation", "externalLocation" - ) - """ - External location of this table, for example: an S3 object location. - """ - EXTERNAL_LOCATION_REGION: ClassVar[KeywordField] = KeywordField( - "externalLocationRegion", "externalLocationRegion" - ) - """ - Region of the external location of this table, for example: S3 region. - """ - EXTERNAL_LOCATION_FORMAT: ClassVar[KeywordField] = KeywordField( - "externalLocationFormat", "externalLocationFormat" - ) - """ - Format of the external location of this table, for example: JSON, CSV, PARQUET, etc. 
- """ - IS_PARTITIONED: ClassVar[BooleanField] = BooleanField( - "isPartitioned", "isPartitioned" - ) - """ - Whether this table is partitioned (true) or not (false). - """ - PARTITION_STRATEGY: ClassVar[KeywordField] = KeywordField( - "partitionStrategy", "partitionStrategy" - ) - """ - Partition strategy for this table. - """ - PARTITION_COUNT: ClassVar[NumericField] = NumericField( - "partitionCount", "partitionCount" - ) - """ - Number of partitions in this table. - """ - PARTITION_LIST: ClassVar[KeywordField] = KeywordField( - "partitionList", "partitionList" - ) - """ - List of partitions in this table. - """ - - COLUMNS: ClassVar[RelationField] = RelationField("columns") - """ - TBC - """ - FACTS: ClassVar[RelationField] = RelationField("facts") - """ - TBC - """ - ATLAN_SCHEMA: ClassVar[RelationField] = RelationField("atlanSchema") - """ - TBC - """ - PARTITIONS: ClassVar[RelationField] = RelationField("partitions") - """ - TBC - """ - QUERIES: ClassVar[RelationField] = RelationField("queries") - """ - TBC - """ - DIMENSIONS: ClassVar[RelationField] = RelationField("dimensions") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "column_count", - "row_count", - "size_bytes", - "alias", - "is_temporary", - "is_query_preview", - "query_preview_config", - "external_location", - "external_location_region", - "external_location_format", - "is_partitioned", - "partition_strategy", - "partition_count", - "partition_list", - "columns", - "facts", - "atlan_schema", - "partitions", - "queries", - "dimensions", - ] - - @property - def column_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.column_count - - @column_count.setter - def column_count(self, column_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_count = column_count - - @property - def row_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.row_count - - @row_count.setter - def row_count(self, row_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.row_count = row_count - - @property - def size_bytes(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.size_bytes - - @size_bytes.setter - def size_bytes(self, size_bytes: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.size_bytes = size_bytes - - @property - def alias(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.alias - - @alias.setter - def alias(self, alias: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.alias = alias - - @property - def is_temporary(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_temporary - - @is_temporary.setter - def is_temporary(self, is_temporary: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_temporary = is_temporary - - @property - def is_query_preview(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_query_preview - - @is_query_preview.setter - def is_query_preview(self, is_query_preview: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_query_preview = is_query_preview - - @property - def query_preview_config(self) -> Optional[dict[str, str]]: - return None 
if self.attributes is None else self.attributes.query_preview_config - - @query_preview_config.setter - def query_preview_config(self, query_preview_config: Optional[dict[str, str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.query_preview_config = query_preview_config - - @property - def external_location(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.external_location - - @external_location.setter - def external_location(self, external_location: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.external_location = external_location - - @property - def external_location_region(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.external_location_region - ) - - @external_location_region.setter - def external_location_region(self, external_location_region: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.external_location_region = external_location_region - - @property - def external_location_format(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.external_location_format - ) - - @external_location_format.setter - def external_location_format(self, external_location_format: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.external_location_format = external_location_format - - @property - def is_partitioned(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_partitioned - - @is_partitioned.setter - def is_partitioned(self, is_partitioned: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_partitioned = is_partitioned - - @property - def partition_strategy(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.partition_strategy - - @partition_strategy.setter - def partition_strategy(self, partition_strategy: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.partition_strategy = partition_strategy - - @property - def partition_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.partition_count - - @partition_count.setter - def partition_count(self, partition_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.partition_count = partition_count - - @property - def partition_list(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.partition_list - - @partition_list.setter - def partition_list(self, partition_list: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.partition_list = partition_list - - @property - def columns(self) -> Optional[list[Column]]: - return None if self.attributes is None else self.attributes.columns - - @columns.setter - def columns(self, columns: Optional[list[Column]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.columns = columns - - @property - def facts(self) -> Optional[list[Table]]: - return None if self.attributes is None else self.attributes.facts - - @facts.setter - def facts(self, facts: Optional[list[Table]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.facts = facts - - @property - def 
atlan_schema(self) -> Optional[Schema]: - return None if self.attributes is None else self.attributes.atlan_schema - - @atlan_schema.setter - def atlan_schema(self, atlan_schema: Optional[Schema]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.atlan_schema = atlan_schema - - @property - def partitions(self) -> Optional[list[TablePartition]]: - return None if self.attributes is None else self.attributes.partitions - - @partitions.setter - def partitions(self, partitions: Optional[list[TablePartition]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.partitions = partitions - - @property - def queries(self) -> Optional[list[Query]]: - return None if self.attributes is None else self.attributes.queries - - @queries.setter - def queries(self, queries: Optional[list[Query]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.queries = queries - - @property - def dimensions(self) -> Optional[list[Table]]: - return None if self.attributes is None else self.attributes.dimensions - - @dimensions.setter - def dimensions(self, dimensions: Optional[list[Table]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dimensions = dimensions - - class Attributes(SQL.Attributes): - column_count: Optional[int] = Field(None, description="", alias="columnCount") - row_count: Optional[int] = Field(None, description="", alias="rowCount") - size_bytes: Optional[int] = Field(None, description="", alias="sizeBytes") - alias: Optional[str] = Field(None, description="", alias="alias") - is_temporary: Optional[bool] = Field(None, description="", alias="isTemporary") - is_query_preview: Optional[bool] = Field( - None, description="", alias="isQueryPreview" - ) - query_preview_config: Optional[dict[str, str]] = Field( - None, description="", alias="queryPreviewConfig" - ) - external_location: Optional[str] = Field( - None, description="", alias="externalLocation" - ) - external_location_region: Optional[str] = Field( - None, description="", alias="externalLocationRegion" - ) - external_location_format: Optional[str] = Field( - None, description="", alias="externalLocationFormat" - ) - is_partitioned: Optional[bool] = Field( - None, description="", alias="isPartitioned" - ) - partition_strategy: Optional[str] = Field( - None, description="", alias="partitionStrategy" - ) - partition_count: Optional[int] = Field( - None, description="", alias="partitionCount" - ) - partition_list: Optional[str] = Field( - None, description="", alias="partitionList" - ) - columns: Optional[list[Column]] = Field( - None, description="", alias="columns" - ) # relationship - facts: Optional[list[Table]] = Field( - None, description="", alias="facts" - ) # relationship - atlan_schema: Optional[Schema] = Field( - None, description="", alias="atlanSchema" - ) # relationship - partitions: Optional[list[TablePartition]] = Field( - None, description="", alias="partitions" - ) # relationship - queries: Optional[list[Query]] = Field( - None, description="", alias="queries" - ) # relationship - dimensions: Optional[list[Table]] = Field( - None, description="", alias="dimensions" - ) # relationship - - @classmethod - # @validate_arguments() - @init_guid - def create(cls, *, name: str, schema_qualified_name: str) -> Table.Attributes: - if not name: - raise ValueError("name cannot be blank") - validate_required_fields(["schema_qualified_name"], [schema_qualified_name]) - fields = 
schema_qualified_name.split("/") - if len(fields) != 5: - raise ValueError("Invalid schema_qualified_name") - try: - connector_type = AtlanConnectorType(fields[1]) # type:ignore - except ValueError as e: - raise ValueError("Invalid schema_qualified_name") from e - return Table.Attributes( - name=name, - database_name=fields[3], - connection_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}", - database_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}/{fields[3]}", - qualified_name=f"{schema_qualified_name}/{name}", - schema_qualified_name=schema_qualified_name, - schema_name=fields[4], - connector_name=connector_type.value, - atlan_schema=Schema.ref_by_qualified_name(schema_qualified_name), - ) - - attributes: "Table.Attributes" = Field( - default_factory=lambda: Table.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class SnowflakeDynamicTable(Table): - """Description""" - - type_name: str = Field("SnowflakeDynamicTable", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "SnowflakeDynamicTable": - raise ValueError("must be SnowflakeDynamicTable") - return v - - def __setattr__(self, name, value): - if name in SnowflakeDynamicTable._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - DEFINITION: ClassVar[KeywordField] = KeywordField("definition", "definition") - """ - SQL statements used to define the dynamic table. - """ - - _convenience_properties: ClassVar[list[str]] = [ - "definition", - ] - - @property - def definition(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.definition - - @definition.setter - def definition(self, definition: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.definition = definition - - class Attributes(Table.Attributes): - definition: Optional[str] = Field(None, description="", alias="definition") - - attributes: "SnowflakeDynamicTable.Attributes" = Field( - default_factory=lambda: SnowflakeDynamicTable.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class Database(SQL): - """Description""" - - @classmethod - # @validate_arguments() - @init_guid - def create(cls, *, name: str, connection_qualified_name: str) -> Database: - validate_required_fields( - ["name", "connection_qualified_name"], [name, connection_qualified_name] - ) - fields = connection_qualified_name.split("/") - if len(fields) != 3: - raise ValueError("Invalid connection_qualified_name") - try: - connector_type = AtlanConnectorType(fields[1]) # type:ignore - except ValueError as e: - raise ValueError("Invalid connection_qualified_name") from e - attributes = Database.Attributes( - name=name, - connection_qualified_name=connection_qualified_name, - qualified_name=f"{connection_qualified_name}/{name}", - connector_name=connector_type.value, - ) - return cls(attributes=attributes) - - type_name: str = Field("Database", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "Database": - raise ValueError("must be Database") - return v - - def __setattr__(self, name, value): - if name in Database._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SCHEMA_COUNT: ClassVar[NumericField] = NumericField("schemaCount", "schemaCount") - """ - Number of schemas in this database. - """ - - SCHEMAS: ClassVar[RelationField] = RelationField("schemas") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "schema_count", - "schemas", - ] - - @property - def schema_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.schema_count - - @schema_count.setter - def schema_count(self, schema_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.schema_count = schema_count - - @property - def schemas(self) -> Optional[list[Schema]]: - return None if self.attributes is None else self.attributes.schemas - - @schemas.setter - def schemas(self, schemas: Optional[list[Schema]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.schemas = schemas - - class Attributes(SQL.Attributes): - schema_count: Optional[int] = Field(None, description="", alias="schemaCount") - schemas: Optional[list[Schema]] = Field( - None, description="", alias="schemas" - ) # relationship - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, name: str, connection_qualified_name: str - ) -> Database.Attributes: - validate_required_fields( - ["name", "connection_qualified_name"], [name, connection_qualified_name] - ) - fields = connection_qualified_name.split("/") - if len(fields) != 3: - raise ValueError("Invalid connection_qualified_name") - try: - connector_type = AtlanConnectorType(fields[1]) # type:ignore - except ValueError as e: - raise ValueError("Invalid connection_qualified_name") from e - return Database.Attributes( - name=name, - connection_qualified_name=connection_qualified_name, - qualified_name=f"{connection_qualified_name}/{name}", - connector_name=connector_type.value, - ) - - attributes: "Database.Attributes" = Field( - default_factory=lambda: Database.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by "
-        "type, so are described in the sub-types of this schema.\n",
-    )
-
-
-Referenceable.Attributes.update_forward_refs()
-
-
-Asset.Attributes.update_forward_refs()
-
-
-AtlasGlossaryCategory.Attributes.update_forward_refs()
-
-
-AtlasGlossary.Attributes.update_forward_refs()
-
-
-AtlasGlossaryTerm.Attributes.update_forward_refs()
-
-
-Process.Attributes.update_forward_refs()
-
-
-Namespace.Attributes.update_forward_refs()
-
-
-Folder.Attributes.update_forward_refs()
-
-
-Catalog.Attributes.update_forward_refs()
-
-
-Tag.Attributes.update_forward_refs()
-
-
-ColumnProcess.Attributes.update_forward_refs()
-
-
-Airflow.Attributes.update_forward_refs()
-
-
-AirflowDag.Attributes.update_forward_refs()
-
-
-AirflowTask.Attributes.update_forward_refs()
-
-
-DataQuality.Attributes.update_forward_refs()
-
-
-Metric.Attributes.update_forward_refs()
-
-
-Resource.Attributes.update_forward_refs()
-
-
-Readme.Attributes.update_forward_refs()
-
-
-File.Attributes.update_forward_refs()
-
-
-Link.Attributes.update_forward_refs()
-
-
-DataMesh.Attributes.update_forward_refs()
-
-
-DataDomain.Attributes.update_forward_refs()
-
-
-DataProduct.Attributes.update_forward_refs()
-
-
-SQL.Attributes.update_forward_refs()
-
-
-Query.Attributes.update_forward_refs()
-
-
-Schema.Attributes.update_forward_refs()
-
-
-SnowflakePipe.Attributes.update_forward_refs()
-
-
-View.Attributes.update_forward_refs()
-
-
-MaterialisedView.Attributes.update_forward_refs()
-
-
-Function.Attributes.update_forward_refs()
-
-
-TablePartition.Attributes.update_forward_refs()
-
-
-Column.Attributes.update_forward_refs()
-
-
-SnowflakeStream.Attributes.update_forward_refs()
-
-
-Procedure.Attributes.update_forward_refs()
-
-
-SnowflakeTag.Attributes.update_forward_refs()
-
-
-Matillion.Attributes.update_forward_refs()
-
-
-MatillionGroup.Attributes.update_forward_refs()
-
-
-MatillionJob.Attributes.update_forward_refs()
-
-
-MatillionProject.Attributes.update_forward_refs()
-
-
-MatillionComponent.Attributes.update_forward_refs()
-
-
-Dbt.Attributes.update_forward_refs()
-
-
-DbtModelColumn.Attributes.update_forward_refs()
-
-
-DbtTest.Attributes.update_forward_refs()
-
-
-DbtModel.Attributes.update_forward_refs()
-
-
-DbtMetric.Attributes.update_forward_refs()
-
-
-DbtSource.Attributes.update_forward_refs()
-
-
-SchemaRegistry.Attributes.update_forward_refs()
-
-
-SchemaRegistrySubject.Attributes.update_forward_refs()
-
-
-MonteCarlo.Attributes.update_forward_refs()
-
-
-MCIncident.Attributes.update_forward_refs()
-
-
-MCMonitor.Attributes.update_forward_refs()
-
-
-Soda.Attributes.update_forward_refs()
-
-
-SodaCheck.Attributes.update_forward_refs()
-
-
-Table.Attributes.update_forward_refs()
-
-
-SnowflakeDynamicTable.Attributes.update_forward_refs()
-
-
-Database.Attributes.update_forward_refs()
diff --git a/pyatlan/model/assets/asset60.py b/pyatlan/model/assets/asset60.py
deleted file mode 100644
index a75b301b4..000000000
--- a/pyatlan/model/assets/asset60.py
+++ /dev/null
@@ -1,525 +0,0 @@
-# SPDX-License-Identifier: Apache-2.0
-# Copyright 2022 Atlan Pte. Ltd.
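
The Table and Database classes removed above define keyword-only create() helpers that derive every qualified name from its slash-separated parent. A minimal usage sketch, assuming these classes remain importable from pyatlan.model.assets after this consolidation and that Table exposes the same create(name=..., schema_qualified_name=...) entry point as the Attributes helper shown above; the tenant, connector, and object names below are hypothetical.

from pyatlan.model.assets import Database, Table

# connection_qualified_name must have exactly three segments (tenant/connector/epoch),
# and the connector segment must be a valid AtlanConnectorType value.
database = Database.create(
    name="SALES_DB",
    connection_qualified_name="default/snowflake/1234567890",
)
assert database.qualified_name == "default/snowflake/1234567890/SALES_DB"

# schema_qualified_name must have exactly five segments; the helper splits it to
# back-fill the database/schema names and the parent qualified names.
table = Table.create(
    name="CUSTOMERS",
    schema_qualified_name="default/snowflake/1234567890/SALES_DB/PUBLIC",
)
assert table.schema_name == "PUBLIC"
assert table.database_qualified_name == "default/snowflake/1234567890/SALES_DB"
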
- - -from __future__ import annotations - -from typing import ClassVar, Optional - -from pydantic import Field, validator - -from pyatlan.model.enums import AtlanConnectorType -from pyatlan.model.fields.atlan_fields import ( - BooleanField, - KeywordField, - KeywordTextField, - RelationField, - TextField, -) -from pyatlan.utils import init_guid, validate_required_fields - -from .asset28 import API - - -class APISpec(API): - """Description""" - - @classmethod - # @validate_arguments() - @init_guid - def create(cls, *, name: str, connection_qualified_name: str) -> APISpec: - validate_required_fields( - ["name", "connection_qualified_name"], [name, connection_qualified_name] - ) - attributes = APISpec.Attributes.create( - name=name, connection_qualified_name=connection_qualified_name - ) - return cls(attributes=attributes) - - type_name: str = Field("APISpec", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "APISpec": - raise ValueError("must be APISpec") - return v - - def __setattr__(self, name, value): - if name in APISpec._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - API_SPEC_TERMS_OF_SERVICE_URL: ClassVar[KeywordTextField] = KeywordTextField( - "apiSpecTermsOfServiceURL", - "apiSpecTermsOfServiceURL", - "apiSpecTermsOfServiceURL.text", - ) - """ - URL to the terms of service for the API specification. - """ - API_SPEC_CONTACT_EMAIL: ClassVar[KeywordTextField] = KeywordTextField( - "apiSpecContactEmail", "apiSpecContactEmail", "apiSpecContactEmail.text" - ) - """ - Email address for a contact responsible for the API specification. - """ - API_SPEC_CONTACT_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "apiSpecContactName", "apiSpecContactName.keyword", "apiSpecContactName" - ) - """ - Name of the contact responsible for the API specification. - """ - API_SPEC_CONTACT_URL: ClassVar[KeywordTextField] = KeywordTextField( - "apiSpecContactURL", "apiSpecContactURL", "apiSpecContactURL.text" - ) - """ - URL pointing to the contact information. - """ - API_SPEC_LICENSE_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "apiSpecLicenseName", "apiSpecLicenseName.keyword", "apiSpecLicenseName" - ) - """ - Name of the license under which the API specification is available. - """ - API_SPEC_LICENSE_URL: ClassVar[KeywordTextField] = KeywordTextField( - "apiSpecLicenseURL", "apiSpecLicenseURL", "apiSpecLicenseURL.text" - ) - """ - URL to the license under which the API specification is available. - """ - API_SPEC_CONTRACT_VERSION: ClassVar[KeywordField] = KeywordField( - "apiSpecContractVersion", "apiSpecContractVersion" - ) - """ - Version of the contract for the API specification. - """ - API_SPEC_SERVICE_ALIAS: ClassVar[KeywordTextField] = KeywordTextField( - "apiSpecServiceAlias", "apiSpecServiceAlias", "apiSpecServiceAlias.text" - ) - """ - Service alias for the API specification. 
- """ - - API_PATHS: ClassVar[RelationField] = RelationField("apiPaths") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "api_spec_terms_of_service_url", - "api_spec_contact_email", - "api_spec_contact_name", - "api_spec_contact_url", - "api_spec_license_name", - "api_spec_license_url", - "api_spec_contract_version", - "api_spec_service_alias", - "api_paths", - ] - - @property - def api_spec_terms_of_service_url(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.api_spec_terms_of_service_url - ) - - @api_spec_terms_of_service_url.setter - def api_spec_terms_of_service_url( - self, api_spec_terms_of_service_url: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.api_spec_terms_of_service_url = api_spec_terms_of_service_url - - @property - def api_spec_contact_email(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.api_spec_contact_email - ) - - @api_spec_contact_email.setter - def api_spec_contact_email(self, api_spec_contact_email: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.api_spec_contact_email = api_spec_contact_email - - @property - def api_spec_contact_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.api_spec_contact_name - ) - - @api_spec_contact_name.setter - def api_spec_contact_name(self, api_spec_contact_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.api_spec_contact_name = api_spec_contact_name - - @property - def api_spec_contact_url(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.api_spec_contact_url - - @api_spec_contact_url.setter - def api_spec_contact_url(self, api_spec_contact_url: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.api_spec_contact_url = api_spec_contact_url - - @property - def api_spec_license_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.api_spec_license_name - ) - - @api_spec_license_name.setter - def api_spec_license_name(self, api_spec_license_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.api_spec_license_name = api_spec_license_name - - @property - def api_spec_license_url(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.api_spec_license_url - - @api_spec_license_url.setter - def api_spec_license_url(self, api_spec_license_url: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.api_spec_license_url = api_spec_license_url - - @property - def api_spec_contract_version(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.api_spec_contract_version - ) - - @api_spec_contract_version.setter - def api_spec_contract_version(self, api_spec_contract_version: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.api_spec_contract_version = api_spec_contract_version - - @property - def api_spec_service_alias(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.api_spec_service_alias - ) - - @api_spec_service_alias.setter - def api_spec_service_alias(self, api_spec_service_alias: Optional[str]): - if self.attributes is None: - 
self.attributes = self.Attributes() - self.attributes.api_spec_service_alias = api_spec_service_alias - - @property - def api_paths(self) -> Optional[list[APIPath]]: - return None if self.attributes is None else self.attributes.api_paths - - @api_paths.setter - def api_paths(self, api_paths: Optional[list[APIPath]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.api_paths = api_paths - - class Attributes(API.Attributes): - api_spec_terms_of_service_url: Optional[str] = Field( - None, description="", alias="apiSpecTermsOfServiceURL" - ) - api_spec_contact_email: Optional[str] = Field( - None, description="", alias="apiSpecContactEmail" - ) - api_spec_contact_name: Optional[str] = Field( - None, description="", alias="apiSpecContactName" - ) - api_spec_contact_url: Optional[str] = Field( - None, description="", alias="apiSpecContactURL" - ) - api_spec_license_name: Optional[str] = Field( - None, description="", alias="apiSpecLicenseName" - ) - api_spec_license_url: Optional[str] = Field( - None, description="", alias="apiSpecLicenseURL" - ) - api_spec_contract_version: Optional[str] = Field( - None, description="", alias="apiSpecContractVersion" - ) - api_spec_service_alias: Optional[str] = Field( - None, description="", alias="apiSpecServiceAlias" - ) - api_paths: Optional[list[APIPath]] = Field( - None, description="", alias="apiPaths" - ) # relationship - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, *, name: str, connection_qualified_name: str - ) -> APISpec.Attributes: - validate_required_fields( - ["name", "connection_qualified_name"], [name, connection_qualified_name] - ) - - # Split the connection_qualified_name to extract necessary information - fields = connection_qualified_name.split("/") - if len(fields) != 3: - raise ValueError("Invalid connection_qualified_name") - - try: - connector_type = AtlanConnectorType(fields[1]) # type:ignore - except ValueError as e: - raise ValueError("Invalid connection_qualified_name") from e - - return APISpec.Attributes( - name=name, - qualified_name=f"{connection_qualified_name}/{name}", - connection_qualified_name=connection_qualified_name, - connector_name=connector_type.value, - ) - - attributes: "APISpec.Attributes" = Field( - default_factory=lambda: APISpec.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class APIPath(API): - """Description""" - - @classmethod - # @validate_arguments() - @init_guid - def create(cls, *, path_raw_uri: str, spec_qualified_name: str) -> APIPath: - validate_required_fields( - ["path_raw_uri", "spec_qualified_name"], [path_raw_uri, spec_qualified_name] - ) - attributes = APIPath.Attributes.create( - path_raw_uri=path_raw_uri, spec_qualified_name=spec_qualified_name - ) - return cls(attributes=attributes) - - type_name: str = Field("APIPath", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "APIPath": - raise ValueError("must be APIPath") - return v - - def __setattr__(self, name, value): - if name in APIPath._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - API_PATH_SUMMARY: ClassVar[TextField] = TextField( - "apiPathSummary", "apiPathSummary" - ) - """ - Descriptive summary intended to apply to all operations in this path. 
- """ - API_PATH_RAW_URI: ClassVar[KeywordTextField] = KeywordTextField( - "apiPathRawURI", "apiPathRawURI", "apiPathRawURI.text" - ) - """ - Absolute path to an individual endpoint. - """ - API_PATH_IS_TEMPLATED: ClassVar[BooleanField] = BooleanField( - "apiPathIsTemplated", "apiPathIsTemplated" - ) - """ - Whether the endpoint's path contains replaceable parameters (true) or not (false). - """ - API_PATH_AVAILABLE_OPERATIONS: ClassVar[KeywordField] = KeywordField( - "apiPathAvailableOperations", "apiPathAvailableOperations" - ) - """ - List of the operations available on the endpoint. - """ - API_PATH_AVAILABLE_RESPONSE_CODES: ClassVar[KeywordField] = KeywordField( - "apiPathAvailableResponseCodes", "apiPathAvailableResponseCodes" - ) - """ - Response codes available on the path across all operations. - """ - API_PATH_IS_INGRESS_EXPOSED: ClassVar[BooleanField] = BooleanField( - "apiPathIsIngressExposed", "apiPathIsIngressExposed" - ) - """ - Whether the path is exposed as an ingress (true) or not (false). - """ - - API_SPEC: ClassVar[RelationField] = RelationField("apiSpec") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "api_path_summary", - "api_path_raw_u_r_i", - "api_path_is_templated", - "api_path_available_operations", - "api_path_available_response_codes", - "api_path_is_ingress_exposed", - "api_spec", - ] - - @property - def api_path_summary(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.api_path_summary - - @api_path_summary.setter - def api_path_summary(self, api_path_summary: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.api_path_summary = api_path_summary - - @property - def api_path_raw_u_r_i(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.api_path_raw_u_r_i - - @api_path_raw_u_r_i.setter - def api_path_raw_u_r_i(self, api_path_raw_u_r_i: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.api_path_raw_u_r_i = api_path_raw_u_r_i - - @property - def api_path_is_templated(self) -> Optional[bool]: - return ( - None if self.attributes is None else self.attributes.api_path_is_templated - ) - - @api_path_is_templated.setter - def api_path_is_templated(self, api_path_is_templated: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.api_path_is_templated = api_path_is_templated - - @property - def api_path_available_operations(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.api_path_available_operations - ) - - @api_path_available_operations.setter - def api_path_available_operations( - self, api_path_available_operations: Optional[set[str]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.api_path_available_operations = api_path_available_operations - - @property - def api_path_available_response_codes(self) -> Optional[dict[str, str]]: - return ( - None - if self.attributes is None - else self.attributes.api_path_available_response_codes - ) - - @api_path_available_response_codes.setter - def api_path_available_response_codes( - self, api_path_available_response_codes: Optional[dict[str, str]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.api_path_available_response_codes = ( - api_path_available_response_codes - ) - - @property - def api_path_is_ingress_exposed(self) -> Optional[bool]: - 
return ( - None - if self.attributes is None - else self.attributes.api_path_is_ingress_exposed - ) - - @api_path_is_ingress_exposed.setter - def api_path_is_ingress_exposed(self, api_path_is_ingress_exposed: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.api_path_is_ingress_exposed = api_path_is_ingress_exposed - - @property - def api_spec(self) -> Optional[APISpec]: - return None if self.attributes is None else self.attributes.api_spec - - @api_spec.setter - def api_spec(self, api_spec: Optional[APISpec]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.api_spec = api_spec - - class Attributes(API.Attributes): - api_path_summary: Optional[str] = Field( - None, description="", alias="apiPathSummary" - ) - api_path_raw_u_r_i: Optional[str] = Field( - None, description="", alias="apiPathRawURI" - ) - api_path_is_templated: Optional[bool] = Field( - None, description="", alias="apiPathIsTemplated" - ) - api_path_available_operations: Optional[set[str]] = Field( - None, description="", alias="apiPathAvailableOperations" - ) - api_path_available_response_codes: Optional[dict[str, str]] = Field( - None, description="", alias="apiPathAvailableResponseCodes" - ) - api_path_is_ingress_exposed: Optional[bool] = Field( - None, description="", alias="apiPathIsIngressExposed" - ) - api_spec: Optional[APISpec] = Field( - None, description="", alias="apiSpec" - ) # relationship - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, *, path_raw_uri: str, spec_qualified_name: str - ) -> APIPath.Attributes: - validate_required_fields( - ["path_raw_uri", "spec_qualified_name"], - [path_raw_uri, spec_qualified_name], - ) - - # Split the spec_qualified_name to extract necessary information - fields = spec_qualified_name.split("/") - if len(fields) != 4: - raise ValueError("Invalid spec_qualified_name") - - try: - connector_type = AtlanConnectorType(fields[1]) # type:ignore - except ValueError as e: - raise ValueError("Invalid spec_qualified_name") from e - - return APIPath.Attributes( - api_path_raw_u_r_i=path_raw_uri, - name=path_raw_uri, - api_spec_qualified_name=spec_qualified_name, - connection_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}", - qualified_name=f"{spec_qualified_name}{path_raw_uri}", - connector_name=connector_type.value, - apiSpec=APISpec.ref_by_qualified_name(spec_qualified_name), - ) - - attributes: "APIPath.Attributes" = Field( - default_factory=lambda: APIPath.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -APISpec.Attributes.update_forward_refs() - - -APIPath.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset63.py b/pyatlan/model/assets/asset63.py deleted file mode 100644 index e842fe12d..000000000 --- a/pyatlan/model/assets/asset63.py +++ /dev/null @@ -1,1146 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 Atlan Pte. Ltd. 
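
The APISpec and APIPath classes removed above (asset60.py) follow the same pattern: APISpec.create() expects a three-segment connection qualified name, while APIPath.create() appends path_raw_uri directly onto a four-segment spec qualified name with no extra separator, since the raw URI already starts with "/". A short sketch under the same assumptions as the previous one; the import path and example names are hypothetical, and "api" is assumed to be a valid AtlanConnectorType value.

from pyatlan.model.assets import APIPath, APISpec

spec = APISpec.create(
    name="petstore",
    connection_qualified_name="default/api/1234567890",
)
assert spec.qualified_name == "default/api/1234567890/petstore"

# path_raw_uri doubles as the asset's name and is concatenated (not joined with "/")
# onto the spec's qualified name, mirroring qualified_name=f"{spec_qualified_name}{path_raw_uri}".
path = APIPath.create(
    path_raw_uri="/pets/{petId}",
    spec_qualified_name="default/api/1234567890/petstore",
)
assert path.name == "/pets/{petId}"
assert path.qualified_name == "default/api/1234567890/petstore/pets/{petId}"
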
- - -from __future__ import annotations - -from datetime import datetime -from typing import ClassVar, Optional - -from pydantic import Field, validator - -from pyatlan.model.enums import ( - ADLSAccessTier, - ADLSAccountStatus, - ADLSEncryptionTypes, - ADLSLeaseState, - ADLSLeaseStatus, - ADLSObjectArchiveStatus, - ADLSObjectType, - ADLSPerformance, - ADLSProvisionState, - ADLSReplicationType, - ADLSStorageKind, - AtlanConnectorType, -) -from pyatlan.model.fields.atlan_fields import ( - BooleanField, - KeywordField, - KeywordTextField, - NumericField, - RelationField, - TextField, -) -from pyatlan.utils import get_parent_qualified_name, init_guid, validate_required_fields - -from .asset36 import ADLS - - -class ADLSAccount(ADLS): - """Description""" - - @classmethod - # @validate_arguments() - @init_guid - def create(cls, *, name: str, connection_qualified_name: str) -> ADLSAccount: - validate_required_fields( - ["name", "connection_qualified_name"], [name, connection_qualified_name] - ) - attributes = ADLSAccount.Attributes.create( - name=name, connection_qualified_name=connection_qualified_name - ) - return cls(attributes=attributes) - - type_name: str = Field("ADLSAccount", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "ADLSAccount": - raise ValueError("must be ADLSAccount") - return v - - def __setattr__(self, name, value): - if name in ADLSAccount._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - ADLS_E_TAG: ClassVar[KeywordField] = KeywordField("adlsETag", "adlsETag") - """ - Entity tag for the asset. An entity tag is a hash of the object and represents changes to the contents of an object only, not its metadata. - """ # noqa: E501 - ADLS_ENCRYPTION_TYPE: ClassVar[KeywordField] = KeywordField( - "adlsEncryptionType", "adlsEncryptionType" - ) - """ - Type of encryption for this account. - """ - ADLS_ACCOUNT_RESOURCE_GROUP: ClassVar[KeywordTextField] = KeywordTextField( - "adlsAccountResourceGroup", - "adlsAccountResourceGroup.keyword", - "adlsAccountResourceGroup", - ) - """ - Resource group for this account. - """ - ADLS_ACCOUNT_SUBSCRIPTION: ClassVar[KeywordTextField] = KeywordTextField( - "adlsAccountSubscription", - "adlsAccountSubscription.keyword", - "adlsAccountSubscription", - ) - """ - Subscription for this account. - """ - ADLS_ACCOUNT_PERFORMANCE: ClassVar[KeywordField] = KeywordField( - "adlsAccountPerformance", "adlsAccountPerformance" - ) - """ - Performance of this account. - """ - ADLS_ACCOUNT_REPLICATION: ClassVar[KeywordField] = KeywordField( - "adlsAccountReplication", "adlsAccountReplication" - ) - """ - Replication of this account. - """ - ADLS_ACCOUNT_KIND: ClassVar[KeywordField] = KeywordField( - "adlsAccountKind", "adlsAccountKind" - ) - """ - Kind of this account. - """ - ADLS_PRIMARY_DISK_STATE: ClassVar[KeywordField] = KeywordField( - "adlsPrimaryDiskState", "adlsPrimaryDiskState" - ) - """ - Primary disk state of this account. - """ - ADLS_ACCOUNT_PROVISION_STATE: ClassVar[KeywordField] = KeywordField( - "adlsAccountProvisionState", "adlsAccountProvisionState" - ) - """ - Provision state of this account. - """ - ADLS_ACCOUNT_ACCESS_TIER: ClassVar[KeywordField] = KeywordField( - "adlsAccountAccessTier", "adlsAccountAccessTier" - ) - """ - Access tier of this account. 
- """ - - ADLS_CONTAINERS: ClassVar[RelationField] = RelationField("adlsContainers") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "adls_e_tag", - "adls_encryption_type", - "adls_account_resource_group", - "adls_account_subscription", - "adls_account_performance", - "adls_account_replication", - "adls_account_kind", - "adls_primary_disk_state", - "adls_account_provision_state", - "adls_account_access_tier", - "adls_containers", - ] - - @property - def adls_e_tag(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.adls_e_tag - - @adls_e_tag.setter - def adls_e_tag(self, adls_e_tag: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_e_tag = adls_e_tag - - @property - def adls_encryption_type(self) -> Optional[ADLSEncryptionTypes]: - return None if self.attributes is None else self.attributes.adls_encryption_type - - @adls_encryption_type.setter - def adls_encryption_type(self, adls_encryption_type: Optional[ADLSEncryptionTypes]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_encryption_type = adls_encryption_type - - @property - def adls_account_resource_group(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.adls_account_resource_group - ) - - @adls_account_resource_group.setter - def adls_account_resource_group(self, adls_account_resource_group: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_account_resource_group = adls_account_resource_group - - @property - def adls_account_subscription(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.adls_account_subscription - ) - - @adls_account_subscription.setter - def adls_account_subscription(self, adls_account_subscription: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_account_subscription = adls_account_subscription - - @property - def adls_account_performance(self) -> Optional[ADLSPerformance]: - return ( - None - if self.attributes is None - else self.attributes.adls_account_performance - ) - - @adls_account_performance.setter - def adls_account_performance( - self, adls_account_performance: Optional[ADLSPerformance] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_account_performance = adls_account_performance - - @property - def adls_account_replication(self) -> Optional[ADLSReplicationType]: - return ( - None - if self.attributes is None - else self.attributes.adls_account_replication - ) - - @adls_account_replication.setter - def adls_account_replication( - self, adls_account_replication: Optional[ADLSReplicationType] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_account_replication = adls_account_replication - - @property - def adls_account_kind(self) -> Optional[ADLSStorageKind]: - return None if self.attributes is None else self.attributes.adls_account_kind - - @adls_account_kind.setter - def adls_account_kind(self, adls_account_kind: Optional[ADLSStorageKind]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_account_kind = adls_account_kind - - @property - def adls_primary_disk_state(self) -> Optional[ADLSAccountStatus]: - return ( - None if self.attributes is None else self.attributes.adls_primary_disk_state - ) - - 
@adls_primary_disk_state.setter - def adls_primary_disk_state( - self, adls_primary_disk_state: Optional[ADLSAccountStatus] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_primary_disk_state = adls_primary_disk_state - - @property - def adls_account_provision_state(self) -> Optional[ADLSProvisionState]: - return ( - None - if self.attributes is None - else self.attributes.adls_account_provision_state - ) - - @adls_account_provision_state.setter - def adls_account_provision_state( - self, adls_account_provision_state: Optional[ADLSProvisionState] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_account_provision_state = adls_account_provision_state - - @property - def adls_account_access_tier(self) -> Optional[ADLSAccessTier]: - return ( - None - if self.attributes is None - else self.attributes.adls_account_access_tier - ) - - @adls_account_access_tier.setter - def adls_account_access_tier( - self, adls_account_access_tier: Optional[ADLSAccessTier] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_account_access_tier = adls_account_access_tier - - @property - def adls_containers(self) -> Optional[list[ADLSContainer]]: - return None if self.attributes is None else self.attributes.adls_containers - - @adls_containers.setter - def adls_containers(self, adls_containers: Optional[list[ADLSContainer]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_containers = adls_containers - - class Attributes(ADLS.Attributes): - adls_e_tag: Optional[str] = Field(None, description="", alias="adlsETag") - adls_encryption_type: Optional[ADLSEncryptionTypes] = Field( - None, description="", alias="adlsEncryptionType" - ) - adls_account_resource_group: Optional[str] = Field( - None, description="", alias="adlsAccountResourceGroup" - ) - adls_account_subscription: Optional[str] = Field( - None, description="", alias="adlsAccountSubscription" - ) - adls_account_performance: Optional[ADLSPerformance] = Field( - None, description="", alias="adlsAccountPerformance" - ) - adls_account_replication: Optional[ADLSReplicationType] = Field( - None, description="", alias="adlsAccountReplication" - ) - adls_account_kind: Optional[ADLSStorageKind] = Field( - None, description="", alias="adlsAccountKind" - ) - adls_primary_disk_state: Optional[ADLSAccountStatus] = Field( - None, description="", alias="adlsPrimaryDiskState" - ) - adls_account_provision_state: Optional[ADLSProvisionState] = Field( - None, description="", alias="adlsAccountProvisionState" - ) - adls_account_access_tier: Optional[ADLSAccessTier] = Field( - None, description="", alias="adlsAccountAccessTier" - ) - adls_containers: Optional[list[ADLSContainer]] = Field( - None, description="", alias="adlsContainers" - ) # relationship - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, *, name: str, connection_qualified_name: str - ) -> ADLSAccount.Attributes: - validate_required_fields( - ["name", "connection_qualified_name"], [name, connection_qualified_name] - ) - - # Split the connection_qualified_name to extract necessary information - fields = connection_qualified_name.split("/") - if len(fields) != 3: - raise ValueError("Invalid connection_qualified_name") - - try: - connector_type = AtlanConnectorType(fields[1]) # type:ignore - except ValueError as e: - raise ValueError("Invalid connection_qualified_name") from e - - return ADLSAccount.Attributes( - 
name=name, - qualified_name=f"{connection_qualified_name}/{name}", - connection_qualified_name=connection_qualified_name, - connector_name=connector_type.value, - ) - - attributes: "ADLSAccount.Attributes" = Field( - default_factory=lambda: ADLSAccount.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class ADLSContainer(ADLS): - """Description""" - - @classmethod - # @validate_arguments() - @init_guid - def create(cls, *, name: str, adls_account_qualified_name: str) -> ADLSContainer: - validate_required_fields( - ["name", "adls_account_qualified_name"], [name, adls_account_qualified_name] - ) - attributes = ADLSContainer.Attributes.create( - name=name, adls_account_qualified_name=adls_account_qualified_name - ) - return cls(attributes=attributes) - - type_name: str = Field("ADLSContainer", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "ADLSContainer": - raise ValueError("must be ADLSContainer") - return v - - def __setattr__(self, name, value): - if name in ADLSContainer._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - ADLS_CONTAINER_URL: ClassVar[KeywordTextField] = KeywordTextField( - "adlsContainerUrl", "adlsContainerUrl.keyword", "adlsContainerUrl" - ) - """ - URL of this container. - """ - ADLS_CONTAINER_LEASE_STATE: ClassVar[KeywordField] = KeywordField( - "adlsContainerLeaseState", "adlsContainerLeaseState" - ) - """ - Lease state of this container. - """ - ADLS_CONTAINER_LEASE_STATUS: ClassVar[KeywordField] = KeywordField( - "adlsContainerLeaseStatus", "adlsContainerLeaseStatus" - ) - """ - Lease status of this container. - """ - ADLS_CONTAINER_ENCRYPTION_SCOPE: ClassVar[KeywordField] = KeywordField( - "adlsContainerEncryptionScope", "adlsContainerEncryptionScope" - ) - """ - Encryption scope of this container. - """ - ADLS_CONTAINER_VERSION_LEVEL_IMMUTABILITY_SUPPORT: ClassVar[ - BooleanField - ] = BooleanField( - "adlsContainerVersionLevelImmutabilitySupport", - "adlsContainerVersionLevelImmutabilitySupport", - ) - """ - Whether this container supports version-level immutability (true) or not (false). - """ - ADLS_OBJECT_COUNT: ClassVar[NumericField] = NumericField( - "adlsObjectCount", "adlsObjectCount" - ) - """ - Number of objects that exist within this container. 
- """ - - ADLS_OBJECTS: ClassVar[RelationField] = RelationField("adlsObjects") - """ - TBC - """ - ADLS_ACCOUNT: ClassVar[RelationField] = RelationField("adlsAccount") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "adls_container_url", - "adls_container_lease_state", - "adls_container_lease_status", - "adls_container_encryption_scope", - "adls_container_version_level_immutability_support", - "adls_object_count", - "adls_objects", - "adls_account", - ] - - @property - def adls_container_url(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.adls_container_url - - @adls_container_url.setter - def adls_container_url(self, adls_container_url: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_container_url = adls_container_url - - @property - def adls_container_lease_state(self) -> Optional[ADLSLeaseState]: - return ( - None - if self.attributes is None - else self.attributes.adls_container_lease_state - ) - - @adls_container_lease_state.setter - def adls_container_lease_state( - self, adls_container_lease_state: Optional[ADLSLeaseState] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_container_lease_state = adls_container_lease_state - - @property - def adls_container_lease_status(self) -> Optional[ADLSLeaseStatus]: - return ( - None - if self.attributes is None - else self.attributes.adls_container_lease_status - ) - - @adls_container_lease_status.setter - def adls_container_lease_status( - self, adls_container_lease_status: Optional[ADLSLeaseStatus] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_container_lease_status = adls_container_lease_status - - @property - def adls_container_encryption_scope(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.adls_container_encryption_scope - ) - - @adls_container_encryption_scope.setter - def adls_container_encryption_scope( - self, adls_container_encryption_scope: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_container_encryption_scope = ( - adls_container_encryption_scope - ) - - @property - def adls_container_version_level_immutability_support(self) -> Optional[bool]: - return ( - None - if self.attributes is None - else self.attributes.adls_container_version_level_immutability_support - ) - - @adls_container_version_level_immutability_support.setter - def adls_container_version_level_immutability_support( - self, adls_container_version_level_immutability_support: Optional[bool] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_container_version_level_immutability_support = ( - adls_container_version_level_immutability_support - ) - - @property - def adls_object_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.adls_object_count - - @adls_object_count.setter - def adls_object_count(self, adls_object_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_object_count = adls_object_count - - @property - def adls_objects(self) -> Optional[list[ADLSObject]]: - return None if self.attributes is None else self.attributes.adls_objects - - @adls_objects.setter - def adls_objects(self, adls_objects: Optional[list[ADLSObject]]): - if self.attributes is None: - self.attributes = self.Attributes() - 
self.attributes.adls_objects = adls_objects - - @property - def adls_account(self) -> Optional[ADLSAccount]: - return None if self.attributes is None else self.attributes.adls_account - - @adls_account.setter - def adls_account(self, adls_account: Optional[ADLSAccount]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_account = adls_account - - class Attributes(ADLS.Attributes): - adls_container_url: Optional[str] = Field( - None, description="", alias="adlsContainerUrl" - ) - adls_container_lease_state: Optional[ADLSLeaseState] = Field( - None, description="", alias="adlsContainerLeaseState" - ) - adls_container_lease_status: Optional[ADLSLeaseStatus] = Field( - None, description="", alias="adlsContainerLeaseStatus" - ) - adls_container_encryption_scope: Optional[str] = Field( - None, description="", alias="adlsContainerEncryptionScope" - ) - adls_container_version_level_immutability_support: Optional[bool] = Field( - None, description="", alias="adlsContainerVersionLevelImmutabilitySupport" - ) - adls_object_count: Optional[int] = Field( - None, description="", alias="adlsObjectCount" - ) - adls_objects: Optional[list[ADLSObject]] = Field( - None, description="", alias="adlsObjects" - ) # relationship - adls_account: Optional[ADLSAccount] = Field( - None, description="", alias="adlsAccount" - ) # relationship - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, *, name: str, adls_account_qualified_name: str - ) -> ADLSContainer.Attributes: - validate_required_fields( - ["name", "adls_account_qualified_name"], - [name, adls_account_qualified_name], - ) - - # Split the adls_account_qualified_name to extract necessary information - fields = adls_account_qualified_name.split("/") - if len(fields) != 4: - raise ValueError("Invalid adls_account_qualified_name") - - try: - connector_type = AtlanConnectorType(fields[1]) # type:ignore - except ValueError as e: - raise ValueError("Invalid adls_account_qualified_name") from e - - return ADLSContainer.Attributes( - name=name, - adls_account_qualified_name=adls_account_qualified_name, - connection_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}", - qualified_name=f"{adls_account_qualified_name}/{name}", - connector_name=connector_type.value, - adls_account=ADLSAccount.ref_by_qualified_name( - adls_account_qualified_name - ), - ) - - attributes: "ADLSContainer.Attributes" = Field( - default_factory=lambda: ADLSContainer.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class ADLSObject(ADLS): - """Description""" - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, - *, - name: str, - adls_container_qualified_name: str, - ) -> ADLSObject: - validate_required_fields( - ["name", "adls_container_qualified_name"], - [name, adls_container_qualified_name], - ) - attributes = ADLSObject.Attributes.create( - name=name, adls_container_qualified_name=adls_container_qualified_name - ) - return cls(attributes=attributes) - - type_name: str = Field("ADLSObject", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "ADLSObject": - raise ValueError("must be ADLSObject") - return v - - def __setattr__(self, name, value): - if name in ADLSObject._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - ADLS_OBJECT_URL: ClassVar[KeywordTextField] = KeywordTextField( - "adlsObjectUrl", "adlsObjectUrl.keyword", "adlsObjectUrl" - ) - """ - URL of this object. - """ - ADLS_OBJECT_VERSION_ID: ClassVar[KeywordField] = KeywordField( - "adlsObjectVersionId", "adlsObjectVersionId" - ) - """ - Identifier of the version of this object, from ADLS. - """ - ADLS_OBJECT_TYPE: ClassVar[KeywordField] = KeywordField( - "adlsObjectType", "adlsObjectType" - ) - """ - Type of this object. - """ - ADLS_OBJECT_SIZE: ClassVar[NumericField] = NumericField( - "adlsObjectSize", "adlsObjectSize" - ) - """ - Size of this object. - """ - ADLS_OBJECT_ACCESS_TIER: ClassVar[KeywordField] = KeywordField( - "adlsObjectAccessTier", "adlsObjectAccessTier" - ) - """ - Access tier of this object. - """ - ADLS_OBJECT_ACCESS_TIER_LAST_MODIFIED_TIME: ClassVar[NumericField] = NumericField( - "adlsObjectAccessTierLastModifiedTime", "adlsObjectAccessTierLastModifiedTime" - ) - """ - Time (epoch) when the acccess tier for this object was last modified, in milliseconds. - """ - ADLS_OBJECT_ARCHIVE_STATUS: ClassVar[KeywordField] = KeywordField( - "adlsObjectArchiveStatus", "adlsObjectArchiveStatus" - ) - """ - Archive status of this object. - """ - ADLS_OBJECT_SERVER_ENCRYPTED: ClassVar[BooleanField] = BooleanField( - "adlsObjectServerEncrypted", "adlsObjectServerEncrypted" - ) - """ - Whether this object is server encrypted (true) or not (false). - """ - ADLS_OBJECT_VERSION_LEVEL_IMMUTABILITY_SUPPORT: ClassVar[ - BooleanField - ] = BooleanField( - "adlsObjectVersionLevelImmutabilitySupport", - "adlsObjectVersionLevelImmutabilitySupport", - ) - """ - Whether this object supports version-level immutability (true) or not (false). - """ - ADLS_OBJECT_CACHE_CONTROL: ClassVar[TextField] = TextField( - "adlsObjectCacheControl", "adlsObjectCacheControl" - ) - """ - Cache control of this object. - """ - ADLS_OBJECT_CONTENT_TYPE: ClassVar[TextField] = TextField( - "adlsObjectContentType", "adlsObjectContentType" - ) - """ - Content type of this object. - """ - ADLS_OBJECT_CONTENT_MD5HASH: ClassVar[KeywordField] = KeywordField( - "adlsObjectContentMD5Hash", "adlsObjectContentMD5Hash" - ) - """ - MD5 hash of this object's contents. - """ - ADLS_OBJECT_CONTENT_LANGUAGE: ClassVar[KeywordTextField] = KeywordTextField( - "adlsObjectContentLanguage", - "adlsObjectContentLanguage.keyword", - "adlsObjectContentLanguage", - ) - """ - Language of this object's contents. 
- """ - ADLS_OBJECT_LEASE_STATUS: ClassVar[KeywordField] = KeywordField( - "adlsObjectLeaseStatus", "adlsObjectLeaseStatus" - ) - """ - Status of this object's lease. - """ - ADLS_OBJECT_LEASE_STATE: ClassVar[KeywordField] = KeywordField( - "adlsObjectLeaseState", "adlsObjectLeaseState" - ) - """ - State of this object's lease. - """ - ADLS_OBJECT_METADATA: ClassVar[KeywordField] = KeywordField( - "adlsObjectMetadata", "adlsObjectMetadata" - ) - """ - Metadata associated with this object, from ADLS. - """ - ADLS_CONTAINER_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "adlsContainerQualifiedName", - "adlsContainerQualifiedName", - "adlsContainerQualifiedName.text", - ) - """ - Unique name of the container this object exists within. - """ - - ADLS_CONTAINER: ClassVar[RelationField] = RelationField("adlsContainer") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "adls_object_url", - "adls_object_version_id", - "adls_object_type", - "adls_object_size", - "adls_object_access_tier", - "adls_object_access_tier_last_modified_time", - "adls_object_archive_status", - "adls_object_server_encrypted", - "adls_object_version_level_immutability_support", - "adls_object_cache_control", - "adls_object_content_type", - "adls_object_content_m_d5_hash", - "adls_object_content_language", - "adls_object_lease_status", - "adls_object_lease_state", - "adls_object_metadata", - "adls_container_qualified_name", - "adls_container", - ] - - @property - def adls_object_url(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.adls_object_url - - @adls_object_url.setter - def adls_object_url(self, adls_object_url: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_object_url = adls_object_url - - @property - def adls_object_version_id(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.adls_object_version_id - ) - - @adls_object_version_id.setter - def adls_object_version_id(self, adls_object_version_id: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_object_version_id = adls_object_version_id - - @property - def adls_object_type(self) -> Optional[ADLSObjectType]: - return None if self.attributes is None else self.attributes.adls_object_type - - @adls_object_type.setter - def adls_object_type(self, adls_object_type: Optional[ADLSObjectType]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_object_type = adls_object_type - - @property - def adls_object_size(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.adls_object_size - - @adls_object_size.setter - def adls_object_size(self, adls_object_size: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_object_size = adls_object_size - - @property - def adls_object_access_tier(self) -> Optional[ADLSAccessTier]: - return ( - None if self.attributes is None else self.attributes.adls_object_access_tier - ) - - @adls_object_access_tier.setter - def adls_object_access_tier( - self, adls_object_access_tier: Optional[ADLSAccessTier] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_object_access_tier = adls_object_access_tier - - @property - def adls_object_access_tier_last_modified_time(self) -> Optional[datetime]: - return ( - None - if self.attributes is None - else 
self.attributes.adls_object_access_tier_last_modified_time - ) - - @adls_object_access_tier_last_modified_time.setter - def adls_object_access_tier_last_modified_time( - self, adls_object_access_tier_last_modified_time: Optional[datetime] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_object_access_tier_last_modified_time = ( - adls_object_access_tier_last_modified_time - ) - - @property - def adls_object_archive_status(self) -> Optional[ADLSObjectArchiveStatus]: - return ( - None - if self.attributes is None - else self.attributes.adls_object_archive_status - ) - - @adls_object_archive_status.setter - def adls_object_archive_status( - self, adls_object_archive_status: Optional[ADLSObjectArchiveStatus] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_object_archive_status = adls_object_archive_status - - @property - def adls_object_server_encrypted(self) -> Optional[bool]: - return ( - None - if self.attributes is None - else self.attributes.adls_object_server_encrypted - ) - - @adls_object_server_encrypted.setter - def adls_object_server_encrypted( - self, adls_object_server_encrypted: Optional[bool] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_object_server_encrypted = adls_object_server_encrypted - - @property - def adls_object_version_level_immutability_support(self) -> Optional[bool]: - return ( - None - if self.attributes is None - else self.attributes.adls_object_version_level_immutability_support - ) - - @adls_object_version_level_immutability_support.setter - def adls_object_version_level_immutability_support( - self, adls_object_version_level_immutability_support: Optional[bool] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_object_version_level_immutability_support = ( - adls_object_version_level_immutability_support - ) - - @property - def adls_object_cache_control(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.adls_object_cache_control - ) - - @adls_object_cache_control.setter - def adls_object_cache_control(self, adls_object_cache_control: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_object_cache_control = adls_object_cache_control - - @property - def adls_object_content_type(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.adls_object_content_type - ) - - @adls_object_content_type.setter - def adls_object_content_type(self, adls_object_content_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_object_content_type = adls_object_content_type - - @property - def adls_object_content_m_d5_hash(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.adls_object_content_m_d5_hash - ) - - @adls_object_content_m_d5_hash.setter - def adls_object_content_m_d5_hash( - self, adls_object_content_m_d5_hash: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_object_content_m_d5_hash = adls_object_content_m_d5_hash - - @property - def adls_object_content_language(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.adls_object_content_language - ) - - @adls_object_content_language.setter - def adls_object_content_language(self, 
adls_object_content_language: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_object_content_language = adls_object_content_language - - @property - def adls_object_lease_status(self) -> Optional[ADLSLeaseStatus]: - return ( - None - if self.attributes is None - else self.attributes.adls_object_lease_status - ) - - @adls_object_lease_status.setter - def adls_object_lease_status( - self, adls_object_lease_status: Optional[ADLSLeaseStatus] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_object_lease_status = adls_object_lease_status - - @property - def adls_object_lease_state(self) -> Optional[ADLSLeaseState]: - return ( - None if self.attributes is None else self.attributes.adls_object_lease_state - ) - - @adls_object_lease_state.setter - def adls_object_lease_state( - self, adls_object_lease_state: Optional[ADLSLeaseState] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_object_lease_state = adls_object_lease_state - - @property - def adls_object_metadata(self) -> Optional[dict[str, str]]: - return None if self.attributes is None else self.attributes.adls_object_metadata - - @adls_object_metadata.setter - def adls_object_metadata(self, adls_object_metadata: Optional[dict[str, str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_object_metadata = adls_object_metadata - - @property - def adls_container_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.adls_container_qualified_name - ) - - @adls_container_qualified_name.setter - def adls_container_qualified_name( - self, adls_container_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_container_qualified_name = adls_container_qualified_name - - @property - def adls_container(self) -> Optional[ADLSContainer]: - return None if self.attributes is None else self.attributes.adls_container - - @adls_container.setter - def adls_container(self, adls_container: Optional[ADLSContainer]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.adls_container = adls_container - - class Attributes(ADLS.Attributes): - adls_object_url: Optional[str] = Field( - None, description="", alias="adlsObjectUrl" - ) - adls_object_version_id: Optional[str] = Field( - None, description="", alias="adlsObjectVersionId" - ) - adls_object_type: Optional[ADLSObjectType] = Field( - None, description="", alias="adlsObjectType" - ) - adls_object_size: Optional[int] = Field( - None, description="", alias="adlsObjectSize" - ) - adls_object_access_tier: Optional[ADLSAccessTier] = Field( - None, description="", alias="adlsObjectAccessTier" - ) - adls_object_access_tier_last_modified_time: Optional[datetime] = Field( - None, description="", alias="adlsObjectAccessTierLastModifiedTime" - ) - adls_object_archive_status: Optional[ADLSObjectArchiveStatus] = Field( - None, description="", alias="adlsObjectArchiveStatus" - ) - adls_object_server_encrypted: Optional[bool] = Field( - None, description="", alias="adlsObjectServerEncrypted" - ) - adls_object_version_level_immutability_support: Optional[bool] = Field( - None, description="", alias="adlsObjectVersionLevelImmutabilitySupport" - ) - adls_object_cache_control: Optional[str] = Field( - None, description="", alias="adlsObjectCacheControl" - ) - adls_object_content_type: 
Optional[str] = Field( - None, description="", alias="adlsObjectContentType" - ) - adls_object_content_m_d5_hash: Optional[str] = Field( - None, description="", alias="adlsObjectContentMD5Hash" - ) - adls_object_content_language: Optional[str] = Field( - None, description="", alias="adlsObjectContentLanguage" - ) - adls_object_lease_status: Optional[ADLSLeaseStatus] = Field( - None, description="", alias="adlsObjectLeaseStatus" - ) - adls_object_lease_state: Optional[ADLSLeaseState] = Field( - None, description="", alias="adlsObjectLeaseState" - ) - adls_object_metadata: Optional[dict[str, str]] = Field( - None, description="", alias="adlsObjectMetadata" - ) - adls_container_qualified_name: Optional[str] = Field( - None, description="", alias="adlsContainerQualifiedName" - ) - adls_container: Optional[ADLSContainer] = Field( - None, description="", alias="adlsContainer" - ) # relationship - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, *, name: str, adls_container_qualified_name: str - ) -> ADLSObject.Attributes: - validate_required_fields( - ["name", "adls_container_qualified_name"], - [name, adls_container_qualified_name], - ) - - # Split the qualified_name to extract necessary information - fields = adls_container_qualified_name.split("/") - if len(fields) != 5: - raise ValueError("Invalid qualified_name") - - try: - connector_type = AtlanConnectorType(fields[1]) # type:ignore - except ValueError as e: - raise ValueError("Invalid qualified_name") from e - adls_account_qualified_name = get_parent_qualified_name( - adls_container_qualified_name - ) - - return ADLSObject.Attributes( - name=name, - adls_container_qualified_name=adls_container_qualified_name, - qualified_name=f"{adls_container_qualified_name}/{name}", - connection_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}", - connector_name=connector_type.value, - adls_container=ADLSContainer.ref_by_qualified_name( - adls_container_qualified_name - ), - adls_account_qualified_name=adls_account_qualified_name, - ) - - attributes: "ADLSObject.Attributes" = Field( - default_factory=lambda: ADLSObject.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -ADLSAccount.Attributes.update_forward_refs() - - -ADLSContainer.Attributes.update_forward_refs() - - -ADLSObject.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset65.py b/pyatlan/model/assets/asset65.py deleted file mode 100644 index c5893b0f6..000000000 --- a/pyatlan/model/assets/asset65.py +++ /dev/null @@ -1,958 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 Atlan Pte. Ltd. 
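The `ADLSObject.Attributes.create` removed above derives every identity field from the container's qualified name: the name must split into exactly five `/`-separated segments, segment index 1 must parse as an `AtlanConnectorType`, the first three segments form the connection qualified name, the account qualified name is the container's parent, and the object's own qualified name appends `/{name}` to the container's. A minimal standalone sketch of that convention follows (plain Python, hypothetical example values; the real code goes through `AtlanConnectorType` and `get_parent_qualified_name`, and the meaning of each segment is an assumption here):

```python
# Sketch of the qualified-name layout expected by the removed
# ADLSObject.Attributes.create (all concrete values are hypothetical).
container_qn = "default/adls/1234567890/my-account/my-container"

fields = container_qn.split("/")
if len(fields) != 5:
    raise ValueError("Invalid qualified_name")

connector_name = fields[1]                    # validated via AtlanConnectorType in the real code
connection_qn = "/".join(fields[:3])          # f"{fields[0]}/{fields[1]}/{fields[2]}"
account_qn = container_qn.rsplit("/", 1)[0]   # get_parent_qualified_name(container_qn)
object_qn = f"{container_qn}/report.csv"      # f"{adls_container_qualified_name}/{name}"

print(connector_name, connection_qn, account_qn, object_qn, sep="\n")
```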
- - -from __future__ import annotations - -from typing import ClassVar, Optional - -from pydantic import Field, validator - -from pyatlan.model.enums import AtlanConnectorType -from pyatlan.model.fields.atlan_fields import ( - BooleanField, - KeywordField, - KeywordTextField, - KeywordTextStemmedField, - NumericField, - RelationField, - TextField, -) -from pyatlan.utils import init_guid, validate_required_fields - -from .asset40 import Preset - - -class PresetChart(Preset): - """Description""" - - @classmethod - # @validate_arguments() - @init_guid - def create(cls, *, name: str, preset_dashboard_qualified_name: str) -> PresetChart: - validate_required_fields( - ["name", "preset_dashboard_qualified_name"], - [name, preset_dashboard_qualified_name], - ) - attributes = PresetChart.Attributes.create( - name=name, preset_dashboard_qualified_name=preset_dashboard_qualified_name - ) - return cls(attributes=attributes) - - type_name: str = Field("PresetChart", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "PresetChart": - raise ValueError("must be PresetChart") - return v - - def __setattr__(self, name, value): - if name in PresetChart._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - PRESET_CHART_DESCRIPTION_MARKDOWN: ClassVar[TextField] = TextField( - "presetChartDescriptionMarkdown", "presetChartDescriptionMarkdown" - ) - """ - - """ - PRESET_CHART_FORM_DATA: ClassVar[KeywordField] = KeywordField( - "presetChartFormData", "presetChartFormData" - ) - """ - - """ - - PRESET_DASHBOARD: ClassVar[RelationField] = RelationField("presetDashboard") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "preset_chart_description_markdown", - "preset_chart_form_data", - "preset_dashboard", - ] - - @property - def preset_chart_description_markdown(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.preset_chart_description_markdown - ) - - @preset_chart_description_markdown.setter - def preset_chart_description_markdown( - self, preset_chart_description_markdown: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.preset_chart_description_markdown = ( - preset_chart_description_markdown - ) - - @property - def preset_chart_form_data(self) -> Optional[dict[str, str]]: - return ( - None if self.attributes is None else self.attributes.preset_chart_form_data - ) - - @preset_chart_form_data.setter - def preset_chart_form_data(self, preset_chart_form_data: Optional[dict[str, str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.preset_chart_form_data = preset_chart_form_data - - @property - def preset_dashboard(self) -> Optional[PresetDashboard]: - return None if self.attributes is None else self.attributes.preset_dashboard - - @preset_dashboard.setter - def preset_dashboard(self, preset_dashboard: Optional[PresetDashboard]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.preset_dashboard = preset_dashboard - - class Attributes(Preset.Attributes): - preset_chart_description_markdown: Optional[str] = Field( - None, description="", alias="presetChartDescriptionMarkdown" - ) - preset_chart_form_data: Optional[dict[str, str]] = Field( - None, description="", alias="presetChartFormData" - ) - preset_dashboard: Optional[PresetDashboard] = Field( - None, description="", alias="presetDashboard" - ) # relationship - - 
@classmethod - # @validate_arguments() - @init_guid - def create( - cls, *, name: str, preset_dashboard_qualified_name: str - ) -> PresetChart.Attributes: - validate_required_fields( - ["name", "preset_dashboard_qualified_name"], - [name, preset_dashboard_qualified_name], - ) - - # Split the preset_dashboard_qualified_name to extract necessary information - fields = preset_dashboard_qualified_name.split("/") - if len(fields) != 5: - raise ValueError("Invalid preset_dashboard_qualified_name") - - try: - connector_type = AtlanConnectorType(fields[1]) # type:ignore - except ValueError as e: - raise ValueError("Invalid preset_dashboard_qualified_name") from e - - return PresetChart.Attributes( - name=name, - preset_dashboard_qualified_name=preset_dashboard_qualified_name, - connection_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}", - qualified_name=f"{preset_dashboard_qualified_name}/{name}", - connector_name=connector_type.value, - preset_dashboard=PresetDashboard.ref_by_qualified_name( - preset_dashboard_qualified_name - ), - ) - - attributes: "PresetChart.Attributes" = Field( - default_factory=lambda: PresetChart.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class PresetDataset(Preset): - """Description""" - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, *, name: str, preset_dashboard_qualified_name: str - ) -> PresetDataset: - validate_required_fields( - ["name", "preset_dashboard_qualified_name"], - [name, preset_dashboard_qualified_name], - ) - attributes = PresetDataset.Attributes.create( - name=name, preset_dashboard_qualified_name=preset_dashboard_qualified_name - ) - return cls(attributes=attributes) - - type_name: str = Field("PresetDataset", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "PresetDataset": - raise ValueError("must be PresetDataset") - return v - - def __setattr__(self, name, value): - if name in PresetDataset._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - PRESET_DATASET_DATASOURCE_NAME: ClassVar[ - KeywordTextStemmedField - ] = KeywordTextStemmedField( - "presetDatasetDatasourceName", - "presetDatasetDatasourceName.keyword", - "presetDatasetDatasourceName", - "presetDatasetDatasourceName.stemmed", - ) - """ - - """ - PRESET_DATASET_ID: ClassVar[NumericField] = NumericField( - "presetDatasetId", "presetDatasetId" - ) - """ - - """ - PRESET_DATASET_TYPE: ClassVar[KeywordField] = KeywordField( - "presetDatasetType", "presetDatasetType" - ) - """ - - """ - - PRESET_DASHBOARD: ClassVar[RelationField] = RelationField("presetDashboard") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "preset_dataset_datasource_name", - "preset_dataset_id", - "preset_dataset_type", - "preset_dashboard", - ] - - @property - def preset_dataset_datasource_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.preset_dataset_datasource_name - ) - - @preset_dataset_datasource_name.setter - def preset_dataset_datasource_name( - self, preset_dataset_datasource_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.preset_dataset_datasource_name = preset_dataset_datasource_name - - @property - def preset_dataset_id(self) -> Optional[int]: - return None if self.attributes is None else 
self.attributes.preset_dataset_id - - @preset_dataset_id.setter - def preset_dataset_id(self, preset_dataset_id: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.preset_dataset_id = preset_dataset_id - - @property - def preset_dataset_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.preset_dataset_type - - @preset_dataset_type.setter - def preset_dataset_type(self, preset_dataset_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.preset_dataset_type = preset_dataset_type - - @property - def preset_dashboard(self) -> Optional[PresetDashboard]: - return None if self.attributes is None else self.attributes.preset_dashboard - - @preset_dashboard.setter - def preset_dashboard(self, preset_dashboard: Optional[PresetDashboard]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.preset_dashboard = preset_dashboard - - class Attributes(Preset.Attributes): - preset_dataset_datasource_name: Optional[str] = Field( - None, description="", alias="presetDatasetDatasourceName" - ) - preset_dataset_id: Optional[int] = Field( - None, description="", alias="presetDatasetId" - ) - preset_dataset_type: Optional[str] = Field( - None, description="", alias="presetDatasetType" - ) - preset_dashboard: Optional[PresetDashboard] = Field( - None, description="", alias="presetDashboard" - ) # relationship - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, *, name: str, preset_dashboard_qualified_name: str - ) -> PresetDataset.Attributes: - validate_required_fields( - ["name", "preset_dashboard_qualified_name"], - [name, preset_dashboard_qualified_name], - ) - - # Split the preset_dashboard_qualified_name to extract necessary information - fields = preset_dashboard_qualified_name.split("/") - if len(fields) != 5: - raise ValueError("Invalid preset_dashboard_qualified_name") - - try: - connector_type = AtlanConnectorType(fields[1]) # type:ignore - except ValueError as e: - raise ValueError("Invalid preset_dashboard_qualified_name") from e - - return PresetDataset.Attributes( - name=name, - preset_dashboard_qualified_name=preset_dashboard_qualified_name, - connection_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}", - qualified_name=f"{preset_dashboard_qualified_name}/{name}", - connector_name=connector_type.value, - preset_dashboard=PresetDashboard.ref_by_qualified_name( - preset_dashboard_qualified_name - ), - ) - - attributes: "PresetDataset.Attributes" = Field( - default_factory=lambda: PresetDataset.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class PresetDashboard(Preset): - """Description""" - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, *, name: str, preset_workspace_qualified_name: str - ) -> PresetDashboard: - validate_required_fields( - ["name", "preset_workspace_qualified_name"], - [name, preset_workspace_qualified_name], - ) - attributes = PresetDashboard.Attributes.create( - name=name, preset_workspace_qualified_name=preset_workspace_qualified_name - ) - return cls(attributes=attributes) - - type_name: str = Field("PresetDashboard", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "PresetDashboard": - raise ValueError("must be PresetDashboard") - return v - - def __setattr__(self, name, value): - if name in PresetDashboard._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - PRESET_DASHBOARD_CHANGED_BY_NAME: ClassVar[ - KeywordTextStemmedField - ] = KeywordTextStemmedField( - "presetDashboardChangedByName", - "presetDashboardChangedByName.keyword", - "presetDashboardChangedByName", - "presetDashboardChangedByName.stemmed", - ) - """ - - """ - PRESET_DASHBOARD_CHANGED_BY_URL: ClassVar[KeywordField] = KeywordField( - "presetDashboardChangedByURL", "presetDashboardChangedByURL" - ) - """ - - """ - PRESET_DASHBOARD_IS_MANAGED_EXTERNALLY: ClassVar[BooleanField] = BooleanField( - "presetDashboardIsManagedExternally", "presetDashboardIsManagedExternally" - ) - """ - - """ - PRESET_DASHBOARD_IS_PUBLISHED: ClassVar[BooleanField] = BooleanField( - "presetDashboardIsPublished", "presetDashboardIsPublished" - ) - """ - - """ - PRESET_DASHBOARD_THUMBNAIL_URL: ClassVar[KeywordField] = KeywordField( - "presetDashboardThumbnailURL", "presetDashboardThumbnailURL" - ) - """ - - """ - PRESET_DASHBOARD_CHART_COUNT: ClassVar[NumericField] = NumericField( - "presetDashboardChartCount", "presetDashboardChartCount" - ) - """ - - """ - - PRESET_DATASETS: ClassVar[RelationField] = RelationField("presetDatasets") - """ - TBC - """ - PRESET_CHARTS: ClassVar[RelationField] = RelationField("presetCharts") - """ - TBC - """ - PRESET_WORKSPACE: ClassVar[RelationField] = RelationField("presetWorkspace") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "preset_dashboard_changed_by_name", - "preset_dashboard_changed_by_url", - "preset_dashboard_is_managed_externally", - "preset_dashboard_is_published", - "preset_dashboard_thumbnail_url", - "preset_dashboard_chart_count", - "preset_datasets", - "preset_charts", - "preset_workspace", - ] - - @property - def preset_dashboard_changed_by_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.preset_dashboard_changed_by_name - ) - - @preset_dashboard_changed_by_name.setter - def preset_dashboard_changed_by_name( - self, preset_dashboard_changed_by_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.preset_dashboard_changed_by_name = ( - preset_dashboard_changed_by_name - ) - - @property - def preset_dashboard_changed_by_url(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.preset_dashboard_changed_by_url - ) - - @preset_dashboard_changed_by_url.setter - def preset_dashboard_changed_by_url( - self, preset_dashboard_changed_by_url: Optional[str] - ): - if self.attributes is None: - self.attributes = 
self.Attributes() - self.attributes.preset_dashboard_changed_by_url = ( - preset_dashboard_changed_by_url - ) - - @property - def preset_dashboard_is_managed_externally(self) -> Optional[bool]: - return ( - None - if self.attributes is None - else self.attributes.preset_dashboard_is_managed_externally - ) - - @preset_dashboard_is_managed_externally.setter - def preset_dashboard_is_managed_externally( - self, preset_dashboard_is_managed_externally: Optional[bool] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.preset_dashboard_is_managed_externally = ( - preset_dashboard_is_managed_externally - ) - - @property - def preset_dashboard_is_published(self) -> Optional[bool]: - return ( - None - if self.attributes is None - else self.attributes.preset_dashboard_is_published - ) - - @preset_dashboard_is_published.setter - def preset_dashboard_is_published( - self, preset_dashboard_is_published: Optional[bool] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.preset_dashboard_is_published = preset_dashboard_is_published - - @property - def preset_dashboard_thumbnail_url(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.preset_dashboard_thumbnail_url - ) - - @preset_dashboard_thumbnail_url.setter - def preset_dashboard_thumbnail_url( - self, preset_dashboard_thumbnail_url: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.preset_dashboard_thumbnail_url = preset_dashboard_thumbnail_url - - @property - def preset_dashboard_chart_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.preset_dashboard_chart_count - ) - - @preset_dashboard_chart_count.setter - def preset_dashboard_chart_count(self, preset_dashboard_chart_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.preset_dashboard_chart_count = preset_dashboard_chart_count - - @property - def preset_datasets(self) -> Optional[list[PresetDataset]]: - return None if self.attributes is None else self.attributes.preset_datasets - - @preset_datasets.setter - def preset_datasets(self, preset_datasets: Optional[list[PresetDataset]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.preset_datasets = preset_datasets - - @property - def preset_charts(self) -> Optional[list[PresetChart]]: - return None if self.attributes is None else self.attributes.preset_charts - - @preset_charts.setter - def preset_charts(self, preset_charts: Optional[list[PresetChart]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.preset_charts = preset_charts - - @property - def preset_workspace(self) -> Optional[PresetWorkspace]: - return None if self.attributes is None else self.attributes.preset_workspace - - @preset_workspace.setter - def preset_workspace(self, preset_workspace: Optional[PresetWorkspace]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.preset_workspace = preset_workspace - - class Attributes(Preset.Attributes): - preset_dashboard_changed_by_name: Optional[str] = Field( - None, description="", alias="presetDashboardChangedByName" - ) - preset_dashboard_changed_by_url: Optional[str] = Field( - None, description="", alias="presetDashboardChangedByURL" - ) - preset_dashboard_is_managed_externally: Optional[bool] = Field( - None, description="", 
alias="presetDashboardIsManagedExternally" - ) - preset_dashboard_is_published: Optional[bool] = Field( - None, description="", alias="presetDashboardIsPublished" - ) - preset_dashboard_thumbnail_url: Optional[str] = Field( - None, description="", alias="presetDashboardThumbnailURL" - ) - preset_dashboard_chart_count: Optional[int] = Field( - None, description="", alias="presetDashboardChartCount" - ) - preset_datasets: Optional[list[PresetDataset]] = Field( - None, description="", alias="presetDatasets" - ) # relationship - preset_charts: Optional[list[PresetChart]] = Field( - None, description="", alias="presetCharts" - ) # relationship - preset_workspace: Optional[PresetWorkspace] = Field( - None, description="", alias="presetWorkspace" - ) # relationship - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, *, name: str, preset_workspace_qualified_name: str - ) -> PresetDashboard.Attributes: - validate_required_fields( - ["name", "preset_workspace_qualified_name"], - [name, preset_workspace_qualified_name], - ) - - # Split the preset_workspace_qualified_name to extract necessary information - fields = preset_workspace_qualified_name.split("/") - if len(fields) != 4: - raise ValueError("Invalid preset_workspace_qualified_name") - - try: - connector_type = AtlanConnectorType(fields[1]) # type:ignore - except ValueError as e: - raise ValueError("Invalid preset_workspace_qualified_name") from e - - return PresetDashboard.Attributes( - name=name, - preset_workspace_qualified_name=preset_workspace_qualified_name, - connection_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}", - qualified_name=f"{preset_workspace_qualified_name}/{name}", - connector_name=connector_type.value, - preset_workspace=PresetWorkspace.ref_by_qualified_name( - preset_workspace_qualified_name - ), - ) - - attributes: "PresetDashboard.Attributes" = Field( - default_factory=lambda: PresetDashboard.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class PresetWorkspace(Preset): - """Description""" - - @classmethod - # @validate_arguments() - @init_guid - def create(cls, *, name: str, connection_qualified_name: str) -> PresetWorkspace: - validate_required_fields( - ["name", "connection_qualified_name"], [name, connection_qualified_name] - ) - attributes = PresetWorkspace.Attributes.create( - name=name, connection_qualified_name=connection_qualified_name - ) - return cls(attributes=attributes) - - type_name: str = Field("PresetWorkspace", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "PresetWorkspace": - raise ValueError("must be PresetWorkspace") - return v - - def __setattr__(self, name, value): - if name in PresetWorkspace._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - PRESET_WORKSPACE_PUBLIC_DASHBOARDS_ALLOWED: ClassVar[BooleanField] = BooleanField( - "presetWorkspacePublicDashboardsAllowed", - "presetWorkspacePublicDashboardsAllowed", - ) - """ - - """ - PRESET_WORKSPACE_CLUSTER_ID: ClassVar[NumericField] = NumericField( - "presetWorkspaceClusterId", "presetWorkspaceClusterId" - ) - """ - - """ - PRESET_WORKSPACE_HOSTNAME: ClassVar[KeywordTextField] = KeywordTextField( - "presetWorkspaceHostname", - "presetWorkspaceHostname", - "presetWorkspaceHostname.text", - ) - """ - - """ - PRESET_WORKSPACE_IS_IN_MAINTENANCE_MODE: ClassVar[BooleanField] = BooleanField( - "presetWorkspaceIsInMaintenanceMode", "presetWorkspaceIsInMaintenanceMode" - ) - """ - - """ - PRESET_WORKSPACE_REGION: ClassVar[KeywordTextField] = KeywordTextField( - "presetWorkspaceRegion", "presetWorkspaceRegion", "presetWorkspaceRegion.text" - ) - """ - - """ - PRESET_WORKSPACE_STATUS: ClassVar[KeywordField] = KeywordField( - "presetWorkspaceStatus", "presetWorkspaceStatus" - ) - """ - - """ - PRESET_WORKSPACE_DEPLOYMENT_ID: ClassVar[NumericField] = NumericField( - "presetWorkspaceDeploymentId", "presetWorkspaceDeploymentId" - ) - """ - - """ - PRESET_WORKSPACE_DASHBOARD_COUNT: ClassVar[NumericField] = NumericField( - "presetWorkspaceDashboardCount", "presetWorkspaceDashboardCount" - ) - """ - - """ - PRESET_WORKSPACE_DATASET_COUNT: ClassVar[NumericField] = NumericField( - "presetWorkspaceDatasetCount", "presetWorkspaceDatasetCount" - ) - """ - - """ - - PRESET_DASHBOARDS: ClassVar[RelationField] = RelationField("presetDashboards") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "preset_workspace_public_dashboards_allowed", - "preset_workspace_cluster_id", - "preset_workspace_hostname", - "preset_workspace_is_in_maintenance_mode", - "preset_workspace_region", - "preset_workspace_status", - "preset_workspace_deployment_id", - "preset_workspace_dashboard_count", - "preset_workspace_dataset_count", - "preset_dashboards", - ] - - @property - def preset_workspace_public_dashboards_allowed(self) -> Optional[bool]: - return ( - None - if self.attributes is None - else self.attributes.preset_workspace_public_dashboards_allowed - ) - - @preset_workspace_public_dashboards_allowed.setter - def preset_workspace_public_dashboards_allowed( - self, preset_workspace_public_dashboards_allowed: Optional[bool] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.preset_workspace_public_dashboards_allowed = ( - preset_workspace_public_dashboards_allowed - ) - - @property - def 
preset_workspace_cluster_id(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.preset_workspace_cluster_id - ) - - @preset_workspace_cluster_id.setter - def preset_workspace_cluster_id(self, preset_workspace_cluster_id: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.preset_workspace_cluster_id = preset_workspace_cluster_id - - @property - def preset_workspace_hostname(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.preset_workspace_hostname - ) - - @preset_workspace_hostname.setter - def preset_workspace_hostname(self, preset_workspace_hostname: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.preset_workspace_hostname = preset_workspace_hostname - - @property - def preset_workspace_is_in_maintenance_mode(self) -> Optional[bool]: - return ( - None - if self.attributes is None - else self.attributes.preset_workspace_is_in_maintenance_mode - ) - - @preset_workspace_is_in_maintenance_mode.setter - def preset_workspace_is_in_maintenance_mode( - self, preset_workspace_is_in_maintenance_mode: Optional[bool] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.preset_workspace_is_in_maintenance_mode = ( - preset_workspace_is_in_maintenance_mode - ) - - @property - def preset_workspace_region(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.preset_workspace_region - ) - - @preset_workspace_region.setter - def preset_workspace_region(self, preset_workspace_region: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.preset_workspace_region = preset_workspace_region - - @property - def preset_workspace_status(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.preset_workspace_status - ) - - @preset_workspace_status.setter - def preset_workspace_status(self, preset_workspace_status: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.preset_workspace_status = preset_workspace_status - - @property - def preset_workspace_deployment_id(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.preset_workspace_deployment_id - ) - - @preset_workspace_deployment_id.setter - def preset_workspace_deployment_id( - self, preset_workspace_deployment_id: Optional[int] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.preset_workspace_deployment_id = preset_workspace_deployment_id - - @property - def preset_workspace_dashboard_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.preset_workspace_dashboard_count - ) - - @preset_workspace_dashboard_count.setter - def preset_workspace_dashboard_count( - self, preset_workspace_dashboard_count: Optional[int] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.preset_workspace_dashboard_count = ( - preset_workspace_dashboard_count - ) - - @property - def preset_workspace_dataset_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.preset_workspace_dataset_count - ) - - @preset_workspace_dataset_count.setter - def preset_workspace_dataset_count( - self, preset_workspace_dataset_count: Optional[int] - ): - if self.attributes is None: - self.attributes = 
self.Attributes() - self.attributes.preset_workspace_dataset_count = preset_workspace_dataset_count - - @property - def preset_dashboards(self) -> Optional[list[PresetDashboard]]: - return None if self.attributes is None else self.attributes.preset_dashboards - - @preset_dashboards.setter - def preset_dashboards(self, preset_dashboards: Optional[list[PresetDashboard]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.preset_dashboards = preset_dashboards - - class Attributes(Preset.Attributes): - preset_workspace_public_dashboards_allowed: Optional[bool] = Field( - None, description="", alias="presetWorkspacePublicDashboardsAllowed" - ) - preset_workspace_cluster_id: Optional[int] = Field( - None, description="", alias="presetWorkspaceClusterId" - ) - preset_workspace_hostname: Optional[str] = Field( - None, description="", alias="presetWorkspaceHostname" - ) - preset_workspace_is_in_maintenance_mode: Optional[bool] = Field( - None, description="", alias="presetWorkspaceIsInMaintenanceMode" - ) - preset_workspace_region: Optional[str] = Field( - None, description="", alias="presetWorkspaceRegion" - ) - preset_workspace_status: Optional[str] = Field( - None, description="", alias="presetWorkspaceStatus" - ) - preset_workspace_deployment_id: Optional[int] = Field( - None, description="", alias="presetWorkspaceDeploymentId" - ) - preset_workspace_dashboard_count: Optional[int] = Field( - None, description="", alias="presetWorkspaceDashboardCount" - ) - preset_workspace_dataset_count: Optional[int] = Field( - None, description="", alias="presetWorkspaceDatasetCount" - ) - preset_dashboards: Optional[list[PresetDashboard]] = Field( - None, description="", alias="presetDashboards" - ) # relationship - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, *, name: str, connection_qualified_name: str - ) -> PresetWorkspace.Attributes: - validate_required_fields( - ["name", "connection_qualified_name"], [name, connection_qualified_name] - ) - - # Split the connection_qualified_name to extract necessary information - fields = connection_qualified_name.split("/") - if len(fields) != 3: - raise ValueError("Invalid connection_qualified_name") - - try: - connector_type = AtlanConnectorType(fields[1]) # type:ignore - except ValueError as e: - raise ValueError("Invalid connection_qualified_name") from e - - return PresetWorkspace.Attributes( - name=name, - qualified_name=f"{connection_qualified_name}/{name}", - connection_qualified_name=connection_qualified_name, - connector_name=connector_type.value, - ) - - attributes: "PresetWorkspace.Attributes" = Field( - default_factory=lambda: PresetWorkspace.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -PresetChart.Attributes.update_forward_refs() - - -PresetDataset.Attributes.update_forward_refs() - - -PresetDashboard.Attributes.update_forward_refs() - - -PresetWorkspace.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset66.py b/pyatlan/model/assets/asset66.py deleted file mode 100644 index c995201ba..000000000 --- a/pyatlan/model/assets/asset66.py +++ /dev/null @@ -1,601 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 Atlan Pte. Ltd. 
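Most of the removed lines in these generated asset modules repeat a single delegation pattern: each convenience property reads through to the nested `Attributes` model and returns `None` when `attributes` is unset, each setter lazily instantiates `Attributes` before assigning, and `__setattr__` routes names listed in `_convenience_properties` through `object.__setattr__` so the pydantic machinery does not intercept them. A simplified, dependency-free sketch of the read-through/lazy-init part of that pattern (illustration only, not the generated classes themselves):

```python
from typing import Optional


class WorkspaceStub:
    """Minimal stand-in for a generated asset class (illustration only)."""

    class Attributes:
        def __init__(self) -> None:
            self.preset_workspace_region: Optional[str] = None

    def __init__(self) -> None:
        self.attributes: Optional["WorkspaceStub.Attributes"] = None

    @property
    def preset_workspace_region(self) -> Optional[str]:
        # Read through to the nested attributes, tolerating their absence.
        return None if self.attributes is None else self.attributes.preset_workspace_region

    @preset_workspace_region.setter
    def preset_workspace_region(self, value: Optional[str]) -> None:
        # Lazily create the Attributes container on first write.
        if self.attributes is None:
            self.attributes = self.Attributes()
        self.attributes.preset_workspace_region = value


w = WorkspaceStub()
assert w.preset_workspace_region is None
w.preset_workspace_region = "us-east-1"
assert w.attributes is not None and w.preset_workspace_region == "us-east-1"
```

The lazy creation in the setter is what lets callers assign convenience properties on a freshly constructed asset without first building an `Attributes` instance by hand.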
- - -from __future__ import annotations - -from datetime import datetime -from typing import ClassVar, Optional - -from pydantic import Field, validator - -from pyatlan.model.fields.atlan_fields import ( - BooleanField, - KeywordField, - NumericField, - RelationField, - TextField, -) - -from .asset41 import Mode - - -class ModeReport(Mode): - """Description""" - - type_name: str = Field("ModeReport", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "ModeReport": - raise ValueError("must be ModeReport") - return v - - def __setattr__(self, name, value): - if name in ModeReport._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - MODE_COLLECTION_TOKEN: ClassVar[KeywordField] = KeywordField( - "modeCollectionToken", "modeCollectionToken" - ) - """ - - """ - MODE_REPORT_PUBLISHED_AT: ClassVar[NumericField] = NumericField( - "modeReportPublishedAt", "modeReportPublishedAt" - ) - """ - - """ - MODE_QUERY_COUNT: ClassVar[NumericField] = NumericField( - "modeQueryCount", "modeQueryCount" - ) - """ - - """ - MODE_CHART_COUNT: ClassVar[NumericField] = NumericField( - "modeChartCount", "modeChartCount" - ) - """ - - """ - MODE_QUERY_PREVIEW: ClassVar[TextField] = TextField( - "modeQueryPreview", "modeQueryPreview" - ) - """ - - """ - MODE_IS_PUBLIC: ClassVar[BooleanField] = BooleanField( - "modeIsPublic", "modeIsPublic" - ) - """ - - """ - MODE_IS_SHARED: ClassVar[BooleanField] = BooleanField( - "modeIsShared", "modeIsShared" - ) - """ - - """ - - MODE_QUERIES: ClassVar[RelationField] = RelationField("modeQueries") - """ - TBC - """ - MODE_COLLECTIONS: ClassVar[RelationField] = RelationField("modeCollections") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "mode_collection_token", - "mode_report_published_at", - "mode_query_count", - "mode_chart_count", - "mode_query_preview", - "mode_is_public", - "mode_is_shared", - "mode_queries", - "mode_collections", - ] - - @property - def mode_collection_token(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.mode_collection_token - ) - - @mode_collection_token.setter - def mode_collection_token(self, mode_collection_token: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mode_collection_token = mode_collection_token - - @property - def mode_report_published_at(self) -> Optional[datetime]: - return ( - None - if self.attributes is None - else self.attributes.mode_report_published_at - ) - - @mode_report_published_at.setter - def mode_report_published_at(self, mode_report_published_at: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mode_report_published_at = mode_report_published_at - - @property - def mode_query_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.mode_query_count - - @mode_query_count.setter - def mode_query_count(self, mode_query_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mode_query_count = mode_query_count - - @property - def mode_chart_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.mode_chart_count - - @mode_chart_count.setter - def mode_chart_count(self, mode_chart_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mode_chart_count = mode_chart_count - - 
@property - def mode_query_preview(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.mode_query_preview - - @mode_query_preview.setter - def mode_query_preview(self, mode_query_preview: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mode_query_preview = mode_query_preview - - @property - def mode_is_public(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.mode_is_public - - @mode_is_public.setter - def mode_is_public(self, mode_is_public: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mode_is_public = mode_is_public - - @property - def mode_is_shared(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.mode_is_shared - - @mode_is_shared.setter - def mode_is_shared(self, mode_is_shared: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mode_is_shared = mode_is_shared - - @property - def mode_queries(self) -> Optional[list[ModeQuery]]: - return None if self.attributes is None else self.attributes.mode_queries - - @mode_queries.setter - def mode_queries(self, mode_queries: Optional[list[ModeQuery]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mode_queries = mode_queries - - @property - def mode_collections(self) -> Optional[list[ModeCollection]]: - return None if self.attributes is None else self.attributes.mode_collections - - @mode_collections.setter - def mode_collections(self, mode_collections: Optional[list[ModeCollection]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mode_collections = mode_collections - - class Attributes(Mode.Attributes): - mode_collection_token: Optional[str] = Field( - None, description="", alias="modeCollectionToken" - ) - mode_report_published_at: Optional[datetime] = Field( - None, description="", alias="modeReportPublishedAt" - ) - mode_query_count: Optional[int] = Field( - None, description="", alias="modeQueryCount" - ) - mode_chart_count: Optional[int] = Field( - None, description="", alias="modeChartCount" - ) - mode_query_preview: Optional[str] = Field( - None, description="", alias="modeQueryPreview" - ) - mode_is_public: Optional[bool] = Field( - None, description="", alias="modeIsPublic" - ) - mode_is_shared: Optional[bool] = Field( - None, description="", alias="modeIsShared" - ) - mode_queries: Optional[list[ModeQuery]] = Field( - None, description="", alias="modeQueries" - ) # relationship - mode_collections: Optional[list[ModeCollection]] = Field( - None, description="", alias="modeCollections" - ) # relationship - - attributes: "ModeReport.Attributes" = Field( - default_factory=lambda: ModeReport.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class ModeQuery(Mode): - """Description""" - - type_name: str = Field("ModeQuery", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "ModeQuery": - raise ValueError("must be ModeQuery") - return v - - def __setattr__(self, name, value): - if name in ModeQuery._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - MODE_RAW_QUERY: ClassVar[TextField] = TextField("modeRawQuery", "modeRawQuery") - """ - - """ - MODE_REPORT_IMPORT_COUNT: ClassVar[NumericField] = NumericField( - "modeReportImportCount", "modeReportImportCount" - ) - """ - - """ - - MODE_CHARTS: ClassVar[RelationField] = RelationField("modeCharts") - """ - TBC - """ - MODE_REPORT: ClassVar[RelationField] = RelationField("modeReport") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "mode_raw_query", - "mode_report_import_count", - "mode_charts", - "mode_report", - ] - - @property - def mode_raw_query(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.mode_raw_query - - @mode_raw_query.setter - def mode_raw_query(self, mode_raw_query: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mode_raw_query = mode_raw_query - - @property - def mode_report_import_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.mode_report_import_count - ) - - @mode_report_import_count.setter - def mode_report_import_count(self, mode_report_import_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mode_report_import_count = mode_report_import_count - - @property - def mode_charts(self) -> Optional[list[ModeChart]]: - return None if self.attributes is None else self.attributes.mode_charts - - @mode_charts.setter - def mode_charts(self, mode_charts: Optional[list[ModeChart]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mode_charts = mode_charts - - @property - def mode_report(self) -> Optional[ModeReport]: - return None if self.attributes is None else self.attributes.mode_report - - @mode_report.setter - def mode_report(self, mode_report: Optional[ModeReport]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mode_report = mode_report - - class Attributes(Mode.Attributes): - mode_raw_query: Optional[str] = Field( - None, description="", alias="modeRawQuery" - ) - mode_report_import_count: Optional[int] = Field( - None, description="", alias="modeReportImportCount" - ) - mode_charts: Optional[list[ModeChart]] = Field( - None, description="", alias="modeCharts" - ) # relationship - mode_report: Optional[ModeReport] = Field( - None, description="", alias="modeReport" - ) # relationship - - attributes: "ModeQuery.Attributes" = Field( - default_factory=lambda: ModeQuery.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class ModeChart(Mode): - """Description""" - - type_name: str = Field("ModeChart", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "ModeChart": - raise ValueError("must be ModeChart") - return v - - def __setattr__(self, name, value): - if name in ModeChart._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - MODE_CHART_TYPE: ClassVar[KeywordField] = KeywordField( - "modeChartType", "modeChartType" - ) - """ - Type of chart. - """ - - MODE_QUERY: ClassVar[RelationField] = RelationField("modeQuery") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "mode_chart_type", - "mode_query", - ] - - @property - def mode_chart_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.mode_chart_type - - @mode_chart_type.setter - def mode_chart_type(self, mode_chart_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mode_chart_type = mode_chart_type - - @property - def mode_query(self) -> Optional[ModeQuery]: - return None if self.attributes is None else self.attributes.mode_query - - @mode_query.setter - def mode_query(self, mode_query: Optional[ModeQuery]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mode_query = mode_query - - class Attributes(Mode.Attributes): - mode_chart_type: Optional[str] = Field( - None, description="", alias="modeChartType" - ) - mode_query: Optional[ModeQuery] = Field( - None, description="", alias="modeQuery" - ) # relationship - - attributes: "ModeChart.Attributes" = Field( - default_factory=lambda: ModeChart.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class ModeWorkspace(Mode): - """Description""" - - type_name: str = Field("ModeWorkspace", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "ModeWorkspace": - raise ValueError("must be ModeWorkspace") - return v - - def __setattr__(self, name, value): - if name in ModeWorkspace._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - MODE_COLLECTION_COUNT: ClassVar[NumericField] = NumericField( - "modeCollectionCount", "modeCollectionCount" - ) - """ - Number of collections in this workspace. 
- """ - - MODE_COLLECTIONS: ClassVar[RelationField] = RelationField("modeCollections") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "mode_collection_count", - "mode_collections", - ] - - @property - def mode_collection_count(self) -> Optional[int]: - return ( - None if self.attributes is None else self.attributes.mode_collection_count - ) - - @mode_collection_count.setter - def mode_collection_count(self, mode_collection_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mode_collection_count = mode_collection_count - - @property - def mode_collections(self) -> Optional[list[ModeCollection]]: - return None if self.attributes is None else self.attributes.mode_collections - - @mode_collections.setter - def mode_collections(self, mode_collections: Optional[list[ModeCollection]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mode_collections = mode_collections - - class Attributes(Mode.Attributes): - mode_collection_count: Optional[int] = Field( - None, description="", alias="modeCollectionCount" - ) - mode_collections: Optional[list[ModeCollection]] = Field( - None, description="", alias="modeCollections" - ) # relationship - - attributes: "ModeWorkspace.Attributes" = Field( - default_factory=lambda: ModeWorkspace.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class ModeCollection(Mode): - """Description""" - - type_name: str = Field("ModeCollection", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "ModeCollection": - raise ValueError("must be ModeCollection") - return v - - def __setattr__(self, name, value): - if name in ModeCollection._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - MODE_COLLECTION_TYPE: ClassVar[KeywordField] = KeywordField( - "modeCollectionType", "modeCollectionType" - ) - """ - Type of this collection. - """ - MODE_COLLECTION_STATE: ClassVar[KeywordField] = KeywordField( - "modeCollectionState", "modeCollectionState" - ) - """ - State of this collection. 
- """ - - MODE_WORKSPACE: ClassVar[RelationField] = RelationField("modeWorkspace") - """ - TBC - """ - MODE_REPORTS: ClassVar[RelationField] = RelationField("modeReports") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "mode_collection_type", - "mode_collection_state", - "mode_workspace", - "mode_reports", - ] - - @property - def mode_collection_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.mode_collection_type - - @mode_collection_type.setter - def mode_collection_type(self, mode_collection_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mode_collection_type = mode_collection_type - - @property - def mode_collection_state(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.mode_collection_state - ) - - @mode_collection_state.setter - def mode_collection_state(self, mode_collection_state: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mode_collection_state = mode_collection_state - - @property - def mode_workspace(self) -> Optional[ModeWorkspace]: - return None if self.attributes is None else self.attributes.mode_workspace - - @mode_workspace.setter - def mode_workspace(self, mode_workspace: Optional[ModeWorkspace]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mode_workspace = mode_workspace - - @property - def mode_reports(self) -> Optional[list[ModeReport]]: - return None if self.attributes is None else self.attributes.mode_reports - - @mode_reports.setter - def mode_reports(self, mode_reports: Optional[list[ModeReport]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mode_reports = mode_reports - - class Attributes(Mode.Attributes): - mode_collection_type: Optional[str] = Field( - None, description="", alias="modeCollectionType" - ) - mode_collection_state: Optional[str] = Field( - None, description="", alias="modeCollectionState" - ) - mode_workspace: Optional[ModeWorkspace] = Field( - None, description="", alias="modeWorkspace" - ) # relationship - mode_reports: Optional[list[ModeReport]] = Field( - None, description="", alias="modeReports" - ) # relationship - - attributes: "ModeCollection.Attributes" = Field( - default_factory=lambda: ModeCollection.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -ModeReport.Attributes.update_forward_refs() - - -ModeQuery.Attributes.update_forward_refs() - - -ModeChart.Attributes.update_forward_refs() - - -ModeWorkspace.Attributes.update_forward_refs() - - -ModeCollection.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset67.py b/pyatlan/model/assets/asset67.py deleted file mode 100644 index fd0cbfcdf..000000000 --- a/pyatlan/model/assets/asset67.py +++ /dev/null @@ -1,195 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 Atlan Pte. Ltd. 
- - -from __future__ import annotations - -from typing import ClassVar, Optional - -from pydantic import Field, validator - -from pyatlan.model.fields.atlan_fields import ( - KeywordTextField, - NumericField, - RelationField, -) - -from .asset42 import Sigma - - -class SigmaDatasetColumn(Sigma): - """Description""" - - type_name: str = Field("SigmaDatasetColumn", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "SigmaDatasetColumn": - raise ValueError("must be SigmaDatasetColumn") - return v - - def __setattr__(self, name, value): - if name in SigmaDatasetColumn._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SIGMA_DATASET_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "sigmaDatasetQualifiedName", - "sigmaDatasetQualifiedName", - "sigmaDatasetQualifiedName.text", - ) - """ - Unique name of the dataset in which this column exists. - """ - SIGMA_DATASET_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "sigmaDatasetName", "sigmaDatasetName.keyword", "sigmaDatasetName" - ) - """ - Simple name of the dataset in which this column exists. - """ - - SIGMA_DATASET: ClassVar[RelationField] = RelationField("sigmaDataset") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "sigma_dataset_qualified_name", - "sigma_dataset_name", - "sigma_dataset", - ] - - @property - def sigma_dataset_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.sigma_dataset_qualified_name - ) - - @sigma_dataset_qualified_name.setter - def sigma_dataset_qualified_name(self, sigma_dataset_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sigma_dataset_qualified_name = sigma_dataset_qualified_name - - @property - def sigma_dataset_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.sigma_dataset_name - - @sigma_dataset_name.setter - def sigma_dataset_name(self, sigma_dataset_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sigma_dataset_name = sigma_dataset_name - - @property - def sigma_dataset(self) -> Optional[SigmaDataset]: - return None if self.attributes is None else self.attributes.sigma_dataset - - @sigma_dataset.setter - def sigma_dataset(self, sigma_dataset: Optional[SigmaDataset]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sigma_dataset = sigma_dataset - - class Attributes(Sigma.Attributes): - sigma_dataset_qualified_name: Optional[str] = Field( - None, description="", alias="sigmaDatasetQualifiedName" - ) - sigma_dataset_name: Optional[str] = Field( - None, description="", alias="sigmaDatasetName" - ) - sigma_dataset: Optional[SigmaDataset] = Field( - None, description="", alias="sigmaDataset" - ) # relationship - - attributes: "SigmaDatasetColumn.Attributes" = Field( - default_factory=lambda: SigmaDatasetColumn.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class SigmaDataset(Sigma): - """Description""" - - type_name: str = Field("SigmaDataset", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "SigmaDataset": - raise ValueError("must be SigmaDataset") - return v - - def __setattr__(self, name, value): - if name in SigmaDataset._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SIGMA_DATASET_COLUMN_COUNT: ClassVar[NumericField] = NumericField( - "sigmaDatasetColumnCount", "sigmaDatasetColumnCount" - ) - """ - Number of columns in this dataset. - """ - - SIGMA_DATASET_COLUMNS: ClassVar[RelationField] = RelationField( - "sigmaDatasetColumns" - ) - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "sigma_dataset_column_count", - "sigma_dataset_columns", - ] - - @property - def sigma_dataset_column_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.sigma_dataset_column_count - ) - - @sigma_dataset_column_count.setter - def sigma_dataset_column_count(self, sigma_dataset_column_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sigma_dataset_column_count = sigma_dataset_column_count - - @property - def sigma_dataset_columns(self) -> Optional[list[SigmaDatasetColumn]]: - return ( - None if self.attributes is None else self.attributes.sigma_dataset_columns - ) - - @sigma_dataset_columns.setter - def sigma_dataset_columns( - self, sigma_dataset_columns: Optional[list[SigmaDatasetColumn]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sigma_dataset_columns = sigma_dataset_columns - - class Attributes(Sigma.Attributes): - sigma_dataset_column_count: Optional[int] = Field( - None, description="", alias="sigmaDatasetColumnCount" - ) - sigma_dataset_columns: Optional[list[SigmaDatasetColumn]] = Field( - None, description="", alias="sigmaDatasetColumns" - ) # relationship - - attributes: "SigmaDataset.Attributes" = Field( - default_factory=lambda: SigmaDataset.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -SigmaDatasetColumn.Attributes.update_forward_refs() - - -SigmaDataset.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset68.py b/pyatlan/model/assets/asset68.py deleted file mode 100644 index cddf6b8ed..000000000 --- a/pyatlan/model/assets/asset68.py +++ /dev/null @@ -1,441 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 Atlan Pte. Ltd. 
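Every removed class also pins its `type_name`: the field is declared with `Field("X", allow_mutation=False)` and a `@validator("type_name")` rejects any other value, so constructing or deserializing an instance with a mismatched type name fails immediately. A reduced `pydantic.v1` sketch of that guard (hypothetical model name; the real classes inherit additional config and a custom `__setattr__` from their base asset class):

```python
from pydantic.v1 import BaseModel, Field, ValidationError, validator


class SigmaWorkbookStub(BaseModel):
    """Illustration of the type_name guard used by the generated asset classes."""

    type_name: str = Field("SigmaWorkbook", allow_mutation=False)

    class Config:
        validate_assignment = True  # needed for allow_mutation=False to be enforced on assignment

    @validator("type_name")
    def validate_type_name(cls, v):
        if v != "SigmaWorkbook":
            raise ValueError("must be SigmaWorkbook")
        return v


SigmaWorkbookStub()                       # default value passes
try:
    SigmaWorkbookStub(type_name="Other")  # any other type name is rejected
except ValidationError as exc:
    print(exc)
```

With `validate_assignment` enabled, later attempts to overwrite `type_name` on an existing instance are also rejected, matching the `allow_mutation=False` intent.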
- - -from __future__ import annotations - -from typing import ClassVar, Optional - -from pydantic import Field, validator - -from pyatlan.model.fields.atlan_fields import ( - BooleanField, - KeywordField, - NumericField, - RelationField, - TextField, -) - -from .asset42 import Sigma - - -class SigmaWorkbook(Sigma): - """Description""" - - type_name: str = Field("SigmaWorkbook", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "SigmaWorkbook": - raise ValueError("must be SigmaWorkbook") - return v - - def __setattr__(self, name, value): - if name in SigmaWorkbook._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SIGMA_PAGE_COUNT: ClassVar[NumericField] = NumericField( - "sigmaPageCount", "sigmaPageCount" - ) - """ - Number of pages in this workbook. - """ - - SIGMA_PAGES: ClassVar[RelationField] = RelationField("sigmaPages") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "sigma_page_count", - "sigma_pages", - ] - - @property - def sigma_page_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.sigma_page_count - - @sigma_page_count.setter - def sigma_page_count(self, sigma_page_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sigma_page_count = sigma_page_count - - @property - def sigma_pages(self) -> Optional[list[SigmaPage]]: - return None if self.attributes is None else self.attributes.sigma_pages - - @sigma_pages.setter - def sigma_pages(self, sigma_pages: Optional[list[SigmaPage]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sigma_pages = sigma_pages - - class Attributes(Sigma.Attributes): - sigma_page_count: Optional[int] = Field( - None, description="", alias="sigmaPageCount" - ) - sigma_pages: Optional[list[SigmaPage]] = Field( - None, description="", alias="sigmaPages" - ) # relationship - - attributes: "SigmaWorkbook.Attributes" = Field( - default_factory=lambda: SigmaWorkbook.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class SigmaDataElementField(Sigma): - """Description""" - - type_name: str = Field("SigmaDataElementField", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "SigmaDataElementField": - raise ValueError("must be SigmaDataElementField") - return v - - def __setattr__(self, name, value): - if name in SigmaDataElementField._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SIGMA_DATA_ELEMENT_FIELD_IS_HIDDEN: ClassVar[BooleanField] = BooleanField( - "sigmaDataElementFieldIsHidden", "sigmaDataElementFieldIsHidden" - ) - """ - Whether this field is hidden (true) or not (false). 
- """ - SIGMA_DATA_ELEMENT_FIELD_FORMULA: ClassVar[TextField] = TextField( - "sigmaDataElementFieldFormula", "sigmaDataElementFieldFormula" - ) - """ - - """ - - SIGMA_DATA_ELEMENT: ClassVar[RelationField] = RelationField("sigmaDataElement") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "sigma_data_element_field_is_hidden", - "sigma_data_element_field_formula", - "sigma_data_element", - ] - - @property - def sigma_data_element_field_is_hidden(self) -> Optional[bool]: - return ( - None - if self.attributes is None - else self.attributes.sigma_data_element_field_is_hidden - ) - - @sigma_data_element_field_is_hidden.setter - def sigma_data_element_field_is_hidden( - self, sigma_data_element_field_is_hidden: Optional[bool] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sigma_data_element_field_is_hidden = ( - sigma_data_element_field_is_hidden - ) - - @property - def sigma_data_element_field_formula(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.sigma_data_element_field_formula - ) - - @sigma_data_element_field_formula.setter - def sigma_data_element_field_formula( - self, sigma_data_element_field_formula: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sigma_data_element_field_formula = ( - sigma_data_element_field_formula - ) - - @property - def sigma_data_element(self) -> Optional[SigmaDataElement]: - return None if self.attributes is None else self.attributes.sigma_data_element - - @sigma_data_element.setter - def sigma_data_element(self, sigma_data_element: Optional[SigmaDataElement]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sigma_data_element = sigma_data_element - - class Attributes(Sigma.Attributes): - sigma_data_element_field_is_hidden: Optional[bool] = Field( - None, description="", alias="sigmaDataElementFieldIsHidden" - ) - sigma_data_element_field_formula: Optional[str] = Field( - None, description="", alias="sigmaDataElementFieldFormula" - ) - sigma_data_element: Optional[SigmaDataElement] = Field( - None, description="", alias="sigmaDataElement" - ) # relationship - - attributes: "SigmaDataElementField.Attributes" = Field( - default_factory=lambda: SigmaDataElementField.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class SigmaPage(Sigma): - """Description""" - - type_name: str = Field("SigmaPage", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "SigmaPage": - raise ValueError("must be SigmaPage") - return v - - def __setattr__(self, name, value): - if name in SigmaPage._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SIGMA_DATA_ELEMENT_COUNT: ClassVar[NumericField] = NumericField( - "sigmaDataElementCount", "sigmaDataElementCount" - ) - """ - Number of data elements on this page. 
- """ - - SIGMA_DATA_ELEMENTS: ClassVar[RelationField] = RelationField("sigmaDataElements") - """ - TBC - """ - SIGMA_WORKBOOK: ClassVar[RelationField] = RelationField("sigmaWorkbook") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "sigma_data_element_count", - "sigma_data_elements", - "sigma_workbook", - ] - - @property - def sigma_data_element_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.sigma_data_element_count - ) - - @sigma_data_element_count.setter - def sigma_data_element_count(self, sigma_data_element_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sigma_data_element_count = sigma_data_element_count - - @property - def sigma_data_elements(self) -> Optional[list[SigmaDataElement]]: - return None if self.attributes is None else self.attributes.sigma_data_elements - - @sigma_data_elements.setter - def sigma_data_elements( - self, sigma_data_elements: Optional[list[SigmaDataElement]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sigma_data_elements = sigma_data_elements - - @property - def sigma_workbook(self) -> Optional[SigmaWorkbook]: - return None if self.attributes is None else self.attributes.sigma_workbook - - @sigma_workbook.setter - def sigma_workbook(self, sigma_workbook: Optional[SigmaWorkbook]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sigma_workbook = sigma_workbook - - class Attributes(Sigma.Attributes): - sigma_data_element_count: Optional[int] = Field( - None, description="", alias="sigmaDataElementCount" - ) - sigma_data_elements: Optional[list[SigmaDataElement]] = Field( - None, description="", alias="sigmaDataElements" - ) # relationship - sigma_workbook: Optional[SigmaWorkbook] = Field( - None, description="", alias="sigmaWorkbook" - ) # relationship - - attributes: "SigmaPage.Attributes" = Field( - default_factory=lambda: SigmaPage.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class SigmaDataElement(Sigma): - """Description""" - - type_name: str = Field("SigmaDataElement", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "SigmaDataElement": - raise ValueError("must be SigmaDataElement") - return v - - def __setattr__(self, name, value): - if name in SigmaDataElement._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SIGMA_DATA_ELEMENT_QUERY: ClassVar[KeywordField] = KeywordField( - "sigmaDataElementQuery", "sigmaDataElementQuery" - ) - """ - - """ - SIGMA_DATA_ELEMENT_TYPE: ClassVar[KeywordField] = KeywordField( - "sigmaDataElementType", "sigmaDataElementType" - ) - """ - - """ - SIGMA_DATA_ELEMENT_FIELD_COUNT: ClassVar[NumericField] = NumericField( - "sigmaDataElementFieldCount", "sigmaDataElementFieldCount" - ) - """ - Number of fields in this data element. 
- """ - - SIGMA_PAGE: ClassVar[RelationField] = RelationField("sigmaPage") - """ - TBC - """ - SIGMA_DATA_ELEMENT_FIELDS: ClassVar[RelationField] = RelationField( - "sigmaDataElementFields" - ) - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "sigma_data_element_query", - "sigma_data_element_type", - "sigma_data_element_field_count", - "sigma_page", - "sigma_data_element_fields", - ] - - @property - def sigma_data_element_query(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.sigma_data_element_query - ) - - @sigma_data_element_query.setter - def sigma_data_element_query(self, sigma_data_element_query: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sigma_data_element_query = sigma_data_element_query - - @property - def sigma_data_element_type(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.sigma_data_element_type - ) - - @sigma_data_element_type.setter - def sigma_data_element_type(self, sigma_data_element_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sigma_data_element_type = sigma_data_element_type - - @property - def sigma_data_element_field_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.sigma_data_element_field_count - ) - - @sigma_data_element_field_count.setter - def sigma_data_element_field_count( - self, sigma_data_element_field_count: Optional[int] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sigma_data_element_field_count = sigma_data_element_field_count - - @property - def sigma_page(self) -> Optional[SigmaPage]: - return None if self.attributes is None else self.attributes.sigma_page - - @sigma_page.setter - def sigma_page(self, sigma_page: Optional[SigmaPage]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sigma_page = sigma_page - - @property - def sigma_data_element_fields(self) -> Optional[list[SigmaDataElementField]]: - return ( - None - if self.attributes is None - else self.attributes.sigma_data_element_fields - ) - - @sigma_data_element_fields.setter - def sigma_data_element_fields( - self, sigma_data_element_fields: Optional[list[SigmaDataElementField]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sigma_data_element_fields = sigma_data_element_fields - - class Attributes(Sigma.Attributes): - sigma_data_element_query: Optional[str] = Field( - None, description="", alias="sigmaDataElementQuery" - ) - sigma_data_element_type: Optional[str] = Field( - None, description="", alias="sigmaDataElementType" - ) - sigma_data_element_field_count: Optional[int] = Field( - None, description="", alias="sigmaDataElementFieldCount" - ) - sigma_page: Optional[SigmaPage] = Field( - None, description="", alias="sigmaPage" - ) # relationship - sigma_data_element_fields: Optional[list[SigmaDataElementField]] = Field( - None, description="", alias="sigmaDataElementFields" - ) # relationship - - attributes: "SigmaDataElement.Attributes" = Field( - default_factory=lambda: SigmaDataElement.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -SigmaWorkbook.Attributes.update_forward_refs() - - -SigmaDataElementField.Attributes.update_forward_refs() - - -SigmaPage.Attributes.update_forward_refs() - - -SigmaDataElement.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset69.py b/pyatlan/model/assets/asset69.py deleted file mode 100644 index 4c13614a1..000000000 --- a/pyatlan/model/assets/asset69.py +++ /dev/null @@ -1,2200 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 Atlan Pte. Ltd. - - -from __future__ import annotations - -from typing import ClassVar, Optional - -from pydantic import Field, validator - -from pyatlan.model.fields.atlan_fields import ( - BooleanField, - KeywordField, - KeywordTextField, - RelationField, -) - -from .asset43 import Tableau - - -class TableauWorkbook(Tableau): - """Description""" - - type_name: str = Field("TableauWorkbook", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "TableauWorkbook": - raise ValueError("must be TableauWorkbook") - return v - - def __setattr__(self, name, value): - if name in TableauWorkbook._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SITE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "siteQualifiedName", "siteQualifiedName" - ) - """ - Unique name of the site in which this workbook exists. - """ - PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "projectQualifiedName", "projectQualifiedName" - ) - """ - Unique name of the project in which this workbook exists. - """ - TOP_LEVEL_PROJECT_NAME: ClassVar[KeywordField] = KeywordField( - "topLevelProjectName", "topLevelProjectName" - ) - """ - Simple name of the top-level project in which this workbook exists. - """ - TOP_LEVEL_PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "topLevelProjectQualifiedName", "topLevelProjectQualifiedName" - ) - """ - Unique name of the top-level project in which this workbook exists. - """ - PROJECT_HIERARCHY: ClassVar[KeywordField] = KeywordField( - "projectHierarchy", "projectHierarchy" - ) - """ - List of top-level projects with their nested child projects. 
- """ - - PROJECT: ClassVar[RelationField] = RelationField("project") - """ - TBC - """ - DASHBOARDS: ClassVar[RelationField] = RelationField("dashboards") - """ - TBC - """ - WORKSHEETS: ClassVar[RelationField] = RelationField("worksheets") - """ - TBC - """ - DATASOURCES: ClassVar[RelationField] = RelationField("datasources") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "site_qualified_name", - "project_qualified_name", - "top_level_project_name", - "top_level_project_qualified_name", - "project_hierarchy", - "project", - "dashboards", - "worksheets", - "datasources", - ] - - @property - def site_qualified_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.site_qualified_name - - @site_qualified_name.setter - def site_qualified_name(self, site_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.site_qualified_name = site_qualified_name - - @property - def project_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.project_qualified_name - ) - - @project_qualified_name.setter - def project_qualified_name(self, project_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project_qualified_name = project_qualified_name - - @property - def top_level_project_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.top_level_project_name - ) - - @top_level_project_name.setter - def top_level_project_name(self, top_level_project_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.top_level_project_name = top_level_project_name - - @property - def top_level_project_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.top_level_project_qualified_name - ) - - @top_level_project_qualified_name.setter - def top_level_project_qualified_name( - self, top_level_project_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.top_level_project_qualified_name = ( - top_level_project_qualified_name - ) - - @property - def project_hierarchy(self) -> Optional[list[dict[str, str]]]: - return None if self.attributes is None else self.attributes.project_hierarchy - - @project_hierarchy.setter - def project_hierarchy(self, project_hierarchy: Optional[list[dict[str, str]]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project_hierarchy = project_hierarchy - - @property - def project(self) -> Optional[TableauProject]: - return None if self.attributes is None else self.attributes.project - - @project.setter - def project(self, project: Optional[TableauProject]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project = project - - @property - def dashboards(self) -> Optional[list[TableauDashboard]]: - return None if self.attributes is None else self.attributes.dashboards - - @dashboards.setter - def dashboards(self, dashboards: Optional[list[TableauDashboard]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dashboards = dashboards - - @property - def worksheets(self) -> Optional[list[TableauWorksheet]]: - return None if self.attributes is None else self.attributes.worksheets - - @worksheets.setter - def worksheets(self, worksheets: 
Optional[list[TableauWorksheet]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.worksheets = worksheets - - @property - def datasources(self) -> Optional[list[TableauDatasource]]: - return None if self.attributes is None else self.attributes.datasources - - @datasources.setter - def datasources(self, datasources: Optional[list[TableauDatasource]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.datasources = datasources - - class Attributes(Tableau.Attributes): - site_qualified_name: Optional[str] = Field( - None, description="", alias="siteQualifiedName" - ) - project_qualified_name: Optional[str] = Field( - None, description="", alias="projectQualifiedName" - ) - top_level_project_name: Optional[str] = Field( - None, description="", alias="topLevelProjectName" - ) - top_level_project_qualified_name: Optional[str] = Field( - None, description="", alias="topLevelProjectQualifiedName" - ) - project_hierarchy: Optional[list[dict[str, str]]] = Field( - None, description="", alias="projectHierarchy" - ) - project: Optional[TableauProject] = Field( - None, description="", alias="project" - ) # relationship - dashboards: Optional[list[TableauDashboard]] = Field( - None, description="", alias="dashboards" - ) # relationship - worksheets: Optional[list[TableauWorksheet]] = Field( - None, description="", alias="worksheets" - ) # relationship - datasources: Optional[list[TableauDatasource]] = Field( - None, description="", alias="datasources" - ) # relationship - - attributes: "TableauWorkbook.Attributes" = Field( - default_factory=lambda: TableauWorkbook.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class TableauDatasourceField(Tableau): - """Description""" - - type_name: str = Field("TableauDatasourceField", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "TableauDatasourceField": - raise ValueError("must be TableauDatasourceField") - return v - - def __setattr__(self, name, value): - if name in TableauDatasourceField._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SITE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "siteQualifiedName", "siteQualifiedName" - ) - """ - Unique name of the site in which this datasource field exists. - """ - PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "projectQualifiedName", "projectQualifiedName" - ) - """ - Unique name of the project in which this datasource field exists. - """ - TOP_LEVEL_PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "topLevelProjectQualifiedName", "topLevelProjectQualifiedName" - ) - """ - Unique name of the top-level project in which this datasource field exists. - """ - WORKBOOK_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "workbookQualifiedName", "workbookQualifiedName" - ) - """ - Unique name of the workbook in which this datasource field exists. - """ - DATASOURCE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "datasourceQualifiedName", "datasourceQualifiedName" - ) - """ - Unique name of the datasource in which this datasource field exists. - """ - PROJECT_HIERARCHY: ClassVar[KeywordField] = KeywordField( - "projectHierarchy", "projectHierarchy" - ) - """ - List of top-level projects and their nested child projects. 
- """ - FULLY_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "fullyQualifiedName", "fullyQualifiedName" - ) - """ - Name used internally in Tableau to uniquely identify this field. - """ - TABLEAU_DATASOURCE_FIELD_DATA_CATEGORY: ClassVar[KeywordField] = KeywordField( - "tableauDatasourceFieldDataCategory", "tableauDatasourceFieldDataCategory" - ) - """ - Data category of this field. - """ - TABLEAU_DATASOURCE_FIELD_ROLE: ClassVar[KeywordField] = KeywordField( - "tableauDatasourceFieldRole", "tableauDatasourceFieldRole" - ) - """ - Role of this field, for example: 'dimension', 'measure', or 'unknown'. - """ - TABLEAU_DATASOURCE_FIELD_DATA_TYPE: ClassVar[KeywordTextField] = KeywordTextField( - "tableauDatasourceFieldDataType", - "tableauDatasourceFieldDataType", - "tableauDatasourceFieldDataType.text", - ) - """ - Data type of this field. - """ - UPSTREAM_TABLES: ClassVar[KeywordField] = KeywordField( - "upstreamTables", "upstreamTables" - ) - """ - Tables upstream to this datasource field. - """ - TABLEAU_DATASOURCE_FIELD_FORMULA: ClassVar[KeywordField] = KeywordField( - "tableauDatasourceFieldFormula", "tableauDatasourceFieldFormula" - ) - """ - Formula for this field. - """ - TABLEAU_DATASOURCE_FIELD_BIN_SIZE: ClassVar[KeywordField] = KeywordField( - "tableauDatasourceFieldBinSize", "tableauDatasourceFieldBinSize" - ) - """ - Bin size of this field. - """ - UPSTREAM_COLUMNS: ClassVar[KeywordField] = KeywordField( - "upstreamColumns", "upstreamColumns" - ) - """ - Columns upstream to this field. - """ - UPSTREAM_FIELDS: ClassVar[KeywordField] = KeywordField( - "upstreamFields", "upstreamFields" - ) - """ - Fields upstream to this field. - """ - DATASOURCE_FIELD_TYPE: ClassVar[KeywordField] = KeywordField( - "datasourceFieldType", "datasourceFieldType" - ) - """ - Type of this datasource field. 
- """ - - WORKSHEETS: ClassVar[RelationField] = RelationField("worksheets") - """ - TBC - """ - DATASOURCE: ClassVar[RelationField] = RelationField("datasource") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "site_qualified_name", - "project_qualified_name", - "top_level_project_qualified_name", - "workbook_qualified_name", - "datasource_qualified_name", - "project_hierarchy", - "fully_qualified_name", - "tableau_datasource_field_data_category", - "tableau_datasource_field_role", - "tableau_datasource_field_data_type", - "upstream_tables", - "tableau_datasource_field_formula", - "tableau_datasource_field_bin_size", - "upstream_columns", - "upstream_fields", - "datasource_field_type", - "worksheets", - "datasource", - ] - - @property - def site_qualified_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.site_qualified_name - - @site_qualified_name.setter - def site_qualified_name(self, site_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.site_qualified_name = site_qualified_name - - @property - def project_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.project_qualified_name - ) - - @project_qualified_name.setter - def project_qualified_name(self, project_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project_qualified_name = project_qualified_name - - @property - def top_level_project_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.top_level_project_qualified_name - ) - - @top_level_project_qualified_name.setter - def top_level_project_qualified_name( - self, top_level_project_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.top_level_project_qualified_name = ( - top_level_project_qualified_name - ) - - @property - def workbook_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.workbook_qualified_name - ) - - @workbook_qualified_name.setter - def workbook_qualified_name(self, workbook_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workbook_qualified_name = workbook_qualified_name - - @property - def datasource_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.datasource_qualified_name - ) - - @datasource_qualified_name.setter - def datasource_qualified_name(self, datasource_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.datasource_qualified_name = datasource_qualified_name - - @property - def project_hierarchy(self) -> Optional[list[dict[str, str]]]: - return None if self.attributes is None else self.attributes.project_hierarchy - - @project_hierarchy.setter - def project_hierarchy(self, project_hierarchy: Optional[list[dict[str, str]]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project_hierarchy = project_hierarchy - - @property - def fully_qualified_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.fully_qualified_name - - @fully_qualified_name.setter - def fully_qualified_name(self, fully_qualified_name: Optional[str]): - if self.attributes is None: - 
self.attributes = self.Attributes() - self.attributes.fully_qualified_name = fully_qualified_name - - @property - def tableau_datasource_field_data_category(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.tableau_datasource_field_data_category - ) - - @tableau_datasource_field_data_category.setter - def tableau_datasource_field_data_category( - self, tableau_datasource_field_data_category: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.tableau_datasource_field_data_category = ( - tableau_datasource_field_data_category - ) - - @property - def tableau_datasource_field_role(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.tableau_datasource_field_role - ) - - @tableau_datasource_field_role.setter - def tableau_datasource_field_role( - self, tableau_datasource_field_role: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.tableau_datasource_field_role = tableau_datasource_field_role - - @property - def tableau_datasource_field_data_type(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.tableau_datasource_field_data_type - ) - - @tableau_datasource_field_data_type.setter - def tableau_datasource_field_data_type( - self, tableau_datasource_field_data_type: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.tableau_datasource_field_data_type = ( - tableau_datasource_field_data_type - ) - - @property - def upstream_tables(self) -> Optional[list[dict[str, str]]]: - return None if self.attributes is None else self.attributes.upstream_tables - - @upstream_tables.setter - def upstream_tables(self, upstream_tables: Optional[list[dict[str, str]]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.upstream_tables = upstream_tables - - @property - def tableau_datasource_field_formula(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.tableau_datasource_field_formula - ) - - @tableau_datasource_field_formula.setter - def tableau_datasource_field_formula( - self, tableau_datasource_field_formula: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.tableau_datasource_field_formula = ( - tableau_datasource_field_formula - ) - - @property - def tableau_datasource_field_bin_size(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.tableau_datasource_field_bin_size - ) - - @tableau_datasource_field_bin_size.setter - def tableau_datasource_field_bin_size( - self, tableau_datasource_field_bin_size: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.tableau_datasource_field_bin_size = ( - tableau_datasource_field_bin_size - ) - - @property - def upstream_columns(self) -> Optional[list[dict[str, str]]]: - return None if self.attributes is None else self.attributes.upstream_columns - - @upstream_columns.setter - def upstream_columns(self, upstream_columns: Optional[list[dict[str, str]]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.upstream_columns = upstream_columns - - @property - def upstream_fields(self) -> Optional[list[dict[str, str]]]: - return None if self.attributes is None else self.attributes.upstream_fields - - @upstream_fields.setter - def 
upstream_fields(self, upstream_fields: Optional[list[dict[str, str]]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.upstream_fields = upstream_fields - - @property - def datasource_field_type(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.datasource_field_type - ) - - @datasource_field_type.setter - def datasource_field_type(self, datasource_field_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.datasource_field_type = datasource_field_type - - @property - def worksheets(self) -> Optional[list[TableauWorksheet]]: - return None if self.attributes is None else self.attributes.worksheets - - @worksheets.setter - def worksheets(self, worksheets: Optional[list[TableauWorksheet]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.worksheets = worksheets - - @property - def datasource(self) -> Optional[TableauDatasource]: - return None if self.attributes is None else self.attributes.datasource - - @datasource.setter - def datasource(self, datasource: Optional[TableauDatasource]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.datasource = datasource - - class Attributes(Tableau.Attributes): - site_qualified_name: Optional[str] = Field( - None, description="", alias="siteQualifiedName" - ) - project_qualified_name: Optional[str] = Field( - None, description="", alias="projectQualifiedName" - ) - top_level_project_qualified_name: Optional[str] = Field( - None, description="", alias="topLevelProjectQualifiedName" - ) - workbook_qualified_name: Optional[str] = Field( - None, description="", alias="workbookQualifiedName" - ) - datasource_qualified_name: Optional[str] = Field( - None, description="", alias="datasourceQualifiedName" - ) - project_hierarchy: Optional[list[dict[str, str]]] = Field( - None, description="", alias="projectHierarchy" - ) - fully_qualified_name: Optional[str] = Field( - None, description="", alias="fullyQualifiedName" - ) - tableau_datasource_field_data_category: Optional[str] = Field( - None, description="", alias="tableauDatasourceFieldDataCategory" - ) - tableau_datasource_field_role: Optional[str] = Field( - None, description="", alias="tableauDatasourceFieldRole" - ) - tableau_datasource_field_data_type: Optional[str] = Field( - None, description="", alias="tableauDatasourceFieldDataType" - ) - upstream_tables: Optional[list[dict[str, str]]] = Field( - None, description="", alias="upstreamTables" - ) - tableau_datasource_field_formula: Optional[str] = Field( - None, description="", alias="tableauDatasourceFieldFormula" - ) - tableau_datasource_field_bin_size: Optional[str] = Field( - None, description="", alias="tableauDatasourceFieldBinSize" - ) - upstream_columns: Optional[list[dict[str, str]]] = Field( - None, description="", alias="upstreamColumns" - ) - upstream_fields: Optional[list[dict[str, str]]] = Field( - None, description="", alias="upstreamFields" - ) - datasource_field_type: Optional[str] = Field( - None, description="", alias="datasourceFieldType" - ) - worksheets: Optional[list[TableauWorksheet]] = Field( - None, description="", alias="worksheets" - ) # relationship - datasource: Optional[TableauDatasource] = Field( - None, description="", alias="datasource" - ) # relationship - - attributes: "TableauDatasourceField.Attributes" = Field( - default_factory=lambda: TableauDatasourceField.Attributes(), - description="Map of attributes in the 
instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class TableauCalculatedField(Tableau): - """Description""" - - type_name: str = Field("TableauCalculatedField", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "TableauCalculatedField": - raise ValueError("must be TableauCalculatedField") - return v - - def __setattr__(self, name, value): - if name in TableauCalculatedField._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SITE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "siteQualifiedName", "siteQualifiedName" - ) - """ - Unique name of the site in which this calculated field exists. - """ - PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "projectQualifiedName", "projectQualifiedName" - ) - """ - Unique name of the project in which this calculated field exists. - """ - TOP_LEVEL_PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "topLevelProjectQualifiedName", "topLevelProjectQualifiedName" - ) - """ - Unique name of the top-level project in which this calculated field exists. - """ - WORKBOOK_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "workbookQualifiedName", "workbookQualifiedName" - ) - """ - Unique name of the workbook in which this calculated field exists. - """ - DATASOURCE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "datasourceQualifiedName", "datasourceQualifiedName" - ) - """ - Unique name of the datasource in which this calculated field exists. - """ - PROJECT_HIERARCHY: ClassVar[KeywordField] = KeywordField( - "projectHierarchy", "projectHierarchy" - ) - """ - List of top-level projects and their nested projects. - """ - DATA_CATEGORY: ClassVar[KeywordField] = KeywordField("dataCategory", "dataCategory") - """ - Data category of this field. - """ - ROLE: ClassVar[KeywordField] = KeywordField("role", "role") - """ - Role of this field, for example: 'dimension', 'measure', or 'unknown'. - """ - TABLEAU_DATA_TYPE: ClassVar[KeywordTextField] = KeywordTextField( - "tableauDataType", "tableauDataType", "tableauDataType.text" - ) - """ - Data type of the field, from Tableau. - """ - FORMULA: ClassVar[KeywordField] = KeywordField("formula", "formula") - """ - Formula for this calculated field. - """ - UPSTREAM_FIELDS: ClassVar[KeywordField] = KeywordField( - "upstreamFields", "upstreamFields" - ) - """ - List of fields that are upstream to this calculated field. 
- """ - - WORKSHEETS: ClassVar[RelationField] = RelationField("worksheets") - """ - TBC - """ - DATASOURCE: ClassVar[RelationField] = RelationField("datasource") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "site_qualified_name", - "project_qualified_name", - "top_level_project_qualified_name", - "workbook_qualified_name", - "datasource_qualified_name", - "project_hierarchy", - "data_category", - "role", - "tableau_data_type", - "formula", - "upstream_fields", - "worksheets", - "datasource", - ] - - @property - def site_qualified_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.site_qualified_name - - @site_qualified_name.setter - def site_qualified_name(self, site_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.site_qualified_name = site_qualified_name - - @property - def project_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.project_qualified_name - ) - - @project_qualified_name.setter - def project_qualified_name(self, project_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project_qualified_name = project_qualified_name - - @property - def top_level_project_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.top_level_project_qualified_name - ) - - @top_level_project_qualified_name.setter - def top_level_project_qualified_name( - self, top_level_project_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.top_level_project_qualified_name = ( - top_level_project_qualified_name - ) - - @property - def workbook_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.workbook_qualified_name - ) - - @workbook_qualified_name.setter - def workbook_qualified_name(self, workbook_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workbook_qualified_name = workbook_qualified_name - - @property - def datasource_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.datasource_qualified_name - ) - - @datasource_qualified_name.setter - def datasource_qualified_name(self, datasource_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.datasource_qualified_name = datasource_qualified_name - - @property - def project_hierarchy(self) -> Optional[list[dict[str, str]]]: - return None if self.attributes is None else self.attributes.project_hierarchy - - @project_hierarchy.setter - def project_hierarchy(self, project_hierarchy: Optional[list[dict[str, str]]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project_hierarchy = project_hierarchy - - @property - def data_category(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.data_category - - @data_category.setter - def data_category(self, data_category: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.data_category = data_category - - @property - def role(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.role - - @role.setter - def role(self, role: Optional[str]): - if 
self.attributes is None: - self.attributes = self.Attributes() - self.attributes.role = role - - @property - def tableau_data_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.tableau_data_type - - @tableau_data_type.setter - def tableau_data_type(self, tableau_data_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.tableau_data_type = tableau_data_type - - @property - def formula(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.formula - - @formula.setter - def formula(self, formula: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.formula = formula - - @property - def upstream_fields(self) -> Optional[list[dict[str, str]]]: - return None if self.attributes is None else self.attributes.upstream_fields - - @upstream_fields.setter - def upstream_fields(self, upstream_fields: Optional[list[dict[str, str]]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.upstream_fields = upstream_fields - - @property - def worksheets(self) -> Optional[list[TableauWorksheet]]: - return None if self.attributes is None else self.attributes.worksheets - - @worksheets.setter - def worksheets(self, worksheets: Optional[list[TableauWorksheet]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.worksheets = worksheets - - @property - def datasource(self) -> Optional[TableauDatasource]: - return None if self.attributes is None else self.attributes.datasource - - @datasource.setter - def datasource(self, datasource: Optional[TableauDatasource]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.datasource = datasource - - class Attributes(Tableau.Attributes): - site_qualified_name: Optional[str] = Field( - None, description="", alias="siteQualifiedName" - ) - project_qualified_name: Optional[str] = Field( - None, description="", alias="projectQualifiedName" - ) - top_level_project_qualified_name: Optional[str] = Field( - None, description="", alias="topLevelProjectQualifiedName" - ) - workbook_qualified_name: Optional[str] = Field( - None, description="", alias="workbookQualifiedName" - ) - datasource_qualified_name: Optional[str] = Field( - None, description="", alias="datasourceQualifiedName" - ) - project_hierarchy: Optional[list[dict[str, str]]] = Field( - None, description="", alias="projectHierarchy" - ) - data_category: Optional[str] = Field(None, description="", alias="dataCategory") - role: Optional[str] = Field(None, description="", alias="role") - tableau_data_type: Optional[str] = Field( - None, description="", alias="tableauDataType" - ) - formula: Optional[str] = Field(None, description="", alias="formula") - upstream_fields: Optional[list[dict[str, str]]] = Field( - None, description="", alias="upstreamFields" - ) - worksheets: Optional[list[TableauWorksheet]] = Field( - None, description="", alias="worksheets" - ) # relationship - datasource: Optional[TableauDatasource] = Field( - None, description="", alias="datasource" - ) # relationship - - attributes: "TableauCalculatedField.Attributes" = Field( - default_factory=lambda: TableauCalculatedField.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class TableauProject(Tableau): - """Description""" - - type_name: str = Field("TableauProject", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "TableauProject": - raise ValueError("must be TableauProject") - return v - - def __setattr__(self, name, value): - if name in TableauProject._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SITE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "siteQualifiedName", "siteQualifiedName" - ) - """ - Unique name of the site in which this project exists. - """ - TOP_LEVEL_PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "topLevelProjectQualifiedName", "topLevelProjectQualifiedName" - ) - """ - Unique name of the top-level project in which this project exists, if this is a nested project. - """ - IS_TOP_LEVEL_PROJECT: ClassVar[BooleanField] = BooleanField( - "isTopLevelProject", "isTopLevelProject" - ) - """ - Whether this project is a top-level project (true) or not (false). - """ - PROJECT_HIERARCHY: ClassVar[KeywordField] = KeywordField( - "projectHierarchy", "projectHierarchy" - ) - """ - List of top-level projects with their nested child projects. - """ - - PARENT_PROJECT: ClassVar[RelationField] = RelationField("parentProject") - """ - TBC - """ - WORKBOOKS: ClassVar[RelationField] = RelationField("workbooks") - """ - TBC - """ - SITE: ClassVar[RelationField] = RelationField("site") - """ - TBC - """ - DATASOURCES: ClassVar[RelationField] = RelationField("datasources") - """ - TBC - """ - FLOWS: ClassVar[RelationField] = RelationField("flows") - """ - TBC - """ - CHILD_PROJECTS: ClassVar[RelationField] = RelationField("childProjects") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "site_qualified_name", - "top_level_project_qualified_name", - "is_top_level_project", - "project_hierarchy", - "parent_project", - "workbooks", - "site", - "datasources", - "flows", - "child_projects", - ] - - @property - def site_qualified_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.site_qualified_name - - @site_qualified_name.setter - def site_qualified_name(self, site_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.site_qualified_name = site_qualified_name - - @property - def top_level_project_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.top_level_project_qualified_name - ) - - @top_level_project_qualified_name.setter - def top_level_project_qualified_name( - self, top_level_project_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.top_level_project_qualified_name = ( - top_level_project_qualified_name - ) - - @property - def is_top_level_project(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_top_level_project - - @is_top_level_project.setter - def is_top_level_project(self, is_top_level_project: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_top_level_project = is_top_level_project - - @property - def project_hierarchy(self) -> Optional[list[dict[str, str]]]: - return None if self.attributes is None else self.attributes.project_hierarchy - - 
@project_hierarchy.setter - def project_hierarchy(self, project_hierarchy: Optional[list[dict[str, str]]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project_hierarchy = project_hierarchy - - @property - def parent_project(self) -> Optional[TableauProject]: - return None if self.attributes is None else self.attributes.parent_project - - @parent_project.setter - def parent_project(self, parent_project: Optional[TableauProject]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.parent_project = parent_project - - @property - def workbooks(self) -> Optional[list[TableauWorkbook]]: - return None if self.attributes is None else self.attributes.workbooks - - @workbooks.setter - def workbooks(self, workbooks: Optional[list[TableauWorkbook]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workbooks = workbooks - - @property - def site(self) -> Optional[TableauSite]: - return None if self.attributes is None else self.attributes.site - - @site.setter - def site(self, site: Optional[TableauSite]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.site = site - - @property - def datasources(self) -> Optional[list[TableauDatasource]]: - return None if self.attributes is None else self.attributes.datasources - - @datasources.setter - def datasources(self, datasources: Optional[list[TableauDatasource]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.datasources = datasources - - @property - def flows(self) -> Optional[list[TableauFlow]]: - return None if self.attributes is None else self.attributes.flows - - @flows.setter - def flows(self, flows: Optional[list[TableauFlow]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.flows = flows - - @property - def child_projects(self) -> Optional[list[TableauProject]]: - return None if self.attributes is None else self.attributes.child_projects - - @child_projects.setter - def child_projects(self, child_projects: Optional[list[TableauProject]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.child_projects = child_projects - - class Attributes(Tableau.Attributes): - site_qualified_name: Optional[str] = Field( - None, description="", alias="siteQualifiedName" - ) - top_level_project_qualified_name: Optional[str] = Field( - None, description="", alias="topLevelProjectQualifiedName" - ) - is_top_level_project: Optional[bool] = Field( - None, description="", alias="isTopLevelProject" - ) - project_hierarchy: Optional[list[dict[str, str]]] = Field( - None, description="", alias="projectHierarchy" - ) - parent_project: Optional[TableauProject] = Field( - None, description="", alias="parentProject" - ) # relationship - workbooks: Optional[list[TableauWorkbook]] = Field( - None, description="", alias="workbooks" - ) # relationship - site: Optional[TableauSite] = Field( - None, description="", alias="site" - ) # relationship - datasources: Optional[list[TableauDatasource]] = Field( - None, description="", alias="datasources" - ) # relationship - flows: Optional[list[TableauFlow]] = Field( - None, description="", alias="flows" - ) # relationship - child_projects: Optional[list[TableauProject]] = Field( - None, description="", alias="childProjects" - ) # relationship - - attributes: "TableauProject.Attributes" = Field( - default_factory=lambda: TableauProject.Attributes(), - description="Map of 
attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class TableauSite(Tableau): - """Description""" - - type_name: str = Field("TableauSite", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "TableauSite": - raise ValueError("must be TableauSite") - return v - - def __setattr__(self, name, value): - if name in TableauSite._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - PROJECTS: ClassVar[RelationField] = RelationField("projects") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "projects", - ] - - @property - def projects(self) -> Optional[list[TableauProject]]: - return None if self.attributes is None else self.attributes.projects - - @projects.setter - def projects(self, projects: Optional[list[TableauProject]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.projects = projects - - class Attributes(Tableau.Attributes): - projects: Optional[list[TableauProject]] = Field( - None, description="", alias="projects" - ) # relationship - - attributes: "TableauSite.Attributes" = Field( - default_factory=lambda: TableauSite.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class TableauDatasource(Tableau): - """Description""" - - type_name: str = Field("TableauDatasource", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "TableauDatasource": - raise ValueError("must be TableauDatasource") - return v - - def __setattr__(self, name, value): - if name in TableauDatasource._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SITE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "siteQualifiedName", "siteQualifiedName" - ) - """ - Unique name of the site in which this datasource exists. - """ - PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "projectQualifiedName", "projectQualifiedName" - ) - """ - Unique name of the project in which this datasource exists. - """ - TOP_LEVEL_PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "topLevelProjectQualifiedName", "topLevelProjectQualifiedName" - ) - """ - Unique name of the top-level project in which this datasource exists. - """ - WORKBOOK_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "workbookQualifiedName", "workbookQualifiedName" - ) - """ - Unique name of the workbook in which this datasource exists. - """ - PROJECT_HIERARCHY: ClassVar[KeywordField] = KeywordField( - "projectHierarchy", "projectHierarchy" - ) - """ - List of top-level projects with their nested child projects. - """ - IS_PUBLISHED: ClassVar[BooleanField] = BooleanField("isPublished", "isPublished") - """ - Whether this datasource is published (true) or embedded (false). - """ - HAS_EXTRACTS: ClassVar[BooleanField] = BooleanField("hasExtracts", "hasExtracts") - """ - Whether this datasource has extracts (true) or not (false). - """ - IS_CERTIFIED: ClassVar[BooleanField] = BooleanField("isCertified", "isCertified") - """ - Whether this datasource is certified in Tableau (true) or not (false). 
- """ - CERTIFIER: ClassVar[KeywordField] = KeywordField("certifier", "certifier") - """ - Users that have marked this datasource as cerified, in Tableau. - """ - CERTIFICATION_NOTE: ClassVar[KeywordField] = KeywordField( - "certificationNote", "certificationNote" - ) - """ - Notes related to this datasource being cerfified, in Tableau. - """ - CERTIFIER_DISPLAY_NAME: ClassVar[KeywordField] = KeywordField( - "certifierDisplayName", "certifierDisplayName" - ) - """ - Name of the user who cerified this datasource, in Tableau. - """ - UPSTREAM_TABLES: ClassVar[KeywordField] = KeywordField( - "upstreamTables", "upstreamTables" - ) - """ - List of tables that are upstream of this datasource. - """ - UPSTREAM_DATASOURCES: ClassVar[KeywordField] = KeywordField( - "upstreamDatasources", "upstreamDatasources" - ) - """ - List of datasources that are upstream of this datasource. - """ - - WORKBOOK: ClassVar[RelationField] = RelationField("workbook") - """ - TBC - """ - PROJECT: ClassVar[RelationField] = RelationField("project") - """ - TBC - """ - FIELDS: ClassVar[RelationField] = RelationField("fields") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "site_qualified_name", - "project_qualified_name", - "top_level_project_qualified_name", - "workbook_qualified_name", - "project_hierarchy", - "is_published", - "has_extracts", - "is_certified", - "certifier", - "certification_note", - "certifier_display_name", - "upstream_tables", - "upstream_datasources", - "workbook", - "project", - "fields", - ] - - @property - def site_qualified_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.site_qualified_name - - @site_qualified_name.setter - def site_qualified_name(self, site_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.site_qualified_name = site_qualified_name - - @property - def project_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.project_qualified_name - ) - - @project_qualified_name.setter - def project_qualified_name(self, project_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project_qualified_name = project_qualified_name - - @property - def top_level_project_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.top_level_project_qualified_name - ) - - @top_level_project_qualified_name.setter - def top_level_project_qualified_name( - self, top_level_project_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.top_level_project_qualified_name = ( - top_level_project_qualified_name - ) - - @property - def workbook_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.workbook_qualified_name - ) - - @workbook_qualified_name.setter - def workbook_qualified_name(self, workbook_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workbook_qualified_name = workbook_qualified_name - - @property - def project_hierarchy(self) -> Optional[list[dict[str, str]]]: - return None if self.attributes is None else self.attributes.project_hierarchy - - @project_hierarchy.setter - def project_hierarchy(self, project_hierarchy: Optional[list[dict[str, str]]]): - if self.attributes is None: - self.attributes = 
self.Attributes() - self.attributes.project_hierarchy = project_hierarchy - - @property - def is_published(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_published - - @is_published.setter - def is_published(self, is_published: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_published = is_published - - @property - def has_extracts(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.has_extracts - - @has_extracts.setter - def has_extracts(self, has_extracts: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.has_extracts = has_extracts - - @property - def is_certified(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_certified - - @is_certified.setter - def is_certified(self, is_certified: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_certified = is_certified - - @property - def certifier(self) -> Optional[dict[str, str]]: - return None if self.attributes is None else self.attributes.certifier - - @certifier.setter - def certifier(self, certifier: Optional[dict[str, str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.certifier = certifier - - @property - def certification_note(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.certification_note - - @certification_note.setter - def certification_note(self, certification_note: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.certification_note = certification_note - - @property - def certifier_display_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.certifier_display_name - ) - - @certifier_display_name.setter - def certifier_display_name(self, certifier_display_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.certifier_display_name = certifier_display_name - - @property - def upstream_tables(self) -> Optional[list[dict[str, str]]]: - return None if self.attributes is None else self.attributes.upstream_tables - - @upstream_tables.setter - def upstream_tables(self, upstream_tables: Optional[list[dict[str, str]]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.upstream_tables = upstream_tables - - @property - def upstream_datasources(self) -> Optional[list[dict[str, str]]]: - return None if self.attributes is None else self.attributes.upstream_datasources - - @upstream_datasources.setter - def upstream_datasources( - self, upstream_datasources: Optional[list[dict[str, str]]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.upstream_datasources = upstream_datasources - - @property - def workbook(self) -> Optional[TableauWorkbook]: - return None if self.attributes is None else self.attributes.workbook - - @workbook.setter - def workbook(self, workbook: Optional[TableauWorkbook]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workbook = workbook - - @property - def project(self) -> Optional[TableauProject]: - return None if self.attributes is None else self.attributes.project - - @project.setter - def project(self, project: Optional[TableauProject]): - if self.attributes is None: - self.attributes = 
self.Attributes() - self.attributes.project = project - - @property - def fields(self) -> Optional[list[TableauDatasourceField]]: - return None if self.attributes is None else self.attributes.fields - - @fields.setter - def fields(self, fields: Optional[list[TableauDatasourceField]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.fields = fields - - class Attributes(Tableau.Attributes): - site_qualified_name: Optional[str] = Field( - None, description="", alias="siteQualifiedName" - ) - project_qualified_name: Optional[str] = Field( - None, description="", alias="projectQualifiedName" - ) - top_level_project_qualified_name: Optional[str] = Field( - None, description="", alias="topLevelProjectQualifiedName" - ) - workbook_qualified_name: Optional[str] = Field( - None, description="", alias="workbookQualifiedName" - ) - project_hierarchy: Optional[list[dict[str, str]]] = Field( - None, description="", alias="projectHierarchy" - ) - is_published: Optional[bool] = Field(None, description="", alias="isPublished") - has_extracts: Optional[bool] = Field(None, description="", alias="hasExtracts") - is_certified: Optional[bool] = Field(None, description="", alias="isCertified") - certifier: Optional[dict[str, str]] = Field( - None, description="", alias="certifier" - ) - certification_note: Optional[str] = Field( - None, description="", alias="certificationNote" - ) - certifier_display_name: Optional[str] = Field( - None, description="", alias="certifierDisplayName" - ) - upstream_tables: Optional[list[dict[str, str]]] = Field( - None, description="", alias="upstreamTables" - ) - upstream_datasources: Optional[list[dict[str, str]]] = Field( - None, description="", alias="upstreamDatasources" - ) - workbook: Optional[TableauWorkbook] = Field( - None, description="", alias="workbook" - ) # relationship - project: Optional[TableauProject] = Field( - None, description="", alias="project" - ) # relationship - fields: Optional[list[TableauDatasourceField]] = Field( - None, description="", alias="fields" - ) # relationship - - attributes: "TableauDatasource.Attributes" = Field( - default_factory=lambda: TableauDatasource.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class TableauDashboard(Tableau): - """Description""" - - type_name: str = Field("TableauDashboard", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "TableauDashboard": - raise ValueError("must be TableauDashboard") - return v - - def __setattr__(self, name, value): - if name in TableauDashboard._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SITE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "siteQualifiedName", "siteQualifiedName" - ) - """ - Unique name of the site in which this dashboard exists. - """ - PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "projectQualifiedName", "projectQualifiedName" - ) - """ - Unique name of the project in which this dashboard exists. - """ - WORKBOOK_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "workbookQualifiedName", "workbookQualifiedName" - ) - """ - Unique name of the workbook in which this dashboard exists. 
- """ - TOP_LEVEL_PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "topLevelProjectQualifiedName", "topLevelProjectQualifiedName" - ) - """ - Unique name of the top-level project in which this dashboard exists. - """ - PROJECT_HIERARCHY: ClassVar[KeywordField] = KeywordField( - "projectHierarchy", "projectHierarchy" - ) - """ - List of top-level projects and their nested child projects. - """ - - WORKBOOK: ClassVar[RelationField] = RelationField("workbook") - """ - TBC - """ - WORKSHEETS: ClassVar[RelationField] = RelationField("worksheets") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "site_qualified_name", - "project_qualified_name", - "workbook_qualified_name", - "top_level_project_qualified_name", - "project_hierarchy", - "workbook", - "worksheets", - ] - - @property - def site_qualified_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.site_qualified_name - - @site_qualified_name.setter - def site_qualified_name(self, site_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.site_qualified_name = site_qualified_name - - @property - def project_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.project_qualified_name - ) - - @project_qualified_name.setter - def project_qualified_name(self, project_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project_qualified_name = project_qualified_name - - @property - def workbook_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.workbook_qualified_name - ) - - @workbook_qualified_name.setter - def workbook_qualified_name(self, workbook_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workbook_qualified_name = workbook_qualified_name - - @property - def top_level_project_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.top_level_project_qualified_name - ) - - @top_level_project_qualified_name.setter - def top_level_project_qualified_name( - self, top_level_project_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.top_level_project_qualified_name = ( - top_level_project_qualified_name - ) - - @property - def project_hierarchy(self) -> Optional[list[dict[str, str]]]: - return None if self.attributes is None else self.attributes.project_hierarchy - - @project_hierarchy.setter - def project_hierarchy(self, project_hierarchy: Optional[list[dict[str, str]]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project_hierarchy = project_hierarchy - - @property - def workbook(self) -> Optional[TableauWorkbook]: - return None if self.attributes is None else self.attributes.workbook - - @workbook.setter - def workbook(self, workbook: Optional[TableauWorkbook]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workbook = workbook - - @property - def worksheets(self) -> Optional[list[TableauWorksheet]]: - return None if self.attributes is None else self.attributes.worksheets - - @worksheets.setter - def worksheets(self, worksheets: Optional[list[TableauWorksheet]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.worksheets = 
worksheets - - class Attributes(Tableau.Attributes): - site_qualified_name: Optional[str] = Field( - None, description="", alias="siteQualifiedName" - ) - project_qualified_name: Optional[str] = Field( - None, description="", alias="projectQualifiedName" - ) - workbook_qualified_name: Optional[str] = Field( - None, description="", alias="workbookQualifiedName" - ) - top_level_project_qualified_name: Optional[str] = Field( - None, description="", alias="topLevelProjectQualifiedName" - ) - project_hierarchy: Optional[list[dict[str, str]]] = Field( - None, description="", alias="projectHierarchy" - ) - workbook: Optional[TableauWorkbook] = Field( - None, description="", alias="workbook" - ) # relationship - worksheets: Optional[list[TableauWorksheet]] = Field( - None, description="", alias="worksheets" - ) # relationship - - attributes: "TableauDashboard.Attributes" = Field( - default_factory=lambda: TableauDashboard.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class TableauFlow(Tableau): - """Description""" - - type_name: str = Field("TableauFlow", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "TableauFlow": - raise ValueError("must be TableauFlow") - return v - - def __setattr__(self, name, value): - if name in TableauFlow._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SITE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "siteQualifiedName", "siteQualifiedName" - ) - """ - Unique name of the site in which this flow exists. - """ - PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "projectQualifiedName", "projectQualifiedName" - ) - """ - Unique name of the project in which this flow exists. - """ - TOP_LEVEL_PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "topLevelProjectQualifiedName", "topLevelProjectQualifiedName" - ) - """ - Unique name of the top-level project in which this flow exists. - """ - PROJECT_HIERARCHY: ClassVar[KeywordField] = KeywordField( - "projectHierarchy", "projectHierarchy" - ) - """ - List of top-level projects with their nested child projects. - """ - INPUT_FIELDS: ClassVar[KeywordField] = KeywordField("inputFields", "inputFields") - """ - List of fields that are inputs to this flow. - """ - OUTPUT_FIELDS: ClassVar[KeywordField] = KeywordField("outputFields", "outputFields") - """ - List of fields that are outputs from this flow. - """ - OUTPUT_STEPS: ClassVar[KeywordField] = KeywordField("outputSteps", "outputSteps") - """ - List of steps that are outputs from this flow. 
- """ - - PROJECT: ClassVar[RelationField] = RelationField("project") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "site_qualified_name", - "project_qualified_name", - "top_level_project_qualified_name", - "project_hierarchy", - "input_fields", - "output_fields", - "output_steps", - "project", - ] - - @property - def site_qualified_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.site_qualified_name - - @site_qualified_name.setter - def site_qualified_name(self, site_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.site_qualified_name = site_qualified_name - - @property - def project_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.project_qualified_name - ) - - @project_qualified_name.setter - def project_qualified_name(self, project_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project_qualified_name = project_qualified_name - - @property - def top_level_project_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.top_level_project_qualified_name - ) - - @top_level_project_qualified_name.setter - def top_level_project_qualified_name( - self, top_level_project_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.top_level_project_qualified_name = ( - top_level_project_qualified_name - ) - - @property - def project_hierarchy(self) -> Optional[list[dict[str, str]]]: - return None if self.attributes is None else self.attributes.project_hierarchy - - @project_hierarchy.setter - def project_hierarchy(self, project_hierarchy: Optional[list[dict[str, str]]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project_hierarchy = project_hierarchy - - @property - def input_fields(self) -> Optional[list[dict[str, str]]]: - return None if self.attributes is None else self.attributes.input_fields - - @input_fields.setter - def input_fields(self, input_fields: Optional[list[dict[str, str]]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.input_fields = input_fields - - @property - def output_fields(self) -> Optional[list[dict[str, str]]]: - return None if self.attributes is None else self.attributes.output_fields - - @output_fields.setter - def output_fields(self, output_fields: Optional[list[dict[str, str]]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.output_fields = output_fields - - @property - def output_steps(self) -> Optional[list[dict[str, str]]]: - return None if self.attributes is None else self.attributes.output_steps - - @output_steps.setter - def output_steps(self, output_steps: Optional[list[dict[str, str]]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.output_steps = output_steps - - @property - def project(self) -> Optional[TableauProject]: - return None if self.attributes is None else self.attributes.project - - @project.setter - def project(self, project: Optional[TableauProject]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project = project - - class Attributes(Tableau.Attributes): - site_qualified_name: Optional[str] = Field( - None, description="", alias="siteQualifiedName" - ) - 
project_qualified_name: Optional[str] = Field( - None, description="", alias="projectQualifiedName" - ) - top_level_project_qualified_name: Optional[str] = Field( - None, description="", alias="topLevelProjectQualifiedName" - ) - project_hierarchy: Optional[list[dict[str, str]]] = Field( - None, description="", alias="projectHierarchy" - ) - input_fields: Optional[list[dict[str, str]]] = Field( - None, description="", alias="inputFields" - ) - output_fields: Optional[list[dict[str, str]]] = Field( - None, description="", alias="outputFields" - ) - output_steps: Optional[list[dict[str, str]]] = Field( - None, description="", alias="outputSteps" - ) - project: Optional[TableauProject] = Field( - None, description="", alias="project" - ) # relationship - - attributes: "TableauFlow.Attributes" = Field( - default_factory=lambda: TableauFlow.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class TableauWorksheet(Tableau): - """Description""" - - type_name: str = Field("TableauWorksheet", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "TableauWorksheet": - raise ValueError("must be TableauWorksheet") - return v - - def __setattr__(self, name, value): - if name in TableauWorksheet._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SITE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "siteQualifiedName", "siteQualifiedName" - ) - """ - Unique name of the site in which this worksheet exists. - """ - PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "projectQualifiedName", "projectQualifiedName" - ) - """ - Unique name of the project in which this worksheet exists. - """ - TOP_LEVEL_PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "topLevelProjectQualifiedName", "topLevelProjectQualifiedName" - ) - """ - Unique name of the top-level project in which this worksheet exists. - """ - PROJECT_HIERARCHY: ClassVar[KeywordField] = KeywordField( - "projectHierarchy", "projectHierarchy" - ) - """ - List of top-level projects with their nested child projects. - """ - WORKBOOK_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "workbookQualifiedName", "workbookQualifiedName" - ) - """ - Unique name of the workbook in which this worksheet exists. 
- """ - - WORKBOOK: ClassVar[RelationField] = RelationField("workbook") - """ - TBC - """ - DATASOURCE_FIELDS: ClassVar[RelationField] = RelationField("datasourceFields") - """ - TBC - """ - CALCULATED_FIELDS: ClassVar[RelationField] = RelationField("calculatedFields") - """ - TBC - """ - DASHBOARDS: ClassVar[RelationField] = RelationField("dashboards") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "site_qualified_name", - "project_qualified_name", - "top_level_project_qualified_name", - "project_hierarchy", - "workbook_qualified_name", - "workbook", - "datasource_fields", - "calculated_fields", - "dashboards", - ] - - @property - def site_qualified_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.site_qualified_name - - @site_qualified_name.setter - def site_qualified_name(self, site_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.site_qualified_name = site_qualified_name - - @property - def project_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.project_qualified_name - ) - - @project_qualified_name.setter - def project_qualified_name(self, project_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project_qualified_name = project_qualified_name - - @property - def top_level_project_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.top_level_project_qualified_name - ) - - @top_level_project_qualified_name.setter - def top_level_project_qualified_name( - self, top_level_project_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.top_level_project_qualified_name = ( - top_level_project_qualified_name - ) - - @property - def project_hierarchy(self) -> Optional[list[dict[str, str]]]: - return None if self.attributes is None else self.attributes.project_hierarchy - - @project_hierarchy.setter - def project_hierarchy(self, project_hierarchy: Optional[list[dict[str, str]]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project_hierarchy = project_hierarchy - - @property - def workbook_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.workbook_qualified_name - ) - - @workbook_qualified_name.setter - def workbook_qualified_name(self, workbook_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workbook_qualified_name = workbook_qualified_name - - @property - def workbook(self) -> Optional[TableauWorkbook]: - return None if self.attributes is None else self.attributes.workbook - - @workbook.setter - def workbook(self, workbook: Optional[TableauWorkbook]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workbook = workbook - - @property - def datasource_fields(self) -> Optional[list[TableauDatasourceField]]: - return None if self.attributes is None else self.attributes.datasource_fields - - @datasource_fields.setter - def datasource_fields( - self, datasource_fields: Optional[list[TableauDatasourceField]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.datasource_fields = datasource_fields - - @property - def calculated_fields(self) -> Optional[list[TableauCalculatedField]]: - 
return None if self.attributes is None else self.attributes.calculated_fields - - @calculated_fields.setter - def calculated_fields( - self, calculated_fields: Optional[list[TableauCalculatedField]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.calculated_fields = calculated_fields - - @property - def dashboards(self) -> Optional[list[TableauDashboard]]: - return None if self.attributes is None else self.attributes.dashboards - - @dashboards.setter - def dashboards(self, dashboards: Optional[list[TableauDashboard]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dashboards = dashboards - - class Attributes(Tableau.Attributes): - site_qualified_name: Optional[str] = Field( - None, description="", alias="siteQualifiedName" - ) - project_qualified_name: Optional[str] = Field( - None, description="", alias="projectQualifiedName" - ) - top_level_project_qualified_name: Optional[str] = Field( - None, description="", alias="topLevelProjectQualifiedName" - ) - project_hierarchy: Optional[list[dict[str, str]]] = Field( - None, description="", alias="projectHierarchy" - ) - workbook_qualified_name: Optional[str] = Field( - None, description="", alias="workbookQualifiedName" - ) - workbook: Optional[TableauWorkbook] = Field( - None, description="", alias="workbook" - ) # relationship - datasource_fields: Optional[list[TableauDatasourceField]] = Field( - None, description="", alias="datasourceFields" - ) # relationship - calculated_fields: Optional[list[TableauCalculatedField]] = Field( - None, description="", alias="calculatedFields" - ) # relationship - dashboards: Optional[list[TableauDashboard]] = Field( - None, description="", alias="dashboards" - ) # relationship - - attributes: "TableauWorksheet.Attributes" = Field( - default_factory=lambda: TableauWorksheet.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -TableauWorkbook.Attributes.update_forward_refs() - - -TableauDatasourceField.Attributes.update_forward_refs() - - -TableauCalculatedField.Attributes.update_forward_refs() - - -TableauProject.Attributes.update_forward_refs() - - -TableauSite.Attributes.update_forward_refs() - - -TableauDatasource.Attributes.update_forward_refs() - - -TableauDashboard.Attributes.update_forward_refs() - - -TableauFlow.Attributes.update_forward_refs() - - -TableauWorksheet.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset71.py b/pyatlan/model/assets/asset71.py deleted file mode 100644 index 30bb4cc45..000000000 --- a/pyatlan/model/assets/asset71.py +++ /dev/null @@ -1,1928 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 Atlan Pte. Ltd. 
- - -from __future__ import annotations - -from datetime import datetime -from typing import ClassVar, Optional - -from pydantic import Field, validator - -from pyatlan.model.fields.atlan_fields import ( - KeywordField, - KeywordTextField, - NumericField, - RelationField, -) - -from .asset44 import Looker - - -class LookerLook(Looker): - """Description""" - - type_name: str = Field("LookerLook", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "LookerLook": - raise ValueError("must be LookerLook") - return v - - def __setattr__(self, name, value): - if name in LookerLook._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - FOLDER_NAME: ClassVar[KeywordField] = KeywordField("folderName", "folderName") - """ - Name of the folder in which the Look is organized. - """ - SOURCE_USER_ID: ClassVar[NumericField] = NumericField( - "sourceUserId", "sourceUserId" - ) - """ - Identifier of the user who created the Look, from Looker. - """ - SOURCE_VIEW_COUNT: ClassVar[NumericField] = NumericField( - "sourceViewCount", "sourceViewCount" - ) - """ - Number of times the look has been viewed in the Looker web UI. - """ - SOURCELAST_UPDATER_ID: ClassVar[NumericField] = NumericField( - "sourcelastUpdaterId", "sourcelastUpdaterId" - ) - """ - Identifier of the user that last updated the Look, from Looker. - """ - SOURCE_LAST_ACCESSED_AT: ClassVar[NumericField] = NumericField( - "sourceLastAccessedAt", "sourceLastAccessedAt" - ) - """ - Time (epoch) when the Look was last accessed by a user, in milliseconds. - """ - SOURCE_LAST_VIEWED_AT: ClassVar[NumericField] = NumericField( - "sourceLastViewedAt", "sourceLastViewedAt" - ) - """ - Time (epoch) when the Look was last viewed by a user, in milliseconds. - """ - SOURCE_CONTENT_METADATA_ID: ClassVar[NumericField] = NumericField( - "sourceContentMetadataId", "sourceContentMetadataId" - ) - """ - Identifier of the Look's content metadata, from Looker. - """ - SOURCE_QUERY_ID: ClassVar[NumericField] = NumericField( - "sourceQueryId", "sourceQueryId" - ) - """ - Identifier of the query for the Look, from Looker. - """ - MODEL_NAME: ClassVar[KeywordField] = KeywordField("modelName", "modelName") - """ - Name of the model in which this Look exists. 
- """ - - QUERY: ClassVar[RelationField] = RelationField("query") - """ - TBC - """ - FOLDER: ClassVar[RelationField] = RelationField("folder") - """ - TBC - """ - TILE: ClassVar[RelationField] = RelationField("tile") - """ - TBC - """ - MODEL: ClassVar[RelationField] = RelationField("model") - """ - TBC - """ - DASHBOARD: ClassVar[RelationField] = RelationField("dashboard") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "folder_name", - "source_user_id", - "source_view_count", - "sourcelast_updater_id", - "source_last_accessed_at", - "source_last_viewed_at", - "source_content_metadata_id", - "source_query_id", - "model_name", - "query", - "folder", - "tile", - "model", - "dashboard", - ] - - @property - def folder_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.folder_name - - @folder_name.setter - def folder_name(self, folder_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.folder_name = folder_name - - @property - def source_user_id(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.source_user_id - - @source_user_id.setter - def source_user_id(self, source_user_id: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_user_id = source_user_id - - @property - def source_view_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.source_view_count - - @source_view_count.setter - def source_view_count(self, source_view_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_view_count = source_view_count - - @property - def sourcelast_updater_id(self) -> Optional[int]: - return ( - None if self.attributes is None else self.attributes.sourcelast_updater_id - ) - - @sourcelast_updater_id.setter - def sourcelast_updater_id(self, sourcelast_updater_id: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sourcelast_updater_id = sourcelast_updater_id - - @property - def source_last_accessed_at(self) -> Optional[datetime]: - return ( - None if self.attributes is None else self.attributes.source_last_accessed_at - ) - - @source_last_accessed_at.setter - def source_last_accessed_at(self, source_last_accessed_at: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_last_accessed_at = source_last_accessed_at - - @property - def source_last_viewed_at(self) -> Optional[datetime]: - return ( - None if self.attributes is None else self.attributes.source_last_viewed_at - ) - - @source_last_viewed_at.setter - def source_last_viewed_at(self, source_last_viewed_at: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_last_viewed_at = source_last_viewed_at - - @property - def source_content_metadata_id(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.source_content_metadata_id - ) - - @source_content_metadata_id.setter - def source_content_metadata_id(self, source_content_metadata_id: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_content_metadata_id = source_content_metadata_id - - @property - def source_query_id(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.source_query_id - - 
@source_query_id.setter - def source_query_id(self, source_query_id: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_query_id = source_query_id - - @property - def model_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.model_name - - @model_name.setter - def model_name(self, model_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.model_name = model_name - - @property - def query(self) -> Optional[LookerQuery]: - return None if self.attributes is None else self.attributes.query - - @query.setter - def query(self, query: Optional[LookerQuery]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.query = query - - @property - def folder(self) -> Optional[LookerFolder]: - return None if self.attributes is None else self.attributes.folder - - @folder.setter - def folder(self, folder: Optional[LookerFolder]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.folder = folder - - @property - def tile(self) -> Optional[LookerTile]: - return None if self.attributes is None else self.attributes.tile - - @tile.setter - def tile(self, tile: Optional[LookerTile]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.tile = tile - - @property - def model(self) -> Optional[LookerModel]: - return None if self.attributes is None else self.attributes.model - - @model.setter - def model(self, model: Optional[LookerModel]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.model = model - - @property - def dashboard(self) -> Optional[LookerDashboard]: - return None if self.attributes is None else self.attributes.dashboard - - @dashboard.setter - def dashboard(self, dashboard: Optional[LookerDashboard]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dashboard = dashboard - - class Attributes(Looker.Attributes): - folder_name: Optional[str] = Field(None, description="", alias="folderName") - source_user_id: Optional[int] = Field( - None, description="", alias="sourceUserId" - ) - source_view_count: Optional[int] = Field( - None, description="", alias="sourceViewCount" - ) - sourcelast_updater_id: Optional[int] = Field( - None, description="", alias="sourcelastUpdaterId" - ) - source_last_accessed_at: Optional[datetime] = Field( - None, description="", alias="sourceLastAccessedAt" - ) - source_last_viewed_at: Optional[datetime] = Field( - None, description="", alias="sourceLastViewedAt" - ) - source_content_metadata_id: Optional[int] = Field( - None, description="", alias="sourceContentMetadataId" - ) - source_query_id: Optional[int] = Field( - None, description="", alias="sourceQueryId" - ) - model_name: Optional[str] = Field(None, description="", alias="modelName") - query: Optional[LookerQuery] = Field( - None, description="", alias="query" - ) # relationship - folder: Optional[LookerFolder] = Field( - None, description="", alias="folder" - ) # relationship - tile: Optional[LookerTile] = Field( - None, description="", alias="tile" - ) # relationship - model: Optional[LookerModel] = Field( - None, description="", alias="model" - ) # relationship - dashboard: Optional[LookerDashboard] = Field( - None, description="", alias="dashboard" - ) # relationship - - attributes: "LookerLook.Attributes" = Field( - default_factory=lambda: LookerLook.Attributes(), - description="Map of 
attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class LookerDashboard(Looker): - """Description""" - - type_name: str = Field("LookerDashboard", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "LookerDashboard": - raise ValueError("must be LookerDashboard") - return v - - def __setattr__(self, name, value): - if name in LookerDashboard._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - FOLDER_NAME: ClassVar[KeywordField] = KeywordField("folderName", "folderName") - """ - Name of the parent folder in Looker that contains this dashboard. - """ - SOURCE_USER_ID: ClassVar[NumericField] = NumericField( - "sourceUserId", "sourceUserId" - ) - """ - Identifier of the user who created this dashboard, from Looker. - """ - SOURCE_VIEW_COUNT: ClassVar[NumericField] = NumericField( - "sourceViewCount", "sourceViewCount" - ) - """ - Number of times the dashboard has been viewed through the Looker web UI. - """ - SOURCE_METADATA_ID: ClassVar[NumericField] = NumericField( - "sourceMetadataId", "sourceMetadataId" - ) - """ - Identifier of the dashboard's content metadata, from Looker. - """ - SOURCELAST_UPDATER_ID: ClassVar[NumericField] = NumericField( - "sourcelastUpdaterId", "sourcelastUpdaterId" - ) - """ - Identifier of the user who last updated the dashboard, from Looker. - """ - SOURCE_LAST_ACCESSED_AT: ClassVar[NumericField] = NumericField( - "sourceLastAccessedAt", "sourceLastAccessedAt" - ) - """ - Timestamp (epoch) when the dashboard was last accessed by a user, in milliseconds. - """ - SOURCE_LAST_VIEWED_AT: ClassVar[NumericField] = NumericField( - "sourceLastViewedAt", "sourceLastViewedAt" - ) - """ - Timestamp (epoch) when the dashboard was last viewed by a user. 
- """ - - TILES: ClassVar[RelationField] = RelationField("tiles") - """ - TBC - """ - LOOKS: ClassVar[RelationField] = RelationField("looks") - """ - TBC - """ - FOLDER: ClassVar[RelationField] = RelationField("folder") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "folder_name", - "source_user_id", - "source_view_count", - "source_metadata_id", - "sourcelast_updater_id", - "source_last_accessed_at", - "source_last_viewed_at", - "tiles", - "looks", - "folder", - ] - - @property - def folder_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.folder_name - - @folder_name.setter - def folder_name(self, folder_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.folder_name = folder_name - - @property - def source_user_id(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.source_user_id - - @source_user_id.setter - def source_user_id(self, source_user_id: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_user_id = source_user_id - - @property - def source_view_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.source_view_count - - @source_view_count.setter - def source_view_count(self, source_view_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_view_count = source_view_count - - @property - def source_metadata_id(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.source_metadata_id - - @source_metadata_id.setter - def source_metadata_id(self, source_metadata_id: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_metadata_id = source_metadata_id - - @property - def sourcelast_updater_id(self) -> Optional[int]: - return ( - None if self.attributes is None else self.attributes.sourcelast_updater_id - ) - - @sourcelast_updater_id.setter - def sourcelast_updater_id(self, sourcelast_updater_id: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sourcelast_updater_id = sourcelast_updater_id - - @property - def source_last_accessed_at(self) -> Optional[datetime]: - return ( - None if self.attributes is None else self.attributes.source_last_accessed_at - ) - - @source_last_accessed_at.setter - def source_last_accessed_at(self, source_last_accessed_at: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_last_accessed_at = source_last_accessed_at - - @property - def source_last_viewed_at(self) -> Optional[datetime]: - return ( - None if self.attributes is None else self.attributes.source_last_viewed_at - ) - - @source_last_viewed_at.setter - def source_last_viewed_at(self, source_last_viewed_at: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_last_viewed_at = source_last_viewed_at - - @property - def tiles(self) -> Optional[list[LookerTile]]: - return None if self.attributes is None else self.attributes.tiles - - @tiles.setter - def tiles(self, tiles: Optional[list[LookerTile]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.tiles = tiles - - @property - def looks(self) -> Optional[list[LookerLook]]: - return None if self.attributes is None else self.attributes.looks - - 
@looks.setter - def looks(self, looks: Optional[list[LookerLook]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.looks = looks - - @property - def folder(self) -> Optional[LookerFolder]: - return None if self.attributes is None else self.attributes.folder - - @folder.setter - def folder(self, folder: Optional[LookerFolder]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.folder = folder - - class Attributes(Looker.Attributes): - folder_name: Optional[str] = Field(None, description="", alias="folderName") - source_user_id: Optional[int] = Field( - None, description="", alias="sourceUserId" - ) - source_view_count: Optional[int] = Field( - None, description="", alias="sourceViewCount" - ) - source_metadata_id: Optional[int] = Field( - None, description="", alias="sourceMetadataId" - ) - sourcelast_updater_id: Optional[int] = Field( - None, description="", alias="sourcelastUpdaterId" - ) - source_last_accessed_at: Optional[datetime] = Field( - None, description="", alias="sourceLastAccessedAt" - ) - source_last_viewed_at: Optional[datetime] = Field( - None, description="", alias="sourceLastViewedAt" - ) - tiles: Optional[list[LookerTile]] = Field( - None, description="", alias="tiles" - ) # relationship - looks: Optional[list[LookerLook]] = Field( - None, description="", alias="looks" - ) # relationship - folder: Optional[LookerFolder] = Field( - None, description="", alias="folder" - ) # relationship - - attributes: "LookerDashboard.Attributes" = Field( - default_factory=lambda: LookerDashboard.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class LookerFolder(Looker): - """Description""" - - type_name: str = Field("LookerFolder", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "LookerFolder": - raise ValueError("must be LookerFolder") - return v - - def __setattr__(self, name, value): - if name in LookerFolder._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SOURCE_CONTENT_METADATA_ID: ClassVar[NumericField] = NumericField( - "sourceContentMetadataId", "sourceContentMetadataId" - ) - """ - Identifier for the folder's content metadata in Looker. - """ - SOURCE_CREATOR_ID: ClassVar[NumericField] = NumericField( - "sourceCreatorId", "sourceCreatorId" - ) - """ - Identifier of the user who created the folder, from Looker. - """ - SOURCE_CHILD_COUNT: ClassVar[NumericField] = NumericField( - "sourceChildCount", "sourceChildCount" - ) - """ - Number of subfolders in this folder. - """ - SOURCE_PARENT_ID: ClassVar[NumericField] = NumericField( - "sourceParentID", "sourceParentID" - ) - """ - Identifier of the parent folder of this folder, from Looker. 
- """ - - LOOKER_SUB_FOLDERS: ClassVar[RelationField] = RelationField("lookerSubFolders") - """ - TBC - """ - DASHBOARDS: ClassVar[RelationField] = RelationField("dashboards") - """ - TBC - """ - LOOKS: ClassVar[RelationField] = RelationField("looks") - """ - TBC - """ - LOOKER_PARENT_FOLDER: ClassVar[RelationField] = RelationField("lookerParentFolder") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "source_content_metadata_id", - "source_creator_id", - "source_child_count", - "source_parent_i_d", - "looker_sub_folders", - "dashboards", - "looks", - "looker_parent_folder", - ] - - @property - def source_content_metadata_id(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.source_content_metadata_id - ) - - @source_content_metadata_id.setter - def source_content_metadata_id(self, source_content_metadata_id: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_content_metadata_id = source_content_metadata_id - - @property - def source_creator_id(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.source_creator_id - - @source_creator_id.setter - def source_creator_id(self, source_creator_id: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_creator_id = source_creator_id - - @property - def source_child_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.source_child_count - - @source_child_count.setter - def source_child_count(self, source_child_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_child_count = source_child_count - - @property - def source_parent_i_d(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.source_parent_i_d - - @source_parent_i_d.setter - def source_parent_i_d(self, source_parent_i_d: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_parent_i_d = source_parent_i_d - - @property - def looker_sub_folders(self) -> Optional[list[LookerFolder]]: - return None if self.attributes is None else self.attributes.looker_sub_folders - - @looker_sub_folders.setter - def looker_sub_folders(self, looker_sub_folders: Optional[list[LookerFolder]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.looker_sub_folders = looker_sub_folders - - @property - def dashboards(self) -> Optional[list[LookerDashboard]]: - return None if self.attributes is None else self.attributes.dashboards - - @dashboards.setter - def dashboards(self, dashboards: Optional[list[LookerDashboard]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dashboards = dashboards - - @property - def looks(self) -> Optional[list[LookerLook]]: - return None if self.attributes is None else self.attributes.looks - - @looks.setter - def looks(self, looks: Optional[list[LookerLook]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.looks = looks - - @property - def looker_parent_folder(self) -> Optional[LookerFolder]: - return None if self.attributes is None else self.attributes.looker_parent_folder - - @looker_parent_folder.setter - def looker_parent_folder(self, looker_parent_folder: Optional[LookerFolder]): - if self.attributes is None: - self.attributes = self.Attributes() - 
self.attributes.looker_parent_folder = looker_parent_folder - - class Attributes(Looker.Attributes): - source_content_metadata_id: Optional[int] = Field( - None, description="", alias="sourceContentMetadataId" - ) - source_creator_id: Optional[int] = Field( - None, description="", alias="sourceCreatorId" - ) - source_child_count: Optional[int] = Field( - None, description="", alias="sourceChildCount" - ) - source_parent_i_d: Optional[int] = Field( - None, description="", alias="sourceParentID" - ) - looker_sub_folders: Optional[list[LookerFolder]] = Field( - None, description="", alias="lookerSubFolders" - ) # relationship - dashboards: Optional[list[LookerDashboard]] = Field( - None, description="", alias="dashboards" - ) # relationship - looks: Optional[list[LookerLook]] = Field( - None, description="", alias="looks" - ) # relationship - looker_parent_folder: Optional[LookerFolder] = Field( - None, description="", alias="lookerParentFolder" - ) # relationship - - attributes: "LookerFolder.Attributes" = Field( - default_factory=lambda: LookerFolder.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class LookerTile(Looker): - """Description""" - - type_name: str = Field("LookerTile", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "LookerTile": - raise ValueError("must be LookerTile") - return v - - def __setattr__(self, name, value): - if name in LookerTile._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - LOOKML_LINK_ID: ClassVar[KeywordField] = KeywordField( - "lookmlLinkId", "lookmlLinkId" - ) - """ - Identifier for the LoomML link. - """ - MERGE_RESULT_ID: ClassVar[KeywordField] = KeywordField( - "mergeResultId", "mergeResultId" - ) - """ - Identifier for the merge result. - """ - NOTE_TEXT: ClassVar[KeywordField] = KeywordField("noteText", "noteText") - """ - Text of notes added to the tile. - """ - QUERY_ID: ClassVar[NumericField] = NumericField("queryID", "queryID") - """ - Identifier for the query used to build this tile, from Looker. - """ - RESULT_MAKER_ID: ClassVar[NumericField] = NumericField( - "resultMakerID", "resultMakerID" - ) - """ - Identifier of the ResultMarkerLookup entry, from Looker. - """ - SUBTITLE_TEXT: ClassVar[KeywordField] = KeywordField("subtitleText", "subtitleText") - """ - Text for the subtitle for text tiles. - """ - LOOK_ID: ClassVar[NumericField] = NumericField("lookId", "lookId") - """ - Identifier of the Look used to create this tile, from Looker. 
- """ - - QUERY: ClassVar[RelationField] = RelationField("query") - """ - TBC - """ - LOOK: ClassVar[RelationField] = RelationField("look") - """ - TBC - """ - DASHBOARD: ClassVar[RelationField] = RelationField("dashboard") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "lookml_link_id", - "merge_result_id", - "note_text", - "query_i_d", - "result_maker_i_d", - "subtitle_text", - "look_id", - "query", - "look", - "dashboard", - ] - - @property - def lookml_link_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.lookml_link_id - - @lookml_link_id.setter - def lookml_link_id(self, lookml_link_id: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.lookml_link_id = lookml_link_id - - @property - def merge_result_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.merge_result_id - - @merge_result_id.setter - def merge_result_id(self, merge_result_id: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.merge_result_id = merge_result_id - - @property - def note_text(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.note_text - - @note_text.setter - def note_text(self, note_text: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.note_text = note_text - - @property - def query_i_d(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.query_i_d - - @query_i_d.setter - def query_i_d(self, query_i_d: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.query_i_d = query_i_d - - @property - def result_maker_i_d(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.result_maker_i_d - - @result_maker_i_d.setter - def result_maker_i_d(self, result_maker_i_d: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.result_maker_i_d = result_maker_i_d - - @property - def subtitle_text(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.subtitle_text - - @subtitle_text.setter - def subtitle_text(self, subtitle_text: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.subtitle_text = subtitle_text - - @property - def look_id(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.look_id - - @look_id.setter - def look_id(self, look_id: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.look_id = look_id - - @property - def query(self) -> Optional[LookerQuery]: - return None if self.attributes is None else self.attributes.query - - @query.setter - def query(self, query: Optional[LookerQuery]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.query = query - - @property - def look(self) -> Optional[LookerLook]: - return None if self.attributes is None else self.attributes.look - - @look.setter - def look(self, look: Optional[LookerLook]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.look = look - - @property - def dashboard(self) -> Optional[LookerDashboard]: - return None if self.attributes is None else self.attributes.dashboard - - @dashboard.setter - def dashboard(self, dashboard: Optional[LookerDashboard]): - if self.attributes 
is None: - self.attributes = self.Attributes() - self.attributes.dashboard = dashboard - - class Attributes(Looker.Attributes): - lookml_link_id: Optional[str] = Field( - None, description="", alias="lookmlLinkId" - ) - merge_result_id: Optional[str] = Field( - None, description="", alias="mergeResultId" - ) - note_text: Optional[str] = Field(None, description="", alias="noteText") - query_i_d: Optional[int] = Field(None, description="", alias="queryID") - result_maker_i_d: Optional[int] = Field( - None, description="", alias="resultMakerID" - ) - subtitle_text: Optional[str] = Field(None, description="", alias="subtitleText") - look_id: Optional[int] = Field(None, description="", alias="lookId") - query: Optional[LookerQuery] = Field( - None, description="", alias="query" - ) # relationship - look: Optional[LookerLook] = Field( - None, description="", alias="look" - ) # relationship - dashboard: Optional[LookerDashboard] = Field( - None, description="", alias="dashboard" - ) # relationship - - attributes: "LookerTile.Attributes" = Field( - default_factory=lambda: LookerTile.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class LookerModel(Looker): - """Description""" - - type_name: str = Field("LookerModel", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "LookerModel": - raise ValueError("must be LookerModel") - return v - - def __setattr__(self, name, value): - if name in LookerModel._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - PROJECT_NAME: ClassVar[KeywordField] = KeywordField("projectName", "projectName") - """ - Name of the project in which the model exists. 
- """ - - EXPLORES: ClassVar[RelationField] = RelationField("explores") - """ - TBC - """ - PROJECT: ClassVar[RelationField] = RelationField("project") - """ - TBC - """ - LOOK: ClassVar[RelationField] = RelationField("look") - """ - TBC - """ - QUERIES: ClassVar[RelationField] = RelationField("queries") - """ - TBC - """ - FIELDS: ClassVar[RelationField] = RelationField("fields") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "project_name", - "explores", - "project", - "look", - "queries", - "fields", - ] - - @property - def project_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.project_name - - @project_name.setter - def project_name(self, project_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project_name = project_name - - @property - def explores(self) -> Optional[list[LookerExplore]]: - return None if self.attributes is None else self.attributes.explores - - @explores.setter - def explores(self, explores: Optional[list[LookerExplore]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.explores = explores - - @property - def project(self) -> Optional[LookerProject]: - return None if self.attributes is None else self.attributes.project - - @project.setter - def project(self, project: Optional[LookerProject]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project = project - - @property - def look(self) -> Optional[LookerLook]: - return None if self.attributes is None else self.attributes.look - - @look.setter - def look(self, look: Optional[LookerLook]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.look = look - - @property - def queries(self) -> Optional[list[LookerQuery]]: - return None if self.attributes is None else self.attributes.queries - - @queries.setter - def queries(self, queries: Optional[list[LookerQuery]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.queries = queries - - @property - def fields(self) -> Optional[list[LookerField]]: - return None if self.attributes is None else self.attributes.fields - - @fields.setter - def fields(self, fields: Optional[list[LookerField]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.fields = fields - - class Attributes(Looker.Attributes): - project_name: Optional[str] = Field(None, description="", alias="projectName") - explores: Optional[list[LookerExplore]] = Field( - None, description="", alias="explores" - ) # relationship - project: Optional[LookerProject] = Field( - None, description="", alias="project" - ) # relationship - look: Optional[LookerLook] = Field( - None, description="", alias="look" - ) # relationship - queries: Optional[list[LookerQuery]] = Field( - None, description="", alias="queries" - ) # relationship - fields: Optional[list[LookerField]] = Field( - None, description="", alias="fields" - ) # relationship - - attributes: "LookerModel.Attributes" = Field( - default_factory=lambda: LookerModel.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class LookerExplore(Looker): - """Description""" - - type_name: str = Field("LookerExplore", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "LookerExplore": - raise ValueError("must be LookerExplore") - return v - - def __setattr__(self, name, value): - if name in LookerExplore._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - PROJECT_NAME: ClassVar[KeywordField] = KeywordField("projectName", "projectName") - """ - Name of the parent project of this Explore. - """ - MODEL_NAME: ClassVar[KeywordField] = KeywordField("modelName", "modelName") - """ - Name of the parent model of this Explore. - """ - SOURCE_CONNECTION_NAME: ClassVar[KeywordField] = KeywordField( - "sourceConnectionName", "sourceConnectionName" - ) - """ - Connection name for the Explore, from Looker. - """ - VIEW_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "viewName", "viewName.keyword", "viewName" - ) - """ - Name of the view for the Explore. - """ - SQL_TABLE_NAME: ClassVar[KeywordField] = KeywordField( - "sqlTableName", "sqlTableName" - ) - """ - Name of the SQL table used to declare the Explore. - """ - - PROJECT: ClassVar[RelationField] = RelationField("project") - """ - TBC - """ - MODEL: ClassVar[RelationField] = RelationField("model") - """ - TBC - """ - FIELDS: ClassVar[RelationField] = RelationField("fields") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "project_name", - "model_name", - "source_connection_name", - "view_name", - "sql_table_name", - "project", - "model", - "fields", - ] - - @property - def project_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.project_name - - @project_name.setter - def project_name(self, project_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project_name = project_name - - @property - def model_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.model_name - - @model_name.setter - def model_name(self, model_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.model_name = model_name - - @property - def source_connection_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.source_connection_name - ) - - @source_connection_name.setter - def source_connection_name(self, source_connection_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_connection_name = source_connection_name - - @property - def view_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.view_name - - @view_name.setter - def view_name(self, view_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.view_name = view_name - - @property - def sql_table_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.sql_table_name - - @sql_table_name.setter - def sql_table_name(self, sql_table_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sql_table_name = sql_table_name - - @property - def project(self) -> Optional[LookerProject]: - return None if self.attributes is None else 
self.attributes.project - - @project.setter - def project(self, project: Optional[LookerProject]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project = project - - @property - def model(self) -> Optional[LookerModel]: - return None if self.attributes is None else self.attributes.model - - @model.setter - def model(self, model: Optional[LookerModel]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.model = model - - @property - def fields(self) -> Optional[list[LookerField]]: - return None if self.attributes is None else self.attributes.fields - - @fields.setter - def fields(self, fields: Optional[list[LookerField]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.fields = fields - - class Attributes(Looker.Attributes): - project_name: Optional[str] = Field(None, description="", alias="projectName") - model_name: Optional[str] = Field(None, description="", alias="modelName") - source_connection_name: Optional[str] = Field( - None, description="", alias="sourceConnectionName" - ) - view_name: Optional[str] = Field(None, description="", alias="viewName") - sql_table_name: Optional[str] = Field( - None, description="", alias="sqlTableName" - ) - project: Optional[LookerProject] = Field( - None, description="", alias="project" - ) # relationship - model: Optional[LookerModel] = Field( - None, description="", alias="model" - ) # relationship - fields: Optional[list[LookerField]] = Field( - None, description="", alias="fields" - ) # relationship - - attributes: "LookerExplore.Attributes" = Field( - default_factory=lambda: LookerExplore.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class LookerProject(Looker): - """Description""" - - type_name: str = Field("LookerProject", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "LookerProject": - raise ValueError("must be LookerProject") - return v - - def __setattr__(self, name, value): - if name in LookerProject._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - MODELS: ClassVar[RelationField] = RelationField("models") - """ - TBC - """ - EXPLORES: ClassVar[RelationField] = RelationField("explores") - """ - TBC - """ - FIELDS: ClassVar[RelationField] = RelationField("fields") - """ - TBC - """ - VIEWS: ClassVar[RelationField] = RelationField("views") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "models", - "explores", - "fields", - "views", - ] - - @property - def models(self) -> Optional[list[LookerModel]]: - return None if self.attributes is None else self.attributes.models - - @models.setter - def models(self, models: Optional[list[LookerModel]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.models = models - - @property - def explores(self) -> Optional[list[LookerExplore]]: - return None if self.attributes is None else self.attributes.explores - - @explores.setter - def explores(self, explores: Optional[list[LookerExplore]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.explores = explores - - @property - def fields(self) -> Optional[list[LookerField]]: - return None if self.attributes is None else self.attributes.fields - - @fields.setter - def fields(self, fields: 
Optional[list[LookerField]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.fields = fields - - @property - def views(self) -> Optional[list[LookerView]]: - return None if self.attributes is None else self.attributes.views - - @views.setter - def views(self, views: Optional[list[LookerView]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.views = views - - class Attributes(Looker.Attributes): - models: Optional[list[LookerModel]] = Field( - None, description="", alias="models" - ) # relationship - explores: Optional[list[LookerExplore]] = Field( - None, description="", alias="explores" - ) # relationship - fields: Optional[list[LookerField]] = Field( - None, description="", alias="fields" - ) # relationship - views: Optional[list[LookerView]] = Field( - None, description="", alias="views" - ) # relationship - - attributes: "LookerProject.Attributes" = Field( - default_factory=lambda: LookerProject.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class LookerQuery(Looker): - """Description""" - - type_name: str = Field("LookerQuery", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "LookerQuery": - raise ValueError("must be LookerQuery") - return v - - def __setattr__(self, name, value): - if name in LookerQuery._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SOURCE_DEFINITION: ClassVar[KeywordField] = KeywordField( - "sourceDefinition", "sourceDefinition" - ) - """ - Deprecated. - """ - SOURCE_DEFINITION_DATABASE: ClassVar[KeywordField] = KeywordField( - "sourceDefinitionDatabase", "sourceDefinitionDatabase" - ) - """ - Deprecated. - """ - SOURCE_DEFINITION_SCHEMA: ClassVar[KeywordField] = KeywordField( - "sourceDefinitionSchema", "sourceDefinitionSchema" - ) - """ - Deprecated. - """ - FIELDS: ClassVar[KeywordField] = KeywordField("fields", "fields") - """ - Deprecated. 
- """ - - TILES: ClassVar[RelationField] = RelationField("tiles") - """ - TBC - """ - LOOKS: ClassVar[RelationField] = RelationField("looks") - """ - TBC - """ - MODEL: ClassVar[RelationField] = RelationField("model") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "source_definition", - "source_definition_database", - "source_definition_schema", - "fields", - "tiles", - "looks", - "model", - ] - - @property - def source_definition(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.source_definition - - @source_definition.setter - def source_definition(self, source_definition: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_definition = source_definition - - @property - def source_definition_database(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.source_definition_database - ) - - @source_definition_database.setter - def source_definition_database(self, source_definition_database: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_definition_database = source_definition_database - - @property - def source_definition_schema(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.source_definition_schema - ) - - @source_definition_schema.setter - def source_definition_schema(self, source_definition_schema: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_definition_schema = source_definition_schema - - @property - def fields(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.fields - - @fields.setter - def fields(self, fields: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.fields = fields - - @property - def tiles(self) -> Optional[list[LookerTile]]: - return None if self.attributes is None else self.attributes.tiles - - @tiles.setter - def tiles(self, tiles: Optional[list[LookerTile]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.tiles = tiles - - @property - def looks(self) -> Optional[list[LookerLook]]: - return None if self.attributes is None else self.attributes.looks - - @looks.setter - def looks(self, looks: Optional[list[LookerLook]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.looks = looks - - @property - def model(self) -> Optional[LookerModel]: - return None if self.attributes is None else self.attributes.model - - @model.setter - def model(self, model: Optional[LookerModel]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.model = model - - class Attributes(Looker.Attributes): - source_definition: Optional[str] = Field( - None, description="", alias="sourceDefinition" - ) - source_definition_database: Optional[str] = Field( - None, description="", alias="sourceDefinitionDatabase" - ) - source_definition_schema: Optional[str] = Field( - None, description="", alias="sourceDefinitionSchema" - ) - fields: Optional[set[str]] = Field(None, description="", alias="fields") - tiles: Optional[list[LookerTile]] = Field( - None, description="", alias="tiles" - ) # relationship - looks: Optional[list[LookerLook]] = Field( - None, description="", alias="looks" - ) # relationship - model: Optional[LookerModel] = Field( - None, description="", alias="model" 
- ) # relationship - - attributes: "LookerQuery.Attributes" = Field( - default_factory=lambda: LookerQuery.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class LookerField(Looker): - """Description""" - - type_name: str = Field("LookerField", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "LookerField": - raise ValueError("must be LookerField") - return v - - def __setattr__(self, name, value): - if name in LookerField._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - PROJECT_NAME: ClassVar[KeywordField] = KeywordField("projectName", "projectName") - """ - Name of the project in which this field exists. - """ - LOOKER_EXPLORE_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "lookerExploreQualifiedName", - "lookerExploreQualifiedName", - "lookerExploreQualifiedName.text", - ) - """ - Unique name of the Explore in which this field exists. - """ - LOOKER_VIEW_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "lookerViewQualifiedName", - "lookerViewQualifiedName", - "lookerViewQualifiedName.text", - ) - """ - Unique name of the view in which this field exists. - """ - MODEL_NAME: ClassVar[KeywordField] = KeywordField("modelName", "modelName") - """ - Name of the model in which this field exists. - """ - SOURCE_DEFINITION: ClassVar[KeywordField] = KeywordField( - "sourceDefinition", "sourceDefinition" - ) - """ - Deprecated. - """ - LOOKER_FIELD_DATA_TYPE: ClassVar[KeywordField] = KeywordField( - "lookerFieldDataType", "lookerFieldDataType" - ) - """ - Deprecated. - """ - LOOKER_TIMES_USED: ClassVar[NumericField] = NumericField( - "lookerTimesUsed", "lookerTimesUsed" - ) - """ - Deprecated. 
- """ - - EXPLORE: ClassVar[RelationField] = RelationField("explore") - """ - TBC - """ - PROJECT: ClassVar[RelationField] = RelationField("project") - """ - TBC - """ - VIEW: ClassVar[RelationField] = RelationField("view") - """ - TBC - """ - MODEL: ClassVar[RelationField] = RelationField("model") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "project_name", - "looker_explore_qualified_name", - "looker_view_qualified_name", - "model_name", - "source_definition", - "looker_field_data_type", - "looker_times_used", - "explore", - "project", - "view", - "model", - ] - - @property - def project_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.project_name - - @project_name.setter - def project_name(self, project_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project_name = project_name - - @property - def looker_explore_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.looker_explore_qualified_name - ) - - @looker_explore_qualified_name.setter - def looker_explore_qualified_name( - self, looker_explore_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.looker_explore_qualified_name = looker_explore_qualified_name - - @property - def looker_view_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.looker_view_qualified_name - ) - - @looker_view_qualified_name.setter - def looker_view_qualified_name(self, looker_view_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.looker_view_qualified_name = looker_view_qualified_name - - @property - def model_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.model_name - - @model_name.setter - def model_name(self, model_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.model_name = model_name - - @property - def source_definition(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.source_definition - - @source_definition.setter - def source_definition(self, source_definition: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_definition = source_definition - - @property - def looker_field_data_type(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.looker_field_data_type - ) - - @looker_field_data_type.setter - def looker_field_data_type(self, looker_field_data_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.looker_field_data_type = looker_field_data_type - - @property - def looker_times_used(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.looker_times_used - - @looker_times_used.setter - def looker_times_used(self, looker_times_used: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.looker_times_used = looker_times_used - - @property - def explore(self) -> Optional[LookerExplore]: - return None if self.attributes is None else self.attributes.explore - - @explore.setter - def explore(self, explore: Optional[LookerExplore]): - if self.attributes is None: - self.attributes = self.Attributes() - 
self.attributes.explore = explore - - @property - def project(self) -> Optional[LookerProject]: - return None if self.attributes is None else self.attributes.project - - @project.setter - def project(self, project: Optional[LookerProject]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project = project - - @property - def view(self) -> Optional[LookerView]: - return None if self.attributes is None else self.attributes.view - - @view.setter - def view(self, view: Optional[LookerView]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.view = view - - @property - def model(self) -> Optional[LookerModel]: - return None if self.attributes is None else self.attributes.model - - @model.setter - def model(self, model: Optional[LookerModel]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.model = model - - class Attributes(Looker.Attributes): - project_name: Optional[str] = Field(None, description="", alias="projectName") - looker_explore_qualified_name: Optional[str] = Field( - None, description="", alias="lookerExploreQualifiedName" - ) - looker_view_qualified_name: Optional[str] = Field( - None, description="", alias="lookerViewQualifiedName" - ) - model_name: Optional[str] = Field(None, description="", alias="modelName") - source_definition: Optional[str] = Field( - None, description="", alias="sourceDefinition" - ) - looker_field_data_type: Optional[str] = Field( - None, description="", alias="lookerFieldDataType" - ) - looker_times_used: Optional[int] = Field( - None, description="", alias="lookerTimesUsed" - ) - explore: Optional[LookerExplore] = Field( - None, description="", alias="explore" - ) # relationship - project: Optional[LookerProject] = Field( - None, description="", alias="project" - ) # relationship - view: Optional[LookerView] = Field( - None, description="", alias="view" - ) # relationship - model: Optional[LookerModel] = Field( - None, description="", alias="model" - ) # relationship - - attributes: "LookerField.Attributes" = Field( - default_factory=lambda: LookerField.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class LookerView(Looker): - """Description""" - - type_name: str = Field("LookerView", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "LookerView": - raise ValueError("must be LookerView") - return v - - def __setattr__(self, name, value): - if name in LookerView._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - PROJECT_NAME: ClassVar[KeywordField] = KeywordField("projectName", "projectName") - """ - Name of the project in which this view exists. - """ - LOOKER_VIEW_FILE_PATH: ClassVar[KeywordField] = KeywordField( - "lookerViewFilePath", "lookerViewFilePath" - ) - """ - File path of this view within the project. - """ - LOOKER_VIEW_FILE_NAME: ClassVar[KeywordField] = KeywordField( - "lookerViewFileName", "lookerViewFileName" - ) - """ - File name of this view. 
- """ - - PROJECT: ClassVar[RelationField] = RelationField("project") - """ - TBC - """ - FIELDS: ClassVar[RelationField] = RelationField("fields") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "project_name", - "looker_view_file_path", - "looker_view_file_name", - "project", - "fields", - ] - - @property - def project_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.project_name - - @project_name.setter - def project_name(self, project_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project_name = project_name - - @property - def looker_view_file_path(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.looker_view_file_path - ) - - @looker_view_file_path.setter - def looker_view_file_path(self, looker_view_file_path: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.looker_view_file_path = looker_view_file_path - - @property - def looker_view_file_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.looker_view_file_name - ) - - @looker_view_file_name.setter - def looker_view_file_name(self, looker_view_file_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.looker_view_file_name = looker_view_file_name - - @property - def project(self) -> Optional[LookerProject]: - return None if self.attributes is None else self.attributes.project - - @project.setter - def project(self, project: Optional[LookerProject]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.project = project - - @property - def fields(self) -> Optional[list[LookerField]]: - return None if self.attributes is None else self.attributes.fields - - @fields.setter - def fields(self, fields: Optional[list[LookerField]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.fields = fields - - class Attributes(Looker.Attributes): - project_name: Optional[str] = Field(None, description="", alias="projectName") - looker_view_file_path: Optional[str] = Field( - None, description="", alias="lookerViewFilePath" - ) - looker_view_file_name: Optional[str] = Field( - None, description="", alias="lookerViewFileName" - ) - project: Optional[LookerProject] = Field( - None, description="", alias="project" - ) # relationship - fields: Optional[list[LookerField]] = Field( - None, description="", alias="fields" - ) # relationship - - attributes: "LookerView.Attributes" = Field( - default_factory=lambda: LookerView.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by "
-        "type, so are described in the sub-types of this schema.\n",
-    )
-
-
-LookerLook.Attributes.update_forward_refs()
-
-
-LookerDashboard.Attributes.update_forward_refs()
-
-
-LookerFolder.Attributes.update_forward_refs()
-
-
-LookerTile.Attributes.update_forward_refs()
-
-
-LookerModel.Attributes.update_forward_refs()
-
-
-LookerExplore.Attributes.update_forward_refs()
-
-
-LookerProject.Attributes.update_forward_refs()
-
-
-LookerQuery.Attributes.update_forward_refs()
-
-
-LookerField.Attributes.update_forward_refs()
-
-
-LookerView.Attributes.update_forward_refs()
diff --git a/pyatlan/model/assets/asset74.py b/pyatlan/model/assets/asset74.py
deleted file mode 100644
index b0d1600f1..000000000
--- a/pyatlan/model/assets/asset74.py
+++ /dev/null
@@ -1,1113 +0,0 @@
-# SPDX-License-Identifier: Apache-2.0
-# Copyright 2022 Atlan Pte. Ltd.
-
-
-from __future__ import annotations
-
-from datetime import datetime
-from typing import ClassVar, Optional
-
-from pydantic import Field, validator
-
-from pyatlan.model.fields.atlan_fields import (
-    BooleanField,
-    KeywordField,
-    KeywordTextField,
-    NumericField,
-    RelationField,
-)
-
-from .asset46 import Sisense
-
-
-class SisenseFolder(Sisense):
-    """Description"""
-
-    type_name: str = Field("SisenseFolder", allow_mutation=False)
-
-    @validator("type_name")
-    def validate_type_name(cls, v):
-        if v != "SisenseFolder":
-            raise ValueError("must be SisenseFolder")
-        return v
-
-    def __setattr__(self, name, value):
-        if name in SisenseFolder._convenience_properties:
-            return object.__setattr__(self, name, value)
-        super().__setattr__(name, value)
-
-    SISENSE_FOLDER_PARENT_FOLDER_QUALIFIED_NAME: ClassVar[
-        KeywordTextField
-    ] = KeywordTextField(
-        "sisenseFolderParentFolderQualifiedName",
-        "sisenseFolderParentFolderQualifiedName",
-        "sisenseFolderParentFolderQualifiedName.text",
-    )
-    """
-    Unique name of the parent folder in which this folder exists.
- """ - - SISENSE_CHILD_FOLDERS: ClassVar[RelationField] = RelationField( - "sisenseChildFolders" - ) - """ - TBC - """ - SISENSE_WIDGETS: ClassVar[RelationField] = RelationField("sisenseWidgets") - """ - TBC - """ - SISENSE_DASHBOARDS: ClassVar[RelationField] = RelationField("sisenseDashboards") - """ - TBC - """ - SISENSE_PARENT_FOLDER: ClassVar[RelationField] = RelationField( - "sisenseParentFolder" - ) - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "sisense_folder_parent_folder_qualified_name", - "sisense_child_folders", - "sisense_widgets", - "sisense_dashboards", - "sisense_parent_folder", - ] - - @property - def sisense_folder_parent_folder_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.sisense_folder_parent_folder_qualified_name - ) - - @sisense_folder_parent_folder_qualified_name.setter - def sisense_folder_parent_folder_qualified_name( - self, sisense_folder_parent_folder_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_folder_parent_folder_qualified_name = ( - sisense_folder_parent_folder_qualified_name - ) - - @property - def sisense_child_folders(self) -> Optional[list[SisenseFolder]]: - return ( - None if self.attributes is None else self.attributes.sisense_child_folders - ) - - @sisense_child_folders.setter - def sisense_child_folders( - self, sisense_child_folders: Optional[list[SisenseFolder]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_child_folders = sisense_child_folders - - @property - def sisense_widgets(self) -> Optional[list[SisenseWidget]]: - return None if self.attributes is None else self.attributes.sisense_widgets - - @sisense_widgets.setter - def sisense_widgets(self, sisense_widgets: Optional[list[SisenseWidget]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_widgets = sisense_widgets - - @property - def sisense_dashboards(self) -> Optional[list[SisenseDashboard]]: - return None if self.attributes is None else self.attributes.sisense_dashboards - - @sisense_dashboards.setter - def sisense_dashboards(self, sisense_dashboards: Optional[list[SisenseDashboard]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_dashboards = sisense_dashboards - - @property - def sisense_parent_folder(self) -> Optional[SisenseFolder]: - return ( - None if self.attributes is None else self.attributes.sisense_parent_folder - ) - - @sisense_parent_folder.setter - def sisense_parent_folder(self, sisense_parent_folder: Optional[SisenseFolder]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_parent_folder = sisense_parent_folder - - class Attributes(Sisense.Attributes): - sisense_folder_parent_folder_qualified_name: Optional[str] = Field( - None, description="", alias="sisenseFolderParentFolderQualifiedName" - ) - sisense_child_folders: Optional[list[SisenseFolder]] = Field( - None, description="", alias="sisenseChildFolders" - ) # relationship - sisense_widgets: Optional[list[SisenseWidget]] = Field( - None, description="", alias="sisenseWidgets" - ) # relationship - sisense_dashboards: Optional[list[SisenseDashboard]] = Field( - None, description="", alias="sisenseDashboards" - ) # relationship - sisense_parent_folder: Optional[SisenseFolder] = Field( - None, description="", alias="sisenseParentFolder" - ) # relationship - - 
attributes: "SisenseFolder.Attributes" = Field( - default_factory=lambda: SisenseFolder.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class SisenseWidget(Sisense): - """Description""" - - type_name: str = Field("SisenseWidget", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "SisenseWidget": - raise ValueError("must be SisenseWidget") - return v - - def __setattr__(self, name, value): - if name in SisenseWidget._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SISENSE_WIDGET_COLUMN_COUNT: ClassVar[NumericField] = NumericField( - "sisenseWidgetColumnCount", "sisenseWidgetColumnCount" - ) - """ - Number of columns used in this widget. - """ - SISENSE_WIDGET_SUB_TYPE: ClassVar[KeywordField] = KeywordField( - "sisenseWidgetSubType", "sisenseWidgetSubType" - ) - """ - Subtype of this widget. - """ - SISENSE_WIDGET_SIZE: ClassVar[KeywordField] = KeywordField( - "sisenseWidgetSize", "sisenseWidgetSize" - ) - """ - Size of this widget. - """ - SISENSE_WIDGET_DASHBOARD_QUALIFIED_NAME: ClassVar[ - KeywordTextField - ] = KeywordTextField( - "sisenseWidgetDashboardQualifiedName", - "sisenseWidgetDashboardQualifiedName", - "sisenseWidgetDashboardQualifiedName.text", - ) - """ - Unique name of the dashboard in which this widget exists. - """ - SISENSE_WIDGET_FOLDER_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "sisenseWidgetFolderQualifiedName", - "sisenseWidgetFolderQualifiedName", - "sisenseWidgetFolderQualifiedName.text", - ) - """ - Unique name of the folder in which this widget exists. 
- """ - - SISENSE_DATAMODEL_TABLES: ClassVar[RelationField] = RelationField( - "sisenseDatamodelTables" - ) - """ - TBC - """ - SISENSE_FOLDER: ClassVar[RelationField] = RelationField("sisenseFolder") - """ - TBC - """ - SISENSE_DASHBOARD: ClassVar[RelationField] = RelationField("sisenseDashboard") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "sisense_widget_column_count", - "sisense_widget_sub_type", - "sisense_widget_size", - "sisense_widget_dashboard_qualified_name", - "sisense_widget_folder_qualified_name", - "sisense_datamodel_tables", - "sisense_folder", - "sisense_dashboard", - ] - - @property - def sisense_widget_column_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.sisense_widget_column_count - ) - - @sisense_widget_column_count.setter - def sisense_widget_column_count(self, sisense_widget_column_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_widget_column_count = sisense_widget_column_count - - @property - def sisense_widget_sub_type(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.sisense_widget_sub_type - ) - - @sisense_widget_sub_type.setter - def sisense_widget_sub_type(self, sisense_widget_sub_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_widget_sub_type = sisense_widget_sub_type - - @property - def sisense_widget_size(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.sisense_widget_size - - @sisense_widget_size.setter - def sisense_widget_size(self, sisense_widget_size: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_widget_size = sisense_widget_size - - @property - def sisense_widget_dashboard_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.sisense_widget_dashboard_qualified_name - ) - - @sisense_widget_dashboard_qualified_name.setter - def sisense_widget_dashboard_qualified_name( - self, sisense_widget_dashboard_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_widget_dashboard_qualified_name = ( - sisense_widget_dashboard_qualified_name - ) - - @property - def sisense_widget_folder_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.sisense_widget_folder_qualified_name - ) - - @sisense_widget_folder_qualified_name.setter - def sisense_widget_folder_qualified_name( - self, sisense_widget_folder_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_widget_folder_qualified_name = ( - sisense_widget_folder_qualified_name - ) - - @property - def sisense_datamodel_tables(self) -> Optional[list[SisenseDatamodelTable]]: - return ( - None - if self.attributes is None - else self.attributes.sisense_datamodel_tables - ) - - @sisense_datamodel_tables.setter - def sisense_datamodel_tables( - self, sisense_datamodel_tables: Optional[list[SisenseDatamodelTable]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodel_tables = sisense_datamodel_tables - - @property - def sisense_folder(self) -> Optional[SisenseFolder]: - return None if self.attributes is None else self.attributes.sisense_folder - - 
@sisense_folder.setter - def sisense_folder(self, sisense_folder: Optional[SisenseFolder]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_folder = sisense_folder - - @property - def sisense_dashboard(self) -> Optional[SisenseDashboard]: - return None if self.attributes is None else self.attributes.sisense_dashboard - - @sisense_dashboard.setter - def sisense_dashboard(self, sisense_dashboard: Optional[SisenseDashboard]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_dashboard = sisense_dashboard - - class Attributes(Sisense.Attributes): - sisense_widget_column_count: Optional[int] = Field( - None, description="", alias="sisenseWidgetColumnCount" - ) - sisense_widget_sub_type: Optional[str] = Field( - None, description="", alias="sisenseWidgetSubType" - ) - sisense_widget_size: Optional[str] = Field( - None, description="", alias="sisenseWidgetSize" - ) - sisense_widget_dashboard_qualified_name: Optional[str] = Field( - None, description="", alias="sisenseWidgetDashboardQualifiedName" - ) - sisense_widget_folder_qualified_name: Optional[str] = Field( - None, description="", alias="sisenseWidgetFolderQualifiedName" - ) - sisense_datamodel_tables: Optional[list[SisenseDatamodelTable]] = Field( - None, description="", alias="sisenseDatamodelTables" - ) # relationship - sisense_folder: Optional[SisenseFolder] = Field( - None, description="", alias="sisenseFolder" - ) # relationship - sisense_dashboard: Optional[SisenseDashboard] = Field( - None, description="", alias="sisenseDashboard" - ) # relationship - - attributes: "SisenseWidget.Attributes" = Field( - default_factory=lambda: SisenseWidget.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class SisenseDatamodel(Sisense): - """Description""" - - type_name: str = Field("SisenseDatamodel", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "SisenseDatamodel": - raise ValueError("must be SisenseDatamodel") - return v - - def __setattr__(self, name, value): - if name in SisenseDatamodel._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SISENSE_DATAMODEL_TABLE_COUNT: ClassVar[NumericField] = NumericField( - "sisenseDatamodelTableCount", "sisenseDatamodelTableCount" - ) - """ - Number of tables in this datamodel. - """ - SISENSE_DATAMODEL_SERVER: ClassVar[KeywordField] = KeywordField( - "sisenseDatamodelServer", "sisenseDatamodelServer" - ) - """ - Hostname of the server on which this datamodel was created. - """ - SISENSE_DATAMODEL_REVISION: ClassVar[KeywordField] = KeywordField( - "sisenseDatamodelRevision", "sisenseDatamodelRevision" - ) - """ - Revision of this datamodel. - """ - SISENSE_DATAMODEL_LAST_BUILD_TIME: ClassVar[NumericField] = NumericField( - "sisenseDatamodelLastBuildTime", "sisenseDatamodelLastBuildTime" - ) - """ - Time (epoch) when this datamodel was last built, in milliseconds. - """ - SISENSE_DATAMODEL_LAST_SUCCESSFUL_BUILD_TIME: ClassVar[NumericField] = NumericField( - "sisenseDatamodelLastSuccessfulBuildTime", - "sisenseDatamodelLastSuccessfulBuildTime", - ) - """ - Time (epoch) when this datamodel was last built successfully, in milliseconds. 
- """ - SISENSE_DATAMODEL_LAST_PUBLISH_TIME: ClassVar[NumericField] = NumericField( - "sisenseDatamodelLastPublishTime", "sisenseDatamodelLastPublishTime" - ) - """ - Time (epoch) when this datamodel was last published, in milliseconds. - """ - SISENSE_DATAMODEL_TYPE: ClassVar[KeywordField] = KeywordField( - "sisenseDatamodelType", "sisenseDatamodelType" - ) - """ - Type of this datamodel, for example: 'extract' or 'custom'. - """ - SISENSE_DATAMODEL_RELATION_TYPE: ClassVar[KeywordField] = KeywordField( - "sisenseDatamodelRelationType", "sisenseDatamodelRelationType" - ) - """ - Default relation type for this datamodel. 'extract' type Datamodels have regular relations by default. 'live' type Datamodels have direct relations by default. - """ # noqa: E501 - - SISENSE_DATAMODEL_TABLES: ClassVar[RelationField] = RelationField( - "sisenseDatamodelTables" - ) - """ - TBC - """ - SISENSE_DASHBOARDS: ClassVar[RelationField] = RelationField("sisenseDashboards") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "sisense_datamodel_table_count", - "sisense_datamodel_server", - "sisense_datamodel_revision", - "sisense_datamodel_last_build_time", - "sisense_datamodel_last_successful_build_time", - "sisense_datamodel_last_publish_time", - "sisense_datamodel_type", - "sisense_datamodel_relation_type", - "sisense_datamodel_tables", - "sisense_dashboards", - ] - - @property - def sisense_datamodel_table_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.sisense_datamodel_table_count - ) - - @sisense_datamodel_table_count.setter - def sisense_datamodel_table_count( - self, sisense_datamodel_table_count: Optional[int] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodel_table_count = sisense_datamodel_table_count - - @property - def sisense_datamodel_server(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.sisense_datamodel_server - ) - - @sisense_datamodel_server.setter - def sisense_datamodel_server(self, sisense_datamodel_server: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodel_server = sisense_datamodel_server - - @property - def sisense_datamodel_revision(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.sisense_datamodel_revision - ) - - @sisense_datamodel_revision.setter - def sisense_datamodel_revision(self, sisense_datamodel_revision: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodel_revision = sisense_datamodel_revision - - @property - def sisense_datamodel_last_build_time(self) -> Optional[datetime]: - return ( - None - if self.attributes is None - else self.attributes.sisense_datamodel_last_build_time - ) - - @sisense_datamodel_last_build_time.setter - def sisense_datamodel_last_build_time( - self, sisense_datamodel_last_build_time: Optional[datetime] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodel_last_build_time = ( - sisense_datamodel_last_build_time - ) - - @property - def sisense_datamodel_last_successful_build_time(self) -> Optional[datetime]: - return ( - None - if self.attributes is None - else self.attributes.sisense_datamodel_last_successful_build_time - ) - - @sisense_datamodel_last_successful_build_time.setter - def sisense_datamodel_last_successful_build_time( - self, 
sisense_datamodel_last_successful_build_time: Optional[datetime] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodel_last_successful_build_time = ( - sisense_datamodel_last_successful_build_time - ) - - @property - def sisense_datamodel_last_publish_time(self) -> Optional[datetime]: - return ( - None - if self.attributes is None - else self.attributes.sisense_datamodel_last_publish_time - ) - - @sisense_datamodel_last_publish_time.setter - def sisense_datamodel_last_publish_time( - self, sisense_datamodel_last_publish_time: Optional[datetime] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodel_last_publish_time = ( - sisense_datamodel_last_publish_time - ) - - @property - def sisense_datamodel_type(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.sisense_datamodel_type - ) - - @sisense_datamodel_type.setter - def sisense_datamodel_type(self, sisense_datamodel_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodel_type = sisense_datamodel_type - - @property - def sisense_datamodel_relation_type(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.sisense_datamodel_relation_type - ) - - @sisense_datamodel_relation_type.setter - def sisense_datamodel_relation_type( - self, sisense_datamodel_relation_type: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodel_relation_type = ( - sisense_datamodel_relation_type - ) - - @property - def sisense_datamodel_tables(self) -> Optional[list[SisenseDatamodelTable]]: - return ( - None - if self.attributes is None - else self.attributes.sisense_datamodel_tables - ) - - @sisense_datamodel_tables.setter - def sisense_datamodel_tables( - self, sisense_datamodel_tables: Optional[list[SisenseDatamodelTable]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodel_tables = sisense_datamodel_tables - - @property - def sisense_dashboards(self) -> Optional[list[SisenseDashboard]]: - return None if self.attributes is None else self.attributes.sisense_dashboards - - @sisense_dashboards.setter - def sisense_dashboards(self, sisense_dashboards: Optional[list[SisenseDashboard]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_dashboards = sisense_dashboards - - class Attributes(Sisense.Attributes): - sisense_datamodel_table_count: Optional[int] = Field( - None, description="", alias="sisenseDatamodelTableCount" - ) - sisense_datamodel_server: Optional[str] = Field( - None, description="", alias="sisenseDatamodelServer" - ) - sisense_datamodel_revision: Optional[str] = Field( - None, description="", alias="sisenseDatamodelRevision" - ) - sisense_datamodel_last_build_time: Optional[datetime] = Field( - None, description="", alias="sisenseDatamodelLastBuildTime" - ) - sisense_datamodel_last_successful_build_time: Optional[datetime] = Field( - None, description="", alias="sisenseDatamodelLastSuccessfulBuildTime" - ) - sisense_datamodel_last_publish_time: Optional[datetime] = Field( - None, description="", alias="sisenseDatamodelLastPublishTime" - ) - sisense_datamodel_type: Optional[str] = Field( - None, description="", alias="sisenseDatamodelType" - ) - sisense_datamodel_relation_type: Optional[str] = Field( - None, description="", 
alias="sisenseDatamodelRelationType" - ) - sisense_datamodel_tables: Optional[list[SisenseDatamodelTable]] = Field( - None, description="", alias="sisenseDatamodelTables" - ) # relationship - sisense_dashboards: Optional[list[SisenseDashboard]] = Field( - None, description="", alias="sisenseDashboards" - ) # relationship - - attributes: "SisenseDatamodel.Attributes" = Field( - default_factory=lambda: SisenseDatamodel.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class SisenseDatamodelTable(Sisense): - """Description""" - - type_name: str = Field("SisenseDatamodelTable", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "SisenseDatamodelTable": - raise ValueError("must be SisenseDatamodelTable") - return v - - def __setattr__(self, name, value): - if name in SisenseDatamodelTable._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SISENSE_DATAMODEL_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "sisenseDatamodelQualifiedName", - "sisenseDatamodelQualifiedName", - "sisenseDatamodelQualifiedName.text", - ) - """ - Unique name of the datamodel in which this datamodel table exists. - """ - SISENSE_DATAMODEL_TABLE_COLUMN_COUNT: ClassVar[NumericField] = NumericField( - "sisenseDatamodelTableColumnCount", "sisenseDatamodelTableColumnCount" - ) - """ - Number of columns present in this datamodel table. - """ - SISENSE_DATAMODEL_TABLE_TYPE: ClassVar[KeywordField] = KeywordField( - "sisenseDatamodelTableType", "sisenseDatamodelTableType" - ) - """ - Type of this datamodel table, for example: 'base' for regular tables, 'custom' for SQL expression-based tables. - """ - SISENSE_DATAMODEL_TABLE_EXPRESSION: ClassVar[KeywordField] = KeywordField( - "sisenseDatamodelTableExpression", "sisenseDatamodelTableExpression" - ) - """ - SQL expression of this datamodel table. - """ - SISENSE_DATAMODEL_TABLE_IS_MATERIALIZED: ClassVar[BooleanField] = BooleanField( - "sisenseDatamodelTableIsMaterialized", "sisenseDatamodelTableIsMaterialized" - ) - """ - Whether this datamodel table is materialised (true) or not (false). - """ - SISENSE_DATAMODEL_TABLE_IS_HIDDEN: ClassVar[BooleanField] = BooleanField( - "sisenseDatamodelTableIsHidden", "sisenseDatamodelTableIsHidden" - ) - """ - Whether this datamodel table is hidden in Sisense (true) or not (false). - """ - SISENSE_DATAMODEL_TABLE_SCHEDULE: ClassVar[KeywordField] = KeywordField( - "sisenseDatamodelTableSchedule", "sisenseDatamodelTableSchedule" - ) - """ - JSON specifying the refresh schedule of this datamodel table. - """ - SISENSE_DATAMODEL_TABLE_LIVE_QUERY_SETTINGS: ClassVar[KeywordField] = KeywordField( - "sisenseDatamodelTableLiveQuerySettings", - "sisenseDatamodelTableLiveQuerySettings", - ) - """ - JSON specifying the LiveQuery settings of this datamodel table. 
- """ - - SISENSE_DATAMODEL: ClassVar[RelationField] = RelationField("sisenseDatamodel") - """ - TBC - """ - SISENSE_WIDGETS: ClassVar[RelationField] = RelationField("sisenseWidgets") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "sisense_datamodel_qualified_name", - "sisense_datamodel_table_column_count", - "sisense_datamodel_table_type", - "sisense_datamodel_table_expression", - "sisense_datamodel_table_is_materialized", - "sisense_datamodel_table_is_hidden", - "sisense_datamodel_table_schedule", - "sisense_datamodel_table_live_query_settings", - "sisense_datamodel", - "sisense_widgets", - ] - - @property - def sisense_datamodel_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.sisense_datamodel_qualified_name - ) - - @sisense_datamodel_qualified_name.setter - def sisense_datamodel_qualified_name( - self, sisense_datamodel_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodel_qualified_name = ( - sisense_datamodel_qualified_name - ) - - @property - def sisense_datamodel_table_column_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.sisense_datamodel_table_column_count - ) - - @sisense_datamodel_table_column_count.setter - def sisense_datamodel_table_column_count( - self, sisense_datamodel_table_column_count: Optional[int] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodel_table_column_count = ( - sisense_datamodel_table_column_count - ) - - @property - def sisense_datamodel_table_type(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.sisense_datamodel_table_type - ) - - @sisense_datamodel_table_type.setter - def sisense_datamodel_table_type(self, sisense_datamodel_table_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodel_table_type = sisense_datamodel_table_type - - @property - def sisense_datamodel_table_expression(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.sisense_datamodel_table_expression - ) - - @sisense_datamodel_table_expression.setter - def sisense_datamodel_table_expression( - self, sisense_datamodel_table_expression: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodel_table_expression = ( - sisense_datamodel_table_expression - ) - - @property - def sisense_datamodel_table_is_materialized(self) -> Optional[bool]: - return ( - None - if self.attributes is None - else self.attributes.sisense_datamodel_table_is_materialized - ) - - @sisense_datamodel_table_is_materialized.setter - def sisense_datamodel_table_is_materialized( - self, sisense_datamodel_table_is_materialized: Optional[bool] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodel_table_is_materialized = ( - sisense_datamodel_table_is_materialized - ) - - @property - def sisense_datamodel_table_is_hidden(self) -> Optional[bool]: - return ( - None - if self.attributes is None - else self.attributes.sisense_datamodel_table_is_hidden - ) - - @sisense_datamodel_table_is_hidden.setter - def sisense_datamodel_table_is_hidden( - self, sisense_datamodel_table_is_hidden: Optional[bool] - ): - if self.attributes is None: - self.attributes = self.Attributes() - 
self.attributes.sisense_datamodel_table_is_hidden = ( - sisense_datamodel_table_is_hidden - ) - - @property - def sisense_datamodel_table_schedule(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.sisense_datamodel_table_schedule - ) - - @sisense_datamodel_table_schedule.setter - def sisense_datamodel_table_schedule( - self, sisense_datamodel_table_schedule: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodel_table_schedule = ( - sisense_datamodel_table_schedule - ) - - @property - def sisense_datamodel_table_live_query_settings(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.sisense_datamodel_table_live_query_settings - ) - - @sisense_datamodel_table_live_query_settings.setter - def sisense_datamodel_table_live_query_settings( - self, sisense_datamodel_table_live_query_settings: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodel_table_live_query_settings = ( - sisense_datamodel_table_live_query_settings - ) - - @property - def sisense_datamodel(self) -> Optional[SisenseDatamodel]: - return None if self.attributes is None else self.attributes.sisense_datamodel - - @sisense_datamodel.setter - def sisense_datamodel(self, sisense_datamodel: Optional[SisenseDatamodel]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodel = sisense_datamodel - - @property - def sisense_widgets(self) -> Optional[list[SisenseWidget]]: - return None if self.attributes is None else self.attributes.sisense_widgets - - @sisense_widgets.setter - def sisense_widgets(self, sisense_widgets: Optional[list[SisenseWidget]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_widgets = sisense_widgets - - class Attributes(Sisense.Attributes): - sisense_datamodel_qualified_name: Optional[str] = Field( - None, description="", alias="sisenseDatamodelQualifiedName" - ) - sisense_datamodel_table_column_count: Optional[int] = Field( - None, description="", alias="sisenseDatamodelTableColumnCount" - ) - sisense_datamodel_table_type: Optional[str] = Field( - None, description="", alias="sisenseDatamodelTableType" - ) - sisense_datamodel_table_expression: Optional[str] = Field( - None, description="", alias="sisenseDatamodelTableExpression" - ) - sisense_datamodel_table_is_materialized: Optional[bool] = Field( - None, description="", alias="sisenseDatamodelTableIsMaterialized" - ) - sisense_datamodel_table_is_hidden: Optional[bool] = Field( - None, description="", alias="sisenseDatamodelTableIsHidden" - ) - sisense_datamodel_table_schedule: Optional[str] = Field( - None, description="", alias="sisenseDatamodelTableSchedule" - ) - sisense_datamodel_table_live_query_settings: Optional[str] = Field( - None, description="", alias="sisenseDatamodelTableLiveQuerySettings" - ) - sisense_datamodel: Optional[SisenseDatamodel] = Field( - None, description="", alias="sisenseDatamodel" - ) # relationship - sisense_widgets: Optional[list[SisenseWidget]] = Field( - None, description="", alias="sisenseWidgets" - ) # relationship - - attributes: "SisenseDatamodelTable.Attributes" = Field( - default_factory=lambda: SisenseDatamodelTable.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class SisenseDashboard(Sisense): - """Description""" - - type_name: str = Field("SisenseDashboard", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "SisenseDashboard": - raise ValueError("must be SisenseDashboard") - return v - - def __setattr__(self, name, value): - if name in SisenseDashboard._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SISENSE_DASHBOARD_FOLDER_QUALIFIED_NAME: ClassVar[ - KeywordTextField - ] = KeywordTextField( - "sisenseDashboardFolderQualifiedName", - "sisenseDashboardFolderQualifiedName", - "sisenseDashboardFolderQualifiedName.text", - ) - """ - Unique name of the folder in which this dashboard exists. - """ - SISENSE_DASHBOARD_WIDGET_COUNT: ClassVar[NumericField] = NumericField( - "sisenseDashboardWidgetCount", "sisenseDashboardWidgetCount" - ) - """ - Number of widgets in this dashboard. - """ - - SISENSE_DATAMODELS: ClassVar[RelationField] = RelationField("sisenseDatamodels") - """ - TBC - """ - SISENSE_WIDGETS: ClassVar[RelationField] = RelationField("sisenseWidgets") - """ - TBC - """ - SISENSE_FOLDER: ClassVar[RelationField] = RelationField("sisenseFolder") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "sisense_dashboard_folder_qualified_name", - "sisense_dashboard_widget_count", - "sisense_datamodels", - "sisense_widgets", - "sisense_folder", - ] - - @property - def sisense_dashboard_folder_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.sisense_dashboard_folder_qualified_name - ) - - @sisense_dashboard_folder_qualified_name.setter - def sisense_dashboard_folder_qualified_name( - self, sisense_dashboard_folder_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_dashboard_folder_qualified_name = ( - sisense_dashboard_folder_qualified_name - ) - - @property - def sisense_dashboard_widget_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.sisense_dashboard_widget_count - ) - - @sisense_dashboard_widget_count.setter - def sisense_dashboard_widget_count( - self, sisense_dashboard_widget_count: Optional[int] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_dashboard_widget_count = sisense_dashboard_widget_count - - @property - def sisense_datamodels(self) -> Optional[list[SisenseDatamodel]]: - return None if self.attributes is None else self.attributes.sisense_datamodels - - @sisense_datamodels.setter - def sisense_datamodels(self, sisense_datamodels: Optional[list[SisenseDatamodel]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_datamodels = sisense_datamodels - - @property - def sisense_widgets(self) -> Optional[list[SisenseWidget]]: - return None if self.attributes is None else self.attributes.sisense_widgets - - @sisense_widgets.setter - def sisense_widgets(self, sisense_widgets: Optional[list[SisenseWidget]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_widgets = sisense_widgets - - @property - def sisense_folder(self) -> Optional[SisenseFolder]: - return None if self.attributes is None else self.attributes.sisense_folder - - @sisense_folder.setter - def sisense_folder(self, 
sisense_folder: Optional[SisenseFolder]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sisense_folder = sisense_folder - - class Attributes(Sisense.Attributes): - sisense_dashboard_folder_qualified_name: Optional[str] = Field( - None, description="", alias="sisenseDashboardFolderQualifiedName" - ) - sisense_dashboard_widget_count: Optional[int] = Field( - None, description="", alias="sisenseDashboardWidgetCount" - ) - sisense_datamodels: Optional[list[SisenseDatamodel]] = Field( - None, description="", alias="sisenseDatamodels" - ) # relationship - sisense_widgets: Optional[list[SisenseWidget]] = Field( - None, description="", alias="sisenseWidgets" - ) # relationship - sisense_folder: Optional[SisenseFolder] = Field( - None, description="", alias="sisenseFolder" - ) # relationship - - attributes: "SisenseDashboard.Attributes" = Field( - default_factory=lambda: SisenseDashboard.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -SisenseFolder.Attributes.update_forward_refs() - - -SisenseWidget.Attributes.update_forward_refs() - - -SisenseDatamodel.Attributes.update_forward_refs() - - -SisenseDatamodelTable.Attributes.update_forward_refs() - - -SisenseDashboard.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset75.py b/pyatlan/model/assets/asset75.py deleted file mode 100644 index 31b12a387..000000000 --- a/pyatlan/model/assets/asset75.py +++ /dev/null @@ -1,402 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 Atlan Pte. Ltd. - - -from __future__ import annotations - -from typing import ClassVar, Optional - -from pydantic import Field, validator - -from pyatlan.model.fields.atlan_fields import ( - BooleanField, - KeywordField, - KeywordTextField, - NumericField, - RelationField, -) - -from .asset48 import Metabase - - -class MetabaseQuestion(Metabase): - """Description""" - - type_name: str = Field("MetabaseQuestion", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "MetabaseQuestion": - raise ValueError("must be MetabaseQuestion") - return v - - def __setattr__(self, name, value): - if name in MetabaseQuestion._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - METABASE_DASHBOARD_COUNT: ClassVar[NumericField] = NumericField( - "metabaseDashboardCount", "metabaseDashboardCount" - ) - """ - - """ - METABASE_QUERY_TYPE: ClassVar[KeywordTextField] = KeywordTextField( - "metabaseQueryType", "metabaseQueryType", "metabaseQueryType.text" - ) - """ - - """ - METABASE_QUERY: ClassVar[KeywordTextField] = KeywordTextField( - "metabaseQuery", "metabaseQuery.keyword", "metabaseQuery" - ) - """ - - """ - - METABASE_DASHBOARDS: ClassVar[RelationField] = RelationField("metabaseDashboards") - """ - TBC - """ - METABASE_COLLECTION: ClassVar[RelationField] = RelationField("metabaseCollection") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "metabase_dashboard_count", - "metabase_query_type", - "metabase_query", - "metabase_dashboards", - "metabase_collection", - ] - - @property - def metabase_dashboard_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.metabase_dashboard_count - ) - - @metabase_dashboard_count.setter - def metabase_dashboard_count(self, metabase_dashboard_count: Optional[int]): - if 
self.attributes is None: - self.attributes = self.Attributes() - self.attributes.metabase_dashboard_count = metabase_dashboard_count - - @property - def metabase_query_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.metabase_query_type - - @metabase_query_type.setter - def metabase_query_type(self, metabase_query_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.metabase_query_type = metabase_query_type - - @property - def metabase_query(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.metabase_query - - @metabase_query.setter - def metabase_query(self, metabase_query: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.metabase_query = metabase_query - - @property - def metabase_dashboards(self) -> Optional[list[MetabaseDashboard]]: - return None if self.attributes is None else self.attributes.metabase_dashboards - - @metabase_dashboards.setter - def metabase_dashboards( - self, metabase_dashboards: Optional[list[MetabaseDashboard]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.metabase_dashboards = metabase_dashboards - - @property - def metabase_collection(self) -> Optional[MetabaseCollection]: - return None if self.attributes is None else self.attributes.metabase_collection - - @metabase_collection.setter - def metabase_collection(self, metabase_collection: Optional[MetabaseCollection]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.metabase_collection = metabase_collection - - class Attributes(Metabase.Attributes): - metabase_dashboard_count: Optional[int] = Field( - None, description="", alias="metabaseDashboardCount" - ) - metabase_query_type: Optional[str] = Field( - None, description="", alias="metabaseQueryType" - ) - metabase_query: Optional[str] = Field( - None, description="", alias="metabaseQuery" - ) - metabase_dashboards: Optional[list[MetabaseDashboard]] = Field( - None, description="", alias="metabaseDashboards" - ) # relationship - metabase_collection: Optional[MetabaseCollection] = Field( - None, description="", alias="metabaseCollection" - ) # relationship - - attributes: "MetabaseQuestion.Attributes" = Field( - default_factory=lambda: MetabaseQuestion.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class MetabaseCollection(Metabase): - """Description""" - - type_name: str = Field("MetabaseCollection", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "MetabaseCollection": - raise ValueError("must be MetabaseCollection") - return v - - def __setattr__(self, name, value): - if name in MetabaseCollection._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - METABASE_SLUG: ClassVar[KeywordTextField] = KeywordTextField( - "metabaseSlug", "metabaseSlug", "metabaseSlug.text" - ) - """ - - """ - METABASE_COLOR: ClassVar[KeywordField] = KeywordField( - "metabaseColor", "metabaseColor" - ) - """ - - """ - METABASE_NAMESPACE: ClassVar[KeywordTextField] = KeywordTextField( - "metabaseNamespace", "metabaseNamespace", "metabaseNamespace.text" - ) - """ - - """ - METABASE_IS_PERSONAL_COLLECTION: ClassVar[BooleanField] = BooleanField( - "metabaseIsPersonalCollection", "metabaseIsPersonalCollection" - ) - """ - - """ - - METABASE_DASHBOARDS: ClassVar[RelationField] = RelationField("metabaseDashboards") - """ - TBC - """ - METABASE_QUESTIONS: ClassVar[RelationField] = RelationField("metabaseQuestions") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "metabase_slug", - "metabase_color", - "metabase_namespace", - "metabase_is_personal_collection", - "metabase_dashboards", - "metabase_questions", - ] - - @property - def metabase_slug(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.metabase_slug - - @metabase_slug.setter - def metabase_slug(self, metabase_slug: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.metabase_slug = metabase_slug - - @property - def metabase_color(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.metabase_color - - @metabase_color.setter - def metabase_color(self, metabase_color: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.metabase_color = metabase_color - - @property - def metabase_namespace(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.metabase_namespace - - @metabase_namespace.setter - def metabase_namespace(self, metabase_namespace: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.metabase_namespace = metabase_namespace - - @property - def metabase_is_personal_collection(self) -> Optional[bool]: - return ( - None - if self.attributes is None - else self.attributes.metabase_is_personal_collection - ) - - @metabase_is_personal_collection.setter - def metabase_is_personal_collection( - self, metabase_is_personal_collection: Optional[bool] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.metabase_is_personal_collection = ( - metabase_is_personal_collection - ) - - @property - def metabase_dashboards(self) -> Optional[list[MetabaseDashboard]]: - return None if self.attributes is None else self.attributes.metabase_dashboards - - @metabase_dashboards.setter - def metabase_dashboards( - self, metabase_dashboards: Optional[list[MetabaseDashboard]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.metabase_dashboards = metabase_dashboards - - @property - def metabase_questions(self) -> 
Optional[list[MetabaseQuestion]]: - return None if self.attributes is None else self.attributes.metabase_questions - - @metabase_questions.setter - def metabase_questions(self, metabase_questions: Optional[list[MetabaseQuestion]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.metabase_questions = metabase_questions - - class Attributes(Metabase.Attributes): - metabase_slug: Optional[str] = Field(None, description="", alias="metabaseSlug") - metabase_color: Optional[str] = Field( - None, description="", alias="metabaseColor" - ) - metabase_namespace: Optional[str] = Field( - None, description="", alias="metabaseNamespace" - ) - metabase_is_personal_collection: Optional[bool] = Field( - None, description="", alias="metabaseIsPersonalCollection" - ) - metabase_dashboards: Optional[list[MetabaseDashboard]] = Field( - None, description="", alias="metabaseDashboards" - ) # relationship - metabase_questions: Optional[list[MetabaseQuestion]] = Field( - None, description="", alias="metabaseQuestions" - ) # relationship - - attributes: "MetabaseCollection.Attributes" = Field( - default_factory=lambda: MetabaseCollection.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class MetabaseDashboard(Metabase): - """Description""" - - type_name: str = Field("MetabaseDashboard", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "MetabaseDashboard": - raise ValueError("must be MetabaseDashboard") - return v - - def __setattr__(self, name, value): - if name in MetabaseDashboard._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - METABASE_QUESTION_COUNT: ClassVar[NumericField] = NumericField( - "metabaseQuestionCount", "metabaseQuestionCount" - ) - """ - - """ - - METABASE_QUESTIONS: ClassVar[RelationField] = RelationField("metabaseQuestions") - """ - TBC - """ - METABASE_COLLECTION: ClassVar[RelationField] = RelationField("metabaseCollection") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "metabase_question_count", - "metabase_questions", - "metabase_collection", - ] - - @property - def metabase_question_count(self) -> Optional[int]: - return ( - None if self.attributes is None else self.attributes.metabase_question_count - ) - - @metabase_question_count.setter - def metabase_question_count(self, metabase_question_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.metabase_question_count = metabase_question_count - - @property - def metabase_questions(self) -> Optional[list[MetabaseQuestion]]: - return None if self.attributes is None else self.attributes.metabase_questions - - @metabase_questions.setter - def metabase_questions(self, metabase_questions: Optional[list[MetabaseQuestion]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.metabase_questions = metabase_questions - - @property - def metabase_collection(self) -> Optional[MetabaseCollection]: - return None if self.attributes is None else self.attributes.metabase_collection - - @metabase_collection.setter - def metabase_collection(self, metabase_collection: Optional[MetabaseCollection]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.metabase_collection = metabase_collection - - class 
Attributes(Metabase.Attributes): - metabase_question_count: Optional[int] = Field( - None, description="", alias="metabaseQuestionCount" - ) - metabase_questions: Optional[list[MetabaseQuestion]] = Field( - None, description="", alias="metabaseQuestions" - ) # relationship - metabase_collection: Optional[MetabaseCollection] = Field( - None, description="", alias="metabaseCollection" - ) # relationship - - attributes: "MetabaseDashboard.Attributes" = Field( - default_factory=lambda: MetabaseDashboard.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -MetabaseQuestion.Attributes.update_forward_refs() - - -MetabaseCollection.Attributes.update_forward_refs() - - -MetabaseDashboard.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset76.py b/pyatlan/model/assets/asset76.py deleted file mode 100644 index 0afc5b5d6..000000000 --- a/pyatlan/model/assets/asset76.py +++ /dev/null @@ -1,932 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 Atlan Pte. Ltd. - - -from __future__ import annotations - -from datetime import datetime -from typing import ClassVar, Optional - -from pydantic import Field, validator - -from pyatlan.model.enums import ( - QuickSightAnalysisStatus, - QuickSightDatasetFieldType, - QuickSightDatasetImportMode, - QuickSightFolderType, -) -from pyatlan.model.fields.atlan_fields import ( - KeywordField, - KeywordTextField, - NumericField, - RelationField, -) - -from .asset49 import QuickSight - - -class QuickSightFolder(QuickSight): - """Description""" - - type_name: str = Field("QuickSightFolder", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "QuickSightFolder": - raise ValueError("must be QuickSightFolder") - return v - - def __setattr__(self, name, value): - if name in QuickSightFolder._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - QUICK_SIGHT_FOLDER_TYPE: ClassVar[KeywordField] = KeywordField( - "quickSightFolderType", "quickSightFolderType" - ) - """ - Type of this folder, for example: SHARED. - """ - QUICK_SIGHT_FOLDER_HIERARCHY: ClassVar[KeywordField] = KeywordField( - "quickSightFolderHierarchy", "quickSightFolderHierarchy" - ) - """ - Detailed path of this folder. 
- """ - - QUICK_SIGHT_DASHBOARDS: ClassVar[RelationField] = RelationField( - "quickSightDashboards" - ) - """ - TBC - """ - QUICK_SIGHT_DATASETS: ClassVar[RelationField] = RelationField("quickSightDatasets") - """ - TBC - """ - QUICK_SIGHT_ANALYSES: ClassVar[RelationField] = RelationField("quickSightAnalyses") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "quick_sight_folder_type", - "quick_sight_folder_hierarchy", - "quick_sight_dashboards", - "quick_sight_datasets", - "quick_sight_analyses", - ] - - @property - def quick_sight_folder_type(self) -> Optional[QuickSightFolderType]: - return ( - None if self.attributes is None else self.attributes.quick_sight_folder_type - ) - - @quick_sight_folder_type.setter - def quick_sight_folder_type( - self, quick_sight_folder_type: Optional[QuickSightFolderType] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.quick_sight_folder_type = quick_sight_folder_type - - @property - def quick_sight_folder_hierarchy(self) -> Optional[list[dict[str, str]]]: - return ( - None - if self.attributes is None - else self.attributes.quick_sight_folder_hierarchy - ) - - @quick_sight_folder_hierarchy.setter - def quick_sight_folder_hierarchy( - self, quick_sight_folder_hierarchy: Optional[list[dict[str, str]]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.quick_sight_folder_hierarchy = quick_sight_folder_hierarchy - - @property - def quick_sight_dashboards(self) -> Optional[list[QuickSightDashboard]]: - return ( - None if self.attributes is None else self.attributes.quick_sight_dashboards - ) - - @quick_sight_dashboards.setter - def quick_sight_dashboards( - self, quick_sight_dashboards: Optional[list[QuickSightDashboard]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.quick_sight_dashboards = quick_sight_dashboards - - @property - def quick_sight_datasets(self) -> Optional[list[QuickSightDataset]]: - return None if self.attributes is None else self.attributes.quick_sight_datasets - - @quick_sight_datasets.setter - def quick_sight_datasets( - self, quick_sight_datasets: Optional[list[QuickSightDataset]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.quick_sight_datasets = quick_sight_datasets - - @property - def quick_sight_analyses(self) -> Optional[list[QuickSightAnalysis]]: - return None if self.attributes is None else self.attributes.quick_sight_analyses - - @quick_sight_analyses.setter - def quick_sight_analyses( - self, quick_sight_analyses: Optional[list[QuickSightAnalysis]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.quick_sight_analyses = quick_sight_analyses - - class Attributes(QuickSight.Attributes): - quick_sight_folder_type: Optional[QuickSightFolderType] = Field( - None, description="", alias="quickSightFolderType" - ) - quick_sight_folder_hierarchy: Optional[list[dict[str, str]]] = Field( - None, description="", alias="quickSightFolderHierarchy" - ) - quick_sight_dashboards: Optional[list[QuickSightDashboard]] = Field( - None, description="", alias="quickSightDashboards" - ) # relationship - quick_sight_datasets: Optional[list[QuickSightDataset]] = Field( - None, description="", alias="quickSightDatasets" - ) # relationship - quick_sight_analyses: Optional[list[QuickSightAnalysis]] = Field( - None, description="", alias="quickSightAnalyses" - ) # relationship - - attributes: "QuickSightFolder.Attributes" = 
Field( - default_factory=lambda: QuickSightFolder.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class QuickSightDashboardVisual(QuickSight): - """Description""" - - type_name: str = Field("QuickSightDashboardVisual", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "QuickSightDashboardVisual": - raise ValueError("must be QuickSightDashboardVisual") - return v - - def __setattr__(self, name, value): - if name in QuickSightDashboardVisual._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - QUICK_SIGHT_DASHBOARD_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "quickSightDashboardQualifiedName", - "quickSightDashboardQualifiedName", - "quickSightDashboardQualifiedName.text", - ) - """ - Unique name of the dashboard in which this visual exists. - """ - - QUICK_SIGHT_DASHBOARD: ClassVar[RelationField] = RelationField( - "quickSightDashboard" - ) - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "quick_sight_dashboard_qualified_name", - "quick_sight_dashboard", - ] - - @property - def quick_sight_dashboard_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.quick_sight_dashboard_qualified_name - ) - - @quick_sight_dashboard_qualified_name.setter - def quick_sight_dashboard_qualified_name( - self, quick_sight_dashboard_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.quick_sight_dashboard_qualified_name = ( - quick_sight_dashboard_qualified_name - ) - - @property - def quick_sight_dashboard(self) -> Optional[QuickSightDashboard]: - return ( - None if self.attributes is None else self.attributes.quick_sight_dashboard - ) - - @quick_sight_dashboard.setter - def quick_sight_dashboard( - self, quick_sight_dashboard: Optional[QuickSightDashboard] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.quick_sight_dashboard = quick_sight_dashboard - - class Attributes(QuickSight.Attributes): - quick_sight_dashboard_qualified_name: Optional[str] = Field( - None, description="", alias="quickSightDashboardQualifiedName" - ) - quick_sight_dashboard: Optional[QuickSightDashboard] = Field( - None, description="", alias="quickSightDashboard" - ) # relationship - - attributes: "QuickSightDashboardVisual.Attributes" = Field( - default_factory=lambda: QuickSightDashboardVisual.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class QuickSightAnalysisVisual(QuickSight): - """Description""" - - type_name: str = Field("QuickSightAnalysisVisual", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "QuickSightAnalysisVisual": - raise ValueError("must be QuickSightAnalysisVisual") - return v - - def __setattr__(self, name, value): - if name in QuickSightAnalysisVisual._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - QUICK_SIGHT_ANALYSIS_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "quickSightAnalysisQualifiedName", - "quickSightAnalysisQualifiedName", - "quickSightAnalysisQualifiedName.text", - ) - """ - Unique name of the QuickSight analysis in which this visual exists. - """ - - QUICK_SIGHT_ANALYSIS: ClassVar[RelationField] = RelationField("quickSightAnalysis") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "quick_sight_analysis_qualified_name", - "quick_sight_analysis", - ] - - @property - def quick_sight_analysis_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.quick_sight_analysis_qualified_name - ) - - @quick_sight_analysis_qualified_name.setter - def quick_sight_analysis_qualified_name( - self, quick_sight_analysis_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.quick_sight_analysis_qualified_name = ( - quick_sight_analysis_qualified_name - ) - - @property - def quick_sight_analysis(self) -> Optional[QuickSightAnalysis]: - return None if self.attributes is None else self.attributes.quick_sight_analysis - - @quick_sight_analysis.setter - def quick_sight_analysis(self, quick_sight_analysis: Optional[QuickSightAnalysis]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.quick_sight_analysis = quick_sight_analysis - - class Attributes(QuickSight.Attributes): - quick_sight_analysis_qualified_name: Optional[str] = Field( - None, description="", alias="quickSightAnalysisQualifiedName" - ) - quick_sight_analysis: Optional[QuickSightAnalysis] = Field( - None, description="", alias="quickSightAnalysis" - ) # relationship - - attributes: "QuickSightAnalysisVisual.Attributes" = Field( - default_factory=lambda: QuickSightAnalysisVisual.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class QuickSightDatasetField(QuickSight): - """Description""" - - type_name: str = Field("QuickSightDatasetField", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "QuickSightDatasetField": - raise ValueError("must be QuickSightDatasetField") - return v - - def __setattr__(self, name, value): - if name in QuickSightDatasetField._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - QUICK_SIGHT_DATASET_FIELD_TYPE: ClassVar[KeywordField] = KeywordField( - "quickSightDatasetFieldType", "quickSightDatasetFieldType" - ) - """ - Datatype of this field, for example: STRING, INTEGER, etc. 
- """ - QUICK_SIGHT_DATASET_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "quickSightDatasetQualifiedName", - "quickSightDatasetQualifiedName", - "quickSightDatasetQualifiedName.text", - ) - """ - Unique name of the dataset in which this field exists. - """ - - QUICK_SIGHT_DATASET: ClassVar[RelationField] = RelationField("quickSightDataset") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "quick_sight_dataset_field_type", - "quick_sight_dataset_qualified_name", - "quick_sight_dataset", - ] - - @property - def quick_sight_dataset_field_type(self) -> Optional[QuickSightDatasetFieldType]: - return ( - None - if self.attributes is None - else self.attributes.quick_sight_dataset_field_type - ) - - @quick_sight_dataset_field_type.setter - def quick_sight_dataset_field_type( - self, quick_sight_dataset_field_type: Optional[QuickSightDatasetFieldType] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.quick_sight_dataset_field_type = quick_sight_dataset_field_type - - @property - def quick_sight_dataset_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.quick_sight_dataset_qualified_name - ) - - @quick_sight_dataset_qualified_name.setter - def quick_sight_dataset_qualified_name( - self, quick_sight_dataset_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.quick_sight_dataset_qualified_name = ( - quick_sight_dataset_qualified_name - ) - - @property - def quick_sight_dataset(self) -> Optional[QuickSightDataset]: - return None if self.attributes is None else self.attributes.quick_sight_dataset - - @quick_sight_dataset.setter - def quick_sight_dataset(self, quick_sight_dataset: Optional[QuickSightDataset]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.quick_sight_dataset = quick_sight_dataset - - class Attributes(QuickSight.Attributes): - quick_sight_dataset_field_type: Optional[QuickSightDatasetFieldType] = Field( - None, description="", alias="quickSightDatasetFieldType" - ) - quick_sight_dataset_qualified_name: Optional[str] = Field( - None, description="", alias="quickSightDatasetQualifiedName" - ) - quick_sight_dataset: Optional[QuickSightDataset] = Field( - None, description="", alias="quickSightDataset" - ) # relationship - - attributes: "QuickSightDatasetField.Attributes" = Field( - default_factory=lambda: QuickSightDatasetField.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class QuickSightAnalysis(QuickSight): - """Description""" - - type_name: str = Field("QuickSightAnalysis", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "QuickSightAnalysis": - raise ValueError("must be QuickSightAnalysis") - return v - - def __setattr__(self, name, value): - if name in QuickSightAnalysis._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - QUICK_SIGHT_ANALYSIS_STATUS: ClassVar[KeywordField] = KeywordField( - "quickSightAnalysisStatus", "quickSightAnalysisStatus" - ) - """ - Status of this analysis, for example: CREATION_IN_PROGRESS, UPDATE_SUCCESSFUL, etc. 
- """ - QUICK_SIGHT_ANALYSIS_CALCULATED_FIELDS: ClassVar[KeywordField] = KeywordField( - "quickSightAnalysisCalculatedFields", "quickSightAnalysisCalculatedFields" - ) - """ - List of field names calculated by this analysis. - """ - QUICK_SIGHT_ANALYSIS_PARAMETER_DECLARATIONS: ClassVar[KeywordField] = KeywordField( - "quickSightAnalysisParameterDeclarations", - "quickSightAnalysisParameterDeclarations", - ) - """ - List of parameters used for this analysis. - """ - QUICK_SIGHT_ANALYSIS_FILTER_GROUPS: ClassVar[KeywordField] = KeywordField( - "quickSightAnalysisFilterGroups", "quickSightAnalysisFilterGroups" - ) - """ - List of filter groups used for this analysis. - """ - - QUICK_SIGHT_ANALYSIS_VISUALS: ClassVar[RelationField] = RelationField( - "quickSightAnalysisVisuals" - ) - """ - TBC - """ - QUICK_SIGHT_ANALYSIS_FOLDERS: ClassVar[RelationField] = RelationField( - "quickSightAnalysisFolders" - ) - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "quick_sight_analysis_status", - "quick_sight_analysis_calculated_fields", - "quick_sight_analysis_parameter_declarations", - "quick_sight_analysis_filter_groups", - "quick_sight_analysis_visuals", - "quick_sight_analysis_folders", - ] - - @property - def quick_sight_analysis_status(self) -> Optional[QuickSightAnalysisStatus]: - return ( - None - if self.attributes is None - else self.attributes.quick_sight_analysis_status - ) - - @quick_sight_analysis_status.setter - def quick_sight_analysis_status( - self, quick_sight_analysis_status: Optional[QuickSightAnalysisStatus] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.quick_sight_analysis_status = quick_sight_analysis_status - - @property - def quick_sight_analysis_calculated_fields(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.quick_sight_analysis_calculated_fields - ) - - @quick_sight_analysis_calculated_fields.setter - def quick_sight_analysis_calculated_fields( - self, quick_sight_analysis_calculated_fields: Optional[set[str]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.quick_sight_analysis_calculated_fields = ( - quick_sight_analysis_calculated_fields - ) - - @property - def quick_sight_analysis_parameter_declarations(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.quick_sight_analysis_parameter_declarations - ) - - @quick_sight_analysis_parameter_declarations.setter - def quick_sight_analysis_parameter_declarations( - self, quick_sight_analysis_parameter_declarations: Optional[set[str]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.quick_sight_analysis_parameter_declarations = ( - quick_sight_analysis_parameter_declarations - ) - - @property - def quick_sight_analysis_filter_groups(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.quick_sight_analysis_filter_groups - ) - - @quick_sight_analysis_filter_groups.setter - def quick_sight_analysis_filter_groups( - self, quick_sight_analysis_filter_groups: Optional[set[str]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.quick_sight_analysis_filter_groups = ( - quick_sight_analysis_filter_groups - ) - - @property - def quick_sight_analysis_visuals(self) -> Optional[list[QuickSightAnalysisVisual]]: - return ( - None - if self.attributes is None - else self.attributes.quick_sight_analysis_visuals - ) 
- - @quick_sight_analysis_visuals.setter - def quick_sight_analysis_visuals( - self, quick_sight_analysis_visuals: Optional[list[QuickSightAnalysisVisual]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.quick_sight_analysis_visuals = quick_sight_analysis_visuals - - @property - def quick_sight_analysis_folders(self) -> Optional[list[QuickSightFolder]]: - return ( - None - if self.attributes is None - else self.attributes.quick_sight_analysis_folders - ) - - @quick_sight_analysis_folders.setter - def quick_sight_analysis_folders( - self, quick_sight_analysis_folders: Optional[list[QuickSightFolder]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.quick_sight_analysis_folders = quick_sight_analysis_folders - - class Attributes(QuickSight.Attributes): - quick_sight_analysis_status: Optional[QuickSightAnalysisStatus] = Field( - None, description="", alias="quickSightAnalysisStatus" - ) - quick_sight_analysis_calculated_fields: Optional[set[str]] = Field( - None, description="", alias="quickSightAnalysisCalculatedFields" - ) - quick_sight_analysis_parameter_declarations: Optional[set[str]] = Field( - None, description="", alias="quickSightAnalysisParameterDeclarations" - ) - quick_sight_analysis_filter_groups: Optional[set[str]] = Field( - None, description="", alias="quickSightAnalysisFilterGroups" - ) - quick_sight_analysis_visuals: Optional[list[QuickSightAnalysisVisual]] = Field( - None, description="", alias="quickSightAnalysisVisuals" - ) # relationship - quick_sight_analysis_folders: Optional[list[QuickSightFolder]] = Field( - None, description="", alias="quickSightAnalysisFolders" - ) # relationship - - attributes: "QuickSightAnalysis.Attributes" = Field( - default_factory=lambda: QuickSightAnalysis.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class QuickSightDashboard(QuickSight): - """Description""" - - type_name: str = Field("QuickSightDashboard", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "QuickSightDashboard": - raise ValueError("must be QuickSightDashboard") - return v - - def __setattr__(self, name, value): - if name in QuickSightDashboard._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - QUICK_SIGHT_DASHBOARD_PUBLISHED_VERSION_NUMBER: ClassVar[ - NumericField - ] = NumericField( - "quickSightDashboardPublishedVersionNumber", - "quickSightDashboardPublishedVersionNumber", - ) - """ - Version number of the published dashboard. - """ - QUICK_SIGHT_DASHBOARD_LAST_PUBLISHED_TIME: ClassVar[NumericField] = NumericField( - "quickSightDashboardLastPublishedTime", "quickSightDashboardLastPublishedTime" - ) - """ - Time (epoch) at which this dashboard was last published, in milliseconds. 
- """ - - QUICK_SIGHT_DASHBOARD_FOLDERS: ClassVar[RelationField] = RelationField( - "quickSightDashboardFolders" - ) - """ - TBC - """ - QUICK_SIGHT_DASHBOARD_VISUALS: ClassVar[RelationField] = RelationField( - "quickSightDashboardVisuals" - ) - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "quick_sight_dashboard_published_version_number", - "quick_sight_dashboard_last_published_time", - "quick_sight_dashboard_folders", - "quick_sight_dashboard_visuals", - ] - - @property - def quick_sight_dashboard_published_version_number(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.quick_sight_dashboard_published_version_number - ) - - @quick_sight_dashboard_published_version_number.setter - def quick_sight_dashboard_published_version_number( - self, quick_sight_dashboard_published_version_number: Optional[int] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.quick_sight_dashboard_published_version_number = ( - quick_sight_dashboard_published_version_number - ) - - @property - def quick_sight_dashboard_last_published_time(self) -> Optional[datetime]: - return ( - None - if self.attributes is None - else self.attributes.quick_sight_dashboard_last_published_time - ) - - @quick_sight_dashboard_last_published_time.setter - def quick_sight_dashboard_last_published_time( - self, quick_sight_dashboard_last_published_time: Optional[datetime] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.quick_sight_dashboard_last_published_time = ( - quick_sight_dashboard_last_published_time - ) - - @property - def quick_sight_dashboard_folders(self) -> Optional[list[QuickSightFolder]]: - return ( - None - if self.attributes is None - else self.attributes.quick_sight_dashboard_folders - ) - - @quick_sight_dashboard_folders.setter - def quick_sight_dashboard_folders( - self, quick_sight_dashboard_folders: Optional[list[QuickSightFolder]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.quick_sight_dashboard_folders = quick_sight_dashboard_folders - - @property - def quick_sight_dashboard_visuals( - self, - ) -> Optional[list[QuickSightDashboardVisual]]: - return ( - None - if self.attributes is None - else self.attributes.quick_sight_dashboard_visuals - ) - - @quick_sight_dashboard_visuals.setter - def quick_sight_dashboard_visuals( - self, quick_sight_dashboard_visuals: Optional[list[QuickSightDashboardVisual]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.quick_sight_dashboard_visuals = quick_sight_dashboard_visuals - - class Attributes(QuickSight.Attributes): - quick_sight_dashboard_published_version_number: Optional[int] = Field( - None, description="", alias="quickSightDashboardPublishedVersionNumber" - ) - quick_sight_dashboard_last_published_time: Optional[datetime] = Field( - None, description="", alias="quickSightDashboardLastPublishedTime" - ) - quick_sight_dashboard_folders: Optional[list[QuickSightFolder]] = Field( - None, description="", alias="quickSightDashboardFolders" - ) # relationship - quick_sight_dashboard_visuals: Optional[ - list[QuickSightDashboardVisual] - ] = Field( - None, description="", alias="quickSightDashboardVisuals" - ) # relationship - - attributes: "QuickSightDashboard.Attributes" = Field( - default_factory=lambda: QuickSightDashboard.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class QuickSightDataset(QuickSight): - """Description""" - - type_name: str = Field("QuickSightDataset", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "QuickSightDataset": - raise ValueError("must be QuickSightDataset") - return v - - def __setattr__(self, name, value): - if name in QuickSightDataset._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - QUICK_SIGHT_DATASET_IMPORT_MODE: ClassVar[KeywordField] = KeywordField( - "quickSightDatasetImportMode", "quickSightDatasetImportMode" - ) - """ - Import mode for this dataset, for example: SPICE or DIRECT_QUERY. - """ - QUICK_SIGHT_DATASET_COLUMN_COUNT: ClassVar[NumericField] = NumericField( - "quickSightDatasetColumnCount", "quickSightDatasetColumnCount" - ) - """ - Number of columns present in this dataset. - """ - - QUICK_SIGHT_DATASET_FOLDERS: ClassVar[RelationField] = RelationField( - "quickSightDatasetFolders" - ) - """ - TBC - """ - QUICK_SIGHT_DATASET_FIELDS: ClassVar[RelationField] = RelationField( - "quickSightDatasetFields" - ) - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "quick_sight_dataset_import_mode", - "quick_sight_dataset_column_count", - "quick_sight_dataset_folders", - "quick_sight_dataset_fields", - ] - - @property - def quick_sight_dataset_import_mode(self) -> Optional[QuickSightDatasetImportMode]: - return ( - None - if self.attributes is None - else self.attributes.quick_sight_dataset_import_mode - ) - - @quick_sight_dataset_import_mode.setter - def quick_sight_dataset_import_mode( - self, quick_sight_dataset_import_mode: Optional[QuickSightDatasetImportMode] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.quick_sight_dataset_import_mode = ( - quick_sight_dataset_import_mode - ) - - @property - def quick_sight_dataset_column_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.quick_sight_dataset_column_count - ) - - @quick_sight_dataset_column_count.setter - def quick_sight_dataset_column_count( - self, quick_sight_dataset_column_count: Optional[int] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.quick_sight_dataset_column_count = ( - quick_sight_dataset_column_count - ) - - @property - def quick_sight_dataset_folders(self) -> Optional[list[QuickSightFolder]]: - return ( - None - if self.attributes is None - else self.attributes.quick_sight_dataset_folders - ) - - @quick_sight_dataset_folders.setter - def quick_sight_dataset_folders( - self, quick_sight_dataset_folders: Optional[list[QuickSightFolder]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.quick_sight_dataset_folders = quick_sight_dataset_folders - - @property - def quick_sight_dataset_fields(self) -> Optional[list[QuickSightDatasetField]]: - return ( - None - if self.attributes is None - else self.attributes.quick_sight_dataset_fields - ) - - @quick_sight_dataset_fields.setter - def quick_sight_dataset_fields( - self, quick_sight_dataset_fields: Optional[list[QuickSightDatasetField]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.quick_sight_dataset_fields = quick_sight_dataset_fields - - class Attributes(QuickSight.Attributes): - quick_sight_dataset_import_mode: 
Optional[QuickSightDatasetImportMode] = Field( - None, description="", alias="quickSightDatasetImportMode" - ) - quick_sight_dataset_column_count: Optional[int] = Field( - None, description="", alias="quickSightDatasetColumnCount" - ) - quick_sight_dataset_folders: Optional[list[QuickSightFolder]] = Field( - None, description="", alias="quickSightDatasetFolders" - ) # relationship - quick_sight_dataset_fields: Optional[list[QuickSightDatasetField]] = Field( - None, description="", alias="quickSightDatasetFields" - ) # relationship - - attributes: "QuickSightDataset.Attributes" = Field( - default_factory=lambda: QuickSightDataset.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -QuickSightFolder.Attributes.update_forward_refs() - - -QuickSightDashboardVisual.Attributes.update_forward_refs() - - -QuickSightAnalysisVisual.Attributes.update_forward_refs() - - -QuickSightDatasetField.Attributes.update_forward_refs() - - -QuickSightAnalysis.Attributes.update_forward_refs() - - -QuickSightDashboard.Attributes.update_forward_refs() - - -QuickSightDataset.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset79.py b/pyatlan/model/assets/asset79.py deleted file mode 100644 index 6efeca4e8..000000000 --- a/pyatlan/model/assets/asset79.py +++ /dev/null @@ -1,1668 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 Atlan Pte. Ltd. - - -from __future__ import annotations - -from typing import ClassVar, Optional - -from pydantic import Field, validator - -from pyatlan.model.fields.atlan_fields import ( - BooleanField, - KeywordField, - NumericField, - RelationField, - TextField, -) - -from .asset51 import PowerBI - - -class PowerBIReport(PowerBI): - """Description""" - - type_name: str = Field("PowerBIReport", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "PowerBIReport": - raise ValueError("must be PowerBIReport") - return v - - def __setattr__(self, name, value): - if name in PowerBIReport._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "workspaceQualifiedName", "workspaceQualifiedName" - ) - """ - Unique name of the workspace in which this report exists. - """ - DATASET_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "datasetQualifiedName", "datasetQualifiedName" - ) - """ - Unique name of the dataset used to build this report. - """ - WEB_URL: ClassVar[KeywordField] = KeywordField("webUrl", "webUrl") - """ - Deprecated. See 'sourceUrl' instead. - """ - PAGE_COUNT: ClassVar[NumericField] = NumericField("pageCount", "pageCount") - """ - Number of pages in this report. 
- """ - - WORKSPACE: ClassVar[RelationField] = RelationField("workspace") - """ - TBC - """ - TILES: ClassVar[RelationField] = RelationField("tiles") - """ - TBC - """ - PAGES: ClassVar[RelationField] = RelationField("pages") - """ - TBC - """ - DATASET: ClassVar[RelationField] = RelationField("dataset") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "workspace_qualified_name", - "dataset_qualified_name", - "web_url", - "page_count", - "workspace", - "tiles", - "pages", - "dataset", - ] - - @property - def workspace_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.workspace_qualified_name - ) - - @workspace_qualified_name.setter - def workspace_qualified_name(self, workspace_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workspace_qualified_name = workspace_qualified_name - - @property - def dataset_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.dataset_qualified_name - ) - - @dataset_qualified_name.setter - def dataset_qualified_name(self, dataset_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dataset_qualified_name = dataset_qualified_name - - @property - def web_url(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.web_url - - @web_url.setter - def web_url(self, web_url: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.web_url = web_url - - @property - def page_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.page_count - - @page_count.setter - def page_count(self, page_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.page_count = page_count - - @property - def workspace(self) -> Optional[PowerBIWorkspace]: - return None if self.attributes is None else self.attributes.workspace - - @workspace.setter - def workspace(self, workspace: Optional[PowerBIWorkspace]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workspace = workspace - - @property - def tiles(self) -> Optional[list[PowerBITile]]: - return None if self.attributes is None else self.attributes.tiles - - @tiles.setter - def tiles(self, tiles: Optional[list[PowerBITile]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.tiles = tiles - - @property - def pages(self) -> Optional[list[PowerBIPage]]: - return None if self.attributes is None else self.attributes.pages - - @pages.setter - def pages(self, pages: Optional[list[PowerBIPage]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.pages = pages - - @property - def dataset(self) -> Optional[PowerBIDataset]: - return None if self.attributes is None else self.attributes.dataset - - @dataset.setter - def dataset(self, dataset: Optional[PowerBIDataset]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dataset = dataset - - class Attributes(PowerBI.Attributes): - workspace_qualified_name: Optional[str] = Field( - None, description="", alias="workspaceQualifiedName" - ) - dataset_qualified_name: Optional[str] = Field( - None, description="", alias="datasetQualifiedName" - ) - web_url: Optional[str] = Field(None, description="", alias="webUrl") - page_count: 
Optional[int] = Field(None, description="", alias="pageCount") - workspace: Optional[PowerBIWorkspace] = Field( - None, description="", alias="workspace" - ) # relationship - tiles: Optional[list[PowerBITile]] = Field( - None, description="", alias="tiles" - ) # relationship - pages: Optional[list[PowerBIPage]] = Field( - None, description="", alias="pages" - ) # relationship - dataset: Optional[PowerBIDataset] = Field( - None, description="", alias="dataset" - ) # relationship - - attributes: "PowerBIReport.Attributes" = Field( - default_factory=lambda: PowerBIReport.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class PowerBIMeasure(PowerBI): - """Description""" - - type_name: str = Field("PowerBIMeasure", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "PowerBIMeasure": - raise ValueError("must be PowerBIMeasure") - return v - - def __setattr__(self, name, value): - if name in PowerBIMeasure._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "workspaceQualifiedName", "workspaceQualifiedName" - ) - """ - Unique name of the workspace in which this measure exists. - """ - DATASET_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "datasetQualifiedName", "datasetQualifiedName" - ) - """ - Unique name of the dataset in which this measure exists. - """ - POWER_BI_MEASURE_EXPRESSION: ClassVar[TextField] = TextField( - "powerBIMeasureExpression", "powerBIMeasureExpression" - ) - """ - DAX expression for this measure. - """ - POWER_BI_IS_EXTERNAL_MEASURE: ClassVar[BooleanField] = BooleanField( - "powerBIIsExternalMeasure", "powerBIIsExternalMeasure" - ) - """ - Whether this measure is external (true) or internal (false). 
- """ - - TABLE: ClassVar[RelationField] = RelationField("table") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "workspace_qualified_name", - "dataset_qualified_name", - "power_b_i_measure_expression", - "power_b_i_is_external_measure", - "table", - ] - - @property - def workspace_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.workspace_qualified_name - ) - - @workspace_qualified_name.setter - def workspace_qualified_name(self, workspace_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workspace_qualified_name = workspace_qualified_name - - @property - def dataset_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.dataset_qualified_name - ) - - @dataset_qualified_name.setter - def dataset_qualified_name(self, dataset_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dataset_qualified_name = dataset_qualified_name - - @property - def power_b_i_measure_expression(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.power_b_i_measure_expression - ) - - @power_b_i_measure_expression.setter - def power_b_i_measure_expression(self, power_b_i_measure_expression: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.power_b_i_measure_expression = power_b_i_measure_expression - - @property - def power_b_i_is_external_measure(self) -> Optional[bool]: - return ( - None - if self.attributes is None - else self.attributes.power_b_i_is_external_measure - ) - - @power_b_i_is_external_measure.setter - def power_b_i_is_external_measure( - self, power_b_i_is_external_measure: Optional[bool] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.power_b_i_is_external_measure = power_b_i_is_external_measure - - @property - def table(self) -> Optional[PowerBITable]: - return None if self.attributes is None else self.attributes.table - - @table.setter - def table(self, table: Optional[PowerBITable]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.table = table - - class Attributes(PowerBI.Attributes): - workspace_qualified_name: Optional[str] = Field( - None, description="", alias="workspaceQualifiedName" - ) - dataset_qualified_name: Optional[str] = Field( - None, description="", alias="datasetQualifiedName" - ) - power_b_i_measure_expression: Optional[str] = Field( - None, description="", alias="powerBIMeasureExpression" - ) - power_b_i_is_external_measure: Optional[bool] = Field( - None, description="", alias="powerBIIsExternalMeasure" - ) - table: Optional[PowerBITable] = Field( - None, description="", alias="table" - ) # relationship - - attributes: "PowerBIMeasure.Attributes" = Field( - default_factory=lambda: PowerBIMeasure.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class PowerBIColumn(PowerBI): - """Description""" - - type_name: str = Field("PowerBIColumn", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "PowerBIColumn": - raise ValueError("must be PowerBIColumn") - return v - - def __setattr__(self, name, value): - if name in PowerBIColumn._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "workspaceQualifiedName", "workspaceQualifiedName" - ) - """ - Unique name of the workspace in which this column exists. - """ - DATASET_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "datasetQualifiedName", "datasetQualifiedName" - ) - """ - Unique name of the dataset in which this column exists. - """ - POWER_BI_COLUMN_DATA_CATEGORY: ClassVar[KeywordField] = KeywordField( - "powerBIColumnDataCategory", "powerBIColumnDataCategory" - ) - """ - Data category that describes the data in this column. - """ - POWER_BI_COLUMN_DATA_TYPE: ClassVar[KeywordField] = KeywordField( - "powerBIColumnDataType", "powerBIColumnDataType" - ) - """ - Data type of this column. - """ - POWER_BI_SORT_BY_COLUMN: ClassVar[KeywordField] = KeywordField( - "powerBISortByColumn", "powerBISortByColumn" - ) - """ - Name of a column in the same table to use to order this column. - """ - POWER_BI_COLUMN_SUMMARIZE_BY: ClassVar[KeywordField] = KeywordField( - "powerBIColumnSummarizeBy", "powerBIColumnSummarizeBy" - ) - """ - Aggregate function to use for summarizing this column. - """ - - TABLE: ClassVar[RelationField] = RelationField("table") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "workspace_qualified_name", - "dataset_qualified_name", - "power_b_i_column_data_category", - "power_b_i_column_data_type", - "power_b_i_sort_by_column", - "power_b_i_column_summarize_by", - "table", - ] - - @property - def workspace_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.workspace_qualified_name - ) - - @workspace_qualified_name.setter - def workspace_qualified_name(self, workspace_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workspace_qualified_name = workspace_qualified_name - - @property - def dataset_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.dataset_qualified_name - ) - - @dataset_qualified_name.setter - def dataset_qualified_name(self, dataset_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dataset_qualified_name = dataset_qualified_name - - @property - def power_b_i_column_data_category(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.power_b_i_column_data_category - ) - - @power_b_i_column_data_category.setter - def power_b_i_column_data_category( - self, power_b_i_column_data_category: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.power_b_i_column_data_category = power_b_i_column_data_category - - @property - def power_b_i_column_data_type(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.power_b_i_column_data_type - ) - - @power_b_i_column_data_type.setter - def 
power_b_i_column_data_type(self, power_b_i_column_data_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.power_b_i_column_data_type = power_b_i_column_data_type - - @property - def power_b_i_sort_by_column(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.power_b_i_sort_by_column - ) - - @power_b_i_sort_by_column.setter - def power_b_i_sort_by_column(self, power_b_i_sort_by_column: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.power_b_i_sort_by_column = power_b_i_sort_by_column - - @property - def power_b_i_column_summarize_by(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.power_b_i_column_summarize_by - ) - - @power_b_i_column_summarize_by.setter - def power_b_i_column_summarize_by( - self, power_b_i_column_summarize_by: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.power_b_i_column_summarize_by = power_b_i_column_summarize_by - - @property - def table(self) -> Optional[PowerBITable]: - return None if self.attributes is None else self.attributes.table - - @table.setter - def table(self, table: Optional[PowerBITable]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.table = table - - class Attributes(PowerBI.Attributes): - workspace_qualified_name: Optional[str] = Field( - None, description="", alias="workspaceQualifiedName" - ) - dataset_qualified_name: Optional[str] = Field( - None, description="", alias="datasetQualifiedName" - ) - power_b_i_column_data_category: Optional[str] = Field( - None, description="", alias="powerBIColumnDataCategory" - ) - power_b_i_column_data_type: Optional[str] = Field( - None, description="", alias="powerBIColumnDataType" - ) - power_b_i_sort_by_column: Optional[str] = Field( - None, description="", alias="powerBISortByColumn" - ) - power_b_i_column_summarize_by: Optional[str] = Field( - None, description="", alias="powerBIColumnSummarizeBy" - ) - table: Optional[PowerBITable] = Field( - None, description="", alias="table" - ) # relationship - - attributes: "PowerBIColumn.Attributes" = Field( - default_factory=lambda: PowerBIColumn.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class PowerBITable(PowerBI): - """Description""" - - type_name: str = Field("PowerBITable", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "PowerBITable": - raise ValueError("must be PowerBITable") - return v - - def __setattr__(self, name, value): - if name in PowerBITable._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "workspaceQualifiedName", "workspaceQualifiedName" - ) - """ - Unique name of the workspace in which this table exists. - """ - DATASET_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "datasetQualifiedName", "datasetQualifiedName" - ) - """ - Unique name of the dataset in which this table exists. - """ - POWER_BI_TABLE_SOURCE_EXPRESSIONS: ClassVar[KeywordField] = KeywordField( - "powerBITableSourceExpressions", "powerBITableSourceExpressions" - ) - """ - Power Query M expressions for the table. 
- """ - POWER_BI_TABLE_COLUMN_COUNT: ClassVar[NumericField] = NumericField( - "powerBITableColumnCount", "powerBITableColumnCount" - ) - """ - Number of columns in this table. - """ - POWER_BI_TABLE_MEASURE_COUNT: ClassVar[NumericField] = NumericField( - "powerBITableMeasureCount", "powerBITableMeasureCount" - ) - """ - Number of measures in this table. - """ - - COLUMNS: ClassVar[RelationField] = RelationField("columns") - """ - TBC - """ - MEASURES: ClassVar[RelationField] = RelationField("measures") - """ - TBC - """ - DATASET: ClassVar[RelationField] = RelationField("dataset") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "workspace_qualified_name", - "dataset_qualified_name", - "power_b_i_table_source_expressions", - "power_b_i_table_column_count", - "power_b_i_table_measure_count", - "columns", - "measures", - "dataset", - ] - - @property - def workspace_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.workspace_qualified_name - ) - - @workspace_qualified_name.setter - def workspace_qualified_name(self, workspace_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workspace_qualified_name = workspace_qualified_name - - @property - def dataset_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.dataset_qualified_name - ) - - @dataset_qualified_name.setter - def dataset_qualified_name(self, dataset_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dataset_qualified_name = dataset_qualified_name - - @property - def power_b_i_table_source_expressions(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.power_b_i_table_source_expressions - ) - - @power_b_i_table_source_expressions.setter - def power_b_i_table_source_expressions( - self, power_b_i_table_source_expressions: Optional[set[str]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.power_b_i_table_source_expressions = ( - power_b_i_table_source_expressions - ) - - @property - def power_b_i_table_column_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.power_b_i_table_column_count - ) - - @power_b_i_table_column_count.setter - def power_b_i_table_column_count(self, power_b_i_table_column_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.power_b_i_table_column_count = power_b_i_table_column_count - - @property - def power_b_i_table_measure_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.power_b_i_table_measure_count - ) - - @power_b_i_table_measure_count.setter - def power_b_i_table_measure_count( - self, power_b_i_table_measure_count: Optional[int] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.power_b_i_table_measure_count = power_b_i_table_measure_count - - @property - def columns(self) -> Optional[list[PowerBIColumn]]: - return None if self.attributes is None else self.attributes.columns - - @columns.setter - def columns(self, columns: Optional[list[PowerBIColumn]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.columns = columns - - @property - def measures(self) -> Optional[list[PowerBIMeasure]]: - return None if self.attributes is 
None else self.attributes.measures - - @measures.setter - def measures(self, measures: Optional[list[PowerBIMeasure]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.measures = measures - - @property - def dataset(self) -> Optional[PowerBIDataset]: - return None if self.attributes is None else self.attributes.dataset - - @dataset.setter - def dataset(self, dataset: Optional[PowerBIDataset]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dataset = dataset - - class Attributes(PowerBI.Attributes): - workspace_qualified_name: Optional[str] = Field( - None, description="", alias="workspaceQualifiedName" - ) - dataset_qualified_name: Optional[str] = Field( - None, description="", alias="datasetQualifiedName" - ) - power_b_i_table_source_expressions: Optional[set[str]] = Field( - None, description="", alias="powerBITableSourceExpressions" - ) - power_b_i_table_column_count: Optional[int] = Field( - None, description="", alias="powerBITableColumnCount" - ) - power_b_i_table_measure_count: Optional[int] = Field( - None, description="", alias="powerBITableMeasureCount" - ) - columns: Optional[list[PowerBIColumn]] = Field( - None, description="", alias="columns" - ) # relationship - measures: Optional[list[PowerBIMeasure]] = Field( - None, description="", alias="measures" - ) # relationship - dataset: Optional[PowerBIDataset] = Field( - None, description="", alias="dataset" - ) # relationship - - attributes: "PowerBITable.Attributes" = Field( - default_factory=lambda: PowerBITable.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class PowerBITile(PowerBI): - """Description""" - - type_name: str = Field("PowerBITile", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "PowerBITile": - raise ValueError("must be PowerBITile") - return v - - def __setattr__(self, name, value): - if name in PowerBITile._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "workspaceQualifiedName", "workspaceQualifiedName" - ) - """ - Unique name of the workspace in which this tile exists. - """ - DASHBOARD_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "dashboardQualifiedName", "dashboardQualifiedName" - ) - """ - Unique name of the dashboard in which this tile is pinned. 
- """ - - REPORT: ClassVar[RelationField] = RelationField("report") - """ - TBC - """ - DATASET: ClassVar[RelationField] = RelationField("dataset") - """ - TBC - """ - DASHBOARD: ClassVar[RelationField] = RelationField("dashboard") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "workspace_qualified_name", - "dashboard_qualified_name", - "report", - "dataset", - "dashboard", - ] - - @property - def workspace_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.workspace_qualified_name - ) - - @workspace_qualified_name.setter - def workspace_qualified_name(self, workspace_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workspace_qualified_name = workspace_qualified_name - - @property - def dashboard_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.dashboard_qualified_name - ) - - @dashboard_qualified_name.setter - def dashboard_qualified_name(self, dashboard_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dashboard_qualified_name = dashboard_qualified_name - - @property - def report(self) -> Optional[PowerBIReport]: - return None if self.attributes is None else self.attributes.report - - @report.setter - def report(self, report: Optional[PowerBIReport]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.report = report - - @property - def dataset(self) -> Optional[PowerBIDataset]: - return None if self.attributes is None else self.attributes.dataset - - @dataset.setter - def dataset(self, dataset: Optional[PowerBIDataset]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dataset = dataset - - @property - def dashboard(self) -> Optional[PowerBIDashboard]: - return None if self.attributes is None else self.attributes.dashboard - - @dashboard.setter - def dashboard(self, dashboard: Optional[PowerBIDashboard]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dashboard = dashboard - - class Attributes(PowerBI.Attributes): - workspace_qualified_name: Optional[str] = Field( - None, description="", alias="workspaceQualifiedName" - ) - dashboard_qualified_name: Optional[str] = Field( - None, description="", alias="dashboardQualifiedName" - ) - report: Optional[PowerBIReport] = Field( - None, description="", alias="report" - ) # relationship - dataset: Optional[PowerBIDataset] = Field( - None, description="", alias="dataset" - ) # relationship - dashboard: Optional[PowerBIDashboard] = Field( - None, description="", alias="dashboard" - ) # relationship - - attributes: "PowerBITile.Attributes" = Field( - default_factory=lambda: PowerBITile.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class PowerBIDatasource(PowerBI): - """Description""" - - type_name: str = Field("PowerBIDatasource", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "PowerBIDatasource": - raise ValueError("must be PowerBIDatasource") - return v - - def __setattr__(self, name, value): - if name in PowerBIDatasource._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - CONNECTION_DETAILS: ClassVar[KeywordField] = KeywordField( - "connectionDetails", "connectionDetails" - ) - """ - Connection details of the datasource. - """ - - DATASETS: ClassVar[RelationField] = RelationField("datasets") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "connection_details", - "datasets", - ] - - @property - def connection_details(self) -> Optional[dict[str, str]]: - return None if self.attributes is None else self.attributes.connection_details - - @connection_details.setter - def connection_details(self, connection_details: Optional[dict[str, str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.connection_details = connection_details - - @property - def datasets(self) -> Optional[list[PowerBIDataset]]: - return None if self.attributes is None else self.attributes.datasets - - @datasets.setter - def datasets(self, datasets: Optional[list[PowerBIDataset]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.datasets = datasets - - class Attributes(PowerBI.Attributes): - connection_details: Optional[dict[str, str]] = Field( - None, description="", alias="connectionDetails" - ) - datasets: Optional[list[PowerBIDataset]] = Field( - None, description="", alias="datasets" - ) # relationship - - attributes: "PowerBIDatasource.Attributes" = Field( - default_factory=lambda: PowerBIDatasource.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class PowerBIWorkspace(PowerBI): - """Description""" - - type_name: str = Field("PowerBIWorkspace", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "PowerBIWorkspace": - raise ValueError("must be PowerBIWorkspace") - return v - - def __setattr__(self, name, value): - if name in PowerBIWorkspace._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - WEB_URL: ClassVar[KeywordField] = KeywordField("webUrl", "webUrl") - """ - Deprecated. - """ - REPORT_COUNT: ClassVar[NumericField] = NumericField("reportCount", "reportCount") - """ - Number of reports in this workspace. - """ - DASHBOARD_COUNT: ClassVar[NumericField] = NumericField( - "dashboardCount", "dashboardCount" - ) - """ - Number of dashboards in this workspace. - """ - DATASET_COUNT: ClassVar[NumericField] = NumericField("datasetCount", "datasetCount") - """ - Number of datasets in this workspace. - """ - DATAFLOW_COUNT: ClassVar[NumericField] = NumericField( - "dataflowCount", "dataflowCount" - ) - """ - Number of dataflows in this workspace. 
- """ - - REPORTS: ClassVar[RelationField] = RelationField("reports") - """ - TBC - """ - DATASETS: ClassVar[RelationField] = RelationField("datasets") - """ - TBC - """ - DASHBOARDS: ClassVar[RelationField] = RelationField("dashboards") - """ - TBC - """ - DATAFLOWS: ClassVar[RelationField] = RelationField("dataflows") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "web_url", - "report_count", - "dashboard_count", - "dataset_count", - "dataflow_count", - "reports", - "datasets", - "dashboards", - "dataflows", - ] - - @property - def web_url(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.web_url - - @web_url.setter - def web_url(self, web_url: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.web_url = web_url - - @property - def report_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.report_count - - @report_count.setter - def report_count(self, report_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.report_count = report_count - - @property - def dashboard_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.dashboard_count - - @dashboard_count.setter - def dashboard_count(self, dashboard_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dashboard_count = dashboard_count - - @property - def dataset_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.dataset_count - - @dataset_count.setter - def dataset_count(self, dataset_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dataset_count = dataset_count - - @property - def dataflow_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.dataflow_count - - @dataflow_count.setter - def dataflow_count(self, dataflow_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dataflow_count = dataflow_count - - @property - def reports(self) -> Optional[list[PowerBIReport]]: - return None if self.attributes is None else self.attributes.reports - - @reports.setter - def reports(self, reports: Optional[list[PowerBIReport]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.reports = reports - - @property - def datasets(self) -> Optional[list[PowerBIDataset]]: - return None if self.attributes is None else self.attributes.datasets - - @datasets.setter - def datasets(self, datasets: Optional[list[PowerBIDataset]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.datasets = datasets - - @property - def dashboards(self) -> Optional[list[PowerBIDashboard]]: - return None if self.attributes is None else self.attributes.dashboards - - @dashboards.setter - def dashboards(self, dashboards: Optional[list[PowerBIDashboard]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dashboards = dashboards - - @property - def dataflows(self) -> Optional[list[PowerBIDataflow]]: - return None if self.attributes is None else self.attributes.dataflows - - @dataflows.setter - def dataflows(self, dataflows: Optional[list[PowerBIDataflow]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dataflows = dataflows - - class 
Attributes(PowerBI.Attributes): - web_url: Optional[str] = Field(None, description="", alias="webUrl") - report_count: Optional[int] = Field(None, description="", alias="reportCount") - dashboard_count: Optional[int] = Field( - None, description="", alias="dashboardCount" - ) - dataset_count: Optional[int] = Field(None, description="", alias="datasetCount") - dataflow_count: Optional[int] = Field( - None, description="", alias="dataflowCount" - ) - reports: Optional[list[PowerBIReport]] = Field( - None, description="", alias="reports" - ) # relationship - datasets: Optional[list[PowerBIDataset]] = Field( - None, description="", alias="datasets" - ) # relationship - dashboards: Optional[list[PowerBIDashboard]] = Field( - None, description="", alias="dashboards" - ) # relationship - dataflows: Optional[list[PowerBIDataflow]] = Field( - None, description="", alias="dataflows" - ) # relationship - - attributes: "PowerBIWorkspace.Attributes" = Field( - default_factory=lambda: PowerBIWorkspace.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class PowerBIDataset(PowerBI): - """Description""" - - type_name: str = Field("PowerBIDataset", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "PowerBIDataset": - raise ValueError("must be PowerBIDataset") - return v - - def __setattr__(self, name, value): - if name in PowerBIDataset._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "workspaceQualifiedName", "workspaceQualifiedName" - ) - """ - Unique name of the workspace in which this dataset exists. - """ - WEB_URL: ClassVar[KeywordField] = KeywordField("webUrl", "webUrl") - """ - Deprecated. See 'sourceUrl' instead. 
- """ - - REPORTS: ClassVar[RelationField] = RelationField("reports") - """ - TBC - """ - WORKSPACE: ClassVar[RelationField] = RelationField("workspace") - """ - TBC - """ - DATAFLOWS: ClassVar[RelationField] = RelationField("dataflows") - """ - TBC - """ - TILES: ClassVar[RelationField] = RelationField("tiles") - """ - TBC - """ - TABLES: ClassVar[RelationField] = RelationField("tables") - """ - TBC - """ - DATASOURCES: ClassVar[RelationField] = RelationField("datasources") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "workspace_qualified_name", - "web_url", - "reports", - "workspace", - "dataflows", - "tiles", - "tables", - "datasources", - ] - - @property - def workspace_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.workspace_qualified_name - ) - - @workspace_qualified_name.setter - def workspace_qualified_name(self, workspace_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workspace_qualified_name = workspace_qualified_name - - @property - def web_url(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.web_url - - @web_url.setter - def web_url(self, web_url: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.web_url = web_url - - @property - def reports(self) -> Optional[list[PowerBIReport]]: - return None if self.attributes is None else self.attributes.reports - - @reports.setter - def reports(self, reports: Optional[list[PowerBIReport]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.reports = reports - - @property - def workspace(self) -> Optional[PowerBIWorkspace]: - return None if self.attributes is None else self.attributes.workspace - - @workspace.setter - def workspace(self, workspace: Optional[PowerBIWorkspace]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workspace = workspace - - @property - def dataflows(self) -> Optional[list[PowerBIDataflow]]: - return None if self.attributes is None else self.attributes.dataflows - - @dataflows.setter - def dataflows(self, dataflows: Optional[list[PowerBIDataflow]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dataflows = dataflows - - @property - def tiles(self) -> Optional[list[PowerBITile]]: - return None if self.attributes is None else self.attributes.tiles - - @tiles.setter - def tiles(self, tiles: Optional[list[PowerBITile]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.tiles = tiles - - @property - def tables(self) -> Optional[list[PowerBITable]]: - return None if self.attributes is None else self.attributes.tables - - @tables.setter - def tables(self, tables: Optional[list[PowerBITable]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.tables = tables - - @property - def datasources(self) -> Optional[list[PowerBIDatasource]]: - return None if self.attributes is None else self.attributes.datasources - - @datasources.setter - def datasources(self, datasources: Optional[list[PowerBIDatasource]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.datasources = datasources - - class Attributes(PowerBI.Attributes): - workspace_qualified_name: Optional[str] = Field( - None, description="", alias="workspaceQualifiedName" - ) - web_url: Optional[str] = 
Field(None, description="", alias="webUrl") - reports: Optional[list[PowerBIReport]] = Field( - None, description="", alias="reports" - ) # relationship - workspace: Optional[PowerBIWorkspace] = Field( - None, description="", alias="workspace" - ) # relationship - dataflows: Optional[list[PowerBIDataflow]] = Field( - None, description="", alias="dataflows" - ) # relationship - tiles: Optional[list[PowerBITile]] = Field( - None, description="", alias="tiles" - ) # relationship - tables: Optional[list[PowerBITable]] = Field( - None, description="", alias="tables" - ) # relationship - datasources: Optional[list[PowerBIDatasource]] = Field( - None, description="", alias="datasources" - ) # relationship - - attributes: "PowerBIDataset.Attributes" = Field( - default_factory=lambda: PowerBIDataset.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class PowerBIDashboard(PowerBI): - """Description""" - - type_name: str = Field("PowerBIDashboard", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "PowerBIDashboard": - raise ValueError("must be PowerBIDashboard") - return v - - def __setattr__(self, name, value): - if name in PowerBIDashboard._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "workspaceQualifiedName", "workspaceQualifiedName" - ) - """ - Unique name of the workspace in which this dashboard exists. - """ - WEB_URL: ClassVar[KeywordField] = KeywordField("webUrl", "webUrl") - """ - Deprecated. See 'sourceUrl' instead. - """ - TILE_COUNT: ClassVar[NumericField] = NumericField("tileCount", "tileCount") - """ - Number of tiles in this table. 
- """ - - WORKSPACE: ClassVar[RelationField] = RelationField("workspace") - """ - TBC - """ - TILES: ClassVar[RelationField] = RelationField("tiles") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "workspace_qualified_name", - "web_url", - "tile_count", - "workspace", - "tiles", - ] - - @property - def workspace_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.workspace_qualified_name - ) - - @workspace_qualified_name.setter - def workspace_qualified_name(self, workspace_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workspace_qualified_name = workspace_qualified_name - - @property - def web_url(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.web_url - - @web_url.setter - def web_url(self, web_url: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.web_url = web_url - - @property - def tile_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.tile_count - - @tile_count.setter - def tile_count(self, tile_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.tile_count = tile_count - - @property - def workspace(self) -> Optional[PowerBIWorkspace]: - return None if self.attributes is None else self.attributes.workspace - - @workspace.setter - def workspace(self, workspace: Optional[PowerBIWorkspace]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workspace = workspace - - @property - def tiles(self) -> Optional[list[PowerBITile]]: - return None if self.attributes is None else self.attributes.tiles - - @tiles.setter - def tiles(self, tiles: Optional[list[PowerBITile]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.tiles = tiles - - class Attributes(PowerBI.Attributes): - workspace_qualified_name: Optional[str] = Field( - None, description="", alias="workspaceQualifiedName" - ) - web_url: Optional[str] = Field(None, description="", alias="webUrl") - tile_count: Optional[int] = Field(None, description="", alias="tileCount") - workspace: Optional[PowerBIWorkspace] = Field( - None, description="", alias="workspace" - ) # relationship - tiles: Optional[list[PowerBITile]] = Field( - None, description="", alias="tiles" - ) # relationship - - attributes: "PowerBIDashboard.Attributes" = Field( - default_factory=lambda: PowerBIDashboard.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class PowerBIDataflow(PowerBI): - """Description""" - - type_name: str = Field("PowerBIDataflow", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "PowerBIDataflow": - raise ValueError("must be PowerBIDataflow") - return v - - def __setattr__(self, name, value): - if name in PowerBIDataflow._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "workspaceQualifiedName", "workspaceQualifiedName" - ) - """ - Unique name of the workspace in which this dataflow exists. - """ - WEB_URL: ClassVar[KeywordField] = KeywordField("webUrl", "webUrl") - """ - Deprecated. See 'sourceUrl' instead. 
- """ - - WORKSPACE: ClassVar[RelationField] = RelationField("workspace") - """ - TBC - """ - DATASETS: ClassVar[RelationField] = RelationField("datasets") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "workspace_qualified_name", - "web_url", - "workspace", - "datasets", - ] - - @property - def workspace_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.workspace_qualified_name - ) - - @workspace_qualified_name.setter - def workspace_qualified_name(self, workspace_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workspace_qualified_name = workspace_qualified_name - - @property - def web_url(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.web_url - - @web_url.setter - def web_url(self, web_url: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.web_url = web_url - - @property - def workspace(self) -> Optional[PowerBIWorkspace]: - return None if self.attributes is None else self.attributes.workspace - - @workspace.setter - def workspace(self, workspace: Optional[PowerBIWorkspace]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workspace = workspace - - @property - def datasets(self) -> Optional[list[PowerBIDataset]]: - return None if self.attributes is None else self.attributes.datasets - - @datasets.setter - def datasets(self, datasets: Optional[list[PowerBIDataset]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.datasets = datasets - - class Attributes(PowerBI.Attributes): - workspace_qualified_name: Optional[str] = Field( - None, description="", alias="workspaceQualifiedName" - ) - web_url: Optional[str] = Field(None, description="", alias="webUrl") - workspace: Optional[PowerBIWorkspace] = Field( - None, description="", alias="workspace" - ) # relationship - datasets: Optional[list[PowerBIDataset]] = Field( - None, description="", alias="datasets" - ) # relationship - - attributes: "PowerBIDataflow.Attributes" = Field( - default_factory=lambda: PowerBIDataflow.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class PowerBIPage(PowerBI): - """Description""" - - type_name: str = Field("PowerBIPage", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "PowerBIPage": - raise ValueError("must be PowerBIPage") - return v - - def __setattr__(self, name, value): - if name in PowerBIPage._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "workspaceQualifiedName", "workspaceQualifiedName" - ) - """ - Unique name of the workspace in which this page exists. - """ - REPORT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "reportQualifiedName", "reportQualifiedName" - ) - """ - Unique name of the report in which this page exists. 
- """ - - REPORT: ClassVar[RelationField] = RelationField("report") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "workspace_qualified_name", - "report_qualified_name", - "report", - ] - - @property - def workspace_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.workspace_qualified_name - ) - - @workspace_qualified_name.setter - def workspace_qualified_name(self, workspace_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.workspace_qualified_name = workspace_qualified_name - - @property - def report_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.report_qualified_name - ) - - @report_qualified_name.setter - def report_qualified_name(self, report_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.report_qualified_name = report_qualified_name - - @property - def report(self) -> Optional[PowerBIReport]: - return None if self.attributes is None else self.attributes.report - - @report.setter - def report(self, report: Optional[PowerBIReport]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.report = report - - class Attributes(PowerBI.Attributes): - workspace_qualified_name: Optional[str] = Field( - None, description="", alias="workspaceQualifiedName" - ) - report_qualified_name: Optional[str] = Field( - None, description="", alias="reportQualifiedName" - ) - report: Optional[PowerBIReport] = Field( - None, description="", alias="report" - ) # relationship - - attributes: "PowerBIPage.Attributes" = Field( - default_factory=lambda: PowerBIPage.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -PowerBIReport.Attributes.update_forward_refs() - - -PowerBIMeasure.Attributes.update_forward_refs() - - -PowerBIColumn.Attributes.update_forward_refs() - - -PowerBITable.Attributes.update_forward_refs() - - -PowerBITile.Attributes.update_forward_refs() - - -PowerBIDatasource.Attributes.update_forward_refs() - - -PowerBIWorkspace.Attributes.update_forward_refs() - - -PowerBIDataset.Attributes.update_forward_refs() - - -PowerBIDashboard.Attributes.update_forward_refs() - - -PowerBIDataflow.Attributes.update_forward_refs() - - -PowerBIPage.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset80.py b/pyatlan/model/assets/asset80.py deleted file mode 100644 index ebfbb3602..000000000 --- a/pyatlan/model/assets/asset80.py +++ /dev/null @@ -1,1532 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 Atlan Pte. Ltd. 
- - -from __future__ import annotations - -from typing import ClassVar, Optional - -from pydantic import Field, validator - -from pyatlan.model.fields.atlan_fields import ( - KeywordField, - KeywordTextField, - RelationField, -) - -from .asset52 import MicroStrategy - - -class MicroStrategyReport(MicroStrategy): - """Description""" - - type_name: str = Field("MicroStrategyReport", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "MicroStrategyReport": - raise ValueError("must be MicroStrategyReport") - return v - - def __setattr__(self, name, value): - if name in MicroStrategyReport._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - MICRO_STRATEGY_REPORT_TYPE: ClassVar[KeywordField] = KeywordField( - "microStrategyReportType", "microStrategyReportType" - ) - """ - Type of report, for example: Grid or Chart. - """ - - MICRO_STRATEGY_METRICS: ClassVar[RelationField] = RelationField( - "microStrategyMetrics" - ) - """ - TBC - """ - MICRO_STRATEGY_PROJECT: ClassVar[RelationField] = RelationField( - "microStrategyProject" - ) - """ - TBC - """ - MICRO_STRATEGY_ATTRIBUTES: ClassVar[RelationField] = RelationField( - "microStrategyAttributes" - ) - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "micro_strategy_report_type", - "micro_strategy_metrics", - "micro_strategy_project", - "micro_strategy_attributes", - ] - - @property - def micro_strategy_report_type(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_report_type - ) - - @micro_strategy_report_type.setter - def micro_strategy_report_type(self, micro_strategy_report_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_report_type = micro_strategy_report_type - - @property - def micro_strategy_metrics(self) -> Optional[list[MicroStrategyMetric]]: - return ( - None if self.attributes is None else self.attributes.micro_strategy_metrics - ) - - @micro_strategy_metrics.setter - def micro_strategy_metrics( - self, micro_strategy_metrics: Optional[list[MicroStrategyMetric]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_metrics = micro_strategy_metrics - - @property - def micro_strategy_project(self) -> Optional[MicroStrategyProject]: - return ( - None if self.attributes is None else self.attributes.micro_strategy_project - ) - - @micro_strategy_project.setter - def micro_strategy_project( - self, micro_strategy_project: Optional[MicroStrategyProject] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_project = micro_strategy_project - - @property - def micro_strategy_attributes(self) -> Optional[list[MicroStrategyAttribute]]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_attributes - ) - - @micro_strategy_attributes.setter - def micro_strategy_attributes( - self, micro_strategy_attributes: Optional[list[MicroStrategyAttribute]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_attributes = micro_strategy_attributes - - class Attributes(MicroStrategy.Attributes): - micro_strategy_report_type: Optional[str] = Field( - None, description="", alias="microStrategyReportType" - ) - micro_strategy_metrics: Optional[list[MicroStrategyMetric]] = Field( - None, description="", 
alias="microStrategyMetrics" - ) # relationship - micro_strategy_project: Optional[MicroStrategyProject] = Field( - None, description="", alias="microStrategyProject" - ) # relationship - micro_strategy_attributes: Optional[list[MicroStrategyAttribute]] = Field( - None, description="", alias="microStrategyAttributes" - ) # relationship - - attributes: "MicroStrategyReport.Attributes" = Field( - default_factory=lambda: MicroStrategyReport.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class MicroStrategyProject(MicroStrategy): - """Description""" - - type_name: str = Field("MicroStrategyProject", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "MicroStrategyProject": - raise ValueError("must be MicroStrategyProject") - return v - - def __setattr__(self, name, value): - if name in MicroStrategyProject._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - MICRO_STRATEGY_REPORTS: ClassVar[RelationField] = RelationField( - "microStrategyReports" - ) - """ - TBC - """ - MICRO_STRATEGY_FACTS: ClassVar[RelationField] = RelationField("microStrategyFacts") - """ - TBC - """ - MICRO_STRATEGY_METRICS: ClassVar[RelationField] = RelationField( - "microStrategyMetrics" - ) - """ - TBC - """ - MICRO_STRATEGY_VISUALIZATIONS: ClassVar[RelationField] = RelationField( - "microStrategyVisualizations" - ) - """ - TBC - """ - MICRO_STRATEGY_DOCUMENTS: ClassVar[RelationField] = RelationField( - "microStrategyDocuments" - ) - """ - TBC - """ - MICRO_STRATEGY_CUBES: ClassVar[RelationField] = RelationField("microStrategyCubes") - """ - TBC - """ - MICRO_STRATEGY_DOSSIERS: ClassVar[RelationField] = RelationField( - "microStrategyDossiers" - ) - """ - TBC - """ - MICRO_STRATEGY_ATTRIBUTES: ClassVar[RelationField] = RelationField( - "microStrategyAttributes" - ) - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "micro_strategy_reports", - "micro_strategy_facts", - "micro_strategy_metrics", - "micro_strategy_visualizations", - "micro_strategy_documents", - "micro_strategy_cubes", - "micro_strategy_dossiers", - "micro_strategy_attributes", - ] - - @property - def micro_strategy_reports(self) -> Optional[list[MicroStrategyReport]]: - return ( - None if self.attributes is None else self.attributes.micro_strategy_reports - ) - - @micro_strategy_reports.setter - def micro_strategy_reports( - self, micro_strategy_reports: Optional[list[MicroStrategyReport]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_reports = micro_strategy_reports - - @property - def micro_strategy_facts(self) -> Optional[list[MicroStrategyFact]]: - return None if self.attributes is None else self.attributes.micro_strategy_facts - - @micro_strategy_facts.setter - def micro_strategy_facts( - self, micro_strategy_facts: Optional[list[MicroStrategyFact]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_facts = micro_strategy_facts - - @property - def micro_strategy_metrics(self) -> Optional[list[MicroStrategyMetric]]: - return ( - None if self.attributes is None else self.attributes.micro_strategy_metrics - ) - - @micro_strategy_metrics.setter - def micro_strategy_metrics( - self, micro_strategy_metrics: Optional[list[MicroStrategyMetric]] - ): - if self.attributes is 
None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_metrics = micro_strategy_metrics - - @property - def micro_strategy_visualizations( - self, - ) -> Optional[list[MicroStrategyVisualization]]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_visualizations - ) - - @micro_strategy_visualizations.setter - def micro_strategy_visualizations( - self, micro_strategy_visualizations: Optional[list[MicroStrategyVisualization]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_visualizations = micro_strategy_visualizations - - @property - def micro_strategy_documents(self) -> Optional[list[MicroStrategyDocument]]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_documents - ) - - @micro_strategy_documents.setter - def micro_strategy_documents( - self, micro_strategy_documents: Optional[list[MicroStrategyDocument]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_documents = micro_strategy_documents - - @property - def micro_strategy_cubes(self) -> Optional[list[MicroStrategyCube]]: - return None if self.attributes is None else self.attributes.micro_strategy_cubes - - @micro_strategy_cubes.setter - def micro_strategy_cubes( - self, micro_strategy_cubes: Optional[list[MicroStrategyCube]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_cubes = micro_strategy_cubes - - @property - def micro_strategy_dossiers(self) -> Optional[list[MicroStrategyDossier]]: - return ( - None if self.attributes is None else self.attributes.micro_strategy_dossiers - ) - - @micro_strategy_dossiers.setter - def micro_strategy_dossiers( - self, micro_strategy_dossiers: Optional[list[MicroStrategyDossier]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_dossiers = micro_strategy_dossiers - - @property - def micro_strategy_attributes(self) -> Optional[list[MicroStrategyAttribute]]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_attributes - ) - - @micro_strategy_attributes.setter - def micro_strategy_attributes( - self, micro_strategy_attributes: Optional[list[MicroStrategyAttribute]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_attributes = micro_strategy_attributes - - class Attributes(MicroStrategy.Attributes): - micro_strategy_reports: Optional[list[MicroStrategyReport]] = Field( - None, description="", alias="microStrategyReports" - ) # relationship - micro_strategy_facts: Optional[list[MicroStrategyFact]] = Field( - None, description="", alias="microStrategyFacts" - ) # relationship - micro_strategy_metrics: Optional[list[MicroStrategyMetric]] = Field( - None, description="", alias="microStrategyMetrics" - ) # relationship - micro_strategy_visualizations: Optional[ - list[MicroStrategyVisualization] - ] = Field( - None, description="", alias="microStrategyVisualizations" - ) # relationship - micro_strategy_documents: Optional[list[MicroStrategyDocument]] = Field( - None, description="", alias="microStrategyDocuments" - ) # relationship - micro_strategy_cubes: Optional[list[MicroStrategyCube]] = Field( - None, description="", alias="microStrategyCubes" - ) # relationship - micro_strategy_dossiers: Optional[list[MicroStrategyDossier]] = Field( - None, description="", alias="microStrategyDossiers" - ) # 
relationship - micro_strategy_attributes: Optional[list[MicroStrategyAttribute]] = Field( - None, description="", alias="microStrategyAttributes" - ) # relationship - - attributes: "MicroStrategyProject.Attributes" = Field( - default_factory=lambda: MicroStrategyProject.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class MicroStrategyMetric(MicroStrategy): - """Description""" - - type_name: str = Field("MicroStrategyMetric", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "MicroStrategyMetric": - raise ValueError("must be MicroStrategyMetric") - return v - - def __setattr__(self, name, value): - if name in MicroStrategyMetric._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - MICRO_STRATEGY_METRIC_EXPRESSION: ClassVar[KeywordField] = KeywordField( - "microStrategyMetricExpression", "microStrategyMetricExpression" - ) - """ - Text specifiying this metric's expression. - """ - MICRO_STRATEGY_ATTRIBUTE_QUALIFIED_NAMES: ClassVar[ - KeywordTextField - ] = KeywordTextField( - "microStrategyAttributeQualifiedNames", - "microStrategyAttributeQualifiedNames", - "microStrategyAttributeQualifiedNames.text", - ) - """ - List of unique names of attributes related to this metric. - """ - MICRO_STRATEGY_ATTRIBUTE_NAMES: ClassVar[KeywordTextField] = KeywordTextField( - "microStrategyAttributeNames", - "microStrategyAttributeNames.keyword", - "microStrategyAttributeNames", - ) - """ - List of simple names of attributes related to this metric. - """ - MICRO_STRATEGY_FACT_QUALIFIED_NAMES: ClassVar[KeywordTextField] = KeywordTextField( - "microStrategyFactQualifiedNames", - "microStrategyFactQualifiedNames", - "microStrategyFactQualifiedNames.text", - ) - """ - List of unique names of facts related to this metric. - """ - MICRO_STRATEGY_FACT_NAMES: ClassVar[KeywordTextField] = KeywordTextField( - "microStrategyFactNames", - "microStrategyFactNames.keyword", - "microStrategyFactNames", - ) - """ - List of simple names of facts related to this metric. - """ - MICRO_STRATEGY_METRIC_PARENT_QUALIFIED_NAMES: ClassVar[ - KeywordTextField - ] = KeywordTextField( - "microStrategyMetricParentQualifiedNames", - "microStrategyMetricParentQualifiedNames", - "microStrategyMetricParentQualifiedNames.text", - ) - """ - List of unique names of parent metrics of this metric. - """ - MICRO_STRATEGY_METRIC_PARENT_NAMES: ClassVar[KeywordTextField] = KeywordTextField( - "microStrategyMetricParentNames", - "microStrategyMetricParentNames.keyword", - "microStrategyMetricParentNames", - ) - """ - List of simple names of parent metrics of this metric. 
- """ - - MICRO_STRATEGY_METRIC_PARENTS: ClassVar[RelationField] = RelationField( - "microStrategyMetricParents" - ) - """ - TBC - """ - MICRO_STRATEGY_FACTS: ClassVar[RelationField] = RelationField("microStrategyFacts") - """ - TBC - """ - MICRO_STRATEGY_REPORTS: ClassVar[RelationField] = RelationField( - "microStrategyReports" - ) - """ - TBC - """ - MICRO_STRATEGY_CUBES: ClassVar[RelationField] = RelationField("microStrategyCubes") - """ - TBC - """ - MICRO_STRATEGY_METRIC_CHILDREN: ClassVar[RelationField] = RelationField( - "microStrategyMetricChildren" - ) - """ - TBC - """ - MICRO_STRATEGY_PROJECT: ClassVar[RelationField] = RelationField( - "microStrategyProject" - ) - """ - TBC - """ - MICRO_STRATEGY_ATTRIBUTES: ClassVar[RelationField] = RelationField( - "microStrategyAttributes" - ) - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "micro_strategy_metric_expression", - "micro_strategy_attribute_qualified_names", - "micro_strategy_attribute_names", - "micro_strategy_fact_qualified_names", - "micro_strategy_fact_names", - "micro_strategy_metric_parent_qualified_names", - "micro_strategy_metric_parent_names", - "micro_strategy_metric_parents", - "micro_strategy_facts", - "micro_strategy_reports", - "micro_strategy_cubes", - "micro_strategy_metric_children", - "micro_strategy_project", - "micro_strategy_attributes", - ] - - @property - def micro_strategy_metric_expression(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_metric_expression - ) - - @micro_strategy_metric_expression.setter - def micro_strategy_metric_expression( - self, micro_strategy_metric_expression: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_metric_expression = ( - micro_strategy_metric_expression - ) - - @property - def micro_strategy_attribute_qualified_names(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_attribute_qualified_names - ) - - @micro_strategy_attribute_qualified_names.setter - def micro_strategy_attribute_qualified_names( - self, micro_strategy_attribute_qualified_names: Optional[set[str]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_attribute_qualified_names = ( - micro_strategy_attribute_qualified_names - ) - - @property - def micro_strategy_attribute_names(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_attribute_names - ) - - @micro_strategy_attribute_names.setter - def micro_strategy_attribute_names( - self, micro_strategy_attribute_names: Optional[set[str]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_attribute_names = micro_strategy_attribute_names - - @property - def micro_strategy_fact_qualified_names(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_fact_qualified_names - ) - - @micro_strategy_fact_qualified_names.setter - def micro_strategy_fact_qualified_names( - self, micro_strategy_fact_qualified_names: Optional[set[str]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_fact_qualified_names = ( - micro_strategy_fact_qualified_names - ) - - @property - def micro_strategy_fact_names(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else 
self.attributes.micro_strategy_fact_names - ) - - @micro_strategy_fact_names.setter - def micro_strategy_fact_names(self, micro_strategy_fact_names: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_fact_names = micro_strategy_fact_names - - @property - def micro_strategy_metric_parent_qualified_names(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_metric_parent_qualified_names - ) - - @micro_strategy_metric_parent_qualified_names.setter - def micro_strategy_metric_parent_qualified_names( - self, micro_strategy_metric_parent_qualified_names: Optional[set[str]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_metric_parent_qualified_names = ( - micro_strategy_metric_parent_qualified_names - ) - - @property - def micro_strategy_metric_parent_names(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_metric_parent_names - ) - - @micro_strategy_metric_parent_names.setter - def micro_strategy_metric_parent_names( - self, micro_strategy_metric_parent_names: Optional[set[str]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_metric_parent_names = ( - micro_strategy_metric_parent_names - ) - - @property - def micro_strategy_metric_parents(self) -> Optional[list[MicroStrategyMetric]]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_metric_parents - ) - - @micro_strategy_metric_parents.setter - def micro_strategy_metric_parents( - self, micro_strategy_metric_parents: Optional[list[MicroStrategyMetric]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_metric_parents = micro_strategy_metric_parents - - @property - def micro_strategy_facts(self) -> Optional[list[MicroStrategyFact]]: - return None if self.attributes is None else self.attributes.micro_strategy_facts - - @micro_strategy_facts.setter - def micro_strategy_facts( - self, micro_strategy_facts: Optional[list[MicroStrategyFact]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_facts = micro_strategy_facts - - @property - def micro_strategy_reports(self) -> Optional[list[MicroStrategyReport]]: - return ( - None if self.attributes is None else self.attributes.micro_strategy_reports - ) - - @micro_strategy_reports.setter - def micro_strategy_reports( - self, micro_strategy_reports: Optional[list[MicroStrategyReport]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_reports = micro_strategy_reports - - @property - def micro_strategy_cubes(self) -> Optional[list[MicroStrategyCube]]: - return None if self.attributes is None else self.attributes.micro_strategy_cubes - - @micro_strategy_cubes.setter - def micro_strategy_cubes( - self, micro_strategy_cubes: Optional[list[MicroStrategyCube]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_cubes = micro_strategy_cubes - - @property - def micro_strategy_metric_children(self) -> Optional[list[MicroStrategyMetric]]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_metric_children - ) - - @micro_strategy_metric_children.setter - def micro_strategy_metric_children( - self, micro_strategy_metric_children: 
Optional[list[MicroStrategyMetric]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_metric_children = micro_strategy_metric_children - - @property - def micro_strategy_project(self) -> Optional[MicroStrategyProject]: - return ( - None if self.attributes is None else self.attributes.micro_strategy_project - ) - - @micro_strategy_project.setter - def micro_strategy_project( - self, micro_strategy_project: Optional[MicroStrategyProject] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_project = micro_strategy_project - - @property - def micro_strategy_attributes(self) -> Optional[list[MicroStrategyAttribute]]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_attributes - ) - - @micro_strategy_attributes.setter - def micro_strategy_attributes( - self, micro_strategy_attributes: Optional[list[MicroStrategyAttribute]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_attributes = micro_strategy_attributes - - class Attributes(MicroStrategy.Attributes): - micro_strategy_metric_expression: Optional[str] = Field( - None, description="", alias="microStrategyMetricExpression" - ) - micro_strategy_attribute_qualified_names: Optional[set[str]] = Field( - None, description="", alias="microStrategyAttributeQualifiedNames" - ) - micro_strategy_attribute_names: Optional[set[str]] = Field( - None, description="", alias="microStrategyAttributeNames" - ) - micro_strategy_fact_qualified_names: Optional[set[str]] = Field( - None, description="", alias="microStrategyFactQualifiedNames" - ) - micro_strategy_fact_names: Optional[set[str]] = Field( - None, description="", alias="microStrategyFactNames" - ) - micro_strategy_metric_parent_qualified_names: Optional[set[str]] = Field( - None, description="", alias="microStrategyMetricParentQualifiedNames" - ) - micro_strategy_metric_parent_names: Optional[set[str]] = Field( - None, description="", alias="microStrategyMetricParentNames" - ) - micro_strategy_metric_parents: Optional[list[MicroStrategyMetric]] = Field( - None, description="", alias="microStrategyMetricParents" - ) # relationship - micro_strategy_facts: Optional[list[MicroStrategyFact]] = Field( - None, description="", alias="microStrategyFacts" - ) # relationship - micro_strategy_reports: Optional[list[MicroStrategyReport]] = Field( - None, description="", alias="microStrategyReports" - ) # relationship - micro_strategy_cubes: Optional[list[MicroStrategyCube]] = Field( - None, description="", alias="microStrategyCubes" - ) # relationship - micro_strategy_metric_children: Optional[list[MicroStrategyMetric]] = Field( - None, description="", alias="microStrategyMetricChildren" - ) # relationship - micro_strategy_project: Optional[MicroStrategyProject] = Field( - None, description="", alias="microStrategyProject" - ) # relationship - micro_strategy_attributes: Optional[list[MicroStrategyAttribute]] = Field( - None, description="", alias="microStrategyAttributes" - ) # relationship - - attributes: "MicroStrategyMetric.Attributes" = Field( - default_factory=lambda: MicroStrategyMetric.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class MicroStrategyCube(MicroStrategy): - """Description""" - - type_name: str = Field("MicroStrategyCube", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "MicroStrategyCube": - raise ValueError("must be MicroStrategyCube") - return v - - def __setattr__(self, name, value): - if name in MicroStrategyCube._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - MICRO_STRATEGY_CUBE_TYPE: ClassVar[KeywordField] = KeywordField( - "microStrategyCubeType", "microStrategyCubeType" - ) - """ - Type of cube, for example: OLAP or MTDI. - """ - MICRO_STRATEGY_CUBE_QUERY: ClassVar[KeywordField] = KeywordField( - "microStrategyCubeQuery", "microStrategyCubeQuery" - ) - """ - Query used to create the cube. - """ - - MICRO_STRATEGY_METRICS: ClassVar[RelationField] = RelationField( - "microStrategyMetrics" - ) - """ - TBC - """ - MICRO_STRATEGY_PROJECT: ClassVar[RelationField] = RelationField( - "microStrategyProject" - ) - """ - TBC - """ - MICRO_STRATEGY_ATTRIBUTES: ClassVar[RelationField] = RelationField( - "microStrategyAttributes" - ) - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "micro_strategy_cube_type", - "micro_strategy_cube_query", - "micro_strategy_metrics", - "micro_strategy_project", - "micro_strategy_attributes", - ] - - @property - def micro_strategy_cube_type(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_cube_type - ) - - @micro_strategy_cube_type.setter - def micro_strategy_cube_type(self, micro_strategy_cube_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_cube_type = micro_strategy_cube_type - - @property - def micro_strategy_cube_query(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_cube_query - ) - - @micro_strategy_cube_query.setter - def micro_strategy_cube_query(self, micro_strategy_cube_query: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_cube_query = micro_strategy_cube_query - - @property - def micro_strategy_metrics(self) -> Optional[list[MicroStrategyMetric]]: - return ( - None if self.attributes is None else self.attributes.micro_strategy_metrics - ) - - @micro_strategy_metrics.setter - def micro_strategy_metrics( - self, micro_strategy_metrics: Optional[list[MicroStrategyMetric]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_metrics = micro_strategy_metrics - - @property - def micro_strategy_project(self) -> Optional[MicroStrategyProject]: - return ( - None if self.attributes is None else self.attributes.micro_strategy_project - ) - - @micro_strategy_project.setter - def micro_strategy_project( - self, micro_strategy_project: Optional[MicroStrategyProject] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_project = micro_strategy_project - - @property - def micro_strategy_attributes(self) -> Optional[list[MicroStrategyAttribute]]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_attributes - ) - - @micro_strategy_attributes.setter - def micro_strategy_attributes( - self, micro_strategy_attributes: 
Optional[list[MicroStrategyAttribute]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_attributes = micro_strategy_attributes - - class Attributes(MicroStrategy.Attributes): - micro_strategy_cube_type: Optional[str] = Field( - None, description="", alias="microStrategyCubeType" - ) - micro_strategy_cube_query: Optional[str] = Field( - None, description="", alias="microStrategyCubeQuery" - ) - micro_strategy_metrics: Optional[list[MicroStrategyMetric]] = Field( - None, description="", alias="microStrategyMetrics" - ) # relationship - micro_strategy_project: Optional[MicroStrategyProject] = Field( - None, description="", alias="microStrategyProject" - ) # relationship - micro_strategy_attributes: Optional[list[MicroStrategyAttribute]] = Field( - None, description="", alias="microStrategyAttributes" - ) # relationship - - attributes: "MicroStrategyCube.Attributes" = Field( - default_factory=lambda: MicroStrategyCube.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class MicroStrategyDossier(MicroStrategy): - """Description""" - - type_name: str = Field("MicroStrategyDossier", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "MicroStrategyDossier": - raise ValueError("must be MicroStrategyDossier") - return v - - def __setattr__(self, name, value): - if name in MicroStrategyDossier._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - MICRO_STRATEGY_DOSSIER_CHAPTER_NAMES: ClassVar[KeywordField] = KeywordField( - "microStrategyDossierChapterNames", "microStrategyDossierChapterNames" - ) - """ - List of chapter names in this dossier. 
- """ - - MICRO_STRATEGY_VISUALIZATIONS: ClassVar[RelationField] = RelationField( - "microStrategyVisualizations" - ) - """ - TBC - """ - MICRO_STRATEGY_PROJECT: ClassVar[RelationField] = RelationField( - "microStrategyProject" - ) - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "micro_strategy_dossier_chapter_names", - "micro_strategy_visualizations", - "micro_strategy_project", - ] - - @property - def micro_strategy_dossier_chapter_names(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_dossier_chapter_names - ) - - @micro_strategy_dossier_chapter_names.setter - def micro_strategy_dossier_chapter_names( - self, micro_strategy_dossier_chapter_names: Optional[set[str]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_dossier_chapter_names = ( - micro_strategy_dossier_chapter_names - ) - - @property - def micro_strategy_visualizations( - self, - ) -> Optional[list[MicroStrategyVisualization]]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_visualizations - ) - - @micro_strategy_visualizations.setter - def micro_strategy_visualizations( - self, micro_strategy_visualizations: Optional[list[MicroStrategyVisualization]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_visualizations = micro_strategy_visualizations - - @property - def micro_strategy_project(self) -> Optional[MicroStrategyProject]: - return ( - None if self.attributes is None else self.attributes.micro_strategy_project - ) - - @micro_strategy_project.setter - def micro_strategy_project( - self, micro_strategy_project: Optional[MicroStrategyProject] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_project = micro_strategy_project - - class Attributes(MicroStrategy.Attributes): - micro_strategy_dossier_chapter_names: Optional[set[str]] = Field( - None, description="", alias="microStrategyDossierChapterNames" - ) - micro_strategy_visualizations: Optional[ - list[MicroStrategyVisualization] - ] = Field( - None, description="", alias="microStrategyVisualizations" - ) # relationship - micro_strategy_project: Optional[MicroStrategyProject] = Field( - None, description="", alias="microStrategyProject" - ) # relationship - - attributes: "MicroStrategyDossier.Attributes" = Field( - default_factory=lambda: MicroStrategyDossier.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class MicroStrategyFact(MicroStrategy): - """Description""" - - type_name: str = Field("MicroStrategyFact", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "MicroStrategyFact": - raise ValueError("must be MicroStrategyFact") - return v - - def __setattr__(self, name, value): - if name in MicroStrategyFact._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - MICRO_STRATEGY_FACT_EXPRESSIONS: ClassVar[KeywordField] = KeywordField( - "microStrategyFactExpressions", "microStrategyFactExpressions" - ) - """ - List of expressions for this fact. 
- """ - - MICRO_STRATEGY_METRICS: ClassVar[RelationField] = RelationField( - "microStrategyMetrics" - ) - """ - TBC - """ - MICRO_STRATEGY_PROJECT: ClassVar[RelationField] = RelationField( - "microStrategyProject" - ) - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "micro_strategy_fact_expressions", - "micro_strategy_metrics", - "micro_strategy_project", - ] - - @property - def micro_strategy_fact_expressions(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_fact_expressions - ) - - @micro_strategy_fact_expressions.setter - def micro_strategy_fact_expressions( - self, micro_strategy_fact_expressions: Optional[set[str]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_fact_expressions = ( - micro_strategy_fact_expressions - ) - - @property - def micro_strategy_metrics(self) -> Optional[list[MicroStrategyMetric]]: - return ( - None if self.attributes is None else self.attributes.micro_strategy_metrics - ) - - @micro_strategy_metrics.setter - def micro_strategy_metrics( - self, micro_strategy_metrics: Optional[list[MicroStrategyMetric]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_metrics = micro_strategy_metrics - - @property - def micro_strategy_project(self) -> Optional[MicroStrategyProject]: - return ( - None if self.attributes is None else self.attributes.micro_strategy_project - ) - - @micro_strategy_project.setter - def micro_strategy_project( - self, micro_strategy_project: Optional[MicroStrategyProject] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_project = micro_strategy_project - - class Attributes(MicroStrategy.Attributes): - micro_strategy_fact_expressions: Optional[set[str]] = Field( - None, description="", alias="microStrategyFactExpressions" - ) - micro_strategy_metrics: Optional[list[MicroStrategyMetric]] = Field( - None, description="", alias="microStrategyMetrics" - ) # relationship - micro_strategy_project: Optional[MicroStrategyProject] = Field( - None, description="", alias="microStrategyProject" - ) # relationship - - attributes: "MicroStrategyFact.Attributes" = Field( - default_factory=lambda: MicroStrategyFact.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class MicroStrategyDocument(MicroStrategy): - """Description""" - - type_name: str = Field("MicroStrategyDocument", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "MicroStrategyDocument": - raise ValueError("must be MicroStrategyDocument") - return v - - def __setattr__(self, name, value): - if name in MicroStrategyDocument._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - MICRO_STRATEGY_PROJECT: ClassVar[RelationField] = RelationField( - "microStrategyProject" - ) - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "micro_strategy_project", - ] - - @property - def micro_strategy_project(self) -> Optional[MicroStrategyProject]: - return ( - None if self.attributes is None else self.attributes.micro_strategy_project - ) - - @micro_strategy_project.setter - def micro_strategy_project( - self, micro_strategy_project: Optional[MicroStrategyProject] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_project = micro_strategy_project - - class Attributes(MicroStrategy.Attributes): - micro_strategy_project: Optional[MicroStrategyProject] = Field( - None, description="", alias="microStrategyProject" - ) # relationship - - attributes: "MicroStrategyDocument.Attributes" = Field( - default_factory=lambda: MicroStrategyDocument.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class MicroStrategyAttribute(MicroStrategy): - """Description""" - - type_name: str = Field("MicroStrategyAttribute", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "MicroStrategyAttribute": - raise ValueError("must be MicroStrategyAttribute") - return v - - def __setattr__(self, name, value): - if name in MicroStrategyAttribute._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - MICRO_STRATEGY_ATTRIBUTE_FORMS: ClassVar[KeywordField] = KeywordField( - "microStrategyAttributeForms", "microStrategyAttributeForms" - ) - """ - JSON string specifying the attribute's name, description, displayFormat, etc. 
- """ - - MICRO_STRATEGY_REPORTS: ClassVar[RelationField] = RelationField( - "microStrategyReports" - ) - """ - TBC - """ - MICRO_STRATEGY_METRICS: ClassVar[RelationField] = RelationField( - "microStrategyMetrics" - ) - """ - TBC - """ - MICRO_STRATEGY_CUBES: ClassVar[RelationField] = RelationField("microStrategyCubes") - """ - TBC - """ - MICRO_STRATEGY_PROJECT: ClassVar[RelationField] = RelationField( - "microStrategyProject" - ) - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "micro_strategy_attribute_forms", - "micro_strategy_reports", - "micro_strategy_metrics", - "micro_strategy_cubes", - "micro_strategy_project", - ] - - @property - def micro_strategy_attribute_forms(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_attribute_forms - ) - - @micro_strategy_attribute_forms.setter - def micro_strategy_attribute_forms( - self, micro_strategy_attribute_forms: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_attribute_forms = micro_strategy_attribute_forms - - @property - def micro_strategy_reports(self) -> Optional[list[MicroStrategyReport]]: - return ( - None if self.attributes is None else self.attributes.micro_strategy_reports - ) - - @micro_strategy_reports.setter - def micro_strategy_reports( - self, micro_strategy_reports: Optional[list[MicroStrategyReport]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_reports = micro_strategy_reports - - @property - def micro_strategy_metrics(self) -> Optional[list[MicroStrategyMetric]]: - return ( - None if self.attributes is None else self.attributes.micro_strategy_metrics - ) - - @micro_strategy_metrics.setter - def micro_strategy_metrics( - self, micro_strategy_metrics: Optional[list[MicroStrategyMetric]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_metrics = micro_strategy_metrics - - @property - def micro_strategy_cubes(self) -> Optional[list[MicroStrategyCube]]: - return None if self.attributes is None else self.attributes.micro_strategy_cubes - - @micro_strategy_cubes.setter - def micro_strategy_cubes( - self, micro_strategy_cubes: Optional[list[MicroStrategyCube]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_cubes = micro_strategy_cubes - - @property - def micro_strategy_project(self) -> Optional[MicroStrategyProject]: - return ( - None if self.attributes is None else self.attributes.micro_strategy_project - ) - - @micro_strategy_project.setter - def micro_strategy_project( - self, micro_strategy_project: Optional[MicroStrategyProject] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_project = micro_strategy_project - - class Attributes(MicroStrategy.Attributes): - micro_strategy_attribute_forms: Optional[str] = Field( - None, description="", alias="microStrategyAttributeForms" - ) - micro_strategy_reports: Optional[list[MicroStrategyReport]] = Field( - None, description="", alias="microStrategyReports" - ) # relationship - micro_strategy_metrics: Optional[list[MicroStrategyMetric]] = Field( - None, description="", alias="microStrategyMetrics" - ) # relationship - micro_strategy_cubes: Optional[list[MicroStrategyCube]] = Field( - None, description="", alias="microStrategyCubes" - ) # relationship - micro_strategy_project: 
Optional[MicroStrategyProject] = Field( - None, description="", alias="microStrategyProject" - ) # relationship - - attributes: "MicroStrategyAttribute.Attributes" = Field( - default_factory=lambda: MicroStrategyAttribute.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class MicroStrategyVisualization(MicroStrategy): - """Description""" - - type_name: str = Field("MicroStrategyVisualization", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "MicroStrategyVisualization": - raise ValueError("must be MicroStrategyVisualization") - return v - - def __setattr__(self, name, value): - if name in MicroStrategyVisualization._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - MICRO_STRATEGY_VISUALIZATION_TYPE: ClassVar[KeywordField] = KeywordField( - "microStrategyVisualizationType", "microStrategyVisualizationType" - ) - """ - Type of visualization. - """ - MICRO_STRATEGY_DOSSIER_QUALIFIED_NAME: ClassVar[ - KeywordTextField - ] = KeywordTextField( - "microStrategyDossierQualifiedName", - "microStrategyDossierQualifiedName", - "microStrategyDossierQualifiedName.text", - ) - """ - Unique name of the dossier in which this visualization exists. - """ - MICRO_STRATEGY_DOSSIER_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "microStrategyDossierName", - "microStrategyDossierName.keyword", - "microStrategyDossierName", - ) - """ - Simple name of the dossier in which this visualization exists. - """ - - MICRO_STRATEGY_DOSSIER: ClassVar[RelationField] = RelationField( - "microStrategyDossier" - ) - """ - TBC - """ - MICRO_STRATEGY_PROJECT: ClassVar[RelationField] = RelationField( - "microStrategyProject" - ) - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "micro_strategy_visualization_type", - "micro_strategy_dossier_qualified_name", - "micro_strategy_dossier_name", - "micro_strategy_dossier", - "micro_strategy_project", - ] - - @property - def micro_strategy_visualization_type(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_visualization_type - ) - - @micro_strategy_visualization_type.setter - def micro_strategy_visualization_type( - self, micro_strategy_visualization_type: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_visualization_type = ( - micro_strategy_visualization_type - ) - - @property - def micro_strategy_dossier_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_dossier_qualified_name - ) - - @micro_strategy_dossier_qualified_name.setter - def micro_strategy_dossier_qualified_name( - self, micro_strategy_dossier_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_dossier_qualified_name = ( - micro_strategy_dossier_qualified_name - ) - - @property - def micro_strategy_dossier_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.micro_strategy_dossier_name - ) - - @micro_strategy_dossier_name.setter - def micro_strategy_dossier_name(self, micro_strategy_dossier_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - 
self.attributes.micro_strategy_dossier_name = micro_strategy_dossier_name - - @property - def micro_strategy_dossier(self) -> Optional[MicroStrategyDossier]: - return ( - None if self.attributes is None else self.attributes.micro_strategy_dossier - ) - - @micro_strategy_dossier.setter - def micro_strategy_dossier( - self, micro_strategy_dossier: Optional[MicroStrategyDossier] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_dossier = micro_strategy_dossier - - @property - def micro_strategy_project(self) -> Optional[MicroStrategyProject]: - return ( - None if self.attributes is None else self.attributes.micro_strategy_project - ) - - @micro_strategy_project.setter - def micro_strategy_project( - self, micro_strategy_project: Optional[MicroStrategyProject] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.micro_strategy_project = micro_strategy_project - - class Attributes(MicroStrategy.Attributes): - micro_strategy_visualization_type: Optional[str] = Field( - None, description="", alias="microStrategyVisualizationType" - ) - micro_strategy_dossier_qualified_name: Optional[str] = Field( - None, description="", alias="microStrategyDossierQualifiedName" - ) - micro_strategy_dossier_name: Optional[str] = Field( - None, description="", alias="microStrategyDossierName" - ) - micro_strategy_dossier: Optional[MicroStrategyDossier] = Field( - None, description="", alias="microStrategyDossier" - ) # relationship - micro_strategy_project: Optional[MicroStrategyProject] = Field( - None, description="", alias="microStrategyProject" - ) # relationship - - attributes: "MicroStrategyVisualization.Attributes" = Field( - default_factory=lambda: MicroStrategyVisualization.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -MicroStrategyReport.Attributes.update_forward_refs() - - -MicroStrategyProject.Attributes.update_forward_refs() - - -MicroStrategyMetric.Attributes.update_forward_refs() - - -MicroStrategyCube.Attributes.update_forward_refs() - - -MicroStrategyDossier.Attributes.update_forward_refs() - - -MicroStrategyFact.Attributes.update_forward_refs() - - -MicroStrategyDocument.Attributes.update_forward_refs() - - -MicroStrategyAttribute.Attributes.update_forward_refs() - - -MicroStrategyVisualization.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset81.py b/pyatlan/model/assets/asset81.py deleted file mode 100644 index 433b22264..000000000 --- a/pyatlan/model/assets/asset81.py +++ /dev/null @@ -1,649 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 Atlan Pte. Ltd. 
- - -from __future__ import annotations - -from typing import ClassVar, Optional - -from pydantic import Field, validator - -from pyatlan.model.fields.atlan_fields import ( - BooleanField, - KeywordField, - KeywordTextField, - NumericField, - RelationField, - TextField, -) - -from .asset53 import Qlik - - -class QlikApp(Qlik): - """Description""" - - type_name: str = Field("QlikApp", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "QlikApp": - raise ValueError("must be QlikApp") - return v - - def __setattr__(self, name, value): - if name in QlikApp._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - QLIK_HAS_SECTION_ACCESS: ClassVar[BooleanField] = BooleanField( - "qlikHasSectionAccess", "qlikHasSectionAccess" - ) - """ - Whether section access or data masking is enabled on the source (true) or not (false). - """ - QLIK_ORIGIN_APP_ID: ClassVar[KeywordField] = KeywordField( - "qlikOriginAppId", "qlikOriginAppId" - ) - """ - Value of originAppId for this app. - """ - QLIK_IS_ENCRYPTED: ClassVar[BooleanField] = BooleanField( - "qlikIsEncrypted", "qlikIsEncrypted" - ) - """ - Whether this app is encrypted (true) or not (false). - """ - QLIK_IS_DIRECT_QUERY_MODE: ClassVar[BooleanField] = BooleanField( - "qlikIsDirectQueryMode", "qlikIsDirectQueryMode" - ) - """ - Whether this app is in direct query mode (true) or not (false). - """ - QLIK_APP_STATIC_BYTE_SIZE: ClassVar[NumericField] = NumericField( - "qlikAppStaticByteSize", "qlikAppStaticByteSize" - ) - """ - Static space used by this app, in bytes. - """ - - QLIK_SPACE: ClassVar[RelationField] = RelationField("qlikSpace") - """ - TBC - """ - QLIK_SHEETS: ClassVar[RelationField] = RelationField("qlikSheets") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "qlik_has_section_access", - "qlik_origin_app_id", - "qlik_is_encrypted", - "qlik_is_direct_query_mode", - "qlik_app_static_byte_size", - "qlik_space", - "qlik_sheets", - ] - - @property - def qlik_has_section_access(self) -> Optional[bool]: - return ( - None if self.attributes is None else self.attributes.qlik_has_section_access - ) - - @qlik_has_section_access.setter - def qlik_has_section_access(self, qlik_has_section_access: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.qlik_has_section_access = qlik_has_section_access - - @property - def qlik_origin_app_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.qlik_origin_app_id - - @qlik_origin_app_id.setter - def qlik_origin_app_id(self, qlik_origin_app_id: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.qlik_origin_app_id = qlik_origin_app_id - - @property - def qlik_is_encrypted(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.qlik_is_encrypted - - @qlik_is_encrypted.setter - def qlik_is_encrypted(self, qlik_is_encrypted: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.qlik_is_encrypted = qlik_is_encrypted - - @property - def qlik_is_direct_query_mode(self) -> Optional[bool]: - return ( - None - if self.attributes is None - else self.attributes.qlik_is_direct_query_mode - ) - - @qlik_is_direct_query_mode.setter - def qlik_is_direct_query_mode(self, qlik_is_direct_query_mode: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - 
self.attributes.qlik_is_direct_query_mode = qlik_is_direct_query_mode - - @property - def qlik_app_static_byte_size(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.qlik_app_static_byte_size - ) - - @qlik_app_static_byte_size.setter - def qlik_app_static_byte_size(self, qlik_app_static_byte_size: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.qlik_app_static_byte_size = qlik_app_static_byte_size - - @property - def qlik_space(self) -> Optional[QlikSpace]: - return None if self.attributes is None else self.attributes.qlik_space - - @qlik_space.setter - def qlik_space(self, qlik_space: Optional[QlikSpace]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.qlik_space = qlik_space - - @property - def qlik_sheets(self) -> Optional[list[QlikSheet]]: - return None if self.attributes is None else self.attributes.qlik_sheets - - @qlik_sheets.setter - def qlik_sheets(self, qlik_sheets: Optional[list[QlikSheet]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.qlik_sheets = qlik_sheets - - class Attributes(Qlik.Attributes): - qlik_has_section_access: Optional[bool] = Field( - None, description="", alias="qlikHasSectionAccess" - ) - qlik_origin_app_id: Optional[str] = Field( - None, description="", alias="qlikOriginAppId" - ) - qlik_is_encrypted: Optional[bool] = Field( - None, description="", alias="qlikIsEncrypted" - ) - qlik_is_direct_query_mode: Optional[bool] = Field( - None, description="", alias="qlikIsDirectQueryMode" - ) - qlik_app_static_byte_size: Optional[int] = Field( - None, description="", alias="qlikAppStaticByteSize" - ) - qlik_space: Optional[QlikSpace] = Field( - None, description="", alias="qlikSpace" - ) # relationship - qlik_sheets: Optional[list[QlikSheet]] = Field( - None, description="", alias="qlikSheets" - ) # relationship - - attributes: "QlikApp.Attributes" = Field( - default_factory=lambda: QlikApp.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class QlikChart(Qlik): - """Description""" - - type_name: str = Field("QlikChart", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "QlikChart": - raise ValueError("must be QlikChart") - return v - - def __setattr__(self, name, value): - if name in QlikChart._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - QLIK_CHART_SUBTITLE: ClassVar[TextField] = TextField( - "qlikChartSubtitle", "qlikChartSubtitle" - ) - """ - Subtitle of this chart. - """ - QLIK_CHART_FOOTNOTE: ClassVar[TextField] = TextField( - "qlikChartFootnote", "qlikChartFootnote" - ) - """ - Footnote of this chart. - """ - QLIK_CHART_ORIENTATION: ClassVar[KeywordField] = KeywordField( - "qlikChartOrientation", "qlikChartOrientation" - ) - """ - Orientation of this chart. - """ - QLIK_CHART_TYPE: ClassVar[KeywordField] = KeywordField( - "qlikChartType", "qlikChartType" - ) - """ - Subtype of this chart, for example: bar, graph, pie, etc. 
- """ - - QLIK_SHEET: ClassVar[RelationField] = RelationField("qlikSheet") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "qlik_chart_subtitle", - "qlik_chart_footnote", - "qlik_chart_orientation", - "qlik_chart_type", - "qlik_sheet", - ] - - @property - def qlik_chart_subtitle(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.qlik_chart_subtitle - - @qlik_chart_subtitle.setter - def qlik_chart_subtitle(self, qlik_chart_subtitle: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.qlik_chart_subtitle = qlik_chart_subtitle - - @property - def qlik_chart_footnote(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.qlik_chart_footnote - - @qlik_chart_footnote.setter - def qlik_chart_footnote(self, qlik_chart_footnote: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.qlik_chart_footnote = qlik_chart_footnote - - @property - def qlik_chart_orientation(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.qlik_chart_orientation - ) - - @qlik_chart_orientation.setter - def qlik_chart_orientation(self, qlik_chart_orientation: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.qlik_chart_orientation = qlik_chart_orientation - - @property - def qlik_chart_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.qlik_chart_type - - @qlik_chart_type.setter - def qlik_chart_type(self, qlik_chart_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.qlik_chart_type = qlik_chart_type - - @property - def qlik_sheet(self) -> Optional[QlikSheet]: - return None if self.attributes is None else self.attributes.qlik_sheet - - @qlik_sheet.setter - def qlik_sheet(self, qlik_sheet: Optional[QlikSheet]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.qlik_sheet = qlik_sheet - - class Attributes(Qlik.Attributes): - qlik_chart_subtitle: Optional[str] = Field( - None, description="", alias="qlikChartSubtitle" - ) - qlik_chart_footnote: Optional[str] = Field( - None, description="", alias="qlikChartFootnote" - ) - qlik_chart_orientation: Optional[str] = Field( - None, description="", alias="qlikChartOrientation" - ) - qlik_chart_type: Optional[str] = Field( - None, description="", alias="qlikChartType" - ) - qlik_sheet: Optional[QlikSheet] = Field( - None, description="", alias="qlikSheet" - ) # relationship - - attributes: "QlikChart.Attributes" = Field( - default_factory=lambda: QlikChart.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class QlikDataset(Qlik): - """Description""" - - type_name: str = Field("QlikDataset", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "QlikDataset": - raise ValueError("must be QlikDataset") - return v - - def __setattr__(self, name, value): - if name in QlikDataset._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - QLIK_DATASET_TECHNICAL_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "qlikDatasetTechnicalName", - "qlikDatasetTechnicalName.keyword", - "qlikDatasetTechnicalName", - ) - """ - Technical name of this asset. 
- """ - QLIK_DATASET_TYPE: ClassVar[KeywordField] = KeywordField( - "qlikDatasetType", "qlikDatasetType" - ) - """ - Type of this data asset, for example: qix-df, snowflake, etc. - """ - QLIK_DATASET_URI: ClassVar[KeywordTextField] = KeywordTextField( - "qlikDatasetUri", "qlikDatasetUri", "qlikDatasetUri.text" - ) - """ - URI of this dataset. - """ - QLIK_DATASET_SUBTYPE: ClassVar[KeywordField] = KeywordField( - "qlikDatasetSubtype", "qlikDatasetSubtype" - ) - """ - Subtype this dataset asset. - """ - - QLIK_SPACE: ClassVar[RelationField] = RelationField("qlikSpace") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "qlik_dataset_technical_name", - "qlik_dataset_type", - "qlik_dataset_uri", - "qlik_dataset_subtype", - "qlik_space", - ] - - @property - def qlik_dataset_technical_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.qlik_dataset_technical_name - ) - - @qlik_dataset_technical_name.setter - def qlik_dataset_technical_name(self, qlik_dataset_technical_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.qlik_dataset_technical_name = qlik_dataset_technical_name - - @property - def qlik_dataset_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.qlik_dataset_type - - @qlik_dataset_type.setter - def qlik_dataset_type(self, qlik_dataset_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.qlik_dataset_type = qlik_dataset_type - - @property - def qlik_dataset_uri(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.qlik_dataset_uri - - @qlik_dataset_uri.setter - def qlik_dataset_uri(self, qlik_dataset_uri: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.qlik_dataset_uri = qlik_dataset_uri - - @property - def qlik_dataset_subtype(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.qlik_dataset_subtype - - @qlik_dataset_subtype.setter - def qlik_dataset_subtype(self, qlik_dataset_subtype: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.qlik_dataset_subtype = qlik_dataset_subtype - - @property - def qlik_space(self) -> Optional[QlikSpace]: - return None if self.attributes is None else self.attributes.qlik_space - - @qlik_space.setter - def qlik_space(self, qlik_space: Optional[QlikSpace]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.qlik_space = qlik_space - - class Attributes(Qlik.Attributes): - qlik_dataset_technical_name: Optional[str] = Field( - None, description="", alias="qlikDatasetTechnicalName" - ) - qlik_dataset_type: Optional[str] = Field( - None, description="", alias="qlikDatasetType" - ) - qlik_dataset_uri: Optional[str] = Field( - None, description="", alias="qlikDatasetUri" - ) - qlik_dataset_subtype: Optional[str] = Field( - None, description="", alias="qlikDatasetSubtype" - ) - qlik_space: Optional[QlikSpace] = Field( - None, description="", alias="qlikSpace" - ) # relationship - - attributes: "QlikDataset.Attributes" = Field( - default_factory=lambda: QlikDataset.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class QlikSheet(Qlik): - """Description""" - - type_name: str = Field("QlikSheet", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "QlikSheet": - raise ValueError("must be QlikSheet") - return v - - def __setattr__(self, name, value): - if name in QlikSheet._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - QLIK_SHEET_IS_APPROVED: ClassVar[BooleanField] = BooleanField( - "qlikSheetIsApproved", "qlikSheetIsApproved" - ) - """ - Whether this is approved (true) or not (false). - """ - - QLIK_APP: ClassVar[RelationField] = RelationField("qlikApp") - """ - TBC - """ - QLIK_CHARTS: ClassVar[RelationField] = RelationField("qlikCharts") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "qlik_sheet_is_approved", - "qlik_app", - "qlik_charts", - ] - - @property - def qlik_sheet_is_approved(self) -> Optional[bool]: - return ( - None if self.attributes is None else self.attributes.qlik_sheet_is_approved - ) - - @qlik_sheet_is_approved.setter - def qlik_sheet_is_approved(self, qlik_sheet_is_approved: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.qlik_sheet_is_approved = qlik_sheet_is_approved - - @property - def qlik_app(self) -> Optional[QlikApp]: - return None if self.attributes is None else self.attributes.qlik_app - - @qlik_app.setter - def qlik_app(self, qlik_app: Optional[QlikApp]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.qlik_app = qlik_app - - @property - def qlik_charts(self) -> Optional[list[QlikChart]]: - return None if self.attributes is None else self.attributes.qlik_charts - - @qlik_charts.setter - def qlik_charts(self, qlik_charts: Optional[list[QlikChart]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.qlik_charts = qlik_charts - - class Attributes(Qlik.Attributes): - qlik_sheet_is_approved: Optional[bool] = Field( - None, description="", alias="qlikSheetIsApproved" - ) - qlik_app: Optional[QlikApp] = Field( - None, description="", alias="qlikApp" - ) # relationship - qlik_charts: Optional[list[QlikChart]] = Field( - None, description="", alias="qlikCharts" - ) # relationship - - attributes: "QlikSheet.Attributes" = Field( - default_factory=lambda: QlikSheet.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class QlikSpace(Qlik): - """Description""" - - type_name: str = Field("QlikSpace", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "QlikSpace": - raise ValueError("must be QlikSpace") - return v - - def __setattr__(self, name, value): - if name in QlikSpace._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - QLIK_SPACE_TYPE: ClassVar[KeywordField] = KeywordField( - "qlikSpaceType", "qlikSpaceType" - ) - """ - Type of this space, for exmaple: Private, Shared, etc. 
- """ - - QLIK_DATASETS: ClassVar[RelationField] = RelationField("qlikDatasets") - """ - TBC - """ - QLIK_APPS: ClassVar[RelationField] = RelationField("qlikApps") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "qlik_space_type", - "qlik_datasets", - "qlik_apps", - ] - - @property - def qlik_space_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.qlik_space_type - - @qlik_space_type.setter - def qlik_space_type(self, qlik_space_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.qlik_space_type = qlik_space_type - - @property - def qlik_datasets(self) -> Optional[list[QlikDataset]]: - return None if self.attributes is None else self.attributes.qlik_datasets - - @qlik_datasets.setter - def qlik_datasets(self, qlik_datasets: Optional[list[QlikDataset]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.qlik_datasets = qlik_datasets - - @property - def qlik_apps(self) -> Optional[list[QlikApp]]: - return None if self.attributes is None else self.attributes.qlik_apps - - @qlik_apps.setter - def qlik_apps(self, qlik_apps: Optional[list[QlikApp]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.qlik_apps = qlik_apps - - class Attributes(Qlik.Attributes): - qlik_space_type: Optional[str] = Field( - None, description="", alias="qlikSpaceType" - ) - qlik_datasets: Optional[list[QlikDataset]] = Field( - None, description="", alias="qlikDatasets" - ) # relationship - qlik_apps: Optional[list[QlikApp]] = Field( - None, description="", alias="qlikApps" - ) # relationship - - attributes: "QlikSpace.Attributes" = Field( - default_factory=lambda: QlikSpace.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -QlikApp.Attributes.update_forward_refs() - - -QlikChart.Attributes.update_forward_refs() - - -QlikDataset.Attributes.update_forward_refs() - - -QlikSheet.Attributes.update_forward_refs() - - -QlikSpace.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset82.py b/pyatlan/model/assets/asset82.py deleted file mode 100644 index 0cecb27eb..000000000 --- a/pyatlan/model/assets/asset82.py +++ /dev/null @@ -1,881 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 Atlan Pte. Ltd. - - -from __future__ import annotations - -from typing import ClassVar, Optional - -from pydantic import Field, validator - -from pyatlan.model.fields.atlan_fields import ( - BooleanField, - KeywordField, - KeywordTextField, - NumericField, - RelationField, - TextField, -) - -from .asset54 import Salesforce - - -class SalesforceObject(Salesforce): - """Description""" - - type_name: str = Field("SalesforceObject", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "SalesforceObject": - raise ValueError("must be SalesforceObject") - return v - - def __setattr__(self, name, value): - if name in SalesforceObject._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - IS_CUSTOM: ClassVar[BooleanField] = BooleanField("isCustom", "isCustom") - """ - Whether this object is a custom object (true) or not (false). - """ - IS_MERGABLE: ClassVar[BooleanField] = BooleanField("isMergable", "isMergable") - """ - Whether this object is mergable (true) or not (false). 
- """ - IS_QUERYABLE: ClassVar[BooleanField] = BooleanField("isQueryable", "isQueryable") - """ - Whether this object is queryable (true) or not (false). - """ - FIELD_COUNT: ClassVar[NumericField] = NumericField("fieldCount", "fieldCount") - """ - Number of fields in this object. - """ - - LOOKUP_FIELDS: ClassVar[RelationField] = RelationField("lookupFields") - """ - TBC - """ - ORGANIZATION: ClassVar[RelationField] = RelationField("organization") - """ - TBC - """ - FIELDS: ClassVar[RelationField] = RelationField("fields") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "is_custom", - "is_mergable", - "is_queryable", - "field_count", - "lookup_fields", - "organization", - "fields", - ] - - @property - def is_custom(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_custom - - @is_custom.setter - def is_custom(self, is_custom: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_custom = is_custom - - @property - def is_mergable(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_mergable - - @is_mergable.setter - def is_mergable(self, is_mergable: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_mergable = is_mergable - - @property - def is_queryable(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_queryable - - @is_queryable.setter - def is_queryable(self, is_queryable: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_queryable = is_queryable - - @property - def field_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.field_count - - @field_count.setter - def field_count(self, field_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.field_count = field_count - - @property - def lookup_fields(self) -> Optional[list[SalesforceField]]: - return None if self.attributes is None else self.attributes.lookup_fields - - @lookup_fields.setter - def lookup_fields(self, lookup_fields: Optional[list[SalesforceField]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.lookup_fields = lookup_fields - - @property - def organization(self) -> Optional[SalesforceOrganization]: - return None if self.attributes is None else self.attributes.organization - - @organization.setter - def organization(self, organization: Optional[SalesforceOrganization]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.organization = organization - - @property - def fields(self) -> Optional[list[SalesforceField]]: - return None if self.attributes is None else self.attributes.fields - - @fields.setter - def fields(self, fields: Optional[list[SalesforceField]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.fields = fields - - class Attributes(Salesforce.Attributes): - is_custom: Optional[bool] = Field(None, description="", alias="isCustom") - is_mergable: Optional[bool] = Field(None, description="", alias="isMergable") - is_queryable: Optional[bool] = Field(None, description="", alias="isQueryable") - field_count: Optional[int] = Field(None, description="", alias="fieldCount") - lookup_fields: Optional[list[SalesforceField]] = Field( - None, description="", alias="lookupFields" - ) # relationship - organization: 
Optional[SalesforceOrganization] = Field( - None, description="", alias="organization" - ) # relationship - fields: Optional[list[SalesforceField]] = Field( - None, description="", alias="fields" - ) # relationship - - attributes: "SalesforceObject.Attributes" = Field( - default_factory=lambda: SalesforceObject.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class SalesforceField(Salesforce): - """Description""" - - type_name: str = Field("SalesforceField", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "SalesforceField": - raise ValueError("must be SalesforceField") - return v - - def __setattr__(self, name, value): - if name in SalesforceField._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - DATA_TYPE: ClassVar[KeywordTextField] = KeywordTextField( - "dataType", "dataType", "dataType.text" - ) - """ - Data type of values in this field. - """ - OBJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "objectQualifiedName", "objectQualifiedName" - ) - """ - Unique name of the object in which this field exists. - """ - ORDER: ClassVar[NumericField] = NumericField("order", "order") - """ - Order (position) of this field within the object. - """ - INLINE_HELP_TEXT: ClassVar[TextField] = TextField( - "inlineHelpText", "inlineHelpText.text" - ) - """ - Help text for this field. - """ - IS_CALCULATED: ClassVar[BooleanField] = BooleanField("isCalculated", "isCalculated") - """ - Whether this field is calculated (true) or not (false). - """ - FORMULA: ClassVar[KeywordField] = KeywordField("formula", "formula") - """ - Formula for this field, if it is a calculated field. - """ - IS_CASE_SENSITIVE: ClassVar[BooleanField] = BooleanField( - "isCaseSensitive", "isCaseSensitive" - ) - """ - Whether this field is case sensitive (true) or in-sensitive (false). - """ - IS_ENCRYPTED: ClassVar[BooleanField] = BooleanField("isEncrypted", "isEncrypted") - """ - Whether this field is encrypted (true) or not (false). - """ - MAX_LENGTH: ClassVar[NumericField] = NumericField("maxLength", "maxLength") - """ - Maximum length of this field. - """ - IS_NULLABLE: ClassVar[BooleanField] = BooleanField("isNullable", "isNullable") - """ - Whether this field allows null values (true) or not (false). - """ - PRECISION: ClassVar[NumericField] = NumericField("precision", "precision") - """ - Total number of digits allowed - """ - NUMERIC_SCALE: ClassVar[NumericField] = NumericField("numericScale", "numericScale") - """ - Number of digits allowed to the right of the decimal point. - """ - IS_UNIQUE: ClassVar[BooleanField] = BooleanField("isUnique", "isUnique") - """ - Whether this field must have unique values (true) or not (false). - """ - PICKLIST_VALUES: ClassVar[KeywordField] = KeywordField( - "picklistValues", "picklistValues" - ) - """ - List of values from which a user can pick while adding a record. - """ - IS_POLYMORPHIC_FOREIGN_KEY: ClassVar[BooleanField] = BooleanField( - "isPolymorphicForeignKey", "isPolymorphicForeignKey" - ) - """ - Whether this field references a record of multiple objects (true) or not (false). - """ - DEFAULT_VALUE_FORMULA: ClassVar[KeywordField] = KeywordField( - "defaultValueFormula", "defaultValueFormula" - ) - """ - Formula for the default value for this field. 
- """ - - LOOKUP_OBJECTS: ClassVar[RelationField] = RelationField("lookupObjects") - """ - TBC - """ - OBJECT: ClassVar[RelationField] = RelationField("object") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "data_type", - "object_qualified_name", - "order", - "inline_help_text", - "is_calculated", - "formula", - "is_case_sensitive", - "is_encrypted", - "max_length", - "is_nullable", - "precision", - "numeric_scale", - "is_unique", - "picklist_values", - "is_polymorphic_foreign_key", - "default_value_formula", - "lookup_objects", - "object", - ] - - @property - def data_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.data_type - - @data_type.setter - def data_type(self, data_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.data_type = data_type - - @property - def object_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.object_qualified_name - ) - - @object_qualified_name.setter - def object_qualified_name(self, object_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.object_qualified_name = object_qualified_name - - @property - def order(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.order - - @order.setter - def order(self, order: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.order = order - - @property - def inline_help_text(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.inline_help_text - - @inline_help_text.setter - def inline_help_text(self, inline_help_text: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.inline_help_text = inline_help_text - - @property - def is_calculated(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_calculated - - @is_calculated.setter - def is_calculated(self, is_calculated: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_calculated = is_calculated - - @property - def formula(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.formula - - @formula.setter - def formula(self, formula: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.formula = formula - - @property - def is_case_sensitive(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_case_sensitive - - @is_case_sensitive.setter - def is_case_sensitive(self, is_case_sensitive: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_case_sensitive = is_case_sensitive - - @property - def is_encrypted(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_encrypted - - @is_encrypted.setter - def is_encrypted(self, is_encrypted: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_encrypted = is_encrypted - - @property - def max_length(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.max_length - - @max_length.setter - def max_length(self, max_length: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.max_length = max_length - - @property 
- def is_nullable(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_nullable - - @is_nullable.setter - def is_nullable(self, is_nullable: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_nullable = is_nullable - - @property - def precision(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.precision - - @precision.setter - def precision(self, precision: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.precision = precision - - @property - def numeric_scale(self) -> Optional[float]: - return None if self.attributes is None else self.attributes.numeric_scale - - @numeric_scale.setter - def numeric_scale(self, numeric_scale: Optional[float]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.numeric_scale = numeric_scale - - @property - def is_unique(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_unique - - @is_unique.setter - def is_unique(self, is_unique: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_unique = is_unique - - @property - def picklist_values(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.picklist_values - - @picklist_values.setter - def picklist_values(self, picklist_values: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.picklist_values = picklist_values - - @property - def is_polymorphic_foreign_key(self) -> Optional[bool]: - return ( - None - if self.attributes is None - else self.attributes.is_polymorphic_foreign_key - ) - - @is_polymorphic_foreign_key.setter - def is_polymorphic_foreign_key(self, is_polymorphic_foreign_key: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_polymorphic_foreign_key = is_polymorphic_foreign_key - - @property - def default_value_formula(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.default_value_formula - ) - - @default_value_formula.setter - def default_value_formula(self, default_value_formula: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.default_value_formula = default_value_formula - - @property - def lookup_objects(self) -> Optional[list[SalesforceObject]]: - return None if self.attributes is None else self.attributes.lookup_objects - - @lookup_objects.setter - def lookup_objects(self, lookup_objects: Optional[list[SalesforceObject]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.lookup_objects = lookup_objects - - @property - def object(self) -> Optional[SalesforceObject]: - return None if self.attributes is None else self.attributes.object - - @object.setter - def object(self, object: Optional[SalesforceObject]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.object = object - - class Attributes(Salesforce.Attributes): - data_type: Optional[str] = Field(None, description="", alias="dataType") - object_qualified_name: Optional[str] = Field( - None, description="", alias="objectQualifiedName" - ) - order: Optional[int] = Field(None, description="", alias="order") - inline_help_text: Optional[str] = Field( - None, description="", alias="inlineHelpText" - ) - is_calculated: Optional[bool] 
= Field( - None, description="", alias="isCalculated" - ) - formula: Optional[str] = Field(None, description="", alias="formula") - is_case_sensitive: Optional[bool] = Field( - None, description="", alias="isCaseSensitive" - ) - is_encrypted: Optional[bool] = Field(None, description="", alias="isEncrypted") - max_length: Optional[int] = Field(None, description="", alias="maxLength") - is_nullable: Optional[bool] = Field(None, description="", alias="isNullable") - precision: Optional[int] = Field(None, description="", alias="precision") - numeric_scale: Optional[float] = Field( - None, description="", alias="numericScale" - ) - is_unique: Optional[bool] = Field(None, description="", alias="isUnique") - picklist_values: Optional[set[str]] = Field( - None, description="", alias="picklistValues" - ) - is_polymorphic_foreign_key: Optional[bool] = Field( - None, description="", alias="isPolymorphicForeignKey" - ) - default_value_formula: Optional[str] = Field( - None, description="", alias="defaultValueFormula" - ) - lookup_objects: Optional[list[SalesforceObject]] = Field( - None, description="", alias="lookupObjects" - ) # relationship - object: Optional[SalesforceObject] = Field( - None, description="", alias="object" - ) # relationship - - attributes: "SalesforceField.Attributes" = Field( - default_factory=lambda: SalesforceField.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class SalesforceOrganization(Salesforce): - """Description""" - - type_name: str = Field("SalesforceOrganization", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "SalesforceOrganization": - raise ValueError("must be SalesforceOrganization") - return v - - def __setattr__(self, name, value): - if name in SalesforceOrganization._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SOURCE_ID: ClassVar[KeywordField] = KeywordField("sourceId", "sourceId") - """ - Identifier of the organization in Salesforce. 
- """ - - REPORTS: ClassVar[RelationField] = RelationField("reports") - """ - TBC - """ - OBJECTS: ClassVar[RelationField] = RelationField("objects") - """ - TBC - """ - DASHBOARDS: ClassVar[RelationField] = RelationField("dashboards") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "source_id", - "reports", - "objects", - "dashboards", - ] - - @property - def source_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.source_id - - @source_id.setter - def source_id(self, source_id: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_id = source_id - - @property - def reports(self) -> Optional[list[SalesforceReport]]: - return None if self.attributes is None else self.attributes.reports - - @reports.setter - def reports(self, reports: Optional[list[SalesforceReport]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.reports = reports - - @property - def objects(self) -> Optional[list[SalesforceObject]]: - return None if self.attributes is None else self.attributes.objects - - @objects.setter - def objects(self, objects: Optional[list[SalesforceObject]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.objects = objects - - @property - def dashboards(self) -> Optional[list[SalesforceDashboard]]: - return None if self.attributes is None else self.attributes.dashboards - - @dashboards.setter - def dashboards(self, dashboards: Optional[list[SalesforceDashboard]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dashboards = dashboards - - class Attributes(Salesforce.Attributes): - source_id: Optional[str] = Field(None, description="", alias="sourceId") - reports: Optional[list[SalesforceReport]] = Field( - None, description="", alias="reports" - ) # relationship - objects: Optional[list[SalesforceObject]] = Field( - None, description="", alias="objects" - ) # relationship - dashboards: Optional[list[SalesforceDashboard]] = Field( - None, description="", alias="dashboards" - ) # relationship - - attributes: "SalesforceOrganization.Attributes" = Field( - default_factory=lambda: SalesforceOrganization.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class SalesforceDashboard(Salesforce): - """Description""" - - type_name: str = Field("SalesforceDashboard", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "SalesforceDashboard": - raise ValueError("must be SalesforceDashboard") - return v - - def __setattr__(self, name, value): - if name in SalesforceDashboard._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SOURCE_ID: ClassVar[KeywordField] = KeywordField("sourceId", "sourceId") - """ - Identifier of the dashboard in Salesforce. - """ - DASHBOARD_TYPE: ClassVar[KeywordField] = KeywordField( - "dashboardType", "dashboardType" - ) - """ - Type of dashboard in Salesforce. - """ - REPORT_COUNT: ClassVar[NumericField] = NumericField("reportCount", "reportCount") - """ - Number of reports linked to the dashboard in Salesforce. 
- """ - - REPORTS: ClassVar[RelationField] = RelationField("reports") - """ - TBC - """ - ORGANIZATION: ClassVar[RelationField] = RelationField("organization") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "source_id", - "dashboard_type", - "report_count", - "reports", - "organization", - ] - - @property - def source_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.source_id - - @source_id.setter - def source_id(self, source_id: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_id = source_id - - @property - def dashboard_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.dashboard_type - - @dashboard_type.setter - def dashboard_type(self, dashboard_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dashboard_type = dashboard_type - - @property - def report_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.report_count - - @report_count.setter - def report_count(self, report_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.report_count = report_count - - @property - def reports(self) -> Optional[list[SalesforceReport]]: - return None if self.attributes is None else self.attributes.reports - - @reports.setter - def reports(self, reports: Optional[list[SalesforceReport]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.reports = reports - - @property - def organization(self) -> Optional[SalesforceOrganization]: - return None if self.attributes is None else self.attributes.organization - - @organization.setter - def organization(self, organization: Optional[SalesforceOrganization]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.organization = organization - - class Attributes(Salesforce.Attributes): - source_id: Optional[str] = Field(None, description="", alias="sourceId") - dashboard_type: Optional[str] = Field( - None, description="", alias="dashboardType" - ) - report_count: Optional[int] = Field(None, description="", alias="reportCount") - reports: Optional[list[SalesforceReport]] = Field( - None, description="", alias="reports" - ) # relationship - organization: Optional[SalesforceOrganization] = Field( - None, description="", alias="organization" - ) # relationship - - attributes: "SalesforceDashboard.Attributes" = Field( - default_factory=lambda: SalesforceDashboard.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class SalesforceReport(Salesforce): - """Description""" - - type_name: str = Field("SalesforceReport", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "SalesforceReport": - raise ValueError("must be SalesforceReport") - return v - - def __setattr__(self, name, value): - if name in SalesforceReport._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - SOURCE_ID: ClassVar[KeywordField] = KeywordField("sourceId", "sourceId") - """ - Identifier of the report in Salesforce. - """ - REPORT_TYPE: ClassVar[KeywordField] = KeywordField("reportType", "reportType") - """ - Type of report in Salesforce. 
- """ - DETAIL_COLUMNS: ClassVar[KeywordField] = KeywordField( - "detailColumns", "detailColumns" - ) - """ - List of column names on the report. - """ - - DASHBOARDS: ClassVar[RelationField] = RelationField("dashboards") - """ - TBC - """ - ORGANIZATION: ClassVar[RelationField] = RelationField("organization") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "source_id", - "report_type", - "detail_columns", - "dashboards", - "organization", - ] - - @property - def source_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.source_id - - @source_id.setter - def source_id(self, source_id: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.source_id = source_id - - @property - def report_type(self) -> Optional[dict[str, str]]: - return None if self.attributes is None else self.attributes.report_type - - @report_type.setter - def report_type(self, report_type: Optional[dict[str, str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.report_type = report_type - - @property - def detail_columns(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.detail_columns - - @detail_columns.setter - def detail_columns(self, detail_columns: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.detail_columns = detail_columns - - @property - def dashboards(self) -> Optional[list[SalesforceDashboard]]: - return None if self.attributes is None else self.attributes.dashboards - - @dashboards.setter - def dashboards(self, dashboards: Optional[list[SalesforceDashboard]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dashboards = dashboards - - @property - def organization(self) -> Optional[SalesforceOrganization]: - return None if self.attributes is None else self.attributes.organization - - @organization.setter - def organization(self, organization: Optional[SalesforceOrganization]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.organization = organization - - class Attributes(Salesforce.Attributes): - source_id: Optional[str] = Field(None, description="", alias="sourceId") - report_type: Optional[dict[str, str]] = Field( - None, description="", alias="reportType" - ) - detail_columns: Optional[set[str]] = Field( - None, description="", alias="detailColumns" - ) - dashboards: Optional[list[SalesforceDashboard]] = Field( - None, description="", alias="dashboards" - ) # relationship - organization: Optional[SalesforceOrganization] = Field( - None, description="", alias="organization" - ) # relationship - - attributes: "SalesforceReport.Attributes" = Field( - default_factory=lambda: SalesforceReport.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by "
-        "type, so are described in the sub-types of this schema.\n",
-    )
-
-
-SalesforceObject.Attributes.update_forward_refs()
-
-
-SalesforceField.Attributes.update_forward_refs()
-
-
-SalesforceOrganization.Attributes.update_forward_refs()
-
-
-SalesforceDashboard.Attributes.update_forward_refs()
-
-
-SalesforceReport.Attributes.update_forward_refs()
diff --git a/pyatlan/model/assets/atlas_glossary.py b/pyatlan/model/assets/atlas_glossary.py
new file mode 100644
index 000000000..69d3d6fe8
--- /dev/null
+++ b/pyatlan/model/assets/atlas_glossary.py
@@ -0,0 +1,224 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2022 Atlan Pte. Ltd.
+
+
+from __future__ import annotations
+
+from typing import ClassVar, Optional
+
+from pydantic.v1 import Field, StrictStr, root_validator, validator
+
+from pyatlan.model.enums import AtlanIcon, AtlasGlossaryType
+from pyatlan.model.fields.atlan_fields import KeywordField, RelationField
+from pyatlan.utils import init_guid, next_id, validate_required_fields
+
+from .asset import Asset
+
+
+class AtlasGlossary(Asset, type_name="AtlasGlossary"):
+    """Description"""
+
+    @root_validator()
+    def _set_qualified_name_fallback(cls, values):
+        if (
+            "attributes" in values
+            and values["attributes"]
+            and not values["attributes"].qualified_name
+        ):
+            values["attributes"].qualified_name = values["guid"]
+        return values
+
+    @classmethod
+    # @validate_arguments()
+    @init_guid
+    def create(
+        cls, *, name: StrictStr, icon: Optional[AtlanIcon] = None
+    ) -> AtlasGlossary:
+        validate_required_fields(["name"], [name])
+        return AtlasGlossary(
+            attributes=AtlasGlossary.Attributes.create(name=name, icon=icon)
+        )
+
+    type_name: str = Field(default="AtlasGlossary", allow_mutation=False)
+
+    @validator("type_name")
+    def validate_type_name(cls, v):
+        if v != "AtlasGlossary":
+            raise ValueError("must be AtlasGlossary")
+        return v
+
+    def __setattr__(self, name, value):
+        if name in AtlasGlossary._convenience_properties:
+            return object.__setattr__(self, name, value)
+        super().__setattr__(name, value)
+
+    SHORT_DESCRIPTION: ClassVar[KeywordField] = KeywordField(
+        "shortDescription", "shortDescription"
+    )
+    """
+    Unused. A short definition of the glossary. See 'description' and 'userDescription' instead.
+    """
+    LONG_DESCRIPTION: ClassVar[KeywordField] = KeywordField(
+        "longDescription", "longDescription"
+    )
+    """
+    Unused. A longer description of the glossary. See 'readme' instead.
+    """
+    LANGUAGE: ClassVar[KeywordField] = KeywordField("language", "language")
+    """
+    Unused. Language of the glossary's contents.
+    """
+    USAGE: ClassVar[KeywordField] = KeywordField("usage", "usage")
+    """
+    Unused. Intended usage for the glossary.
+    """
+    ADDITIONAL_ATTRIBUTES: ClassVar[KeywordField] = KeywordField(
+        "additionalAttributes", "additionalAttributes"
+    )
+    """
+    Unused. Arbitrary set of additional attributes associated with this glossary.
+ """ + GLOSSARY_TYPE: ClassVar[KeywordField] = KeywordField("glossaryType", "glossaryType") + """ + TBC + """ + + TERMS: ClassVar[RelationField] = RelationField("terms") + """ + TBC + """ + CATEGORIES: ClassVar[RelationField] = RelationField("categories") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "short_description", + "long_description", + "language", + "usage", + "additional_attributes", + "glossary_type", + "terms", + "categories", + ] + + @property + def short_description(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.short_description + + @short_description.setter + def short_description(self, short_description: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.short_description = short_description + + @property + def long_description(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.long_description + + @long_description.setter + def long_description(self, long_description: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.long_description = long_description + + @property + def language(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.language + + @language.setter + def language(self, language: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.language = language + + @property + def usage(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.usage + + @usage.setter + def usage(self, usage: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.usage = usage + + @property + def additional_attributes(self) -> Optional[dict[str, str]]: + return ( + None if self.attributes is None else self.attributes.additional_attributes + ) + + @additional_attributes.setter + def additional_attributes(self, additional_attributes: Optional[dict[str, str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.additional_attributes = additional_attributes + + @property + def glossary_type(self) -> Optional[AtlasGlossaryType]: + return None if self.attributes is None else self.attributes.glossary_type + + @glossary_type.setter + def glossary_type(self, glossary_type: Optional[AtlasGlossaryType]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.glossary_type = glossary_type + + @property + def terms(self) -> Optional[list[AtlasGlossaryTerm]]: + return None if self.attributes is None else self.attributes.terms + + @terms.setter + def terms(self, terms: Optional[list[AtlasGlossaryTerm]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.terms = terms + + @property + def categories(self) -> Optional[list[AtlasGlossaryCategory]]: + return None if self.attributes is None else self.attributes.categories + + @categories.setter + def categories(self, categories: Optional[list[AtlasGlossaryCategory]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.categories = categories + + class Attributes(Asset.Attributes): + short_description: Optional[str] = Field(default=None, description="") + long_description: Optional[str] = Field(default=None, description="") + language: Optional[str] = Field(default=None, description="") + usage: Optional[str] = Field(default=None, description="") + 
additional_attributes: Optional[dict[str, str]] = Field( + default=None, description="" + ) + glossary_type: Optional[AtlasGlossaryType] = Field(default=None, description="") + terms: Optional[list[AtlasGlossaryTerm]] = Field( + default=None, description="" + ) # relationship + categories: Optional[list[AtlasGlossaryCategory]] = Field( + default=None, description="" + ) # relationship + + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, *, name: StrictStr, icon: Optional[AtlanIcon] = None + ) -> AtlasGlossary.Attributes: + validate_required_fields(["name"], [name]) + icon_str = icon.value if icon is not None else None + return AtlasGlossary.Attributes( + name=name, qualified_name=next_id(), asset_icon=icon_str + ) + + attributes: "AtlasGlossary.Attributes" = Field( + default_factory=lambda: AtlasGlossary.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .atlas_glossary_category import AtlasGlossaryCategory # noqa +from .atlas_glossary_term import AtlasGlossaryTerm # noqa diff --git a/pyatlan/model/assets/atlas_glossary_category.py b/pyatlan/model/assets/atlas_glossary_category.py new file mode 100644 index 000000000..72fb02c9d --- /dev/null +++ b/pyatlan/model/assets/atlas_glossary_category.py @@ -0,0 +1,280 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, StrictStr, root_validator, validator + +from pyatlan.model.enums import AtlasGlossaryCategoryType +from pyatlan.model.fields.atlan_fields import KeywordField, RelationField +from pyatlan.utils import init_guid, next_id, validate_required_fields + +from .asset import Asset, SelfAsset + + +class AtlasGlossaryCategory(Asset, type_name="AtlasGlossaryCategory"): + """Description""" + + @classmethod + def can_be_archived(self) -> bool: + """ + Indicates if an asset can be archived via the asset.delete_by_guid method. 
+ :returns: True if archiving is supported + """ + return False + + @root_validator() + def _set_qualified_name_fallback(cls, values): + if ( + "attributes" in values + and values["attributes"] + and not values["attributes"].qualified_name + ): + values["attributes"].qualified_name = values["guid"] + return values + + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, + *, + name: StrictStr, + anchor: AtlasGlossary, + parent_category: Optional[AtlasGlossaryCategory] = None, + ) -> AtlasGlossaryCategory: + validate_required_fields(["name", "anchor"], [name, anchor]) + return cls( + attributes=AtlasGlossaryCategory.Attributes.create( + name=name, anchor=anchor, parent_category=parent_category + ) + ) + + def trim_to_required(self) -> AtlasGlossaryCategory: + if self.anchor is None or not self.anchor.guid: + raise ValueError("anchor.guid must be available") + return self.create_for_modification( + qualified_name=self.qualified_name or "", + name=self.name or "", + glossary_guid=self.anchor.guid, + ) + + @classmethod + def create_for_modification( + cls: type[SelfAsset], + qualified_name: str = "", + name: str = "", + glossary_guid: str = "", + ) -> SelfAsset: + validate_required_fields( + ["name", "qualified_name", "glossary_guid"], + [name, qualified_name, glossary_guid], + ) + glossary = AtlasGlossary() + glossary.guid = glossary_guid + return cls( + attributes=cls.Attributes( + qualified_name=qualified_name, name=name, anchor=glossary + ) + ) + + ANCHOR: ClassVar[KeywordField] = KeywordField("anchor", "__glossary") + """Glossary in which the category is contained, searchable by the qualifiedName of the glossary.""" + + PARENT_CATEGORY: ClassVar[KeywordField] = KeywordField( + "parentCategory", "__parentCategory" + ) + """Parent category in which a subcategory is contained, searchable by the qualifiedName of the category.""" + + type_name: str = Field(default="AtlasGlossaryCategory", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "AtlasGlossaryCategory": + raise ValueError("must be AtlasGlossaryCategory") + return v + + def __setattr__(self, name, value): + if name in AtlasGlossaryCategory._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + SHORT_DESCRIPTION: ClassVar[KeywordField] = KeywordField( + "shortDescription", "shortDescription" + ) + """ + Unused. Brief summary of the category. See 'description' and 'userDescription' instead. + """ + LONG_DESCRIPTION: ClassVar[KeywordField] = KeywordField( + "longDescription", "longDescription" + ) + """ + Unused. Detailed description of the category. See 'readme' instead. + """ + ADDITIONAL_ATTRIBUTES: ClassVar[KeywordField] = KeywordField( + "additionalAttributes", "additionalAttributes" + ) + """ + Unused. Arbitrary set of additional attributes associated with the category. 
+ """ + CATEGORY_TYPE: ClassVar[KeywordField] = KeywordField("categoryType", "categoryType") + """ + TBC + """ + + TERMS: ClassVar[RelationField] = RelationField("terms") + """ + TBC + """ + CHILDREN_CATEGORIES: ClassVar[RelationField] = RelationField("childrenCategories") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "short_description", + "long_description", + "additional_attributes", + "category_type", + "terms", + "anchor", + "parent_category", + "children_categories", + ] + + @property + def short_description(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.short_description + + @short_description.setter + def short_description(self, short_description: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.short_description = short_description + + @property + def long_description(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.long_description + + @long_description.setter + def long_description(self, long_description: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.long_description = long_description + + @property + def additional_attributes(self) -> Optional[dict[str, str]]: + return ( + None if self.attributes is None else self.attributes.additional_attributes + ) + + @additional_attributes.setter + def additional_attributes(self, additional_attributes: Optional[dict[str, str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.additional_attributes = additional_attributes + + @property + def category_type(self) -> Optional[AtlasGlossaryCategoryType]: + return None if self.attributes is None else self.attributes.category_type + + @category_type.setter + def category_type(self, category_type: Optional[AtlasGlossaryCategoryType]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.category_type = category_type + + @property + def terms(self) -> Optional[list[AtlasGlossaryTerm]]: + return None if self.attributes is None else self.attributes.terms + + @terms.setter + def terms(self, terms: Optional[list[AtlasGlossaryTerm]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.terms = terms + + @property + def anchor(self) -> Optional[AtlasGlossary]: + return None if self.attributes is None else self.attributes.anchor + + @anchor.setter + def anchor(self, anchor: Optional[AtlasGlossary]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.anchor = anchor + + @property + def parent_category(self) -> Optional[AtlasGlossaryCategory]: + return None if self.attributes is None else self.attributes.parent_category + + @parent_category.setter + def parent_category(self, parent_category: Optional[AtlasGlossaryCategory]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.parent_category = parent_category + + @property + def children_categories(self) -> Optional[list[AtlasGlossaryCategory]]: + return None if self.attributes is None else self.attributes.children_categories + + @children_categories.setter + def children_categories( + self, children_categories: Optional[list[AtlasGlossaryCategory]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.children_categories = children_categories + + class Attributes(Asset.Attributes): + short_description: Optional[str] = Field(default=None, 
description="") + long_description: Optional[str] = Field(default=None, description="") + additional_attributes: Optional[dict[str, str]] = Field( + default=None, description="" + ) + category_type: Optional[AtlasGlossaryCategoryType] = Field( + default=None, description="" + ) + terms: Optional[list[AtlasGlossaryTerm]] = Field( + default=None, description="" + ) # relationship + anchor: Optional[AtlasGlossary] = Field( + default=None, description="" + ) # relationship + parent_category: Optional[AtlasGlossaryCategory] = Field( + default=None, description="" + ) # relationship + children_categories: Optional[list[AtlasGlossaryCategory]] = Field( + default=None, description="" + ) # relationship + + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, + *, + name: StrictStr, + anchor: AtlasGlossary, + parent_category: Optional[AtlasGlossaryCategory] = None, + ) -> AtlasGlossaryCategory.Attributes: + validate_required_fields(["name", "anchor"], [name, anchor]) + return AtlasGlossaryCategory.Attributes( + name=name, + anchor=anchor, + parent_category=parent_category, + qualified_name=next_id(), + ) + + attributes: "AtlasGlossaryCategory.Attributes" = Field( + default_factory=lambda: AtlasGlossaryCategory.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .atlas_glossary import AtlasGlossary # noqa +from .atlas_glossary_term import AtlasGlossaryTerm # noqa diff --git a/pyatlan/model/assets/atlas_glossary_term.py b/pyatlan/model/assets/atlas_glossary_term.py new file mode 100644 index 000000000..34e3945e2 --- /dev/null +++ b/pyatlan/model/assets/atlas_glossary_term.py @@ -0,0 +1,554 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
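As a quick orientation before the body of atlas_glossary_term.py, here is a minimal usage sketch of the three new glossary modules. It is illustrative only and not part of the diff: it assumes the classes are re-exported from pyatlan.model.assets and relies solely on the create() and trim_to_required() signatures defined in these files; persisting the assets through an Atlan client is not shown.

from pyatlan.model.assets import (  # assumes package-level re-exports of the new modules
    AtlasGlossary,
    AtlasGlossaryCategory,
    AtlasGlossaryTerm,
)

# create() validates the name, assigns a temporary GUID via @init_guid and a
# placeholder qualified_name via next_id().
glossary = AtlasGlossary.create(name="Data Governance")

# A category must always be anchored to a glossary.
category = AtlasGlossaryCategory.create(name="KPIs", anchor=glossary)

# A term takes exactly one of anchor, glossary_qualified_name or glossary_guid
# (enforced by validate_single_required_field); categories are optional.
term = AtlasGlossaryTerm.create(name="Revenue", anchor=glossary, categories=[category])

# trim_to_required() reduces the term to the minimal payload needed for an update:
# qualified_name, name and the anchoring glossary's GUID.
minimal = term.trim_to_required()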
+
+
+from __future__ import annotations
+
+from typing import ClassVar, Optional
+
+from pydantic.v1 import Field, StrictStr, root_validator, validator
+
+from pyatlan.model.enums import AtlasGlossaryTermType
+from pyatlan.model.fields.atlan_fields import KeywordField, RelationField
+from pyatlan.utils import (
+    init_guid,
+    next_id,
+    validate_required_fields,
+    validate_single_required_field,
+)
+
+from .asset import Asset, SelfAsset
+
+
+class AtlasGlossaryTerm(Asset, type_name="AtlasGlossaryTerm"):
+    """Description"""
+
+    @root_validator()
+    def _set_qualified_name_fallback(cls, values):
+        if (
+            "attributes" in values
+            and values["attributes"]
+            and not values["attributes"].qualified_name
+        ):
+            values["attributes"].qualified_name = values["guid"]
+        return values
+
+    @classmethod
+    # @validate_arguments()
+    @init_guid
+    def create(
+        cls,
+        *,
+        name: StrictStr,
+        anchor: Optional[AtlasGlossary] = None,
+        glossary_qualified_name: Optional[StrictStr] = None,
+        glossary_guid: Optional[StrictStr] = None,
+        categories: Optional[list[AtlasGlossaryCategory]] = None,
+    ) -> AtlasGlossaryTerm:
+        validate_required_fields(["name"], [name])
+        return cls(
+            attributes=AtlasGlossaryTerm.Attributes.create(
+                name=name,
+                anchor=anchor,
+                glossary_qualified_name=glossary_qualified_name,
+                glossary_guid=glossary_guid,
+                categories=categories,
+            )
+        )
+
+    def trim_to_required(self) -> AtlasGlossaryTerm:
+        if self.anchor is None or not self.anchor.guid:
+            raise ValueError("anchor.guid must be available")
+        return self.create_for_modification(
+            qualified_name=self.qualified_name or "",
+            name=self.name or "",
+            glossary_guid=self.anchor.guid,
+        )
+
+    @classmethod
+    def create_for_modification(
+        cls: type[SelfAsset],
+        qualified_name: str = "",
+        name: str = "",
+        glossary_guid: str = "",
+    ) -> SelfAsset:
+        validate_required_fields(
+            ["name", "qualified_name", "glossary_guid"],
+            [name, qualified_name, glossary_guid],
+        )
+        glossary = AtlasGlossary()
+        glossary.guid = glossary_guid
+        return cls(
+            attributes=cls.Attributes(
+                qualified_name=qualified_name, name=name, anchor=glossary
+            )
+        )
+
+    ANCHOR: ClassVar[KeywordField] = KeywordField("anchor", "__glossary")
+    """Glossary in which the term is contained, searchable by the qualifiedName of the glossary."""
+
+    CATEGORIES: ClassVar[KeywordField] = KeywordField("categories", "__categories")
+    """Categories in which the term is organized, searchable by the qualifiedName of the category."""
+
+    type_name: str = Field(default="AtlasGlossaryTerm", allow_mutation=False)
+
+    @validator("type_name")
+    def validate_type_name(cls, v):
+        if v != "AtlasGlossaryTerm":
+            raise ValueError("must be AtlasGlossaryTerm")
+        return v
+
+    def __setattr__(self, name, value):
+        if name in AtlasGlossaryTerm._convenience_properties:
+            return object.__setattr__(self, name, value)
+        super().__setattr__(name, value)
+
+    SHORT_DESCRIPTION: ClassVar[KeywordField] = KeywordField(
+        "shortDescription", "shortDescription"
+    )
+    """
+    Unused. Brief summary of the term. See 'description' and 'userDescription' instead.
+    """
+    LONG_DESCRIPTION: ClassVar[KeywordField] = KeywordField(
+        "longDescription", "longDescription"
+    )
+    """
+    Unused. Detailed definition of the term. See 'readme' instead.
+    """
+    EXAMPLES: ClassVar[KeywordField] = KeywordField("examples", "examples")
+    """
+    Unused. Examples of the term.
+    """
+    ABBREVIATION: ClassVar[KeywordField] = KeywordField("abbreviation", "abbreviation")
+    """
+    Unused. Abbreviation of the term.
+    """
+    USAGE: ClassVar[KeywordField] = KeywordField("usage", "usage")
+    """
+    Unused. Intended usage for the term.
+    """
+    ADDITIONAL_ATTRIBUTES: ClassVar[KeywordField] = KeywordField(
+        "additionalAttributes", "additionalAttributes"
+    )
+    """
+    Unused. Arbitrary set of additional attributes for the term.
+    """
+    TERM_TYPE: ClassVar[KeywordField] = KeywordField("termType", "termType")
+    """
+    TBC
+    """
+
+    VALID_VALUES_FOR: ClassVar[RelationField] = RelationField("validValuesFor")
+    """
+    TBC
+    """
+    VALID_VALUES: ClassVar[RelationField] = RelationField("validValues")
+    """
+    TBC
+    """
+    SEE_ALSO: ClassVar[RelationField] = RelationField("seeAlso")
+    """
+    TBC
+    """
+    IS_A: ClassVar[RelationField] = RelationField("isA")
+    """
+    TBC
+    """
+    ANTONYMS: ClassVar[RelationField] = RelationField("antonyms")
+    """
+    TBC
+    """
+    ASSIGNED_ENTITIES: ClassVar[RelationField] = RelationField("assignedEntities")
+    """
+    TBC
+    """
+    CLASSIFIES: ClassVar[RelationField] = RelationField("classifies")
+    """
+    TBC
+    """
+    PREFERRED_TO_TERMS: ClassVar[RelationField] = RelationField("preferredToTerms")
+    """
+    TBC
+    """
+    PREFERRED_TERMS: ClassVar[RelationField] = RelationField("preferredTerms")
+    """
+    TBC
+    """
+    TRANSLATION_TERMS: ClassVar[RelationField] = RelationField("translationTerms")
+    """
+    TBC
+    """
+    SYNONYMS: ClassVar[RelationField] = RelationField("synonyms")
+    """
+    TBC
+    """
+    REPLACED_BY: ClassVar[RelationField] = RelationField("replacedBy")
+    """
+    TBC
+    """
+    REPLACEMENT_TERMS: ClassVar[RelationField] = RelationField("replacementTerms")
+    """
+    TBC
+    """
+    TRANSLATED_TERMS: ClassVar[RelationField] = RelationField("translatedTerms")
+    """
+    TBC
+    """
+
+    _convenience_properties: ClassVar[list[str]] = [
+        "short_description",
+        "long_description",
+        "examples",
+        "abbreviation",
+        "usage",
+        "additional_attributes",
+        "term_type",
+        "valid_values_for",
+        "valid_values",
+        "see_also",
+        "is_a",
+        "antonyms",
+        "assigned_entities",
+        "classifies",
+        "categories",
+        "preferred_to_terms",
+        "preferred_terms",
+        "translation_terms",
+        "synonyms",
+        "replaced_by",
+        "replacement_terms",
+        "translated_terms",
+        "anchor",
+    ]
+
+    @property
+    def short_description(self) -> Optional[str]:
+        return None if self.attributes is None else self.attributes.short_description
+
+    @short_description.setter
+    def short_description(self, short_description: Optional[str]):
+        if self.attributes is None:
+            self.attributes = self.Attributes()
+        self.attributes.short_description = short_description
+
+    @property
+    def long_description(self) -> Optional[str]:
+        return None if self.attributes is None else self.attributes.long_description
+
+    @long_description.setter
+    def long_description(self, long_description: Optional[str]):
+        if self.attributes is None:
+            self.attributes = self.Attributes()
+        self.attributes.long_description = long_description
+
+    @property
+    def examples(self) -> Optional[set[str]]:
+        return None if self.attributes is None else self.attributes.examples
+
+    @examples.setter
+    def examples(self, examples: Optional[set[str]]):
+        if self.attributes is None:
+            self.attributes = self.Attributes()
+        self.attributes.examples = examples
+
+    @property
+    def abbreviation(self) -> Optional[str]:
+        return None if self.attributes is None else self.attributes.abbreviation
+
+    @abbreviation.setter
+    def abbreviation(self, abbreviation: Optional[str]):
+        if self.attributes is None:
+            self.attributes = self.Attributes()
+        self.attributes.abbreviation = abbreviation
+
+    @property
+    def usage(self) -> Optional[str]:
+
return None if self.attributes is None else self.attributes.usage + + @usage.setter + def usage(self, usage: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.usage = usage + + @property + def additional_attributes(self) -> Optional[dict[str, str]]: + return ( + None if self.attributes is None else self.attributes.additional_attributes + ) + + @additional_attributes.setter + def additional_attributes(self, additional_attributes: Optional[dict[str, str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.additional_attributes = additional_attributes + + @property + def term_type(self) -> Optional[AtlasGlossaryTermType]: + return None if self.attributes is None else self.attributes.term_type + + @term_type.setter + def term_type(self, term_type: Optional[AtlasGlossaryTermType]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.term_type = term_type + + @property + def valid_values_for(self) -> Optional[list[AtlasGlossaryTerm]]: + return None if self.attributes is None else self.attributes.valid_values_for + + @valid_values_for.setter + def valid_values_for(self, valid_values_for: Optional[list[AtlasGlossaryTerm]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.valid_values_for = valid_values_for + + @property + def valid_values(self) -> Optional[list[AtlasGlossaryTerm]]: + return None if self.attributes is None else self.attributes.valid_values + + @valid_values.setter + def valid_values(self, valid_values: Optional[list[AtlasGlossaryTerm]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.valid_values = valid_values + + @property + def see_also(self) -> Optional[list[AtlasGlossaryTerm]]: + return None if self.attributes is None else self.attributes.see_also + + @see_also.setter + def see_also(self, see_also: Optional[list[AtlasGlossaryTerm]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.see_also = see_also + + @property + def is_a(self) -> Optional[list[AtlasGlossaryTerm]]: + return None if self.attributes is None else self.attributes.is_a + + @is_a.setter + def is_a(self, is_a: Optional[list[AtlasGlossaryTerm]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_a = is_a + + @property + def antonyms(self) -> Optional[list[AtlasGlossaryTerm]]: + return None if self.attributes is None else self.attributes.antonyms + + @antonyms.setter + def antonyms(self, antonyms: Optional[list[AtlasGlossaryTerm]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.antonyms = antonyms + + @property + def assigned_entities(self) -> Optional[list[Referenceable]]: + return None if self.attributes is None else self.attributes.assigned_entities + + @assigned_entities.setter + def assigned_entities(self, assigned_entities: Optional[list[Referenceable]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.assigned_entities = assigned_entities + + @property + def classifies(self) -> Optional[list[AtlasGlossaryTerm]]: + return None if self.attributes is None else self.attributes.classifies + + @classifies.setter + def classifies(self, classifies: Optional[list[AtlasGlossaryTerm]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.classifies = classifies + + @property + def categories(self) -> Optional[list[AtlasGlossaryCategory]]: 
+ return None if self.attributes is None else self.attributes.categories + + @categories.setter + def categories(self, categories: Optional[list[AtlasGlossaryCategory]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.categories = categories + + @property + def preferred_to_terms(self) -> Optional[list[AtlasGlossaryTerm]]: + return None if self.attributes is None else self.attributes.preferred_to_terms + + @preferred_to_terms.setter + def preferred_to_terms(self, preferred_to_terms: Optional[list[AtlasGlossaryTerm]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preferred_to_terms = preferred_to_terms + + @property + def preferred_terms(self) -> Optional[list[AtlasGlossaryTerm]]: + return None if self.attributes is None else self.attributes.preferred_terms + + @preferred_terms.setter + def preferred_terms(self, preferred_terms: Optional[list[AtlasGlossaryTerm]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preferred_terms = preferred_terms + + @property + def translation_terms(self) -> Optional[list[AtlasGlossaryTerm]]: + return None if self.attributes is None else self.attributes.translation_terms + + @translation_terms.setter + def translation_terms(self, translation_terms: Optional[list[AtlasGlossaryTerm]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.translation_terms = translation_terms + + @property + def synonyms(self) -> Optional[list[AtlasGlossaryTerm]]: + return None if self.attributes is None else self.attributes.synonyms + + @synonyms.setter + def synonyms(self, synonyms: Optional[list[AtlasGlossaryTerm]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.synonyms = synonyms + + @property + def replaced_by(self) -> Optional[list[AtlasGlossaryTerm]]: + return None if self.attributes is None else self.attributes.replaced_by + + @replaced_by.setter + def replaced_by(self, replaced_by: Optional[list[AtlasGlossaryTerm]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.replaced_by = replaced_by + + @property + def replacement_terms(self) -> Optional[list[AtlasGlossaryTerm]]: + return None if self.attributes is None else self.attributes.replacement_terms + + @replacement_terms.setter + def replacement_terms(self, replacement_terms: Optional[list[AtlasGlossaryTerm]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.replacement_terms = replacement_terms + + @property + def translated_terms(self) -> Optional[list[AtlasGlossaryTerm]]: + return None if self.attributes is None else self.attributes.translated_terms + + @translated_terms.setter + def translated_terms(self, translated_terms: Optional[list[AtlasGlossaryTerm]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.translated_terms = translated_terms + + @property + def anchor(self) -> Optional[AtlasGlossary]: + return None if self.attributes is None else self.attributes.anchor + + @anchor.setter + def anchor(self, anchor: Optional[AtlasGlossary]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.anchor = anchor + + class Attributes(Asset.Attributes): + short_description: Optional[str] = Field(default=None, description="") + long_description: Optional[str] = Field(default=None, description="") + examples: Optional[set[str]] = Field(default=None, description="") + abbreviation: 
Optional[str] = Field(default=None, description="") + usage: Optional[str] = Field(default=None, description="") + additional_attributes: Optional[dict[str, str]] = Field( + default=None, description="" + ) + term_type: Optional[AtlasGlossaryTermType] = Field(default=None, description="") + valid_values_for: Optional[list[AtlasGlossaryTerm]] = Field( + default=None, description="" + ) # relationship + valid_values: Optional[list[AtlasGlossaryTerm]] = Field( + default=None, description="" + ) # relationship + see_also: Optional[list[AtlasGlossaryTerm]] = Field( + default=None, description="" + ) # relationship + is_a: Optional[list[AtlasGlossaryTerm]] = Field( + default=None, description="" + ) # relationship + antonyms: Optional[list[AtlasGlossaryTerm]] = Field( + default=None, description="" + ) # relationship + assigned_entities: Optional[list[Referenceable]] = Field( + default=None, description="" + ) # relationship + classifies: Optional[list[AtlasGlossaryTerm]] = Field( + default=None, description="" + ) # relationship + categories: Optional[list[AtlasGlossaryCategory]] = Field( + default=None, description="" + ) # relationship + preferred_to_terms: Optional[list[AtlasGlossaryTerm]] = Field( + default=None, description="" + ) # relationship + preferred_terms: Optional[list[AtlasGlossaryTerm]] = Field( + default=None, description="" + ) # relationship + translation_terms: Optional[list[AtlasGlossaryTerm]] = Field( + default=None, description="" + ) # relationship + synonyms: Optional[list[AtlasGlossaryTerm]] = Field( + default=None, description="" + ) # relationship + replaced_by: Optional[list[AtlasGlossaryTerm]] = Field( + default=None, description="" + ) # relationship + replacement_terms: Optional[list[AtlasGlossaryTerm]] = Field( + default=None, description="" + ) # relationship + translated_terms: Optional[list[AtlasGlossaryTerm]] = Field( + default=None, description="" + ) # relationship + anchor: Optional[AtlasGlossary] = Field( + default=None, description="" + ) # relationship + + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, + *, + name: StrictStr, + anchor: Optional[AtlasGlossary] = None, + glossary_qualified_name: Optional[StrictStr] = None, + glossary_guid: Optional[StrictStr] = None, + categories: Optional[list[AtlasGlossaryCategory]] = None, + ) -> AtlasGlossaryTerm.Attributes: + validate_required_fields(["name"], [name]) + validate_single_required_field( + ["anchor", "glossary_qualified_name", "glossary_guid"], + [anchor, glossary_qualified_name, glossary_guid], + ) + if glossary_qualified_name: + anchor = AtlasGlossary() + anchor.unique_attributes = {"qualifiedName": glossary_qualified_name} + if glossary_guid: + anchor = AtlasGlossary() + anchor.guid = glossary_guid + return AtlasGlossaryTerm.Attributes( + name=name, + anchor=anchor, + categories=categories, + qualified_name=next_id(), + ) + + attributes: "AtlasGlossaryTerm.Attributes" = Field( + default_factory=lambda: AtlasGlossaryTerm.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .atlas_glossary import AtlasGlossary # noqa +from .atlas_glossary_category import AtlasGlossaryCategory # noqa +from .referenceable import Referenceable # noqa diff --git a/pyatlan/model/assets/asset06.py b/pyatlan/model/assets/auth_policy.py similarity index 57% rename from pyatlan/model/assets/asset06.py rename to pyatlan/model/assets/auth_policy.py index df5a6664d..09be48ea7 100644 --- a/pyatlan/model/assets/asset06.py +++ b/pyatlan/model/assets/auth_policy.py @@ -6,7 +6,7 @@ from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.enums import AuthPolicyType from pyatlan.model.fields.atlan_fields import ( @@ -18,7 +18,7 @@ from pyatlan.model.structs import AuthPolicyCondition, AuthPolicyValiditySchedule from pyatlan.utils import validate_required_fields -from .asset00 import Asset, SelfAsset +from .asset import Asset, SelfAsset class AuthPolicy(Asset, type_name="AuthPolicy"): @@ -46,7 +46,7 @@ def create_for_modification( " and then update it in its entirety." ) - type_name: str = Field("AuthPolicy", allow_mutation=False) + type_name: str = Field(default="AuthPolicy", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -377,59 +377,29 @@ def access_control(self, access_control: Optional[AccessControl]): self.attributes.access_control = access_control class Attributes(Asset.Attributes): - policy_type: Optional[AuthPolicyType] = Field( - None, description="", alias="policyType" - ) - policy_service_name: Optional[str] = Field( - None, description="", alias="policyServiceName" - ) - policy_category: Optional[str] = Field( - None, description="", alias="policyCategory" - ) - policy_sub_category: Optional[str] = Field( - None, description="", alias="policySubCategory" - ) - policy_users: Optional[set[str]] = Field( - None, description="", alias="policyUsers" - ) - policy_groups: Optional[set[str]] = Field( - None, description="", alias="policyGroups" - ) - policy_roles: Optional[set[str]] = Field( - None, description="", alias="policyRoles" - ) - policy_actions: Optional[set[str]] = Field( - None, description="", alias="policyActions" - ) - policy_resources: Optional[set[str]] = Field( - None, description="", alias="policyResources" - ) - policy_resource_category: Optional[str] = Field( - None, description="", alias="policyResourceCategory" - ) - policy_priority: Optional[int] = Field( - None, description="", alias="policyPriority" - ) - is_policy_enabled: Optional[bool] = Field( - None, description="", alias="isPolicyEnabled" - ) - policy_mask_type: Optional[str] = Field( - None, description="", alias="policyMaskType" - ) + policy_type: Optional[AuthPolicyType] = Field(default=None, description="") + policy_service_name: Optional[str] = Field(default=None, description="") + policy_category: Optional[str] = Field(default=None, description="") + policy_sub_category: Optional[str] = Field(default=None, description="") + policy_users: Optional[set[str]] = Field(default=None, description="") + policy_groups: Optional[set[str]] = Field(default=None, description="") + policy_roles: Optional[set[str]] = Field(default=None, description="") + policy_actions: Optional[set[str]] = Field(default=None, description="") + policy_resources: Optional[set[str]] = Field(default=None, description="") + policy_resource_category: Optional[str] = Field(default=None, description="") + 
policy_priority: Optional[int] = Field(default=None, description="") + is_policy_enabled: Optional[bool] = Field(default=None, description="") + policy_mask_type: Optional[str] = Field(default=None, description="") policy_validity_schedule: Optional[list[AuthPolicyValiditySchedule]] = Field( - None, description="", alias="policyValiditySchedule" - ) - policy_resource_signature: Optional[str] = Field( - None, description="", alias="policyResourceSignature" - ) - policy_delegate_admin: Optional[bool] = Field( - None, description="", alias="policyDelegateAdmin" + default=None, description="" ) + policy_resource_signature: Optional[str] = Field(default=None, description="") + policy_delegate_admin: Optional[bool] = Field(default=None, description="") policy_conditions: Optional[list[AuthPolicyCondition]] = Field( - None, description="", alias="policyConditions" + default=None, description="" ) access_control: Optional[AccessControl] = Field( - None, description="", alias="accessControl" + default=None, description="" ) # relationship @classmethod @@ -447,223 +417,4 @@ def __create(cls, name: str) -> AuthPolicy.Attributes: ) -class AccessControl(Asset, type_name="AccessControl"): - """Description""" - - type_name: str = Field("AccessControl", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "AccessControl": - raise ValueError("must be AccessControl") - return v - - def __setattr__(self, name, value): - if name in AccessControl._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - IS_ACCESS_CONTROL_ENABLED: ClassVar[BooleanField] = BooleanField( - "isAccessControlEnabled", "isAccessControlEnabled" - ) - """ - TBC - """ - DENY_CUSTOM_METADATA_GUIDS: ClassVar[KeywordField] = KeywordField( - "denyCustomMetadataGuids", "denyCustomMetadataGuids" - ) - """ - TBC - """ - DENY_ASSET_TABS: ClassVar[KeywordField] = KeywordField( - "denyAssetTabs", "denyAssetTabs" - ) - """ - TBC - """ - DENY_ASSET_FILTERS: ClassVar[KeywordField] = KeywordField( - "denyAssetFilters", "denyAssetFilters" - ) - """ - TBC - """ - CHANNEL_LINK: ClassVar[KeywordField] = KeywordField("channelLink", "channelLink") - """ - TBC - """ - DENY_ASSET_TYPES: ClassVar[KeywordField] = KeywordField( - "denyAssetTypes", "denyAssetTypes" - ) - """ - TBC - """ - DENY_NAVIGATION_PAGES: ClassVar[KeywordField] = KeywordField( - "denyNavigationPages", "denyNavigationPages" - ) - """ - TBC - """ - DEFAULT_NAVIGATION: ClassVar[KeywordField] = KeywordField( - "defaultNavigation", "defaultNavigation" - ) - """ - TBC - """ - - POLICIES: ClassVar[RelationField] = RelationField("policies") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "is_access_control_enabled", - "deny_custom_metadata_guids", - "deny_asset_tabs", - "deny_asset_filters", - "channel_link", - "deny_asset_types", - "deny_navigation_pages", - "default_navigation", - "policies", - ] - - @property - def is_access_control_enabled(self) -> Optional[bool]: - return ( - None - if self.attributes is None - else self.attributes.is_access_control_enabled - ) - - @is_access_control_enabled.setter - def is_access_control_enabled(self, is_access_control_enabled: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_access_control_enabled = is_access_control_enabled - - @property - def deny_custom_metadata_guids(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else 
self.attributes.deny_custom_metadata_guids - ) - - @deny_custom_metadata_guids.setter - def deny_custom_metadata_guids( - self, deny_custom_metadata_guids: Optional[set[str]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.deny_custom_metadata_guids = deny_custom_metadata_guids - - @property - def deny_asset_tabs(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.deny_asset_tabs - - @deny_asset_tabs.setter - def deny_asset_tabs(self, deny_asset_tabs: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.deny_asset_tabs = deny_asset_tabs - - @property - def deny_asset_filters(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.deny_asset_filters - - @deny_asset_filters.setter - def deny_asset_filters(self, deny_asset_filters: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.deny_asset_filters = deny_asset_filters - - @property - def channel_link(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.channel_link - - @channel_link.setter - def channel_link(self, channel_link: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.channel_link = channel_link - - @property - def deny_asset_types(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.deny_asset_types - - @deny_asset_types.setter - def deny_asset_types(self, deny_asset_types: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.deny_asset_types = deny_asset_types - - @property - def deny_navigation_pages(self) -> Optional[set[str]]: - return ( - None if self.attributes is None else self.attributes.deny_navigation_pages - ) - - @deny_navigation_pages.setter - def deny_navigation_pages(self, deny_navigation_pages: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.deny_navigation_pages = deny_navigation_pages - - @property - def default_navigation(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.default_navigation - - @default_navigation.setter - def default_navigation(self, default_navigation: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.default_navigation = default_navigation - - @property - def policies(self) -> Optional[list[AuthPolicy]]: - return None if self.attributes is None else self.attributes.policies - - @policies.setter - def policies(self, policies: Optional[list[AuthPolicy]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.policies = policies - - class Attributes(Asset.Attributes): - is_access_control_enabled: Optional[bool] = Field( - None, description="", alias="isAccessControlEnabled" - ) - deny_custom_metadata_guids: Optional[set[str]] = Field( - None, description="", alias="denyCustomMetadataGuids" - ) - deny_asset_tabs: Optional[set[str]] = Field( - None, description="", alias="denyAssetTabs" - ) - deny_asset_filters: Optional[set[str]] = Field( - None, description="", alias="denyAssetFilters" - ) - channel_link: Optional[str] = Field(None, description="", alias="channelLink") - deny_asset_types: Optional[set[str]] = Field( - None, description="", alias="denyAssetTypes" - ) - deny_navigation_pages: Optional[set[str]] = Field( - 
None, description="", alias="denyNavigationPages" - ) - default_navigation: Optional[str] = Field( - None, description="", alias="defaultNavigation" - ) - policies: Optional[list[AuthPolicy]] = Field( - None, description="", alias="policies" - ) # relationship - - attributes: "AccessControl.Attributes" = Field( - default_factory=lambda: AccessControl.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -AuthPolicy.Attributes.update_forward_refs() - - -AccessControl.Attributes.update_forward_refs() +from .access_control import AccessControl # noqa diff --git a/pyatlan/model/assets/asset08.py b/pyatlan/model/assets/auth_service.py similarity index 87% rename from pyatlan/model/assets/asset08.py rename to pyatlan/model/assets/auth_service.py index 5d4285fa5..4f2b8fd92 100644 --- a/pyatlan/model/assets/asset08.py +++ b/pyatlan/model/assets/auth_service.py @@ -6,17 +6,17 @@ from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.fields.atlan_fields import BooleanField, KeywordField, NumericField -from .asset00 import Asset +from .asset import Asset class AuthService(Asset, type_name="AuthService"): """Description""" - type_name: str = Field("AuthService", allow_mutation=False) + type_name: str = Field(default="AuthService", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -125,18 +125,14 @@ def auth_service_policy_last_sync( self.attributes.auth_service_policy_last_sync = auth_service_policy_last_sync class Attributes(Asset.Attributes): - auth_service_type: Optional[str] = Field( - None, description="", alias="authServiceType" - ) - tag_service: Optional[str] = Field(None, description="", alias="tagService") - auth_service_is_enabled: Optional[bool] = Field( - None, description="", alias="authServiceIsEnabled" - ) + auth_service_type: Optional[str] = Field(default=None, description="") + tag_service: Optional[str] = Field(default=None, description="") + auth_service_is_enabled: Optional[bool] = Field(default=None, description="") auth_service_config: Optional[dict[str, str]] = Field( - None, description="", alias="authServiceConfig" + default=None, description="" ) auth_service_policy_last_sync: Optional[int] = Field( - None, description="", alias="authServicePolicyLastSync" + default=None, description="" ) attributes: "AuthService.Attributes" = Field( @@ -144,6 +140,3 @@ class Attributes(Asset.Attributes): description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) - - -AuthService.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset32.py b/pyatlan/model/assets/azure.py similarity index 86% rename from pyatlan/model/assets/asset32.py rename to pyatlan/model/assets/azure.py index a3017e773..3c0876534 100644 --- a/pyatlan/model/assets/asset32.py +++ b/pyatlan/model/assets/azure.py @@ -6,18 +6,18 @@ from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.fields.atlan_fields import KeywordField, KeywordTextField from pyatlan.model.structs import AzureTag -from .asset09 import Cloud +from .cloud import Cloud class Azure(Cloud): """Description""" - type_name: str = Field("Azure", allow_mutation=False) + type_name: str = Field(default="Azure", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -109,24 +109,15 @@ def azure_tags(self, azure_tags: Optional[list[AzureTag]]): self.attributes.azure_tags = azure_tags class Attributes(Cloud.Attributes): - azure_resource_id: Optional[str] = Field( - None, description="", alias="azureResourceId" - ) - azure_location: Optional[str] = Field( - None, description="", alias="azureLocation" - ) + azure_resource_id: Optional[str] = Field(default=None, description="") + azure_location: Optional[str] = Field(default=None, description="") adls_account_secondary_location: Optional[str] = Field( - None, description="", alias="adlsAccountSecondaryLocation" - ) - azure_tags: Optional[list[AzureTag]] = Field( - None, description="", alias="azureTags" + default=None, description="" ) + azure_tags: Optional[list[AzureTag]] = Field(default=None, description="") attributes: "Azure.Attributes" = Field( default_factory=lambda: Azure.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) - - -Azure.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset89.py b/pyatlan/model/assets/azure_event_hub.py similarity index 84% rename from pyatlan/model/assets/asset89.py rename to pyatlan/model/assets/azure_event_hub.py index 165d01a35..3ea3ed239 100644 --- a/pyatlan/model/assets/asset89.py +++ b/pyatlan/model/assets/azure_event_hub.py @@ -6,17 +6,17 @@ from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.fields.atlan_fields import KeywordField -from .asset87 import KafkaTopic +from .kafka_topic import KafkaTopic class AzureEventHub(KafkaTopic): """Description""" - type_name: str = Field("AzureEventHub", allow_mutation=False) + type_name: str = Field(default="AzureEventHub", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -53,15 +53,10 @@ def azure_event_hub_status(self, azure_event_hub_status: Optional[str]): self.attributes.azure_event_hub_status = azure_event_hub_status class Attributes(KafkaTopic.Attributes): - azure_event_hub_status: Optional[str] = Field( - None, description="", alias="azureEventHubStatus" - ) + azure_event_hub_status: Optional[str] = Field(default=None, description="") attributes: "AzureEventHub.Attributes" = Field( default_factory=lambda: AzureEventHub.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) - - -AzureEventHub.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset90.py b/pyatlan/model/assets/azure_event_hub_consumer_group.py similarity index 75% rename from pyatlan/model/assets/asset90.py rename to pyatlan/model/assets/azure_event_hub_consumer_group.py index 21c2d27e7..f86ab87e6 100644 --- a/pyatlan/model/assets/asset90.py +++ b/pyatlan/model/assets/azure_event_hub_consumer_group.py @@ -6,15 +6,15 @@ from typing import ClassVar -from pydantic import Field, validator +from pydantic.v1 import Field, validator -from .asset87 import KafkaConsumerGroup +from .kafka_consumer_group import KafkaConsumerGroup class AzureEventHubConsumerGroup(KafkaConsumerGroup): """Description""" - type_name: str = Field("AzureEventHubConsumerGroup", allow_mutation=False) + type_name: str = Field(default="AzureEventHubConsumerGroup", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -28,6 +28,3 @@ def __setattr__(self, name, value): super().__setattr__(name, value) _convenience_properties: ClassVar[list[str]] = [] - - -AzureEventHubConsumerGroup.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset19.py b/pyatlan/model/assets/b_i.py similarity index 78% rename from pyatlan/model/assets/asset19.py rename to pyatlan/model/assets/b_i.py index 433a4d4b6..910d7e602 100644 --- a/pyatlan/model/assets/asset19.py +++ b/pyatlan/model/assets/b_i.py @@ -6,15 +6,15 @@ from typing import ClassVar -from pydantic import Field, validator +from pydantic.v1 import Field, validator -from .asset00 import Catalog +from .catalog import Catalog class BI(Catalog): """Description""" - type_name: str = Field("BI", allow_mutation=False) + type_name: str = Field(default="BI", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -28,6 +28,3 @@ def __setattr__(self, name, value): super().__setattr__(name, value) _convenience_properties: ClassVar[list[str]] = [] - - -BI.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset11.py b/pyatlan/model/assets/b_i_process.py similarity index 88% rename from pyatlan/model/assets/asset11.py rename to pyatlan/model/assets/b_i_process.py index fef540836..0e9de58fd 100644 --- a/pyatlan/model/assets/asset11.py +++ b/pyatlan/model/assets/b_i_process.py @@ -6,17 +6,17 @@ from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.fields.atlan_fields import RelationField -from .asset00 import Catalog, Process +from .process import Process class BIProcess(Process): """Description""" - type_name: str = Field("BIProcess", allow_mutation=False) + type_name: str = Field(default="BIProcess", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -65,10 +65,10 @@ def inputs(self, inputs: Optional[list[Catalog]]): class Attributes(Process.Attributes): outputs: Optional[list[Catalog]] = Field( - None, description="", alias="outputs" + default=None, description="" ) # relationship inputs: Optional[list[Catalog]] = Field( - None, description="", alias="inputs" + default=None, description="" ) # relationship attributes: "BIProcess.Attributes" = Field( @@ -78,4 +78,4 @@ class Attributes(Process.Attributes): ) -BIProcess.Attributes.update_forward_refs() +from .catalog import Catalog # noqa diff --git a/pyatlan/model/assets/asset05.py b/pyatlan/model/assets/badge.py similarity index 91% rename 
from pyatlan/model/assets/asset05.py rename to pyatlan/model/assets/badge.py index d2da5bea5..936d194bb 100644 --- a/pyatlan/model/assets/asset05.py +++ b/pyatlan/model/assets/badge.py @@ -6,14 +6,14 @@ from typing import ClassVar, Optional -from pydantic import Field, StrictStr, validator +from pydantic.v1 import Field, StrictStr, validator from pyatlan.model.enums import EntityStatus from pyatlan.model.fields.atlan_fields import KeywordField from pyatlan.model.structs import BadgeCondition from pyatlan.utils import init_guid, validate_required_fields -from .asset00 import Asset +from .asset import Asset class Badge(Asset, type_name="Badge"): @@ -40,7 +40,7 @@ def create( ), ) - type_name: str = Field("Badge", allow_mutation=False) + type_name: str = Field(default="Badge", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -97,11 +97,9 @@ def badge_metadata_attribute(self, badge_metadata_attribute: Optional[str]): class Attributes(Asset.Attributes): badge_conditions: Optional[list[BadgeCondition]] = Field( - None, description="", alias="badgeConditions" - ) - badge_metadata_attribute: Optional[str] = Field( - None, description="", alias="badgeMetadataAttribute" + default=None, description="" ) + badge_metadata_attribute: Optional[str] = Field(default=None, description="") @classmethod # @validate_arguments() @@ -136,6 +134,3 @@ def create( description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) - - -Badge.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/catalog.py b/pyatlan/model/assets/catalog.py new file mode 100644 index 000000000..928eead0d --- /dev/null +++ b/pyatlan/model/assets/catalog.py @@ -0,0 +1,136 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
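# A recurring change in the hunks above (e.g. badge.py) drops the explicit
# alias="camelCase" arguments in favour of plain Field(default=None, description="").
# The sketch below is illustrative only, not part of this diff: it assumes the
# serialised camelCase keys now come from an alias generator configured on the shared
# base model, so the wire format stays unchanged. All names here are hypothetical.
from typing import Optional

from pydantic.v1 import BaseModel, Field


def to_camel_case(value: str) -> str:
    head, *rest = value.split("_")
    return head + "".join(part.capitalize() for part in rest)


class ExampleAttributes(BaseModel):
    class Config:
        alias_generator = to_camel_case
        allow_population_by_field_name = True

    badge_metadata_attribute: Optional[str] = Field(default=None, description="")


# Serialising by alias still yields the camelCase key the removed alias= used to give:
# ExampleAttributes(badge_metadata_attribute="x").dict(by_alias=True)
# -> {"badgeMetadataAttribute": "x"}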
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import RelationField + +from .asset import Asset + + +class Catalog(Asset, type_name="Catalog"): + """Description""" + + type_name: str = Field(default="Catalog", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "Catalog": + raise ValueError("must be Catalog") + return v + + def __setattr__(self, name, value): + if name in Catalog._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + INPUT_TO_PROCESSES: ClassVar[RelationField] = RelationField("inputToProcesses") + """ + TBC + """ + OUTPUT_FROM_AIRFLOW_TASKS: ClassVar[RelationField] = RelationField( + "outputFromAirflowTasks" + ) + """ + TBC + """ + INPUT_TO_AIRFLOW_TASKS: ClassVar[RelationField] = RelationField( + "inputToAirflowTasks" + ) + """ + TBC + """ + OUTPUT_FROM_PROCESSES: ClassVar[RelationField] = RelationField( + "outputFromProcesses" + ) + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "input_to_processes", + "output_from_airflow_tasks", + "input_to_airflow_tasks", + "output_from_processes", + ] + + @property + def input_to_processes(self) -> Optional[list[Process]]: + return None if self.attributes is None else self.attributes.input_to_processes + + @input_to_processes.setter + def input_to_processes(self, input_to_processes: Optional[list[Process]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.input_to_processes = input_to_processes + + @property + def output_from_airflow_tasks(self) -> Optional[list[AirflowTask]]: + return ( + None + if self.attributes is None + else self.attributes.output_from_airflow_tasks + ) + + @output_from_airflow_tasks.setter + def output_from_airflow_tasks( + self, output_from_airflow_tasks: Optional[list[AirflowTask]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.output_from_airflow_tasks = output_from_airflow_tasks + + @property + def input_to_airflow_tasks(self) -> Optional[list[AirflowTask]]: + return ( + None if self.attributes is None else self.attributes.input_to_airflow_tasks + ) + + @input_to_airflow_tasks.setter + def input_to_airflow_tasks( + self, input_to_airflow_tasks: Optional[list[AirflowTask]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.input_to_airflow_tasks = input_to_airflow_tasks + + @property + def output_from_processes(self) -> Optional[list[Process]]: + return ( + None if self.attributes is None else self.attributes.output_from_processes + ) + + @output_from_processes.setter + def output_from_processes(self, output_from_processes: Optional[list[Process]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.output_from_processes = output_from_processes + + class Attributes(Asset.Attributes): + input_to_processes: Optional[list[Process]] = Field( + default=None, description="" + ) # relationship + output_from_airflow_tasks: Optional[list[AirflowTask]] = Field( + default=None, description="" + ) # relationship + input_to_airflow_tasks: Optional[list[AirflowTask]] = Field( + default=None, description="" + ) # relationship + output_from_processes: Optional[list[Process]] = Field( + default=None, description="" + ) # relationship + + attributes: "Catalog.Attributes" = Field( + default_factory=lambda: Catalog.Attributes(), + 
description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .airflow_task import AirflowTask # noqa +from .process import Process # noqa diff --git a/pyatlan/model/assets/asset09.py b/pyatlan/model/assets/cloud.py similarity index 78% rename from pyatlan/model/assets/asset09.py rename to pyatlan/model/assets/cloud.py index 54f951c2a..40fcdccf0 100644 --- a/pyatlan/model/assets/asset09.py +++ b/pyatlan/model/assets/cloud.py @@ -6,15 +6,15 @@ from typing import ClassVar -from pydantic import Field, validator +from pydantic.v1 import Field, validator -from .asset00 import Asset +from .asset import Asset class Cloud(Asset, type_name="Cloud"): """Description""" - type_name: str = Field("Cloud", allow_mutation=False) + type_name: str = Field(default="Cloud", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -28,6 +28,3 @@ def __setattr__(self, name, value): super().__setattr__(name, value) _convenience_properties: ClassVar[list[str]] = [] - - -Cloud.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset15.py b/pyatlan/model/assets/collection.py similarity index 85% rename from pyatlan/model/assets/asset15.py rename to pyatlan/model/assets/collection.py index 2fc730eb7..64137a9a9 100644 --- a/pyatlan/model/assets/asset15.py +++ b/pyatlan/model/assets/collection.py @@ -6,18 +6,18 @@ from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.enums import IconType from pyatlan.model.fields.atlan_fields import KeywordField -from .asset00 import Namespace +from .namespace import Namespace class Collection(Namespace): """Description""" - type_name: str = Field("Collection", allow_mutation=False) + type_name: str = Field(default="Collection", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -65,14 +65,11 @@ def icon_type(self, icon_type: Optional[IconType]): self.attributes.icon_type = icon_type class Attributes(Namespace.Attributes): - icon: Optional[str] = Field(None, description="", alias="icon") - icon_type: Optional[IconType] = Field(None, description="", alias="iconType") + icon: Optional[str] = Field(default=None, description="") + icon_type: Optional[IconType] = Field(default=None, description="") attributes: "Collection.Attributes" = Field( default_factory=lambda: Collection.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) - - -Collection.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/column.py b/pyatlan/model/assets/column.py new file mode 100644 index 000000000..04de4ac4b --- /dev/null +++ b/pyatlan/model/assets/column.py @@ -0,0 +1,1381 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
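# The new catalog.py above (and column.py, whose hunk starts here) shares a layout used
# across these split-out modules: postponed annotations at the top, the class body in
# the middle, and imports of related asset classes deferred to the bottom of the file.
# That ordering lets two modules reference each other's classes in type hints without a
# circular-import error at import time. A minimal two-module sketch of the idea, with
# hypothetical module and class names (not pyatlan code):
#
#   # a.py
#   from __future__ import annotations
#
#   class A:
#       def partner(self) -> B:  # resolved lazily thanks to postponed annotations
#           ...
#
#   from b import B  # noqa: E402  -- deferred until A exists, breaking the cycle
#
#   # b.py
#   from __future__ import annotations
#
#   class B:
#       def partner(self) -> A:
#           ...
#
#   from a import A  # noqa: E402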
+ + +from __future__ import annotations + +from datetime import datetime +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.enums import AtlanConnectorType +from pyatlan.model.fields.atlan_fields import ( + BooleanField, + KeywordField, + KeywordTextField, + NumericField, + RelationField, +) +from pyatlan.model.structs import ColumnValueFrequencyMap, Histogram +from pyatlan.utils import init_guid, validate_required_fields + +from .s_q_l import SQL + + +class Column(SQL): + """Description""" + + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, *, name: str, parent_qualified_name: str, parent_type: type, order: int + ) -> Column: + return Column( + attributes=Column.Attributes.create( + name=name, + parent_qualified_name=parent_qualified_name, + parent_type=parent_type, + order=order, + ) + ) + + type_name: str = Field(default="Column", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "Column": + raise ValueError("must be Column") + return v + + def __setattr__(self, name, value): + if name in Column._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + DATA_TYPE: ClassVar[KeywordTextField] = KeywordTextField( + "dataType", "dataType", "dataType.text" + ) + """ + Data type of values in this column. + """ + SUB_DATA_TYPE: ClassVar[KeywordField] = KeywordField("subDataType", "subDataType") + """ + Sub-data type of this column. + """ + RAW_DATA_TYPE_DEFINITION: ClassVar[KeywordField] = KeywordField( + "rawDataTypeDefinition", "rawDataTypeDefinition" + ) + """ + + """ + ORDER: ClassVar[NumericField] = NumericField("order", "order") + """ + Order (position) in which this column appears in the table (starting at 1). + """ + NESTED_COLUMN_COUNT: ClassVar[NumericField] = NumericField( + "nestedColumnCount", "nestedColumnCount" + ) + """ + Number of columns nested within this (STRUCT or NESTED) column. + """ + IS_PARTITION: ClassVar[BooleanField] = BooleanField("isPartition", "isPartition") + """ + Whether this column is a partition column (true) or not (false). + """ + PARTITION_ORDER: ClassVar[NumericField] = NumericField( + "partitionOrder", "partitionOrder" + ) + """ + Order (position) of this partition column in the table. + """ + IS_CLUSTERED: ClassVar[BooleanField] = BooleanField("isClustered", "isClustered") + """ + Whether this column is a clustered column (true) or not (false). + """ + IS_PRIMARY: ClassVar[BooleanField] = BooleanField("isPrimary", "isPrimary") + """ + When true, this column is the primary key for the table. + """ + IS_FOREIGN: ClassVar[BooleanField] = BooleanField("isForeign", "isForeign") + """ + When true, this column is a foreign key to another table. NOTE: this must be true when using the foreignKeyTo relationship to specify columns that refer to this column as a foreign key. + """ # noqa: E501 + IS_INDEXED: ClassVar[BooleanField] = BooleanField("isIndexed", "isIndexed") + """ + When true, this column is indexed in the database. + """ + IS_SORT: ClassVar[BooleanField] = BooleanField("isSort", "isSort") + """ + Whether this column is a sort column (true) or not (false). + """ + IS_DIST: ClassVar[BooleanField] = BooleanField("isDist", "isDist") + """ + Whether this column is a distribution column (true) or not (false). + """ + IS_PINNED: ClassVar[BooleanField] = BooleanField("isPinned", "isPinned") + """ + Whether this column is pinned (true) or not (false). 
+ """ + PINNED_BY: ClassVar[KeywordField] = KeywordField("pinnedBy", "pinnedBy") + """ + User who pinned this column. + """ + PINNED_AT: ClassVar[NumericField] = NumericField("pinnedAt", "pinnedAt") + """ + Time (epoch) at which this column was pinned, in milliseconds. + """ + PRECISION: ClassVar[NumericField] = NumericField("precision", "precision") + """ + Total number of digits allowed, when the dataType is numeric. + """ + DEFAULT_VALUE: ClassVar[KeywordField] = KeywordField("defaultValue", "defaultValue") + """ + Default value for this column. + """ + IS_NULLABLE: ClassVar[BooleanField] = BooleanField("isNullable", "isNullable") + """ + When true, the values in this column can be null. + """ + NUMERIC_SCALE: ClassVar[NumericField] = NumericField("numericScale", "numericScale") + """ + Number of digits allowed to the right of the decimal point. + """ + MAX_LENGTH: ClassVar[NumericField] = NumericField("maxLength", "maxLength") + """ + Maximum length of a value in this column. + """ + VALIDATIONS: ClassVar[KeywordField] = KeywordField("validations", "validations") + """ + Validations for this column. + """ + PARENT_COLUMN_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "parentColumnQualifiedName", + "parentColumnQualifiedName", + "parentColumnQualifiedName.text", + ) + """ + Unique name of the column this column is nested within, for STRUCT and NESTED columns. + """ + PARENT_COLUMN_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "parentColumnName", "parentColumnName.keyword", "parentColumnName" + ) + """ + Simple name of the column this column is nested within, for STRUCT and NESTED columns. + """ + COLUMN_DISTINCT_VALUES_COUNT: ClassVar[NumericField] = NumericField( + "columnDistinctValuesCount", "columnDistinctValuesCount" + ) + """ + Number of rows that contain distinct values. + """ + COLUMN_DISTINCT_VALUES_COUNT_LONG: ClassVar[NumericField] = NumericField( + "columnDistinctValuesCountLong", "columnDistinctValuesCountLong" + ) + """ + Number of rows that contain distinct values. + """ + COLUMN_HISTOGRAM: ClassVar[KeywordField] = KeywordField( + "columnHistogram", "columnHistogram" + ) + """ + List of values in a histogram that represents the contents of this column. + """ + COLUMN_MAX: ClassVar[NumericField] = NumericField("columnMax", "columnMax") + """ + Greatest value in a numeric column. + """ + COLUMN_MIN: ClassVar[NumericField] = NumericField("columnMin", "columnMin") + """ + Least value in a numeric column. + """ + COLUMN_MEAN: ClassVar[NumericField] = NumericField("columnMean", "columnMean") + """ + Arithmetic mean of the values in a numeric column. + """ + COLUMN_SUM: ClassVar[NumericField] = NumericField("columnSum", "columnSum") + """ + Calculated sum of the values in a numeric column. + """ + COLUMN_MEDIAN: ClassVar[NumericField] = NumericField("columnMedian", "columnMedian") + """ + Calculated median of the values in a numeric column. + """ + COLUMN_STANDARD_DEVIATION: ClassVar[NumericField] = NumericField( + "columnStandardDeviation", "columnStandardDeviation" + ) + """ + Calculated standard deviation of the values in a numeric column. + """ + COLUMN_UNIQUE_VALUES_COUNT: ClassVar[NumericField] = NumericField( + "columnUniqueValuesCount", "columnUniqueValuesCount" + ) + """ + Number of rows in which a value in this column appears only once. 
+ """ + COLUMN_UNIQUE_VALUES_COUNT_LONG: ClassVar[NumericField] = NumericField( + "columnUniqueValuesCountLong", "columnUniqueValuesCountLong" + ) + """ + Number of rows in which a value in this column appears only once. + """ + COLUMN_AVERAGE: ClassVar[NumericField] = NumericField( + "columnAverage", "columnAverage" + ) + """ + Average value in this column. + """ + COLUMN_AVERAGE_LENGTH: ClassVar[NumericField] = NumericField( + "columnAverageLength", "columnAverageLength" + ) + """ + Average length of values in a string column. + """ + COLUMN_DUPLICATE_VALUES_COUNT: ClassVar[NumericField] = NumericField( + "columnDuplicateValuesCount", "columnDuplicateValuesCount" + ) + """ + Number of rows that contain duplicate values. + """ + COLUMN_DUPLICATE_VALUES_COUNT_LONG: ClassVar[NumericField] = NumericField( + "columnDuplicateValuesCountLong", "columnDuplicateValuesCountLong" + ) + """ + Number of rows that contain duplicate values. + """ + COLUMN_MAXIMUM_STRING_LENGTH: ClassVar[NumericField] = NumericField( + "columnMaximumStringLength", "columnMaximumStringLength" + ) + """ + Length of the longest value in a string column. + """ + COLUMN_MAXS: ClassVar[KeywordField] = KeywordField("columnMaxs", "columnMaxs") + """ + List of the greatest values in a column. + """ + COLUMN_MINIMUM_STRING_LENGTH: ClassVar[NumericField] = NumericField( + "columnMinimumStringLength", "columnMinimumStringLength" + ) + """ + Length of the shortest value in a string column. + """ + COLUMN_MINS: ClassVar[KeywordField] = KeywordField("columnMins", "columnMins") + """ + List of the least values in a column. + """ + COLUMN_MISSING_VALUES_COUNT: ClassVar[NumericField] = NumericField( + "columnMissingValuesCount", "columnMissingValuesCount" + ) + """ + Number of rows in a column that do not contain content. + """ + COLUMN_MISSING_VALUES_COUNT_LONG: ClassVar[NumericField] = NumericField( + "columnMissingValuesCountLong", "columnMissingValuesCountLong" + ) + """ + Number of rows in a column that do not contain content. + """ + COLUMN_MISSING_VALUES_PERCENTAGE: ClassVar[NumericField] = NumericField( + "columnMissingValuesPercentage", "columnMissingValuesPercentage" + ) + """ + Percentage of rows in a column that do not contain content. + """ + COLUMN_UNIQUENESS_PERCENTAGE: ClassVar[NumericField] = NumericField( + "columnUniquenessPercentage", "columnUniquenessPercentage" + ) + """ + Ratio indicating how unique data in this column is: 0 indicates that all values are the same, 100 indicates that all values in this column are unique. + """ # noqa: E501 + COLUMN_VARIANCE: ClassVar[NumericField] = NumericField( + "columnVariance", "columnVariance" + ) + """ + Calculated variance of the values in a numeric column. + """ + COLUMN_TOP_VALUES: ClassVar[KeywordField] = KeywordField( + "columnTopValues", "columnTopValues" + ) + """ + List of top values in this column. + """ + COLUMN_DEPTH_LEVEL: ClassVar[NumericField] = NumericField( + "columnDepthLevel", "columnDepthLevel" + ) + """ + Level of nesting of this column, used for STRUCT and NESTED columns. 
+ """ + + SNOWFLAKE_DYNAMIC_TABLE: ClassVar[RelationField] = RelationField( + "snowflakeDynamicTable" + ) + """ + TBC + """ + VIEW: ClassVar[RelationField] = RelationField("view") + """ + TBC + """ + NESTED_COLUMNS: ClassVar[RelationField] = RelationField("nestedColumns") + """ + TBC + """ + DATA_QUALITY_METRIC_DIMENSIONS: ClassVar[RelationField] = RelationField( + "dataQualityMetricDimensions" + ) + """ + TBC + """ + DBT_MODEL_COLUMNS: ClassVar[RelationField] = RelationField("dbtModelColumns") + """ + TBC + """ + TABLE: ClassVar[RelationField] = RelationField("table") + """ + TBC + """ + COLUMN_DBT_MODEL_COLUMNS: ClassVar[RelationField] = RelationField( + "columnDbtModelColumns" + ) + """ + TBC + """ + MATERIALISED_VIEW: ClassVar[RelationField] = RelationField("materialisedView") + """ + TBC + """ + PARENT_COLUMN: ClassVar[RelationField] = RelationField("parentColumn") + """ + TBC + """ + QUERIES: ClassVar[RelationField] = RelationField("queries") + """ + TBC + """ + METRIC_TIMESTAMPS: ClassVar[RelationField] = RelationField("metricTimestamps") + """ + TBC + """ + FOREIGN_KEY_TO: ClassVar[RelationField] = RelationField("foreignKeyTo") + """ + TBC + """ + FOREIGN_KEY_FROM: ClassVar[RelationField] = RelationField("foreignKeyFrom") + """ + TBC + """ + DBT_METRICS: ClassVar[RelationField] = RelationField("dbtMetrics") + """ + TBC + """ + TABLE_PARTITION: ClassVar[RelationField] = RelationField("tablePartition") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "data_type", + "sub_data_type", + "raw_data_type_definition", + "order", + "nested_column_count", + "is_partition", + "partition_order", + "is_clustered", + "is_primary", + "is_foreign", + "is_indexed", + "is_sort", + "is_dist", + "is_pinned", + "pinned_by", + "pinned_at", + "precision", + "default_value", + "is_nullable", + "numeric_scale", + "max_length", + "validations", + "parent_column_qualified_name", + "parent_column_name", + "column_distinct_values_count", + "column_distinct_values_count_long", + "column_histogram", + "column_max", + "column_min", + "column_mean", + "column_sum", + "column_median", + "column_standard_deviation", + "column_unique_values_count", + "column_unique_values_count_long", + "column_average", + "column_average_length", + "column_duplicate_values_count", + "column_duplicate_values_count_long", + "column_maximum_string_length", + "column_maxs", + "column_minimum_string_length", + "column_mins", + "column_missing_values_count", + "column_missing_values_count_long", + "column_missing_values_percentage", + "column_uniqueness_percentage", + "column_variance", + "column_top_values", + "column_depth_level", + "snowflake_dynamic_table", + "view", + "nested_columns", + "data_quality_metric_dimensions", + "dbt_model_columns", + "table", + "column_dbt_model_columns", + "materialised_view", + "parent_column", + "queries", + "metric_timestamps", + "foreign_key_to", + "foreign_key_from", + "dbt_metrics", + "table_partition", + ] + + @property + def data_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.data_type + + @data_type.setter + def data_type(self, data_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.data_type = data_type + + @property + def sub_data_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.sub_data_type + + @sub_data_type.setter + def sub_data_type(self, sub_data_type: Optional[str]): + if self.attributes is None: + self.attributes = 
self.Attributes() + self.attributes.sub_data_type = sub_data_type + + @property + def raw_data_type_definition(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.raw_data_type_definition + ) + + @raw_data_type_definition.setter + def raw_data_type_definition(self, raw_data_type_definition: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.raw_data_type_definition = raw_data_type_definition + + @property + def order(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.order + + @order.setter + def order(self, order: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.order = order + + @property + def nested_column_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.nested_column_count + + @nested_column_count.setter + def nested_column_count(self, nested_column_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.nested_column_count = nested_column_count + + @property + def is_partition(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_partition + + @is_partition.setter + def is_partition(self, is_partition: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_partition = is_partition + + @property + def partition_order(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.partition_order + + @partition_order.setter + def partition_order(self, partition_order: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.partition_order = partition_order + + @property + def is_clustered(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_clustered + + @is_clustered.setter + def is_clustered(self, is_clustered: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_clustered = is_clustered + + @property + def is_primary(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_primary + + @is_primary.setter + def is_primary(self, is_primary: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_primary = is_primary + + @property + def is_foreign(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_foreign + + @is_foreign.setter + def is_foreign(self, is_foreign: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_foreign = is_foreign + + @property + def is_indexed(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_indexed + + @is_indexed.setter + def is_indexed(self, is_indexed: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_indexed = is_indexed + + @property + def is_sort(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_sort + + @is_sort.setter + def is_sort(self, is_sort: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_sort = is_sort + + @property + def is_dist(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_dist + + @is_dist.setter + def is_dist(self, is_dist: 
Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_dist = is_dist + + @property + def is_pinned(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_pinned + + @is_pinned.setter + def is_pinned(self, is_pinned: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_pinned = is_pinned + + @property + def pinned_by(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.pinned_by + + @pinned_by.setter + def pinned_by(self, pinned_by: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.pinned_by = pinned_by + + @property + def pinned_at(self) -> Optional[datetime]: + return None if self.attributes is None else self.attributes.pinned_at + + @pinned_at.setter + def pinned_at(self, pinned_at: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.pinned_at = pinned_at + + @property + def precision(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.precision + + @precision.setter + def precision(self, precision: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.precision = precision + + @property + def default_value(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.default_value + + @default_value.setter + def default_value(self, default_value: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.default_value = default_value + + @property + def is_nullable(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_nullable + + @is_nullable.setter + def is_nullable(self, is_nullable: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_nullable = is_nullable + + @property + def numeric_scale(self) -> Optional[float]: + return None if self.attributes is None else self.attributes.numeric_scale + + @numeric_scale.setter + def numeric_scale(self, numeric_scale: Optional[float]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.numeric_scale = numeric_scale + + @property + def max_length(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.max_length + + @max_length.setter + def max_length(self, max_length: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.max_length = max_length + + @property + def validations(self) -> Optional[dict[str, str]]: + return None if self.attributes is None else self.attributes.validations + + @validations.setter + def validations(self, validations: Optional[dict[str, str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.validations = validations + + @property + def parent_column_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.parent_column_qualified_name + ) + + @parent_column_qualified_name.setter + def parent_column_qualified_name(self, parent_column_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.parent_column_qualified_name = parent_column_qualified_name + + @property + def parent_column_name(self) -> Optional[str]: + return None if self.attributes is None 
else self.attributes.parent_column_name + + @parent_column_name.setter + def parent_column_name(self, parent_column_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.parent_column_name = parent_column_name + + @property + def column_distinct_values_count(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.column_distinct_values_count + ) + + @column_distinct_values_count.setter + def column_distinct_values_count(self, column_distinct_values_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.column_distinct_values_count = column_distinct_values_count + + @property + def column_distinct_values_count_long(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.column_distinct_values_count_long + ) + + @column_distinct_values_count_long.setter + def column_distinct_values_count_long( + self, column_distinct_values_count_long: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.column_distinct_values_count_long = ( + column_distinct_values_count_long + ) + + @property + def column_histogram(self) -> Optional[Histogram]: + return None if self.attributes is None else self.attributes.column_histogram + + @column_histogram.setter + def column_histogram(self, column_histogram: Optional[Histogram]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.column_histogram = column_histogram + + @property + def column_max(self) -> Optional[float]: + return None if self.attributes is None else self.attributes.column_max + + @column_max.setter + def column_max(self, column_max: Optional[float]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.column_max = column_max + + @property + def column_min(self) -> Optional[float]: + return None if self.attributes is None else self.attributes.column_min + + @column_min.setter + def column_min(self, column_min: Optional[float]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.column_min = column_min + + @property + def column_mean(self) -> Optional[float]: + return None if self.attributes is None else self.attributes.column_mean + + @column_mean.setter + def column_mean(self, column_mean: Optional[float]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.column_mean = column_mean + + @property + def column_sum(self) -> Optional[float]: + return None if self.attributes is None else self.attributes.column_sum + + @column_sum.setter + def column_sum(self, column_sum: Optional[float]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.column_sum = column_sum + + @property + def column_median(self) -> Optional[float]: + return None if self.attributes is None else self.attributes.column_median + + @column_median.setter + def column_median(self, column_median: Optional[float]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.column_median = column_median + + @property + def column_standard_deviation(self) -> Optional[float]: + return ( + None + if self.attributes is None + else self.attributes.column_standard_deviation + ) + + @column_standard_deviation.setter + def column_standard_deviation(self, column_standard_deviation: Optional[float]): + if self.attributes is None: + self.attributes = self.Attributes() + 
self.attributes.column_standard_deviation = column_standard_deviation + + @property + def column_unique_values_count(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.column_unique_values_count + ) + + @column_unique_values_count.setter + def column_unique_values_count(self, column_unique_values_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.column_unique_values_count = column_unique_values_count + + @property + def column_unique_values_count_long(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.column_unique_values_count_long + ) + + @column_unique_values_count_long.setter + def column_unique_values_count_long( + self, column_unique_values_count_long: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.column_unique_values_count_long = ( + column_unique_values_count_long + ) + + @property + def column_average(self) -> Optional[float]: + return None if self.attributes is None else self.attributes.column_average + + @column_average.setter + def column_average(self, column_average: Optional[float]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.column_average = column_average + + @property + def column_average_length(self) -> Optional[float]: + return ( + None if self.attributes is None else self.attributes.column_average_length + ) + + @column_average_length.setter + def column_average_length(self, column_average_length: Optional[float]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.column_average_length = column_average_length + + @property + def column_duplicate_values_count(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.column_duplicate_values_count + ) + + @column_duplicate_values_count.setter + def column_duplicate_values_count( + self, column_duplicate_values_count: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.column_duplicate_values_count = column_duplicate_values_count + + @property + def column_duplicate_values_count_long(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.column_duplicate_values_count_long + ) + + @column_duplicate_values_count_long.setter + def column_duplicate_values_count_long( + self, column_duplicate_values_count_long: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.column_duplicate_values_count_long = ( + column_duplicate_values_count_long + ) + + @property + def column_maximum_string_length(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.column_maximum_string_length + ) + + @column_maximum_string_length.setter + def column_maximum_string_length(self, column_maximum_string_length: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.column_maximum_string_length = column_maximum_string_length + + @property + def column_maxs(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.column_maxs + + @column_maxs.setter + def column_maxs(self, column_maxs: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.column_maxs = column_maxs + + @property + def column_minimum_string_length(self) -> 
Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.column_minimum_string_length + ) + + @column_minimum_string_length.setter + def column_minimum_string_length(self, column_minimum_string_length: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.column_minimum_string_length = column_minimum_string_length + + @property + def column_mins(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.column_mins + + @column_mins.setter + def column_mins(self, column_mins: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.column_mins = column_mins + + @property + def column_missing_values_count(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.column_missing_values_count + ) + + @column_missing_values_count.setter + def column_missing_values_count(self, column_missing_values_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.column_missing_values_count = column_missing_values_count + + @property + def column_missing_values_count_long(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.column_missing_values_count_long + ) + + @column_missing_values_count_long.setter + def column_missing_values_count_long( + self, column_missing_values_count_long: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.column_missing_values_count_long = ( + column_missing_values_count_long + ) + + @property + def column_missing_values_percentage(self) -> Optional[float]: + return ( + None + if self.attributes is None + else self.attributes.column_missing_values_percentage + ) + + @column_missing_values_percentage.setter + def column_missing_values_percentage( + self, column_missing_values_percentage: Optional[float] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.column_missing_values_percentage = ( + column_missing_values_percentage + ) + + @property + def column_uniqueness_percentage(self) -> Optional[float]: + return ( + None + if self.attributes is None + else self.attributes.column_uniqueness_percentage + ) + + @column_uniqueness_percentage.setter + def column_uniqueness_percentage( + self, column_uniqueness_percentage: Optional[float] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.column_uniqueness_percentage = column_uniqueness_percentage + + @property + def column_variance(self) -> Optional[float]: + return None if self.attributes is None else self.attributes.column_variance + + @column_variance.setter + def column_variance(self, column_variance: Optional[float]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.column_variance = column_variance + + @property + def column_top_values(self) -> Optional[list[ColumnValueFrequencyMap]]: + return None if self.attributes is None else self.attributes.column_top_values + + @column_top_values.setter + def column_top_values( + self, column_top_values: Optional[list[ColumnValueFrequencyMap]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.column_top_values = column_top_values + + @property + def column_depth_level(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.column_depth_level + + 
@column_depth_level.setter + def column_depth_level(self, column_depth_level: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.column_depth_level = column_depth_level + + @property + def snowflake_dynamic_table(self) -> Optional[SnowflakeDynamicTable]: + return ( + None if self.attributes is None else self.attributes.snowflake_dynamic_table + ) + + @snowflake_dynamic_table.setter + def snowflake_dynamic_table( + self, snowflake_dynamic_table: Optional[SnowflakeDynamicTable] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.snowflake_dynamic_table = snowflake_dynamic_table + + @property + def view(self) -> Optional[View]: + return None if self.attributes is None else self.attributes.view + + @view.setter + def view(self, view: Optional[View]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.view = view + + @property + def nested_columns(self) -> Optional[list[Column]]: + return None if self.attributes is None else self.attributes.nested_columns + + @nested_columns.setter + def nested_columns(self, nested_columns: Optional[list[Column]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.nested_columns = nested_columns + + @property + def data_quality_metric_dimensions(self) -> Optional[list[Metric]]: + return ( + None + if self.attributes is None + else self.attributes.data_quality_metric_dimensions + ) + + @data_quality_metric_dimensions.setter + def data_quality_metric_dimensions( + self, data_quality_metric_dimensions: Optional[list[Metric]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.data_quality_metric_dimensions = data_quality_metric_dimensions + + @property + def dbt_model_columns(self) -> Optional[list[DbtModelColumn]]: + return None if self.attributes is None else self.attributes.dbt_model_columns + + @dbt_model_columns.setter + def dbt_model_columns(self, dbt_model_columns: Optional[list[DbtModelColumn]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_model_columns = dbt_model_columns + + @property + def table(self) -> Optional[Table]: + return None if self.attributes is None else self.attributes.table + + @table.setter + def table(self, table: Optional[Table]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.table = table + + @property + def column_dbt_model_columns(self) -> Optional[list[DbtModelColumn]]: + return ( + None + if self.attributes is None + else self.attributes.column_dbt_model_columns + ) + + @column_dbt_model_columns.setter + def column_dbt_model_columns( + self, column_dbt_model_columns: Optional[list[DbtModelColumn]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.column_dbt_model_columns = column_dbt_model_columns + + @property + def materialised_view(self) -> Optional[MaterialisedView]: + return None if self.attributes is None else self.attributes.materialised_view + + @materialised_view.setter + def materialised_view(self, materialised_view: Optional[MaterialisedView]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.materialised_view = materialised_view + + @property + def parent_column(self) -> Optional[Column]: + return None if self.attributes is None else self.attributes.parent_column + + @parent_column.setter + def parent_column(self, parent_column: Optional[Column]): + if 
self.attributes is None: + self.attributes = self.Attributes() + self.attributes.parent_column = parent_column + + @property + def queries(self) -> Optional[list[Query]]: + return None if self.attributes is None else self.attributes.queries + + @queries.setter + def queries(self, queries: Optional[list[Query]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.queries = queries + + @property + def metric_timestamps(self) -> Optional[list[Metric]]: + return None if self.attributes is None else self.attributes.metric_timestamps + + @metric_timestamps.setter + def metric_timestamps(self, metric_timestamps: Optional[list[Metric]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metric_timestamps = metric_timestamps + + @property + def foreign_key_to(self) -> Optional[list[Column]]: + return None if self.attributes is None else self.attributes.foreign_key_to + + @foreign_key_to.setter + def foreign_key_to(self, foreign_key_to: Optional[list[Column]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.foreign_key_to = foreign_key_to + + @property + def foreign_key_from(self) -> Optional[Column]: + return None if self.attributes is None else self.attributes.foreign_key_from + + @foreign_key_from.setter + def foreign_key_from(self, foreign_key_from: Optional[Column]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.foreign_key_from = foreign_key_from + + @property + def dbt_metrics(self) -> Optional[list[DbtMetric]]: + return None if self.attributes is None else self.attributes.dbt_metrics + + @dbt_metrics.setter + def dbt_metrics(self, dbt_metrics: Optional[list[DbtMetric]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_metrics = dbt_metrics + + @property + def table_partition(self) -> Optional[TablePartition]: + return None if self.attributes is None else self.attributes.table_partition + + @table_partition.setter + def table_partition(self, table_partition: Optional[TablePartition]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.table_partition = table_partition + + class Attributes(SQL.Attributes): + data_type: Optional[str] = Field(default=None, description="") + sub_data_type: Optional[str] = Field(default=None, description="") + raw_data_type_definition: Optional[str] = Field(default=None, description="") + order: Optional[int] = Field(default=None, description="") + nested_column_count: Optional[int] = Field(default=None, description="") + is_partition: Optional[bool] = Field(default=None, description="") + partition_order: Optional[int] = Field(default=None, description="") + is_clustered: Optional[bool] = Field(default=None, description="") + is_primary: Optional[bool] = Field(default=None, description="") + is_foreign: Optional[bool] = Field(default=None, description="") + is_indexed: Optional[bool] = Field(default=None, description="") + is_sort: Optional[bool] = Field(default=None, description="") + is_dist: Optional[bool] = Field(default=None, description="") + is_pinned: Optional[bool] = Field(default=None, description="") + pinned_by: Optional[str] = Field(default=None, description="") + pinned_at: Optional[datetime] = Field(default=None, description="") + precision: Optional[int] = Field(default=None, description="") + default_value: Optional[str] = Field(default=None, description="") + is_nullable: Optional[bool] = Field(default=None, 
description="") + numeric_scale: Optional[float] = Field(default=None, description="") + max_length: Optional[int] = Field(default=None, description="") + validations: Optional[dict[str, str]] = Field(default=None, description="") + parent_column_qualified_name: Optional[str] = Field( + default=None, description="" + ) + parent_column_name: Optional[str] = Field(default=None, description="") + column_distinct_values_count: Optional[int] = Field( + default=None, description="" + ) + column_distinct_values_count_long: Optional[int] = Field( + default=None, description="" + ) + column_histogram: Optional[Histogram] = Field(default=None, description="") + column_max: Optional[float] = Field(default=None, description="") + column_min: Optional[float] = Field(default=None, description="") + column_mean: Optional[float] = Field(default=None, description="") + column_sum: Optional[float] = Field(default=None, description="") + column_median: Optional[float] = Field(default=None, description="") + column_standard_deviation: Optional[float] = Field(default=None, description="") + column_unique_values_count: Optional[int] = Field(default=None, description="") + column_unique_values_count_long: Optional[int] = Field( + default=None, description="" + ) + column_average: Optional[float] = Field(default=None, description="") + column_average_length: Optional[float] = Field(default=None, description="") + column_duplicate_values_count: Optional[int] = Field( + default=None, description="" + ) + column_duplicate_values_count_long: Optional[int] = Field( + default=None, description="" + ) + column_maximum_string_length: Optional[int] = Field( + default=None, description="" + ) + column_maxs: Optional[set[str]] = Field(default=None, description="") + column_minimum_string_length: Optional[int] = Field( + default=None, description="" + ) + column_mins: Optional[set[str]] = Field(default=None, description="") + column_missing_values_count: Optional[int] = Field(default=None, description="") + column_missing_values_count_long: Optional[int] = Field( + default=None, description="" + ) + column_missing_values_percentage: Optional[float] = Field( + default=None, description="" + ) + column_uniqueness_percentage: Optional[float] = Field( + default=None, description="" + ) + column_variance: Optional[float] = Field(default=None, description="") + column_top_values: Optional[list[ColumnValueFrequencyMap]] = Field( + default=None, description="" + ) + column_depth_level: Optional[int] = Field(default=None, description="") + snowflake_dynamic_table: Optional[SnowflakeDynamicTable] = Field( + default=None, description="" + ) # relationship + view: Optional[View] = Field(default=None, description="") # relationship + nested_columns: Optional[list[Column]] = Field( + default=None, description="" + ) # relationship + data_quality_metric_dimensions: Optional[list[Metric]] = Field( + default=None, description="" + ) # relationship + dbt_model_columns: Optional[list[DbtModelColumn]] = Field( + default=None, description="" + ) # relationship + table: Optional[Table] = Field(default=None, description="") # relationship + column_dbt_model_columns: Optional[list[DbtModelColumn]] = Field( + default=None, description="" + ) # relationship + materialised_view: Optional[MaterialisedView] = Field( + default=None, description="" + ) # relationship + parent_column: Optional[Column] = Field( + default=None, description="" + ) # relationship + queries: Optional[list[Query]] = Field( + default=None, description="" + ) # relationship + 
metric_timestamps: Optional[list[Metric]] = Field( + default=None, description="" + ) # relationship + foreign_key_to: Optional[list[Column]] = Field( + default=None, description="" + ) # relationship + foreign_key_from: Optional[Column] = Field( + default=None, description="" + ) # relationship + dbt_metrics: Optional[list[DbtMetric]] = Field( + default=None, description="" + ) # relationship + table_partition: Optional[TablePartition] = Field( + default=None, description="" + ) # relationship + + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, *, name: str, parent_qualified_name: str, parent_type: type, order: int + ) -> Column.Attributes: + validate_required_fields( + ["name", "parent_qualified_name", "parent_type", "order"], + [name, parent_qualified_name, parent_type, order], + ) + fields = parent_qualified_name.split("/") + if len(fields) != 6: + raise ValueError("Invalid parent_qualified_name") + try: + connector_type = AtlanConnectorType(fields[1]) # type:ignore + except ValueError as e: + raise ValueError("Invalid parent_qualified_name") from e + if order < 0: + raise ValueError("Order must be a positive integer") + ret_value = Column.Attributes( + name=name, + qualified_name=f"{parent_qualified_name}/{name}", + connector_name=connector_type.value, + schema_name=fields[4], + schema_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}/{fields[3]}/{fields[4]}", + database_name=fields[3], + database_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}/{fields[3]}", + connection_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}", + order=order, + ) + if parent_type == Table: + ret_value.table_qualified_name = parent_qualified_name + ret_value.table = Table.ref_by_qualified_name(parent_qualified_name) + ret_value.table_name = fields[5] + elif parent_type == View: + ret_value.view_qualified_name = parent_qualified_name + ret_value.view = View.ref_by_qualified_name(parent_qualified_name) + ret_value.view_name = fields[5] + elif parent_type == MaterialisedView: + ret_value.view_qualified_name = parent_qualified_name + ret_value.materialised_view = MaterialisedView.ref_by_qualified_name( + parent_qualified_name + ) + ret_value.view_name = fields[5] + else: + raise ValueError( + "parent_type must be either Table, View or MaterialisedView" + ) + return ret_value + + attributes: "Column.Attributes" = Field( + default_factory=lambda: Column.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .dbt_metric import DbtMetric # noqa +from .dbt_model_column import DbtModelColumn # noqa +from .materialised_view import MaterialisedView # noqa +from .metric import Metric # noqa +from .query import Query # noqa +from .snowflake_dynamic_table import SnowflakeDynamicTable # noqa +from .table import Table # noqa +from .table_partition import TablePartition # noqa +from .view import View # noqa diff --git a/pyatlan/model/assets/column_process.py b/pyatlan/model/assets/column_process.py new file mode 100644 index 000000000..23dd83639 --- /dev/null +++ b/pyatlan/model/assets/column_process.py @@ -0,0 +1,152 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd.
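# Illustrative usage sketch for the Column.Attributes.create() classmethod shown
# above (not part of this diff; the identifier values are assumptions). The
# parent_qualified_name must have exactly six "/"-separated segments -- the
# three-part connection name, then database, schema, and the parent table/view --
# and order must not be negative.
from pyatlan.model.assets import Column, Table

column_attrs = Column.Attributes.create(
    name="CUSTOMER_ID",
    parent_qualified_name="default/snowflake/1700000000/ANALYTICS/SALES/ORDERS",
    parent_type=Table,  # Table, View or MaterialisedView
    order=1,
)
# column_attrs.qualified_name ends in ".../SALES/ORDERS/CUSTOMER_ID" and the table
# relationship is populated via Table.ref_by_qualified_name().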
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import RelationField +from pyatlan.utils import init_guid, validate_required_fields + +from .process import Process + + +class ColumnProcess(Process): + """Description""" + + @classmethod + @init_guid + def create( + cls, + name: str, + connection_qualified_name: str, + inputs: list["Catalog"], + outputs: list["Catalog"], + parent: Process, + process_id: Optional[str] = None, + ) -> ColumnProcess: + return ColumnProcess( + attributes=ColumnProcess.Attributes.create( + name=name, + connection_qualified_name=connection_qualified_name, + process_id=process_id, + inputs=inputs, + outputs=outputs, + parent=parent, + ) + ) + + type_name: str = Field(default="ColumnProcess", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "ColumnProcess": + raise ValueError("must be ColumnProcess") + return v + + def __setattr__(self, name, value): + if name in ColumnProcess._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + OUTPUTS: ClassVar[RelationField] = RelationField("outputs") + """ + Assets that are outputs from this process. + """ + PROCESS: ClassVar[RelationField] = RelationField("process") + """ + TBC + """ + INPUTS: ClassVar[RelationField] = RelationField("inputs") + """ + Assets that are inputs to this process. + """ + + _convenience_properties: ClassVar[list[str]] = [ + "outputs", + "process", + "inputs", + ] + + @property + def outputs(self) -> Optional[list[Catalog]]: + return None if self.attributes is None else self.attributes.outputs + + @outputs.setter + def outputs(self, outputs: Optional[list[Catalog]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.outputs = outputs + + @property + def process(self) -> Optional[Process]: + return None if self.attributes is None else self.attributes.process + + @process.setter + def process(self, process: Optional[Process]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.process = process + + @property + def inputs(self) -> Optional[list[Catalog]]: + return None if self.attributes is None else self.attributes.inputs + + @inputs.setter + def inputs(self, inputs: Optional[list[Catalog]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.inputs = inputs + + class Attributes(Process.Attributes): + outputs: Optional[list[Catalog]] = Field( + default=None, description="" + ) # relationship + process: Optional[Process] = Field(default=None, description="") # relationship + inputs: Optional[list[Catalog]] = Field( + default=None, description="" + ) # relationship + + @classmethod + @init_guid + def create( + cls, + name: str, + connection_qualified_name: str, + inputs: list["Catalog"], + outputs: list["Catalog"], + parent: Process, + process_id: Optional[str] = None, + ) -> ColumnProcess.Attributes: + validate_required_fields(["parent"], [parent]) + qualified_name = Process.Attributes.generate_qualified_name( + name=name, + connection_qualified_name=connection_qualified_name, + process_id=process_id, + inputs=inputs, + outputs=outputs, + parent=parent, + ) + connector_name = connection_qualified_name.split("/")[1] + return ColumnProcess.Attributes( + name=name, + qualified_name=qualified_name, + connector_name=connector_name, + connection_qualified_name=connection_qualified_name, + 
inputs=inputs, + outputs=outputs, + process=parent, + ) + + attributes: "ColumnProcess.Attributes" = Field( + default_factory=lambda: ColumnProcess.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .catalog import Catalog # noqa +from .process import Process # noqa diff --git a/pyatlan/model/assets/asset03.py b/pyatlan/model/assets/connection.py similarity index 89% rename from pyatlan/model/assets/asset03.py rename to pyatlan/model/assets/connection.py index 4325b6f95..e365b75c6 100644 --- a/pyatlan/model/assets/asset03.py +++ b/pyatlan/model/assets/connection.py @@ -7,13 +7,13 @@ from datetime import datetime from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.enums import AtlanConnectorType, QueryUsernameStrategy from pyatlan.model.fields.atlan_fields import BooleanField, KeywordField, NumericField from pyatlan.utils import init_guid, validate_required_fields -from .asset00 import Asset +from .asset import Asset class Connection(Asset, type_name="Connection"): @@ -47,7 +47,7 @@ def create( attr.admin_roles = set() if admin_roles is None else set(admin_roles) return cls(attributes=attr) - type_name: str = Field("Connection", allow_mutation=False) + type_name: str = Field(default="Connection", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -587,71 +587,51 @@ def vector_embeddings_updated_at( self.attributes.vector_embeddings_updated_at = vector_embeddings_updated_at class Attributes(Asset.Attributes): - category: Optional[str] = Field(None, description="", alias="category") - sub_category: Optional[str] = Field(None, description="", alias="subCategory") - host: Optional[str] = Field(None, description="", alias="host") - port: Optional[int] = Field(None, description="", alias="port") - allow_query: Optional[bool] = Field(None, description="", alias="allowQuery") - allow_query_preview: Optional[bool] = Field( - None, description="", alias="allowQueryPreview" - ) + category: Optional[str] = Field(default=None, description="") + sub_category: Optional[str] = Field(default=None, description="") + host: Optional[str] = Field(default=None, description="") + port: Optional[int] = Field(default=None, description="") + allow_query: Optional[bool] = Field(default=None, description="") + allow_query_preview: Optional[bool] = Field(default=None, description="") query_preview_config: Optional[dict[str, str]] = Field( - None, description="", alias="queryPreviewConfig" - ) - query_config: Optional[str] = Field(None, description="", alias="queryConfig") - credential_strategy: Optional[str] = Field( - None, description="", alias="credentialStrategy" - ) - preview_credential_strategy: Optional[str] = Field( - None, description="", alias="previewCredentialStrategy" - ) - policy_strategy: Optional[str] = Field( - None, description="", alias="policyStrategy" + default=None, description="" ) + query_config: Optional[str] = Field(default=None, description="") + credential_strategy: Optional[str] = Field(default=None, description="") + preview_credential_strategy: Optional[str] = Field(default=None, description="") + policy_strategy: Optional[str] = Field(default=None, description="") policy_strategy_for_sample_preview: Optional[str] = Field( - None, description="", alias="policyStrategyForSamplePreview" + default=None, description="" ) 
query_username_strategy: Optional[QueryUsernameStrategy] = Field( - None, description="", alias="queryUsernameStrategy" - ) - row_limit: Optional[int] = Field(None, description="", alias="rowLimit") - query_timeout: Optional[int] = Field(None, description="", alias="queryTimeout") - default_credential_guid: Optional[str] = Field( - None, description="", alias="defaultCredentialGuid" - ) - connector_icon: Optional[str] = Field( - None, description="", alias="connectorIcon" - ) - connector_image: Optional[str] = Field( - None, description="", alias="connectorImage" - ) - source_logo: Optional[str] = Field(None, description="", alias="sourceLogo") + default=None, description="" + ) + row_limit: Optional[int] = Field(default=None, description="") + query_timeout: Optional[int] = Field(default=None, description="") + default_credential_guid: Optional[str] = Field(default=None, description="") + connector_icon: Optional[str] = Field(default=None, description="") + connector_image: Optional[str] = Field(default=None, description="") + source_logo: Optional[str] = Field(default=None, description="") is_sample_data_preview_enabled: Optional[bool] = Field( - None, description="", alias="isSampleDataPreviewEnabled" + default=None, description="" ) popularity_insights_timeframe: Optional[int] = Field( - None, description="", alias="popularityInsightsTimeframe" - ) - has_popularity_insights: Optional[bool] = Field( - None, description="", alias="hasPopularityInsights" + default=None, description="" ) + has_popularity_insights: Optional[bool] = Field(default=None, description="") connection_dbt_environments: Optional[set[str]] = Field( - None, description="", alias="connectionDbtEnvironments" + default=None, description="" ) connection_s_s_o_credential_guid: Optional[str] = Field( - None, description="", alias="connectionSSOCredentialGuid" - ) - use_object_storage: Optional[bool] = Field( - None, description="", alias="useObjectStorage" + default=None, description="" ) + use_object_storage: Optional[bool] = Field(default=None, description="") object_storage_upload_threshold: Optional[int] = Field( - None, description="", alias="objectStorageUploadThreshold" - ) - vector_embeddings_enabled: Optional[bool] = Field( - None, description="", alias="vectorEmbeddingsEnabled" + default=None, description="" ) + vector_embeddings_enabled: Optional[bool] = Field(default=None, description="") vector_embeddings_updated_at: Optional[datetime] = Field( - None, description="", alias="vectorEmbeddingsUpdatedAt" + default=None, description="" ) is_loaded: bool = Field(default=True) @@ -685,6 +665,3 @@ def admin_groups_valid(cls, admin_groups, values): description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) - - -Connection.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/data_domain.py b/pyatlan/model/assets/data_domain.py new file mode 100644 index 000000000..93c032f23 --- /dev/null +++ b/pyatlan/model/assets/data_domain.py @@ -0,0 +1,169 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
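# The hunks above drop the per-field alias="camelCase" arguments in favour of plain
# Field(default=None, description=""). A minimal pydantic.v1 sketch of that pattern,
# assuming the camelCase aliases are instead produced centrally by an alias
# generator on the shared model Config (an assumption for illustration only; the
# base-class change is not shown in these hunks):
from typing import Optional

from pydantic.v1 import BaseModel, Field


def to_camel(snake: str) -> str:
    head, *rest = snake.split("_")
    return head + "".join(part.capitalize() for part in rest)


class ExampleAttributes(BaseModel):
    class Config:
        alias_generator = to_camel
        allow_population_by_field_name = True

    default_credential_guid: Optional[str] = Field(default=None, description="")


# Population by alias still works without repeating alias= on every field:
assert ExampleAttributes(defaultCredentialGuid="abc").default_credential_guid == "abc"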
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, StrictStr, validator + +from pyatlan.model.enums import AtlanIcon +from pyatlan.model.fields.atlan_fields import RelationField +from pyatlan.utils import init_guid, to_camel_case, validate_required_fields + +from .asset import SelfAsset +from .data_mesh import DataMesh + + +class DataDomain(DataMesh): + """Description""" + + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, + *, + name: StrictStr, + icon: Optional[AtlanIcon] = None, + parent_domain: Optional[DataDomain] = None, + parent_domain_qualified_name: Optional[StrictStr] = None, + ) -> DataDomain: + validate_required_fields(["name"], [name]) + attributes = DataDomain.Attributes.create( + name=name, + icon=icon, + parent_domain=parent_domain, + parent_domain_qualified_name=parent_domain_qualified_name, + ) + return cls(attributes=attributes) + + @classmethod + def create_for_modification( + cls: type[SelfAsset], + qualified_name: str = "", + name: str = "", + ) -> SelfAsset: + validate_required_fields(["name", "qualified_name"], [name, qualified_name]) + # Split the data domain qualified_name to extract data mesh info + fields = qualified_name.split("/") + # for domain and subdomain + if len(fields) not in (3, 5): + raise ValueError(f"Invalid data domain qualified_name: {qualified_name}") + return cls( + attributes=cls.Attributes( + qualified_name=qualified_name, + name=name, + ) + ) + + type_name: str = Field(default="DataDomain", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "DataDomain": + raise ValueError("must be DataDomain") + return v + + def __setattr__(self, name, value): + if name in DataDomain._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + DATA_PRODUCTS: ClassVar[RelationField] = RelationField("dataProducts") + """ + TBC + """ + PARENT_DOMAIN: ClassVar[RelationField] = RelationField("parentDomain") + """ + TBC + """ + SUB_DOMAINS: ClassVar[RelationField] = RelationField("subDomains") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "data_products", + "parent_domain", + "sub_domains", + ] + + @property + def data_products(self) -> Optional[list[DataProduct]]: + return None if self.attributes is None else self.attributes.data_products + + @data_products.setter + def data_products(self, data_products: Optional[list[DataProduct]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.data_products = data_products + + @property + def parent_domain(self) -> Optional[DataDomain]: + return None if self.attributes is None else self.attributes.parent_domain + + @parent_domain.setter + def parent_domain(self, parent_domain: Optional[DataDomain]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.parent_domain = parent_domain + + @property + def sub_domains(self) -> Optional[list[DataDomain]]: + return None if self.attributes is None else self.attributes.sub_domains + + @sub_domains.setter + def sub_domains(self, sub_domains: Optional[list[DataDomain]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sub_domains = sub_domains + + class Attributes(DataMesh.Attributes): + data_products: Optional[list[DataProduct]] = Field( + default=None, description="" + ) # relationship + parent_domain: Optional[DataDomain] = Field( + default=None, description="" + ) # 
relationship + sub_domains: Optional[list[DataDomain]] = Field( + default=None, description="" + ) # relationship + + @classmethod + @init_guid + def create( + cls, + *, + name: StrictStr, + icon: Optional[AtlanIcon] = None, + parent_domain: Optional[DataDomain] = None, + parent_domain_qualified_name: Optional[StrictStr] = None, + ) -> DataDomain.Attributes: + validate_required_fields(["name"], [name]) + mesh_name = to_camel_case(name) + qualified_name = f"default/domain/{mesh_name}" + # If "qualified name" of the parent domain is specified + if parent_domain_qualified_name: + parent_domain = DataDomain() + parent_domain.unique_attributes = { + "qualifiedName": parent_domain_qualified_name + } + qualified_name = f"{parent_domain_qualified_name}/domain/{mesh_name}" + icon_str = icon.value if icon is not None else None + return DataDomain.Attributes( + name=name, + parent_domain=parent_domain, + qualified_name=qualified_name, + asset_icon=icon_str, + ) + + attributes: "DataDomain.Attributes" = Field( + default_factory=lambda: DataDomain.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .data_product import DataProduct # noqa diff --git a/pyatlan/model/assets/data_mesh.py b/pyatlan/model/assets/data_mesh.py new file mode 100644 index 000000000..ce6de3347 --- /dev/null +++ b/pyatlan/model/assets/data_mesh.py @@ -0,0 +1,92 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordTextField + +from .catalog import Catalog + + +class DataMesh(Catalog): + """Description""" + + type_name: str = Field(default="DataMesh", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "DataMesh": + raise ValueError("must be DataMesh") + return v + + def __setattr__(self, name, value): + if name in DataMesh._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + PARENT_DOMAIN_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "parentDomainQualifiedName", + "parentDomainQualifiedName", + "parentDomainQualifiedName.text", + ) + """ + Unique name of the parent domain in which this asset exists. + """ + SUPER_DOMAIN_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "superDomainQualifiedName", + "superDomainQualifiedName", + "superDomainQualifiedName.text", + ) + """ + Unique name of the top-level domain in which this asset exists. 
+ """ + + _convenience_properties: ClassVar[list[str]] = [ + "parent_domain_qualified_name", + "super_domain_qualified_name", + ] + + @property + def parent_domain_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.parent_domain_qualified_name + ) + + @parent_domain_qualified_name.setter + def parent_domain_qualified_name(self, parent_domain_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.parent_domain_qualified_name = parent_domain_qualified_name + + @property + def super_domain_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.super_domain_qualified_name + ) + + @super_domain_qualified_name.setter + def super_domain_qualified_name(self, super_domain_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.super_domain_qualified_name = super_domain_qualified_name + + class Attributes(Catalog.Attributes): + parent_domain_qualified_name: Optional[str] = Field( + default=None, description="" + ) + super_domain_qualified_name: Optional[str] = Field(default=None, description="") + + attributes: "DataMesh.Attributes" = Field( + default_factory=lambda: DataMesh.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) diff --git a/pyatlan/model/assets/data_product.py b/pyatlan/model/assets/data_product.py new file mode 100644 index 000000000..85d9ddbd6 --- /dev/null +++ b/pyatlan/model/assets/data_product.py @@ -0,0 +1,314 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, StrictStr, validator + +from pyatlan.model.enums import ( + AtlanIcon, + DataProductCriticality, + DataProductSensitivity, + DataProductStatus, +) +from pyatlan.model.fields.atlan_fields import KeywordField, NumericField, RelationField +from pyatlan.model.search import IndexSearchRequest +from pyatlan.utils import ( + init_guid, + to_camel_case, + validate_required_fields, + validate_single_required_field, +) + +from .asset import SelfAsset +from .data_mesh import DataMesh + + +class DataProduct(DataMesh): + """Description""" + + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, + *, + name: StrictStr, + assets: IndexSearchRequest, + icon: Optional[AtlanIcon] = None, + domain: Optional[DataDomain] = None, + domain_qualified_name: Optional[StrictStr] = None, + ) -> DataProduct: + validate_required_fields(["name", "assets"], [name, assets]) + assets_dsl = assets.get_dsl_str() + attributes = DataProduct.Attributes.create( + name=name, + assets_dsl=assets_dsl, + icon=icon, + domain=domain, + domain_qualified_name=domain_qualified_name, + ) + return cls(attributes=attributes) + + @classmethod + def create_for_modification( + cls: type[SelfAsset], + qualified_name: str = "", + name: str = "", + ) -> SelfAsset: + validate_required_fields( + ["name", "qualified_name"], + [name, qualified_name], + ) + # Split the data product qualified_name to extract data mesh info + fields = qualified_name.split("/") + if len(fields) != 3: + raise ValueError(f"Invalid data product qualified_name: {qualified_name}") + return cls( + attributes=cls.Attributes( + qualified_name=qualified_name, + name=name, + ) + ) + + type_name: str = Field(default="DataProduct", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "DataProduct": + raise ValueError("must be DataProduct") + return v + + def __setattr__(self, name, value): + if name in DataProduct._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + DATA_PRODUCT_STATUS: ClassVar[KeywordField] = KeywordField( + "dataProductStatus", "dataProductStatus" + ) + """ + Status of this data product. + """ + DATA_PRODUCT_CRITICALITY: ClassVar[KeywordField] = KeywordField( + "dataProductCriticality", "dataProductCriticality" + ) + """ + Criticality of this data product. + """ + DATA_PRODUCT_SENSITIVITY: ClassVar[KeywordField] = KeywordField( + "dataProductSensitivity", "dataProductSensitivity" + ) + """ + Information sensitivity of this data product. + """ + DATA_PRODUCT_ASSETS_DSL: ClassVar[KeywordField] = KeywordField( + "dataProductAssetsDSL", "dataProductAssetsDSL" + ) + """ + Search DSL used to define which assets are part of this data product. + """ + DATA_PRODUCT_ASSETS_PLAYBOOK_FILTER: ClassVar[KeywordField] = KeywordField( + "dataProductAssetsPlaybookFilter", "dataProductAssetsPlaybookFilter" + ) + """ + Playbook filter to define which assets are part of this data product. + """ + DATA_PRODUCT_SCORE_VALUE: ClassVar[NumericField] = NumericField( + "dataProductScoreValue", "dataProductScoreValue" + ) + """ + Score of this data product. 
+ """ + + DATA_DOMAIN: ClassVar[RelationField] = RelationField("dataDomain") + """ + TBC + """ + OUTPUT_PORTS: ClassVar[RelationField] = RelationField("outputPorts") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "data_product_status", + "data_product_criticality", + "data_product_sensitivity", + "data_product_assets_d_s_l", + "data_product_assets_playbook_filter", + "data_product_score_value", + "data_domain", + "output_ports", + ] + + @property + def data_product_status(self) -> Optional[DataProductStatus]: + return None if self.attributes is None else self.attributes.data_product_status + + @data_product_status.setter + def data_product_status(self, data_product_status: Optional[DataProductStatus]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.data_product_status = data_product_status + + @property + def data_product_criticality(self) -> Optional[DataProductCriticality]: + return ( + None + if self.attributes is None + else self.attributes.data_product_criticality + ) + + @data_product_criticality.setter + def data_product_criticality( + self, data_product_criticality: Optional[DataProductCriticality] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.data_product_criticality = data_product_criticality + + @property + def data_product_sensitivity(self) -> Optional[DataProductSensitivity]: + return ( + None + if self.attributes is None + else self.attributes.data_product_sensitivity + ) + + @data_product_sensitivity.setter + def data_product_sensitivity( + self, data_product_sensitivity: Optional[DataProductSensitivity] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.data_product_sensitivity = data_product_sensitivity + + @property + def data_product_assets_d_s_l(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.data_product_assets_d_s_l + ) + + @data_product_assets_d_s_l.setter + def data_product_assets_d_s_l(self, data_product_assets_d_s_l: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.data_product_assets_d_s_l = data_product_assets_d_s_l + + @property + def data_product_assets_playbook_filter(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.data_product_assets_playbook_filter + ) + + @data_product_assets_playbook_filter.setter + def data_product_assets_playbook_filter( + self, data_product_assets_playbook_filter: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.data_product_assets_playbook_filter = ( + data_product_assets_playbook_filter + ) + + @property + def data_product_score_value(self) -> Optional[float]: + return ( + None + if self.attributes is None + else self.attributes.data_product_score_value + ) + + @data_product_score_value.setter + def data_product_score_value(self, data_product_score_value: Optional[float]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.data_product_score_value = data_product_score_value + + @property + def data_domain(self) -> Optional[DataDomain]: + return None if self.attributes is None else self.attributes.data_domain + + @data_domain.setter + def data_domain(self, data_domain: Optional[DataDomain]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.data_domain = data_domain + + @property + def output_ports(self) -> 
Optional[list[Asset]]: + return None if self.attributes is None else self.attributes.output_ports + + @output_ports.setter + def output_ports(self, output_ports: Optional[list[Asset]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.output_ports = output_ports + + class Attributes(DataMesh.Attributes): + data_product_status: Optional[DataProductStatus] = Field( + default=None, description="" + ) + data_product_criticality: Optional[DataProductCriticality] = Field( + default=None, description="" + ) + data_product_sensitivity: Optional[DataProductSensitivity] = Field( + default=None, description="" + ) + data_product_assets_d_s_l: Optional[str] = Field(default=None, description="") + data_product_assets_playbook_filter: Optional[str] = Field( + default=None, description="" + ) + data_product_score_value: Optional[float] = Field(default=None, description="") + data_domain: Optional[DataDomain] = Field( + default=None, description="" + ) # relationship + output_ports: Optional[list[Asset]] = Field( + default=None, description="" + ) # relationship + + @classmethod + @init_guid + def create( + cls, + *, + name: StrictStr, + assets_dsl: StrictStr, + icon: Optional[AtlanIcon] = None, + domain: Optional[DataDomain] = None, + domain_qualified_name: Optional[StrictStr] = None, + ) -> DataProduct.Attributes: + validate_required_fields(["name"], [name]) + validate_single_required_field( + ["domain", "domain_qualified_name"], + [domain, domain_qualified_name], + ) + if domain_qualified_name: + domain = DataDomain() + domain.unique_attributes = {"qualifiedName": domain_qualified_name} + icon_str = icon.value if icon is not None else None + camel_case_name = to_camel_case(name) + return DataProduct.Attributes( + name=name, + data_product_assets_d_s_l=assets_dsl, + data_domain=domain, + qualified_name=f"default/product/{camel_case_name}", + asset_icon=icon_str, + ) + + attributes: "DataProduct.Attributes" = Field( + default_factory=lambda: DataProduct.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .asset import Asset # noqa +from .data_domain import DataDomain # noqa diff --git a/pyatlan/model/assets/data_quality.py b/pyatlan/model/assets/data_quality.py new file mode 100644 index 000000000..4306b351a --- /dev/null +++ b/pyatlan/model/assets/data_quality.py @@ -0,0 +1,30 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
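# Illustrative usage sketch for DataProduct.create defined above (not part of this
# diff). "asset_selection" is a hypothetical placeholder for an IndexSearchRequest
# built elsewhere; exactly one of domain / domain_qualified_name must be supplied
# (validate_single_required_field), and the request's DSL string is stored on the
# product as data_product_assets_d_s_l.
from pyatlan.model.assets import DataProduct

product = DataProduct.create(
    name="Customer 360",
    assets=asset_selection,  # IndexSearchRequest selecting the member assets
    domain_qualified_name="default/domain/marketing",
)
# product.qualified_name becomes "default/product/<camel-cased name>".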
+ + +from __future__ import annotations + +from typing import ClassVar + +from pydantic.v1 import Field, validator + +from .catalog import Catalog + + +class DataQuality(Catalog): + """Description""" + + type_name: str = Field(default="DataQuality", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "DataQuality": + raise ValueError("must be DataQuality") + return v + + def __setattr__(self, name, value): + if name in DataQuality._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + _convenience_properties: ClassVar[list[str]] = [] diff --git a/pyatlan/model/assets/asset01.py b/pyatlan/model/assets/data_set.py similarity index 78% rename from pyatlan/model/assets/asset01.py rename to pyatlan/model/assets/data_set.py index ed604831e..038d97194 100644 --- a/pyatlan/model/assets/asset01.py +++ b/pyatlan/model/assets/data_set.py @@ -6,15 +6,15 @@ from typing import ClassVar -from pydantic import Field, validator +from pydantic.v1 import Field, validator -from .asset00 import Asset +from .asset import Asset class DataSet(Asset, type_name="DataSet"): """Description""" - type_name: str = Field("DataSet", allow_mutation=False) + type_name: str = Field(default="DataSet", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -28,6 +28,3 @@ def __setattr__(self, name, value): super().__setattr__(name, value) _convenience_properties: ClassVar[list[str]] = [] - - -DataSet.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset47.py b/pyatlan/model/assets/data_studio.py similarity index 86% rename from pyatlan/model/assets/asset47.py rename to pyatlan/model/assets/data_studio.py index 63be78765..90f9365a5 100644 --- a/pyatlan/model/assets/asset47.py +++ b/pyatlan/model/assets/data_studio.py @@ -6,7 +6,7 @@ from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.fields.atlan_fields import ( KeywordField, @@ -16,14 +16,13 @@ ) from pyatlan.model.structs import GoogleLabel, GoogleTag -from .asset00 import AirflowTask, Process -from .asset31 import Google +from .google import Google class DataStudio(Google): """Description""" - type_name: str = Field("DataStudio", allow_mutation=False) + type_name: str = Field(default="DataStudio", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -254,41 +253,25 @@ def output_from_processes(self, output_from_processes: Optional[list[Process]]): self.attributes.output_from_processes = output_from_processes class Attributes(Google.Attributes): - google_service: Optional[str] = Field( - None, description="", alias="googleService" - ) - google_project_name: Optional[str] = Field( - None, description="", alias="googleProjectName" - ) - google_project_id: Optional[str] = Field( - None, description="", alias="googleProjectId" - ) - google_project_number: Optional[int] = Field( - None, description="", alias="googleProjectNumber" - ) - google_location: Optional[str] = Field( - None, description="", alias="googleLocation" - ) - google_location_type: Optional[str] = Field( - None, description="", alias="googleLocationType" - ) - google_labels: Optional[list[GoogleLabel]] = Field( - None, description="", alias="googleLabels" - ) - google_tags: Optional[list[GoogleTag]] = Field( - None, description="", alias="googleTags" - ) + google_service: Optional[str] = Field(default=None, description="") + google_project_name: Optional[str] = 
Field(default=None, description="") + google_project_id: Optional[str] = Field(default=None, description="") + google_project_number: Optional[int] = Field(default=None, description="") + google_location: Optional[str] = Field(default=None, description="") + google_location_type: Optional[str] = Field(default=None, description="") + google_labels: Optional[list[GoogleLabel]] = Field(default=None, description="") + google_tags: Optional[list[GoogleTag]] = Field(default=None, description="") input_to_processes: Optional[list[Process]] = Field( - None, description="", alias="inputToProcesses" + default=None, description="" ) # relationship output_from_airflow_tasks: Optional[list[AirflowTask]] = Field( - None, description="", alias="outputFromAirflowTasks" + default=None, description="" ) # relationship input_to_airflow_tasks: Optional[list[AirflowTask]] = Field( - None, description="", alias="inputToAirflowTasks" + default=None, description="" ) # relationship output_from_processes: Optional[list[Process]] = Field( - None, description="", alias="outputFromProcesses" + default=None, description="" ) # relationship attributes: "DataStudio.Attributes" = Field( @@ -298,4 +281,5 @@ class Attributes(Google.Attributes): ) -DataStudio.Attributes.update_forward_refs() +from .airflow_task import AirflowTask # noqa +from .process import Process # noqa diff --git a/pyatlan/model/assets/asset61.py b/pyatlan/model/assets/data_studio_asset.py similarity index 88% rename from pyatlan/model/assets/asset61.py rename to pyatlan/model/assets/data_studio_asset.py index 3b7b5d790..66f427a9f 100644 --- a/pyatlan/model/assets/asset61.py +++ b/pyatlan/model/assets/data_studio_asset.py @@ -7,7 +7,7 @@ import uuid from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.enums import AtlanConnectorType, GoogleDatastudioAssetType from pyatlan.model.fields.atlan_fields import ( @@ -20,7 +20,7 @@ from pyatlan.model.structs import GoogleLabel, GoogleTag from pyatlan.utils import init_guid, validate_required_fields -from .asset47 import DataStudio +from .data_studio import DataStudio class DataStudioAsset(DataStudio): @@ -51,7 +51,7 @@ def create( ) return cls(attributes=attributes) - type_name: str = Field("DataStudioAsset", allow_mutation=False) + type_name: str = Field(default="DataStudioAsset", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -291,41 +291,21 @@ def google_tags(self, google_tags: Optional[list[GoogleTag]]): class Attributes(DataStudio.Attributes): data_studio_asset_type: Optional[GoogleDatastudioAssetType] = Field( - None, description="", alias="dataStudioAssetType" - ) - data_studio_asset_title: Optional[str] = Field( - None, description="", alias="dataStudioAssetTitle" - ) - data_studio_asset_owner: Optional[str] = Field( - None, description="", alias="dataStudioAssetOwner" + default=None, description="" ) + data_studio_asset_title: Optional[str] = Field(default=None, description="") + data_studio_asset_owner: Optional[str] = Field(default=None, description="") is_trashed_data_studio_asset: Optional[bool] = Field( - None, description="", alias="isTrashedDataStudioAsset" - ) - google_service: Optional[str] = Field( - None, description="", alias="googleService" - ) - google_project_name: Optional[str] = Field( - None, description="", alias="googleProjectName" - ) - google_project_id: Optional[str] = Field( - None, description="", alias="googleProjectId" - ) - google_project_number: 
Optional[int] = Field( - None, description="", alias="googleProjectNumber" - ) - google_location: Optional[str] = Field( - None, description="", alias="googleLocation" - ) - google_location_type: Optional[str] = Field( - None, description="", alias="googleLocationType" - ) - google_labels: Optional[list[GoogleLabel]] = Field( - None, description="", alias="googleLabels" - ) - google_tags: Optional[list[GoogleTag]] = Field( - None, description="", alias="googleTags" + default=None, description="" ) + google_service: Optional[str] = Field(default=None, description="") + google_project_name: Optional[str] = Field(default=None, description="") + google_project_id: Optional[str] = Field(default=None, description="") + google_project_number: Optional[int] = Field(default=None, description="") + google_location: Optional[str] = Field(default=None, description="") + google_location_type: Optional[str] = Field(default=None, description="") + google_labels: Optional[list[GoogleLabel]] = Field(default=None, description="") + google_tags: Optional[list[GoogleTag]] = Field(default=None, description="") @classmethod # @validate_arguments() @@ -366,6 +346,3 @@ def create( description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) - - -DataStudioAsset.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/database.py b/pyatlan/model/assets/database.py new file mode 100644 index 000000000..03d05c758 --- /dev/null +++ b/pyatlan/model/assets/database.py @@ -0,0 +1,127 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.enums import AtlanConnectorType +from pyatlan.model.fields.atlan_fields import NumericField, RelationField +from pyatlan.utils import init_guid, validate_required_fields + +from .s_q_l import SQL + + +class Database(SQL): + """Description""" + + @classmethod + # @validate_arguments() + @init_guid + def create(cls, *, name: str, connection_qualified_name: str) -> Database: + validate_required_fields( + ["name", "connection_qualified_name"], [name, connection_qualified_name] + ) + fields = connection_qualified_name.split("/") + if len(fields) != 3: + raise ValueError("Invalid connection_qualified_name") + try: + connector_type = AtlanConnectorType(fields[1]) # type:ignore + except ValueError as e: + raise ValueError("Invalid connection_qualified_name") from e + attributes = Database.Attributes( + name=name, + connection_qualified_name=connection_qualified_name, + qualified_name=f"{connection_qualified_name}/{name}", + connector_name=connector_type.value, + ) + return cls(attributes=attributes) + + type_name: str = Field(default="Database", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "Database": + raise ValueError("must be Database") + return v + + def __setattr__(self, name, value): + if name in Database._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + SCHEMA_COUNT: ClassVar[NumericField] = NumericField("schemaCount", "schemaCount") + """ + Number of schemas in this database. 
+ """ + + SCHEMAS: ClassVar[RelationField] = RelationField("schemas") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "schema_count", + "schemas", + ] + + @property + def schema_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.schema_count + + @schema_count.setter + def schema_count(self, schema_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.schema_count = schema_count + + @property + def schemas(self) -> Optional[list[Schema]]: + return None if self.attributes is None else self.attributes.schemas + + @schemas.setter + def schemas(self, schemas: Optional[list[Schema]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.schemas = schemas + + class Attributes(SQL.Attributes): + schema_count: Optional[int] = Field(default=None, description="") + schemas: Optional[list[Schema]] = Field( + default=None, description="" + ) # relationship + + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, name: str, connection_qualified_name: str + ) -> Database.Attributes: + validate_required_fields( + ["name", "connection_qualified_name"], [name, connection_qualified_name] + ) + fields = connection_qualified_name.split("/") + if len(fields) != 3: + raise ValueError("Invalid connection_qualified_name") + try: + connector_type = AtlanConnectorType(fields[1]) # type:ignore + except ValueError as e: + raise ValueError("Invalid connection_qualified_name") from e + return Database.Attributes( + name=name, + connection_qualified_name=connection_qualified_name, + qualified_name=f"{connection_qualified_name}/{name}", + connector_name=connector_type.value, + ) + + attributes: "Database.Attributes" = Field( + default_factory=lambda: Database.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .schema import Schema # noqa diff --git a/pyatlan/model/assets/dbt.py b/pyatlan/model/assets/dbt.py new file mode 100644 index 000000000..335e5d845 --- /dev/null +++ b/pyatlan/model/assets/dbt.py @@ -0,0 +1,399 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
+ + +from __future__ import annotations + +from datetime import datetime +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import ( + KeywordField, + KeywordTextField, + NumericField, +) + +from .catalog import Catalog + + +class Dbt(Catalog): + """Description""" + + type_name: str = Field(default="Dbt", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "Dbt": + raise ValueError("must be Dbt") + return v + + def __setattr__(self, name, value): + if name in Dbt._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + DBT_ALIAS: ClassVar[KeywordTextField] = KeywordTextField( + "dbtAlias", "dbtAlias.keyword", "dbtAlias" + ) + """ + + """ + DBT_META: ClassVar[KeywordField] = KeywordField("dbtMeta", "dbtMeta") + """ + + """ + DBT_UNIQUE_ID: ClassVar[KeywordTextField] = KeywordTextField( + "dbtUniqueId", "dbtUniqueId.keyword", "dbtUniqueId" + ) + """ + + """ + DBT_ACCOUNT_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "dbtAccountName", "dbtAccountName.keyword", "dbtAccountName" + ) + """ + + """ + DBT_PROJECT_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "dbtProjectName", "dbtProjectName.keyword", "dbtProjectName" + ) + """ + + """ + DBT_PACKAGE_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "dbtPackageName", "dbtPackageName.keyword", "dbtPackageName" + ) + """ + + """ + DBT_JOB_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "dbtJobName", "dbtJobName.keyword", "dbtJobName" + ) + """ + + """ + DBT_JOB_SCHEDULE: ClassVar[KeywordField] = KeywordField( + "dbtJobSchedule", "dbtJobSchedule" + ) + """ + + """ + DBT_JOB_STATUS: ClassVar[KeywordField] = KeywordField( + "dbtJobStatus", "dbtJobStatus" + ) + """ + + """ + DBT_JOB_SCHEDULE_CRON_HUMANIZED: ClassVar[KeywordTextField] = KeywordTextField( + "dbtJobScheduleCronHumanized", + "dbtJobScheduleCronHumanized.keyword", + "dbtJobScheduleCronHumanized", + ) + """ + + """ + DBT_JOB_LAST_RUN: ClassVar[NumericField] = NumericField( + "dbtJobLastRun", "dbtJobLastRun" + ) + """ + + """ + DBT_JOB_NEXT_RUN: ClassVar[NumericField] = NumericField( + "dbtJobNextRun", "dbtJobNextRun" + ) + """ + + """ + DBT_JOB_NEXT_RUN_HUMANIZED: ClassVar[KeywordTextField] = KeywordTextField( + "dbtJobNextRunHumanized", + "dbtJobNextRunHumanized.keyword", + "dbtJobNextRunHumanized", + ) + """ + + """ + DBT_ENVIRONMENT_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "dbtEnvironmentName", "dbtEnvironmentName.keyword", "dbtEnvironmentName" + ) + """ + + """ + DBT_ENVIRONMENT_DBT_VERSION: ClassVar[KeywordTextField] = KeywordTextField( + "dbtEnvironmentDbtVersion", + "dbtEnvironmentDbtVersion.keyword", + "dbtEnvironmentDbtVersion", + ) + """ + + """ + DBT_TAGS: ClassVar[KeywordField] = KeywordField("dbtTags", "dbtTags") + """ + + """ + DBT_CONNECTION_CONTEXT: ClassVar[KeywordField] = KeywordField( + "dbtConnectionContext", "dbtConnectionContext" + ) + """ + + """ + DBT_SEMANTIC_LAYER_PROXY_URL: ClassVar[KeywordField] = KeywordField( + "dbtSemanticLayerProxyUrl", "dbtSemanticLayerProxyUrl" + ) + """ + + """ + + _convenience_properties: ClassVar[list[str]] = [ + "dbt_alias", + "dbt_meta", + "dbt_unique_id", + "dbt_account_name", + "dbt_project_name", + "dbt_package_name", + "dbt_job_name", + "dbt_job_schedule", + "dbt_job_status", + "dbt_job_schedule_cron_humanized", + "dbt_job_last_run", + "dbt_job_next_run", + "dbt_job_next_run_humanized", + "dbt_environment_name", + 
"dbt_environment_dbt_version", + "dbt_tags", + "dbt_connection_context", + "dbt_semantic_layer_proxy_url", + ] + + @property + def dbt_alias(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_alias + + @dbt_alias.setter + def dbt_alias(self, dbt_alias: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_alias = dbt_alias + + @property + def dbt_meta(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_meta + + @dbt_meta.setter + def dbt_meta(self, dbt_meta: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_meta = dbt_meta + + @property + def dbt_unique_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_unique_id + + @dbt_unique_id.setter + def dbt_unique_id(self, dbt_unique_id: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_unique_id = dbt_unique_id + + @property + def dbt_account_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_account_name + + @dbt_account_name.setter + def dbt_account_name(self, dbt_account_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_account_name = dbt_account_name + + @property + def dbt_project_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_project_name + + @dbt_project_name.setter + def dbt_project_name(self, dbt_project_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_project_name = dbt_project_name + + @property + def dbt_package_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_package_name + + @dbt_package_name.setter + def dbt_package_name(self, dbt_package_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_package_name = dbt_package_name + + @property + def dbt_job_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_job_name + + @dbt_job_name.setter + def dbt_job_name(self, dbt_job_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_job_name = dbt_job_name + + @property + def dbt_job_schedule(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_job_schedule + + @dbt_job_schedule.setter + def dbt_job_schedule(self, dbt_job_schedule: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_job_schedule = dbt_job_schedule + + @property + def dbt_job_status(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_job_status + + @dbt_job_status.setter + def dbt_job_status(self, dbt_job_status: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_job_status = dbt_job_status + + @property + def dbt_job_schedule_cron_humanized(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.dbt_job_schedule_cron_humanized + ) + + @dbt_job_schedule_cron_humanized.setter + def dbt_job_schedule_cron_humanized( + self, dbt_job_schedule_cron_humanized: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + 
self.attributes.dbt_job_schedule_cron_humanized = ( + dbt_job_schedule_cron_humanized + ) + + @property + def dbt_job_last_run(self) -> Optional[datetime]: + return None if self.attributes is None else self.attributes.dbt_job_last_run + + @dbt_job_last_run.setter + def dbt_job_last_run(self, dbt_job_last_run: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_job_last_run = dbt_job_last_run + + @property + def dbt_job_next_run(self) -> Optional[datetime]: + return None if self.attributes is None else self.attributes.dbt_job_next_run + + @dbt_job_next_run.setter + def dbt_job_next_run(self, dbt_job_next_run: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_job_next_run = dbt_job_next_run + + @property + def dbt_job_next_run_humanized(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.dbt_job_next_run_humanized + ) + + @dbt_job_next_run_humanized.setter + def dbt_job_next_run_humanized(self, dbt_job_next_run_humanized: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_job_next_run_humanized = dbt_job_next_run_humanized + + @property + def dbt_environment_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_environment_name + + @dbt_environment_name.setter + def dbt_environment_name(self, dbt_environment_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_environment_name = dbt_environment_name + + @property + def dbt_environment_dbt_version(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.dbt_environment_dbt_version + ) + + @dbt_environment_dbt_version.setter + def dbt_environment_dbt_version(self, dbt_environment_dbt_version: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_environment_dbt_version = dbt_environment_dbt_version + + @property + def dbt_tags(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.dbt_tags + + @dbt_tags.setter + def dbt_tags(self, dbt_tags: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_tags = dbt_tags + + @property + def dbt_connection_context(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.dbt_connection_context + ) + + @dbt_connection_context.setter + def dbt_connection_context(self, dbt_connection_context: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_connection_context = dbt_connection_context + + @property + def dbt_semantic_layer_proxy_url(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.dbt_semantic_layer_proxy_url + ) + + @dbt_semantic_layer_proxy_url.setter + def dbt_semantic_layer_proxy_url(self, dbt_semantic_layer_proxy_url: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_semantic_layer_proxy_url = dbt_semantic_layer_proxy_url + + class Attributes(Catalog.Attributes): + dbt_alias: Optional[str] = Field(default=None, description="") + dbt_meta: Optional[str] = Field(default=None, description="") + dbt_unique_id: Optional[str] = Field(default=None, description="") + dbt_account_name: Optional[str] = Field(default=None, description="") + 
dbt_project_name: Optional[str] = Field(default=None, description="") + dbt_package_name: Optional[str] = Field(default=None, description="") + dbt_job_name: Optional[str] = Field(default=None, description="") + dbt_job_schedule: Optional[str] = Field(default=None, description="") + dbt_job_status: Optional[str] = Field(default=None, description="") + dbt_job_schedule_cron_humanized: Optional[str] = Field( + default=None, description="" + ) + dbt_job_last_run: Optional[datetime] = Field(default=None, description="") + dbt_job_next_run: Optional[datetime] = Field(default=None, description="") + dbt_job_next_run_humanized: Optional[str] = Field(default=None, description="") + dbt_environment_name: Optional[str] = Field(default=None, description="") + dbt_environment_dbt_version: Optional[str] = Field(default=None, description="") + dbt_tags: Optional[set[str]] = Field(default=None, description="") + dbt_connection_context: Optional[str] = Field(default=None, description="") + dbt_semantic_layer_proxy_url: Optional[str] = Field( + default=None, description="" + ) + + attributes: "Dbt.Attributes" = Field( + default_factory=lambda: Dbt.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) diff --git a/pyatlan/model/assets/asset34.py b/pyatlan/model/assets/dbt_column_process.py similarity index 85% rename from pyatlan/model/assets/asset34.py rename to pyatlan/model/assets/dbt_column_process.py index 1bbf18704..cd9ab4f9c 100644 --- a/pyatlan/model/assets/asset34.py +++ b/pyatlan/model/assets/dbt_column_process.py @@ -7,7 +7,7 @@ from datetime import datetime from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.fields.atlan_fields import ( KeywordField, @@ -16,20 +16,13 @@ RelationField, ) -from .asset00 import ( - AirflowTask, - Catalog, - ColumnProcess, - Dbt, - MatillionComponent, - Process, -) +from .dbt import Dbt class DbtColumnProcess(Dbt): """Description""" - type_name: str = Field("DbtColumnProcess", allow_mutation=False) + type_name: str = Field(default="DbtColumnProcess", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -529,68 +522,44 @@ def column_processes(self, column_processes: Optional[list[ColumnProcess]]): class Attributes(Dbt.Attributes): dbt_column_process_job_status: Optional[str] = Field( - None, description="", alias="dbtColumnProcessJobStatus" - ) - dbt_alias: Optional[str] = Field(None, description="", alias="dbtAlias") - dbt_meta: Optional[str] = Field(None, description="", alias="dbtMeta") - dbt_unique_id: Optional[str] = Field(None, description="", alias="dbtUniqueId") - dbt_account_name: Optional[str] = Field( - None, description="", alias="dbtAccountName" - ) - dbt_project_name: Optional[str] = Field( - None, description="", alias="dbtProjectName" - ) - dbt_package_name: Optional[str] = Field( - None, description="", alias="dbtPackageName" - ) - dbt_job_name: Optional[str] = Field(None, description="", alias="dbtJobName") - dbt_job_schedule: Optional[str] = Field( - None, description="", alias="dbtJobSchedule" - ) - dbt_job_status: Optional[str] = Field( - None, description="", alias="dbtJobStatus" + default=None, description="" ) + dbt_alias: Optional[str] = Field(default=None, description="") + dbt_meta: Optional[str] = Field(default=None, description="") + dbt_unique_id: Optional[str] = Field(default=None, 
description="") + dbt_account_name: Optional[str] = Field(default=None, description="") + dbt_project_name: Optional[str] = Field(default=None, description="") + dbt_package_name: Optional[str] = Field(default=None, description="") + dbt_job_name: Optional[str] = Field(default=None, description="") + dbt_job_schedule: Optional[str] = Field(default=None, description="") + dbt_job_status: Optional[str] = Field(default=None, description="") dbt_job_schedule_cron_humanized: Optional[str] = Field( - None, description="", alias="dbtJobScheduleCronHumanized" - ) - dbt_job_last_run: Optional[datetime] = Field( - None, description="", alias="dbtJobLastRun" - ) - dbt_job_next_run: Optional[datetime] = Field( - None, description="", alias="dbtJobNextRun" - ) - dbt_job_next_run_humanized: Optional[str] = Field( - None, description="", alias="dbtJobNextRunHumanized" - ) - dbt_environment_name: Optional[str] = Field( - None, description="", alias="dbtEnvironmentName" - ) - dbt_environment_dbt_version: Optional[str] = Field( - None, description="", alias="dbtEnvironmentDbtVersion" - ) - dbt_tags: Optional[set[str]] = Field(None, description="", alias="dbtTags") - dbt_connection_context: Optional[str] = Field( - None, description="", alias="dbtConnectionContext" + default=None, description="" ) + dbt_job_last_run: Optional[datetime] = Field(default=None, description="") + dbt_job_next_run: Optional[datetime] = Field(default=None, description="") + dbt_job_next_run_humanized: Optional[str] = Field(default=None, description="") + dbt_environment_name: Optional[str] = Field(default=None, description="") + dbt_environment_dbt_version: Optional[str] = Field(default=None, description="") + dbt_tags: Optional[set[str]] = Field(default=None, description="") + dbt_connection_context: Optional[str] = Field(default=None, description="") dbt_semantic_layer_proxy_url: Optional[str] = Field( - None, description="", alias="dbtSemanticLayerProxyUrl" + default=None, description="" ) - inputs: Optional[list[Catalog]] = Field(None, description="", alias="inputs") - outputs: Optional[list[Catalog]] = Field(None, description="", alias="outputs") - code: Optional[str] = Field(None, description="", alias="code") - sql: Optional[str] = Field(None, description="", alias="sql") - ast: Optional[str] = Field(None, description="", alias="ast") + inputs: Optional[list[Catalog]] = Field(default=None, description="") + outputs: Optional[list[Catalog]] = Field(default=None, description="") + code: Optional[str] = Field(default=None, description="") + sql: Optional[str] = Field(default=None, description="") + ast: Optional[str] = Field(default=None, description="") matillion_component: Optional[MatillionComponent] = Field( - None, description="", alias="matillionComponent" - ) # relationship - process: Optional[Process] = Field( - None, description="", alias="process" + default=None, description="" ) # relationship + process: Optional[Process] = Field(default=None, description="") # relationship airflow_tasks: Optional[list[AirflowTask]] = Field( - None, description="", alias="airflowTasks" + default=None, description="" ) # relationship column_processes: Optional[list[ColumnProcess]] = Field( - None, description="", alias="columnProcesses" + default=None, description="" ) # relationship attributes: "DbtColumnProcess.Attributes" = Field( @@ -600,4 +569,8 @@ class Attributes(Dbt.Attributes): ) -DbtColumnProcess.Attributes.update_forward_refs() +from .airflow_task import AirflowTask # noqa +from .catalog import Catalog # noqa +from 
.column_process import ColumnProcess # noqa +from .matillion_component import MatillionComponent # noqa +from .process import Process # noqa diff --git a/pyatlan/model/assets/dbt_metric.py b/pyatlan/model/assets/dbt_metric.py new file mode 100644 index 000000000..6c7e9d0f7 --- /dev/null +++ b/pyatlan/model/assets/dbt_metric.py @@ -0,0 +1,604 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from datetime import datetime +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import ( + KeywordField, + KeywordTextField, + NumericField, + RelationField, + TextField, +) +from pyatlan.model.structs import DbtMetricFilter + +from .dbt import Dbt + + +class DbtMetric(Dbt): + """Description""" + + type_name: str = Field(default="DbtMetric", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "DbtMetric": + raise ValueError("must be DbtMetric") + return v + + def __setattr__(self, name, value): + if name in DbtMetric._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + DBT_METRIC_FILTERS: ClassVar[KeywordField] = KeywordField( + "dbtMetricFilters", "dbtMetricFilters" + ) + """ + + """ + DBT_ALIAS: ClassVar[KeywordTextField] = KeywordTextField( + "dbtAlias", "dbtAlias.keyword", "dbtAlias" + ) + """ + + """ + DBT_META: ClassVar[KeywordField] = KeywordField("dbtMeta", "dbtMeta") + """ + + """ + DBT_UNIQUE_ID: ClassVar[KeywordTextField] = KeywordTextField( + "dbtUniqueId", "dbtUniqueId.keyword", "dbtUniqueId" + ) + """ + + """ + DBT_ACCOUNT_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "dbtAccountName", "dbtAccountName.keyword", "dbtAccountName" + ) + """ + + """ + DBT_PROJECT_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "dbtProjectName", "dbtProjectName.keyword", "dbtProjectName" + ) + """ + + """ + DBT_PACKAGE_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "dbtPackageName", "dbtPackageName.keyword", "dbtPackageName" + ) + """ + + """ + DBT_JOB_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "dbtJobName", "dbtJobName.keyword", "dbtJobName" + ) + """ + + """ + DBT_JOB_SCHEDULE: ClassVar[KeywordField] = KeywordField( + "dbtJobSchedule", "dbtJobSchedule" + ) + """ + + """ + DBT_JOB_STATUS: ClassVar[KeywordField] = KeywordField( + "dbtJobStatus", "dbtJobStatus" + ) + """ + + """ + DBT_JOB_SCHEDULE_CRON_HUMANIZED: ClassVar[KeywordTextField] = KeywordTextField( + "dbtJobScheduleCronHumanized", + "dbtJobScheduleCronHumanized.keyword", + "dbtJobScheduleCronHumanized", + ) + """ + + """ + DBT_JOB_LAST_RUN: ClassVar[NumericField] = NumericField( + "dbtJobLastRun", "dbtJobLastRun" + ) + """ + + """ + DBT_JOB_NEXT_RUN: ClassVar[NumericField] = NumericField( + "dbtJobNextRun", "dbtJobNextRun" + ) + """ + + """ + DBT_JOB_NEXT_RUN_HUMANIZED: ClassVar[KeywordTextField] = KeywordTextField( + "dbtJobNextRunHumanized", + "dbtJobNextRunHumanized.keyword", + "dbtJobNextRunHumanized", + ) + """ + + """ + DBT_ENVIRONMENT_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "dbtEnvironmentName", "dbtEnvironmentName.keyword", "dbtEnvironmentName" + ) + """ + + """ + DBT_ENVIRONMENT_DBT_VERSION: ClassVar[KeywordTextField] = KeywordTextField( + "dbtEnvironmentDbtVersion", + "dbtEnvironmentDbtVersion.keyword", + "dbtEnvironmentDbtVersion", + ) + """ + + """ + DBT_TAGS: ClassVar[KeywordField] = KeywordField("dbtTags", "dbtTags") + """ + + """ + 
DBT_CONNECTION_CONTEXT: ClassVar[KeywordField] = KeywordField( + "dbtConnectionContext", "dbtConnectionContext" + ) + """ + + """ + DBT_SEMANTIC_LAYER_PROXY_URL: ClassVar[KeywordField] = KeywordField( + "dbtSemanticLayerProxyUrl", "dbtSemanticLayerProxyUrl" + ) + """ + + """ + METRIC_TYPE: ClassVar[KeywordField] = KeywordField("metricType", "metricType") + """ + Type of the metric. + """ + METRIC_SQL: ClassVar[KeywordField] = KeywordField("metricSQL", "metricSQL") + """ + SQL query used to compute the metric. + """ + METRIC_FILTERS: ClassVar[TextField] = TextField("metricFilters", "metricFilters") + """ + Filters to be applied to the metric query. + """ + METRIC_TIME_GRAINS: ClassVar[TextField] = TextField( + "metricTimeGrains", "metricTimeGrains" + ) + """ + List of time grains to be applied to the metric query. + """ + + METRIC_TIMESTAMP_COLUMN: ClassVar[RelationField] = RelationField( + "metricTimestampColumn" + ) + """ + TBC + """ + DBT_MODEL: ClassVar[RelationField] = RelationField("dbtModel") + """ + TBC + """ + ASSETS: ClassVar[RelationField] = RelationField("assets") + """ + TBC + """ + METRIC_DIMENSION_COLUMNS: ClassVar[RelationField] = RelationField( + "metricDimensionColumns" + ) + """ + TBC + """ + DBT_METRIC_FILTER_COLUMNS: ClassVar[RelationField] = RelationField( + "dbtMetricFilterColumns" + ) + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "dbt_metric_filters", + "dbt_alias", + "dbt_meta", + "dbt_unique_id", + "dbt_account_name", + "dbt_project_name", + "dbt_package_name", + "dbt_job_name", + "dbt_job_schedule", + "dbt_job_status", + "dbt_job_schedule_cron_humanized", + "dbt_job_last_run", + "dbt_job_next_run", + "dbt_job_next_run_humanized", + "dbt_environment_name", + "dbt_environment_dbt_version", + "dbt_tags", + "dbt_connection_context", + "dbt_semantic_layer_proxy_url", + "metric_type", + "metric_s_q_l", + "metric_filters", + "metric_time_grains", + "metric_timestamp_column", + "dbt_model", + "assets", + "metric_dimension_columns", + "dbt_metric_filter_columns", + ] + + @property + def dbt_metric_filters(self) -> Optional[list[DbtMetricFilter]]: + return None if self.attributes is None else self.attributes.dbt_metric_filters + + @dbt_metric_filters.setter + def dbt_metric_filters(self, dbt_metric_filters: Optional[list[DbtMetricFilter]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_metric_filters = dbt_metric_filters + + @property + def dbt_alias(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_alias + + @dbt_alias.setter + def dbt_alias(self, dbt_alias: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_alias = dbt_alias + + @property + def dbt_meta(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_meta + + @dbt_meta.setter + def dbt_meta(self, dbt_meta: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_meta = dbt_meta + + @property + def dbt_unique_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_unique_id + + @dbt_unique_id.setter + def dbt_unique_id(self, dbt_unique_id: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_unique_id = dbt_unique_id + + @property + def dbt_account_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_account_name + + @dbt_account_name.setter 
+ def dbt_account_name(self, dbt_account_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_account_name = dbt_account_name + + @property + def dbt_project_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_project_name + + @dbt_project_name.setter + def dbt_project_name(self, dbt_project_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_project_name = dbt_project_name + + @property + def dbt_package_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_package_name + + @dbt_package_name.setter + def dbt_package_name(self, dbt_package_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_package_name = dbt_package_name + + @property + def dbt_job_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_job_name + + @dbt_job_name.setter + def dbt_job_name(self, dbt_job_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_job_name = dbt_job_name + + @property + def dbt_job_schedule(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_job_schedule + + @dbt_job_schedule.setter + def dbt_job_schedule(self, dbt_job_schedule: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_job_schedule = dbt_job_schedule + + @property + def dbt_job_status(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_job_status + + @dbt_job_status.setter + def dbt_job_status(self, dbt_job_status: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_job_status = dbt_job_status + + @property + def dbt_job_schedule_cron_humanized(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.dbt_job_schedule_cron_humanized + ) + + @dbt_job_schedule_cron_humanized.setter + def dbt_job_schedule_cron_humanized( + self, dbt_job_schedule_cron_humanized: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_job_schedule_cron_humanized = ( + dbt_job_schedule_cron_humanized + ) + + @property + def dbt_job_last_run(self) -> Optional[datetime]: + return None if self.attributes is None else self.attributes.dbt_job_last_run + + @dbt_job_last_run.setter + def dbt_job_last_run(self, dbt_job_last_run: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_job_last_run = dbt_job_last_run + + @property + def dbt_job_next_run(self) -> Optional[datetime]: + return None if self.attributes is None else self.attributes.dbt_job_next_run + + @dbt_job_next_run.setter + def dbt_job_next_run(self, dbt_job_next_run: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_job_next_run = dbt_job_next_run + + @property + def dbt_job_next_run_humanized(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.dbt_job_next_run_humanized + ) + + @dbt_job_next_run_humanized.setter + def dbt_job_next_run_humanized(self, dbt_job_next_run_humanized: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_job_next_run_humanized = 
dbt_job_next_run_humanized + + @property + def dbt_environment_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_environment_name + + @dbt_environment_name.setter + def dbt_environment_name(self, dbt_environment_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_environment_name = dbt_environment_name + + @property + def dbt_environment_dbt_version(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.dbt_environment_dbt_version + ) + + @dbt_environment_dbt_version.setter + def dbt_environment_dbt_version(self, dbt_environment_dbt_version: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_environment_dbt_version = dbt_environment_dbt_version + + @property + def dbt_tags(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.dbt_tags + + @dbt_tags.setter + def dbt_tags(self, dbt_tags: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_tags = dbt_tags + + @property + def dbt_connection_context(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.dbt_connection_context + ) + + @dbt_connection_context.setter + def dbt_connection_context(self, dbt_connection_context: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_connection_context = dbt_connection_context + + @property + def dbt_semantic_layer_proxy_url(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.dbt_semantic_layer_proxy_url + ) + + @dbt_semantic_layer_proxy_url.setter + def dbt_semantic_layer_proxy_url(self, dbt_semantic_layer_proxy_url: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_semantic_layer_proxy_url = dbt_semantic_layer_proxy_url + + @property + def metric_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.metric_type + + @metric_type.setter + def metric_type(self, metric_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metric_type = metric_type + + @property + def metric_s_q_l(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.metric_s_q_l + + @metric_s_q_l.setter + def metric_s_q_l(self, metric_s_q_l: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metric_s_q_l = metric_s_q_l + + @property + def metric_filters(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.metric_filters + + @metric_filters.setter + def metric_filters(self, metric_filters: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metric_filters = metric_filters + + @property + def metric_time_grains(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.metric_time_grains + + @metric_time_grains.setter + def metric_time_grains(self, metric_time_grains: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metric_time_grains = metric_time_grains + + @property + def metric_timestamp_column(self) -> Optional[Column]: + return ( + None if self.attributes is None else self.attributes.metric_timestamp_column + ) + + 
@metric_timestamp_column.setter + def metric_timestamp_column(self, metric_timestamp_column: Optional[Column]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metric_timestamp_column = metric_timestamp_column + + @property + def dbt_model(self) -> Optional[DbtModel]: + return None if self.attributes is None else self.attributes.dbt_model + + @dbt_model.setter + def dbt_model(self, dbt_model: Optional[DbtModel]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_model = dbt_model + + @property + def assets(self) -> Optional[list[Asset]]: + return None if self.attributes is None else self.attributes.assets + + @assets.setter + def assets(self, assets: Optional[list[Asset]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.assets = assets + + @property + def metric_dimension_columns(self) -> Optional[list[Column]]: + return ( + None + if self.attributes is None + else self.attributes.metric_dimension_columns + ) + + @metric_dimension_columns.setter + def metric_dimension_columns( + self, metric_dimension_columns: Optional[list[Column]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metric_dimension_columns = metric_dimension_columns + + @property + def dbt_metric_filter_columns(self) -> Optional[list[Column]]: + return ( + None + if self.attributes is None + else self.attributes.dbt_metric_filter_columns + ) + + @dbt_metric_filter_columns.setter + def dbt_metric_filter_columns( + self, dbt_metric_filter_columns: Optional[list[Column]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_metric_filter_columns = dbt_metric_filter_columns + + class Attributes(Dbt.Attributes): + dbt_metric_filters: Optional[list[DbtMetricFilter]] = Field( + default=None, description="" + ) + dbt_alias: Optional[str] = Field(default=None, description="") + dbt_meta: Optional[str] = Field(default=None, description="") + dbt_unique_id: Optional[str] = Field(default=None, description="") + dbt_account_name: Optional[str] = Field(default=None, description="") + dbt_project_name: Optional[str] = Field(default=None, description="") + dbt_package_name: Optional[str] = Field(default=None, description="") + dbt_job_name: Optional[str] = Field(default=None, description="") + dbt_job_schedule: Optional[str] = Field(default=None, description="") + dbt_job_status: Optional[str] = Field(default=None, description="") + dbt_job_schedule_cron_humanized: Optional[str] = Field( + default=None, description="" + ) + dbt_job_last_run: Optional[datetime] = Field(default=None, description="") + dbt_job_next_run: Optional[datetime] = Field(default=None, description="") + dbt_job_next_run_humanized: Optional[str] = Field(default=None, description="") + dbt_environment_name: Optional[str] = Field(default=None, description="") + dbt_environment_dbt_version: Optional[str] = Field(default=None, description="") + dbt_tags: Optional[set[str]] = Field(default=None, description="") + dbt_connection_context: Optional[str] = Field(default=None, description="") + dbt_semantic_layer_proxy_url: Optional[str] = Field( + default=None, description="" + ) + metric_type: Optional[str] = Field(default=None, description="") + metric_s_q_l: Optional[str] = Field(default=None, description="") + metric_filters: Optional[str] = Field(default=None, description="") + metric_time_grains: Optional[set[str]] = Field(default=None, description="") + 
metric_timestamp_column: Optional[Column] = Field( + default=None, description="" + ) # relationship + dbt_model: Optional[DbtModel] = Field( + default=None, description="" + ) # relationship + assets: Optional[list[Asset]] = Field( + default=None, description="" + ) # relationship + metric_dimension_columns: Optional[list[Column]] = Field( + default=None, description="" + ) # relationship + dbt_metric_filter_columns: Optional[list[Column]] = Field( + default=None, description="" + ) # relationship + + attributes: "DbtMetric.Attributes" = Field( + default_factory=lambda: DbtMetric.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .asset import Asset # noqa +from .column import Column # noqa +from .dbt_model import DbtModel # noqa diff --git a/pyatlan/model/assets/dbt_model.py b/pyatlan/model/assets/dbt_model.py new file mode 100644 index 000000000..7da75e884 --- /dev/null +++ b/pyatlan/model/assets/dbt_model.py @@ -0,0 +1,418 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from datetime import datetime +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, NumericField, RelationField + +from .dbt import Dbt + + +class DbtModel(Dbt): + """Description""" + + type_name: str = Field(default="DbtModel", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "DbtModel": + raise ValueError("must be DbtModel") + return v + + def __setattr__(self, name, value): + if name in DbtModel._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + DBT_STATUS: ClassVar[KeywordField] = KeywordField("dbtStatus", "dbtStatus") + """ + + """ + DBT_ERROR: ClassVar[KeywordField] = KeywordField("dbtError", "dbtError") + """ + + """ + DBT_RAW_SQL: ClassVar[KeywordField] = KeywordField("dbtRawSQL", "dbtRawSQL") + """ + + """ + DBT_COMPILED_SQL: ClassVar[KeywordField] = KeywordField( + "dbtCompiledSQL", "dbtCompiledSQL" + ) + """ + + """ + DBT_STATS: ClassVar[KeywordField] = KeywordField("dbtStats", "dbtStats") + """ + + """ + DBT_MATERIALIZATION_TYPE: ClassVar[KeywordField] = KeywordField( + "dbtMaterializationType", "dbtMaterializationType" + ) + """ + + """ + DBT_MODEL_COMPILE_STARTED_AT: ClassVar[NumericField] = NumericField( + "dbtModelCompileStartedAt", "dbtModelCompileStartedAt" + ) + """ + + """ + DBT_MODEL_COMPILE_COMPLETED_AT: ClassVar[NumericField] = NumericField( + "dbtModelCompileCompletedAt", "dbtModelCompileCompletedAt" + ) + """ + + """ + DBT_MODEL_EXECUTE_STARTED_AT: ClassVar[NumericField] = NumericField( + "dbtModelExecuteStartedAt", "dbtModelExecuteStartedAt" + ) + """ + + """ + DBT_MODEL_EXECUTE_COMPLETED_AT: ClassVar[NumericField] = NumericField( + "dbtModelExecuteCompletedAt", "dbtModelExecuteCompletedAt" + ) + """ + + """ + DBT_MODEL_EXECUTION_TIME: ClassVar[NumericField] = NumericField( + "dbtModelExecutionTime", "dbtModelExecutionTime" + ) + """ + + """ + DBT_MODEL_RUN_GENERATED_AT: ClassVar[NumericField] = NumericField( + "dbtModelRunGeneratedAt", "dbtModelRunGeneratedAt" + ) + """ + + """ + DBT_MODEL_RUN_ELAPSED_TIME: ClassVar[NumericField] = NumericField( + "dbtModelRunElapsedTime", "dbtModelRunElapsedTime" + ) + """ + + """ + + DBT_METRICS: ClassVar[RelationField] = 
RelationField("dbtMetrics") + """ + TBC + """ + DBT_TESTS: ClassVar[RelationField] = RelationField("dbtTests") + """ + TBC + """ + DBT_MODEL_SQL_ASSETS: ClassVar[RelationField] = RelationField("dbtModelSqlAssets") + """ + TBC + """ + DBT_MODEL_COLUMNS: ClassVar[RelationField] = RelationField("dbtModelColumns") + """ + TBC + """ + SQL_ASSET: ClassVar[RelationField] = RelationField("sqlAsset") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "dbt_status", + "dbt_error", + "dbt_raw_s_q_l", + "dbt_compiled_s_q_l", + "dbt_stats", + "dbt_materialization_type", + "dbt_model_compile_started_at", + "dbt_model_compile_completed_at", + "dbt_model_execute_started_at", + "dbt_model_execute_completed_at", + "dbt_model_execution_time", + "dbt_model_run_generated_at", + "dbt_model_run_elapsed_time", + "dbt_metrics", + "dbt_tests", + "dbt_model_sql_assets", + "dbt_model_columns", + "sql_asset", + ] + + @property + def dbt_status(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_status + + @dbt_status.setter + def dbt_status(self, dbt_status: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_status = dbt_status + + @property + def dbt_error(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_error + + @dbt_error.setter + def dbt_error(self, dbt_error: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_error = dbt_error + + @property + def dbt_raw_s_q_l(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_raw_s_q_l + + @dbt_raw_s_q_l.setter + def dbt_raw_s_q_l(self, dbt_raw_s_q_l: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_raw_s_q_l = dbt_raw_s_q_l + + @property + def dbt_compiled_s_q_l(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_compiled_s_q_l + + @dbt_compiled_s_q_l.setter + def dbt_compiled_s_q_l(self, dbt_compiled_s_q_l: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_compiled_s_q_l = dbt_compiled_s_q_l + + @property + def dbt_stats(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_stats + + @dbt_stats.setter + def dbt_stats(self, dbt_stats: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_stats = dbt_stats + + @property + def dbt_materialization_type(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.dbt_materialization_type + ) + + @dbt_materialization_type.setter + def dbt_materialization_type(self, dbt_materialization_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_materialization_type = dbt_materialization_type + + @property + def dbt_model_compile_started_at(self) -> Optional[datetime]: + return ( + None + if self.attributes is None + else self.attributes.dbt_model_compile_started_at + ) + + @dbt_model_compile_started_at.setter + def dbt_model_compile_started_at( + self, dbt_model_compile_started_at: Optional[datetime] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_model_compile_started_at = dbt_model_compile_started_at + + @property + def dbt_model_compile_completed_at(self) -> Optional[datetime]: + return ( + None + if self.attributes is 
None + else self.attributes.dbt_model_compile_completed_at + ) + + @dbt_model_compile_completed_at.setter + def dbt_model_compile_completed_at( + self, dbt_model_compile_completed_at: Optional[datetime] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_model_compile_completed_at = dbt_model_compile_completed_at + + @property + def dbt_model_execute_started_at(self) -> Optional[datetime]: + return ( + None + if self.attributes is None + else self.attributes.dbt_model_execute_started_at + ) + + @dbt_model_execute_started_at.setter + def dbt_model_execute_started_at( + self, dbt_model_execute_started_at: Optional[datetime] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_model_execute_started_at = dbt_model_execute_started_at + + @property + def dbt_model_execute_completed_at(self) -> Optional[datetime]: + return ( + None + if self.attributes is None + else self.attributes.dbt_model_execute_completed_at + ) + + @dbt_model_execute_completed_at.setter + def dbt_model_execute_completed_at( + self, dbt_model_execute_completed_at: Optional[datetime] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_model_execute_completed_at = dbt_model_execute_completed_at + + @property + def dbt_model_execution_time(self) -> Optional[float]: + return ( + None + if self.attributes is None + else self.attributes.dbt_model_execution_time + ) + + @dbt_model_execution_time.setter + def dbt_model_execution_time(self, dbt_model_execution_time: Optional[float]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_model_execution_time = dbt_model_execution_time + + @property + def dbt_model_run_generated_at(self) -> Optional[datetime]: + return ( + None + if self.attributes is None + else self.attributes.dbt_model_run_generated_at + ) + + @dbt_model_run_generated_at.setter + def dbt_model_run_generated_at( + self, dbt_model_run_generated_at: Optional[datetime] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_model_run_generated_at = dbt_model_run_generated_at + + @property + def dbt_model_run_elapsed_time(self) -> Optional[float]: + return ( + None + if self.attributes is None + else self.attributes.dbt_model_run_elapsed_time + ) + + @dbt_model_run_elapsed_time.setter + def dbt_model_run_elapsed_time(self, dbt_model_run_elapsed_time: Optional[float]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_model_run_elapsed_time = dbt_model_run_elapsed_time + + @property + def dbt_metrics(self) -> Optional[list[DbtMetric]]: + return None if self.attributes is None else self.attributes.dbt_metrics + + @dbt_metrics.setter + def dbt_metrics(self, dbt_metrics: Optional[list[DbtMetric]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_metrics = dbt_metrics + + @property + def dbt_tests(self) -> Optional[list[DbtTest]]: + return None if self.attributes is None else self.attributes.dbt_tests + + @dbt_tests.setter + def dbt_tests(self, dbt_tests: Optional[list[DbtTest]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_tests = dbt_tests + + @property + def dbt_model_sql_assets(self) -> Optional[list[SQL]]: + return None if self.attributes is None else self.attributes.dbt_model_sql_assets + + @dbt_model_sql_assets.setter + def dbt_model_sql_assets(self, dbt_model_sql_assets: Optional[list[SQL]]): 
+ if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_model_sql_assets = dbt_model_sql_assets + + @property + def dbt_model_columns(self) -> Optional[list[DbtModelColumn]]: + return None if self.attributes is None else self.attributes.dbt_model_columns + + @dbt_model_columns.setter + def dbt_model_columns(self, dbt_model_columns: Optional[list[DbtModelColumn]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_model_columns = dbt_model_columns + + @property + def sql_asset(self) -> Optional[SQL]: + return None if self.attributes is None else self.attributes.sql_asset + + @sql_asset.setter + def sql_asset(self, sql_asset: Optional[SQL]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sql_asset = sql_asset + + class Attributes(Dbt.Attributes): + dbt_status: Optional[str] = Field(default=None, description="") + dbt_error: Optional[str] = Field(default=None, description="") + dbt_raw_s_q_l: Optional[str] = Field(default=None, description="") + dbt_compiled_s_q_l: Optional[str] = Field(default=None, description="") + dbt_stats: Optional[str] = Field(default=None, description="") + dbt_materialization_type: Optional[str] = Field(default=None, description="") + dbt_model_compile_started_at: Optional[datetime] = Field( + default=None, description="" + ) + dbt_model_compile_completed_at: Optional[datetime] = Field( + default=None, description="" + ) + dbt_model_execute_started_at: Optional[datetime] = Field( + default=None, description="" + ) + dbt_model_execute_completed_at: Optional[datetime] = Field( + default=None, description="" + ) + dbt_model_execution_time: Optional[float] = Field(default=None, description="") + dbt_model_run_generated_at: Optional[datetime] = Field( + default=None, description="" + ) + dbt_model_run_elapsed_time: Optional[float] = Field( + default=None, description="" + ) + dbt_metrics: Optional[list[DbtMetric]] = Field( + default=None, description="" + ) # relationship + dbt_tests: Optional[list[DbtTest]] = Field( + default=None, description="" + ) # relationship + dbt_model_sql_assets: Optional[list[SQL]] = Field( + default=None, description="" + ) # relationship + dbt_model_columns: Optional[list[DbtModelColumn]] = Field( + default=None, description="" + ) # relationship + sql_asset: Optional[SQL] = Field(default=None, description="") # relationship + + attributes: "DbtModel.Attributes" = Field( + default_factory=lambda: DbtModel.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .dbt_metric import DbtMetric # noqa +from .dbt_model_column import DbtModelColumn # noqa +from .dbt_test import DbtTest # noqa +from .s_q_l import SQL # noqa diff --git a/pyatlan/model/assets/dbt_model_column.py b/pyatlan/model/assets/dbt_model_column.py new file mode 100644 index 000000000..35bc49dea --- /dev/null +++ b/pyatlan/model/assets/dbt_model_column.py @@ -0,0 +1,197 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import ( + KeywordField, + KeywordTextField, + NumericField, + RelationField, +) + +from .dbt import Dbt + + +class DbtModelColumn(Dbt): + """Description""" + + type_name: str = Field(default="DbtModelColumn", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "DbtModelColumn": + raise ValueError("must be DbtModelColumn") + return v + + def __setattr__(self, name, value): + if name in DbtModelColumn._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + DBT_MODEL_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "dbtModelQualifiedName", "dbtModelQualifiedName", "dbtModelQualifiedName.text" + ) + """ + + """ + DBT_MODEL_COLUMN_DATA_TYPE: ClassVar[KeywordField] = KeywordField( + "dbtModelColumnDataType", "dbtModelColumnDataType" + ) + """ + + """ + DBT_MODEL_COLUMN_ORDER: ClassVar[NumericField] = NumericField( + "dbtModelColumnOrder", "dbtModelColumnOrder" + ) + """ + + """ + + SQL_COLUMN: ClassVar[RelationField] = RelationField("sqlColumn") + """ + TBC + """ + DBT_MODEL: ClassVar[RelationField] = RelationField("dbtModel") + """ + TBC + """ + DBT_MODEL_COLUMN_SQL_COLUMNS: ClassVar[RelationField] = RelationField( + "dbtModelColumnSqlColumns" + ) + """ + TBC + """ + DBT_TESTS: ClassVar[RelationField] = RelationField("dbtTests") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "dbt_model_qualified_name", + "dbt_model_column_data_type", + "dbt_model_column_order", + "sql_column", + "dbt_model", + "dbt_model_column_sql_columns", + "dbt_tests", + ] + + @property + def dbt_model_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.dbt_model_qualified_name + ) + + @dbt_model_qualified_name.setter + def dbt_model_qualified_name(self, dbt_model_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_model_qualified_name = dbt_model_qualified_name + + @property + def dbt_model_column_data_type(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.dbt_model_column_data_type + ) + + @dbt_model_column_data_type.setter + def dbt_model_column_data_type(self, dbt_model_column_data_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_model_column_data_type = dbt_model_column_data_type + + @property + def dbt_model_column_order(self) -> Optional[int]: + return ( + None if self.attributes is None else self.attributes.dbt_model_column_order + ) + + @dbt_model_column_order.setter + def dbt_model_column_order(self, dbt_model_column_order: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_model_column_order = dbt_model_column_order + + @property + def sql_column(self) -> Optional[Column]: + return None if self.attributes is None else self.attributes.sql_column + + @sql_column.setter + def sql_column(self, sql_column: Optional[Column]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sql_column = sql_column + + @property + def dbt_model(self) -> Optional[DbtModel]: + return None if self.attributes is None else self.attributes.dbt_model + + @dbt_model.setter + def dbt_model(self, dbt_model: 
Optional[DbtModel]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_model = dbt_model + + @property + def dbt_model_column_sql_columns(self) -> Optional[list[Column]]: + return ( + None + if self.attributes is None + else self.attributes.dbt_model_column_sql_columns + ) + + @dbt_model_column_sql_columns.setter + def dbt_model_column_sql_columns( + self, dbt_model_column_sql_columns: Optional[list[Column]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_model_column_sql_columns = dbt_model_column_sql_columns + + @property + def dbt_tests(self) -> Optional[list[DbtTest]]: + return None if self.attributes is None else self.attributes.dbt_tests + + @dbt_tests.setter + def dbt_tests(self, dbt_tests: Optional[list[DbtTest]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_tests = dbt_tests + + class Attributes(Dbt.Attributes): + dbt_model_qualified_name: Optional[str] = Field(default=None, description="") + dbt_model_column_data_type: Optional[str] = Field(default=None, description="") + dbt_model_column_order: Optional[int] = Field(default=None, description="") + sql_column: Optional[Column] = Field( + default=None, description="" + ) # relationship + dbt_model: Optional[DbtModel] = Field( + default=None, description="" + ) # relationship + dbt_model_column_sql_columns: Optional[list[Column]] = Field( + default=None, description="" + ) # relationship + dbt_tests: Optional[list[DbtTest]] = Field( + default=None, description="" + ) # relationship + + attributes: "DbtModelColumn.Attributes" = Field( + default_factory=lambda: DbtModelColumn.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .column import Column # noqa +from .dbt_model import DbtModel # noqa +from .dbt_test import DbtTest # noqa diff --git a/pyatlan/model/assets/asset12.py b/pyatlan/model/assets/dbt_process.py similarity index 85% rename from pyatlan/model/assets/asset12.py rename to pyatlan/model/assets/dbt_process.py index d7dbfd164..639019e66 100644 --- a/pyatlan/model/assets/asset12.py +++ b/pyatlan/model/assets/dbt_process.py @@ -7,7 +7,7 @@ from datetime import datetime from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.fields.atlan_fields import ( KeywordField, @@ -16,13 +16,13 @@ RelationField, ) -from .asset00 import AirflowTask, Catalog, ColumnProcess, Dbt, MatillionComponent +from .dbt import Dbt class DbtProcess(Dbt): """Description""" - type_name: str = Field("DbtProcess", allow_mutation=False) + type_name: str = Field(default="DbtProcess", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -502,66 +502,42 @@ def column_processes(self, column_processes: Optional[list[ColumnProcess]]): self.attributes.column_processes = column_processes class Attributes(Dbt.Attributes): - dbt_process_job_status: Optional[str] = Field( - None, description="", alias="dbtProcessJobStatus" - ) - dbt_alias: Optional[str] = Field(None, description="", alias="dbtAlias") - dbt_meta: Optional[str] = Field(None, description="", alias="dbtMeta") - dbt_unique_id: Optional[str] = Field(None, description="", alias="dbtUniqueId") - dbt_account_name: Optional[str] = Field( - None, description="", alias="dbtAccountName" - ) - dbt_project_name: Optional[str] = 
Field( - None, description="", alias="dbtProjectName" - ) - dbt_package_name: Optional[str] = Field( - None, description="", alias="dbtPackageName" - ) - dbt_job_name: Optional[str] = Field(None, description="", alias="dbtJobName") - dbt_job_schedule: Optional[str] = Field( - None, description="", alias="dbtJobSchedule" - ) - dbt_job_status: Optional[str] = Field( - None, description="", alias="dbtJobStatus" - ) + dbt_process_job_status: Optional[str] = Field(default=None, description="") + dbt_alias: Optional[str] = Field(default=None, description="") + dbt_meta: Optional[str] = Field(default=None, description="") + dbt_unique_id: Optional[str] = Field(default=None, description="") + dbt_account_name: Optional[str] = Field(default=None, description="") + dbt_project_name: Optional[str] = Field(default=None, description="") + dbt_package_name: Optional[str] = Field(default=None, description="") + dbt_job_name: Optional[str] = Field(default=None, description="") + dbt_job_schedule: Optional[str] = Field(default=None, description="") + dbt_job_status: Optional[str] = Field(default=None, description="") dbt_job_schedule_cron_humanized: Optional[str] = Field( - None, description="", alias="dbtJobScheduleCronHumanized" - ) - dbt_job_last_run: Optional[datetime] = Field( - None, description="", alias="dbtJobLastRun" - ) - dbt_job_next_run: Optional[datetime] = Field( - None, description="", alias="dbtJobNextRun" - ) - dbt_job_next_run_humanized: Optional[str] = Field( - None, description="", alias="dbtJobNextRunHumanized" - ) - dbt_environment_name: Optional[str] = Field( - None, description="", alias="dbtEnvironmentName" - ) - dbt_environment_dbt_version: Optional[str] = Field( - None, description="", alias="dbtEnvironmentDbtVersion" - ) - dbt_tags: Optional[set[str]] = Field(None, description="", alias="dbtTags") - dbt_connection_context: Optional[str] = Field( - None, description="", alias="dbtConnectionContext" + default=None, description="" ) + dbt_job_last_run: Optional[datetime] = Field(default=None, description="") + dbt_job_next_run: Optional[datetime] = Field(default=None, description="") + dbt_job_next_run_humanized: Optional[str] = Field(default=None, description="") + dbt_environment_name: Optional[str] = Field(default=None, description="") + dbt_environment_dbt_version: Optional[str] = Field(default=None, description="") + dbt_tags: Optional[set[str]] = Field(default=None, description="") + dbt_connection_context: Optional[str] = Field(default=None, description="") dbt_semantic_layer_proxy_url: Optional[str] = Field( - None, description="", alias="dbtSemanticLayerProxyUrl" + default=None, description="" ) - inputs: Optional[list[Catalog]] = Field(None, description="", alias="inputs") - outputs: Optional[list[Catalog]] = Field(None, description="", alias="outputs") - code: Optional[str] = Field(None, description="", alias="code") - sql: Optional[str] = Field(None, description="", alias="sql") - ast: Optional[str] = Field(None, description="", alias="ast") + inputs: Optional[list[Catalog]] = Field(default=None, description="") + outputs: Optional[list[Catalog]] = Field(default=None, description="") + code: Optional[str] = Field(default=None, description="") + sql: Optional[str] = Field(default=None, description="") + ast: Optional[str] = Field(default=None, description="") matillion_component: Optional[MatillionComponent] = Field( - None, description="", alias="matillionComponent" + default=None, description="" ) # relationship airflow_tasks: Optional[list[AirflowTask]] = Field( - 
None, description="", alias="airflowTasks" + default=None, description="" ) # relationship column_processes: Optional[list[ColumnProcess]] = Field( - None, description="", alias="columnProcesses" + default=None, description="" ) # relationship attributes: "DbtProcess.Attributes" = Field( @@ -571,4 +547,7 @@ class Attributes(Dbt.Attributes): ) -DbtProcess.Attributes.update_forward_refs() +from .airflow_task import AirflowTask # noqa +from .catalog import Catalog # noqa +from .column_process import ColumnProcess # noqa +from .matillion_component import MatillionComponent # noqa diff --git a/pyatlan/model/assets/dbt_source.py b/pyatlan/model/assets/dbt_source.py new file mode 100644 index 000000000..41dccf22b --- /dev/null +++ b/pyatlan/model/assets/dbt_source.py @@ -0,0 +1,135 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, RelationField + +from .dbt import Dbt + + +class DbtSource(Dbt): + """Description""" + + type_name: str = Field(default="DbtSource", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "DbtSource": + raise ValueError("must be DbtSource") + return v + + def __setattr__(self, name, value): + if name in DbtSource._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + DBT_STATE: ClassVar[KeywordField] = KeywordField("dbtState", "dbtState") + """ + + """ + DBT_FRESHNESS_CRITERIA: ClassVar[KeywordField] = KeywordField( + "dbtFreshnessCriteria", "dbtFreshnessCriteria" + ) + """ + + """ + + SQL_ASSETS: ClassVar[RelationField] = RelationField("sqlAssets") + """ + TBC + """ + DBT_TESTS: ClassVar[RelationField] = RelationField("dbtTests") + """ + TBC + """ + SQL_ASSET: ClassVar[RelationField] = RelationField("sqlAsset") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "dbt_state", + "dbt_freshness_criteria", + "sql_assets", + "dbt_tests", + "sql_asset", + ] + + @property + def dbt_state(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_state + + @dbt_state.setter + def dbt_state(self, dbt_state: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_state = dbt_state + + @property + def dbt_freshness_criteria(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.dbt_freshness_criteria + ) + + @dbt_freshness_criteria.setter + def dbt_freshness_criteria(self, dbt_freshness_criteria: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_freshness_criteria = dbt_freshness_criteria + + @property + def sql_assets(self) -> Optional[list[SQL]]: + return None if self.attributes is None else self.attributes.sql_assets + + @sql_assets.setter + def sql_assets(self, sql_assets: Optional[list[SQL]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sql_assets = sql_assets + + @property + def dbt_tests(self) -> Optional[list[DbtTest]]: + return None if self.attributes is None else self.attributes.dbt_tests + + @dbt_tests.setter + def dbt_tests(self, dbt_tests: Optional[list[DbtTest]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_tests = dbt_tests + + @property + def sql_asset(self) -> Optional[SQL]: + return 
None if self.attributes is None else self.attributes.sql_asset + + @sql_asset.setter + def sql_asset(self, sql_asset: Optional[SQL]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sql_asset = sql_asset + + class Attributes(Dbt.Attributes): + dbt_state: Optional[str] = Field(default=None, description="") + dbt_freshness_criteria: Optional[str] = Field(default=None, description="") + sql_assets: Optional[list[SQL]] = Field( + default=None, description="" + ) # relationship + dbt_tests: Optional[list[DbtTest]] = Field( + default=None, description="" + ) # relationship + sql_asset: Optional[SQL] = Field(default=None, description="") # relationship + + attributes: "DbtSource.Attributes" = Field( + default_factory=lambda: DbtSource.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .dbt_test import DbtTest # noqa +from .s_q_l import SQL # noqa diff --git a/pyatlan/model/assets/asset59.py b/pyatlan/model/assets/dbt_tag.py similarity index 85% rename from pyatlan/model/assets/asset59.py rename to pyatlan/model/assets/dbt_tag.py index 68ed957b6..090b0bda3 100644 --- a/pyatlan/model/assets/asset59.py +++ b/pyatlan/model/assets/dbt_tag.py @@ -7,7 +7,7 @@ from datetime import datetime from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.fields.atlan_fields import ( KeywordField, @@ -16,13 +16,13 @@ ) from pyatlan.model.structs import SourceTagAttribute -from .asset00 import Dbt +from .dbt import Dbt class DbtTag(Dbt): """Description""" - type_name: str = Field("DbtTag", allow_mutation=False) + type_name: str = Field(default="DbtTag", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -438,66 +438,37 @@ def mapped_atlan_tag_name(self, mapped_atlan_tag_name: Optional[str]): self.attributes.mapped_atlan_tag_name = mapped_atlan_tag_name class Attributes(Dbt.Attributes): - dbt_alias: Optional[str] = Field(None, description="", alias="dbtAlias") - dbt_meta: Optional[str] = Field(None, description="", alias="dbtMeta") - dbt_unique_id: Optional[str] = Field(None, description="", alias="dbtUniqueId") - dbt_account_name: Optional[str] = Field( - None, description="", alias="dbtAccountName" - ) - dbt_project_name: Optional[str] = Field( - None, description="", alias="dbtProjectName" - ) - dbt_package_name: Optional[str] = Field( - None, description="", alias="dbtPackageName" - ) - dbt_job_name: Optional[str] = Field(None, description="", alias="dbtJobName") - dbt_job_schedule: Optional[str] = Field( - None, description="", alias="dbtJobSchedule" - ) - dbt_job_status: Optional[str] = Field( - None, description="", alias="dbtJobStatus" - ) + dbt_alias: Optional[str] = Field(default=None, description="") + dbt_meta: Optional[str] = Field(default=None, description="") + dbt_unique_id: Optional[str] = Field(default=None, description="") + dbt_account_name: Optional[str] = Field(default=None, description="") + dbt_project_name: Optional[str] = Field(default=None, description="") + dbt_package_name: Optional[str] = Field(default=None, description="") + dbt_job_name: Optional[str] = Field(default=None, description="") + dbt_job_schedule: Optional[str] = Field(default=None, description="") + dbt_job_status: Optional[str] = Field(default=None, description="") dbt_job_schedule_cron_humanized: Optional[str] = Field( - 
None, description="", alias="dbtJobScheduleCronHumanized" - ) - dbt_job_last_run: Optional[datetime] = Field( - None, description="", alias="dbtJobLastRun" - ) - dbt_job_next_run: Optional[datetime] = Field( - None, description="", alias="dbtJobNextRun" - ) - dbt_job_next_run_humanized: Optional[str] = Field( - None, description="", alias="dbtJobNextRunHumanized" - ) - dbt_environment_name: Optional[str] = Field( - None, description="", alias="dbtEnvironmentName" - ) - dbt_environment_dbt_version: Optional[str] = Field( - None, description="", alias="dbtEnvironmentDbtVersion" - ) - dbt_tags: Optional[set[str]] = Field(None, description="", alias="dbtTags") - dbt_connection_context: Optional[str] = Field( - None, description="", alias="dbtConnectionContext" + default=None, description="" ) + dbt_job_last_run: Optional[datetime] = Field(default=None, description="") + dbt_job_next_run: Optional[datetime] = Field(default=None, description="") + dbt_job_next_run_humanized: Optional[str] = Field(default=None, description="") + dbt_environment_name: Optional[str] = Field(default=None, description="") + dbt_environment_dbt_version: Optional[str] = Field(default=None, description="") + dbt_tags: Optional[set[str]] = Field(default=None, description="") + dbt_connection_context: Optional[str] = Field(default=None, description="") dbt_semantic_layer_proxy_url: Optional[str] = Field( - None, description="", alias="dbtSemanticLayerProxyUrl" + default=None, description="" ) - tag_id: Optional[str] = Field(None, description="", alias="tagId") + tag_id: Optional[str] = Field(default=None, description="") tag_attributes: Optional[list[SourceTagAttribute]] = Field( - None, description="", alias="tagAttributes" - ) - tag_allowed_values: Optional[set[str]] = Field( - None, description="", alias="tagAllowedValues" - ) - mapped_atlan_tag_name: Optional[str] = Field( - None, description="", alias="mappedClassificationName" + default=None, description="" ) + tag_allowed_values: Optional[set[str]] = Field(default=None, description="") + mapped_atlan_tag_name: Optional[str] = Field(default=None, description="") attributes: "DbtTag.Attributes" = Field( default_factory=lambda: DbtTag.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) - - -DbtTag.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/dbt_test.py b/pyatlan/model/assets/dbt_test.py new file mode 100644 index 000000000..67801620a --- /dev/null +++ b/pyatlan/model/assets/dbt_test.py @@ -0,0 +1,273 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import ( + KeywordField, + KeywordTextField, + RelationField, +) + +from .dbt import Dbt + + +class DbtTest(Dbt): + """Description""" + + type_name: str = Field(default="DbtTest", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "DbtTest": + raise ValueError("must be DbtTest") + return v + + def __setattr__(self, name, value): + if name in DbtTest._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + DBT_TEST_STATUS: ClassVar[KeywordField] = KeywordField( + "dbtTestStatus", "dbtTestStatus" + ) + """ + Details of the results of the test. For errors, it reads "ERROR". 
+ """ + DBT_TEST_STATE: ClassVar[KeywordField] = KeywordField( + "dbtTestState", "dbtTestState" + ) + """ + Test results. Can be one of, in order of severity, "error", "fail", "warn", "pass". + """ + DBT_TEST_ERROR: ClassVar[KeywordField] = KeywordField( + "dbtTestError", "dbtTestError" + ) + """ + Error message in the case of state being "error". + """ + DBT_TEST_RAW_SQL: ClassVar[KeywordTextField] = KeywordTextField( + "dbtTestRawSQL", "dbtTestRawSQL", "dbtTestRawSQL.text" + ) + """ + Raw SQL of the test. + """ + DBT_TEST_COMPILED_SQL: ClassVar[KeywordField] = KeywordField( + "dbtTestCompiledSQL", "dbtTestCompiledSQL" + ) + """ + Compiled SQL of the test. + """ + DBT_TEST_RAW_CODE: ClassVar[KeywordTextField] = KeywordTextField( + "dbtTestRawCode", "dbtTestRawCode", "dbtTestRawCode.text" + ) + """ + Raw code of the test (when the test is defined using Python). + """ + DBT_TEST_COMPILED_CODE: ClassVar[KeywordField] = KeywordField( + "dbtTestCompiledCode", "dbtTestCompiledCode" + ) + """ + Compiled code of the test (when the test is defined using Python). + """ + DBT_TEST_LANGUAGE: ClassVar[KeywordField] = KeywordField( + "dbtTestLanguage", "dbtTestLanguage" + ) + """ + Language in which the test is written, for example: SQL or Python. + """ + + DBT_SOURCES: ClassVar[RelationField] = RelationField("dbtSources") + """ + TBC + """ + SQL_ASSETS: ClassVar[RelationField] = RelationField("sqlAssets") + """ + TBC + """ + DBT_MODELS: ClassVar[RelationField] = RelationField("dbtModels") + """ + TBC + """ + DBT_MODEL_COLUMNS: ClassVar[RelationField] = RelationField("dbtModelColumns") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "dbt_test_status", + "dbt_test_state", + "dbt_test_error", + "dbt_test_raw_s_q_l", + "dbt_test_compiled_s_q_l", + "dbt_test_raw_code", + "dbt_test_compiled_code", + "dbt_test_language", + "dbt_sources", + "sql_assets", + "dbt_models", + "dbt_model_columns", + ] + + @property + def dbt_test_status(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_test_status + + @dbt_test_status.setter + def dbt_test_status(self, dbt_test_status: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_test_status = dbt_test_status + + @property + def dbt_test_state(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_test_state + + @dbt_test_state.setter + def dbt_test_state(self, dbt_test_state: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_test_state = dbt_test_state + + @property + def dbt_test_error(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_test_error + + @dbt_test_error.setter + def dbt_test_error(self, dbt_test_error: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_test_error = dbt_test_error + + @property + def dbt_test_raw_s_q_l(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_test_raw_s_q_l + + @dbt_test_raw_s_q_l.setter + def dbt_test_raw_s_q_l(self, dbt_test_raw_s_q_l: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_test_raw_s_q_l = dbt_test_raw_s_q_l + + @property + def dbt_test_compiled_s_q_l(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.dbt_test_compiled_s_q_l + ) + + @dbt_test_compiled_s_q_l.setter + def 
dbt_test_compiled_s_q_l(self, dbt_test_compiled_s_q_l: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_test_compiled_s_q_l = dbt_test_compiled_s_q_l + + @property + def dbt_test_raw_code(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_test_raw_code + + @dbt_test_raw_code.setter + def dbt_test_raw_code(self, dbt_test_raw_code: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_test_raw_code = dbt_test_raw_code + + @property + def dbt_test_compiled_code(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.dbt_test_compiled_code + ) + + @dbt_test_compiled_code.setter + def dbt_test_compiled_code(self, dbt_test_compiled_code: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_test_compiled_code = dbt_test_compiled_code + + @property + def dbt_test_language(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dbt_test_language + + @dbt_test_language.setter + def dbt_test_language(self, dbt_test_language: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_test_language = dbt_test_language + + @property + def dbt_sources(self) -> Optional[list[DbtSource]]: + return None if self.attributes is None else self.attributes.dbt_sources + + @dbt_sources.setter + def dbt_sources(self, dbt_sources: Optional[list[DbtSource]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_sources = dbt_sources + + @property + def sql_assets(self) -> Optional[list[SQL]]: + return None if self.attributes is None else self.attributes.sql_assets + + @sql_assets.setter + def sql_assets(self, sql_assets: Optional[list[SQL]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sql_assets = sql_assets + + @property + def dbt_models(self) -> Optional[list[DbtModel]]: + return None if self.attributes is None else self.attributes.dbt_models + + @dbt_models.setter + def dbt_models(self, dbt_models: Optional[list[DbtModel]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_models = dbt_models + + @property + def dbt_model_columns(self) -> Optional[list[DbtModelColumn]]: + return None if self.attributes is None else self.attributes.dbt_model_columns + + @dbt_model_columns.setter + def dbt_model_columns(self, dbt_model_columns: Optional[list[DbtModelColumn]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_model_columns = dbt_model_columns + + class Attributes(Dbt.Attributes): + dbt_test_status: Optional[str] = Field(default=None, description="") + dbt_test_state: Optional[str] = Field(default=None, description="") + dbt_test_error: Optional[str] = Field(default=None, description="") + dbt_test_raw_s_q_l: Optional[str] = Field(default=None, description="") + dbt_test_compiled_s_q_l: Optional[str] = Field(default=None, description="") + dbt_test_raw_code: Optional[str] = Field(default=None, description="") + dbt_test_compiled_code: Optional[str] = Field(default=None, description="") + dbt_test_language: Optional[str] = Field(default=None, description="") + dbt_sources: Optional[list[DbtSource]] = Field( + default=None, description="" + ) # relationship + sql_assets: Optional[list[SQL]] = Field( + default=None, description="" + ) # relationship + 
dbt_models: Optional[list[DbtModel]] = Field( + default=None, description="" + ) # relationship + dbt_model_columns: Optional[list[DbtModelColumn]] = Field( + default=None, description="" + ) # relationship + + attributes: "DbtTest.Attributes" = Field( + default_factory=lambda: DbtTest.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .dbt_model import DbtModel # noqa +from .dbt_model_column import DbtModelColumn # noqa +from .dbt_source import DbtSource # noqa +from .s_q_l import SQL # noqa diff --git a/pyatlan/model/assets/asset57.py b/pyatlan/model/assets/dynamo_d_b.py similarity index 89% rename from pyatlan/model/assets/asset57.py rename to pyatlan/model/assets/dynamo_d_b.py index 3c59a1028..4c22e08e1 100644 --- a/pyatlan/model/assets/asset57.py +++ b/pyatlan/model/assets/dynamo_d_b.py @@ -6,18 +6,18 @@ from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.enums import DynamoDBStatus from pyatlan.model.fields.atlan_fields import KeywordField, NumericField -from .asset24 import NoSQL +from .no_s_q_l import NoSQL class DynamoDB(NoSQL): """Description""" - type_name: str = Field("DynamoDB", allow_mutation=False) + type_name: str = Field(default="DynamoDB", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -139,19 +139,15 @@ def dynamo_d_b_write_capacity_units( class Attributes(NoSQL.Attributes): dynamo_d_b_status: Optional[DynamoDBStatus] = Field( - None, description="", alias="dynamoDBStatus" - ) - dynamo_d_b_partition_key: Optional[str] = Field( - None, description="", alias="dynamoDBPartitionKey" - ) - dynamo_d_b_sort_key: Optional[str] = Field( - None, description="", alias="dynamoDBSortKey" + default=None, description="" ) + dynamo_d_b_partition_key: Optional[str] = Field(default=None, description="") + dynamo_d_b_sort_key: Optional[str] = Field(default=None, description="") dynamo_d_b_read_capacity_units: Optional[int] = Field( - None, description="", alias="dynamoDBReadCapacityUnits" + default=None, description="" ) dynamo_d_b_write_capacity_units: Optional[int] = Field( - None, description="", alias="dynamoDBWriteCapacityUnits" + default=None, description="" ) attributes: "DynamoDB.Attributes" = Field( @@ -159,6 +155,3 @@ class Attributes(NoSQL.Attributes): description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) - - -DynamoDB.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/dynamo_d_b_global_secondary_index.py b/pyatlan/model/assets/dynamo_d_b_global_secondary_index.py new file mode 100644 index 000000000..2a846a544 --- /dev/null +++ b/pyatlan/model/assets/dynamo_d_b_global_secondary_index.py @@ -0,0 +1,63 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import RelationField + +from .dynamo_d_b_secondary_index import DynamoDBSecondaryIndex + + +class DynamoDBGlobalSecondaryIndex(DynamoDBSecondaryIndex): + """Description""" + + type_name: str = Field(default="DynamoDBGlobalSecondaryIndex", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "DynamoDBGlobalSecondaryIndex": + raise ValueError("must be DynamoDBGlobalSecondaryIndex") + return v + + def __setattr__(self, name, value): + if name in DynamoDBGlobalSecondaryIndex._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + DYNAMO_DB_TABLE: ClassVar[RelationField] = RelationField("dynamoDBTable") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "dynamo_dbtable", + ] + + @property + def dynamo_dbtable(self) -> Optional[DynamoDBTable]: + return None if self.attributes is None else self.attributes.dynamo_dbtable + + @dynamo_dbtable.setter + def dynamo_dbtable(self, dynamo_dbtable: Optional[DynamoDBTable]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dynamo_dbtable = dynamo_dbtable + + class Attributes(DynamoDBSecondaryIndex.Attributes): + dynamo_dbtable: Optional[DynamoDBTable] = Field( + default=None, description="" + ) # relationship + + attributes: "DynamoDBGlobalSecondaryIndex.Attributes" = Field( + default_factory=lambda: DynamoDBGlobalSecondaryIndex.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .dynamo_dbtable import DynamoDBTable # noqa diff --git a/pyatlan/model/assets/dynamo_d_b_local_secondary_index.py b/pyatlan/model/assets/dynamo_d_b_local_secondary_index.py new file mode 100644 index 000000000..06c2d594e --- /dev/null +++ b/pyatlan/model/assets/dynamo_d_b_local_secondary_index.py @@ -0,0 +1,63 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import RelationField + +from .dynamo_d_b_secondary_index import DynamoDBSecondaryIndex + + +class DynamoDBLocalSecondaryIndex(DynamoDBSecondaryIndex): + """Description""" + + type_name: str = Field(default="DynamoDBLocalSecondaryIndex", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "DynamoDBLocalSecondaryIndex": + raise ValueError("must be DynamoDBLocalSecondaryIndex") + return v + + def __setattr__(self, name, value): + if name in DynamoDBLocalSecondaryIndex._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + DYNAMO_DB_TABLE: ClassVar[RelationField] = RelationField("dynamoDBTable") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "dynamo_dbtable", + ] + + @property + def dynamo_dbtable(self) -> Optional[DynamoDBTable]: + return None if self.attributes is None else self.attributes.dynamo_dbtable + + @dynamo_dbtable.setter + def dynamo_dbtable(self, dynamo_dbtable: Optional[DynamoDBTable]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dynamo_dbtable = dynamo_dbtable + + class Attributes(DynamoDBSecondaryIndex.Attributes): + dynamo_dbtable: Optional[DynamoDBTable] = Field( + default=None, description="" + ) # relationship + + attributes: "DynamoDBLocalSecondaryIndex.Attributes" = Field( + default_factory=lambda: DynamoDBLocalSecondaryIndex.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .dynamo_dbtable import DynamoDBTable # noqa diff --git a/pyatlan/model/assets/asset85.py b/pyatlan/model/assets/dynamo_d_b_secondary_index.py similarity index 86% rename from pyatlan/model/assets/asset85.py rename to pyatlan/model/assets/dynamo_d_b_secondary_index.py index 451cc9385..9929b5923 100644 --- a/pyatlan/model/assets/asset85.py +++ b/pyatlan/model/assets/dynamo_d_b_secondary_index.py @@ -7,7 +7,7 @@ from datetime import datetime from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.enums import DynamoDBSecondaryIndexProjectionType, DynamoDBStatus from pyatlan.model.fields.atlan_fields import ( @@ -18,13 +18,13 @@ TextField, ) -from .asset00 import Table +from .table import Table class DynamoDBSecondaryIndex(Table): """Description""" - type_name: str = Field("DynamoDBSecondaryIndex", allow_mutation=False) + type_name: str = Field(default="DynamoDBSecondaryIndex", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -674,93 +674,52 @@ def no_s_q_l_schema_definition(self, no_s_q_l_schema_definition: Optional[str]): class Attributes(Table.Attributes): dynamo_d_b_secondary_index_projection_type: Optional[ DynamoDBSecondaryIndexProjectionType - ] = Field(None, description="", alias="dynamoDBSecondaryIndexProjectionType") - column_count: Optional[int] = Field(None, description="", alias="columnCount") - row_count: Optional[int] = Field(None, description="", alias="rowCount") - size_bytes: Optional[int] = Field(None, description="", alias="sizeBytes") - alias: Optional[str] = Field(None, description="", alias="alias") - is_temporary: Optional[bool] = Field(None, description="", alias="isTemporary") - 
is_query_preview: Optional[bool] = Field( - None, description="", alias="isQueryPreview" - ) + ] = Field(default=None, description="") + column_count: Optional[int] = Field(default=None, description="") + row_count: Optional[int] = Field(default=None, description="") + size_bytes: Optional[int] = Field(default=None, description="") + alias: Optional[str] = Field(default=None, description="") + is_temporary: Optional[bool] = Field(default=None, description="") + is_query_preview: Optional[bool] = Field(default=None, description="") query_preview_config: Optional[dict[str, str]] = Field( - None, description="", alias="queryPreviewConfig" - ) - external_location: Optional[str] = Field( - None, description="", alias="externalLocation" - ) - external_location_region: Optional[str] = Field( - None, description="", alias="externalLocationRegion" - ) - external_location_format: Optional[str] = Field( - None, description="", alias="externalLocationFormat" - ) - is_partitioned: Optional[bool] = Field( - None, description="", alias="isPartitioned" - ) - partition_strategy: Optional[str] = Field( - None, description="", alias="partitionStrategy" - ) - partition_count: Optional[int] = Field( - None, description="", alias="partitionCount" - ) - partition_list: Optional[str] = Field( - None, description="", alias="partitionList" - ) - query_count: Optional[int] = Field(None, description="", alias="queryCount") - query_user_count: Optional[int] = Field( - None, description="", alias="queryUserCount" - ) - query_user_map: Optional[dict[str, int]] = Field( - None, description="", alias="queryUserMap" - ) - query_count_updated_at: Optional[datetime] = Field( - None, description="", alias="queryCountUpdatedAt" - ) - database_name: Optional[str] = Field(None, description="", alias="databaseName") - database_qualified_name: Optional[str] = Field( - None, description="", alias="databaseQualifiedName" - ) - schema_name: Optional[str] = Field(None, description="", alias="schemaName") - schema_qualified_name: Optional[str] = Field( - None, description="", alias="schemaQualifiedName" - ) - table_name: Optional[str] = Field(None, description="", alias="tableName") - table_qualified_name: Optional[str] = Field( - None, description="", alias="tableQualifiedName" - ) - view_name: Optional[str] = Field(None, description="", alias="viewName") - view_qualified_name: Optional[str] = Field( - None, description="", alias="viewQualifiedName" - ) - is_profiled: Optional[bool] = Field(None, description="", alias="isProfiled") - last_profiled_at: Optional[datetime] = Field( - None, description="", alias="lastProfiledAt" - ) + default=None, description="" + ) + external_location: Optional[str] = Field(default=None, description="") + external_location_region: Optional[str] = Field(default=None, description="") + external_location_format: Optional[str] = Field(default=None, description="") + is_partitioned: Optional[bool] = Field(default=None, description="") + partition_strategy: Optional[str] = Field(default=None, description="") + partition_count: Optional[int] = Field(default=None, description="") + partition_list: Optional[str] = Field(default=None, description="") + query_count: Optional[int] = Field(default=None, description="") + query_user_count: Optional[int] = Field(default=None, description="") + query_user_map: Optional[dict[str, int]] = Field(default=None, description="") + query_count_updated_at: Optional[datetime] = Field(default=None, description="") + database_name: Optional[str] = Field(default=None, 
description="") + database_qualified_name: Optional[str] = Field(default=None, description="") + schema_name: Optional[str] = Field(default=None, description="") + schema_qualified_name: Optional[str] = Field(default=None, description="") + table_name: Optional[str] = Field(default=None, description="") + table_qualified_name: Optional[str] = Field(default=None, description="") + view_name: Optional[str] = Field(default=None, description="") + view_qualified_name: Optional[str] = Field(default=None, description="") + is_profiled: Optional[bool] = Field(default=None, description="") + last_profiled_at: Optional[datetime] = Field(default=None, description="") dynamo_d_b_status: Optional[DynamoDBStatus] = Field( - None, description="", alias="dynamoDBStatus" - ) - dynamo_d_b_partition_key: Optional[str] = Field( - None, description="", alias="dynamoDBPartitionKey" - ) - dynamo_d_b_sort_key: Optional[str] = Field( - None, description="", alias="dynamoDBSortKey" + default=None, description="" ) + dynamo_d_b_partition_key: Optional[str] = Field(default=None, description="") + dynamo_d_b_sort_key: Optional[str] = Field(default=None, description="") dynamo_d_b_read_capacity_units: Optional[int] = Field( - None, description="", alias="dynamoDBReadCapacityUnits" + default=None, description="" ) dynamo_d_b_write_capacity_units: Optional[int] = Field( - None, description="", alias="dynamoDBWriteCapacityUnits" - ) - no_s_q_l_schema_definition: Optional[str] = Field( - None, description="", alias="noSQLSchemaDefinition" + default=None, description="" ) + no_s_q_l_schema_definition: Optional[str] = Field(default=None, description="") attributes: "DynamoDBSecondaryIndex.Attributes" = Field( default_factory=lambda: DynamoDBSecondaryIndex.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) - - -DynamoDBSecondaryIndex.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset86.py b/pyatlan/model/assets/dynamo_dbtable.py similarity index 77% rename from pyatlan/model/assets/asset86.py rename to pyatlan/model/assets/dynamo_dbtable.py index 16d3ff265..88facec93 100644 --- a/pyatlan/model/assets/asset86.py +++ b/pyatlan/model/assets/dynamo_dbtable.py @@ -7,7 +7,7 @@ from datetime import datetime from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.enums import DynamoDBStatus from pyatlan.model.fields.atlan_fields import ( @@ -19,14 +19,13 @@ TextField, ) -from .asset00 import Table -from .asset85 import DynamoDBSecondaryIndex +from .table import Table class DynamoDBTable(Table): """Description""" - type_name: str = Field("DynamoDBTable", allow_mutation=False) + type_name: str = Field(default="DynamoDBTable", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -745,101 +744,59 @@ def dynamo_d_b_global_secondary_indexes( ) class Attributes(Table.Attributes): - dynamo_dbtable_g_s_i_count: Optional[int] = Field( - None, description="", alias="dynamoDBTableGSICount" - ) - dynamo_dbtable_l_s_i_count: Optional[int] = Field( - None, description="", alias="dynamoDBTableLSICount" - ) - column_count: Optional[int] = Field(None, description="", alias="columnCount") - row_count: Optional[int] = Field(None, description="", alias="rowCount") - size_bytes: Optional[int] = Field(None, description="", alias="sizeBytes") - alias: Optional[str] = Field(None, description="", alias="alias") - is_temporary: Optional[bool] = Field(None, description="", alias="isTemporary") - is_query_preview: Optional[bool] = Field( - None, description="", alias="isQueryPreview" - ) + dynamo_dbtable_g_s_i_count: Optional[int] = Field(default=None, description="") + dynamo_dbtable_l_s_i_count: Optional[int] = Field(default=None, description="") + column_count: Optional[int] = Field(default=None, description="") + row_count: Optional[int] = Field(default=None, description="") + size_bytes: Optional[int] = Field(default=None, description="") + alias: Optional[str] = Field(default=None, description="") + is_temporary: Optional[bool] = Field(default=None, description="") + is_query_preview: Optional[bool] = Field(default=None, description="") query_preview_config: Optional[dict[str, str]] = Field( - None, description="", alias="queryPreviewConfig" - ) - external_location: Optional[str] = Field( - None, description="", alias="externalLocation" - ) - external_location_region: Optional[str] = Field( - None, description="", alias="externalLocationRegion" - ) - external_location_format: Optional[str] = Field( - None, description="", alias="externalLocationFormat" - ) - is_partitioned: Optional[bool] = Field( - None, description="", alias="isPartitioned" - ) - partition_strategy: Optional[str] = Field( - None, description="", alias="partitionStrategy" - ) - partition_count: Optional[int] = Field( - None, description="", alias="partitionCount" - ) - partition_list: Optional[str] = Field( - None, description="", alias="partitionList" - ) - query_count: Optional[int] = Field(None, description="", alias="queryCount") - query_user_count: Optional[int] = Field( - None, description="", alias="queryUserCount" - ) - query_user_map: Optional[dict[str, int]] = Field( - None, description="", 
alias="queryUserMap" - ) - query_count_updated_at: Optional[datetime] = Field( - None, description="", alias="queryCountUpdatedAt" - ) - database_name: Optional[str] = Field(None, description="", alias="databaseName") - database_qualified_name: Optional[str] = Field( - None, description="", alias="databaseQualifiedName" - ) - schema_name: Optional[str] = Field(None, description="", alias="schemaName") - schema_qualified_name: Optional[str] = Field( - None, description="", alias="schemaQualifiedName" - ) - table_name: Optional[str] = Field(None, description="", alias="tableName") - table_qualified_name: Optional[str] = Field( - None, description="", alias="tableQualifiedName" - ) - view_name: Optional[str] = Field(None, description="", alias="viewName") - view_qualified_name: Optional[str] = Field( - None, description="", alias="viewQualifiedName" - ) - is_profiled: Optional[bool] = Field(None, description="", alias="isProfiled") - last_profiled_at: Optional[datetime] = Field( - None, description="", alias="lastProfiledAt" - ) + default=None, description="" + ) + external_location: Optional[str] = Field(default=None, description="") + external_location_region: Optional[str] = Field(default=None, description="") + external_location_format: Optional[str] = Field(default=None, description="") + is_partitioned: Optional[bool] = Field(default=None, description="") + partition_strategy: Optional[str] = Field(default=None, description="") + partition_count: Optional[int] = Field(default=None, description="") + partition_list: Optional[str] = Field(default=None, description="") + query_count: Optional[int] = Field(default=None, description="") + query_user_count: Optional[int] = Field(default=None, description="") + query_user_map: Optional[dict[str, int]] = Field(default=None, description="") + query_count_updated_at: Optional[datetime] = Field(default=None, description="") + database_name: Optional[str] = Field(default=None, description="") + database_qualified_name: Optional[str] = Field(default=None, description="") + schema_name: Optional[str] = Field(default=None, description="") + schema_qualified_name: Optional[str] = Field(default=None, description="") + table_name: Optional[str] = Field(default=None, description="") + table_qualified_name: Optional[str] = Field(default=None, description="") + view_name: Optional[str] = Field(default=None, description="") + view_qualified_name: Optional[str] = Field(default=None, description="") + is_profiled: Optional[bool] = Field(default=None, description="") + last_profiled_at: Optional[datetime] = Field(default=None, description="") dynamo_d_b_status: Optional[DynamoDBStatus] = Field( - None, description="", alias="dynamoDBStatus" - ) - dynamo_d_b_partition_key: Optional[str] = Field( - None, description="", alias="dynamoDBPartitionKey" - ) - dynamo_d_b_sort_key: Optional[str] = Field( - None, description="", alias="dynamoDBSortKey" + default=None, description="" ) + dynamo_d_b_partition_key: Optional[str] = Field(default=None, description="") + dynamo_d_b_sort_key: Optional[str] = Field(default=None, description="") dynamo_d_b_read_capacity_units: Optional[int] = Field( - None, description="", alias="dynamoDBReadCapacityUnits" + default=None, description="" ) dynamo_d_b_write_capacity_units: Optional[int] = Field( - None, description="", alias="dynamoDBWriteCapacityUnits" - ) - no_s_q_l_schema_definition: Optional[str] = Field( - None, description="", alias="noSQLSchemaDefinition" + default=None, description="" ) + no_s_q_l_schema_definition: 
Optional[str] = Field(default=None, description="") dynamo_d_b_local_secondary_indexes: Optional[ list[DynamoDBLocalSecondaryIndex] ] = Field( - None, description="", alias="dynamoDBLocalSecondaryIndexes" + default=None, description="" ) # relationship dynamo_d_b_global_secondary_indexes: Optional[ list[DynamoDBGlobalSecondaryIndex] ] = Field( - None, description="", alias="dynamoDBGlobalSecondaryIndexes" + default=None, description="" ) # relationship attributes: "DynamoDBTable.Attributes" = Field( @@ -849,104 +806,5 @@ class Attributes(Table.Attributes): ) -class DynamoDBLocalSecondaryIndex(DynamoDBSecondaryIndex): - """Description""" - - type_name: str = Field("DynamoDBLocalSecondaryIndex", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "DynamoDBLocalSecondaryIndex": - raise ValueError("must be DynamoDBLocalSecondaryIndex") - return v - - def __setattr__(self, name, value): - if name in DynamoDBLocalSecondaryIndex._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - DYNAMO_DB_TABLE: ClassVar[RelationField] = RelationField("dynamoDBTable") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "dynamo_dbtable", - ] - - @property - def dynamo_dbtable(self) -> Optional[DynamoDBTable]: - return None if self.attributes is None else self.attributes.dynamo_dbtable - - @dynamo_dbtable.setter - def dynamo_dbtable(self, dynamo_dbtable: Optional[DynamoDBTable]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dynamo_dbtable = dynamo_dbtable - - class Attributes(DynamoDBSecondaryIndex.Attributes): - dynamo_dbtable: Optional[DynamoDBTable] = Field( - None, description="", alias="dynamoDBTable" - ) # relationship - - attributes: "DynamoDBLocalSecondaryIndex.Attributes" = Field( - default_factory=lambda: DynamoDBLocalSecondaryIndex.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -class DynamoDBGlobalSecondaryIndex(DynamoDBSecondaryIndex): - """Description""" - - type_name: str = Field("DynamoDBGlobalSecondaryIndex", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "DynamoDBGlobalSecondaryIndex": - raise ValueError("must be DynamoDBGlobalSecondaryIndex") - return v - - def __setattr__(self, name, value): - if name in DynamoDBGlobalSecondaryIndex._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - DYNAMO_DB_TABLE: ClassVar[RelationField] = RelationField("dynamoDBTable") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "dynamo_dbtable", - ] - - @property - def dynamo_dbtable(self) -> Optional[DynamoDBTable]: - return None if self.attributes is None else self.attributes.dynamo_dbtable - - @dynamo_dbtable.setter - def dynamo_dbtable(self, dynamo_dbtable: Optional[DynamoDBTable]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.dynamo_dbtable = dynamo_dbtable - - class Attributes(DynamoDBSecondaryIndex.Attributes): - dynamo_dbtable: Optional[DynamoDBTable] = Field( - None, description="", alias="dynamoDBTable" - ) # relationship - - attributes: "DynamoDBGlobalSecondaryIndex.Attributes" = Field( - default_factory=lambda: DynamoDBGlobalSecondaryIndex.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -DynamoDBTable.Attributes.update_forward_refs() - - -DynamoDBLocalSecondaryIndex.Attributes.update_forward_refs() - - -DynamoDBGlobalSecondaryIndex.Attributes.update_forward_refs() +from .dynamo_d_b_global_secondary_index import DynamoDBGlobalSecondaryIndex # noqa +from .dynamo_d_b_local_secondary_index import DynamoDBLocalSecondaryIndex # noqa diff --git a/pyatlan/model/assets/asset23.py b/pyatlan/model/assets/event_store.py similarity index 77% rename from pyatlan/model/assets/asset23.py rename to pyatlan/model/assets/event_store.py index 06146f9c8..ed91b7456 100644 --- a/pyatlan/model/assets/asset23.py +++ b/pyatlan/model/assets/event_store.py @@ -6,15 +6,15 @@ from typing import ClassVar -from pydantic import Field, validator +from pydantic.v1 import Field, validator -from .asset00 import Catalog +from .catalog import Catalog class EventStore(Catalog): """Description""" - type_name: str = Field("EventStore", allow_mutation=False) + type_name: str = Field(default="EventStore", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -28,6 +28,3 @@ def __setattr__(self, name, value): super().__setattr__(name, value) _convenience_properties: ClassVar[list[str]] = [] - - -EventStore.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/file.py b/pyatlan/model/assets/file.py new file mode 100644 index 000000000..f4167e825 --- /dev/null +++ b/pyatlan/model/assets/file.py @@ -0,0 +1,129 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.enums import FileType +from pyatlan.model.fields.atlan_fields import KeywordField, RelationField +from pyatlan.utils import init_guid, validate_required_fields + +from .resource import Resource + + +class File(Resource): + """Description""" + + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, *, name: str, connection_qualified_name: str, file_type: FileType + ) -> File: + return File( + attributes=File.Attributes.create( + name=name, + connection_qualified_name=connection_qualified_name, + file_type=file_type, + ) + ) + + type_name: str = Field(default="File", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "File": + raise ValueError("must be File") + return v + + def __setattr__(self, name, value): + if name in File._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + FILE_TYPE: ClassVar[KeywordField] = KeywordField("fileType", "fileType") + """ + Type (extension) of the file. + """ + FILE_PATH: ClassVar[KeywordField] = KeywordField("filePath", "filePath") + """ + URL giving the online location where the file can be accessed. 
+ """ + + FILE_ASSETS: ClassVar[RelationField] = RelationField("fileAssets") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "file_type", + "file_path", + "file_assets", + ] + + @property + def file_type(self) -> Optional[FileType]: + return None if self.attributes is None else self.attributes.file_type + + @file_type.setter + def file_type(self, file_type: Optional[FileType]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.file_type = file_type + + @property + def file_path(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.file_path + + @file_path.setter + def file_path(self, file_path: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.file_path = file_path + + @property + def file_assets(self) -> Optional[Asset]: + return None if self.attributes is None else self.attributes.file_assets + + @file_assets.setter + def file_assets(self, file_assets: Optional[Asset]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.file_assets = file_assets + + class Attributes(Resource.Attributes): + file_type: Optional[FileType] = Field(default=None, description="") + file_path: Optional[str] = Field(default=None, description="") + file_assets: Optional[Asset] = Field( + default=None, description="" + ) # relationship + + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, *, name: str, connection_qualified_name: str, file_type: FileType + ) -> File.Attributes: + validate_required_fields( + ["name", "connection_qualified_name", "file_type"], + [name, connection_qualified_name, file_type], + ) + return File.Attributes( + name=name, + qualified_name=f"{connection_qualified_name}/{name}", + connection_qualified_name=connection_qualified_name, + file_type=file_type, + ) + + attributes: "File.Attributes" = Field( + default_factory=lambda: File.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .asset import Asset # noqa diff --git a/pyatlan/model/assets/folder.py b/pyatlan/model/assets/folder.py new file mode 100644 index 000000000..2ec3f0b31 --- /dev/null +++ b/pyatlan/model/assets/folder.py @@ -0,0 +1,108 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordTextField, RelationField + +from .namespace import Namespace + + +class Folder(Namespace): + """Description""" + + type_name: str = Field(default="Folder", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "Folder": + raise ValueError("must be Folder") + return v + + def __setattr__(self, name, value): + if name in Folder._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + PARENT_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "parentQualifiedName", "parentQualifiedName", "parentQualifiedName.text" + ) + """ + Unique name of the parent folder or collection in which this folder exists. 
+ """ + COLLECTION_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "collectionQualifiedName", + "collectionQualifiedName", + "collectionQualifiedName.text", + ) + """ + Unique name of the collection in which this folder exists. + """ + + PARENT: ClassVar[RelationField] = RelationField("parent") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "parent_qualified_name", + "collection_qualified_name", + "parent", + ] + + @property + def parent_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.parent_qualified_name + ) + + @parent_qualified_name.setter + def parent_qualified_name(self, parent_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.parent_qualified_name = parent_qualified_name + + @property + def collection_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.collection_qualified_name + ) + + @collection_qualified_name.setter + def collection_qualified_name(self, collection_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.collection_qualified_name = collection_qualified_name + + @property + def parent(self) -> Optional[Namespace]: + return None if self.attributes is None else self.attributes.parent + + @parent.setter + def parent(self, parent: Optional[Namespace]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.parent = parent + + class Attributes(Namespace.Attributes): + parent_qualified_name: Optional[str] = Field(default=None, description="") + collection_qualified_name: Optional[str] = Field(default=None, description="") + parent: Optional[Namespace] = Field( + default=None, description="" + ) # relationship + + attributes: "Folder.Attributes" = Field( + default_factory=lambda: Folder.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .namespace import Namespace # noqa diff --git a/pyatlan/model/assets/function.py b/pyatlan/model/assets/function.py new file mode 100644 index 000000000..3cf32ec4a --- /dev/null +++ b/pyatlan/model/assets/function.py @@ -0,0 +1,208 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import BooleanField, KeywordField, RelationField + +from .s_q_l import SQL + + +class Function(SQL): + """Description""" + + type_name: str = Field(default="Function", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "Function": + raise ValueError("must be Function") + return v + + def __setattr__(self, name, value): + if name in Function._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + FUNCTION_DEFINITION: ClassVar[KeywordField] = KeywordField( + "functionDefinition", "functionDefinition" + ) + """ + Code or set of statements that determine the output of the function. + """ + FUNCTION_RETURN_TYPE: ClassVar[KeywordField] = KeywordField( + "functionReturnType", "functionReturnType" + ) + """ + Data type of the value returned by the function. 
+ """ + FUNCTION_ARGUMENTS: ClassVar[KeywordField] = KeywordField( + "functionArguments", "functionArguments" + ) + """ + Arguments that are passed in to the function. + """ + FUNCTION_LANGUAGE: ClassVar[KeywordField] = KeywordField( + "functionLanguage", "functionLanguage" + ) + """ + Programming language in which the function is written. + """ + FUNCTION_TYPE: ClassVar[KeywordField] = KeywordField("functionType", "functionType") + """ + Type of function. + """ + FUNCTION_IS_EXTERNAL: ClassVar[BooleanField] = BooleanField( + "functionIsExternal", "functionIsExternal" + ) + """ + Whether the function is stored or executed externally (true) or internally (false). + """ + FUNCTION_IS_SECURE: ClassVar[BooleanField] = BooleanField( + "functionIsSecure", "functionIsSecure" + ) + """ + Whether sensitive information of the function is omitted for unauthorized users (true) or not (false). + """ + FUNCTION_IS_MEMOIZABLE: ClassVar[BooleanField] = BooleanField( + "functionIsMemoizable", "functionIsMemoizable" + ) + """ + Whether the function must re-compute if there are no underlying changes in the values (false) or not (true). + """ + + FUNCTION_SCHEMA: ClassVar[RelationField] = RelationField("functionSchema") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "function_definition", + "function_return_type", + "function_arguments", + "function_language", + "function_type", + "function_is_external", + "function_is_secure", + "function_is_memoizable", + "function_schema", + ] + + @property + def function_definition(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.function_definition + + @function_definition.setter + def function_definition(self, function_definition: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.function_definition = function_definition + + @property + def function_return_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.function_return_type + + @function_return_type.setter + def function_return_type(self, function_return_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.function_return_type = function_return_type + + @property + def function_arguments(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.function_arguments + + @function_arguments.setter + def function_arguments(self, function_arguments: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.function_arguments = function_arguments + + @property + def function_language(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.function_language + + @function_language.setter + def function_language(self, function_language: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.function_language = function_language + + @property + def function_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.function_type + + @function_type.setter + def function_type(self, function_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.function_type = function_type + + @property + def function_is_external(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.function_is_external + + @function_is_external.setter + def function_is_external(self, 
function_is_external: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.function_is_external = function_is_external + + @property + def function_is_secure(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.function_is_secure + + @function_is_secure.setter + def function_is_secure(self, function_is_secure: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.function_is_secure = function_is_secure + + @property + def function_is_memoizable(self) -> Optional[bool]: + return ( + None if self.attributes is None else self.attributes.function_is_memoizable + ) + + @function_is_memoizable.setter + def function_is_memoizable(self, function_is_memoizable: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.function_is_memoizable = function_is_memoizable + + @property + def function_schema(self) -> Optional[Schema]: + return None if self.attributes is None else self.attributes.function_schema + + @function_schema.setter + def function_schema(self, function_schema: Optional[Schema]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.function_schema = function_schema + + class Attributes(SQL.Attributes): + function_definition: Optional[str] = Field(default=None, description="") + function_return_type: Optional[str] = Field(default=None, description="") + function_arguments: Optional[set[str]] = Field(default=None, description="") + function_language: Optional[str] = Field(default=None, description="") + function_type: Optional[str] = Field(default=None, description="") + function_is_external: Optional[bool] = Field(default=None, description="") + function_is_secure: Optional[bool] = Field(default=None, description="") + function_is_memoizable: Optional[bool] = Field(default=None, description="") + function_schema: Optional[Schema] = Field( + default=None, description="" + ) # relationship + + attributes: "Function.Attributes" = Field( + default_factory=lambda: Function.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .schema import Schema # noqa diff --git a/pyatlan/model/assets/asset37.py b/pyatlan/model/assets/g_c_s.py similarity index 86% rename from pyatlan/model/assets/asset37.py rename to pyatlan/model/assets/g_c_s.py index 902767c84..45a9cc50d 100644 --- a/pyatlan/model/assets/asset37.py +++ b/pyatlan/model/assets/g_c_s.py @@ -6,7 +6,7 @@ from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.fields.atlan_fields import ( BooleanField, @@ -17,14 +17,13 @@ ) from pyatlan.model.structs import GoogleLabel, GoogleTag -from .asset00 import AirflowTask, Process -from .asset31 import Google +from .google import Google class GCS(Google): """Description""" - type_name: str = Field("GCS", allow_mutation=False) + type_name: str = Field(default="GCS", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -357,57 +356,31 @@ def output_from_processes(self, output_from_processes: Optional[list[Process]]): self.attributes.output_from_processes = output_from_processes class Attributes(Google.Attributes): - gcs_storage_class: Optional[str] = Field( - None, description="", alias="gcsStorageClass" - ) - gcs_encryption_type: Optional[str] = Field( - None, description="", alias="gcsEncryptionType" - ) - gcs_e_tag: Optional[str] = Field(None, description="", alias="gcsETag") - gcs_requester_pays: Optional[bool] = Field( - None, description="", alias="gcsRequesterPays" - ) - gcs_access_control: Optional[str] = Field( - None, description="", alias="gcsAccessControl" - ) - gcs_meta_generation_id: Optional[int] = Field( - None, description="", alias="gcsMetaGenerationId" - ) - google_service: Optional[str] = Field( - None, description="", alias="googleService" - ) - google_project_name: Optional[str] = Field( - None, description="", alias="googleProjectName" - ) - google_project_id: Optional[str] = Field( - None, description="", alias="googleProjectId" - ) - google_project_number: Optional[int] = Field( - None, description="", alias="googleProjectNumber" - ) - google_location: Optional[str] = Field( - None, description="", alias="googleLocation" - ) - google_location_type: Optional[str] = Field( - None, description="", alias="googleLocationType" - ) - google_labels: Optional[list[GoogleLabel]] = Field( - None, description="", alias="googleLabels" - ) - google_tags: Optional[list[GoogleTag]] = Field( - None, description="", alias="googleTags" - ) + gcs_storage_class: Optional[str] = Field(default=None, description="") + gcs_encryption_type: Optional[str] = Field(default=None, description="") + gcs_e_tag: Optional[str] = Field(default=None, description="") + gcs_requester_pays: Optional[bool] = Field(default=None, description="") + gcs_access_control: Optional[str] = Field(default=None, description="") + gcs_meta_generation_id: Optional[int] = Field(default=None, description="") + google_service: Optional[str] = Field(default=None, description="") + google_project_name: Optional[str] = Field(default=None, description="") + google_project_id: Optional[str] = Field(default=None, description="") + google_project_number: Optional[int] = Field(default=None, description="") + google_location: Optional[str] = Field(default=None, description="") + google_location_type: Optional[str] = Field(default=None, description="") + google_labels: Optional[list[GoogleLabel]] = Field(default=None, description="") + 
google_tags: Optional[list[GoogleTag]] = Field(default=None, description="") input_to_processes: Optional[list[Process]] = Field( - None, description="", alias="inputToProcesses" + default=None, description="" ) # relationship output_from_airflow_tasks: Optional[list[AirflowTask]] = Field( - None, description="", alias="outputFromAirflowTasks" + default=None, description="" ) # relationship input_to_airflow_tasks: Optional[list[AirflowTask]] = Field( - None, description="", alias="inputToAirflowTasks" + default=None, description="" ) # relationship output_from_processes: Optional[list[Process]] = Field( - None, description="", alias="outputFromProcesses" + default=None, description="" ) # relationship attributes: "GCS.Attributes" = Field( @@ -417,4 +390,5 @@ class Attributes(Google.Attributes): ) -GCS.Attributes.update_forward_refs() +from .airflow_task import AirflowTask # noqa +from .process import Process # noqa diff --git a/pyatlan/model/assets/g_c_s_bucket.py b/pyatlan/model/assets/g_c_s_bucket.py new file mode 100644 index 000000000..dea52c689 --- /dev/null +++ b/pyatlan/model/assets/g_c_s_bucket.py @@ -0,0 +1,273 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from datetime import datetime +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.enums import AtlanConnectorType +from pyatlan.model.fields.atlan_fields import ( + BooleanField, + NumericField, + RelationField, + TextField, +) +from pyatlan.utils import init_guid, validate_required_fields + +from .g_c_s import GCS + + +class GCSBucket(GCS): + """Description""" + + @classmethod + # @validate_arguments() + @init_guid + def create(cls, *, name: str, connection_qualified_name: str) -> GCSBucket: + validate_required_fields( + ["name", "connection_qualified_name"], [name, connection_qualified_name] + ) + attributes = GCSBucket.Attributes.create( + name=name, connection_qualified_name=connection_qualified_name + ) + return cls(attributes=attributes) + + type_name: str = Field(default="GCSBucket", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "GCSBucket": + raise ValueError("must be GCSBucket") + return v + + def __setattr__(self, name, value): + if name in GCSBucket._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + GCS_OBJECT_COUNT: ClassVar[NumericField] = NumericField( + "gcsObjectCount", "gcsObjectCount" + ) + """ + Number of objects within the bucket. + """ + GCS_BUCKET_VERSIONING_ENABLED: ClassVar[BooleanField] = BooleanField( + "gcsBucketVersioningEnabled", "gcsBucketVersioningEnabled" + ) + """ + Whether versioning is enabled on the bucket (true) or not (false). + """ + GCS_BUCKET_RETENTION_LOCKED: ClassVar[BooleanField] = BooleanField( + "gcsBucketRetentionLocked", "gcsBucketRetentionLocked" + ) + """ + Whether retention is locked for this bucket (true) or not (false). + """ + GCS_BUCKET_RETENTION_PERIOD: ClassVar[NumericField] = NumericField( + "gcsBucketRetentionPeriod", "gcsBucketRetentionPeriod" + ) + """ + Retention period for objects in this bucket. + """ + GCS_BUCKET_RETENTION_EFFECTIVE_TIME: ClassVar[NumericField] = NumericField( + "gcsBucketRetentionEffectiveTime", "gcsBucketRetentionEffectiveTime" + ) + """ + Effective time for retention of objects in this bucket. 
+ """ + GCS_BUCKET_LIFECYCLE_RULES: ClassVar[TextField] = TextField( + "gcsBucketLifecycleRules", "gcsBucketLifecycleRules" + ) + """ + Lifecycle rules for this bucket. + """ + GCS_BUCKET_RETENTION_POLICY: ClassVar[TextField] = TextField( + "gcsBucketRetentionPolicy", "gcsBucketRetentionPolicy" + ) + """ + Retention policy for this bucket. + """ + + GCS_OBJECTS: ClassVar[RelationField] = RelationField("gcsObjects") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "gcs_object_count", + "gcs_bucket_versioning_enabled", + "gcs_bucket_retention_locked", + "gcs_bucket_retention_period", + "gcs_bucket_retention_effective_time", + "gcs_bucket_lifecycle_rules", + "gcs_bucket_retention_policy", + "gcs_objects", + ] + + @property + def gcs_object_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.gcs_object_count + + @gcs_object_count.setter + def gcs_object_count(self, gcs_object_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.gcs_object_count = gcs_object_count + + @property + def gcs_bucket_versioning_enabled(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.gcs_bucket_versioning_enabled + ) + + @gcs_bucket_versioning_enabled.setter + def gcs_bucket_versioning_enabled( + self, gcs_bucket_versioning_enabled: Optional[bool] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.gcs_bucket_versioning_enabled = gcs_bucket_versioning_enabled + + @property + def gcs_bucket_retention_locked(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.gcs_bucket_retention_locked + ) + + @gcs_bucket_retention_locked.setter + def gcs_bucket_retention_locked(self, gcs_bucket_retention_locked: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.gcs_bucket_retention_locked = gcs_bucket_retention_locked + + @property + def gcs_bucket_retention_period(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.gcs_bucket_retention_period + ) + + @gcs_bucket_retention_period.setter + def gcs_bucket_retention_period(self, gcs_bucket_retention_period: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.gcs_bucket_retention_period = gcs_bucket_retention_period + + @property + def gcs_bucket_retention_effective_time(self) -> Optional[datetime]: + return ( + None + if self.attributes is None + else self.attributes.gcs_bucket_retention_effective_time + ) + + @gcs_bucket_retention_effective_time.setter + def gcs_bucket_retention_effective_time( + self, gcs_bucket_retention_effective_time: Optional[datetime] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.gcs_bucket_retention_effective_time = ( + gcs_bucket_retention_effective_time + ) + + @property + def gcs_bucket_lifecycle_rules(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.gcs_bucket_lifecycle_rules + ) + + @gcs_bucket_lifecycle_rules.setter + def gcs_bucket_lifecycle_rules(self, gcs_bucket_lifecycle_rules: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.gcs_bucket_lifecycle_rules = gcs_bucket_lifecycle_rules + + @property + def gcs_bucket_retention_policy(self) -> Optional[str]: + return ( + None + if self.attributes is None + else 
self.attributes.gcs_bucket_retention_policy + ) + + @gcs_bucket_retention_policy.setter + def gcs_bucket_retention_policy(self, gcs_bucket_retention_policy: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.gcs_bucket_retention_policy = gcs_bucket_retention_policy + + @property + def gcs_objects(self) -> Optional[list[GCSObject]]: + return None if self.attributes is None else self.attributes.gcs_objects + + @gcs_objects.setter + def gcs_objects(self, gcs_objects: Optional[list[GCSObject]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.gcs_objects = gcs_objects + + class Attributes(GCS.Attributes): + gcs_object_count: Optional[int] = Field(default=None, description="") + gcs_bucket_versioning_enabled: Optional[bool] = Field( + default=None, description="" + ) + gcs_bucket_retention_locked: Optional[bool] = Field( + default=None, description="" + ) + gcs_bucket_retention_period: Optional[int] = Field(default=None, description="") + gcs_bucket_retention_effective_time: Optional[datetime] = Field( + default=None, description="" + ) + gcs_bucket_lifecycle_rules: Optional[str] = Field(default=None, description="") + gcs_bucket_retention_policy: Optional[str] = Field(default=None, description="") + gcs_objects: Optional[list[GCSObject]] = Field( + default=None, description="" + ) # relationship + + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, *, name: str, connection_qualified_name: str + ) -> GCSBucket.Attributes: + validate_required_fields( + ["name", "connection_qualified_name"], [name, connection_qualified_name] + ) + + # Split the connection_qualified_name to extract necessary information + fields = connection_qualified_name.split("/") + if len(fields) != 3: + raise ValueError("Invalid connection_qualified_name") + + try: + connector_type = AtlanConnectorType(fields[1]) # type:ignore + except ValueError as e: + raise ValueError("Invalid connection_qualified_name") from e + + return GCSBucket.Attributes( + name=name, + qualified_name=f"{connection_qualified_name}/{name}", + connection_qualified_name=connection_qualified_name, + connector_name=connector_type.value, + ) + + attributes: "GCSBucket.Attributes" = Field( + default_factory=lambda: GCSBucket.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .g_c_s_object import GCSObject # noqa diff --git a/pyatlan/model/assets/asset64.py b/pyatlan/model/assets/g_c_s_object.py similarity index 56% rename from pyatlan/model/assets/asset64.py rename to pyatlan/model/assets/g_c_s_object.py index 742711873..58f918a0e 100644 --- a/pyatlan/model/assets/asset64.py +++ b/pyatlan/model/assets/g_c_s_object.py @@ -7,20 +7,18 @@ from datetime import datetime from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.enums import AtlanConnectorType from pyatlan.model.fields.atlan_fields import ( - BooleanField, KeywordField, KeywordTextField, NumericField, RelationField, - TextField, ) from pyatlan.utils import init_guid, validate_required_fields -from .asset37 import GCS +from .g_c_s import GCS class GCSObject(GCS): @@ -38,7 +36,7 @@ def create(cls, *, name: str, gcs_bucket_qualified_name: str) -> GCSObject: ) return cls(attributes=attributes) - type_name: str = Field("GCSObject", allow_mutation=False) + type_name: str = Field(default="GCSObject", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -375,53 +373,29 @@ def gcs_bucket(self, gcs_bucket: Optional[GCSBucket]): self.attributes.gcs_bucket = gcs_bucket class Attributes(GCS.Attributes): - gcs_bucket_name: Optional[str] = Field( - None, description="", alias="gcsBucketName" - ) - gcs_bucket_qualified_name: Optional[str] = Field( - None, description="", alias="gcsBucketQualifiedName" - ) - gcs_object_size: Optional[int] = Field( - None, description="", alias="gcsObjectSize" - ) - gcs_object_key: Optional[str] = Field( - None, description="", alias="gcsObjectKey" - ) - gcs_object_media_link: Optional[str] = Field( - None, description="", alias="gcsObjectMediaLink" - ) - gcs_object_hold_type: Optional[str] = Field( - None, description="", alias="gcsObjectHoldType" - ) - gcs_object_generation_id: Optional[int] = Field( - None, description="", alias="gcsObjectGenerationId" - ) - gcs_object_c_r_c32_c_hash: Optional[str] = Field( - None, description="", alias="gcsObjectCRC32CHash" - ) - gcs_object_m_d5_hash: Optional[str] = Field( - None, description="", alias="gcsObjectMD5Hash" - ) + gcs_bucket_name: Optional[str] = Field(default=None, description="") + gcs_bucket_qualified_name: Optional[str] = Field(default=None, description="") + gcs_object_size: Optional[int] = Field(default=None, description="") + gcs_object_key: Optional[str] = Field(default=None, description="") + gcs_object_media_link: Optional[str] = Field(default=None, description="") + gcs_object_hold_type: Optional[str] = Field(default=None, description="") + gcs_object_generation_id: Optional[int] = Field(default=None, description="") + gcs_object_c_r_c32_c_hash: Optional[str] = Field(default=None, description="") + gcs_object_m_d5_hash: Optional[str] = Field(default=None, description="") gcs_object_data_last_modified_time: Optional[datetime] = Field( - None, description="", alias="gcsObjectDataLastModifiedTime" - ) - gcs_object_content_type: Optional[str] = Field( - None, description="", alias="gcsObjectContentType" - ) - gcs_object_content_encoding: Optional[str] = Field( - None, description="", alias="gcsObjectContentEncoding" + default=None, description="" ) + gcs_object_content_type: Optional[str] = Field(default=None, description="") + gcs_object_content_encoding: Optional[str] = Field(default=None, 
description="") gcs_object_content_disposition: Optional[str] = Field( - None, description="", alias="gcsObjectContentDisposition" - ) - gcs_object_content_language: Optional[str] = Field( - None, description="", alias="gcsObjectContentLanguage" + default=None, description="" ) + gcs_object_content_language: Optional[str] = Field(default=None, description="") gcs_object_retention_expiration_date: Optional[datetime] = Field( - None, description="", alias="gcsObjectRetentionExpirationDate" + default=None, description="" ) gcs_bucket: Optional[GCSBucket] = Field( - None, description="", alias="gcsBucket" + default=None, description="" ) # relationship @classmethod @@ -460,264 +434,4 @@ def create( ) -class GCSBucket(GCS): - """Description""" - - @classmethod - # @validate_arguments() - @init_guid - def create(cls, *, name: str, connection_qualified_name: str) -> GCSBucket: - validate_required_fields( - ["name", "connection_qualified_name"], [name, connection_qualified_name] - ) - attributes = GCSBucket.Attributes.create( - name=name, connection_qualified_name=connection_qualified_name - ) - return cls(attributes=attributes) - - type_name: str = Field("GCSBucket", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "GCSBucket": - raise ValueError("must be GCSBucket") - return v - - def __setattr__(self, name, value): - if name in GCSBucket._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - GCS_OBJECT_COUNT: ClassVar[NumericField] = NumericField( - "gcsObjectCount", "gcsObjectCount" - ) - """ - Number of objects within the bucket. - """ - GCS_BUCKET_VERSIONING_ENABLED: ClassVar[BooleanField] = BooleanField( - "gcsBucketVersioningEnabled", "gcsBucketVersioningEnabled" - ) - """ - Whether versioning is enabled on the bucket (true) or not (false). - """ - GCS_BUCKET_RETENTION_LOCKED: ClassVar[BooleanField] = BooleanField( - "gcsBucketRetentionLocked", "gcsBucketRetentionLocked" - ) - """ - Whether retention is locked for this bucket (true) or not (false). - """ - GCS_BUCKET_RETENTION_PERIOD: ClassVar[NumericField] = NumericField( - "gcsBucketRetentionPeriod", "gcsBucketRetentionPeriod" - ) - """ - Retention period for objects in this bucket. - """ - GCS_BUCKET_RETENTION_EFFECTIVE_TIME: ClassVar[NumericField] = NumericField( - "gcsBucketRetentionEffectiveTime", "gcsBucketRetentionEffectiveTime" - ) - """ - Effective time for retention of objects in this bucket. - """ - GCS_BUCKET_LIFECYCLE_RULES: ClassVar[TextField] = TextField( - "gcsBucketLifecycleRules", "gcsBucketLifecycleRules" - ) - """ - Lifecycle rules for this bucket. - """ - GCS_BUCKET_RETENTION_POLICY: ClassVar[TextField] = TextField( - "gcsBucketRetentionPolicy", "gcsBucketRetentionPolicy" - ) - """ - Retention policy for this bucket. 
- """ - - GCS_OBJECTS: ClassVar[RelationField] = RelationField("gcsObjects") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "gcs_object_count", - "gcs_bucket_versioning_enabled", - "gcs_bucket_retention_locked", - "gcs_bucket_retention_period", - "gcs_bucket_retention_effective_time", - "gcs_bucket_lifecycle_rules", - "gcs_bucket_retention_policy", - "gcs_objects", - ] - - @property - def gcs_object_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.gcs_object_count - - @gcs_object_count.setter - def gcs_object_count(self, gcs_object_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.gcs_object_count = gcs_object_count - - @property - def gcs_bucket_versioning_enabled(self) -> Optional[bool]: - return ( - None - if self.attributes is None - else self.attributes.gcs_bucket_versioning_enabled - ) - - @gcs_bucket_versioning_enabled.setter - def gcs_bucket_versioning_enabled( - self, gcs_bucket_versioning_enabled: Optional[bool] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.gcs_bucket_versioning_enabled = gcs_bucket_versioning_enabled - - @property - def gcs_bucket_retention_locked(self) -> Optional[bool]: - return ( - None - if self.attributes is None - else self.attributes.gcs_bucket_retention_locked - ) - - @gcs_bucket_retention_locked.setter - def gcs_bucket_retention_locked(self, gcs_bucket_retention_locked: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.gcs_bucket_retention_locked = gcs_bucket_retention_locked - - @property - def gcs_bucket_retention_period(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.gcs_bucket_retention_period - ) - - @gcs_bucket_retention_period.setter - def gcs_bucket_retention_period(self, gcs_bucket_retention_period: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.gcs_bucket_retention_period = gcs_bucket_retention_period - - @property - def gcs_bucket_retention_effective_time(self) -> Optional[datetime]: - return ( - None - if self.attributes is None - else self.attributes.gcs_bucket_retention_effective_time - ) - - @gcs_bucket_retention_effective_time.setter - def gcs_bucket_retention_effective_time( - self, gcs_bucket_retention_effective_time: Optional[datetime] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.gcs_bucket_retention_effective_time = ( - gcs_bucket_retention_effective_time - ) - - @property - def gcs_bucket_lifecycle_rules(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.gcs_bucket_lifecycle_rules - ) - - @gcs_bucket_lifecycle_rules.setter - def gcs_bucket_lifecycle_rules(self, gcs_bucket_lifecycle_rules: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.gcs_bucket_lifecycle_rules = gcs_bucket_lifecycle_rules - - @property - def gcs_bucket_retention_policy(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.gcs_bucket_retention_policy - ) - - @gcs_bucket_retention_policy.setter - def gcs_bucket_retention_policy(self, gcs_bucket_retention_policy: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.gcs_bucket_retention_policy = gcs_bucket_retention_policy - - @property - def gcs_objects(self) -> 
Optional[list[GCSObject]]: - return None if self.attributes is None else self.attributes.gcs_objects - - @gcs_objects.setter - def gcs_objects(self, gcs_objects: Optional[list[GCSObject]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.gcs_objects = gcs_objects - - class Attributes(GCS.Attributes): - gcs_object_count: Optional[int] = Field( - None, description="", alias="gcsObjectCount" - ) - gcs_bucket_versioning_enabled: Optional[bool] = Field( - None, description="", alias="gcsBucketVersioningEnabled" - ) - gcs_bucket_retention_locked: Optional[bool] = Field( - None, description="", alias="gcsBucketRetentionLocked" - ) - gcs_bucket_retention_period: Optional[int] = Field( - None, description="", alias="gcsBucketRetentionPeriod" - ) - gcs_bucket_retention_effective_time: Optional[datetime] = Field( - None, description="", alias="gcsBucketRetentionEffectiveTime" - ) - gcs_bucket_lifecycle_rules: Optional[str] = Field( - None, description="", alias="gcsBucketLifecycleRules" - ) - gcs_bucket_retention_policy: Optional[str] = Field( - None, description="", alias="gcsBucketRetentionPolicy" - ) - gcs_objects: Optional[list[GCSObject]] = Field( - None, description="", alias="gcsObjects" - ) # relationship - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, *, name: str, connection_qualified_name: str - ) -> GCSBucket.Attributes: - validate_required_fields( - ["name", "connection_qualified_name"], [name, connection_qualified_name] - ) - - # Split the connection_qualified_name to extract necessary information - fields = connection_qualified_name.split("/") - if len(fields) != 3: - raise ValueError("Invalid connection_qualified_name") - - try: - connector_type = AtlanConnectorType(fields[1]) # type:ignore - except ValueError as e: - raise ValueError("Invalid connection_qualified_name") from e - - return GCSBucket.Attributes( - name=name, - qualified_name=f"{connection_qualified_name}/{name}", - connection_qualified_name=connection_qualified_name, - connector_name=connector_type.value, - ) - - attributes: "GCSBucket.Attributes" = Field( - default_factory=lambda: GCSBucket.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -GCSObject.Attributes.update_forward_refs() - - -GCSBucket.Attributes.update_forward_refs() +from .g_c_s_bucket import GCSBucket # noqa diff --git a/pyatlan/model/assets/asset31.py b/pyatlan/model/assets/google.py similarity index 84% rename from pyatlan/model/assets/asset31.py rename to pyatlan/model/assets/google.py index 8ce729f24..e83091c6f 100644 --- a/pyatlan/model/assets/asset31.py +++ b/pyatlan/model/assets/google.py @@ -6,7 +6,7 @@ from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.fields.atlan_fields import ( KeywordField, @@ -15,13 +15,13 @@ ) from pyatlan.model.structs import GoogleLabel, GoogleTag -from .asset09 import Cloud +from .cloud import Cloud class Google(Cloud): """Description""" - type_name: str = Field("Google", allow_mutation=False) + type_name: str = Field(default="Google", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -173,36 +173,17 @@ def google_tags(self, google_tags: Optional[list[GoogleTag]]): self.attributes.google_tags = google_tags class Attributes(Cloud.Attributes): - google_service: Optional[str] = Field( - None, description="", alias="googleService" - ) - google_project_name: Optional[str] = Field( - None, description="", alias="googleProjectName" - ) - google_project_id: Optional[str] = Field( - None, description="", alias="googleProjectId" - ) - google_project_number: Optional[int] = Field( - None, description="", alias="googleProjectNumber" - ) - google_location: Optional[str] = Field( - None, description="", alias="googleLocation" - ) - google_location_type: Optional[str] = Field( - None, description="", alias="googleLocationType" - ) - google_labels: Optional[list[GoogleLabel]] = Field( - None, description="", alias="googleLabels" - ) - google_tags: Optional[list[GoogleTag]] = Field( - None, description="", alias="googleTags" - ) + google_service: Optional[str] = Field(default=None, description="") + google_project_name: Optional[str] = Field(default=None, description="") + google_project_id: Optional[str] = Field(default=None, description="") + google_project_number: Optional[int] = Field(default=None, description="") + google_location: Optional[str] = Field(default=None, description="") + google_location_type: Optional[str] = Field(default=None, description="") + google_labels: Optional[list[GoogleLabel]] = Field(default=None, description="") + google_tags: Optional[list[GoogleTag]] = Field(default=None, description="") attributes: "Google.Attributes" = Field( default_factory=lambda: Google.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) - - -Google.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset10.py b/pyatlan/model/assets/infrastructure.py similarity index 77% rename from pyatlan/model/assets/asset10.py rename to pyatlan/model/assets/infrastructure.py index 2ef41930d..371525752 100644 --- a/pyatlan/model/assets/asset10.py +++ b/pyatlan/model/assets/infrastructure.py @@ -6,15 +6,15 @@ from typing import ClassVar -from pydantic import Field, validator +from pydantic.v1 import Field, validator -from .asset00 import Asset +from .asset import Asset class Infrastructure(Asset, type_name="Infrastructure"): """Description""" - type_name: str = Field("Infrastructure", allow_mutation=False) + type_name: str = Field(default="Infrastructure", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -28,6 +28,3 @@ def __setattr__(self, name, value): super().__setattr__(name, value) _convenience_properties: ClassVar[list[str]] = [] - - -Infrastructure.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset27.py b/pyatlan/model/assets/insight.py similarity index 77% rename from pyatlan/model/assets/asset27.py rename to pyatlan/model/assets/insight.py index eb7a44ed4..a27a55768 100644 --- a/pyatlan/model/assets/asset27.py +++ b/pyatlan/model/assets/insight.py @@ -6,15 +6,15 @@ from typing import ClassVar -from pydantic import Field, validator +from pydantic.v1 import Field, validator -from .asset00 import Catalog +from .catalog import Catalog class Insight(Catalog): """Description""" - type_name: str = Field("Insight", allow_mutation=False) + type_name: str = Field(default="Insight", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -28,6 +28,3 @@ def __setattr__(self, name, value): super().__setattr__(name, value) _convenience_properties: ClassVar[list[str]] = [] - - -Insight.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset56.py b/pyatlan/model/assets/kafka.py similarity index 77% rename from pyatlan/model/assets/asset56.py rename to pyatlan/model/assets/kafka.py index 7eabcaf30..3881afa62 100644 --- a/pyatlan/model/assets/asset56.py +++ b/pyatlan/model/assets/kafka.py @@ -6,15 +6,15 @@ from typing import ClassVar -from pydantic import Field, validator +from pydantic.v1 import Field, validator -from .asset23 import EventStore +from .event_store import EventStore class Kafka(EventStore): """Description""" - type_name: str = Field("Kafka", allow_mutation=False) + type_name: str = Field(default="Kafka", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -28,6 +28,3 @@ def __setattr__(self, name, value): super().__setattr__(name, value) _convenience_properties: ClassVar[list[str]] = [] - - -Kafka.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/kafka_consumer_group.py b/pyatlan/model/assets/kafka_consumer_group.py new file mode 100644 index 000000000..ea318c656 --- /dev/null +++ b/pyatlan/model/assets/kafka_consumer_group.py @@ -0,0 +1,173 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, NumericField, RelationField +from pyatlan.model.structs import KafkaTopicConsumption + +from .kafka import Kafka + + +class KafkaConsumerGroup(Kafka): + """Description""" + + type_name: str = Field(default="KafkaConsumerGroup", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "KafkaConsumerGroup": + raise ValueError("must be KafkaConsumerGroup") + return v + + def __setattr__(self, name, value): + if name in KafkaConsumerGroup._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + KAFKA_CONSUMER_GROUP_TOPIC_CONSUMPTION_PROPERTIES: ClassVar[ + KeywordField + ] = KeywordField( + "kafkaConsumerGroupTopicConsumptionProperties", + "kafkaConsumerGroupTopicConsumptionProperties", + ) + """ + List of consumption properties for Kafka topics, for this consumer group. + """ + KAFKA_CONSUMER_GROUP_MEMBER_COUNT: ClassVar[NumericField] = NumericField( + "kafkaConsumerGroupMemberCount", "kafkaConsumerGroupMemberCount" + ) + """ + Number of members in this consumer group. + """ + KAFKA_TOPIC_NAMES: ClassVar[KeywordField] = KeywordField( + "kafkaTopicNames", "kafkaTopicNames" + ) + """ + Simple names of the topics consumed by this consumer group. + """ + KAFKA_TOPIC_QUALIFIED_NAMES: ClassVar[KeywordField] = KeywordField( + "kafkaTopicQualifiedNames", "kafkaTopicQualifiedNames" + ) + """ + Unique names of the topics consumed by this consumer group. + """ + + KAFKA_TOPICS: ClassVar[RelationField] = RelationField("kafkaTopics") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "kafka_consumer_group_topic_consumption_properties", + "kafka_consumer_group_member_count", + "kafka_topic_names", + "kafka_topic_qualified_names", + "kafka_topics", + ] + + @property + def kafka_consumer_group_topic_consumption_properties( + self, + ) -> Optional[list[KafkaTopicConsumption]]: + return ( + None + if self.attributes is None + else self.attributes.kafka_consumer_group_topic_consumption_properties + ) + + @kafka_consumer_group_topic_consumption_properties.setter + def kafka_consumer_group_topic_consumption_properties( + self, + kafka_consumer_group_topic_consumption_properties: Optional[ + list[KafkaTopicConsumption] + ], + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.kafka_consumer_group_topic_consumption_properties = ( + kafka_consumer_group_topic_consumption_properties + ) + + @property + def kafka_consumer_group_member_count(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.kafka_consumer_group_member_count + ) + + @kafka_consumer_group_member_count.setter + def kafka_consumer_group_member_count( + self, kafka_consumer_group_member_count: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.kafka_consumer_group_member_count = ( + kafka_consumer_group_member_count + ) + + @property + def kafka_topic_names(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.kafka_topic_names + + @kafka_topic_names.setter + def kafka_topic_names(self, kafka_topic_names: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.kafka_topic_names = kafka_topic_names + + @property + def 
kafka_topic_qualified_names(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.kafka_topic_qualified_names + ) + + @kafka_topic_qualified_names.setter + def kafka_topic_qualified_names( + self, kafka_topic_qualified_names: Optional[set[str]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.kafka_topic_qualified_names = kafka_topic_qualified_names + + @property + def kafka_topics(self) -> Optional[list[KafkaTopic]]: + return None if self.attributes is None else self.attributes.kafka_topics + + @kafka_topics.setter + def kafka_topics(self, kafka_topics: Optional[list[KafkaTopic]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.kafka_topics = kafka_topics + + class Attributes(Kafka.Attributes): + kafka_consumer_group_topic_consumption_properties: Optional[ + list[KafkaTopicConsumption] + ] = Field(default=None, description="") + kafka_consumer_group_member_count: Optional[int] = Field( + default=None, description="" + ) + kafka_topic_names: Optional[set[str]] = Field(default=None, description="") + kafka_topic_qualified_names: Optional[set[str]] = Field( + default=None, description="" + ) + kafka_topics: Optional[list[KafkaTopic]] = Field( + default=None, description="" + ) # relationship + + attributes: "KafkaConsumerGroup.Attributes" = Field( + default_factory=lambda: KafkaConsumerGroup.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .kafka_topic import KafkaTopic # noqa diff --git a/pyatlan/model/assets/asset87.py b/pyatlan/model/assets/kafka_topic.py similarity index 57% rename from pyatlan/model/assets/asset87.py rename to pyatlan/model/assets/kafka_topic.py index 95b8812ba..11ef689c6 100644 --- a/pyatlan/model/assets/asset87.py +++ b/pyatlan/model/assets/kafka_topic.py @@ -6,7 +6,7 @@ from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.enums import KafkaTopicCleanupPolicy, KafkaTopicCompressionType from pyatlan.model.fields.atlan_fields import ( @@ -15,15 +15,14 @@ NumericField, RelationField, ) -from pyatlan.model.structs import KafkaTopicConsumption -from .asset56 import Kafka +from .kafka import Kafka class KafkaTopic(Kafka): """Description""" - type_name: str = Field("KafkaTopic", allow_mutation=False) + type_name: str = Field(default="KafkaTopic", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -260,35 +259,27 @@ def kafka_consumer_groups( self.attributes.kafka_consumer_groups = kafka_consumer_groups class Attributes(Kafka.Attributes): - kafka_topic_is_internal: Optional[bool] = Field( - None, description="", alias="kafkaTopicIsInternal" - ) + kafka_topic_is_internal: Optional[bool] = Field(default=None, description="") kafka_topic_compression_type: Optional[KafkaTopicCompressionType] = Field( - None, description="", alias="kafkaTopicCompressionType" + default=None, description="" ) kafka_topic_replication_factor: Optional[int] = Field( - None, description="", alias="kafkaTopicReplicationFactor" - ) - kafka_topic_segment_bytes: Optional[int] = Field( - None, description="", alias="kafkaTopicSegmentBytes" + default=None, description="" ) + kafka_topic_segment_bytes: Optional[int] = Field(default=None, description="") kafka_topic_retention_time_in_ms: Optional[int] = Field( - None, 
description="", alias="kafkaTopicRetentionTimeInMs" + default=None, description="" ) kafka_topic_partitions_count: Optional[int] = Field( - None, description="", alias="kafkaTopicPartitionsCount" - ) - kafka_topic_size_in_bytes: Optional[int] = Field( - None, description="", alias="kafkaTopicSizeInBytes" - ) - kafka_topic_record_count: Optional[int] = Field( - None, description="", alias="kafkaTopicRecordCount" + default=None, description="" ) + kafka_topic_size_in_bytes: Optional[int] = Field(default=None, description="") + kafka_topic_record_count: Optional[int] = Field(default=None, description="") kafka_topic_cleanup_policy: Optional[KafkaTopicCleanupPolicy] = Field( - None, description="", alias="kafkaTopicCleanupPolicy" + default=None, description="" ) kafka_consumer_groups: Optional[list[KafkaConsumerGroup]] = Field( - None, description="", alias="kafkaConsumerGroups" + default=None, description="" ) # relationship attributes: "KafkaTopic.Attributes" = Field( @@ -298,167 +289,4 @@ class Attributes(Kafka.Attributes): ) -class KafkaConsumerGroup(Kafka): - """Description""" - - type_name: str = Field("KafkaConsumerGroup", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "KafkaConsumerGroup": - raise ValueError("must be KafkaConsumerGroup") - return v - - def __setattr__(self, name, value): - if name in KafkaConsumerGroup._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - KAFKA_CONSUMER_GROUP_TOPIC_CONSUMPTION_PROPERTIES: ClassVar[ - KeywordField - ] = KeywordField( - "kafkaConsumerGroupTopicConsumptionProperties", - "kafkaConsumerGroupTopicConsumptionProperties", - ) - """ - List of consumption properties for Kafka topics, for this consumer group. - """ - KAFKA_CONSUMER_GROUP_MEMBER_COUNT: ClassVar[NumericField] = NumericField( - "kafkaConsumerGroupMemberCount", "kafkaConsumerGroupMemberCount" - ) - """ - Number of members in this consumer group. - """ - KAFKA_TOPIC_NAMES: ClassVar[KeywordField] = KeywordField( - "kafkaTopicNames", "kafkaTopicNames" - ) - """ - Simple names of the topics consumed by this consumer group. - """ - KAFKA_TOPIC_QUALIFIED_NAMES: ClassVar[KeywordField] = KeywordField( - "kafkaTopicQualifiedNames", "kafkaTopicQualifiedNames" - ) - """ - Unique names of the topics consumed by this consumer group. 
- """ - - KAFKA_TOPICS: ClassVar[RelationField] = RelationField("kafkaTopics") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "kafka_consumer_group_topic_consumption_properties", - "kafka_consumer_group_member_count", - "kafka_topic_names", - "kafka_topic_qualified_names", - "kafka_topics", - ] - - @property - def kafka_consumer_group_topic_consumption_properties( - self, - ) -> Optional[list[KafkaTopicConsumption]]: - return ( - None - if self.attributes is None - else self.attributes.kafka_consumer_group_topic_consumption_properties - ) - - @kafka_consumer_group_topic_consumption_properties.setter - def kafka_consumer_group_topic_consumption_properties( - self, - kafka_consumer_group_topic_consumption_properties: Optional[ - list[KafkaTopicConsumption] - ], - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.kafka_consumer_group_topic_consumption_properties = ( - kafka_consumer_group_topic_consumption_properties - ) - - @property - def kafka_consumer_group_member_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.kafka_consumer_group_member_count - ) - - @kafka_consumer_group_member_count.setter - def kafka_consumer_group_member_count( - self, kafka_consumer_group_member_count: Optional[int] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.kafka_consumer_group_member_count = ( - kafka_consumer_group_member_count - ) - - @property - def kafka_topic_names(self) -> Optional[set[str]]: - return None if self.attributes is None else self.attributes.kafka_topic_names - - @kafka_topic_names.setter - def kafka_topic_names(self, kafka_topic_names: Optional[set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.kafka_topic_names = kafka_topic_names - - @property - def kafka_topic_qualified_names(self) -> Optional[set[str]]: - return ( - None - if self.attributes is None - else self.attributes.kafka_topic_qualified_names - ) - - @kafka_topic_qualified_names.setter - def kafka_topic_qualified_names( - self, kafka_topic_qualified_names: Optional[set[str]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.kafka_topic_qualified_names = kafka_topic_qualified_names - - @property - def kafka_topics(self) -> Optional[list[KafkaTopic]]: - return None if self.attributes is None else self.attributes.kafka_topics - - @kafka_topics.setter - def kafka_topics(self, kafka_topics: Optional[list[KafkaTopic]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.kafka_topics = kafka_topics - - class Attributes(Kafka.Attributes): - kafka_consumer_group_topic_consumption_properties: Optional[ - list[KafkaTopicConsumption] - ] = Field( - None, description="", alias="kafkaConsumerGroupTopicConsumptionProperties" - ) - kafka_consumer_group_member_count: Optional[int] = Field( - None, description="", alias="kafkaConsumerGroupMemberCount" - ) - kafka_topic_names: Optional[set[str]] = Field( - None, description="", alias="kafkaTopicNames" - ) - kafka_topic_qualified_names: Optional[set[str]] = Field( - None, description="", alias="kafkaTopicQualifiedNames" - ) - kafka_topics: Optional[list[KafkaTopic]] = Field( - None, description="", alias="kafkaTopics" - ) # relationship - - attributes: "KafkaConsumerGroup.Attributes" = Field( - default_factory=lambda: KafkaConsumerGroup.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -KafkaTopic.Attributes.update_forward_refs() - - -KafkaConsumerGroup.Attributes.update_forward_refs() +from .kafka_consumer_group import KafkaConsumerGroup # noqa diff --git a/pyatlan/model/assets/link.py b/pyatlan/model/assets/link.py new file mode 100644 index 000000000..fdbc5d775 --- /dev/null +++ b/pyatlan/model/assets/link.py @@ -0,0 +1,124 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +import uuid +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.enums import IconType +from pyatlan.model.fields.atlan_fields import KeywordField, RelationField +from pyatlan.utils import init_guid, validate_required_fields + +from .resource import Resource + + +class Link(Resource): + """Description""" + + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, *, asset: Asset, name: str, link: str, idempotent: bool = False + ) -> Link: + return Link( + attributes=Link.Attributes.create( + asset=asset, name=name, link=link, idempotent=idempotent + ) + ) + + type_name: str = Field(default="Link", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "Link": + raise ValueError("must be Link") + return v + + def __setattr__(self, name, value): + if name in Link._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + ICON: ClassVar[KeywordField] = KeywordField("icon", "icon") + """ + Icon for the link. + """ + ICON_TYPE: ClassVar[KeywordField] = KeywordField("iconType", "iconType") + """ + Type of icon for the link, for example: image or emoji. 
+ """ + + ASSET: ClassVar[RelationField] = RelationField("asset") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "icon", + "icon_type", + "asset", + ] + + @property + def icon(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.icon + + @icon.setter + def icon(self, icon: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.icon = icon + + @property + def icon_type(self) -> Optional[IconType]: + return None if self.attributes is None else self.attributes.icon_type + + @icon_type.setter + def icon_type(self, icon_type: Optional[IconType]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.icon_type = icon_type + + @property + def asset(self) -> Optional[Asset]: + return None if self.attributes is None else self.attributes.asset + + @asset.setter + def asset(self, asset: Optional[Asset]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset = asset + + class Attributes(Resource.Attributes): + icon: Optional[str] = Field(default=None, description="") + icon_type: Optional[IconType] = Field(default=None, description="") + asset: Optional[Asset] = Field(default=None, description="") # relationship + + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, *, asset: Asset, name: str, link: str, idempotent: bool + ) -> Link.Attributes: + validate_required_fields(["asset", "name", "link"], [asset, name, link]) + qn = f"{asset.qualified_name}/{name}" if idempotent else str(uuid.uuid4()) + return Link.Attributes( + qualified_name=qn, + name=name, + link=link, + asset=asset.trim_to_reference(), + ) + + attributes: "Link.Attributes" = Field( + default_factory=lambda: Link.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .asset import Asset # noqa diff --git a/pyatlan/model/assets/asset44.py b/pyatlan/model/assets/looker.py similarity index 78% rename from pyatlan/model/assets/asset44.py rename to pyatlan/model/assets/looker.py index 2db54f3ea..58a329687 100644 --- a/pyatlan/model/assets/asset44.py +++ b/pyatlan/model/assets/looker.py @@ -6,15 +6,15 @@ from typing import ClassVar -from pydantic import Field, validator +from pydantic.v1 import Field, validator -from .asset19 import BI +from .b_i import BI class Looker(BI): """Description""" - type_name: str = Field("Looker", allow_mutation=False) + type_name: str = Field(default="Looker", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -28,6 +28,3 @@ def __setattr__(self, name, value): super().__setattr__(name, value) _convenience_properties: ClassVar[list[str]] = [] - - -Looker.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/looker_dashboard.py b/pyatlan/model/assets/looker_dashboard.py new file mode 100644 index 000000000..b7b019915 --- /dev/null +++ b/pyatlan/model/assets/looker_dashboard.py @@ -0,0 +1,235 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
+ + +from __future__ import annotations + +from datetime import datetime +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, NumericField, RelationField + +from .looker import Looker + + +class LookerDashboard(Looker): + """Description""" + + type_name: str = Field(default="LookerDashboard", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "LookerDashboard": + raise ValueError("must be LookerDashboard") + return v + + def __setattr__(self, name, value): + if name in LookerDashboard._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + FOLDER_NAME: ClassVar[KeywordField] = KeywordField("folderName", "folderName") + """ + Name of the parent folder in Looker that contains this dashboard. + """ + SOURCE_USER_ID: ClassVar[NumericField] = NumericField( + "sourceUserId", "sourceUserId" + ) + """ + Identifier of the user who created this dashboard, from Looker. + """ + SOURCE_VIEW_COUNT: ClassVar[NumericField] = NumericField( + "sourceViewCount", "sourceViewCount" + ) + """ + Number of times the dashboard has been viewed through the Looker web UI. + """ + SOURCE_METADATA_ID: ClassVar[NumericField] = NumericField( + "sourceMetadataId", "sourceMetadataId" + ) + """ + Identifier of the dashboard's content metadata, from Looker. + """ + SOURCELAST_UPDATER_ID: ClassVar[NumericField] = NumericField( + "sourcelastUpdaterId", "sourcelastUpdaterId" + ) + """ + Identifier of the user who last updated the dashboard, from Looker. + """ + SOURCE_LAST_ACCESSED_AT: ClassVar[NumericField] = NumericField( + "sourceLastAccessedAt", "sourceLastAccessedAt" + ) + """ + Timestamp (epoch) when the dashboard was last accessed by a user, in milliseconds. + """ + SOURCE_LAST_VIEWED_AT: ClassVar[NumericField] = NumericField( + "sourceLastViewedAt", "sourceLastViewedAt" + ) + """ + Timestamp (epoch) when the dashboard was last viewed by a user. 
+ """ + + TILES: ClassVar[RelationField] = RelationField("tiles") + """ + TBC + """ + LOOKS: ClassVar[RelationField] = RelationField("looks") + """ + TBC + """ + FOLDER: ClassVar[RelationField] = RelationField("folder") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "folder_name", + "source_user_id", + "source_view_count", + "source_metadata_id", + "sourcelast_updater_id", + "source_last_accessed_at", + "source_last_viewed_at", + "tiles", + "looks", + "folder", + ] + + @property + def folder_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.folder_name + + @folder_name.setter + def folder_name(self, folder_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.folder_name = folder_name + + @property + def source_user_id(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.source_user_id + + @source_user_id.setter + def source_user_id(self, source_user_id: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_user_id = source_user_id + + @property + def source_view_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.source_view_count + + @source_view_count.setter + def source_view_count(self, source_view_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_view_count = source_view_count + + @property + def source_metadata_id(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.source_metadata_id + + @source_metadata_id.setter + def source_metadata_id(self, source_metadata_id: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_metadata_id = source_metadata_id + + @property + def sourcelast_updater_id(self) -> Optional[int]: + return ( + None if self.attributes is None else self.attributes.sourcelast_updater_id + ) + + @sourcelast_updater_id.setter + def sourcelast_updater_id(self, sourcelast_updater_id: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sourcelast_updater_id = sourcelast_updater_id + + @property + def source_last_accessed_at(self) -> Optional[datetime]: + return ( + None if self.attributes is None else self.attributes.source_last_accessed_at + ) + + @source_last_accessed_at.setter + def source_last_accessed_at(self, source_last_accessed_at: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_last_accessed_at = source_last_accessed_at + + @property + def source_last_viewed_at(self) -> Optional[datetime]: + return ( + None if self.attributes is None else self.attributes.source_last_viewed_at + ) + + @source_last_viewed_at.setter + def source_last_viewed_at(self, source_last_viewed_at: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_last_viewed_at = source_last_viewed_at + + @property + def tiles(self) -> Optional[list[LookerTile]]: + return None if self.attributes is None else self.attributes.tiles + + @tiles.setter + def tiles(self, tiles: Optional[list[LookerTile]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.tiles = tiles + + @property + def looks(self) -> Optional[list[LookerLook]]: + return None if self.attributes is None else self.attributes.looks + + 
@looks.setter + def looks(self, looks: Optional[list[LookerLook]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.looks = looks + + @property + def folder(self) -> Optional[LookerFolder]: + return None if self.attributes is None else self.attributes.folder + + @folder.setter + def folder(self, folder: Optional[LookerFolder]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.folder = folder + + class Attributes(Looker.Attributes): + folder_name: Optional[str] = Field(default=None, description="") + source_user_id: Optional[int] = Field(default=None, description="") + source_view_count: Optional[int] = Field(default=None, description="") + source_metadata_id: Optional[int] = Field(default=None, description="") + sourcelast_updater_id: Optional[int] = Field(default=None, description="") + source_last_accessed_at: Optional[datetime] = Field( + default=None, description="" + ) + source_last_viewed_at: Optional[datetime] = Field(default=None, description="") + tiles: Optional[list[LookerTile]] = Field( + default=None, description="" + ) # relationship + looks: Optional[list[LookerLook]] = Field( + default=None, description="" + ) # relationship + folder: Optional[LookerFolder] = Field( + default=None, description="" + ) # relationship + + attributes: "LookerDashboard.Attributes" = Field( + default_factory=lambda: LookerDashboard.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .looker_folder import LookerFolder # noqa +from .looker_look import LookerLook # noqa +from .looker_tile import LookerTile # noqa diff --git a/pyatlan/model/assets/looker_explore.py b/pyatlan/model/assets/looker_explore.py new file mode 100644 index 000000000..ab5f3dba1 --- /dev/null +++ b/pyatlan/model/assets/looker_explore.py @@ -0,0 +1,194 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import ( + KeywordField, + KeywordTextField, + RelationField, +) + +from .looker import Looker + + +class LookerExplore(Looker): + """Description""" + + type_name: str = Field(default="LookerExplore", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "LookerExplore": + raise ValueError("must be LookerExplore") + return v + + def __setattr__(self, name, value): + if name in LookerExplore._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + PROJECT_NAME: ClassVar[KeywordField] = KeywordField("projectName", "projectName") + """ + Name of the parent project of this Explore. + """ + MODEL_NAME: ClassVar[KeywordField] = KeywordField("modelName", "modelName") + """ + Name of the parent model of this Explore. + """ + SOURCE_CONNECTION_NAME: ClassVar[KeywordField] = KeywordField( + "sourceConnectionName", "sourceConnectionName" + ) + """ + Connection name for the Explore, from Looker. + """ + VIEW_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "viewName", "viewName.keyword", "viewName" + ) + """ + Name of the view for the Explore. + """ + SQL_TABLE_NAME: ClassVar[KeywordField] = KeywordField( + "sqlTableName", "sqlTableName" + ) + """ + Name of the SQL table used to declare the Explore. 
+ """ + + PROJECT: ClassVar[RelationField] = RelationField("project") + """ + TBC + """ + MODEL: ClassVar[RelationField] = RelationField("model") + """ + TBC + """ + FIELDS: ClassVar[RelationField] = RelationField("fields") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "project_name", + "model_name", + "source_connection_name", + "view_name", + "sql_table_name", + "project", + "model", + "fields", + ] + + @property + def project_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.project_name + + @project_name.setter + def project_name(self, project_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project_name = project_name + + @property + def model_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.model_name + + @model_name.setter + def model_name(self, model_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.model_name = model_name + + @property + def source_connection_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.source_connection_name + ) + + @source_connection_name.setter + def source_connection_name(self, source_connection_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_connection_name = source_connection_name + + @property + def view_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.view_name + + @view_name.setter + def view_name(self, view_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.view_name = view_name + + @property + def sql_table_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.sql_table_name + + @sql_table_name.setter + def sql_table_name(self, sql_table_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sql_table_name = sql_table_name + + @property + def project(self) -> Optional[LookerProject]: + return None if self.attributes is None else self.attributes.project + + @project.setter + def project(self, project: Optional[LookerProject]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project = project + + @property + def model(self) -> Optional[LookerModel]: + return None if self.attributes is None else self.attributes.model + + @model.setter + def model(self, model: Optional[LookerModel]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.model = model + + @property + def fields(self) -> Optional[list[LookerField]]: + return None if self.attributes is None else self.attributes.fields + + @fields.setter + def fields(self, fields: Optional[list[LookerField]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.fields = fields + + class Attributes(Looker.Attributes): + project_name: Optional[str] = Field(default=None, description="") + model_name: Optional[str] = Field(default=None, description="") + source_connection_name: Optional[str] = Field(default=None, description="") + view_name: Optional[str] = Field(default=None, description="") + sql_table_name: Optional[str] = Field(default=None, description="") + project: Optional[LookerProject] = Field( + default=None, description="" + ) # relationship + model: Optional[LookerModel] = 
Field( + default=None, description="" + ) # relationship + fields: Optional[list[LookerField]] = Field( + default=None, description="" + ) # relationship + + attributes: "LookerExplore.Attributes" = Field( + default_factory=lambda: LookerExplore.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .looker_field import LookerField # noqa +from .looker_model import LookerModel # noqa +from .looker_project import LookerProject # noqa diff --git a/pyatlan/model/assets/looker_field.py b/pyatlan/model/assets/looker_field.py new file mode 100644 index 000000000..bdbb3d218 --- /dev/null +++ b/pyatlan/model/assets/looker_field.py @@ -0,0 +1,264 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import ( + KeywordField, + KeywordTextField, + NumericField, + RelationField, +) + +from .looker import Looker + + +class LookerField(Looker): + """Description""" + + type_name: str = Field(default="LookerField", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "LookerField": + raise ValueError("must be LookerField") + return v + + def __setattr__(self, name, value): + if name in LookerField._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + PROJECT_NAME: ClassVar[KeywordField] = KeywordField("projectName", "projectName") + """ + Name of the project in which this field exists. + """ + LOOKER_EXPLORE_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "lookerExploreQualifiedName", + "lookerExploreQualifiedName", + "lookerExploreQualifiedName.text", + ) + """ + Unique name of the Explore in which this field exists. + """ + LOOKER_VIEW_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "lookerViewQualifiedName", + "lookerViewQualifiedName", + "lookerViewQualifiedName.text", + ) + """ + Unique name of the view in which this field exists. + """ + MODEL_NAME: ClassVar[KeywordField] = KeywordField("modelName", "modelName") + """ + Name of the model in which this field exists. + """ + SOURCE_DEFINITION: ClassVar[KeywordField] = KeywordField( + "sourceDefinition", "sourceDefinition" + ) + """ + Deprecated. + """ + LOOKER_FIELD_DATA_TYPE: ClassVar[KeywordField] = KeywordField( + "lookerFieldDataType", "lookerFieldDataType" + ) + """ + Deprecated. + """ + LOOKER_TIMES_USED: ClassVar[NumericField] = NumericField( + "lookerTimesUsed", "lookerTimesUsed" + ) + """ + Deprecated. 
+ """ + + EXPLORE: ClassVar[RelationField] = RelationField("explore") + """ + TBC + """ + PROJECT: ClassVar[RelationField] = RelationField("project") + """ + TBC + """ + VIEW: ClassVar[RelationField] = RelationField("view") + """ + TBC + """ + MODEL: ClassVar[RelationField] = RelationField("model") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "project_name", + "looker_explore_qualified_name", + "looker_view_qualified_name", + "model_name", + "source_definition", + "looker_field_data_type", + "looker_times_used", + "explore", + "project", + "view", + "model", + ] + + @property + def project_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.project_name + + @project_name.setter + def project_name(self, project_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project_name = project_name + + @property + def looker_explore_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.looker_explore_qualified_name + ) + + @looker_explore_qualified_name.setter + def looker_explore_qualified_name( + self, looker_explore_qualified_name: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.looker_explore_qualified_name = looker_explore_qualified_name + + @property + def looker_view_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.looker_view_qualified_name + ) + + @looker_view_qualified_name.setter + def looker_view_qualified_name(self, looker_view_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.looker_view_qualified_name = looker_view_qualified_name + + @property + def model_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.model_name + + @model_name.setter + def model_name(self, model_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.model_name = model_name + + @property + def source_definition(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.source_definition + + @source_definition.setter + def source_definition(self, source_definition: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_definition = source_definition + + @property + def looker_field_data_type(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.looker_field_data_type + ) + + @looker_field_data_type.setter + def looker_field_data_type(self, looker_field_data_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.looker_field_data_type = looker_field_data_type + + @property + def looker_times_used(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.looker_times_used + + @looker_times_used.setter + def looker_times_used(self, looker_times_used: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.looker_times_used = looker_times_used + + @property + def explore(self) -> Optional[LookerExplore]: + return None if self.attributes is None else self.attributes.explore + + @explore.setter + def explore(self, explore: Optional[LookerExplore]): + if self.attributes is None: + self.attributes = self.Attributes() + 
self.attributes.explore = explore + + @property + def project(self) -> Optional[LookerProject]: + return None if self.attributes is None else self.attributes.project + + @project.setter + def project(self, project: Optional[LookerProject]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project = project + + @property + def view(self) -> Optional[LookerView]: + return None if self.attributes is None else self.attributes.view + + @view.setter + def view(self, view: Optional[LookerView]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.view = view + + @property + def model(self) -> Optional[LookerModel]: + return None if self.attributes is None else self.attributes.model + + @model.setter + def model(self, model: Optional[LookerModel]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.model = model + + class Attributes(Looker.Attributes): + project_name: Optional[str] = Field(default=None, description="") + looker_explore_qualified_name: Optional[str] = Field( + default=None, description="" + ) + looker_view_qualified_name: Optional[str] = Field(default=None, description="") + model_name: Optional[str] = Field(default=None, description="") + source_definition: Optional[str] = Field(default=None, description="") + looker_field_data_type: Optional[str] = Field(default=None, description="") + looker_times_used: Optional[int] = Field(default=None, description="") + explore: Optional[LookerExplore] = Field( + default=None, description="" + ) # relationship + project: Optional[LookerProject] = Field( + default=None, description="" + ) # relationship + view: Optional[LookerView] = Field(default=None, description="") # relationship + model: Optional[LookerModel] = Field( + default=None, description="" + ) # relationship + + attributes: "LookerField.Attributes" = Field( + default_factory=lambda: LookerField.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .looker_explore import LookerExplore # noqa +from .looker_model import LookerModel # noqa +from .looker_project import LookerProject # noqa +from .looker_view import LookerView # noqa diff --git a/pyatlan/model/assets/looker_folder.py b/pyatlan/model/assets/looker_folder.py new file mode 100644 index 000000000..6762684b0 --- /dev/null +++ b/pyatlan/model/assets/looker_folder.py @@ -0,0 +1,195 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import NumericField, RelationField + +from .looker import Looker + + +class LookerFolder(Looker): + """Description""" + + type_name: str = Field(default="LookerFolder", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "LookerFolder": + raise ValueError("must be LookerFolder") + return v + + def __setattr__(self, name, value): + if name in LookerFolder._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + SOURCE_CONTENT_METADATA_ID: ClassVar[NumericField] = NumericField( + "sourceContentMetadataId", "sourceContentMetadataId" + ) + """ + Identifier for the folder's content metadata in Looker. 
+ """ + SOURCE_CREATOR_ID: ClassVar[NumericField] = NumericField( + "sourceCreatorId", "sourceCreatorId" + ) + """ + Identifier of the user who created the folder, from Looker. + """ + SOURCE_CHILD_COUNT: ClassVar[NumericField] = NumericField( + "sourceChildCount", "sourceChildCount" + ) + """ + Number of subfolders in this folder. + """ + SOURCE_PARENT_ID: ClassVar[NumericField] = NumericField( + "sourceParentID", "sourceParentID" + ) + """ + Identifier of the parent folder of this folder, from Looker. + """ + + LOOKER_SUB_FOLDERS: ClassVar[RelationField] = RelationField("lookerSubFolders") + """ + TBC + """ + DASHBOARDS: ClassVar[RelationField] = RelationField("dashboards") + """ + TBC + """ + LOOKS: ClassVar[RelationField] = RelationField("looks") + """ + TBC + """ + LOOKER_PARENT_FOLDER: ClassVar[RelationField] = RelationField("lookerParentFolder") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "source_content_metadata_id", + "source_creator_id", + "source_child_count", + "source_parent_i_d", + "looker_sub_folders", + "dashboards", + "looks", + "looker_parent_folder", + ] + + @property + def source_content_metadata_id(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.source_content_metadata_id + ) + + @source_content_metadata_id.setter + def source_content_metadata_id(self, source_content_metadata_id: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_content_metadata_id = source_content_metadata_id + + @property + def source_creator_id(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.source_creator_id + + @source_creator_id.setter + def source_creator_id(self, source_creator_id: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_creator_id = source_creator_id + + @property + def source_child_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.source_child_count + + @source_child_count.setter + def source_child_count(self, source_child_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_child_count = source_child_count + + @property + def source_parent_i_d(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.source_parent_i_d + + @source_parent_i_d.setter + def source_parent_i_d(self, source_parent_i_d: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_parent_i_d = source_parent_i_d + + @property + def looker_sub_folders(self) -> Optional[list[LookerFolder]]: + return None if self.attributes is None else self.attributes.looker_sub_folders + + @looker_sub_folders.setter + def looker_sub_folders(self, looker_sub_folders: Optional[list[LookerFolder]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.looker_sub_folders = looker_sub_folders + + @property + def dashboards(self) -> Optional[list[LookerDashboard]]: + return None if self.attributes is None else self.attributes.dashboards + + @dashboards.setter + def dashboards(self, dashboards: Optional[list[LookerDashboard]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dashboards = dashboards + + @property + def looks(self) -> Optional[list[LookerLook]]: + return None if self.attributes is None else self.attributes.looks + + @looks.setter + def 
looks(self, looks: Optional[list[LookerLook]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.looks = looks + + @property + def looker_parent_folder(self) -> Optional[LookerFolder]: + return None if self.attributes is None else self.attributes.looker_parent_folder + + @looker_parent_folder.setter + def looker_parent_folder(self, looker_parent_folder: Optional[LookerFolder]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.looker_parent_folder = looker_parent_folder + + class Attributes(Looker.Attributes): + source_content_metadata_id: Optional[int] = Field(default=None, description="") + source_creator_id: Optional[int] = Field(default=None, description="") + source_child_count: Optional[int] = Field(default=None, description="") + source_parent_i_d: Optional[int] = Field(default=None, description="") + looker_sub_folders: Optional[list[LookerFolder]] = Field( + default=None, description="" + ) # relationship + dashboards: Optional[list[LookerDashboard]] = Field( + default=None, description="" + ) # relationship + looks: Optional[list[LookerLook]] = Field( + default=None, description="" + ) # relationship + looker_parent_folder: Optional[LookerFolder] = Field( + default=None, description="" + ) # relationship + + attributes: "LookerFolder.Attributes" = Field( + default_factory=lambda: LookerFolder.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .looker_dashboard import LookerDashboard # noqa +from .looker_look import LookerLook # noqa diff --git a/pyatlan/model/assets/looker_look.py b/pyatlan/model/assets/looker_look.py new file mode 100644 index 000000000..9d39d3769 --- /dev/null +++ b/pyatlan/model/assets/looker_look.py @@ -0,0 +1,309 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from datetime import datetime +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, NumericField, RelationField + +from .looker import Looker + + +class LookerLook(Looker): + """Description""" + + type_name: str = Field(default="LookerLook", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "LookerLook": + raise ValueError("must be LookerLook") + return v + + def __setattr__(self, name, value): + if name in LookerLook._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + FOLDER_NAME: ClassVar[KeywordField] = KeywordField("folderName", "folderName") + """ + Name of the folder in which the Look is organized. + """ + SOURCE_USER_ID: ClassVar[NumericField] = NumericField( + "sourceUserId", "sourceUserId" + ) + """ + Identifier of the user who created the Look, from Looker. + """ + SOURCE_VIEW_COUNT: ClassVar[NumericField] = NumericField( + "sourceViewCount", "sourceViewCount" + ) + """ + Number of times the look has been viewed in the Looker web UI. + """ + SOURCELAST_UPDATER_ID: ClassVar[NumericField] = NumericField( + "sourcelastUpdaterId", "sourcelastUpdaterId" + ) + """ + Identifier of the user that last updated the Look, from Looker. 
+ """ + SOURCE_LAST_ACCESSED_AT: ClassVar[NumericField] = NumericField( + "sourceLastAccessedAt", "sourceLastAccessedAt" + ) + """ + Time (epoch) when the Look was last accessed by a user, in milliseconds. + """ + SOURCE_LAST_VIEWED_AT: ClassVar[NumericField] = NumericField( + "sourceLastViewedAt", "sourceLastViewedAt" + ) + """ + Time (epoch) when the Look was last viewed by a user, in milliseconds. + """ + SOURCE_CONTENT_METADATA_ID: ClassVar[NumericField] = NumericField( + "sourceContentMetadataId", "sourceContentMetadataId" + ) + """ + Identifier of the Look's content metadata, from Looker. + """ + SOURCE_QUERY_ID: ClassVar[NumericField] = NumericField( + "sourceQueryId", "sourceQueryId" + ) + """ + Identifier of the query for the Look, from Looker. + """ + MODEL_NAME: ClassVar[KeywordField] = KeywordField("modelName", "modelName") + """ + Name of the model in which this Look exists. + """ + + QUERY: ClassVar[RelationField] = RelationField("query") + """ + TBC + """ + FOLDER: ClassVar[RelationField] = RelationField("folder") + """ + TBC + """ + TILE: ClassVar[RelationField] = RelationField("tile") + """ + TBC + """ + MODEL: ClassVar[RelationField] = RelationField("model") + """ + TBC + """ + DASHBOARD: ClassVar[RelationField] = RelationField("dashboard") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "folder_name", + "source_user_id", + "source_view_count", + "sourcelast_updater_id", + "source_last_accessed_at", + "source_last_viewed_at", + "source_content_metadata_id", + "source_query_id", + "model_name", + "query", + "folder", + "tile", + "model", + "dashboard", + ] + + @property + def folder_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.folder_name + + @folder_name.setter + def folder_name(self, folder_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.folder_name = folder_name + + @property + def source_user_id(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.source_user_id + + @source_user_id.setter + def source_user_id(self, source_user_id: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_user_id = source_user_id + + @property + def source_view_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.source_view_count + + @source_view_count.setter + def source_view_count(self, source_view_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_view_count = source_view_count + + @property + def sourcelast_updater_id(self) -> Optional[int]: + return ( + None if self.attributes is None else self.attributes.sourcelast_updater_id + ) + + @sourcelast_updater_id.setter + def sourcelast_updater_id(self, sourcelast_updater_id: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sourcelast_updater_id = sourcelast_updater_id + + @property + def source_last_accessed_at(self) -> Optional[datetime]: + return ( + None if self.attributes is None else self.attributes.source_last_accessed_at + ) + + @source_last_accessed_at.setter + def source_last_accessed_at(self, source_last_accessed_at: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_last_accessed_at = source_last_accessed_at + + @property + def source_last_viewed_at(self) -> Optional[datetime]: + return ( + None if 
self.attributes is None else self.attributes.source_last_viewed_at + ) + + @source_last_viewed_at.setter + def source_last_viewed_at(self, source_last_viewed_at: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_last_viewed_at = source_last_viewed_at + + @property + def source_content_metadata_id(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.source_content_metadata_id + ) + + @source_content_metadata_id.setter + def source_content_metadata_id(self, source_content_metadata_id: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_content_metadata_id = source_content_metadata_id + + @property + def source_query_id(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.source_query_id + + @source_query_id.setter + def source_query_id(self, source_query_id: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_query_id = source_query_id + + @property + def model_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.model_name + + @model_name.setter + def model_name(self, model_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.model_name = model_name + + @property + def query(self) -> Optional[LookerQuery]: + return None if self.attributes is None else self.attributes.query + + @query.setter + def query(self, query: Optional[LookerQuery]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query = query + + @property + def folder(self) -> Optional[LookerFolder]: + return None if self.attributes is None else self.attributes.folder + + @folder.setter + def folder(self, folder: Optional[LookerFolder]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.folder = folder + + @property + def tile(self) -> Optional[LookerTile]: + return None if self.attributes is None else self.attributes.tile + + @tile.setter + def tile(self, tile: Optional[LookerTile]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.tile = tile + + @property + def model(self) -> Optional[LookerModel]: + return None if self.attributes is None else self.attributes.model + + @model.setter + def model(self, model: Optional[LookerModel]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.model = model + + @property + def dashboard(self) -> Optional[LookerDashboard]: + return None if self.attributes is None else self.attributes.dashboard + + @dashboard.setter + def dashboard(self, dashboard: Optional[LookerDashboard]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dashboard = dashboard + + class Attributes(Looker.Attributes): + folder_name: Optional[str] = Field(default=None, description="") + source_user_id: Optional[int] = Field(default=None, description="") + source_view_count: Optional[int] = Field(default=None, description="") + sourcelast_updater_id: Optional[int] = Field(default=None, description="") + source_last_accessed_at: Optional[datetime] = Field( + default=None, description="" + ) + source_last_viewed_at: Optional[datetime] = Field(default=None, description="") + source_content_metadata_id: Optional[int] = Field(default=None, description="") + source_query_id: Optional[int] = Field(default=None, 
description="") + model_name: Optional[str] = Field(default=None, description="") + query: Optional[LookerQuery] = Field( + default=None, description="" + ) # relationship + folder: Optional[LookerFolder] = Field( + default=None, description="" + ) # relationship + tile: Optional[LookerTile] = Field(default=None, description="") # relationship + model: Optional[LookerModel] = Field( + default=None, description="" + ) # relationship + dashboard: Optional[LookerDashboard] = Field( + default=None, description="" + ) # relationship + + attributes: "LookerLook.Attributes" = Field( + default_factory=lambda: LookerLook.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .looker_dashboard import LookerDashboard # noqa +from .looker_folder import LookerFolder # noqa +from .looker_model import LookerModel # noqa +from .looker_query import LookerQuery # noqa +from .looker_tile import LookerTile # noqa diff --git a/pyatlan/model/assets/looker_model.py b/pyatlan/model/assets/looker_model.py new file mode 100644 index 000000000..776368cea --- /dev/null +++ b/pyatlan/model/assets/looker_model.py @@ -0,0 +1,154 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, RelationField + +from .looker import Looker + + +class LookerModel(Looker): + """Description""" + + type_name: str = Field(default="LookerModel", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "LookerModel": + raise ValueError("must be LookerModel") + return v + + def __setattr__(self, name, value): + if name in LookerModel._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + PROJECT_NAME: ClassVar[KeywordField] = KeywordField("projectName", "projectName") + """ + Name of the project in which the model exists. 
+ """ + + EXPLORES: ClassVar[RelationField] = RelationField("explores") + """ + TBC + """ + PROJECT: ClassVar[RelationField] = RelationField("project") + """ + TBC + """ + LOOK: ClassVar[RelationField] = RelationField("look") + """ + TBC + """ + QUERIES: ClassVar[RelationField] = RelationField("queries") + """ + TBC + """ + FIELDS: ClassVar[RelationField] = RelationField("fields") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "project_name", + "explores", + "project", + "look", + "queries", + "fields", + ] + + @property + def project_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.project_name + + @project_name.setter + def project_name(self, project_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project_name = project_name + + @property + def explores(self) -> Optional[list[LookerExplore]]: + return None if self.attributes is None else self.attributes.explores + + @explores.setter + def explores(self, explores: Optional[list[LookerExplore]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.explores = explores + + @property + def project(self) -> Optional[LookerProject]: + return None if self.attributes is None else self.attributes.project + + @project.setter + def project(self, project: Optional[LookerProject]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project = project + + @property + def look(self) -> Optional[LookerLook]: + return None if self.attributes is None else self.attributes.look + + @look.setter + def look(self, look: Optional[LookerLook]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.look = look + + @property + def queries(self) -> Optional[list[LookerQuery]]: + return None if self.attributes is None else self.attributes.queries + + @queries.setter + def queries(self, queries: Optional[list[LookerQuery]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.queries = queries + + @property + def fields(self) -> Optional[list[LookerField]]: + return None if self.attributes is None else self.attributes.fields + + @fields.setter + def fields(self, fields: Optional[list[LookerField]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.fields = fields + + class Attributes(Looker.Attributes): + project_name: Optional[str] = Field(default=None, description="") + explores: Optional[list[LookerExplore]] = Field( + default=None, description="" + ) # relationship + project: Optional[LookerProject] = Field( + default=None, description="" + ) # relationship + look: Optional[LookerLook] = Field(default=None, description="") # relationship + queries: Optional[list[LookerQuery]] = Field( + default=None, description="" + ) # relationship + fields: Optional[list[LookerField]] = Field( + default=None, description="" + ) # relationship + + attributes: "LookerModel.Attributes" = Field( + default_factory=lambda: LookerModel.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .looker_explore import LookerExplore # noqa +from .looker_field import LookerField # noqa +from .looker_look import LookerLook # noqa +from .looker_project import LookerProject # noqa +from .looker_query import LookerQuery # noqa diff --git a/pyatlan/model/assets/looker_project.py b/pyatlan/model/assets/looker_project.py new file mode 100644 index 000000000..c1ba5225f --- /dev/null +++ b/pyatlan/model/assets/looker_project.py @@ -0,0 +1,120 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import RelationField + +from .looker import Looker + + +class LookerProject(Looker): + """Description""" + + type_name: str = Field(default="LookerProject", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "LookerProject": + raise ValueError("must be LookerProject") + return v + + def __setattr__(self, name, value): + if name in LookerProject._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + MODELS: ClassVar[RelationField] = RelationField("models") + """ + TBC + """ + EXPLORES: ClassVar[RelationField] = RelationField("explores") + """ + TBC + """ + FIELDS: ClassVar[RelationField] = RelationField("fields") + """ + TBC + """ + VIEWS: ClassVar[RelationField] = RelationField("views") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "models", + "explores", + "fields", + "views", + ] + + @property + def models(self) -> Optional[list[LookerModel]]: + return None if self.attributes is None else self.attributes.models + + @models.setter + def models(self, models: Optional[list[LookerModel]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.models = models + + @property + def explores(self) -> Optional[list[LookerExplore]]: + return None if self.attributes is None else self.attributes.explores + + @explores.setter + def explores(self, explores: Optional[list[LookerExplore]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.explores = explores + + @property + def fields(self) -> Optional[list[LookerField]]: + return None if self.attributes is None else self.attributes.fields + + @fields.setter + def fields(self, fields: Optional[list[LookerField]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.fields = fields + + @property + def views(self) -> Optional[list[LookerView]]: + return None if self.attributes is None else self.attributes.views + + @views.setter + def views(self, views: Optional[list[LookerView]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.views = views + + class Attributes(Looker.Attributes): + models: Optional[list[LookerModel]] = Field( + default=None, description="" + ) # relationship + explores: Optional[list[LookerExplore]] = Field( + default=None, description="" + ) # relationship + fields: Optional[list[LookerField]] = Field( + default=None, description="" + ) # relationship + views: Optional[list[LookerView]] = Field( + default=None, description="" + ) # relationship + + attributes: "LookerProject.Attributes" = Field( + default_factory=lambda: LookerProject.Attributes(), + description="Map of 
attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .looker_explore import LookerExplore # noqa +from .looker_field import LookerField # noqa +from .looker_model import LookerModel # noqa +from .looker_view import LookerView # noqa diff --git a/pyatlan/model/assets/looker_query.py b/pyatlan/model/assets/looker_query.py new file mode 100644 index 000000000..302edeb20 --- /dev/null +++ b/pyatlan/model/assets/looker_query.py @@ -0,0 +1,180 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, RelationField + +from .looker import Looker + + +class LookerQuery(Looker): + """Description""" + + type_name: str = Field(default="LookerQuery", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "LookerQuery": + raise ValueError("must be LookerQuery") + return v + + def __setattr__(self, name, value): + if name in LookerQuery._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + SOURCE_DEFINITION: ClassVar[KeywordField] = KeywordField( + "sourceDefinition", "sourceDefinition" + ) + """ + Deprecated. + """ + SOURCE_DEFINITION_DATABASE: ClassVar[KeywordField] = KeywordField( + "sourceDefinitionDatabase", "sourceDefinitionDatabase" + ) + """ + Deprecated. + """ + SOURCE_DEFINITION_SCHEMA: ClassVar[KeywordField] = KeywordField( + "sourceDefinitionSchema", "sourceDefinitionSchema" + ) + """ + Deprecated. + """ + FIELDS: ClassVar[KeywordField] = KeywordField("fields", "fields") + """ + Deprecated. 
+ """ + + TILES: ClassVar[RelationField] = RelationField("tiles") + """ + TBC + """ + LOOKS: ClassVar[RelationField] = RelationField("looks") + """ + TBC + """ + MODEL: ClassVar[RelationField] = RelationField("model") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "source_definition", + "source_definition_database", + "source_definition_schema", + "fields", + "tiles", + "looks", + "model", + ] + + @property + def source_definition(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.source_definition + + @source_definition.setter + def source_definition(self, source_definition: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_definition = source_definition + + @property + def source_definition_database(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.source_definition_database + ) + + @source_definition_database.setter + def source_definition_database(self, source_definition_database: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_definition_database = source_definition_database + + @property + def source_definition_schema(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.source_definition_schema + ) + + @source_definition_schema.setter + def source_definition_schema(self, source_definition_schema: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_definition_schema = source_definition_schema + + @property + def fields(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.fields + + @fields.setter + def fields(self, fields: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.fields = fields + + @property + def tiles(self) -> Optional[list[LookerTile]]: + return None if self.attributes is None else self.attributes.tiles + + @tiles.setter + def tiles(self, tiles: Optional[list[LookerTile]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.tiles = tiles + + @property + def looks(self) -> Optional[list[LookerLook]]: + return None if self.attributes is None else self.attributes.looks + + @looks.setter + def looks(self, looks: Optional[list[LookerLook]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.looks = looks + + @property + def model(self) -> Optional[LookerModel]: + return None if self.attributes is None else self.attributes.model + + @model.setter + def model(self, model: Optional[LookerModel]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.model = model + + class Attributes(Looker.Attributes): + source_definition: Optional[str] = Field(default=None, description="") + source_definition_database: Optional[str] = Field(default=None, description="") + source_definition_schema: Optional[str] = Field(default=None, description="") + fields: Optional[set[str]] = Field(default=None, description="") + tiles: Optional[list[LookerTile]] = Field( + default=None, description="" + ) # relationship + looks: Optional[list[LookerLook]] = Field( + default=None, description="" + ) # relationship + model: Optional[LookerModel] = Field( + default=None, description="" + ) # relationship + + attributes: "LookerQuery.Attributes" = Field( + default_factory=lambda: 
LookerQuery.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .looker_look import LookerLook # noqa +from .looker_model import LookerModel # noqa +from .looker_tile import LookerTile # noqa diff --git a/pyatlan/model/assets/looker_tile.py b/pyatlan/model/assets/looker_tile.py new file mode 100644 index 000000000..beec83dac --- /dev/null +++ b/pyatlan/model/assets/looker_tile.py @@ -0,0 +1,218 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, NumericField, RelationField + +from .looker import Looker + + +class LookerTile(Looker): + """Description""" + + type_name: str = Field(default="LookerTile", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "LookerTile": + raise ValueError("must be LookerTile") + return v + + def __setattr__(self, name, value): + if name in LookerTile._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + LOOKML_LINK_ID: ClassVar[KeywordField] = KeywordField( + "lookmlLinkId", "lookmlLinkId" + ) + """ + Identifier for the LoomML link. + """ + MERGE_RESULT_ID: ClassVar[KeywordField] = KeywordField( + "mergeResultId", "mergeResultId" + ) + """ + Identifier for the merge result. + """ + NOTE_TEXT: ClassVar[KeywordField] = KeywordField("noteText", "noteText") + """ + Text of notes added to the tile. + """ + QUERY_ID: ClassVar[NumericField] = NumericField("queryID", "queryID") + """ + Identifier for the query used to build this tile, from Looker. + """ + RESULT_MAKER_ID: ClassVar[NumericField] = NumericField( + "resultMakerID", "resultMakerID" + ) + """ + Identifier of the ResultMarkerLookup entry, from Looker. + """ + SUBTITLE_TEXT: ClassVar[KeywordField] = KeywordField("subtitleText", "subtitleText") + """ + Text for the subtitle for text tiles. + """ + LOOK_ID: ClassVar[NumericField] = NumericField("lookId", "lookId") + """ + Identifier of the Look used to create this tile, from Looker. 
+ """ + + QUERY: ClassVar[RelationField] = RelationField("query") + """ + TBC + """ + LOOK: ClassVar[RelationField] = RelationField("look") + """ + TBC + """ + DASHBOARD: ClassVar[RelationField] = RelationField("dashboard") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "lookml_link_id", + "merge_result_id", + "note_text", + "query_i_d", + "result_maker_i_d", + "subtitle_text", + "look_id", + "query", + "look", + "dashboard", + ] + + @property + def lookml_link_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.lookml_link_id + + @lookml_link_id.setter + def lookml_link_id(self, lookml_link_id: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.lookml_link_id = lookml_link_id + + @property + def merge_result_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.merge_result_id + + @merge_result_id.setter + def merge_result_id(self, merge_result_id: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.merge_result_id = merge_result_id + + @property + def note_text(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.note_text + + @note_text.setter + def note_text(self, note_text: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.note_text = note_text + + @property + def query_i_d(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.query_i_d + + @query_i_d.setter + def query_i_d(self, query_i_d: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_i_d = query_i_d + + @property + def result_maker_i_d(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.result_maker_i_d + + @result_maker_i_d.setter + def result_maker_i_d(self, result_maker_i_d: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.result_maker_i_d = result_maker_i_d + + @property + def subtitle_text(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.subtitle_text + + @subtitle_text.setter + def subtitle_text(self, subtitle_text: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.subtitle_text = subtitle_text + + @property + def look_id(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.look_id + + @look_id.setter + def look_id(self, look_id: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.look_id = look_id + + @property + def query(self) -> Optional[LookerQuery]: + return None if self.attributes is None else self.attributes.query + + @query.setter + def query(self, query: Optional[LookerQuery]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query = query + + @property + def look(self) -> Optional[LookerLook]: + return None if self.attributes is None else self.attributes.look + + @look.setter + def look(self, look: Optional[LookerLook]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.look = look + + @property + def dashboard(self) -> Optional[LookerDashboard]: + return None if self.attributes is None else self.attributes.dashboard + + @dashboard.setter + def dashboard(self, dashboard: Optional[LookerDashboard]): + if self.attributes 
is None: + self.attributes = self.Attributes() + self.attributes.dashboard = dashboard + + class Attributes(Looker.Attributes): + lookml_link_id: Optional[str] = Field(default=None, description="") + merge_result_id: Optional[str] = Field(default=None, description="") + note_text: Optional[str] = Field(default=None, description="") + query_i_d: Optional[int] = Field(default=None, description="") + result_maker_i_d: Optional[int] = Field(default=None, description="") + subtitle_text: Optional[str] = Field(default=None, description="") + look_id: Optional[int] = Field(default=None, description="") + query: Optional[LookerQuery] = Field( + default=None, description="" + ) # relationship + look: Optional[LookerLook] = Field(default=None, description="") # relationship + dashboard: Optional[LookerDashboard] = Field( + default=None, description="" + ) # relationship + + attributes: "LookerTile.Attributes" = Field( + default_factory=lambda: LookerTile.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .looker_dashboard import LookerDashboard # noqa +from .looker_look import LookerLook # noqa +from .looker_query import LookerQuery # noqa diff --git a/pyatlan/model/assets/looker_view.py b/pyatlan/model/assets/looker_view.py new file mode 100644 index 000000000..5f53a6d0b --- /dev/null +++ b/pyatlan/model/assets/looker_view.py @@ -0,0 +1,139 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, RelationField + +from .looker import Looker + + +class LookerView(Looker): + """Description""" + + type_name: str = Field(default="LookerView", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "LookerView": + raise ValueError("must be LookerView") + return v + + def __setattr__(self, name, value): + if name in LookerView._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + PROJECT_NAME: ClassVar[KeywordField] = KeywordField("projectName", "projectName") + """ + Name of the project in which this view exists. + """ + LOOKER_VIEW_FILE_PATH: ClassVar[KeywordField] = KeywordField( + "lookerViewFilePath", "lookerViewFilePath" + ) + """ + File path of this view within the project. + """ + LOOKER_VIEW_FILE_NAME: ClassVar[KeywordField] = KeywordField( + "lookerViewFileName", "lookerViewFileName" + ) + """ + File name of this view. 
+ """ + + PROJECT: ClassVar[RelationField] = RelationField("project") + """ + TBC + """ + FIELDS: ClassVar[RelationField] = RelationField("fields") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "project_name", + "looker_view_file_path", + "looker_view_file_name", + "project", + "fields", + ] + + @property + def project_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.project_name + + @project_name.setter + def project_name(self, project_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project_name = project_name + + @property + def looker_view_file_path(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.looker_view_file_path + ) + + @looker_view_file_path.setter + def looker_view_file_path(self, looker_view_file_path: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.looker_view_file_path = looker_view_file_path + + @property + def looker_view_file_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.looker_view_file_name + ) + + @looker_view_file_name.setter + def looker_view_file_name(self, looker_view_file_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.looker_view_file_name = looker_view_file_name + + @property + def project(self) -> Optional[LookerProject]: + return None if self.attributes is None else self.attributes.project + + @project.setter + def project(self, project: Optional[LookerProject]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project = project + + @property + def fields(self) -> Optional[list[LookerField]]: + return None if self.attributes is None else self.attributes.fields + + @fields.setter + def fields(self, fields: Optional[list[LookerField]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.fields = fields + + class Attributes(Looker.Attributes): + project_name: Optional[str] = Field(default=None, description="") + looker_view_file_path: Optional[str] = Field(default=None, description="") + looker_view_file_name: Optional[str] = Field(default=None, description="") + project: Optional[LookerProject] = Field( + default=None, description="" + ) # relationship + fields: Optional[list[LookerField]] = Field( + default=None, description="" + ) # relationship + + attributes: "LookerView.Attributes" = Field( + default_factory=lambda: LookerView.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .looker_field import LookerField # noqa +from .looker_project import LookerProject # noqa diff --git a/pyatlan/model/assets/m_c_incident.py b/pyatlan/model/assets/m_c_incident.py new file mode 100644 index 000000000..050f9c0c8 --- /dev/null +++ b/pyatlan/model/assets/m_c_incident.py @@ -0,0 +1,195 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, RelationField + +from .monte_carlo import MonteCarlo + + +class MCIncident(MonteCarlo): + """Description""" + + type_name: str = Field(default="MCIncident", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "MCIncident": + raise ValueError("must be MCIncident") + return v + + def __setattr__(self, name, value): + if name in MCIncident._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + MC_INCIDENT_ID: ClassVar[KeywordField] = KeywordField( + "mcIncidentId", "mcIncidentId" + ) + """ + Identifier of this incident, from Monte Carlo. + """ + MC_INCIDENT_TYPE: ClassVar[KeywordField] = KeywordField( + "mcIncidentType", "mcIncidentType" + ) + """ + Type of this incident. + """ + MC_INCIDENT_SUB_TYPES: ClassVar[KeywordField] = KeywordField( + "mcIncidentSubTypes", "mcIncidentSubTypes" + ) + """ + Subtypes of this incident. + """ + MC_INCIDENT_SEVERITY: ClassVar[KeywordField] = KeywordField( + "mcIncidentSeverity", "mcIncidentSeverity" + ) + """ + Severity of this incident. + """ + MC_INCIDENT_STATE: ClassVar[KeywordField] = KeywordField( + "mcIncidentState", "mcIncidentState" + ) + """ + State of this incident. + """ + MC_INCIDENT_WAREHOUSE: ClassVar[KeywordField] = KeywordField( + "mcIncidentWarehouse", "mcIncidentWarehouse" + ) + """ + Name of this incident's warehouse. + """ + + MC_MONITOR: ClassVar[RelationField] = RelationField("mcMonitor") + """ + TBC + """ + MC_INCIDENT_ASSETS: ClassVar[RelationField] = RelationField("mcIncidentAssets") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "mc_incident_id", + "mc_incident_type", + "mc_incident_sub_types", + "mc_incident_severity", + "mc_incident_state", + "mc_incident_warehouse", + "mc_monitor", + "mc_incident_assets", + ] + + @property + def mc_incident_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.mc_incident_id + + @mc_incident_id.setter + def mc_incident_id(self, mc_incident_id: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mc_incident_id = mc_incident_id + + @property + def mc_incident_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.mc_incident_type + + @mc_incident_type.setter + def mc_incident_type(self, mc_incident_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mc_incident_type = mc_incident_type + + @property + def mc_incident_sub_types(self) -> Optional[set[str]]: + return ( + None if self.attributes is None else self.attributes.mc_incident_sub_types + ) + + @mc_incident_sub_types.setter + def mc_incident_sub_types(self, mc_incident_sub_types: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mc_incident_sub_types = mc_incident_sub_types + + @property + def mc_incident_severity(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.mc_incident_severity + + @mc_incident_severity.setter + def mc_incident_severity(self, mc_incident_severity: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mc_incident_severity = mc_incident_severity + + @property + def mc_incident_state(self) -> 
Optional[str]: + return None if self.attributes is None else self.attributes.mc_incident_state + + @mc_incident_state.setter + def mc_incident_state(self, mc_incident_state: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mc_incident_state = mc_incident_state + + @property + def mc_incident_warehouse(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.mc_incident_warehouse + ) + + @mc_incident_warehouse.setter + def mc_incident_warehouse(self, mc_incident_warehouse: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mc_incident_warehouse = mc_incident_warehouse + + @property + def mc_monitor(self) -> Optional[MCMonitor]: + return None if self.attributes is None else self.attributes.mc_monitor + + @mc_monitor.setter + def mc_monitor(self, mc_monitor: Optional[MCMonitor]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mc_monitor = mc_monitor + + @property + def mc_incident_assets(self) -> Optional[list[Asset]]: + return None if self.attributes is None else self.attributes.mc_incident_assets + + @mc_incident_assets.setter + def mc_incident_assets(self, mc_incident_assets: Optional[list[Asset]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mc_incident_assets = mc_incident_assets + + class Attributes(MonteCarlo.Attributes): + mc_incident_id: Optional[str] = Field(default=None, description="") + mc_incident_type: Optional[str] = Field(default=None, description="") + mc_incident_sub_types: Optional[set[str]] = Field(default=None, description="") + mc_incident_severity: Optional[str] = Field(default=None, description="") + mc_incident_state: Optional[str] = Field(default=None, description="") + mc_incident_warehouse: Optional[str] = Field(default=None, description="") + mc_monitor: Optional[MCMonitor] = Field( + default=None, description="" + ) # relationship + mc_incident_assets: Optional[list[Asset]] = Field( + default=None, description="" + ) # relationship + + attributes: "MCIncident.Attributes" = Field( + default_factory=lambda: MCIncident.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .asset import Asset # noqa +from .m_c_monitor import MCMonitor # noqa diff --git a/pyatlan/model/assets/m_c_monitor.py b/pyatlan/model/assets/m_c_monitor.py new file mode 100644 index 000000000..b116112ff --- /dev/null +++ b/pyatlan/model/assets/m_c_monitor.py @@ -0,0 +1,447 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
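Editor's note, not part of the diff: MCIncident ties a Monte Carlo incident back to its monitor and to the affected assets through the mc_monitor and mc_incident_assets relationships declared above. A small illustrative sketch, using placeholder values and the same bare-construction assumption as the earlier note:

    from pyatlan.model.assets.m_c_incident import MCIncident
    from pyatlan.model.assets.m_c_monitor import MCMonitor

    incident = MCIncident()
    incident.mc_incident_id = "12345"          # stored on incident.attributes.mc_incident_id
    incident.mc_incident_severity = "HIGH"     # placeholder value
    incident.mc_monitor = MCMonitor()          # single-valued relationship
    incident.mc_incident_assets = []           # multi-valued relationship: list[Asset]
    assert incident.attributes.mc_incident_id == "12345"
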
+ + +from __future__ import annotations + +from datetime import datetime +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import ( + BooleanField, + KeywordField, + KeywordTextField, + NumericField, + RelationField, + TextField, +) +from pyatlan.model.structs import MCRuleComparison, MCRuleSchedule + +from .monte_carlo import MonteCarlo + + +class MCMonitor(MonteCarlo): + """Description""" + + type_name: str = Field(default="MCMonitor", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "MCMonitor": + raise ValueError("must be MCMonitor") + return v + + def __setattr__(self, name, value): + if name in MCMonitor._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + MC_MONITOR_ID: ClassVar[KeywordField] = KeywordField("mcMonitorId", "mcMonitorId") + """ + Unique identifier for this monitor, from Monte Carlo. + """ + MC_MONITOR_STATUS: ClassVar[KeywordField] = KeywordField( + "mcMonitorStatus", "mcMonitorStatus" + ) + """ + Status of this monitor. + """ + MC_MONITOR_TYPE: ClassVar[KeywordField] = KeywordField( + "mcMonitorType", "mcMonitorType" + ) + """ + Type of this monitor, for example: field health (stats) or dimension tracking (categories). + """ + MC_MONITOR_WAREHOUSE: ClassVar[KeywordField] = KeywordField( + "mcMonitorWarehouse", "mcMonitorWarehouse" + ) + """ + Name of the warehouse for this monitor. + """ + MC_MONITOR_SCHEDULE_TYPE: ClassVar[KeywordField] = KeywordField( + "mcMonitorScheduleType", "mcMonitorScheduleType" + ) + """ + Type of schedule for this monitor, for example: fixed or dynamic. + """ + MC_MONITOR_NAMESPACE: ClassVar[KeywordTextField] = KeywordTextField( + "mcMonitorNamespace", "mcMonitorNamespace.keyword", "mcMonitorNamespace" + ) + """ + Namespace of this monitor. + """ + MC_MONITOR_RULE_TYPE: ClassVar[KeywordField] = KeywordField( + "mcMonitorRuleType", "mcMonitorRuleType" + ) + """ + Type of rule for this monitor. + """ + MC_MONITOR_RULE_CUSTOM_SQL: ClassVar[KeywordField] = KeywordField( + "mcMonitorRuleCustomSql", "mcMonitorRuleCustomSql" + ) + """ + SQL code for custom SQL rules. + """ + MC_MONITOR_RULE_SCHEDULE_CONFIG: ClassVar[KeywordField] = KeywordField( + "mcMonitorRuleScheduleConfig", "mcMonitorRuleScheduleConfig" + ) + """ + Schedule details for the rule. + """ + MC_MONITOR_RULE_SCHEDULE_CONFIG_HUMANIZED: ClassVar[TextField] = TextField( + "mcMonitorRuleScheduleConfigHumanized", "mcMonitorRuleScheduleConfigHumanized" + ) + """ + Readable description of the schedule for the rule. + """ + MC_MONITOR_ALERT_CONDITION: ClassVar[TextField] = TextField( + "mcMonitorAlertCondition", "mcMonitorAlertCondition" + ) + """ + Condition on which the monitor produces an alert. + """ + MC_MONITOR_RULE_NEXT_EXECUTION_TIME: ClassVar[NumericField] = NumericField( + "mcMonitorRuleNextExecutionTime", "mcMonitorRuleNextExecutionTime" + ) + """ + Time at which the next execution of the rule should occur. + """ + MC_MONITOR_RULE_PREVIOUS_EXECUTION_TIME: ClassVar[NumericField] = NumericField( + "mcMonitorRulePreviousExecutionTime", "mcMonitorRulePreviousExecutionTime" + ) + """ + Time at which the previous execution of the rule occurred. + """ + MC_MONITOR_RULE_COMPARISONS: ClassVar[KeywordField] = KeywordField( + "mcMonitorRuleComparisons", "mcMonitorRuleComparisons" + ) + """ + Comparison logic used for the rule. 
+ """ + MC_MONITOR_RULE_IS_SNOOZED: ClassVar[BooleanField] = BooleanField( + "mcMonitorRuleIsSnoozed", "mcMonitorRuleIsSnoozed" + ) + """ + Whether the rule is currently snoozed (true) or not (false). + """ + MC_MONITOR_BREACH_RATE: ClassVar[NumericField] = NumericField( + "mcMonitorBreachRate", "mcMonitorBreachRate" + ) + """ + Rate at which this monitor is breached. + """ + MC_MONITOR_INCIDENT_COUNT: ClassVar[NumericField] = NumericField( + "mcMonitorIncidentCount", "mcMonitorIncidentCount" + ) + """ + Number of incidents associated with this monitor. + """ + + MC_MONITOR_ASSETS: ClassVar[RelationField] = RelationField("mcMonitorAssets") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "mc_monitor_id", + "mc_monitor_status", + "mc_monitor_type", + "mc_monitor_warehouse", + "mc_monitor_schedule_type", + "mc_monitor_namespace", + "mc_monitor_rule_type", + "mc_monitor_rule_custom_sql", + "mc_monitor_rule_schedule_config", + "mc_monitor_rule_schedule_config_humanized", + "mc_monitor_alert_condition", + "mc_monitor_rule_next_execution_time", + "mc_monitor_rule_previous_execution_time", + "mc_monitor_rule_comparisons", + "mc_monitor_rule_is_snoozed", + "mc_monitor_breach_rate", + "mc_monitor_incident_count", + "mc_monitor_assets", + ] + + @property + def mc_monitor_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.mc_monitor_id + + @mc_monitor_id.setter + def mc_monitor_id(self, mc_monitor_id: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mc_monitor_id = mc_monitor_id + + @property + def mc_monitor_status(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.mc_monitor_status + + @mc_monitor_status.setter + def mc_monitor_status(self, mc_monitor_status: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mc_monitor_status = mc_monitor_status + + @property + def mc_monitor_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.mc_monitor_type + + @mc_monitor_type.setter + def mc_monitor_type(self, mc_monitor_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mc_monitor_type = mc_monitor_type + + @property + def mc_monitor_warehouse(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.mc_monitor_warehouse + + @mc_monitor_warehouse.setter + def mc_monitor_warehouse(self, mc_monitor_warehouse: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mc_monitor_warehouse = mc_monitor_warehouse + + @property + def mc_monitor_schedule_type(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.mc_monitor_schedule_type + ) + + @mc_monitor_schedule_type.setter + def mc_monitor_schedule_type(self, mc_monitor_schedule_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mc_monitor_schedule_type = mc_monitor_schedule_type + + @property + def mc_monitor_namespace(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.mc_monitor_namespace + + @mc_monitor_namespace.setter + def mc_monitor_namespace(self, mc_monitor_namespace: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mc_monitor_namespace = mc_monitor_namespace + + @property + def mc_monitor_rule_type(self) -> 
Optional[str]: + return None if self.attributes is None else self.attributes.mc_monitor_rule_type + + @mc_monitor_rule_type.setter + def mc_monitor_rule_type(self, mc_monitor_rule_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mc_monitor_rule_type = mc_monitor_rule_type + + @property + def mc_monitor_rule_custom_sql(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.mc_monitor_rule_custom_sql + ) + + @mc_monitor_rule_custom_sql.setter + def mc_monitor_rule_custom_sql(self, mc_monitor_rule_custom_sql: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mc_monitor_rule_custom_sql = mc_monitor_rule_custom_sql + + @property + def mc_monitor_rule_schedule_config(self) -> Optional[MCRuleSchedule]: + return ( + None + if self.attributes is None + else self.attributes.mc_monitor_rule_schedule_config + ) + + @mc_monitor_rule_schedule_config.setter + def mc_monitor_rule_schedule_config( + self, mc_monitor_rule_schedule_config: Optional[MCRuleSchedule] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mc_monitor_rule_schedule_config = ( + mc_monitor_rule_schedule_config + ) + + @property + def mc_monitor_rule_schedule_config_humanized(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.mc_monitor_rule_schedule_config_humanized + ) + + @mc_monitor_rule_schedule_config_humanized.setter + def mc_monitor_rule_schedule_config_humanized( + self, mc_monitor_rule_schedule_config_humanized: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mc_monitor_rule_schedule_config_humanized = ( + mc_monitor_rule_schedule_config_humanized + ) + + @property + def mc_monitor_alert_condition(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.mc_monitor_alert_condition + ) + + @mc_monitor_alert_condition.setter + def mc_monitor_alert_condition(self, mc_monitor_alert_condition: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mc_monitor_alert_condition = mc_monitor_alert_condition + + @property + def mc_monitor_rule_next_execution_time(self) -> Optional[datetime]: + return ( + None + if self.attributes is None + else self.attributes.mc_monitor_rule_next_execution_time + ) + + @mc_monitor_rule_next_execution_time.setter + def mc_monitor_rule_next_execution_time( + self, mc_monitor_rule_next_execution_time: Optional[datetime] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mc_monitor_rule_next_execution_time = ( + mc_monitor_rule_next_execution_time + ) + + @property + def mc_monitor_rule_previous_execution_time(self) -> Optional[datetime]: + return ( + None + if self.attributes is None + else self.attributes.mc_monitor_rule_previous_execution_time + ) + + @mc_monitor_rule_previous_execution_time.setter + def mc_monitor_rule_previous_execution_time( + self, mc_monitor_rule_previous_execution_time: Optional[datetime] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mc_monitor_rule_previous_execution_time = ( + mc_monitor_rule_previous_execution_time + ) + + @property + def mc_monitor_rule_comparisons(self) -> Optional[list[MCRuleComparison]]: + return ( + None + if self.attributes is None + else self.attributes.mc_monitor_rule_comparisons + ) + + 
@mc_monitor_rule_comparisons.setter + def mc_monitor_rule_comparisons( + self, mc_monitor_rule_comparisons: Optional[list[MCRuleComparison]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mc_monitor_rule_comparisons = mc_monitor_rule_comparisons + + @property + def mc_monitor_rule_is_snoozed(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.mc_monitor_rule_is_snoozed + ) + + @mc_monitor_rule_is_snoozed.setter + def mc_monitor_rule_is_snoozed(self, mc_monitor_rule_is_snoozed: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mc_monitor_rule_is_snoozed = mc_monitor_rule_is_snoozed + + @property + def mc_monitor_breach_rate(self) -> Optional[float]: + return ( + None if self.attributes is None else self.attributes.mc_monitor_breach_rate + ) + + @mc_monitor_breach_rate.setter + def mc_monitor_breach_rate(self, mc_monitor_breach_rate: Optional[float]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mc_monitor_breach_rate = mc_monitor_breach_rate + + @property + def mc_monitor_incident_count(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.mc_monitor_incident_count + ) + + @mc_monitor_incident_count.setter + def mc_monitor_incident_count(self, mc_monitor_incident_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mc_monitor_incident_count = mc_monitor_incident_count + + @property + def mc_monitor_assets(self) -> Optional[list[Asset]]: + return None if self.attributes is None else self.attributes.mc_monitor_assets + + @mc_monitor_assets.setter + def mc_monitor_assets(self, mc_monitor_assets: Optional[list[Asset]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mc_monitor_assets = mc_monitor_assets + + class Attributes(MonteCarlo.Attributes): + mc_monitor_id: Optional[str] = Field(default=None, description="") + mc_monitor_status: Optional[str] = Field(default=None, description="") + mc_monitor_type: Optional[str] = Field(default=None, description="") + mc_monitor_warehouse: Optional[str] = Field(default=None, description="") + mc_monitor_schedule_type: Optional[str] = Field(default=None, description="") + mc_monitor_namespace: Optional[str] = Field(default=None, description="") + mc_monitor_rule_type: Optional[str] = Field(default=None, description="") + mc_monitor_rule_custom_sql: Optional[str] = Field(default=None, description="") + mc_monitor_rule_schedule_config: Optional[MCRuleSchedule] = Field( + default=None, description="" + ) + mc_monitor_rule_schedule_config_humanized: Optional[str] = Field( + default=None, description="" + ) + mc_monitor_alert_condition: Optional[str] = Field(default=None, description="") + mc_monitor_rule_next_execution_time: Optional[datetime] = Field( + default=None, description="" + ) + mc_monitor_rule_previous_execution_time: Optional[datetime] = Field( + default=None, description="" + ) + mc_monitor_rule_comparisons: Optional[list[MCRuleComparison]] = Field( + default=None, description="" + ) + mc_monitor_rule_is_snoozed: Optional[bool] = Field(default=None, description="") + mc_monitor_breach_rate: Optional[float] = Field(default=None, description="") + mc_monitor_incident_count: Optional[int] = Field(default=None, description="") + mc_monitor_assets: Optional[list[Asset]] = Field( + default=None, description="" + ) # relationship + + 
attributes: "MCMonitor.Attributes" = Field( + default_factory=lambda: MCMonitor.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .asset import Asset # noqa diff --git a/pyatlan/model/assets/materialised_view.py b/pyatlan/model/assets/materialised_view.py new file mode 100644 index 000000000..204a3119f --- /dev/null +++ b/pyatlan/model/assets/materialised_view.py @@ -0,0 +1,333 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from datetime import datetime +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.enums import AtlanConnectorType +from pyatlan.model.fields.atlan_fields import ( + BooleanField, + KeywordField, + NumericField, + RelationField, +) +from pyatlan.utils import init_guid, validate_required_fields + +from .s_q_l import SQL + + +class MaterialisedView(SQL): + """Description""" + + @classmethod + # @validate_arguments() + @init_guid + def create(cls, *, name: str, schema_qualified_name: str) -> MaterialisedView: + validate_required_fields( + ["name", "schema_qualified_name"], [name, schema_qualified_name] + ) + attributes = MaterialisedView.Attributes.create( + name=name, schema_qualified_name=schema_qualified_name + ) + return cls(attributes=attributes) + + type_name: str = Field(default="MaterialisedView", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "MaterialisedView": + raise ValueError("must be MaterialisedView") + return v + + def __setattr__(self, name, value): + if name in MaterialisedView._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + REFRESH_MODE: ClassVar[KeywordField] = KeywordField("refreshMode", "refreshMode") + """ + Refresh mode for this materialized view. + """ + REFRESH_METHOD: ClassVar[KeywordField] = KeywordField( + "refreshMethod", "refreshMethod" + ) + """ + Refresh method for this materialized view. + """ + STALENESS: ClassVar[KeywordField] = KeywordField("staleness", "staleness") + """ + Staleness of this materialized view. + """ + STALE_SINCE_DATE: ClassVar[NumericField] = NumericField( + "staleSinceDate", "staleSinceDate" + ) + """ + Time (epoch) from which this materialized view is stale, in milliseconds. + """ + COLUMN_COUNT: ClassVar[NumericField] = NumericField("columnCount", "columnCount") + """ + Number of columns in this materialized view. + """ + ROW_COUNT: ClassVar[NumericField] = NumericField("rowCount", "rowCount") + """ + Number of rows in this materialized view. + """ + SIZE_BYTES: ClassVar[NumericField] = NumericField("sizeBytes", "sizeBytes") + """ + Size of this materialized view, in bytes. + """ + IS_QUERY_PREVIEW: ClassVar[BooleanField] = BooleanField( + "isQueryPreview", "isQueryPreview" + ) + """ + Whether it's possible to run a preview query on this materialized view (true) or not (false). + """ + QUERY_PREVIEW_CONFIG: ClassVar[KeywordField] = KeywordField( + "queryPreviewConfig", "queryPreviewConfig" + ) + """ + Configuration for the query preview of this materialized view. + """ + ALIAS: ClassVar[KeywordField] = KeywordField("alias", "alias") + """ + Alias for this materialized view. + """ + IS_TEMPORARY: ClassVar[BooleanField] = BooleanField("isTemporary", "isTemporary") + """ + Whether this materialized view is temporary (true) or not (false). 
+ """ + DEFINITION: ClassVar[KeywordField] = KeywordField("definition", "definition") + """ + SQL definition of this materialized view. + """ + + COLUMNS: ClassVar[RelationField] = RelationField("columns") + """ + TBC + """ + ATLAN_SCHEMA: ClassVar[RelationField] = RelationField("atlanSchema") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "refresh_mode", + "refresh_method", + "staleness", + "stale_since_date", + "column_count", + "row_count", + "size_bytes", + "is_query_preview", + "query_preview_config", + "alias", + "is_temporary", + "definition", + "columns", + "atlan_schema", + ] + + @property + def refresh_mode(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.refresh_mode + + @refresh_mode.setter + def refresh_mode(self, refresh_mode: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.refresh_mode = refresh_mode + + @property + def refresh_method(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.refresh_method + + @refresh_method.setter + def refresh_method(self, refresh_method: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.refresh_method = refresh_method + + @property + def staleness(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.staleness + + @staleness.setter + def staleness(self, staleness: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.staleness = staleness + + @property + def stale_since_date(self) -> Optional[datetime]: + return None if self.attributes is None else self.attributes.stale_since_date + + @stale_since_date.setter + def stale_since_date(self, stale_since_date: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.stale_since_date = stale_since_date + + @property + def column_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.column_count + + @column_count.setter + def column_count(self, column_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.column_count = column_count + + @property + def row_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.row_count + + @row_count.setter + def row_count(self, row_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.row_count = row_count + + @property + def size_bytes(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.size_bytes + + @size_bytes.setter + def size_bytes(self, size_bytes: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.size_bytes = size_bytes + + @property + def is_query_preview(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_query_preview + + @is_query_preview.setter + def is_query_preview(self, is_query_preview: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_query_preview = is_query_preview + + @property + def query_preview_config(self) -> Optional[dict[str, str]]: + return None if self.attributes is None else self.attributes.query_preview_config + + @query_preview_config.setter + def query_preview_config(self, query_preview_config: Optional[dict[str, str]]): + if self.attributes is 
None: + self.attributes = self.Attributes() + self.attributes.query_preview_config = query_preview_config + + @property + def alias(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.alias + + @alias.setter + def alias(self, alias: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.alias = alias + + @property + def is_temporary(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_temporary + + @is_temporary.setter + def is_temporary(self, is_temporary: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_temporary = is_temporary + + @property + def definition(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.definition + + @definition.setter + def definition(self, definition: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.definition = definition + + @property + def columns(self) -> Optional[list[Column]]: + return None if self.attributes is None else self.attributes.columns + + @columns.setter + def columns(self, columns: Optional[list[Column]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.columns = columns + + @property + def atlan_schema(self) -> Optional[Schema]: + return None if self.attributes is None else self.attributes.atlan_schema + + @atlan_schema.setter + def atlan_schema(self, atlan_schema: Optional[Schema]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.atlan_schema = atlan_schema + + class Attributes(SQL.Attributes): + refresh_mode: Optional[str] = Field(default=None, description="") + refresh_method: Optional[str] = Field(default=None, description="") + staleness: Optional[str] = Field(default=None, description="") + stale_since_date: Optional[datetime] = Field(default=None, description="") + column_count: Optional[int] = Field(default=None, description="") + row_count: Optional[int] = Field(default=None, description="") + size_bytes: Optional[int] = Field(default=None, description="") + is_query_preview: Optional[bool] = Field(default=None, description="") + query_preview_config: Optional[dict[str, str]] = Field( + default=None, description="" + ) + alias: Optional[str] = Field(default=None, description="") + is_temporary: Optional[bool] = Field(default=None, description="") + definition: Optional[str] = Field(default=None, description="") + columns: Optional[list[Column]] = Field( + default=None, description="" + ) # relationship + atlan_schema: Optional[Schema] = Field( + default=None, description="" + ) # relationship + + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, *, name: str, schema_qualified_name: str + ) -> MaterialisedView.Attributes: + if not name: + raise ValueError("name cannot be blank") + validate_required_fields(["schema_qualified_name"], [schema_qualified_name]) + fields = schema_qualified_name.split("/") + if len(fields) != 5: + raise ValueError("Invalid schema_qualified_name") + try: + connector_type = AtlanConnectorType(fields[1]) # type:ignore + except ValueError as e: + raise ValueError("Invalid schema_qualified_name") from e + return MaterialisedView.Attributes( + name=name, + database_name=fields[3], + connection_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}", + database_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}/{fields[3]}", + 
qualified_name=f"{schema_qualified_name}/{name}", + schema_qualified_name=schema_qualified_name, + schema_name=fields[4], + connector_name=connector_type.value, + atlan_schema=Schema.ref_by_qualified_name(schema_qualified_name), + ) + + attributes: "MaterialisedView.Attributes" = Field( + default_factory=lambda: MaterialisedView.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .column import Column # noqa +from .schema import Schema # noqa diff --git a/pyatlan/model/assets/matillion.py b/pyatlan/model/assets/matillion.py new file mode 100644 index 000000000..3825e1acd --- /dev/null +++ b/pyatlan/model/assets/matillion.py @@ -0,0 +1,60 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField + +from .catalog import Catalog + + +class Matillion(Catalog): + """Description""" + + type_name: str = Field(default="Matillion", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "Matillion": + raise ValueError("must be Matillion") + return v + + def __setattr__(self, name, value): + if name in Matillion._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + MATILLION_VERSION: ClassVar[KeywordField] = KeywordField( + "matillionVersion", "matillionVersion" + ) + """ + Current point in time state of a project. + """ + + _convenience_properties: ClassVar[list[str]] = [ + "matillion_version", + ] + + @property + def matillion_version(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.matillion_version + + @matillion_version.setter + def matillion_version(self, matillion_version: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.matillion_version = matillion_version + + class Attributes(Catalog.Attributes): + matillion_version: Optional[str] = Field(default=None, description="") + + attributes: "Matillion.Attributes" = Field( + default_factory=lambda: Matillion.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) diff --git a/pyatlan/model/assets/matillion_component.py b/pyatlan/model/assets/matillion_component.py new file mode 100644 index 000000000..683a86a79 --- /dev/null +++ b/pyatlan/model/assets/matillion_component.py @@ -0,0 +1,285 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import ( + KeywordField, + KeywordTextField, + RelationField, +) + +from .matillion import Matillion + + +class MatillionComponent(Matillion): + """Description""" + + type_name: str = Field(default="MatillionComponent", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "MatillionComponent": + raise ValueError("must be MatillionComponent") + return v + + def __setattr__(self, name, value): + if name in MatillionComponent._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + MATILLION_COMPONENT_ID: ClassVar[KeywordField] = KeywordField( + "matillionComponentId", "matillionComponentId" + ) + """ + Unique identifier of the component in Matillion. + """ + MATILLION_COMPONENT_IMPLEMENTATION_ID: ClassVar[KeywordField] = KeywordField( + "matillionComponentImplementationId", "matillionComponentImplementationId" + ) + """ + Unique identifier for the type of the component in Matillion. + """ + MATILLION_COMPONENT_LINKED_JOB: ClassVar[KeywordField] = KeywordField( + "matillionComponentLinkedJob", "matillionComponentLinkedJob" + ) + """ + Job details of the job to which the component internally links. + """ + MATILLION_COMPONENT_LAST_RUN_STATUS: ClassVar[KeywordField] = KeywordField( + "matillionComponentLastRunStatus", "matillionComponentLastRunStatus" + ) + """ + Latest run status of the component within a job. + """ + MATILLION_COMPONENT_LAST_FIVE_RUN_STATUS: ClassVar[KeywordField] = KeywordField( + "matillionComponentLastFiveRunStatus", "matillionComponentLastFiveRunStatus" + ) + """ + Last five run statuses of the component within a job. + """ + MATILLION_COMPONENT_SQLS: ClassVar[KeywordField] = KeywordField( + "matillionComponentSqls", "matillionComponentSqls" + ) + """ + SQL queries used by the component. + """ + MATILLION_JOB_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "matillionJobName", "matillionJobName.keyword", "matillionJobName" + ) + """ + Simple name of the job to which the component belongs. + """ + MATILLION_JOB_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "matillionJobQualifiedName", + "matillionJobQualifiedName", + "matillionJobQualifiedName.text", + ) + """ + Unique name of the job to which the component belongs. 
+ """ + + MATILLION_PROCESS: ClassVar[RelationField] = RelationField("matillionProcess") + """ + TBC + """ + MATILLION_JOB: ClassVar[RelationField] = RelationField("matillionJob") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "matillion_component_id", + "matillion_component_implementation_id", + "matillion_component_linked_job", + "matillion_component_last_run_status", + "matillion_component_last_five_run_status", + "matillion_component_sqls", + "matillion_job_name", + "matillion_job_qualified_name", + "matillion_process", + "matillion_job", + ] + + @property + def matillion_component_id(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.matillion_component_id + ) + + @matillion_component_id.setter + def matillion_component_id(self, matillion_component_id: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.matillion_component_id = matillion_component_id + + @property + def matillion_component_implementation_id(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.matillion_component_implementation_id + ) + + @matillion_component_implementation_id.setter + def matillion_component_implementation_id( + self, matillion_component_implementation_id: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.matillion_component_implementation_id = ( + matillion_component_implementation_id + ) + + @property + def matillion_component_linked_job(self) -> Optional[dict[str, str]]: + return ( + None + if self.attributes is None + else self.attributes.matillion_component_linked_job + ) + + @matillion_component_linked_job.setter + def matillion_component_linked_job( + self, matillion_component_linked_job: Optional[dict[str, str]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.matillion_component_linked_job = matillion_component_linked_job + + @property + def matillion_component_last_run_status(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.matillion_component_last_run_status + ) + + @matillion_component_last_run_status.setter + def matillion_component_last_run_status( + self, matillion_component_last_run_status: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.matillion_component_last_run_status = ( + matillion_component_last_run_status + ) + + @property + def matillion_component_last_five_run_status(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.matillion_component_last_five_run_status + ) + + @matillion_component_last_five_run_status.setter + def matillion_component_last_five_run_status( + self, matillion_component_last_five_run_status: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.matillion_component_last_five_run_status = ( + matillion_component_last_five_run_status + ) + + @property + def matillion_component_sqls(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.matillion_component_sqls + ) + + @matillion_component_sqls.setter + def matillion_component_sqls(self, matillion_component_sqls: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.matillion_component_sqls = matillion_component_sqls + + @property + def matillion_job_name(self) -> 
Optional[str]: + return None if self.attributes is None else self.attributes.matillion_job_name + + @matillion_job_name.setter + def matillion_job_name(self, matillion_job_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.matillion_job_name = matillion_job_name + + @property + def matillion_job_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.matillion_job_qualified_name + ) + + @matillion_job_qualified_name.setter + def matillion_job_qualified_name(self, matillion_job_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.matillion_job_qualified_name = matillion_job_qualified_name + + @property + def matillion_process(self) -> Optional[Process]: + return None if self.attributes is None else self.attributes.matillion_process + + @matillion_process.setter + def matillion_process(self, matillion_process: Optional[Process]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.matillion_process = matillion_process + + @property + def matillion_job(self) -> Optional[MatillionJob]: + return None if self.attributes is None else self.attributes.matillion_job + + @matillion_job.setter + def matillion_job(self, matillion_job: Optional[MatillionJob]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.matillion_job = matillion_job + + class Attributes(Matillion.Attributes): + matillion_component_id: Optional[str] = Field(default=None, description="") + matillion_component_implementation_id: Optional[str] = Field( + default=None, description="" + ) + matillion_component_linked_job: Optional[dict[str, str]] = Field( + default=None, description="" + ) + matillion_component_last_run_status: Optional[str] = Field( + default=None, description="" + ) + matillion_component_last_five_run_status: Optional[str] = Field( + default=None, description="" + ) + matillion_component_sqls: Optional[set[str]] = Field( + default=None, description="" + ) + matillion_job_name: Optional[str] = Field(default=None, description="") + matillion_job_qualified_name: Optional[str] = Field( + default=None, description="" + ) + matillion_process: Optional[Process] = Field( + default=None, description="" + ) # relationship + matillion_job: Optional[MatillionJob] = Field( + default=None, description="" + ) # relationship + + attributes: "MatillionComponent.Attributes" = Field( + default_factory=lambda: MatillionComponent.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .matillion_job import MatillionJob # noqa +from .process import Process # noqa diff --git a/pyatlan/model/assets/matillion_group.py b/pyatlan/model/assets/matillion_group.py new file mode 100644 index 000000000..094dbf545 --- /dev/null +++ b/pyatlan/model/assets/matillion_group.py @@ -0,0 +1,84 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
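# Illustrative sketch, not part of the patch: the convenience properties generated in
# matillion_component.py above keep callers away from the nested Attributes model. Getters
# are None-safe and setters create the Attributes object on demand when it is missing. Bare
# construction is assumed to be acceptable here because every attribute defaults to None.
from pyatlan.model.assets import MatillionComponent

component = MatillionComponent()
assert component.matillion_component_id is None      # nothing populated yet
component.matillion_component_id = "12345"           # routed through the property setter
component.matillion_component_sqls = {"SELECT 1"}    # Optional[set[str]] per the model
assert component.attributes.matillion_component_id == "12345"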
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import NumericField, RelationField + +from .matillion import Matillion + + +class MatillionGroup(Matillion): + """Description""" + + type_name: str = Field(default="MatillionGroup", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "MatillionGroup": + raise ValueError("must be MatillionGroup") + return v + + def __setattr__(self, name, value): + if name in MatillionGroup._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + MATILLION_PROJECT_COUNT: ClassVar[NumericField] = NumericField( + "matillionProjectCount", "matillionProjectCount" + ) + """ + Number of projects within the group. + """ + + MATILLION_PROJECTS: ClassVar[RelationField] = RelationField("matillionProjects") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "matillion_project_count", + "matillion_projects", + ] + + @property + def matillion_project_count(self) -> Optional[int]: + return ( + None if self.attributes is None else self.attributes.matillion_project_count + ) + + @matillion_project_count.setter + def matillion_project_count(self, matillion_project_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.matillion_project_count = matillion_project_count + + @property + def matillion_projects(self) -> Optional[list[MatillionProject]]: + return None if self.attributes is None else self.attributes.matillion_projects + + @matillion_projects.setter + def matillion_projects(self, matillion_projects: Optional[list[MatillionProject]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.matillion_projects = matillion_projects + + class Attributes(Matillion.Attributes): + matillion_project_count: Optional[int] = Field(default=None, description="") + matillion_projects: Optional[list[MatillionProject]] = Field( + default=None, description="" + ) # relationship + + attributes: "MatillionGroup.Attributes" = Field( + default_factory=lambda: MatillionGroup.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .matillion_project import MatillionProject # noqa diff --git a/pyatlan/model/assets/matillion_job.py b/pyatlan/model/assets/matillion_job.py new file mode 100644 index 000000000..94e8b1ccf --- /dev/null +++ b/pyatlan/model/assets/matillion_job.py @@ -0,0 +1,225 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
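# Illustrative sketch, not part of the patch: the relationship attributes added in
# matillion_group.py above (matillion_projects) take references to other assets.
# ref_by_qualified_name() is the same helper used for Schema in materialised_view.py and is
# assumed to be available on every asset type; the qualified names below are hypothetical.
from pyatlan.model.assets import MatillionGroup, MatillionProject

group = MatillionGroup()
group.matillion_project_count = 2
group.matillion_projects = [
    MatillionProject.ref_by_qualified_name("default/matillion/1234567890/group-1/project-a"),
    MatillionProject.ref_by_qualified_name("default/matillion/1234567890/group-1/project-b"),
]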
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.enums import MatillionJobType +from pyatlan.model.fields.atlan_fields import ( + KeywordField, + KeywordTextField, + NumericField, + RelationField, +) + +from .matillion import Matillion + + +class MatillionJob(Matillion): + """Description""" + + type_name: str = Field(default="MatillionJob", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "MatillionJob": + raise ValueError("must be MatillionJob") + return v + + def __setattr__(self, name, value): + if name in MatillionJob._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + MATILLION_JOB_TYPE: ClassVar[KeywordField] = KeywordField( + "matillionJobType", "matillionJobType" + ) + """ + Type of the job, for example: orchestration or transformation. + """ + MATILLION_JOB_PATH: ClassVar[KeywordTextField] = KeywordTextField( + "matillionJobPath", "matillionJobPath", "matillionJobPath.text" + ) + """ + Path of the job within the project. Jobs can be managed at multiple folder levels within a project. + """ + MATILLION_JOB_COMPONENT_COUNT: ClassVar[NumericField] = NumericField( + "matillionJobComponentCount", "matillionJobComponentCount" + ) + """ + Number of components within the job. + """ + MATILLION_JOB_SCHEDULE: ClassVar[KeywordField] = KeywordField( + "matillionJobSchedule", "matillionJobSchedule" + ) + """ + How the job is scheduled, for example: weekly or monthly. + """ + MATILLION_PROJECT_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "matillionProjectName", "matillionProjectName.keyword", "matillionProjectName" + ) + """ + Simple name of the project to which the job belongs. + """ + MATILLION_PROJECT_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "matillionProjectQualifiedName", + "matillionProjectQualifiedName", + "matillionProjectQualifiedName.text", + ) + """ + Unique name of the project to which the job belongs. 
+ """ + + MATILLION_PROJECT: ClassVar[RelationField] = RelationField("matillionProject") + """ + TBC + """ + MATILLION_COMPONENTS: ClassVar[RelationField] = RelationField("matillionComponents") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "matillion_job_type", + "matillion_job_path", + "matillion_job_component_count", + "matillion_job_schedule", + "matillion_project_name", + "matillion_project_qualified_name", + "matillion_project", + "matillion_components", + ] + + @property + def matillion_job_type(self) -> Optional[MatillionJobType]: + return None if self.attributes is None else self.attributes.matillion_job_type + + @matillion_job_type.setter + def matillion_job_type(self, matillion_job_type: Optional[MatillionJobType]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.matillion_job_type = matillion_job_type + + @property + def matillion_job_path(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.matillion_job_path + + @matillion_job_path.setter + def matillion_job_path(self, matillion_job_path: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.matillion_job_path = matillion_job_path + + @property + def matillion_job_component_count(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.matillion_job_component_count + ) + + @matillion_job_component_count.setter + def matillion_job_component_count( + self, matillion_job_component_count: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.matillion_job_component_count = matillion_job_component_count + + @property + def matillion_job_schedule(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.matillion_job_schedule + ) + + @matillion_job_schedule.setter + def matillion_job_schedule(self, matillion_job_schedule: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.matillion_job_schedule = matillion_job_schedule + + @property + def matillion_project_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.matillion_project_name + ) + + @matillion_project_name.setter + def matillion_project_name(self, matillion_project_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.matillion_project_name = matillion_project_name + + @property + def matillion_project_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.matillion_project_qualified_name + ) + + @matillion_project_qualified_name.setter + def matillion_project_qualified_name( + self, matillion_project_qualified_name: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.matillion_project_qualified_name = ( + matillion_project_qualified_name + ) + + @property + def matillion_project(self) -> Optional[MatillionProject]: + return None if self.attributes is None else self.attributes.matillion_project + + @matillion_project.setter + def matillion_project(self, matillion_project: Optional[MatillionProject]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.matillion_project = matillion_project + + @property + def matillion_components(self) -> Optional[list[MatillionComponent]]: + return None if self.attributes is None else 
self.attributes.matillion_components + + @matillion_components.setter + def matillion_components( + self, matillion_components: Optional[list[MatillionComponent]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.matillion_components = matillion_components + + class Attributes(Matillion.Attributes): + matillion_job_type: Optional[MatillionJobType] = Field( + default=None, description="" + ) + matillion_job_path: Optional[str] = Field(default=None, description="") + matillion_job_component_count: Optional[int] = Field( + default=None, description="" + ) + matillion_job_schedule: Optional[str] = Field(default=None, description="") + matillion_project_name: Optional[str] = Field(default=None, description="") + matillion_project_qualified_name: Optional[str] = Field( + default=None, description="" + ) + matillion_project: Optional[MatillionProject] = Field( + default=None, description="" + ) # relationship + matillion_components: Optional[list[MatillionComponent]] = Field( + default=None, description="" + ) # relationship + + attributes: "MatillionJob.Attributes" = Field( + default_factory=lambda: MatillionJob.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .matillion_component import MatillionComponent # noqa +from .matillion_project import MatillionProject # noqa diff --git a/pyatlan/model/assets/matillion_project.py b/pyatlan/model/assets/matillion_project.py new file mode 100644 index 000000000..c6534d399 --- /dev/null +++ b/pyatlan/model/assets/matillion_project.py @@ -0,0 +1,194 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import ( + KeywordField, + KeywordTextField, + NumericField, + RelationField, +) + +from .matillion import Matillion + + +class MatillionProject(Matillion): + """Description""" + + type_name: str = Field(default="MatillionProject", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "MatillionProject": + raise ValueError("must be MatillionProject") + return v + + def __setattr__(self, name, value): + if name in MatillionProject._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + MATILLION_VERSIONS: ClassVar[KeywordField] = KeywordField( + "matillionVersions", "matillionVersions" + ) + """ + List of versions in the project. + """ + MATILLION_ENVIRONMENTS: ClassVar[KeywordField] = KeywordField( + "matillionEnvironments", "matillionEnvironments" + ) + """ + List of environments in the project. + """ + MATILLION_PROJECT_JOB_COUNT: ClassVar[NumericField] = NumericField( + "matillionProjectJobCount", "matillionProjectJobCount" + ) + """ + Number of jobs in the project. + """ + MATILLION_GROUP_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "matillionGroupName", "matillionGroupName.keyword", "matillionGroupName" + ) + """ + Simple name of the Matillion group to which the project belongs. + """ + MATILLION_GROUP_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "matillionGroupQualifiedName", + "matillionGroupQualifiedName", + "matillionGroupQualifiedName.text", + ) + """ + Unique name of the Matillion group to which the project belongs. 
+ """ + + MATILLION_JOBS: ClassVar[RelationField] = RelationField("matillionJobs") + """ + TBC + """ + MATILLION_GROUP: ClassVar[RelationField] = RelationField("matillionGroup") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "matillion_versions", + "matillion_environments", + "matillion_project_job_count", + "matillion_group_name", + "matillion_group_qualified_name", + "matillion_jobs", + "matillion_group", + ] + + @property + def matillion_versions(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.matillion_versions + + @matillion_versions.setter + def matillion_versions(self, matillion_versions: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.matillion_versions = matillion_versions + + @property + def matillion_environments(self) -> Optional[set[str]]: + return ( + None if self.attributes is None else self.attributes.matillion_environments + ) + + @matillion_environments.setter + def matillion_environments(self, matillion_environments: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.matillion_environments = matillion_environments + + @property + def matillion_project_job_count(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.matillion_project_job_count + ) + + @matillion_project_job_count.setter + def matillion_project_job_count(self, matillion_project_job_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.matillion_project_job_count = matillion_project_job_count + + @property + def matillion_group_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.matillion_group_name + + @matillion_group_name.setter + def matillion_group_name(self, matillion_group_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.matillion_group_name = matillion_group_name + + @property + def matillion_group_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.matillion_group_qualified_name + ) + + @matillion_group_qualified_name.setter + def matillion_group_qualified_name( + self, matillion_group_qualified_name: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.matillion_group_qualified_name = matillion_group_qualified_name + + @property + def matillion_jobs(self) -> Optional[list[MatillionJob]]: + return None if self.attributes is None else self.attributes.matillion_jobs + + @matillion_jobs.setter + def matillion_jobs(self, matillion_jobs: Optional[list[MatillionJob]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.matillion_jobs = matillion_jobs + + @property + def matillion_group(self) -> Optional[MatillionGroup]: + return None if self.attributes is None else self.attributes.matillion_group + + @matillion_group.setter + def matillion_group(self, matillion_group: Optional[MatillionGroup]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.matillion_group = matillion_group + + class Attributes(Matillion.Attributes): + matillion_versions: Optional[set[str]] = Field(default=None, description="") + matillion_environments: Optional[set[str]] = Field(default=None, description="") + matillion_project_job_count: Optional[int] = Field(default=None, 
description="") + matillion_group_name: Optional[str] = Field(default=None, description="") + matillion_group_qualified_name: Optional[str] = Field( + default=None, description="" + ) + matillion_jobs: Optional[list[MatillionJob]] = Field( + default=None, description="" + ) # relationship + matillion_group: Optional[MatillionGroup] = Field( + default=None, description="" + ) # relationship + + attributes: "MatillionProject.Attributes" = Field( + default_factory=lambda: MatillionProject.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .matillion_group import MatillionGroup # noqa +from .matillion_job import MatillionJob # noqa diff --git a/pyatlan/model/assets/asset48.py b/pyatlan/model/assets/metabase.py similarity index 88% rename from pyatlan/model/assets/asset48.py rename to pyatlan/model/assets/metabase.py index f72c84b6c..7b4313e5c 100644 --- a/pyatlan/model/assets/asset48.py +++ b/pyatlan/model/assets/metabase.py @@ -6,17 +6,17 @@ from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.fields.atlan_fields import KeywordTextField -from .asset19 import BI +from .b_i import BI class Metabase(BI): """Description""" - type_name: str = Field("Metabase", allow_mutation=False) + type_name: str = Field(default="Metabase", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -84,11 +84,9 @@ def metabase_collection_qualified_name( ) class Attributes(BI.Attributes): - metabase_collection_name: Optional[str] = Field( - None, description="", alias="metabaseCollectionName" - ) + metabase_collection_name: Optional[str] = Field(default=None, description="") metabase_collection_qualified_name: Optional[str] = Field( - None, description="", alias="metabaseCollectionQualifiedName" + default=None, description="" ) attributes: "Metabase.Attributes" = Field( @@ -96,6 +94,3 @@ class Attributes(BI.Attributes): description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) - - -Metabase.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/metabase_collection.py b/pyatlan/model/assets/metabase_collection.py new file mode 100644 index 000000000..2ea76b66d --- /dev/null +++ b/pyatlan/model/assets/metabase_collection.py @@ -0,0 +1,172 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import ( + BooleanField, + KeywordField, + KeywordTextField, + RelationField, +) + +from .metabase import Metabase + + +class MetabaseCollection(Metabase): + """Description""" + + type_name: str = Field(default="MetabaseCollection", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "MetabaseCollection": + raise ValueError("must be MetabaseCollection") + return v + + def __setattr__(self, name, value): + if name in MetabaseCollection._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + METABASE_SLUG: ClassVar[KeywordTextField] = KeywordTextField( + "metabaseSlug", "metabaseSlug", "metabaseSlug.text" + ) + """ + + """ + METABASE_COLOR: ClassVar[KeywordField] = KeywordField( + "metabaseColor", "metabaseColor" + ) + """ + + """ + METABASE_NAMESPACE: ClassVar[KeywordTextField] = KeywordTextField( + "metabaseNamespace", "metabaseNamespace", "metabaseNamespace.text" + ) + """ + + """ + METABASE_IS_PERSONAL_COLLECTION: ClassVar[BooleanField] = BooleanField( + "metabaseIsPersonalCollection", "metabaseIsPersonalCollection" + ) + """ + + """ + + METABASE_DASHBOARDS: ClassVar[RelationField] = RelationField("metabaseDashboards") + """ + TBC + """ + METABASE_QUESTIONS: ClassVar[RelationField] = RelationField("metabaseQuestions") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "metabase_slug", + "metabase_color", + "metabase_namespace", + "metabase_is_personal_collection", + "metabase_dashboards", + "metabase_questions", + ] + + @property + def metabase_slug(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.metabase_slug + + @metabase_slug.setter + def metabase_slug(self, metabase_slug: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metabase_slug = metabase_slug + + @property + def metabase_color(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.metabase_color + + @metabase_color.setter + def metabase_color(self, metabase_color: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metabase_color = metabase_color + + @property + def metabase_namespace(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.metabase_namespace + + @metabase_namespace.setter + def metabase_namespace(self, metabase_namespace: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metabase_namespace = metabase_namespace + + @property + def metabase_is_personal_collection(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.metabase_is_personal_collection + ) + + @metabase_is_personal_collection.setter + def metabase_is_personal_collection( + self, metabase_is_personal_collection: Optional[bool] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metabase_is_personal_collection = ( + metabase_is_personal_collection + ) + + @property + def metabase_dashboards(self) -> Optional[list[MetabaseDashboard]]: + return None if self.attributes is None else self.attributes.metabase_dashboards + + @metabase_dashboards.setter + def metabase_dashboards( + self, metabase_dashboards: Optional[list[MetabaseDashboard]] + ): + if 
self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metabase_dashboards = metabase_dashboards + + @property + def metabase_questions(self) -> Optional[list[MetabaseQuestion]]: + return None if self.attributes is None else self.attributes.metabase_questions + + @metabase_questions.setter + def metabase_questions(self, metabase_questions: Optional[list[MetabaseQuestion]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metabase_questions = metabase_questions + + class Attributes(Metabase.Attributes): + metabase_slug: Optional[str] = Field(default=None, description="") + metabase_color: Optional[str] = Field(default=None, description="") + metabase_namespace: Optional[str] = Field(default=None, description="") + metabase_is_personal_collection: Optional[bool] = Field( + default=None, description="" + ) + metabase_dashboards: Optional[list[MetabaseDashboard]] = Field( + default=None, description="" + ) # relationship + metabase_questions: Optional[list[MetabaseQuestion]] = Field( + default=None, description="" + ) # relationship + + attributes: "MetabaseCollection.Attributes" = Field( + default_factory=lambda: MetabaseCollection.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .metabase_dashboard import MetabaseDashboard # noqa +from .metabase_question import MetabaseQuestion # noqa diff --git a/pyatlan/model/assets/metabase_dashboard.py b/pyatlan/model/assets/metabase_dashboard.py new file mode 100644 index 000000000..93e6b41a8 --- /dev/null +++ b/pyatlan/model/assets/metabase_dashboard.py @@ -0,0 +1,103 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
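# Illustrative sketch, not part of the patch: the nested Attributes model in
# metabase_collection.py above can also be built up-front and handed to the constructor,
# mirroring what create() does for asset types that provide it. name and qualified_name come
# from the inherited Asset attributes; the qualified name here is hypothetical.
from pyatlan.model.assets import MetabaseCollection

attrs = MetabaseCollection.Attributes(
    name="Marketing",
    qualified_name="default/metabase/1234567890/collection/1",
    metabase_namespace="analytics",
)
collection = MetabaseCollection(attributes=attrs)
assert collection.metabase_namespace == "analytics"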
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import NumericField, RelationField + +from .metabase import Metabase + + +class MetabaseDashboard(Metabase): + """Description""" + + type_name: str = Field(default="MetabaseDashboard", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "MetabaseDashboard": + raise ValueError("must be MetabaseDashboard") + return v + + def __setattr__(self, name, value): + if name in MetabaseDashboard._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + METABASE_QUESTION_COUNT: ClassVar[NumericField] = NumericField( + "metabaseQuestionCount", "metabaseQuestionCount" + ) + """ + + """ + + METABASE_QUESTIONS: ClassVar[RelationField] = RelationField("metabaseQuestions") + """ + TBC + """ + METABASE_COLLECTION: ClassVar[RelationField] = RelationField("metabaseCollection") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "metabase_question_count", + "metabase_questions", + "metabase_collection", + ] + + @property + def metabase_question_count(self) -> Optional[int]: + return ( + None if self.attributes is None else self.attributes.metabase_question_count + ) + + @metabase_question_count.setter + def metabase_question_count(self, metabase_question_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metabase_question_count = metabase_question_count + + @property + def metabase_questions(self) -> Optional[list[MetabaseQuestion]]: + return None if self.attributes is None else self.attributes.metabase_questions + + @metabase_questions.setter + def metabase_questions(self, metabase_questions: Optional[list[MetabaseQuestion]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metabase_questions = metabase_questions + + @property + def metabase_collection(self) -> Optional[MetabaseCollection]: + return None if self.attributes is None else self.attributes.metabase_collection + + @metabase_collection.setter + def metabase_collection(self, metabase_collection: Optional[MetabaseCollection]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metabase_collection = metabase_collection + + class Attributes(Metabase.Attributes): + metabase_question_count: Optional[int] = Field(default=None, description="") + metabase_questions: Optional[list[MetabaseQuestion]] = Field( + default=None, description="" + ) # relationship + metabase_collection: Optional[MetabaseCollection] = Field( + default=None, description="" + ) # relationship + + attributes: "MetabaseDashboard.Attributes" = Field( + default_factory=lambda: MetabaseDashboard.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .metabase_collection import MetabaseCollection # noqa +from .metabase_question import MetabaseQuestion # noqa diff --git a/pyatlan/model/assets/metabase_question.py b/pyatlan/model/assets/metabase_question.py new file mode 100644 index 000000000..165020a6f --- /dev/null +++ b/pyatlan/model/assets/metabase_question.py @@ -0,0 +1,147 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
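# Illustrative sketch, not part of the patch: the type_name validator generated for each model
# (as in metabase_dashboard.py above) pins the class to its own type, so a payload carrying the
# wrong type name fails validation immediately.
from pydantic.v1 import ValidationError

from pyatlan.model.assets import MetabaseDashboard

try:
    MetabaseDashboard(type_name="MetabaseCollection")
except ValidationError as err:
    print(err)  # reports "must be MetabaseDashboard"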
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import ( + KeywordTextField, + NumericField, + RelationField, +) + +from .metabase import Metabase + + +class MetabaseQuestion(Metabase): + """Description""" + + type_name: str = Field(default="MetabaseQuestion", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "MetabaseQuestion": + raise ValueError("must be MetabaseQuestion") + return v + + def __setattr__(self, name, value): + if name in MetabaseQuestion._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + METABASE_DASHBOARD_COUNT: ClassVar[NumericField] = NumericField( + "metabaseDashboardCount", "metabaseDashboardCount" + ) + """ + + """ + METABASE_QUERY_TYPE: ClassVar[KeywordTextField] = KeywordTextField( + "metabaseQueryType", "metabaseQueryType", "metabaseQueryType.text" + ) + """ + + """ + METABASE_QUERY: ClassVar[KeywordTextField] = KeywordTextField( + "metabaseQuery", "metabaseQuery.keyword", "metabaseQuery" + ) + """ + + """ + + METABASE_DASHBOARDS: ClassVar[RelationField] = RelationField("metabaseDashboards") + """ + TBC + """ + METABASE_COLLECTION: ClassVar[RelationField] = RelationField("metabaseCollection") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "metabase_dashboard_count", + "metabase_query_type", + "metabase_query", + "metabase_dashboards", + "metabase_collection", + ] + + @property + def metabase_dashboard_count(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.metabase_dashboard_count + ) + + @metabase_dashboard_count.setter + def metabase_dashboard_count(self, metabase_dashboard_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metabase_dashboard_count = metabase_dashboard_count + + @property + def metabase_query_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.metabase_query_type + + @metabase_query_type.setter + def metabase_query_type(self, metabase_query_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metabase_query_type = metabase_query_type + + @property + def metabase_query(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.metabase_query + + @metabase_query.setter + def metabase_query(self, metabase_query: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metabase_query = metabase_query + + @property + def metabase_dashboards(self) -> Optional[list[MetabaseDashboard]]: + return None if self.attributes is None else self.attributes.metabase_dashboards + + @metabase_dashboards.setter + def metabase_dashboards( + self, metabase_dashboards: Optional[list[MetabaseDashboard]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metabase_dashboards = metabase_dashboards + + @property + def metabase_collection(self) -> Optional[MetabaseCollection]: + return None if self.attributes is None else self.attributes.metabase_collection + + @metabase_collection.setter + def metabase_collection(self, metabase_collection: Optional[MetabaseCollection]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metabase_collection = metabase_collection + + class 
Attributes(Metabase.Attributes): + metabase_dashboard_count: Optional[int] = Field(default=None, description="") + metabase_query_type: Optional[str] = Field(default=None, description="") + metabase_query: Optional[str] = Field(default=None, description="") + metabase_dashboards: Optional[list[MetabaseDashboard]] = Field( + default=None, description="" + ) # relationship + metabase_collection: Optional[MetabaseCollection] = Field( + default=None, description="" + ) # relationship + + attributes: "MetabaseQuestion.Attributes" = Field( + default_factory=lambda: MetabaseQuestion.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .metabase_collection import MetabaseCollection # noqa +from .metabase_dashboard import MetabaseDashboard # noqa diff --git a/pyatlan/model/assets/metric.py b/pyatlan/model/assets/metric.py new file mode 100644 index 000000000..000b85504 --- /dev/null +++ b/pyatlan/model/assets/metric.py @@ -0,0 +1,185 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, RelationField, TextField + +from .data_quality import DataQuality + + +class Metric(DataQuality): + """Description""" + + type_name: str = Field(default="Metric", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "Metric": + raise ValueError("must be Metric") + return v + + def __setattr__(self, name, value): + if name in Metric._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + METRIC_TYPE: ClassVar[KeywordField] = KeywordField("metricType", "metricType") + """ + Type of the metric. + """ + METRIC_SQL: ClassVar[KeywordField] = KeywordField("metricSQL", "metricSQL") + """ + SQL query used to compute the metric. + """ + METRIC_FILTERS: ClassVar[TextField] = TextField("metricFilters", "metricFilters") + """ + Filters to be applied to the metric query. + """ + METRIC_TIME_GRAINS: ClassVar[TextField] = TextField( + "metricTimeGrains", "metricTimeGrains" + ) + """ + List of time grains to be applied to the metric query. 
+ """ + + METRIC_TIMESTAMP_COLUMN: ClassVar[RelationField] = RelationField( + "metricTimestampColumn" + ) + """ + TBC + """ + ASSETS: ClassVar[RelationField] = RelationField("assets") + """ + TBC + """ + METRIC_DIMENSION_COLUMNS: ClassVar[RelationField] = RelationField( + "metricDimensionColumns" + ) + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "metric_type", + "metric_s_q_l", + "metric_filters", + "metric_time_grains", + "metric_timestamp_column", + "assets", + "metric_dimension_columns", + ] + + @property + def metric_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.metric_type + + @metric_type.setter + def metric_type(self, metric_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metric_type = metric_type + + @property + def metric_s_q_l(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.metric_s_q_l + + @metric_s_q_l.setter + def metric_s_q_l(self, metric_s_q_l: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metric_s_q_l = metric_s_q_l + + @property + def metric_filters(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.metric_filters + + @metric_filters.setter + def metric_filters(self, metric_filters: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metric_filters = metric_filters + + @property + def metric_time_grains(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.metric_time_grains + + @metric_time_grains.setter + def metric_time_grains(self, metric_time_grains: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metric_time_grains = metric_time_grains + + @property + def metric_timestamp_column(self) -> Optional[Column]: + return ( + None if self.attributes is None else self.attributes.metric_timestamp_column + ) + + @metric_timestamp_column.setter + def metric_timestamp_column(self, metric_timestamp_column: Optional[Column]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metric_timestamp_column = metric_timestamp_column + + @property + def assets(self) -> Optional[list[Asset]]: + return None if self.attributes is None else self.attributes.assets + + @assets.setter + def assets(self, assets: Optional[list[Asset]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.assets = assets + + @property + def metric_dimension_columns(self) -> Optional[list[Column]]: + return ( + None + if self.attributes is None + else self.attributes.metric_dimension_columns + ) + + @metric_dimension_columns.setter + def metric_dimension_columns( + self, metric_dimension_columns: Optional[list[Column]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.metric_dimension_columns = metric_dimension_columns + + class Attributes(DataQuality.Attributes): + metric_type: Optional[str] = Field(default=None, description="") + metric_s_q_l: Optional[str] = Field(default=None, description="") + metric_filters: Optional[str] = Field(default=None, description="") + metric_time_grains: Optional[set[str]] = Field(default=None, description="") + metric_timestamp_column: Optional[Column] = Field( + default=None, description="" + ) # relationship + assets: Optional[list[Asset]] = Field( + default=None, 
description="" + ) # relationship + metric_dimension_columns: Optional[list[Column]] = Field( + default=None, description="" + ) # relationship + + attributes: "Metric.Attributes" = Field( + default_factory=lambda: Metric.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +# Imports required for fixing circular dependencies: +from .asset import Asset # noqa # isort:skip +from .catalog import Catalog # noqa # isort:skip +from .s_q_l import SQL # noqa # isort:skip + + +from .asset import Asset # noqa +from .column import Column # noqa diff --git a/pyatlan/model/assets/asset52.py b/pyatlan/model/assets/micro_strategy.py similarity index 91% rename from pyatlan/model/assets/asset52.py rename to pyatlan/model/assets/micro_strategy.py index a4f5b4088..f76dbb827 100644 --- a/pyatlan/model/assets/asset52.py +++ b/pyatlan/model/assets/micro_strategy.py @@ -7,7 +7,7 @@ from datetime import datetime from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.fields.atlan_fields import ( BooleanField, @@ -16,13 +16,13 @@ NumericField, ) -from .asset19 import BI +from .b_i import BI class MicroStrategy(BI): """Description""" - type_name: str = Field("MicroStrategy", allow_mutation=False) + type_name: str = Field(default="MicroStrategy", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -283,34 +283,30 @@ def micro_strategy_location( class Attributes(BI.Attributes): micro_strategy_project_qualified_name: Optional[str] = Field( - None, description="", alias="microStrategyProjectQualifiedName" - ) - micro_strategy_project_name: Optional[str] = Field( - None, description="", alias="microStrategyProjectName" + default=None, description="" ) + micro_strategy_project_name: Optional[str] = Field(default=None, description="") micro_strategy_cube_qualified_names: Optional[set[str]] = Field( - None, description="", alias="microStrategyCubeQualifiedNames" + default=None, description="" ) micro_strategy_cube_names: Optional[set[str]] = Field( - None, description="", alias="microStrategyCubeNames" + default=None, description="" ) micro_strategy_report_qualified_names: Optional[set[str]] = Field( - None, description="", alias="microStrategyReportQualifiedNames" + default=None, description="" ) micro_strategy_report_names: Optional[set[str]] = Field( - None, description="", alias="microStrategyReportNames" + default=None, description="" ) micro_strategy_is_certified: Optional[bool] = Field( - None, description="", alias="microStrategyIsCertified" - ) - micro_strategy_certified_by: Optional[str] = Field( - None, description="", alias="microStrategyCertifiedBy" + default=None, description="" ) + micro_strategy_certified_by: Optional[str] = Field(default=None, description="") micro_strategy_certified_at: Optional[datetime] = Field( - None, description="", alias="microStrategyCertifiedAt" + default=None, description="" ) micro_strategy_location: Optional[list[dict[str, str]]] = Field( - None, description="", alias="microStrategyLocation" + default=None, description="" ) attributes: "MicroStrategy.Attributes" = Field( @@ -318,6 +314,3 @@ class Attributes(BI.Attributes): description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) - - -MicroStrategy.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/micro_strategy_attribute.py b/pyatlan/model/assets/micro_strategy_attribute.py new file mode 100644 index 000000000..8fe354b77 --- /dev/null +++ b/pyatlan/model/assets/micro_strategy_attribute.py @@ -0,0 +1,167 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, RelationField + +from .micro_strategy import MicroStrategy + + +class MicroStrategyAttribute(MicroStrategy): + """Description""" + + type_name: str = Field(default="MicroStrategyAttribute", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "MicroStrategyAttribute": + raise ValueError("must be MicroStrategyAttribute") + return v + + def __setattr__(self, name, value): + if name in MicroStrategyAttribute._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + MICRO_STRATEGY_ATTRIBUTE_FORMS: ClassVar[KeywordField] = KeywordField( + "microStrategyAttributeForms", "microStrategyAttributeForms" + ) + """ + JSON string specifying the attribute's name, description, displayFormat, etc. + """ + + MICRO_STRATEGY_REPORTS: ClassVar[RelationField] = RelationField( + "microStrategyReports" + ) + """ + TBC + """ + MICRO_STRATEGY_METRICS: ClassVar[RelationField] = RelationField( + "microStrategyMetrics" + ) + """ + TBC + """ + MICRO_STRATEGY_CUBES: ClassVar[RelationField] = RelationField("microStrategyCubes") + """ + TBC + """ + MICRO_STRATEGY_PROJECT: ClassVar[RelationField] = RelationField( + "microStrategyProject" + ) + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "micro_strategy_attribute_forms", + "micro_strategy_reports", + "micro_strategy_metrics", + "micro_strategy_cubes", + "micro_strategy_project", + ] + + @property + def micro_strategy_attribute_forms(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_attribute_forms + ) + + @micro_strategy_attribute_forms.setter + def micro_strategy_attribute_forms( + self, micro_strategy_attribute_forms: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_attribute_forms = micro_strategy_attribute_forms + + @property + def micro_strategy_reports(self) -> Optional[list[MicroStrategyReport]]: + return ( + None if self.attributes is None else self.attributes.micro_strategy_reports + ) + + @micro_strategy_reports.setter + def micro_strategy_reports( + self, micro_strategy_reports: Optional[list[MicroStrategyReport]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_reports = micro_strategy_reports + + @property + def micro_strategy_metrics(self) -> Optional[list[MicroStrategyMetric]]: + return ( + None if self.attributes is None else self.attributes.micro_strategy_metrics + ) + + @micro_strategy_metrics.setter + def micro_strategy_metrics( + self, micro_strategy_metrics: Optional[list[MicroStrategyMetric]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_metrics = micro_strategy_metrics + + @property + def micro_strategy_cubes(self) -> 
Optional[list[MicroStrategyCube]]: + return None if self.attributes is None else self.attributes.micro_strategy_cubes + + @micro_strategy_cubes.setter + def micro_strategy_cubes( + self, micro_strategy_cubes: Optional[list[MicroStrategyCube]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_cubes = micro_strategy_cubes + + @property + def micro_strategy_project(self) -> Optional[MicroStrategyProject]: + return ( + None if self.attributes is None else self.attributes.micro_strategy_project + ) + + @micro_strategy_project.setter + def micro_strategy_project( + self, micro_strategy_project: Optional[MicroStrategyProject] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_project = micro_strategy_project + + class Attributes(MicroStrategy.Attributes): + micro_strategy_attribute_forms: Optional[str] = Field( + default=None, description="" + ) + micro_strategy_reports: Optional[list[MicroStrategyReport]] = Field( + default=None, description="" + ) # relationship + micro_strategy_metrics: Optional[list[MicroStrategyMetric]] = Field( + default=None, description="" + ) # relationship + micro_strategy_cubes: Optional[list[MicroStrategyCube]] = Field( + default=None, description="" + ) # relationship + micro_strategy_project: Optional[MicroStrategyProject] = Field( + default=None, description="" + ) # relationship + + attributes: "MicroStrategyAttribute.Attributes" = Field( + default_factory=lambda: MicroStrategyAttribute.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .micro_strategy_cube import MicroStrategyCube # noqa +from .micro_strategy_metric import MicroStrategyMetric # noqa +from .micro_strategy_project import MicroStrategyProject # noqa +from .micro_strategy_report import MicroStrategyReport # noqa diff --git a/pyatlan/model/assets/micro_strategy_cube.py b/pyatlan/model/assets/micro_strategy_cube.py new file mode 100644 index 000000000..895b82080 --- /dev/null +++ b/pyatlan/model/assets/micro_strategy_cube.py @@ -0,0 +1,166 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, RelationField + +from .micro_strategy import MicroStrategy + + +class MicroStrategyCube(MicroStrategy): + """Description""" + + type_name: str = Field(default="MicroStrategyCube", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "MicroStrategyCube": + raise ValueError("must be MicroStrategyCube") + return v + + def __setattr__(self, name, value): + if name in MicroStrategyCube._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + MICRO_STRATEGY_CUBE_TYPE: ClassVar[KeywordField] = KeywordField( + "microStrategyCubeType", "microStrategyCubeType" + ) + """ + Type of cube, for example: OLAP or MTDI. + """ + MICRO_STRATEGY_CUBE_QUERY: ClassVar[KeywordField] = KeywordField( + "microStrategyCubeQuery", "microStrategyCubeQuery" + ) + """ + Query used to create the cube. 
+ """ + + MICRO_STRATEGY_METRICS: ClassVar[RelationField] = RelationField( + "microStrategyMetrics" + ) + """ + TBC + """ + MICRO_STRATEGY_PROJECT: ClassVar[RelationField] = RelationField( + "microStrategyProject" + ) + """ + TBC + """ + MICRO_STRATEGY_ATTRIBUTES: ClassVar[RelationField] = RelationField( + "microStrategyAttributes" + ) + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "micro_strategy_cube_type", + "micro_strategy_cube_query", + "micro_strategy_metrics", + "micro_strategy_project", + "micro_strategy_attributes", + ] + + @property + def micro_strategy_cube_type(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_cube_type + ) + + @micro_strategy_cube_type.setter + def micro_strategy_cube_type(self, micro_strategy_cube_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_cube_type = micro_strategy_cube_type + + @property + def micro_strategy_cube_query(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_cube_query + ) + + @micro_strategy_cube_query.setter + def micro_strategy_cube_query(self, micro_strategy_cube_query: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_cube_query = micro_strategy_cube_query + + @property + def micro_strategy_metrics(self) -> Optional[list[MicroStrategyMetric]]: + return ( + None if self.attributes is None else self.attributes.micro_strategy_metrics + ) + + @micro_strategy_metrics.setter + def micro_strategy_metrics( + self, micro_strategy_metrics: Optional[list[MicroStrategyMetric]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_metrics = micro_strategy_metrics + + @property + def micro_strategy_project(self) -> Optional[MicroStrategyProject]: + return ( + None if self.attributes is None else self.attributes.micro_strategy_project + ) + + @micro_strategy_project.setter + def micro_strategy_project( + self, micro_strategy_project: Optional[MicroStrategyProject] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_project = micro_strategy_project + + @property + def micro_strategy_attributes(self) -> Optional[list[MicroStrategyAttribute]]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_attributes + ) + + @micro_strategy_attributes.setter + def micro_strategy_attributes( + self, micro_strategy_attributes: Optional[list[MicroStrategyAttribute]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_attributes = micro_strategy_attributes + + class Attributes(MicroStrategy.Attributes): + micro_strategy_cube_type: Optional[str] = Field(default=None, description="") + micro_strategy_cube_query: Optional[str] = Field(default=None, description="") + micro_strategy_metrics: Optional[list[MicroStrategyMetric]] = Field( + default=None, description="" + ) # relationship + micro_strategy_project: Optional[MicroStrategyProject] = Field( + default=None, description="" + ) # relationship + micro_strategy_attributes: Optional[list[MicroStrategyAttribute]] = Field( + default=None, description="" + ) # relationship + + attributes: "MicroStrategyCube.Attributes" = Field( + default_factory=lambda: MicroStrategyCube.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .micro_strategy_attribute import MicroStrategyAttribute # noqa +from .micro_strategy_metric import MicroStrategyMetric # noqa +from .micro_strategy_project import MicroStrategyProject # noqa diff --git a/pyatlan/model/assets/micro_strategy_document.py b/pyatlan/model/assets/micro_strategy_document.py new file mode 100644 index 000000000..c1d54a047 --- /dev/null +++ b/pyatlan/model/assets/micro_strategy_document.py @@ -0,0 +1,69 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import RelationField + +from .micro_strategy import MicroStrategy + + +class MicroStrategyDocument(MicroStrategy): + """Description""" + + type_name: str = Field(default="MicroStrategyDocument", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "MicroStrategyDocument": + raise ValueError("must be MicroStrategyDocument") + return v + + def __setattr__(self, name, value): + if name in MicroStrategyDocument._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + MICRO_STRATEGY_PROJECT: ClassVar[RelationField] = RelationField( + "microStrategyProject" + ) + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "micro_strategy_project", + ] + + @property + def micro_strategy_project(self) -> Optional[MicroStrategyProject]: + return ( + None if self.attributes is None else self.attributes.micro_strategy_project + ) + + @micro_strategy_project.setter + def micro_strategy_project( + self, micro_strategy_project: Optional[MicroStrategyProject] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_project = micro_strategy_project + + class Attributes(MicroStrategy.Attributes): + micro_strategy_project: Optional[MicroStrategyProject] = Field( + default=None, description="" + ) # relationship + + attributes: "MicroStrategyDocument.Attributes" = Field( + default_factory=lambda: MicroStrategyDocument.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .micro_strategy_project import MicroStrategyProject # noqa diff --git a/pyatlan/model/assets/micro_strategy_dossier.py b/pyatlan/model/assets/micro_strategy_dossier.py new file mode 100644 index 000000000..52a1c3b77 --- /dev/null +++ b/pyatlan/model/assets/micro_strategy_dossier.py @@ -0,0 +1,129 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, RelationField + +from .micro_strategy import MicroStrategy + + +class MicroStrategyDossier(MicroStrategy): + """Description""" + + type_name: str = Field(default="MicroStrategyDossier", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "MicroStrategyDossier": + raise ValueError("must be MicroStrategyDossier") + return v + + def __setattr__(self, name, value): + if name in MicroStrategyDossier._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + MICRO_STRATEGY_DOSSIER_CHAPTER_NAMES: ClassVar[KeywordField] = KeywordField( + "microStrategyDossierChapterNames", "microStrategyDossierChapterNames" + ) + """ + List of chapter names in this dossier. + """ + + MICRO_STRATEGY_VISUALIZATIONS: ClassVar[RelationField] = RelationField( + "microStrategyVisualizations" + ) + """ + TBC + """ + MICRO_STRATEGY_PROJECT: ClassVar[RelationField] = RelationField( + "microStrategyProject" + ) + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "micro_strategy_dossier_chapter_names", + "micro_strategy_visualizations", + "micro_strategy_project", + ] + + @property + def micro_strategy_dossier_chapter_names(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_dossier_chapter_names + ) + + @micro_strategy_dossier_chapter_names.setter + def micro_strategy_dossier_chapter_names( + self, micro_strategy_dossier_chapter_names: Optional[set[str]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_dossier_chapter_names = ( + micro_strategy_dossier_chapter_names + ) + + @property + def micro_strategy_visualizations( + self, + ) -> Optional[list[MicroStrategyVisualization]]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_visualizations + ) + + @micro_strategy_visualizations.setter + def micro_strategy_visualizations( + self, micro_strategy_visualizations: Optional[list[MicroStrategyVisualization]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_visualizations = micro_strategy_visualizations + + @property + def micro_strategy_project(self) -> Optional[MicroStrategyProject]: + return ( + None if self.attributes is None else self.attributes.micro_strategy_project + ) + + @micro_strategy_project.setter + def micro_strategy_project( + self, micro_strategy_project: Optional[MicroStrategyProject] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_project = micro_strategy_project + + class Attributes(MicroStrategy.Attributes): + micro_strategy_dossier_chapter_names: Optional[set[str]] = Field( + default=None, description="" + ) + micro_strategy_visualizations: Optional[ + list[MicroStrategyVisualization] + ] = Field( + default=None, description="" + ) # relationship + micro_strategy_project: Optional[MicroStrategyProject] = Field( + default=None, description="" + ) # relationship + + attributes: "MicroStrategyDossier.Attributes" = Field( + default_factory=lambda: MicroStrategyDossier.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .micro_strategy_project import MicroStrategyProject # noqa +from .micro_strategy_visualization import MicroStrategyVisualization # noqa diff --git a/pyatlan/model/assets/micro_strategy_fact.py b/pyatlan/model/assets/micro_strategy_fact.py new file mode 100644 index 000000000..8e4fb6747 --- /dev/null +++ b/pyatlan/model/assets/micro_strategy_fact.py @@ -0,0 +1,123 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, RelationField + +from .micro_strategy import MicroStrategy + + +class MicroStrategyFact(MicroStrategy): + """Description""" + + type_name: str = Field(default="MicroStrategyFact", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "MicroStrategyFact": + raise ValueError("must be MicroStrategyFact") + return v + + def __setattr__(self, name, value): + if name in MicroStrategyFact._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + MICRO_STRATEGY_FACT_EXPRESSIONS: ClassVar[KeywordField] = KeywordField( + "microStrategyFactExpressions", "microStrategyFactExpressions" + ) + """ + List of expressions for this fact. + """ + + MICRO_STRATEGY_METRICS: ClassVar[RelationField] = RelationField( + "microStrategyMetrics" + ) + """ + TBC + """ + MICRO_STRATEGY_PROJECT: ClassVar[RelationField] = RelationField( + "microStrategyProject" + ) + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "micro_strategy_fact_expressions", + "micro_strategy_metrics", + "micro_strategy_project", + ] + + @property + def micro_strategy_fact_expressions(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_fact_expressions + ) + + @micro_strategy_fact_expressions.setter + def micro_strategy_fact_expressions( + self, micro_strategy_fact_expressions: Optional[set[str]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_fact_expressions = ( + micro_strategy_fact_expressions + ) + + @property + def micro_strategy_metrics(self) -> Optional[list[MicroStrategyMetric]]: + return ( + None if self.attributes is None else self.attributes.micro_strategy_metrics + ) + + @micro_strategy_metrics.setter + def micro_strategy_metrics( + self, micro_strategy_metrics: Optional[list[MicroStrategyMetric]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_metrics = micro_strategy_metrics + + @property + def micro_strategy_project(self) -> Optional[MicroStrategyProject]: + return ( + None if self.attributes is None else self.attributes.micro_strategy_project + ) + + @micro_strategy_project.setter + def micro_strategy_project( + self, micro_strategy_project: Optional[MicroStrategyProject] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_project = micro_strategy_project + + class Attributes(MicroStrategy.Attributes): + micro_strategy_fact_expressions: Optional[set[str]] = Field( + default=None, description="" + ) + micro_strategy_metrics: Optional[list[MicroStrategyMetric]] = Field( + default=None, description="" + ) # relationship + micro_strategy_project: 
Optional[MicroStrategyProject] = Field(
+ default=None, description=""
+ ) # relationship
+
+ attributes: "MicroStrategyFact.Attributes" = Field(
+ default_factory=lambda: MicroStrategyFact.Attributes(),
+ description="Map of attributes in the instance and their values. The specific keys of this map will vary by "
+ "type, so are described in the sub-types of this schema.\n",
+ )
+
+
+from .micro_strategy_metric import MicroStrategyMetric # noqa
+from .micro_strategy_project import MicroStrategyProject # noqa
diff --git a/pyatlan/model/assets/micro_strategy_metric.py b/pyatlan/model/assets/micro_strategy_metric.py
new file mode 100644
index 000000000..d18cb3902
--- /dev/null
+++ b/pyatlan/model/assets/micro_strategy_metric.py
@@ -0,0 +1,426 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2022 Atlan Pte. Ltd.
+
+
+from __future__ import annotations
+
+from typing import ClassVar, Optional
+
+from pydantic.v1 import Field, validator
+
+from pyatlan.model.fields.atlan_fields import (
+ KeywordField,
+ KeywordTextField,
+ RelationField,
+)
+
+from .micro_strategy import MicroStrategy
+
+
+class MicroStrategyMetric(MicroStrategy):
+ """Description"""
+
+ type_name: str = Field(default="MicroStrategyMetric", allow_mutation=False)
+
+ @validator("type_name")
+ def validate_type_name(cls, v):
+ if v != "MicroStrategyMetric":
+ raise ValueError("must be MicroStrategyMetric")
+ return v
+
+ def __setattr__(self, name, value):
+ if name in MicroStrategyMetric._convenience_properties:
+ return object.__setattr__(self, name, value)
+ super().__setattr__(name, value)
+
+ MICRO_STRATEGY_METRIC_EXPRESSION: ClassVar[KeywordField] = KeywordField(
+ "microStrategyMetricExpression", "microStrategyMetricExpression"
+ )
+ """
+ Text specifying this metric's expression.
+ """
+ MICRO_STRATEGY_ATTRIBUTE_QUALIFIED_NAMES: ClassVar[
+ KeywordTextField
+ ] = KeywordTextField(
+ "microStrategyAttributeQualifiedNames",
+ "microStrategyAttributeQualifiedNames",
+ "microStrategyAttributeQualifiedNames.text",
+ )
+ """
+ List of unique names of attributes related to this metric.
+ """
+ MICRO_STRATEGY_ATTRIBUTE_NAMES: ClassVar[KeywordTextField] = KeywordTextField(
+ "microStrategyAttributeNames",
+ "microStrategyAttributeNames.keyword",
+ "microStrategyAttributeNames",
+ )
+ """
+ List of simple names of attributes related to this metric.
+ """
+ MICRO_STRATEGY_FACT_QUALIFIED_NAMES: ClassVar[KeywordTextField] = KeywordTextField(
+ "microStrategyFactQualifiedNames",
+ "microStrategyFactQualifiedNames",
+ "microStrategyFactQualifiedNames.text",
+ )
+ """
+ List of unique names of facts related to this metric.
+ """
+ MICRO_STRATEGY_FACT_NAMES: ClassVar[KeywordTextField] = KeywordTextField(
+ "microStrategyFactNames",
+ "microStrategyFactNames.keyword",
+ "microStrategyFactNames",
+ )
+ """
+ List of simple names of facts related to this metric.
+ """
+ MICRO_STRATEGY_METRIC_PARENT_QUALIFIED_NAMES: ClassVar[
+ KeywordTextField
+ ] = KeywordTextField(
+ "microStrategyMetricParentQualifiedNames",
+ "microStrategyMetricParentQualifiedNames",
+ "microStrategyMetricParentQualifiedNames.text",
+ )
+ """
+ List of unique names of parent metrics of this metric.
+ """
+ MICRO_STRATEGY_METRIC_PARENT_NAMES: ClassVar[KeywordTextField] = KeywordTextField(
+ "microStrategyMetricParentNames",
+ "microStrategyMetricParentNames.keyword",
+ "microStrategyMetricParentNames",
+ )
+ """
+ List of simple names of parent metrics of this metric.
+ """ + + MICRO_STRATEGY_METRIC_PARENTS: ClassVar[RelationField] = RelationField( + "microStrategyMetricParents" + ) + """ + TBC + """ + MICRO_STRATEGY_FACTS: ClassVar[RelationField] = RelationField("microStrategyFacts") + """ + TBC + """ + MICRO_STRATEGY_REPORTS: ClassVar[RelationField] = RelationField( + "microStrategyReports" + ) + """ + TBC + """ + MICRO_STRATEGY_CUBES: ClassVar[RelationField] = RelationField("microStrategyCubes") + """ + TBC + """ + MICRO_STRATEGY_METRIC_CHILDREN: ClassVar[RelationField] = RelationField( + "microStrategyMetricChildren" + ) + """ + TBC + """ + MICRO_STRATEGY_PROJECT: ClassVar[RelationField] = RelationField( + "microStrategyProject" + ) + """ + TBC + """ + MICRO_STRATEGY_ATTRIBUTES: ClassVar[RelationField] = RelationField( + "microStrategyAttributes" + ) + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "micro_strategy_metric_expression", + "micro_strategy_attribute_qualified_names", + "micro_strategy_attribute_names", + "micro_strategy_fact_qualified_names", + "micro_strategy_fact_names", + "micro_strategy_metric_parent_qualified_names", + "micro_strategy_metric_parent_names", + "micro_strategy_metric_parents", + "micro_strategy_facts", + "micro_strategy_reports", + "micro_strategy_cubes", + "micro_strategy_metric_children", + "micro_strategy_project", + "micro_strategy_attributes", + ] + + @property + def micro_strategy_metric_expression(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_metric_expression + ) + + @micro_strategy_metric_expression.setter + def micro_strategy_metric_expression( + self, micro_strategy_metric_expression: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_metric_expression = ( + micro_strategy_metric_expression + ) + + @property + def micro_strategy_attribute_qualified_names(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_attribute_qualified_names + ) + + @micro_strategy_attribute_qualified_names.setter + def micro_strategy_attribute_qualified_names( + self, micro_strategy_attribute_qualified_names: Optional[set[str]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_attribute_qualified_names = ( + micro_strategy_attribute_qualified_names + ) + + @property + def micro_strategy_attribute_names(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_attribute_names + ) + + @micro_strategy_attribute_names.setter + def micro_strategy_attribute_names( + self, micro_strategy_attribute_names: Optional[set[str]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_attribute_names = micro_strategy_attribute_names + + @property + def micro_strategy_fact_qualified_names(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_fact_qualified_names + ) + + @micro_strategy_fact_qualified_names.setter + def micro_strategy_fact_qualified_names( + self, micro_strategy_fact_qualified_names: Optional[set[str]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_fact_qualified_names = ( + micro_strategy_fact_qualified_names + ) + + @property + def micro_strategy_fact_names(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else 
self.attributes.micro_strategy_fact_names + ) + + @micro_strategy_fact_names.setter + def micro_strategy_fact_names(self, micro_strategy_fact_names: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_fact_names = micro_strategy_fact_names + + @property + def micro_strategy_metric_parent_qualified_names(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_metric_parent_qualified_names + ) + + @micro_strategy_metric_parent_qualified_names.setter + def micro_strategy_metric_parent_qualified_names( + self, micro_strategy_metric_parent_qualified_names: Optional[set[str]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_metric_parent_qualified_names = ( + micro_strategy_metric_parent_qualified_names + ) + + @property + def micro_strategy_metric_parent_names(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_metric_parent_names + ) + + @micro_strategy_metric_parent_names.setter + def micro_strategy_metric_parent_names( + self, micro_strategy_metric_parent_names: Optional[set[str]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_metric_parent_names = ( + micro_strategy_metric_parent_names + ) + + @property + def micro_strategy_metric_parents(self) -> Optional[list[MicroStrategyMetric]]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_metric_parents + ) + + @micro_strategy_metric_parents.setter + def micro_strategy_metric_parents( + self, micro_strategy_metric_parents: Optional[list[MicroStrategyMetric]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_metric_parents = micro_strategy_metric_parents + + @property + def micro_strategy_facts(self) -> Optional[list[MicroStrategyFact]]: + return None if self.attributes is None else self.attributes.micro_strategy_facts + + @micro_strategy_facts.setter + def micro_strategy_facts( + self, micro_strategy_facts: Optional[list[MicroStrategyFact]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_facts = micro_strategy_facts + + @property + def micro_strategy_reports(self) -> Optional[list[MicroStrategyReport]]: + return ( + None if self.attributes is None else self.attributes.micro_strategy_reports + ) + + @micro_strategy_reports.setter + def micro_strategy_reports( + self, micro_strategy_reports: Optional[list[MicroStrategyReport]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_reports = micro_strategy_reports + + @property + def micro_strategy_cubes(self) -> Optional[list[MicroStrategyCube]]: + return None if self.attributes is None else self.attributes.micro_strategy_cubes + + @micro_strategy_cubes.setter + def micro_strategy_cubes( + self, micro_strategy_cubes: Optional[list[MicroStrategyCube]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_cubes = micro_strategy_cubes + + @property + def micro_strategy_metric_children(self) -> Optional[list[MicroStrategyMetric]]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_metric_children + ) + + @micro_strategy_metric_children.setter + def micro_strategy_metric_children( + self, micro_strategy_metric_children: 
Optional[list[MicroStrategyMetric]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_metric_children = micro_strategy_metric_children + + @property + def micro_strategy_project(self) -> Optional[MicroStrategyProject]: + return ( + None if self.attributes is None else self.attributes.micro_strategy_project + ) + + @micro_strategy_project.setter + def micro_strategy_project( + self, micro_strategy_project: Optional[MicroStrategyProject] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_project = micro_strategy_project + + @property + def micro_strategy_attributes(self) -> Optional[list[MicroStrategyAttribute]]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_attributes + ) + + @micro_strategy_attributes.setter + def micro_strategy_attributes( + self, micro_strategy_attributes: Optional[list[MicroStrategyAttribute]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_attributes = micro_strategy_attributes + + class Attributes(MicroStrategy.Attributes): + micro_strategy_metric_expression: Optional[str] = Field( + default=None, description="" + ) + micro_strategy_attribute_qualified_names: Optional[set[str]] = Field( + default=None, description="" + ) + micro_strategy_attribute_names: Optional[set[str]] = Field( + default=None, description="" + ) + micro_strategy_fact_qualified_names: Optional[set[str]] = Field( + default=None, description="" + ) + micro_strategy_fact_names: Optional[set[str]] = Field( + default=None, description="" + ) + micro_strategy_metric_parent_qualified_names: Optional[set[str]] = Field( + default=None, description="" + ) + micro_strategy_metric_parent_names: Optional[set[str]] = Field( + default=None, description="" + ) + micro_strategy_metric_parents: Optional[list[MicroStrategyMetric]] = Field( + default=None, description="" + ) # relationship + micro_strategy_facts: Optional[list[MicroStrategyFact]] = Field( + default=None, description="" + ) # relationship + micro_strategy_reports: Optional[list[MicroStrategyReport]] = Field( + default=None, description="" + ) # relationship + micro_strategy_cubes: Optional[list[MicroStrategyCube]] = Field( + default=None, description="" + ) # relationship + micro_strategy_metric_children: Optional[list[MicroStrategyMetric]] = Field( + default=None, description="" + ) # relationship + micro_strategy_project: Optional[MicroStrategyProject] = Field( + default=None, description="" + ) # relationship + micro_strategy_attributes: Optional[list[MicroStrategyAttribute]] = Field( + default=None, description="" + ) # relationship + + attributes: "MicroStrategyMetric.Attributes" = Field( + default_factory=lambda: MicroStrategyMetric.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .micro_strategy_attribute import MicroStrategyAttribute # noqa +from .micro_strategy_cube import MicroStrategyCube # noqa +from .micro_strategy_fact import MicroStrategyFact # noqa +from .micro_strategy_project import MicroStrategyProject # noqa +from .micro_strategy_report import MicroStrategyReport # noqa diff --git a/pyatlan/model/assets/micro_strategy_project.py b/pyatlan/model/assets/micro_strategy_project.py new file mode 100644 index 000000000..f53470562 --- /dev/null +++ b/pyatlan/model/assets/micro_strategy_project.py @@ -0,0 +1,246 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import RelationField + +from .micro_strategy import MicroStrategy + + +class MicroStrategyProject(MicroStrategy): + """Description""" + + type_name: str = Field(default="MicroStrategyProject", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "MicroStrategyProject": + raise ValueError("must be MicroStrategyProject") + return v + + def __setattr__(self, name, value): + if name in MicroStrategyProject._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + MICRO_STRATEGY_REPORTS: ClassVar[RelationField] = RelationField( + "microStrategyReports" + ) + """ + TBC + """ + MICRO_STRATEGY_FACTS: ClassVar[RelationField] = RelationField("microStrategyFacts") + """ + TBC + """ + MICRO_STRATEGY_METRICS: ClassVar[RelationField] = RelationField( + "microStrategyMetrics" + ) + """ + TBC + """ + MICRO_STRATEGY_VISUALIZATIONS: ClassVar[RelationField] = RelationField( + "microStrategyVisualizations" + ) + """ + TBC + """ + MICRO_STRATEGY_DOCUMENTS: ClassVar[RelationField] = RelationField( + "microStrategyDocuments" + ) + """ + TBC + """ + MICRO_STRATEGY_CUBES: ClassVar[RelationField] = RelationField("microStrategyCubes") + """ + TBC + """ + MICRO_STRATEGY_DOSSIERS: ClassVar[RelationField] = RelationField( + "microStrategyDossiers" + ) + """ + TBC + """ + MICRO_STRATEGY_ATTRIBUTES: ClassVar[RelationField] = RelationField( + "microStrategyAttributes" + ) + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "micro_strategy_reports", + "micro_strategy_facts", + "micro_strategy_metrics", + "micro_strategy_visualizations", + "micro_strategy_documents", + "micro_strategy_cubes", + "micro_strategy_dossiers", + "micro_strategy_attributes", + ] + + @property + def micro_strategy_reports(self) -> Optional[list[MicroStrategyReport]]: + return ( + None if self.attributes is None else self.attributes.micro_strategy_reports + ) + + @micro_strategy_reports.setter + def micro_strategy_reports( + self, micro_strategy_reports: Optional[list[MicroStrategyReport]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_reports = micro_strategy_reports + + @property + def micro_strategy_facts(self) -> Optional[list[MicroStrategyFact]]: + return None if self.attributes is None else self.attributes.micro_strategy_facts + + @micro_strategy_facts.setter + def micro_strategy_facts( + self, micro_strategy_facts: Optional[list[MicroStrategyFact]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_facts = 
micro_strategy_facts + + @property + def micro_strategy_metrics(self) -> Optional[list[MicroStrategyMetric]]: + return ( + None if self.attributes is None else self.attributes.micro_strategy_metrics + ) + + @micro_strategy_metrics.setter + def micro_strategy_metrics( + self, micro_strategy_metrics: Optional[list[MicroStrategyMetric]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_metrics = micro_strategy_metrics + + @property + def micro_strategy_visualizations( + self, + ) -> Optional[list[MicroStrategyVisualization]]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_visualizations + ) + + @micro_strategy_visualizations.setter + def micro_strategy_visualizations( + self, micro_strategy_visualizations: Optional[list[MicroStrategyVisualization]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_visualizations = micro_strategy_visualizations + + @property + def micro_strategy_documents(self) -> Optional[list[MicroStrategyDocument]]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_documents + ) + + @micro_strategy_documents.setter + def micro_strategy_documents( + self, micro_strategy_documents: Optional[list[MicroStrategyDocument]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_documents = micro_strategy_documents + + @property + def micro_strategy_cubes(self) -> Optional[list[MicroStrategyCube]]: + return None if self.attributes is None else self.attributes.micro_strategy_cubes + + @micro_strategy_cubes.setter + def micro_strategy_cubes( + self, micro_strategy_cubes: Optional[list[MicroStrategyCube]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_cubes = micro_strategy_cubes + + @property + def micro_strategy_dossiers(self) -> Optional[list[MicroStrategyDossier]]: + return ( + None if self.attributes is None else self.attributes.micro_strategy_dossiers + ) + + @micro_strategy_dossiers.setter + def micro_strategy_dossiers( + self, micro_strategy_dossiers: Optional[list[MicroStrategyDossier]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_dossiers = micro_strategy_dossiers + + @property + def micro_strategy_attributes(self) -> Optional[list[MicroStrategyAttribute]]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_attributes + ) + + @micro_strategy_attributes.setter + def micro_strategy_attributes( + self, micro_strategy_attributes: Optional[list[MicroStrategyAttribute]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_attributes = micro_strategy_attributes + + class Attributes(MicroStrategy.Attributes): + micro_strategy_reports: Optional[list[MicroStrategyReport]] = Field( + default=None, description="" + ) # relationship + micro_strategy_facts: Optional[list[MicroStrategyFact]] = Field( + default=None, description="" + ) # relationship + micro_strategy_metrics: Optional[list[MicroStrategyMetric]] = Field( + default=None, description="" + ) # relationship + micro_strategy_visualizations: Optional[ + list[MicroStrategyVisualization] + ] = Field( + default=None, description="" + ) # relationship + micro_strategy_documents: Optional[list[MicroStrategyDocument]] = Field( + default=None, description="" + ) # relationship + micro_strategy_cubes: 
Optional[list[MicroStrategyCube]] = Field( + default=None, description="" + ) # relationship + micro_strategy_dossiers: Optional[list[MicroStrategyDossier]] = Field( + default=None, description="" + ) # relationship + micro_strategy_attributes: Optional[list[MicroStrategyAttribute]] = Field( + default=None, description="" + ) # relationship + + attributes: "MicroStrategyProject.Attributes" = Field( + default_factory=lambda: MicroStrategyProject.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .micro_strategy_attribute import MicroStrategyAttribute # noqa +from .micro_strategy_cube import MicroStrategyCube # noqa +from .micro_strategy_document import MicroStrategyDocument # noqa +from .micro_strategy_dossier import MicroStrategyDossier # noqa +from .micro_strategy_fact import MicroStrategyFact # noqa +from .micro_strategy_metric import MicroStrategyMetric # noqa +from .micro_strategy_report import MicroStrategyReport # noqa +from .micro_strategy_visualization import MicroStrategyVisualization # noqa diff --git a/pyatlan/model/assets/micro_strategy_report.py b/pyatlan/model/assets/micro_strategy_report.py new file mode 100644 index 000000000..3c42f075e --- /dev/null +++ b/pyatlan/model/assets/micro_strategy_report.py @@ -0,0 +1,144 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, RelationField + +from .micro_strategy import MicroStrategy + + +class MicroStrategyReport(MicroStrategy): + """Description""" + + type_name: str = Field(default="MicroStrategyReport", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "MicroStrategyReport": + raise ValueError("must be MicroStrategyReport") + return v + + def __setattr__(self, name, value): + if name in MicroStrategyReport._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + MICRO_STRATEGY_REPORT_TYPE: ClassVar[KeywordField] = KeywordField( + "microStrategyReportType", "microStrategyReportType" + ) + """ + Type of report, for example: Grid or Chart. 
+ """ + + MICRO_STRATEGY_METRICS: ClassVar[RelationField] = RelationField( + "microStrategyMetrics" + ) + """ + TBC + """ + MICRO_STRATEGY_PROJECT: ClassVar[RelationField] = RelationField( + "microStrategyProject" + ) + """ + TBC + """ + MICRO_STRATEGY_ATTRIBUTES: ClassVar[RelationField] = RelationField( + "microStrategyAttributes" + ) + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "micro_strategy_report_type", + "micro_strategy_metrics", + "micro_strategy_project", + "micro_strategy_attributes", + ] + + @property + def micro_strategy_report_type(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_report_type + ) + + @micro_strategy_report_type.setter + def micro_strategy_report_type(self, micro_strategy_report_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_report_type = micro_strategy_report_type + + @property + def micro_strategy_metrics(self) -> Optional[list[MicroStrategyMetric]]: + return ( + None if self.attributes is None else self.attributes.micro_strategy_metrics + ) + + @micro_strategy_metrics.setter + def micro_strategy_metrics( + self, micro_strategy_metrics: Optional[list[MicroStrategyMetric]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_metrics = micro_strategy_metrics + + @property + def micro_strategy_project(self) -> Optional[MicroStrategyProject]: + return ( + None if self.attributes is None else self.attributes.micro_strategy_project + ) + + @micro_strategy_project.setter + def micro_strategy_project( + self, micro_strategy_project: Optional[MicroStrategyProject] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_project = micro_strategy_project + + @property + def micro_strategy_attributes(self) -> Optional[list[MicroStrategyAttribute]]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_attributes + ) + + @micro_strategy_attributes.setter + def micro_strategy_attributes( + self, micro_strategy_attributes: Optional[list[MicroStrategyAttribute]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_attributes = micro_strategy_attributes + + class Attributes(MicroStrategy.Attributes): + micro_strategy_report_type: Optional[str] = Field(default=None, description="") + micro_strategy_metrics: Optional[list[MicroStrategyMetric]] = Field( + default=None, description="" + ) # relationship + micro_strategy_project: Optional[MicroStrategyProject] = Field( + default=None, description="" + ) # relationship + micro_strategy_attributes: Optional[list[MicroStrategyAttribute]] = Field( + default=None, description="" + ) # relationship + + attributes: "MicroStrategyReport.Attributes" = Field( + default_factory=lambda: MicroStrategyReport.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .micro_strategy_attribute import MicroStrategyAttribute # noqa +from .micro_strategy_metric import MicroStrategyMetric # noqa +from .micro_strategy_project import MicroStrategyProject # noqa diff --git a/pyatlan/model/assets/micro_strategy_visualization.py b/pyatlan/model/assets/micro_strategy_visualization.py new file mode 100644 index 000000000..eeaeec470 --- /dev/null +++ b/pyatlan/model/assets/micro_strategy_visualization.py @@ -0,0 +1,183 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import ( + KeywordField, + KeywordTextField, + RelationField, +) + +from .micro_strategy import MicroStrategy + + +class MicroStrategyVisualization(MicroStrategy): + """Description""" + + type_name: str = Field(default="MicroStrategyVisualization", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "MicroStrategyVisualization": + raise ValueError("must be MicroStrategyVisualization") + return v + + def __setattr__(self, name, value): + if name in MicroStrategyVisualization._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + MICRO_STRATEGY_VISUALIZATION_TYPE: ClassVar[KeywordField] = KeywordField( + "microStrategyVisualizationType", "microStrategyVisualizationType" + ) + """ + Type of visualization. + """ + MICRO_STRATEGY_DOSSIER_QUALIFIED_NAME: ClassVar[ + KeywordTextField + ] = KeywordTextField( + "microStrategyDossierQualifiedName", + "microStrategyDossierQualifiedName", + "microStrategyDossierQualifiedName.text", + ) + """ + Unique name of the dossier in which this visualization exists. + """ + MICRO_STRATEGY_DOSSIER_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "microStrategyDossierName", + "microStrategyDossierName.keyword", + "microStrategyDossierName", + ) + """ + Simple name of the dossier in which this visualization exists. 
+ """ + + MICRO_STRATEGY_DOSSIER: ClassVar[RelationField] = RelationField( + "microStrategyDossier" + ) + """ + TBC + """ + MICRO_STRATEGY_PROJECT: ClassVar[RelationField] = RelationField( + "microStrategyProject" + ) + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "micro_strategy_visualization_type", + "micro_strategy_dossier_qualified_name", + "micro_strategy_dossier_name", + "micro_strategy_dossier", + "micro_strategy_project", + ] + + @property + def micro_strategy_visualization_type(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_visualization_type + ) + + @micro_strategy_visualization_type.setter + def micro_strategy_visualization_type( + self, micro_strategy_visualization_type: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_visualization_type = ( + micro_strategy_visualization_type + ) + + @property + def micro_strategy_dossier_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_dossier_qualified_name + ) + + @micro_strategy_dossier_qualified_name.setter + def micro_strategy_dossier_qualified_name( + self, micro_strategy_dossier_qualified_name: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_dossier_qualified_name = ( + micro_strategy_dossier_qualified_name + ) + + @property + def micro_strategy_dossier_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.micro_strategy_dossier_name + ) + + @micro_strategy_dossier_name.setter + def micro_strategy_dossier_name(self, micro_strategy_dossier_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_dossier_name = micro_strategy_dossier_name + + @property + def micro_strategy_dossier(self) -> Optional[MicroStrategyDossier]: + return ( + None if self.attributes is None else self.attributes.micro_strategy_dossier + ) + + @micro_strategy_dossier.setter + def micro_strategy_dossier( + self, micro_strategy_dossier: Optional[MicroStrategyDossier] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_dossier = micro_strategy_dossier + + @property + def micro_strategy_project(self) -> Optional[MicroStrategyProject]: + return ( + None if self.attributes is None else self.attributes.micro_strategy_project + ) + + @micro_strategy_project.setter + def micro_strategy_project( + self, micro_strategy_project: Optional[MicroStrategyProject] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.micro_strategy_project = micro_strategy_project + + class Attributes(MicroStrategy.Attributes): + micro_strategy_visualization_type: Optional[str] = Field( + default=None, description="" + ) + micro_strategy_dossier_qualified_name: Optional[str] = Field( + default=None, description="" + ) + micro_strategy_dossier_name: Optional[str] = Field(default=None, description="") + micro_strategy_dossier: Optional[MicroStrategyDossier] = Field( + default=None, description="" + ) # relationship + micro_strategy_project: Optional[MicroStrategyProject] = Field( + default=None, description="" + ) # relationship + + attributes: "MicroStrategyVisualization.Attributes" = Field( + default_factory=lambda: MicroStrategyVisualization.Attributes(), + description="Map of attributes in the 
instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .micro_strategy_dossier import MicroStrategyDossier # noqa +from .micro_strategy_project import MicroStrategyProject # noqa diff --git a/pyatlan/model/assets/asset41.py b/pyatlan/model/assets/mode.py similarity index 85% rename from pyatlan/model/assets/asset41.py rename to pyatlan/model/assets/mode.py index c82c9a95a..e603cd739 100644 --- a/pyatlan/model/assets/asset41.py +++ b/pyatlan/model/assets/mode.py @@ -6,17 +6,17 @@ from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.fields.atlan_fields import KeywordField, KeywordTextField -from .asset19 import BI +from .b_i import BI class Mode(BI): """Description""" - type_name: str = Field("Mode", allow_mutation=False) + type_name: str = Field(default="Mode", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -207,35 +207,20 @@ def mode_query_qualified_name(self, mode_query_qualified_name: Optional[str]): self.attributes.mode_query_qualified_name = mode_query_qualified_name class Attributes(BI.Attributes): - mode_id: Optional[str] = Field(None, description="", alias="modeId") - mode_token: Optional[str] = Field(None, description="", alias="modeToken") - mode_workspace_name: Optional[str] = Field( - None, description="", alias="modeWorkspaceName" - ) - mode_workspace_username: Optional[str] = Field( - None, description="", alias="modeWorkspaceUsername" - ) + mode_id: Optional[str] = Field(default=None, description="") + mode_token: Optional[str] = Field(default=None, description="") + mode_workspace_name: Optional[str] = Field(default=None, description="") + mode_workspace_username: Optional[str] = Field(default=None, description="") mode_workspace_qualified_name: Optional[str] = Field( - None, description="", alias="modeWorkspaceQualifiedName" - ) - mode_report_name: Optional[str] = Field( - None, description="", alias="modeReportName" - ) - mode_report_qualified_name: Optional[str] = Field( - None, description="", alias="modeReportQualifiedName" - ) - mode_query_name: Optional[str] = Field( - None, description="", alias="modeQueryName" - ) - mode_query_qualified_name: Optional[str] = Field( - None, description="", alias="modeQueryQualifiedName" + default=None, description="" ) + mode_report_name: Optional[str] = Field(default=None, description="") + mode_report_qualified_name: Optional[str] = Field(default=None, description="") + mode_query_name: Optional[str] = Field(default=None, description="") + mode_query_qualified_name: Optional[str] = Field(default=None, description="") attributes: "Mode.Attributes" = Field( default_factory=lambda: Mode.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) - - -Mode.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/mode_chart.py b/pyatlan/model/assets/mode_chart.py new file mode 100644 index 000000000..1988e7c69 --- /dev/null +++ b/pyatlan/model/assets/mode_chart.py @@ -0,0 +1,82 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, RelationField + +from .mode import Mode + + +class ModeChart(Mode): + """Description""" + + type_name: str = Field(default="ModeChart", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "ModeChart": + raise ValueError("must be ModeChart") + return v + + def __setattr__(self, name, value): + if name in ModeChart._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + MODE_CHART_TYPE: ClassVar[KeywordField] = KeywordField( + "modeChartType", "modeChartType" + ) + """ + Type of chart. + """ + + MODE_QUERY: ClassVar[RelationField] = RelationField("modeQuery") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "mode_chart_type", + "mode_query", + ] + + @property + def mode_chart_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.mode_chart_type + + @mode_chart_type.setter + def mode_chart_type(self, mode_chart_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mode_chart_type = mode_chart_type + + @property + def mode_query(self) -> Optional[ModeQuery]: + return None if self.attributes is None else self.attributes.mode_query + + @mode_query.setter + def mode_query(self, mode_query: Optional[ModeQuery]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mode_query = mode_query + + class Attributes(Mode.Attributes): + mode_chart_type: Optional[str] = Field(default=None, description="") + mode_query: Optional[ModeQuery] = Field( + default=None, description="" + ) # relationship + + attributes: "ModeChart.Attributes" = Field( + default_factory=lambda: ModeChart.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .mode_query import ModeQuery # noqa diff --git a/pyatlan/model/assets/mode_collection.py b/pyatlan/model/assets/mode_collection.py new file mode 100644 index 000000000..e8ad53489 --- /dev/null +++ b/pyatlan/model/assets/mode_collection.py @@ -0,0 +1,121 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, RelationField + +from .mode import Mode + + +class ModeCollection(Mode): + """Description""" + + type_name: str = Field(default="ModeCollection", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "ModeCollection": + raise ValueError("must be ModeCollection") + return v + + def __setattr__(self, name, value): + if name in ModeCollection._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + MODE_COLLECTION_TYPE: ClassVar[KeywordField] = KeywordField( + "modeCollectionType", "modeCollectionType" + ) + """ + Type of this collection. + """ + MODE_COLLECTION_STATE: ClassVar[KeywordField] = KeywordField( + "modeCollectionState", "modeCollectionState" + ) + """ + State of this collection. 
+ """ + + MODE_WORKSPACE: ClassVar[RelationField] = RelationField("modeWorkspace") + """ + TBC + """ + MODE_REPORTS: ClassVar[RelationField] = RelationField("modeReports") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "mode_collection_type", + "mode_collection_state", + "mode_workspace", + "mode_reports", + ] + + @property + def mode_collection_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.mode_collection_type + + @mode_collection_type.setter + def mode_collection_type(self, mode_collection_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mode_collection_type = mode_collection_type + + @property + def mode_collection_state(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.mode_collection_state + ) + + @mode_collection_state.setter + def mode_collection_state(self, mode_collection_state: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mode_collection_state = mode_collection_state + + @property + def mode_workspace(self) -> Optional[ModeWorkspace]: + return None if self.attributes is None else self.attributes.mode_workspace + + @mode_workspace.setter + def mode_workspace(self, mode_workspace: Optional[ModeWorkspace]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mode_workspace = mode_workspace + + @property + def mode_reports(self) -> Optional[list[ModeReport]]: + return None if self.attributes is None else self.attributes.mode_reports + + @mode_reports.setter + def mode_reports(self, mode_reports: Optional[list[ModeReport]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mode_reports = mode_reports + + class Attributes(Mode.Attributes): + mode_collection_type: Optional[str] = Field(default=None, description="") + mode_collection_state: Optional[str] = Field(default=None, description="") + mode_workspace: Optional[ModeWorkspace] = Field( + default=None, description="" + ) # relationship + mode_reports: Optional[list[ModeReport]] = Field( + default=None, description="" + ) # relationship + + attributes: "ModeCollection.Attributes" = Field( + default_factory=lambda: ModeCollection.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .mode_report import ModeReport # noqa +from .mode_workspace import ModeWorkspace # noqa diff --git a/pyatlan/model/assets/mode_query.py b/pyatlan/model/assets/mode_query.py new file mode 100644 index 000000000..b6c021a3f --- /dev/null +++ b/pyatlan/model/assets/mode_query.py @@ -0,0 +1,121 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import NumericField, RelationField, TextField + +from .mode import Mode + + +class ModeQuery(Mode): + """Description""" + + type_name: str = Field(default="ModeQuery", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "ModeQuery": + raise ValueError("must be ModeQuery") + return v + + def __setattr__(self, name, value): + if name in ModeQuery._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + MODE_RAW_QUERY: ClassVar[TextField] = TextField("modeRawQuery", "modeRawQuery") + """ + + """ + MODE_REPORT_IMPORT_COUNT: ClassVar[NumericField] = NumericField( + "modeReportImportCount", "modeReportImportCount" + ) + """ + + """ + + MODE_CHARTS: ClassVar[RelationField] = RelationField("modeCharts") + """ + TBC + """ + MODE_REPORT: ClassVar[RelationField] = RelationField("modeReport") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "mode_raw_query", + "mode_report_import_count", + "mode_charts", + "mode_report", + ] + + @property + def mode_raw_query(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.mode_raw_query + + @mode_raw_query.setter + def mode_raw_query(self, mode_raw_query: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mode_raw_query = mode_raw_query + + @property + def mode_report_import_count(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.mode_report_import_count + ) + + @mode_report_import_count.setter + def mode_report_import_count(self, mode_report_import_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mode_report_import_count = mode_report_import_count + + @property + def mode_charts(self) -> Optional[list[ModeChart]]: + return None if self.attributes is None else self.attributes.mode_charts + + @mode_charts.setter + def mode_charts(self, mode_charts: Optional[list[ModeChart]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mode_charts = mode_charts + + @property + def mode_report(self) -> Optional[ModeReport]: + return None if self.attributes is None else self.attributes.mode_report + + @mode_report.setter + def mode_report(self, mode_report: Optional[ModeReport]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mode_report = mode_report + + class Attributes(Mode.Attributes): + mode_raw_query: Optional[str] = Field(default=None, description="") + mode_report_import_count: Optional[int] = Field(default=None, description="") + mode_charts: Optional[list[ModeChart]] = Field( + default=None, description="" + ) # relationship + mode_report: Optional[ModeReport] = Field( + default=None, description="" + ) # relationship + + attributes: "ModeQuery.Attributes" = Field( + default_factory=lambda: ModeQuery.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .mode_chart import ModeChart # noqa +from .mode_report import ModeReport # noqa diff --git a/pyatlan/model/assets/mode_report.py b/pyatlan/model/assets/mode_report.py new file mode 100644 index 000000000..a98c6bd2b --- /dev/null +++ b/pyatlan/model/assets/mode_report.py @@ -0,0 +1,224 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from datetime import datetime +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import ( + BooleanField, + KeywordField, + NumericField, + RelationField, + TextField, +) + +from .mode import Mode + + +class ModeReport(Mode): + """Description""" + + type_name: str = Field(default="ModeReport", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "ModeReport": + raise ValueError("must be ModeReport") + return v + + def __setattr__(self, name, value): + if name in ModeReport._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + MODE_COLLECTION_TOKEN: ClassVar[KeywordField] = KeywordField( + "modeCollectionToken", "modeCollectionToken" + ) + """ + + """ + MODE_REPORT_PUBLISHED_AT: ClassVar[NumericField] = NumericField( + "modeReportPublishedAt", "modeReportPublishedAt" + ) + """ + + """ + MODE_QUERY_COUNT: ClassVar[NumericField] = NumericField( + "modeQueryCount", "modeQueryCount" + ) + """ + + """ + MODE_CHART_COUNT: ClassVar[NumericField] = NumericField( + "modeChartCount", "modeChartCount" + ) + """ + + """ + MODE_QUERY_PREVIEW: ClassVar[TextField] = TextField( + "modeQueryPreview", "modeQueryPreview" + ) + """ + + """ + MODE_IS_PUBLIC: ClassVar[BooleanField] = BooleanField( + "modeIsPublic", "modeIsPublic" + ) + """ + + """ + MODE_IS_SHARED: ClassVar[BooleanField] = BooleanField( + "modeIsShared", "modeIsShared" + ) + """ + + """ + + MODE_QUERIES: ClassVar[RelationField] = RelationField("modeQueries") + """ + TBC + """ + MODE_COLLECTIONS: ClassVar[RelationField] = RelationField("modeCollections") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "mode_collection_token", + "mode_report_published_at", + "mode_query_count", + "mode_chart_count", + "mode_query_preview", + "mode_is_public", + "mode_is_shared", + "mode_queries", + "mode_collections", + ] + + @property + def mode_collection_token(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.mode_collection_token + ) + + @mode_collection_token.setter + def mode_collection_token(self, mode_collection_token: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mode_collection_token = mode_collection_token + + @property + def mode_report_published_at(self) -> Optional[datetime]: + return ( + None + if self.attributes is None + else self.attributes.mode_report_published_at + ) + + @mode_report_published_at.setter + def mode_report_published_at(self, mode_report_published_at: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mode_report_published_at = mode_report_published_at + + @property + def mode_query_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.mode_query_count + + @mode_query_count.setter + def mode_query_count(self, mode_query_count: 
Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mode_query_count = mode_query_count + + @property + def mode_chart_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.mode_chart_count + + @mode_chart_count.setter + def mode_chart_count(self, mode_chart_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mode_chart_count = mode_chart_count + + @property + def mode_query_preview(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.mode_query_preview + + @mode_query_preview.setter + def mode_query_preview(self, mode_query_preview: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mode_query_preview = mode_query_preview + + @property + def mode_is_public(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.mode_is_public + + @mode_is_public.setter + def mode_is_public(self, mode_is_public: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mode_is_public = mode_is_public + + @property + def mode_is_shared(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.mode_is_shared + + @mode_is_shared.setter + def mode_is_shared(self, mode_is_shared: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mode_is_shared = mode_is_shared + + @property + def mode_queries(self) -> Optional[list[ModeQuery]]: + return None if self.attributes is None else self.attributes.mode_queries + + @mode_queries.setter + def mode_queries(self, mode_queries: Optional[list[ModeQuery]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mode_queries = mode_queries + + @property + def mode_collections(self) -> Optional[list[ModeCollection]]: + return None if self.attributes is None else self.attributes.mode_collections + + @mode_collections.setter + def mode_collections(self, mode_collections: Optional[list[ModeCollection]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mode_collections = mode_collections + + class Attributes(Mode.Attributes): + mode_collection_token: Optional[str] = Field(default=None, description="") + mode_report_published_at: Optional[datetime] = Field( + default=None, description="" + ) + mode_query_count: Optional[int] = Field(default=None, description="") + mode_chart_count: Optional[int] = Field(default=None, description="") + mode_query_preview: Optional[str] = Field(default=None, description="") + mode_is_public: Optional[bool] = Field(default=None, description="") + mode_is_shared: Optional[bool] = Field(default=None, description="") + mode_queries: Optional[list[ModeQuery]] = Field( + default=None, description="" + ) # relationship + mode_collections: Optional[list[ModeCollection]] = Field( + default=None, description="" + ) # relationship + + attributes: "ModeReport.Attributes" = Field( + default_factory=lambda: ModeReport.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .mode_collection import ModeCollection # noqa +from .mode_query import ModeQuery # noqa diff --git a/pyatlan/model/assets/mode_workspace.py b/pyatlan/model/assets/mode_workspace.py new file mode 100644 index 000000000..71208e93d --- /dev/null +++ b/pyatlan/model/assets/mode_workspace.py @@ -0,0 +1,84 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import NumericField, RelationField + +from .mode import Mode + + +class ModeWorkspace(Mode): + """Description""" + + type_name: str = Field(default="ModeWorkspace", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "ModeWorkspace": + raise ValueError("must be ModeWorkspace") + return v + + def __setattr__(self, name, value): + if name in ModeWorkspace._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + MODE_COLLECTION_COUNT: ClassVar[NumericField] = NumericField( + "modeCollectionCount", "modeCollectionCount" + ) + """ + Number of collections in this workspace. + """ + + MODE_COLLECTIONS: ClassVar[RelationField] = RelationField("modeCollections") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "mode_collection_count", + "mode_collections", + ] + + @property + def mode_collection_count(self) -> Optional[int]: + return ( + None if self.attributes is None else self.attributes.mode_collection_count + ) + + @mode_collection_count.setter + def mode_collection_count(self, mode_collection_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mode_collection_count = mode_collection_count + + @property + def mode_collections(self) -> Optional[list[ModeCollection]]: + return None if self.attributes is None else self.attributes.mode_collections + + @mode_collections.setter + def mode_collections(self, mode_collections: Optional[list[ModeCollection]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mode_collections = mode_collections + + class Attributes(Mode.Attributes): + mode_collection_count: Optional[int] = Field(default=None, description="") + mode_collections: Optional[list[ModeCollection]] = Field( + default=None, description="" + ) # relationship + + attributes: "ModeWorkspace.Attributes" = Field( + default_factory=lambda: ModeWorkspace.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .mode_collection import ModeCollection # noqa diff --git a/pyatlan/model/assets/asset58.py b/pyatlan/model/assets/mongo_d_b.py similarity index 77% rename from pyatlan/model/assets/asset58.py rename to pyatlan/model/assets/mongo_d_b.py index 8544433c8..9985be1b1 100644 --- a/pyatlan/model/assets/asset58.py +++ b/pyatlan/model/assets/mongo_d_b.py @@ -6,15 +6,15 @@ from typing import ClassVar -from pydantic import Field, validator +from pydantic.v1 import Field, validator -from .asset24 import NoSQL +from .no_s_q_l import NoSQL class MongoDB(NoSQL): """Description""" - type_name: str = Field("MongoDB", allow_mutation=False) + type_name: str = Field(default="MongoDB", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -28,6 +28,3 @@ def __setattr__(self, name, value): super().__setattr__(name, value) _convenience_properties: ClassVar[list[str]] = [] - - -MongoDB.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset84.py b/pyatlan/model/assets/mongo_d_b_collection.py similarity index 62% rename from pyatlan/model/assets/asset84.py rename to pyatlan/model/assets/mongo_d_b_collection.py index f7470fc7e..24bc24567 100644 --- a/pyatlan/model/assets/asset84.py +++ b/pyatlan/model/assets/mongo_d_b_collection.py @@ -7,7 +7,7 @@ from datetime import datetime from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.fields.atlan_fields import ( BooleanField, @@ -18,13 +18,13 @@ TextField, ) -from .asset00 import Database, Table +from .table import Table class MongoDBCollection(Table): """Description""" - type_name: str = Field("MongoDBCollection", allow_mutation=False) + type_name: str = Field(default="MongoDBCollection", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -850,108 +850,74 @@ def mongo_d_b_database(self, mongo_d_b_database: Optional[MongoDBDatabase]): class Attributes(Table.Attributes): mongo_d_b_collection_subtype: Optional[str] = Field( - None, description="", alias="mongoDBCollectionSubtype" + default=None, description="" ) mongo_d_b_collection_is_capped: Optional[bool] = Field( - None, description="", alias="mongoDBCollectionIsCapped" + default=None, description="" ) mongo_d_b_collection_time_field: Optional[str] = Field( - None, description="", alias="mongoDBCollectionTimeField" + default=None, description="" ) mongo_d_b_collection_time_granularity: Optional[str] = Field( - None, description="", alias="mongoDBCollectionTimeGranularity" + default=None, description="" ) mongo_d_b_collection_expire_after_seconds: Optional[int] = Field( - None, description="", alias="mongoDBCollectionExpireAfterSeconds" + default=None, description="" ) mongo_d_b_collection_maximum_document_count: Optional[int] = Field( - None, description="", alias="mongoDBCollectionMaximumDocumentCount" + default=None, description="" ) mongo_d_b_collection_max_size: Optional[int] = Field( - None, description="", alias="mongoDBCollectionMaxSize" + default=None, description="" ) mongo_d_b_collection_num_orphan_docs: Optional[int] = Field( - None, description="", alias="mongoDBCollectionNumOrphanDocs" + default=None, description="" ) mongo_d_b_collection_num_indexes: Optional[int] = Field( - None, description="", alias="mongoDBCollectionNumIndexes" + default=None, description="" ) mongo_d_b_collection_total_index_size: Optional[int] = Field( - 
None, description="", alias="mongoDBCollectionTotalIndexSize" + default=None, description="" ) mongo_d_b_collection_average_object_size: Optional[int] = Field( - None, description="", alias="mongoDBCollectionAverageObjectSize" + default=None, description="" ) mongo_d_b_collection_schema_definition: Optional[str] = Field( - None, description="", alias="mongoDBCollectionSchemaDefinition" - ) - column_count: Optional[int] = Field(None, description="", alias="columnCount") - row_count: Optional[int] = Field(None, description="", alias="rowCount") - size_bytes: Optional[int] = Field(None, description="", alias="sizeBytes") - alias: Optional[str] = Field(None, description="", alias="alias") - is_temporary: Optional[bool] = Field(None, description="", alias="isTemporary") - is_query_preview: Optional[bool] = Field( - None, description="", alias="isQueryPreview" - ) + default=None, description="" + ) + column_count: Optional[int] = Field(default=None, description="") + row_count: Optional[int] = Field(default=None, description="") + size_bytes: Optional[int] = Field(default=None, description="") + alias: Optional[str] = Field(default=None, description="") + is_temporary: Optional[bool] = Field(default=None, description="") + is_query_preview: Optional[bool] = Field(default=None, description="") query_preview_config: Optional[dict[str, str]] = Field( - None, description="", alias="queryPreviewConfig" - ) - external_location: Optional[str] = Field( - None, description="", alias="externalLocation" - ) - external_location_region: Optional[str] = Field( - None, description="", alias="externalLocationRegion" - ) - external_location_format: Optional[str] = Field( - None, description="", alias="externalLocationFormat" - ) - is_partitioned: Optional[bool] = Field( - None, description="", alias="isPartitioned" - ) - partition_strategy: Optional[str] = Field( - None, description="", alias="partitionStrategy" - ) - partition_count: Optional[int] = Field( - None, description="", alias="partitionCount" - ) - partition_list: Optional[str] = Field( - None, description="", alias="partitionList" - ) - query_count: Optional[int] = Field(None, description="", alias="queryCount") - query_user_count: Optional[int] = Field( - None, description="", alias="queryUserCount" - ) - query_user_map: Optional[dict[str, int]] = Field( - None, description="", alias="queryUserMap" - ) - query_count_updated_at: Optional[datetime] = Field( - None, description="", alias="queryCountUpdatedAt" - ) - database_name: Optional[str] = Field(None, description="", alias="databaseName") - database_qualified_name: Optional[str] = Field( - None, description="", alias="databaseQualifiedName" - ) - schema_name: Optional[str] = Field(None, description="", alias="schemaName") - schema_qualified_name: Optional[str] = Field( - None, description="", alias="schemaQualifiedName" - ) - table_name: Optional[str] = Field(None, description="", alias="tableName") - table_qualified_name: Optional[str] = Field( - None, description="", alias="tableQualifiedName" - ) - view_name: Optional[str] = Field(None, description="", alias="viewName") - view_qualified_name: Optional[str] = Field( - None, description="", alias="viewQualifiedName" - ) - is_profiled: Optional[bool] = Field(None, description="", alias="isProfiled") - last_profiled_at: Optional[datetime] = Field( - None, description="", alias="lastProfiledAt" - ) - no_s_q_l_schema_definition: Optional[str] = Field( - None, description="", alias="noSQLSchemaDefinition" - ) + default=None, description="" + ) + 
external_location: Optional[str] = Field(default=None, description="") + external_location_region: Optional[str] = Field(default=None, description="") + external_location_format: Optional[str] = Field(default=None, description="") + is_partitioned: Optional[bool] = Field(default=None, description="") + partition_strategy: Optional[str] = Field(default=None, description="") + partition_count: Optional[int] = Field(default=None, description="") + partition_list: Optional[str] = Field(default=None, description="") + query_count: Optional[int] = Field(default=None, description="") + query_user_count: Optional[int] = Field(default=None, description="") + query_user_map: Optional[dict[str, int]] = Field(default=None, description="") + query_count_updated_at: Optional[datetime] = Field(default=None, description="") + database_name: Optional[str] = Field(default=None, description="") + database_qualified_name: Optional[str] = Field(default=None, description="") + schema_name: Optional[str] = Field(default=None, description="") + schema_qualified_name: Optional[str] = Field(default=None, description="") + table_name: Optional[str] = Field(default=None, description="") + table_qualified_name: Optional[str] = Field(default=None, description="") + view_name: Optional[str] = Field(default=None, description="") + view_qualified_name: Optional[str] = Field(default=None, description="") + is_profiled: Optional[bool] = Field(default=None, description="") + last_profiled_at: Optional[datetime] = Field(default=None, description="") + no_s_q_l_schema_definition: Optional[str] = Field(default=None, description="") mongo_d_b_database: Optional[MongoDBDatabase] = Field( - None, description="", alias="mongoDBDatabase" + default=None, description="" ) # relationship attributes: "MongoDBCollection.Attributes" = Field( @@ -961,397 +927,4 @@ class Attributes(Table.Attributes): ) -class MongoDBDatabase(Database): - """Description""" - - type_name: str = Field("MongoDBDatabase", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "MongoDBDatabase": - raise ValueError("must be MongoDBDatabase") - return v - - def __setattr__(self, name, value): - if name in MongoDBDatabase._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - MONGO_DB_DATABASE_COLLECTION_COUNT: ClassVar[NumericField] = NumericField( - "mongoDBDatabaseCollectionCount", "mongoDBDatabaseCollectionCount" - ) - """ - Number of collections in the database. - """ - SCHEMA_COUNT: ClassVar[NumericField] = NumericField("schemaCount", "schemaCount") - """ - Number of schemas in this database. - """ - QUERY_COUNT: ClassVar[NumericField] = NumericField("queryCount", "queryCount") - """ - Number of times this asset has been queried. - """ - QUERY_USER_COUNT: ClassVar[NumericField] = NumericField( - "queryUserCount", "queryUserCount" - ) - """ - Number of unique users who have queried this asset. - """ - QUERY_USER_MAP: ClassVar[KeywordField] = KeywordField( - "queryUserMap", "queryUserMap" - ) - """ - Map of unique users who have queried this asset to the number of times they have queried it. - """ - QUERY_COUNT_UPDATED_AT: ClassVar[NumericField] = NumericField( - "queryCountUpdatedAt", "queryCountUpdatedAt" - ) - """ - Time (epoch) at which the query count was last updated, in milliseconds. 
- """ - DATABASE_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "databaseName", "databaseName.keyword", "databaseName" - ) - """ - Simple name of the database in which this SQL asset exists, or empty if it does not exist within a database. - """ - DATABASE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "databaseQualifiedName", "databaseQualifiedName" - ) - """ - Unique name of the database in which this SQL asset exists, or empty if it does not exist within a database. - """ - SCHEMA_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "schemaName", "schemaName.keyword", "schemaName" - ) - """ - Simple name of the schema in which this SQL asset exists, or empty if it does not exist within a schema. - """ - SCHEMA_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "schemaQualifiedName", "schemaQualifiedName" - ) - """ - Unique name of the schema in which this SQL asset exists, or empty if it does not exist within a schema. - """ - TABLE_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "tableName", "tableName.keyword", "tableName" - ) - """ - Simple name of the table in which this SQL asset exists, or empty if it does not exist within a table. - """ - TABLE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "tableQualifiedName", "tableQualifiedName" - ) - """ - Unique name of the table in which this SQL asset exists, or empty if it does not exist within a table. - """ - VIEW_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "viewName", "viewName.keyword", "viewName" - ) - """ - Simple name of the view in which this SQL asset exists, or empty if it does not exist within a view. - """ - VIEW_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "viewQualifiedName", "viewQualifiedName" - ) - """ - Unique name of the view in which this SQL asset exists, or empty if it does not exist within a view. - """ - IS_PROFILED: ClassVar[BooleanField] = BooleanField("isProfiled", "isProfiled") - """ - Whether this asset has been profiled (true) or not (false). - """ - LAST_PROFILED_AT: ClassVar[NumericField] = NumericField( - "lastProfiledAt", "lastProfiledAt" - ) - """ - Time (epoch) at which this asset was last profiled, in milliseconds. - """ - NO_SQL_SCHEMA_DEFINITION: ClassVar[TextField] = TextField( - "noSQLSchemaDefinition", "noSQLSchemaDefinition" - ) - """ - Represents attributes for describing the key schema for the table and indexes. 
- """ - - MONGO_DB_COLLECTIONS: ClassVar[RelationField] = RelationField("mongoDBCollections") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "mongo_d_b_database_collection_count", - "schema_count", - "query_count", - "query_user_count", - "query_user_map", - "query_count_updated_at", - "database_name", - "database_qualified_name", - "schema_name", - "schema_qualified_name", - "table_name", - "table_qualified_name", - "view_name", - "view_qualified_name", - "is_profiled", - "last_profiled_at", - "no_s_q_l_schema_definition", - "mongo_d_b_collections", - ] - - @property - def mongo_d_b_database_collection_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.mongo_d_b_database_collection_count - ) - - @mongo_d_b_database_collection_count.setter - def mongo_d_b_database_collection_count( - self, mongo_d_b_database_collection_count: Optional[int] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mongo_d_b_database_collection_count = ( - mongo_d_b_database_collection_count - ) - - @property - def schema_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.schema_count - - @schema_count.setter - def schema_count(self, schema_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.schema_count = schema_count - - @property - def query_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.query_count - - @query_count.setter - def query_count(self, query_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.query_count = query_count - - @property - def query_user_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.query_user_count - - @query_user_count.setter - def query_user_count(self, query_user_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.query_user_count = query_user_count - - @property - def query_user_map(self) -> Optional[dict[str, int]]: - return None if self.attributes is None else self.attributes.query_user_map - - @query_user_map.setter - def query_user_map(self, query_user_map: Optional[dict[str, int]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.query_user_map = query_user_map - - @property - def query_count_updated_at(self) -> Optional[datetime]: - return ( - None if self.attributes is None else self.attributes.query_count_updated_at - ) - - @query_count_updated_at.setter - def query_count_updated_at(self, query_count_updated_at: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.query_count_updated_at = query_count_updated_at - - @property - def database_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.database_name - - @database_name.setter - def database_name(self, database_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.database_name = database_name - - @property - def database_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.database_qualified_name - ) - - @database_qualified_name.setter - def database_qualified_name(self, database_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - 
self.attributes.database_qualified_name = database_qualified_name - - @property - def schema_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.schema_name - - @schema_name.setter - def schema_name(self, schema_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.schema_name = schema_name - - @property - def schema_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.schema_qualified_name - ) - - @schema_qualified_name.setter - def schema_qualified_name(self, schema_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.schema_qualified_name = schema_qualified_name - - @property - def table_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.table_name - - @table_name.setter - def table_name(self, table_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.table_name = table_name - - @property - def table_qualified_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.table_qualified_name - - @table_qualified_name.setter - def table_qualified_name(self, table_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.table_qualified_name = table_qualified_name - - @property - def view_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.view_name - - @view_name.setter - def view_name(self, view_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.view_name = view_name - - @property - def view_qualified_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.view_qualified_name - - @view_qualified_name.setter - def view_qualified_name(self, view_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.view_qualified_name = view_qualified_name - - @property - def is_profiled(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_profiled - - @is_profiled.setter - def is_profiled(self, is_profiled: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_profiled = is_profiled - - @property - def last_profiled_at(self) -> Optional[datetime]: - return None if self.attributes is None else self.attributes.last_profiled_at - - @last_profiled_at.setter - def last_profiled_at(self, last_profiled_at: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.last_profiled_at = last_profiled_at - - @property - def no_s_q_l_schema_definition(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.no_s_q_l_schema_definition - ) - - @no_s_q_l_schema_definition.setter - def no_s_q_l_schema_definition(self, no_s_q_l_schema_definition: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.no_s_q_l_schema_definition = no_s_q_l_schema_definition - - @property - def mongo_d_b_collections(self) -> Optional[list[MongoDBCollection]]: - return ( - None if self.attributes is None else self.attributes.mongo_d_b_collections - ) - - @mongo_d_b_collections.setter - def mongo_d_b_collections( - self, mongo_d_b_collections: 
Optional[list[MongoDBCollection]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.mongo_d_b_collections = mongo_d_b_collections - - class Attributes(Database.Attributes): - mongo_d_b_database_collection_count: Optional[int] = Field( - None, description="", alias="mongoDBDatabaseCollectionCount" - ) - schema_count: Optional[int] = Field(None, description="", alias="schemaCount") - query_count: Optional[int] = Field(None, description="", alias="queryCount") - query_user_count: Optional[int] = Field( - None, description="", alias="queryUserCount" - ) - query_user_map: Optional[dict[str, int]] = Field( - None, description="", alias="queryUserMap" - ) - query_count_updated_at: Optional[datetime] = Field( - None, description="", alias="queryCountUpdatedAt" - ) - database_name: Optional[str] = Field(None, description="", alias="databaseName") - database_qualified_name: Optional[str] = Field( - None, description="", alias="databaseQualifiedName" - ) - schema_name: Optional[str] = Field(None, description="", alias="schemaName") - schema_qualified_name: Optional[str] = Field( - None, description="", alias="schemaQualifiedName" - ) - table_name: Optional[str] = Field(None, description="", alias="tableName") - table_qualified_name: Optional[str] = Field( - None, description="", alias="tableQualifiedName" - ) - view_name: Optional[str] = Field(None, description="", alias="viewName") - view_qualified_name: Optional[str] = Field( - None, description="", alias="viewQualifiedName" - ) - is_profiled: Optional[bool] = Field(None, description="", alias="isProfiled") - last_profiled_at: Optional[datetime] = Field( - None, description="", alias="lastProfiledAt" - ) - no_s_q_l_schema_definition: Optional[str] = Field( - None, description="", alias="noSQLSchemaDefinition" - ) - mongo_d_b_collections: Optional[list[MongoDBCollection]] = Field( - None, description="", alias="mongoDBCollections" - ) # relationship - - attributes: "MongoDBDatabase.Attributes" = Field( - default_factory=lambda: MongoDBDatabase.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -MongoDBCollection.Attributes.update_forward_refs() - - -MongoDBDatabase.Attributes.update_forward_refs() +from .mongo_d_b_database import MongoDBDatabase # noqa diff --git a/pyatlan/model/assets/mongo_d_b_database.py b/pyatlan/model/assets/mongo_d_b_database.py new file mode 100644 index 000000000..6e40bd49f --- /dev/null +++ b/pyatlan/model/assets/mongo_d_b_database.py @@ -0,0 +1,396 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
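Aside from the editor (not part of the diff): the mongo_d_b_database.py module that begins here repeats the type_name idiom used by every regenerated asset, a keyword default plus a validator pinning the value. A stripped-down, self-contained version is below; MongoDBDatabaseLike is a hypothetical stand-in, again assuming the pydantic.v1 layer from pydantic 2.x.

from pydantic.v1 import BaseModel, Field, ValidationError, validator


class MongoDBDatabaseLike(BaseModel):
    class Config:
        # in pydantic v1, allow_mutation=False on a field is only enforced
        # when validate_assignment is enabled on the config
        validate_assignment = True

    type_name: str = Field(default="MongoDBDatabase", allow_mutation=False)

    @validator("type_name")
    def validate_type_name(cls, v):
        if v != "MongoDBDatabase":
            raise ValueError("must be MongoDBDatabase")
        return v


assert MongoDBDatabaseLike().type_name == "MongoDBDatabase"   # default applies

try:
    MongoDBDatabaseLike(type_name="Table")                    # any other value is rejected
except ValidationError as exc:
    print(exc)

The generated classes presumably inherit an equivalent config from their shared base model rather than declaring it per class.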
+ + +from __future__ import annotations + +from datetime import datetime +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import ( + BooleanField, + KeywordField, + KeywordTextField, + NumericField, + RelationField, + TextField, +) + +from .database import Database + + +class MongoDBDatabase(Database): + """Description""" + + type_name: str = Field(default="MongoDBDatabase", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "MongoDBDatabase": + raise ValueError("must be MongoDBDatabase") + return v + + def __setattr__(self, name, value): + if name in MongoDBDatabase._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + MONGO_DB_DATABASE_COLLECTION_COUNT: ClassVar[NumericField] = NumericField( + "mongoDBDatabaseCollectionCount", "mongoDBDatabaseCollectionCount" + ) + """ + Number of collections in the database. + """ + SCHEMA_COUNT: ClassVar[NumericField] = NumericField("schemaCount", "schemaCount") + """ + Number of schemas in this database. + """ + QUERY_COUNT: ClassVar[NumericField] = NumericField("queryCount", "queryCount") + """ + Number of times this asset has been queried. + """ + QUERY_USER_COUNT: ClassVar[NumericField] = NumericField( + "queryUserCount", "queryUserCount" + ) + """ + Number of unique users who have queried this asset. + """ + QUERY_USER_MAP: ClassVar[KeywordField] = KeywordField( + "queryUserMap", "queryUserMap" + ) + """ + Map of unique users who have queried this asset to the number of times they have queried it. + """ + QUERY_COUNT_UPDATED_AT: ClassVar[NumericField] = NumericField( + "queryCountUpdatedAt", "queryCountUpdatedAt" + ) + """ + Time (epoch) at which the query count was last updated, in milliseconds. + """ + DATABASE_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "databaseName", "databaseName.keyword", "databaseName" + ) + """ + Simple name of the database in which this SQL asset exists, or empty if it does not exist within a database. + """ + DATABASE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "databaseQualifiedName", "databaseQualifiedName" + ) + """ + Unique name of the database in which this SQL asset exists, or empty if it does not exist within a database. + """ + SCHEMA_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "schemaName", "schemaName.keyword", "schemaName" + ) + """ + Simple name of the schema in which this SQL asset exists, or empty if it does not exist within a schema. + """ + SCHEMA_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "schemaQualifiedName", "schemaQualifiedName" + ) + """ + Unique name of the schema in which this SQL asset exists, or empty if it does not exist within a schema. + """ + TABLE_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "tableName", "tableName.keyword", "tableName" + ) + """ + Simple name of the table in which this SQL asset exists, or empty if it does not exist within a table. + """ + TABLE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "tableQualifiedName", "tableQualifiedName" + ) + """ + Unique name of the table in which this SQL asset exists, or empty if it does not exist within a table. + """ + VIEW_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "viewName", "viewName.keyword", "viewName" + ) + """ + Simple name of the view in which this SQL asset exists, or empty if it does not exist within a view. 
+ """ + VIEW_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "viewQualifiedName", "viewQualifiedName" + ) + """ + Unique name of the view in which this SQL asset exists, or empty if it does not exist within a view. + """ + IS_PROFILED: ClassVar[BooleanField] = BooleanField("isProfiled", "isProfiled") + """ + Whether this asset has been profiled (true) or not (false). + """ + LAST_PROFILED_AT: ClassVar[NumericField] = NumericField( + "lastProfiledAt", "lastProfiledAt" + ) + """ + Time (epoch) at which this asset was last profiled, in milliseconds. + """ + NO_SQL_SCHEMA_DEFINITION: ClassVar[TextField] = TextField( + "noSQLSchemaDefinition", "noSQLSchemaDefinition" + ) + """ + Represents attributes for describing the key schema for the table and indexes. + """ + + MONGO_DB_COLLECTIONS: ClassVar[RelationField] = RelationField("mongoDBCollections") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "mongo_d_b_database_collection_count", + "schema_count", + "query_count", + "query_user_count", + "query_user_map", + "query_count_updated_at", + "database_name", + "database_qualified_name", + "schema_name", + "schema_qualified_name", + "table_name", + "table_qualified_name", + "view_name", + "view_qualified_name", + "is_profiled", + "last_profiled_at", + "no_s_q_l_schema_definition", + "mongo_d_b_collections", + ] + + @property + def mongo_d_b_database_collection_count(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.mongo_d_b_database_collection_count + ) + + @mongo_d_b_database_collection_count.setter + def mongo_d_b_database_collection_count( + self, mongo_d_b_database_collection_count: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mongo_d_b_database_collection_count = ( + mongo_d_b_database_collection_count + ) + + @property + def schema_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.schema_count + + @schema_count.setter + def schema_count(self, schema_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.schema_count = schema_count + + @property + def query_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.query_count + + @query_count.setter + def query_count(self, query_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_count = query_count + + @property + def query_user_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.query_user_count + + @query_user_count.setter + def query_user_count(self, query_user_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_user_count = query_user_count + + @property + def query_user_map(self) -> Optional[dict[str, int]]: + return None if self.attributes is None else self.attributes.query_user_map + + @query_user_map.setter + def query_user_map(self, query_user_map: Optional[dict[str, int]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_user_map = query_user_map + + @property + def query_count_updated_at(self) -> Optional[datetime]: + return ( + None if self.attributes is None else self.attributes.query_count_updated_at + ) + + @query_count_updated_at.setter + def query_count_updated_at(self, query_count_updated_at: Optional[datetime]): + if self.attributes is None: + 
self.attributes = self.Attributes() + self.attributes.query_count_updated_at = query_count_updated_at + + @property + def database_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.database_name + + @database_name.setter + def database_name(self, database_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.database_name = database_name + + @property + def database_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.database_qualified_name + ) + + @database_qualified_name.setter + def database_qualified_name(self, database_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.database_qualified_name = database_qualified_name + + @property + def schema_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.schema_name + + @schema_name.setter + def schema_name(self, schema_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.schema_name = schema_name + + @property + def schema_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.schema_qualified_name + ) + + @schema_qualified_name.setter + def schema_qualified_name(self, schema_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.schema_qualified_name = schema_qualified_name + + @property + def table_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.table_name + + @table_name.setter + def table_name(self, table_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.table_name = table_name + + @property + def table_qualified_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.table_qualified_name + + @table_qualified_name.setter + def table_qualified_name(self, table_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.table_qualified_name = table_qualified_name + + @property + def view_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.view_name + + @view_name.setter + def view_name(self, view_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.view_name = view_name + + @property + def view_qualified_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.view_qualified_name + + @view_qualified_name.setter + def view_qualified_name(self, view_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.view_qualified_name = view_qualified_name + + @property + def is_profiled(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_profiled + + @is_profiled.setter + def is_profiled(self, is_profiled: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_profiled = is_profiled + + @property + def last_profiled_at(self) -> Optional[datetime]: + return None if self.attributes is None else self.attributes.last_profiled_at + + @last_profiled_at.setter + def last_profiled_at(self, last_profiled_at: Optional[datetime]): + if self.attributes is None: + self.attributes = 
self.Attributes() + self.attributes.last_profiled_at = last_profiled_at + + @property + def no_s_q_l_schema_definition(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.no_s_q_l_schema_definition + ) + + @no_s_q_l_schema_definition.setter + def no_s_q_l_schema_definition(self, no_s_q_l_schema_definition: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.no_s_q_l_schema_definition = no_s_q_l_schema_definition + + @property + def mongo_d_b_collections(self) -> Optional[list[MongoDBCollection]]: + return ( + None if self.attributes is None else self.attributes.mongo_d_b_collections + ) + + @mongo_d_b_collections.setter + def mongo_d_b_collections( + self, mongo_d_b_collections: Optional[list[MongoDBCollection]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mongo_d_b_collections = mongo_d_b_collections + + class Attributes(Database.Attributes): + mongo_d_b_database_collection_count: Optional[int] = Field( + default=None, description="" + ) + schema_count: Optional[int] = Field(default=None, description="") + query_count: Optional[int] = Field(default=None, description="") + query_user_count: Optional[int] = Field(default=None, description="") + query_user_map: Optional[dict[str, int]] = Field(default=None, description="") + query_count_updated_at: Optional[datetime] = Field(default=None, description="") + database_name: Optional[str] = Field(default=None, description="") + database_qualified_name: Optional[str] = Field(default=None, description="") + schema_name: Optional[str] = Field(default=None, description="") + schema_qualified_name: Optional[str] = Field(default=None, description="") + table_name: Optional[str] = Field(default=None, description="") + table_qualified_name: Optional[str] = Field(default=None, description="") + view_name: Optional[str] = Field(default=None, description="") + view_qualified_name: Optional[str] = Field(default=None, description="") + is_profiled: Optional[bool] = Field(default=None, description="") + last_profiled_at: Optional[datetime] = Field(default=None, description="") + no_s_q_l_schema_definition: Optional[str] = Field(default=None, description="") + mongo_d_b_collections: Optional[list[MongoDBCollection]] = Field( + default=None, description="" + ) # relationship + + attributes: "MongoDBDatabase.Attributes" = Field( + default_factory=lambda: MongoDBDatabase.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .mongo_d_b_collection import MongoDBCollection # noqa diff --git a/pyatlan/model/assets/monte_carlo.py b/pyatlan/model/assets/monte_carlo.py new file mode 100644 index 000000000..648e860a1 --- /dev/null +++ b/pyatlan/model/assets/monte_carlo.py @@ -0,0 +1,82 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
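Aside from the editor (not part of the diff): like the other regenerated classes, MongoDBDatabase above declares its attributes field with default_factory, so every instance starts with its own empty Attributes object instead of sharing one mutable default. A tiny self-contained illustration with invented class names, under the same pydantic.v1 assumption:

from typing import Optional

from pydantic.v1 import BaseModel, Field


class AttributesLike(BaseModel):
    query_count: Optional[int] = Field(default=None, description="")


class DatabaseLike(BaseModel):
    # default_factory builds a fresh Attributes object per instance
    attributes: AttributesLike = Field(default_factory=lambda: AttributesLike())


a, b = DatabaseLike(), DatabaseLike()
a.attributes.query_count = 10
assert b.attributes.query_count is None   # b is unaffected by mutating a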
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField + +from .data_quality import DataQuality + + +class MonteCarlo(DataQuality): + """Description""" + + type_name: str = Field(default="MonteCarlo", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "MonteCarlo": + raise ValueError("must be MonteCarlo") + return v + + def __setattr__(self, name, value): + if name in MonteCarlo._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + MC_LABELS: ClassVar[KeywordField] = KeywordField("mcLabels", "mcLabels") + """ + List of labels for this Monte Carlo asset. + """ + MC_ASSET_QUALIFIED_NAMES: ClassVar[KeywordField] = KeywordField( + "mcAssetQualifiedNames", "mcAssetQualifiedNames" + ) + """ + List of unique names of assets that are part of this Monte Carlo asset. + """ + + _convenience_properties: ClassVar[list[str]] = [ + "mc_labels", + "mc_asset_qualified_names", + ] + + @property + def mc_labels(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.mc_labels + + @mc_labels.setter + def mc_labels(self, mc_labels: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mc_labels = mc_labels + + @property + def mc_asset_qualified_names(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.mc_asset_qualified_names + ) + + @mc_asset_qualified_names.setter + def mc_asset_qualified_names(self, mc_asset_qualified_names: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mc_asset_qualified_names = mc_asset_qualified_names + + class Attributes(DataQuality.Attributes): + mc_labels: Optional[set[str]] = Field(default=None, description="") + mc_asset_qualified_names: Optional[set[str]] = Field( + default=None, description="" + ) + + attributes: "MonteCarlo.Attributes" = Field( + default_factory=lambda: MonteCarlo.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) diff --git a/pyatlan/model/assets/namespace.py b/pyatlan/model/assets/namespace.py new file mode 100644 index 000000000..7a8d02e39 --- /dev/null +++ b/pyatlan/model/assets/namespace.py @@ -0,0 +1,82 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
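Aside from the editor (not part of the diff): the MonteCarlo class above shows the convenience-property pattern all of these assets share: reads return None until an Attributes object exists, the __setattr__ override routes listed names to their property setters, and the setter creates Attributes() lazily on first write. A self-contained miniature of those mechanics, with a hypothetical MonteCarloLike class and the same pydantic.v1 assumption:

from typing import ClassVar, Optional, Set

from pydantic.v1 import BaseModel, Field


class MonteCarloLike(BaseModel):
    class Attributes(BaseModel):
        mc_labels: Optional[Set[str]] = Field(default=None, description="")

    attributes: Optional[Attributes] = None

    _convenience_properties: ClassVar[list] = ["mc_labels"]

    def __setattr__(self, name, value):
        # send convenience properties to their descriptors instead of pydantic fields
        if name in MonteCarloLike._convenience_properties:
            return object.__setattr__(self, name, value)
        super().__setattr__(name, value)

    @property
    def mc_labels(self) -> Optional[Set[str]]:
        return None if self.attributes is None else self.attributes.mc_labels

    @mc_labels.setter
    def mc_labels(self, mc_labels: Optional[Set[str]]):
        if self.attributes is None:
            self.attributes = self.Attributes()  # created lazily on first write
        self.attributes.mc_labels = mc_labels


asset = MonteCarloLike()
assert asset.mc_labels is None                # no Attributes object yet
asset.mc_labels = {"pii", "certified"}
assert asset.attributes.mc_labels == {"pii", "certified"}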
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import RelationField + +from .asset import Asset + + +class Namespace(Asset, type_name="Namespace"): + """Description""" + + type_name: str = Field(default="Namespace", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "Namespace": + raise ValueError("must be Namespace") + return v + + def __setattr__(self, name, value): + if name in Namespace._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + CHILDREN_QUERIES: ClassVar[RelationField] = RelationField("childrenQueries") + """ + TBC + """ + CHILDREN_FOLDERS: ClassVar[RelationField] = RelationField("childrenFolders") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "children_queries", + "children_folders", + ] + + @property + def children_queries(self) -> Optional[list[Query]]: + return None if self.attributes is None else self.attributes.children_queries + + @children_queries.setter + def children_queries(self, children_queries: Optional[list[Query]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.children_queries = children_queries + + @property + def children_folders(self) -> Optional[list[Folder]]: + return None if self.attributes is None else self.attributes.children_folders + + @children_folders.setter + def children_folders(self, children_folders: Optional[list[Folder]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.children_folders = children_folders + + class Attributes(Asset.Attributes): + children_queries: Optional[list[Query]] = Field( + default=None, description="" + ) # relationship + children_folders: Optional[list[Folder]] = Field( + default=None, description="" + ) # relationship + + attributes: "Namespace.Attributes" = Field( + default_factory=lambda: Namespace.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .folder import Folder # noqa +from .query import Query # noqa diff --git a/pyatlan/model/assets/asset24.py b/pyatlan/model/assets/no_s_q_l.py similarity index 85% rename from pyatlan/model/assets/asset24.py rename to pyatlan/model/assets/no_s_q_l.py index 8c25b2298..8624dfe4a 100644 --- a/pyatlan/model/assets/asset24.py +++ b/pyatlan/model/assets/no_s_q_l.py @@ -6,17 +6,17 @@ from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.fields.atlan_fields import TextField -from .asset00 import Catalog +from .catalog import Catalog class NoSQL(Catalog): """Description""" - type_name: str = Field("NoSQL", allow_mutation=False) + type_name: str = Field(default="NoSQL", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -55,15 +55,10 @@ def no_s_q_l_schema_definition(self, no_s_q_l_schema_definition: Optional[str]): self.attributes.no_s_q_l_schema_definition = no_s_q_l_schema_definition class Attributes(Catalog.Attributes): - no_s_q_l_schema_definition: Optional[str] = Field( - None, description="", alias="noSQLSchemaDefinition" - ) + no_s_q_l_schema_definition: Optional[str] = Field(default=None, description="") attributes: "NoSQL.Attributes" = Field( default_factory=lambda: NoSQL.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) - - -NoSQL.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset17.py b/pyatlan/model/assets/object_store.py similarity index 77% rename from pyatlan/model/assets/asset17.py rename to pyatlan/model/assets/object_store.py index 90fcc2897..6811ea5df 100644 --- a/pyatlan/model/assets/asset17.py +++ b/pyatlan/model/assets/object_store.py @@ -6,15 +6,15 @@ from typing import ClassVar -from pydantic import Field, validator +from pydantic.v1 import Field, validator -from .asset00 import Catalog +from .catalog import Catalog class ObjectStore(Catalog): """Description""" - type_name: str = Field("ObjectStore", allow_mutation=False) + type_name: str = Field(default="ObjectStore", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -28,6 +28,3 @@ def __setattr__(self, name, value): super().__setattr__(name, value) _convenience_properties: ClassVar[list[str]] = [] - - -ObjectStore.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset13.py b/pyatlan/model/assets/persona.py similarity index 94% rename from pyatlan/model/assets/asset13.py rename to pyatlan/model/assets/persona.py index 3cd34db35..f56348b18 100644 --- a/pyatlan/model/assets/asset13.py +++ b/pyatlan/model/assets/persona.py @@ -6,7 +6,7 @@ from typing import ClassVar, Optional, Set -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.enums import ( AuthPolicyCategory, @@ -20,8 +20,9 @@ from pyatlan.model.fields.atlan_fields import KeywordField from pyatlan.utils import init_guid, validate_required_fields -from .asset00 import SelfAsset -from .asset06 import AccessControl, AuthPolicy +from .access_control import AccessControl +from .asset import SelfAsset +from .auth_policy import AuthPolicy class Persona(AccessControl): @@ -169,7 +170,7 @@ def create_for_modification( ) ) - type_name: str = Field("Persona", allow_mutation=False) + type_name: str = 
Field(default="Persona", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -234,13 +235,9 @@ def role_id(self, role_id: Optional[str]): self.attributes.role_id = role_id class Attributes(AccessControl.Attributes): - persona_groups: Optional[set[str]] = Field( - None, description="", alias="personaGroups" - ) - persona_users: Optional[set[str]] = Field( - None, description="", alias="personaUsers" - ) - role_id: Optional[str] = Field(None, description="", alias="roleId") + persona_groups: Optional[set[str]] = Field(default=None, description="") + persona_users: Optional[set[str]] = Field(default=None, description="") + role_id: Optional[str] = Field(default=None, description="") @classmethod # @validate_arguments() @@ -262,6 +259,3 @@ def create(cls, name: str) -> Persona.Attributes: description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) - - -Persona.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset51.py b/pyatlan/model/assets/power_b_i.py similarity index 88% rename from pyatlan/model/assets/asset51.py rename to pyatlan/model/assets/power_b_i.py index dbca9ee26..d1df2bc94 100644 --- a/pyatlan/model/assets/asset51.py +++ b/pyatlan/model/assets/power_b_i.py @@ -6,7 +6,7 @@ from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.enums import PowerbiEndorsement from pyatlan.model.fields.atlan_fields import ( @@ -15,13 +15,13 @@ KeywordTextField, ) -from .asset19 import BI +from .b_i import BI class PowerBI(BI): """Description""" - type_name: str = Field("PowerBI", allow_mutation=False) + type_name: str = Field(default="PowerBI", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -121,17 +121,13 @@ def power_b_i_endorsement( self.attributes.power_b_i_endorsement = power_b_i_endorsement class Attributes(BI.Attributes): - power_b_i_is_hidden: Optional[bool] = Field( - None, description="", alias="powerBIIsHidden" - ) + power_b_i_is_hidden: Optional[bool] = Field(default=None, description="") power_b_i_table_qualified_name: Optional[str] = Field( - None, description="", alias="powerBITableQualifiedName" - ) - power_b_i_format_string: Optional[str] = Field( - None, description="", alias="powerBIFormatString" + default=None, description="" ) + power_b_i_format_string: Optional[str] = Field(default=None, description="") power_b_i_endorsement: Optional[PowerbiEndorsement] = Field( - None, description="", alias="powerBIEndorsement" + default=None, description="" ) attributes: "PowerBI.Attributes" = Field( @@ -139,6 +135,3 @@ class Attributes(BI.Attributes): description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) - - -PowerBI.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/power_b_i_column.py b/pyatlan/model/assets/power_b_i_column.py new file mode 100644 index 000000000..e3645e0a7 --- /dev/null +++ b/pyatlan/model/assets/power_b_i_column.py @@ -0,0 +1,202 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, RelationField + +from .power_b_i import PowerBI + + +class PowerBIColumn(PowerBI): + """Description""" + + type_name: str = Field(default="PowerBIColumn", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "PowerBIColumn": + raise ValueError("must be PowerBIColumn") + return v + + def __setattr__(self, name, value): + if name in PowerBIColumn._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "workspaceQualifiedName", "workspaceQualifiedName" + ) + """ + Unique name of the workspace in which this column exists. + """ + DATASET_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "datasetQualifiedName", "datasetQualifiedName" + ) + """ + Unique name of the dataset in which this column exists. + """ + POWER_BI_COLUMN_DATA_CATEGORY: ClassVar[KeywordField] = KeywordField( + "powerBIColumnDataCategory", "powerBIColumnDataCategory" + ) + """ + Data category that describes the data in this column. + """ + POWER_BI_COLUMN_DATA_TYPE: ClassVar[KeywordField] = KeywordField( + "powerBIColumnDataType", "powerBIColumnDataType" + ) + """ + Data type of this column. + """ + POWER_BI_SORT_BY_COLUMN: ClassVar[KeywordField] = KeywordField( + "powerBISortByColumn", "powerBISortByColumn" + ) + """ + Name of a column in the same table to use to order this column. + """ + POWER_BI_COLUMN_SUMMARIZE_BY: ClassVar[KeywordField] = KeywordField( + "powerBIColumnSummarizeBy", "powerBIColumnSummarizeBy" + ) + """ + Aggregate function to use for summarizing this column. 
+ """ + + TABLE: ClassVar[RelationField] = RelationField("table") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "workspace_qualified_name", + "dataset_qualified_name", + "power_b_i_column_data_category", + "power_b_i_column_data_type", + "power_b_i_sort_by_column", + "power_b_i_column_summarize_by", + "table", + ] + + @property + def workspace_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.workspace_qualified_name + ) + + @workspace_qualified_name.setter + def workspace_qualified_name(self, workspace_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.workspace_qualified_name = workspace_qualified_name + + @property + def dataset_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.dataset_qualified_name + ) + + @dataset_qualified_name.setter + def dataset_qualified_name(self, dataset_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dataset_qualified_name = dataset_qualified_name + + @property + def power_b_i_column_data_category(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.power_b_i_column_data_category + ) + + @power_b_i_column_data_category.setter + def power_b_i_column_data_category( + self, power_b_i_column_data_category: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.power_b_i_column_data_category = power_b_i_column_data_category + + @property + def power_b_i_column_data_type(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.power_b_i_column_data_type + ) + + @power_b_i_column_data_type.setter + def power_b_i_column_data_type(self, power_b_i_column_data_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.power_b_i_column_data_type = power_b_i_column_data_type + + @property + def power_b_i_sort_by_column(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.power_b_i_sort_by_column + ) + + @power_b_i_sort_by_column.setter + def power_b_i_sort_by_column(self, power_b_i_sort_by_column: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.power_b_i_sort_by_column = power_b_i_sort_by_column + + @property + def power_b_i_column_summarize_by(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.power_b_i_column_summarize_by + ) + + @power_b_i_column_summarize_by.setter + def power_b_i_column_summarize_by( + self, power_b_i_column_summarize_by: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.power_b_i_column_summarize_by = power_b_i_column_summarize_by + + @property + def table(self) -> Optional[PowerBITable]: + return None if self.attributes is None else self.attributes.table + + @table.setter + def table(self, table: Optional[PowerBITable]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.table = table + + class Attributes(PowerBI.Attributes): + workspace_qualified_name: Optional[str] = Field(default=None, description="") + dataset_qualified_name: Optional[str] = Field(default=None, description="") + power_b_i_column_data_category: Optional[str] = Field( + default=None, description="" + ) + 
power_b_i_column_data_type: Optional[str] = Field(default=None, description="") + power_b_i_sort_by_column: Optional[str] = Field(default=None, description="") + power_b_i_column_summarize_by: Optional[str] = Field( + default=None, description="" + ) + table: Optional[PowerBITable] = Field( + default=None, description="" + ) # relationship + + attributes: "PowerBIColumn.Attributes" = Field( + default_factory=lambda: PowerBIColumn.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .power_b_i_table import PowerBITable # noqa diff --git a/pyatlan/model/assets/power_b_i_dashboard.py b/pyatlan/model/assets/power_b_i_dashboard.py new file mode 100644 index 000000000..61e79d5ad --- /dev/null +++ b/pyatlan/model/assets/power_b_i_dashboard.py @@ -0,0 +1,137 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, NumericField, RelationField + +from .power_b_i import PowerBI + + +class PowerBIDashboard(PowerBI): + """Description""" + + type_name: str = Field(default="PowerBIDashboard", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "PowerBIDashboard": + raise ValueError("must be PowerBIDashboard") + return v + + def __setattr__(self, name, value): + if name in PowerBIDashboard._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "workspaceQualifiedName", "workspaceQualifiedName" + ) + """ + Unique name of the workspace in which this dashboard exists. + """ + WEB_URL: ClassVar[KeywordField] = KeywordField("webUrl", "webUrl") + """ + Deprecated. See 'sourceUrl' instead. + """ + TILE_COUNT: ClassVar[NumericField] = NumericField("tileCount", "tileCount") + """ + Number of tiles in this table. 
+ """ + + WORKSPACE: ClassVar[RelationField] = RelationField("workspace") + """ + TBC + """ + TILES: ClassVar[RelationField] = RelationField("tiles") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "workspace_qualified_name", + "web_url", + "tile_count", + "workspace", + "tiles", + ] + + @property + def workspace_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.workspace_qualified_name + ) + + @workspace_qualified_name.setter + def workspace_qualified_name(self, workspace_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.workspace_qualified_name = workspace_qualified_name + + @property + def web_url(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.web_url + + @web_url.setter + def web_url(self, web_url: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.web_url = web_url + + @property + def tile_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.tile_count + + @tile_count.setter + def tile_count(self, tile_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.tile_count = tile_count + + @property + def workspace(self) -> Optional[PowerBIWorkspace]: + return None if self.attributes is None else self.attributes.workspace + + @workspace.setter + def workspace(self, workspace: Optional[PowerBIWorkspace]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.workspace = workspace + + @property + def tiles(self) -> Optional[list[PowerBITile]]: + return None if self.attributes is None else self.attributes.tiles + + @tiles.setter + def tiles(self, tiles: Optional[list[PowerBITile]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.tiles = tiles + + class Attributes(PowerBI.Attributes): + workspace_qualified_name: Optional[str] = Field(default=None, description="") + web_url: Optional[str] = Field(default=None, description="") + tile_count: Optional[int] = Field(default=None, description="") + workspace: Optional[PowerBIWorkspace] = Field( + default=None, description="" + ) # relationship + tiles: Optional[list[PowerBITile]] = Field( + default=None, description="" + ) # relationship + + attributes: "PowerBIDashboard.Attributes" = Field( + default_factory=lambda: PowerBIDashboard.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .power_b_i_tile import PowerBITile # noqa +from .power_b_i_workspace import PowerBIWorkspace # noqa diff --git a/pyatlan/model/assets/power_b_i_dataflow.py b/pyatlan/model/assets/power_b_i_dataflow.py new file mode 100644 index 000000000..7e3bd65ee --- /dev/null +++ b/pyatlan/model/assets/power_b_i_dataflow.py @@ -0,0 +1,121 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, RelationField + +from .power_b_i import PowerBI + + +class PowerBIDataflow(PowerBI): + """Description""" + + type_name: str = Field(default="PowerBIDataflow", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "PowerBIDataflow": + raise ValueError("must be PowerBIDataflow") + return v + + def __setattr__(self, name, value): + if name in PowerBIDataflow._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "workspaceQualifiedName", "workspaceQualifiedName" + ) + """ + Unique name of the workspace in which this dataflow exists. + """ + WEB_URL: ClassVar[KeywordField] = KeywordField("webUrl", "webUrl") + """ + Deprecated. See 'sourceUrl' instead. + """ + + WORKSPACE: ClassVar[RelationField] = RelationField("workspace") + """ + TBC + """ + DATASETS: ClassVar[RelationField] = RelationField("datasets") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "workspace_qualified_name", + "web_url", + "workspace", + "datasets", + ] + + @property + def workspace_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.workspace_qualified_name + ) + + @workspace_qualified_name.setter + def workspace_qualified_name(self, workspace_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.workspace_qualified_name = workspace_qualified_name + + @property + def web_url(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.web_url + + @web_url.setter + def web_url(self, web_url: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.web_url = web_url + + @property + def workspace(self) -> Optional[PowerBIWorkspace]: + return None if self.attributes is None else self.attributes.workspace + + @workspace.setter + def workspace(self, workspace: Optional[PowerBIWorkspace]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.workspace = workspace + + @property + def datasets(self) -> Optional[list[PowerBIDataset]]: + return None if self.attributes is None else self.attributes.datasets + + @datasets.setter + def datasets(self, datasets: Optional[list[PowerBIDataset]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.datasets = datasets + + class Attributes(PowerBI.Attributes): + workspace_qualified_name: Optional[str] = Field(default=None, description="") + web_url: Optional[str] = Field(default=None, description="") + workspace: Optional[PowerBIWorkspace] = Field( + default=None, description="" + ) # relationship + datasets: Optional[list[PowerBIDataset]] = Field( + default=None, description="" + ) # relationship + + attributes: "PowerBIDataflow.Attributes" = Field( + default_factory=lambda: PowerBIDataflow.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .power_b_i_dataset import PowerBIDataset # noqa +from .power_b_i_workspace import PowerBIWorkspace # noqa diff --git a/pyatlan/model/assets/power_b_i_dataset.py b/pyatlan/model/assets/power_b_i_dataset.py new file mode 100644 index 000000000..7a570d198 --- /dev/null +++ b/pyatlan/model/assets/power_b_i_dataset.py @@ -0,0 +1,197 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, RelationField + +from .power_b_i import PowerBI + + +class PowerBIDataset(PowerBI): + """Description""" + + type_name: str = Field(default="PowerBIDataset", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "PowerBIDataset": + raise ValueError("must be PowerBIDataset") + return v + + def __setattr__(self, name, value): + if name in PowerBIDataset._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "workspaceQualifiedName", "workspaceQualifiedName" + ) + """ + Unique name of the workspace in which this dataset exists. + """ + WEB_URL: ClassVar[KeywordField] = KeywordField("webUrl", "webUrl") + """ + Deprecated. See 'sourceUrl' instead. + """ + + REPORTS: ClassVar[RelationField] = RelationField("reports") + """ + TBC + """ + WORKSPACE: ClassVar[RelationField] = RelationField("workspace") + """ + TBC + """ + DATAFLOWS: ClassVar[RelationField] = RelationField("dataflows") + """ + TBC + """ + TILES: ClassVar[RelationField] = RelationField("tiles") + """ + TBC + """ + TABLES: ClassVar[RelationField] = RelationField("tables") + """ + TBC + """ + DATASOURCES: ClassVar[RelationField] = RelationField("datasources") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "workspace_qualified_name", + "web_url", + "reports", + "workspace", + "dataflows", + "tiles", + "tables", + "datasources", + ] + + @property + def workspace_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.workspace_qualified_name + ) + + @workspace_qualified_name.setter + def workspace_qualified_name(self, workspace_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.workspace_qualified_name = workspace_qualified_name + + @property + def web_url(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.web_url + + @web_url.setter + def web_url(self, web_url: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.web_url = web_url + + @property + def reports(self) -> Optional[list[PowerBIReport]]: + return None if self.attributes is None else self.attributes.reports + + @reports.setter + def reports(self, reports: Optional[list[PowerBIReport]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.reports = reports + + @property + def workspace(self) -> Optional[PowerBIWorkspace]: + return None if self.attributes is None else self.attributes.workspace + + @workspace.setter + def workspace(self, workspace: Optional[PowerBIWorkspace]): + if self.attributes is None: + self.attributes = self.Attributes() + 
self.attributes.workspace = workspace + + @property + def dataflows(self) -> Optional[list[PowerBIDataflow]]: + return None if self.attributes is None else self.attributes.dataflows + + @dataflows.setter + def dataflows(self, dataflows: Optional[list[PowerBIDataflow]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dataflows = dataflows + + @property + def tiles(self) -> Optional[list[PowerBITile]]: + return None if self.attributes is None else self.attributes.tiles + + @tiles.setter + def tiles(self, tiles: Optional[list[PowerBITile]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.tiles = tiles + + @property + def tables(self) -> Optional[list[PowerBITable]]: + return None if self.attributes is None else self.attributes.tables + + @tables.setter + def tables(self, tables: Optional[list[PowerBITable]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.tables = tables + + @property + def datasources(self) -> Optional[list[PowerBIDatasource]]: + return None if self.attributes is None else self.attributes.datasources + + @datasources.setter + def datasources(self, datasources: Optional[list[PowerBIDatasource]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.datasources = datasources + + class Attributes(PowerBI.Attributes): + workspace_qualified_name: Optional[str] = Field(default=None, description="") + web_url: Optional[str] = Field(default=None, description="") + reports: Optional[list[PowerBIReport]] = Field( + default=None, description="" + ) # relationship + workspace: Optional[PowerBIWorkspace] = Field( + default=None, description="" + ) # relationship + dataflows: Optional[list[PowerBIDataflow]] = Field( + default=None, description="" + ) # relationship + tiles: Optional[list[PowerBITile]] = Field( + default=None, description="" + ) # relationship + tables: Optional[list[PowerBITable]] = Field( + default=None, description="" + ) # relationship + datasources: Optional[list[PowerBIDatasource]] = Field( + default=None, description="" + ) # relationship + + attributes: "PowerBIDataset.Attributes" = Field( + default_factory=lambda: PowerBIDataset.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .power_b_i_dataflow import PowerBIDataflow # noqa +from .power_b_i_datasource import PowerBIDatasource # noqa +from .power_b_i_report import PowerBIReport # noqa +from .power_b_i_table import PowerBITable # noqa +from .power_b_i_tile import PowerBITile # noqa +from .power_b_i_workspace import PowerBIWorkspace # noqa diff --git a/pyatlan/model/assets/power_b_i_datasource.py b/pyatlan/model/assets/power_b_i_datasource.py new file mode 100644 index 000000000..8d586d5e1 --- /dev/null +++ b/pyatlan/model/assets/power_b_i_datasource.py @@ -0,0 +1,84 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, RelationField + +from .power_b_i import PowerBI + + +class PowerBIDatasource(PowerBI): + """Description""" + + type_name: str = Field(default="PowerBIDatasource", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "PowerBIDatasource": + raise ValueError("must be PowerBIDatasource") + return v + + def __setattr__(self, name, value): + if name in PowerBIDatasource._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + CONNECTION_DETAILS: ClassVar[KeywordField] = KeywordField( + "connectionDetails", "connectionDetails" + ) + """ + Connection details of the datasource. + """ + + DATASETS: ClassVar[RelationField] = RelationField("datasets") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "connection_details", + "datasets", + ] + + @property + def connection_details(self) -> Optional[dict[str, str]]: + return None if self.attributes is None else self.attributes.connection_details + + @connection_details.setter + def connection_details(self, connection_details: Optional[dict[str, str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.connection_details = connection_details + + @property + def datasets(self) -> Optional[list[PowerBIDataset]]: + return None if self.attributes is None else self.attributes.datasets + + @datasets.setter + def datasets(self, datasets: Optional[list[PowerBIDataset]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.datasets = datasets + + class Attributes(PowerBI.Attributes): + connection_details: Optional[dict[str, str]] = Field( + default=None, description="" + ) + datasets: Optional[list[PowerBIDataset]] = Field( + default=None, description="" + ) # relationship + + attributes: "PowerBIDatasource.Attributes" = Field( + default_factory=lambda: PowerBIDatasource.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .power_b_i_dataset import PowerBIDataset # noqa diff --git a/pyatlan/model/assets/power_b_i_measure.py b/pyatlan/model/assets/power_b_i_measure.py new file mode 100644 index 000000000..2a2a3abca --- /dev/null +++ b/pyatlan/model/assets/power_b_i_measure.py @@ -0,0 +1,161 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import ( + BooleanField, + KeywordField, + RelationField, + TextField, +) + +from .power_b_i import PowerBI + + +class PowerBIMeasure(PowerBI): + """Description""" + + type_name: str = Field(default="PowerBIMeasure", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "PowerBIMeasure": + raise ValueError("must be PowerBIMeasure") + return v + + def __setattr__(self, name, value): + if name in PowerBIMeasure._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "workspaceQualifiedName", "workspaceQualifiedName" + ) + """ + Unique name of the workspace in which this measure exists. + """ + DATASET_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "datasetQualifiedName", "datasetQualifiedName" + ) + """ + Unique name of the dataset in which this measure exists. + """ + POWER_BI_MEASURE_EXPRESSION: ClassVar[TextField] = TextField( + "powerBIMeasureExpression", "powerBIMeasureExpression" + ) + """ + DAX expression for this measure. + """ + POWER_BI_IS_EXTERNAL_MEASURE: ClassVar[BooleanField] = BooleanField( + "powerBIIsExternalMeasure", "powerBIIsExternalMeasure" + ) + """ + Whether this measure is external (true) or internal (false). + """ + + TABLE: ClassVar[RelationField] = RelationField("table") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "workspace_qualified_name", + "dataset_qualified_name", + "power_b_i_measure_expression", + "power_b_i_is_external_measure", + "table", + ] + + @property + def workspace_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.workspace_qualified_name + ) + + @workspace_qualified_name.setter + def workspace_qualified_name(self, workspace_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.workspace_qualified_name = workspace_qualified_name + + @property + def dataset_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.dataset_qualified_name + ) + + @dataset_qualified_name.setter + def dataset_qualified_name(self, dataset_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dataset_qualified_name = dataset_qualified_name + + @property + def power_b_i_measure_expression(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.power_b_i_measure_expression + ) + + @power_b_i_measure_expression.setter + def power_b_i_measure_expression(self, power_b_i_measure_expression: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.power_b_i_measure_expression = power_b_i_measure_expression + + @property + def power_b_i_is_external_measure(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.power_b_i_is_external_measure + ) + + @power_b_i_is_external_measure.setter + def power_b_i_is_external_measure( + self, power_b_i_is_external_measure: Optional[bool] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.power_b_i_is_external_measure = power_b_i_is_external_measure + + @property + def table(self) -> 
Optional[PowerBITable]: + return None if self.attributes is None else self.attributes.table + + @table.setter + def table(self, table: Optional[PowerBITable]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.table = table + + class Attributes(PowerBI.Attributes): + workspace_qualified_name: Optional[str] = Field(default=None, description="") + dataset_qualified_name: Optional[str] = Field(default=None, description="") + power_b_i_measure_expression: Optional[str] = Field( + default=None, description="" + ) + power_b_i_is_external_measure: Optional[bool] = Field( + default=None, description="" + ) + table: Optional[PowerBITable] = Field( + default=None, description="" + ) # relationship + + attributes: "PowerBIMeasure.Attributes" = Field( + default_factory=lambda: PowerBIMeasure.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .power_b_i_table import PowerBITable # noqa diff --git a/pyatlan/model/assets/power_b_i_page.py b/pyatlan/model/assets/power_b_i_page.py new file mode 100644 index 000000000..7f39d3c18 --- /dev/null +++ b/pyatlan/model/assets/power_b_i_page.py @@ -0,0 +1,106 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, RelationField + +from .power_b_i import PowerBI + + +class PowerBIPage(PowerBI): + """Description""" + + type_name: str = Field(default="PowerBIPage", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "PowerBIPage": + raise ValueError("must be PowerBIPage") + return v + + def __setattr__(self, name, value): + if name in PowerBIPage._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "workspaceQualifiedName", "workspaceQualifiedName" + ) + """ + Unique name of the workspace in which this page exists. + """ + REPORT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "reportQualifiedName", "reportQualifiedName" + ) + """ + Unique name of the report in which this page exists. 
+ """ + + REPORT: ClassVar[RelationField] = RelationField("report") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "workspace_qualified_name", + "report_qualified_name", + "report", + ] + + @property + def workspace_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.workspace_qualified_name + ) + + @workspace_qualified_name.setter + def workspace_qualified_name(self, workspace_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.workspace_qualified_name = workspace_qualified_name + + @property + def report_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.report_qualified_name + ) + + @report_qualified_name.setter + def report_qualified_name(self, report_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.report_qualified_name = report_qualified_name + + @property + def report(self) -> Optional[PowerBIReport]: + return None if self.attributes is None else self.attributes.report + + @report.setter + def report(self, report: Optional[PowerBIReport]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.report = report + + class Attributes(PowerBI.Attributes): + workspace_qualified_name: Optional[str] = Field(default=None, description="") + report_qualified_name: Optional[str] = Field(default=None, description="") + report: Optional[PowerBIReport] = Field( + default=None, description="" + ) # relationship + + attributes: "PowerBIPage.Attributes" = Field( + default_factory=lambda: PowerBIPage.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .power_b_i_report import PowerBIReport # noqa diff --git a/pyatlan/model/assets/power_b_i_report.py b/pyatlan/model/assets/power_b_i_report.py new file mode 100644 index 000000000..d1eb39149 --- /dev/null +++ b/pyatlan/model/assets/power_b_i_report.py @@ -0,0 +1,195 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, NumericField, RelationField + +from .power_b_i import PowerBI + + +class PowerBIReport(PowerBI): + """Description""" + + type_name: str = Field(default="PowerBIReport", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "PowerBIReport": + raise ValueError("must be PowerBIReport") + return v + + def __setattr__(self, name, value): + if name in PowerBIReport._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "workspaceQualifiedName", "workspaceQualifiedName" + ) + """ + Unique name of the workspace in which this report exists. + """ + DATASET_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "datasetQualifiedName", "datasetQualifiedName" + ) + """ + Unique name of the dataset used to build this report. + """ + WEB_URL: ClassVar[KeywordField] = KeywordField("webUrl", "webUrl") + """ + Deprecated. See 'sourceUrl' instead. 
+ """ + PAGE_COUNT: ClassVar[NumericField] = NumericField("pageCount", "pageCount") + """ + Number of pages in this report. + """ + + WORKSPACE: ClassVar[RelationField] = RelationField("workspace") + """ + TBC + """ + TILES: ClassVar[RelationField] = RelationField("tiles") + """ + TBC + """ + PAGES: ClassVar[RelationField] = RelationField("pages") + """ + TBC + """ + DATASET: ClassVar[RelationField] = RelationField("dataset") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "workspace_qualified_name", + "dataset_qualified_name", + "web_url", + "page_count", + "workspace", + "tiles", + "pages", + "dataset", + ] + + @property + def workspace_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.workspace_qualified_name + ) + + @workspace_qualified_name.setter + def workspace_qualified_name(self, workspace_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.workspace_qualified_name = workspace_qualified_name + + @property + def dataset_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.dataset_qualified_name + ) + + @dataset_qualified_name.setter + def dataset_qualified_name(self, dataset_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dataset_qualified_name = dataset_qualified_name + + @property + def web_url(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.web_url + + @web_url.setter + def web_url(self, web_url: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.web_url = web_url + + @property + def page_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.page_count + + @page_count.setter + def page_count(self, page_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.page_count = page_count + + @property + def workspace(self) -> Optional[PowerBIWorkspace]: + return None if self.attributes is None else self.attributes.workspace + + @workspace.setter + def workspace(self, workspace: Optional[PowerBIWorkspace]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.workspace = workspace + + @property + def tiles(self) -> Optional[list[PowerBITile]]: + return None if self.attributes is None else self.attributes.tiles + + @tiles.setter + def tiles(self, tiles: Optional[list[PowerBITile]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.tiles = tiles + + @property + def pages(self) -> Optional[list[PowerBIPage]]: + return None if self.attributes is None else self.attributes.pages + + @pages.setter + def pages(self, pages: Optional[list[PowerBIPage]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.pages = pages + + @property + def dataset(self) -> Optional[PowerBIDataset]: + return None if self.attributes is None else self.attributes.dataset + + @dataset.setter + def dataset(self, dataset: Optional[PowerBIDataset]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dataset = dataset + + class Attributes(PowerBI.Attributes): + workspace_qualified_name: Optional[str] = Field(default=None, description="") + dataset_qualified_name: Optional[str] = Field(default=None, description="") + web_url: Optional[str] = 
Field(default=None, description="") + page_count: Optional[int] = Field(default=None, description="") + workspace: Optional[PowerBIWorkspace] = Field( + default=None, description="" + ) # relationship + tiles: Optional[list[PowerBITile]] = Field( + default=None, description="" + ) # relationship + pages: Optional[list[PowerBIPage]] = Field( + default=None, description="" + ) # relationship + dataset: Optional[PowerBIDataset] = Field( + default=None, description="" + ) # relationship + + attributes: "PowerBIReport.Attributes" = Field( + default_factory=lambda: PowerBIReport.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .power_b_i_dataset import PowerBIDataset # noqa +from .power_b_i_page import PowerBIPage # noqa +from .power_b_i_tile import PowerBITile # noqa +from .power_b_i_workspace import PowerBIWorkspace # noqa diff --git a/pyatlan/model/assets/power_b_i_table.py b/pyatlan/model/assets/power_b_i_table.py new file mode 100644 index 000000000..2977637ef --- /dev/null +++ b/pyatlan/model/assets/power_b_i_table.py @@ -0,0 +1,222 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, NumericField, RelationField + +from .power_b_i import PowerBI + + +class PowerBITable(PowerBI): + """Description""" + + type_name: str = Field(default="PowerBITable", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "PowerBITable": + raise ValueError("must be PowerBITable") + return v + + def __setattr__(self, name, value): + if name in PowerBITable._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "workspaceQualifiedName", "workspaceQualifiedName" + ) + """ + Unique name of the workspace in which this table exists. + """ + DATASET_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "datasetQualifiedName", "datasetQualifiedName" + ) + """ + Unique name of the dataset in which this table exists. + """ + POWER_BI_TABLE_SOURCE_EXPRESSIONS: ClassVar[KeywordField] = KeywordField( + "powerBITableSourceExpressions", "powerBITableSourceExpressions" + ) + """ + Power Query M expressions for the table. + """ + POWER_BI_TABLE_COLUMN_COUNT: ClassVar[NumericField] = NumericField( + "powerBITableColumnCount", "powerBITableColumnCount" + ) + """ + Number of columns in this table. + """ + POWER_BI_TABLE_MEASURE_COUNT: ClassVar[NumericField] = NumericField( + "powerBITableMeasureCount", "powerBITableMeasureCount" + ) + """ + Number of measures in this table. 
+ """ + + COLUMNS: ClassVar[RelationField] = RelationField("columns") + """ + TBC + """ + MEASURES: ClassVar[RelationField] = RelationField("measures") + """ + TBC + """ + DATASET: ClassVar[RelationField] = RelationField("dataset") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "workspace_qualified_name", + "dataset_qualified_name", + "power_b_i_table_source_expressions", + "power_b_i_table_column_count", + "power_b_i_table_measure_count", + "columns", + "measures", + "dataset", + ] + + @property + def workspace_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.workspace_qualified_name + ) + + @workspace_qualified_name.setter + def workspace_qualified_name(self, workspace_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.workspace_qualified_name = workspace_qualified_name + + @property + def dataset_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.dataset_qualified_name + ) + + @dataset_qualified_name.setter + def dataset_qualified_name(self, dataset_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dataset_qualified_name = dataset_qualified_name + + @property + def power_b_i_table_source_expressions(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.power_b_i_table_source_expressions + ) + + @power_b_i_table_source_expressions.setter + def power_b_i_table_source_expressions( + self, power_b_i_table_source_expressions: Optional[set[str]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.power_b_i_table_source_expressions = ( + power_b_i_table_source_expressions + ) + + @property + def power_b_i_table_column_count(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.power_b_i_table_column_count + ) + + @power_b_i_table_column_count.setter + def power_b_i_table_column_count(self, power_b_i_table_column_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.power_b_i_table_column_count = power_b_i_table_column_count + + @property + def power_b_i_table_measure_count(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.power_b_i_table_measure_count + ) + + @power_b_i_table_measure_count.setter + def power_b_i_table_measure_count( + self, power_b_i_table_measure_count: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.power_b_i_table_measure_count = power_b_i_table_measure_count + + @property + def columns(self) -> Optional[list[PowerBIColumn]]: + return None if self.attributes is None else self.attributes.columns + + @columns.setter + def columns(self, columns: Optional[list[PowerBIColumn]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.columns = columns + + @property + def measures(self) -> Optional[list[PowerBIMeasure]]: + return None if self.attributes is None else self.attributes.measures + + @measures.setter + def measures(self, measures: Optional[list[PowerBIMeasure]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.measures = measures + + @property + def dataset(self) -> Optional[PowerBIDataset]: + return None if self.attributes is None else 
self.attributes.dataset + + @dataset.setter + def dataset(self, dataset: Optional[PowerBIDataset]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dataset = dataset + + class Attributes(PowerBI.Attributes): + workspace_qualified_name: Optional[str] = Field(default=None, description="") + dataset_qualified_name: Optional[str] = Field(default=None, description="") + power_b_i_table_source_expressions: Optional[set[str]] = Field( + default=None, description="" + ) + power_b_i_table_column_count: Optional[int] = Field( + default=None, description="" + ) + power_b_i_table_measure_count: Optional[int] = Field( + default=None, description="" + ) + columns: Optional[list[PowerBIColumn]] = Field( + default=None, description="" + ) # relationship + measures: Optional[list[PowerBIMeasure]] = Field( + default=None, description="" + ) # relationship + dataset: Optional[PowerBIDataset] = Field( + default=None, description="" + ) # relationship + + attributes: "PowerBITable.Attributes" = Field( + default_factory=lambda: PowerBITable.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .power_b_i_column import PowerBIColumn # noqa +from .power_b_i_dataset import PowerBIDataset # noqa +from .power_b_i_measure import PowerBIMeasure # noqa diff --git a/pyatlan/model/assets/power_b_i_tile.py b/pyatlan/model/assets/power_b_i_tile.py new file mode 100644 index 000000000..de9909899 --- /dev/null +++ b/pyatlan/model/assets/power_b_i_tile.py @@ -0,0 +1,146 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, RelationField + +from .power_b_i import PowerBI + + +class PowerBITile(PowerBI): + """Description""" + + type_name: str = Field(default="PowerBITile", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "PowerBITile": + raise ValueError("must be PowerBITile") + return v + + def __setattr__(self, name, value): + if name in PowerBITile._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + WORKSPACE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "workspaceQualifiedName", "workspaceQualifiedName" + ) + """ + Unique name of the workspace in which this tile exists. + """ + DASHBOARD_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "dashboardQualifiedName", "dashboardQualifiedName" + ) + """ + Unique name of the dashboard in which this tile is pinned. 
+ """ + + REPORT: ClassVar[RelationField] = RelationField("report") + """ + TBC + """ + DATASET: ClassVar[RelationField] = RelationField("dataset") + """ + TBC + """ + DASHBOARD: ClassVar[RelationField] = RelationField("dashboard") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "workspace_qualified_name", + "dashboard_qualified_name", + "report", + "dataset", + "dashboard", + ] + + @property + def workspace_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.workspace_qualified_name + ) + + @workspace_qualified_name.setter + def workspace_qualified_name(self, workspace_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.workspace_qualified_name = workspace_qualified_name + + @property + def dashboard_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.dashboard_qualified_name + ) + + @dashboard_qualified_name.setter + def dashboard_qualified_name(self, dashboard_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dashboard_qualified_name = dashboard_qualified_name + + @property + def report(self) -> Optional[PowerBIReport]: + return None if self.attributes is None else self.attributes.report + + @report.setter + def report(self, report: Optional[PowerBIReport]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.report = report + + @property + def dataset(self) -> Optional[PowerBIDataset]: + return None if self.attributes is None else self.attributes.dataset + + @dataset.setter + def dataset(self, dataset: Optional[PowerBIDataset]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dataset = dataset + + @property + def dashboard(self) -> Optional[PowerBIDashboard]: + return None if self.attributes is None else self.attributes.dashboard + + @dashboard.setter + def dashboard(self, dashboard: Optional[PowerBIDashboard]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dashboard = dashboard + + class Attributes(PowerBI.Attributes): + workspace_qualified_name: Optional[str] = Field(default=None, description="") + dashboard_qualified_name: Optional[str] = Field(default=None, description="") + report: Optional[PowerBIReport] = Field( + default=None, description="" + ) # relationship + dataset: Optional[PowerBIDataset] = Field( + default=None, description="" + ) # relationship + dashboard: Optional[PowerBIDashboard] = Field( + default=None, description="" + ) # relationship + + attributes: "PowerBITile.Attributes" = Field( + default_factory=lambda: PowerBITile.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .power_b_i_dashboard import PowerBIDashboard # noqa +from .power_b_i_dataset import PowerBIDataset # noqa +from .power_b_i_report import PowerBIReport # noqa diff --git a/pyatlan/model/assets/power_b_i_workspace.py b/pyatlan/model/assets/power_b_i_workspace.py new file mode 100644 index 000000000..b2293147f --- /dev/null +++ b/pyatlan/model/assets/power_b_i_workspace.py @@ -0,0 +1,205 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, NumericField, RelationField + +from .power_b_i import PowerBI + + +class PowerBIWorkspace(PowerBI): + """Description""" + + type_name: str = Field(default="PowerBIWorkspace", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "PowerBIWorkspace": + raise ValueError("must be PowerBIWorkspace") + return v + + def __setattr__(self, name, value): + if name in PowerBIWorkspace._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + WEB_URL: ClassVar[KeywordField] = KeywordField("webUrl", "webUrl") + """ + Deprecated. + """ + REPORT_COUNT: ClassVar[NumericField] = NumericField("reportCount", "reportCount") + """ + Number of reports in this workspace. + """ + DASHBOARD_COUNT: ClassVar[NumericField] = NumericField( + "dashboardCount", "dashboardCount" + ) + """ + Number of dashboards in this workspace. + """ + DATASET_COUNT: ClassVar[NumericField] = NumericField("datasetCount", "datasetCount") + """ + Number of datasets in this workspace. + """ + DATAFLOW_COUNT: ClassVar[NumericField] = NumericField( + "dataflowCount", "dataflowCount" + ) + """ + Number of dataflows in this workspace. + """ + + REPORTS: ClassVar[RelationField] = RelationField("reports") + """ + TBC + """ + DATASETS: ClassVar[RelationField] = RelationField("datasets") + """ + TBC + """ + DASHBOARDS: ClassVar[RelationField] = RelationField("dashboards") + """ + TBC + """ + DATAFLOWS: ClassVar[RelationField] = RelationField("dataflows") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "web_url", + "report_count", + "dashboard_count", + "dataset_count", + "dataflow_count", + "reports", + "datasets", + "dashboards", + "dataflows", + ] + + @property + def web_url(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.web_url + + @web_url.setter + def web_url(self, web_url: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.web_url = web_url + + @property + def report_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.report_count + + @report_count.setter + def report_count(self, report_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.report_count = report_count + + @property + def dashboard_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.dashboard_count + + @dashboard_count.setter + def dashboard_count(self, dashboard_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dashboard_count = dashboard_count + + @property + def dataset_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.dataset_count + + @dataset_count.setter + def dataset_count(self, dataset_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dataset_count = dataset_count + + @property + def dataflow_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.dataflow_count + + @dataflow_count.setter + def dataflow_count(self, dataflow_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dataflow_count = 
dataflow_count + + @property + def reports(self) -> Optional[list[PowerBIReport]]: + return None if self.attributes is None else self.attributes.reports + + @reports.setter + def reports(self, reports: Optional[list[PowerBIReport]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.reports = reports + + @property + def datasets(self) -> Optional[list[PowerBIDataset]]: + return None if self.attributes is None else self.attributes.datasets + + @datasets.setter + def datasets(self, datasets: Optional[list[PowerBIDataset]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.datasets = datasets + + @property + def dashboards(self) -> Optional[list[PowerBIDashboard]]: + return None if self.attributes is None else self.attributes.dashboards + + @dashboards.setter + def dashboards(self, dashboards: Optional[list[PowerBIDashboard]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dashboards = dashboards + + @property + def dataflows(self) -> Optional[list[PowerBIDataflow]]: + return None if self.attributes is None else self.attributes.dataflows + + @dataflows.setter + def dataflows(self, dataflows: Optional[list[PowerBIDataflow]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dataflows = dataflows + + class Attributes(PowerBI.Attributes): + web_url: Optional[str] = Field(default=None, description="") + report_count: Optional[int] = Field(default=None, description="") + dashboard_count: Optional[int] = Field(default=None, description="") + dataset_count: Optional[int] = Field(default=None, description="") + dataflow_count: Optional[int] = Field(default=None, description="") + reports: Optional[list[PowerBIReport]] = Field( + default=None, description="" + ) # relationship + datasets: Optional[list[PowerBIDataset]] = Field( + default=None, description="" + ) # relationship + dashboards: Optional[list[PowerBIDashboard]] = Field( + default=None, description="" + ) # relationship + dataflows: Optional[list[PowerBIDataflow]] = Field( + default=None, description="" + ) # relationship + + attributes: "PowerBIWorkspace.Attributes" = Field( + default_factory=lambda: PowerBIWorkspace.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .power_b_i_dashboard import PowerBIDashboard # noqa +from .power_b_i_dataflow import PowerBIDataflow # noqa +from .power_b_i_dataset import PowerBIDataset # noqa +from .power_b_i_report import PowerBIReport # noqa diff --git a/pyatlan/model/assets/asset40.py b/pyatlan/model/assets/preset.py similarity index 88% rename from pyatlan/model/assets/asset40.py rename to pyatlan/model/assets/preset.py index 060e68d98..2fee62f52 100644 --- a/pyatlan/model/assets/asset40.py +++ b/pyatlan/model/assets/preset.py @@ -6,17 +6,17 @@ from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.fields.atlan_fields import KeywordTextField, NumericField -from .asset19 import BI +from .b_i import BI class Preset(BI): """Description""" - type_name: str = Field("Preset", allow_mutation=False) + type_name: str = Field(default="Preset", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -122,17 +122,13 @@ def preset_dashboard_qualified_name( ) class Attributes(BI.Attributes): - preset_workspace_id: Optional[int] = Field( - None, description="", alias="presetWorkspaceId" - ) + preset_workspace_id: Optional[int] = Field(default=None, description="") preset_workspace_qualified_name: Optional[str] = Field( - None, description="", alias="presetWorkspaceQualifiedName" - ) - preset_dashboard_id: Optional[int] = Field( - None, description="", alias="presetDashboardId" + default=None, description="" ) + preset_dashboard_id: Optional[int] = Field(default=None, description="") preset_dashboard_qualified_name: Optional[str] = Field( - None, description="", alias="presetDashboardQualifiedName" + default=None, description="" ) attributes: "Preset.Attributes" = Field( @@ -140,6 +136,3 @@ class Attributes(BI.Attributes): description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) - - -Preset.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/preset_chart.py b/pyatlan/model/assets/preset_chart.py new file mode 100644 index 000000000..59e8b9b74 --- /dev/null +++ b/pyatlan/model/assets/preset_chart.py @@ -0,0 +1,161 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.enums import AtlanConnectorType +from pyatlan.model.fields.atlan_fields import KeywordField, RelationField, TextField +from pyatlan.utils import init_guid, validate_required_fields + +from .preset import Preset + + +class PresetChart(Preset): + """Description""" + + @classmethod + # @validate_arguments() + @init_guid + def create(cls, *, name: str, preset_dashboard_qualified_name: str) -> PresetChart: + validate_required_fields( + ["name", "preset_dashboard_qualified_name"], + [name, preset_dashboard_qualified_name], + ) + attributes = PresetChart.Attributes.create( + name=name, preset_dashboard_qualified_name=preset_dashboard_qualified_name + ) + return cls(attributes=attributes) + + type_name: str = Field(default="PresetChart", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "PresetChart": + raise ValueError("must be PresetChart") + return v + + def __setattr__(self, name, value): + if name in PresetChart._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + PRESET_CHART_DESCRIPTION_MARKDOWN: ClassVar[TextField] = TextField( + "presetChartDescriptionMarkdown", "presetChartDescriptionMarkdown" + ) + """ + + """ + PRESET_CHART_FORM_DATA: ClassVar[KeywordField] = KeywordField( + "presetChartFormData", "presetChartFormData" + ) + """ + + """ + + PRESET_DASHBOARD: ClassVar[RelationField] = RelationField("presetDashboard") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "preset_chart_description_markdown", + "preset_chart_form_data", + "preset_dashboard", + ] + + @property + def preset_chart_description_markdown(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.preset_chart_description_markdown + ) + + @preset_chart_description_markdown.setter + def preset_chart_description_markdown( + self, preset_chart_description_markdown: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_chart_description_markdown = ( + preset_chart_description_markdown + ) + + @property + def preset_chart_form_data(self) -> Optional[dict[str, str]]: + return ( + None if self.attributes is None else self.attributes.preset_chart_form_data + ) + + @preset_chart_form_data.setter + def preset_chart_form_data(self, preset_chart_form_data: Optional[dict[str, str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_chart_form_data = preset_chart_form_data + + @property + def preset_dashboard(self) -> Optional[PresetDashboard]: + return None if self.attributes is None else self.attributes.preset_dashboard + + @preset_dashboard.setter + def preset_dashboard(self, preset_dashboard: Optional[PresetDashboard]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_dashboard = preset_dashboard + + class Attributes(Preset.Attributes): + preset_chart_description_markdown: Optional[str] = Field( + default=None, description="" + ) + preset_chart_form_data: Optional[dict[str, str]] = Field( + default=None, description="" + ) + preset_dashboard: Optional[PresetDashboard] = Field( + default=None, description="" + ) # relationship + + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, *, name: str, preset_dashboard_qualified_name: str + ) -> PresetChart.Attributes: + 
validate_required_fields( + ["name", "preset_dashboard_qualified_name"], + [name, preset_dashboard_qualified_name], + ) + + # Split the preset_dashboard_qualified_name to extract necessary information + fields = preset_dashboard_qualified_name.split("/") + if len(fields) != 5: + raise ValueError("Invalid preset_dashboard_qualified_name") + + try: + connector_type = AtlanConnectorType(fields[1]) # type:ignore + except ValueError as e: + raise ValueError("Invalid preset_dashboard_qualified_name") from e + + return PresetChart.Attributes( + name=name, + preset_dashboard_qualified_name=preset_dashboard_qualified_name, + connection_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}", + qualified_name=f"{preset_dashboard_qualified_name}/{name}", + connector_name=connector_type.value, + preset_dashboard=PresetDashboard.ref_by_qualified_name( + preset_dashboard_qualified_name + ), + ) + + attributes: "PresetChart.Attributes" = Field( + default_factory=lambda: PresetChart.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .preset_dashboard import PresetDashboard # noqa diff --git a/pyatlan/model/assets/preset_dashboard.py b/pyatlan/model/assets/preset_dashboard.py new file mode 100644 index 000000000..0e951338c --- /dev/null +++ b/pyatlan/model/assets/preset_dashboard.py @@ -0,0 +1,322 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.enums import AtlanConnectorType +from pyatlan.model.fields.atlan_fields import ( + BooleanField, + KeywordField, + KeywordTextStemmedField, + NumericField, + RelationField, +) +from pyatlan.utils import init_guid, validate_required_fields + +from .preset import Preset + + +class PresetDashboard(Preset): + """Description""" + + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, *, name: str, preset_workspace_qualified_name: str + ) -> PresetDashboard: + validate_required_fields( + ["name", "preset_workspace_qualified_name"], + [name, preset_workspace_qualified_name], + ) + attributes = PresetDashboard.Attributes.create( + name=name, preset_workspace_qualified_name=preset_workspace_qualified_name + ) + return cls(attributes=attributes) + + type_name: str = Field(default="PresetDashboard", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "PresetDashboard": + raise ValueError("must be PresetDashboard") + return v + + def __setattr__(self, name, value): + if name in PresetDashboard._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + PRESET_DASHBOARD_CHANGED_BY_NAME: ClassVar[ + KeywordTextStemmedField + ] = KeywordTextStemmedField( + "presetDashboardChangedByName", + "presetDashboardChangedByName.keyword", + "presetDashboardChangedByName", + "presetDashboardChangedByName.stemmed", + ) + """ + + """ + PRESET_DASHBOARD_CHANGED_BY_URL: ClassVar[KeywordField] = KeywordField( + "presetDashboardChangedByURL", "presetDashboardChangedByURL" + ) + """ + + """ + PRESET_DASHBOARD_IS_MANAGED_EXTERNALLY: ClassVar[BooleanField] = BooleanField( + "presetDashboardIsManagedExternally", "presetDashboardIsManagedExternally" + ) + """ + + """ + PRESET_DASHBOARD_IS_PUBLISHED: ClassVar[BooleanField] = BooleanField( + "presetDashboardIsPublished", 
"presetDashboardIsPublished" + ) + """ + + """ + PRESET_DASHBOARD_THUMBNAIL_URL: ClassVar[KeywordField] = KeywordField( + "presetDashboardThumbnailURL", "presetDashboardThumbnailURL" + ) + """ + + """ + PRESET_DASHBOARD_CHART_COUNT: ClassVar[NumericField] = NumericField( + "presetDashboardChartCount", "presetDashboardChartCount" + ) + """ + + """ + + PRESET_DATASETS: ClassVar[RelationField] = RelationField("presetDatasets") + """ + TBC + """ + PRESET_CHARTS: ClassVar[RelationField] = RelationField("presetCharts") + """ + TBC + """ + PRESET_WORKSPACE: ClassVar[RelationField] = RelationField("presetWorkspace") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "preset_dashboard_changed_by_name", + "preset_dashboard_changed_by_url", + "preset_dashboard_is_managed_externally", + "preset_dashboard_is_published", + "preset_dashboard_thumbnail_url", + "preset_dashboard_chart_count", + "preset_datasets", + "preset_charts", + "preset_workspace", + ] + + @property + def preset_dashboard_changed_by_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.preset_dashboard_changed_by_name + ) + + @preset_dashboard_changed_by_name.setter + def preset_dashboard_changed_by_name( + self, preset_dashboard_changed_by_name: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_dashboard_changed_by_name = ( + preset_dashboard_changed_by_name + ) + + @property + def preset_dashboard_changed_by_url(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.preset_dashboard_changed_by_url + ) + + @preset_dashboard_changed_by_url.setter + def preset_dashboard_changed_by_url( + self, preset_dashboard_changed_by_url: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_dashboard_changed_by_url = ( + preset_dashboard_changed_by_url + ) + + @property + def preset_dashboard_is_managed_externally(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.preset_dashboard_is_managed_externally + ) + + @preset_dashboard_is_managed_externally.setter + def preset_dashboard_is_managed_externally( + self, preset_dashboard_is_managed_externally: Optional[bool] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_dashboard_is_managed_externally = ( + preset_dashboard_is_managed_externally + ) + + @property + def preset_dashboard_is_published(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.preset_dashboard_is_published + ) + + @preset_dashboard_is_published.setter + def preset_dashboard_is_published( + self, preset_dashboard_is_published: Optional[bool] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_dashboard_is_published = preset_dashboard_is_published + + @property + def preset_dashboard_thumbnail_url(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.preset_dashboard_thumbnail_url + ) + + @preset_dashboard_thumbnail_url.setter + def preset_dashboard_thumbnail_url( + self, preset_dashboard_thumbnail_url: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_dashboard_thumbnail_url = preset_dashboard_thumbnail_url + + @property + def preset_dashboard_chart_count(self) -> Optional[int]: + return ( + None + if self.attributes is None + else 
self.attributes.preset_dashboard_chart_count + ) + + @preset_dashboard_chart_count.setter + def preset_dashboard_chart_count(self, preset_dashboard_chart_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_dashboard_chart_count = preset_dashboard_chart_count + + @property + def preset_datasets(self) -> Optional[list[PresetDataset]]: + return None if self.attributes is None else self.attributes.preset_datasets + + @preset_datasets.setter + def preset_datasets(self, preset_datasets: Optional[list[PresetDataset]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_datasets = preset_datasets + + @property + def preset_charts(self) -> Optional[list[PresetChart]]: + return None if self.attributes is None else self.attributes.preset_charts + + @preset_charts.setter + def preset_charts(self, preset_charts: Optional[list[PresetChart]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_charts = preset_charts + + @property + def preset_workspace(self) -> Optional[PresetWorkspace]: + return None if self.attributes is None else self.attributes.preset_workspace + + @preset_workspace.setter + def preset_workspace(self, preset_workspace: Optional[PresetWorkspace]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_workspace = preset_workspace + + class Attributes(Preset.Attributes): + preset_dashboard_changed_by_name: Optional[str] = Field( + default=None, description="" + ) + preset_dashboard_changed_by_url: Optional[str] = Field( + default=None, description="" + ) + preset_dashboard_is_managed_externally: Optional[bool] = Field( + default=None, description="" + ) + preset_dashboard_is_published: Optional[bool] = Field( + default=None, description="" + ) + preset_dashboard_thumbnail_url: Optional[str] = Field( + default=None, description="" + ) + preset_dashboard_chart_count: Optional[int] = Field( + default=None, description="" + ) + preset_datasets: Optional[list[PresetDataset]] = Field( + default=None, description="" + ) # relationship + preset_charts: Optional[list[PresetChart]] = Field( + default=None, description="" + ) # relationship + preset_workspace: Optional[PresetWorkspace] = Field( + default=None, description="" + ) # relationship + + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, *, name: str, preset_workspace_qualified_name: str + ) -> PresetDashboard.Attributes: + validate_required_fields( + ["name", "preset_workspace_qualified_name"], + [name, preset_workspace_qualified_name], + ) + + # Split the preset_workspace_qualified_name to extract necessary information + fields = preset_workspace_qualified_name.split("/") + if len(fields) != 4: + raise ValueError("Invalid preset_workspace_qualified_name") + + try: + connector_type = AtlanConnectorType(fields[1]) # type:ignore + except ValueError as e: + raise ValueError("Invalid preset_workspace_qualified_name") from e + + return PresetDashboard.Attributes( + name=name, + preset_workspace_qualified_name=preset_workspace_qualified_name, + connection_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}", + qualified_name=f"{preset_workspace_qualified_name}/{name}", + connector_name=connector_type.value, + preset_workspace=PresetWorkspace.ref_by_qualified_name( + preset_workspace_qualified_name + ), + ) + + attributes: "PresetDashboard.Attributes" = Field( + default_factory=lambda: PresetDashboard.Attributes(), + description="Map of 
attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .preset_chart import PresetChart # noqa +from .preset_dataset import PresetDataset # noqa +from .preset_workspace import PresetWorkspace # noqa diff --git a/pyatlan/model/assets/preset_dataset.py b/pyatlan/model/assets/preset_dataset.py new file mode 100644 index 000000000..26d9df9b9 --- /dev/null +++ b/pyatlan/model/assets/preset_dataset.py @@ -0,0 +1,185 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.enums import AtlanConnectorType +from pyatlan.model.fields.atlan_fields import ( + KeywordField, + KeywordTextStemmedField, + NumericField, + RelationField, +) +from pyatlan.utils import init_guid, validate_required_fields + +from .preset import Preset + + +class PresetDataset(Preset): + """Description""" + + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, *, name: str, preset_dashboard_qualified_name: str + ) -> PresetDataset: + validate_required_fields( + ["name", "preset_dashboard_qualified_name"], + [name, preset_dashboard_qualified_name], + ) + attributes = PresetDataset.Attributes.create( + name=name, preset_dashboard_qualified_name=preset_dashboard_qualified_name + ) + return cls(attributes=attributes) + + type_name: str = Field(default="PresetDataset", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "PresetDataset": + raise ValueError("must be PresetDataset") + return v + + def __setattr__(self, name, value): + if name in PresetDataset._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + PRESET_DATASET_DATASOURCE_NAME: ClassVar[ + KeywordTextStemmedField + ] = KeywordTextStemmedField( + "presetDatasetDatasourceName", + "presetDatasetDatasourceName.keyword", + "presetDatasetDatasourceName", + "presetDatasetDatasourceName.stemmed", + ) + """ + + """ + PRESET_DATASET_ID: ClassVar[NumericField] = NumericField( + "presetDatasetId", "presetDatasetId" + ) + """ + + """ + PRESET_DATASET_TYPE: ClassVar[KeywordField] = KeywordField( + "presetDatasetType", "presetDatasetType" + ) + """ + + """ + + PRESET_DASHBOARD: ClassVar[RelationField] = RelationField("presetDashboard") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "preset_dataset_datasource_name", + "preset_dataset_id", + "preset_dataset_type", + "preset_dashboard", + ] + + @property + def preset_dataset_datasource_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.preset_dataset_datasource_name + ) + + @preset_dataset_datasource_name.setter + def preset_dataset_datasource_name( + self, preset_dataset_datasource_name: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_dataset_datasource_name = preset_dataset_datasource_name + + @property + def preset_dataset_id(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.preset_dataset_id + + @preset_dataset_id.setter + def preset_dataset_id(self, preset_dataset_id: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_dataset_id = preset_dataset_id + + @property + def preset_dataset_type(self) -> Optional[str]: + 
return None if self.attributes is None else self.attributes.preset_dataset_type + + @preset_dataset_type.setter + def preset_dataset_type(self, preset_dataset_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_dataset_type = preset_dataset_type + + @property + def preset_dashboard(self) -> Optional[PresetDashboard]: + return None if self.attributes is None else self.attributes.preset_dashboard + + @preset_dashboard.setter + def preset_dashboard(self, preset_dashboard: Optional[PresetDashboard]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_dashboard = preset_dashboard + + class Attributes(Preset.Attributes): + preset_dataset_datasource_name: Optional[str] = Field( + default=None, description="" + ) + preset_dataset_id: Optional[int] = Field(default=None, description="") + preset_dataset_type: Optional[str] = Field(default=None, description="") + preset_dashboard: Optional[PresetDashboard] = Field( + default=None, description="" + ) # relationship + + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, *, name: str, preset_dashboard_qualified_name: str + ) -> PresetDataset.Attributes: + validate_required_fields( + ["name", "preset_dashboard_qualified_name"], + [name, preset_dashboard_qualified_name], + ) + + # Split the preset_dashboard_qualified_name to extract necessary information + fields = preset_dashboard_qualified_name.split("/") + if len(fields) != 5: + raise ValueError("Invalid preset_dashboard_qualified_name") + + try: + connector_type = AtlanConnectorType(fields[1]) # type:ignore + except ValueError as e: + raise ValueError("Invalid preset_dashboard_qualified_name") from e + + return PresetDataset.Attributes( + name=name, + preset_dashboard_qualified_name=preset_dashboard_qualified_name, + connection_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}", + qualified_name=f"{preset_dashboard_qualified_name}/{name}", + connector_name=connector_type.value, + preset_dashboard=PresetDashboard.ref_by_qualified_name( + preset_dashboard_qualified_name + ), + ) + + attributes: "PresetDataset.Attributes" = Field( + default_factory=lambda: PresetDataset.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .preset_dashboard import PresetDashboard # noqa diff --git a/pyatlan/model/assets/preset_workspace.py b/pyatlan/model/assets/preset_workspace.py new file mode 100644 index 000000000..7bbb6cda2 --- /dev/null +++ b/pyatlan/model/assets/preset_workspace.py @@ -0,0 +1,334 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
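For orientation, a minimal usage sketch of the Preset `create()` helpers defined above, assuming illustrative qualified names (the tenant, epoch, workspace and dashboard segments below are made-up values, not taken from this change). A Preset dashboard qualified name is expected to have five `/`-separated parts, the first three of which form the connection qualified name:

    from pyatlan.model.assets import PresetDataset

    # Assumed example segments: "default" tenant, "preset" connector, epoch "1700000000",
    # workspace "wksp", dashboard "dash".
    dataset = PresetDataset.create(
        name="my-dataset",
        preset_dashboard_qualified_name="default/preset/1700000000/wksp/dash",
    )

    # Attributes.create() derives the remaining identity fields from that name:
    assert dataset.connection_qualified_name == "default/preset/1700000000"
    assert dataset.qualified_name == "default/preset/1700000000/wksp/dash/my-dataset"
    assert dataset.connector_name == "preset"
    # It also links the dataset to its parent dashboard by reference
    # (PresetDashboard.ref_by_qualified_name), so the relationship is populated on save.

A qualified name that does not have exactly five parts, or whose second segment is not a recognised connector, raises `ValueError`, per the validation at the top of `Attributes.create()`.
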
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.enums import AtlanConnectorType +from pyatlan.model.fields.atlan_fields import ( + BooleanField, + KeywordField, + KeywordTextField, + NumericField, + RelationField, +) +from pyatlan.utils import init_guid, validate_required_fields + +from .preset import Preset + + +class PresetWorkspace(Preset): + """Description""" + + @classmethod + # @validate_arguments() + @init_guid + def create(cls, *, name: str, connection_qualified_name: str) -> PresetWorkspace: + validate_required_fields( + ["name", "connection_qualified_name"], [name, connection_qualified_name] + ) + attributes = PresetWorkspace.Attributes.create( + name=name, connection_qualified_name=connection_qualified_name + ) + return cls(attributes=attributes) + + type_name: str = Field(default="PresetWorkspace", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "PresetWorkspace": + raise ValueError("must be PresetWorkspace") + return v + + def __setattr__(self, name, value): + if name in PresetWorkspace._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + PRESET_WORKSPACE_PUBLIC_DASHBOARDS_ALLOWED: ClassVar[BooleanField] = BooleanField( + "presetWorkspacePublicDashboardsAllowed", + "presetWorkspacePublicDashboardsAllowed", + ) + """ + + """ + PRESET_WORKSPACE_CLUSTER_ID: ClassVar[NumericField] = NumericField( + "presetWorkspaceClusterId", "presetWorkspaceClusterId" + ) + """ + + """ + PRESET_WORKSPACE_HOSTNAME: ClassVar[KeywordTextField] = KeywordTextField( + "presetWorkspaceHostname", + "presetWorkspaceHostname", + "presetWorkspaceHostname.text", + ) + """ + + """ + PRESET_WORKSPACE_IS_IN_MAINTENANCE_MODE: ClassVar[BooleanField] = BooleanField( + "presetWorkspaceIsInMaintenanceMode", "presetWorkspaceIsInMaintenanceMode" + ) + """ + + """ + PRESET_WORKSPACE_REGION: ClassVar[KeywordTextField] = KeywordTextField( + "presetWorkspaceRegion", "presetWorkspaceRegion", "presetWorkspaceRegion.text" + ) + """ + + """ + PRESET_WORKSPACE_STATUS: ClassVar[KeywordField] = KeywordField( + "presetWorkspaceStatus", "presetWorkspaceStatus" + ) + """ + + """ + PRESET_WORKSPACE_DEPLOYMENT_ID: ClassVar[NumericField] = NumericField( + "presetWorkspaceDeploymentId", "presetWorkspaceDeploymentId" + ) + """ + + """ + PRESET_WORKSPACE_DASHBOARD_COUNT: ClassVar[NumericField] = NumericField( + "presetWorkspaceDashboardCount", "presetWorkspaceDashboardCount" + ) + """ + + """ + PRESET_WORKSPACE_DATASET_COUNT: ClassVar[NumericField] = NumericField( + "presetWorkspaceDatasetCount", "presetWorkspaceDatasetCount" + ) + """ + + """ + + PRESET_DASHBOARDS: ClassVar[RelationField] = RelationField("presetDashboards") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "preset_workspace_public_dashboards_allowed", + "preset_workspace_cluster_id", + "preset_workspace_hostname", + "preset_workspace_is_in_maintenance_mode", + "preset_workspace_region", + "preset_workspace_status", + "preset_workspace_deployment_id", + "preset_workspace_dashboard_count", + "preset_workspace_dataset_count", + "preset_dashboards", + ] + + @property + def preset_workspace_public_dashboards_allowed(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.preset_workspace_public_dashboards_allowed + ) + + @preset_workspace_public_dashboards_allowed.setter + def 
preset_workspace_public_dashboards_allowed( + self, preset_workspace_public_dashboards_allowed: Optional[bool] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_workspace_public_dashboards_allowed = ( + preset_workspace_public_dashboards_allowed + ) + + @property + def preset_workspace_cluster_id(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.preset_workspace_cluster_id + ) + + @preset_workspace_cluster_id.setter + def preset_workspace_cluster_id(self, preset_workspace_cluster_id: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_workspace_cluster_id = preset_workspace_cluster_id + + @property + def preset_workspace_hostname(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.preset_workspace_hostname + ) + + @preset_workspace_hostname.setter + def preset_workspace_hostname(self, preset_workspace_hostname: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_workspace_hostname = preset_workspace_hostname + + @property + def preset_workspace_is_in_maintenance_mode(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.preset_workspace_is_in_maintenance_mode + ) + + @preset_workspace_is_in_maintenance_mode.setter + def preset_workspace_is_in_maintenance_mode( + self, preset_workspace_is_in_maintenance_mode: Optional[bool] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_workspace_is_in_maintenance_mode = ( + preset_workspace_is_in_maintenance_mode + ) + + @property + def preset_workspace_region(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.preset_workspace_region + ) + + @preset_workspace_region.setter + def preset_workspace_region(self, preset_workspace_region: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_workspace_region = preset_workspace_region + + @property + def preset_workspace_status(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.preset_workspace_status + ) + + @preset_workspace_status.setter + def preset_workspace_status(self, preset_workspace_status: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_workspace_status = preset_workspace_status + + @property + def preset_workspace_deployment_id(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.preset_workspace_deployment_id + ) + + @preset_workspace_deployment_id.setter + def preset_workspace_deployment_id( + self, preset_workspace_deployment_id: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_workspace_deployment_id = preset_workspace_deployment_id + + @property + def preset_workspace_dashboard_count(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.preset_workspace_dashboard_count + ) + + @preset_workspace_dashboard_count.setter + def preset_workspace_dashboard_count( + self, preset_workspace_dashboard_count: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_workspace_dashboard_count = ( + preset_workspace_dashboard_count + ) + + @property + def preset_workspace_dataset_count(self) 
-> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.preset_workspace_dataset_count + ) + + @preset_workspace_dataset_count.setter + def preset_workspace_dataset_count( + self, preset_workspace_dataset_count: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_workspace_dataset_count = preset_workspace_dataset_count + + @property + def preset_dashboards(self) -> Optional[list[PresetDashboard]]: + return None if self.attributes is None else self.attributes.preset_dashboards + + @preset_dashboards.setter + def preset_dashboards(self, preset_dashboards: Optional[list[PresetDashboard]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.preset_dashboards = preset_dashboards + + class Attributes(Preset.Attributes): + preset_workspace_public_dashboards_allowed: Optional[bool] = Field( + default=None, description="" + ) + preset_workspace_cluster_id: Optional[int] = Field(default=None, description="") + preset_workspace_hostname: Optional[str] = Field(default=None, description="") + preset_workspace_is_in_maintenance_mode: Optional[bool] = Field( + default=None, description="" + ) + preset_workspace_region: Optional[str] = Field(default=None, description="") + preset_workspace_status: Optional[str] = Field(default=None, description="") + preset_workspace_deployment_id: Optional[int] = Field( + default=None, description="" + ) + preset_workspace_dashboard_count: Optional[int] = Field( + default=None, description="" + ) + preset_workspace_dataset_count: Optional[int] = Field( + default=None, description="" + ) + preset_dashboards: Optional[list[PresetDashboard]] = Field( + default=None, description="" + ) # relationship + + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, *, name: str, connection_qualified_name: str + ) -> PresetWorkspace.Attributes: + validate_required_fields( + ["name", "connection_qualified_name"], [name, connection_qualified_name] + ) + + # Split the connection_qualified_name to extract necessary information + fields = connection_qualified_name.split("/") + if len(fields) != 3: + raise ValueError("Invalid connection_qualified_name") + + try: + connector_type = AtlanConnectorType(fields[1]) # type:ignore + except ValueError as e: + raise ValueError("Invalid connection_qualified_name") from e + + return PresetWorkspace.Attributes( + name=name, + qualified_name=f"{connection_qualified_name}/{name}", + connection_qualified_name=connection_qualified_name, + connector_name=connector_type.value, + ) + + attributes: "PresetWorkspace.Attributes" = Field( + default_factory=lambda: PresetWorkspace.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .preset_dashboard import PresetDashboard # noqa diff --git a/pyatlan/model/assets/procedure.py b/pyatlan/model/assets/procedure.py new file mode 100644 index 000000000..be343e014 --- /dev/null +++ b/pyatlan/model/assets/procedure.py @@ -0,0 +1,80 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
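At the top of the Preset hierarchy, `PresetWorkspace.create()` above needs only the connection qualified name, which it validates as three `/`-separated parts before deriving the workspace's own qualified name. A small sketch with assumed values:

    from pyatlan.model.assets import PresetWorkspace

    ws = PresetWorkspace.create(
        name="wksp",
        connection_qualified_name="default/preset/1700000000",  # assumed example connection
    )
    assert ws.qualified_name == "default/preset/1700000000/wksp"
    assert ws.connector_name == "preset"
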
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, RelationField + +from .s_q_l import SQL + + +class Procedure(SQL): + """Description""" + + type_name: str = Field(default="Procedure", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "Procedure": + raise ValueError("must be Procedure") + return v + + def __setattr__(self, name, value): + if name in Procedure._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + DEFINITION: ClassVar[KeywordField] = KeywordField("definition", "definition") + """ + SQL definition of the procedure. + """ + + ATLAN_SCHEMA: ClassVar[RelationField] = RelationField("atlanSchema") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "definition", + "atlan_schema", + ] + + @property + def definition(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.definition + + @definition.setter + def definition(self, definition: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.definition = definition + + @property + def atlan_schema(self) -> Optional[Schema]: + return None if self.attributes is None else self.attributes.atlan_schema + + @atlan_schema.setter + def atlan_schema(self, atlan_schema: Optional[Schema]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.atlan_schema = atlan_schema + + class Attributes(SQL.Attributes): + definition: Optional[str] = Field(default=None, description="") + atlan_schema: Optional[Schema] = Field( + default=None, description="" + ) # relationship + + attributes: "Procedure.Attributes" = Field( + default_factory=lambda: Procedure.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .schema import Schema # noqa diff --git a/pyatlan/model/assets/process.py b/pyatlan/model/assets/process.py new file mode 100644 index 000000000..b0f61522f --- /dev/null +++ b/pyatlan/model/assets/process.py @@ -0,0 +1,265 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
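`Procedure` above also illustrates the accessor convention shared by these generated models: getters return `None` whenever the value (or the whole nested `Attributes` object) is missing, while setters route through `__setattr__` into the nested `Attributes` model, creating it on first use. A minimal sketch, assuming direct instantiation (values are illustrative):

    from pyatlan.model.assets import Procedure

    proc = Procedure()
    assert proc.definition is None  # nothing set yet, so the getter returns None
    proc.definition = "BEGIN RETURN 1; END;"  # setter populates proc.attributes
    assert proc.attributes.definition == "BEGIN RETURN 1; END;"
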
+ + +from __future__ import annotations + +import hashlib +from io import StringIO +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, RelationField +from pyatlan.utils import init_guid, validate_required_fields + +from .asset import Asset + + +class Process(Asset, type_name="Process"): + """Description""" + + @classmethod + @init_guid + def create( + cls, + name: str, + connection_qualified_name: str, + inputs: list["Catalog"], + outputs: list["Catalog"], + process_id: Optional[str] = None, + parent: Optional[Process] = None, + ) -> Process: + return Process( + attributes=Process.Attributes.create( + name=name, + connection_qualified_name=connection_qualified_name, + process_id=process_id, + inputs=inputs, + outputs=outputs, + parent=parent, + ) + ) + + type_name: str = Field(default="Process", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "Process": + raise ValueError("must be Process") + return v + + def __setattr__(self, name, value): + if name in Process._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + CODE: ClassVar[KeywordField] = KeywordField("code", "code") + """ + Code that ran within the process. + """ + SQL: ClassVar[KeywordField] = KeywordField("sql", "sql") + """ + SQL query that ran to produce the outputs. + """ + AST: ClassVar[KeywordField] = KeywordField("ast", "ast") + """ + Parsed AST of the code or SQL statements that describe the logic of this process. + """ + + MATILLION_COMPONENT: ClassVar[RelationField] = RelationField("matillionComponent") + """ + TBC + """ + AIRFLOW_TASKS: ClassVar[RelationField] = RelationField("airflowTasks") + """ + TBC + """ + COLUMN_PROCESSES: ClassVar[RelationField] = RelationField("columnProcesses") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "inputs", + "outputs", + "code", + "sql", + "ast", + "matillion_component", + "airflow_tasks", + "column_processes", + ] + + @property + def inputs(self) -> Optional[list[Catalog]]: + return None if self.attributes is None else self.attributes.inputs + + @inputs.setter + def inputs(self, inputs: Optional[list[Catalog]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.inputs = inputs + + @property + def outputs(self) -> Optional[list[Catalog]]: + return None if self.attributes is None else self.attributes.outputs + + @outputs.setter + def outputs(self, outputs: Optional[list[Catalog]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.outputs = outputs + + @property + def code(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.code + + @code.setter + def code(self, code: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.code = code + + @property + def sql(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.sql + + @sql.setter + def sql(self, sql: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sql = sql + + @property + def ast(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.ast + + @ast.setter + def ast(self, ast: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.ast = ast + + @property + def matillion_component(self) -> 
Optional[MatillionComponent]: + return None if self.attributes is None else self.attributes.matillion_component + + @matillion_component.setter + def matillion_component(self, matillion_component: Optional[MatillionComponent]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.matillion_component = matillion_component + + @property + def airflow_tasks(self) -> Optional[list[AirflowTask]]: + return None if self.attributes is None else self.attributes.airflow_tasks + + @airflow_tasks.setter + def airflow_tasks(self, airflow_tasks: Optional[list[AirflowTask]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.airflow_tasks = airflow_tasks + + @property + def column_processes(self) -> Optional[list[ColumnProcess]]: + return None if self.attributes is None else self.attributes.column_processes + + @column_processes.setter + def column_processes(self, column_processes: Optional[list[ColumnProcess]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.column_processes = column_processes + + class Attributes(Asset.Attributes): + inputs: Optional[list[Catalog]] = Field(default=None, description="") + outputs: Optional[list[Catalog]] = Field(default=None, description="") + code: Optional[str] = Field(default=None, description="") + sql: Optional[str] = Field(default=None, description="") + ast: Optional[str] = Field(default=None, description="") + matillion_component: Optional[MatillionComponent] = Field( + default=None, description="" + ) # relationship + airflow_tasks: Optional[list[AirflowTask]] = Field( + default=None, description="" + ) # relationship + column_processes: Optional[list[ColumnProcess]] = Field( + default=None, description="" + ) # relationship + + @staticmethod + def generate_qualified_name( + name: str, + connection_qualified_name: str, + inputs: list["Catalog"], + outputs: list["Catalog"], + parent: Optional["Process"] = None, + process_id: Optional[str] = None, + ) -> str: + def append_relationship(output: StringIO, relationship: Asset): + if relationship.guid: + output.write(relationship.guid) + + def append_relationships(output: StringIO, relationships: list["Catalog"]): + for catalog in relationships: + append_relationship(output, catalog) + + validate_required_fields( + ["name", "connection_qualified_name", "inputs", "outputs"], + [name, connection_qualified_name, inputs, outputs], + ) + if process_id and process_id.strip(): + return f"{connection_qualified_name}/{process_id}" + buffer = StringIO() + buffer.write(name) + buffer.write(connection_qualified_name) + if parent: + append_relationship(buffer, parent) + append_relationships(buffer, inputs) + append_relationships(buffer, outputs) + ret_value = hashlib.md5( + buffer.getvalue().encode(), usedforsecurity=False + ).hexdigest() + buffer.close() + return ret_value + + @classmethod + @init_guid + def create( + cls, + name: str, + connection_qualified_name: str, + inputs: list["Catalog"], + outputs: list["Catalog"], + process_id: Optional[str] = None, + parent: Optional[Process] = None, + ) -> Process.Attributes: + qualified_name = Process.Attributes.generate_qualified_name( + name=name, + connection_qualified_name=connection_qualified_name, + process_id=process_id, + inputs=inputs, + outputs=outputs, + parent=parent, + ) + connector_name = connection_qualified_name.split("/")[1] + return Process.Attributes( + name=name, + qualified_name=qualified_name, + connector_name=connector_name, + 
connection_qualified_name=connection_qualified_name, + inputs=inputs, + outputs=outputs, + ) + + attributes: "Process.Attributes" = Field( + default_factory=lambda: Process.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .airflow_task import AirflowTask # noqa +from .catalog import Catalog # noqa +from .column_process import ColumnProcess # noqa +from .matillion_component import MatillionComponent # noqa diff --git a/pyatlan/model/assets/asset07.py b/pyatlan/model/assets/process_execution.py similarity index 77% rename from pyatlan/model/assets/asset07.py rename to pyatlan/model/assets/process_execution.py index 9c75e7cdf..504a3ee1d 100644 --- a/pyatlan/model/assets/asset07.py +++ b/pyatlan/model/assets/process_execution.py @@ -6,15 +6,15 @@ from typing import ClassVar -from pydantic import Field, validator +from pydantic.v1 import Field, validator -from .asset00 import Asset +from .asset import Asset class ProcessExecution(Asset, type_name="ProcessExecution"): """Description""" - type_name: str = Field("ProcessExecution", allow_mutation=False) + type_name: str = Field(default="ProcessExecution", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -28,6 +28,3 @@ def __setattr__(self, name, value): super().__setattr__(name, value) _convenience_properties: ClassVar[list[str]] = [] - - -ProcessExecution.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset14.py b/pyatlan/model/assets/purpose.py similarity index 94% rename from pyatlan/model/assets/asset14.py rename to pyatlan/model/assets/purpose.py index 9edd3af85..17606b348 100644 --- a/pyatlan/model/assets/asset14.py +++ b/pyatlan/model/assets/purpose.py @@ -6,7 +6,7 @@ from typing import ClassVar, Optional, Set -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.core import AtlanTagName from pyatlan.model.enums import ( @@ -19,8 +19,9 @@ from pyatlan.model.fields.atlan_fields import KeywordField from pyatlan.utils import init_guid, validate_required_fields -from .asset00 import SelfAsset -from .asset06 import AccessControl, AuthPolicy +from .access_control import AccessControl +from .asset import SelfAsset +from .auth_policy import AuthPolicy class Purpose(AccessControl): @@ -29,7 +30,7 @@ class Purpose(AccessControl): @classmethod # @validate_arguments() @init_guid - def create(cls, *, name: str, atlan_tags: list[str]) -> Purpose: + def create(cls, *, name: str, atlan_tags: list[AtlanTagName]) -> Purpose: validate_required_fields(["name", "atlan_tags"], [name, atlan_tags]) attributes = Purpose.Attributes.create(name=name, atlan_tags=atlan_tags) return cls(attributes=attributes) @@ -172,7 +173,7 @@ def create_for_modification( ) ) - type_name: str = Field("Purpose", allow_mutation=False) + type_name: str = Field(default="Purpose", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -208,13 +209,15 @@ def purpose_atlan_tags(self, purpose_atlan_tags: Optional[list[AtlanTagName]]): class Attributes(AccessControl.Attributes): purpose_atlan_tags: Optional[list[AtlanTagName]] = Field( - None, description="", alias="purposeClassifications" + default=None, description="" ) @classmethod # @validate_arguments() @init_guid - def create(cls, name: str, atlan_tags: list[str]) -> Purpose.Attributes: + def create( + cls, name: str, atlan_tags: list[AtlanTagName] + ) -> 
Purpose.Attributes: validate_required_fields(["name", "atlan_tags"], [name, atlan_tags]) return Purpose.Attributes( qualified_name=name, @@ -230,6 +233,3 @@ def create(cls, name: str, atlan_tags: list[str]) -> Purpose.Attributes: description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) - - -Purpose.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset53.py b/pyatlan/model/assets/qlik.py similarity index 85% rename from pyatlan/model/assets/asset53.py rename to pyatlan/model/assets/qlik.py index fd812c157..a27d5df19 100644 --- a/pyatlan/model/assets/asset53.py +++ b/pyatlan/model/assets/qlik.py @@ -6,7 +6,7 @@ from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.fields.atlan_fields import ( BooleanField, @@ -14,13 +14,13 @@ KeywordTextField, ) -from .asset19 import BI +from .b_i import BI class Qlik(BI): """Description""" - type_name: str = Field("Qlik", allow_mutation=False) + type_name: str = Field(default="Qlik", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -174,26 +174,17 @@ def qlik_is_published(self, qlik_is_published: Optional[bool]): self.attributes.qlik_is_published = qlik_is_published class Attributes(BI.Attributes): - qlik_id: Optional[str] = Field(None, description="", alias="qlikId") - qlik_q_r_i: Optional[str] = Field(None, description="", alias="qlikQRI") - qlik_space_id: Optional[str] = Field(None, description="", alias="qlikSpaceId") - qlik_space_qualified_name: Optional[str] = Field( - None, description="", alias="qlikSpaceQualifiedName" - ) - qlik_app_id: Optional[str] = Field(None, description="", alias="qlikAppId") - qlik_app_qualified_name: Optional[str] = Field( - None, description="", alias="qlikAppQualifiedName" - ) - qlik_owner_id: Optional[str] = Field(None, description="", alias="qlikOwnerId") - qlik_is_published: Optional[bool] = Field( - None, description="", alias="qlikIsPublished" - ) + qlik_id: Optional[str] = Field(default=None, description="") + qlik_q_r_i: Optional[str] = Field(default=None, description="") + qlik_space_id: Optional[str] = Field(default=None, description="") + qlik_space_qualified_name: Optional[str] = Field(default=None, description="") + qlik_app_id: Optional[str] = Field(default=None, description="") + qlik_app_qualified_name: Optional[str] = Field(default=None, description="") + qlik_owner_id: Optional[str] = Field(default=None, description="") + qlik_is_published: Optional[bool] = Field(default=None, description="") attributes: "Qlik.Attributes" = Field( default_factory=lambda: Qlik.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) - - -Qlik.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/qlik_app.py b/pyatlan/model/assets/qlik_app.py new file mode 100644 index 000000000..5e5258a67 --- /dev/null +++ b/pyatlan/model/assets/qlik_app.py @@ -0,0 +1,188 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
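The `Process.Attributes.generate_qualified_name` helper shown a little earlier is worth a note: when a `process_id` is supplied, the qualified name is simply `{connection_qualified_name}/{process_id}`; otherwise it is an MD5 digest over the name, the connection qualified name and the GUIDs of any parent, inputs and outputs, so repeated lineage runs over the same assets yield the same qualified name. A sketch with assumed asset GUIDs and names:

    from pyatlan.model.assets import Process, Table

    # Hypothetical lineage endpoints, referenced only by (made-up) GUIDs.
    src = Table.ref_by_guid("11111111-1111-1111-1111-111111111111")
    dst = Table.ref_by_guid("22222222-2222-2222-2222-222222222222")

    # With an explicit process_id the result is fully predictable:
    qn = Process.Attributes.generate_qualified_name(
        name="nightly-load",
        connection_qualified_name="default/snowflake/1700000000",  # assumed connection
        process_id="job-42",
        inputs=[src],
        outputs=[dst],
    )
    assert qn == "default/snowflake/1700000000/job-42"

    # Without a process_id, the same name, connection and GUIDs always hash to the
    # same 32-character MD5 hex digest (deterministic across re-runs):
    qn_a = Process.Attributes.generate_qualified_name(
        name="nightly-load",
        connection_qualified_name="default/snowflake/1700000000",
        inputs=[src],
        outputs=[dst],
    )
    qn_b = Process.Attributes.generate_qualified_name(
        name="nightly-load",
        connection_qualified_name="default/snowflake/1700000000",
        inputs=[src],
        outputs=[dst],
    )
    assert qn_a == qn_b and len(qn_a) == 32
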
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import ( + BooleanField, + KeywordField, + NumericField, + RelationField, +) + +from .qlik import Qlik + + +class QlikApp(Qlik): + """Description""" + + type_name: str = Field(default="QlikApp", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "QlikApp": + raise ValueError("must be QlikApp") + return v + + def __setattr__(self, name, value): + if name in QlikApp._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + QLIK_HAS_SECTION_ACCESS: ClassVar[BooleanField] = BooleanField( + "qlikHasSectionAccess", "qlikHasSectionAccess" + ) + """ + Whether section access or data masking is enabled on the source (true) or not (false). + """ + QLIK_ORIGIN_APP_ID: ClassVar[KeywordField] = KeywordField( + "qlikOriginAppId", "qlikOriginAppId" + ) + """ + Value of originAppId for this app. + """ + QLIK_IS_ENCRYPTED: ClassVar[BooleanField] = BooleanField( + "qlikIsEncrypted", "qlikIsEncrypted" + ) + """ + Whether this app is encrypted (true) or not (false). + """ + QLIK_IS_DIRECT_QUERY_MODE: ClassVar[BooleanField] = BooleanField( + "qlikIsDirectQueryMode", "qlikIsDirectQueryMode" + ) + """ + Whether this app is in direct query mode (true) or not (false). + """ + QLIK_APP_STATIC_BYTE_SIZE: ClassVar[NumericField] = NumericField( + "qlikAppStaticByteSize", "qlikAppStaticByteSize" + ) + """ + Static space used by this app, in bytes. + """ + + QLIK_SPACE: ClassVar[RelationField] = RelationField("qlikSpace") + """ + TBC + """ + QLIK_SHEETS: ClassVar[RelationField] = RelationField("qlikSheets") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "qlik_has_section_access", + "qlik_origin_app_id", + "qlik_is_encrypted", + "qlik_is_direct_query_mode", + "qlik_app_static_byte_size", + "qlik_space", + "qlik_sheets", + ] + + @property + def qlik_has_section_access(self) -> Optional[bool]: + return ( + None if self.attributes is None else self.attributes.qlik_has_section_access + ) + + @qlik_has_section_access.setter + def qlik_has_section_access(self, qlik_has_section_access: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_has_section_access = qlik_has_section_access + + @property + def qlik_origin_app_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.qlik_origin_app_id + + @qlik_origin_app_id.setter + def qlik_origin_app_id(self, qlik_origin_app_id: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_origin_app_id = qlik_origin_app_id + + @property + def qlik_is_encrypted(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.qlik_is_encrypted + + @qlik_is_encrypted.setter + def qlik_is_encrypted(self, qlik_is_encrypted: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_is_encrypted = qlik_is_encrypted + + @property + def qlik_is_direct_query_mode(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.qlik_is_direct_query_mode + ) + + @qlik_is_direct_query_mode.setter + def qlik_is_direct_query_mode(self, qlik_is_direct_query_mode: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + 
self.attributes.qlik_is_direct_query_mode = qlik_is_direct_query_mode + + @property + def qlik_app_static_byte_size(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.qlik_app_static_byte_size + ) + + @qlik_app_static_byte_size.setter + def qlik_app_static_byte_size(self, qlik_app_static_byte_size: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_app_static_byte_size = qlik_app_static_byte_size + + @property + def qlik_space(self) -> Optional[QlikSpace]: + return None if self.attributes is None else self.attributes.qlik_space + + @qlik_space.setter + def qlik_space(self, qlik_space: Optional[QlikSpace]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_space = qlik_space + + @property + def qlik_sheets(self) -> Optional[list[QlikSheet]]: + return None if self.attributes is None else self.attributes.qlik_sheets + + @qlik_sheets.setter + def qlik_sheets(self, qlik_sheets: Optional[list[QlikSheet]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_sheets = qlik_sheets + + class Attributes(Qlik.Attributes): + qlik_has_section_access: Optional[bool] = Field(default=None, description="") + qlik_origin_app_id: Optional[str] = Field(default=None, description="") + qlik_is_encrypted: Optional[bool] = Field(default=None, description="") + qlik_is_direct_query_mode: Optional[bool] = Field(default=None, description="") + qlik_app_static_byte_size: Optional[int] = Field(default=None, description="") + qlik_space: Optional[QlikSpace] = Field( + default=None, description="" + ) # relationship + qlik_sheets: Optional[list[QlikSheet]] = Field( + default=None, description="" + ) # relationship + + attributes: "QlikApp.Attributes" = Field( + default_factory=lambda: QlikApp.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .qlik_sheet import QlikSheet # noqa +from .qlik_space import QlikSpace # noqa diff --git a/pyatlan/model/assets/qlik_chart.py b/pyatlan/model/assets/qlik_chart.py new file mode 100644 index 000000000..c34f9ec6a --- /dev/null +++ b/pyatlan/model/assets/qlik_chart.py @@ -0,0 +1,138 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, RelationField, TextField + +from .qlik import Qlik + + +class QlikChart(Qlik): + """Description""" + + type_name: str = Field(default="QlikChart", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "QlikChart": + raise ValueError("must be QlikChart") + return v + + def __setattr__(self, name, value): + if name in QlikChart._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + QLIK_CHART_SUBTITLE: ClassVar[TextField] = TextField( + "qlikChartSubtitle", "qlikChartSubtitle" + ) + """ + Subtitle of this chart. + """ + QLIK_CHART_FOOTNOTE: ClassVar[TextField] = TextField( + "qlikChartFootnote", "qlikChartFootnote" + ) + """ + Footnote of this chart. + """ + QLIK_CHART_ORIENTATION: ClassVar[KeywordField] = KeywordField( + "qlikChartOrientation", "qlikChartOrientation" + ) + """ + Orientation of this chart. 
+ """ + QLIK_CHART_TYPE: ClassVar[KeywordField] = KeywordField( + "qlikChartType", "qlikChartType" + ) + """ + Subtype of this chart, for example: bar, graph, pie, etc. + """ + + QLIK_SHEET: ClassVar[RelationField] = RelationField("qlikSheet") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "qlik_chart_subtitle", + "qlik_chart_footnote", + "qlik_chart_orientation", + "qlik_chart_type", + "qlik_sheet", + ] + + @property + def qlik_chart_subtitle(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.qlik_chart_subtitle + + @qlik_chart_subtitle.setter + def qlik_chart_subtitle(self, qlik_chart_subtitle: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_chart_subtitle = qlik_chart_subtitle + + @property + def qlik_chart_footnote(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.qlik_chart_footnote + + @qlik_chart_footnote.setter + def qlik_chart_footnote(self, qlik_chart_footnote: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_chart_footnote = qlik_chart_footnote + + @property + def qlik_chart_orientation(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.qlik_chart_orientation + ) + + @qlik_chart_orientation.setter + def qlik_chart_orientation(self, qlik_chart_orientation: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_chart_orientation = qlik_chart_orientation + + @property + def qlik_chart_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.qlik_chart_type + + @qlik_chart_type.setter + def qlik_chart_type(self, qlik_chart_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_chart_type = qlik_chart_type + + @property + def qlik_sheet(self) -> Optional[QlikSheet]: + return None if self.attributes is None else self.attributes.qlik_sheet + + @qlik_sheet.setter + def qlik_sheet(self, qlik_sheet: Optional[QlikSheet]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_sheet = qlik_sheet + + class Attributes(Qlik.Attributes): + qlik_chart_subtitle: Optional[str] = Field(default=None, description="") + qlik_chart_footnote: Optional[str] = Field(default=None, description="") + qlik_chart_orientation: Optional[str] = Field(default=None, description="") + qlik_chart_type: Optional[str] = Field(default=None, description="") + qlik_sheet: Optional[QlikSheet] = Field( + default=None, description="" + ) # relationship + + attributes: "QlikChart.Attributes" = Field( + default_factory=lambda: QlikChart.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .qlik_sheet import QlikSheet # noqa diff --git a/pyatlan/model/assets/qlik_dataset.py b/pyatlan/model/assets/qlik_dataset.py new file mode 100644 index 000000000..882982537 --- /dev/null +++ b/pyatlan/model/assets/qlik_dataset.py @@ -0,0 +1,146 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
+
+
+from __future__ import annotations
+
+from typing import ClassVar, Optional
+
+from pydantic.v1 import Field, validator
+
+from pyatlan.model.fields.atlan_fields import (
+    KeywordField,
+    KeywordTextField,
+    RelationField,
+)
+
+from .qlik import Qlik
+
+
+class QlikDataset(Qlik):
+    """Description"""
+
+    type_name: str = Field(default="QlikDataset", allow_mutation=False)
+
+    @validator("type_name")
+    def validate_type_name(cls, v):
+        if v != "QlikDataset":
+            raise ValueError("must be QlikDataset")
+        return v
+
+    def __setattr__(self, name, value):
+        if name in QlikDataset._convenience_properties:
+            return object.__setattr__(self, name, value)
+        super().__setattr__(name, value)
+
+    QLIK_DATASET_TECHNICAL_NAME: ClassVar[KeywordTextField] = KeywordTextField(
+        "qlikDatasetTechnicalName",
+        "qlikDatasetTechnicalName.keyword",
+        "qlikDatasetTechnicalName",
+    )
+    """
+    Technical name of this asset.
+    """
+    QLIK_DATASET_TYPE: ClassVar[KeywordField] = KeywordField(
+        "qlikDatasetType", "qlikDatasetType"
+    )
+    """
+    Type of this data asset, for example: qix-df, snowflake, etc.
+    """
+    QLIK_DATASET_URI: ClassVar[KeywordTextField] = KeywordTextField(
+        "qlikDatasetUri", "qlikDatasetUri", "qlikDatasetUri.text"
+    )
+    """
+    URI of this dataset.
+    """
+    QLIK_DATASET_SUBTYPE: ClassVar[KeywordField] = KeywordField(
+        "qlikDatasetSubtype", "qlikDatasetSubtype"
+    )
+    """
+    Subtype of this dataset asset.
+    """
+
+    QLIK_SPACE: ClassVar[RelationField] = RelationField("qlikSpace")
+    """
+    TBC
+    """
+
+    _convenience_properties: ClassVar[list[str]] = [
+        "qlik_dataset_technical_name",
+        "qlik_dataset_type",
+        "qlik_dataset_uri",
+        "qlik_dataset_subtype",
+        "qlik_space",
+    ]
+
+    @property
+    def qlik_dataset_technical_name(self) -> Optional[str]:
+        return (
+            None
+            if self.attributes is None
+            else self.attributes.qlik_dataset_technical_name
+        )
+
+    @qlik_dataset_technical_name.setter
+    def qlik_dataset_technical_name(self, qlik_dataset_technical_name: Optional[str]):
+        if self.attributes is None:
+            self.attributes = self.Attributes()
+        self.attributes.qlik_dataset_technical_name = qlik_dataset_technical_name
+
+    @property
+    def qlik_dataset_type(self) -> Optional[str]:
+        return None if self.attributes is None else self.attributes.qlik_dataset_type
+
+    @qlik_dataset_type.setter
+    def qlik_dataset_type(self, qlik_dataset_type: Optional[str]):
+        if self.attributes is None:
+            self.attributes = self.Attributes()
+        self.attributes.qlik_dataset_type = qlik_dataset_type
+
+    @property
+    def qlik_dataset_uri(self) -> Optional[str]:
+        return None if self.attributes is None else self.attributes.qlik_dataset_uri
+
+    @qlik_dataset_uri.setter
+    def qlik_dataset_uri(self, qlik_dataset_uri: Optional[str]):
+        if self.attributes is None:
+            self.attributes = self.Attributes()
+        self.attributes.qlik_dataset_uri = qlik_dataset_uri
+
+    @property
+    def qlik_dataset_subtype(self) -> Optional[str]:
+        return None if self.attributes is None else self.attributes.qlik_dataset_subtype
+
+    @qlik_dataset_subtype.setter
+    def qlik_dataset_subtype(self, qlik_dataset_subtype: Optional[str]):
+        if self.attributes is None:
+            self.attributes = self.Attributes()
+        self.attributes.qlik_dataset_subtype = qlik_dataset_subtype
+
+    @property
+    def qlik_space(self) -> Optional[QlikSpace]:
+        return None if self.attributes is None else self.attributes.qlik_space
+
+    @qlik_space.setter
+    def qlik_space(self, qlik_space: Optional[QlikSpace]):
+        if self.attributes is None:
+            self.attributes = self.Attributes()
+        self.attributes.qlik_space = qlik_space
+
+    class
Attributes(Qlik.Attributes): + qlik_dataset_technical_name: Optional[str] = Field(default=None, description="") + qlik_dataset_type: Optional[str] = Field(default=None, description="") + qlik_dataset_uri: Optional[str] = Field(default=None, description="") + qlik_dataset_subtype: Optional[str] = Field(default=None, description="") + qlik_space: Optional[QlikSpace] = Field( + default=None, description="" + ) # relationship + + attributes: "QlikDataset.Attributes" = Field( + default_factory=lambda: QlikDataset.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .qlik_space import QlikSpace # noqa diff --git a/pyatlan/model/assets/qlik_sheet.py b/pyatlan/model/assets/qlik_sheet.py new file mode 100644 index 000000000..5580a98d6 --- /dev/null +++ b/pyatlan/model/assets/qlik_sheet.py @@ -0,0 +1,103 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import BooleanField, RelationField + +from .qlik import Qlik + + +class QlikSheet(Qlik): + """Description""" + + type_name: str = Field(default="QlikSheet", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "QlikSheet": + raise ValueError("must be QlikSheet") + return v + + def __setattr__(self, name, value): + if name in QlikSheet._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + QLIK_SHEET_IS_APPROVED: ClassVar[BooleanField] = BooleanField( + "qlikSheetIsApproved", "qlikSheetIsApproved" + ) + """ + Whether this is approved (true) or not (false). 
+ """ + + QLIK_APP: ClassVar[RelationField] = RelationField("qlikApp") + """ + TBC + """ + QLIK_CHARTS: ClassVar[RelationField] = RelationField("qlikCharts") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "qlik_sheet_is_approved", + "qlik_app", + "qlik_charts", + ] + + @property + def qlik_sheet_is_approved(self) -> Optional[bool]: + return ( + None if self.attributes is None else self.attributes.qlik_sheet_is_approved + ) + + @qlik_sheet_is_approved.setter + def qlik_sheet_is_approved(self, qlik_sheet_is_approved: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_sheet_is_approved = qlik_sheet_is_approved + + @property + def qlik_app(self) -> Optional[QlikApp]: + return None if self.attributes is None else self.attributes.qlik_app + + @qlik_app.setter + def qlik_app(self, qlik_app: Optional[QlikApp]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_app = qlik_app + + @property + def qlik_charts(self) -> Optional[list[QlikChart]]: + return None if self.attributes is None else self.attributes.qlik_charts + + @qlik_charts.setter + def qlik_charts(self, qlik_charts: Optional[list[QlikChart]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_charts = qlik_charts + + class Attributes(Qlik.Attributes): + qlik_sheet_is_approved: Optional[bool] = Field(default=None, description="") + qlik_app: Optional[QlikApp] = Field( + default=None, description="" + ) # relationship + qlik_charts: Optional[list[QlikChart]] = Field( + default=None, description="" + ) # relationship + + attributes: "QlikSheet.Attributes" = Field( + default_factory=lambda: QlikSheet.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .qlik_app import QlikApp # noqa +from .qlik_chart import QlikChart # noqa diff --git a/pyatlan/model/assets/qlik_space.py b/pyatlan/model/assets/qlik_space.py new file mode 100644 index 000000000..1d5c6d054 --- /dev/null +++ b/pyatlan/model/assets/qlik_space.py @@ -0,0 +1,101 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, RelationField + +from .qlik import Qlik + + +class QlikSpace(Qlik): + """Description""" + + type_name: str = Field(default="QlikSpace", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "QlikSpace": + raise ValueError("must be QlikSpace") + return v + + def __setattr__(self, name, value): + if name in QlikSpace._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + QLIK_SPACE_TYPE: ClassVar[KeywordField] = KeywordField( + "qlikSpaceType", "qlikSpaceType" + ) + """ + Type of this space, for exmaple: Private, Shared, etc. 
+ """ + + QLIK_DATASETS: ClassVar[RelationField] = RelationField("qlikDatasets") + """ + TBC + """ + QLIK_APPS: ClassVar[RelationField] = RelationField("qlikApps") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "qlik_space_type", + "qlik_datasets", + "qlik_apps", + ] + + @property + def qlik_space_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.qlik_space_type + + @qlik_space_type.setter + def qlik_space_type(self, qlik_space_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_space_type = qlik_space_type + + @property + def qlik_datasets(self) -> Optional[list[QlikDataset]]: + return None if self.attributes is None else self.attributes.qlik_datasets + + @qlik_datasets.setter + def qlik_datasets(self, qlik_datasets: Optional[list[QlikDataset]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_datasets = qlik_datasets + + @property + def qlik_apps(self) -> Optional[list[QlikApp]]: + return None if self.attributes is None else self.attributes.qlik_apps + + @qlik_apps.setter + def qlik_apps(self, qlik_apps: Optional[list[QlikApp]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qlik_apps = qlik_apps + + class Attributes(Qlik.Attributes): + qlik_space_type: Optional[str] = Field(default=None, description="") + qlik_datasets: Optional[list[QlikDataset]] = Field( + default=None, description="" + ) # relationship + qlik_apps: Optional[list[QlikApp]] = Field( + default=None, description="" + ) # relationship + + attributes: "QlikSpace.Attributes" = Field( + default_factory=lambda: QlikSpace.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .qlik_app import QlikApp # noqa +from .qlik_dataset import QlikDataset # noqa diff --git a/pyatlan/model/assets/asset88.py b/pyatlan/model/assets/qlik_stream.py similarity index 77% rename from pyatlan/model/assets/asset88.py rename to pyatlan/model/assets/qlik_stream.py index 2ca5705cd..8b314c688 100644 --- a/pyatlan/model/assets/asset88.py +++ b/pyatlan/model/assets/qlik_stream.py @@ -6,15 +6,15 @@ from typing import ClassVar -from pydantic import Field, validator +from pydantic.v1 import Field, validator -from .asset81 import QlikSpace +from .qlik_space import QlikSpace class QlikStream(QlikSpace): """Description""" - type_name: str = Field("QlikStream", allow_mutation=False) + type_name: str = Field(default="QlikStream", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -28,6 +28,3 @@ def __setattr__(self, name, value): super().__setattr__(name, value) _convenience_properties: ClassVar[list[str]] = [] - - -QlikStream.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/query.py b/pyatlan/model/assets/query.py new file mode 100644 index 000000000..5b91b5ed5 --- /dev/null +++ b/pyatlan/model/assets/query.py @@ -0,0 +1,374 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import ( + BooleanField, + KeywordField, + KeywordTextField, + RelationField, +) + +from .s_q_l import SQL + + +class Query(SQL): + """Description""" + + type_name: str = Field(default="Query", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "Query": + raise ValueError("must be Query") + return v + + def __setattr__(self, name, value): + if name in Query._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + RAW_QUERY: ClassVar[KeywordField] = KeywordField("rawQuery", "rawQuery") + """ + Deprecated. See 'longRawQuery' instead. + """ + LONG_RAW_QUERY: ClassVar[KeywordField] = KeywordField( + "longRawQuery", "longRawQuery" + ) + """ + Raw SQL query string. + """ + RAW_QUERY_TEXT: ClassVar[RelationField] = RelationField("rawQueryText") + """ + + """ + DEFAULT_SCHEMA_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "defaultSchemaQualifiedName", + "defaultSchemaQualifiedName", + "defaultSchemaQualifiedName.text", + ) + """ + Unique name of the default schema to use for this query. + """ + DEFAULT_DATABASE_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "defaultDatabaseQualifiedName", + "defaultDatabaseQualifiedName", + "defaultDatabaseQualifiedName.text", + ) + """ + Unique name of the default database to use for this query. + """ + VARIABLES_SCHEMA_BASE64: ClassVar[KeywordField] = KeywordField( + "variablesSchemaBase64", "variablesSchemaBase64" + ) + """ + Base64-encoded string of the variables to use in this query. + """ + IS_PRIVATE: ClassVar[BooleanField] = BooleanField("isPrivate", "isPrivate") + """ + Whether this query is private (true) or shared (false). + """ + IS_SQL_SNIPPET: ClassVar[BooleanField] = BooleanField( + "isSqlSnippet", "isSqlSnippet" + ) + """ + Whether this query is a SQL snippet (true) or not (false). + """ + PARENT_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "parentQualifiedName", "parentQualifiedName", "parentQualifiedName.text" + ) + """ + Unique name of the parent collection or folder in which this query exists. + """ + COLLECTION_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "collectionQualifiedName", + "collectionQualifiedName", + "collectionQualifiedName.text", + ) + """ + Unique name of the collection in which this query exists. + """ + IS_VISUAL_QUERY: ClassVar[BooleanField] = BooleanField( + "isVisualQuery", "isVisualQuery" + ) + """ + Whether this query is a visual query (true) or not (false). + """ + VISUAL_BUILDER_SCHEMA_BASE64: ClassVar[KeywordField] = KeywordField( + "visualBuilderSchemaBase64", "visualBuilderSchemaBase64" + ) + """ + Base64-encoded string for the visual query builder. 
+ """ + + PARENT: ClassVar[RelationField] = RelationField("parent") + """ + TBC + """ + COLUMNS: ClassVar[RelationField] = RelationField("columns") + """ + TBC + """ + TABLES: ClassVar[RelationField] = RelationField("tables") + """ + TBC + """ + VIEWS: ClassVar[RelationField] = RelationField("views") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "raw_query", + "long_raw_query", + "raw_query_text", + "default_schema_qualified_name", + "default_database_qualified_name", + "variables_schema_base64", + "is_private", + "is_sql_snippet", + "parent_qualified_name", + "collection_qualified_name", + "is_visual_query", + "visual_builder_schema_base64", + "parent", + "columns", + "tables", + "views", + ] + + @property + def raw_query(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.raw_query + + @raw_query.setter + def raw_query(self, raw_query: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.raw_query = raw_query + + @property + def long_raw_query(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.long_raw_query + + @long_raw_query.setter + def long_raw_query(self, long_raw_query: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.long_raw_query = long_raw_query + + @property + def raw_query_text(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.raw_query_text + + @raw_query_text.setter + def raw_query_text(self, raw_query_text: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.raw_query_text = raw_query_text + + @property + def default_schema_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.default_schema_qualified_name + ) + + @default_schema_qualified_name.setter + def default_schema_qualified_name( + self, default_schema_qualified_name: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.default_schema_qualified_name = default_schema_qualified_name + + @property + def default_database_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.default_database_qualified_name + ) + + @default_database_qualified_name.setter + def default_database_qualified_name( + self, default_database_qualified_name: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.default_database_qualified_name = ( + default_database_qualified_name + ) + + @property + def variables_schema_base64(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.variables_schema_base64 + ) + + @variables_schema_base64.setter + def variables_schema_base64(self, variables_schema_base64: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.variables_schema_base64 = variables_schema_base64 + + @property + def is_private(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_private + + @is_private.setter + def is_private(self, is_private: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_private = is_private + + @property + def is_sql_snippet(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_sql_snippet + + @is_sql_snippet.setter + 
def is_sql_snippet(self, is_sql_snippet: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_sql_snippet = is_sql_snippet + + @property + def parent_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.parent_qualified_name + ) + + @parent_qualified_name.setter + def parent_qualified_name(self, parent_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.parent_qualified_name = parent_qualified_name + + @property + def collection_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.collection_qualified_name + ) + + @collection_qualified_name.setter + def collection_qualified_name(self, collection_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.collection_qualified_name = collection_qualified_name + + @property + def is_visual_query(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_visual_query + + @is_visual_query.setter + def is_visual_query(self, is_visual_query: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_visual_query = is_visual_query + + @property + def visual_builder_schema_base64(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.visual_builder_schema_base64 + ) + + @visual_builder_schema_base64.setter + def visual_builder_schema_base64(self, visual_builder_schema_base64: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.visual_builder_schema_base64 = visual_builder_schema_base64 + + @property + def parent(self) -> Optional[Namespace]: + return None if self.attributes is None else self.attributes.parent + + @parent.setter + def parent(self, parent: Optional[Namespace]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.parent = parent + + @property + def columns(self) -> Optional[list[Column]]: + return None if self.attributes is None else self.attributes.columns + + @columns.setter + def columns(self, columns: Optional[list[Column]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.columns = columns + + @property + def tables(self) -> Optional[list[Table]]: + return None if self.attributes is None else self.attributes.tables + + @tables.setter + def tables(self, tables: Optional[list[Table]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.tables = tables + + @property + def views(self) -> Optional[list[View]]: + return None if self.attributes is None else self.attributes.views + + @views.setter + def views(self, views: Optional[list[View]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.views = views + + class Attributes(SQL.Attributes): + raw_query: Optional[str] = Field(default=None, description="") + long_raw_query: Optional[str] = Field(default=None, description="") + raw_query_text: Optional[str] = Field(default=None, description="") + default_schema_qualified_name: Optional[str] = Field( + default=None, description="" + ) + default_database_qualified_name: Optional[str] = Field( + default=None, description="" + ) + variables_schema_base64: Optional[str] = Field(default=None, description="") + is_private: Optional[bool] = Field(default=None, 
description="") + is_sql_snippet: Optional[bool] = Field(default=None, description="") + parent_qualified_name: Optional[str] = Field(default=None, description="") + collection_qualified_name: Optional[str] = Field(default=None, description="") + is_visual_query: Optional[bool] = Field(default=None, description="") + visual_builder_schema_base64: Optional[str] = Field( + default=None, description="" + ) + parent: Optional[Namespace] = Field( + default=None, description="" + ) # relationship + columns: Optional[list[Column]] = Field( + default=None, description="" + ) # relationship + tables: Optional[list[Table]] = Field( + default=None, description="" + ) # relationship + views: Optional[list[View]] = Field( + default=None, description="" + ) # relationship + + attributes: "Query.Attributes" = Field( + default_factory=lambda: Query.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .column import Column # noqa +from .namespace import Namespace # noqa +from .table import Table # noqa +from .view import View # noqa diff --git a/pyatlan/model/assets/asset49.py b/pyatlan/model/assets/quick_sight.py similarity index 84% rename from pyatlan/model/assets/asset49.py rename to pyatlan/model/assets/quick_sight.py index 3a75388fa..1ab91913f 100644 --- a/pyatlan/model/assets/asset49.py +++ b/pyatlan/model/assets/quick_sight.py @@ -6,17 +6,17 @@ from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.fields.atlan_fields import KeywordField, KeywordTextField -from .asset19 import BI +from .b_i import BI class QuickSight(BI): """Description""" - type_name: str = Field("QuickSight", allow_mutation=False) + type_name: str = Field(default="QuickSight", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -87,21 +87,12 @@ def quick_sight_sheet_name(self, quick_sight_sheet_name: Optional[str]): self.attributes.quick_sight_sheet_name = quick_sight_sheet_name class Attributes(BI.Attributes): - quick_sight_id: Optional[str] = Field( - None, description="", alias="quickSightId" - ) - quick_sight_sheet_id: Optional[str] = Field( - None, description="", alias="quickSightSheetId" - ) - quick_sight_sheet_name: Optional[str] = Field( - None, description="", alias="quickSightSheetName" - ) + quick_sight_id: Optional[str] = Field(default=None, description="") + quick_sight_sheet_id: Optional[str] = Field(default=None, description="") + quick_sight_sheet_name: Optional[str] = Field(default=None, description="") attributes: "QuickSight.Attributes" = Field( default_factory=lambda: QuickSight.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) - - -QuickSight.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/quick_sight_analysis.py b/pyatlan/model/assets/quick_sight_analysis.py new file mode 100644 index 000000000..c1a6b9829 --- /dev/null +++ b/pyatlan/model/assets/quick_sight_analysis.py @@ -0,0 +1,211 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
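The new Query model above carries the SQL text and its execution context as flat convenience properties, with rawQuery deprecated in favour of longRawQuery. A minimal sketch of populating one in memory; the qualified names below are hypothetical, and persisting the asset would go through the client's usual save path, which is outside this diff:

from pyatlan.model.assets import Query

query = Query()
query.long_raw_query = "SELECT customer_id, SUM(amount) FROM orders GROUP BY customer_id"
query.is_private = True
query.is_sql_snippet = False

# The default execution context is expressed as qualified names
# (the connection/database/schema values below are hypothetical).
query.default_database_qualified_name = "default/snowflake/1700000000/ANALYTICS"
query.default_schema_qualified_name = "default/snowflake/1700000000/ANALYTICS/PUBLIC"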
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.enums import QuickSightAnalysisStatus +from pyatlan.model.fields.atlan_fields import KeywordField, RelationField + +from .quick_sight import QuickSight + + +class QuickSightAnalysis(QuickSight): + """Description""" + + type_name: str = Field(default="QuickSightAnalysis", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "QuickSightAnalysis": + raise ValueError("must be QuickSightAnalysis") + return v + + def __setattr__(self, name, value): + if name in QuickSightAnalysis._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + QUICK_SIGHT_ANALYSIS_STATUS: ClassVar[KeywordField] = KeywordField( + "quickSightAnalysisStatus", "quickSightAnalysisStatus" + ) + """ + Status of this analysis, for example: CREATION_IN_PROGRESS, UPDATE_SUCCESSFUL, etc. + """ + QUICK_SIGHT_ANALYSIS_CALCULATED_FIELDS: ClassVar[KeywordField] = KeywordField( + "quickSightAnalysisCalculatedFields", "quickSightAnalysisCalculatedFields" + ) + """ + List of field names calculated by this analysis. + """ + QUICK_SIGHT_ANALYSIS_PARAMETER_DECLARATIONS: ClassVar[KeywordField] = KeywordField( + "quickSightAnalysisParameterDeclarations", + "quickSightAnalysisParameterDeclarations", + ) + """ + List of parameters used for this analysis. + """ + QUICK_SIGHT_ANALYSIS_FILTER_GROUPS: ClassVar[KeywordField] = KeywordField( + "quickSightAnalysisFilterGroups", "quickSightAnalysisFilterGroups" + ) + """ + List of filter groups used for this analysis. + """ + + QUICK_SIGHT_ANALYSIS_VISUALS: ClassVar[RelationField] = RelationField( + "quickSightAnalysisVisuals" + ) + """ + TBC + """ + QUICK_SIGHT_ANALYSIS_FOLDERS: ClassVar[RelationField] = RelationField( + "quickSightAnalysisFolders" + ) + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "quick_sight_analysis_status", + "quick_sight_analysis_calculated_fields", + "quick_sight_analysis_parameter_declarations", + "quick_sight_analysis_filter_groups", + "quick_sight_analysis_visuals", + "quick_sight_analysis_folders", + ] + + @property + def quick_sight_analysis_status(self) -> Optional[QuickSightAnalysisStatus]: + return ( + None + if self.attributes is None + else self.attributes.quick_sight_analysis_status + ) + + @quick_sight_analysis_status.setter + def quick_sight_analysis_status( + self, quick_sight_analysis_status: Optional[QuickSightAnalysisStatus] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_analysis_status = quick_sight_analysis_status + + @property + def quick_sight_analysis_calculated_fields(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.quick_sight_analysis_calculated_fields + ) + + @quick_sight_analysis_calculated_fields.setter + def quick_sight_analysis_calculated_fields( + self, quick_sight_analysis_calculated_fields: Optional[set[str]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_analysis_calculated_fields = ( + quick_sight_analysis_calculated_fields + ) + + @property + def quick_sight_analysis_parameter_declarations(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.quick_sight_analysis_parameter_declarations + ) + + @quick_sight_analysis_parameter_declarations.setter + def 
quick_sight_analysis_parameter_declarations( + self, quick_sight_analysis_parameter_declarations: Optional[set[str]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_analysis_parameter_declarations = ( + quick_sight_analysis_parameter_declarations + ) + + @property + def quick_sight_analysis_filter_groups(self) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else self.attributes.quick_sight_analysis_filter_groups + ) + + @quick_sight_analysis_filter_groups.setter + def quick_sight_analysis_filter_groups( + self, quick_sight_analysis_filter_groups: Optional[set[str]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_analysis_filter_groups = ( + quick_sight_analysis_filter_groups + ) + + @property + def quick_sight_analysis_visuals(self) -> Optional[list[QuickSightAnalysisVisual]]: + return ( + None + if self.attributes is None + else self.attributes.quick_sight_analysis_visuals + ) + + @quick_sight_analysis_visuals.setter + def quick_sight_analysis_visuals( + self, quick_sight_analysis_visuals: Optional[list[QuickSightAnalysisVisual]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_analysis_visuals = quick_sight_analysis_visuals + + @property + def quick_sight_analysis_folders(self) -> Optional[list[QuickSightFolder]]: + return ( + None + if self.attributes is None + else self.attributes.quick_sight_analysis_folders + ) + + @quick_sight_analysis_folders.setter + def quick_sight_analysis_folders( + self, quick_sight_analysis_folders: Optional[list[QuickSightFolder]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_analysis_folders = quick_sight_analysis_folders + + class Attributes(QuickSight.Attributes): + quick_sight_analysis_status: Optional[QuickSightAnalysisStatus] = Field( + default=None, description="" + ) + quick_sight_analysis_calculated_fields: Optional[set[str]] = Field( + default=None, description="" + ) + quick_sight_analysis_parameter_declarations: Optional[set[str]] = Field( + default=None, description="" + ) + quick_sight_analysis_filter_groups: Optional[set[str]] = Field( + default=None, description="" + ) + quick_sight_analysis_visuals: Optional[list[QuickSightAnalysisVisual]] = Field( + default=None, description="" + ) # relationship + quick_sight_analysis_folders: Optional[list[QuickSightFolder]] = Field( + default=None, description="" + ) # relationship + + attributes: "QuickSightAnalysis.Attributes" = Field( + default_factory=lambda: QuickSightAnalysis.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .quick_sight_analysis_visual import QuickSightAnalysisVisual # noqa +from .quick_sight_folder import QuickSightFolder # noqa diff --git a/pyatlan/model/assets/quick_sight_analysis_visual.py b/pyatlan/model/assets/quick_sight_analysis_visual.py new file mode 100644 index 000000000..1d247f3b9 --- /dev/null +++ b/pyatlan/model/assets/quick_sight_analysis_visual.py @@ -0,0 +1,94 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
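QuickSightAnalysis above mixes an enum-typed status with set-valued string fields. A short sketch under the same assumptions; the enum member name is inferred from the example values quoted in the field's docstring:

from pyatlan.model.assets import QuickSightAnalysis
from pyatlan.model.enums import QuickSightAnalysisStatus

analysis = QuickSightAnalysis()

# Enum-typed status; the member name is assumed from the docstring's examples.
analysis.quick_sight_analysis_status = QuickSightAnalysisStatus.CREATION_IN_PROGRESS

# Set-valued fields accept plain Python sets of strings.
analysis.quick_sight_analysis_calculated_fields = {"profit_margin", "order_count"}
analysis.quick_sight_analysis_filter_groups = {"region_filter"}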
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordTextField, RelationField + +from .quick_sight import QuickSight + + +class QuickSightAnalysisVisual(QuickSight): + """Description""" + + type_name: str = Field(default="QuickSightAnalysisVisual", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "QuickSightAnalysisVisual": + raise ValueError("must be QuickSightAnalysisVisual") + return v + + def __setattr__(self, name, value): + if name in QuickSightAnalysisVisual._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + QUICK_SIGHT_ANALYSIS_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "quickSightAnalysisQualifiedName", + "quickSightAnalysisQualifiedName", + "quickSightAnalysisQualifiedName.text", + ) + """ + Unique name of the QuickSight analysis in which this visual exists. + """ + + QUICK_SIGHT_ANALYSIS: ClassVar[RelationField] = RelationField("quickSightAnalysis") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "quick_sight_analysis_qualified_name", + "quick_sight_analysis", + ] + + @property + def quick_sight_analysis_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.quick_sight_analysis_qualified_name + ) + + @quick_sight_analysis_qualified_name.setter + def quick_sight_analysis_qualified_name( + self, quick_sight_analysis_qualified_name: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_analysis_qualified_name = ( + quick_sight_analysis_qualified_name + ) + + @property + def quick_sight_analysis(self) -> Optional[QuickSightAnalysis]: + return None if self.attributes is None else self.attributes.quick_sight_analysis + + @quick_sight_analysis.setter + def quick_sight_analysis(self, quick_sight_analysis: Optional[QuickSightAnalysis]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_analysis = quick_sight_analysis + + class Attributes(QuickSight.Attributes): + quick_sight_analysis_qualified_name: Optional[str] = Field( + default=None, description="" + ) + quick_sight_analysis: Optional[QuickSightAnalysis] = Field( + default=None, description="" + ) # relationship + + attributes: "QuickSightAnalysisVisual.Attributes" = Field( + default_factory=lambda: QuickSightAnalysisVisual.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .quick_sight_analysis import QuickSightAnalysis # noqa diff --git a/pyatlan/model/assets/quick_sight_dashboard.py b/pyatlan/model/assets/quick_sight_dashboard.py new file mode 100644 index 000000000..ef5f04afc --- /dev/null +++ b/pyatlan/model/assets/quick_sight_dashboard.py @@ -0,0 +1,163 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
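QuickSightAnalysisVisual above follows the child-object pattern used throughout this diff: the parent analysis is tracked both as a flat qualified-name string and as a typed relationship. A sketch, with a hypothetical qualified name:

from pyatlan.model.assets import QuickSightAnalysis, QuickSightAnalysisVisual

analysis = QuickSightAnalysis()
analysis.qualified_name = "default/quicksight/1700000000/analysis-123"  # hypothetical

visual = QuickSightAnalysisVisual()
visual.quick_sight_analysis_qualified_name = analysis.qualified_name  # flat string form
visual.quick_sight_analysis = analysis  # typed relationship to the parent asset
analysis.quick_sight_analysis_visuals = [visual]  # reverse side on the parent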
+ + +from __future__ import annotations + +from datetime import datetime +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import NumericField, RelationField + +from .quick_sight import QuickSight + + +class QuickSightDashboard(QuickSight): + """Description""" + + type_name: str = Field(default="QuickSightDashboard", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "QuickSightDashboard": + raise ValueError("must be QuickSightDashboard") + return v + + def __setattr__(self, name, value): + if name in QuickSightDashboard._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + QUICK_SIGHT_DASHBOARD_PUBLISHED_VERSION_NUMBER: ClassVar[ + NumericField + ] = NumericField( + "quickSightDashboardPublishedVersionNumber", + "quickSightDashboardPublishedVersionNumber", + ) + """ + Version number of the published dashboard. + """ + QUICK_SIGHT_DASHBOARD_LAST_PUBLISHED_TIME: ClassVar[NumericField] = NumericField( + "quickSightDashboardLastPublishedTime", "quickSightDashboardLastPublishedTime" + ) + """ + Time (epoch) at which this dashboard was last published, in milliseconds. + """ + + QUICK_SIGHT_DASHBOARD_FOLDERS: ClassVar[RelationField] = RelationField( + "quickSightDashboardFolders" + ) + """ + TBC + """ + QUICK_SIGHT_DASHBOARD_VISUALS: ClassVar[RelationField] = RelationField( + "quickSightDashboardVisuals" + ) + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "quick_sight_dashboard_published_version_number", + "quick_sight_dashboard_last_published_time", + "quick_sight_dashboard_folders", + "quick_sight_dashboard_visuals", + ] + + @property + def quick_sight_dashboard_published_version_number(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.quick_sight_dashboard_published_version_number + ) + + @quick_sight_dashboard_published_version_number.setter + def quick_sight_dashboard_published_version_number( + self, quick_sight_dashboard_published_version_number: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_dashboard_published_version_number = ( + quick_sight_dashboard_published_version_number + ) + + @property + def quick_sight_dashboard_last_published_time(self) -> Optional[datetime]: + return ( + None + if self.attributes is None + else self.attributes.quick_sight_dashboard_last_published_time + ) + + @quick_sight_dashboard_last_published_time.setter + def quick_sight_dashboard_last_published_time( + self, quick_sight_dashboard_last_published_time: Optional[datetime] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_dashboard_last_published_time = ( + quick_sight_dashboard_last_published_time + ) + + @property + def quick_sight_dashboard_folders(self) -> Optional[list[QuickSightFolder]]: + return ( + None + if self.attributes is None + else self.attributes.quick_sight_dashboard_folders + ) + + @quick_sight_dashboard_folders.setter + def quick_sight_dashboard_folders( + self, quick_sight_dashboard_folders: Optional[list[QuickSightFolder]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_dashboard_folders = quick_sight_dashboard_folders + + @property + def quick_sight_dashboard_visuals( + self, + ) -> Optional[list[QuickSightDashboardVisual]]: + return ( + None + if self.attributes is None 
+ else self.attributes.quick_sight_dashboard_visuals + ) + + @quick_sight_dashboard_visuals.setter + def quick_sight_dashboard_visuals( + self, quick_sight_dashboard_visuals: Optional[list[QuickSightDashboardVisual]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_dashboard_visuals = quick_sight_dashboard_visuals + + class Attributes(QuickSight.Attributes): + quick_sight_dashboard_published_version_number: Optional[int] = Field( + default=None, description="" + ) + quick_sight_dashboard_last_published_time: Optional[datetime] = Field( + default=None, description="" + ) + quick_sight_dashboard_folders: Optional[list[QuickSightFolder]] = Field( + default=None, description="" + ) # relationship + quick_sight_dashboard_visuals: Optional[ + list[QuickSightDashboardVisual] + ] = Field( + default=None, description="" + ) # relationship + + attributes: "QuickSightDashboard.Attributes" = Field( + default_factory=lambda: QuickSightDashboard.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .quick_sight_dashboard_visual import QuickSightDashboardVisual # noqa +from .quick_sight_folder import QuickSightFolder # noqa diff --git a/pyatlan/model/assets/quick_sight_dashboard_visual.py b/pyatlan/model/assets/quick_sight_dashboard_visual.py new file mode 100644 index 000000000..512d3724e --- /dev/null +++ b/pyatlan/model/assets/quick_sight_dashboard_visual.py @@ -0,0 +1,100 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordTextField, RelationField + +from .quick_sight import QuickSight + + +class QuickSightDashboardVisual(QuickSight): + """Description""" + + type_name: str = Field(default="QuickSightDashboardVisual", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "QuickSightDashboardVisual": + raise ValueError("must be QuickSightDashboardVisual") + return v + + def __setattr__(self, name, value): + if name in QuickSightDashboardVisual._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + QUICK_SIGHT_DASHBOARD_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "quickSightDashboardQualifiedName", + "quickSightDashboardQualifiedName", + "quickSightDashboardQualifiedName.text", + ) + """ + Unique name of the dashboard in which this visual exists. 
+ """ + + QUICK_SIGHT_DASHBOARD: ClassVar[RelationField] = RelationField( + "quickSightDashboard" + ) + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "quick_sight_dashboard_qualified_name", + "quick_sight_dashboard", + ] + + @property + def quick_sight_dashboard_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.quick_sight_dashboard_qualified_name + ) + + @quick_sight_dashboard_qualified_name.setter + def quick_sight_dashboard_qualified_name( + self, quick_sight_dashboard_qualified_name: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_dashboard_qualified_name = ( + quick_sight_dashboard_qualified_name + ) + + @property + def quick_sight_dashboard(self) -> Optional[QuickSightDashboard]: + return ( + None if self.attributes is None else self.attributes.quick_sight_dashboard + ) + + @quick_sight_dashboard.setter + def quick_sight_dashboard( + self, quick_sight_dashboard: Optional[QuickSightDashboard] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_dashboard = quick_sight_dashboard + + class Attributes(QuickSight.Attributes): + quick_sight_dashboard_qualified_name: Optional[str] = Field( + default=None, description="" + ) + quick_sight_dashboard: Optional[QuickSightDashboard] = Field( + default=None, description="" + ) # relationship + + attributes: "QuickSightDashboardVisual.Attributes" = Field( + default_factory=lambda: QuickSightDashboardVisual.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .quick_sight_dashboard import QuickSightDashboard # noqa diff --git a/pyatlan/model/assets/quick_sight_dataset.py b/pyatlan/model/assets/quick_sight_dataset.py new file mode 100644 index 000000000..73f35a713 --- /dev/null +++ b/pyatlan/model/assets/quick_sight_dataset.py @@ -0,0 +1,156 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.enums import QuickSightDatasetImportMode +from pyatlan.model.fields.atlan_fields import KeywordField, NumericField, RelationField + +from .quick_sight import QuickSight + + +class QuickSightDataset(QuickSight): + """Description""" + + type_name: str = Field(default="QuickSightDataset", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "QuickSightDataset": + raise ValueError("must be QuickSightDataset") + return v + + def __setattr__(self, name, value): + if name in QuickSightDataset._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + QUICK_SIGHT_DATASET_IMPORT_MODE: ClassVar[KeywordField] = KeywordField( + "quickSightDatasetImportMode", "quickSightDatasetImportMode" + ) + """ + Import mode for this dataset, for example: SPICE or DIRECT_QUERY. + """ + QUICK_SIGHT_DATASET_COLUMN_COUNT: ClassVar[NumericField] = NumericField( + "quickSightDatasetColumnCount", "quickSightDatasetColumnCount" + ) + """ + Number of columns present in this dataset. 
+ """ + + QUICK_SIGHT_DATASET_FOLDERS: ClassVar[RelationField] = RelationField( + "quickSightDatasetFolders" + ) + """ + TBC + """ + QUICK_SIGHT_DATASET_FIELDS: ClassVar[RelationField] = RelationField( + "quickSightDatasetFields" + ) + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "quick_sight_dataset_import_mode", + "quick_sight_dataset_column_count", + "quick_sight_dataset_folders", + "quick_sight_dataset_fields", + ] + + @property + def quick_sight_dataset_import_mode(self) -> Optional[QuickSightDatasetImportMode]: + return ( + None + if self.attributes is None + else self.attributes.quick_sight_dataset_import_mode + ) + + @quick_sight_dataset_import_mode.setter + def quick_sight_dataset_import_mode( + self, quick_sight_dataset_import_mode: Optional[QuickSightDatasetImportMode] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_dataset_import_mode = ( + quick_sight_dataset_import_mode + ) + + @property + def quick_sight_dataset_column_count(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.quick_sight_dataset_column_count + ) + + @quick_sight_dataset_column_count.setter + def quick_sight_dataset_column_count( + self, quick_sight_dataset_column_count: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_dataset_column_count = ( + quick_sight_dataset_column_count + ) + + @property + def quick_sight_dataset_folders(self) -> Optional[list[QuickSightFolder]]: + return ( + None + if self.attributes is None + else self.attributes.quick_sight_dataset_folders + ) + + @quick_sight_dataset_folders.setter + def quick_sight_dataset_folders( + self, quick_sight_dataset_folders: Optional[list[QuickSightFolder]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_dataset_folders = quick_sight_dataset_folders + + @property + def quick_sight_dataset_fields(self) -> Optional[list[QuickSightDatasetField]]: + return ( + None + if self.attributes is None + else self.attributes.quick_sight_dataset_fields + ) + + @quick_sight_dataset_fields.setter + def quick_sight_dataset_fields( + self, quick_sight_dataset_fields: Optional[list[QuickSightDatasetField]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_dataset_fields = quick_sight_dataset_fields + + class Attributes(QuickSight.Attributes): + quick_sight_dataset_import_mode: Optional[QuickSightDatasetImportMode] = Field( + default=None, description="" + ) + quick_sight_dataset_column_count: Optional[int] = Field( + default=None, description="" + ) + quick_sight_dataset_folders: Optional[list[QuickSightFolder]] = Field( + default=None, description="" + ) # relationship + quick_sight_dataset_fields: Optional[list[QuickSightDatasetField]] = Field( + default=None, description="" + ) # relationship + + attributes: "QuickSightDataset.Attributes" = Field( + default_factory=lambda: QuickSightDataset.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .quick_sight_dataset_field import QuickSightDatasetField # noqa +from .quick_sight_folder import QuickSightFolder # noqa diff --git a/pyatlan/model/assets/quick_sight_dataset_field.py b/pyatlan/model/assets/quick_sight_dataset_field.py new file mode 100644 index 000000000..58638403e --- /dev/null +++ b/pyatlan/model/assets/quick_sight_dataset_field.py @@ -0,0 +1,125 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.enums import QuickSightDatasetFieldType +from pyatlan.model.fields.atlan_fields import ( + KeywordField, + KeywordTextField, + RelationField, +) + +from .quick_sight import QuickSight + + +class QuickSightDatasetField(QuickSight): + """Description""" + + type_name: str = Field(default="QuickSightDatasetField", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "QuickSightDatasetField": + raise ValueError("must be QuickSightDatasetField") + return v + + def __setattr__(self, name, value): + if name in QuickSightDatasetField._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + QUICK_SIGHT_DATASET_FIELD_TYPE: ClassVar[KeywordField] = KeywordField( + "quickSightDatasetFieldType", "quickSightDatasetFieldType" + ) + """ + Datatype of this field, for example: STRING, INTEGER, etc. + """ + QUICK_SIGHT_DATASET_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "quickSightDatasetQualifiedName", + "quickSightDatasetQualifiedName", + "quickSightDatasetQualifiedName.text", + ) + """ + Unique name of the dataset in which this field exists. 
+ """ + + QUICK_SIGHT_DATASET: ClassVar[RelationField] = RelationField("quickSightDataset") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "quick_sight_dataset_field_type", + "quick_sight_dataset_qualified_name", + "quick_sight_dataset", + ] + + @property + def quick_sight_dataset_field_type(self) -> Optional[QuickSightDatasetFieldType]: + return ( + None + if self.attributes is None + else self.attributes.quick_sight_dataset_field_type + ) + + @quick_sight_dataset_field_type.setter + def quick_sight_dataset_field_type( + self, quick_sight_dataset_field_type: Optional[QuickSightDatasetFieldType] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_dataset_field_type = quick_sight_dataset_field_type + + @property + def quick_sight_dataset_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.quick_sight_dataset_qualified_name + ) + + @quick_sight_dataset_qualified_name.setter + def quick_sight_dataset_qualified_name( + self, quick_sight_dataset_qualified_name: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_dataset_qualified_name = ( + quick_sight_dataset_qualified_name + ) + + @property + def quick_sight_dataset(self) -> Optional[QuickSightDataset]: + return None if self.attributes is None else self.attributes.quick_sight_dataset + + @quick_sight_dataset.setter + def quick_sight_dataset(self, quick_sight_dataset: Optional[QuickSightDataset]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_dataset = quick_sight_dataset + + class Attributes(QuickSight.Attributes): + quick_sight_dataset_field_type: Optional[QuickSightDatasetFieldType] = Field( + default=None, description="" + ) + quick_sight_dataset_qualified_name: Optional[str] = Field( + default=None, description="" + ) + quick_sight_dataset: Optional[QuickSightDataset] = Field( + default=None, description="" + ) # relationship + + attributes: "QuickSightDatasetField.Attributes" = Field( + default_factory=lambda: QuickSightDatasetField.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .quick_sight_dataset import QuickSightDataset # noqa diff --git a/pyatlan/model/assets/quick_sight_folder.py b/pyatlan/model/assets/quick_sight_folder.py new file mode 100644 index 000000000..a68398ee6 --- /dev/null +++ b/pyatlan/model/assets/quick_sight_folder.py @@ -0,0 +1,163 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.enums import QuickSightFolderType +from pyatlan.model.fields.atlan_fields import KeywordField, RelationField + +from .quick_sight import QuickSight + + +class QuickSightFolder(QuickSight): + """Description""" + + type_name: str = Field(default="QuickSightFolder", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "QuickSightFolder": + raise ValueError("must be QuickSightFolder") + return v + + def __setattr__(self, name, value): + if name in QuickSightFolder._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + QUICK_SIGHT_FOLDER_TYPE: ClassVar[KeywordField] = KeywordField( + "quickSightFolderType", "quickSightFolderType" + ) + """ + Type of this folder, for example: SHARED. + """ + QUICK_SIGHT_FOLDER_HIERARCHY: ClassVar[KeywordField] = KeywordField( + "quickSightFolderHierarchy", "quickSightFolderHierarchy" + ) + """ + Detailed path of this folder. + """ + + QUICK_SIGHT_DASHBOARDS: ClassVar[RelationField] = RelationField( + "quickSightDashboards" + ) + """ + TBC + """ + QUICK_SIGHT_DATASETS: ClassVar[RelationField] = RelationField("quickSightDatasets") + """ + TBC + """ + QUICK_SIGHT_ANALYSES: ClassVar[RelationField] = RelationField("quickSightAnalyses") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "quick_sight_folder_type", + "quick_sight_folder_hierarchy", + "quick_sight_dashboards", + "quick_sight_datasets", + "quick_sight_analyses", + ] + + @property + def quick_sight_folder_type(self) -> Optional[QuickSightFolderType]: + return ( + None if self.attributes is None else self.attributes.quick_sight_folder_type + ) + + @quick_sight_folder_type.setter + def quick_sight_folder_type( + self, quick_sight_folder_type: Optional[QuickSightFolderType] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_folder_type = quick_sight_folder_type + + @property + def quick_sight_folder_hierarchy(self) -> Optional[list[dict[str, str]]]: + return ( + None + if self.attributes is None + else self.attributes.quick_sight_folder_hierarchy + ) + + @quick_sight_folder_hierarchy.setter + def quick_sight_folder_hierarchy( + self, quick_sight_folder_hierarchy: Optional[list[dict[str, str]]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_folder_hierarchy = quick_sight_folder_hierarchy + + @property + def quick_sight_dashboards(self) -> Optional[list[QuickSightDashboard]]: + return ( + None if self.attributes is None else self.attributes.quick_sight_dashboards + ) + + @quick_sight_dashboards.setter + def quick_sight_dashboards( + self, quick_sight_dashboards: Optional[list[QuickSightDashboard]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_dashboards = quick_sight_dashboards + + @property + def quick_sight_datasets(self) -> Optional[list[QuickSightDataset]]: + return None if self.attributes is None else self.attributes.quick_sight_datasets + + @quick_sight_datasets.setter + def quick_sight_datasets( + self, quick_sight_datasets: Optional[list[QuickSightDataset]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_datasets = quick_sight_datasets + + @property + def quick_sight_analyses(self) -> Optional[list[QuickSightAnalysis]]: + 
return None if self.attributes is None else self.attributes.quick_sight_analyses + + @quick_sight_analyses.setter + def quick_sight_analyses( + self, quick_sight_analyses: Optional[list[QuickSightAnalysis]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.quick_sight_analyses = quick_sight_analyses + + class Attributes(QuickSight.Attributes): + quick_sight_folder_type: Optional[QuickSightFolderType] = Field( + default=None, description="" + ) + quick_sight_folder_hierarchy: Optional[list[dict[str, str]]] = Field( + default=None, description="" + ) + quick_sight_dashboards: Optional[list[QuickSightDashboard]] = Field( + default=None, description="" + ) # relationship + quick_sight_datasets: Optional[list[QuickSightDataset]] = Field( + default=None, description="" + ) # relationship + quick_sight_analyses: Optional[list[QuickSightAnalysis]] = Field( + default=None, description="" + ) # relationship + + attributes: "QuickSightFolder.Attributes" = Field( + default_factory=lambda: QuickSightFolder.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .quick_sight_analysis import QuickSightAnalysis # noqa +from .quick_sight_dashboard import QuickSightDashboard # noqa +from .quick_sight_dataset import QuickSightDataset # noqa diff --git a/pyatlan/model/assets/readme.py b/pyatlan/model/assets/readme.py new file mode 100644 index 000000000..3f3d00f31 --- /dev/null +++ b/pyatlan/model/assets/readme.py @@ -0,0 +1,131 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional +from urllib.parse import quote, unquote + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import RelationField +from pyatlan.utils import init_guid, validate_required_fields + +from .resource import Resource + + +class Readme(Resource): + """Description""" + + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, *, asset: Asset, content: str, asset_name: Optional[str] = None + ) -> Readme: + return Readme( + attributes=Readme.Attributes.create( + asset=asset, content=content, asset_name=asset_name + ) + ) + + @property + def description(self) -> Optional[str]: + ret_value = self.attributes.description + return unquote(ret_value) if ret_value is not None else ret_value + + @description.setter + def description(self, description: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.description = ( + quote(description) if description is not None else description + ) + + type_name: str = Field(default="Readme", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "Readme": + raise ValueError("must be Readme") + return v + + def __setattr__(self, name, value): + if name in Readme._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + SEE_ALSO: ClassVar[RelationField] = RelationField("seeAlso") + """ + TBC + """ + ASSET: ClassVar[RelationField] = RelationField("asset") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "see_also", + "asset", + ] + + @property + def see_also(self) -> Optional[list[Readme]]: + return None if self.attributes is None else self.attributes.see_also + + @see_also.setter + def see_also(self, see_also: 
Optional[list[Readme]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.see_also = see_also + + @property + def asset(self) -> Optional[Asset]: + return None if self.attributes is None else self.attributes.asset + + @asset.setter + def asset(self, asset: Optional[Asset]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset = asset + + class Attributes(Resource.Attributes): + see_also: Optional[list[Readme]] = Field( + default=None, description="" + ) # relationship + asset: Optional[Asset] = Field(default=None, description="") # relationship + + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, *, asset: Asset, content: str, asset_name: Optional[str] = None + ) -> Readme.Attributes: + validate_required_fields(["asset", "content"], [asset, content]) + if not asset.name or len(asset.name) < 1: + if not asset_name: + raise ValueError( + "asset_name is required when name is not available from asset" + ) + elif asset_name: + raise ValueError( + "asset_name can not be given when name is available from asset" + ) + else: + asset_name = asset.name + return Readme.Attributes( + qualified_name=f"{asset.guid}/readme", + name=f"{asset_name} Readme", + asset=asset, + description=quote(content), + ) + + attributes: "Readme.Attributes" = Field( + default_factory=lambda: Readme.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .asset import Asset # noqa diff --git a/pyatlan/model/assets/asset55.py b/pyatlan/model/assets/readme_template.py similarity index 85% rename from pyatlan/model/assets/asset55.py rename to pyatlan/model/assets/readme_template.py index a3a6a5b11..1eb947157 100644 --- a/pyatlan/model/assets/asset55.py +++ b/pyatlan/model/assets/readme_template.py @@ -6,18 +6,18 @@ from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.enums import IconType from pyatlan.model.fields.atlan_fields import KeywordField -from .asset00 import Resource +from .resource import Resource class ReadmeTemplate(Resource): """Description""" - type_name: str = Field("ReadmeTemplate", allow_mutation=False) + type_name: str = Field(default="ReadmeTemplate", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -65,14 +65,11 @@ def icon_type(self, icon_type: Optional[IconType]): self.attributes.icon_type = icon_type class Attributes(Resource.Attributes): - icon: Optional[str] = Field(None, description="", alias="icon") - icon_type: Optional[IconType] = Field(None, description="", alias="iconType") + icon: Optional[str] = Field(default=None, description="") + icon_type: Optional[IconType] = Field(default=None, description="") attributes: "ReadmeTemplate.Attributes" = Field( default_factory=lambda: ReadmeTemplate.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) - - -ReadmeTemplate.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset45.py b/pyatlan/model/assets/redash.py similarity index 85% rename from pyatlan/model/assets/asset45.py rename to pyatlan/model/assets/redash.py index e4f055f9f..5c290219b 100644 --- a/pyatlan/model/assets/asset45.py +++ b/pyatlan/model/assets/redash.py @@ -6,17 +6,17 @@ from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.fields.atlan_fields import BooleanField -from .asset19 import BI +from .b_i import BI class Redash(BI): """Description""" - type_name: str = Field("Redash", allow_mutation=False) + type_name: str = Field(default="Redash", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -51,15 +51,10 @@ def redash_is_published(self, redash_is_published: Optional[bool]): self.attributes.redash_is_published = redash_is_published class Attributes(BI.Attributes): - redash_is_published: Optional[bool] = Field( - None, description="", alias="redashIsPublished" - ) + redash_is_published: Optional[bool] = Field(default=None, description="") attributes: "Redash.Attributes" = Field( default_factory=lambda: Redash.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) - - -Redash.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset72.py b/pyatlan/model/assets/redash_dashboard.py similarity index 88% rename from pyatlan/model/assets/asset72.py rename to pyatlan/model/assets/redash_dashboard.py index d86d1c283..6f1607a88 100644 --- a/pyatlan/model/assets/asset72.py +++ b/pyatlan/model/assets/redash_dashboard.py @@ -6,17 +6,17 @@ from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.fields.atlan_fields import NumericField -from .asset45 import Redash +from .redash import Redash class RedashDashboard(Redash): """Description""" - type_name: str = Field("RedashDashboard", allow_mutation=False) + type_name: str = Field(default="RedashDashboard", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -58,7 +58,7 @@ def redash_dashboard_widget_count( class Attributes(Redash.Attributes): redash_dashboard_widget_count: Optional[int] = Field( - None, description="", alias="redashDashboardWidgetCount" + default=None, description="" ) attributes: "RedashDashboard.Attributes" = Field( @@ -66,6 +66,3 @@ class Attributes(Redash.Attributes): description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) - - -RedashDashboard.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset73.py b/pyatlan/model/assets/redash_query.py similarity index 57% rename from pyatlan/model/assets/asset73.py rename to pyatlan/model/assets/redash_query.py index 10c355279..cae7c9bc2 100644 --- a/pyatlan/model/assets/asset73.py +++ b/pyatlan/model/assets/redash_query.py @@ -7,7 +7,7 @@ from datetime import datetime from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.fields.atlan_fields import ( KeywordField, @@ -16,13 +16,13 @@ RelationField, ) -from .asset45 import Redash +from .redash import Redash class RedashQuery(Redash): """Description""" - type_name: str = Field("RedashQuery", allow_mutation=False) + type_name: str = Field(default="RedashQuery", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -192,26 +192,22 @@ def redash_visualizations( self.attributes.redash_visualizations = redash_visualizations class Attributes(Redash.Attributes): - redash_query_s_q_l: Optional[str] = Field( - None, description="", alias="redashQuerySQL" - ) - redash_query_parameters: Optional[str] = Field( - None, description="", alias="redashQueryParameters" - ) + redash_query_s_q_l: Optional[str] = Field(default=None, description="") + redash_query_parameters: Optional[str] = Field(default=None, description="") redash_query_schedule: Optional[dict[str, str]] = Field( - None, description="", alias="redashQuerySchedule" + default=None, description="" ) redash_query_last_execution_runtime: Optional[float] = Field( - None, description="", alias="redashQueryLastExecutionRuntime" + default=None, description="" ) redash_query_last_executed_at: Optional[datetime] = Field( - None, description="", alias="redashQueryLastExecutedAt" + default=None, description="" ) redash_query_schedule_humanized: Optional[str] = Field( - None, description="", alias="redashQueryScheduleHumanized" + default=None, description="" ) redash_visualizations: Optional[list[RedashVisualization]] = Field( - None, description="", alias="redashVisualizations" + default=None, description="" ) # relationship attributes: "RedashQuery.Attributes" = Field( @@ -221,125 +217,4 @@ class Attributes(Redash.Attributes): ) -class RedashVisualization(Redash): - """Description""" - - type_name: str = Field("RedashVisualization", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "RedashVisualization": - raise ValueError("must be RedashVisualization") - return v - - def __setattr__(self, name, value): - if name in RedashVisualization._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - REDASH_VISUALIZATION_TYPE: ClassVar[KeywordField] = KeywordField( - "redashVisualizationType", "redashVisualizationType" - ) - """ - Type of this visualization. - """ - REDASH_QUERY_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "redashQueryName", "redashQueryName.keyword", "redashQueryName" - ) - """ - Simple name of the query from which this visualization is created. - """ - REDASH_QUERY_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "redashQueryQualifiedName", - "redashQueryQualifiedName", - "redashQueryQualifiedName.text", - ) - """ - Unique name of the query from which this visualization is created. 
- """ - - REDASH_QUERY: ClassVar[RelationField] = RelationField("redashQuery") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "redash_visualization_type", - "redash_query_name", - "redash_query_qualified_name", - "redash_query", - ] - - @property - def redash_visualization_type(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.redash_visualization_type - ) - - @redash_visualization_type.setter - def redash_visualization_type(self, redash_visualization_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.redash_visualization_type = redash_visualization_type - - @property - def redash_query_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.redash_query_name - - @redash_query_name.setter - def redash_query_name(self, redash_query_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.redash_query_name = redash_query_name - - @property - def redash_query_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.redash_query_qualified_name - ) - - @redash_query_qualified_name.setter - def redash_query_qualified_name(self, redash_query_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.redash_query_qualified_name = redash_query_qualified_name - - @property - def redash_query(self) -> Optional[RedashQuery]: - return None if self.attributes is None else self.attributes.redash_query - - @redash_query.setter - def redash_query(self, redash_query: Optional[RedashQuery]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.redash_query = redash_query - - class Attributes(Redash.Attributes): - redash_visualization_type: Optional[str] = Field( - None, description="", alias="redashVisualizationType" - ) - redash_query_name: Optional[str] = Field( - None, description="", alias="redashQueryName" - ) - redash_query_qualified_name: Optional[str] = Field( - None, description="", alias="redashQueryQualifiedName" - ) - redash_query: Optional[RedashQuery] = Field( - None, description="", alias="redashQuery" - ) # relationship - - attributes: "RedashVisualization.Attributes" = Field( - default_factory=lambda: RedashVisualization.Attributes(), - description="Map of attributes in the instance and their values. The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) - - -RedashQuery.Attributes.update_forward_refs() - - -RedashVisualization.Attributes.update_forward_refs() +from .redash_visualization import RedashVisualization # noqa diff --git a/pyatlan/model/assets/redash_visualization.py b/pyatlan/model/assets/redash_visualization.py new file mode 100644 index 000000000..be5971396 --- /dev/null +++ b/pyatlan/model/assets/redash_visualization.py @@ -0,0 +1,132 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import ( + KeywordField, + KeywordTextField, + RelationField, +) + +from .redash import Redash + + +class RedashVisualization(Redash): + """Description""" + + type_name: str = Field(default="RedashVisualization", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "RedashVisualization": + raise ValueError("must be RedashVisualization") + return v + + def __setattr__(self, name, value): + if name in RedashVisualization._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + REDASH_VISUALIZATION_TYPE: ClassVar[KeywordField] = KeywordField( + "redashVisualizationType", "redashVisualizationType" + ) + """ + Type of this visualization. + """ + REDASH_QUERY_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "redashQueryName", "redashQueryName.keyword", "redashQueryName" + ) + """ + Simple name of the query from which this visualization is created. + """ + REDASH_QUERY_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "redashQueryQualifiedName", + "redashQueryQualifiedName", + "redashQueryQualifiedName.text", + ) + """ + Unique name of the query from which this visualization is created. + """ + + REDASH_QUERY: ClassVar[RelationField] = RelationField("redashQuery") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "redash_visualization_type", + "redash_query_name", + "redash_query_qualified_name", + "redash_query", + ] + + @property + def redash_visualization_type(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.redash_visualization_type + ) + + @redash_visualization_type.setter + def redash_visualization_type(self, redash_visualization_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.redash_visualization_type = redash_visualization_type + + @property + def redash_query_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.redash_query_name + + @redash_query_name.setter + def redash_query_name(self, redash_query_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.redash_query_name = redash_query_name + + @property + def redash_query_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.redash_query_qualified_name + ) + + @redash_query_qualified_name.setter + def redash_query_qualified_name(self, redash_query_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.redash_query_qualified_name = redash_query_qualified_name + + @property + def redash_query(self) -> Optional[RedashQuery]: + return None if self.attributes is None else self.attributes.redash_query + + @redash_query.setter + def redash_query(self, redash_query: Optional[RedashQuery]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.redash_query = redash_query + + class Attributes(Redash.Attributes): + redash_visualization_type: Optional[str] = Field(default=None, description="") + redash_query_name: Optional[str] = Field(default=None, description="") + redash_query_qualified_name: Optional[str] = Field(default=None, description="") + redash_query: Optional[RedashQuery] = Field( + default=None, 
description="" + ) # relationship + + attributes: "RedashVisualization.Attributes" = Field( + default_factory=lambda: RedashVisualization.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .redash_query import RedashQuery # noqa diff --git a/pyatlan/model/assets/referenceable.py b/pyatlan/model/assets/referenceable.py new file mode 100644 index 000000000..39e29d3c2 --- /dev/null +++ b/pyatlan/model/assets/referenceable.py @@ -0,0 +1,294 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import Any, ClassVar, Optional + +from pydantic.v1 import Field, PrivateAttr + +from pyatlan.model.core import AtlanObject, AtlanTag, Meaning +from pyatlan.model.custom_metadata import CustomMetadataDict, CustomMetadataProxy +from pyatlan.model.enums import EntityStatus, SaveSemantic +from pyatlan.model.fields.atlan_fields import ( + InternalKeywordField, + InternalKeywordTextField, + InternalNumericField, + KeywordField, + KeywordTextField, + NumericField, +) + + +class Referenceable(AtlanObject): + """Description""" + + def __init__(__pydantic_self__, **data: Any) -> None: + super().__init__(**data) + __pydantic_self__.__fields_set__.update(["attributes", "type_name"]) + __pydantic_self__._metadata_proxy = CustomMetadataProxy( + __pydantic_self__.business_attributes + ) + + def json(self, *args, **kwargs) -> str: + self.business_attributes = self._metadata_proxy.business_attributes + return super().json(**kwargs) + + def validate_required(self): + if not self.create_time or self.created_by: + self.attributes.validate_required() + + def get_custom_metadata(self, name: str) -> CustomMetadataDict: + return self._metadata_proxy.get_custom_metadata(name=name) + + def set_custom_metadata(self, custom_metadata: CustomMetadataDict): + return self._metadata_proxy.set_custom_metadata(custom_metadata=custom_metadata) + + def flush_custom_metadata(self): + self.business_attributes = self._metadata_proxy.business_attributes + + @classmethod + def can_be_archived(self) -> bool: + """ + Indicates if an asset can be archived via the asset.delete_by_guid method. 
+ :returns: True if archiving is supported + """ + return True + + @property + def atlan_tag_names(self) -> list[str]: + from pyatlan.cache.atlan_tag_cache import AtlanTagCache + from pyatlan.model.constants import DELETED_ + + if self.classification_names: + return [ + AtlanTagCache.get_name_for_id(tag_id) or DELETED_ + for tag_id in self.classification_names + ] + return [] + + def __setattr__(self, name, value): + if name in Referenceable._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + _convenience_properties: ClassVar[list[str]] = [ + "qualified_name", + "assigned_terms", + ] + + @property + def qualified_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.qualified_name + + @qualified_name.setter + def qualified_name(self, qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.qualified_name = qualified_name + + @property + def assigned_terms(self) -> Optional[list[AtlasGlossaryTerm]]: + return None if self.attributes is None else self.attributes.meanings + + @assigned_terms.setter + def assigned_terms(self, assigned_terms: Optional[list[AtlasGlossaryTerm]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.meanings = assigned_terms + + class Attributes(AtlanObject): + qualified_name: Optional[str] = Field(default="", description="") + meanings: Optional[list[AtlasGlossaryTerm]] = Field( + default=None, description="" + ) # relationship + + def validate_required(self): + pass + + TYPE_NAME: ClassVar[KeywordTextField] = InternalKeywordTextField( + "typeName", "__typeName.keyword", "__typeName", "__typeName" + ) + """Type of the asset. For example Table, Column, and so on.""" + + GUID: ClassVar[KeywordField] = InternalKeywordField("guid", "__guid", "__guid") + """Globally unique identifier (GUID) of any object in Atlan.""" + + CREATED_BY: ClassVar[KeywordField] = InternalKeywordField( + "createdBy", "__createdBy", "__createdBy" + ) + """Atlan user who created this asset.""" + + UPDATED_BY: ClassVar[KeywordField] = InternalKeywordField( + "updatedBy", "__modifiedBy", "__modifiedBy" + ) + """Atlan user who last updated the asset.""" + + STATUS: ClassVar[KeywordField] = InternalKeywordField( + "status", "__state", "__state" + ) + """Asset status in Atlan (active vs deleted).""" + + ATLAN_TAGS: ClassVar[KeywordTextField] = InternalKeywordTextField( + "classificationNames", + "__traitNames", + "__classificationsText", + "__classificationNames", + ) + """ + All directly-assigned Atlan tags that exist on an asset, searchable by internal hashed-string ID of the Atlan tag. 
+ """ + + PROPAGATED_ATLAN_TAGS: ClassVar[KeywordTextField] = InternalKeywordTextField( + "classificationNames", + "__propagatedTraitNames", + "__classificationsText", + "__propagatedClassificationNames", + ) + """All propagated Atlan tags that exist on an asset, searchable by internal hashed-string ID of the Atlan tag.""" + + ASSIGNED_TERMS: ClassVar[KeywordTextField] = InternalKeywordTextField( + "meanings", "__meanings", "__meaningsText", "__meanings" + ) + """All terms attached to an asset, searchable by the term's qualifiedName.""" + + SUPER_TYPE_NAMES: ClassVar[KeywordTextField] = InternalKeywordTextField( + "typeName", "__superTypeNames.keyword", "__superTypeNames", "__superTypeNames" + ) + """All super types of an asset.""" + + CREATE_TIME: ClassVar[NumericField] = InternalNumericField( + "createTime", "__timestamp", "__timestamp" + ) + """Time (in milliseconds) when the asset was created.""" + + UPDATE_TIME: ClassVar[NumericField] = InternalNumericField( + "updateTime", "__modificationTimestamp", "__modificationTimestamp" + ) + """Time (in milliseconds) when the asset was last updated.""" + + QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "qualifiedName", "qualifiedName", "qualifiedName.text" + ) + """Unique fully-qualified name of the asset in Atlan.""" + + type_name: str = Field( + default="Referenceable", + description="Name of the type definition that defines this instance.", + ) + _metadata_proxy: CustomMetadataProxy = PrivateAttr() + attributes: Referenceable.Attributes = Field( + default_factory=lambda: Referenceable.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary " + "by type, so are described in the sub-types of this schema.", + ) + business_attributes: Optional[dict[str, Any]] = Field( + default=None, + description="Map of custom metadata attributes and values defined on the entity.", + ) + created_by: Optional[str] = Field( + default=None, + description="Username of the user who created the object.", + example="jsmith", + ) + create_time: Optional[int] = Field( + default=None, + description="Time (epoch) at which this object was created, in milliseconds.", + example=1648852296555, + ) + delete_handler: Optional[str] = Field( + default=None, + description="Details on the handler used for deletion of the asset.", + example="Hard", + ) + guid: str = Field( + default="", + description="Unique identifier of the entity instance.", + example="917ffec9-fa84-4c59-8e6c-c7b114d04be3", + ) + is_incomplete: Optional[bool] = Field(default=True, description="", example=True) + labels: Optional[list[str]] = Field(default=None, description="Internal use only.") + relationship_attributes: Optional[dict[str, Any]] = Field( + default=None, + description="Map of relationships for the entity. 
The specific keys of this map will vary by type, " + "so are described in the sub-types of this schema.", + ) + status: Optional[EntityStatus] = Field( + default=None, description="Status of the entity", example=EntityStatus.ACTIVE + ) + updated_by: Optional[str] = Field( + default=None, + description="Username of the user who last assets_updated the object.", + example="jsmith", + ) + update_time: Optional[int] = Field( + default=None, + description="Time (epoch) at which this object was last assets_updated, in milliseconds.", + example=1649172284333, + ) + version: Optional[int] = Field( + default=None, description="Version of this object.", example=2 + ) + atlan_tags: Optional[list[AtlanTag]] = Field( + default=None, + description="Atlan tags", + ) + classification_names: Optional[list[str]] = Field( + default=None, + description="The names of the classifications that exist on the asset.", + ) + display_text: Optional[str] = Field( + default=None, + description="Human-readable name of the entity..", + ) + entity_status: Optional[str] = Field( + default=None, + description="Status of the entity (if this is a related entity).", + ) + relationship_guid: Optional[str] = Field( + default=None, + description="Unique identifier of the relationship (when this is a related entity).", + ) + relationship_status: Optional[str] = Field( + default=None, + description="Status of the relationship (when this is a related entity).", + ) + relationship_type: Optional[str] = Field( + default=None, + description="Status of the relationship (when this is a related entity).", + ) + meaning_names: Optional[list[str]] = Field( + default=None, + description="Names of assigned_terms that have been linked to this asset.", + ) + meanings: Optional[list[Meaning]] = Field(default=None, description="") + custom_attributes: Optional[dict[str, Any]] = Field(default=None, description="") + scrubbed: Optional[bool] = Field(default=None, description="") + pending_tasks: Optional[list[str]] = Field(default=None) + + unique_attributes: Optional[dict[str, Any]] = Field(default=None) + + append_relationship_attributes: Optional[dict[str, Any]] = Field( + default=None, + description="Map of append relationship attributes.", + ) + remove_relationship_attributes: Optional[dict[str, Any]] = Field( + default=None, + description="Map of remove relationship attributes.", + ) + semantic: Optional[SaveSemantic] = Field( + default=None, + exclude=True, + description=( + "Semantic for how this relationship should be saved, " + "if used in an asset request on which `.save()` is called." + ), + ) + + +# Imports required for fixing circular dependencies: +from .asset import Asset # noqa # isort:skip + + +from .atlas_glossary_term import AtlasGlossaryTerm # noqa diff --git a/pyatlan/model/assets/resource.py b/pyatlan/model/assets/resource.py new file mode 100644 index 000000000..3548a8090 --- /dev/null +++ b/pyatlan/model/assets/resource.py @@ -0,0 +1,110 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import BooleanField, KeywordField + +from .catalog import Catalog + + +class Resource(Catalog): + """Description""" + + type_name: str = Field(default="Resource", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "Resource": + raise ValueError("must be Resource") + return v + + def __setattr__(self, name, value): + if name in Resource._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + LINK: ClassVar[KeywordField] = KeywordField("link", "link") + """ + URL to the resource. + """ + IS_GLOBAL: ClassVar[BooleanField] = BooleanField("isGlobal", "isGlobal") + """ + Whether the resource is global (true) or not (false). + """ + REFERENCE: ClassVar[KeywordField] = KeywordField("reference", "reference") + """ + Reference to the resource. + """ + RESOURCE_METADATA: ClassVar[KeywordField] = KeywordField( + "resourceMetadata", "resourceMetadata" + ) + """ + Metadata of the resource. + """ + + _convenience_properties: ClassVar[list[str]] = [ + "link", + "is_global", + "reference", + "resource_metadata", + ] + + @property + def link(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.link + + @link.setter + def link(self, link: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.link = link + + @property + def is_global(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_global + + @is_global.setter + def is_global(self, is_global: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_global = is_global + + @property + def reference(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.reference + + @reference.setter + def reference(self, reference: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.reference = reference + + @property + def resource_metadata(self) -> Optional[dict[str, str]]: + return None if self.attributes is None else self.attributes.resource_metadata + + @resource_metadata.setter + def resource_metadata(self, resource_metadata: Optional[dict[str, str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.resource_metadata = resource_metadata + + class Attributes(Catalog.Attributes): + link: Optional[str] = Field(default=None, description="") + is_global: Optional[bool] = Field(default=None, description="") + reference: Optional[str] = Field(default=None, description="") + resource_metadata: Optional[dict[str, str]] = Field( + default=None, description="" + ) + + attributes: "Resource.Attributes" = Field( + default_factory=lambda: Resource.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) diff --git a/pyatlan/model/assets/asset35.py b/pyatlan/model/assets/s3.py similarity index 85% rename from pyatlan/model/assets/asset35.py rename to pyatlan/model/assets/s3.py index 2f7b1d0c6..5b4004dbc 100644 --- a/pyatlan/model/assets/asset35.py +++ b/pyatlan/model/assets/s3.py @@ -6,18 +6,18 @@ from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.fields.atlan_fields import KeywordField, KeywordTextField from pyatlan.model.structs import AwsTag -from .asset17 import ObjectStore +from .object_store import ObjectStore class S3(ObjectStore): """Description""" - type_name: str = Field("S3", allow_mutation=False) + type_name: str = Field(default="S3", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -210,29 +210,20 @@ def aws_tags(self, aws_tags: Optional[list[AwsTag]]): self.attributes.aws_tags = aws_tags class Attributes(ObjectStore.Attributes): - s3_e_tag: Optional[str] = Field(None, description="", alias="s3ETag") - s3_encryption: Optional[str] = Field(None, description="", alias="s3Encryption") - aws_arn: Optional[str] = Field(None, description="", alias="awsArn") - aws_partition: Optional[str] = Field(None, description="", alias="awsPartition") - aws_service: Optional[str] = Field(None, description="", alias="awsService") - aws_region: Optional[str] = Field(None, description="", alias="awsRegion") - aws_account_id: Optional[str] = Field( - None, description="", alias="awsAccountId" - ) - aws_resource_id: Optional[str] = Field( - None, description="", alias="awsResourceId" - ) - aws_owner_name: Optional[str] = Field( - None, description="", alias="awsOwnerName" - ) - aws_owner_id: Optional[str] = Field(None, description="", alias="awsOwnerId") - aws_tags: Optional[list[AwsTag]] = Field(None, description="", alias="awsTags") + s3_e_tag: Optional[str] = Field(default=None, description="") + s3_encryption: Optional[str] = Field(default=None, description="") + aws_arn: Optional[str] = Field(default=None, description="") + aws_partition: Optional[str] = Field(default=None, description="") + aws_service: Optional[str] = Field(default=None, description="") + aws_region: Optional[str] = Field(default=None, description="") + aws_account_id: Optional[str] = Field(default=None, description="") + aws_resource_id: Optional[str] = Field(default=None, description="") + aws_owner_name: Optional[str] = Field(default=None, description="") + aws_owner_id: Optional[str] = Field(default=None, description="") + aws_tags: Optional[list[AwsTag]] = Field(default=None, description="") attributes: "S3.Attributes" = Field( default_factory=lambda: S3.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) - - -S3.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/s3_bucket.py b/pyatlan/model/assets/s3_bucket.py new file mode 100644 index 000000000..f61620c75 --- /dev/null +++ b/pyatlan/model/assets/s3_bucket.py @@ -0,0 +1,184 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
+ + +from __future__ import annotations + +from typing import ClassVar, Optional, overload + +from pydantic.v1 import Field, validator + +from pyatlan.model.enums import AtlanConnectorType +from pyatlan.model.fields.atlan_fields import BooleanField, NumericField, RelationField +from pyatlan.utils import init_guid, validate_required_fields + +from .s3 import S3 + + +class S3Bucket(S3): + """Description""" + + @overload + @classmethod + @init_guid + def create( + cls, + *, + name: str, + connection_qualified_name: str, + aws_arn: str, + ) -> S3Bucket: + ... + + @overload + @classmethod + @init_guid + def create( + cls, + *, + name: str, + connection_qualified_name: str, + aws_arn: Optional[str] = None, + ) -> S3Bucket: + ... + + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, *, name: str, connection_qualified_name: str, aws_arn: Optional[str] = None + ) -> S3Bucket: + validate_required_fields( + ["name", "connection_qualified_name"], + [name, connection_qualified_name], + ) + attributes = S3Bucket.Attributes.create( + name=name, + connection_qualified_name=connection_qualified_name, + aws_arn=aws_arn, + ) + return cls(attributes=attributes) + + type_name: str = Field(default="S3Bucket", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "S3Bucket": + raise ValueError("must be S3Bucket") + return v + + def __setattr__(self, name, value): + if name in S3Bucket._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + S3OBJECT_COUNT: ClassVar[NumericField] = NumericField( + "s3ObjectCount", "s3ObjectCount" + ) + """ + Number of objects within the bucket. + """ + S3BUCKET_VERSIONING_ENABLED: ClassVar[BooleanField] = BooleanField( + "s3BucketVersioningEnabled", "s3BucketVersioningEnabled" + ) + """ + Whether versioning is enabled for the bucket (true) or not (false). 
+ """ + + OBJECTS: ClassVar[RelationField] = RelationField("objects") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "s3_object_count", + "s3_bucket_versioning_enabled", + "objects", + ] + + @property + def s3_object_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.s3_object_count + + @s3_object_count.setter + def s3_object_count(self, s3_object_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.s3_object_count = s3_object_count + + @property + def s3_bucket_versioning_enabled(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.s3_bucket_versioning_enabled + ) + + @s3_bucket_versioning_enabled.setter + def s3_bucket_versioning_enabled( + self, s3_bucket_versioning_enabled: Optional[bool] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.s3_bucket_versioning_enabled = s3_bucket_versioning_enabled + + @property + def objects(self) -> Optional[list[S3Object]]: + return None if self.attributes is None else self.attributes.objects + + @objects.setter + def objects(self, objects: Optional[list[S3Object]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.objects = objects + + class Attributes(S3.Attributes): + s3_object_count: Optional[int] = Field(default=None, description="") + s3_bucket_versioning_enabled: Optional[bool] = Field( + default=None, description="" + ) + objects: Optional[list[S3Object]] = Field( + default=None, description="" + ) # relationship + + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, + *, + name: str, + connection_qualified_name: str, + aws_arn: Optional[str] = None, + ) -> S3Bucket.Attributes: + validate_required_fields( + ["name", "connection_qualified_name"], + [name, connection_qualified_name], + ) + fields = connection_qualified_name.split("/") + if len(fields) != 3: + raise ValueError("Invalid connection_qualified_name") + try: + if fields[0].replace(" ", "") == "" or fields[2].replace(" ", "") == "": + raise ValueError("Invalid connection_qualified_name") + connector_type = AtlanConnectorType(fields[1]) # type:ignore + if connector_type != AtlanConnectorType.S3: + raise ValueError("Connector type must be s3") + except ValueError as e: + raise ValueError("Invalid connection_qualified_name") from e + return S3Bucket.Attributes( + aws_arn=aws_arn, + name=name, + connection_qualified_name=connection_qualified_name, + qualified_name=f"{connection_qualified_name}/{aws_arn if aws_arn else name}", + connector_name=connector_type.value, + ) + + attributes: "S3Bucket.Attributes" = Field( + default_factory=lambda: S3Bucket.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .s3_object import S3Object # noqa diff --git a/pyatlan/model/assets/asset62.py b/pyatlan/model/assets/s3_object.py similarity index 66% rename from pyatlan/model/assets/asset62.py rename to pyatlan/model/assets/s3_object.py index 8b13c3a1b..ca8b0f255 100644 --- a/pyatlan/model/assets/asset62.py +++ b/pyatlan/model/assets/s3_object.py @@ -5,13 +5,12 @@ from __future__ import annotations from datetime import datetime -from typing import ClassVar, Optional, overload +from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.enums import AtlanConnectorType from pyatlan.model.fields.atlan_fields import ( - BooleanField, KeywordField, KeywordTextField, NumericField, @@ -19,175 +18,7 @@ ) from pyatlan.utils import init_guid, validate_required_fields -from .asset35 import S3 - - -class S3Bucket(S3): - """Description""" - - @overload - @classmethod - @init_guid - def create( - cls, - *, - name: str, - connection_qualified_name: str, - aws_arn: str, - ) -> S3Bucket: - ... - - @overload - @classmethod - @init_guid - def create( - cls, - *, - name: str, - connection_qualified_name: str, - aws_arn: Optional[str] = None, - ) -> S3Bucket: - ... - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, *, name: str, connection_qualified_name: str, aws_arn: Optional[str] = None - ) -> S3Bucket: - validate_required_fields( - ["name", "connection_qualified_name"], - [name, connection_qualified_name], - ) - attributes = S3Bucket.Attributes.create( - name=name, - connection_qualified_name=connection_qualified_name, - aws_arn=aws_arn, - ) - return cls(attributes=attributes) - - type_name: str = Field("S3Bucket", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "S3Bucket": - raise ValueError("must be S3Bucket") - return v - - def __setattr__(self, name, value): - if name in S3Bucket._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - S3OBJECT_COUNT: ClassVar[NumericField] = NumericField( - "s3ObjectCount", "s3ObjectCount" - ) - """ - Number of objects within the bucket. - """ - S3BUCKET_VERSIONING_ENABLED: ClassVar[BooleanField] = BooleanField( - "s3BucketVersioningEnabled", "s3BucketVersioningEnabled" - ) - """ - Whether versioning is enabled for the bucket (true) or not (false). 
- """ - - OBJECTS: ClassVar[RelationField] = RelationField("objects") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "s3_object_count", - "s3_bucket_versioning_enabled", - "objects", - ] - - @property - def s3_object_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.s3_object_count - - @s3_object_count.setter - def s3_object_count(self, s3_object_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.s3_object_count = s3_object_count - - @property - def s3_bucket_versioning_enabled(self) -> Optional[bool]: - return ( - None - if self.attributes is None - else self.attributes.s3_bucket_versioning_enabled - ) - - @s3_bucket_versioning_enabled.setter - def s3_bucket_versioning_enabled( - self, s3_bucket_versioning_enabled: Optional[bool] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.s3_bucket_versioning_enabled = s3_bucket_versioning_enabled - - @property - def objects(self) -> Optional[list[S3Object]]: - return None if self.attributes is None else self.attributes.objects - - @objects.setter - def objects(self, objects: Optional[list[S3Object]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.objects = objects - - class Attributes(S3.Attributes): - s3_object_count: Optional[int] = Field( - None, description="", alias="s3ObjectCount" - ) - s3_bucket_versioning_enabled: Optional[bool] = Field( - None, description="", alias="s3BucketVersioningEnabled" - ) - objects: Optional[list[S3Object]] = Field( - None, description="", alias="objects" - ) # relationship - - @classmethod - # @validate_arguments() - @init_guid - def create( - cls, - *, - name: str, - connection_qualified_name: str, - aws_arn: Optional[str] = None, - ) -> S3Bucket.Attributes: - validate_required_fields( - ["name", "connection_qualified_name"], - [name, connection_qualified_name], - ) - fields = connection_qualified_name.split("/") - if len(fields) != 3: - raise ValueError("Invalid connection_qualified_name") - try: - if fields[0].replace(" ", "") == "" or fields[2].replace(" ", "") == "": - raise ValueError("Invalid connection_qualified_name") - connector_type = AtlanConnectorType(fields[1]) # type:ignore - if connector_type != AtlanConnectorType.S3: - raise ValueError("Connector type must be s3") - except ValueError as e: - raise ValueError("Invalid connection_qualified_name") from e - return S3Bucket.Attributes( - aws_arn=aws_arn, - name=name, - connection_qualified_name=connection_qualified_name, - qualified_name=f"{connection_qualified_name}/{aws_arn if aws_arn else name}", - connector_name=connector_type.value, - ) - - attributes: "S3Bucket.Attributes" = Field( - default_factory=lambda: S3Bucket.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) +from .s3 import S3 class S3Object(S3): @@ -249,7 +80,7 @@ def create_with_prefix( ) return cls(attributes=attributes) - type_name: str = Field("S3Object", allow_mutation=False) + type_name: str = Field(default="S3Object", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -455,33 +286,19 @@ def bucket(self, bucket: Optional[S3Bucket]): class Attributes(S3.Attributes): s3_object_last_modified_time: Optional[datetime] = Field( - None, description="", alias="s3ObjectLastModifiedTime" - ) - s3_bucket_name: Optional[str] = Field( - None, description="", alias="s3BucketName" - ) - s3_bucket_qualified_name: Optional[str] = Field( - None, description="", alias="s3BucketQualifiedName" - ) - s3_object_size: Optional[int] = Field( - None, description="", alias="s3ObjectSize" - ) - s3_object_storage_class: Optional[str] = Field( - None, description="", alias="s3ObjectStorageClass" - ) - s3_object_key: Optional[str] = Field(None, description="", alias="s3ObjectKey") - s3_object_content_type: Optional[str] = Field( - None, description="", alias="s3ObjectContentType" + default=None, description="" ) + s3_bucket_name: Optional[str] = Field(default=None, description="") + s3_bucket_qualified_name: Optional[str] = Field(default=None, description="") + s3_object_size: Optional[int] = Field(default=None, description="") + s3_object_storage_class: Optional[str] = Field(default=None, description="") + s3_object_key: Optional[str] = Field(default=None, description="") + s3_object_content_type: Optional[str] = Field(default=None, description="") s3_object_content_disposition: Optional[str] = Field( - None, description="", alias="s3ObjectContentDisposition" + default=None, description="" ) - s3_object_version_id: Optional[str] = Field( - None, description="", alias="s3ObjectVersionId" - ) - bucket: Optional[S3Bucket] = Field( - None, description="", alias="bucket" - ) # relationship + s3_object_version_id: Optional[str] = Field(default=None, description="") + bucket: Optional[S3Bucket] = Field(default=None, description="") # relationship @classmethod # @validate_arguments() @@ -573,7 +390,4 @@ def create_with_prefix( ) -S3Bucket.Attributes.update_forward_refs() - - -S3Object.Attributes.update_forward_refs() +from .s3_bucket import S3Bucket # noqa diff --git a/pyatlan/model/assets/s_q_l.py b/pyatlan/model/assets/s_q_l.py new file mode 100644 index 000000000..fd57a4499 --- /dev/null +++ b/pyatlan/model/assets/s_q_l.py @@ -0,0 +1,399 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from datetime import datetime +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import ( + BooleanField, + KeywordField, + KeywordTextField, + NumericField, + RelationField, +) + +from .catalog import Catalog + + +class SQL(Catalog): + """Description""" + + type_name: str = Field(default="SQL", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "SQL": + raise ValueError("must be SQL") + return v + + def __setattr__(self, name, value): + if name in SQL._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + QUERY_COUNT: ClassVar[NumericField] = NumericField("queryCount", "queryCount") + """ + Number of times this asset has been queried. 
+ """ + QUERY_USER_COUNT: ClassVar[NumericField] = NumericField( + "queryUserCount", "queryUserCount" + ) + """ + Number of unique users who have queried this asset. + """ + QUERY_USER_MAP: ClassVar[KeywordField] = KeywordField( + "queryUserMap", "queryUserMap" + ) + """ + Map of unique users who have queried this asset to the number of times they have queried it. + """ + QUERY_COUNT_UPDATED_AT: ClassVar[NumericField] = NumericField( + "queryCountUpdatedAt", "queryCountUpdatedAt" + ) + """ + Time (epoch) at which the query count was last updated, in milliseconds. + """ + DATABASE_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "databaseName", "databaseName.keyword", "databaseName" + ) + """ + Simple name of the database in which this SQL asset exists, or empty if it does not exist within a database. + """ + DATABASE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "databaseQualifiedName", "databaseQualifiedName" + ) + """ + Unique name of the database in which this SQL asset exists, or empty if it does not exist within a database. + """ + SCHEMA_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "schemaName", "schemaName.keyword", "schemaName" + ) + """ + Simple name of the schema in which this SQL asset exists, or empty if it does not exist within a schema. + """ + SCHEMA_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "schemaQualifiedName", "schemaQualifiedName" + ) + """ + Unique name of the schema in which this SQL asset exists, or empty if it does not exist within a schema. + """ + TABLE_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "tableName", "tableName.keyword", "tableName" + ) + """ + Simple name of the table in which this SQL asset exists, or empty if it does not exist within a table. + """ + TABLE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "tableQualifiedName", "tableQualifiedName" + ) + """ + Unique name of the table in which this SQL asset exists, or empty if it does not exist within a table. + """ + VIEW_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "viewName", "viewName.keyword", "viewName" + ) + """ + Simple name of the view in which this SQL asset exists, or empty if it does not exist within a view. + """ + VIEW_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "viewQualifiedName", "viewQualifiedName" + ) + """ + Unique name of the view in which this SQL asset exists, or empty if it does not exist within a view. + """ + IS_PROFILED: ClassVar[BooleanField] = BooleanField("isProfiled", "isProfiled") + """ + Whether this asset has been profiled (true) or not (false). + """ + LAST_PROFILED_AT: ClassVar[NumericField] = NumericField( + "lastProfiledAt", "lastProfiledAt" + ) + """ + Time (epoch) at which this asset was last profiled, in milliseconds. 
+ """ + + DBT_SOURCES: ClassVar[RelationField] = RelationField("dbtSources") + """ + TBC + """ + SQL_DBT_MODELS: ClassVar[RelationField] = RelationField("sqlDbtModels") + """ + TBC + """ + SQL_DBT_SOURCES: ClassVar[RelationField] = RelationField("sqlDBTSources") + """ + TBC + """ + DBT_MODELS: ClassVar[RelationField] = RelationField("dbtModels") + """ + TBC + """ + DBT_TESTS: ClassVar[RelationField] = RelationField("dbtTests") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "query_count", + "query_user_count", + "query_user_map", + "query_count_updated_at", + "database_name", + "database_qualified_name", + "schema_name", + "schema_qualified_name", + "table_name", + "table_qualified_name", + "view_name", + "view_qualified_name", + "is_profiled", + "last_profiled_at", + "dbt_sources", + "sql_dbt_models", + "sql_dbt_sources", + "dbt_models", + "dbt_tests", + ] + + @property + def query_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.query_count + + @query_count.setter + def query_count(self, query_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_count = query_count + + @property + def query_user_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.query_user_count + + @query_user_count.setter + def query_user_count(self, query_user_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_user_count = query_user_count + + @property + def query_user_map(self) -> Optional[dict[str, int]]: + return None if self.attributes is None else self.attributes.query_user_map + + @query_user_map.setter + def query_user_map(self, query_user_map: Optional[dict[str, int]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_user_map = query_user_map + + @property + def query_count_updated_at(self) -> Optional[datetime]: + return ( + None if self.attributes is None else self.attributes.query_count_updated_at + ) + + @query_count_updated_at.setter + def query_count_updated_at(self, query_count_updated_at: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_count_updated_at = query_count_updated_at + + @property + def database_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.database_name + + @database_name.setter + def database_name(self, database_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.database_name = database_name + + @property + def database_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.database_qualified_name + ) + + @database_qualified_name.setter + def database_qualified_name(self, database_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.database_qualified_name = database_qualified_name + + @property + def schema_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.schema_name + + @schema_name.setter + def schema_name(self, schema_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.schema_name = schema_name + + @property + def schema_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else 
self.attributes.schema_qualified_name + ) + + @schema_qualified_name.setter + def schema_qualified_name(self, schema_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.schema_qualified_name = schema_qualified_name + + @property + def table_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.table_name + + @table_name.setter + def table_name(self, table_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.table_name = table_name + + @property + def table_qualified_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.table_qualified_name + + @table_qualified_name.setter + def table_qualified_name(self, table_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.table_qualified_name = table_qualified_name + + @property + def view_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.view_name + + @view_name.setter + def view_name(self, view_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.view_name = view_name + + @property + def view_qualified_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.view_qualified_name + + @view_qualified_name.setter + def view_qualified_name(self, view_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.view_qualified_name = view_qualified_name + + @property + def is_profiled(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_profiled + + @is_profiled.setter + def is_profiled(self, is_profiled: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_profiled = is_profiled + + @property + def last_profiled_at(self) -> Optional[datetime]: + return None if self.attributes is None else self.attributes.last_profiled_at + + @last_profiled_at.setter + def last_profiled_at(self, last_profiled_at: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.last_profiled_at = last_profiled_at + + @property + def dbt_sources(self) -> Optional[list[DbtSource]]: + return None if self.attributes is None else self.attributes.dbt_sources + + @dbt_sources.setter + def dbt_sources(self, dbt_sources: Optional[list[DbtSource]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_sources = dbt_sources + + @property + def sql_dbt_models(self) -> Optional[list[DbtModel]]: + return None if self.attributes is None else self.attributes.sql_dbt_models + + @sql_dbt_models.setter + def sql_dbt_models(self, sql_dbt_models: Optional[list[DbtModel]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sql_dbt_models = sql_dbt_models + + @property + def sql_dbt_sources(self) -> Optional[list[DbtSource]]: + return None if self.attributes is None else self.attributes.sql_dbt_sources + + @sql_dbt_sources.setter + def sql_dbt_sources(self, sql_dbt_sources: Optional[list[DbtSource]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sql_dbt_sources = sql_dbt_sources + + @property + def dbt_models(self) -> Optional[list[DbtModel]]: + return None if self.attributes is None else self.attributes.dbt_models 
+ + @dbt_models.setter + def dbt_models(self, dbt_models: Optional[list[DbtModel]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_models = dbt_models + + @property + def dbt_tests(self) -> Optional[list[DbtTest]]: + return None if self.attributes is None else self.attributes.dbt_tests + + @dbt_tests.setter + def dbt_tests(self, dbt_tests: Optional[list[DbtTest]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_tests = dbt_tests + + class Attributes(Catalog.Attributes): + query_count: Optional[int] = Field(default=None, description="") + query_user_count: Optional[int] = Field(default=None, description="") + query_user_map: Optional[dict[str, int]] = Field(default=None, description="") + query_count_updated_at: Optional[datetime] = Field(default=None, description="") + database_name: Optional[str] = Field(default=None, description="") + database_qualified_name: Optional[str] = Field(default=None, description="") + schema_name: Optional[str] = Field(default=None, description="") + schema_qualified_name: Optional[str] = Field(default=None, description="") + table_name: Optional[str] = Field(default=None, description="") + table_qualified_name: Optional[str] = Field(default=None, description="") + view_name: Optional[str] = Field(default=None, description="") + view_qualified_name: Optional[str] = Field(default=None, description="") + is_profiled: Optional[bool] = Field(default=None, description="") + last_profiled_at: Optional[datetime] = Field(default=None, description="") + dbt_sources: Optional[list[DbtSource]] = Field( + default=None, description="" + ) # relationship + sql_dbt_models: Optional[list[DbtModel]] = Field( + default=None, description="" + ) # relationship + sql_dbt_sources: Optional[list[DbtSource]] = Field( + default=None, description="" + ) # relationship + dbt_models: Optional[list[DbtModel]] = Field( + default=None, description="" + ) # relationship + dbt_tests: Optional[list[DbtTest]] = Field( + default=None, description="" + ) # relationship + + attributes: "SQL.Attributes" = Field( + default_factory=lambda: SQL.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .dbt_model import DbtModel # noqa +from .dbt_source import DbtSource # noqa +from .dbt_test import DbtTest # noqa diff --git a/pyatlan/model/assets/asset20.py b/pyatlan/model/assets/saa_s.py similarity index 78% rename from pyatlan/model/assets/asset20.py rename to pyatlan/model/assets/saa_s.py index 25218db82..048627a4b 100644 --- a/pyatlan/model/assets/asset20.py +++ b/pyatlan/model/assets/saa_s.py @@ -6,15 +6,15 @@ from typing import ClassVar -from pydantic import Field, validator +from pydantic.v1 import Field, validator -from .asset00 import Catalog +from .catalog import Catalog class SaaS(Catalog): """Description""" - type_name: str = Field("SaaS", allow_mutation=False) + type_name: str = Field(default="SaaS", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -28,6 +28,3 @@ def __setattr__(self, name, value): super().__setattr__(name, value) _convenience_properties: ClassVar[list[str]] = [] - - -SaaS.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset54.py b/pyatlan/model/assets/salesforce.py similarity index 85% rename from pyatlan/model/assets/asset54.py rename to pyatlan/model/assets/salesforce.py index bad1391be..f706c2cb0 100644 --- a/pyatlan/model/assets/asset54.py +++ b/pyatlan/model/assets/salesforce.py @@ -6,17 +6,17 @@ from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.fields.atlan_fields import KeywordField, KeywordTextField -from .asset20 import SaaS +from .saa_s import SaaS class Salesforce(SaaS): """Description""" - type_name: str = Field("Salesforce", allow_mutation=False) + type_name: str = Field(default="Salesforce", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -72,16 +72,11 @@ def api_name(self, api_name: Optional[str]): self.attributes.api_name = api_name class Attributes(SaaS.Attributes): - organization_qualified_name: Optional[str] = Field( - None, description="", alias="organizationQualifiedName" - ) - api_name: Optional[str] = Field(None, description="", alias="apiName") + organization_qualified_name: Optional[str] = Field(default=None, description="") + api_name: Optional[str] = Field(default=None, description="") attributes: "Salesforce.Attributes" = Field( default_factory=lambda: Salesforce.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) - - -Salesforce.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/salesforce_dashboard.py b/pyatlan/model/assets/salesforce_dashboard.py new file mode 100644 index 000000000..3c287ba88 --- /dev/null +++ b/pyatlan/model/assets/salesforce_dashboard.py @@ -0,0 +1,133 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
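A minimal sketch (not part of the patch) of the lazy-initialization pattern every convenience setter in the SQL hunk above follows: assigning through a generated property creates the nested Attributes object on first use before storing the value. Here `asset` is assumed to be any SQL-subtype instance already obtained from the SDK.

from datetime import datetime, timezone

def tag_as_profiled(asset):
    # Each generated setter runs `self.attributes = self.Attributes()` when attributes
    # is still None, so plain assignment works even on a freshly constructed asset.
    asset.is_profiled = True
    asset.last_profiled_at = datetime.now(timezone.utc)
    return asset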
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, NumericField, RelationField + +from .salesforce import Salesforce + + +class SalesforceDashboard(Salesforce): + """Description""" + + type_name: str = Field(default="SalesforceDashboard", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "SalesforceDashboard": + raise ValueError("must be SalesforceDashboard") + return v + + def __setattr__(self, name, value): + if name in SalesforceDashboard._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + SOURCE_ID: ClassVar[KeywordField] = KeywordField("sourceId", "sourceId") + """ + Identifier of the dashboard in Salesforce. + """ + DASHBOARD_TYPE: ClassVar[KeywordField] = KeywordField( + "dashboardType", "dashboardType" + ) + """ + Type of dashboard in Salesforce. + """ + REPORT_COUNT: ClassVar[NumericField] = NumericField("reportCount", "reportCount") + """ + Number of reports linked to the dashboard in Salesforce. + """ + + REPORTS: ClassVar[RelationField] = RelationField("reports") + """ + TBC + """ + ORGANIZATION: ClassVar[RelationField] = RelationField("organization") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "source_id", + "dashboard_type", + "report_count", + "reports", + "organization", + ] + + @property + def source_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.source_id + + @source_id.setter + def source_id(self, source_id: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_id = source_id + + @property + def dashboard_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.dashboard_type + + @dashboard_type.setter + def dashboard_type(self, dashboard_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dashboard_type = dashboard_type + + @property + def report_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.report_count + + @report_count.setter + def report_count(self, report_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.report_count = report_count + + @property + def reports(self) -> Optional[list[SalesforceReport]]: + return None if self.attributes is None else self.attributes.reports + + @reports.setter + def reports(self, reports: Optional[list[SalesforceReport]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.reports = reports + + @property + def organization(self) -> Optional[SalesforceOrganization]: + return None if self.attributes is None else self.attributes.organization + + @organization.setter + def organization(self, organization: Optional[SalesforceOrganization]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.organization = organization + + class Attributes(Salesforce.Attributes): + source_id: Optional[str] = Field(default=None, description="") + dashboard_type: Optional[str] = Field(default=None, description="") + report_count: Optional[int] = Field(default=None, description="") + reports: Optional[list[SalesforceReport]] = Field( + default=None, description="" + ) # relationship + organization: Optional[SalesforceOrganization] = Field( + 
default=None, description="" + ) # relationship + + attributes: "SalesforceDashboard.Attributes" = Field( + default_factory=lambda: SalesforceDashboard.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .salesforce_organization import SalesforceOrganization # noqa +from .salesforce_report import SalesforceReport # noqa diff --git a/pyatlan/model/assets/salesforce_field.py b/pyatlan/model/assets/salesforce_field.py new file mode 100644 index 000000000..816e2d579 --- /dev/null +++ b/pyatlan/model/assets/salesforce_field.py @@ -0,0 +1,367 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import ( + BooleanField, + KeywordField, + KeywordTextField, + NumericField, + RelationField, + TextField, +) + +from .salesforce import Salesforce + + +class SalesforceField(Salesforce): + """Description""" + + type_name: str = Field(default="SalesforceField", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "SalesforceField": + raise ValueError("must be SalesforceField") + return v + + def __setattr__(self, name, value): + if name in SalesforceField._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + DATA_TYPE: ClassVar[KeywordTextField] = KeywordTextField( + "dataType", "dataType", "dataType.text" + ) + """ + Data type of values in this field. + """ + OBJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "objectQualifiedName", "objectQualifiedName" + ) + """ + Unique name of the object in which this field exists. + """ + ORDER: ClassVar[NumericField] = NumericField("order", "order") + """ + Order (position) of this field within the object. + """ + INLINE_HELP_TEXT: ClassVar[TextField] = TextField( + "inlineHelpText", "inlineHelpText.text" + ) + """ + Help text for this field. + """ + IS_CALCULATED: ClassVar[BooleanField] = BooleanField("isCalculated", "isCalculated") + """ + Whether this field is calculated (true) or not (false). + """ + FORMULA: ClassVar[KeywordField] = KeywordField("formula", "formula") + """ + Formula for this field, if it is a calculated field. + """ + IS_CASE_SENSITIVE: ClassVar[BooleanField] = BooleanField( + "isCaseSensitive", "isCaseSensitive" + ) + """ + Whether this field is case sensitive (true) or in-sensitive (false). + """ + IS_ENCRYPTED: ClassVar[BooleanField] = BooleanField("isEncrypted", "isEncrypted") + """ + Whether this field is encrypted (true) or not (false). + """ + MAX_LENGTH: ClassVar[NumericField] = NumericField("maxLength", "maxLength") + """ + Maximum length of this field. + """ + IS_NULLABLE: ClassVar[BooleanField] = BooleanField("isNullable", "isNullable") + """ + Whether this field allows null values (true) or not (false). + """ + PRECISION: ClassVar[NumericField] = NumericField("precision", "precision") + """ + Total number of digits allowed + """ + NUMERIC_SCALE: ClassVar[NumericField] = NumericField("numericScale", "numericScale") + """ + Number of digits allowed to the right of the decimal point. + """ + IS_UNIQUE: ClassVar[BooleanField] = BooleanField("isUnique", "isUnique") + """ + Whether this field must have unique values (true) or not (false). 
+ """ + PICKLIST_VALUES: ClassVar[KeywordField] = KeywordField( + "picklistValues", "picklistValues" + ) + """ + List of values from which a user can pick while adding a record. + """ + IS_POLYMORPHIC_FOREIGN_KEY: ClassVar[BooleanField] = BooleanField( + "isPolymorphicForeignKey", "isPolymorphicForeignKey" + ) + """ + Whether this field references a record of multiple objects (true) or not (false). + """ + DEFAULT_VALUE_FORMULA: ClassVar[KeywordField] = KeywordField( + "defaultValueFormula", "defaultValueFormula" + ) + """ + Formula for the default value for this field. + """ + + LOOKUP_OBJECTS: ClassVar[RelationField] = RelationField("lookupObjects") + """ + TBC + """ + OBJECT: ClassVar[RelationField] = RelationField("object") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "data_type", + "object_qualified_name", + "order", + "inline_help_text", + "is_calculated", + "formula", + "is_case_sensitive", + "is_encrypted", + "max_length", + "is_nullable", + "precision", + "numeric_scale", + "is_unique", + "picklist_values", + "is_polymorphic_foreign_key", + "default_value_formula", + "lookup_objects", + "object", + ] + + @property + def data_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.data_type + + @data_type.setter + def data_type(self, data_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.data_type = data_type + + @property + def object_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.object_qualified_name + ) + + @object_qualified_name.setter + def object_qualified_name(self, object_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.object_qualified_name = object_qualified_name + + @property + def order(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.order + + @order.setter + def order(self, order: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.order = order + + @property + def inline_help_text(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.inline_help_text + + @inline_help_text.setter + def inline_help_text(self, inline_help_text: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.inline_help_text = inline_help_text + + @property + def is_calculated(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_calculated + + @is_calculated.setter + def is_calculated(self, is_calculated: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_calculated = is_calculated + + @property + def formula(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.formula + + @formula.setter + def formula(self, formula: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.formula = formula + + @property + def is_case_sensitive(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_case_sensitive + + @is_case_sensitive.setter + def is_case_sensitive(self, is_case_sensitive: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_case_sensitive = is_case_sensitive + + @property + def is_encrypted(self) -> Optional[bool]: + return None if 
self.attributes is None else self.attributes.is_encrypted + + @is_encrypted.setter + def is_encrypted(self, is_encrypted: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_encrypted = is_encrypted + + @property + def max_length(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.max_length + + @max_length.setter + def max_length(self, max_length: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.max_length = max_length + + @property + def is_nullable(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_nullable + + @is_nullable.setter + def is_nullable(self, is_nullable: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_nullable = is_nullable + + @property + def precision(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.precision + + @precision.setter + def precision(self, precision: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.precision = precision + + @property + def numeric_scale(self) -> Optional[float]: + return None if self.attributes is None else self.attributes.numeric_scale + + @numeric_scale.setter + def numeric_scale(self, numeric_scale: Optional[float]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.numeric_scale = numeric_scale + + @property + def is_unique(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_unique + + @is_unique.setter + def is_unique(self, is_unique: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_unique = is_unique + + @property + def picklist_values(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.picklist_values + + @picklist_values.setter + def picklist_values(self, picklist_values: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.picklist_values = picklist_values + + @property + def is_polymorphic_foreign_key(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.is_polymorphic_foreign_key + ) + + @is_polymorphic_foreign_key.setter + def is_polymorphic_foreign_key(self, is_polymorphic_foreign_key: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_polymorphic_foreign_key = is_polymorphic_foreign_key + + @property + def default_value_formula(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.default_value_formula + ) + + @default_value_formula.setter + def default_value_formula(self, default_value_formula: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.default_value_formula = default_value_formula + + @property + def lookup_objects(self) -> Optional[list[SalesforceObject]]: + return None if self.attributes is None else self.attributes.lookup_objects + + @lookup_objects.setter + def lookup_objects(self, lookup_objects: Optional[list[SalesforceObject]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.lookup_objects = lookup_objects + + @property + def object(self) -> Optional[SalesforceObject]: + return None if self.attributes is None else self.attributes.object + + 
@object.setter + def object(self, object: Optional[SalesforceObject]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.object = object + + class Attributes(Salesforce.Attributes): + data_type: Optional[str] = Field(default=None, description="") + object_qualified_name: Optional[str] = Field(default=None, description="") + order: Optional[int] = Field(default=None, description="") + inline_help_text: Optional[str] = Field(default=None, description="") + is_calculated: Optional[bool] = Field(default=None, description="") + formula: Optional[str] = Field(default=None, description="") + is_case_sensitive: Optional[bool] = Field(default=None, description="") + is_encrypted: Optional[bool] = Field(default=None, description="") + max_length: Optional[int] = Field(default=None, description="") + is_nullable: Optional[bool] = Field(default=None, description="") + precision: Optional[int] = Field(default=None, description="") + numeric_scale: Optional[float] = Field(default=None, description="") + is_unique: Optional[bool] = Field(default=None, description="") + picklist_values: Optional[set[str]] = Field(default=None, description="") + is_polymorphic_foreign_key: Optional[bool] = Field(default=None, description="") + default_value_formula: Optional[str] = Field(default=None, description="") + lookup_objects: Optional[list[SalesforceObject]] = Field( + default=None, description="" + ) # relationship + object: Optional[SalesforceObject] = Field( + default=None, description="" + ) # relationship + + attributes: "SalesforceField.Attributes" = Field( + default_factory=lambda: SalesforceField.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .salesforce_object import SalesforceObject # noqa diff --git a/pyatlan/model/assets/salesforce_object.py b/pyatlan/model/assets/salesforce_object.py new file mode 100644 index 000000000..31c494f24 --- /dev/null +++ b/pyatlan/model/assets/salesforce_object.py @@ -0,0 +1,165 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import BooleanField, NumericField, RelationField + +from .salesforce import Salesforce + + +class SalesforceObject(Salesforce): + """Description""" + + type_name: str = Field(default="SalesforceObject", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "SalesforceObject": + raise ValueError("must be SalesforceObject") + return v + + def __setattr__(self, name, value): + if name in SalesforceObject._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + IS_CUSTOM: ClassVar[BooleanField] = BooleanField("isCustom", "isCustom") + """ + Whether this object is a custom object (true) or not (false). + """ + IS_MERGABLE: ClassVar[BooleanField] = BooleanField("isMergable", "isMergable") + """ + Whether this object is mergable (true) or not (false). + """ + IS_QUERYABLE: ClassVar[BooleanField] = BooleanField("isQueryable", "isQueryable") + """ + Whether this object is queryable (true) or not (false). + """ + FIELD_COUNT: ClassVar[NumericField] = NumericField("fieldCount", "fieldCount") + """ + Number of fields in this object. 
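A small sketch (not from the patch) of how the reciprocal relationship setters in this pair of files might be used together: SalesforceField.object above and SalesforceObject.fields below. Both arguments are assumed to be existing instances, and this only exercises the generated properties, not any save or update flow.

def attach_field(sf_object, sf_field):
    sf_field.object = sf_object                               # field -> object side
    sf_object.fields = (sf_object.fields or []) + [sf_field]  # object -> fields side
    return sf_object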
+ """ + + LOOKUP_FIELDS: ClassVar[RelationField] = RelationField("lookupFields") + """ + TBC + """ + ORGANIZATION: ClassVar[RelationField] = RelationField("organization") + """ + TBC + """ + FIELDS: ClassVar[RelationField] = RelationField("fields") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "is_custom", + "is_mergable", + "is_queryable", + "field_count", + "lookup_fields", + "organization", + "fields", + ] + + @property + def is_custom(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_custom + + @is_custom.setter + def is_custom(self, is_custom: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_custom = is_custom + + @property + def is_mergable(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_mergable + + @is_mergable.setter + def is_mergable(self, is_mergable: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_mergable = is_mergable + + @property + def is_queryable(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_queryable + + @is_queryable.setter + def is_queryable(self, is_queryable: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_queryable = is_queryable + + @property + def field_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.field_count + + @field_count.setter + def field_count(self, field_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.field_count = field_count + + @property + def lookup_fields(self) -> Optional[list[SalesforceField]]: + return None if self.attributes is None else self.attributes.lookup_fields + + @lookup_fields.setter + def lookup_fields(self, lookup_fields: Optional[list[SalesforceField]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.lookup_fields = lookup_fields + + @property + def organization(self) -> Optional[SalesforceOrganization]: + return None if self.attributes is None else self.attributes.organization + + @organization.setter + def organization(self, organization: Optional[SalesforceOrganization]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.organization = organization + + @property + def fields(self) -> Optional[list[SalesforceField]]: + return None if self.attributes is None else self.attributes.fields + + @fields.setter + def fields(self, fields: Optional[list[SalesforceField]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.fields = fields + + class Attributes(Salesforce.Attributes): + is_custom: Optional[bool] = Field(default=None, description="") + is_mergable: Optional[bool] = Field(default=None, description="") + is_queryable: Optional[bool] = Field(default=None, description="") + field_count: Optional[int] = Field(default=None, description="") + lookup_fields: Optional[list[SalesforceField]] = Field( + default=None, description="" + ) # relationship + organization: Optional[SalesforceOrganization] = Field( + default=None, description="" + ) # relationship + fields: Optional[list[SalesforceField]] = Field( + default=None, description="" + ) # relationship + + attributes: "SalesforceObject.Attributes" = Field( + default_factory=lambda: SalesforceObject.Attributes(), + description="Map of attributes in the 
instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .salesforce_field import SalesforceField # noqa +from .salesforce_organization import SalesforceOrganization # noqa diff --git a/pyatlan/model/assets/salesforce_organization.py b/pyatlan/model/assets/salesforce_organization.py new file mode 100644 index 000000000..323fe50d3 --- /dev/null +++ b/pyatlan/model/assets/salesforce_organization.py @@ -0,0 +1,118 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, RelationField + +from .salesforce import Salesforce + + +class SalesforceOrganization(Salesforce): + """Description""" + + type_name: str = Field(default="SalesforceOrganization", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "SalesforceOrganization": + raise ValueError("must be SalesforceOrganization") + return v + + def __setattr__(self, name, value): + if name in SalesforceOrganization._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + SOURCE_ID: ClassVar[KeywordField] = KeywordField("sourceId", "sourceId") + """ + Identifier of the organization in Salesforce. + """ + + REPORTS: ClassVar[RelationField] = RelationField("reports") + """ + TBC + """ + OBJECTS: ClassVar[RelationField] = RelationField("objects") + """ + TBC + """ + DASHBOARDS: ClassVar[RelationField] = RelationField("dashboards") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "source_id", + "reports", + "objects", + "dashboards", + ] + + @property + def source_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.source_id + + @source_id.setter + def source_id(self, source_id: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_id = source_id + + @property + def reports(self) -> Optional[list[SalesforceReport]]: + return None if self.attributes is None else self.attributes.reports + + @reports.setter + def reports(self, reports: Optional[list[SalesforceReport]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.reports = reports + + @property + def objects(self) -> Optional[list[SalesforceObject]]: + return None if self.attributes is None else self.attributes.objects + + @objects.setter + def objects(self, objects: Optional[list[SalesforceObject]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.objects = objects + + @property + def dashboards(self) -> Optional[list[SalesforceDashboard]]: + return None if self.attributes is None else self.attributes.dashboards + + @dashboards.setter + def dashboards(self, dashboards: Optional[list[SalesforceDashboard]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dashboards = dashboards + + class Attributes(Salesforce.Attributes): + source_id: Optional[str] = Field(default=None, description="") + reports: Optional[list[SalesforceReport]] = Field( + default=None, description="" + ) # relationship + objects: Optional[list[SalesforceObject]] = Field( + default=None, description="" + ) # relationship + dashboards: Optional[list[SalesforceDashboard]] = Field( + default=None, description="" + ) # 
relationship + + attributes: "SalesforceOrganization.Attributes" = Field( + default_factory=lambda: SalesforceOrganization.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .salesforce_dashboard import SalesforceDashboard # noqa +from .salesforce_object import SalesforceObject # noqa +from .salesforce_report import SalesforceReport # noqa diff --git a/pyatlan/model/assets/salesforce_report.py b/pyatlan/model/assets/salesforce_report.py new file mode 100644 index 000000000..74f372600 --- /dev/null +++ b/pyatlan/model/assets/salesforce_report.py @@ -0,0 +1,133 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, RelationField + +from .salesforce import Salesforce + + +class SalesforceReport(Salesforce): + """Description""" + + type_name: str = Field(default="SalesforceReport", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "SalesforceReport": + raise ValueError("must be SalesforceReport") + return v + + def __setattr__(self, name, value): + if name in SalesforceReport._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + SOURCE_ID: ClassVar[KeywordField] = KeywordField("sourceId", "sourceId") + """ + Identifier of the report in Salesforce. + """ + REPORT_TYPE: ClassVar[KeywordField] = KeywordField("reportType", "reportType") + """ + Type of report in Salesforce. + """ + DETAIL_COLUMNS: ClassVar[KeywordField] = KeywordField( + "detailColumns", "detailColumns" + ) + """ + List of column names on the report. 
+ """ + + DASHBOARDS: ClassVar[RelationField] = RelationField("dashboards") + """ + TBC + """ + ORGANIZATION: ClassVar[RelationField] = RelationField("organization") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "source_id", + "report_type", + "detail_columns", + "dashboards", + "organization", + ] + + @property + def source_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.source_id + + @source_id.setter + def source_id(self, source_id: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.source_id = source_id + + @property + def report_type(self) -> Optional[dict[str, str]]: + return None if self.attributes is None else self.attributes.report_type + + @report_type.setter + def report_type(self, report_type: Optional[dict[str, str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.report_type = report_type + + @property + def detail_columns(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.detail_columns + + @detail_columns.setter + def detail_columns(self, detail_columns: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.detail_columns = detail_columns + + @property + def dashboards(self) -> Optional[list[SalesforceDashboard]]: + return None if self.attributes is None else self.attributes.dashboards + + @dashboards.setter + def dashboards(self, dashboards: Optional[list[SalesforceDashboard]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dashboards = dashboards + + @property + def organization(self) -> Optional[SalesforceOrganization]: + return None if self.attributes is None else self.attributes.organization + + @organization.setter + def organization(self, organization: Optional[SalesforceOrganization]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.organization = organization + + class Attributes(Salesforce.Attributes): + source_id: Optional[str] = Field(default=None, description="") + report_type: Optional[dict[str, str]] = Field(default=None, description="") + detail_columns: Optional[set[str]] = Field(default=None, description="") + dashboards: Optional[list[SalesforceDashboard]] = Field( + default=None, description="" + ) # relationship + organization: Optional[SalesforceOrganization] = Field( + default=None, description="" + ) # relationship + + attributes: "SalesforceReport.Attributes" = Field( + default_factory=lambda: SalesforceReport.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .salesforce_dashboard import SalesforceDashboard # noqa +from .salesforce_organization import SalesforceOrganization # noqa diff --git a/pyatlan/model/assets/schema.py b/pyatlan/model/assets/schema.py new file mode 100644 index 000000000..0b22371cf --- /dev/null +++ b/pyatlan/model/assets/schema.py @@ -0,0 +1,317 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.enums import AtlanConnectorType +from pyatlan.model.fields.atlan_fields import NumericField, RelationField +from pyatlan.utils import init_guid, validate_required_fields + +from .s_q_l import SQL + + +class Schema(SQL): + """Description""" + + @classmethod + # @validate_arguments() + @init_guid + def create(cls, *, name: str, database_qualified_name: str) -> Schema: + validate_required_fields( + ["name", "database_qualified_name"], [name, database_qualified_name] + ) + attributes = Schema.Attributes.create( + name=name, database_qualified_name=database_qualified_name + ) + return cls(attributes=attributes) + + type_name: str = Field(default="Schema", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "Schema": + raise ValueError("must be Schema") + return v + + def __setattr__(self, name, value): + if name in Schema._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + TABLE_COUNT: ClassVar[NumericField] = NumericField("tableCount", "tableCount") + """ + Number of tables in this schema. + """ + VIEWS_COUNT: ClassVar[NumericField] = NumericField("viewsCount", "viewsCount") + """ + Number of views in this schema. + """ + + SNOWFLAKE_TAGS: ClassVar[RelationField] = RelationField("snowflakeTags") + """ + TBC + """ + FUNCTIONS: ClassVar[RelationField] = RelationField("functions") + """ + TBC + """ + TABLES: ClassVar[RelationField] = RelationField("tables") + """ + TBC + """ + DATABASE: ClassVar[RelationField] = RelationField("database") + """ + TBC + """ + PROCEDURES: ClassVar[RelationField] = RelationField("procedures") + """ + TBC + """ + VIEWS: ClassVar[RelationField] = RelationField("views") + """ + TBC + """ + MATERIALISED_VIEWS: ClassVar[RelationField] = RelationField("materialisedViews") + """ + TBC + """ + SNOWFLAKE_DYNAMIC_TABLES: ClassVar[RelationField] = RelationField( + "snowflakeDynamicTables" + ) + """ + TBC + """ + SNOWFLAKE_PIPES: ClassVar[RelationField] = RelationField("snowflakePipes") + """ + TBC + """ + SNOWFLAKE_STREAMS: ClassVar[RelationField] = RelationField("snowflakeStreams") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "table_count", + "views_count", + "snowflake_tags", + "functions", + "tables", + "database", + "procedures", + "views", + "materialised_views", + "snowflake_dynamic_tables", + "snowflake_pipes", + "snowflake_streams", + ] + + @property + def table_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.table_count + + @table_count.setter + def table_count(self, table_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.table_count = table_count + + @property + def views_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.views_count + + @views_count.setter + def views_count(self, views_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.views_count = views_count + + @property + def snowflake_tags(self) -> Optional[list[SnowflakeTag]]: + return None if self.attributes is None else self.attributes.snowflake_tags + + @snowflake_tags.setter + def snowflake_tags(self, snowflake_tags: Optional[list[SnowflakeTag]]): + if self.attributes is None: + self.attributes = self.Attributes() + 
self.attributes.snowflake_tags = snowflake_tags + + @property + def functions(self) -> Optional[list[Function]]: + return None if self.attributes is None else self.attributes.functions + + @functions.setter + def functions(self, functions: Optional[list[Function]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.functions = functions + + @property + def tables(self) -> Optional[list[Table]]: + return None if self.attributes is None else self.attributes.tables + + @tables.setter + def tables(self, tables: Optional[list[Table]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.tables = tables + + @property + def database(self) -> Optional[Database]: + return None if self.attributes is None else self.attributes.database + + @database.setter + def database(self, database: Optional[Database]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.database = database + + @property + def procedures(self) -> Optional[list[Procedure]]: + return None if self.attributes is None else self.attributes.procedures + + @procedures.setter + def procedures(self, procedures: Optional[list[Procedure]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.procedures = procedures + + @property + def views(self) -> Optional[list[View]]: + return None if self.attributes is None else self.attributes.views + + @views.setter + def views(self, views: Optional[list[View]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.views = views + + @property + def materialised_views(self) -> Optional[list[MaterialisedView]]: + return None if self.attributes is None else self.attributes.materialised_views + + @materialised_views.setter + def materialised_views(self, materialised_views: Optional[list[MaterialisedView]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.materialised_views = materialised_views + + @property + def snowflake_dynamic_tables(self) -> Optional[list[SnowflakeDynamicTable]]: + return ( + None + if self.attributes is None + else self.attributes.snowflake_dynamic_tables + ) + + @snowflake_dynamic_tables.setter + def snowflake_dynamic_tables( + self, snowflake_dynamic_tables: Optional[list[SnowflakeDynamicTable]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.snowflake_dynamic_tables = snowflake_dynamic_tables + + @property + def snowflake_pipes(self) -> Optional[list[SnowflakePipe]]: + return None if self.attributes is None else self.attributes.snowflake_pipes + + @snowflake_pipes.setter + def snowflake_pipes(self, snowflake_pipes: Optional[list[SnowflakePipe]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.snowflake_pipes = snowflake_pipes + + @property + def snowflake_streams(self) -> Optional[list[SnowflakeStream]]: + return None if self.attributes is None else self.attributes.snowflake_streams + + @snowflake_streams.setter + def snowflake_streams(self, snowflake_streams: Optional[list[SnowflakeStream]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.snowflake_streams = snowflake_streams + + class Attributes(SQL.Attributes): + table_count: Optional[int] = Field(default=None, description="") + views_count: Optional[int] = Field(default=None, description="") + snowflake_tags: Optional[list[SnowflakeTag]] = Field( + default=None, description="" + ) # relationship + 
functions: Optional[list[Function]] = Field( + default=None, description="" + ) # relationship + tables: Optional[list[Table]] = Field( + default=None, description="" + ) # relationship + database: Optional[Database] = Field( + default=None, description="" + ) # relationship + procedures: Optional[list[Procedure]] = Field( + default=None, description="" + ) # relationship + views: Optional[list[View]] = Field( + default=None, description="" + ) # relationship + materialised_views: Optional[list[MaterialisedView]] = Field( + default=None, description="" + ) # relationship + snowflake_dynamic_tables: Optional[list[SnowflakeDynamicTable]] = Field( + default=None, description="" + ) # relationship + snowflake_pipes: Optional[list[SnowflakePipe]] = Field( + default=None, description="" + ) # relationship + snowflake_streams: Optional[list[SnowflakeStream]] = Field( + default=None, description="" + ) # relationship + + @classmethod + # @validate_arguments() + @init_guid + def create( + cls, *, name: str, database_qualified_name: str + ) -> Schema.Attributes: + if not name: + raise ValueError("name cannot be blank") + validate_required_fields( + ["database_qualified_name"], [database_qualified_name] + ) + fields = database_qualified_name.split("/") + if len(fields) != 4: + raise ValueError("Invalid database_qualified_name") + try: + connector_type = AtlanConnectorType(fields[1]) # type:ignore + except ValueError as e: + raise ValueError("Invalid database_qualified_name") from e + return Schema.Attributes( + name=name, + database_name=fields[3], + connection_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}", + database_qualified_name=database_qualified_name, + qualified_name=f"{database_qualified_name}/{name}", + connector_name=connector_type.value, + database=Database.ref_by_qualified_name(database_qualified_name), + ) + + attributes: "Schema.Attributes" = Field( + default_factory=lambda: Schema.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .database import Database # noqa +from .function import Function # noqa +from .materialised_view import MaterialisedView # noqa +from .procedure import Procedure # noqa +from .snowflake_dynamic_table import SnowflakeDynamicTable # noqa +from .snowflake_pipe import SnowflakePipe # noqa +from .snowflake_stream import SnowflakeStream # noqa +from .snowflake_tag import SnowflakeTag # noqa +from .table import Table # noqa +from .view import View # noqa diff --git a/pyatlan/model/assets/schema_registry.py b/pyatlan/model/assets/schema_registry.py new file mode 100644 index 000000000..a25310df4 --- /dev/null +++ b/pyatlan/model/assets/schema_registry.py @@ -0,0 +1,91 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
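A hypothetical call to the Schema.create classmethod added above. The qualified name below is invented purely to satisfy the four-part shape the code validates (default/<connector>/<epoch>/<database>); "snowflake" is assumed to be a valid AtlanConnectorType value, and the import path assumes the assets package still re-exports Schema.

from pyatlan.model.assets import Schema

schema = Schema.create(
    name="SALES",
    database_qualified_name="default/snowflake/1692345678/ANALYTICS",
)
# Following Schema.Attributes.create above, this should derive:
#   connection_qualified_name = "default/snowflake/1692345678"
#   database_name             = "ANALYTICS"
#   qualified_name            = "default/snowflake/1692345678/ANALYTICS/SALES"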
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.enums import SchemaRegistrySchemaType +from pyatlan.model.fields.atlan_fields import KeywordField + +from .catalog import Catalog + + +class SchemaRegistry(Catalog): + """Description""" + + type_name: str = Field(default="SchemaRegistry", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "SchemaRegistry": + raise ValueError("must be SchemaRegistry") + return v + + def __setattr__(self, name, value): + if name in SchemaRegistry._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + SCHEMA_REGISTRY_SCHEMA_TYPE: ClassVar[KeywordField] = KeywordField( + "schemaRegistrySchemaType", "schemaRegistrySchemaType" + ) + """ + Type of language or specification used to define the schema, for example: JSON, Protobuf, etc. + """ + SCHEMA_REGISTRY_SCHEMA_ID: ClassVar[KeywordField] = KeywordField( + "schemaRegistrySchemaId", "schemaRegistrySchemaId" + ) + """ + Unique identifier for schema definition set by the schema registry. + """ + + _convenience_properties: ClassVar[list[str]] = [ + "schema_registry_schema_type", + "schema_registry_schema_id", + ] + + @property + def schema_registry_schema_type(self) -> Optional[SchemaRegistrySchemaType]: + return ( + None + if self.attributes is None + else self.attributes.schema_registry_schema_type + ) + + @schema_registry_schema_type.setter + def schema_registry_schema_type( + self, schema_registry_schema_type: Optional[SchemaRegistrySchemaType] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.schema_registry_schema_type = schema_registry_schema_type + + @property + def schema_registry_schema_id(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.schema_registry_schema_id + ) + + @schema_registry_schema_id.setter + def schema_registry_schema_id(self, schema_registry_schema_id: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.schema_registry_schema_id = schema_registry_schema_id + + class Attributes(Catalog.Attributes): + schema_registry_schema_type: Optional[SchemaRegistrySchemaType] = Field( + default=None, description="" + ) + schema_registry_schema_id: Optional[str] = Field(default=None, description="") + + attributes: "SchemaRegistry.Attributes" = Field( + default_factory=lambda: SchemaRegistry.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) diff --git a/pyatlan/model/assets/schema_registry_subject.py b/pyatlan/model/assets/schema_registry_subject.py new file mode 100644 index 000000000..a1fc381f7 --- /dev/null +++ b/pyatlan/model/assets/schema_registry_subject.py @@ -0,0 +1,254 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.enums import SchemaRegistrySchemaCompatibility +from pyatlan.model.fields.atlan_fields import ( + BooleanField, + KeywordField, + RelationField, + TextField, +) + +from .schema_registry import SchemaRegistry + + +class SchemaRegistrySubject(SchemaRegistry): + """Description""" + + type_name: str = Field(default="SchemaRegistrySubject", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "SchemaRegistrySubject": + raise ValueError("must be SchemaRegistrySubject") + return v + + def __setattr__(self, name, value): + if name in SchemaRegistrySubject._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + SCHEMA_REGISTRY_SUBJECT_BASE_NAME: ClassVar[KeywordField] = KeywordField( + "schemaRegistrySubjectBaseName", "schemaRegistrySubjectBaseName" + ) + """ + Base name of the subject, without -key, -value prefixes. + """ + SCHEMA_REGISTRY_SUBJECT_IS_KEY_SCHEMA: ClassVar[BooleanField] = BooleanField( + "schemaRegistrySubjectIsKeySchema", "schemaRegistrySubjectIsKeySchema" + ) + """ + Whether the subject is a schema for the keys of the messages (true) or not (false). + """ + SCHEMA_REGISTRY_SUBJECT_SCHEMA_COMPATIBILITY: ClassVar[KeywordField] = KeywordField( + "schemaRegistrySubjectSchemaCompatibility", + "schemaRegistrySubjectSchemaCompatibility", + ) + """ + Compatibility of the schema across versions. + """ + SCHEMA_REGISTRY_SUBJECT_LATEST_SCHEMA_VERSION: ClassVar[ + KeywordField + ] = KeywordField( + "schemaRegistrySubjectLatestSchemaVersion", + "schemaRegistrySubjectLatestSchemaVersion", + ) + """ + Latest schema version of the subject. + """ + SCHEMA_REGISTRY_SUBJECT_LATEST_SCHEMA_DEFINITION: ClassVar[TextField] = TextField( + "schemaRegistrySubjectLatestSchemaDefinition", + "schemaRegistrySubjectLatestSchemaDefinition", + ) + """ + Definition of the latest schema in the subject. + """ + SCHEMA_REGISTRY_SUBJECT_GOVERNING_ASSET_QUALIFIED_NAMES: ClassVar[ + KeywordField + ] = KeywordField( + "schemaRegistrySubjectGoverningAssetQualifiedNames", + "schemaRegistrySubjectGoverningAssetQualifiedNames", + ) + """ + List of asset qualified names that this subject is governing/validating. 
+ """ + + ASSETS: ClassVar[RelationField] = RelationField("assets") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "schema_registry_subject_base_name", + "schema_registry_subject_is_key_schema", + "schema_registry_subject_schema_compatibility", + "schema_registry_subject_latest_schema_version", + "schema_registry_subject_latest_schema_definition", + "schema_registry_subject_governing_asset_qualified_names", + "assets", + ] + + @property + def schema_registry_subject_base_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.schema_registry_subject_base_name + ) + + @schema_registry_subject_base_name.setter + def schema_registry_subject_base_name( + self, schema_registry_subject_base_name: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.schema_registry_subject_base_name = ( + schema_registry_subject_base_name + ) + + @property + def schema_registry_subject_is_key_schema(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.schema_registry_subject_is_key_schema + ) + + @schema_registry_subject_is_key_schema.setter + def schema_registry_subject_is_key_schema( + self, schema_registry_subject_is_key_schema: Optional[bool] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.schema_registry_subject_is_key_schema = ( + schema_registry_subject_is_key_schema + ) + + @property + def schema_registry_subject_schema_compatibility( + self, + ) -> Optional[SchemaRegistrySchemaCompatibility]: + return ( + None + if self.attributes is None + else self.attributes.schema_registry_subject_schema_compatibility + ) + + @schema_registry_subject_schema_compatibility.setter + def schema_registry_subject_schema_compatibility( + self, + schema_registry_subject_schema_compatibility: Optional[ + SchemaRegistrySchemaCompatibility + ], + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.schema_registry_subject_schema_compatibility = ( + schema_registry_subject_schema_compatibility + ) + + @property + def schema_registry_subject_latest_schema_version(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.schema_registry_subject_latest_schema_version + ) + + @schema_registry_subject_latest_schema_version.setter + def schema_registry_subject_latest_schema_version( + self, schema_registry_subject_latest_schema_version: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.schema_registry_subject_latest_schema_version = ( + schema_registry_subject_latest_schema_version + ) + + @property + def schema_registry_subject_latest_schema_definition(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.schema_registry_subject_latest_schema_definition + ) + + @schema_registry_subject_latest_schema_definition.setter + def schema_registry_subject_latest_schema_definition( + self, schema_registry_subject_latest_schema_definition: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.schema_registry_subject_latest_schema_definition = ( + schema_registry_subject_latest_schema_definition + ) + + @property + def schema_registry_subject_governing_asset_qualified_names( + self, + ) -> Optional[set[str]]: + return ( + None + if self.attributes is None + else 
self.attributes.schema_registry_subject_governing_asset_qualified_names + ) + + @schema_registry_subject_governing_asset_qualified_names.setter + def schema_registry_subject_governing_asset_qualified_names( + self, + schema_registry_subject_governing_asset_qualified_names: Optional[set[str]], + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.schema_registry_subject_governing_asset_qualified_names = ( + schema_registry_subject_governing_asset_qualified_names + ) + + @property + def assets(self) -> Optional[list[Asset]]: + return None if self.attributes is None else self.attributes.assets + + @assets.setter + def assets(self, assets: Optional[list[Asset]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.assets = assets + + class Attributes(SchemaRegistry.Attributes): + schema_registry_subject_base_name: Optional[str] = Field( + default=None, description="" + ) + schema_registry_subject_is_key_schema: Optional[bool] = Field( + default=None, description="" + ) + schema_registry_subject_schema_compatibility: Optional[ + SchemaRegistrySchemaCompatibility + ] = Field(default=None, description="") + schema_registry_subject_latest_schema_version: Optional[str] = Field( + default=None, description="" + ) + schema_registry_subject_latest_schema_definition: Optional[str] = Field( + default=None, description="" + ) + schema_registry_subject_governing_asset_qualified_names: Optional[ + set[str] + ] = Field(default=None, description="") + assets: Optional[list[Asset]] = Field( + default=None, description="" + ) # relationship + + attributes: "SchemaRegistrySubject.Attributes" = Field( + default_factory=lambda: SchemaRegistrySubject.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .asset import Asset # noqa diff --git a/pyatlan/model/assets/asset42.py b/pyatlan/model/assets/sigma.py similarity index 87% rename from pyatlan/model/assets/asset42.py rename to pyatlan/model/assets/sigma.py index 5fee1001c..9d8fb4cbf 100644 --- a/pyatlan/model/assets/asset42.py +++ b/pyatlan/model/assets/sigma.py @@ -6,17 +6,17 @@ from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.fields.atlan_fields import KeywordTextField -from .asset19 import BI +from .b_i import BI class Sigma(BI): """Description""" - type_name: str = Field("Sigma", allow_mutation=False) + type_name: str = Field(default="Sigma", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -163,29 +163,18 @@ def sigma_data_element_name(self, sigma_data_element_name: Optional[str]): class Attributes(BI.Attributes): sigma_workbook_qualified_name: Optional[str] = Field( - None, description="", alias="sigmaWorkbookQualifiedName" - ) - sigma_workbook_name: Optional[str] = Field( - None, description="", alias="sigmaWorkbookName" - ) - sigma_page_qualified_name: Optional[str] = Field( - None, description="", alias="sigmaPageQualifiedName" - ) - sigma_page_name: Optional[str] = Field( - None, description="", alias="sigmaPageName" + default=None, description="" ) + sigma_workbook_name: Optional[str] = Field(default=None, description="") + sigma_page_qualified_name: Optional[str] = Field(default=None, description="") + sigma_page_name: Optional[str] = Field(default=None, description="") sigma_data_element_qualified_name: Optional[str] = Field( - None, description="", alias="sigmaDataElementQualifiedName" - ) - sigma_data_element_name: Optional[str] = Field( - None, description="", alias="sigmaDataElementName" + default=None, description="" ) + sigma_data_element_name: Optional[str] = Field(default=None, description="") attributes: "Sigma.Attributes" = Field( default_factory=lambda: Sigma.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) - - -Sigma.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/sigma_data_element.py b/pyatlan/model/assets/sigma_data_element.py new file mode 100644 index 000000000..6cd0ecb9c --- /dev/null +++ b/pyatlan/model/assets/sigma_data_element.py @@ -0,0 +1,159 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
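For the SchemaRegistrySubject hunk above, the compatibility attribute is typed with the SchemaRegistrySchemaCompatibility enum, so callers assign enum members rather than raw strings. A hedged sketch, assuming the enum exposes a BACKWARD member (mirroring the usual schema-registry compatibility levels) and that `subject` is an existing SchemaRegistrySubject instance:

from pyatlan.model.enums import SchemaRegistrySchemaCompatibility


def mark_backward_compatible(subject):
    subject.schema_registry_subject_schema_compatibility = (
        SchemaRegistrySchemaCompatibility.BACKWARD
    )
    return subject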
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, NumericField, RelationField + +from .sigma import Sigma + + +class SigmaDataElement(Sigma): + """Description""" + + type_name: str = Field(default="SigmaDataElement", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "SigmaDataElement": + raise ValueError("must be SigmaDataElement") + return v + + def __setattr__(self, name, value): + if name in SigmaDataElement._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + SIGMA_DATA_ELEMENT_QUERY: ClassVar[KeywordField] = KeywordField( + "sigmaDataElementQuery", "sigmaDataElementQuery" + ) + """ + + """ + SIGMA_DATA_ELEMENT_TYPE: ClassVar[KeywordField] = KeywordField( + "sigmaDataElementType", "sigmaDataElementType" + ) + """ + + """ + SIGMA_DATA_ELEMENT_FIELD_COUNT: ClassVar[NumericField] = NumericField( + "sigmaDataElementFieldCount", "sigmaDataElementFieldCount" + ) + """ + Number of fields in this data element. + """ + + SIGMA_PAGE: ClassVar[RelationField] = RelationField("sigmaPage") + """ + TBC + """ + SIGMA_DATA_ELEMENT_FIELDS: ClassVar[RelationField] = RelationField( + "sigmaDataElementFields" + ) + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "sigma_data_element_query", + "sigma_data_element_type", + "sigma_data_element_field_count", + "sigma_page", + "sigma_data_element_fields", + ] + + @property + def sigma_data_element_query(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.sigma_data_element_query + ) + + @sigma_data_element_query.setter + def sigma_data_element_query(self, sigma_data_element_query: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sigma_data_element_query = sigma_data_element_query + + @property + def sigma_data_element_type(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.sigma_data_element_type + ) + + @sigma_data_element_type.setter + def sigma_data_element_type(self, sigma_data_element_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sigma_data_element_type = sigma_data_element_type + + @property + def sigma_data_element_field_count(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.sigma_data_element_field_count + ) + + @sigma_data_element_field_count.setter + def sigma_data_element_field_count( + self, sigma_data_element_field_count: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sigma_data_element_field_count = sigma_data_element_field_count + + @property + def sigma_page(self) -> Optional[SigmaPage]: + return None if self.attributes is None else self.attributes.sigma_page + + @sigma_page.setter + def sigma_page(self, sigma_page: Optional[SigmaPage]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sigma_page = sigma_page + + @property + def sigma_data_element_fields(self) -> Optional[list[SigmaDataElementField]]: + return ( + None + if self.attributes is None + else self.attributes.sigma_data_element_fields + ) + + @sigma_data_element_fields.setter + def sigma_data_element_fields( + self, sigma_data_element_fields: Optional[list[SigmaDataElementField]] + ): + if 
self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sigma_data_element_fields = sigma_data_element_fields + + class Attributes(Sigma.Attributes): + sigma_data_element_query: Optional[str] = Field(default=None, description="") + sigma_data_element_type: Optional[str] = Field(default=None, description="") + sigma_data_element_field_count: Optional[int] = Field( + default=None, description="" + ) + sigma_page: Optional[SigmaPage] = Field( + default=None, description="" + ) # relationship + sigma_data_element_fields: Optional[list[SigmaDataElementField]] = Field( + default=None, description="" + ) # relationship + + attributes: "SigmaDataElement.Attributes" = Field( + default_factory=lambda: SigmaDataElement.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .sigma_data_element_field import SigmaDataElementField # noqa +from .sigma_page import SigmaPage # noqa diff --git a/pyatlan/model/assets/sigma_data_element_field.py b/pyatlan/model/assets/sigma_data_element_field.py new file mode 100644 index 000000000..06b5b9417 --- /dev/null +++ b/pyatlan/model/assets/sigma_data_element_field.py @@ -0,0 +1,120 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import BooleanField, RelationField, TextField + +from .sigma import Sigma + + +class SigmaDataElementField(Sigma): + """Description""" + + type_name: str = Field(default="SigmaDataElementField", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "SigmaDataElementField": + raise ValueError("must be SigmaDataElementField") + return v + + def __setattr__(self, name, value): + if name in SigmaDataElementField._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + SIGMA_DATA_ELEMENT_FIELD_IS_HIDDEN: ClassVar[BooleanField] = BooleanField( + "sigmaDataElementFieldIsHidden", "sigmaDataElementFieldIsHidden" + ) + """ + Whether this field is hidden (true) or not (false). 
+ """ + SIGMA_DATA_ELEMENT_FIELD_FORMULA: ClassVar[TextField] = TextField( + "sigmaDataElementFieldFormula", "sigmaDataElementFieldFormula" + ) + """ + + """ + + SIGMA_DATA_ELEMENT: ClassVar[RelationField] = RelationField("sigmaDataElement") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "sigma_data_element_field_is_hidden", + "sigma_data_element_field_formula", + "sigma_data_element", + ] + + @property + def sigma_data_element_field_is_hidden(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.sigma_data_element_field_is_hidden + ) + + @sigma_data_element_field_is_hidden.setter + def sigma_data_element_field_is_hidden( + self, sigma_data_element_field_is_hidden: Optional[bool] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sigma_data_element_field_is_hidden = ( + sigma_data_element_field_is_hidden + ) + + @property + def sigma_data_element_field_formula(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.sigma_data_element_field_formula + ) + + @sigma_data_element_field_formula.setter + def sigma_data_element_field_formula( + self, sigma_data_element_field_formula: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sigma_data_element_field_formula = ( + sigma_data_element_field_formula + ) + + @property + def sigma_data_element(self) -> Optional[SigmaDataElement]: + return None if self.attributes is None else self.attributes.sigma_data_element + + @sigma_data_element.setter + def sigma_data_element(self, sigma_data_element: Optional[SigmaDataElement]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sigma_data_element = sigma_data_element + + class Attributes(Sigma.Attributes): + sigma_data_element_field_is_hidden: Optional[bool] = Field( + default=None, description="" + ) + sigma_data_element_field_formula: Optional[str] = Field( + default=None, description="" + ) + sigma_data_element: Optional[SigmaDataElement] = Field( + default=None, description="" + ) # relationship + + attributes: "SigmaDataElementField.Attributes" = Field( + default_factory=lambda: SigmaDataElementField.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .sigma_data_element import SigmaDataElement # noqa diff --git a/pyatlan/model/assets/sigma_dataset.py b/pyatlan/model/assets/sigma_dataset.py new file mode 100644 index 000000000..a780fadba --- /dev/null +++ b/pyatlan/model/assets/sigma_dataset.py @@ -0,0 +1,92 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import NumericField, RelationField + +from .sigma import Sigma + + +class SigmaDataset(Sigma): + """Description""" + + type_name: str = Field(default="SigmaDataset", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "SigmaDataset": + raise ValueError("must be SigmaDataset") + return v + + def __setattr__(self, name, value): + if name in SigmaDataset._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + SIGMA_DATASET_COLUMN_COUNT: ClassVar[NumericField] = NumericField( + "sigmaDatasetColumnCount", "sigmaDatasetColumnCount" + ) + """ + Number of columns in this dataset. + """ + + SIGMA_DATASET_COLUMNS: ClassVar[RelationField] = RelationField( + "sigmaDatasetColumns" + ) + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "sigma_dataset_column_count", + "sigma_dataset_columns", + ] + + @property + def sigma_dataset_column_count(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.sigma_dataset_column_count + ) + + @sigma_dataset_column_count.setter + def sigma_dataset_column_count(self, sigma_dataset_column_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sigma_dataset_column_count = sigma_dataset_column_count + + @property + def sigma_dataset_columns(self) -> Optional[list[SigmaDatasetColumn]]: + return ( + None if self.attributes is None else self.attributes.sigma_dataset_columns + ) + + @sigma_dataset_columns.setter + def sigma_dataset_columns( + self, sigma_dataset_columns: Optional[list[SigmaDatasetColumn]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sigma_dataset_columns = sigma_dataset_columns + + class Attributes(Sigma.Attributes): + sigma_dataset_column_count: Optional[int] = Field(default=None, description="") + sigma_dataset_columns: Optional[list[SigmaDatasetColumn]] = Field( + default=None, description="" + ) # relationship + + attributes: "SigmaDataset.Attributes" = Field( + default_factory=lambda: SigmaDataset.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .sigma_dataset_column import SigmaDatasetColumn # noqa diff --git a/pyatlan/model/assets/sigma_dataset_column.py b/pyatlan/model/assets/sigma_dataset_column.py new file mode 100644 index 000000000..2808a38fd --- /dev/null +++ b/pyatlan/model/assets/sigma_dataset_column.py @@ -0,0 +1,108 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordTextField, RelationField + +from .sigma import Sigma + + +class SigmaDatasetColumn(Sigma): + """Description""" + + type_name: str = Field(default="SigmaDatasetColumn", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "SigmaDatasetColumn": + raise ValueError("must be SigmaDatasetColumn") + return v + + def __setattr__(self, name, value): + if name in SigmaDatasetColumn._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + SIGMA_DATASET_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "sigmaDatasetQualifiedName", + "sigmaDatasetQualifiedName", + "sigmaDatasetQualifiedName.text", + ) + """ + Unique name of the dataset in which this column exists. + """ + SIGMA_DATASET_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "sigmaDatasetName", "sigmaDatasetName.keyword", "sigmaDatasetName" + ) + """ + Simple name of the dataset in which this column exists. + """ + + SIGMA_DATASET: ClassVar[RelationField] = RelationField("sigmaDataset") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "sigma_dataset_qualified_name", + "sigma_dataset_name", + "sigma_dataset", + ] + + @property + def sigma_dataset_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.sigma_dataset_qualified_name + ) + + @sigma_dataset_qualified_name.setter + def sigma_dataset_qualified_name(self, sigma_dataset_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sigma_dataset_qualified_name = sigma_dataset_qualified_name + + @property + def sigma_dataset_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.sigma_dataset_name + + @sigma_dataset_name.setter + def sigma_dataset_name(self, sigma_dataset_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sigma_dataset_name = sigma_dataset_name + + @property + def sigma_dataset(self) -> Optional[SigmaDataset]: + return None if self.attributes is None else self.attributes.sigma_dataset + + @sigma_dataset.setter + def sigma_dataset(self, sigma_dataset: Optional[SigmaDataset]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sigma_dataset = sigma_dataset + + class Attributes(Sigma.Attributes): + sigma_dataset_qualified_name: Optional[str] = Field( + default=None, description="" + ) + sigma_dataset_name: Optional[str] = Field(default=None, description="") + sigma_dataset: Optional[SigmaDataset] = Field( + default=None, description="" + ) # relationship + + attributes: "SigmaDatasetColumn.Attributes" = Field( + default_factory=lambda: SigmaDatasetColumn.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .sigma_dataset import SigmaDataset # noqa diff --git a/pyatlan/model/assets/sigma_page.py b/pyatlan/model/assets/sigma_page.py new file mode 100644 index 000000000..aa2b50e3c --- /dev/null +++ b/pyatlan/model/assets/sigma_page.py @@ -0,0 +1,107 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
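The ClassVar constants in sigma_dataset_column.py above (KeywordTextField, RelationField and their siblings) describe how each Atlan attribute is indexed for search. Comparing the two KeywordTextField calls, sigmaDatasetQualifiedName passes ("sigmaDatasetQualifiedName", "sigmaDatasetQualifiedName", "sigmaDatasetQualifiedName.text") while sigmaDatasetName passes ("sigmaDatasetName", "sigmaDatasetName.keyword", "sigmaDatasetName"), so the second argument appears to be the keyword-analyzed index field and the third the text-analyzed one. That reading is inferred from the generated calls, not from pyatlan documentation; the constructor calls themselves are copied verbatim from the module above.

from pyatlan.model.fields.atlan_fields import KeywordTextField, RelationField

# Attribute name, keyword-analyzed index field, text-analyzed index field
# (argument roles inferred from the generated calls, not from pyatlan docs).
SIGMA_DATASET_NAME = KeywordTextField(
    "sigmaDatasetName", "sigmaDatasetName.keyword", "sigmaDatasetName"
)

# Relationship attributes carry only the Atlan attribute name.
SIGMA_DATASET = RelationField("sigmaDataset")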
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import NumericField, RelationField + +from .sigma import Sigma + + +class SigmaPage(Sigma): + """Description""" + + type_name: str = Field(default="SigmaPage", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "SigmaPage": + raise ValueError("must be SigmaPage") + return v + + def __setattr__(self, name, value): + if name in SigmaPage._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + SIGMA_DATA_ELEMENT_COUNT: ClassVar[NumericField] = NumericField( + "sigmaDataElementCount", "sigmaDataElementCount" + ) + """ + Number of data elements on this page. + """ + + SIGMA_DATA_ELEMENTS: ClassVar[RelationField] = RelationField("sigmaDataElements") + """ + TBC + """ + SIGMA_WORKBOOK: ClassVar[RelationField] = RelationField("sigmaWorkbook") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "sigma_data_element_count", + "sigma_data_elements", + "sigma_workbook", + ] + + @property + def sigma_data_element_count(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.sigma_data_element_count + ) + + @sigma_data_element_count.setter + def sigma_data_element_count(self, sigma_data_element_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sigma_data_element_count = sigma_data_element_count + + @property + def sigma_data_elements(self) -> Optional[list[SigmaDataElement]]: + return None if self.attributes is None else self.attributes.sigma_data_elements + + @sigma_data_elements.setter + def sigma_data_elements( + self, sigma_data_elements: Optional[list[SigmaDataElement]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sigma_data_elements = sigma_data_elements + + @property + def sigma_workbook(self) -> Optional[SigmaWorkbook]: + return None if self.attributes is None else self.attributes.sigma_workbook + + @sigma_workbook.setter + def sigma_workbook(self, sigma_workbook: Optional[SigmaWorkbook]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sigma_workbook = sigma_workbook + + class Attributes(Sigma.Attributes): + sigma_data_element_count: Optional[int] = Field(default=None, description="") + sigma_data_elements: Optional[list[SigmaDataElement]] = Field( + default=None, description="" + ) # relationship + sigma_workbook: Optional[SigmaWorkbook] = Field( + default=None, description="" + ) # relationship + + attributes: "SigmaPage.Attributes" = Field( + default_factory=lambda: SigmaPage.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .sigma_data_element import SigmaDataElement # noqa +from .sigma_workbook import SigmaWorkbook # noqa diff --git a/pyatlan/model/assets/sigma_workbook.py b/pyatlan/model/assets/sigma_workbook.py new file mode 100644 index 000000000..f8925b373 --- /dev/null +++ b/pyatlan/model/assets/sigma_workbook.py @@ -0,0 +1,82 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
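sigma_page.py above also shows the convenience-property pattern shared by all of these classes: public snake_case properties proxy into the nested Attributes model, lazily constructing it on first write, while the __setattr__ override routes those names through object.__setattr__ so the property setters run instead of pydantic's field handling. A condensed, standalone sketch of the mechanism (not pyatlan code):

from typing import ClassVar, Optional

from pydantic.v1 import BaseModel, Field


class ExamplePage(BaseModel):
    """Stand-in for a generated asset class, reduced to one convenience property."""

    class Attributes(BaseModel):
        data_element_count: Optional[int] = Field(default=None, description="")

    attributes: Optional[Attributes] = Field(default=None)

    _convenience_properties: ClassVar[list[str]] = ["data_element_count"]

    def __setattr__(self, name, value):
        # Route convenience properties through object.__setattr__ so the
        # property setter below runs instead of pydantic's field handling.
        if name in ExamplePage._convenience_properties:
            return object.__setattr__(self, name, value)
        super().__setattr__(name, value)

    @property
    def data_element_count(self) -> Optional[int]:
        return None if self.attributes is None else self.attributes.data_element_count

    @data_element_count.setter
    def data_element_count(self, data_element_count: Optional[int]):
        if self.attributes is None:
            self.attributes = self.Attributes()
        self.attributes.data_element_count = data_element_count


page = ExamplePage()
assert page.data_element_count is None
page.data_element_count = 3  # lazily creates page.attributes
assert page.attributes.data_element_count == 3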
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import NumericField, RelationField + +from .sigma import Sigma + + +class SigmaWorkbook(Sigma): + """Description""" + + type_name: str = Field(default="SigmaWorkbook", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "SigmaWorkbook": + raise ValueError("must be SigmaWorkbook") + return v + + def __setattr__(self, name, value): + if name in SigmaWorkbook._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + SIGMA_PAGE_COUNT: ClassVar[NumericField] = NumericField( + "sigmaPageCount", "sigmaPageCount" + ) + """ + Number of pages in this workbook. + """ + + SIGMA_PAGES: ClassVar[RelationField] = RelationField("sigmaPages") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "sigma_page_count", + "sigma_pages", + ] + + @property + def sigma_page_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.sigma_page_count + + @sigma_page_count.setter + def sigma_page_count(self, sigma_page_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sigma_page_count = sigma_page_count + + @property + def sigma_pages(self) -> Optional[list[SigmaPage]]: + return None if self.attributes is None else self.attributes.sigma_pages + + @sigma_pages.setter + def sigma_pages(self, sigma_pages: Optional[list[SigmaPage]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sigma_pages = sigma_pages + + class Attributes(Sigma.Attributes): + sigma_page_count: Optional[int] = Field(default=None, description="") + sigma_pages: Optional[list[SigmaPage]] = Field( + default=None, description="" + ) # relationship + + attributes: "SigmaWorkbook.Attributes" = Field( + default_factory=lambda: SigmaWorkbook.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .sigma_page import SigmaPage # noqa diff --git a/pyatlan/model/assets/asset46.py b/pyatlan/model/assets/sisense.py similarity index 78% rename from pyatlan/model/assets/asset46.py rename to pyatlan/model/assets/sisense.py index f11e27860..ea8bf028c 100644 --- a/pyatlan/model/assets/asset46.py +++ b/pyatlan/model/assets/sisense.py @@ -6,15 +6,15 @@ from typing import ClassVar -from pydantic import Field, validator +from pydantic.v1 import Field, validator -from .asset19 import BI +from .b_i import BI class Sisense(BI): """Description""" - type_name: str = Field("Sisense", allow_mutation=False) + type_name: str = Field(default="Sisense", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -28,6 +28,3 @@ def __setattr__(self, name, value): super().__setattr__(name, value) _convenience_properties: ClassVar[list[str]] = [] - - -Sisense.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/sisense_dashboard.py b/pyatlan/model/assets/sisense_dashboard.py new file mode 100644 index 000000000..208b7823c --- /dev/null +++ b/pyatlan/model/assets/sisense_dashboard.py @@ -0,0 +1,164 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
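sigma_workbook.py above ends with `from .sigma_page import SigmaPage  # noqa`, and the sisense.py rename drops the old `Sisense.Attributes.update_forward_refs()` call. Because every module begins with `from __future__ import annotations`, the relationship annotations stay as strings, so mutually referencing asset modules can import each other at the bottom of the file without circular-import errors; the forward references are presumably resolved in one place once the whole assets package has been imported. A single-file sketch of the same mechanism, using pydantic v1's own update_forward_refs():

from __future__ import annotations

from typing import Optional

from pydantic.v1 import BaseModel


class Page(BaseModel):
    # `Workbook` does not exist yet; postponed annotations keep this a string
    # until the forward reference is resolved below.
    workbook: Optional[Workbook] = None


class Workbook(BaseModel):
    pages: Optional[list[Page]] = None


# Resolve the forward reference once both classes exist. In pyatlan this
# resolution presumably happens after the whole assets package is imported.
Page.update_forward_refs()

workbook = Workbook(pages=[Page()])
page = Page(workbook=workbook)
assert page.workbook is not None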
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import ( + KeywordTextField, + NumericField, + RelationField, +) + +from .sisense import Sisense + + +class SisenseDashboard(Sisense): + """Description""" + + type_name: str = Field(default="SisenseDashboard", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "SisenseDashboard": + raise ValueError("must be SisenseDashboard") + return v + + def __setattr__(self, name, value): + if name in SisenseDashboard._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + SISENSE_DASHBOARD_FOLDER_QUALIFIED_NAME: ClassVar[ + KeywordTextField + ] = KeywordTextField( + "sisenseDashboardFolderQualifiedName", + "sisenseDashboardFolderQualifiedName", + "sisenseDashboardFolderQualifiedName.text", + ) + """ + Unique name of the folder in which this dashboard exists. + """ + SISENSE_DASHBOARD_WIDGET_COUNT: ClassVar[NumericField] = NumericField( + "sisenseDashboardWidgetCount", "sisenseDashboardWidgetCount" + ) + """ + Number of widgets in this dashboard. + """ + + SISENSE_DATAMODELS: ClassVar[RelationField] = RelationField("sisenseDatamodels") + """ + TBC + """ + SISENSE_WIDGETS: ClassVar[RelationField] = RelationField("sisenseWidgets") + """ + TBC + """ + SISENSE_FOLDER: ClassVar[RelationField] = RelationField("sisenseFolder") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "sisense_dashboard_folder_qualified_name", + "sisense_dashboard_widget_count", + "sisense_datamodels", + "sisense_widgets", + "sisense_folder", + ] + + @property + def sisense_dashboard_folder_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.sisense_dashboard_folder_qualified_name + ) + + @sisense_dashboard_folder_qualified_name.setter + def sisense_dashboard_folder_qualified_name( + self, sisense_dashboard_folder_qualified_name: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_dashboard_folder_qualified_name = ( + sisense_dashboard_folder_qualified_name + ) + + @property + def sisense_dashboard_widget_count(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.sisense_dashboard_widget_count + ) + + @sisense_dashboard_widget_count.setter + def sisense_dashboard_widget_count( + self, sisense_dashboard_widget_count: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_dashboard_widget_count = sisense_dashboard_widget_count + + @property + def sisense_datamodels(self) -> Optional[list[SisenseDatamodel]]: + return None if self.attributes is None else self.attributes.sisense_datamodels + + @sisense_datamodels.setter + def sisense_datamodels(self, sisense_datamodels: Optional[list[SisenseDatamodel]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_datamodels = sisense_datamodels + + @property + def sisense_widgets(self) -> Optional[list[SisenseWidget]]: + return None if self.attributes is None else self.attributes.sisense_widgets + + @sisense_widgets.setter + def sisense_widgets(self, sisense_widgets: Optional[list[SisenseWidget]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_widgets = sisense_widgets + + @property + def sisense_folder(self) -> 
Optional[SisenseFolder]: + return None if self.attributes is None else self.attributes.sisense_folder + + @sisense_folder.setter + def sisense_folder(self, sisense_folder: Optional[SisenseFolder]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_folder = sisense_folder + + class Attributes(Sisense.Attributes): + sisense_dashboard_folder_qualified_name: Optional[str] = Field( + default=None, description="" + ) + sisense_dashboard_widget_count: Optional[int] = Field( + default=None, description="" + ) + sisense_datamodels: Optional[list[SisenseDatamodel]] = Field( + default=None, description="" + ) # relationship + sisense_widgets: Optional[list[SisenseWidget]] = Field( + default=None, description="" + ) # relationship + sisense_folder: Optional[SisenseFolder] = Field( + default=None, description="" + ) # relationship + + attributes: "SisenseDashboard.Attributes" = Field( + default_factory=lambda: SisenseDashboard.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .sisense_datamodel import SisenseDatamodel # noqa +from .sisense_folder import SisenseFolder # noqa +from .sisense_widget import SisenseWidget # noqa diff --git a/pyatlan/model/assets/sisense_datamodel.py b/pyatlan/model/assets/sisense_datamodel.py new file mode 100644 index 000000000..ce1a93912 --- /dev/null +++ b/pyatlan/model/assets/sisense_datamodel.py @@ -0,0 +1,295 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from datetime import datetime +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, NumericField, RelationField + +from .sisense import Sisense + + +class SisenseDatamodel(Sisense): + """Description""" + + type_name: str = Field(default="SisenseDatamodel", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "SisenseDatamodel": + raise ValueError("must be SisenseDatamodel") + return v + + def __setattr__(self, name, value): + if name in SisenseDatamodel._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + SISENSE_DATAMODEL_TABLE_COUNT: ClassVar[NumericField] = NumericField( + "sisenseDatamodelTableCount", "sisenseDatamodelTableCount" + ) + """ + Number of tables in this datamodel. + """ + SISENSE_DATAMODEL_SERVER: ClassVar[KeywordField] = KeywordField( + "sisenseDatamodelServer", "sisenseDatamodelServer" + ) + """ + Hostname of the server on which this datamodel was created. + """ + SISENSE_DATAMODEL_REVISION: ClassVar[KeywordField] = KeywordField( + "sisenseDatamodelRevision", "sisenseDatamodelRevision" + ) + """ + Revision of this datamodel. + """ + SISENSE_DATAMODEL_LAST_BUILD_TIME: ClassVar[NumericField] = NumericField( + "sisenseDatamodelLastBuildTime", "sisenseDatamodelLastBuildTime" + ) + """ + Time (epoch) when this datamodel was last built, in milliseconds. + """ + SISENSE_DATAMODEL_LAST_SUCCESSFUL_BUILD_TIME: ClassVar[NumericField] = NumericField( + "sisenseDatamodelLastSuccessfulBuildTime", + "sisenseDatamodelLastSuccessfulBuildTime", + ) + """ + Time (epoch) when this datamodel was last built successfully, in milliseconds. 
+ """ + SISENSE_DATAMODEL_LAST_PUBLISH_TIME: ClassVar[NumericField] = NumericField( + "sisenseDatamodelLastPublishTime", "sisenseDatamodelLastPublishTime" + ) + """ + Time (epoch) when this datamodel was last published, in milliseconds. + """ + SISENSE_DATAMODEL_TYPE: ClassVar[KeywordField] = KeywordField( + "sisenseDatamodelType", "sisenseDatamodelType" + ) + """ + Type of this datamodel, for example: 'extract' or 'custom'. + """ + SISENSE_DATAMODEL_RELATION_TYPE: ClassVar[KeywordField] = KeywordField( + "sisenseDatamodelRelationType", "sisenseDatamodelRelationType" + ) + """ + Default relation type for this datamodel. 'extract' type Datamodels have regular relations by default. 'live' type Datamodels have direct relations by default. + """ # noqa: E501 + + SISENSE_DATAMODEL_TABLES: ClassVar[RelationField] = RelationField( + "sisenseDatamodelTables" + ) + """ + TBC + """ + SISENSE_DASHBOARDS: ClassVar[RelationField] = RelationField("sisenseDashboards") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "sisense_datamodel_table_count", + "sisense_datamodel_server", + "sisense_datamodel_revision", + "sisense_datamodel_last_build_time", + "sisense_datamodel_last_successful_build_time", + "sisense_datamodel_last_publish_time", + "sisense_datamodel_type", + "sisense_datamodel_relation_type", + "sisense_datamodel_tables", + "sisense_dashboards", + ] + + @property + def sisense_datamodel_table_count(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.sisense_datamodel_table_count + ) + + @sisense_datamodel_table_count.setter + def sisense_datamodel_table_count( + self, sisense_datamodel_table_count: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_datamodel_table_count = sisense_datamodel_table_count + + @property + def sisense_datamodel_server(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.sisense_datamodel_server + ) + + @sisense_datamodel_server.setter + def sisense_datamodel_server(self, sisense_datamodel_server: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_datamodel_server = sisense_datamodel_server + + @property + def sisense_datamodel_revision(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.sisense_datamodel_revision + ) + + @sisense_datamodel_revision.setter + def sisense_datamodel_revision(self, sisense_datamodel_revision: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_datamodel_revision = sisense_datamodel_revision + + @property + def sisense_datamodel_last_build_time(self) -> Optional[datetime]: + return ( + None + if self.attributes is None + else self.attributes.sisense_datamodel_last_build_time + ) + + @sisense_datamodel_last_build_time.setter + def sisense_datamodel_last_build_time( + self, sisense_datamodel_last_build_time: Optional[datetime] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_datamodel_last_build_time = ( + sisense_datamodel_last_build_time + ) + + @property + def sisense_datamodel_last_successful_build_time(self) -> Optional[datetime]: + return ( + None + if self.attributes is None + else self.attributes.sisense_datamodel_last_successful_build_time + ) + + @sisense_datamodel_last_successful_build_time.setter + def sisense_datamodel_last_successful_build_time( + self, 
sisense_datamodel_last_successful_build_time: Optional[datetime] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_datamodel_last_successful_build_time = ( + sisense_datamodel_last_successful_build_time + ) + + @property + def sisense_datamodel_last_publish_time(self) -> Optional[datetime]: + return ( + None + if self.attributes is None + else self.attributes.sisense_datamodel_last_publish_time + ) + + @sisense_datamodel_last_publish_time.setter + def sisense_datamodel_last_publish_time( + self, sisense_datamodel_last_publish_time: Optional[datetime] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_datamodel_last_publish_time = ( + sisense_datamodel_last_publish_time + ) + + @property + def sisense_datamodel_type(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.sisense_datamodel_type + ) + + @sisense_datamodel_type.setter + def sisense_datamodel_type(self, sisense_datamodel_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_datamodel_type = sisense_datamodel_type + + @property + def sisense_datamodel_relation_type(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.sisense_datamodel_relation_type + ) + + @sisense_datamodel_relation_type.setter + def sisense_datamodel_relation_type( + self, sisense_datamodel_relation_type: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_datamodel_relation_type = ( + sisense_datamodel_relation_type + ) + + @property + def sisense_datamodel_tables(self) -> Optional[list[SisenseDatamodelTable]]: + return ( + None + if self.attributes is None + else self.attributes.sisense_datamodel_tables + ) + + @sisense_datamodel_tables.setter + def sisense_datamodel_tables( + self, sisense_datamodel_tables: Optional[list[SisenseDatamodelTable]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_datamodel_tables = sisense_datamodel_tables + + @property + def sisense_dashboards(self) -> Optional[list[SisenseDashboard]]: + return None if self.attributes is None else self.attributes.sisense_dashboards + + @sisense_dashboards.setter + def sisense_dashboards(self, sisense_dashboards: Optional[list[SisenseDashboard]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_dashboards = sisense_dashboards + + class Attributes(Sisense.Attributes): + sisense_datamodel_table_count: Optional[int] = Field( + default=None, description="" + ) + sisense_datamodel_server: Optional[str] = Field(default=None, description="") + sisense_datamodel_revision: Optional[str] = Field(default=None, description="") + sisense_datamodel_last_build_time: Optional[datetime] = Field( + default=None, description="" + ) + sisense_datamodel_last_successful_build_time: Optional[datetime] = Field( + default=None, description="" + ) + sisense_datamodel_last_publish_time: Optional[datetime] = Field( + default=None, description="" + ) + sisense_datamodel_type: Optional[str] = Field(default=None, description="") + sisense_datamodel_relation_type: Optional[str] = Field( + default=None, description="" + ) + sisense_datamodel_tables: Optional[list[SisenseDatamodelTable]] = Field( + default=None, description="" + ) # relationship + sisense_dashboards: Optional[list[SisenseDashboard]] = Field( + default=None, description="" + ) # 
relationship + + attributes: "SisenseDatamodel.Attributes" = Field( + default_factory=lambda: SisenseDatamodel.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .sisense_dashboard import SisenseDashboard # noqa +from .sisense_datamodel_table import SisenseDatamodelTable # noqa diff --git a/pyatlan/model/assets/sisense_datamodel_table.py b/pyatlan/model/assets/sisense_datamodel_table.py new file mode 100644 index 000000000..20b2b86ad --- /dev/null +++ b/pyatlan/model/assets/sisense_datamodel_table.py @@ -0,0 +1,312 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import ( + BooleanField, + KeywordField, + KeywordTextField, + NumericField, + RelationField, +) + +from .sisense import Sisense + + +class SisenseDatamodelTable(Sisense): + """Description""" + + type_name: str = Field(default="SisenseDatamodelTable", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "SisenseDatamodelTable": + raise ValueError("must be SisenseDatamodelTable") + return v + + def __setattr__(self, name, value): + if name in SisenseDatamodelTable._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + SISENSE_DATAMODEL_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "sisenseDatamodelQualifiedName", + "sisenseDatamodelQualifiedName", + "sisenseDatamodelQualifiedName.text", + ) + """ + Unique name of the datamodel in which this datamodel table exists. + """ + SISENSE_DATAMODEL_TABLE_COLUMN_COUNT: ClassVar[NumericField] = NumericField( + "sisenseDatamodelTableColumnCount", "sisenseDatamodelTableColumnCount" + ) + """ + Number of columns present in this datamodel table. + """ + SISENSE_DATAMODEL_TABLE_TYPE: ClassVar[KeywordField] = KeywordField( + "sisenseDatamodelTableType", "sisenseDatamodelTableType" + ) + """ + Type of this datamodel table, for example: 'base' for regular tables, 'custom' for SQL expression-based tables. + """ + SISENSE_DATAMODEL_TABLE_EXPRESSION: ClassVar[KeywordField] = KeywordField( + "sisenseDatamodelTableExpression", "sisenseDatamodelTableExpression" + ) + """ + SQL expression of this datamodel table. + """ + SISENSE_DATAMODEL_TABLE_IS_MATERIALIZED: ClassVar[BooleanField] = BooleanField( + "sisenseDatamodelTableIsMaterialized", "sisenseDatamodelTableIsMaterialized" + ) + """ + Whether this datamodel table is materialised (true) or not (false). + """ + SISENSE_DATAMODEL_TABLE_IS_HIDDEN: ClassVar[BooleanField] = BooleanField( + "sisenseDatamodelTableIsHidden", "sisenseDatamodelTableIsHidden" + ) + """ + Whether this datamodel table is hidden in Sisense (true) or not (false). + """ + SISENSE_DATAMODEL_TABLE_SCHEDULE: ClassVar[KeywordField] = KeywordField( + "sisenseDatamodelTableSchedule", "sisenseDatamodelTableSchedule" + ) + """ + JSON specifying the refresh schedule of this datamodel table. + """ + SISENSE_DATAMODEL_TABLE_LIVE_QUERY_SETTINGS: ClassVar[KeywordField] = KeywordField( + "sisenseDatamodelTableLiveQuerySettings", + "sisenseDatamodelTableLiveQuerySettings", + ) + """ + JSON specifying the LiveQuery settings of this datamodel table. 
+ """ + + SISENSE_DATAMODEL: ClassVar[RelationField] = RelationField("sisenseDatamodel") + """ + TBC + """ + SISENSE_WIDGETS: ClassVar[RelationField] = RelationField("sisenseWidgets") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "sisense_datamodel_qualified_name", + "sisense_datamodel_table_column_count", + "sisense_datamodel_table_type", + "sisense_datamodel_table_expression", + "sisense_datamodel_table_is_materialized", + "sisense_datamodel_table_is_hidden", + "sisense_datamodel_table_schedule", + "sisense_datamodel_table_live_query_settings", + "sisense_datamodel", + "sisense_widgets", + ] + + @property + def sisense_datamodel_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.sisense_datamodel_qualified_name + ) + + @sisense_datamodel_qualified_name.setter + def sisense_datamodel_qualified_name( + self, sisense_datamodel_qualified_name: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_datamodel_qualified_name = ( + sisense_datamodel_qualified_name + ) + + @property + def sisense_datamodel_table_column_count(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.sisense_datamodel_table_column_count + ) + + @sisense_datamodel_table_column_count.setter + def sisense_datamodel_table_column_count( + self, sisense_datamodel_table_column_count: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_datamodel_table_column_count = ( + sisense_datamodel_table_column_count + ) + + @property + def sisense_datamodel_table_type(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.sisense_datamodel_table_type + ) + + @sisense_datamodel_table_type.setter + def sisense_datamodel_table_type(self, sisense_datamodel_table_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_datamodel_table_type = sisense_datamodel_table_type + + @property + def sisense_datamodel_table_expression(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.sisense_datamodel_table_expression + ) + + @sisense_datamodel_table_expression.setter + def sisense_datamodel_table_expression( + self, sisense_datamodel_table_expression: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_datamodel_table_expression = ( + sisense_datamodel_table_expression + ) + + @property + def sisense_datamodel_table_is_materialized(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.sisense_datamodel_table_is_materialized + ) + + @sisense_datamodel_table_is_materialized.setter + def sisense_datamodel_table_is_materialized( + self, sisense_datamodel_table_is_materialized: Optional[bool] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_datamodel_table_is_materialized = ( + sisense_datamodel_table_is_materialized + ) + + @property + def sisense_datamodel_table_is_hidden(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.sisense_datamodel_table_is_hidden + ) + + @sisense_datamodel_table_is_hidden.setter + def sisense_datamodel_table_is_hidden( + self, sisense_datamodel_table_is_hidden: Optional[bool] + ): + if self.attributes is None: + self.attributes = self.Attributes() + 
self.attributes.sisense_datamodel_table_is_hidden = ( + sisense_datamodel_table_is_hidden + ) + + @property + def sisense_datamodel_table_schedule(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.sisense_datamodel_table_schedule + ) + + @sisense_datamodel_table_schedule.setter + def sisense_datamodel_table_schedule( + self, sisense_datamodel_table_schedule: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_datamodel_table_schedule = ( + sisense_datamodel_table_schedule + ) + + @property + def sisense_datamodel_table_live_query_settings(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.sisense_datamodel_table_live_query_settings + ) + + @sisense_datamodel_table_live_query_settings.setter + def sisense_datamodel_table_live_query_settings( + self, sisense_datamodel_table_live_query_settings: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_datamodel_table_live_query_settings = ( + sisense_datamodel_table_live_query_settings + ) + + @property + def sisense_datamodel(self) -> Optional[SisenseDatamodel]: + return None if self.attributes is None else self.attributes.sisense_datamodel + + @sisense_datamodel.setter + def sisense_datamodel(self, sisense_datamodel: Optional[SisenseDatamodel]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_datamodel = sisense_datamodel + + @property + def sisense_widgets(self) -> Optional[list[SisenseWidget]]: + return None if self.attributes is None else self.attributes.sisense_widgets + + @sisense_widgets.setter + def sisense_widgets(self, sisense_widgets: Optional[list[SisenseWidget]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_widgets = sisense_widgets + + class Attributes(Sisense.Attributes): + sisense_datamodel_qualified_name: Optional[str] = Field( + default=None, description="" + ) + sisense_datamodel_table_column_count: Optional[int] = Field( + default=None, description="" + ) + sisense_datamodel_table_type: Optional[str] = Field( + default=None, description="" + ) + sisense_datamodel_table_expression: Optional[str] = Field( + default=None, description="" + ) + sisense_datamodel_table_is_materialized: Optional[bool] = Field( + default=None, description="" + ) + sisense_datamodel_table_is_hidden: Optional[bool] = Field( + default=None, description="" + ) + sisense_datamodel_table_schedule: Optional[str] = Field( + default=None, description="" + ) + sisense_datamodel_table_live_query_settings: Optional[str] = Field( + default=None, description="" + ) + sisense_datamodel: Optional[SisenseDatamodel] = Field( + default=None, description="" + ) # relationship + sisense_widgets: Optional[list[SisenseWidget]] = Field( + default=None, description="" + ) # relationship + + attributes: "SisenseDatamodelTable.Attributes" = Field( + default_factory=lambda: SisenseDatamodelTable.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .sisense_datamodel import SisenseDatamodel # noqa +from .sisense_widget import SisenseWidget # noqa diff --git a/pyatlan/model/assets/sisense_folder.py b/pyatlan/model/assets/sisense_folder.py new file mode 100644 index 000000000..9f0789c85 --- /dev/null +++ b/pyatlan/model/assets/sisense_folder.py @@ -0,0 +1,161 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordTextField, RelationField + +from .sisense import Sisense + + +class SisenseFolder(Sisense): + """Description""" + + type_name: str = Field(default="SisenseFolder", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "SisenseFolder": + raise ValueError("must be SisenseFolder") + return v + + def __setattr__(self, name, value): + if name in SisenseFolder._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + SISENSE_FOLDER_PARENT_FOLDER_QUALIFIED_NAME: ClassVar[ + KeywordTextField + ] = KeywordTextField( + "sisenseFolderParentFolderQualifiedName", + "sisenseFolderParentFolderQualifiedName", + "sisenseFolderParentFolderQualifiedName.text", + ) + """ + Unique name of the parent folder in which this folder exists. + """ + + SISENSE_CHILD_FOLDERS: ClassVar[RelationField] = RelationField( + "sisenseChildFolders" + ) + """ + TBC + """ + SISENSE_WIDGETS: ClassVar[RelationField] = RelationField("sisenseWidgets") + """ + TBC + """ + SISENSE_DASHBOARDS: ClassVar[RelationField] = RelationField("sisenseDashboards") + """ + TBC + """ + SISENSE_PARENT_FOLDER: ClassVar[RelationField] = RelationField( + "sisenseParentFolder" + ) + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "sisense_folder_parent_folder_qualified_name", + "sisense_child_folders", + "sisense_widgets", + "sisense_dashboards", + "sisense_parent_folder", + ] + + @property + def sisense_folder_parent_folder_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.sisense_folder_parent_folder_qualified_name + ) + + @sisense_folder_parent_folder_qualified_name.setter + def sisense_folder_parent_folder_qualified_name( + self, sisense_folder_parent_folder_qualified_name: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_folder_parent_folder_qualified_name = ( + sisense_folder_parent_folder_qualified_name + ) + + @property + def sisense_child_folders(self) -> Optional[list[SisenseFolder]]: + return ( + None if self.attributes is None else self.attributes.sisense_child_folders + ) + + @sisense_child_folders.setter + def sisense_child_folders( + self, sisense_child_folders: Optional[list[SisenseFolder]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_child_folders = sisense_child_folders + + @property + def sisense_widgets(self) -> Optional[list[SisenseWidget]]: + return None if self.attributes is None else self.attributes.sisense_widgets + + @sisense_widgets.setter + def sisense_widgets(self, sisense_widgets: Optional[list[SisenseWidget]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_widgets = sisense_widgets + + @property + def 
sisense_dashboards(self) -> Optional[list[SisenseDashboard]]: + return None if self.attributes is None else self.attributes.sisense_dashboards + + @sisense_dashboards.setter + def sisense_dashboards(self, sisense_dashboards: Optional[list[SisenseDashboard]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_dashboards = sisense_dashboards + + @property + def sisense_parent_folder(self) -> Optional[SisenseFolder]: + return ( + None if self.attributes is None else self.attributes.sisense_parent_folder + ) + + @sisense_parent_folder.setter + def sisense_parent_folder(self, sisense_parent_folder: Optional[SisenseFolder]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_parent_folder = sisense_parent_folder + + class Attributes(Sisense.Attributes): + sisense_folder_parent_folder_qualified_name: Optional[str] = Field( + default=None, description="" + ) + sisense_child_folders: Optional[list[SisenseFolder]] = Field( + default=None, description="" + ) # relationship + sisense_widgets: Optional[list[SisenseWidget]] = Field( + default=None, description="" + ) # relationship + sisense_dashboards: Optional[list[SisenseDashboard]] = Field( + default=None, description="" + ) # relationship + sisense_parent_folder: Optional[SisenseFolder] = Field( + default=None, description="" + ) # relationship + + attributes: "SisenseFolder.Attributes" = Field( + default_factory=lambda: SisenseFolder.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .sisense_dashboard import SisenseDashboard # noqa +from .sisense_widget import SisenseWidget # noqa diff --git a/pyatlan/model/assets/sisense_widget.py b/pyatlan/model/assets/sisense_widget.py new file mode 100644 index 000000000..74c9326b7 --- /dev/null +++ b/pyatlan/model/assets/sisense_widget.py @@ -0,0 +1,237 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import ( + KeywordField, + KeywordTextField, + NumericField, + RelationField, +) + +from .sisense import Sisense + + +class SisenseWidget(Sisense): + """Description""" + + type_name: str = Field(default="SisenseWidget", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "SisenseWidget": + raise ValueError("must be SisenseWidget") + return v + + def __setattr__(self, name, value): + if name in SisenseWidget._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + SISENSE_WIDGET_COLUMN_COUNT: ClassVar[NumericField] = NumericField( + "sisenseWidgetColumnCount", "sisenseWidgetColumnCount" + ) + """ + Number of columns used in this widget. + """ + SISENSE_WIDGET_SUB_TYPE: ClassVar[KeywordField] = KeywordField( + "sisenseWidgetSubType", "sisenseWidgetSubType" + ) + """ + Subtype of this widget. + """ + SISENSE_WIDGET_SIZE: ClassVar[KeywordField] = KeywordField( + "sisenseWidgetSize", "sisenseWidgetSize" + ) + """ + Size of this widget. 
+ """ + SISENSE_WIDGET_DASHBOARD_QUALIFIED_NAME: ClassVar[ + KeywordTextField + ] = KeywordTextField( + "sisenseWidgetDashboardQualifiedName", + "sisenseWidgetDashboardQualifiedName", + "sisenseWidgetDashboardQualifiedName.text", + ) + """ + Unique name of the dashboard in which this widget exists. + """ + SISENSE_WIDGET_FOLDER_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "sisenseWidgetFolderQualifiedName", + "sisenseWidgetFolderQualifiedName", + "sisenseWidgetFolderQualifiedName.text", + ) + """ + Unique name of the folder in which this widget exists. + """ + + SISENSE_DATAMODEL_TABLES: ClassVar[RelationField] = RelationField( + "sisenseDatamodelTables" + ) + """ + TBC + """ + SISENSE_FOLDER: ClassVar[RelationField] = RelationField("sisenseFolder") + """ + TBC + """ + SISENSE_DASHBOARD: ClassVar[RelationField] = RelationField("sisenseDashboard") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "sisense_widget_column_count", + "sisense_widget_sub_type", + "sisense_widget_size", + "sisense_widget_dashboard_qualified_name", + "sisense_widget_folder_qualified_name", + "sisense_datamodel_tables", + "sisense_folder", + "sisense_dashboard", + ] + + @property + def sisense_widget_column_count(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.sisense_widget_column_count + ) + + @sisense_widget_column_count.setter + def sisense_widget_column_count(self, sisense_widget_column_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_widget_column_count = sisense_widget_column_count + + @property + def sisense_widget_sub_type(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.sisense_widget_sub_type + ) + + @sisense_widget_sub_type.setter + def sisense_widget_sub_type(self, sisense_widget_sub_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_widget_sub_type = sisense_widget_sub_type + + @property + def sisense_widget_size(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.sisense_widget_size + + @sisense_widget_size.setter + def sisense_widget_size(self, sisense_widget_size: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_widget_size = sisense_widget_size + + @property + def sisense_widget_dashboard_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.sisense_widget_dashboard_qualified_name + ) + + @sisense_widget_dashboard_qualified_name.setter + def sisense_widget_dashboard_qualified_name( + self, sisense_widget_dashboard_qualified_name: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_widget_dashboard_qualified_name = ( + sisense_widget_dashboard_qualified_name + ) + + @property + def sisense_widget_folder_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.sisense_widget_folder_qualified_name + ) + + @sisense_widget_folder_qualified_name.setter + def sisense_widget_folder_qualified_name( + self, sisense_widget_folder_qualified_name: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_widget_folder_qualified_name = ( + sisense_widget_folder_qualified_name + ) + + @property + def sisense_datamodel_tables(self) -> 
Optional[list[SisenseDatamodelTable]]: + return ( + None + if self.attributes is None + else self.attributes.sisense_datamodel_tables + ) + + @sisense_datamodel_tables.setter + def sisense_datamodel_tables( + self, sisense_datamodel_tables: Optional[list[SisenseDatamodelTable]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_datamodel_tables = sisense_datamodel_tables + + @property + def sisense_folder(self) -> Optional[SisenseFolder]: + return None if self.attributes is None else self.attributes.sisense_folder + + @sisense_folder.setter + def sisense_folder(self, sisense_folder: Optional[SisenseFolder]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_folder = sisense_folder + + @property + def sisense_dashboard(self) -> Optional[SisenseDashboard]: + return None if self.attributes is None else self.attributes.sisense_dashboard + + @sisense_dashboard.setter + def sisense_dashboard(self, sisense_dashboard: Optional[SisenseDashboard]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sisense_dashboard = sisense_dashboard + + class Attributes(Sisense.Attributes): + sisense_widget_column_count: Optional[int] = Field(default=None, description="") + sisense_widget_sub_type: Optional[str] = Field(default=None, description="") + sisense_widget_size: Optional[str] = Field(default=None, description="") + sisense_widget_dashboard_qualified_name: Optional[str] = Field( + default=None, description="" + ) + sisense_widget_folder_qualified_name: Optional[str] = Field( + default=None, description="" + ) + sisense_datamodel_tables: Optional[list[SisenseDatamodelTable]] = Field( + default=None, description="" + ) # relationship + sisense_folder: Optional[SisenseFolder] = Field( + default=None, description="" + ) # relationship + sisense_dashboard: Optional[SisenseDashboard] = Field( + default=None, description="" + ) # relationship + + attributes: "SisenseWidget.Attributes" = Field( + default_factory=lambda: SisenseWidget.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .sisense_dashboard import SisenseDashboard # noqa +from .sisense_datamodel_table import SisenseDatamodelTable # noqa +from .sisense_folder import SisenseFolder # noqa diff --git a/pyatlan/model/assets/snowflake_dynamic_table.py b/pyatlan/model/assets/snowflake_dynamic_table.py new file mode 100644 index 000000000..f7fb4acc7 --- /dev/null +++ b/pyatlan/model/assets/snowflake_dynamic_table.py @@ -0,0 +1,58 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField + +from .table import Table + + +class SnowflakeDynamicTable(Table): + """Description""" + + type_name: str = Field(default="SnowflakeDynamicTable", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "SnowflakeDynamicTable": + raise ValueError("must be SnowflakeDynamicTable") + return v + + def __setattr__(self, name, value): + if name in SnowflakeDynamicTable._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + DEFINITION: ClassVar[KeywordField] = KeywordField("definition", "definition") + """ + SQL statements used to define the dynamic table. + """ + + _convenience_properties: ClassVar[list[str]] = [ + "definition", + ] + + @property + def definition(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.definition + + @definition.setter + def definition(self, definition: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.definition = definition + + class Attributes(Table.Attributes): + definition: Optional[str] = Field(default=None, description="") + + attributes: "SnowflakeDynamicTable.Attributes" = Field( + default_factory=lambda: SnowflakeDynamicTable.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) diff --git a/pyatlan/model/assets/snowflake_pipe.py b/pyatlan/model/assets/snowflake_pipe.py new file mode 100644 index 000000000..be74c5740 --- /dev/null +++ b/pyatlan/model/assets/snowflake_pipe.py @@ -0,0 +1,145 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import ( + BooleanField, + KeywordField, + KeywordTextField, + RelationField, +) + +from .s_q_l import SQL + + +class SnowflakePipe(SQL): + """Description""" + + type_name: str = Field(default="SnowflakePipe", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "SnowflakePipe": + raise ValueError("must be SnowflakePipe") + return v + + def __setattr__(self, name, value): + if name in SnowflakePipe._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + DEFINITION: ClassVar[KeywordField] = KeywordField("definition", "definition") + """ + SQL definition of this pipe. + """ + SNOWFLAKE_PIPE_IS_AUTO_INGEST_ENABLED: ClassVar[BooleanField] = BooleanField( + "snowflakePipeIsAutoIngestEnabled", "snowflakePipeIsAutoIngestEnabled" + ) + """ + Whether auto-ingest is enabled for this pipe (true) or not (false). + """ + SNOWFLAKE_PIPE_NOTIFICATION_CHANNEL_NAME: ClassVar[ + KeywordTextField + ] = KeywordTextField( + "snowflakePipeNotificationChannelName", + "snowflakePipeNotificationChannelName", + "snowflakePipeNotificationChannelName.text", + ) + """ + Name of the notification channel for this pipe. 
+ """ + + ATLAN_SCHEMA: ClassVar[RelationField] = RelationField("atlanSchema") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "definition", + "snowflake_pipe_is_auto_ingest_enabled", + "snowflake_pipe_notification_channel_name", + "atlan_schema", + ] + + @property + def definition(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.definition + + @definition.setter + def definition(self, definition: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.definition = definition + + @property + def snowflake_pipe_is_auto_ingest_enabled(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.snowflake_pipe_is_auto_ingest_enabled + ) + + @snowflake_pipe_is_auto_ingest_enabled.setter + def snowflake_pipe_is_auto_ingest_enabled( + self, snowflake_pipe_is_auto_ingest_enabled: Optional[bool] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.snowflake_pipe_is_auto_ingest_enabled = ( + snowflake_pipe_is_auto_ingest_enabled + ) + + @property + def snowflake_pipe_notification_channel_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.snowflake_pipe_notification_channel_name + ) + + @snowflake_pipe_notification_channel_name.setter + def snowflake_pipe_notification_channel_name( + self, snowflake_pipe_notification_channel_name: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.snowflake_pipe_notification_channel_name = ( + snowflake_pipe_notification_channel_name + ) + + @property + def atlan_schema(self) -> Optional[Schema]: + return None if self.attributes is None else self.attributes.atlan_schema + + @atlan_schema.setter + def atlan_schema(self, atlan_schema: Optional[Schema]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.atlan_schema = atlan_schema + + class Attributes(SQL.Attributes): + definition: Optional[str] = Field(default=None, description="") + snowflake_pipe_is_auto_ingest_enabled: Optional[bool] = Field( + default=None, description="" + ) + snowflake_pipe_notification_channel_name: Optional[str] = Field( + default=None, description="" + ) + atlan_schema: Optional[Schema] = Field( + default=None, description="" + ) # relationship + + attributes: "SnowflakePipe.Attributes" = Field( + default_factory=lambda: SnowflakePipe.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .schema import Schema # noqa diff --git a/pyatlan/model/assets/snowflake_stream.py b/pyatlan/model/assets/snowflake_stream.py new file mode 100644 index 000000000..e4cba8ce9 --- /dev/null +++ b/pyatlan/model/assets/snowflake_stream.py @@ -0,0 +1,182 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
+ + +from __future__ import annotations + +from datetime import datetime +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import ( + BooleanField, + KeywordField, + NumericField, + RelationField, +) + +from .s_q_l import SQL + + +class SnowflakeStream(SQL): + """Description""" + + type_name: str = Field(default="SnowflakeStream", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "SnowflakeStream": + raise ValueError("must be SnowflakeStream") + return v + + def __setattr__(self, name, value): + if name in SnowflakeStream._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + SNOWFLAKE_STREAM_TYPE: ClassVar[KeywordField] = KeywordField( + "snowflakeStreamType", "snowflakeStreamType" + ) + """ + Type of this stream, for example: standard, append-only, insert-only, etc. + """ + SNOWFLAKE_STREAM_SOURCE_TYPE: ClassVar[KeywordField] = KeywordField( + "snowflakeStreamSourceType", "snowflakeStreamSourceType" + ) + """ + Type of the source of this stream. + """ + SNOWFLAKE_STREAM_MODE: ClassVar[KeywordField] = KeywordField( + "snowflakeStreamMode", "snowflakeStreamMode" + ) + """ + Mode of this stream. + """ + SNOWFLAKE_STREAM_IS_STALE: ClassVar[BooleanField] = BooleanField( + "snowflakeStreamIsStale", "snowflakeStreamIsStale" + ) + """ + Whether this stream is stale (true) or not (false). + """ + SNOWFLAKE_STREAM_STALE_AFTER: ClassVar[NumericField] = NumericField( + "snowflakeStreamStaleAfter", "snowflakeStreamStaleAfter" + ) + """ + Time (epoch) after which this stream will be stale, in milliseconds. + """ + + ATLAN_SCHEMA: ClassVar[RelationField] = RelationField("atlanSchema") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "snowflake_stream_type", + "snowflake_stream_source_type", + "snowflake_stream_mode", + "snowflake_stream_is_stale", + "snowflake_stream_stale_after", + "atlan_schema", + ] + + @property + def snowflake_stream_type(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.snowflake_stream_type + ) + + @snowflake_stream_type.setter + def snowflake_stream_type(self, snowflake_stream_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.snowflake_stream_type = snowflake_stream_type + + @property + def snowflake_stream_source_type(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.snowflake_stream_source_type + ) + + @snowflake_stream_source_type.setter + def snowflake_stream_source_type(self, snowflake_stream_source_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.snowflake_stream_source_type = snowflake_stream_source_type + + @property + def snowflake_stream_mode(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.snowflake_stream_mode + ) + + @snowflake_stream_mode.setter + def snowflake_stream_mode(self, snowflake_stream_mode: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.snowflake_stream_mode = snowflake_stream_mode + + @property + def snowflake_stream_is_stale(self) -> Optional[bool]: + return ( + None + if self.attributes is None + else self.attributes.snowflake_stream_is_stale + ) + + @snowflake_stream_is_stale.setter + def snowflake_stream_is_stale(self, snowflake_stream_is_stale: Optional[bool]): 
+ if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.snowflake_stream_is_stale = snowflake_stream_is_stale + + @property + def snowflake_stream_stale_after(self) -> Optional[datetime]: + return ( + None + if self.attributes is None + else self.attributes.snowflake_stream_stale_after + ) + + @snowflake_stream_stale_after.setter + def snowflake_stream_stale_after( + self, snowflake_stream_stale_after: Optional[datetime] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.snowflake_stream_stale_after = snowflake_stream_stale_after + + @property + def atlan_schema(self) -> Optional[Schema]: + return None if self.attributes is None else self.attributes.atlan_schema + + @atlan_schema.setter + def atlan_schema(self, atlan_schema: Optional[Schema]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.atlan_schema = atlan_schema + + class Attributes(SQL.Attributes): + snowflake_stream_type: Optional[str] = Field(default=None, description="") + snowflake_stream_source_type: Optional[str] = Field( + default=None, description="" + ) + snowflake_stream_mode: Optional[str] = Field(default=None, description="") + snowflake_stream_is_stale: Optional[bool] = Field(default=None, description="") + snowflake_stream_stale_after: Optional[datetime] = Field( + default=None, description="" + ) + atlan_schema: Optional[Schema] = Field( + default=None, description="" + ) # relationship + + attributes: "SnowflakeStream.Attributes" = Field( + default_factory=lambda: SnowflakeStream.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .schema import Schema # noqa diff --git a/pyatlan/model/assets/snowflake_tag.py b/pyatlan/model/assets/snowflake_tag.py new file mode 100644 index 000000000..6f9c6ad84 --- /dev/null +++ b/pyatlan/model/assets/snowflake_tag.py @@ -0,0 +1,493 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from datetime import datetime +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import ( + BooleanField, + KeywordField, + KeywordTextField, + NumericField, + RelationField, +) +from pyatlan.model.structs import SourceTagAttribute + +from .tag import Tag + + +class SnowflakeTag(Tag): + """Description""" + + type_name: str = Field(default="SnowflakeTag", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "SnowflakeTag": + raise ValueError("must be SnowflakeTag") + return v + + def __setattr__(self, name, value): + if name in SnowflakeTag._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + TAG_ID: ClassVar[KeywordField] = KeywordField("tagId", "tagId") + """ + Unique identifier of the tag in the source system. + """ + TAG_ATTRIBUTES: ClassVar[KeywordField] = KeywordField( + "tagAttributes", "tagAttributes" + ) + """ + Attributes associated with the tag in the source system. + """ + TAG_ALLOWED_VALUES: ClassVar[KeywordTextField] = KeywordTextField( + "tagAllowedValues", "tagAllowedValues", "tagAllowedValues.text" + ) + """ + Allowed values for the tag in the source system. These are denormalized from tagAttributes for ease of querying. 
+ """ + MAPPED_CLASSIFICATION_NAME: ClassVar[KeywordField] = KeywordField( + "mappedClassificationName", "mappedClassificationName" + ) + """ + Name of the classification in Atlan that is mapped to this tag. + """ + QUERY_COUNT: ClassVar[NumericField] = NumericField("queryCount", "queryCount") + """ + Number of times this asset has been queried. + """ + QUERY_USER_COUNT: ClassVar[NumericField] = NumericField( + "queryUserCount", "queryUserCount" + ) + """ + Number of unique users who have queried this asset. + """ + QUERY_USER_MAP: ClassVar[KeywordField] = KeywordField( + "queryUserMap", "queryUserMap" + ) + """ + Map of unique users who have queried this asset to the number of times they have queried it. + """ + QUERY_COUNT_UPDATED_AT: ClassVar[NumericField] = NumericField( + "queryCountUpdatedAt", "queryCountUpdatedAt" + ) + """ + Time (epoch) at which the query count was last updated, in milliseconds. + """ + DATABASE_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "databaseName", "databaseName.keyword", "databaseName" + ) + """ + Simple name of the database in which this SQL asset exists, or empty if it does not exist within a database. + """ + DATABASE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "databaseQualifiedName", "databaseQualifiedName" + ) + """ + Unique name of the database in which this SQL asset exists, or empty if it does not exist within a database. + """ + SCHEMA_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "schemaName", "schemaName.keyword", "schemaName" + ) + """ + Simple name of the schema in which this SQL asset exists, or empty if it does not exist within a schema. + """ + SCHEMA_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "schemaQualifiedName", "schemaQualifiedName" + ) + """ + Unique name of the schema in which this SQL asset exists, or empty if it does not exist within a schema. + """ + TABLE_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "tableName", "tableName.keyword", "tableName" + ) + """ + Simple name of the table in which this SQL asset exists, or empty if it does not exist within a table. + """ + TABLE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "tableQualifiedName", "tableQualifiedName" + ) + """ + Unique name of the table in which this SQL asset exists, or empty if it does not exist within a table. + """ + VIEW_NAME: ClassVar[KeywordTextField] = KeywordTextField( + "viewName", "viewName.keyword", "viewName" + ) + """ + Simple name of the view in which this SQL asset exists, or empty if it does not exist within a view. + """ + VIEW_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "viewQualifiedName", "viewQualifiedName" + ) + """ + Unique name of the view in which this SQL asset exists, or empty if it does not exist within a view. + """ + IS_PROFILED: ClassVar[BooleanField] = BooleanField("isProfiled", "isProfiled") + """ + Whether this asset has been profiled (true) or not (false). + """ + LAST_PROFILED_AT: ClassVar[NumericField] = NumericField( + "lastProfiledAt", "lastProfiledAt" + ) + """ + Time (epoch) at which this asset was last profiled, in milliseconds. 
+ """ + + DBT_SOURCES: ClassVar[RelationField] = RelationField("dbtSources") + """ + TBC + """ + SQL_DBT_MODELS: ClassVar[RelationField] = RelationField("sqlDbtModels") + """ + TBC + """ + SQL_DBT_SOURCES: ClassVar[RelationField] = RelationField("sqlDBTSources") + """ + TBC + """ + DBT_MODELS: ClassVar[RelationField] = RelationField("dbtModels") + """ + TBC + """ + DBT_TESTS: ClassVar[RelationField] = RelationField("dbtTests") + """ + TBC + """ + ATLAN_SCHEMA: ClassVar[RelationField] = RelationField("atlanSchema") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "tag_id", + "tag_attributes", + "tag_allowed_values", + "mapped_atlan_tag_name", + "query_count", + "query_user_count", + "query_user_map", + "query_count_updated_at", + "database_name", + "database_qualified_name", + "schema_name", + "schema_qualified_name", + "table_name", + "table_qualified_name", + "view_name", + "view_qualified_name", + "is_profiled", + "last_profiled_at", + "dbt_sources", + "sql_dbt_models", + "sql_dbt_sources", + "dbt_models", + "dbt_tests", + "atlan_schema", + ] + + @property + def tag_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.tag_id + + @tag_id.setter + def tag_id(self, tag_id: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.tag_id = tag_id + + @property + def tag_attributes(self) -> Optional[list[SourceTagAttribute]]: + return None if self.attributes is None else self.attributes.tag_attributes + + @tag_attributes.setter + def tag_attributes(self, tag_attributes: Optional[list[SourceTagAttribute]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.tag_attributes = tag_attributes + + @property + def tag_allowed_values(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.tag_allowed_values + + @tag_allowed_values.setter + def tag_allowed_values(self, tag_allowed_values: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.tag_allowed_values = tag_allowed_values + + @property + def mapped_atlan_tag_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.mapped_atlan_tag_name + ) + + @mapped_atlan_tag_name.setter + def mapped_atlan_tag_name(self, mapped_atlan_tag_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mapped_atlan_tag_name = mapped_atlan_tag_name + + @property + def query_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.query_count + + @query_count.setter + def query_count(self, query_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_count = query_count + + @property + def query_user_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.query_user_count + + @query_user_count.setter + def query_user_count(self, query_user_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_user_count = query_user_count + + @property + def query_user_map(self) -> Optional[dict[str, int]]: + return None if self.attributes is None else self.attributes.query_user_map + + @query_user_map.setter + def query_user_map(self, query_user_map: Optional[dict[str, int]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_user_map = 
query_user_map + + @property + def query_count_updated_at(self) -> Optional[datetime]: + return ( + None if self.attributes is None else self.attributes.query_count_updated_at + ) + + @query_count_updated_at.setter + def query_count_updated_at(self, query_count_updated_at: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_count_updated_at = query_count_updated_at + + @property + def database_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.database_name + + @database_name.setter + def database_name(self, database_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.database_name = database_name + + @property + def database_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.database_qualified_name + ) + + @database_qualified_name.setter + def database_qualified_name(self, database_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.database_qualified_name = database_qualified_name + + @property + def schema_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.schema_name + + @schema_name.setter + def schema_name(self, schema_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.schema_name = schema_name + + @property + def schema_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.schema_qualified_name + ) + + @schema_qualified_name.setter + def schema_qualified_name(self, schema_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.schema_qualified_name = schema_qualified_name + + @property + def table_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.table_name + + @table_name.setter + def table_name(self, table_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.table_name = table_name + + @property + def table_qualified_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.table_qualified_name + + @table_qualified_name.setter + def table_qualified_name(self, table_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.table_qualified_name = table_qualified_name + + @property + def view_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.view_name + + @view_name.setter + def view_name(self, view_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.view_name = view_name + + @property + def view_qualified_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.view_qualified_name + + @view_qualified_name.setter + def view_qualified_name(self, view_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.view_qualified_name = view_qualified_name + + @property + def is_profiled(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_profiled + + @is_profiled.setter + def is_profiled(self, is_profiled: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + 
self.attributes.is_profiled = is_profiled + + @property + def last_profiled_at(self) -> Optional[datetime]: + return None if self.attributes is None else self.attributes.last_profiled_at + + @last_profiled_at.setter + def last_profiled_at(self, last_profiled_at: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.last_profiled_at = last_profiled_at + + @property + def dbt_sources(self) -> Optional[list[DbtSource]]: + return None if self.attributes is None else self.attributes.dbt_sources + + @dbt_sources.setter + def dbt_sources(self, dbt_sources: Optional[list[DbtSource]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_sources = dbt_sources + + @property + def sql_dbt_models(self) -> Optional[list[DbtModel]]: + return None if self.attributes is None else self.attributes.sql_dbt_models + + @sql_dbt_models.setter + def sql_dbt_models(self, sql_dbt_models: Optional[list[DbtModel]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sql_dbt_models = sql_dbt_models + + @property + def sql_dbt_sources(self) -> Optional[list[DbtSource]]: + return None if self.attributes is None else self.attributes.sql_dbt_sources + + @sql_dbt_sources.setter + def sql_dbt_sources(self, sql_dbt_sources: Optional[list[DbtSource]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.sql_dbt_sources = sql_dbt_sources + + @property + def dbt_models(self) -> Optional[list[DbtModel]]: + return None if self.attributes is None else self.attributes.dbt_models + + @dbt_models.setter + def dbt_models(self, dbt_models: Optional[list[DbtModel]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_models = dbt_models + + @property + def dbt_tests(self) -> Optional[list[DbtTest]]: + return None if self.attributes is None else self.attributes.dbt_tests + + @dbt_tests.setter + def dbt_tests(self, dbt_tests: Optional[list[DbtTest]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dbt_tests = dbt_tests + + @property + def atlan_schema(self) -> Optional[Schema]: + return None if self.attributes is None else self.attributes.atlan_schema + + @atlan_schema.setter + def atlan_schema(self, atlan_schema: Optional[Schema]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.atlan_schema = atlan_schema + + class Attributes(Tag.Attributes): + tag_id: Optional[str] = Field(default=None, description="") + tag_attributes: Optional[list[SourceTagAttribute]] = Field( + default=None, description="" + ) + tag_allowed_values: Optional[set[str]] = Field(default=None, description="") + mapped_atlan_tag_name: Optional[str] = Field(default=None, description="") + query_count: Optional[int] = Field(default=None, description="") + query_user_count: Optional[int] = Field(default=None, description="") + query_user_map: Optional[dict[str, int]] = Field(default=None, description="") + query_count_updated_at: Optional[datetime] = Field(default=None, description="") + database_name: Optional[str] = Field(default=None, description="") + database_qualified_name: Optional[str] = Field(default=None, description="") + schema_name: Optional[str] = Field(default=None, description="") + schema_qualified_name: Optional[str] = Field(default=None, description="") + table_name: Optional[str] = Field(default=None, description="") + table_qualified_name: Optional[str] = Field(default=None, 
description="") + view_name: Optional[str] = Field(default=None, description="") + view_qualified_name: Optional[str] = Field(default=None, description="") + is_profiled: Optional[bool] = Field(default=None, description="") + last_profiled_at: Optional[datetime] = Field(default=None, description="") + dbt_sources: Optional[list[DbtSource]] = Field( + default=None, description="" + ) # relationship + sql_dbt_models: Optional[list[DbtModel]] = Field( + default=None, description="" + ) # relationship + sql_dbt_sources: Optional[list[DbtSource]] = Field( + default=None, description="" + ) # relationship + dbt_models: Optional[list[DbtModel]] = Field( + default=None, description="" + ) # relationship + dbt_tests: Optional[list[DbtTest]] = Field( + default=None, description="" + ) # relationship + atlan_schema: Optional[Schema] = Field( + default=None, description="" + ) # relationship + + attributes: "SnowflakeTag.Attributes" = Field( + default_factory=lambda: SnowflakeTag.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .dbt_model import DbtModel # noqa +from .dbt_source import DbtSource # noqa +from .dbt_test import DbtTest # noqa +from .schema import Schema # noqa diff --git a/pyatlan/model/assets/soda.py b/pyatlan/model/assets/soda.py new file mode 100644 index 000000000..06d8a9bfd --- /dev/null +++ b/pyatlan/model/assets/soda.py @@ -0,0 +1,30 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar + +from pydantic.v1 import Field, validator + +from .data_quality import DataQuality + + +class Soda(DataQuality): + """Description""" + + type_name: str = Field(default="Soda", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "Soda": + raise ValueError("must be Soda") + return v + + def __setattr__(self, name, value): + if name in Soda._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + _convenience_properties: ClassVar[list[str]] = [] diff --git a/pyatlan/model/assets/soda_check.py b/pyatlan/model/assets/soda_check.py new file mode 100644 index 000000000..544031e1d --- /dev/null +++ b/pyatlan/model/assets/soda_check.py @@ -0,0 +1,188 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from datetime import datetime +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, NumericField, RelationField + +from .soda import Soda + + +class SodaCheck(Soda): + """Description""" + + type_name: str = Field(default="SodaCheck", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "SodaCheck": + raise ValueError("must be SodaCheck") + return v + + def __setattr__(self, name, value): + if name in SodaCheck._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + SODA_CHECK_ID: ClassVar[KeywordField] = KeywordField("sodaCheckId", "sodaCheckId") + """ + Identifier of the check in Soda. + """ + SODA_CHECK_EVALUATION_STATUS: ClassVar[KeywordField] = KeywordField( + "sodaCheckEvaluationStatus", "sodaCheckEvaluationStatus" + ) + """ + Status of the check in Soda. 
+ """ + SODA_CHECK_DEFINITION: ClassVar[KeywordField] = KeywordField( + "sodaCheckDefinition", "sodaCheckDefinition" + ) + """ + Definition of the check in Soda. + """ + SODA_CHECK_LAST_SCAN_AT: ClassVar[NumericField] = NumericField( + "sodaCheckLastScanAt", "sodaCheckLastScanAt" + ) + """ + + """ + SODA_CHECK_INCIDENT_COUNT: ClassVar[NumericField] = NumericField( + "sodaCheckIncidentCount", "sodaCheckIncidentCount" + ) + """ + + """ + + SODA_CHECK_COLUMNS: ClassVar[RelationField] = RelationField("sodaCheckColumns") + """ + TBC + """ + SODA_CHECK_ASSETS: ClassVar[RelationField] = RelationField("sodaCheckAssets") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "soda_check_id", + "soda_check_evaluation_status", + "soda_check_definition", + "soda_check_last_scan_at", + "soda_check_incident_count", + "soda_check_columns", + "soda_check_assets", + ] + + @property + def soda_check_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.soda_check_id + + @soda_check_id.setter + def soda_check_id(self, soda_check_id: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.soda_check_id = soda_check_id + + @property + def soda_check_evaluation_status(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.soda_check_evaluation_status + ) + + @soda_check_evaluation_status.setter + def soda_check_evaluation_status(self, soda_check_evaluation_status: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.soda_check_evaluation_status = soda_check_evaluation_status + + @property + def soda_check_definition(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.soda_check_definition + ) + + @soda_check_definition.setter + def soda_check_definition(self, soda_check_definition: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.soda_check_definition = soda_check_definition + + @property + def soda_check_last_scan_at(self) -> Optional[datetime]: + return ( + None if self.attributes is None else self.attributes.soda_check_last_scan_at + ) + + @soda_check_last_scan_at.setter + def soda_check_last_scan_at(self, soda_check_last_scan_at: Optional[datetime]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.soda_check_last_scan_at = soda_check_last_scan_at + + @property + def soda_check_incident_count(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.soda_check_incident_count + ) + + @soda_check_incident_count.setter + def soda_check_incident_count(self, soda_check_incident_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.soda_check_incident_count = soda_check_incident_count + + @property + def soda_check_columns(self) -> Optional[list[Column]]: + return None if self.attributes is None else self.attributes.soda_check_columns + + @soda_check_columns.setter + def soda_check_columns(self, soda_check_columns: Optional[list[Column]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.soda_check_columns = soda_check_columns + + @property + def soda_check_assets(self) -> Optional[list[Asset]]: + return None if self.attributes is None else self.attributes.soda_check_assets + + @soda_check_assets.setter + def soda_check_assets(self, soda_check_assets: Optional[list[Asset]]): + 
if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.soda_check_assets = soda_check_assets + + class Attributes(Soda.Attributes): + soda_check_id: Optional[str] = Field(default=None, description="") + soda_check_evaluation_status: Optional[str] = Field( + default=None, description="" + ) + soda_check_definition: Optional[str] = Field(default=None, description="") + soda_check_last_scan_at: Optional[datetime] = Field( + default=None, description="" + ) + soda_check_incident_count: Optional[int] = Field(default=None, description="") + soda_check_columns: Optional[list[Column]] = Field( + default=None, description="" + ) # relationship + soda_check_assets: Optional[list[Asset]] = Field( + default=None, description="" + ) # relationship + + attributes: "SodaCheck.Attributes" = Field( + default_factory=lambda: SodaCheck.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .asset import Asset # noqa +from .column import Column # noqa diff --git a/pyatlan/model/assets/table.py b/pyatlan/model/assets/table.py new file mode 100644 index 000000000..ffdc4b05a --- /dev/null +++ b/pyatlan/model/assets/table.py @@ -0,0 +1,454 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.enums import AtlanConnectorType +from pyatlan.model.fields.atlan_fields import ( + BooleanField, + KeywordField, + NumericField, + RelationField, +) +from pyatlan.utils import init_guid, validate_required_fields + +from .s_q_l import SQL + + +class Table(SQL): + """Description""" + + @classmethod + # @validate_arguments() + @init_guid + def create(cls, *, name: str, schema_qualified_name: str) -> Table: + validate_required_fields( + ["name", "schema_qualified_name"], [name, schema_qualified_name] + ) + attributes = Table.Attributes.create( + name=name, schema_qualified_name=schema_qualified_name + ) + return cls(attributes=attributes) + + type_name: str = Field(default="Table", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "Table": + raise ValueError("must be Table") + return v + + def __setattr__(self, name, value): + if name in Table._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + COLUMN_COUNT: ClassVar[NumericField] = NumericField("columnCount", "columnCount") + """ + Number of columns in this table. + """ + ROW_COUNT: ClassVar[NumericField] = NumericField("rowCount", "rowCount") + """ + Number of rows in this table. + """ + SIZE_BYTES: ClassVar[NumericField] = NumericField("sizeBytes", "sizeBytes") + """ + Size of this table, in bytes. + """ + ALIAS: ClassVar[KeywordField] = KeywordField("alias", "alias") + """ + Alias for this table. + """ + IS_TEMPORARY: ClassVar[BooleanField] = BooleanField("isTemporary", "isTemporary") + """ + Whether this table is temporary (true) or not (false). + """ + IS_QUERY_PREVIEW: ClassVar[BooleanField] = BooleanField( + "isQueryPreview", "isQueryPreview" + ) + """ + Whether preview queries are allowed for this table (true) or not (false). + """ + QUERY_PREVIEW_CONFIG: ClassVar[KeywordField] = KeywordField( + "queryPreviewConfig", "queryPreviewConfig" + ) + """ + Configuration for preview queries. 
+ """ + EXTERNAL_LOCATION: ClassVar[KeywordField] = KeywordField( + "externalLocation", "externalLocation" + ) + """ + External location of this table, for example: an S3 object location. + """ + EXTERNAL_LOCATION_REGION: ClassVar[KeywordField] = KeywordField( + "externalLocationRegion", "externalLocationRegion" + ) + """ + Region of the external location of this table, for example: S3 region. + """ + EXTERNAL_LOCATION_FORMAT: ClassVar[KeywordField] = KeywordField( + "externalLocationFormat", "externalLocationFormat" + ) + """ + Format of the external location of this table, for example: JSON, CSV, PARQUET, etc. + """ + IS_PARTITIONED: ClassVar[BooleanField] = BooleanField( + "isPartitioned", "isPartitioned" + ) + """ + Whether this table is partitioned (true) or not (false). + """ + PARTITION_STRATEGY: ClassVar[KeywordField] = KeywordField( + "partitionStrategy", "partitionStrategy" + ) + """ + Partition strategy for this table. + """ + PARTITION_COUNT: ClassVar[NumericField] = NumericField( + "partitionCount", "partitionCount" + ) + """ + Number of partitions in this table. + """ + PARTITION_LIST: ClassVar[KeywordField] = KeywordField( + "partitionList", "partitionList" + ) + """ + List of partitions in this table. + """ + + COLUMNS: ClassVar[RelationField] = RelationField("columns") + """ + TBC + """ + FACTS: ClassVar[RelationField] = RelationField("facts") + """ + TBC + """ + ATLAN_SCHEMA: ClassVar[RelationField] = RelationField("atlanSchema") + """ + TBC + """ + PARTITIONS: ClassVar[RelationField] = RelationField("partitions") + """ + TBC + """ + QUERIES: ClassVar[RelationField] = RelationField("queries") + """ + TBC + """ + DIMENSIONS: ClassVar[RelationField] = RelationField("dimensions") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "column_count", + "row_count", + "size_bytes", + "alias", + "is_temporary", + "is_query_preview", + "query_preview_config", + "external_location", + "external_location_region", + "external_location_format", + "is_partitioned", + "partition_strategy", + "partition_count", + "partition_list", + "columns", + "facts", + "atlan_schema", + "partitions", + "queries", + "dimensions", + ] + + @property + def column_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.column_count + + @column_count.setter + def column_count(self, column_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.column_count = column_count + + @property + def row_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.row_count + + @row_count.setter + def row_count(self, row_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.row_count = row_count + + @property + def size_bytes(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.size_bytes + + @size_bytes.setter + def size_bytes(self, size_bytes: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.size_bytes = size_bytes + + @property + def alias(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.alias + + @alias.setter + def alias(self, alias: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.alias = alias + + @property + def is_temporary(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_temporary + + 
@is_temporary.setter + def is_temporary(self, is_temporary: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_temporary = is_temporary + + @property + def is_query_preview(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_query_preview + + @is_query_preview.setter + def is_query_preview(self, is_query_preview: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_query_preview = is_query_preview + + @property + def query_preview_config(self) -> Optional[dict[str, str]]: + return None if self.attributes is None else self.attributes.query_preview_config + + @query_preview_config.setter + def query_preview_config(self, query_preview_config: Optional[dict[str, str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_preview_config = query_preview_config + + @property + def external_location(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.external_location + + @external_location.setter + def external_location(self, external_location: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.external_location = external_location + + @property + def external_location_region(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.external_location_region + ) + + @external_location_region.setter + def external_location_region(self, external_location_region: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.external_location_region = external_location_region + + @property + def external_location_format(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.external_location_format + ) + + @external_location_format.setter + def external_location_format(self, external_location_format: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.external_location_format = external_location_format + + @property + def is_partitioned(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_partitioned + + @is_partitioned.setter + def is_partitioned(self, is_partitioned: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_partitioned = is_partitioned + + @property + def partition_strategy(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.partition_strategy + + @partition_strategy.setter + def partition_strategy(self, partition_strategy: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.partition_strategy = partition_strategy + + @property + def partition_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.partition_count + + @partition_count.setter + def partition_count(self, partition_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.partition_count = partition_count + + @property + def partition_list(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.partition_list + + @partition_list.setter + def partition_list(self, partition_list: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.partition_list = partition_list + 
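# --- Reviewer note (not part of the patch): a usage sketch for the Table.create(...)
# builder introduced above. Attributes.create (further below in this file) expects the
# five-part form "{tenant}/{connector}/{connection-epoch}/{database}/{schema}" for
# schema_qualified_name and derives the remaining names from it; the values here are
# placeholders, not real assets.
from pyatlan.model.assets import Table

table = Table.create(
    name="MY_TABLE",
    schema_qualified_name="default/snowflake/1700000000/MY_DB/MY_SCHEMA",
)
assert table.attributes.qualified_name == (
    "default/snowflake/1700000000/MY_DB/MY_SCHEMA/MY_TABLE"
)
assert table.attributes.connector_name == "snowflake"
# --- end reviewer note ---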
+ @property + def columns(self) -> Optional[list[Column]]: + return None if self.attributes is None else self.attributes.columns + + @columns.setter + def columns(self, columns: Optional[list[Column]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.columns = columns + + @property + def facts(self) -> Optional[list[Table]]: + return None if self.attributes is None else self.attributes.facts + + @facts.setter + def facts(self, facts: Optional[list[Table]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.facts = facts + + @property + def atlan_schema(self) -> Optional[Schema]: + return None if self.attributes is None else self.attributes.atlan_schema + + @atlan_schema.setter + def atlan_schema(self, atlan_schema: Optional[Schema]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.atlan_schema = atlan_schema + + @property + def partitions(self) -> Optional[list[TablePartition]]: + return None if self.attributes is None else self.attributes.partitions + + @partitions.setter + def partitions(self, partitions: Optional[list[TablePartition]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.partitions = partitions + + @property + def queries(self) -> Optional[list[Query]]: + return None if self.attributes is None else self.attributes.queries + + @queries.setter + def queries(self, queries: Optional[list[Query]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.queries = queries + + @property + def dimensions(self) -> Optional[list[Table]]: + return None if self.attributes is None else self.attributes.dimensions + + @dimensions.setter + def dimensions(self, dimensions: Optional[list[Table]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dimensions = dimensions + + class Attributes(SQL.Attributes): + column_count: Optional[int] = Field(default=None, description="") + row_count: Optional[int] = Field(default=None, description="") + size_bytes: Optional[int] = Field(default=None, description="") + alias: Optional[str] = Field(default=None, description="") + is_temporary: Optional[bool] = Field(default=None, description="") + is_query_preview: Optional[bool] = Field(default=None, description="") + query_preview_config: Optional[dict[str, str]] = Field( + default=None, description="" + ) + external_location: Optional[str] = Field(default=None, description="") + external_location_region: Optional[str] = Field(default=None, description="") + external_location_format: Optional[str] = Field(default=None, description="") + is_partitioned: Optional[bool] = Field(default=None, description="") + partition_strategy: Optional[str] = Field(default=None, description="") + partition_count: Optional[int] = Field(default=None, description="") + partition_list: Optional[str] = Field(default=None, description="") + columns: Optional[list[Column]] = Field( + default=None, description="" + ) # relationship + facts: Optional[list[Table]] = Field( + default=None, description="" + ) # relationship + atlan_schema: Optional[Schema] = Field( + default=None, description="" + ) # relationship + partitions: Optional[list[TablePartition]] = Field( + default=None, description="" + ) # relationship + queries: Optional[list[Query]] = Field( + default=None, description="" + ) # relationship + dimensions: Optional[list[Table]] = Field( + default=None, description="" + ) # relationship + + @classmethod + # 
@validate_arguments() + @init_guid + def create(cls, *, name: str, schema_qualified_name: str) -> Table.Attributes: + if not name: + raise ValueError("name cannot be blank") + validate_required_fields(["schema_qualified_name"], [schema_qualified_name]) + fields = schema_qualified_name.split("/") + if len(fields) != 5: + raise ValueError("Invalid schema_qualified_name") + try: + connector_type = AtlanConnectorType(fields[1]) # type:ignore + except ValueError as e: + raise ValueError("Invalid schema_qualified_name") from e + return Table.Attributes( + name=name, + database_name=fields[3], + connection_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}", + database_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}/{fields[3]}", + qualified_name=f"{schema_qualified_name}/{name}", + schema_qualified_name=schema_qualified_name, + schema_name=fields[4], + connector_name=connector_type.value, + atlan_schema=Schema.ref_by_qualified_name(schema_qualified_name), + ) + + attributes: "Table.Attributes" = Field( + default_factory=lambda: Table.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .column import Column # noqa +from .query import Query # noqa +from .schema import Schema # noqa +from .table_partition import TablePartition # noqa diff --git a/pyatlan/model/assets/table_partition.py b/pyatlan/model/assets/table_partition.py new file mode 100644 index 000000000..1a6c0f95d --- /dev/null +++ b/pyatlan/model/assets/table_partition.py @@ -0,0 +1,402 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import ( + BooleanField, + KeywordField, + NumericField, + RelationField, +) + +from .s_q_l import SQL + + +class TablePartition(SQL): + """Description""" + + type_name: str = Field(default="TablePartition", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "TablePartition": + raise ValueError("must be TablePartition") + return v + + def __setattr__(self, name, value): + if name in TablePartition._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + CONSTRAINT: ClassVar[KeywordField] = KeywordField("constraint", "constraint") + """ + Constraint that defines this table partition. + """ + COLUMN_COUNT: ClassVar[NumericField] = NumericField("columnCount", "columnCount") + """ + Number of columns in this partition. + """ + ROW_COUNT: ClassVar[NumericField] = NumericField("rowCount", "rowCount") + """ + Number of rows in this partition. + """ + SIZE_BYTES: ClassVar[NumericField] = NumericField("sizeBytes", "sizeBytes") + """ + Size of this partition, in bytes. + """ + ALIAS: ClassVar[KeywordField] = KeywordField("alias", "alias") + """ + Alias for this partition. + """ + IS_TEMPORARY: ClassVar[BooleanField] = BooleanField("isTemporary", "isTemporary") + """ + Whether this partition is temporary (true) or not (false). + """ + IS_QUERY_PREVIEW: ClassVar[BooleanField] = BooleanField( + "isQueryPreview", "isQueryPreview" + ) + """ + Whether preview queries for this partition are allowed (true) or not (false). + """ + QUERY_PREVIEW_CONFIG: ClassVar[KeywordField] = KeywordField( + "queryPreviewConfig", "queryPreviewConfig" + ) + """ + Configuration for the preview queries. 
+ """ + EXTERNAL_LOCATION: ClassVar[KeywordField] = KeywordField( + "externalLocation", "externalLocation" + ) + """ + External location of this partition, for example: an S3 object location. + """ + EXTERNAL_LOCATION_REGION: ClassVar[KeywordField] = KeywordField( + "externalLocationRegion", "externalLocationRegion" + ) + """ + Region of the external location of this partition, for example: S3 region. + """ + EXTERNAL_LOCATION_FORMAT: ClassVar[KeywordField] = KeywordField( + "externalLocationFormat", "externalLocationFormat" + ) + """ + Format of the external location of this partition, for example: JSON, CSV, PARQUET, etc. + """ + IS_PARTITIONED: ClassVar[BooleanField] = BooleanField( + "isPartitioned", "isPartitioned" + ) + """ + Whether this partition is further partitioned (true) or not (false). + """ + PARTITION_STRATEGY: ClassVar[KeywordField] = KeywordField( + "partitionStrategy", "partitionStrategy" + ) + """ + Partition strategy of this partition. + """ + PARTITION_COUNT: ClassVar[NumericField] = NumericField( + "partitionCount", "partitionCount" + ) + """ + Number of sub-partitions of this partition. + """ + PARTITION_LIST: ClassVar[KeywordField] = KeywordField( + "partitionList", "partitionList" + ) + """ + List of sub-partitions in this partition. + """ + + CHILD_TABLE_PARTITIONS: ClassVar[RelationField] = RelationField( + "childTablePartitions" + ) + """ + TBC + """ + COLUMNS: ClassVar[RelationField] = RelationField("columns") + """ + TBC + """ + PARENT_TABLE_PARTITION: ClassVar[RelationField] = RelationField( + "parentTablePartition" + ) + """ + TBC + """ + PARENT_TABLE: ClassVar[RelationField] = RelationField("parentTable") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "constraint", + "column_count", + "row_count", + "size_bytes", + "alias", + "is_temporary", + "is_query_preview", + "query_preview_config", + "external_location", + "external_location_region", + "external_location_format", + "is_partitioned", + "partition_strategy", + "partition_count", + "partition_list", + "child_table_partitions", + "columns", + "parent_table_partition", + "parent_table", + ] + + @property + def constraint(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.constraint + + @constraint.setter + def constraint(self, constraint: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.constraint = constraint + + @property + def column_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.column_count + + @column_count.setter + def column_count(self, column_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.column_count = column_count + + @property + def row_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.row_count + + @row_count.setter + def row_count(self, row_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.row_count = row_count + + @property + def size_bytes(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.size_bytes + + @size_bytes.setter + def size_bytes(self, size_bytes: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.size_bytes = size_bytes + + @property + def alias(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.alias + + @alias.setter + def alias(self, alias: 
Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.alias = alias + + @property + def is_temporary(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_temporary + + @is_temporary.setter + def is_temporary(self, is_temporary: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_temporary = is_temporary + + @property + def is_query_preview(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_query_preview + + @is_query_preview.setter + def is_query_preview(self, is_query_preview: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_query_preview = is_query_preview + + @property + def query_preview_config(self) -> Optional[dict[str, str]]: + return None if self.attributes is None else self.attributes.query_preview_config + + @query_preview_config.setter + def query_preview_config(self, query_preview_config: Optional[dict[str, str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_preview_config = query_preview_config + + @property + def external_location(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.external_location + + @external_location.setter + def external_location(self, external_location: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.external_location = external_location + + @property + def external_location_region(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.external_location_region + ) + + @external_location_region.setter + def external_location_region(self, external_location_region: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.external_location_region = external_location_region + + @property + def external_location_format(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.external_location_format + ) + + @external_location_format.setter + def external_location_format(self, external_location_format: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.external_location_format = external_location_format + + @property + def is_partitioned(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_partitioned + + @is_partitioned.setter + def is_partitioned(self, is_partitioned: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_partitioned = is_partitioned + + @property + def partition_strategy(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.partition_strategy + + @partition_strategy.setter + def partition_strategy(self, partition_strategy: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.partition_strategy = partition_strategy + + @property + def partition_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.partition_count + + @partition_count.setter + def partition_count(self, partition_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.partition_count = partition_count + + @property + def partition_list(self) -> Optional[str]: + return None if 
self.attributes is None else self.attributes.partition_list + + @partition_list.setter + def partition_list(self, partition_list: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.partition_list = partition_list + + @property + def child_table_partitions(self) -> Optional[list[TablePartition]]: + return ( + None if self.attributes is None else self.attributes.child_table_partitions + ) + + @child_table_partitions.setter + def child_table_partitions( + self, child_table_partitions: Optional[list[TablePartition]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.child_table_partitions = child_table_partitions + + @property + def columns(self) -> Optional[list[Column]]: + return None if self.attributes is None else self.attributes.columns + + @columns.setter + def columns(self, columns: Optional[list[Column]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.columns = columns + + @property + def parent_table_partition(self) -> Optional[TablePartition]: + return ( + None if self.attributes is None else self.attributes.parent_table_partition + ) + + @parent_table_partition.setter + def parent_table_partition(self, parent_table_partition: Optional[TablePartition]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.parent_table_partition = parent_table_partition + + @property + def parent_table(self) -> Optional[Table]: + return None if self.attributes is None else self.attributes.parent_table + + @parent_table.setter + def parent_table(self, parent_table: Optional[Table]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.parent_table = parent_table + + class Attributes(SQL.Attributes): + constraint: Optional[str] = Field(default=None, description="") + column_count: Optional[int] = Field(default=None, description="") + row_count: Optional[int] = Field(default=None, description="") + size_bytes: Optional[int] = Field(default=None, description="") + alias: Optional[str] = Field(default=None, description="") + is_temporary: Optional[bool] = Field(default=None, description="") + is_query_preview: Optional[bool] = Field(default=None, description="") + query_preview_config: Optional[dict[str, str]] = Field( + default=None, description="" + ) + external_location: Optional[str] = Field(default=None, description="") + external_location_region: Optional[str] = Field(default=None, description="") + external_location_format: Optional[str] = Field(default=None, description="") + is_partitioned: Optional[bool] = Field(default=None, description="") + partition_strategy: Optional[str] = Field(default=None, description="") + partition_count: Optional[int] = Field(default=None, description="") + partition_list: Optional[str] = Field(default=None, description="") + child_table_partitions: Optional[list[TablePartition]] = Field( + default=None, description="" + ) # relationship + columns: Optional[list[Column]] = Field( + default=None, description="" + ) # relationship + parent_table_partition: Optional[TablePartition] = Field( + default=None, description="" + ) # relationship + parent_table: Optional[Table] = Field( + default=None, description="" + ) # relationship + + attributes: "TablePartition.Attributes" = Field( + default_factory=lambda: TablePartition.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .column import Column # noqa +from .table import Table # noqa diff --git a/pyatlan/model/assets/asset43.py b/pyatlan/model/assets/tableau.py similarity index 78% rename from pyatlan/model/assets/asset43.py rename to pyatlan/model/assets/tableau.py index 27729fa57..01b39f1af 100644 --- a/pyatlan/model/assets/asset43.py +++ b/pyatlan/model/assets/tableau.py @@ -6,15 +6,15 @@ from typing import ClassVar -from pydantic import Field, validator +from pydantic.v1 import Field, validator -from .asset19 import BI +from .b_i import BI class Tableau(BI): """Description""" - type_name: str = Field("Tableau", allow_mutation=False) + type_name: str = Field(default="Tableau", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -28,6 +28,3 @@ def __setattr__(self, name, value): super().__setattr__(name, value) _convenience_properties: ClassVar[list[str]] = [] - - -Tableau.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/tableau_calculated_field.py b/pyatlan/model/assets/tableau_calculated_field.py new file mode 100644 index 000000000..038851e70 --- /dev/null +++ b/pyatlan/model/assets/tableau_calculated_field.py @@ -0,0 +1,301 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import ( + KeywordField, + KeywordTextField, + RelationField, +) + +from .tableau import Tableau + + +class TableauCalculatedField(Tableau): + """Description""" + + type_name: str = Field(default="TableauCalculatedField", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "TableauCalculatedField": + raise ValueError("must be TableauCalculatedField") + return v + + def __setattr__(self, name, value): + if name in TableauCalculatedField._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + SITE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "siteQualifiedName", "siteQualifiedName" + ) + """ + Unique name of the site in which this calculated field exists. + """ + PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "projectQualifiedName", "projectQualifiedName" + ) + """ + Unique name of the project in which this calculated field exists. + """ + TOP_LEVEL_PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "topLevelProjectQualifiedName", "topLevelProjectQualifiedName" + ) + """ + Unique name of the top-level project in which this calculated field exists. + """ + WORKBOOK_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "workbookQualifiedName", "workbookQualifiedName" + ) + """ + Unique name of the workbook in which this calculated field exists. + """ + DATASOURCE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "datasourceQualifiedName", "datasourceQualifiedName" + ) + """ + Unique name of the datasource in which this calculated field exists. + """ + PROJECT_HIERARCHY: ClassVar[KeywordField] = KeywordField( + "projectHierarchy", "projectHierarchy" + ) + """ + List of top-level projects and their nested projects. + """ + DATA_CATEGORY: ClassVar[KeywordField] = KeywordField("dataCategory", "dataCategory") + """ + Data category of this field. 
+ """ + ROLE: ClassVar[KeywordField] = KeywordField("role", "role") + """ + Role of this field, for example: 'dimension', 'measure', or 'unknown'. + """ + TABLEAU_DATA_TYPE: ClassVar[KeywordTextField] = KeywordTextField( + "tableauDataType", "tableauDataType", "tableauDataType.text" + ) + """ + Data type of the field, from Tableau. + """ + FORMULA: ClassVar[KeywordField] = KeywordField("formula", "formula") + """ + Formula for this calculated field. + """ + UPSTREAM_FIELDS: ClassVar[KeywordField] = KeywordField( + "upstreamFields", "upstreamFields" + ) + """ + List of fields that are upstream to this calculated field. + """ + + WORKSHEETS: ClassVar[RelationField] = RelationField("worksheets") + """ + TBC + """ + DATASOURCE: ClassVar[RelationField] = RelationField("datasource") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "site_qualified_name", + "project_qualified_name", + "top_level_project_qualified_name", + "workbook_qualified_name", + "datasource_qualified_name", + "project_hierarchy", + "data_category", + "role", + "tableau_data_type", + "formula", + "upstream_fields", + "worksheets", + "datasource", + ] + + @property + def site_qualified_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.site_qualified_name + + @site_qualified_name.setter + def site_qualified_name(self, site_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.site_qualified_name = site_qualified_name + + @property + def project_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.project_qualified_name + ) + + @project_qualified_name.setter + def project_qualified_name(self, project_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project_qualified_name = project_qualified_name + + @property + def top_level_project_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.top_level_project_qualified_name + ) + + @top_level_project_qualified_name.setter + def top_level_project_qualified_name( + self, top_level_project_qualified_name: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.top_level_project_qualified_name = ( + top_level_project_qualified_name + ) + + @property + def workbook_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.workbook_qualified_name + ) + + @workbook_qualified_name.setter + def workbook_qualified_name(self, workbook_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.workbook_qualified_name = workbook_qualified_name + + @property + def datasource_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.datasource_qualified_name + ) + + @datasource_qualified_name.setter + def datasource_qualified_name(self, datasource_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.datasource_qualified_name = datasource_qualified_name + + @property + def project_hierarchy(self) -> Optional[list[dict[str, str]]]: + return None if self.attributes is None else self.attributes.project_hierarchy + + @project_hierarchy.setter + def project_hierarchy(self, project_hierarchy: Optional[list[dict[str, str]]]): + if 
self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project_hierarchy = project_hierarchy + + @property + def data_category(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.data_category + + @data_category.setter + def data_category(self, data_category: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.data_category = data_category + + @property + def role(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.role + + @role.setter + def role(self, role: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.role = role + + @property + def tableau_data_type(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.tableau_data_type + + @tableau_data_type.setter + def tableau_data_type(self, tableau_data_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.tableau_data_type = tableau_data_type + + @property + def formula(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.formula + + @formula.setter + def formula(self, formula: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.formula = formula + + @property + def upstream_fields(self) -> Optional[list[dict[str, str]]]: + return None if self.attributes is None else self.attributes.upstream_fields + + @upstream_fields.setter + def upstream_fields(self, upstream_fields: Optional[list[dict[str, str]]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.upstream_fields = upstream_fields + + @property + def worksheets(self) -> Optional[list[TableauWorksheet]]: + return None if self.attributes is None else self.attributes.worksheets + + @worksheets.setter + def worksheets(self, worksheets: Optional[list[TableauWorksheet]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.worksheets = worksheets + + @property + def datasource(self) -> Optional[TableauDatasource]: + return None if self.attributes is None else self.attributes.datasource + + @datasource.setter + def datasource(self, datasource: Optional[TableauDatasource]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.datasource = datasource + + class Attributes(Tableau.Attributes): + site_qualified_name: Optional[str] = Field(default=None, description="") + project_qualified_name: Optional[str] = Field(default=None, description="") + top_level_project_qualified_name: Optional[str] = Field( + default=None, description="" + ) + workbook_qualified_name: Optional[str] = Field(default=None, description="") + datasource_qualified_name: Optional[str] = Field(default=None, description="") + project_hierarchy: Optional[list[dict[str, str]]] = Field( + default=None, description="" + ) + data_category: Optional[str] = Field(default=None, description="") + role: Optional[str] = Field(default=None, description="") + tableau_data_type: Optional[str] = Field(default=None, description="") + formula: Optional[str] = Field(default=None, description="") + upstream_fields: Optional[list[dict[str, str]]] = Field( + default=None, description="" + ) + worksheets: Optional[list[TableauWorksheet]] = Field( + default=None, description="" + ) # relationship + datasource: Optional[TableauDatasource] = Field( + default=None, description="" + ) # 
relationship + + attributes: "TableauCalculatedField.Attributes" = Field( + default_factory=lambda: TableauCalculatedField.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .tableau_datasource import TableauDatasource # noqa +from .tableau_worksheet import TableauWorksheet # noqa diff --git a/pyatlan/model/assets/tableau_dashboard.py b/pyatlan/model/assets/tableau_dashboard.py new file mode 100644 index 000000000..b19a56057 --- /dev/null +++ b/pyatlan/model/assets/tableau_dashboard.py @@ -0,0 +1,189 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, RelationField + +from .tableau import Tableau + + +class TableauDashboard(Tableau): + """Description""" + + type_name: str = Field(default="TableauDashboard", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "TableauDashboard": + raise ValueError("must be TableauDashboard") + return v + + def __setattr__(self, name, value): + if name in TableauDashboard._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + SITE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "siteQualifiedName", "siteQualifiedName" + ) + """ + Unique name of the site in which this dashboard exists. + """ + PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "projectQualifiedName", "projectQualifiedName" + ) + """ + Unique name of the project in which this dashboard exists. + """ + WORKBOOK_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "workbookQualifiedName", "workbookQualifiedName" + ) + """ + Unique name of the workbook in which this dashboard exists. + """ + TOP_LEVEL_PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "topLevelProjectQualifiedName", "topLevelProjectQualifiedName" + ) + """ + Unique name of the top-level project in which this dashboard exists. + """ + PROJECT_HIERARCHY: ClassVar[KeywordField] = KeywordField( + "projectHierarchy", "projectHierarchy" + ) + """ + List of top-level projects and their nested child projects. 
+ """ + + WORKBOOK: ClassVar[RelationField] = RelationField("workbook") + """ + TBC + """ + WORKSHEETS: ClassVar[RelationField] = RelationField("worksheets") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "site_qualified_name", + "project_qualified_name", + "workbook_qualified_name", + "top_level_project_qualified_name", + "project_hierarchy", + "workbook", + "worksheets", + ] + + @property + def site_qualified_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.site_qualified_name + + @site_qualified_name.setter + def site_qualified_name(self, site_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.site_qualified_name = site_qualified_name + + @property + def project_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.project_qualified_name + ) + + @project_qualified_name.setter + def project_qualified_name(self, project_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project_qualified_name = project_qualified_name + + @property + def workbook_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.workbook_qualified_name + ) + + @workbook_qualified_name.setter + def workbook_qualified_name(self, workbook_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.workbook_qualified_name = workbook_qualified_name + + @property + def top_level_project_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.top_level_project_qualified_name + ) + + @top_level_project_qualified_name.setter + def top_level_project_qualified_name( + self, top_level_project_qualified_name: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.top_level_project_qualified_name = ( + top_level_project_qualified_name + ) + + @property + def project_hierarchy(self) -> Optional[list[dict[str, str]]]: + return None if self.attributes is None else self.attributes.project_hierarchy + + @project_hierarchy.setter + def project_hierarchy(self, project_hierarchy: Optional[list[dict[str, str]]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project_hierarchy = project_hierarchy + + @property + def workbook(self) -> Optional[TableauWorkbook]: + return None if self.attributes is None else self.attributes.workbook + + @workbook.setter + def workbook(self, workbook: Optional[TableauWorkbook]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.workbook = workbook + + @property + def worksheets(self) -> Optional[list[TableauWorksheet]]: + return None if self.attributes is None else self.attributes.worksheets + + @worksheets.setter + def worksheets(self, worksheets: Optional[list[TableauWorksheet]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.worksheets = worksheets + + class Attributes(Tableau.Attributes): + site_qualified_name: Optional[str] = Field(default=None, description="") + project_qualified_name: Optional[str] = Field(default=None, description="") + workbook_qualified_name: Optional[str] = Field(default=None, description="") + top_level_project_qualified_name: Optional[str] = Field( + default=None, description="" + ) + project_hierarchy: 
Optional[list[dict[str, str]]] = Field( + default=None, description="" + ) + workbook: Optional[TableauWorkbook] = Field( + default=None, description="" + ) # relationship + worksheets: Optional[list[TableauWorksheet]] = Field( + default=None, description="" + ) # relationship + + attributes: "TableauDashboard.Attributes" = Field( + default_factory=lambda: TableauDashboard.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .tableau_workbook import TableauWorkbook # noqa +from .tableau_worksheet import TableauWorksheet # noqa diff --git a/pyatlan/model/assets/tableau_datasource.py b/pyatlan/model/assets/tableau_datasource.py new file mode 100644 index 000000000..6c17fd739 --- /dev/null +++ b/pyatlan/model/assets/tableau_datasource.py @@ -0,0 +1,352 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import BooleanField, KeywordField, RelationField + +from .tableau import Tableau + + +class TableauDatasource(Tableau): + """Description""" + + type_name: str = Field(default="TableauDatasource", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "TableauDatasource": + raise ValueError("must be TableauDatasource") + return v + + def __setattr__(self, name, value): + if name in TableauDatasource._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + SITE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "siteQualifiedName", "siteQualifiedName" + ) + """ + Unique name of the site in which this datasource exists. + """ + PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "projectQualifiedName", "projectQualifiedName" + ) + """ + Unique name of the project in which this datasource exists. + """ + TOP_LEVEL_PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "topLevelProjectQualifiedName", "topLevelProjectQualifiedName" + ) + """ + Unique name of the top-level project in which this datasource exists. + """ + WORKBOOK_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "workbookQualifiedName", "workbookQualifiedName" + ) + """ + Unique name of the workbook in which this datasource exists. + """ + PROJECT_HIERARCHY: ClassVar[KeywordField] = KeywordField( + "projectHierarchy", "projectHierarchy" + ) + """ + List of top-level projects with their nested child projects. + """ + IS_PUBLISHED: ClassVar[BooleanField] = BooleanField("isPublished", "isPublished") + """ + Whether this datasource is published (true) or embedded (false). + """ + HAS_EXTRACTS: ClassVar[BooleanField] = BooleanField("hasExtracts", "hasExtracts") + """ + Whether this datasource has extracts (true) or not (false). + """ + IS_CERTIFIED: ClassVar[BooleanField] = BooleanField("isCertified", "isCertified") + """ + Whether this datasource is certified in Tableau (true) or not (false). + """ + CERTIFIER: ClassVar[KeywordField] = KeywordField("certifier", "certifier") + """ + Users that have marked this datasource as certified, in Tableau. + """ + CERTIFICATION_NOTE: ClassVar[KeywordField] = KeywordField( + "certificationNote", "certificationNote" + ) + """ + Notes related to this datasource being certified, in Tableau. 
+ """ + CERTIFIER_DISPLAY_NAME: ClassVar[KeywordField] = KeywordField( + "certifierDisplayName", "certifierDisplayName" + ) + """ + Name of the user who certified this datasource, in Tableau. + """ + UPSTREAM_TABLES: ClassVar[KeywordField] = KeywordField( + "upstreamTables", "upstreamTables" + ) + """ + List of tables that are upstream of this datasource. + """ + UPSTREAM_DATASOURCES: ClassVar[KeywordField] = KeywordField( + "upstreamDatasources", "upstreamDatasources" + ) + """ + List of datasources that are upstream of this datasource. + """ + + WORKBOOK: ClassVar[RelationField] = RelationField("workbook") + """ + TBC + """ + PROJECT: ClassVar[RelationField] = RelationField("project") + """ + TBC + """ + FIELDS: ClassVar[RelationField] = RelationField("fields") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "site_qualified_name", + "project_qualified_name", + "top_level_project_qualified_name", + "workbook_qualified_name", + "project_hierarchy", + "is_published", + "has_extracts", + "is_certified", + "certifier", + "certification_note", + "certifier_display_name", + "upstream_tables", + "upstream_datasources", + "workbook", + "project", + "fields", + ] + + @property + def site_qualified_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.site_qualified_name + + @site_qualified_name.setter + def site_qualified_name(self, site_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.site_qualified_name = site_qualified_name + + @property + def project_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.project_qualified_name + ) + + @project_qualified_name.setter + def project_qualified_name(self, project_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project_qualified_name = project_qualified_name + + @property + def top_level_project_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.top_level_project_qualified_name + ) + + @top_level_project_qualified_name.setter + def top_level_project_qualified_name( + self, top_level_project_qualified_name: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.top_level_project_qualified_name = ( + top_level_project_qualified_name + ) + + @property + def workbook_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.workbook_qualified_name + ) + + @workbook_qualified_name.setter + def workbook_qualified_name(self, workbook_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.workbook_qualified_name = workbook_qualified_name + + @property + def project_hierarchy(self) -> Optional[list[dict[str, str]]]: + return None if self.attributes is None else self.attributes.project_hierarchy + + @project_hierarchy.setter + def project_hierarchy(self, project_hierarchy: Optional[list[dict[str, str]]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project_hierarchy = project_hierarchy + + @property + def is_published(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_published + + @is_published.setter + def is_published(self, is_published: Optional[bool]): + if self.attributes is None: + self.attributes = 
self.Attributes() + self.attributes.is_published = is_published + + @property + def has_extracts(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.has_extracts + + @has_extracts.setter + def has_extracts(self, has_extracts: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.has_extracts = has_extracts + + @property + def is_certified(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_certified + + @is_certified.setter + def is_certified(self, is_certified: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_certified = is_certified + + @property + def certifier(self) -> Optional[dict[str, str]]: + return None if self.attributes is None else self.attributes.certifier + + @certifier.setter + def certifier(self, certifier: Optional[dict[str, str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.certifier = certifier + + @property + def certification_note(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.certification_note + + @certification_note.setter + def certification_note(self, certification_note: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.certification_note = certification_note + + @property + def certifier_display_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.certifier_display_name + ) + + @certifier_display_name.setter + def certifier_display_name(self, certifier_display_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.certifier_display_name = certifier_display_name + + @property + def upstream_tables(self) -> Optional[list[dict[str, str]]]: + return None if self.attributes is None else self.attributes.upstream_tables + + @upstream_tables.setter + def upstream_tables(self, upstream_tables: Optional[list[dict[str, str]]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.upstream_tables = upstream_tables + + @property + def upstream_datasources(self) -> Optional[list[dict[str, str]]]: + return None if self.attributes is None else self.attributes.upstream_datasources + + @upstream_datasources.setter + def upstream_datasources( + self, upstream_datasources: Optional[list[dict[str, str]]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.upstream_datasources = upstream_datasources + + @property + def workbook(self) -> Optional[TableauWorkbook]: + return None if self.attributes is None else self.attributes.workbook + + @workbook.setter + def workbook(self, workbook: Optional[TableauWorkbook]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.workbook = workbook + + @property + def project(self) -> Optional[TableauProject]: + return None if self.attributes is None else self.attributes.project + + @project.setter + def project(self, project: Optional[TableauProject]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project = project + + @property + def fields(self) -> Optional[list[TableauDatasourceField]]: + return None if self.attributes is None else self.attributes.fields + + @fields.setter + def fields(self, fields: Optional[list[TableauDatasourceField]]): + if self.attributes is None: + self.attributes = 
self.Attributes() + self.attributes.fields = fields + + class Attributes(Tableau.Attributes): + site_qualified_name: Optional[str] = Field(default=None, description="") + project_qualified_name: Optional[str] = Field(default=None, description="") + top_level_project_qualified_name: Optional[str] = Field( + default=None, description="" + ) + workbook_qualified_name: Optional[str] = Field(default=None, description="") + project_hierarchy: Optional[list[dict[str, str]]] = Field( + default=None, description="" + ) + is_published: Optional[bool] = Field(default=None, description="") + has_extracts: Optional[bool] = Field(default=None, description="") + is_certified: Optional[bool] = Field(default=None, description="") + certifier: Optional[dict[str, str]] = Field(default=None, description="") + certification_note: Optional[str] = Field(default=None, description="") + certifier_display_name: Optional[str] = Field(default=None, description="") + upstream_tables: Optional[list[dict[str, str]]] = Field( + default=None, description="" + ) + upstream_datasources: Optional[list[dict[str, str]]] = Field( + default=None, description="" + ) + workbook: Optional[TableauWorkbook] = Field( + default=None, description="" + ) # relationship + project: Optional[TableauProject] = Field( + default=None, description="" + ) # relationship + fields: Optional[list[TableauDatasourceField]] = Field( + default=None, description="" + ) # relationship + + attributes: "TableauDatasource.Attributes" = Field( + default_factory=lambda: TableauDatasource.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .tableau_datasource_field import TableauDatasourceField # noqa +from .tableau_project import TableauProject # noqa +from .tableau_workbook import TableauWorkbook # noqa diff --git a/pyatlan/model/assets/tableau_datasource_field.py b/pyatlan/model/assets/tableau_datasource_field.py new file mode 100644 index 000000000..09f66faef --- /dev/null +++ b/pyatlan/model/assets/tableau_datasource_field.py @@ -0,0 +1,453 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import ( + KeywordField, + KeywordTextField, + RelationField, +) + +from .tableau import Tableau + + +class TableauDatasourceField(Tableau): + """Description""" + + type_name: str = Field(default="TableauDatasourceField", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "TableauDatasourceField": + raise ValueError("must be TableauDatasourceField") + return v + + def __setattr__(self, name, value): + if name in TableauDatasourceField._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + SITE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "siteQualifiedName", "siteQualifiedName" + ) + """ + Unique name of the site in which this datasource field exists. + """ + PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "projectQualifiedName", "projectQualifiedName" + ) + """ + Unique name of the project in which this datasource field exists. 
+ """ + TOP_LEVEL_PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "topLevelProjectQualifiedName", "topLevelProjectQualifiedName" + ) + """ + Unique name of the top-level project in which this datasource field exists. + """ + WORKBOOK_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "workbookQualifiedName", "workbookQualifiedName" + ) + """ + Unique name of the workbook in which this datasource field exists. + """ + DATASOURCE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "datasourceQualifiedName", "datasourceQualifiedName" + ) + """ + Unique name of the datasource in which this datasource field exists. + """ + PROJECT_HIERARCHY: ClassVar[KeywordField] = KeywordField( + "projectHierarchy", "projectHierarchy" + ) + """ + List of top-level projects and their nested child projects. + """ + FULLY_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "fullyQualifiedName", "fullyQualifiedName" + ) + """ + Name used internally in Tableau to uniquely identify this field. + """ + TABLEAU_DATASOURCE_FIELD_DATA_CATEGORY: ClassVar[KeywordField] = KeywordField( + "tableauDatasourceFieldDataCategory", "tableauDatasourceFieldDataCategory" + ) + """ + Data category of this field. + """ + TABLEAU_DATASOURCE_FIELD_ROLE: ClassVar[KeywordField] = KeywordField( + "tableauDatasourceFieldRole", "tableauDatasourceFieldRole" + ) + """ + Role of this field, for example: 'dimension', 'measure', or 'unknown'. + """ + TABLEAU_DATASOURCE_FIELD_DATA_TYPE: ClassVar[KeywordTextField] = KeywordTextField( + "tableauDatasourceFieldDataType", + "tableauDatasourceFieldDataType", + "tableauDatasourceFieldDataType.text", + ) + """ + Data type of this field. + """ + UPSTREAM_TABLES: ClassVar[KeywordField] = KeywordField( + "upstreamTables", "upstreamTables" + ) + """ + Tables upstream to this datasource field. + """ + TABLEAU_DATASOURCE_FIELD_FORMULA: ClassVar[KeywordField] = KeywordField( + "tableauDatasourceFieldFormula", "tableauDatasourceFieldFormula" + ) + """ + Formula for this field. + """ + TABLEAU_DATASOURCE_FIELD_BIN_SIZE: ClassVar[KeywordField] = KeywordField( + "tableauDatasourceFieldBinSize", "tableauDatasourceFieldBinSize" + ) + """ + Bin size of this field. + """ + UPSTREAM_COLUMNS: ClassVar[KeywordField] = KeywordField( + "upstreamColumns", "upstreamColumns" + ) + """ + Columns upstream to this field. + """ + UPSTREAM_FIELDS: ClassVar[KeywordField] = KeywordField( + "upstreamFields", "upstreamFields" + ) + """ + Fields upstream to this field. + """ + DATASOURCE_FIELD_TYPE: ClassVar[KeywordField] = KeywordField( + "datasourceFieldType", "datasourceFieldType" + ) + """ + Type of this datasource field. 
+ """ + + WORKSHEETS: ClassVar[RelationField] = RelationField("worksheets") + """ + TBC + """ + DATASOURCE: ClassVar[RelationField] = RelationField("datasource") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "site_qualified_name", + "project_qualified_name", + "top_level_project_qualified_name", + "workbook_qualified_name", + "datasource_qualified_name", + "project_hierarchy", + "fully_qualified_name", + "tableau_datasource_field_data_category", + "tableau_datasource_field_role", + "tableau_datasource_field_data_type", + "upstream_tables", + "tableau_datasource_field_formula", + "tableau_datasource_field_bin_size", + "upstream_columns", + "upstream_fields", + "datasource_field_type", + "worksheets", + "datasource", + ] + + @property + def site_qualified_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.site_qualified_name + + @site_qualified_name.setter + def site_qualified_name(self, site_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.site_qualified_name = site_qualified_name + + @property + def project_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.project_qualified_name + ) + + @project_qualified_name.setter + def project_qualified_name(self, project_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project_qualified_name = project_qualified_name + + @property + def top_level_project_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.top_level_project_qualified_name + ) + + @top_level_project_qualified_name.setter + def top_level_project_qualified_name( + self, top_level_project_qualified_name: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.top_level_project_qualified_name = ( + top_level_project_qualified_name + ) + + @property + def workbook_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.workbook_qualified_name + ) + + @workbook_qualified_name.setter + def workbook_qualified_name(self, workbook_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.workbook_qualified_name = workbook_qualified_name + + @property + def datasource_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.datasource_qualified_name + ) + + @datasource_qualified_name.setter + def datasource_qualified_name(self, datasource_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.datasource_qualified_name = datasource_qualified_name + + @property + def project_hierarchy(self) -> Optional[list[dict[str, str]]]: + return None if self.attributes is None else self.attributes.project_hierarchy + + @project_hierarchy.setter + def project_hierarchy(self, project_hierarchy: Optional[list[dict[str, str]]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project_hierarchy = project_hierarchy + + @property + def fully_qualified_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.fully_qualified_name + + @fully_qualified_name.setter + def fully_qualified_name(self, fully_qualified_name: Optional[str]): + if self.attributes is None: + 
self.attributes = self.Attributes() + self.attributes.fully_qualified_name = fully_qualified_name + + @property + def tableau_datasource_field_data_category(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.tableau_datasource_field_data_category + ) + + @tableau_datasource_field_data_category.setter + def tableau_datasource_field_data_category( + self, tableau_datasource_field_data_category: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.tableau_datasource_field_data_category = ( + tableau_datasource_field_data_category + ) + + @property + def tableau_datasource_field_role(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.tableau_datasource_field_role + ) + + @tableau_datasource_field_role.setter + def tableau_datasource_field_role( + self, tableau_datasource_field_role: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.tableau_datasource_field_role = tableau_datasource_field_role + + @property + def tableau_datasource_field_data_type(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.tableau_datasource_field_data_type + ) + + @tableau_datasource_field_data_type.setter + def tableau_datasource_field_data_type( + self, tableau_datasource_field_data_type: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.tableau_datasource_field_data_type = ( + tableau_datasource_field_data_type + ) + + @property + def upstream_tables(self) -> Optional[list[dict[str, str]]]: + return None if self.attributes is None else self.attributes.upstream_tables + + @upstream_tables.setter + def upstream_tables(self, upstream_tables: Optional[list[dict[str, str]]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.upstream_tables = upstream_tables + + @property + def tableau_datasource_field_formula(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.tableau_datasource_field_formula + ) + + @tableau_datasource_field_formula.setter + def tableau_datasource_field_formula( + self, tableau_datasource_field_formula: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.tableau_datasource_field_formula = ( + tableau_datasource_field_formula + ) + + @property + def tableau_datasource_field_bin_size(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.tableau_datasource_field_bin_size + ) + + @tableau_datasource_field_bin_size.setter + def tableau_datasource_field_bin_size( + self, tableau_datasource_field_bin_size: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.tableau_datasource_field_bin_size = ( + tableau_datasource_field_bin_size + ) + + @property + def upstream_columns(self) -> Optional[list[dict[str, str]]]: + return None if self.attributes is None else self.attributes.upstream_columns + + @upstream_columns.setter + def upstream_columns(self, upstream_columns: Optional[list[dict[str, str]]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.upstream_columns = upstream_columns + + @property + def upstream_fields(self) -> Optional[list[dict[str, str]]]: + return None if self.attributes is None else self.attributes.upstream_fields + + @upstream_fields.setter + def 
upstream_fields(self, upstream_fields: Optional[list[dict[str, str]]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.upstream_fields = upstream_fields + + @property + def datasource_field_type(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.datasource_field_type + ) + + @datasource_field_type.setter + def datasource_field_type(self, datasource_field_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.datasource_field_type = datasource_field_type + + @property + def worksheets(self) -> Optional[list[TableauWorksheet]]: + return None if self.attributes is None else self.attributes.worksheets + + @worksheets.setter + def worksheets(self, worksheets: Optional[list[TableauWorksheet]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.worksheets = worksheets + + @property + def datasource(self) -> Optional[TableauDatasource]: + return None if self.attributes is None else self.attributes.datasource + + @datasource.setter + def datasource(self, datasource: Optional[TableauDatasource]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.datasource = datasource + + class Attributes(Tableau.Attributes): + site_qualified_name: Optional[str] = Field(default=None, description="") + project_qualified_name: Optional[str] = Field(default=None, description="") + top_level_project_qualified_name: Optional[str] = Field( + default=None, description="" + ) + workbook_qualified_name: Optional[str] = Field(default=None, description="") + datasource_qualified_name: Optional[str] = Field(default=None, description="") + project_hierarchy: Optional[list[dict[str, str]]] = Field( + default=None, description="" + ) + fully_qualified_name: Optional[str] = Field(default=None, description="") + tableau_datasource_field_data_category: Optional[str] = Field( + default=None, description="" + ) + tableau_datasource_field_role: Optional[str] = Field( + default=None, description="" + ) + tableau_datasource_field_data_type: Optional[str] = Field( + default=None, description="" + ) + upstream_tables: Optional[list[dict[str, str]]] = Field( + default=None, description="" + ) + tableau_datasource_field_formula: Optional[str] = Field( + default=None, description="" + ) + tableau_datasource_field_bin_size: Optional[str] = Field( + default=None, description="" + ) + upstream_columns: Optional[list[dict[str, str]]] = Field( + default=None, description="" + ) + upstream_fields: Optional[list[dict[str, str]]] = Field( + default=None, description="" + ) + datasource_field_type: Optional[str] = Field(default=None, description="") + worksheets: Optional[list[TableauWorksheet]] = Field( + default=None, description="" + ) # relationship + datasource: Optional[TableauDatasource] = Field( + default=None, description="" + ) # relationship + + attributes: "TableauDatasourceField.Attributes" = Field( + default_factory=lambda: TableauDatasourceField.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .tableau_datasource import TableauDatasource # noqa +from .tableau_worksheet import TableauWorksheet # noqa diff --git a/pyatlan/model/assets/tableau_flow.py b/pyatlan/model/assets/tableau_flow.py new file mode 100644 index 000000000..39ae3ea71 --- /dev/null +++ b/pyatlan/model/assets/tableau_flow.py @@ -0,0 +1,204 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, RelationField + +from .tableau import Tableau + + +class TableauFlow(Tableau): + """Description""" + + type_name: str = Field(default="TableauFlow", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "TableauFlow": + raise ValueError("must be TableauFlow") + return v + + def __setattr__(self, name, value): + if name in TableauFlow._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + SITE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "siteQualifiedName", "siteQualifiedName" + ) + """ + Unique name of the site in which this flow exists. + """ + PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "projectQualifiedName", "projectQualifiedName" + ) + """ + Unique name of the project in which this flow exists. + """ + TOP_LEVEL_PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "topLevelProjectQualifiedName", "topLevelProjectQualifiedName" + ) + """ + Unique name of the top-level project in which this flow exists. + """ + PROJECT_HIERARCHY: ClassVar[KeywordField] = KeywordField( + "projectHierarchy", "projectHierarchy" + ) + """ + List of top-level projects with their nested child projects. + """ + INPUT_FIELDS: ClassVar[KeywordField] = KeywordField("inputFields", "inputFields") + """ + List of fields that are inputs to this flow. + """ + OUTPUT_FIELDS: ClassVar[KeywordField] = KeywordField("outputFields", "outputFields") + """ + List of fields that are outputs from this flow. + """ + OUTPUT_STEPS: ClassVar[KeywordField] = KeywordField("outputSteps", "outputSteps") + """ + List of steps that are outputs from this flow. 
+ """ + + PROJECT: ClassVar[RelationField] = RelationField("project") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "site_qualified_name", + "project_qualified_name", + "top_level_project_qualified_name", + "project_hierarchy", + "input_fields", + "output_fields", + "output_steps", + "project", + ] + + @property + def site_qualified_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.site_qualified_name + + @site_qualified_name.setter + def site_qualified_name(self, site_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.site_qualified_name = site_qualified_name + + @property + def project_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.project_qualified_name + ) + + @project_qualified_name.setter + def project_qualified_name(self, project_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project_qualified_name = project_qualified_name + + @property + def top_level_project_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.top_level_project_qualified_name + ) + + @top_level_project_qualified_name.setter + def top_level_project_qualified_name( + self, top_level_project_qualified_name: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.top_level_project_qualified_name = ( + top_level_project_qualified_name + ) + + @property + def project_hierarchy(self) -> Optional[list[dict[str, str]]]: + return None if self.attributes is None else self.attributes.project_hierarchy + + @project_hierarchy.setter + def project_hierarchy(self, project_hierarchy: Optional[list[dict[str, str]]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project_hierarchy = project_hierarchy + + @property + def input_fields(self) -> Optional[list[dict[str, str]]]: + return None if self.attributes is None else self.attributes.input_fields + + @input_fields.setter + def input_fields(self, input_fields: Optional[list[dict[str, str]]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.input_fields = input_fields + + @property + def output_fields(self) -> Optional[list[dict[str, str]]]: + return None if self.attributes is None else self.attributes.output_fields + + @output_fields.setter + def output_fields(self, output_fields: Optional[list[dict[str, str]]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.output_fields = output_fields + + @property + def output_steps(self) -> Optional[list[dict[str, str]]]: + return None if self.attributes is None else self.attributes.output_steps + + @output_steps.setter + def output_steps(self, output_steps: Optional[list[dict[str, str]]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.output_steps = output_steps + + @property + def project(self) -> Optional[TableauProject]: + return None if self.attributes is None else self.attributes.project + + @project.setter + def project(self, project: Optional[TableauProject]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project = project + + class Attributes(Tableau.Attributes): + site_qualified_name: Optional[str] = Field(default=None, description="") + project_qualified_name: Optional[str] = 
Field(default=None, description="") + top_level_project_qualified_name: Optional[str] = Field( + default=None, description="" + ) + project_hierarchy: Optional[list[dict[str, str]]] = Field( + default=None, description="" + ) + input_fields: Optional[list[dict[str, str]]] = Field( + default=None, description="" + ) + output_fields: Optional[list[dict[str, str]]] = Field( + default=None, description="" + ) + output_steps: Optional[list[dict[str, str]]] = Field( + default=None, description="" + ) + project: Optional[TableauProject] = Field( + default=None, description="" + ) # relationship + + attributes: "TableauFlow.Attributes" = Field( + default_factory=lambda: TableauFlow.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .tableau_project import TableauProject # noqa diff --git a/pyatlan/model/assets/asset70.py b/pyatlan/model/assets/tableau_metric.py similarity index 87% rename from pyatlan/model/assets/asset70.py rename to pyatlan/model/assets/tableau_metric.py index 4481c59ad..7bead0f5b 100644 --- a/pyatlan/model/assets/asset70.py +++ b/pyatlan/model/assets/tableau_metric.py @@ -6,18 +6,17 @@ from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.fields.atlan_fields import KeywordField, RelationField -from .asset43 import Tableau -from .asset69 import TableauProject +from .tableau import Tableau class TableauMetric(Tableau): """Description""" - type_name: str = Field("TableauMetric", allow_mutation=False) + type_name: str = Field(default="TableauMetric", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -129,20 +128,16 @@ def project(self, project: Optional[TableauProject]): self.attributes.project = project class Attributes(Tableau.Attributes): - site_qualified_name: Optional[str] = Field( - None, description="", alias="siteQualifiedName" - ) - project_qualified_name: Optional[str] = Field( - None, description="", alias="projectQualifiedName" - ) + site_qualified_name: Optional[str] = Field(default=None, description="") + project_qualified_name: Optional[str] = Field(default=None, description="") top_level_project_qualified_name: Optional[str] = Field( - None, description="", alias="topLevelProjectQualifiedName" + default=None, description="" ) project_hierarchy: Optional[list[dict[str, str]]] = Field( - None, description="", alias="projectHierarchy" + default=None, description="" ) project: Optional[TableauProject] = Field( - None, description="", alias="project" + default=None, description="" ) # relationship attributes: "TableauMetric.Attributes" = Field( @@ -152,4 +147,4 @@ class Attributes(Tableau.Attributes): ) -TableauMetric.Attributes.update_forward_refs() +from .tableau_project import TableauProject # noqa diff --git a/pyatlan/model/assets/tableau_project.py b/pyatlan/model/assets/tableau_project.py new file mode 100644 index 000000000..14e9d7ed1 --- /dev/null +++ b/pyatlan/model/assets/tableau_project.py @@ -0,0 +1,241 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
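The generated Tableau modules in this changeset all follow one pattern: each convenience setter lazily creates the nested Attributes model before writing to it, and the trailing "# noqa" imports are deferred to the end of each module to break circular references between the related asset files. A minimal usage sketch of that pattern follows; it is illustrative only and not part of the diff, and it assumes the classes are re-exported from pyatlan.model.assets and that a bare constructor is valid, as the field defaults above suggest.

from pyatlan.model.assets import TableauProject, TableauWorkbook

project = TableauProject()
# Each generated setter guards with "if self.attributes is None: self.attributes = self.Attributes()"
# before assigning, so the nested Attributes model never needs to be built by hand.
project.site_qualified_name = "default/tableau/1234567890"
assert project.attributes.site_qualified_name == "default/tableau/1234567890"

# Relationship convenience properties accept the related generated models directly.
project.workbooks = [TableauWorkbook()]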
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import BooleanField, KeywordField, RelationField + +from .tableau import Tableau + + +class TableauProject(Tableau): + """Description""" + + type_name: str = Field(default="TableauProject", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "TableauProject": + raise ValueError("must be TableauProject") + return v + + def __setattr__(self, name, value): + if name in TableauProject._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + SITE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "siteQualifiedName", "siteQualifiedName" + ) + """ + Unique name of the site in which this project exists. + """ + TOP_LEVEL_PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "topLevelProjectQualifiedName", "topLevelProjectQualifiedName" + ) + """ + Unique name of the top-level project in which this project exists, if this is a nested project. + """ + IS_TOP_LEVEL_PROJECT: ClassVar[BooleanField] = BooleanField( + "isTopLevelProject", "isTopLevelProject" + ) + """ + Whether this project is a top-level project (true) or not (false). + """ + PROJECT_HIERARCHY: ClassVar[KeywordField] = KeywordField( + "projectHierarchy", "projectHierarchy" + ) + """ + List of top-level projects with their nested child projects. + """ + + PARENT_PROJECT: ClassVar[RelationField] = RelationField("parentProject") + """ + TBC + """ + WORKBOOKS: ClassVar[RelationField] = RelationField("workbooks") + """ + TBC + """ + SITE: ClassVar[RelationField] = RelationField("site") + """ + TBC + """ + DATASOURCES: ClassVar[RelationField] = RelationField("datasources") + """ + TBC + """ + FLOWS: ClassVar[RelationField] = RelationField("flows") + """ + TBC + """ + CHILD_PROJECTS: ClassVar[RelationField] = RelationField("childProjects") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "site_qualified_name", + "top_level_project_qualified_name", + "is_top_level_project", + "project_hierarchy", + "parent_project", + "workbooks", + "site", + "datasources", + "flows", + "child_projects", + ] + + @property + def site_qualified_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.site_qualified_name + + @site_qualified_name.setter + def site_qualified_name(self, site_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.site_qualified_name = site_qualified_name + + @property + def top_level_project_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.top_level_project_qualified_name + ) + + @top_level_project_qualified_name.setter + def top_level_project_qualified_name( + self, top_level_project_qualified_name: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.top_level_project_qualified_name = ( + top_level_project_qualified_name + ) + + @property + def is_top_level_project(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_top_level_project + + @is_top_level_project.setter + def is_top_level_project(self, is_top_level_project: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_top_level_project = is_top_level_project + + @property + def 
project_hierarchy(self) -> Optional[list[dict[str, str]]]: + return None if self.attributes is None else self.attributes.project_hierarchy + + @project_hierarchy.setter + def project_hierarchy(self, project_hierarchy: Optional[list[dict[str, str]]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project_hierarchy = project_hierarchy + + @property + def parent_project(self) -> Optional[TableauProject]: + return None if self.attributes is None else self.attributes.parent_project + + @parent_project.setter + def parent_project(self, parent_project: Optional[TableauProject]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.parent_project = parent_project + + @property + def workbooks(self) -> Optional[list[TableauWorkbook]]: + return None if self.attributes is None else self.attributes.workbooks + + @workbooks.setter + def workbooks(self, workbooks: Optional[list[TableauWorkbook]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.workbooks = workbooks + + @property + def site(self) -> Optional[TableauSite]: + return None if self.attributes is None else self.attributes.site + + @site.setter + def site(self, site: Optional[TableauSite]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.site = site + + @property + def datasources(self) -> Optional[list[TableauDatasource]]: + return None if self.attributes is None else self.attributes.datasources + + @datasources.setter + def datasources(self, datasources: Optional[list[TableauDatasource]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.datasources = datasources + + @property + def flows(self) -> Optional[list[TableauFlow]]: + return None if self.attributes is None else self.attributes.flows + + @flows.setter + def flows(self, flows: Optional[list[TableauFlow]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.flows = flows + + @property + def child_projects(self) -> Optional[list[TableauProject]]: + return None if self.attributes is None else self.attributes.child_projects + + @child_projects.setter + def child_projects(self, child_projects: Optional[list[TableauProject]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.child_projects = child_projects + + class Attributes(Tableau.Attributes): + site_qualified_name: Optional[str] = Field(default=None, description="") + top_level_project_qualified_name: Optional[str] = Field( + default=None, description="" + ) + is_top_level_project: Optional[bool] = Field(default=None, description="") + project_hierarchy: Optional[list[dict[str, str]]] = Field( + default=None, description="" + ) + parent_project: Optional[TableauProject] = Field( + default=None, description="" + ) # relationship + workbooks: Optional[list[TableauWorkbook]] = Field( + default=None, description="" + ) # relationship + site: Optional[TableauSite] = Field( + default=None, description="" + ) # relationship + datasources: Optional[list[TableauDatasource]] = Field( + default=None, description="" + ) # relationship + flows: Optional[list[TableauFlow]] = Field( + default=None, description="" + ) # relationship + child_projects: Optional[list[TableauProject]] = Field( + default=None, description="" + ) # relationship + + attributes: "TableauProject.Attributes" = Field( + default_factory=lambda: TableauProject.Attributes(), + description="Map of attributes in the instance and 
their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .tableau_datasource import TableauDatasource # noqa +from .tableau_flow import TableauFlow # noqa +from .tableau_site import TableauSite # noqa +from .tableau_workbook import TableauWorkbook # noqa diff --git a/pyatlan/model/assets/tableau_site.py b/pyatlan/model/assets/tableau_site.py new file mode 100644 index 000000000..461e693f5 --- /dev/null +++ b/pyatlan/model/assets/tableau_site.py @@ -0,0 +1,63 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import RelationField + +from .tableau import Tableau + + +class TableauSite(Tableau): + """Description""" + + type_name: str = Field(default="TableauSite", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "TableauSite": + raise ValueError("must be TableauSite") + return v + + def __setattr__(self, name, value): + if name in TableauSite._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + PROJECTS: ClassVar[RelationField] = RelationField("projects") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "projects", + ] + + @property + def projects(self) -> Optional[list[TableauProject]]: + return None if self.attributes is None else self.attributes.projects + + @projects.setter + def projects(self, projects: Optional[list[TableauProject]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.projects = projects + + class Attributes(Tableau.Attributes): + projects: Optional[list[TableauProject]] = Field( + default=None, description="" + ) # relationship + + attributes: "TableauSite.Attributes" = Field( + default_factory=lambda: TableauSite.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .tableau_project import TableauProject # noqa diff --git a/pyatlan/model/assets/tableau_workbook.py b/pyatlan/model/assets/tableau_workbook.py new file mode 100644 index 000000000..58d2369c8 --- /dev/null +++ b/pyatlan/model/assets/tableau_workbook.py @@ -0,0 +1,227 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, RelationField + +from .tableau import Tableau + + +class TableauWorkbook(Tableau): + """Description""" + + type_name: str = Field(default="TableauWorkbook", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "TableauWorkbook": + raise ValueError("must be TableauWorkbook") + return v + + def __setattr__(self, name, value): + if name in TableauWorkbook._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + SITE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "siteQualifiedName", "siteQualifiedName" + ) + """ + Unique name of the site in which this workbook exists. 
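
# A hedged usage sketch of the convenience properties and relationships defined
# above. It assumes TableauSite and TableauProject are re-exported from
# pyatlan.model.assets and can be constructed as bare instances; neither
# assumption is verified here, and all values are illustrative only.
from pyatlan.model.assets import TableauProject, TableauSite

site = TableauSite()
project = TableauProject()
project.site = site        # setter writes through to project.attributes.site
site.projects = [project]  # reverse side of the relationship, set explicitly
assert project.attributes is not None and project.attributes.site is not None
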
+ """ + PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "projectQualifiedName", "projectQualifiedName" + ) + """ + Unique name of the project in which this workbook exists. + """ + TOP_LEVEL_PROJECT_NAME: ClassVar[KeywordField] = KeywordField( + "topLevelProjectName", "topLevelProjectName" + ) + """ + Simple name of the top-level project in which this workbook exists. + """ + TOP_LEVEL_PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "topLevelProjectQualifiedName", "topLevelProjectQualifiedName" + ) + """ + Unique name of the top-level project in which this workbook exists. + """ + PROJECT_HIERARCHY: ClassVar[KeywordField] = KeywordField( + "projectHierarchy", "projectHierarchy" + ) + """ + List of top-level projects with their nested child projects. + """ + + PROJECT: ClassVar[RelationField] = RelationField("project") + """ + TBC + """ + DASHBOARDS: ClassVar[RelationField] = RelationField("dashboards") + """ + TBC + """ + WORKSHEETS: ClassVar[RelationField] = RelationField("worksheets") + """ + TBC + """ + DATASOURCES: ClassVar[RelationField] = RelationField("datasources") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "site_qualified_name", + "project_qualified_name", + "top_level_project_name", + "top_level_project_qualified_name", + "project_hierarchy", + "project", + "dashboards", + "worksheets", + "datasources", + ] + + @property + def site_qualified_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.site_qualified_name + + @site_qualified_name.setter + def site_qualified_name(self, site_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.site_qualified_name = site_qualified_name + + @property + def project_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.project_qualified_name + ) + + @project_qualified_name.setter + def project_qualified_name(self, project_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project_qualified_name = project_qualified_name + + @property + def top_level_project_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.top_level_project_name + ) + + @top_level_project_name.setter + def top_level_project_name(self, top_level_project_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.top_level_project_name = top_level_project_name + + @property + def top_level_project_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.top_level_project_qualified_name + ) + + @top_level_project_qualified_name.setter + def top_level_project_qualified_name( + self, top_level_project_qualified_name: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.top_level_project_qualified_name = ( + top_level_project_qualified_name + ) + + @property + def project_hierarchy(self) -> Optional[list[dict[str, str]]]: + return None if self.attributes is None else self.attributes.project_hierarchy + + @project_hierarchy.setter + def project_hierarchy(self, project_hierarchy: Optional[list[dict[str, str]]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project_hierarchy = project_hierarchy + + @property + def project(self) -> Optional[TableauProject]: + return None if 
self.attributes is None else self.attributes.project + + @project.setter + def project(self, project: Optional[TableauProject]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project = project + + @property + def dashboards(self) -> Optional[list[TableauDashboard]]: + return None if self.attributes is None else self.attributes.dashboards + + @dashboards.setter + def dashboards(self, dashboards: Optional[list[TableauDashboard]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dashboards = dashboards + + @property + def worksheets(self) -> Optional[list[TableauWorksheet]]: + return None if self.attributes is None else self.attributes.worksheets + + @worksheets.setter + def worksheets(self, worksheets: Optional[list[TableauWorksheet]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.worksheets = worksheets + + @property + def datasources(self) -> Optional[list[TableauDatasource]]: + return None if self.attributes is None else self.attributes.datasources + + @datasources.setter + def datasources(self, datasources: Optional[list[TableauDatasource]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.datasources = datasources + + class Attributes(Tableau.Attributes): + site_qualified_name: Optional[str] = Field(default=None, description="") + project_qualified_name: Optional[str] = Field(default=None, description="") + top_level_project_name: Optional[str] = Field(default=None, description="") + top_level_project_qualified_name: Optional[str] = Field( + default=None, description="" + ) + project_hierarchy: Optional[list[dict[str, str]]] = Field( + default=None, description="" + ) + project: Optional[TableauProject] = Field( + default=None, description="" + ) # relationship + dashboards: Optional[list[TableauDashboard]] = Field( + default=None, description="" + ) # relationship + worksheets: Optional[list[TableauWorksheet]] = Field( + default=None, description="" + ) # relationship + datasources: Optional[list[TableauDatasource]] = Field( + default=None, description="" + ) # relationship + + attributes: "TableauWorkbook.Attributes" = Field( + default_factory=lambda: TableauWorkbook.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .tableau_dashboard import TableauDashboard # noqa +from .tableau_datasource import TableauDatasource # noqa +from .tableau_project import TableauProject # noqa +from .tableau_worksheet import TableauWorksheet # noqa diff --git a/pyatlan/model/assets/tableau_worksheet.py b/pyatlan/model/assets/tableau_worksheet.py new file mode 100644 index 000000000..78702111f --- /dev/null +++ b/pyatlan/model/assets/tableau_worksheet.py @@ -0,0 +1,231 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
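
# The module added below repeats the convenience-property pattern used across
# these generated assets. As a reading aid, here is a minimal, self-contained
# sketch (the Sketch/SketchAttributes names are hypothetical, not part of this
# patch) of why __setattr__ is overridden: pydantic v1 rejects assignment to
# names that are not declared fields, so convenience properties are routed
# through object.__setattr__, which still invokes the property's setter.
from __future__ import annotations

from typing import ClassVar, Optional

from pydantic.v1 import BaseModel, Field


class SketchAttributes(BaseModel):
    alias: Optional[str] = Field(default=None, description="")


class Sketch(BaseModel):
    attributes: SketchAttributes = Field(default_factory=SketchAttributes)

    _convenience_properties: ClassVar[list[str]] = ["alias"]

    def __setattr__(self, name, value):
        if name in Sketch._convenience_properties:
            # object.__setattr__ bypasses pydantic's declared-field check but
            # still triggers the property (a data descriptor) on the class.
            return object.__setattr__(self, name, value)
        super().__setattr__(name, value)

    @property
    def alias(self) -> Optional[str]:
        return None if self.attributes is None else self.attributes.alias

    @alias.setter
    def alias(self, alias: Optional[str]):
        if self.attributes is None:
            self.attributes = SketchAttributes()
        self.attributes.alias = alias


s = Sketch()
s.alias = "example"        # routed to the property setter above
print(s.attributes.alias)  # -> example
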
+ + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, RelationField + +from .tableau import Tableau + + +class TableauWorksheet(Tableau): + """Description""" + + type_name: str = Field(default="TableauWorksheet", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "TableauWorksheet": + raise ValueError("must be TableauWorksheet") + return v + + def __setattr__(self, name, value): + if name in TableauWorksheet._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + SITE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "siteQualifiedName", "siteQualifiedName" + ) + """ + Unique name of the site in which this worksheet exists. + """ + PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "projectQualifiedName", "projectQualifiedName" + ) + """ + Unique name of the project in which this worksheet exists. + """ + TOP_LEVEL_PROJECT_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "topLevelProjectQualifiedName", "topLevelProjectQualifiedName" + ) + """ + Unique name of the top-level project in which this worksheet exists. + """ + PROJECT_HIERARCHY: ClassVar[KeywordField] = KeywordField( + "projectHierarchy", "projectHierarchy" + ) + """ + List of top-level projects with their nested child projects. + """ + WORKBOOK_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "workbookQualifiedName", "workbookQualifiedName" + ) + """ + Unique name of the workbook in which this worksheet exists. + """ + + WORKBOOK: ClassVar[RelationField] = RelationField("workbook") + """ + TBC + """ + DATASOURCE_FIELDS: ClassVar[RelationField] = RelationField("datasourceFields") + """ + TBC + """ + CALCULATED_FIELDS: ClassVar[RelationField] = RelationField("calculatedFields") + """ + TBC + """ + DASHBOARDS: ClassVar[RelationField] = RelationField("dashboards") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "site_qualified_name", + "project_qualified_name", + "top_level_project_qualified_name", + "project_hierarchy", + "workbook_qualified_name", + "workbook", + "datasource_fields", + "calculated_fields", + "dashboards", + ] + + @property + def site_qualified_name(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.site_qualified_name + + @site_qualified_name.setter + def site_qualified_name(self, site_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.site_qualified_name = site_qualified_name + + @property + def project_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.project_qualified_name + ) + + @project_qualified_name.setter + def project_qualified_name(self, project_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project_qualified_name = project_qualified_name + + @property + def top_level_project_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.top_level_project_qualified_name + ) + + @top_level_project_qualified_name.setter + def top_level_project_qualified_name( + self, top_level_project_qualified_name: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.top_level_project_qualified_name = ( + 
top_level_project_qualified_name + ) + + @property + def project_hierarchy(self) -> Optional[list[dict[str, str]]]: + return None if self.attributes is None else self.attributes.project_hierarchy + + @project_hierarchy.setter + def project_hierarchy(self, project_hierarchy: Optional[list[dict[str, str]]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.project_hierarchy = project_hierarchy + + @property + def workbook_qualified_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.workbook_qualified_name + ) + + @workbook_qualified_name.setter + def workbook_qualified_name(self, workbook_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.workbook_qualified_name = workbook_qualified_name + + @property + def workbook(self) -> Optional[TableauWorkbook]: + return None if self.attributes is None else self.attributes.workbook + + @workbook.setter + def workbook(self, workbook: Optional[TableauWorkbook]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.workbook = workbook + + @property + def datasource_fields(self) -> Optional[list[TableauDatasourceField]]: + return None if self.attributes is None else self.attributes.datasource_fields + + @datasource_fields.setter + def datasource_fields( + self, datasource_fields: Optional[list[TableauDatasourceField]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.datasource_fields = datasource_fields + + @property + def calculated_fields(self) -> Optional[list[TableauCalculatedField]]: + return None if self.attributes is None else self.attributes.calculated_fields + + @calculated_fields.setter + def calculated_fields( + self, calculated_fields: Optional[list[TableauCalculatedField]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.calculated_fields = calculated_fields + + @property + def dashboards(self) -> Optional[list[TableauDashboard]]: + return None if self.attributes is None else self.attributes.dashboards + + @dashboards.setter + def dashboards(self, dashboards: Optional[list[TableauDashboard]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.dashboards = dashboards + + class Attributes(Tableau.Attributes): + site_qualified_name: Optional[str] = Field(default=None, description="") + project_qualified_name: Optional[str] = Field(default=None, description="") + top_level_project_qualified_name: Optional[str] = Field( + default=None, description="" + ) + project_hierarchy: Optional[list[dict[str, str]]] = Field( + default=None, description="" + ) + workbook_qualified_name: Optional[str] = Field(default=None, description="") + workbook: Optional[TableauWorkbook] = Field( + default=None, description="" + ) # relationship + datasource_fields: Optional[list[TableauDatasourceField]] = Field( + default=None, description="" + ) # relationship + calculated_fields: Optional[list[TableauCalculatedField]] = Field( + default=None, description="" + ) # relationship + dashboards: Optional[list[TableauDashboard]] = Field( + default=None, description="" + ) # relationship + + attributes: "TableauWorksheet.Attributes" = Field( + default_factory=lambda: TableauWorksheet.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .tableau_calculated_field import TableauCalculatedField # noqa +from .tableau_dashboard import TableauDashboard # noqa +from .tableau_datasource_field import TableauDatasourceField # noqa +from .tableau_workbook import TableauWorkbook # noqa diff --git a/pyatlan/model/assets/tag.py b/pyatlan/model/assets/tag.py new file mode 100644 index 000000000..5f017e664 --- /dev/null +++ b/pyatlan/model/assets/tag.py @@ -0,0 +1,117 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField, KeywordTextField +from pyatlan.model.structs import SourceTagAttribute + +from .catalog import Catalog + + +class Tag(Catalog): + """Description""" + + type_name: str = Field(default="Tag", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "Tag": + raise ValueError("must be Tag") + return v + + def __setattr__(self, name, value): + if name in Tag._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + TAG_ID: ClassVar[KeywordField] = KeywordField("tagId", "tagId") + """ + Unique identifier of the tag in the source system. + """ + TAG_ATTRIBUTES: ClassVar[KeywordField] = KeywordField( + "tagAttributes", "tagAttributes" + ) + """ + Attributes associated with the tag in the source system. + """ + TAG_ALLOWED_VALUES: ClassVar[KeywordTextField] = KeywordTextField( + "tagAllowedValues", "tagAllowedValues", "tagAllowedValues.text" + ) + """ + Allowed values for the tag in the source system. These are denormalized from tagAttributes for ease of querying. + """ + MAPPED_CLASSIFICATION_NAME: ClassVar[KeywordField] = KeywordField( + "mappedClassificationName", "mappedClassificationName" + ) + """ + Name of the classification in Atlan that is mapped to this tag. 
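
# Note, kept as comments so the surrounding hunk stays readable: the
# search-field constant above is named after the underlying Atlas attribute
# ("mappedClassificationName"), while the convenience property below is exposed
# as mapped_atlan_tag_name. A hedged sketch of that property, assuming Tag can
# be instantiated directly (a source-specific subtype is more likely in
# practice); the value is illustrative only.
from pyatlan.model.assets import Tag

tag = Tag()
tag.mapped_atlan_tag_name = "PII"  # hypothetical Atlan tag name
assert tag.attributes.mapped_atlan_tag_name == "PII"
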
+ """ + + _convenience_properties: ClassVar[list[str]] = [ + "tag_id", + "tag_attributes", + "tag_allowed_values", + "mapped_atlan_tag_name", + ] + + @property + def tag_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.tag_id + + @tag_id.setter + def tag_id(self, tag_id: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.tag_id = tag_id + + @property + def tag_attributes(self) -> Optional[list[SourceTagAttribute]]: + return None if self.attributes is None else self.attributes.tag_attributes + + @tag_attributes.setter + def tag_attributes(self, tag_attributes: Optional[list[SourceTagAttribute]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.tag_attributes = tag_attributes + + @property + def tag_allowed_values(self) -> Optional[set[str]]: + return None if self.attributes is None else self.attributes.tag_allowed_values + + @tag_allowed_values.setter + def tag_allowed_values(self, tag_allowed_values: Optional[set[str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.tag_allowed_values = tag_allowed_values + + @property + def mapped_atlan_tag_name(self) -> Optional[str]: + return ( + None if self.attributes is None else self.attributes.mapped_atlan_tag_name + ) + + @mapped_atlan_tag_name.setter + def mapped_atlan_tag_name(self, mapped_atlan_tag_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mapped_atlan_tag_name = mapped_atlan_tag_name + + class Attributes(Catalog.Attributes): + tag_id: Optional[str] = Field(default=None, description="") + tag_attributes: Optional[list[SourceTagAttribute]] = Field( + default=None, description="" + ) + tag_allowed_values: Optional[set[str]] = Field(default=None, description="") + mapped_atlan_tag_name: Optional[str] = Field(default=None, description="") + + attributes: "Tag.Attributes" = Field( + default_factory=lambda: Tag.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) diff --git a/pyatlan/model/assets/asset02.py b/pyatlan/model/assets/tag_attachment.py similarity index 85% rename from pyatlan/model/assets/asset02.py rename to pyatlan/model/assets/tag_attachment.py index 670fed4fb..606d7edb1 100644 --- a/pyatlan/model/assets/asset02.py +++ b/pyatlan/model/assets/tag_attachment.py @@ -6,17 +6,17 @@ from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.fields.atlan_fields import KeywordTextField -from .asset00 import Asset +from .asset import Asset class TagAttachment(Asset, type_name="TagAttachment"): """Description""" - type_name: str = Field("TagAttachment", allow_mutation=False) + type_name: str = Field(default="TagAttachment", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -74,18 +74,11 @@ def tag_attachment_string_value(self, tag_attachment_string_value: Optional[str] self.attributes.tag_attachment_string_value = tag_attachment_string_value class Attributes(Asset.Attributes): - tag_qualified_name: Optional[str] = Field( - None, description="", alias="tagQualifiedName" - ) - tag_attachment_string_value: Optional[str] = Field( - None, description="", alias="tagAttachmentStringValue" - ) + tag_qualified_name: Optional[str] = Field(default=None, description="") + tag_attachment_string_value: Optional[str] = Field(default=None, description="") attributes: "TagAttachment.Attributes" = Field( default_factory=lambda: TagAttachment.Attributes(), description="Map of attributes in the instance and their values. The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) - - -TagAttachment.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset50.py b/pyatlan/model/assets/thoughtspot.py similarity index 84% rename from pyatlan/model/assets/asset50.py rename to pyatlan/model/assets/thoughtspot.py index 3a5fff293..f7a31a677 100644 --- a/pyatlan/model/assets/asset50.py +++ b/pyatlan/model/assets/thoughtspot.py @@ -6,17 +6,17 @@ from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.fields.atlan_fields import KeywordField, TextField -from .asset19 import BI +from .b_i import BI class Thoughtspot(BI): """Description""" - type_name: str = Field("Thoughtspot", allow_mutation=False) + type_name: str = Field(default="Thoughtspot", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -74,18 +74,11 @@ def thoughtspot_question_text(self, thoughtspot_question_text: Optional[str]): self.attributes.thoughtspot_question_text = thoughtspot_question_text class Attributes(BI.Attributes): - thoughtspot_chart_type: Optional[str] = Field( - None, description="", alias="thoughtspotChartType" - ) - thoughtspot_question_text: Optional[str] = Field( - None, description="", alias="thoughtspotQuestionText" - ) + thoughtspot_chart_type: Optional[str] = Field(default=None, description="") + thoughtspot_question_text: Optional[str] = Field(default=None, description="") attributes: "Thoughtspot.Attributes" = Field( default_factory=lambda: Thoughtspot.Attributes(), description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " "type, so are described in the sub-types of this schema.\n", ) - - -Thoughtspot.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset78.py b/pyatlan/model/assets/thoughtspot_answer.py similarity index 76% rename from pyatlan/model/assets/asset78.py rename to pyatlan/model/assets/thoughtspot_answer.py index a87cca0af..2860fbf5c 100644 --- a/pyatlan/model/assets/asset78.py +++ b/pyatlan/model/assets/thoughtspot_answer.py @@ -6,15 +6,15 @@ from typing import ClassVar -from pydantic import Field, validator +from pydantic.v1 import Field, validator -from .asset50 import Thoughtspot +from .thoughtspot import Thoughtspot class ThoughtspotAnswer(Thoughtspot): """Description""" - type_name: str = Field("ThoughtspotAnswer", allow_mutation=False) + type_name: str = Field(default="ThoughtspotAnswer", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -28,6 +28,3 @@ def __setattr__(self, name, value): super().__setattr__(name, value) _convenience_properties: ClassVar[list[str]] = [] - - -ThoughtspotAnswer.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/asset77.py b/pyatlan/model/assets/thoughtspot_dashlet.py similarity index 62% rename from pyatlan/model/assets/asset77.py rename to pyatlan/model/assets/thoughtspot_dashlet.py index 168751be6..9e1043b69 100644 --- a/pyatlan/model/assets/asset77.py +++ b/pyatlan/model/assets/thoughtspot_dashlet.py @@ -6,66 +6,17 @@ from typing import ClassVar, Optional -from pydantic import Field, validator +from pydantic.v1 import Field, validator from pyatlan.model.fields.atlan_fields import KeywordTextField, RelationField -from .asset50 import Thoughtspot - - -class ThoughtspotLiveboard(Thoughtspot): - """Description""" - - type_name: str = Field("ThoughtspotLiveboard", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "ThoughtspotLiveboard": - raise ValueError("must be ThoughtspotLiveboard") - return v - - def __setattr__(self, name, value): - if name in ThoughtspotLiveboard._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - THOUGHTSPOT_DASHLETS: ClassVar[RelationField] = RelationField("thoughtspotDashlets") - """ - TBC - """ - - _convenience_properties: ClassVar[list[str]] = [ - "thoughtspot_dashlets", - ] - - @property - def thoughtspot_dashlets(self) -> Optional[list[ThoughtspotDashlet]]: - return None if self.attributes is None else self.attributes.thoughtspot_dashlets - - @thoughtspot_dashlets.setter - def thoughtspot_dashlets( - self, thoughtspot_dashlets: Optional[list[ThoughtspotDashlet]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.thoughtspot_dashlets = thoughtspot_dashlets - - class Attributes(Thoughtspot.Attributes): - thoughtspot_dashlets: Optional[list[ThoughtspotDashlet]] = Field( - None, description="", alias="thoughtspotDashlets" - ) # relationship - - attributes: "ThoughtspotLiveboard.Attributes" = Field( - default_factory=lambda: ThoughtspotLiveboard.Attributes(), - description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " - "type, so are described in the sub-types of this schema.\n", - ) +from .thoughtspot import Thoughtspot class ThoughtspotDashlet(Thoughtspot): """Description""" - type_name: str = Field("ThoughtspotDashlet", allow_mutation=False) + type_name: str = Field(default="ThoughtspotDashlet", allow_mutation=False) @validator("type_name") def validate_type_name(cls, v): @@ -155,14 +106,12 @@ def thoughtspot_liveboard( self.attributes.thoughtspot_liveboard = thoughtspot_liveboard class Attributes(Thoughtspot.Attributes): - thoughtspot_liveboard_name: Optional[str] = Field( - None, description="", alias="thoughtspotLiveboardName" - ) + thoughtspot_liveboard_name: Optional[str] = Field(default=None, description="") thoughtspot_liveboard_qualified_name: Optional[str] = Field( - None, description="", alias="thoughtspotLiveboardQualifiedName" + default=None, description="" ) thoughtspot_liveboard: Optional[ThoughtspotLiveboard] = Field( - None, description="", alias="thoughtspotLiveboard" + default=None, description="" ) # relationship attributes: "ThoughtspotDashlet.Attributes" = Field( @@ -172,7 +121,4 @@ class Attributes(Thoughtspot.Attributes): ) -ThoughtspotLiveboard.Attributes.update_forward_refs() - - -ThoughtspotDashlet.Attributes.update_forward_refs() +from .thoughtspot_liveboard import ThoughtspotLiveboard # noqa diff --git a/pyatlan/model/assets/thoughtspot_liveboard.py b/pyatlan/model/assets/thoughtspot_liveboard.py new file mode 100644 index 000000000..deb25a118 --- /dev/null +++ b/pyatlan/model/assets/thoughtspot_liveboard.py @@ -0,0 +1,65 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import RelationField + +from .thoughtspot import Thoughtspot + + +class ThoughtspotLiveboard(Thoughtspot): + """Description""" + + type_name: str = Field(default="ThoughtspotLiveboard", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "ThoughtspotLiveboard": + raise ValueError("must be ThoughtspotLiveboard") + return v + + def __setattr__(self, name, value): + if name in ThoughtspotLiveboard._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + THOUGHTSPOT_DASHLETS: ClassVar[RelationField] = RelationField("thoughtspotDashlets") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "thoughtspot_dashlets", + ] + + @property + def thoughtspot_dashlets(self) -> Optional[list[ThoughtspotDashlet]]: + return None if self.attributes is None else self.attributes.thoughtspot_dashlets + + @thoughtspot_dashlets.setter + def thoughtspot_dashlets( + self, thoughtspot_dashlets: Optional[list[ThoughtspotDashlet]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.thoughtspot_dashlets = thoughtspot_dashlets + + class Attributes(Thoughtspot.Attributes): + thoughtspot_dashlets: Optional[list[ThoughtspotDashlet]] = Field( + default=None, description="" + ) # relationship + + attributes: "ThoughtspotLiveboard.Attributes" = Field( + default_factory=lambda: ThoughtspotLiveboard.Attributes(), + description="Map of attributes in the instance and their values. 
The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .thoughtspot_dashlet import ThoughtspotDashlet # noqa diff --git a/pyatlan/model/assets/view.py b/pyatlan/model/assets/view.py new file mode 100644 index 000000000..95fc6c1db --- /dev/null +++ b/pyatlan/model/assets/view.py @@ -0,0 +1,281 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.enums import AtlanConnectorType +from pyatlan.model.fields.atlan_fields import ( + BooleanField, + KeywordField, + NumericField, + RelationField, +) +from pyatlan.utils import init_guid, validate_required_fields + +from .s_q_l import SQL + + +class View(SQL): + """Description""" + + @classmethod + # @validate_arguments() + @init_guid + def create(cls, *, name: str, schema_qualified_name: str) -> View: + validate_required_fields( + ["name", "schema_qualified_name"], [name, schema_qualified_name] + ) + attributes = View.Attributes.create( + name=name, schema_qualified_name=schema_qualified_name + ) + return cls(attributes=attributes) + + type_name: str = Field(default="View", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "View": + raise ValueError("must be View") + return v + + def __setattr__(self, name, value): + if name in View._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + COLUMN_COUNT: ClassVar[NumericField] = NumericField("columnCount", "columnCount") + """ + Number of columns in this view. + """ + ROW_COUNT: ClassVar[NumericField] = NumericField("rowCount", "rowCount") + """ + Number of rows in this view. + """ + SIZE_BYTES: ClassVar[NumericField] = NumericField("sizeBytes", "sizeBytes") + """ + Size of this view, in bytes. + """ + IS_QUERY_PREVIEW: ClassVar[BooleanField] = BooleanField( + "isQueryPreview", "isQueryPreview" + ) + """ + Whether preview queries are allowed on this view (true) or not (false). + """ + QUERY_PREVIEW_CONFIG: ClassVar[KeywordField] = KeywordField( + "queryPreviewConfig", "queryPreviewConfig" + ) + """ + Configuration for preview queries on this view. + """ + ALIAS: ClassVar[KeywordField] = KeywordField("alias", "alias") + """ + Alias for this view. + """ + IS_TEMPORARY: ClassVar[BooleanField] = BooleanField("isTemporary", "isTemporary") + """ + Whether this view is temporary (true) or not (false). + """ + DEFINITION: ClassVar[KeywordField] = KeywordField("definition", "definition") + """ + SQL definition of this view. 
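
# A hedged sketch of the creation helper declared earlier in this class. The
# qualified-name segments are illustrative only; Attributes.create further
# below expects exactly five "/"-separated parts whose second segment is a
# valid connector type.
from pyatlan.model.assets import View

view = View.create(
    name="MY_VIEW",
    schema_qualified_name="default/snowflake/1234567890/MY_DB/MY_SCHEMA",
)
print(view.qualified_name)  # -> default/snowflake/1234567890/MY_DB/MY_SCHEMA/MY_VIEW
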
+ """ + + COLUMNS: ClassVar[RelationField] = RelationField("columns") + """ + TBC + """ + QUERIES: ClassVar[RelationField] = RelationField("queries") + """ + TBC + """ + ATLAN_SCHEMA: ClassVar[RelationField] = RelationField("atlanSchema") + """ + TBC + """ + + _convenience_properties: ClassVar[list[str]] = [ + "column_count", + "row_count", + "size_bytes", + "is_query_preview", + "query_preview_config", + "alias", + "is_temporary", + "definition", + "columns", + "queries", + "atlan_schema", + ] + + @property + def column_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.column_count + + @column_count.setter + def column_count(self, column_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.column_count = column_count + + @property + def row_count(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.row_count + + @row_count.setter + def row_count(self, row_count: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.row_count = row_count + + @property + def size_bytes(self) -> Optional[int]: + return None if self.attributes is None else self.attributes.size_bytes + + @size_bytes.setter + def size_bytes(self, size_bytes: Optional[int]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.size_bytes = size_bytes + + @property + def is_query_preview(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_query_preview + + @is_query_preview.setter + def is_query_preview(self, is_query_preview: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_query_preview = is_query_preview + + @property + def query_preview_config(self) -> Optional[dict[str, str]]: + return None if self.attributes is None else self.attributes.query_preview_config + + @query_preview_config.setter + def query_preview_config(self, query_preview_config: Optional[dict[str, str]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.query_preview_config = query_preview_config + + @property + def alias(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.alias + + @alias.setter + def alias(self, alias: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.alias = alias + + @property + def is_temporary(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.is_temporary + + @is_temporary.setter + def is_temporary(self, is_temporary: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.is_temporary = is_temporary + + @property + def definition(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.definition + + @definition.setter + def definition(self, definition: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.definition = definition + + @property + def columns(self) -> Optional[list[Column]]: + return None if self.attributes is None else self.attributes.columns + + @columns.setter + def columns(self, columns: Optional[list[Column]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.columns = columns + + @property + def queries(self) -> Optional[list[Query]]: + return None if self.attributes is None else 
self.attributes.queries + + @queries.setter + def queries(self, queries: Optional[list[Query]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.queries = queries + + @property + def atlan_schema(self) -> Optional[Schema]: + return None if self.attributes is None else self.attributes.atlan_schema + + @atlan_schema.setter + def atlan_schema(self, atlan_schema: Optional[Schema]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.atlan_schema = atlan_schema + + class Attributes(SQL.Attributes): + column_count: Optional[int] = Field(default=None, description="") + row_count: Optional[int] = Field(default=None, description="") + size_bytes: Optional[int] = Field(default=None, description="") + is_query_preview: Optional[bool] = Field(default=None, description="") + query_preview_config: Optional[dict[str, str]] = Field( + default=None, description="" + ) + alias: Optional[str] = Field(default=None, description="") + is_temporary: Optional[bool] = Field(default=None, description="") + definition: Optional[str] = Field(default=None, description="") + columns: Optional[list[Column]] = Field( + default=None, description="" + ) # relationship + queries: Optional[list[Query]] = Field( + default=None, description="" + ) # relationship + atlan_schema: Optional[Schema] = Field( + default=None, description="" + ) # relationship + + @classmethod + # @validate_arguments() + @init_guid + def create(cls, *, name: str, schema_qualified_name: str) -> View.Attributes: + if not name: + raise ValueError("name cannot be blank") + validate_required_fields(["schema_qualified_name"], [schema_qualified_name]) + fields = schema_qualified_name.split("/") + if len(fields) != 5: + raise ValueError("Invalid schema_qualified_name") + try: + connector_type = AtlanConnectorType(fields[1]) # type:ignore + except ValueError as e: + raise ValueError("Invalid schema_qualified_name") from e + return View.Attributes( + name=name, + database_name=fields[3], + connection_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}", + database_qualified_name=f"{fields[0]}/{fields[1]}/{fields[2]}/{fields[3]}", + qualified_name=f"{schema_qualified_name}/{name}", + schema_qualified_name=schema_qualified_name, + schema_name=fields[4], + connector_name=connector_type.value, + atlan_schema=Schema.ref_by_qualified_name(schema_qualified_name), + ) + + attributes: "View.Attributes" = Field( + default_factory=lambda: View.Attributes(), + description="Map of attributes in the instance and their values. The specific keys of this map will vary by " + "type, so are described in the sub-types of this schema.\n", + ) + + +from .column import Column # noqa +from .query import Query # noqa +from .schema import Schema # noqa diff --git a/pyatlan/model/atlan_image.py b/pyatlan/model/atlan_image.py index 81ab8f8f2..0276769e6 100644 --- a/pyatlan/model/atlan_image.py +++ b/pyatlan/model/atlan_image.py @@ -4,14 +4,16 @@ from typing import Optional -from pydantic import Field +from pydantic.v1 import Field from pyatlan.model.core import AtlanObject class AtlanImage(AtlanObject): - id: Optional[str] = Field(description="Unique identifier (GUID) of the image.") - version: Optional[str] = Field(description="TBC") + id: Optional[str] = Field( + default=None, description="Unique identifier (GUID) of the image." + ) + version: Optional[str] = Field(default=None, description="TBC") created_at: Optional[int] = Field( description="Time at which the image was uploaded (epoch), in milliseconds." 
) @@ -19,29 +21,29 @@ class AtlanImage(AtlanObject): description="Time at which the image was last modified (epoch), in milliseconds." ) file_name: Optional[str] = Field( - description="Generated name of the image that was uploaded." + default=None, description="Generated name of the image that was uploaded." ) raw_name: Optional[str] = Field( - description="Generated name of the image that was uploaded." + default=None, description="Generated name of the image that was uploaded." ) key: Optional[str] = Field( - description="Generated name of the image that was uploaded." + default=None, description="Generated name of the image that was uploaded." ) extension: Optional[str] = Field( - description="Filename extension for the image that was uploaded." + default=None, description="Filename extension for the image that was uploaded." ) content_type: Optional[str] = Field( - description="MIME type for the image that was uploaded." + default=None, description="MIME type for the image that was uploaded." ) file_size: Optional[str] = Field( - description="Size of the image that was uploaded, in bytes." + default=None, description="Size of the image that was uploaded, in bytes." ) is_encrypted: Optional[bool] = Field( description="Whether the image is encrypted (true) or not (false)." ) - redirect_url: Optional[str] = Field(description="TBC") - is_uploaded: Optional[bool] = Field(description="TBC") - uploaded_at: Optional[str] = Field(description="TBC") + redirect_url: Optional[str] = Field(default=None, description="TBC") + is_uploaded: Optional[bool] = Field(default=None, description="TBC") + uploaded_at: Optional[str] = Field(default=None, description="TBC") is_archived: Optional[bool] = Field( description="Whether the image has been archived (true) or is still actively available (false)." 
) diff --git a/pyatlan/model/audit.py b/pyatlan/model/audit.py index 55b33fe38..2b3938434 100644 --- a/pyatlan/model/audit.py +++ b/pyatlan/model/audit.py @@ -2,7 +2,7 @@ from enum import Enum from typing import Any, Generator, Iterable, Optional, Union -from pydantic import Field, ValidationError, parse_obj_as, root_validator +from pydantic.v1 import Field, ValidationError, parse_obj_as, root_validator from pyatlan.cache.custom_metadata_cache import CustomMetadataCache from pyatlan.client.common import ApiCaller @@ -184,10 +184,10 @@ class EntityAudit(AtlanObject): ) user: str = Field(description="User who carried out the activity.") action: AuditActionType = Field(description="The type of activity that was done.") - details: Optional[Any] = Field(description="Unused.") + details: Optional[Any] = Field(default=None, description="Unused.") event_key: str = Field(description="Unique identifier of the activity.") - entity: Optional[Any] = Field(description="Unused.") - type: Optional[Any] = Field(description="Unused.") + entity: Optional[Any] = Field(default=None, description="Unused.") + type: Optional[Any] = Field(default=None, description="Unused.") detail: Optional[ Union[ CustomMetadataAttributesAuditDetail, diff --git a/pyatlan/model/core.py b/pyatlan/model/core.py index c38e96170..dfbfca814 100644 --- a/pyatlan/model/core.py +++ b/pyatlan/model/core.py @@ -3,18 +3,18 @@ from abc import ABC from typing import TYPE_CHECKING -from pydantic import BaseModel, Extra, Field, PrivateAttr, validator +from pydantic.v1 import BaseModel, Extra, Field, PrivateAttr, validator from pyatlan.model.utils import encoders, to_camel_case if TYPE_CHECKING: from dataclasses import dataclass else: - from pydantic.dataclasses import dataclass + from pydantic.v1.dataclasses import dataclass from typing import Any, Generic, Optional, TypeVar -from pydantic.generics import GenericModel +from pydantic.v1.generics import GenericModel from pyatlan.model.constants import DELETED_, DELETED_SENTINEL from pyatlan.model.enums import AnnouncementType, EntityStatus, SaveSemantic @@ -100,22 +100,22 @@ class Config: class SearchRequest(AtlanObject, ABC): attributes: Optional[list[str]] = Field( - description="List of attributes to be returned for each result.", default_factory=list, + description="List of attributes to be returned for each result.", ) offset: Optional[int] = Field( - description="Starting point for pagination.", alias="from" + default=None, description="Starting point for pagination.", alias="from" ) size: Optional[int] = Field( - description="How many results to include in each page of results." + default=None, description="How many results to include in each page of results." 
) @dataclass class Announcement: announcement_title: str - announcement_message: Optional[str] announcement_type: AnnouncementType + announcement_message: Optional[str] = Field(default=None) class AtlanTag(AtlanObject): @@ -123,30 +123,30 @@ class Config: extra = "forbid" type_name: Optional[AtlanTagName] = Field( - None, + default=None, description="Name of the type definition that defines this instance.\n", alias="typeName", ) entity_guid: Optional[str] = Field( - None, + default=None, description="Unique identifier of the entity instance.\n", example="917ffec9-fa84-4c59-8e6c-c7b114d04be3", alias="entityGuid", ) entity_status: Optional[EntityStatus] = Field( - None, + default=None, description="Status of the entity", example=EntityStatus.ACTIVE, alias="entityStatus", ) - propagate: Optional[bool] = Field(None, description="") + propagate: Optional[bool] = Field(default=None, description="") remove_propagations_on_entity_delete: Optional[bool] = Field( - None, description="", alias="removePropagationsOnEntityDelete" + default=None, description="", alias="removePropagationsOnEntityDelete" ) restrict_propagation_through_lineage: Optional[bool] = Field( - None, description="", alias="restrictPropagationThroughLineage" + default=None, description="", alias="restrictPropagationThroughLineage" ) - validity_periods: Optional[list[str]] = Field(None, alias="validityPeriods") + validity_periods: Optional[list[str]] = Field(default=None, alias="validityPeriods") _source_tag_attachements: list[SourceTagAttachment] = PrivateAttr( default_factory=list ) @@ -211,7 +211,7 @@ class Meaning(AtlanObject): class AssetResponse(AtlanObject, GenericModel, Generic[T]): entity: T referredEntities: Optional[dict[str, Any]] = Field( - None, + default=None, description="Map of related entities keyed by the GUID of the related entity. The values will be the detailed " "entity object of the related entity.\n", ) diff --git a/pyatlan/model/credential.py b/pyatlan/model/credential.py index 33c05d760..087bae9d7 100644 --- a/pyatlan/model/credential.py +++ b/pyatlan/model/credential.py @@ -1,39 +1,39 @@ from typing import Any, Optional -from pydantic import Field +from pydantic.v1 import Field from pyatlan.model.core import AtlanObject class Credential(AtlanObject): # Unique identifier (GUID) of the credential. - id: Optional[str] + id: Optional[str] = Field(default=None) # Name of the credential. - name: Optional[str] + name: Optional[str] = Field(default=None) # Hostname for which connectivity is defined by the credential. - host: Optional[str] + host: Optional[str] = Field(default=None) # Port number on which connectivity should be done. - port: Optional[int] + port: Optional[int] = Field(default=None) # Authentication mechanism represented by the credential. - auth_type: Optional[str] + auth_type: Optional[str] = Field(default=None) # Type of connector used by the credential. - connector_type: Optional[str] + connector_type: Optional[str] = Field(default=None) # Less sensitive portion of the credential # typically used for a username for basic authentication # or client IDs for other forms of authentication. - username: Optional[str] + username: Optional[str] = Field(default=None) # More sensitive portion of the credential, # typically used for a password for basic authenticatio # or client secrets for other forms of authentication. - password: Optional[str] + password: Optional[str] = Field(default=None) # Additional details about the credential. 
This can capture, # for example, a secondary secret for particular forms of authentication # and / or additional details about the scope of the connectivity # (a specific database, role, warehouse, etc). - extras: Optional[dict[str, Any]] = Field(alias="extra") + extras: Optional[dict[str, Any]] = Field(default=None, alias="extra") # Name of the connector configuration # responsible for managing the credential. - connector_config_name: Optional[str] + connector_config_name: Optional[str] = Field(default=None) class CredentialResponse(AtlanObject): diff --git a/pyatlan/model/custom_metadata.py b/pyatlan/model/custom_metadata.py index 3f8ba20b0..5b4d10645 100644 --- a/pyatlan/model/custom_metadata.py +++ b/pyatlan/model/custom_metadata.py @@ -1,7 +1,7 @@ from collections import UserDict from typing import Any, Optional -from pydantic import PrivateAttr +from pydantic.v1 import PrivateAttr from pyatlan.cache.custom_metadata_cache import CustomMetadataCache from pyatlan.errors import NotFoundError diff --git a/pyatlan/model/events.py b/pyatlan/model/events.py index de9efcd55..3caca1368 100644 --- a/pyatlan/model/events.py +++ b/pyatlan/model/events.py @@ -2,7 +2,7 @@ # Copyright 2023 Atlan Pte. Ltd. from typing import Any, Literal, Optional, Union -from pydantic import Field +from pydantic.v1 import Field from pyatlan.model.assets import Asset from pyatlan.model.core import AtlanObject, AtlanTag @@ -21,7 +21,7 @@ def __init__(__pydantic_self__, **data: Any) -> None: __pydantic_self__.__fields_set__.update(["type", "operation_type"]) event_type: Optional[str] = Field( - description="Type of the event payload.", alias="type" + default=None, description="Type of the event payload.", alias="type" ) operation_type: Literal[ "ENTITY_CREATE", @@ -103,13 +103,13 @@ class AtlanTagDeletePayload( class AtlanEvent(AtlanObject): source: Optional[Any] = Field(description="TBC") version: Optional[Any] = Field(description="TBC") - msg_compression_kind: Optional[str] = Field(description="TBC") + msg_compression_kind: Optional[str] = Field(default=None, description="TBC") msg_split_idx: Optional[int] = Field(description="TBC") msg_split_count: Optional[int] = Field(description="TBC") msg_source_ip: Optional[str] = Field( - description="Originating IP address for the event." + default=None, description="Originating IP address for the event." ) - msg_created_by: Optional[str] = Field(description="TBC") + msg_created_by: Optional[str] = Field(default=None, description="TBC") msg_creation_time: Optional[int] = Field( description="Timestamp (epoch) for when the event was created, in milliseconds." ) @@ -131,17 +131,18 @@ class AtlanEvent(AtlanObject): class AwsRequestContext(AtlanObject): account_id: Optional[str] = Field( - description="Account from which the request originated." + default=None, description="Account from which the request originated." 
     )
-    api_id: Optional[str] = Field(description="TBC")
-    domain_name: Optional[str] = Field(description="TBC")
-    domain_prefix: Optional[str] = Field(description="TBC")
+    api_id: Optional[str] = Field(default=None, description="TBC")
+    domain_name: Optional[str] = Field(default=None, description="TBC")
+    domain_prefix: Optional[str] = Field(default=None, description="TBC")
     http: Optional[dict[str, str]] = Field(description="TBC")
-    request_id: Optional[str] = Field(description="TBC")
-    route_key: Optional[str] = Field(description="TBC")
-    stage: Optional[str] = Field(description="TBC")
+    request_id: Optional[str] = Field(default=None, description="TBC")
+    route_key: Optional[str] = Field(default=None, description="TBC")
+    stage: Optional[str] = Field(default=None, description="TBC")
     time: Optional[str] = Field(
-        description="Time at which the event was received, as a formatted string."
+        default=None,
+        description="Time at which the event was received, as a formatted string.",
     )
     time_epoch: Optional[int] = Field(
         description="Time at which the event was received, epoch-based, in milliseconds."
@@ -149,16 +150,16 @@ class AwsRequestContext(AtlanObject):

 class AwsEventWrapper(AtlanObject):
-    version: Optional[str] = Field(description="TBC")
-    route_key: Optional[str] = Field(description="TBC")
-    raw_path: Optional[str] = Field(description="TBC")
-    raw_query_string: Optional[str] = Field(description="TBC")
+    version: Optional[str] = Field(default=None, description="TBC")
+    route_key: Optional[str] = Field(default=None, description="TBC")
+    raw_path: Optional[str] = Field(default=None, description="TBC")
+    raw_query_string: Optional[str] = Field(default=None, description="TBC")
     headers: Optional[dict[str, str]] = Field(
         description="Headers that were used when sending the event through to the Lambda URL."
     )
     request_context: Optional[AwsRequestContext] = Field(description="TBC")
     body: Optional[str] = Field(
-        description="Actual contents of the event that was sent by Atlan."
+        default=None, description="Actual contents of the event that was sent by Atlan."
     )
     is_base_64_encoded: Optional[bool] = Field(
         description="Whether the contents are base64-encoded (True) or plain text (False)."
diff --git a/pyatlan/model/fields/atlan_fields.py b/pyatlan/model/fields/atlan_fields.py
index 6087e273c..05072bc0e 100644
--- a/pyatlan/model/fields/atlan_fields.py
+++ b/pyatlan/model/fields/atlan_fields.py
@@ -5,7 +5,7 @@
 from enum import Enum
 from typing import Union, overload

-from pydantic import StrictBool, StrictFloat, StrictInt, StrictStr
+from pydantic.v1 import StrictBool, StrictFloat, StrictInt, StrictStr

 from pyatlan.errors import ErrorCode
 from pyatlan.model.aggregation import Aggregation
diff --git a/pyatlan/model/group.py b/pyatlan/model/group.py
index 20720bae2..a86c4ddbe 100644
--- a/pyatlan/model/group.py
+++ b/pyatlan/model/group.py
@@ -4,7 +4,7 @@
 from typing import Any, Optional

-from pydantic import Field
+from pydantic.v1 import Field

 from pyatlan.model.core import AtlanObject

@@ -12,47 +12,56 @@ class AtlanGroup(AtlanObject):
     class Attributes(AtlanObject):
         alias: Optional[list[str]] = Field(
-            description="Name of the group as it appears in the UI."
+            default=None, description="Name of the group as it appears in the UI."
         )
         created_at: Optional[list[str]] = Field(
-            description="Time (epoch) at which the group was created, in milliseconds."
+            default=None,
+            description="Time (epoch) at which the group was created, in milliseconds.",
         )
         created_by: Optional[list[str]] = Field(
-            description="User who created the group."
+            default=None, description="User who created the group."
         )
         updated_at: Optional[list[str]] = Field(
-            description="Time (epoch) at which the group was last updated, in milliseconds."
+            default=None,
+            description="Time (epoch) at which the group was last updated, in milliseconds.",
         )
         updated_by: Optional[list[str]] = Field(
-            description="User who last updated the group."
+            default=None, description="User who last updated the group."
         )
         description: Optional[list[str]] = Field(
-            description="Description of the group."
+            default=None, description="Description of the group."
         )
         is_default: Optional[list[str]] = Field(
-            description="Whether this group should be auto-assigned to all new users or not."
+            default=None,
+            description="Whether this group should be auto-assigned to all new users or not.",
         )
         channels: Optional[list[str]] = Field(
-            description="Slack channels for this group."
+            default=None, description="Slack channels for this group."
         )

     alias: Optional[str] = Field(
-        description="Name of the group as it appears in the UI."
+        default=None, description="Name of the group as it appears in the UI."
     )
     attributes: Optional[AtlanGroup.Attributes] = Field(
-        description="Detailed attributes of the group."
+        default=None, description="Detailed attributes of the group."
     )
-    decentralized_roles: Optional[list[Any]] = Field(description="TBC")
-    id: Optional[str] = Field(description="Unique identifier for the group (GUID).")
-    name: Optional[str] = Field(description="Unique (internal) name for the group.")
-    path: Optional[str] = Field(description="TBC")
+    decentralized_roles: Optional[list[Any]] = Field(default=None, description="TBC")
+    id: Optional[str] = Field(
+        default=None, description="Unique identifier for the group (GUID)."
+    )
+    name: Optional[str] = Field(
+        default=None, description="Unique (internal) name for the group."
+    )
+    path: Optional[str] = Field(default=None, description="TBC")
     personas: Optional[list[Any]] = Field(
-        description="Personas the group is associated with."
+        default=None, description="Personas the group is associated with."
     )
     purposes: Optional[list[Any]] = Field(
-        description="Purposes the group is associated with."
+        default=None, description="Purposes the group is associated with."
+    )
+    user_count: Optional[int] = Field(
+        default=None, description="Number of users in the group."
     )
-    user_count: Optional[int] = Field(description="Number of users in the group.")

     def is_default(self) -> bool:
         return (
@@ -116,23 +125,27 @@ class GroupResponse(AtlanObject):

 class CreateGroupRequest(AtlanObject):
     group: AtlanGroup = Field(description="Group to be created.")
     users: Optional[list[str]] = Field(
-        description="List of users (their GUIDs) to be included in the group."
+        default=None,
+        description="List of users (their GUIDs) to be included in the group.",
     )


 class RemoveFromGroupRequest(AtlanObject):
     users: Optional[list[str]] = Field(
-        description="List of users (their GUIDs) to remove from the group."
+        default=None,
+        description="List of users (their GUIDs) to remove from the group.",
     )


 class CreateGroupResponse(AtlanObject):
     class UserStatus(AtlanObject):
         status: Optional[int] = Field(
-            description="Response code for the association (200 is success)."
+            default=None,
+            description="Response code for the association (200 is success).",
         )
         status_message: Optional[str] = Field(
-            description="Status message for the association ('success' means the association was successful)."
+            default=None,
+            description="Status message for the association ('success' means the association was successful).",
         )

     def was_successful(self) -> bool:
@@ -144,5 +157,6 @@ def was_successful(self) -> bool:
         description="Unique identifier (GUID) of the group that was created."
     )
     users: Optional[dict[str, CreateGroupResponse.UserStatus]] = Field(
-        description="Map of user association statuses, keyed by unique identifier (GUID) of the user."
+        default=None,
+        description="Map of user association statuses, keyed by unique identifier (GUID) of the user.",
     )
diff --git a/pyatlan/model/keycloak_events.py b/pyatlan/model/keycloak_events.py
index 1f126381b..54d5f87b9 100644
--- a/pyatlan/model/keycloak_events.py
+++ b/pyatlan/model/keycloak_events.py
@@ -2,7 +2,7 @@
 # Copyright 2023 Atlan Pte. Ltd.
 from typing import Any, Generator, Optional

-from pydantic import Field, parse_obj_as
+from pydantic.v1 import Field, parse_obj_as

 from pyatlan.client.common import ApiCaller
 from pyatlan.client.constants import ADMIN_EVENTS, KEYCLOAK_EVENTS
@@ -12,30 +12,34 @@ class AuthDetails(AtlanObject):
     client_id: Optional[str] = Field(
-        description="Unique identifier (GUID) of the client that carried out the operation."
+        default=None,
+        description="Unique identifier (GUID) of the client that carried out the operation.",
     )
     ip_address: Optional[str] = Field(
-        description="IP address from which the operation was carried out."
+        default=None, description="IP address from which the operation was carried out."
     )
     realm_id: Optional[str] = Field(
-        description="Unique name of the realm from which the operation was carried out."
+        default=None,
+        description="Unique name of the realm from which the operation was carried out.",
     )
     user_id: Optional[str] = Field(
+        default=None,
         description="Unique identifier (GUID) of the user who carried out the operation.",
     )


 class KeycloakEvent(AtlanObject):
     client_id: Optional[str] = Field(
-        description="Where the login occurred (usually 'atlan-frontend')."
+        default=None, description="Where the login occurred (usually 'atlan-frontend')."
     )
     details: Any = Field(description="TBC")
     ip_address: Optional[str] = Field(
-        description="IP address from which the user logged in."
+        default=None, description="IP address from which the user logged in."
     )
-    realm_id: Optional[str] = Field(description="TBC")
+    realm_id: Optional[str] = Field(default=None, description="TBC")
     session_id: Optional[str] = Field(
-        description="Unique identifier (GUID) of the session for the login."
+        default=None,
+        description="Unique identifier (GUID) of the session for the login.",
     )
     time: Optional[int] = Field(
         description="Time (epoch) when the login occurred, in milliseconds."
@@ -44,56 +48,65 @@ class KeycloakEvent(AtlanObject):
         description="Type of login event that occurred (usually 'LOGIN')."
     )
     user_id: Optional[str] = Field(
-        description="Unique identifier (GUID) of the user that logged in."
+        default=None, description="Unique identifier (GUID) of the user that logged in."
     )


 class AdminEvent(AtlanObject):
     operation_type: Optional[AdminOperationType] = Field(
-        description="Type of admin operation that occurred."
+        default=None, description="Type of admin operation that occurred."
     )
     realm_id: Optional[str] = Field(
-        description="Unique identifier of the realm in which the event occurred (usually 'default')."
+        default=None,
+        description="Unique identifier of the realm in which the event occurred (usually 'default').",
     )
     representation: Optional[str] = Field(
-        description="Detailed resource that was created or changed as a result of the operation."
+        default=None,
+        description="Detailed resource that was created or changed as a result of the operation.",
     )
     resource_path: Optional[str] = Field(
-        description="Location of the resource that was created or changed as a result of the operation."
+        default=None,
+        description="Location of the resource that was created or changed as a result of the operation.",
     )
     resource_type: Optional[AdminResourceType] = Field(
-        description="Type of resource for the admin operation that occurred."
+        default=None,
+        description="Type of resource for the admin operation that occurred.",
     )
     time: Optional[int] = Field(
         description="Time (epoch) when the admin operation occurred, in milliseconds."
     )
     auth_details: Optional[AuthDetails] = Field(
-        description="Details of who carried out the operation."
+        default=None, description="Details of who carried out the operation."
     )


 class KeycloakEventRequest(AtlanObject):
-    client: Optional[str] = Field(description="Application or OAuth client name.")
+    client: Optional[str] = Field(
+        default=None, description="Application or OAuth client name."
+    )
     ip_address: Optional[str] = Field(
-        description="IP address from which the event was triggered."
+        default=None, description="IP address from which the event was triggered."
     )
     date_from: Optional[str] = Field(
-        description="Earliest date from which to include events (format: yyyy-MM-dd)."
+        default=None,
+        description="Earliest date from which to include events (format: yyyy-MM-dd).",
     )
     date_to: Optional[str] = Field(
-        description="Latest date up to which to include events (format: yyyy-MM-dd)."
+        default=None,
+        description="Latest date up to which to include events (format: yyyy-MM-dd).",
     )
     offset: Optional[int] = Field(
-        description="Starting point for the events (for paging)."
+        default=None, description="Starting point for the events (for paging)."
     )
     size: Optional[int] = Field(
-        description="Maximum number of events to retrieve (per page)."
+        default=None, description="Maximum number of events to retrieve (per page)."
     )
     types: Optional[list[KeycloakEventType]] = Field(
-        description="Include events only of the supplied types."
+        default=None, description="Include events only of the supplied types."
     )
     user_id: Optional[str] = Field(
-        description="Unique identifier (GUID) of the user who triggered the event."
+        default=None,
+        description="Unique identifier (GUID) of the user who triggered the event.",
     )

     @property
@@ -162,37 +175,44 @@ def __iter__(self) -> Generator[KeycloakEvent, None, None]:

 class AdminEventRequest(AtlanObject):
     client_id: Optional[str] = Field(
-        description="Unique identifier (GUID) of the client that carried out the operation."
+        default=None,
+        description="Unique identifier (GUID) of the client that carried out the operation.",
     )
     ip_address: Optional[str] = Field(
-        description="IP address from which the operation was carried out."
+        default=None, description="IP address from which the operation was carried out."
     )
     realm_id: Optional[str] = Field(
-        description="Unique name of the realm from which the operation was carried out."
+        default=None,
+        description="Unique name of the realm from which the operation was carried out.",
     )
     user_id: Optional[str] = Field(
-        description="Unique identifier (GUID) of the user who carried out the operation."
+        default=None,
+        description="Unique identifier (GUID) of the user who carried out the operation.",
     )
     date_from: Optional[str] = Field(
-        description="Earliest date from which to include events (format: yyyy-MM-dd)."
+        default=None,
+        description="Earliest date from which to include events (format: yyyy-MM-dd).",
     )
     date_to: Optional[str] = Field(
-        description="Latest date up to which to include events (format: yyyy-MM-dd)."
+        default=None,
+        description="Latest date up to which to include events (format: yyyy-MM-dd).",
     )
     offset: Optional[int] = Field(
-        description="Starting point for the events (for paging)."
+        default=None, description="Starting point for the events (for paging)."
     )
     size: Optional[int] = Field(
-        description="Maximum number of events to retrieve (per page)."
+        default=None, description="Maximum number of events to retrieve (per page)."
    )
     operation_types: Optional[list[AdminOperationType]] = Field(
-        description="Include events only with the supplied types of operations."
+        default=None,
+        description="Include events only with the supplied types of operations.",
     )
     resource_path: Optional[str] = Field(
-        description="Include events only against the supplied resource."
+        default=None, description="Include events only against the supplied resource."
     )
     resource_types: Optional[list[AdminResourceType]] = Field(
-        description="Include events only against the supplied types of resources."
+        default=None,
+        description="Include events only against the supplied types of resources.",
     )

     @property
diff --git a/pyatlan/model/lineage.py b/pyatlan/model/lineage.py
index 47dcd920c..be8397657 100644
--- a/pyatlan/model/lineage.py
+++ b/pyatlan/model/lineage.py
@@ -6,12 +6,12 @@
 from collections import deque
 from typing import TYPE_CHECKING, Any, Optional, Union

-from pydantic import Field, StrictBool, StrictInt, StrictStr, validate_arguments
+from pydantic.v1 import Field, StrictBool, StrictInt, StrictStr, validate_arguments

 if TYPE_CHECKING:
     from dataclasses import dataclass
 else:
-    from pydantic.dataclasses import dataclass
+    from pydantic.v1.dataclasses import dataclass

 from pyatlan.errors import ErrorCode
 from pyatlan.model.assets import Asset
@@ -263,29 +263,33 @@ class LineageListRequest(SearchRequest):
         "Note that you cannot fetch both upstream and downstream at the same time."
     )
     entity_filters: Optional[FilterList] = Field(
-        description="Filters to apply on entities."
+        default=None, description="Filters to apply on entities."
     )
     entity_traversal_filters: Optional[FilterList] = Field(
+        default=None,
         description="Filters to apply for skipping traversal based on entities."
         "Any sub-graphs beyond the entities filtered out by these filters will not be included"
-        "in the lineage result."
+        "in the lineage result.",
     )
     relationship_traversal_filters: Optional[FilterList] = Field(
+        default=None,
         description="Filters to apply for skipping traversal based on relationships."
         "Any sub-graphs beyond the relationships filtered out by these filters will not be included"
-        "in the lineage result."
+        "in the lineage result.",
     )
     offset: Optional[int] = Field(
-        description="Starting point for pagination.", alias="from"
+        default=None, description="Starting point for pagination.", alias="from"
     )
     size: Optional[int] = Field(
-        description="How many results to include in each page of results."
+        default=None, description="How many results to include in each page of results."
     )
     exclude_meanings: Optional[bool] = Field(
-        description="Whether to include assigned terms for assets (false) or not (true)."
+        default=None,
+        description="Whether to include assigned terms for assets (false) or not (true).",
     )
     exclude_classifications: Optional[bool] = Field(
-        description="Whether to include classifications for assets (false) or not (true)."
+        default=None,
+        description="Whether to include classifications for assets (false) or not (true).",
     )

     @staticmethod
@@ -306,7 +310,7 @@ def create(
             size=10,
             exclude_meanings=True,
             exclude_classifications=True,
-        )
+        )  # type: ignore[call-arg]


 class FluentLineage:
@@ -488,7 +492,7 @@ def request(self) -> LineageListRequest:
                 )
                 for _filter in self._includes_in_results
             ]
-            request.entity_filters = FilterList(condition="AND", criteria=criteria)
+            request.entity_filters = FilterList(condition="AND", criteria=criteria)  # type: ignore
             if self._includes_on_results:
                 request.attributes = [
                     field.atlan_field_name for field in self._includes_on_results
@@ -506,7 +510,7 @@ def request(self) -> LineageListRequest:
             ]
             request.entity_traversal_filters = FilterList(
                 condition="AND", criteria=criteria
-            )
+            )  # type: ignore[call-arg]
         if self._where_relationships:
             criteria = [
                 EntityFilter(
@@ -518,5 +522,5 @@
             ]
             request.relationship_traversal_filters = FilterList(
                 condition="AND", criteria=criteria
-            )
+            )  # type: ignore[call-arg]
         return request
diff --git a/pyatlan/model/packages/base/package.py b/pyatlan/model/packages/base/package.py
index bac5e43f5..d0906ab15 100644
--- a/pyatlan/model/packages/base/package.py
+++ b/pyatlan/model/packages/base/package.py
@@ -1,6 +1,6 @@
 from json import loads

-from pydantic import parse_obj_as
+from pydantic.v1 import parse_obj_as

 from pyatlan.model.credential import Credential
 from pyatlan.model.workflow import (
diff --git a/pyatlan/model/query.py b/pyatlan/model/query.py
index b4a82ce72..ea35cca30 100644
--- a/pyatlan/model/query.py
+++ b/pyatlan/model/query.py
@@ -4,7 +4,7 @@
 from typing import Any, Optional

-from pydantic import Field
+from pydantic.v1 import Field

 from pyatlan.model.core import AtlanObject
 from pyatlan.model.enums import (
@@ -17,70 +17,96 @@ class ParsedQuery(AtlanObject):
     class DatabaseColumn(AtlanObject):
-        id: Optional[str] = Field(description="Numeric identifier for the column.")
-        name: Optional[str] = Field(description="Name of the column (unqualified).")
-        source: Optional[str] = Field(description="TBC")
+        id: Optional[str] = Field(
+            default=None, description="Numeric identifier for the column."
+        )
+        name: Optional[str] = Field(
+            default=None, description="Name of the column (unqualified)."
+        )
+        source: Optional[str] = Field(default=None, description="TBC")

     class RelationshipEndpoint(AtlanObject):
         id: Optional[str] = Field(
-            description="Numeric identifier for the column referred to by this end of the relationship."
+            default=None,
+            description="Numeric identifier for the column referred to by this end of the relationship.",
         )
         column: Optional[str] = Field(
-            description="Name of the column used by this end of the relationship."
+ default=None, + description="Name of the column used by this end of the relationship.", ) parent_id: Optional[str] = Field( - description="Numeric identifier of the parent object in which the column exists." + default=None, + description="Numeric identifier of the parent object in which the column exists.", ) parent_name: Optional[str] = Field( - description="Name of the parent object in which the column exists." + default=None, + description="Name of the parent object in which the column exists.", ) class ParserError(AtlanObject): - error_message: Optional[str] = Field(description="Description of the error.") - error_type: Optional[str] = Field(description="Type of the error.") + error_message: Optional[str] = Field( + default=None, description="Description of the error." + ) + error_type: Optional[str] = Field( + default=None, description="Type of the error." + ) coordinates: Optional[list[Any]] = Field(description="TBC") class Relationship(AtlanObject): id: Optional[str] = Field( - description="Numeric identifier for the relationship." + default=None, description="Numeric identifier for the relationship." + ) + type: Optional[str] = Field( + default=None, description="Type of the relationship." ) - type: Optional[str] = Field(description="Type of the relationship.") effect_type: Optional[str] = Field( - description="Type of effect made by the query (for example, select vs insert)." + default=None, + description="Type of effect made by the query (for example, select vs insert).", ) target: Optional[ParsedQuery.RelationshipEndpoint] = Field(description="TBC") sources: Optional[list[ParsedQuery.RelationshipEndpoint]] = Field( description="TBC" ) process_id: Optional[str] = Field( - description="Numeric identifier for the procedure (if any) that manages this relationship." + default=None, + description="Numeric identifier for the procedure (if any) that manages this relationship.", ) process_type: Optional[str] = Field( - description="Type of procedure (if any) that manages this relationship." + default=None, + description="Type of procedure (if any) that manages this relationship.", ) class DatabaseObject(AtlanObject): display_name: Optional[str] = Field( - description="Fully-qualified name of the SQL object. (Only present on non-process objects.)" + default=None, + description="Fully-qualified name of the SQL object. (Only present on non-process objects.)", ) - id: Optional[str] = Field(description="Numeric identifier for the object.") - name: Optional[str] = Field(description="Name of the object (unqualified).") - type: Optional[str] = Field(description="Type of the object.") + id: Optional[str] = Field( + default=None, description="Numeric identifier for the object." + ) + name: Optional[str] = Field( + default=None, description="Name of the object (unqualified)." + ) + type: Optional[str] = Field(default=None, description="Type of the object.") database: Optional[str] = Field( - description="Name of the database the object exists within." + default=None, description="Name of the database the object exists within." ) db_schema: Optional[str] = Field( - description="Name of the schema the object exists within.", alias="schema" + default=None, + description="Name of the schema the object exists within.", + alias="schema", ) columns: Optional[list[ParsedQuery.DatabaseColumn]] = Field( description="List of details about the columns queried within the object." 
" (Only present on non-process objects.)" ) procedure_name: Optional[str] = Field( - description="Name of the procedure (only for process objects)." + default=None, + description="Name of the procedure (only for process objects).", ) query_hash_id: Optional[str] = Field( - description="Unique hash representing the query (only for process objects)." + default=None, + description="Unique hash representing the query (only for process objects).", ) dbobjs: Optional[list[ParsedQuery.DatabaseObject]] = Field( @@ -100,10 +126,12 @@ class QueryParserRequest(AtlanObject): description="Dialect to use when parsing the SQL." ) default_database: Optional[str] = Field( - description="Default database name to use for unqualified objects in the SQL." + default=None, + description="Default database name to use for unqualified objects in the SQL.", ) default_schema: Optional[str] = Field( - description="Default schema name to use for unqualified objects in the SQL." + default=None, + description="Default schema name to use for unqualified objects in the SQL.", ) link_orphan_column_to_first_table: Optional[bool] = Field(description="TBC") show_join: Optional[bool] = Field(description="TBC") @@ -178,19 +206,21 @@ def __init__(self, events: Optional[list[dict[str, Any]]] = None): self.details = last_event.get("details") request_id: Optional[str] = Field( - description="Unique identifier for the request, if there was any error." + default=None, + description="Unique identifier for the request, if there was any error.", ) error_name: Optional[str] = Field( - description="Unique name for the error, if there was any error." + default=None, description="Unique name for the error, if there was any error." ) error_message: Optional[str] = Field( - description="Explanation of the error, if there was any error." + default=None, description="Explanation of the error, if there was any error." ) error_code: Optional[str] = Field( - description="Unique code for the error, if there was any error." + default=None, description="Unique code for the error, if there was any error." ) query_id: Optional[str] = Field( - description="Unique identifier (GUID) for the specific run of the query." + default=None, + description="Unique identifier (GUID) for the specific run of the query.", ) rows: Optional[list[list[str]]] = Field( description="Results of the query. Each element is of " @@ -203,7 +233,7 @@ class ColumnType(AtlanObject): description="Unique identifier for the request, if there was any error." ) name: Optional[str] = Field( - description="SQL name of the data type for this column.." + default=None, description="SQL name of the data type for this column.." ) rep: Optional[str] @@ -220,26 +250,32 @@ class ColumnDetails(AtlanObject): nullable: Optional[int] = Field(description="TBC") signed: Optional[bool] = Field(description="TBC") display_size: Optional[int] = Field(description="TBC") - label: Optional[str] = Field(description="Display value for the column's name.") + label: Optional[str] = Field( + default=None, description="Display value for the column's name." + ) column_name: Optional[str] = Field( - description="Name of the column (technical)." + default=None, description="Name of the column (technical)." ) schema_name: Optional[str] = Field( - description="Name of the schema in which this column's table is contained." 
+ default=None, + description="Name of the schema in which this column's table is contained.", ) precision: Optional[int] = Field(description="TBC") scale: Optional[int] = Field(description="TBC") table_name: Optional[str] = Field( - description="Name of the table in which the column is contained." + default=None, + description="Name of the table in which the column is contained.", ) catalog_name: Optional[str] = Field( - description="Name of the database in which the table's schema is contained." + default=None, + description="Name of the database in which the table's schema is contained.", ) read_only: Optional[bool] = Field(description="TBC") writable: Optional[bool] = Field(description="TBC") definitely_writable: Optional[bool] = Field(description="TBC") column_class_name: Optional[str] = Field( - description="Canonical name of the Java class representing this column's values." + default=None, + description="Canonical name of the Java class representing this column's values.", ) type: Optional[QueryResponse.ColumnType] = Field( description="Details about the (SQL) data type of the column." @@ -253,16 +289,20 @@ class ColumnDetails(AtlanObject): class AssetDetails(AtlanObject): connection_name: Optional[str] = Field( - description="Simple name of the connection." + default=None, description="Simple name of the connection." ) connection_qn: Optional[str] = Field( - description="Unique name of the connection." + default=None, description="Unique name of the connection." + ) + database: Optional[str] = Field( + default=None, description="Simple name of the database." ) - database: Optional[str] = Field(description="Simple name of the database.") schema_: Optional[str] = Field( - alias="schema", description="Simple name of the schema." + default=None, alias="schema", description="Simple name of the schema." + ) + table: Optional[str] = Field( + default=None, description="Simple name of the table." ) - table: Optional[str] = Field(description="Simple name of the table.") class QueryDetails(AtlanObject): """ @@ -273,23 +313,24 @@ class QueryDetails(AtlanObject): description="Total number of results returned by the query." ) status: Optional[QueryStatus] = Field(description="Status of the query.") - parsed_query: Optional[str] = Field(description="TBC") + parsed_query: Optional[str] = Field(default=None, description="TBC") pushdown_query: Optional[str] = Field( - description="Query that was sent to the data store." + default=None, description="Query that was sent to the data store." ) execution_time: Optional[int] = Field( description="How long the query took to run, in milliseconds." ) - source_query_id: Optional[str] = Field(description="TBC") - result_output_location: Optional[str] = Field(description="TBC") + source_query_id: Optional[str] = Field(default=None, description="TBC") + result_output_location: Optional[str] = Field(default=None, description="TBC") warnings: Optional[list[str]] = Field( - description="List of any warnings produced when running the query." + default=None, + description="List of any warnings produced when running the query.", ) parsing_flow: Optional[ParsingFlow] = Field( description="How the query was parsed prior to running." 
) heka_flow: Optional[HekaFlow] = Field(description="How the query was run.") - s3_upload_path: Optional[str] = Field(description="TBC") + s3_upload_path: Optional[str] = Field(default=None, description="TBC") source_first_connection_time: Optional[int] = Field(description="TBC") source_first_connection_time_perc: Optional[float] = Field(description="TBC") explain_call_time_perc: Optional[float] = Field(description="TBC") @@ -324,7 +365,8 @@ class QueryDetails(AtlanObject): description="Metadata about the asset used in the query, in case of any errors." ) developer_message: Optional[str] = Field( - description="Detailed back-end error message that could be helpful for developers." + default=None, + description="Detailed back-end error message that could be helpful for developers.", ) line: Optional[int] = Field( description="Line number of the query that had a validation error, if any." @@ -333,7 +375,8 @@ class QueryDetails(AtlanObject): description="Column position of the validation error, if any." ) obj: Optional[str] = Field( - description="Name of the object that caused the validation error, if any." + default=None, + description="Name of the object that caused the validation error, if any.", ) details: Optional[QueryResponse.QueryDetails] = Field( diff --git a/pyatlan/model/response.py b/pyatlan/model/response.py index dcd7c9240..eb125442a 100644 --- a/pyatlan/model/response.py +++ b/pyatlan/model/response.py @@ -3,7 +3,7 @@ # Based on original code from https://github.com/apache/atlas (under Apache-2.0 license) from typing import Optional, Type, TypeVar -from pydantic import Field +from pydantic.v1 import Field from pyatlan.model.assets import Asset from pyatlan.model.core import AtlanObject @@ -11,25 +11,25 @@ class MutatedEntities(AtlanObject): CREATE: Optional[list[Asset]] = Field( - None, + default=None, description="Assets that were created. The detailed properties of the returned asset will vary based on the " "type of asset, but listed in the example are the common set of properties across assets.", alias="CREATE", ) UPDATE: Optional[list[Asset]] = Field( - None, + default=None, description="Assets that were assets_updated. The detailed properties of the returned asset will vary based on" " the type of asset, but listed in the example are the common set of properties across assets.", alias="UPDATE", ) DELETE: Optional[list[Asset]] = Field( - None, + default=None, description="Assets that were deleted. The detailed properties of the returned asset will vary based on the " "type of asset, but listed in the example are the common set of properties across assets.", alias="DELETE", ) PARTIAL_UPDATE: Optional[list[Asset]] = Field( - None, + default=None, description="Assets that were partially updated. The detailed properties of the returned asset will " "vary based on the type of asset, but listed in the example are the common set of properties across assets.", alias="DELETE", @@ -41,13 +41,14 @@ class MutatedEntities(AtlanObject): class AssetMutationResponse(AtlanObject): guid_assignments: Optional[dict[str, str]] = Field( - description="Map of assigned unique identifiers for the changed assets." + default=None, + description="Map of assigned unique identifiers for the changed assets.", ) mutated_entities: Optional[MutatedEntities] = Field( - None, description="Assets that were changed." + default=None, description="Assets that were changed." 
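
The response-model hunk above is typical of the bulk of this diff: bare positional `None` defaults become the keyword form, and `Optional[...]` fields that previously had no default at all gain an explicit `default=None`. Under pydantic v1 an `Optional` annotation without a default already behaves as `None` at runtime, so the change is about making the default visible to readers and to static analysis rather than about behaviour. A small sketch under that assumption (illustrative model, not pyatlan code):

    from typing import Optional

    from pydantic.v1 import BaseModel, Field


    class MutationSketch(BaseModel):
        # Implicit: pydantic v1 infers a None default from the Optional annotation.
        guid: Optional[str] = Field(description="Assigned GUID, if any.")
        # Explicit: the default is spelled out, so the field is clearly not
        # required when reading the model or its generated __init__ signature.
        status: Optional[str] = Field(default=None, description="Mutation status.")


    m = MutationSketch()
    assert m.guid is None and m.status is None  # same runtime result either way
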
     )
     partial_updated_entities: Optional[list[Asset]] = Field(
-        None, description="Assets that were partially updated"
+        default=None, description="Assets that were partially updated"
     )

     def assets_created(self, asset_type: Type[A]) -> list[A]:
diff --git a/pyatlan/model/role.py b/pyatlan/model/role.py
index ddfdfb7a9..67788fdb8 100644
--- a/pyatlan/model/role.py
+++ b/pyatlan/model/role.py
@@ -4,7 +4,7 @@
 from typing import Optional

-from pydantic import Field
+from pydantic.v1 import Field

 from pyatlan.model.core import AtlanObject
@@ -13,17 +13,21 @@ class AtlanRole(AtlanObject):
     id: str = Field(description="Unique identifier for the role (GUID).\n")
     """Unique identifier for the role (GUID)."""
     name: str = Field(description="Unique name for the role.\n")
-    description: Optional[str] = Field(None, description="Description of the role.\n")
-    client_role: Optional[bool] = Field(None, description="TBC\n")
-    level: Optional[str] = Field(None, description="TBC\n")
+    description: Optional[str] = Field(
+        default=None, description="Description of the role.\n"
+    )
+    client_role: Optional[bool] = Field(default=None, description="TBC\n")
+    level: Optional[str] = Field(default=None, description="TBC\n")
     member_count: Optional[str] = Field(
-        None, description="Number of users with this role.\n"
+        default=None, description="Number of users with this role.\n"
     )
-    user_count: Optional[str] = Field(None, description="TBC\n")
+    user_count: Optional[str] = Field(default=None, description="TBC\n")


 class RoleResponse(AtlanObject):
-    total_record: Optional[int] = Field(None, description="Total number of roles.\n")
+    total_record: Optional[int] = Field(
+        default=None, description="Total number of roles.\n"
+    )
     filter_record: Optional[int] = Field(
         None,
         description="Number of roles in the filtered response.\n",
diff --git a/pyatlan/model/search.py b/pyatlan/model/search.py
index ccebc2283..39ed099a2 100644
--- a/pyatlan/model/search.py
+++ b/pyatlan/model/search.py
@@ -9,7 +9,7 @@
 from json import dumps, loads
 from typing import Any, Literal, Optional, Union

-from pydantic import (
+from pydantic.v1 import (
     ConfigDict,
     Field,
     StrictBool,
@@ -20,8 +20,8 @@
     validate_arguments,
     validator,
 )
-from pydantic.config import Extra
-from pydantic.dataclasses import dataclass
+from pydantic.v1.config import Extra
+from pydantic.v1.dataclasses import dataclass

 from pyatlan.model.aggregation import Aggregation
 from pyatlan.model.core import AtlanObject, SearchRequest
@@ -1800,11 +1800,11 @@ class DSL(AtlanObject):
     from_: int = Field(0, alias="from")
     size: int = 100
     aggregations: dict[str, Aggregation] = Field(default_factory=dict)
-    track_total_hits: bool = Field(True, alias="track_total_hits")
-    post_filter: Optional[Query] = Field(alias="post_filter")
+    track_total_hits: bool = Field(default=True, alias="track_total_hits")
+    post_filter: Optional[Query] = Field(default=None, alias="post_filter")
     query: Optional[Query]
-    req_class_name: Optional[str] = Field(exclude=True)
-    sort: list[SortItem] = Field(alias="sort", default_factory=list)
+    req_class_name: Optional[str] = Field(default=None, exclude=True)
+    sort: list[SortItem] = Field(default_factory=list, alias="sort")

     class Config:
         json_encoders = {Query: lambda v: v.to_dict(), SortItem: lambda v: v.to_dict()}
@@ -1859,7 +1859,7 @@ class IndexSearchRequest(SearchRequest):
     relation_attributes: list[str] = Field(
         default_factory=list, alias="relationAttributes"
     )
-    suppress_logs: Optional[bool] = Field(alias="suppressLogs")
+    suppress_logs: Optional[bool] = Field(default=None, alias="suppressLogs")
     show_search_score: Optional[bool] = Field(
         description="When true, include the score of each result. By default, this is false and scores are excluded.",
         alias="showSearchScore",
diff --git a/pyatlan/model/search_log.py b/pyatlan/model/search_log.py
index ad12a0b05..5785c0e62 100644
--- a/pyatlan/model/search_log.py
+++ b/pyatlan/model/search_log.py
@@ -1,7 +1,7 @@
 from datetime import datetime
 from typing import Any, Generator, Iterable, Optional

-from pydantic import Field, ValidationError, parse_obj_as
+from pydantic.v1 import Field, ValidationError, parse_obj_as

 from pyatlan.client.common import ApiCaller
 from pyatlan.client.constants import SEARCH_LOG
diff --git a/pyatlan/model/structs.py b/pyatlan/model/structs.py
index da30e5a79..83ebf6923 100644
--- a/pyatlan/model/structs.py
+++ b/pyatlan/model/structs.py
@@ -4,7 +4,7 @@
 from datetime import datetime
 from typing import Optional, Union

-from pydantic import BaseModel, Extra, Field
+from pydantic.v1 import BaseModel, Extra, Field

 from pyatlan.model.enums import (
     BadgeComparisonOperator,
@@ -27,10 +27,14 @@ class Config:
 class MCRuleSchedule(AtlanObject):
     """Description"""

-    mc_rule_schedule_type: Optional[str] = Field(None, description="")
-    mc_rule_schedule_interval_in_minutes: Optional[int] = Field(None, description="")
-    mc_rule_schedule_start_time: Optional[datetime] = Field(None, description="")
-    mc_rule_schedule_crontab: Optional[str] = Field(None, description="")
+    mc_rule_schedule_type: Optional[str] = Field(default=None, description="")
+    mc_rule_schedule_interval_in_minutes: Optional[int] = Field(
+        default=None, description=""
+    )
+    mc_rule_schedule_start_time: Optional[datetime] = Field(
+        default=None, description=""
+    )
+    mc_rule_schedule_crontab: Optional[str] = Field(default=None, description="")


 class AwsCloudWatchMetric(AtlanObject):
@@ -40,6 +44,15 @@ class AwsCloudWatchMetric(AtlanObject):
     aws_cloud_watch_metric_scope: str = Field(description="")


+class KafkaTopicConsumption(AtlanObject):
+    """Description"""
+
+    topic_name: Optional[str] = Field(default=None, description="")
+    topic_partition: Optional[str] = Field(default=None, description="")
+    topic_lag: Optional[int] = Field(default=None, description="")
+    topic_current_offset: Optional[int] = Field(default=None, description="")
+
+
 class Histogram(AtlanObject):
     """Description"""

@@ -47,20 +60,33 @@ class Histogram(AtlanObject):
     frequencies: set[float] = Field(description="")


-class KafkaTopicConsumption(AtlanObject):
+class ColumnValueFrequencyMap(AtlanObject):
     """Description"""

-    topic_name: Optional[str] = Field(None, description="")
-    topic_partition: Optional[str] = Field(None, description="")
-    topic_lag: Optional[int] = Field(None, description="")
-    topic_current_offset: Optional[int] = Field(None, description="")
+    column_value: Optional[str] = Field(default=None, description="")
+    column_value_frequency: Optional[int] = Field(default=None, description="")


-class ColumnValueFrequencyMap(AtlanObject):
+class SourceTagAttachmentValue(AtlanObject):
     """Description"""

-    column_value: Optional[str] = Field(None, description="")
-    column_value_frequency: Optional[int] = Field(None, description="")
+    tag_attachment_key: Optional[str] = Field(default=None, description="")
+    tag_attachment_value: Optional[str] = Field(default=None, description="")
+
+
+class SourceTagAttachment(AtlanObject):
+    """Description"""
+
+    source_tag_name: Optional[str] = Field(default=None, description="")
+    source_tag_qualified_name: Optional[str] = Field(default=None, description="")
+    source_tag_guid: Optional[str] = Field(default=None, description="")
+    source_tag_connector_name: Optional[str] = Field(default=None, description="")
+    source_tag_value: Optional[list[SourceTagAttachmentValue]] = Field(
+        default=None, description=""
+    )
+    is_source_tag_synced: Optional[bool] = Field(default=None, description="")
+    source_tag_sync_timestamp: Optional[datetime] = Field(default=None, description="")
+    source_tag_sync_error: Optional[str] = Field(default=None, description="")


 class BadgeCondition(AtlanObject):
@@ -91,31 +117,9 @@ def create(
             else badge_condition_colorhex,
         )

-    badge_condition_operator: Optional[str] = Field(None, description="")
-    badge_condition_value: Optional[str] = Field(None, description="")
-    badge_condition_colorhex: Optional[str] = Field(None, description="")
-
-
-class SourceTagAttachmentValue(AtlanObject):
-    """Description"""
-
-    tag_attachment_key: Optional[str] = Field(None, description="")
-    tag_attachment_value: Optional[str] = Field(None, description="")
-
-
-class SourceTagAttachment(AtlanObject):
-    """Description"""
-
-    source_tag_name: Optional[str] = Field(None, description="")
-    source_tag_qualified_name: Optional[str] = Field(None, description="")
-    source_tag_guid: Optional[str] = Field(None, description="")
-    source_tag_connector_name: Optional[str] = Field(None, description="")
-    source_tag_value: Optional[list[SourceTagAttachmentValue]] = Field(
-        None, description=""
-    )
-    is_source_tag_synced: Optional[bool] = Field(None, description="")
-    source_tag_sync_timestamp: Optional[datetime] = Field(None, description="")
-    source_tag_sync_error: Optional[str] = Field(None, description="")
+    badge_condition_operator: Optional[str] = Field(default=None, description="")
+    badge_condition_value: Optional[str] = Field(default=None, description="")
+    badge_condition_colorhex: Optional[str] = Field(default=None, description="")


 class AzureTag(AtlanObject):
@@ -128,8 +132,8 @@ class AzureTag(AtlanObject):
 class StarredDetails(AtlanObject):
     """Description"""

-    asset_starred_by: Optional[str] = Field(None, description="")
-    asset_starred_at: Optional[datetime] = Field(None, description="")
+    asset_starred_by: Optional[str] = Field(default=None, description="")
+    asset_starred_at: Optional[datetime] = Field(default=None, description="")


 class AuthPolicyCondition(AtlanObject):
@@ -156,10 +160,12 @@ class GoogleTag(AtlanObject):
 class DbtMetricFilter(AtlanObject):
     """Description"""

-    dbt_metric_filter_column_qualified_name: Optional[str] = Field(None, description="")
-    dbt_metric_filter_field: Optional[str] = Field(None, description="")
-    dbt_metric_filter_operator: Optional[str] = Field(None, description="")
-    dbt_metric_filter_value: Optional[str] = Field(None, description="")
+    dbt_metric_filter_column_qualified_name: Optional[str] = Field(
+        default=None, description=""
+    )
+    dbt_metric_filter_field: Optional[str] = Field(default=None, description="")
+    dbt_metric_filter_operator: Optional[str] = Field(default=None, description="")
+    dbt_metric_filter_value: Optional[str] = Field(default=None, description="")


 class AuthPolicyValiditySchedule(AtlanObject):
@@ -173,13 +179,13 @@ class AuthPolicyValiditySchedule(AtlanObject):
 class MCRuleComparison(AtlanObject):
     """Description"""

-    mc_rule_comparison_type: Optional[str] = Field(None, description="")
-    mc_rule_comparison_field: Optional[str] = Field(None, description="")
-    mc_rule_comparison_metric: Optional[str] = Field(None, description="")
-    mc_rule_comparison_operator: Optional[str] = Field(None, description="")
-    mc_rule_comparison_threshold: Optional[float] = Field(None, description="")
+    mc_rule_comparison_type: Optional[str] = Field(default=None, description="")
+    mc_rule_comparison_field: Optional[str] = Field(default=None, description="")
+    mc_rule_comparison_metric: Optional[str] = Field(default=None, description="")
+    mc_rule_comparison_operator: Optional[str] = Field(default=None, description="")
+    mc_rule_comparison_threshold: Optional[float] = Field(default=None, description="")
     mc_rule_comparison_is_threshold_relative: Optional[bool] = Field(
-        None, description=""
+        default=None, description=""
     )
@@ -193,42 +199,46 @@ class GoogleLabel(AtlanObject):
 class PopularityInsights(AtlanObject):
     """Description"""

-    record_user: Optional[str] = Field(None, description="")
-    record_query: Optional[str] = Field(None, description="")
-    record_query_duration: Optional[int] = Field(None, description="")
-    record_query_count: Optional[int] = Field(None, description="")
-    record_total_user_count: Optional[int] = Field(None, description="")
-    record_compute_cost: Optional[float] = Field(None, description="")
-    record_max_compute_cost: Optional[float] = Field(None, description="")
-    record_compute_cost_unit: Optional[SourceCostUnitType] = Field(None, description="")
-    record_last_timestamp: Optional[datetime] = Field(None, description="")
-    record_warehouse: Optional[str] = Field(None, description="")
+    record_user: Optional[str] = Field(default=None, description="")
+    record_query: Optional[str] = Field(default=None, description="")
+    record_query_duration: Optional[int] = Field(default=None, description="")
+    record_query_count: Optional[int] = Field(default=None, description="")
+    record_total_user_count: Optional[int] = Field(default=None, description="")
+    record_compute_cost: Optional[float] = Field(default=None, description="")
+    record_max_compute_cost: Optional[float] = Field(default=None, description="")
+    record_compute_cost_unit: Optional[SourceCostUnitType] = Field(
+        default=None, description=""
+    )
+    record_last_timestamp: Optional[datetime] = Field(default=None, description="")
+    record_warehouse: Optional[str] = Field(default=None, description="")


 class SourceTagAttribute(AtlanObject):
     """Description"""

-    tag_attribute_key: Optional[str] = Field(None, description="")
-    tag_attribute_value: Optional[str] = Field(None, description="")
-    tag_attribute_properties: Optional[dict[str, str]] = Field(None, description="")
+    tag_attribute_key: Optional[str] = Field(default=None, description="")
+    tag_attribute_value: Optional[str] = Field(default=None, description="")
+    tag_attribute_properties: Optional[dict[str, str]] = Field(
+        default=None, description=""
+    )


 MCRuleSchedule.update_forward_refs()

 AwsCloudWatchMetric.update_forward_refs()

-Histogram.update_forward_refs()
-
 KafkaTopicConsumption.update_forward_refs()

-ColumnValueFrequencyMap.update_forward_refs()
+Histogram.update_forward_refs()

-BadgeCondition.update_forward_refs()
+ColumnValueFrequencyMap.update_forward_refs()

 SourceTagAttachmentValue.update_forward_refs()

 SourceTagAttachment.update_forward_refs()

+BadgeCondition.update_forward_refs()
+
 AzureTag.update_forward_refs()

 StarredDetails.update_forward_refs()
diff --git a/pyatlan/model/typedef.py b/pyatlan/model/typedef.py
index 9793dbfd9..a5f931173 100644
--- a/pyatlan/model/typedef.py
+++ b/pyatlan/model/typedef.py
@@ -4,7 +4,7 @@
 import time
 from typing import Any, Callable, ClassVar, Optional, cast

-from pydantic import Field
+from pydantic.v1 import Field

 from pyatlan.errors import ErrorCode
 from pyatlan.model.atlan_image import AtlanImage
@@ -167,35 +167,40 @@ def _get_all_qualified_names(asset_type: str) -> set[str]:
 class TypeDef(AtlanObject):
     category: AtlanTypeCategory = Field(description="Type of the type definition.")
     create_time: Optional[int] = Field(
-        None,
+        default=None,
         description="Time (epoch) at which this object was created, in milliseconds.",
     )
     created_by: Optional[str] = Field(
-        description="Username of the user who created the object."
+        default=None, description="Username of the user who created the object."
     )
     description: Optional[str] = Field(
-        description="Description of the type definition."
+        default=None, description="Description of the type definition."
     )
     guid: Optional[str] = Field(
-        description="Unique identifier that represents the type definition."
+        default=None,
+        description="Unique identifier that represents the type definition.",
     )
-    name: str = Field(description="Unique name of this type definition.")
-    type_version: Optional[str] = Field(description="Internal use only.")
+    name: str = Field(default=None, description="Unique name of this type definition.")
+    type_version: Optional[str] = Field(default=None, description="Internal use only.")
     update_time: Optional[int] = Field(
-        description="Time (epoch) at which this object was last assets_updated, in milliseconds."
+        default=None,
+        description="Time (epoch) at which this object was last assets_updated, in milliseconds.",
     )
     updated_by: Optional[str] = Field(
-        description="Username of the user who last assets_updated the object."
+        default=None,
+        description="Username of the user who last assets_updated the object.",
+    )
+    version: Optional[int] = Field(
+        default=None, description="Version of this type definition."
     )
-    version: Optional[int] = Field(description="Version of this type definition.")


 class EnumDef(TypeDef):
     class ElementDef(AtlanObject):
         value: str = Field(description="One unique value within the enumeration.")
-        description: Optional[str] = Field(description="Unused.")
+        description: Optional[str] = Field(default=None, description="Unused.")
         ordinal: Optional[int] = Field(
-            description="Unique numeric identifier for the value."
+            default=None, description="Unique numeric identifier for the value."
         )

         @staticmethod
@@ -228,9 +233,9 @@ def list_from(values: list[str]) -> list[EnumDef.ElementDef]:
         description="Valid values for the enumeration."
     )
     options: Optional[dict[str, Any]] = Field(
-        description="Optional properties of the type definition."
+        default=None, description="Optional properties of the type definition."
     )
-    service_type: Optional[str] = Field(description="Internal use only.")
+    service_type: Optional[str] = Field(default=None, description="Internal use only.")

     @staticmethod
     def create(name: str, values: list[str]) -> EnumDef:
@@ -262,75 +267,95 @@ class Options(AtlanObject):
             default="v2",
         )
         description: Optional[str] = Field(
-            description="Optional description of the attribute."
+            default=None, description="Optional description of the attribute."
         )
         applicable_entity_types: Optional[str] = Field(
+            default=None,
             description="Set of entities on which this attribute can be applied. "
             "Note: generally this should be left as-is. Any overrides should instead be applied through "
             "one or more of applicable_asset_types}, applicable_glossary_types}, or "
             "applicable_other_asset_types}.",
         )
         custom_applicable_entity_types: Optional[str] = Field(
+            default=None,
             description="Set of entities on which this attribute should appear."
             "Deprecated: see applicable_asset_types, applicable_glossary_types and "
             "applicable_other_asset_types",
         )
         allow_search: Optional[bool] = Field(
+            default=None,
             description="Whether the attribute should be searchable (true) or not (false).",
         )
         max_str_length: Optional[str] = Field(
+            default=None,
             description="Maximum length allowed for a string value.",
         )
         allow_filtering: Optional[bool] = Field(
+            default=None,
             description="Whether this attribute should appear in the filterable facets of discovery (true) or not "
             "(false).",
         )
         multi_value_select: Optional[bool] = Field(
+            default=None,
             description="Whether this attribute can have multiple values (true) or only a single value (false).",
         )
         show_in_overview: Optional[bool] = Field(
+            default=None,
             description="Whether users will see this attribute in the overview tab of the sidebar (true) or not "
             "(false).",
         )
         is_deprecated: Optional[str] = Field(
-            description="Whether the attribute is deprecated ('true') or not (None or 'false')."
+            default=None,
+            description="Whether the attribute is deprecated ('true') or not (None or 'false').",
         )
         is_enum: Optional[bool] = Field(
+            default=None,
             description="Whether the attribute is an enumeration (true) or not (None or false).",
         )
         enum_type: Optional[str] = Field(
-            description="Name of the enumeration (options), when the attribute is an enumeration."
+            default=None,
+            description="Name of the enumeration (options), when the attribute is an enumeration.",
        )
         custom_type: Optional[str] = Field(
-            description="Used for Atlan-specific types like `users`, `groups`, `url`, and `SQL`."
+            default=None,
+            description="Used for Atlan-specific types like `users`, `groups`, `url`, and `SQL`.",
         )
         has_time_precision: Optional[bool] = Field(
+            default=None,
             description="If true for a date attribute, then time-level precision is also available in the UI "
-            "(otherwise only date-level)"
+            "(otherwise only date-level)",
         )
         is_archived: Optional[bool] = Field(
-            description="Whether the attribute has been deleted (true) or is still active (false)."
+            default=None,
+            description="Whether the attribute has been deleted (true) or is still active (false).",
         )
         archived_at: Optional[int] = Field(
-            description="When the attribute was deleted."
+            default=None, description="When the attribute was deleted."
         )
         archived_by: Optional[str] = Field(
-            description="User who deleted the attribute."
+            default=None, description="User who deleted the attribute."
+        )
+        is_soft_reference: Optional[str] = Field(default=None, description="TBC")
+        is_append_on_partial_update: Optional[str] = Field(
+            default=None, description="TBC"
+        )
+        primitive_type: Optional[str] = Field(
+            default=None, description="Type of the attribute."
         )
-        is_soft_reference: Optional[str] = Field(description="TBC")
-        is_append_on_partial_update: Optional[str] = Field(description="TBC")
-        primitive_type: Optional[str] = Field(description="Type of the attribute.")
         applicable_connections: Optional[str] = Field(
+            default=None,
             description="Qualified names of connections to which to restrict the attribute. "
             "Only assets within one of these connections will have this attribute available. "
-            "To further restrict the types of assets within the connections, see applicable_asset_types."
+            "To further restrict the types of assets within the connections, see applicable_asset_types.",
         )
         applicable_glossaries: Optional[str] = Field(
+            default=None,
             description="Qualified names of glossaries to which to restrict the attribute. "
             "Only glossary assets within one of these glossaries will have this attribute available. "
-            "To further restrict the types of assets within the glossaries, see applicable_glossary_types."
+            "To further restrict the types of assets within the glossaries, see applicable_glossary_types.",
         )
         applicable_asset_types: Optional[str] = Field(
+            default=None,
             alias="assetTypesList",
             description="Asset type names to which to restrict the attribute. "
             "Only assets of one of these types will have this attribute available. "
@@ -338,6 +363,7 @@ class Options(AtlanObject):
             "connection, see applicable_connections. ",
         )
         applicable_glossary_types: Optional[str] = Field(
+            default=None,
             alias="glossaryTypeList",
             description="Glossary type names to which to restrict the attribute. "
             "Only glossary assets of one of these types will have this attribute available. "
@@ -345,6 +371,7 @@ class Options(AtlanObject):
             "custom metadata by glossary, see applicable_glossaries.",
         )
         applicable_other_asset_types: Optional[str] = Field(
+            default=None,
             alias="otherAssetTypeList",
             description="Any other asset type names to which to restrict the attribute. "
             "These cover any asset type that is not managed within a connection or a glossary. "
@@ -387,71 +414,84 @@ def create(
             return options

     is_new: Optional[bool] = Field(
+        default=None,
         description="Whether the attribute is being newly created (true) or not (false).",
     )
     cardinality: Optional[Cardinality] = Field(
+        default=None,
         description="Whether the attribute allows a single or multiple values. In the case of multiple values, "
         "`LIST` indicates they are ordered and duplicates are allowed, while `SET` indicates "
         "they are unique and unordered.",
     )
     constraints: Optional[list[dict[str, Any]]] = Field(
-        description="Internal use only."
+        default=None, description="Internal use only."
     )
     enum_values: Optional[list[str]] = Field(
-        description="list of values for an enumeration."
+        default=None, description="list of values for an enumeration."
     )
     description: Optional[str] = Field(
-        description="Description of the attribute definition."
+        default=None, description="Description of the attribute definition."
     )
     default_value: Optional[str] = Field(
-        description="Default value for this attribute (if any)."
+        default=None, description="Default value for this attribute (if any)."
     )
     display_name: Optional[str] = Field(
+        default=None,
         description="Name to use within all user interactions through the user interface. Note that this may not "
         "be the same name used to update or interact with the attribute through API operations, for "
         "that see the `name` property. (This property can be used instead of `name` for the creation "
-        "of an attribute definition as well.)"
+        "of an attribute definition as well.)",
     )
     name: Optional[str] = Field(
+        default=None,
         description="Unique name of this attribute definition. When provided during creation, this should be the "
         "human-readable name for the attribute. When returned (or provided for an update) this will be "
         "the static-hashed name that Atlan uses internally. (This is to allow the name to be changed "
         "by the user without impacting existing instances of the attribute.)",
     )
-    include_in_notification: Optional[bool] = Field(description="TBC")
-    index_type: Optional[IndexType] = Field(description="", example="DEFAULT")
+    include_in_notification: Optional[bool] = Field(default=None, description="TBC")
+    index_type: Optional[IndexType] = Field(
+        default=None, description="", example="DEFAULT"
+    )
     is_indexable: Optional[bool] = Field(
+        default=None,
         description="When true, values for this attribute will be indexed for searching.",
     )
     is_optional: Optional[bool] = Field(
+        default=None,
         description="When true, a value will not be required for this attribute.",
     )
     is_unique: Optional[bool] = Field(
+        default=None,
         description="When true, this attribute must be unique across all assets.",
     )
     options: Optional[AttributeDef.Options] = Field(
-        description="Extensible options for the attribute."
+        default=None, description="Extensible options for the attribute."
     )
-    search_weight: Optional[float] = Field(description="TBC")
+    search_weight: Optional[float] = Field(default=None, description="TBC")
     skip_scrubbing: Optional[bool] = Field(
-        description="When true, scrubbing of data will be skipped."
+        default=None, description="When true, scrubbing of data will be skipped."
+    )
+    type_name: Optional[str] = Field(
+        default=None, description="Type of this attribute."
    )
-    type_name: Optional[str] = Field(description="Type of this attribute.")
     values_min_count: Optional[float] = Field(
+        default=None,
         description="Minimum number of values for this attribute. If greater than 0, this attribute "
         "becomes required.",
     )
     values_max_count: Optional[float] = Field(
+        default=None,
         description="Maximum number of values for this attribute. If greater than 1, this attribute allows "
         "multiple values.",
     )
     index_type_es_config: Optional[dict[str, str]] = Field(
-        description="Internal use only.", alias="indexTypeESConfig"
+        default=None, description="Internal use only.", alias="indexTypeESConfig"
     )
     index_type_es_fields: Optional[dict[str, dict[str, str]]] = Field(
-        description="Internal use only.", alias="indexTypeESFields"
+        default=None, description="Internal use only.", alias="indexTypeESFields"
     )
-    is_default_value_null: Optional[bool] = Field(description="TBC")
+    is_default_value_null: Optional[bool] = Field(default=None, description="TBC")

     def __setattr__(self, name, value):
         if name in AttributeDef._convenience_properties:
@@ -680,49 +720,56 @@ def archive(self, by: str) -> AttributeDef:

 class RelationshipAttributeDef(AttributeDef):
-    is_legacy_attribute: Optional[bool] = Field(description="Unused.")
+    is_legacy_attribute: Optional[bool] = Field(default=None, description="Unused.")
     relationship_type_name: Optional[str] = Field(
-        description="Name of the relationship type."
+        default=None, description="Name of the relationship type."
     )


 class StructDef(TypeDef):
     category: AtlanTypeCategory = AtlanTypeCategory.STRUCT
     attribute_defs: Optional[list[AttributeDef]] = Field(
-        None,
+        default=None,
         description="list of attributes that should be available in the type_ definition.",
     )
     service_type: Optional[str] = Field(
-        None, description="Internal use only.", example="atlan"
+        default=None, description="Internal use only.", example="atlan"
     )


 class AtlanTagDef(TypeDef):
-    attribute_defs: Optional[list[AttributeDef]] = Field(description="Unused.")
+    attribute_defs: Optional[list[AttributeDef]] = Field(
+        default=None, description="Unused."
+    )
     category: AtlanTypeCategory = AtlanTypeCategory.CLASSIFICATION
     display_name: str = Field(
-        description="Name used for display purposes (in user interfaces)."
+        default=None, description="Name used for display purposes (in user interfaces)."
     )
     entity_types: Optional[list[str]] = Field(
+        default=None,
         description="A list of the entity types that this classification can be used against."
-        " (This should be `Asset` to allow classification of any asset in Atlan.)"
+        " (This should be `Asset` to allow classification of any asset in Atlan.)",
     )
     options: Optional[dict[str, Any]] = Field(
-        description="Optional properties of the type_ definition."
+        default=None, description="Optional properties of the type_ definition."
     )
     sub_types: Optional[list[str]] = Field(
+        default=None,
         description="list of the sub-types that extend from this type_ definition. Generally this is not specified "
         "in any request, but is only supplied in responses. (This is intended for internal use only, and "
         "should not be used without specific guidance.)",
     )
     super_types: Optional[list[str]] = Field(
+        default=None,
         description="list of the super-types that this type_ definition should extend. (This is intended for internal "
         "use only, and should not be used without specific guidance.)",
     )
     service_type: Optional[str] = Field(
-        description="Name used for display purposes (in user interfaces)."
+        default=None, description="Name used for display purposes (in user interfaces)."
+    )
+    skip_display_name_uniqueness_check: Optional[bool] = Field(
+        default=None, description="TBC"
     )
-    skip_display_name_uniqueness_check: Optional[bool] = Field(description="TBC")

     @staticmethod
     def create(
@@ -764,7 +811,7 @@ def create(

 class EntityDef(TypeDef):
     attribute_defs: Optional[list[dict[str, Any]]] = Field(
-        [], description="Unused.", example=[]
+        default_factory=list, description="Unused.", example=[]
     )
     business_attribute_defs: Optional[dict[str, list[dict[str, Any]]]] = Field(
         default_factory=cast(Callable[[], dict[str, list[dict[str, Any]]]], dict),
@@ -773,20 +820,20 @@ class EntityDef(TypeDef):
     )
     category: AtlanTypeCategory = AtlanTypeCategory.ENTITY
     relationship_attribute_defs: Optional[list[dict[str, Any]]] = Field(
-        [], description="Unused.", example=[]
+        default_factory=list, description="Unused.", example=[]
     )
     service_type: Optional[str] = Field(
-        None, description="Internal use only.", example="atlan"
+        default=None, description="Internal use only.", example="atlan"
     )
     sub_types: Optional[list[str]] = Field(
-        [],
+        default_factory=list,
         description="list of the sub-types that extend from this type_ definition. Generally this is not specified in "
         "any request, but is only supplied in responses. (This is intended for internal use only, and "
         "should not be used without specific guidance.)",
         example=[],
     )
     super_types: Optional[list[str]] = Field(
-        [],
+        default_factory=list,
         description="list of the super-types that this type_ definition should extend. (This is intended for internal "
         "use only, and should not be used without specific guidance.)",
         example=[],
@@ -795,49 +842,55 @@ class EntityDef(TypeDef):

 class RelationshipDef(TypeDef):
     attribute_defs: Optional[list[dict[str, Any]]] = Field(
-        [], description="Unused.", example=[]
+        default_factory=list, description="Unused.", example=[]
     )
     category: AtlanTypeCategory = AtlanTypeCategory.RELATIONSHIP
-    end_def1: Optional[dict[str, Any]] = Field({}, description="Unused.", example={})
-    end_def2: Optional[dict[str, Any]] = Field({}, description="Unused.", example={})
+    end_def1: Optional[dict[str, Any]] = Field(
+        default_factory=dict, description="Unused.", example={}
+    )
+    end_def2: Optional[dict[str, Any]] = Field(
+        default_factory=dict, description="Unused.", example={}
+    )
     propagate_tags: str = Field(
-        "ONE_TO_TWO", description="Unused", example="ONE_TO_TWO"
+        default="ONE_TO_TWO", description="Unused", example="ONE_TO_TWO"
     )
     relationship_category: str = Field(
-        "AGGREGATION", description="Unused", example="AGGREGATION"
+        default="AGGREGATION", description="Unused", example="AGGREGATION"
     )
     relationship_label: str = Field(
-        "__SalesforceOrganization.reports",
+        default="__SalesforceOrganization.reports",
         description="Unused",
         example="__SalesforceOrganization.reports",
     )
     service_type: Optional[str] = Field(
-        None, description="Internal use only.", example="atlan"
+        default=None, description="Internal use only.", example="atlan"
     )


 class CustomMetadataDef(TypeDef):
     class Options(AtlanObject):
         emoji: Optional[str] = Field(
-            description="If the logoType is emoji, this should hold the emoji character."
+            default=None,
+            description="If the logoType is emoji, this should hold the emoji character.",
         )
         image_id: Optional[str] = Field(
-            description="The id of the image used for the logo."
+            default=None, description="The id of the image used for the logo."
        )
         is_locked: Optional[bool] = Field(
             description="Indicates whether the custom metadata can be managed in the UI (false) or not (true)."
         )
         logo_type: Optional[str] = Field(
-            description="Type of logo used for the custom metadata."
+            default=None, description="Type of logo used for the custom metadata."
         )
         logo_url: Optional[str] = Field(
-            description="If the logoType is image, this should hold a URL to the image."
+            default=None,
+            description="If the logoType is image, this should hold a URL to the image.",
         )
         icon_color: Optional[AtlanTagColor] = Field(
-            description="Color to use for the icon."
+            default=None, description="Color to use for the icon."
         )
         icon_name: Optional[AtlanIcon] = Field(
-            description="Icon to use to represent the custom metadata."
+            default=None, description="Icon to use to represent the custom metadata."
         )

         @staticmethod
@@ -886,15 +939,15 @@ def with_logo_from_icon(
             )

     attribute_defs: list[AttributeDef] = Field(
-        default=[],
+        default_factory=list,
         description="list of custom attributes defined within the custom metadata.",
     )
     category: AtlanTypeCategory = AtlanTypeCategory.CUSTOM_METADATA
     display_name: str = Field(
-        description="Name used for display purposes (in user interfaces)."
+        default=None, description="Name used for display purposes (in user interfaces)."
     )
     options: Optional[CustomMetadataDef.Options] = Field(
-        description="Optional properties of the type definition."
+        default=None, description="Optional properties of the type definition."
) @staticmethod @@ -915,24 +968,24 @@ def create(display_name: str) -> CustomMetadataDef: class TypeDefResponse(AtlanObject): enum_defs: list[EnumDef] = Field( - [], description="list of enumeration type definitions." + default_factory=list, description="list of enumeration type definitions." ) struct_defs: list[StructDef] = Field( - [], description="list of struct type definitions." + default_factory=list, description="list of struct type definitions." ) atlan_tag_defs: list[AtlanTagDef] = Field( - [], + default_factory=list, description="list of classification type definitions.", alias="classificationDefs", ) entity_defs: list[EntityDef] = Field( - [], description="list of entity type_ definitions." + default_factory=list, description="list of entity type_ definitions." ) relationship_defs: list[RelationshipDef] = Field( - [], description="list of relationship type_ definitions." + default_factory=list, description="list of relationship type_ definitions." ) custom_metadata_defs: list[CustomMetadataDef] = Field( - [], + default_factory=list, description="list of custom metadata type_ definitions.", alias="businessMetadataDefs", ) diff --git a/pyatlan/model/user.py b/pyatlan/model/user.py index d1e7b3788..42eca03cc 100644 --- a/pyatlan/model/user.py +++ b/pyatlan/model/user.py @@ -4,7 +4,7 @@ from typing import Any, Optional, Protocol -from pydantic import Field +from pydantic.v1 import Field from pyatlan.model.api_tokens import ApiToken from pyatlan.model.core import AtlanObject @@ -13,115 +13,141 @@ class AtlanUser(AtlanObject): class Attributes(AtlanObject): designation: Optional[list[str]] = Field( - description="Designation for the user, such as an honorific or title." + default=None, + description="Designation for the user, such as an honorific or title.", + ) + skills: Optional[list[str]] = Field( + default=None, description="Skills the user possesses." ) - skills: Optional[list[str]] = Field(description="Skills the user possesses.") slack: Optional[list[str]] = Field( - description="Unique Slack member identifier." + default=None, description="Unique Slack member identifier." + ) + jira: Optional[list[str]] = Field( + default=None, description="Unique JIRA user identifier." ) - jira: Optional[list[str]] = Field(description="Unique JIRA user identifier.") invited_at: Optional[list[str]] = Field( - description="Time at which the user was invited (as a formatted string)." + default=None, + description="Time at which the user was invited (as a formatted string).", ) invited_by: Optional[list[str]] = Field( - description="User who invited this user." + default=None, description="User who invited this user." ) - invited_by_name: Optional[list[str]] = Field(description="TBC") + invited_by_name: Optional[list[str]] = Field(default=None, description="TBC") class Persona(AtlanObject): id: Optional[str] = Field( - description="Unique identifier (GUID) of the persona." + default=None, description="Unique identifier (GUID) of the persona." + ) + name: Optional[str] = Field( + default=None, description="Internal name of the persona." ) - name: Optional[str] = Field(description="Internal name of the persona.") display_name: Optional[str] = Field( - description="Human-readable name of the persona." + default=None, description="Human-readable name of the persona." ) class LoginEvent(AtlanObject): client_id: Optional[str] = Field( - description="Where the login occurred (usually `atlan-frontend`)." 
+ default=None, + description="Where the login occurred (usually `atlan-frontend`).", ) - details: Optional[Any] = Field(description="TBC") + details: Optional[Any] = Field(default=None, description="TBC") ip_address: Optional[str] = Field( - description="IP address from which the user logged in." + default=None, description="IP address from which the user logged in." ) - realm_id: Optional[str] = Field(description="TBC") + realm_id: Optional[str] = Field(default=None, description="TBC") session_id: Optional[str] = Field( - description="Unique identifier (GUID) of the session for the login." + default=None, + description="Unique identifier (GUID) of the session for the login.", ) time: Optional[int] = Field( description="Time (epoch) when the login occurred, in milliseconds." ) type: Optional[str] = Field( - description="Type of login event that occurred (usually `LOGIN`)." + default=None, + description="Type of login event that occurred (usually `LOGIN`).", ) user_id: Optional[str] = Field( - description="Unique identifier (GUID) of the user that logged in." + default=None, + description="Unique identifier (GUID) of the user that logged in.", ) class AuthDetails(AtlanObject): - client_id: Optional[str] = Field(description="TBC") - ip_address: Optional[str] = Field(description="TBC") - realm_id: Optional[str] = Field(description="TBC") - user_id: Optional[str] = Field(description="TBC") + client_id: Optional[str] = Field(default=None, description="TBC") + ip_address: Optional[str] = Field(default=None, description="TBC") + realm_id: Optional[str] = Field(default=None, description="TBC") + user_id: Optional[str] = Field(default=None, description="TBC") class AdminEvent(AtlanObject): operation_type: Optional[str] = Field( - description="Type of admin operation that occurred." + default=None, description="Type of admin operation that occurred." ) - realm_id: Optional[str] = Field(description="TBC") - representation: Optional[str] = Field(description="TBC") - resource_path: Optional[str] = Field(description="TBC") + realm_id: Optional[str] = Field(default=None, description="TBC") + representation: Optional[str] = Field(default=None, description="TBC") + resource_path: Optional[str] = Field(default=None, description="TBC") resource_type: Optional[str] = Field( - description="Type of resource for the admin operation that occurred." + default=None, + description="Type of resource for the admin operation that occurred.", ) time: Optional[int] = Field( - description="Time (epoch) when the admin operation occurred, in milliseconds." + default=None, + description="Time (epoch) when the admin operation occurred, in milliseconds.", + ) + auth_details: Optional[AtlanUser.AuthDetails] = Field( + default=None, description="TBC" ) - auth_details: Optional[AtlanUser.AuthDetails] = Field(description="TBC") - username: Optional[str] = Field(description="Username of the user within Atlan.") + username: Optional[str] = Field( + default=None, description="Username of the user within Atlan." + ) id: Optional[str] = Field( - description="Unique identifier (GUID) of the user within Atlan." + default=None, description="Unique identifier (GUID) of the user within Atlan." ) workspace_role: Optional[str] = Field( - description="Name of the role of the user within Atlan." + default=None, description="Name of the role of the user within Atlan." 
) - email: Optional[str] = Field(description="Email address of the user.") + email: Optional[str] = Field(default=None, description="Email address of the user.") email_verified: Optional[bool] = Field( - description="When true, the email address of the user has been verified." + default=None, + description="When true, the email address of the user has been verified.", ) enabled: Optional[bool] = Field( - description="When true, the user is enabled. When false, the user has been deactivated." + default=None, + description="When true, the user is enabled. When false, the user has been deactivated.", + ) + first_name: Optional[str] = Field( + default=None, description="First name of the user." + ) + last_name: Optional[str] = Field( + default=None, description="Last name (surname) of the user." ) - first_name: Optional[str] = Field(description="First name of the user.") - last_name: Optional[str] = Field(description="Last name (surname) of the user.") attributes: Optional[AtlanUser.Attributes] = Field( - description="Detailed attributes of the user." + default=None, description="Detailed attributes of the user." ) created_timestamp: Optional[int] = Field( - description="Time (epoch) at which the user was created, in milliseconds." + default=None, + description="Time (epoch) at which the user was created, in milliseconds.", ) last_login_time: Optional[int] = Field( - description="Time (epoch) at which the user last logged into Atlan." + default=None, + description="Time (epoch) at which the user last logged into Atlan.", ) group_count: Optional[int] = Field( - description="Number of groups to which the user belongs." + default=None, description="Number of groups to which the user belongs." ) - default_roles: Optional[list[str]] = Field(description="TBC") - roles: Optional[list[str]] = Field(description="TBC") - decentralized_roles: Optional[Any] = Field(description="TBC") + default_roles: Optional[list[str]] = Field(default=None, description="TBC") + roles: Optional[list[str]] = Field(default=None, description="TBC") + decentralized_roles: Optional[Any] = Field(default=None, description="TBC") personas: Optional[list[AtlanUser.Persona]] = Field( - description="Personas the user is associated with." + default=None, description="Personas the user is associated with." ) purposes: Optional[list[Any]] = Field( - description="Purposes the user is associated with." + default=None, description="Purposes the user is associated with." ) admin_events: Optional[list[AtlanUser.AdminEvent]] = Field( - description="List of administration-related events for this user." + default=None, description="List of administration-related events for this user." ) login_events: Optional[list[AtlanUser.LoginEvent]] = Field( - description="List of login-related events for this user." + default=None, description="List of login-related events for this user." ) @staticmethod @@ -154,38 +180,50 @@ def create_for_modification( class UserMinimalResponse(AtlanObject): - username: Optional[str] = Field(description="Username of the user within Atlan.") + username: Optional[str] = Field( + default=None, description="Username of the user within Atlan." + ) id: Optional[str] = Field( - description="Unique identifier (GUID) of the user within Atlan." + default=None, description="Unique identifier (GUID) of the user within Atlan." 
) - email: Optional[str] = Field(description="Email address of the user.") + email: Optional[str] = Field(default=None, description="Email address of the user.") email_verified: Optional[bool] = Field( - description="When true, the email address of the user has been verified." + default=None, + description="When true, the email address of the user has been verified.", ) enabled: Optional[bool] = Field( - description="When true, the user is enabled. When false, the user has been deactivated." + default=None, + description="When true, the user is enabled. When false, the user has been deactivated.", + ) + first_name: Optional[str] = Field( + default=None, description="First name of the user." + ) + last_name: Optional[str] = Field( + default=None, description="Last name (surname) of the user." ) - first_name: Optional[str] = Field(description="First name of the user.") - last_name: Optional[str] = Field(description="Last name (surname) of the user.") attributes: Optional[AtlanUser.Attributes] = Field( - description="Detailed attributes of the user." + default=None, description="Detailed attributes of the user." ) created_timestamp: Optional[int] = Field( - description="Time (epoch) at which the use was created, in milliseconds." + default=None, + description="Time (epoch) at which the user was created, in milliseconds.", ) - totp: Optional[bool] = Field(description="TBC") - disableable_credential_types: Optional[Any] = Field(description="TBC") - required_actions: Optional[Any] = Field(description="TBC") - access: Optional[Any] = Field(description="TBC") + totp: Optional[bool] = Field(default=None, description="TBC") + disableable_credential_types: Optional[Any] = Field(default=None, description="TBC") + required_actions: Optional[Any] = Field(default=None, description="TBC") + access: Optional[Any] = Field(default=None, description="TBC") class UserResponse(AtlanObject): - total_record: Optional[int] = Field(description="Total number of users.") + total_record: Optional[int] = Field( + default=None, description="Total number of users." + ) filter_record: Optional[int] = Field( + default=None, description="Number of users in the filtered response.", ) records: Optional[list[AtlanUser]] = Field( - description="Details of each user included in the response." + default=None, description="Details of each user included in the response."
) diff --git a/pyatlan/model/utils.py b/pyatlan/model/utils.py index f0d3a7b86..302ebd8b1 100644 --- a/pyatlan/model/utils.py +++ b/pyatlan/model/utils.py @@ -3,12 +3,14 @@ CAMEL_CASE_OVERRIDES = { - "IndexTypeEsFields": "IndexTypeESFields", - "sourceUrl": "sourceURL", - "sourceEmbedUrl": "sourceEmbedURL", + "index_type_es_fields": "IndexTypeESFields", + "source_url": "sourceURL", + "source_embed_url": "sourceEmbedURL", "sql_dbt_sources": "sqlDBTSources", "purpose_atlan_tags": "purposeClassifications", "mapped_atlan_tag_name": "mappedClassificationName", + "has_lineage": "__hasLineage", + "atlan_tags": "classifications", } @@ -28,9 +30,9 @@ def to_camel_case(value: str) -> str: raise ValueError("Value must be a string") if value == "__root__": return value - value = "".join(word.capitalize() for word in value.split("_")) if value in CAMEL_CASE_OVERRIDES: - value = CAMEL_CASE_OVERRIDES[value] + return CAMEL_CASE_OVERRIDES[value] + value = "".join(word.capitalize() for word in value.split("_")) if value.startswith("__"): value = value[2:] return f"{value[0].lower()}{value[1:]}" diff --git a/pyatlan/model/workflow.py b/pyatlan/model/workflow.py index 473e121e7..508c8118c 100644 --- a/pyatlan/model/workflow.py +++ b/pyatlan/model/workflow.py @@ -2,7 +2,7 @@ # Copyright 2022 Atlan Pte. Ltd. from typing import Any, Optional -from pydantic import Field +from pydantic.v1 import Field from pyatlan.model.core import AtlanObject from pyatlan.model.enums import AtlanWorkflowPhase, SortOrder @@ -16,16 +16,16 @@ class PackageParameter(AtlanObject): class WorkflowMetadata(AtlanObject): - annotations: Optional[dict[str, str]] - creation_timestamp: Optional[str] - generate_name: Optional[str] - generation: Optional[int] - labels: Optional[dict[str, str]] - managed_fields: Optional[list[Any]] - name: Optional[str] - namespace: Optional[str] - resource_version: Optional[str] - uid: Optional[str] + annotations: Optional[dict[str, str]] = Field(default=None) + creation_timestamp: Optional[str] = Field(default=None) + generate_name: Optional[str] = Field(default=None) + generation: Optional[int] = Field(default=None) + labels: Optional[dict[str, str]] = Field(default=None) + managed_fields: Optional[list[Any]] = Field(default=None) + name: Optional[str] = Field(default=None) + namespace: Optional[str] = Field(default=None) + resource_version: Optional[str] = Field(default=None) + uid: Optional[str] = Field(default=None) class WorkflowTemplateRef(AtlanObject): @@ -55,18 +55,18 @@ class WorkflowDAG(AtlanObject): class WorkflowTemplate(AtlanObject): name: str - inputs: Any - outputs: Any - metadata: Any + inputs: Any = Field(default=None) + outputs: Any = Field(default=None) + metadata: Any = Field(default=None) dag: WorkflowDAG class WorkflowSpec(AtlanObject): - entrypoint: Optional[str] - arguments: Optional[Any] - templates: Optional[list[WorkflowTemplate]] - workflow_template_ref: Optional[dict[str, str]] - workflow_metadata: Optional[WorkflowMetadata] + entrypoint: Optional[str] = Field(default=None) + arguments: Optional[Any] = Field(default=None) + templates: Optional[list[WorkflowTemplate]] = Field(default=None) + workflow_template_ref: Optional[dict[str, str]] = Field(default=None) + workflow_metadata: Optional[WorkflowMetadata] = Field(default=None) class Workflow(AtlanObject): @@ -76,22 +76,24 @@ class Workflow(AtlanObject): class WorkflowSearchResultStatus(AtlanObject): - artifact_gc_Status: Optional[dict[str, Any]] = Field(alias="artifactGCStatus") - artifact_repository_ref: Optional[Any] - 
compressed_nodes: Optional[str] - estimated_duration: Optional[int] - conditions: Optional[list[Any]] - message: Optional[str] - finished_at: Optional[str] - nodes: Optional[Any] - outputs: Optional[WorkflowParameters] - phase: Optional[AtlanWorkflowPhase] - progress: Optional[str] - resources_duration: Optional[dict[str, int]] - startedAt: Optional[str] - stored_templates: Any - storedWorkflowTemplateSpec: Any - synchronization: Optional[dict[str, Any]] + artifact_gc_Status: Optional[dict[str, Any]] = Field( + default=None, alias="artifactGCStatus" + ) + artifact_repository_ref: Optional[Any] = Field(default=None) + compressed_nodes: Optional[str] = Field(default=None) + estimated_duration: Optional[int] = Field(default=None) + conditions: Optional[list[Any]] = Field(default=None) + message: Optional[str] = Field(default=None) + finished_at: Optional[str] = Field(default=None) + nodes: Optional[Any] = Field(default=None) + outputs: Optional[WorkflowParameters] = Field(default=None) + phase: Optional[AtlanWorkflowPhase] = Field(default=None) + progress: Optional[str] = Field(default=None) + resources_duration: Optional[dict[str, int]] = Field(default=None) + startedAt: Optional[str] = Field(default=None) + stored_templates: Any = Field(default=None) + storedWorkflowTemplateSpec: Any = Field(default=None) + synchronization: Optional[dict[str, Any]] = Field(default=None) class WorkflowSearchResultDetail(AtlanObject): @@ -99,7 +101,7 @@ class WorkflowSearchResultDetail(AtlanObject): kind: str metadata: WorkflowMetadata spec: WorkflowSpec - status: Optional[WorkflowSearchResultStatus] + status: Optional[WorkflowSearchResultStatus] = Field(default=None) class WorkflowSearchResult(AtlanObject): @@ -124,11 +126,11 @@ def to_workflow(self) -> Workflow: class WorkflowSearchHits(AtlanObject): total: dict[str, Any] - hits: Optional[list[WorkflowSearchResult]] + hits: Optional[list[WorkflowSearchResult]] = Field(default=None) class WorkflowSearchResponse(AtlanObject): - took: Optional[int] + took: Optional[int] = Field(default=None) hits: WorkflowSearchHits shards: dict[str, Any] = Field(alias="_shards") @@ -146,7 +148,7 @@ def __init__(__pydantic_self__, **data: Any) -> None: class WorkflowResponse(AtlanObject): metadata: WorkflowMetadata spec: WorkflowSpec - payload: Optional[list[Any]] + payload: Optional[list[Any]] = Field(default=None) class WorkflowRunResponse(WorkflowResponse): @@ -157,8 +159,8 @@ class WorkflowSearchRequest(AtlanObject): from_: int = Field(0, alias="from") size: int = 10 track_total_hits: bool = Field(True, alias="track_total_hits") - post_filter: Optional[Query] = Field(alias="post_filter") - query: Optional[Query] + post_filter: Optional[Query] = Field(default=None, alias="post_filter") + query: Optional[Query] = Field(default=None) sort: list[SortItem] = Field( alias="sort", default=[ diff --git a/pyatlan/pkg/models.py b/pyatlan/pkg/models.py index 2f957e754..a6e091c90 100644 --- a/pyatlan/pkg/models.py +++ b/pyatlan/pkg/models.py @@ -9,7 +9,7 @@ from typing import Literal, Optional, Protocol from jinja2 import Environment, PackageLoader -from pydantic import BaseModel, Field, PrivateAttr, StrictStr, validate_arguments +from pydantic.v1 import BaseModel, Field, PrivateAttr, StrictStr, validate_arguments from pyatlan.model.enums import AtlanConnectorType from pyatlan.pkg.ui import UIConfig diff --git a/pyatlan/pkg/templates/package_config.jinja2 b/pyatlan/pkg/templates/package_config.jinja2 index 40db9916e..aafd66dd4 100644 --- a/pyatlan/pkg/templates/package_config.jinja2 
+++ b/pyatlan/pkg/templates/package_config.jinja2 @@ -1,6 +1,6 @@ from datetime import datetime from pathlib import Path -from pydantic import BaseModel, BaseSettings, Field, validator +from pydantic.v1 import BaseModel, BaseSettings, Field, validator from pyatlan.model.assets import Connection from pyatlan.pkg.models import ConnectorAndConnection from pyatlan.pkg.utils import validate_connection, validate_multiselect, validate_connector_and_connection diff --git a/pyatlan/pkg/ui.py b/pyatlan/pkg/ui.py index 549967837..6a2969ac0 100644 --- a/pyatlan/pkg/ui.py +++ b/pyatlan/pkg/ui.py @@ -3,8 +3,8 @@ from dataclasses import dataclass, field from typing import Any, Optional, TypeVar, Union -from pydantic import StrictStr, validate_arguments -from pydantic.json import pydantic_encoder +from pydantic.v1 import StrictStr, validate_arguments +from pydantic.v1.json import pydantic_encoder from pyatlan.pkg.widgets import ( APITokenSelector, diff --git a/pyatlan/pkg/utils.py b/pyatlan/pkg/utils.py index 5b75d5723..6cbbca03a 100644 --- a/pyatlan/pkg/utils.py +++ b/pyatlan/pkg/utils.py @@ -4,7 +4,7 @@ import logging import os -from pydantic import parse_obj_as, parse_raw_as +from pydantic.v1 import parse_obj_as, parse_raw_as from pyatlan.client.atlan import AtlanClient from pyatlan.pkg.models import RuntimeConfig diff --git a/pyatlan/pkg/widgets.py b/pyatlan/pkg/widgets.py index e4aaa682c..385fe6cb9 100644 --- a/pyatlan/pkg/widgets.py +++ b/pyatlan/pkg/widgets.py @@ -5,7 +5,7 @@ from dataclasses import dataclass, field from typing import Optional, Union -from pydantic import ( +from pydantic.v1 import ( Field, StrictBool, StrictInt, @@ -13,7 +13,7 @@ dataclasses, validate_arguments, ) -from pydantic.json import pydantic_encoder +from pydantic.v1.json import pydantic_encoder Widget = Union[ "APITokenSelectorWidget", diff --git a/pyatlan/samples/search/and_star_assets.py b/pyatlan/samples/search/and_star_assets.py index 4138cb3d5..3aa305c12 100644 --- a/pyatlan/samples/search/and_star_assets.py +++ b/pyatlan/samples/search/and_star_assets.py @@ -1,7 +1,7 @@ # SPDX-License-Identifier: Apache-2.0 # Copyright 2023 Atlan Pte. Ltd. 
import logging -import time +from datetime import datetime from typing import List from pyatlan.client.asset import IndexSearchResults @@ -67,7 +67,6 @@ def star_asset(asset: Asset, usernames: list[str]) -> None: :param usernames: to ensure have starred the asset :return: nothing (void) """ - now = round(time.time() * 1000) starred_details_list: List[StarredDetails] = asset.starred_details_list or [] starred_count = len(starred_details_list) starred_by: set[str] = asset.starred_by or set() @@ -76,7 +75,7 @@ def star_asset(asset: Asset, usernames: list[str]) -> None: starred_by.add(user) starred_count += 1 starred_details_list.append( - StarredDetails(asset_starred_by=user, asset_starred_at=now) + StarredDetails(asset_starred_by=user, asset_starred_at=datetime.now()) ) to_update = asset.trim_to_required() to_update.starred_details_list = starred_details_list diff --git a/pyatlan/samples/search/and_traverse_lineage.py b/pyatlan/samples/search/and_traverse_lineage.py index a777f0211..4af3e2437 100644 --- a/pyatlan/samples/search/and_traverse_lineage.py +++ b/pyatlan/samples/search/and_traverse_lineage.py @@ -62,7 +62,7 @@ def upstream_certified_sources(guid: str) -> list[Asset]: attribute_value=CertificateStatus.VERIFIED.value, ) ], - ) + ) # type: ignore[call-arg] response = client.asset.get_lineage_list(request) verified_assets: list[Asset] = [ asset diff --git a/pyatlan/utils.py b/pyatlan/utils.py index 0c1c5850c..b1f2baae6 100644 --- a/pyatlan/utils.py +++ b/pyatlan/utils.py @@ -15,8 +15,8 @@ from functools import reduce, wraps from typing import Any, Mapping, Optional -from pydantic import HttpUrl -from pydantic.dataclasses import dataclass +from pydantic.v1 import HttpUrl +from pydantic.v1.dataclasses import dataclass from pyatlan.errors import ErrorCode @@ -454,3 +454,16 @@ def __init__(self, logger: logging.Logger, contextvar: ContextVar): def process(self, msg, kwargs): return f"[{self.extra['requestid']}] {msg}", kwargs + + +def validate_single_required_field(field_names: list[str], values: list[Any]): + indexes = [idx for idx, value in enumerate(values) if value is not None] + if not indexes: + raise ValueError( + f"One of the following parameters are required: {', '.join(field_names)}" + ) + if len(indexes) > 1: + names = [field_names[idx] for idx in indexes] + raise ValueError( + f"Only one of the following parameters are allowed: {', '.join(names)}" + ) diff --git a/requirements-dev.txt b/requirements-dev.txt index baafe3329..0b62f6d73 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,5 +1,5 @@ flake8==6.1.0 -mypy==1.5.1 +mypy~=1.8.0 black==23.7.0 types-requests==2.31.0.2 pytest==7.4.0 diff --git a/requirements.txt b/requirements.txt index 75ef016b1..80242c3b2 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ requests>=2.24 -pydantic==1.10.12 +pydantic~=2.6.1 jinja2==3.1.3 networkx==3.1 tenacity==8.2.3 diff --git a/tests/integration/admin_test.py b/tests/integration/admin_test.py index 04ddcfdc7..c22dbce44 100644 --- a/tests/integration/admin_test.py +++ b/tests/integration/admin_test.py @@ -4,7 +4,7 @@ from typing import Generator import pytest -from pydantic import StrictStr +from pydantic.v1 import StrictStr from pyatlan.cache.role_cache import RoleCache from pyatlan.client.atlan import AtlanClient @@ -12,7 +12,7 @@ from pyatlan.model.keycloak_events import AdminEventRequest, KeycloakEventRequest from tests.integration.client import TestId -FIXED_USER = "ernest" +FIXED_USER = "aryaman.bhushan" TODAY = datetime.now().strftime("%Y-%m-%d") 
YESTERDAY = (datetime.now() - timedelta(days=1)).strftime("%Y-%m-%d") MODULE_NAME = TestId.make_unique("Admin") diff --git a/tests/integration/glossary_test.py b/tests/integration/glossary_test.py index 239396f15..36286b856 100644 --- a/tests/integration/glossary_test.py +++ b/tests/integration/glossary_test.py @@ -6,7 +6,7 @@ from typing import Generator, Optional import pytest -from pydantic import StrictStr +from pydantic.v1 import StrictStr from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_fixed from pyatlan.client.atlan import AtlanClient diff --git a/tests/integration/lineage_test.py b/tests/integration/lineage_test.py index d09f1e3f6..dd709d3d9 100644 --- a/tests/integration/lineage_test.py +++ b/tests/integration/lineage_test.py @@ -9,6 +9,7 @@ from pyatlan.model.assets import ( Asset, Column, + ColumnProcess, Connection, Database, MaterialisedView, @@ -17,7 +18,6 @@ Table, View, ) -from pyatlan.model.assets.asset00 import ColumnProcess from pyatlan.model.enums import AtlanConnectorType, EntityStatus, LineageDirection from pyatlan.model.lineage import FluentLineage, LineageRequest from pyatlan.model.search import DSL, Bool, IndexSearchRequest, Prefix, Term diff --git a/tests/integration/owner_propagator_cfg.py b/tests/integration/owner_propagator_cfg.py index 9eea46027..dd1bc196f 100644 --- a/tests/integration/owner_propagator_cfg.py +++ b/tests/integration/owner_propagator_cfg.py @@ -4,7 +4,7 @@ from pathlib import Path from typing import Any, Optional -from pydantic import BaseModel, BaseSettings, Field, parse_obj_as +from pydantic.v1 import BaseModel, BaseSettings, Field, parse_obj_as from pyatlan.model.assets import Connection from pyatlan.model.enums import AtlanConnectorType diff --git a/tests/integration/purpose_test.py b/tests/integration/purpose_test.py index 7e489572c..dd71fcb8c 100644 --- a/tests/integration/purpose_test.py +++ b/tests/integration/purpose_test.py @@ -8,8 +8,7 @@ from pyatlan.client.atlan import AtlanClient from pyatlan.client.token import SERVICE_ACCOUNT_ from pyatlan.model.api_tokens import ApiToken -from pyatlan.model.assets import AuthPolicy, Purpose -from pyatlan.model.assets.asset00 import Column +from pyatlan.model.assets import AuthPolicy, Column, Purpose from pyatlan.model.core import AtlanTagName from pyatlan.model.enums import ( AssetSidebarTab, diff --git a/tests/integration/test_client.py b/tests/integration/test_client.py index 07b7d7405..6449fdf5c 100644 --- a/tests/integration/test_client.py +++ b/tests/integration/test_client.py @@ -3,7 +3,7 @@ from typing import Generator, Optional, Type import pytest -from pydantic import StrictStr +from pydantic.v1 import StrictStr from pyatlan.client.atlan import AtlanClient from pyatlan.client.search_log import ( diff --git a/tests/unit/model/column_process_test.py b/tests/unit/model/column_process_test.py index 15d3b5a06..7dd5452b1 100644 --- a/tests/unit/model/column_process_test.py +++ b/tests/unit/model/column_process_test.py @@ -1,6 +1,6 @@ import pytest -from pyatlan.model.assets.asset00 import Column, ColumnProcess, Process +from pyatlan.model.assets import Column, ColumnProcess, Process from tests.unit.model.constants import ( CP_CONNECTION_QUALIFIED_NAME, CP_NAME, diff --git a/tests/unit/pkg/test_models.py b/tests/unit/pkg/test_models.py index 06381aebc..d9c0bec94 100644 --- a/tests/unit/pkg/test_models.py +++ b/tests/unit/pkg/test_models.py @@ -2,7 +2,7 @@ from unittest.mock import patch import pytest -from pydantic import ValidationError +from pydantic.v1 import 
ValidationError from pyatlan.pkg.models import CustomPackage, PackageConfig, PackageWriter, generate from pyatlan.pkg.ui import UIConfig, UIStep diff --git a/tests/unit/pkg/test_ui.py b/tests/unit/pkg/test_ui.py index 06039317b..94943d080 100644 --- a/tests/unit/pkg/test_ui.py +++ b/tests/unit/pkg/test_ui.py @@ -1,5 +1,5 @@ import pytest -from pydantic import ValidationError +from pydantic.v1 import ValidationError from pyatlan.pkg.ui import UIConfig, UIRule, UIStep from pyatlan.pkg.widgets import AbstractUIElement, TextInput diff --git a/tests/unit/pkg/test_widgets.py b/tests/unit/pkg/test_widgets.py index bb5d2de5b..3fcbd686e 100644 --- a/tests/unit/pkg/test_widgets.py +++ b/tests/unit/pkg/test_widgets.py @@ -1,5 +1,5 @@ import pytest -from pydantic import ValidationError +from pydantic.v1 import ValidationError from pyatlan.pkg.widgets import ( APITokenSelector, diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index 9d27c5301..ba72cbbfd 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -5,7 +5,7 @@ from unittest.mock import DEFAULT, Mock, call, patch import pytest -from pydantic import ValidationError +from pydantic.v1 import ValidationError from pyatlan.client.asset import AssetClient, Batch, CustomMetadataHandling from pyatlan.client.atlan import AtlanClient diff --git a/tests/unit/test_credential_client.py b/tests/unit/test_credential_client.py index 6ca63b759..b708f49c5 100644 --- a/tests/unit/test_credential_client.py +++ b/tests/unit/test_credential_client.py @@ -3,7 +3,7 @@ from unittest.mock import Mock import pytest -from pydantic import ValidationError +from pydantic.v1 import ValidationError from pyatlan.client.common import ApiCaller from pyatlan.client.credential import CredentialClient diff --git a/tests/unit/test_model.py b/tests/unit/test_model.py index 0b70c4baa..99133cbec 100644 --- a/tests/unit/test_model.py +++ b/tests/unit/test_model.py @@ -9,7 +9,7 @@ import pytest # from deepdiff import DeepDiff -from pydantic.error_wrappers import ValidationError +from pydantic.v1.error_wrappers import ValidationError import pyatlan.cache.atlan_tag_cache from pyatlan.errors import InvalidRequestError @@ -150,7 +150,6 @@ ThoughtspotDashlet, ThoughtspotLiveboard, View, - validate_single_required_field, ) from pyatlan.model.constants import DELETED_ from pyatlan.model.core import Announcement @@ -208,6 +207,7 @@ StarredDetails, ) from pyatlan.model.typedef import TypeDefResponse +from pyatlan.utils import validate_single_required_field CM_ATTR_ID = "WQ6XGXwq9o7UnZlkWyKhQN" diff --git a/tests/unit/test_query_client.py b/tests/unit/test_query_client.py index 9d9a0526c..96c442bb5 100644 --- a/tests/unit/test_query_client.py +++ b/tests/unit/test_query_client.py @@ -4,7 +4,7 @@ from unittest.mock import Mock, patch import pytest -from pydantic import ValidationError +from pydantic.v1 import ValidationError from pyatlan.client.atlan import AtlanClient from pyatlan.client.query import QueryClient diff --git a/tests/unit/test_search_model.py b/tests/unit/test_search_model.py index 8fe882dd3..3e37ef1a1 100644 --- a/tests/unit/test_search_model.py +++ b/tests/unit/test_search_model.py @@ -4,7 +4,7 @@ from typing import Literal, Union import pytest -from pydantic import StrictBool, StrictStr, ValidationError +from pydantic.v1 import StrictBool, StrictStr, ValidationError from pyatlan.model.audit import AuditSearchRequest from pyatlan.model.enums import AtlanConnectorType, CertificateStatus diff --git a/tests/unit/test_workflow_client.py 
b/tests/unit/test_workflow_client.py index 81d63e839..db0ea2785 100644 --- a/tests/unit/test_workflow_client.py +++ b/tests/unit/test_workflow_client.py @@ -3,7 +3,7 @@ from unittest.mock import Mock import pytest -from pydantic import ValidationError +from pydantic.v1 import ValidationError from pyatlan.client.common import ApiCaller from pyatlan.client.constants import WORKFLOW_INDEX_SEARCH @@ -11,6 +11,7 @@ from pyatlan.errors import InvalidRequestError from pyatlan.model.enums import WorkflowPackage from pyatlan.model.workflow import ( + PackageParameter, Workflow, WorkflowMetadata, WorkflowResponse, @@ -55,7 +56,7 @@ def search_result(search_result_detail) -> WorkflowSearchResult: primary_term=2, sort=["sort"], source=search_result_detail, - ) + ) # type: ignore[call-arg] @pytest.fixture() @@ -63,7 +64,7 @@ def search_response(search_result: WorkflowSearchResult) -> WorkflowSearchRespon return WorkflowSearchResponse( hits=WorkflowSearchHits(total={"dummy": "dummy"}, hits=[search_result]), shards={"dummy": "dummy"}, - ) + ) # type: ignore[call-arg] @pytest.fixture() @@ -80,7 +81,7 @@ def run_response() -> WorkflowResponse: return WorkflowResponse( metadata=WorkflowMetadata(name="name", namespace="namespace"), spec=WorkflowSpec(), - payload=[{"parameter": "test-param", "type": "test-type", "body": {}}], + payload=[PackageParameter(parameter="test-param", type="test-type", body={})], ) @@ -222,8 +223,10 @@ def test_run_when_given_workflow( Workflow( metadata=WorkflowMetadata(name="name", namespace="namespace"), spec=WorkflowSpec(), - payload=[{"parameter": "test-param", "type": "test-type", "body": {}}], - ) + payload=[ + PackageParameter(parameter="test-param", type="test-type", body={}) + ], + ) # type: ignore[call-arg] ) assert response == run_response
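
A quick sanity check of the two utility changes in this patch: the reordered CAMEL_CASE_OVERRIDES lookup in pyatlan/model/utils.py and the new validate_single_required_field helper added to pyatlan/utils.py. This is a minimal sketch, assuming pyatlan at this revision is importable; the guid/qualified_name field names and their example values are purely illustrative.

```python
# Sketch: exercise the reworked to_camel_case overrides and the new
# validate_single_required_field helper (assumes this revision of pyatlan is installed).
from pyatlan.model.utils import to_camel_case
from pyatlan.utils import validate_single_required_field

# Overrides are now keyed by the raw snake_case name and returned before any
# capitalization, so special aliases come back exactly as listed in CAMEL_CASE_OVERRIDES.
assert to_camel_case("source_url") == "sourceURL"
assert to_camel_case("has_lineage") == "__hasLineage"    # new override, keeps the "__" prefix
assert to_camel_case("atlan_tags") == "classifications"  # new override
assert to_camel_case("display_name") == "displayName"    # non-override falls through as before

# Exactly one of the named parameters may be non-None.
validate_single_required_field(["guid", "qualified_name"], ["abc-123", None])  # passes silently

try:
    validate_single_required_field(["guid", "qualified_name"], [None, None])
except ValueError as err:
    print(err)  # One of the following parameters are required: guid, qualified_name

try:
    validate_single_required_field(
        ["guid", "qualified_name"], ["abc-123", "default/snowflake/123456/db"]
    )
except ValueError as err:
    print(err)  # Only one of the following parameters are allowed: guid, qualified_name
```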