diff --git a/.speakeasy/gen.lock b/.speakeasy/gen.lock index 640c8bb0..39bae593 100755 --- a/.speakeasy/gen.lock +++ b/.speakeasy/gen.lock @@ -1,12 +1,12 @@ lockVersion: 2.0.0 id: 07961597-3730-4940-9fd0-35eb4118eab3 management: - docChecksum: d90db45a44e468bd5d9ffc93c6e415c9 + docChecksum: 9d937f0b088a71ee7aaf5a76d6324576 docVersion: 1.0.0 - speakeasyVersion: 1.299.4 - generationVersion: 2.338.7 - releaseVersion: 0.50.1 - configChecksum: fbcdc2ce57f888d9af79756839bf1a17 + speakeasyVersion: 1.335.2 + generationVersion: 2.372.3 + releaseVersion: 0.51.0 + configChecksum: c86e7977f90b4e8be3a63a01a28dfe90 repoURL: https://github.com/airbytehq/airbyte-api-python-sdk.git repoSubDirectory: . installationURL: https://github.com/airbytehq/airbyte-api-python-sdk.git @@ -16,7 +16,7 @@ features: additionalDependencies: 0.1.0 additionalProperties: 0.1.0 constsAndDefaults: 0.1.3 - core: 4.6.8 + core: 4.8.1 globalSecurity: 2.83.5 globalSecurityCallbacks: 0.1.0 globalServerURLs: 2.82.2 @@ -25,13 +25,14 @@ features: oauth2ClientCredentials: 0.1.2 responseFormat: 0.1.0 sdkHooks: 0.1.0 - unions: 2.82.7 + unions: 2.82.8 generatedFiles: - src/airbyte_api/sdkconfiguration.py - src/airbyte_api/connections.py - src/airbyte_api/destinations.py - src/airbyte_api/health.py - src/airbyte_api/jobs.py + - src/airbyte_api/organizations.py - src/airbyte_api/permissions.py - src/airbyte_api/sources.py - src/airbyte_api/streams.py @@ -40,6 +41,7 @@ generatedFiles: - src/airbyte_api/sdk.py - py.typed - pylintrc + - scripts/publish.sh - setup.py - src/airbyte_api/__init__.py - src/airbyte_api/utils/__init__.py @@ -62,6 +64,7 @@ generatedFiles: - src/airbyte_api/api/createjob.py - src/airbyte_api/api/getjob.py - src/airbyte_api/api/listjobs.py + - src/airbyte_api/api/listorganizationsforuser.py - src/airbyte_api/api/createpermission.py - src/airbyte_api/api/deletepermission.py - src/airbyte_api/api/getpermission.py @@ -75,7 +78,7 @@ generatedFiles: - src/airbyte_api/api/patchsource.py - src/airbyte_api/api/putsource.py - src/airbyte_api/api/getstreamproperties.py - - src/airbyte_api/api/listusers.py + - src/airbyte_api/api/listuserswithinanorganization.py - src/airbyte_api/api/createorupdateworkspaceoauthcredentials.py - src/airbyte_api/api/createworkspace.py - src/airbyte_api/api/deleteworkspace.py @@ -118,7 +121,6 @@ generatedFiles: - src/airbyte_api/models/destination_firebolt.py - src/airbyte_api/models/destination_firestore.py - src/airbyte_api/models/destination_gcs.py - - src/airbyte_api/models/destination_langchain.py - src/airbyte_api/models/destination_milvus.py - src/airbyte_api/models/destination_mongodb.py - src/airbyte_api/models/destination_mssql.py @@ -149,6 +151,8 @@ generatedFiles: - src/airbyte_api/models/jobtypeenum.py - src/airbyte_api/models/jobcreaterequest.py - src/airbyte_api/models/jobsresponse.py + - src/airbyte_api/models/organizationsresponse.py + - src/airbyte_api/models/organizationresponse.py - src/airbyte_api/models/permissionresponse.py - src/airbyte_api/models/permissiontype.py - src/airbyte_api/models/permissioncreaterequest.py @@ -181,6 +185,7 @@ generatedFiles: - src/airbyte_api/models/source_cart.py - src/airbyte_api/models/source_chargebee.py - src/airbyte_api/models/source_chartmogul.py + - src/airbyte_api/models/source_clazar.py - src/airbyte_api/models/source_clickhouse.py - src/airbyte_api/models/source_clickup_api.py - src/airbyte_api/models/source_clockify.py @@ -191,6 +196,7 @@ generatedFiles: - src/airbyte_api/models/source_configcat.py - 
src/airbyte_api/models/source_confluence.py - src/airbyte_api/models/source_convex.py + - src/airbyte_api/models/source_datadog.py - src/airbyte_api/models/source_datascope.py - src/airbyte_api/models/source_delighted.py - src/airbyte_api/models/source_dixa.py @@ -205,6 +211,7 @@ generatedFiles: - src/airbyte_api/models/source_fauna.py - src/airbyte_api/models/source_file.py - src/airbyte_api/models/source_firebolt.py + - src/airbyte_api/models/source_fleetio.py - src/airbyte_api/models/source_freshcaller.py - src/airbyte_api/models/source_freshdesk.py - src/airbyte_api/models/source_freshsales.py @@ -215,9 +222,9 @@ generatedFiles: - src/airbyte_api/models/source_gitlab.py - src/airbyte_api/models/source_glassfrog.py - src/airbyte_api/models/source_gnews.py + - src/airbyte_api/models/source_goldcast.py - src/airbyte_api/models/source_google_ads.py - src/airbyte_api/models/source_google_analytics_data_api.py - - src/airbyte_api/models/source_google_analytics_v4_service_account_only.py - src/airbyte_api/models/source_google_directory.py - src/airbyte_api/models/source_google_drive.py - src/airbyte_api/models/source_google_pagespeed_insights.py @@ -287,7 +294,6 @@ generatedFiles: - src/airbyte_api/models/source_posthog.py - src/airbyte_api/models/source_postmarkapp.py - src/airbyte_api/models/source_prestashop.py - - src/airbyte_api/models/source_punk_api.py - src/airbyte_api/models/source_pypi.py - src/airbyte_api/models/source_qualaroo.py - src/airbyte_api/models/source_railz.py @@ -446,6 +452,7 @@ generatedFiles: - docs/api/getjobresponse.md - docs/api/listjobsrequest.md - docs/api/listjobsresponse.md + - docs/api/listorganizationsforuserresponse.md - docs/api/createpermissionresponse.md - docs/api/deletepermissionrequest.md - docs/api/deletepermissionresponse.md @@ -469,8 +476,8 @@ generatedFiles: - docs/api/putsourceresponse.md - docs/api/getstreampropertiesrequest.md - docs/api/getstreampropertiesresponse.md - - docs/api/listusersrequest.md - - docs/api/listusersresponse.md + - docs/api/listuserswithinanorganizationrequest.md + - docs/api/listuserswithinanorganizationresponse.md - docs/api/createorupdateworkspaceoauthcredentialsrequest.md - docs/api/createorupdateworkspaceoauthcredentialsresponse.md - docs/api/createworkspaceresponse.md @@ -654,21 +661,6 @@ generatedFiles: - docs/models/destinationgcsoutputformat.md - docs/models/gcsbucketregion.md - docs/models/destinationgcs.md - - docs/models/langchain.md - - docs/models/destinationlangchainschemasmode.md - - docs/models/destinationlangchainfake.md - - docs/models/destinationlangchainmode.md - - docs/models/destinationlangchainopenai.md - - docs/models/destinationlangchainembedding.md - - docs/models/destinationlangchainschemasindexingindexing3mode.md - - docs/models/chromalocalpersistance.md - - docs/models/destinationlangchainschemasindexingindexingmode.md - - docs/models/docarrayhnswsearch.md - - docs/models/destinationlangchainschemasindexingmode.md - - docs/models/destinationlangchainpinecone.md - - docs/models/destinationlangchainindexing.md - - docs/models/destinationlangchainprocessingconfigmodel.md - - docs/models/destinationlangchain.md - docs/models/milvus.md - docs/models/destinationmilvusschemasembeddingembedding5mode.md - docs/models/destinationmilvusopenaicompatible.md @@ -854,13 +846,6 @@ generatedFiles: - docs/models/destinationredshifttunnelmethod.md - docs/models/destinationredshiftnotunnel.md - docs/models/destinationredshiftsshtunnelmethod.md - - docs/models/destinationredshiftschemasmethod.md - - 
docs/models/standard.md - - docs/models/destinationredshiftencryptiontype.md - - docs/models/aescbcenvelopeencryption.md - - docs/models/encryptiontype.md - - docs/models/noencryption.md - - docs/models/destinationredshiftencryption.md - docs/models/destinationredshiftmethod.md - docs/models/destinationredshifts3bucketregion.md - docs/models/awss3staging.md @@ -941,7 +926,7 @@ generatedFiles: - docs/models/destinationsnowflakecortexopenai.md - docs/models/destinationsnowflakecortexembedding.md - docs/models/destinationsnowflakecortexcredentials.md - - docs/models/destinationsnowflakecortexindexing.md + - docs/models/snowflakeconnection.md - docs/models/destinationsnowflakecortexfieldnamemappingconfigmodel.md - docs/models/destinationsnowflakecortexlanguage.md - docs/models/destinationsnowflakecortexschemasprocessingtextsplittertextsplittermode.md @@ -1041,6 +1026,8 @@ generatedFiles: - docs/models/jobtypeenum.md - docs/models/jobcreaterequest.md - docs/models/jobsresponse.md + - docs/models/organizationsresponse.md + - docs/models/organizationresponse.md - docs/models/permissionresponse.md - docs/models/permissiontype.md - docs/models/permissioncreaterequest.md @@ -1101,6 +1088,7 @@ generatedFiles: - docs/models/sourceauth0authenticationmethod.md - docs/models/auth0.md - docs/models/sourceauth0.md + - docs/models/filterappliedwhilefetchingrecordsbasedonattributekeyandattributevaluewhichwillbeappendedontherequestbody.md - docs/models/awscloudtrail.md - docs/models/sourceawscloudtrail.md - docs/models/sourceazureblobstorageschemasauthtype.md @@ -1165,6 +1153,8 @@ generatedFiles: - docs/models/sourcechargebee.md - docs/models/chartmogul.md - docs/models/sourcechartmogul.md + - docs/models/clazar.md + - docs/models/sourceclazar.md - docs/models/sourceclickhouseclickhouse.md - docs/models/sourceclickhouseschemastunnelmethodtunnelmethod.md - docs/models/sourceclickhousepasswordauthentication.md @@ -1194,6 +1184,11 @@ generatedFiles: - docs/models/sourceconfluence.md - docs/models/sourceconvexconvex.md - docs/models/sourceconvex.md + - docs/models/sourcedatadogdatasource.md + - docs/models/queries.md + - docs/models/site.md + - docs/models/datadog.md + - docs/models/sourcedatadog.md - docs/models/datascope.md - docs/models/sourcedatascope.md - docs/models/delighted.md @@ -1228,6 +1223,11 @@ generatedFiles: - docs/models/validadstatuses.md - docs/models/validadsetstatuses.md - docs/models/validcampaignstatuses.md + - docs/models/sourcefacebookmarketingschemasauthtype.md + - docs/models/serviceaccountkeyauthentication.md + - docs/models/sourcefacebookmarketingauthtype.md + - docs/models/authenticateviafacebookmarketingoauth.md + - docs/models/sourcefacebookmarketingauthentication.md - docs/models/validactionbreakdowns.md - docs/models/sourcefacebookmarketingactionreporttime.md - docs/models/validbreakdowns.md @@ -1266,6 +1266,8 @@ generatedFiles: - docs/models/sourcefile.md - docs/models/sourcefireboltfirebolt.md - docs/models/sourcefirebolt.md + - docs/models/fleetio.md + - docs/models/sourcefleetio.md - docs/models/freshcaller.md - docs/models/sourcefreshcaller.md - docs/models/freshdesk.md @@ -1315,13 +1317,15 @@ generatedFiles: - docs/models/gnews.md - docs/models/topheadlinestopic.md - docs/models/sourcegnews.md + - docs/models/goldcast.md + - docs/models/sourcegoldcast.md - docs/models/googlecredentials.md - docs/models/customqueriesarray.md - docs/models/customerstatus.md - docs/models/sourcegoogleadsgoogleads.md - docs/models/sourcegoogleads.md - 
docs/models/sourcegoogleanalyticsdataapischemasauthtype.md - - docs/models/serviceaccountkeyauthentication.md + - docs/models/sourcegoogleanalyticsdataapiserviceaccountkeyauthentication.md - docs/models/sourcegoogleanalyticsdataapiauthtype.md - docs/models/authenticateviagoogleoauth.md - docs/models/sourcegoogleanalyticsdataapicredentials.md @@ -1571,11 +1575,6 @@ generatedFiles: - docs/models/sourcegoogleanalyticsdataapicustomreportconfig.md - docs/models/sourcegoogleanalyticsdataapigoogleanalyticsdataapi.md - docs/models/sourcegoogleanalyticsdataapi.md - - docs/models/sourcegoogleanalyticsv4serviceaccountonlyauthtype.md - - docs/models/sourcegoogleanalyticsv4serviceaccountonlyserviceaccountkeyauthentication.md - - docs/models/sourcegoogleanalyticsv4serviceaccountonlycredentials.md - - docs/models/googleanalyticsv4serviceaccountonly.md - - docs/models/sourcegoogleanalyticsv4serviceaccountonly.md - docs/models/sourcegoogledirectoryschemascredentialstitle.md - docs/models/serviceaccountkey.md - docs/models/sourcegoogledirectorycredentialstitle.md @@ -1709,6 +1708,7 @@ generatedFiles: - docs/models/sourcelinkedinpagesoauth20.md - docs/models/sourcelinkedinpagesauthentication.md - docs/models/linkedinpages.md + - docs/models/timegranularitytype.md - docs/models/sourcelinkedinpages.md - docs/models/linnworks.md - docs/models/sourcelinnworks.md @@ -1742,7 +1742,7 @@ generatedFiles: - docs/models/sourcemicrosoftonedrivelocal.md - docs/models/sourcemicrosoftonedriveprocessing.md - docs/models/sourcemicrosoftonedriveparsingstrategy.md - - docs/models/sourcemicrosoftonedrivedocumentfiletypeformatexperimental.md + - docs/models/unstructureddocumentformat.md - docs/models/sourcemicrosoftonedriveschemasstreamsformatfiletype.md - docs/models/sourcemicrosoftonedriveparquetformat.md - docs/models/sourcemicrosoftonedriveschemasstreamsfiletype.md @@ -1774,7 +1774,7 @@ generatedFiles: - docs/models/sourcemicrosoftsharepointlocal.md - docs/models/sourcemicrosoftsharepointprocessing.md - docs/models/sourcemicrosoftsharepointparsingstrategy.md - - docs/models/sourcemicrosoftsharepointdocumentfiletypeformatexperimental.md + - docs/models/sourcemicrosoftsharepointunstructureddocumentformat.md - docs/models/sourcemicrosoftsharepointschemasstreamsformatfiletype.md - docs/models/sourcemicrosoftsharepointparquetformat.md - docs/models/sourcemicrosoftsharepointschemasstreamsfiletype.md @@ -2010,8 +2010,6 @@ generatedFiles: - docs/models/sourcepostmarkapp.md - docs/models/prestashop.md - docs/models/sourceprestashop.md - - docs/models/punkapi.md - - docs/models/sourcepunkapi.md - docs/models/pypi.md - docs/models/sourcepypi.md - docs/models/qualaroo.md @@ -2056,7 +2054,7 @@ generatedFiles: - docs/models/sources3local.md - docs/models/sources3processing.md - docs/models/sources3parsingstrategy.md - - docs/models/sources3documentfiletypeformatexperimental.md + - docs/models/sources3unstructureddocumentformat.md - docs/models/sources3schemasstreamsformatformat4filetype.md - docs/models/sources3parquetformat.md - docs/models/sources3schemasstreamsformatformatfiletype.md @@ -2176,8 +2174,10 @@ generatedFiles: - docs/models/swipeupattributionwindow.md - docs/models/viewattributionwindow.md - docs/models/sourcesnapchatmarketing.md - - docs/models/sourcesnowflakeschemasauthtype.md + - docs/models/sourcesnowflakeschemascredentialsauthtype.md - docs/models/sourcesnowflakeusernameandpassword.md + - docs/models/sourcesnowflakeschemasauthtype.md + - docs/models/sourcesnowflakekeypairauthentication.md - 
docs/models/sourcesnowflakeauthtype.md - docs/models/sourcesnowflakeoauth20.md - docs/models/sourcesnowflakeauthorizationmethod.md @@ -2326,6 +2326,7 @@ generatedFiles: - docs/models/azureblobstoragecredentials.md - docs/models/azureblobstorage.md - docs/models/bingads.md + - docs/models/facebookmarketingcredentials.md - docs/models/facebookmarketing.md - docs/models/githubcredentials.md - docs/models/github.md @@ -2407,6 +2408,7 @@ generatedFiles: - docs/sdks/destinations/README.md - docs/sdks/health/README.md - docs/sdks/jobs/README.md + - docs/sdks/organizations/README.md - docs/sdks/permissions/README.md - docs/sdks/sources/README.md - docs/sdks/streams/README.md diff --git a/.speakeasy/workflow.lock b/.speakeasy/workflow.lock index dd68711c..55d81ac6 100644 --- a/.speakeasy/workflow.lock +++ b/.speakeasy/workflow.lock @@ -2,8 +2,8 @@ speakeasyVersion: 1.291.0 sources: my-source: sourceNamespace: my-source - sourceRevisionDigest: sha256:8c5e2173984ab00ea9fa2e659527d7b65173e1c3cd3facd7b4200389a01d099d - sourceBlobDigest: sha256:a5ef729718fb224548bdc82ebec88384a0657acdacbb737b497e2f4c9690394d + sourceRevisionDigest: sha256:d2907ce474df2e9b9a63b10a6560e61bacf92fbceda25cdccd6ae874fb7262d0 + sourceBlobDigest: sha256:74d197d36d2d4b37a1e10fced9c4aa5414ce0055d7f8011903c097aaff54f085 tags: - latest - main @@ -11,8 +11,8 @@ targets: python-api: source: my-source sourceNamespace: my-source - sourceRevisionDigest: sha256:8c5e2173984ab00ea9fa2e659527d7b65173e1c3cd3facd7b4200389a01d099d - sourceBlobDigest: sha256:a5ef729718fb224548bdc82ebec88384a0657acdacbb737b497e2f4c9690394d + sourceRevisionDigest: sha256:d2907ce474df2e9b9a63b10a6560e61bacf92fbceda25cdccd6ae874fb7262d0 + sourceBlobDigest: sha256:74d197d36d2d4b37a1e10fced9c4aa5414ce0055d7f8011903c097aaff54f085 outLocation: /github/workspace/repo workflow: workflowVersion: 1.0.0 diff --git a/README.md b/README.md index 7152d0fb..047b6e24 100755 --- a/README.md +++ b/README.md @@ -85,6 +85,10 @@ if res.connection_response is not None: * [get_job](docs/sdks/jobs/README.md#get_job) - Get Job status and details * [list_jobs](docs/sdks/jobs/README.md#list_jobs) - List Jobs by sync type +### [organizations](docs/sdks/organizations/README.md) + +* [list_organizations_for_user](docs/sdks/organizations/README.md#list_organizations_for_user) - List all organizations for a user + ### [permissions](docs/sdks/permissions/README.md) * [create_permission](docs/sdks/permissions/README.md#create_permission) - Create a permission @@ -109,7 +113,7 @@ if res.connection_response is not None: ### [users](docs/sdks/users/README.md) -* [list_users](docs/sdks/users/README.md#list_users) - List users +* [list_users_within_an_organization](docs/sdks/users/README.md#list_users_within_an_organization) - List all users within an organization ### [workspaces](docs/sdks/workspaces/README.md) diff --git a/RELEASES.md b/RELEASES.md index 7c8dc539..ea4f5cec 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -928,4 +928,14 @@ Based on: ### Generated - [python v0.50.1] . ### Releases -- [PyPI v0.50.1] https://pypi.org/project/airbyte-api/0.50.1 - . \ No newline at end of file +- [PyPI v0.50.1] https://pypi.org/project/airbyte-api/0.50.1 - . + +## 2024-07-15 18:36:21 +### Changes +Based on: +- OpenAPI Doc +- Speakeasy CLI 1.335.2 (2.372.3) https://github.com/speakeasy-api/speakeasy +### Generated +- [python v0.51.0] . +### Releases +- [PyPI v0.51.0] https://pypi.org/project/airbyte-api/0.51.0 - . 
\ No newline at end of file diff --git a/docs/api/listorganizationsforuserresponse.md b/docs/api/listorganizationsforuserresponse.md new file mode 100644 index 00000000..23f7e71f --- /dev/null +++ b/docs/api/listorganizationsforuserresponse.md @@ -0,0 +1,11 @@ +# ListOrganizationsForUserResponse + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | +| `content_type` | *str* | :heavy_check_mark: | HTTP response content type for this operation | +| `status_code` | *int* | :heavy_check_mark: | HTTP response status code for this operation | +| `raw_response` | [requests.Response](https://requests.readthedocs.io/en/latest/api/#requests.Response) | :heavy_check_mark: | Raw HTTP response; suitable for custom response parsing | +| `organizations_response` | [Optional[models.OrganizationsResponse]](../models/organizationsresponse.md) | :heavy_minus_sign: | List user's organizations. | \ No newline at end of file diff --git a/docs/api/listpermissionsrequest.md b/docs/api/listpermissionsrequest.md index 260ec676..64c7e01d 100644 --- a/docs/api/listpermissionsrequest.md +++ b/docs/api/listpermissionsrequest.md @@ -3,6 +3,7 @@ ## Fields -| Field | Type | Required | Description | -| ---------------------- | ---------------------- | ---------------------- | ---------------------- | -| `user_id` | *Optional[str]* | :heavy_minus_sign: | User Id in permission. | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------- | +| `organization_id` | *Optional[str]* | :heavy_minus_sign: | This is required if you want to read someone else's permissions, and you should have organization admin or a higher role. | +| `user_id` | *Optional[str]* | :heavy_minus_sign: | User Id in permission. 
| \ No newline at end of file diff --git a/docs/api/listusersrequest.md b/docs/api/listuserswithinanorganizationrequest.md similarity index 74% rename from docs/api/listusersrequest.md rename to docs/api/listuserswithinanorganizationrequest.md index 5e51fb81..259c6918 100644 --- a/docs/api/listusersrequest.md +++ b/docs/api/listuserswithinanorganizationrequest.md @@ -1,10 +1,10 @@ -# ListUsersRequest +# ListUsersWithinAnOrganizationRequest ## Fields | Field | Type | Required | Description | | -------------------------------- | -------------------------------- | -------------------------------- | -------------------------------- | +| `organization_id` | *str* | :heavy_check_mark: | N/A | | `emails` | List[*str*] | :heavy_minus_sign: | List of user emails to filter by | -| `ids` | List[*str*] | :heavy_minus_sign: | List of user IDs to filter by | -| `organization_id` | *Optional[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file +| `ids` | List[*str*] | :heavy_minus_sign: | List of user IDs to filter by | \ No newline at end of file diff --git a/docs/api/listusersresponse.md b/docs/api/listuserswithinanorganizationresponse.md similarity index 98% rename from docs/api/listusersresponse.md rename to docs/api/listuserswithinanorganizationresponse.md index 6054be44..0d04aba8 100644 --- a/docs/api/listusersresponse.md +++ b/docs/api/listuserswithinanorganizationresponse.md @@ -1,4 +1,4 @@ -# ListUsersResponse +# ListUsersWithinAnOrganizationResponse ## Fields diff --git a/docs/models/aescbcenvelopeencryption.md b/docs/models/aescbcenvelopeencryption.md deleted file mode 100644 index 2e589491..00000000 --- a/docs/models/aescbcenvelopeencryption.md +++ /dev/null @@ -1,11 +0,0 @@ -# AESCBCEnvelopeEncryption - -Staging data will be encrypted using AES-CBC envelope encryption. - - -## Fields - -| Field | Type | Required | Description | -| ----------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------- | -| `encryption_type` | [Optional[models.DestinationRedshiftEncryptionType]](../models/destinationredshiftencryptiontype.md) | :heavy_minus_sign: | N/A | -| `key_encrypting_key` | *Optional[str]* | :heavy_minus_sign: | The key, base64-encoded. Must be either 128, 192, or 256 bits. Leave blank to have Airbyte generate an ephemeral key for each sync. 
| \ No newline at end of file diff --git a/docs/models/authenticateviafacebookmarketingoauth.md b/docs/models/authenticateviafacebookmarketingoauth.md new file mode 100644 index 00000000..919e9d88 --- /dev/null +++ b/docs/models/authenticateviafacebookmarketingoauth.md @@ -0,0 +1,11 @@ +# AuthenticateViaFacebookMarketingOauth + + +## Fields + +| Field | Type | Required | Description | +| ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `client_id` | *str* | :heavy_check_mark: | Client ID for the Facebook Marketing API | +| `client_secret` | *str* | :heavy_check_mark: | Client Secret for the Facebook Marketing API | +| `access_token` | *Optional[str]* | :heavy_minus_sign: | The value of the generated access token. From your App’s Dashboard, click on "Marketing API" then "Tools". Select permissions ads_management, ads_read, read_insights, business_management. Then click on "Get token". See the docs for more information. 
| +| `auth_type` | [Optional[models.SourceFacebookMarketingAuthType]](../models/sourcefacebookmarketingauthtype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/authentication.md b/docs/models/authentication.md index ce8f5d0e..2e324b93 100644 --- a/docs/models/authentication.md +++ b/docs/models/authentication.md @@ -5,9 +5,9 @@ An HMAC key is a type of credential and can be associated with a service account ## Supported Types -### HMACKey +### `models.HMACKey` ```python -authentication: models.HMACKey = /* values here */ +value: models.HMACKey = /* values here */ ``` diff --git a/docs/models/authenticationmechanism.md b/docs/models/authenticationmechanism.md index fac33541..86e1de89 100644 --- a/docs/models/authenticationmechanism.md +++ b/docs/models/authenticationmechanism.md @@ -5,15 +5,15 @@ Choose how to authenticate to Github ## Supported Types -### AuthenticateViaAsanaOauth +### `models.AuthenticateViaAsanaOauth` ```python -authenticationMechanism: models.AuthenticateViaAsanaOauth = /* values here */ +value: models.AuthenticateViaAsanaOauth = /* values here */ ``` -### AuthenticateWithPersonalAccessToken +### `models.AuthenticateWithPersonalAccessToken` ```python -authenticationMechanism: models.AuthenticateWithPersonalAccessToken = /* values here */ +value: models.AuthenticateWithPersonalAccessToken = /* values here */ ``` diff --git a/docs/models/authenticationmethod.md b/docs/models/authenticationmethod.md index 02448ea8..9ffe7611 100644 --- a/docs/models/authenticationmethod.md +++ b/docs/models/authenticationmethod.md @@ -5,15 +5,15 @@ The type of authentication to be used ## Supported Types -### APIKeySecret +### `models.APIKeySecret` ```python -authenticationMethod: models.APIKeySecret = /* values here */ +value: models.APIKeySecret = /* values here */ ``` -### UsernamePassword +### `models.UsernamePassword` ```python -authenticationMethod: models.UsernamePassword = /* values here */ +value: models.UsernamePassword = /* values here */ ``` diff --git a/docs/models/authenticationmode.md b/docs/models/authenticationmode.md index e1a2a787..eb8edbc9 100644 --- a/docs/models/authenticationmode.md +++ b/docs/models/authenticationmode.md @@ -5,15 +5,15 @@ Choose How to Authenticate to AWS. 
## Supported Types -### IAMRole +### `models.IAMRole` ```python -authenticationMode: models.IAMRole = /* values here */ +value: models.IAMRole = /* values here */ ``` -### IAMUser +### `models.IAMUser` ```python -authenticationMode: models.IAMUser = /* values here */ +value: models.IAMUser = /* values here */ ``` diff --git a/docs/models/authenticationtype.md b/docs/models/authenticationtype.md index 896bf28c..d944ace6 100644 --- a/docs/models/authenticationtype.md +++ b/docs/models/authenticationtype.md @@ -3,15 +3,15 @@ ## Supported Types -### SourceGoogleSearchConsoleOAuth +### `models.SourceGoogleSearchConsoleOAuth` ```python -authenticationType: models.SourceGoogleSearchConsoleOAuth = /* values here */ +value: models.SourceGoogleSearchConsoleOAuth = /* values here */ ``` -### SourceGoogleSearchConsoleServiceAccountKeyAuthentication +### `models.SourceGoogleSearchConsoleServiceAccountKeyAuthentication` ```python -authenticationType: models.SourceGoogleSearchConsoleServiceAccountKeyAuthentication = /* values here */ +value: models.SourceGoogleSearchConsoleServiceAccountKeyAuthentication = /* values here */ ``` diff --git a/docs/models/authenticationwildcard.md b/docs/models/authenticationwildcard.md index 09b6e110..48453a87 100644 --- a/docs/models/authenticationwildcard.md +++ b/docs/models/authenticationwildcard.md @@ -5,15 +5,15 @@ Choose how to authenticate to Mixpanel ## Supported Types -### ServiceAccount +### `models.ServiceAccount` ```python -authenticationWildcard: models.ServiceAccount = /* values here */ +value: models.ServiceAccount = /* values here */ ``` -### ProjectSecret +### `models.ProjectSecret` ```python -authenticationWildcard: models.ProjectSecret = /* values here */ +value: models.ProjectSecret = /* values here */ ``` diff --git a/docs/models/authorizationmethod.md b/docs/models/authorizationmethod.md index d5105cdc..51ad3999 100644 --- a/docs/models/authorizationmethod.md +++ b/docs/models/authorizationmethod.md @@ -3,21 +3,21 @@ ## Supported Types -### KeyPairAuthentication +### `models.KeyPairAuthentication` ```python -authorizationMethod: models.KeyPairAuthentication = /* values here */ +value: models.KeyPairAuthentication = /* values here */ ``` -### UsernameAndPassword +### `models.UsernameAndPassword` ```python -authorizationMethod: models.UsernameAndPassword = /* values here */ +value: models.UsernameAndPassword = /* values here */ ``` -### DestinationSnowflakeOAuth20 +### `models.DestinationSnowflakeOAuth20` ```python -authorizationMethod: models.DestinationSnowflakeOAuth20 = /* values here */ +value: models.DestinationSnowflakeOAuth20 = /* values here */ ``` diff --git a/docs/models/authorizationtype.md b/docs/models/authorizationtype.md index 508bff82..e7c767cf 100644 --- a/docs/models/authorizationtype.md +++ b/docs/models/authorizationtype.md @@ -5,15 +5,15 @@ Authorization type. ## Supported Types -### NoneT +### `models.NoneT` ```python -authorizationType: models.NoneT = /* values here */ +value: models.NoneT = /* values here */ ``` -### LoginPassword +### `models.LoginPassword` ```python -authorizationType: models.LoginPassword = /* values here */ +value: models.LoginPassword = /* values here */ ``` diff --git a/docs/models/awss3staging.md b/docs/models/awss3staging.md index fff40690..0f971245 100644 --- a/docs/models/awss3staging.md +++ b/docs/models/awss3staging.md @@ -10,7 +10,6 @@ | `access_key_id` | *str* | :heavy_check_mark: | This ID grants access to the above S3 staging bucket. 
Airbyte requires Read and Write permissions to the given bucket. See AWS docs on how to generate an access key ID and secret access key. | | | `s3_bucket_name` | *str* | :heavy_check_mark: | The name of the staging S3 bucket. | airbyte.staging | | `secret_access_key` | *str* | :heavy_check_mark: | The corresponding secret to the above access key id. See AWS docs on how to generate an access key ID and secret access key. | | -| `encryption` | [Optional[models.DestinationRedshiftEncryption]](../models/destinationredshiftencryption.md) | :heavy_minus_sign: | How to encrypt the staging data | | | `file_name_pattern` | *Optional[str]* | :heavy_minus_sign: | The pattern allows you to set the file-name format for the S3 staging file(s) | {date} | | `method` | [models.DestinationRedshiftMethod](../models/destinationredshiftmethod.md) | :heavy_check_mark: | N/A | | | `purge_staging_data` | *Optional[bool]* | :heavy_minus_sign: | Whether to delete the staging files from S3 after completing the sync. See docs for details. | | diff --git a/docs/models/baseurl.md b/docs/models/baseurl.md index 210ae994..17d11d38 100644 --- a/docs/models/baseurl.md +++ b/docs/models/baseurl.md @@ -5,15 +5,15 @@ Is your account location is EU based? If yes, the base url to retrieve data will ## Supported Types -### EUBasedAccount +### `models.EUBasedAccount` ```python -baseURL: models.EUBasedAccount = /* values here */ +value: models.EUBasedAccount = /* values here */ ``` -### GlobalAccount +### `models.GlobalAccount` ```python -baseURL: models.GlobalAccount = /* values here */ +value: models.GlobalAccount = /* values here */ ``` diff --git a/docs/models/chromalocalpersistance.md b/docs/models/chromalocalpersistance.md deleted file mode 100644 index a3192f26..00000000 --- a/docs/models/chromalocalpersistance.md +++ /dev/null @@ -1,12 +0,0 @@ -# ChromaLocalPersistance - -Chroma is a popular vector store that can be used to store and retrieve embeddings. It will build its index in memory and persist it to disk by the end of the sync. - - -## Fields - -| Field | Type | Required | Description | Example | -| ---------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------- | -| `destination_path` | *str* | :heavy_check_mark: | Path to the directory where chroma files will be written. The files will be placed inside that local mount. | /local/my_chroma_db | -| `collection_name` | *Optional[str]* | :heavy_minus_sign: | Name of the collection to use. 
| | -| `mode` | [Optional[models.DestinationLangchainSchemasIndexingIndexing3Mode]](../models/destinationlangchainschemasindexingindexing3mode.md) | :heavy_minus_sign: | N/A | | \ No newline at end of file diff --git a/docs/models/destinationlangchainmode.md b/docs/models/clazar.md similarity index 54% rename from docs/models/destinationlangchainmode.md rename to docs/models/clazar.md index 511ab5cc..8ce88874 100644 --- a/docs/models/destinationlangchainmode.md +++ b/docs/models/clazar.md @@ -1,8 +1,8 @@ -# DestinationLangchainMode +# Clazar ## Values | Name | Value | | -------- | -------- | -| `OPENAI` | openai | \ No newline at end of file +| `CLAZAR` | clazar | \ No newline at end of file diff --git a/docs/models/clustertype.md b/docs/models/clustertype.md index bce578b2..f6c3ff89 100644 --- a/docs/models/clustertype.md +++ b/docs/models/clustertype.md @@ -5,15 +5,15 @@ Configures the MongoDB cluster type. ## Supported Types -### MongoDBAtlasReplicaSet +### `models.MongoDBAtlasReplicaSet` ```python -clusterType: models.MongoDBAtlasReplicaSet = /* values here */ +value: models.MongoDBAtlasReplicaSet = /* values here */ ``` -### SelfManagedReplicaSet +### `models.SelfManagedReplicaSet` ```python -clusterType: models.SelfManagedReplicaSet = /* values here */ +value: models.SelfManagedReplicaSet = /* values here */ ``` diff --git a/docs/models/cohortreports.md b/docs/models/cohortreports.md index a138ce3b..2e64e990 100644 --- a/docs/models/cohortreports.md +++ b/docs/models/cohortreports.md @@ -5,15 +5,15 @@ Cohort reports creates a time series of user retention for the cohort. ## Supported Types -### SourceGoogleAnalyticsDataAPIDisabled +### `models.SourceGoogleAnalyticsDataAPIDisabled` ```python -cohortReports: models.SourceGoogleAnalyticsDataAPIDisabled = /* values here */ +value: models.SourceGoogleAnalyticsDataAPIDisabled = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasEnabled +### `models.SourceGoogleAnalyticsDataAPISchemasEnabled` ```python -cohortReports: models.SourceGoogleAnalyticsDataAPISchemasEnabled = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasEnabled = /* values here */ ``` diff --git a/docs/models/compression.md b/docs/models/compression.md index 1fb21970..4f1173f7 100644 --- a/docs/models/compression.md +++ b/docs/models/compression.md @@ -5,15 +5,15 @@ Whether the output files should be compressed. If compression is selected, the o ## Supported Types -### DestinationGcsNoCompression +### `models.DestinationGcsNoCompression` ```python -compression: models.DestinationGcsNoCompression = /* values here */ +value: models.DestinationGcsNoCompression = /* values here */ ``` -### Gzip +### `models.Gzip` ```python -compression: models.Gzip = /* values here */ +value: models.Gzip = /* values here */ ``` diff --git a/docs/models/compressioncodec.md b/docs/models/compressioncodec.md index a1fd1ace..4f4f22fc 100644 --- a/docs/models/compressioncodec.md +++ b/docs/models/compressioncodec.md @@ -5,39 +5,39 @@ The compression algorithm used to compress data. Default to no compression. 
## Supported Types -### NoCompression +### `models.NoCompression` ```python -compressionCodec: models.NoCompression = /* values here */ +value: models.NoCompression = /* values here */ ``` -### Deflate +### `models.Deflate` ```python -compressionCodec: models.Deflate = /* values here */ +value: models.Deflate = /* values here */ ``` -### Bzip2 +### `models.Bzip2` ```python -compressionCodec: models.Bzip2 = /* values here */ +value: models.Bzip2 = /* values here */ ``` -### Xz +### `models.Xz` ```python -compressionCodec: models.Xz = /* values here */ +value: models.Xz = /* values here */ ``` -### Zstandard +### `models.Zstandard` ```python -compressionCodec: models.Zstandard = /* values here */ +value: models.Zstandard = /* values here */ ``` -### Snappy +### `models.Snappy` ```python -compressionCodec: models.Snappy = /* values here */ +value: models.Snappy = /* values here */ ``` diff --git a/docs/models/connectby.md b/docs/models/connectby.md index 6af5d914..c96c9ede 100644 --- a/docs/models/connectby.md +++ b/docs/models/connectby.md @@ -5,15 +5,15 @@ Connect data that will be used for DB connection ## Supported Types -### ServiceName +### `models.ServiceName` ```python -connectBy: models.ServiceName = /* values here */ +value: models.ServiceName = /* values here */ ``` -### SystemIDSID +### `models.SystemIDSID` ```python -connectBy: models.SystemIDSID = /* values here */ +value: models.SystemIDSID = /* values here */ ``` diff --git a/docs/models/credential.md b/docs/models/credential.md index a2af0ddf..bbbb217d 100644 --- a/docs/models/credential.md +++ b/docs/models/credential.md @@ -5,9 +5,9 @@ An HMAC key is a type of credential and can be associated with a service account ## Supported Types -### DestinationBigqueryHMACKey +### `models.DestinationBigqueryHMACKey` ```python -credential: models.DestinationBigqueryHMACKey = /* values here */ +value: models.DestinationBigqueryHMACKey = /* values here */ ``` diff --git a/docs/models/csvcommaseparatedvalues.md b/docs/models/csvcommaseparatedvalues.md index 4e6b2d80..51bbbecd 100644 --- a/docs/models/csvcommaseparatedvalues.md +++ b/docs/models/csvcommaseparatedvalues.md @@ -5,5 +5,6 @@ | Field | Type | Required | Description | | ----------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------- | +| `file_extension` | *Optional[bool]* | :heavy_minus_sign: | Add file extensions to the output file. | | `flattening` | [Optional[models.NormalizationFlattening]](../models/normalizationflattening.md) | :heavy_minus_sign: | Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details. | | `format_type` | [models.FormatType](../models/formattype.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/csvheaderdefinition.md b/docs/models/csvheaderdefinition.md index 4718cee3..68124107 100644 --- a/docs/models/csvheaderdefinition.md +++ b/docs/models/csvheaderdefinition.md @@ -5,21 +5,21 @@ How headers will be defined. 
`User Provided` assumes the CSV does not have a hea ## Supported Types -### FromCSV +### `models.FromCSV` ```python -csvHeaderDefinition: models.FromCSV = /* values here */ +value: models.FromCSV = /* values here */ ``` -### Autogenerated +### `models.Autogenerated` ```python -csvHeaderDefinition: models.Autogenerated = /* values here */ +value: models.Autogenerated = /* values here */ ``` -### UserProvided +### `models.UserProvided` ```python -csvHeaderDefinition: models.UserProvided = /* values here */ +value: models.UserProvided = /* values here */ ``` diff --git a/docs/models/datadog.md b/docs/models/datadog.md new file mode 100644 index 00000000..92d52b68 --- /dev/null +++ b/docs/models/datadog.md @@ -0,0 +1,8 @@ +# Datadog + + +## Values + +| Name | Value | +| --------- | --------- | +| `DATADOG` | datadog | \ No newline at end of file diff --git a/docs/models/datasource.md b/docs/models/datasource.md index 8b339e39..a7338b32 100644 --- a/docs/models/datasource.md +++ b/docs/models/datasource.md @@ -5,21 +5,21 @@ Storage on which the delta lake is built. ## Supported Types -### RecommendedManagedTables +### `models.RecommendedManagedTables` ```python -dataSource: models.RecommendedManagedTables = /* values here */ +value: models.RecommendedManagedTables = /* values here */ ``` -### AmazonS3 +### `models.AmazonS3` ```python -dataSource: models.AmazonS3 = /* values here */ +value: models.AmazonS3 = /* values here */ ``` -### DestinationDatabricksAzureBlobStorage +### `models.DestinationDatabricksAzureBlobStorage` ```python -dataSource: models.DestinationDatabricksAzureBlobStorage = /* values here */ +value: models.DestinationDatabricksAzureBlobStorage = /* values here */ ``` diff --git a/docs/models/deletionmode.md b/docs/models/deletionmode.md index d4394d1c..2eaa8842 100644 --- a/docs/models/deletionmode.md +++ b/docs/models/deletionmode.md @@ -8,15 +8,15 @@ Enabled - Enables this feature. When a document is deleted, the connector export ## Supported Types -### Disabled +### `models.Disabled` ```python -deletionMode: models.Disabled = /* values here */ +value: models.Disabled = /* values here */ ``` -### Enabled +### `models.Enabled` ```python -deletionMode: models.Enabled = /* values here */ +value: models.Enabled = /* values here */ ``` diff --git a/docs/models/destinationazureblobstoragejsonlinesnewlinedelimitedjson.md b/docs/models/destinationazureblobstoragejsonlinesnewlinedelimitedjson.md index f351a9ba..76cda9c8 100644 --- a/docs/models/destinationazureblobstoragejsonlinesnewlinedelimitedjson.md +++ b/docs/models/destinationazureblobstoragejsonlinesnewlinedelimitedjson.md @@ -5,4 +5,5 @@ | Field | Type | Required | Description | | -------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- | +| `file_extension` | *Optional[bool]* | :heavy_minus_sign: | Add file extensions to the output file. 
| | `format_type` | [models.DestinationAzureBlobStorageFormatType](../models/destinationazureblobstorageformattype.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/destinationconfiguration.md b/docs/models/destinationconfiguration.md index 2586b7f5..656883d3 100644 --- a/docs/models/destinationconfiguration.md +++ b/docs/models/destinationconfiguration.md @@ -5,225 +5,219 @@ The values required to configure the destination. ## Supported Types -### DestinationGoogleSheets +### `models.DestinationGoogleSheets` ```python -destinationConfiguration: models.DestinationGoogleSheets = /* values here */ +value: models.DestinationGoogleSheets = /* values here */ ``` -### DestinationAstra +### `models.DestinationAstra` ```python -destinationConfiguration: models.DestinationAstra = /* values here */ +value: models.DestinationAstra = /* values here */ ``` -### DestinationAwsDatalake +### `models.DestinationAwsDatalake` ```python -destinationConfiguration: models.DestinationAwsDatalake = /* values here */ +value: models.DestinationAwsDatalake = /* values here */ ``` -### DestinationAzureBlobStorage +### `models.DestinationAzureBlobStorage` ```python -destinationConfiguration: models.DestinationAzureBlobStorage = /* values here */ +value: models.DestinationAzureBlobStorage = /* values here */ ``` -### DestinationBigquery +### `models.DestinationBigquery` ```python -destinationConfiguration: models.DestinationBigquery = /* values here */ +value: models.DestinationBigquery = /* values here */ ``` -### DestinationClickhouse +### `models.DestinationClickhouse` ```python -destinationConfiguration: models.DestinationClickhouse = /* values here */ +value: models.DestinationClickhouse = /* values here */ ``` -### DestinationConvex +### `models.DestinationConvex` ```python -destinationConfiguration: models.DestinationConvex = /* values here */ +value: models.DestinationConvex = /* values here */ ``` -### DestinationDatabricks +### `models.DestinationDatabricks` ```python -destinationConfiguration: models.DestinationDatabricks = /* values here */ +value: models.DestinationDatabricks = /* values here */ ``` -### DestinationDevNull +### `models.DestinationDevNull` ```python -destinationConfiguration: models.DestinationDevNull = /* values here */ +value: models.DestinationDevNull = /* values here */ ``` -### DestinationDuckdb +### `models.DestinationDuckdb` ```python -destinationConfiguration: models.DestinationDuckdb = /* values here */ +value: models.DestinationDuckdb = /* values here */ ``` -### DestinationDynamodb +### `models.DestinationDynamodb` ```python -destinationConfiguration: models.DestinationDynamodb = /* values here */ +value: models.DestinationDynamodb = /* values here */ ``` -### DestinationElasticsearch +### `models.DestinationElasticsearch` ```python -destinationConfiguration: models.DestinationElasticsearch = /* values here */ +value: models.DestinationElasticsearch = /* values here */ ``` -### DestinationFirebolt +### `models.DestinationFirebolt` ```python -destinationConfiguration: models.DestinationFirebolt = /* values here */ +value: models.DestinationFirebolt = /* values here */ ``` -### DestinationFirestore +### `models.DestinationFirestore` ```python -destinationConfiguration: models.DestinationFirestore = /* values here */ +value: models.DestinationFirestore = /* values here */ ``` -### DestinationGcs +### `models.DestinationGcs` ```python -destinationConfiguration: models.DestinationGcs = /* values here */ +value: models.DestinationGcs = /* values here 
*/ ``` -### DestinationLangchain +### `models.DestinationMilvus` ```python -destinationConfiguration: models.DestinationLangchain = /* values here */ +value: models.DestinationMilvus = /* values here */ ``` -### DestinationMilvus +### `models.DestinationMongodb` ```python -destinationConfiguration: models.DestinationMilvus = /* values here */ +value: models.DestinationMongodb = /* values here */ ``` -### DestinationMongodb +### `models.DestinationMssql` ```python -destinationConfiguration: models.DestinationMongodb = /* values here */ +value: models.DestinationMssql = /* values here */ ``` -### DestinationMssql +### `models.DestinationMysql` ```python -destinationConfiguration: models.DestinationMssql = /* values here */ +value: models.DestinationMysql = /* values here */ ``` -### DestinationMysql +### `models.DestinationOracle` ```python -destinationConfiguration: models.DestinationMysql = /* values here */ +value: models.DestinationOracle = /* values here */ ``` -### DestinationOracle +### `models.DestinationPinecone` ```python -destinationConfiguration: models.DestinationOracle = /* values here */ +value: models.DestinationPinecone = /* values here */ ``` -### DestinationPinecone +### `models.DestinationPostgres` ```python -destinationConfiguration: models.DestinationPinecone = /* values here */ +value: models.DestinationPostgres = /* values here */ ``` -### DestinationPostgres +### `models.DestinationPubsub` ```python -destinationConfiguration: models.DestinationPostgres = /* values here */ +value: models.DestinationPubsub = /* values here */ ``` -### DestinationPubsub +### `models.DestinationQdrant` ```python -destinationConfiguration: models.DestinationPubsub = /* values here */ +value: models.DestinationQdrant = /* values here */ ``` -### DestinationQdrant +### `models.DestinationRedis` ```python -destinationConfiguration: models.DestinationQdrant = /* values here */ +value: models.DestinationRedis = /* values here */ ``` -### DestinationRedis +### `models.DestinationRedshift` ```python -destinationConfiguration: models.DestinationRedis = /* values here */ +value: models.DestinationRedshift = /* values here */ ``` -### DestinationRedshift +### `models.DestinationS3` ```python -destinationConfiguration: models.DestinationRedshift = /* values here */ +value: models.DestinationS3 = /* values here */ ``` -### DestinationS3 +### `models.DestinationS3Glue` ```python -destinationConfiguration: models.DestinationS3 = /* values here */ +value: models.DestinationS3Glue = /* values here */ ``` -### DestinationS3Glue +### `models.DestinationSftpJSON` ```python -destinationConfiguration: models.DestinationS3Glue = /* values here */ +value: models.DestinationSftpJSON = /* values here */ ``` -### DestinationSftpJSON +### `models.DestinationSnowflake` ```python -destinationConfiguration: models.DestinationSftpJSON = /* values here */ +value: models.DestinationSnowflake = /* values here */ ``` -### DestinationSnowflake +### `models.DestinationSnowflakeCortex` ```python -destinationConfiguration: models.DestinationSnowflake = /* values here */ +value: models.DestinationSnowflakeCortex = /* values here */ ``` -### DestinationSnowflakeCortex +### `models.DestinationTeradata` ```python -destinationConfiguration: models.DestinationSnowflakeCortex = /* values here */ +value: models.DestinationTeradata = /* values here */ ``` -### DestinationTeradata +### `models.DestinationTypesense` ```python -destinationConfiguration: models.DestinationTeradata = /* values here */ +value: models.DestinationTypesense = 
/* values here */ ``` -### DestinationTypesense +### `models.DestinationVectara` ```python -destinationConfiguration: models.DestinationTypesense = /* values here */ +value: models.DestinationVectara = /* values here */ ``` -### DestinationVectara +### `models.DestinationWeaviate` ```python -destinationConfiguration: models.DestinationVectara = /* values here */ +value: models.DestinationWeaviate = /* values here */ ``` -### DestinationWeaviate +### `models.DestinationYellowbrick` ```python -destinationConfiguration: models.DestinationWeaviate = /* values here */ -``` - -### DestinationYellowbrick - -```python -destinationConfiguration: models.DestinationYellowbrick = /* values here */ +value: models.DestinationYellowbrick = /* values here */ ``` diff --git a/docs/models/destinationfireboltloadingmethod.md b/docs/models/destinationfireboltloadingmethod.md index 138982d1..e13e7615 100644 --- a/docs/models/destinationfireboltloadingmethod.md +++ b/docs/models/destinationfireboltloadingmethod.md @@ -5,15 +5,15 @@ Loading method used to select the way data will be uploaded to Firebolt ## Supported Types -### SQLInserts +### `models.SQLInserts` ```python -destinationFireboltLoadingMethod: models.SQLInserts = /* values here */ +value: models.SQLInserts = /* values here */ ``` -### ExternalTableViaS3 +### `models.ExternalTableViaS3` ```python -destinationFireboltLoadingMethod: models.ExternalTableViaS3 = /* values here */ +value: models.ExternalTableViaS3 = /* values here */ ``` diff --git a/docs/models/destinationgcscompression.md b/docs/models/destinationgcscompression.md index d5fd48d8..b971ca74 100644 --- a/docs/models/destinationgcscompression.md +++ b/docs/models/destinationgcscompression.md @@ -5,15 +5,15 @@ Whether the output files should be compressed. If compression is selected, the o ## Supported Types -### DestinationGcsSchemasNoCompression +### `models.DestinationGcsSchemasNoCompression` ```python -destinationGcsCompression: models.DestinationGcsSchemasNoCompression = /* values here */ +value: models.DestinationGcsSchemasNoCompression = /* values here */ ``` -### DestinationGcsGZIP +### `models.DestinationGcsGZIP` ```python -destinationGcsCompression: models.DestinationGcsGZIP = /* values here */ +value: models.DestinationGcsGZIP = /* values here */ ``` diff --git a/docs/models/destinationgcsoutputformat.md b/docs/models/destinationgcsoutputformat.md index e8df9434..c411b61c 100644 --- a/docs/models/destinationgcsoutputformat.md +++ b/docs/models/destinationgcsoutputformat.md @@ -5,27 +5,27 @@ Output data format. One of the following formats must be selected - role that you want to use to access Snowflake | AIRBYTE_ROLE | | `schema` | *str* | :heavy_check_mark: | Enter the name of the default schema | AIRBYTE_SCHEMA | | `username` | *str* | :heavy_check_mark: | Enter the name of the user you want to use to access the database | AIRBYTE_USER | -| `warehouse` | *str* | :heavy_check_mark: | Enter the name of the warehouse that you want to sync data into | AIRBYTE_WAREHOUSE | +| `warehouse` | *str* | :heavy_check_mark: | Enter the name of the warehouse that you want to use as a compute cluster | AIRBYTE_WAREHOUSE | | `credentials` | [Optional[models.AuthorizationMethod]](../models/authorizationmethod.md) | :heavy_minus_sign: | N/A | | | `destination_type` | [models.DestinationSnowflakeSnowflake](../models/destinationsnowflakesnowflake.md) | :heavy_check_mark: | N/A | | | `disable_type_dedupe` | *Optional[bool]* | :heavy_minus_sign: | Disable Writing Final Tables. WARNING! 
The data format in _airbyte_data is likely stable but there are no guarantees that other metadata columns will remain the same in future versions | | diff --git a/docs/models/destinationsnowflakecortex.md b/docs/models/destinationsnowflakecortex.md index 2d9be2ee..8e0e861d 100644 --- a/docs/models/destinationsnowflakecortex.md +++ b/docs/models/destinationsnowflakecortex.md @@ -17,7 +17,7 @@ Processing, embedding and advanced configuration are provided by this base class | Field | Type | Required | Description | | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | `embedding` | [models.DestinationSnowflakeCortexEmbedding](../models/destinationsnowflakecortexembedding.md) | :heavy_check_mark: | Embedding configuration | -| `indexing` | [models.DestinationSnowflakeCortexIndexing](../models/destinationsnowflakecortexindexing.md) | :heavy_check_mark: | Snowflake can be used to store vector data and retrieve embeddings. | +| `indexing` | [models.SnowflakeConnection](../models/snowflakeconnection.md) | :heavy_check_mark: | Snowflake can be used to store vector data and retrieve embeddings. | | `processing` | [models.DestinationSnowflakeCortexProcessingConfigModel](../models/destinationsnowflakecortexprocessingconfigmodel.md) | :heavy_check_mark: | N/A | | `destination_type` | [models.SnowflakeCortex](../models/snowflakecortex.md) | :heavy_check_mark: | N/A | | `omit_raw_text` | *Optional[bool]* | :heavy_minus_sign: | Do not store the text that gets embedded along with the vector and the metadata in the destination. If set to true, only the vector and the metadata will be stored - in this case raw text for LLM use cases needs to be retrieved from another source. 
| \ No newline at end of file diff --git a/docs/models/destinationsnowflakecortexembedding.md b/docs/models/destinationsnowflakecortexembedding.md index 2c0fd9e5..71498a11 100644 --- a/docs/models/destinationsnowflakecortexembedding.md +++ b/docs/models/destinationsnowflakecortexembedding.md @@ -5,33 +5,33 @@ Embedding configuration ## Supported Types -### DestinationSnowflakeCortexOpenAI +### `models.DestinationSnowflakeCortexOpenAI` ```python -destinationSnowflakeCortexEmbedding: models.DestinationSnowflakeCortexOpenAI = /* values here */ +value: models.DestinationSnowflakeCortexOpenAI = /* values here */ ``` -### DestinationSnowflakeCortexCohere +### `models.DestinationSnowflakeCortexCohere` ```python -destinationSnowflakeCortexEmbedding: models.DestinationSnowflakeCortexCohere = /* values here */ +value: models.DestinationSnowflakeCortexCohere = /* values here */ ``` -### DestinationSnowflakeCortexFake +### `models.DestinationSnowflakeCortexFake` ```python -destinationSnowflakeCortexEmbedding: models.DestinationSnowflakeCortexFake = /* values here */ +value: models.DestinationSnowflakeCortexFake = /* values here */ ``` -### DestinationSnowflakeCortexAzureOpenAI +### `models.DestinationSnowflakeCortexAzureOpenAI` ```python -destinationSnowflakeCortexEmbedding: models.DestinationSnowflakeCortexAzureOpenAI = /* values here */ +value: models.DestinationSnowflakeCortexAzureOpenAI = /* values here */ ``` -### DestinationSnowflakeCortexOpenAICompatible +### `models.DestinationSnowflakeCortexOpenAICompatible` ```python -destinationSnowflakeCortexEmbedding: models.DestinationSnowflakeCortexOpenAICompatible = /* values here */ +value: models.DestinationSnowflakeCortexOpenAICompatible = /* values here */ ``` diff --git a/docs/models/destinationsnowflakecortextextsplitter.md b/docs/models/destinationsnowflakecortextextsplitter.md index 644ac4bb..c8440fa0 100644 --- a/docs/models/destinationsnowflakecortextextsplitter.md +++ b/docs/models/destinationsnowflakecortextextsplitter.md @@ -5,21 +5,21 @@ Split text fields into chunks based on the specified method. ## Supported Types -### DestinationSnowflakeCortexBySeparator +### `models.DestinationSnowflakeCortexBySeparator` ```python -destinationSnowflakeCortexTextSplitter: models.DestinationSnowflakeCortexBySeparator = /* values here */ +value: models.DestinationSnowflakeCortexBySeparator = /* values here */ ``` -### DestinationSnowflakeCortexByMarkdownHeader +### `models.DestinationSnowflakeCortexByMarkdownHeader` ```python -destinationSnowflakeCortexTextSplitter: models.DestinationSnowflakeCortexByMarkdownHeader = /* values here */ +value: models.DestinationSnowflakeCortexByMarkdownHeader = /* values here */ ``` -### DestinationSnowflakeCortexByProgrammingLanguage +### `models.DestinationSnowflakeCortexByProgrammingLanguage` ```python -destinationSnowflakeCortexTextSplitter: models.DestinationSnowflakeCortexByProgrammingLanguage = /* values here */ +value: models.DestinationSnowflakeCortexByProgrammingLanguage = /* values here */ ``` diff --git a/docs/models/destinationteradatasslmodes.md b/docs/models/destinationteradatasslmodes.md index 4abfb23d..88825d2f 100644 --- a/docs/models/destinationteradatasslmodes.md +++ b/docs/models/destinationteradatasslmodes.md @@ -12,39 +12,39 @@ SSL connection modes. 
## Supported Types -### DestinationTeradataDisable +### `models.DestinationTeradataDisable` ```python -destinationTeradataSSLModes: models.DestinationTeradataDisable = /* values here */ +value: models.DestinationTeradataDisable = /* values here */ ``` -### DestinationTeradataAllow +### `models.DestinationTeradataAllow` ```python -destinationTeradataSSLModes: models.DestinationTeradataAllow = /* values here */ +value: models.DestinationTeradataAllow = /* values here */ ``` -### DestinationTeradataPrefer +### `models.DestinationTeradataPrefer` ```python -destinationTeradataSSLModes: models.DestinationTeradataPrefer = /* values here */ +value: models.DestinationTeradataPrefer = /* values here */ ``` -### DestinationTeradataRequire +### `models.DestinationTeradataRequire` ```python -destinationTeradataSSLModes: models.DestinationTeradataRequire = /* values here */ +value: models.DestinationTeradataRequire = /* values here */ ``` -### DestinationTeradataVerifyCa +### `models.DestinationTeradataVerifyCa` ```python -destinationTeradataSSLModes: models.DestinationTeradataVerifyCa = /* values here */ +value: models.DestinationTeradataVerifyCa = /* values here */ ``` -### DestinationTeradataVerifyFull +### `models.DestinationTeradataVerifyFull` ```python -destinationTeradataSSLModes: models.DestinationTeradataVerifyFull = /* values here */ +value: models.DestinationTeradataVerifyFull = /* values here */ ``` diff --git a/docs/models/destinationweaviateauthentication.md b/docs/models/destinationweaviateauthentication.md index 6ee84db1..bd041e7e 100644 --- a/docs/models/destinationweaviateauthentication.md +++ b/docs/models/destinationweaviateauthentication.md @@ -5,21 +5,21 @@ Authentication method ## Supported Types -### DestinationWeaviateAPIToken +### `models.DestinationWeaviateAPIToken` ```python -destinationWeaviateAuthentication: models.DestinationWeaviateAPIToken = /* values here */ +value: models.DestinationWeaviateAPIToken = /* values here */ ``` -### DestinationWeaviateUsernamePassword +### `models.DestinationWeaviateUsernamePassword` ```python -destinationWeaviateAuthentication: models.DestinationWeaviateUsernamePassword = /* values here */ +value: models.DestinationWeaviateUsernamePassword = /* values here */ ``` -### NoAuthentication +### `models.NoAuthentication` ```python -destinationWeaviateAuthentication: models.NoAuthentication = /* values here */ +value: models.NoAuthentication = /* values here */ ``` diff --git a/docs/models/destinationweaviateembedding.md b/docs/models/destinationweaviateembedding.md index e720b843..676ea3e4 100644 --- a/docs/models/destinationweaviateembedding.md +++ b/docs/models/destinationweaviateembedding.md @@ -5,45 +5,45 @@ Embedding configuration ## Supported Types -### NoExternalEmbedding +### `models.NoExternalEmbedding` ```python -destinationWeaviateEmbedding: models.NoExternalEmbedding = /* values here */ +value: models.NoExternalEmbedding = /* values here */ ``` -### DestinationWeaviateAzureOpenAI +### `models.DestinationWeaviateAzureOpenAI` ```python -destinationWeaviateEmbedding: models.DestinationWeaviateAzureOpenAI = /* values here */ +value: models.DestinationWeaviateAzureOpenAI = /* values here */ ``` -### DestinationWeaviateOpenAI +### `models.DestinationWeaviateOpenAI` ```python -destinationWeaviateEmbedding: models.DestinationWeaviateOpenAI = /* values here */ +value: models.DestinationWeaviateOpenAI = /* values here */ ``` -### DestinationWeaviateCohere +### `models.DestinationWeaviateCohere` ```python -destinationWeaviateEmbedding: 
models.DestinationWeaviateCohere = /* values here */ +value: models.DestinationWeaviateCohere = /* values here */ ``` -### FromField +### `models.FromField` ```python -destinationWeaviateEmbedding: models.FromField = /* values here */ +value: models.FromField = /* values here */ ``` -### DestinationWeaviateFake +### `models.DestinationWeaviateFake` ```python -destinationWeaviateEmbedding: models.DestinationWeaviateFake = /* values here */ +value: models.DestinationWeaviateFake = /* values here */ ``` -### DestinationWeaviateOpenAICompatible +### `models.DestinationWeaviateOpenAICompatible` ```python -destinationWeaviateEmbedding: models.DestinationWeaviateOpenAICompatible = /* values here */ +value: models.DestinationWeaviateOpenAICompatible = /* values here */ ``` diff --git a/docs/models/destinationweaviatetextsplitter.md b/docs/models/destinationweaviatetextsplitter.md index 04862cde..28f5eef3 100644 --- a/docs/models/destinationweaviatetextsplitter.md +++ b/docs/models/destinationweaviatetextsplitter.md @@ -5,21 +5,21 @@ Split text fields into chunks based on the specified method. ## Supported Types -### DestinationWeaviateBySeparator +### `models.DestinationWeaviateBySeparator` ```python -destinationWeaviateTextSplitter: models.DestinationWeaviateBySeparator = /* values here */ +value: models.DestinationWeaviateBySeparator = /* values here */ ``` -### DestinationWeaviateByMarkdownHeader +### `models.DestinationWeaviateByMarkdownHeader` ```python -destinationWeaviateTextSplitter: models.DestinationWeaviateByMarkdownHeader = /* values here */ +value: models.DestinationWeaviateByMarkdownHeader = /* values here */ ``` -### DestinationWeaviateByProgrammingLanguage +### `models.DestinationWeaviateByProgrammingLanguage` ```python -destinationWeaviateTextSplitter: models.DestinationWeaviateByProgrammingLanguage = /* values here */ +value: models.DestinationWeaviateByProgrammingLanguage = /* values here */ ``` diff --git a/docs/models/destinationyellowbricksshtunnelmethod.md b/docs/models/destinationyellowbricksshtunnelmethod.md index 7f795f68..f2f3c366 100644 --- a/docs/models/destinationyellowbricksshtunnelmethod.md +++ b/docs/models/destinationyellowbricksshtunnelmethod.md @@ -5,21 +5,21 @@ Whether to initiate an SSH tunnel before connecting to the database, and if so, ## Supported Types -### DestinationYellowbrickNoTunnel +### `models.DestinationYellowbrickNoTunnel` ```python -destinationYellowbrickSSHTunnelMethod: models.DestinationYellowbrickNoTunnel = /* values here */ +value: models.DestinationYellowbrickNoTunnel = /* values here */ ``` -### DestinationYellowbrickSSHKeyAuthentication +### `models.DestinationYellowbrickSSHKeyAuthentication` ```python -destinationYellowbrickSSHTunnelMethod: models.DestinationYellowbrickSSHKeyAuthentication = /* values here */ +value: models.DestinationYellowbrickSSHKeyAuthentication = /* values here */ ``` -### DestinationYellowbrickPasswordAuthentication +### `models.DestinationYellowbrickPasswordAuthentication` ```python -destinationYellowbrickSSHTunnelMethod: models.DestinationYellowbrickPasswordAuthentication = /* values here */ +value: models.DestinationYellowbrickPasswordAuthentication = /* values here */ ``` diff --git a/docs/models/destinationyellowbricksslmodes.md b/docs/models/destinationyellowbricksslmodes.md index b6ded302..8eae5469 100644 --- a/docs/models/destinationyellowbricksslmodes.md +++ b/docs/models/destinationyellowbricksslmodes.md @@ -12,39 +12,39 @@ SSL connection modes. 
## Supported Types -### DestinationYellowbrickDisable +### `models.DestinationYellowbrickDisable` ```python -destinationYellowbrickSSLModes: models.DestinationYellowbrickDisable = /* values here */ +value: models.DestinationYellowbrickDisable = /* values here */ ``` -### DestinationYellowbrickAllow +### `models.DestinationYellowbrickAllow` ```python -destinationYellowbrickSSLModes: models.DestinationYellowbrickAllow = /* values here */ +value: models.DestinationYellowbrickAllow = /* values here */ ``` -### DestinationYellowbrickPrefer +### `models.DestinationYellowbrickPrefer` ```python -destinationYellowbrickSSLModes: models.DestinationYellowbrickPrefer = /* values here */ +value: models.DestinationYellowbrickPrefer = /* values here */ ``` -### DestinationYellowbrickRequire +### `models.DestinationYellowbrickRequire` ```python -destinationYellowbrickSSLModes: models.DestinationYellowbrickRequire = /* values here */ +value: models.DestinationYellowbrickRequire = /* values here */ ``` -### DestinationYellowbrickVerifyCa +### `models.DestinationYellowbrickVerifyCa` ```python -destinationYellowbrickSSLModes: models.DestinationYellowbrickVerifyCa = /* values here */ +value: models.DestinationYellowbrickVerifyCa = /* values here */ ``` -### DestinationYellowbrickVerifyFull +### `models.DestinationYellowbrickVerifyFull` ```python -destinationYellowbrickSSLModes: models.DestinationYellowbrickVerifyFull = /* values here */ +value: models.DestinationYellowbrickVerifyFull = /* values here */ ``` diff --git a/docs/models/detectchangeswithxminsystemcolumn.md b/docs/models/detectchangeswithxminsystemcolumn.md index 29f87320..017e6819 100644 --- a/docs/models/detectchangeswithxminsystemcolumn.md +++ b/docs/models/detectchangeswithxminsystemcolumn.md @@ -1,6 +1,6 @@ # DetectChangesWithXminSystemColumn -Recommended - Incrementally reads new inserts and updates via Postgres Xmin system column. Only recommended for tables up to 500GB. +Recommended - Incrementally reads new inserts and updates via Postgres Xmin system column. Suitable for databases that have low transaction pressure. 
## Fields diff --git a/docs/models/dimensionsfilter.md b/docs/models/dimensionsfilter.md index 62005abd..8ceff45a 100644 --- a/docs/models/dimensionsfilter.md +++ b/docs/models/dimensionsfilter.md @@ -5,27 +5,27 @@ Dimensions filter ## Supported Types -### AndGroup +### `models.AndGroup` ```python -dimensionsFilter: models.AndGroup = /* values here */ +value: models.AndGroup = /* values here */ ``` -### OrGroup +### `models.OrGroup` ```python -dimensionsFilter: models.OrGroup = /* values here */ +value: models.OrGroup = /* values here */ ``` -### NotExpression +### `models.NotExpression` ```python -dimensionsFilter: models.NotExpression = /* values here */ +value: models.NotExpression = /* values here */ ``` -### Filter +### `models.Filter` ```python -dimensionsFilter: models.Filter = /* values here */ +value: models.Filter = /* values here */ ``` diff --git a/docs/models/embedding.md b/docs/models/embedding.md index 8123ed4d..44a48151 100644 --- a/docs/models/embedding.md +++ b/docs/models/embedding.md @@ -5,33 +5,33 @@ Embedding configuration ## Supported Types -### OpenAI +### `models.OpenAI` ```python -embedding: models.OpenAI = /* values here */ +value: models.OpenAI = /* values here */ ``` -### Cohere +### `models.Cohere` ```python -embedding: models.Cohere = /* values here */ +value: models.Cohere = /* values here */ ``` -### Fake +### `models.Fake` ```python -embedding: models.Fake = /* values here */ +value: models.Fake = /* values here */ ``` -### AzureOpenAI +### `models.AzureOpenAI` ```python -embedding: models.AzureOpenAI = /* values here */ +value: models.AzureOpenAI = /* values here */ ``` -### OpenAICompatible +### `models.OpenAICompatible` ```python -embedding: models.OpenAICompatible = /* values here */ +value: models.OpenAICompatible = /* values here */ ``` diff --git a/docs/models/encryption.md b/docs/models/encryption.md index 160221fb..23121bd8 100644 --- a/docs/models/encryption.md +++ b/docs/models/encryption.md @@ -5,15 +5,15 @@ The encryption method with is used when communicating with the database. 
## Supported Types -### NativeNetworkEncryptionNNE +### `models.NativeNetworkEncryptionNNE` ```python -encryption: models.NativeNetworkEncryptionNNE = /* values here */ +value: models.NativeNetworkEncryptionNNE = /* values here */ ``` -### TLSEncryptedVerifyCertificate +### `models.TLSEncryptedVerifyCertificate` ```python -encryption: models.TLSEncryptedVerifyCertificate = /* values here */ +value: models.TLSEncryptedVerifyCertificate = /* values here */ ``` diff --git a/docs/models/encryptiontype.md b/docs/models/encryptiontype.md deleted file mode 100644 index 65b39556..00000000 --- a/docs/models/encryptiontype.md +++ /dev/null @@ -1,8 +0,0 @@ -# EncryptionType - - -## Values - -| Name | Value | -| ------ | ------ | -| `NONE` | none | \ No newline at end of file diff --git a/docs/models/facebookmarketing.md b/docs/models/facebookmarketing.md index a346beb2..74abee47 100644 --- a/docs/models/facebookmarketing.md +++ b/docs/models/facebookmarketing.md @@ -3,7 +3,6 @@ ## Fields -| Field | Type | Required | Description | -| ------------------------------------ | ------------------------------------ | ------------------------------------ | ------------------------------------ | -| `client_id` | *Optional[str]* | :heavy_minus_sign: | The Client Id for your OAuth app | -| `client_secret` | *Optional[str]* | :heavy_minus_sign: | The Client Secret for your OAuth app | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------ | +| `credentials` | [Optional[models.FacebookMarketingCredentials]](../models/facebookmarketingcredentials.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/facebookmarketingcredentials.md b/docs/models/facebookmarketingcredentials.md new file mode 100644 index 00000000..381a19ea --- /dev/null +++ b/docs/models/facebookmarketingcredentials.md @@ -0,0 +1,9 @@ +# FacebookMarketingCredentials + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------ | ------------------------------------ | ------------------------------------ | ------------------------------------ | +| `client_id` | *Optional[str]* | :heavy_minus_sign: | The Client Id for your OAuth app | +| `client_secret` | *Optional[str]* | :heavy_minus_sign: | The Client Secret for your OAuth app | \ No newline at end of file diff --git a/docs/models/filterappliedwhilefetchingrecordsbasedonattributekeyandattributevaluewhichwillbeappendedontherequestbody.md b/docs/models/filterappliedwhilefetchingrecordsbasedonattributekeyandattributevaluewhichwillbeappendedontherequestbody.md new file mode 100644 index 00000000..b834fb50 --- /dev/null +++ b/docs/models/filterappliedwhilefetchingrecordsbasedonattributekeyandattributevaluewhichwillbeappendedontherequestbody.md @@ -0,0 +1,9 @@ +# FilterAppliedWhileFetchingRecordsBasedOnAttributeKeyAndAttributeValueWhichWillBeAppendedOnTheRequestBody + + +## Fields + +| Field | Type | Required | Description | Example | +| ------------------------ | ------------------------ | ------------------------ | ------------------------ | ------------------------ | +| `attribute_key` | *Optional[str]* | :heavy_minus_sign: | N/A | EventName | +| 
`attribute_value` | *Optional[str]* | :heavy_minus_sign: | N/A | ListInstanceAssociations | \ No newline at end of file diff --git a/docs/models/fleetio.md b/docs/models/fleetio.md new file mode 100644 index 00000000..f6322eb0 --- /dev/null +++ b/docs/models/fleetio.md @@ -0,0 +1,8 @@ +# Fleetio + + +## Values + +| Name | Value | +| --------- | --------- | +| `FLEETIO` | fleetio | \ No newline at end of file diff --git a/docs/models/format.md b/docs/models/format.md index 3d16ca67..6259259d 100644 --- a/docs/models/format.md +++ b/docs/models/format.md @@ -5,33 +5,33 @@ The configuration options that are used to alter how to read incoming files that ## Supported Types -### AvroFormat +### `models.AvroFormat` ```python -format: models.AvroFormat = /* values here */ +value: models.AvroFormat = /* values here */ ``` -### CSVFormat +### `models.CSVFormat` ```python -format: models.CSVFormat = /* values here */ +value: models.CSVFormat = /* values here */ ``` -### JsonlFormat +### `models.JsonlFormat` ```python -format: models.JsonlFormat = /* values here */ +value: models.JsonlFormat = /* values here */ ``` -### ParquetFormat +### `models.ParquetFormat` ```python -format: models.ParquetFormat = /* values here */ +value: models.ParquetFormat = /* values here */ ``` -### DocumentFileTypeFormatExperimental +### `models.DocumentFileTypeFormatExperimental` ```python -format: models.DocumentFileTypeFormatExperimental = /* values here */ +value: models.DocumentFileTypeFormatExperimental = /* values here */ ``` diff --git a/docs/models/fromvalue.md b/docs/models/fromvalue.md index 4af54e8e..6e0f7982 100644 --- a/docs/models/fromvalue.md +++ b/docs/models/fromvalue.md @@ -3,15 +3,15 @@ ## Supported Types -### SourceGoogleAnalyticsDataAPIInt64Value +### `models.SourceGoogleAnalyticsDataAPIInt64Value` ```python -fromValue: models.SourceGoogleAnalyticsDataAPIInt64Value = /* values here */ +value: models.SourceGoogleAnalyticsDataAPIInt64Value = /* values here */ ``` -### SourceGoogleAnalyticsDataAPIDoubleValue +### `models.SourceGoogleAnalyticsDataAPIDoubleValue` ```python -fromValue: models.SourceGoogleAnalyticsDataAPIDoubleValue = /* values here */ +value: models.SourceGoogleAnalyticsDataAPIDoubleValue = /* values here */ ``` diff --git a/docs/models/punkapi.md b/docs/models/goldcast.md similarity index 64% rename from docs/models/punkapi.md rename to docs/models/goldcast.md index cd46a23a..1673ba0c 100644 --- a/docs/models/punkapi.md +++ b/docs/models/goldcast.md @@ -1,8 +1,8 @@ -# PunkAPI +# Goldcast ## Values | Name | Value | | ---------- | ---------- | -| `PUNK_API` | punk-api | \ No newline at end of file +| `GOLDCAST` | goldcast | \ No newline at end of file diff --git a/docs/models/googleanalyticsv4serviceaccountonly.md b/docs/models/googleanalyticsv4serviceaccountonly.md deleted file mode 100644 index bdac6184..00000000 --- a/docs/models/googleanalyticsv4serviceaccountonly.md +++ /dev/null @@ -1,8 +0,0 @@ -# GoogleAnalyticsV4ServiceAccountOnly - - -## Values - -| Name | Value | -| ------------------------------------------ | ------------------------------------------ | -| `GOOGLE_ANALYTICS_V4_SERVICE_ACCOUNT_ONLY` | google-analytics-v4-service-account-only | \ No newline at end of file diff --git a/docs/models/insightconfig.md b/docs/models/insightconfig.md index abf8e097..66986c86 100644 --- a/docs/models/insightconfig.md +++ b/docs/models/insightconfig.md @@ -5,16 +5,16 @@ Config for custom insights ## Fields -| Field | Type | Required | Description | Example | -| 
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `name` | *str* | :heavy_check_mark: | The name value of insight | | -| `action_breakdowns` | List[[models.ValidActionBreakdowns](../models/validactionbreakdowns.md)] | :heavy_minus_sign: | A list of chosen action_breakdowns for action_breakdowns | | -| `action_report_time` | [Optional[models.SourceFacebookMarketingActionReportTime]](../models/sourcefacebookmarketingactionreporttime.md) | :heavy_minus_sign: | Determines the report time of action stats. For example, if a person saw the ad on Jan 1st but converted on Jan 2nd, when you query the API with action_report_time=impression, you see a conversion on Jan 1st. When you query the API with action_report_time=conversion, you see a conversion on Jan 2nd. | | -| `breakdowns` | List[[models.ValidBreakdowns](../models/validbreakdowns.md)] | :heavy_minus_sign: | A list of chosen breakdowns for breakdowns | | -| `end_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | The date until which you'd like to replicate data for this stream, in the format YYYY-MM-DDT00:00:00Z. All data generated between the start date and this end date will be replicated. Not setting this option will result in always syncing the latest data. 
| 2017-01-26T00:00:00Z | -| `fields` | List[[models.SourceFacebookMarketingValidEnums](../models/sourcefacebookmarketingvalidenums.md)] | :heavy_minus_sign: | A list of chosen fields for fields parameter | | -| `insights_job_timeout` | *Optional[int]* | :heavy_minus_sign: | The insights job timeout | | -| `insights_lookback_window` | *Optional[int]* | :heavy_minus_sign: | The attribution window | | -| `level` | [Optional[models.Level]](../models/level.md) | :heavy_minus_sign: | Chosen level for API | | -| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | The date from which you'd like to replicate data for this stream, in the format YYYY-MM-DDT00:00:00Z. | 2017-01-25T00:00:00Z | -| `time_increment` | *Optional[int]* | :heavy_minus_sign: | Time window in days by which to aggregate statistics. The sync will be chunked into N day intervals, where N is the number of days you specified. For example, if you set this value to 7, then all statistics will be reported as 7-day aggregates by starting from the start_date. If the start and end dates are October 1st and October 30th, then the connector will output 5 records: 01 - 06, 07 - 13, 14 - 20, 21 - 27, and 28 - 30 (3 days only). | | \ No newline at end of file +| Field | Type | Required | Description | Example | +| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `name` | *str* | :heavy_check_mark: | The name value of insight | | +| `action_breakdowns` | List[[models.ValidActionBreakdowns](../models/validactionbreakdowns.md)] | :heavy_minus_sign: | A list of chosen action_breakdowns for action_breakdowns | | +| `action_report_time` | [Optional[models.SourceFacebookMarketingActionReportTime]](../models/sourcefacebookmarketingactionreporttime.md) | :heavy_minus_sign: | Determines the report time of action stats. For example, if a person saw the ad on Jan 1st but converted on Jan 2nd, when you query the API with action_report_time=impression, you see a conversion on Jan 1st. When you query the API with action_report_time=conversion, you see a conversion on Jan 2nd. | | +| `breakdowns` | List[[models.ValidBreakdowns](../models/validbreakdowns.md)] | :heavy_minus_sign: | A list of chosen breakdowns for breakdowns | | +| `end_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | The date until which you'd like to replicate data for this stream, in the format YYYY-MM-DDT00:00:00Z. All data generated between the start date and this end date will be replicated. Not setting this option will result in always syncing the latest data. | 2017-01-26T00:00:00Z | +| `fields` | List[[models.SourceFacebookMarketingValidEnums](../models/sourcefacebookmarketingvalidenums.md)] | :heavy_minus_sign: | A list of chosen fields for fields parameter | | +| `insights_job_timeout` | *Optional[int]* | :heavy_minus_sign: | The insights job timeout | | +| `insights_lookback_window` | *Optional[int]* | :heavy_minus_sign: | The attribution window | | +| `level` | [Optional[models.Level]](../models/level.md) | :heavy_minus_sign: | Chosen level for API | | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | The date from which you'd like to replicate data for this stream, in the format YYYY-MM-DDT00:00:00Z. | 2017-01-25T00:00:00Z | +| `time_increment` | *Optional[int]* | :heavy_minus_sign: | Time window in days by which to aggregate statistics. The sync will be chunked into N day intervals, where N is the number of days you specified. For example, if you set this value to 7, then all statistics will be reported as 7-day aggregates by starting from the start_date. If the start and end dates are October 1st and October 30th, then the connector will output 5 records: 01 - 06, 07 - 13, 14 - 20, 21 - 27, and 28 - 30 (3 days only). The minimum allowed value for this field is 1, and the maximum is 89. | | \ No newline at end of file diff --git a/docs/models/jobtypeenum.md b/docs/models/jobtypeenum.md index 4603c8bd..cb150e12 100644 --- a/docs/models/jobtypeenum.md +++ b/docs/models/jobtypeenum.md @@ -5,7 +5,9 @@ Enum that describes the different types of jobs that the platform runs. 
## Values -| Name | Value | -| ------- | ------- | -| `SYNC` | sync | -| `RESET` | reset | \ No newline at end of file +| Name | Value | +| --------- | --------- | +| `SYNC` | sync | +| `RESET` | reset | +| `REFRESH` | refresh | +| `CLEAR` | clear | \ No newline at end of file diff --git a/docs/models/langchain.md b/docs/models/langchain.md deleted file mode 100644 index 92275770..00000000 --- a/docs/models/langchain.md +++ /dev/null @@ -1,8 +0,0 @@ -# Langchain - - -## Values - -| Name | Value | -| ----------- | ----------- | -| `LANGCHAIN` | langchain | \ No newline at end of file diff --git a/docs/models/loadingmethod.md b/docs/models/loadingmethod.md index 5c0ff2c6..d83cb3cb 100644 --- a/docs/models/loadingmethod.md +++ b/docs/models/loadingmethod.md @@ -5,15 +5,15 @@ The way data will be uploaded to BigQuery. ## Supported Types -### GCSStaging +### `models.GCSStaging` ```python -loadingMethod: models.GCSStaging = /* values here */ +value: models.GCSStaging = /* values here */ ``` -### StandardInserts +### `models.StandardInserts` ```python -loadingMethod: models.StandardInserts = /* values here */ +value: models.StandardInserts = /* values here */ ``` diff --git a/docs/models/metricsfilter.md b/docs/models/metricsfilter.md index 97518e33..21b633b9 100644 --- a/docs/models/metricsfilter.md +++ b/docs/models/metricsfilter.md @@ -5,27 +5,27 @@ Metrics filter ## Supported Types -### SourceGoogleAnalyticsDataAPIAndGroup +### `models.SourceGoogleAnalyticsDataAPIAndGroup` ```python -metricsFilter: models.SourceGoogleAnalyticsDataAPIAndGroup = /* values here */ +value: models.SourceGoogleAnalyticsDataAPIAndGroup = /* values here */ ``` -### SourceGoogleAnalyticsDataAPIOrGroup +### `models.SourceGoogleAnalyticsDataAPIOrGroup` ```python -metricsFilter: models.SourceGoogleAnalyticsDataAPIOrGroup = /* values here */ +value: models.SourceGoogleAnalyticsDataAPIOrGroup = /* values here */ ``` -### SourceGoogleAnalyticsDataAPINotExpression +### `models.SourceGoogleAnalyticsDataAPINotExpression` ```python -metricsFilter: models.SourceGoogleAnalyticsDataAPINotExpression = /* values here */ +value: models.SourceGoogleAnalyticsDataAPINotExpression = /* values here */ ``` -### SourceGoogleAnalyticsDataAPIFilter +### `models.SourceGoogleAnalyticsDataAPIFilter` ```python -metricsFilter: models.SourceGoogleAnalyticsDataAPIFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPIFilter = /* values here */ ``` diff --git a/docs/models/mockcatalog.md b/docs/models/mockcatalog.md index 8eb995c1..a21feb72 100644 --- a/docs/models/mockcatalog.md +++ b/docs/models/mockcatalog.md @@ -3,15 +3,15 @@ ## Supported Types -### SingleSchema +### `models.SingleSchema` ```python -mockCatalog: models.SingleSchema = /* values here */ +value: models.SingleSchema = /* values here */ ``` -### MultiSchema +### `models.MultiSchema` ```python -mockCatalog: models.MultiSchema = /* values here */ +value: models.MultiSchema = /* values here */ ``` diff --git a/docs/models/mongodbinstancetype.md b/docs/models/mongodbinstancetype.md index f71d7846..ddeda39d 100644 --- a/docs/models/mongodbinstancetype.md +++ b/docs/models/mongodbinstancetype.md @@ -5,21 +5,21 @@ MongoDb instance to connect to. 
For MongoDB Atlas and Replica Set TLS connection ## Supported Types -### StandaloneMongoDbInstance +### `models.StandaloneMongoDbInstance` ```python -mongoDbInstanceType: models.StandaloneMongoDbInstance = /* values here */ +value: models.StandaloneMongoDbInstance = /* values here */ ``` -### ReplicaSet +### `models.ReplicaSet` ```python -mongoDbInstanceType: models.ReplicaSet = /* values here */ +value: models.ReplicaSet = /* values here */ ``` -### MongoDBAtlas +### `models.MongoDBAtlas` ```python -mongoDbInstanceType: models.MongoDBAtlas = /* values here */ +value: models.MongoDBAtlas = /* values here */ ``` diff --git a/docs/models/noencryption.md b/docs/models/noencryption.md deleted file mode 100644 index 56318d40..00000000 --- a/docs/models/noencryption.md +++ /dev/null @@ -1,10 +0,0 @@ -# NoEncryption - -Staging data will be stored in plaintext. - - -## Fields - -| Field | Type | Required | Description | -| -------------------------------------------------------------- | -------------------------------------------------------------- | -------------------------------------------------------------- | -------------------------------------------------------------- | -| `encryption_type` | [Optional[models.EncryptionType]](../models/encryptiontype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/oauthcredentialsconfiguration.md b/docs/models/oauthcredentialsconfiguration.md index 0f973255..d007362a 100644 --- a/docs/models/oauthcredentialsconfiguration.md +++ b/docs/models/oauthcredentialsconfiguration.md @@ -5,267 +5,267 @@ The values required to configure the source. ## Supported Types -### Airtable +### `models.Airtable` ```python -oAuthCredentialsConfiguration: models.Airtable = /* values here */ +value: models.Airtable = /* values here */ ``` -### AmazonAds +### `models.AmazonAds` ```python -oAuthCredentialsConfiguration: models.AmazonAds = /* values here */ +value: models.AmazonAds = /* values here */ ``` -### AmazonSellerPartner +### `models.AmazonSellerPartner` ```python -oAuthCredentialsConfiguration: models.AmazonSellerPartner = /* values here */ +value: models.AmazonSellerPartner = /* values here */ ``` -### Asana +### `models.Asana` ```python -oAuthCredentialsConfiguration: models.Asana = /* values here */ +value: models.Asana = /* values here */ ``` -### AzureBlobStorage +### `models.AzureBlobStorage` ```python -oAuthCredentialsConfiguration: models.AzureBlobStorage = /* values here */ +value: models.AzureBlobStorage = /* values here */ ``` -### BingAds +### `models.BingAds` ```python -oAuthCredentialsConfiguration: models.BingAds = /* values here */ +value: models.BingAds = /* values here */ ``` -### FacebookMarketing +### `models.FacebookMarketing` ```python -oAuthCredentialsConfiguration: models.FacebookMarketing = /* values here */ +value: models.FacebookMarketing = /* values here */ ``` -### Github +### `models.Github` ```python -oAuthCredentialsConfiguration: models.Github = /* values here */ +value: models.Github = /* values here */ ``` -### Gitlab +### `models.Gitlab` ```python -oAuthCredentialsConfiguration: models.Gitlab = /* values here */ +value: models.Gitlab = /* values here */ ``` -### GoogleAds +### `models.GoogleAds` ```python -oAuthCredentialsConfiguration: models.GoogleAds = /* values here */ +value: models.GoogleAds = /* values here */ ``` -### GoogleAnalyticsDataAPI +### `models.GoogleAnalyticsDataAPI` ```python -oAuthCredentialsConfiguration: models.GoogleAnalyticsDataAPI = /* values here */ +value: 
models.GoogleAnalyticsDataAPI = /* values here */ ``` -### GoogleDrive +### `models.GoogleDrive` ```python -oAuthCredentialsConfiguration: models.GoogleDrive = /* values here */ +value: models.GoogleDrive = /* values here */ ``` -### GoogleSearchConsole +### `models.GoogleSearchConsole` ```python -oAuthCredentialsConfiguration: models.GoogleSearchConsole = /* values here */ +value: models.GoogleSearchConsole = /* values here */ ``` -### GoogleSheets +### `models.GoogleSheets` ```python -oAuthCredentialsConfiguration: models.GoogleSheets = /* values here */ +value: models.GoogleSheets = /* values here */ ``` -### Hubspot +### `models.Hubspot` ```python -oAuthCredentialsConfiguration: models.Hubspot = /* values here */ +value: models.Hubspot = /* values here */ ``` -### Instagram +### `models.Instagram` ```python -oAuthCredentialsConfiguration: models.Instagram = /* values here */ +value: models.Instagram = /* values here */ ``` -### Intercom +### `models.Intercom` ```python -oAuthCredentialsConfiguration: models.Intercom = /* values here */ +value: models.Intercom = /* values here */ ``` -### LeverHiring +### `models.LeverHiring` ```python -oAuthCredentialsConfiguration: models.LeverHiring = /* values here */ +value: models.LeverHiring = /* values here */ ``` -### LinkedinAds +### `models.LinkedinAds` ```python -oAuthCredentialsConfiguration: models.LinkedinAds = /* values here */ +value: models.LinkedinAds = /* values here */ ``` -### Mailchimp +### `models.Mailchimp` ```python -oAuthCredentialsConfiguration: models.Mailchimp = /* values here */ +value: models.Mailchimp = /* values here */ ``` -### MicrosoftOnedrive +### `models.MicrosoftOnedrive` ```python -oAuthCredentialsConfiguration: models.MicrosoftOnedrive = /* values here */ +value: models.MicrosoftOnedrive = /* values here */ ``` -### MicrosoftSharepoint +### `models.MicrosoftSharepoint` ```python -oAuthCredentialsConfiguration: models.MicrosoftSharepoint = /* values here */ +value: models.MicrosoftSharepoint = /* values here */ ``` -### MicrosoftTeams +### `models.MicrosoftTeams` ```python -oAuthCredentialsConfiguration: models.MicrosoftTeams = /* values here */ +value: models.MicrosoftTeams = /* values here */ ``` -### Monday +### `models.Monday` ```python -oAuthCredentialsConfiguration: models.Monday = /* values here */ +value: models.Monday = /* values here */ ``` -### Notion +### `models.Notion` ```python -oAuthCredentialsConfiguration: models.Notion = /* values here */ +value: models.Notion = /* values here */ ``` -### Pinterest +### `models.Pinterest` ```python -oAuthCredentialsConfiguration: models.Pinterest = /* values here */ +value: models.Pinterest = /* values here */ ``` -### Retently +### `models.Retently` ```python -oAuthCredentialsConfiguration: models.Retently = /* values here */ +value: models.Retently = /* values here */ ``` -### Salesforce +### `models.Salesforce` ```python -oAuthCredentialsConfiguration: models.Salesforce = /* values here */ +value: models.Salesforce = /* values here */ ``` -### Shopify +### `models.Shopify` ```python -oAuthCredentialsConfiguration: models.Shopify = /* values here */ +value: models.Shopify = /* values here */ ``` -### Slack +### `models.Slack` ```python -oAuthCredentialsConfiguration: models.Slack = /* values here */ +value: models.Slack = /* values here */ ``` -### Smartsheets +### `models.Smartsheets` ```python -oAuthCredentialsConfiguration: models.Smartsheets = /* values here */ +value: models.Smartsheets = /* values here */ ``` -### SnapchatMarketing +### 
`models.SnapchatMarketing` ```python -oAuthCredentialsConfiguration: models.SnapchatMarketing = /* values here */ +value: models.SnapchatMarketing = /* values here */ ``` -### Snowflake +### `models.Snowflake` ```python -oAuthCredentialsConfiguration: models.Snowflake = /* values here */ +value: models.Snowflake = /* values here */ ``` -### Square +### `models.Square` ```python -oAuthCredentialsConfiguration: models.Square = /* values here */ +value: models.Square = /* values here */ ``` -### Strava +### `models.Strava` ```python -oAuthCredentialsConfiguration: models.Strava = /* values here */ +value: models.Strava = /* values here */ ``` -### Surveymonkey +### `models.Surveymonkey` ```python -oAuthCredentialsConfiguration: models.Surveymonkey = /* values here */ +value: models.Surveymonkey = /* values here */ ``` -### TiktokMarketing +### `models.TiktokMarketing` ```python -oAuthCredentialsConfiguration: models.TiktokMarketing = /* values here */ +value: models.TiktokMarketing = /* values here */ ``` -### +### `Any` ```python -oAuthCredentialsConfiguration: Any = /* values here */ +value: Any = /* values here */ ``` -### Typeform +### `models.Typeform` ```python -oAuthCredentialsConfiguration: models.Typeform = /* values here */ +value: models.Typeform = /* values here */ ``` -### YoutubeAnalytics +### `models.YoutubeAnalytics` ```python -oAuthCredentialsConfiguration: models.YoutubeAnalytics = /* values here */ +value: models.YoutubeAnalytics = /* values here */ ``` -### ZendeskChat +### `models.ZendeskChat` ```python -oAuthCredentialsConfiguration: models.ZendeskChat = /* values here */ +value: models.ZendeskChat = /* values here */ ``` -### ZendeskSunshine +### `models.ZendeskSunshine` ```python -oAuthCredentialsConfiguration: models.ZendeskSunshine = /* values here */ +value: models.ZendeskSunshine = /* values here */ ``` -### ZendeskSupport +### `models.ZendeskSupport` ```python -oAuthCredentialsConfiguration: models.ZendeskSupport = /* values here */ +value: models.ZendeskSupport = /* values here */ ``` -### ZendeskTalk +### `models.ZendeskTalk` ```python -oAuthCredentialsConfiguration: models.ZendeskTalk = /* values here */ +value: models.ZendeskTalk = /* values here */ ``` diff --git a/docs/models/organizationresponse.md b/docs/models/organizationresponse.md new file mode 100644 index 00000000..e45b002f --- /dev/null +++ b/docs/models/organizationresponse.md @@ -0,0 +1,12 @@ +# OrganizationResponse + +Provides details of a single organization for a user. + + +## Fields + +| Field | Type | Required | Description | +| ------------------- | ------------------- | ------------------- | ------------------- | +| `email` | *str* | :heavy_check_mark: | N/A | +| `organization_id` | *str* | :heavy_check_mark: | N/A | +| `organization_name` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/organizationsresponse.md b/docs/models/organizationsresponse.md new file mode 100644 index 00000000..09b852e8 --- /dev/null +++ b/docs/models/organizationsresponse.md @@ -0,0 +1,10 @@ +# OrganizationsResponse + +List/Array of multiple organizations. 
+ + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | +| `data` | List[[models.OrganizationResponse](../models/organizationresponse.md)] | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/outputformat.md b/docs/models/outputformat.md index 60307129..d8aae9b4 100644 --- a/docs/models/outputformat.md +++ b/docs/models/outputformat.md @@ -5,15 +5,15 @@ Output data format ## Supported Types -### CSVCommaSeparatedValues +### `models.CSVCommaSeparatedValues` ```python -outputFormat: models.CSVCommaSeparatedValues = /* values here */ +value: models.CSVCommaSeparatedValues = /* values here */ ``` -### DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON +### `models.DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON` ```python -outputFormat: models.DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON = /* values here */ +value: models.DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON = /* values here */ ``` diff --git a/docs/models/outputformatwildcard.md b/docs/models/outputformatwildcard.md index 6638398f..eccb2f94 100644 --- a/docs/models/outputformatwildcard.md +++ b/docs/models/outputformatwildcard.md @@ -5,15 +5,15 @@ Format of the data output. ## Supported Types -### JSONLinesNewlineDelimitedJSON +### `models.JSONLinesNewlineDelimitedJSON` ```python -outputFormatWildcard: models.JSONLinesNewlineDelimitedJSON = /* values here */ +value: models.JSONLinesNewlineDelimitedJSON = /* values here */ ``` -### ParquetColumnarStorage +### `models.ParquetColumnarStorage` ```python -outputFormatWildcard: models.ParquetColumnarStorage = /* values here */ +value: models.ParquetColumnarStorage = /* values here */ ``` diff --git a/docs/models/processing.md b/docs/models/processing.md index 827afa06..393264f7 100644 --- a/docs/models/processing.md +++ b/docs/models/processing.md @@ -5,9 +5,9 @@ Processing configuration ## Supported Types -### Local +### `models.Local` ```python -processing: models.Local = /* values here */ +value: models.Local = /* values here */ ``` diff --git a/docs/models/queries.md b/docs/models/queries.md new file mode 100644 index 00000000..3a43d29c --- /dev/null +++ b/docs/models/queries.md @@ -0,0 +1,10 @@ +# Queries + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | +| `data_source` | [models.SourceDatadogDataSource](../models/sourcedatadogdatasource.md) | :heavy_check_mark: | A data source that is powered by the platform. | +| `name` | *str* | :heavy_check_mark: | The variable name for use in queries. | +| `query` | *str* | :heavy_check_mark: | A classic query string. 
| \ No newline at end of file diff --git a/docs/models/readchangesusingbinarylogcdc.md b/docs/models/readchangesusingbinarylogcdc.md index fd2d8089..f7fe93f4 100644 --- a/docs/models/readchangesusingbinarylogcdc.md +++ b/docs/models/readchangesusingbinarylogcdc.md @@ -7,6 +7,7 @@ | Field | Type | Required | Description | | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `initial_load_timeout_hours` | *Optional[int]* | :heavy_minus_sign: | The amount of time an initial load is allowed to continue for before catching up on CDC logs. | | `initial_waiting_seconds` | *Optional[int]* | :heavy_minus_sign: | The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time. | | `invalid_cdc_cursor_position_behavior` | [Optional[models.SourceMysqlInvalidCDCPositionBehaviorAdvanced]](../models/sourcemysqlinvalidcdcpositionbehavioradvanced.md) | :heavy_minus_sign: | Determines whether Airbyte should fail or re-sync data in case of an stale/invalid cursor value into the WAL. If 'Fail sync' is chosen, a user will have to manually reset the connection before being able to continue syncing data. If 'Re-sync data' is chosen, Airbyte will automatically trigger a refresh but could lead to higher cloud costs and data loss. 
| | `method` | [models.SourceMysqlMethod](../models/sourcemysqlmethod.md) | :heavy_check_mark: | N/A | diff --git a/docs/models/serviceaccountkeyauthentication.md b/docs/models/serviceaccountkeyauthentication.md index 531606b1..dfd4fc91 100644 --- a/docs/models/serviceaccountkeyauthentication.md +++ b/docs/models/serviceaccountkeyauthentication.md @@ -3,7 +3,7 @@ ## Fields -| Field | Type | Required | Description | Example | -| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `credentials_json` | *str* | :heavy_check_mark: | The JSON key linked to the service account used for authorization. For steps on obtaining this key, refer to the setup guide. | { "type": "service_account", "project_id": YOUR_PROJECT_ID, "private_key_id": YOUR_PRIVATE_KEY, ... } | -| `auth_type` | [Optional[models.SourceGoogleAnalyticsDataAPISchemasAuthType]](../models/sourcegoogleanalyticsdataapischemasauthtype.md) | :heavy_minus_sign: | N/A | | \ No newline at end of file +| Field | Type | Required | Description | +| ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `access_token` | *str* | :heavy_check_mark: | The value of the generated access token. 
From your App’s Dashboard, click on "Marketing API" then "Tools". Select permissions ads_management, ads_read, read_insights, business_management. Then click on "Get token". See the docs for more information. | +| `auth_type` | [Optional[models.SourceFacebookMarketingSchemasAuthType]](../models/sourcefacebookmarketingschemasauthtype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/shopifyauthorizationmethod.md b/docs/models/shopifyauthorizationmethod.md index 933af2a7..df47ed0e 100644 --- a/docs/models/shopifyauthorizationmethod.md +++ b/docs/models/shopifyauthorizationmethod.md @@ -5,15 +5,15 @@ The authorization method to use to retrieve data from Shopify ## Supported Types -### SourceShopifyOAuth20 +### `models.SourceShopifyOAuth20` ```python -shopifyAuthorizationMethod: models.SourceShopifyOAuth20 = /* values here */ +value: models.SourceShopifyOAuth20 = /* values here */ ``` -### APIPassword +### `models.APIPassword` ```python -shopifyAuthorizationMethod: models.APIPassword = /* values here */ +value: models.APIPassword = /* values here */ ``` diff --git a/docs/models/site.md b/docs/models/site.md new file mode 100644 index 00000000..13b20070 --- /dev/null +++ b/docs/models/site.md @@ -0,0 +1,14 @@ +# Site + +The site where Datadog data resides in. + + +## Values + +| Name | Value | +| ------------------- | ------------------- | +| `DATADOGHQ_COM` | datadoghq.com | +| `US3_DATADOGHQ_COM` | us3.datadoghq.com | +| `US5_DATADOGHQ_COM` | us5.datadoghq.com | +| `DATADOGHQ_EU` | datadoghq.eu | +| `DDOG_GOV_COM` | ddog-gov.com | \ No newline at end of file diff --git a/docs/models/destinationsnowflakecortexindexing.md b/docs/models/snowflakeconnection.md similarity index 97% rename from docs/models/destinationsnowflakecortexindexing.md rename to docs/models/snowflakeconnection.md index e7b0724b..83987966 100644 --- a/docs/models/destinationsnowflakecortexindexing.md +++ b/docs/models/snowflakeconnection.md @@ -1,4 +1,4 @@ -# DestinationSnowflakeCortexIndexing +# SnowflakeConnection Snowflake can be used to store vector data and retrieve embeddings. @@ -13,4 +13,4 @@ Snowflake can be used to store vector data and retrieve embeddings. | `host` | *str* | :heavy_check_mark: | Enter the account name you want to use to access the database. 
This is usually the identifier before .snowflakecomputing.com | AIRBYTE_ACCOUNT | | `role` | *str* | :heavy_check_mark: | Enter the role that you want to use to access Snowflake | AIRBYTE_ROLE | | `username` | *str* | :heavy_check_mark: | Enter the name of the user you want to use to access the database | AIRBYTE_USER | -| `warehouse` | *str* | :heavy_check_mark: | Enter the name of the warehouse that you want to sync data into | AIRBYTE_WAREHOUSE | \ No newline at end of file +| `warehouse` | *str* | :heavy_check_mark: | Enter the name of the warehouse that you want to use as a compute cluster | AIRBYTE_WAREHOUSE | \ No newline at end of file diff --git a/docs/models/sourceairtableauthentication.md b/docs/models/sourceairtableauthentication.md index 3c196e14..a756120d 100644 --- a/docs/models/sourceairtableauthentication.md +++ b/docs/models/sourceairtableauthentication.md @@ -3,15 +3,15 @@ ## Supported Types -### SourceAirtableOAuth20 +### `models.SourceAirtableOAuth20` ```python -sourceAirtableAuthentication: models.SourceAirtableOAuth20 = /* values here */ +value: models.SourceAirtableOAuth20 = /* values here */ ``` -### PersonalAccessToken +### `models.PersonalAccessToken` ```python -sourceAirtableAuthentication: models.PersonalAccessToken = /* values here */ +value: models.PersonalAccessToken = /* values here */ ``` diff --git a/docs/models/sourceamazonsellerpartner.md b/docs/models/sourceamazonsellerpartner.md index 12bff9ee..4c1ca154 100644 --- a/docs/models/sourceamazonsellerpartner.md +++ b/docs/models/sourceamazonsellerpartner.md @@ -3,17 +3,17 @@ ## Fields -| Field | Type | Required | Description | Example | -| --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `lwa_app_id` | *str* | :heavy_check_mark: | Your Login with Amazon Client ID. | | -| `lwa_client_secret` | *str* | :heavy_check_mark: | Your Login with Amazon Client Secret. | | -| `refresh_token` | *str* | :heavy_check_mark: | The Refresh Token obtained via OAuth flow authorization. | | -| `account_type` | [Optional[models.AWSSellerPartnerAccountType]](../models/awssellerpartneraccounttype.md) | :heavy_minus_sign: | Type of the Account you're going to authorize the Airbyte application by | | -| `auth_type` | [Optional[models.SourceAmazonSellerPartnerAuthType]](../models/sourceamazonsellerpartnerauthtype.md) | :heavy_minus_sign: | N/A | | -| `aws_environment` | [Optional[models.AWSEnvironment]](../models/awsenvironment.md) | :heavy_minus_sign: | Select the AWS Environment. 
| | -| `period_in_days` | *Optional[int]* | :heavy_minus_sign: | For syncs spanning a large date range, this option is used to request data in a smaller fixed window to improve sync reliability. This time window can be configured granularly by day. | | -| `region` | [Optional[models.AWSRegion]](../models/awsregion.md) | :heavy_minus_sign: | Select the AWS Region. | | -| `replication_end_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | UTC date and time in the format 2017-01-25T00:00:00Z. Any data after this date will not be replicated. | 2017-01-25T00:00:00Z | -| `replication_start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. If start date is not provided, the date 2 years ago from today will be used. | 2017-01-25T00:00:00Z | -| `report_options_list` | List[[models.ReportOptions](../models/reportoptions.md)] | :heavy_minus_sign: | Additional information passed to reports. This varies by report type. | | -| `source_type` | [models.SourceAmazonSellerPartnerAmazonSellerPartner](../models/sourceamazonsellerpartneramazonsellerpartner.md) | :heavy_check_mark: | N/A | | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `lwa_app_id` | *str* | :heavy_check_mark: | Your Login with Amazon Client ID. | | +| `lwa_client_secret` | *str* | :heavy_check_mark: | Your Login with Amazon Client Secret. | | +| `refresh_token` | *str* | :heavy_check_mark: | The Refresh Token obtained via OAuth flow authorization. | | +| `account_type` | [Optional[models.AWSSellerPartnerAccountType]](../models/awssellerpartneraccounttype.md) | :heavy_minus_sign: | Type of the Account you're going to authorize the Airbyte application by | | +| `auth_type` | [Optional[models.SourceAmazonSellerPartnerAuthType]](../models/sourceamazonsellerpartnerauthtype.md) | :heavy_minus_sign: | N/A | | +| `aws_environment` | [Optional[models.AWSEnvironment]](../models/awsenvironment.md) | :heavy_minus_sign: | Select the AWS Environment. | | +| `period_in_days` | *Optional[int]* | :heavy_minus_sign: | For syncs spanning a large date range, this option is used to request data in a smaller fixed window to improve sync reliability. This time window can be configured granularly by day. 
| | +| `region` | [Optional[models.AWSRegion]](../models/awsregion.md) | :heavy_minus_sign: | Select the AWS Region. | | +| `replication_end_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | UTC date and time in the format 2017-01-25T00:00:00Z. Any data after this date will not be replicated. | 2017-01-25T00:00:00Z | +| `replication_start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. If start date is not provided or older than 2 years ago from today, the date 2 years ago from today will be used. | 2017-01-25T00:00:00Z | +| `report_options_list` | List[[models.ReportOptions](../models/reportoptions.md)] | :heavy_minus_sign: | Additional information passed to reports. This varies by report type. | | +| `source_type` | [models.SourceAmazonSellerPartnerAmazonSellerPartner](../models/sourceamazonsellerpartneramazonsellerpartner.md) | :heavy_check_mark: | N/A | | \ No newline at end of file diff --git a/docs/models/sourceauth0authenticationmethod.md b/docs/models/sourceauth0authenticationmethod.md index e13064b9..7a36a667 100644 --- a/docs/models/sourceauth0authenticationmethod.md +++ b/docs/models/sourceauth0authenticationmethod.md @@ -3,15 +3,15 @@ ## Supported Types -### OAuth2ConfidentialApplication +### `models.OAuth2ConfidentialApplication` ```python -sourceAuth0AuthenticationMethod: models.OAuth2ConfidentialApplication = /* values here */ +value: models.OAuth2ConfidentialApplication = /* values here */ ``` -### OAuth2AccessToken +### `models.OAuth2AccessToken` ```python -sourceAuth0AuthenticationMethod: models.OAuth2AccessToken = /* values here */ +value: models.OAuth2AccessToken = /* values here */ ``` diff --git a/docs/models/sourceawscloudtrail.md b/docs/models/sourceawscloudtrail.md index 335449fa..f5d2db13 100644 --- a/docs/models/sourceawscloudtrail.md +++ b/docs/models/sourceawscloudtrail.md @@ -3,10 +3,11 @@ ## Fields -| Field | Type | Required | Description | Example | -| --------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `aws_key_id` | *str* | :heavy_check_mark: | AWS CloudTrail Access Key ID. See the docs for more information on how to obtain this key. | | -| `aws_region_name` | *str* | :heavy_check_mark: | The default AWS Region to use, for example, us-west-1 or us-west-2. When specifying a Region inline during client initialization, this property is named region_name. | | -| `aws_secret_key` | *str* | :heavy_check_mark: | AWS CloudTrail Access Key ID. See the docs for more information on how to obtain this key. 
| | -| `source_type` | [models.AwsCloudtrail](../models/awscloudtrail.md) | :heavy_check_mark: | N/A | | -| `start_date` | [datetime](https://docs.python.org/3/library/datetime.html#datetime-objects) | :heavy_minus_sign: | The date you would like to replicate data. Data in AWS CloudTrail is available for last 90 days only. Format: YYYY-MM-DD. | 2021-01-01 | \ No newline at end of file +| Field | Type | Required | Description | Example | +| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `aws_key_id` | *str* | :heavy_check_mark: | AWS CloudTrail Access Key ID. See the docs for more information on how to obtain this key. | | +| `aws_secret_key` | *str* | :heavy_check_mark: | AWS CloudTrail Access Key ID. See the docs for more information on how to obtain this key. | | +| `aws_region_name` | *Optional[str]* | :heavy_minus_sign: | The default AWS Region to use, for example, us-west-1 or us-west-2. When specifying a Region inline during client initialization, this property is named region_name. | | +| `lookup_attributes_filter` | [Optional[models.FilterAppliedWhileFetchingRecordsBasedOnAttributeKeyAndAttributeValueWhichWillBeAppendedOnTheRequestBody]](../models/filterappliedwhilefetchingrecordsbasedonattributekeyandattributevaluewhichwillbeappendedontherequestbody.md) | :heavy_minus_sign: | N/A | | +| `source_type` | [models.AwsCloudtrail](../models/awscloudtrail.md) | :heavy_check_mark: | N/A | | +| `start_date` | [datetime](https://docs.python.org/3/library/datetime.html#datetime-objects) | :heavy_minus_sign: | The date you would like to replicate data. Data in AWS CloudTrail is available for last 90 days only. Format: YYYY-MM-DD. 
| 2021-01-01 | \ No newline at end of file diff --git a/docs/models/sourceazureblobstorageauthentication.md b/docs/models/sourceazureblobstorageauthentication.md index 95fa5236..a3363d18 100644 --- a/docs/models/sourceazureblobstorageauthentication.md +++ b/docs/models/sourceazureblobstorageauthentication.md @@ -5,15 +5,15 @@ Credentials for connecting to the Azure Blob Storage ## Supported Types -### AuthenticateViaOauth2 +### `models.AuthenticateViaOauth2` ```python -sourceAzureBlobStorageAuthentication: models.AuthenticateViaOauth2 = /* values here */ +value: models.AuthenticateViaOauth2 = /* values here */ ``` -### AuthenticateViaStorageAccountKey +### `models.AuthenticateViaStorageAccountKey` ```python -sourceAzureBlobStorageAuthentication: models.AuthenticateViaStorageAccountKey = /* values here */ +value: models.AuthenticateViaStorageAccountKey = /* values here */ ``` diff --git a/docs/models/sourcecartauthorizationmethod.md b/docs/models/sourcecartauthorizationmethod.md index 56d7a82e..a54a18dd 100644 --- a/docs/models/sourcecartauthorizationmethod.md +++ b/docs/models/sourcecartauthorizationmethod.md @@ -3,15 +3,15 @@ ## Supported Types -### CentralAPIRouter +### `models.CentralAPIRouter` ```python -sourceCartAuthorizationMethod: models.CentralAPIRouter = /* values here */ +value: models.CentralAPIRouter = /* values here */ ``` -### SingleStoreAccessToken +### `models.SingleStoreAccessToken` ```python -sourceCartAuthorizationMethod: models.SingleStoreAccessToken = /* values here */ +value: models.SingleStoreAccessToken = /* values here */ ``` diff --git a/docs/models/sourceclazar.md b/docs/models/sourceclazar.md new file mode 100644 index 00000000..628912ad --- /dev/null +++ b/docs/models/sourceclazar.md @@ -0,0 +1,10 @@ +# SourceClazar + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------ | ------------------------------------ | ------------------------------------ | ------------------------------------ | +| `client_id` | *str* | :heavy_check_mark: | N/A | +| `client_secret` | *str* | :heavy_check_mark: | N/A | +| `source_type` | [models.Clazar](../models/clazar.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourceclickhousesshtunnelmethod.md b/docs/models/sourceclickhousesshtunnelmethod.md index 27ac3fe3..c01dc856 100644 --- a/docs/models/sourceclickhousesshtunnelmethod.md +++ b/docs/models/sourceclickhousesshtunnelmethod.md @@ -5,21 +5,21 @@ Whether to initiate an SSH tunnel before connecting to the database, and if so, ## Supported Types -### SourceClickhouseNoTunnel +### `models.SourceClickhouseNoTunnel` ```python -sourceClickhouseSSHTunnelMethod: models.SourceClickhouseNoTunnel = /* values here */ +value: models.SourceClickhouseNoTunnel = /* values here */ ``` -### SourceClickhouseSSHKeyAuthentication +### `models.SourceClickhouseSSHKeyAuthentication` ```python -sourceClickhouseSSHTunnelMethod: models.SourceClickhouseSSHKeyAuthentication = /* values here */ +value: models.SourceClickhouseSSHKeyAuthentication = /* values here */ ``` -### SourceClickhousePasswordAuthentication +### `models.SourceClickhousePasswordAuthentication` ```python -sourceClickhouseSSHTunnelMethod: models.SourceClickhousePasswordAuthentication = /* values here */ +value: models.SourceClickhousePasswordAuthentication = /* values here */ ``` diff --git a/docs/models/sourceconfiguration.md b/docs/models/sourceconfiguration.md index 144c0a49..74b01b22 100644 --- a/docs/models/sourceconfiguration.md +++ 
b/docs/models/sourceconfiguration.md @@ -5,1167 +5,1179 @@ The values required to configure the source. ## Supported Types -### SourceAha +### `models.SourceAha` ```python -sourceConfiguration: models.SourceAha = /* values here */ +value: models.SourceAha = /* values here */ ``` -### SourceAircall +### `models.SourceAircall` ```python -sourceConfiguration: models.SourceAircall = /* values here */ +value: models.SourceAircall = /* values here */ ``` -### SourceAirtable +### `models.SourceAirtable` ```python -sourceConfiguration: models.SourceAirtable = /* values here */ +value: models.SourceAirtable = /* values here */ ``` -### SourceAmazonAds +### `models.SourceAmazonAds` ```python -sourceConfiguration: models.SourceAmazonAds = /* values here */ +value: models.SourceAmazonAds = /* values here */ ``` -### SourceAmazonSellerPartner +### `models.SourceAmazonSellerPartner` ```python -sourceConfiguration: models.SourceAmazonSellerPartner = /* values here */ +value: models.SourceAmazonSellerPartner = /* values here */ ``` -### SourceAmazonSqs +### `models.SourceAmazonSqs` ```python -sourceConfiguration: models.SourceAmazonSqs = /* values here */ +value: models.SourceAmazonSqs = /* values here */ ``` -### SourceAmplitude +### `models.SourceAmplitude` ```python -sourceConfiguration: models.SourceAmplitude = /* values here */ +value: models.SourceAmplitude = /* values here */ ``` -### SourceApifyDataset +### `models.SourceApifyDataset` ```python -sourceConfiguration: models.SourceApifyDataset = /* values here */ +value: models.SourceApifyDataset = /* values here */ ``` -### SourceAppfollow +### `models.SourceAppfollow` ```python -sourceConfiguration: models.SourceAppfollow = /* values here */ +value: models.SourceAppfollow = /* values here */ ``` -### SourceAsana +### `models.SourceAsana` ```python -sourceConfiguration: models.SourceAsana = /* values here */ +value: models.SourceAsana = /* values here */ ``` -### SourceAuth0 +### `models.SourceAuth0` ```python -sourceConfiguration: models.SourceAuth0 = /* values here */ +value: models.SourceAuth0 = /* values here */ ``` -### SourceAwsCloudtrail +### `models.SourceAwsCloudtrail` ```python -sourceConfiguration: models.SourceAwsCloudtrail = /* values here */ +value: models.SourceAwsCloudtrail = /* values here */ ``` -### SourceAzureBlobStorage +### `models.SourceAzureBlobStorage` ```python -sourceConfiguration: models.SourceAzureBlobStorage = /* values here */ +value: models.SourceAzureBlobStorage = /* values here */ ``` -### SourceAzureTable +### `models.SourceAzureTable` ```python -sourceConfiguration: models.SourceAzureTable = /* values here */ +value: models.SourceAzureTable = /* values here */ ``` -### SourceBambooHr +### `models.SourceBambooHr` ```python -sourceConfiguration: models.SourceBambooHr = /* values here */ +value: models.SourceBambooHr = /* values here */ ``` -### SourceBigquery +### `models.SourceBigquery` ```python -sourceConfiguration: models.SourceBigquery = /* values here */ +value: models.SourceBigquery = /* values here */ ``` -### SourceBingAds +### `models.SourceBingAds` ```python -sourceConfiguration: models.SourceBingAds = /* values here */ +value: models.SourceBingAds = /* values here */ ``` -### SourceBraintree +### `models.SourceBraintree` ```python -sourceConfiguration: models.SourceBraintree = /* values here */ +value: models.SourceBraintree = /* values here */ ``` -### SourceBraze +### `models.SourceBraze` ```python -sourceConfiguration: models.SourceBraze = /* values here */ +value: models.SourceBraze = /* values here 
*/ ``` -### SourceCart +### `models.SourceCart` ```python -sourceConfiguration: models.SourceCart = /* values here */ +value: models.SourceCart = /* values here */ ``` -### SourceChargebee +### `models.SourceChargebee` ```python -sourceConfiguration: models.SourceChargebee = /* values here */ +value: models.SourceChargebee = /* values here */ ``` -### SourceChartmogul +### `models.SourceChartmogul` ```python -sourceConfiguration: models.SourceChartmogul = /* values here */ +value: models.SourceChartmogul = /* values here */ ``` -### SourceClickhouse +### `models.SourceClazar` ```python -sourceConfiguration: models.SourceClickhouse = /* values here */ +value: models.SourceClazar = /* values here */ ``` -### SourceClickupAPI +### `models.SourceClickhouse` ```python -sourceConfiguration: models.SourceClickupAPI = /* values here */ +value: models.SourceClickhouse = /* values here */ ``` -### SourceClockify +### `models.SourceClickupAPI` ```python -sourceConfiguration: models.SourceClockify = /* values here */ +value: models.SourceClickupAPI = /* values here */ ``` -### SourceCloseCom +### `models.SourceClockify` ```python -sourceConfiguration: models.SourceCloseCom = /* values here */ +value: models.SourceClockify = /* values here */ ``` -### SourceCoda +### `models.SourceCloseCom` ```python -sourceConfiguration: models.SourceCoda = /* values here */ +value: models.SourceCloseCom = /* values here */ ``` -### SourceCoinAPI +### `models.SourceCoda` ```python -sourceConfiguration: models.SourceCoinAPI = /* values here */ +value: models.SourceCoda = /* values here */ ``` -### SourceCoinmarketcap +### `models.SourceCoinAPI` ```python -sourceConfiguration: models.SourceCoinmarketcap = /* values here */ +value: models.SourceCoinAPI = /* values here */ ``` -### SourceConfigcat +### `models.SourceCoinmarketcap` ```python -sourceConfiguration: models.SourceConfigcat = /* values here */ +value: models.SourceCoinmarketcap = /* values here */ ``` -### SourceConfluence +### `models.SourceConfigcat` ```python -sourceConfiguration: models.SourceConfluence = /* values here */ +value: models.SourceConfigcat = /* values here */ ``` -### SourceConvex +### `models.SourceConfluence` ```python -sourceConfiguration: models.SourceConvex = /* values here */ +value: models.SourceConfluence = /* values here */ ``` -### SourceDatascope +### `models.SourceConvex` ```python -sourceConfiguration: models.SourceDatascope = /* values here */ +value: models.SourceConvex = /* values here */ ``` -### SourceDelighted +### `models.SourceDatadog` ```python -sourceConfiguration: models.SourceDelighted = /* values here */ +value: models.SourceDatadog = /* values here */ ``` -### SourceDixa +### `models.SourceDatascope` ```python -sourceConfiguration: models.SourceDixa = /* values here */ +value: models.SourceDatascope = /* values here */ ``` -### SourceDockerhub +### `models.SourceDelighted` ```python -sourceConfiguration: models.SourceDockerhub = /* values here */ +value: models.SourceDelighted = /* values here */ ``` -### SourceDremio +### `models.SourceDixa` ```python -sourceConfiguration: models.SourceDremio = /* values here */ +value: models.SourceDixa = /* values here */ ``` -### SourceDynamodb +### `models.SourceDockerhub` ```python -sourceConfiguration: models.SourceDynamodb = /* values here */ +value: models.SourceDockerhub = /* values here */ ``` -### SourceE2eTestCloud +### `models.SourceDremio` ```python -sourceConfiguration: models.SourceE2eTestCloud = /* values here */ +value: models.SourceDremio = /* values here */ ``` 
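
> Editor's note: the `SourceConfiguration` union above accepts exactly one of the per-connector models listed under "Supported Types". As a minimal sketch (assuming the generated keyword-argument constructors exported from `airbyte_api.models`; the credentials are placeholders and `source_type` is assumed to default to its const value), the newly added `SourceClazar` variant documented earlier in this diff could be supplied like this:

```python
from airbyte_api import models

# Any one model from the "Supported Types" list can serve as the source
# configuration value; SourceClazar is used here purely as an illustration.
configuration = models.SourceClazar(
    client_id="<clazar-client-id>",          # placeholder
    client_secret="<clazar-client-secret>",  # placeholder
)
```
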
-### SourceEmailoctopus +### `models.SourceDynamodb` ```python -sourceConfiguration: models.SourceEmailoctopus = /* values here */ +value: models.SourceDynamodb = /* values here */ ``` -### SourceExchangeRates +### `models.SourceE2eTestCloud` ```python -sourceConfiguration: models.SourceExchangeRates = /* values here */ +value: models.SourceE2eTestCloud = /* values here */ ``` -### SourceFacebookMarketing +### `models.SourceEmailoctopus` ```python -sourceConfiguration: models.SourceFacebookMarketing = /* values here */ +value: models.SourceEmailoctopus = /* values here */ ``` -### SourceFaker +### `models.SourceExchangeRates` ```python -sourceConfiguration: models.SourceFaker = /* values here */ +value: models.SourceExchangeRates = /* values here */ ``` -### SourceFauna +### `models.SourceFacebookMarketing` ```python -sourceConfiguration: models.SourceFauna = /* values here */ +value: models.SourceFacebookMarketing = /* values here */ ``` -### SourceFile +### `models.SourceFaker` ```python -sourceConfiguration: models.SourceFile = /* values here */ +value: models.SourceFaker = /* values here */ ``` -### SourceFirebolt +### `models.SourceFauna` ```python -sourceConfiguration: models.SourceFirebolt = /* values here */ +value: models.SourceFauna = /* values here */ ``` -### SourceFreshcaller +### `models.SourceFile` ```python -sourceConfiguration: models.SourceFreshcaller = /* values here */ +value: models.SourceFile = /* values here */ ``` -### SourceFreshdesk +### `models.SourceFirebolt` ```python -sourceConfiguration: models.SourceFreshdesk = /* values here */ +value: models.SourceFirebolt = /* values here */ ``` -### SourceFreshsales +### `models.SourceFleetio` ```python -sourceConfiguration: models.SourceFreshsales = /* values here */ +value: models.SourceFleetio = /* values here */ ``` -### SourceGainsightPx +### `models.SourceFreshcaller` ```python -sourceConfiguration: models.SourceGainsightPx = /* values here */ +value: models.SourceFreshcaller = /* values here */ ``` -### SourceGcs +### `models.SourceFreshdesk` ```python -sourceConfiguration: models.SourceGcs = /* values here */ +value: models.SourceFreshdesk = /* values here */ ``` -### SourceGetlago +### `models.SourceFreshsales` ```python -sourceConfiguration: models.SourceGetlago = /* values here */ +value: models.SourceFreshsales = /* values here */ ``` -### SourceGithub +### `models.SourceGainsightPx` ```python -sourceConfiguration: models.SourceGithub = /* values here */ +value: models.SourceGainsightPx = /* values here */ ``` -### SourceGitlab +### `models.SourceGcs` ```python -sourceConfiguration: models.SourceGitlab = /* values here */ +value: models.SourceGcs = /* values here */ ``` -### SourceGlassfrog +### `models.SourceGetlago` ```python -sourceConfiguration: models.SourceGlassfrog = /* values here */ +value: models.SourceGetlago = /* values here */ ``` -### SourceGnews +### `models.SourceGithub` ```python -sourceConfiguration: models.SourceGnews = /* values here */ +value: models.SourceGithub = /* values here */ ``` -### SourceGoogleAds +### `models.SourceGitlab` ```python -sourceConfiguration: models.SourceGoogleAds = /* values here */ +value: models.SourceGitlab = /* values here */ ``` -### SourceGoogleAnalyticsDataAPI +### `models.SourceGlassfrog` ```python -sourceConfiguration: models.SourceGoogleAnalyticsDataAPI = /* values here */ +value: models.SourceGlassfrog = /* values here */ ``` -### SourceGoogleAnalyticsV4ServiceAccountOnly +### `models.SourceGnews` ```python -sourceConfiguration: 
models.SourceGoogleAnalyticsV4ServiceAccountOnly = /* values here */ +value: models.SourceGnews = /* values here */ ``` -### SourceGoogleDirectory +### `models.SourceGoldcast` ```python -sourceConfiguration: models.SourceGoogleDirectory = /* values here */ +value: models.SourceGoldcast = /* values here */ ``` -### SourceGoogleDrive +### `models.SourceGoogleAds` ```python -sourceConfiguration: models.SourceGoogleDrive = /* values here */ +value: models.SourceGoogleAds = /* values here */ ``` -### SourceGooglePagespeedInsights +### `models.SourceGoogleAnalyticsDataAPI` ```python -sourceConfiguration: models.SourceGooglePagespeedInsights = /* values here */ +value: models.SourceGoogleAnalyticsDataAPI = /* values here */ ``` -### SourceGoogleSearchConsole +### `models.SourceGoogleDirectory` ```python -sourceConfiguration: models.SourceGoogleSearchConsole = /* values here */ +value: models.SourceGoogleDirectory = /* values here */ ``` -### SourceGoogleSheets +### `models.SourceGoogleDrive` ```python -sourceConfiguration: models.SourceGoogleSheets = /* values here */ +value: models.SourceGoogleDrive = /* values here */ ``` -### SourceGoogleWebfonts +### `models.SourceGooglePagespeedInsights` ```python -sourceConfiguration: models.SourceGoogleWebfonts = /* values here */ +value: models.SourceGooglePagespeedInsights = /* values here */ ``` -### SourceGreenhouse +### `models.SourceGoogleSearchConsole` ```python -sourceConfiguration: models.SourceGreenhouse = /* values here */ +value: models.SourceGoogleSearchConsole = /* values here */ ``` -### SourceGridly +### `models.SourceGoogleSheets` ```python -sourceConfiguration: models.SourceGridly = /* values here */ +value: models.SourceGoogleSheets = /* values here */ ``` -### SourceHarvest +### `models.SourceGoogleWebfonts` ```python -sourceConfiguration: models.SourceHarvest = /* values here */ +value: models.SourceGoogleWebfonts = /* values here */ ``` -### SourceHubplanner +### `models.SourceGreenhouse` ```python -sourceConfiguration: models.SourceHubplanner = /* values here */ +value: models.SourceGreenhouse = /* values here */ ``` -### SourceHubspot +### `models.SourceGridly` ```python -sourceConfiguration: models.SourceHubspot = /* values here */ +value: models.SourceGridly = /* values here */ ``` -### SourceInsightly +### `models.SourceHarvest` ```python -sourceConfiguration: models.SourceInsightly = /* values here */ +value: models.SourceHarvest = /* values here */ ``` -### SourceInstagram +### `models.SourceHubplanner` ```python -sourceConfiguration: models.SourceInstagram = /* values here */ +value: models.SourceHubplanner = /* values here */ ``` -### SourceInstatus +### `models.SourceHubspot` ```python -sourceConfiguration: models.SourceInstatus = /* values here */ +value: models.SourceHubspot = /* values here */ ``` -### SourceIntercom +### `models.SourceInsightly` ```python -sourceConfiguration: models.SourceIntercom = /* values here */ +value: models.SourceInsightly = /* values here */ ``` -### SourceIp2whois +### `models.SourceInstagram` ```python -sourceConfiguration: models.SourceIp2whois = /* values here */ +value: models.SourceInstagram = /* values here */ ``` -### SourceIterable +### `models.SourceInstatus` ```python -sourceConfiguration: models.SourceIterable = /* values here */ +value: models.SourceInstatus = /* values here */ ``` -### SourceJira +### `models.SourceIntercom` ```python -sourceConfiguration: models.SourceJira = /* values here */ +value: models.SourceIntercom = /* values here */ ``` -### SourceK6Cloud +### 
`models.SourceIp2whois` ```python -sourceConfiguration: models.SourceK6Cloud = /* values here */ +value: models.SourceIp2whois = /* values here */ ``` -### SourceKlarna +### `models.SourceIterable` ```python -sourceConfiguration: models.SourceKlarna = /* values here */ +value: models.SourceIterable = /* values here */ ``` -### SourceKlaviyo +### `models.SourceJira` ```python -sourceConfiguration: models.SourceKlaviyo = /* values here */ +value: models.SourceJira = /* values here */ ``` -### SourceKyve +### `models.SourceK6Cloud` ```python -sourceConfiguration: models.SourceKyve = /* values here */ +value: models.SourceK6Cloud = /* values here */ ``` -### SourceLaunchdarkly +### `models.SourceKlarna` ```python -sourceConfiguration: models.SourceLaunchdarkly = /* values here */ +value: models.SourceKlarna = /* values here */ ``` -### SourceLemlist +### `models.SourceKlaviyo` ```python -sourceConfiguration: models.SourceLemlist = /* values here */ +value: models.SourceKlaviyo = /* values here */ ``` -### SourceLeverHiring +### `models.SourceKyve` ```python -sourceConfiguration: models.SourceLeverHiring = /* values here */ +value: models.SourceKyve = /* values here */ ``` -### SourceLinkedinAds +### `models.SourceLaunchdarkly` ```python -sourceConfiguration: models.SourceLinkedinAds = /* values here */ +value: models.SourceLaunchdarkly = /* values here */ ``` -### SourceLinkedinPages +### `models.SourceLemlist` ```python -sourceConfiguration: models.SourceLinkedinPages = /* values here */ +value: models.SourceLemlist = /* values here */ ``` -### SourceLinnworks +### `models.SourceLeverHiring` ```python -sourceConfiguration: models.SourceLinnworks = /* values here */ +value: models.SourceLeverHiring = /* values here */ ``` -### SourceLokalise +### `models.SourceLinkedinAds` ```python -sourceConfiguration: models.SourceLokalise = /* values here */ +value: models.SourceLinkedinAds = /* values here */ ``` -### SourceMailchimp +### `models.SourceLinkedinPages` ```python -sourceConfiguration: models.SourceMailchimp = /* values here */ +value: models.SourceLinkedinPages = /* values here */ ``` -### SourceMailgun +### `models.SourceLinnworks` ```python -sourceConfiguration: models.SourceMailgun = /* values here */ +value: models.SourceLinnworks = /* values here */ ``` -### SourceMailjetSms +### `models.SourceLokalise` ```python -sourceConfiguration: models.SourceMailjetSms = /* values here */ +value: models.SourceLokalise = /* values here */ ``` -### SourceMarketo +### `models.SourceMailchimp` ```python -sourceConfiguration: models.SourceMarketo = /* values here */ +value: models.SourceMailchimp = /* values here */ ``` -### SourceMetabase +### `models.SourceMailgun` ```python -sourceConfiguration: models.SourceMetabase = /* values here */ +value: models.SourceMailgun = /* values here */ ``` -### SourceMicrosoftOnedrive +### `models.SourceMailjetSms` ```python -sourceConfiguration: models.SourceMicrosoftOnedrive = /* values here */ +value: models.SourceMailjetSms = /* values here */ ``` -### SourceMicrosoftSharepoint +### `models.SourceMarketo` ```python -sourceConfiguration: models.SourceMicrosoftSharepoint = /* values here */ +value: models.SourceMarketo = /* values here */ ``` -### SourceMicrosoftTeams +### `models.SourceMetabase` ```python -sourceConfiguration: models.SourceMicrosoftTeams = /* values here */ +value: models.SourceMetabase = /* values here */ ``` -### SourceMixpanel +### `models.SourceMicrosoftOnedrive` ```python -sourceConfiguration: models.SourceMixpanel = /* values here */ +value: 
models.SourceMicrosoftOnedrive = /* values here */ ``` -### SourceMonday +### `models.SourceMicrosoftSharepoint` ```python -sourceConfiguration: models.SourceMonday = /* values here */ +value: models.SourceMicrosoftSharepoint = /* values here */ ``` -### SourceMongodbInternalPoc +### `models.SourceMicrosoftTeams` ```python -sourceConfiguration: models.SourceMongodbInternalPoc = /* values here */ +value: models.SourceMicrosoftTeams = /* values here */ ``` -### SourceMongodbV2 +### `models.SourceMixpanel` ```python -sourceConfiguration: models.SourceMongodbV2 = /* values here */ +value: models.SourceMixpanel = /* values here */ ``` -### SourceMssql +### `models.SourceMonday` ```python -sourceConfiguration: models.SourceMssql = /* values here */ +value: models.SourceMonday = /* values here */ ``` -### SourceMyHours +### `models.SourceMongodbInternalPoc` ```python -sourceConfiguration: models.SourceMyHours = /* values here */ +value: models.SourceMongodbInternalPoc = /* values here */ ``` -### SourceMysql +### `models.SourceMongodbV2` ```python -sourceConfiguration: models.SourceMysql = /* values here */ +value: models.SourceMongodbV2 = /* values here */ ``` -### SourceNetsuite +### `models.SourceMssql` ```python -sourceConfiguration: models.SourceNetsuite = /* values here */ +value: models.SourceMssql = /* values here */ ``` -### SourceNotion +### `models.SourceMyHours` ```python -sourceConfiguration: models.SourceNotion = /* values here */ +value: models.SourceMyHours = /* values here */ ``` -### SourceNytimes +### `models.SourceMysql` ```python -sourceConfiguration: models.SourceNytimes = /* values here */ +value: models.SourceMysql = /* values here */ ``` -### SourceOkta +### `models.SourceNetsuite` ```python -sourceConfiguration: models.SourceOkta = /* values here */ +value: models.SourceNetsuite = /* values here */ ``` -### SourceOmnisend +### `models.SourceNotion` ```python -sourceConfiguration: models.SourceOmnisend = /* values here */ +value: models.SourceNotion = /* values here */ ``` -### SourceOnesignal +### `models.SourceNytimes` ```python -sourceConfiguration: models.SourceOnesignal = /* values here */ +value: models.SourceNytimes = /* values here */ ``` -### SourceOracle +### `models.SourceOkta` ```python -sourceConfiguration: models.SourceOracle = /* values here */ +value: models.SourceOkta = /* values here */ ``` -### SourceOrb +### `models.SourceOmnisend` ```python -sourceConfiguration: models.SourceOrb = /* values here */ +value: models.SourceOmnisend = /* values here */ ``` -### SourceOrbit +### `models.SourceOnesignal` ```python -sourceConfiguration: models.SourceOrbit = /* values here */ +value: models.SourceOnesignal = /* values here */ ``` -### SourceOutbrainAmplify +### `models.SourceOracle` ```python -sourceConfiguration: models.SourceOutbrainAmplify = /* values here */ +value: models.SourceOracle = /* values here */ ``` -### SourceOutreach +### `models.SourceOrb` ```python -sourceConfiguration: models.SourceOutreach = /* values here */ +value: models.SourceOrb = /* values here */ ``` -### SourcePaypalTransaction +### `models.SourceOrbit` ```python -sourceConfiguration: models.SourcePaypalTransaction = /* values here */ +value: models.SourceOrbit = /* values here */ ``` -### SourcePaystack +### `models.SourceOutbrainAmplify` ```python -sourceConfiguration: models.SourcePaystack = /* values here */ +value: models.SourceOutbrainAmplify = /* values here */ ``` -### SourcePendo +### `models.SourceOutreach` ```python -sourceConfiguration: models.SourcePendo = /* values 
here */ +value: models.SourceOutreach = /* values here */ ``` -### SourcePersistiq +### `models.SourcePaypalTransaction` ```python -sourceConfiguration: models.SourcePersistiq = /* values here */ +value: models.SourcePaypalTransaction = /* values here */ ``` -### SourcePexelsAPI +### `models.SourcePaystack` ```python -sourceConfiguration: models.SourcePexelsAPI = /* values here */ +value: models.SourcePaystack = /* values here */ ``` -### SourcePinterest +### `models.SourcePendo` ```python -sourceConfiguration: models.SourcePinterest = /* values here */ +value: models.SourcePendo = /* values here */ ``` -### SourcePipedrive +### `models.SourcePersistiq` ```python -sourceConfiguration: models.SourcePipedrive = /* values here */ +value: models.SourcePersistiq = /* values here */ ``` -### SourcePocket +### `models.SourcePexelsAPI` ```python -sourceConfiguration: models.SourcePocket = /* values here */ +value: models.SourcePexelsAPI = /* values here */ ``` -### SourcePokeapi +### `models.SourcePinterest` ```python -sourceConfiguration: models.SourcePokeapi = /* values here */ +value: models.SourcePinterest = /* values here */ ``` -### SourcePolygonStockAPI +### `models.SourcePipedrive` ```python -sourceConfiguration: models.SourcePolygonStockAPI = /* values here */ +value: models.SourcePipedrive = /* values here */ ``` -### SourcePostgres +### `models.SourcePocket` ```python -sourceConfiguration: models.SourcePostgres = /* values here */ +value: models.SourcePocket = /* values here */ ``` -### SourcePosthog +### `models.SourcePokeapi` ```python -sourceConfiguration: models.SourcePosthog = /* values here */ +value: models.SourcePokeapi = /* values here */ ``` -### SourcePostmarkapp +### `models.SourcePolygonStockAPI` ```python -sourceConfiguration: models.SourcePostmarkapp = /* values here */ +value: models.SourcePolygonStockAPI = /* values here */ ``` -### SourcePrestashop +### `models.SourcePostgres` ```python -sourceConfiguration: models.SourcePrestashop = /* values here */ +value: models.SourcePostgres = /* values here */ ``` -### SourcePunkAPI +### `models.SourcePosthog` ```python -sourceConfiguration: models.SourcePunkAPI = /* values here */ +value: models.SourcePosthog = /* values here */ ``` -### SourcePypi +### `models.SourcePostmarkapp` ```python -sourceConfiguration: models.SourcePypi = /* values here */ +value: models.SourcePostmarkapp = /* values here */ ``` -### SourceQualaroo +### `models.SourcePrestashop` ```python -sourceConfiguration: models.SourceQualaroo = /* values here */ +value: models.SourcePrestashop = /* values here */ ``` -### SourceRailz +### `models.SourcePypi` ```python -sourceConfiguration: models.SourceRailz = /* values here */ +value: models.SourcePypi = /* values here */ ``` -### SourceRecharge +### `models.SourceQualaroo` ```python -sourceConfiguration: models.SourceRecharge = /* values here */ +value: models.SourceQualaroo = /* values here */ ``` -### SourceRecreation +### `models.SourceRailz` ```python -sourceConfiguration: models.SourceRecreation = /* values here */ +value: models.SourceRailz = /* values here */ ``` -### SourceRecruitee +### `models.SourceRecharge` ```python -sourceConfiguration: models.SourceRecruitee = /* values here */ +value: models.SourceRecharge = /* values here */ ``` -### SourceRecurly +### `models.SourceRecreation` ```python -sourceConfiguration: models.SourceRecurly = /* values here */ +value: models.SourceRecreation = /* values here */ ``` -### SourceRedshift +### `models.SourceRecruitee` ```python -sourceConfiguration: 
models.SourceRedshift = /* values here */ +value: models.SourceRecruitee = /* values here */ ``` -### SourceRetently +### `models.SourceRecurly` ```python -sourceConfiguration: models.SourceRetently = /* values here */ +value: models.SourceRecurly = /* values here */ ``` -### SourceRkiCovid +### `models.SourceRedshift` ```python -sourceConfiguration: models.SourceRkiCovid = /* values here */ +value: models.SourceRedshift = /* values here */ ``` -### SourceRss +### `models.SourceRetently` ```python -sourceConfiguration: models.SourceRss = /* values here */ +value: models.SourceRetently = /* values here */ ``` -### SourceS3 +### `models.SourceRkiCovid` ```python -sourceConfiguration: models.SourceS3 = /* values here */ +value: models.SourceRkiCovid = /* values here */ ``` -### SourceSalesforce +### `models.SourceRss` ```python -sourceConfiguration: models.SourceSalesforce = /* values here */ +value: models.SourceRss = /* values here */ ``` -### SourceSalesloft +### `models.SourceS3` ```python -sourceConfiguration: models.SourceSalesloft = /* values here */ +value: models.SourceS3 = /* values here */ ``` -### SourceSapFieldglass +### `models.SourceSalesforce` ```python -sourceConfiguration: models.SourceSapFieldglass = /* values here */ +value: models.SourceSalesforce = /* values here */ ``` -### SourceSecoda +### `models.SourceSalesloft` ```python -sourceConfiguration: models.SourceSecoda = /* values here */ +value: models.SourceSalesloft = /* values here */ ``` -### SourceSendgrid +### `models.SourceSapFieldglass` ```python -sourceConfiguration: models.SourceSendgrid = /* values here */ +value: models.SourceSapFieldglass = /* values here */ ``` -### SourceSendinblue +### `models.SourceSecoda` ```python -sourceConfiguration: models.SourceSendinblue = /* values here */ +value: models.SourceSecoda = /* values here */ ``` -### SourceSenseforce +### `models.SourceSendgrid` ```python -sourceConfiguration: models.SourceSenseforce = /* values here */ +value: models.SourceSendgrid = /* values here */ ``` -### SourceSentry +### `models.SourceSendinblue` ```python -sourceConfiguration: models.SourceSentry = /* values here */ +value: models.SourceSendinblue = /* values here */ ``` -### SourceSftp +### `models.SourceSenseforce` ```python -sourceConfiguration: models.SourceSftp = /* values here */ +value: models.SourceSenseforce = /* values here */ ``` -### SourceSftpBulk +### `models.SourceSentry` ```python -sourceConfiguration: models.SourceSftpBulk = /* values here */ +value: models.SourceSentry = /* values here */ ``` -### SourceShopify +### `models.SourceSftp` ```python -sourceConfiguration: models.SourceShopify = /* values here */ +value: models.SourceSftp = /* values here */ ``` -### SourceShortio +### `models.SourceSftpBulk` ```python -sourceConfiguration: models.SourceShortio = /* values here */ +value: models.SourceSftpBulk = /* values here */ ``` -### SourceSlack +### `models.SourceShopify` ```python -sourceConfiguration: models.SourceSlack = /* values here */ +value: models.SourceShopify = /* values here */ ``` -### SourceSmaily +### `models.SourceShortio` ```python -sourceConfiguration: models.SourceSmaily = /* values here */ +value: models.SourceShortio = /* values here */ ``` -### SourceSmartengage +### `models.SourceSlack` ```python -sourceConfiguration: models.SourceSmartengage = /* values here */ +value: models.SourceSlack = /* values here */ ``` -### SourceSmartsheets +### `models.SourceSmaily` ```python -sourceConfiguration: models.SourceSmartsheets = /* values here */ +value: 
models.SourceSmaily = /* values here */ ``` -### SourceSnapchatMarketing +### `models.SourceSmartengage` ```python -sourceConfiguration: models.SourceSnapchatMarketing = /* values here */ +value: models.SourceSmartengage = /* values here */ ``` -### SourceSnowflake +### `models.SourceSmartsheets` ```python -sourceConfiguration: models.SourceSnowflake = /* values here */ +value: models.SourceSmartsheets = /* values here */ ``` -### SourceSonarCloud +### `models.SourceSnapchatMarketing` ```python -sourceConfiguration: models.SourceSonarCloud = /* values here */ +value: models.SourceSnapchatMarketing = /* values here */ ``` -### SourceSpacexAPI +### `models.SourceSnowflake` ```python -sourceConfiguration: models.SourceSpacexAPI = /* values here */ +value: models.SourceSnowflake = /* values here */ ``` -### SourceSquare +### `models.SourceSonarCloud` ```python -sourceConfiguration: models.SourceSquare = /* values here */ +value: models.SourceSonarCloud = /* values here */ ``` -### SourceStrava +### `models.SourceSpacexAPI` ```python -sourceConfiguration: models.SourceStrava = /* values here */ +value: models.SourceSpacexAPI = /* values here */ ``` -### SourceStripe +### `models.SourceSquare` ```python -sourceConfiguration: models.SourceStripe = /* values here */ +value: models.SourceSquare = /* values here */ ``` -### SourceSurveySparrow +### `models.SourceStrava` ```python -sourceConfiguration: models.SourceSurveySparrow = /* values here */ +value: models.SourceStrava = /* values here */ ``` -### SourceSurveymonkey +### `models.SourceStripe` ```python -sourceConfiguration: models.SourceSurveymonkey = /* values here */ +value: models.SourceStripe = /* values here */ ``` -### SourceTempo +### `models.SourceSurveySparrow` ```python -sourceConfiguration: models.SourceTempo = /* values here */ +value: models.SourceSurveySparrow = /* values here */ ``` -### SourceTheGuardianAPI +### `models.SourceSurveymonkey` ```python -sourceConfiguration: models.SourceTheGuardianAPI = /* values here */ +value: models.SourceSurveymonkey = /* values here */ ``` -### SourceTiktokMarketing +### `models.SourceTempo` ```python -sourceConfiguration: models.SourceTiktokMarketing = /* values here */ +value: models.SourceTempo = /* values here */ ``` -### SourceTrello +### `models.SourceTheGuardianAPI` ```python -sourceConfiguration: models.SourceTrello = /* values here */ +value: models.SourceTheGuardianAPI = /* values here */ ``` -### SourceTrustpilot +### `models.SourceTiktokMarketing` ```python -sourceConfiguration: models.SourceTrustpilot = /* values here */ +value: models.SourceTiktokMarketing = /* values here */ ``` -### SourceTvmazeSchedule +### `models.SourceTrello` ```python -sourceConfiguration: models.SourceTvmazeSchedule = /* values here */ +value: models.SourceTrello = /* values here */ ``` -### SourceTwilio +### `models.SourceTrustpilot` ```python -sourceConfiguration: models.SourceTwilio = /* values here */ +value: models.SourceTrustpilot = /* values here */ ``` -### SourceTwilioTaskrouter +### `models.SourceTvmazeSchedule` ```python -sourceConfiguration: models.SourceTwilioTaskrouter = /* values here */ +value: models.SourceTvmazeSchedule = /* values here */ ``` -### SourceTwitter +### `models.SourceTwilio` ```python -sourceConfiguration: models.SourceTwitter = /* values here */ +value: models.SourceTwilio = /* values here */ ``` -### SourceTypeform +### `models.SourceTwilioTaskrouter` ```python -sourceConfiguration: models.SourceTypeform = /* values here */ +value: models.SourceTwilioTaskrouter = /* 
values here */ ``` -### SourceUsCensus +### `models.SourceTwitter` ```python -sourceConfiguration: models.SourceUsCensus = /* values here */ +value: models.SourceTwitter = /* values here */ ``` -### SourceVantage +### `models.SourceTypeform` ```python -sourceConfiguration: models.SourceVantage = /* values here */ +value: models.SourceTypeform = /* values here */ ``` -### SourceWebflow +### `models.SourceUsCensus` ```python -sourceConfiguration: models.SourceWebflow = /* values here */ +value: models.SourceUsCensus = /* values here */ ``` -### SourceWhiskyHunter +### `models.SourceVantage` ```python -sourceConfiguration: models.SourceWhiskyHunter = /* values here */ +value: models.SourceVantage = /* values here */ ``` -### SourceWikipediaPageviews +### `models.SourceWebflow` ```python -sourceConfiguration: models.SourceWikipediaPageviews = /* values here */ +value: models.SourceWebflow = /* values here */ ``` -### SourceWoocommerce +### `models.SourceWhiskyHunter` ```python -sourceConfiguration: models.SourceWoocommerce = /* values here */ +value: models.SourceWhiskyHunter = /* values here */ ``` -### SourceXkcd +### `models.SourceWikipediaPageviews` ```python -sourceConfiguration: models.SourceXkcd = /* values here */ +value: models.SourceWikipediaPageviews = /* values here */ ``` -### SourceYandexMetrica +### `models.SourceWoocommerce` ```python -sourceConfiguration: models.SourceYandexMetrica = /* values here */ +value: models.SourceWoocommerce = /* values here */ ``` -### SourceYotpo +### `models.SourceXkcd` ```python -sourceConfiguration: models.SourceYotpo = /* values here */ +value: models.SourceXkcd = /* values here */ ``` -### SourceYoutubeAnalytics +### `models.SourceYandexMetrica` ```python -sourceConfiguration: models.SourceYoutubeAnalytics = /* values here */ +value: models.SourceYandexMetrica = /* values here */ ``` -### SourceZendeskChat +### `models.SourceYotpo` ```python -sourceConfiguration: models.SourceZendeskChat = /* values here */ +value: models.SourceYotpo = /* values here */ ``` -### SourceZendeskSell +### `models.SourceYoutubeAnalytics` ```python -sourceConfiguration: models.SourceZendeskSell = /* values here */ +value: models.SourceYoutubeAnalytics = /* values here */ ``` -### SourceZendeskSunshine +### `models.SourceZendeskChat` ```python -sourceConfiguration: models.SourceZendeskSunshine = /* values here */ +value: models.SourceZendeskChat = /* values here */ ``` -### SourceZendeskSupport +### `models.SourceZendeskSell` ```python -sourceConfiguration: models.SourceZendeskSupport = /* values here */ +value: models.SourceZendeskSell = /* values here */ ``` -### SourceZendeskTalk +### `models.SourceZendeskSunshine` ```python -sourceConfiguration: models.SourceZendeskTalk = /* values here */ +value: models.SourceZendeskSunshine = /* values here */ ``` -### SourceZenloop +### `models.SourceZendeskSupport` ```python -sourceConfiguration: models.SourceZenloop = /* values here */ +value: models.SourceZendeskSupport = /* values here */ ``` -### SourceZohoCrm +### `models.SourceZendeskTalk` ```python -sourceConfiguration: models.SourceZohoCrm = /* values here */ +value: models.SourceZendeskTalk = /* values here */ ``` -### SourceZoom +### `models.SourceZenloop` ```python -sourceConfiguration: models.SourceZoom = /* values here */ +value: models.SourceZenloop = /* values here */ +``` + +### `models.SourceZohoCrm` + +```python +value: models.SourceZohoCrm = /* values here */ +``` + +### `models.SourceZoom` + +```python +value: models.SourceZoom = /* values here */ ``` diff 
--git a/docs/models/sourcedatadog.md b/docs/models/sourcedatadog.md new file mode 100644 index 00000000..87732ceb --- /dev/null +++ b/docs/models/sourcedatadog.md @@ -0,0 +1,16 @@ +# SourceDatadog + + +## Fields + +| Field | Type | Required | Description | Example | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | Datadog API key | | +| `application_key` | *str* | :heavy_check_mark: | Datadog application key | | +| `end_date` | *Optional[str]* | :heavy_minus_sign: | UTC date and time in the format 2017-01-25T00:00:00Z. Data after this date will not be replicated. An empty value will represent the current datetime for each execution. This just applies to Incremental syncs. | 2022-10-01T00:00:00Z | +| `max_records_per_request` | *Optional[int]* | :heavy_minus_sign: | Maximum number of records to collect per request. | | +| `queries` | List[[models.Queries](../models/queries.md)] | :heavy_minus_sign: | List of queries to be run and used as inputs. | | +| `query` | *Optional[str]* | :heavy_minus_sign: | The search query. This just applies to Incremental syncs. If empty, it'll collect all logs. | | +| `site` | [Optional[models.Site]](../models/site.md) | :heavy_minus_sign: | The site where Datadog data resides in. | | +| `source_type` | [models.Datadog](../models/datadog.md) | :heavy_check_mark: | N/A | | +| `start_date` | *Optional[str]* | :heavy_minus_sign: | UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. This just applies to Incremental syncs. | 2022-10-01T00:00:00Z | \ No newline at end of file diff --git a/docs/models/sourcedatadogdatasource.md b/docs/models/sourcedatadogdatasource.md new file mode 100644 index 00000000..3fbb8616 --- /dev/null +++ b/docs/models/sourcedatadogdatasource.md @@ -0,0 +1,13 @@ +# SourceDatadogDataSource + +A data source that is powered by the platform. 
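
> Editor's note: the `SourceDatadog` field table above is a plain configuration model. A minimal sketch of constructing it (assuming the generated keyword-argument constructors in `airbyte_api.models`, with placeholder keys and the `Site` enum from site.md added earlier in this diff):

```python
from airbyte_api import models

# Only api_key and application_key are required; the remaining fields are optional knobs.
datadog_config = models.SourceDatadog(
    api_key="<datadog-api-key>",                  # placeholder
    application_key="<datadog-application-key>",  # placeholder
    site=models.Site.US3_DATADOGHQ_COM,           # optional: which Datadog site to query
    start_date="2022-10-01T00:00:00Z",            # optional: only applies to incremental syncs
)
```
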
+ + +## Values + +| Name | Value | +| ------------ | ------------ | +| `METRICS` | metrics | +| `CLOUD_COST` | cloud_cost | +| `LOGS` | logs | +| `RUM` | rum | \ No newline at end of file diff --git a/docs/models/sourcedynamodbcredentials.md b/docs/models/sourcedynamodbcredentials.md index fc3291ff..a17cd23b 100644 --- a/docs/models/sourcedynamodbcredentials.md +++ b/docs/models/sourcedynamodbcredentials.md @@ -5,15 +5,15 @@ Credentials for the service ## Supported Types -### AuthenticateViaAccessKeys +### `models.AuthenticateViaAccessKeys` ```python -sourceDynamodbCredentials: models.AuthenticateViaAccessKeys = /* values here */ +value: models.AuthenticateViaAccessKeys = /* values here */ ``` -### RoleBasedAuthentication +### `models.RoleBasedAuthentication` ```python -sourceDynamodbCredentials: models.RoleBasedAuthentication = /* values here */ +value: models.RoleBasedAuthentication = /* values here */ ``` diff --git a/docs/models/sourcee2etestcloud.md b/docs/models/sourcee2etestcloud.md index f2f949ba..53104aa4 100644 --- a/docs/models/sourcee2etestcloud.md +++ b/docs/models/sourcee2etestcloud.md @@ -3,9 +3,9 @@ ## Supported Types -### ContinuousFeed +### `models.ContinuousFeed` ```python -sourceE2eTestCloud: models.ContinuousFeed = /* values here */ +value: models.ContinuousFeed = /* values here */ ``` diff --git a/docs/models/sourcefacebookmarketing.md b/docs/models/sourcefacebookmarketing.md index 63178aef..eb66dc47 100644 --- a/docs/models/sourcefacebookmarketing.md +++ b/docs/models/sourcefacebookmarketing.md @@ -5,14 +5,15 @@ | Field | Type | Required | Description | Example | | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `access_token` | *str* | :heavy_check_mark: | The value of the generated access token. From your App’s Dashboard, click on "Marketing API" then "Tools". Select permissions ads_management, ads_read, read_insights, business_management. Then click on "Get token". See the docs for more information. | | | `account_ids` | List[*str*] | :heavy_check_mark: | The Facebook Ad account ID(s) to pull data from. The Ad account ID number is in the account dropdown menu or in your browser's address bar of your Meta Ads Manager. See the docs for more information. | 111111111111111 | +| `access_token` | *Optional[str]* | :heavy_minus_sign: | The value of the generated access token. From your App’s Dashboard, click on "Marketing API" then "Tools". Select permissions ads_management, ads_read, read_insights, business_management. Then click on "Get token". See the docs for more information. | | | `action_breakdowns_allow_empty` | *Optional[bool]* | :heavy_minus_sign: | Allows action_breakdowns to be an empty list | | | `ad_statuses` | List[[models.ValidAdStatuses](../models/validadstatuses.md)] | :heavy_minus_sign: | Select the statuses you want to be loaded in the stream. If no specific statuses are selected, the API's default behavior applies, and some statuses may be filtered out. | | | `adset_statuses` | List[[models.ValidAdSetStatuses](../models/validadsetstatuses.md)] | :heavy_minus_sign: | Select the statuses you want to be loaded in the stream. If no specific statuses are selected, the API's default behavior applies, and some statuses may be filtered out. | | | `campaign_statuses` | List[[models.ValidCampaignStatuses](../models/validcampaignstatuses.md)] | :heavy_minus_sign: | Select the statuses you want to be loaded in the stream. If no specific statuses are selected, the API's default behavior applies, and some statuses may be filtered out. | | | `client_id` | *Optional[str]* | :heavy_minus_sign: | The Client Id for your OAuth app | | | `client_secret` | *Optional[str]* | :heavy_minus_sign: | The Client Secret for your OAuth app | | +| `credentials` | [Optional[models.SourceFacebookMarketingAuthentication]](../models/sourcefacebookmarketingauthentication.md) | :heavy_minus_sign: | Credentials for connecting to the Facebook Marketing API | | | `custom_insights` | List[[models.InsightConfig](../models/insightconfig.md)] | :heavy_minus_sign: | A list which contains ad statistics entries, each entry must have a name and can contains fields, breakdowns or action_breakdowns. Click on "add" to fill this field. | | | `end_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | The date until which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated between the start date and this end date will be replicated. Not setting this option will result in always syncing the latest data. 
| 2017-01-26T00:00:00Z | | `fetch_thumbnail_images` | *Optional[bool]* | :heavy_minus_sign: | Set to active if you want to fetch the thumbnail_url and store the result in thumbnail_data_url for each Ad Creative. | | diff --git a/docs/models/sourcefacebookmarketingauthentication.md b/docs/models/sourcefacebookmarketingauthentication.md new file mode 100644 index 00000000..37d23c86 --- /dev/null +++ b/docs/models/sourcefacebookmarketingauthentication.md @@ -0,0 +1,19 @@ +# SourceFacebookMarketingAuthentication + +Credentials for connecting to the Facebook Marketing API + + +## Supported Types + +### `models.AuthenticateViaFacebookMarketingOauth` + +```python +value: models.AuthenticateViaFacebookMarketingOauth = /* values here */ +``` + +### `models.ServiceAccountKeyAuthentication` + +```python +value: models.ServiceAccountKeyAuthentication = /* values here */ +``` + diff --git a/docs/models/sourcefacebookmarketingauthtype.md b/docs/models/sourcefacebookmarketingauthtype.md new file mode 100644 index 00000000..c6c4e148 --- /dev/null +++ b/docs/models/sourcefacebookmarketingauthtype.md @@ -0,0 +1,8 @@ +# SourceFacebookMarketingAuthType + + +## Values + +| Name | Value | +| -------- | -------- | +| `CLIENT` | Client | \ No newline at end of file diff --git a/docs/models/sourcegoogleanalyticsv4serviceaccountonlyauthtype.md b/docs/models/sourcefacebookmarketingschemasauthtype.md similarity index 63% rename from docs/models/sourcegoogleanalyticsv4serviceaccountonlyauthtype.md rename to docs/models/sourcefacebookmarketingschemasauthtype.md index 75ecd502..d1f5baa1 100644 --- a/docs/models/sourcegoogleanalyticsv4serviceaccountonlyauthtype.md +++ b/docs/models/sourcefacebookmarketingschemasauthtype.md @@ -1,4 +1,4 @@ -# SourceGoogleAnalyticsV4ServiceAccountOnlyAuthType +# SourceFacebookMarketingSchemasAuthType ## Values diff --git a/docs/models/sourcefacebookmarketingvalidenums.md b/docs/models/sourcefacebookmarketingvalidenums.md index e3c28552..69dafae2 100644 --- a/docs/models/sourcefacebookmarketingvalidenums.md +++ b/docs/models/sourcefacebookmarketingvalidenums.md @@ -19,7 +19,6 @@ An enumeration. | `ADSET_END` | adset_end | | `ADSET_ID` | adset_id | | `ADSET_NAME` | adset_name | -| `ADSET_START` | adset_start | | `AGE_TARGETING` | age_targeting | | `ATTRIBUTION_SETTING` | attribution_setting | | `AUCTION_BID` | auction_bid | @@ -36,7 +35,6 @@ An enumeration. | `CATALOG_SEGMENT_VALUE_OMNI_PURCHASE_ROAS` | catalog_segment_value_omni_purchase_roas | | `CATALOG_SEGMENT_VALUE_WEBSITE_PURCHASE_ROAS` | catalog_segment_value_website_purchase_roas | | `CLICKS` | clicks | -| `CONVERSION_LEAD_RATE` | conversion_lead_rate | | `CONVERSION_RATE_RANKING` | conversion_rate_ranking | | `CONVERSION_VALUES` | conversion_values | | `CONVERSIONS` | conversions | @@ -47,7 +45,6 @@ An enumeration. 
| `COST_PER_ACTION_TYPE` | cost_per_action_type | | `COST_PER_AD_CLICK` | cost_per_ad_click | | `COST_PER_CONVERSION` | cost_per_conversion | -| `COST_PER_CONVERSION_LEAD` | cost_per_conversion_lead | | `COST_PER_DDA_COUNTBY_CONVS` | cost_per_dda_countby_convs | | `COST_PER_ESTIMATED_AD_RECALLERS` | cost_per_estimated_ad_recallers | | `COST_PER_INLINE_LINK_CLICK` | cost_per_inline_link_click | diff --git a/docs/models/sourcefleetio.md b/docs/models/sourcefleetio.md new file mode 100644 index 00000000..bf0e862b --- /dev/null +++ b/docs/models/sourcefleetio.md @@ -0,0 +1,10 @@ +# SourceFleetio + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------- | -------------------------------------- | -------------------------------------- | -------------------------------------- | +| `account_token` | *str* | :heavy_check_mark: | N/A | +| `api_key` | *str* | :heavy_check_mark: | N/A | +| `source_type` | [models.Fleetio](../models/fleetio.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcegcscsvheaderdefinition.md b/docs/models/sourcegcscsvheaderdefinition.md index db49630d..372a941d 100644 --- a/docs/models/sourcegcscsvheaderdefinition.md +++ b/docs/models/sourcegcscsvheaderdefinition.md @@ -5,21 +5,21 @@ How headers will be defined. `User Provided` assumes the CSV does not have a hea ## Supported Types -### SourceGcsFromCSV +### `models.SourceGcsFromCSV` ```python -sourceGcsCSVHeaderDefinition: models.SourceGcsFromCSV = /* values here */ +value: models.SourceGcsFromCSV = /* values here */ ``` -### SourceGcsAutogenerated +### `models.SourceGcsAutogenerated` ```python -sourceGcsCSVHeaderDefinition: models.SourceGcsAutogenerated = /* values here */ +value: models.SourceGcsAutogenerated = /* values here */ ``` -### SourceGcsUserProvided +### `models.SourceGcsUserProvided` ```python -sourceGcsCSVHeaderDefinition: models.SourceGcsUserProvided = /* values here */ +value: models.SourceGcsUserProvided = /* values here */ ``` diff --git a/docs/models/sourcegcsformat.md b/docs/models/sourcegcsformat.md index d067ef0f..43398ba8 100644 --- a/docs/models/sourcegcsformat.md +++ b/docs/models/sourcegcsformat.md @@ -5,9 +5,9 @@ The configuration options that are used to alter how to read incoming files that ## Supported Types -### SourceGcsCSVFormat +### `models.SourceGcsCSVFormat` ```python -sourceGcsFormat: models.SourceGcsCSVFormat = /* values here */ +value: models.SourceGcsCSVFormat = /* values here */ ``` diff --git a/docs/models/sourcegithubauthentication.md b/docs/models/sourcegithubauthentication.md index f8f90ddc..425e18e9 100644 --- a/docs/models/sourcegithubauthentication.md +++ b/docs/models/sourcegithubauthentication.md @@ -5,15 +5,15 @@ Choose how to authenticate to GitHub ## Supported Types -### OAuth +### `models.OAuth` ```python -sourceGithubAuthentication: models.OAuth = /* values here */ +value: models.OAuth = /* values here */ ``` -### SourceGithubPersonalAccessToken +### `models.SourceGithubPersonalAccessToken` ```python -sourceGithubAuthentication: models.SourceGithubPersonalAccessToken = /* values here */ +value: models.SourceGithubPersonalAccessToken = /* values here */ ``` diff --git a/docs/models/sourcegitlabauthorizationmethod.md b/docs/models/sourcegitlabauthorizationmethod.md index f7d58d73..93f74208 100644 --- a/docs/models/sourcegitlabauthorizationmethod.md +++ b/docs/models/sourcegitlabauthorizationmethod.md @@ -3,15 +3,15 @@ ## Supported Types -### SourceGitlabOAuth20 +### 
`models.SourceGitlabOAuth20` ```python -sourceGitlabAuthorizationMethod: models.SourceGitlabOAuth20 = /* values here */ +value: models.SourceGitlabOAuth20 = /* values here */ ``` -### PrivateToken +### `models.PrivateToken` ```python -sourceGitlabAuthorizationMethod: models.PrivateToken = /* values here */ +value: models.PrivateToken = /* values here */ ``` diff --git a/docs/models/sourcegoldcast.md b/docs/models/sourcegoldcast.md new file mode 100644 index 00000000..352b0d14 --- /dev/null +++ b/docs/models/sourcegoldcast.md @@ -0,0 +1,9 @@ +# SourceGoldcast + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `access_key` | *str* | :heavy_check_mark: | Your API Access Key. See here. The key is case sensitive. | +| `source_type` | [models.Goldcast](../models/goldcast.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcegoogleanalyticsdataapi.md b/docs/models/sourcegoogleanalyticsdataapi.md index 4c1252e6..ddbe8707 100644 --- a/docs/models/sourcegoogleanalyticsdataapi.md +++ b/docs/models/sourcegoogleanalyticsdataapi.md @@ -11,5 +11,6 @@ | `custom_reports_array` | List[[models.SourceGoogleAnalyticsDataAPICustomReportConfig](../models/sourcegoogleanalyticsdataapicustomreportconfig.md)] | :heavy_minus_sign: | You can add your Custom Analytics report by creating one. | | | `date_ranges_start_date` | [datetime](https://docs.python.org/3/library/datetime.html#datetime-objects) | :heavy_minus_sign: | The start date from which to replicate report data in the format YYYY-MM-DD. Data generated before this date will not be included in the report. Not applied to custom Cohort reports. | 2021-01-01 | | `keep_empty_rows` | *Optional[bool]* | :heavy_minus_sign: | If false, each row with all metrics equal to 0 will not be returned. If true, these rows will be returned if they are not separately removed by a filter. More information is available in the documentation. | | +| `lookback_window` | *Optional[int]* | :heavy_minus_sign: | Since attribution changes after the event date, and Google Analytics has a data processing latency, we should specify how many days in the past we should refresh the data in every run. So if you set it at 5 days, in every sync it will fetch the last bookmark date minus 5 days. | 2 | | `source_type` | [models.SourceGoogleAnalyticsDataAPIGoogleAnalyticsDataAPI](../models/sourcegoogleanalyticsdataapigoogleanalyticsdataapi.md) | :heavy_check_mark: | N/A | | | `window_in_days` | *Optional[int]* | :heavy_minus_sign: | The interval in days for each data request made to the Google Analytics API. A larger value speeds up data sync, but increases the chance of data sampling, which may result in inaccuracies. We recommend a value of 1 to minimize sampling, unless speed is an absolute priority over accuracy. Acceptable values range from 1 to 364. 
Does not apply to custom Cohort reports. More information is available in the documentation. | 30 | \ No newline at end of file diff --git a/docs/models/sourcegoogleanalyticsdataapicredentials.md b/docs/models/sourcegoogleanalyticsdataapicredentials.md index 9d5a2ae2..c4d096c4 100644 --- a/docs/models/sourcegoogleanalyticsdataapicredentials.md +++ b/docs/models/sourcegoogleanalyticsdataapicredentials.md @@ -5,15 +5,15 @@ Credentials for the service ## Supported Types -### AuthenticateViaGoogleOauth +### `models.AuthenticateViaGoogleOauth` ```python -sourceGoogleAnalyticsDataAPICredentials: models.AuthenticateViaGoogleOauth = /* values here */ +value: models.AuthenticateViaGoogleOauth = /* values here */ ``` -### ServiceAccountKeyAuthentication +### `models.SourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication` ```python -sourceGoogleAnalyticsDataAPICredentials: models.ServiceAccountKeyAuthentication = /* values here */ +value: models.SourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication = /* values here */ ``` diff --git a/docs/models/sourcegoogleanalyticsdataapifromvalue.md b/docs/models/sourcegoogleanalyticsdataapifromvalue.md index bc442ea3..ac110af2 100644 --- a/docs/models/sourcegoogleanalyticsdataapifromvalue.md +++ b/docs/models/sourcegoogleanalyticsdataapifromvalue.md @@ -3,15 +3,15 @@ ## Supported Types -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value` ```python -sourceGoogleAnalyticsDataAPIFromValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue` ```python -sourceGoogleAnalyticsDataAPIFromValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue = /* values here */ ``` diff --git a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilter1filter.md b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilter1filter.md index a7f50a7b..e64b1101 100644 --- a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilter1filter.md +++ b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilter1filter.md @@ -3,27 +3,27 @@ ## Supported Types -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Filter: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Filter: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter = /* values here */ 
+value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Filter: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Filter: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter = /* values here */ ``` diff --git a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilterfilter.md b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilterfilter.md index 931f3831..a05ff8ac 100644 --- a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilterfilter.md +++ b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilterfilter.md @@ -3,27 +3,27 @@ ## Supported Types -### SourceGoogleAnalyticsDataAPISchemasStringFilter +### `models.SourceGoogleAnalyticsDataAPISchemasStringFilter` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter: models.SourceGoogleAnalyticsDataAPISchemasStringFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasStringFilter = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasInListFilter +### `models.SourceGoogleAnalyticsDataAPISchemasInListFilter` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter: models.SourceGoogleAnalyticsDataAPISchemasInListFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasInListFilter = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasNumericFilter +### `models.SourceGoogleAnalyticsDataAPISchemasNumericFilter` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter: models.SourceGoogleAnalyticsDataAPISchemasNumericFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasNumericFilter = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasBetweenFilter +### `models.SourceGoogleAnalyticsDataAPISchemasBetweenFilter` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter: models.SourceGoogleAnalyticsDataAPISchemasBetweenFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasBetweenFilter = /* values here */ ``` diff --git a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterfilter.md b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterfilter.md index 3ad9b682..df7903c1 100644 --- a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterfilter.md +++ b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterfilter.md @@ -3,27 +3,27 @@ ## Supported Types -### 
SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter = /* values here */ ``` diff --git a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterfromvalue.md b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterfromvalue.md index d59cd7e9..b5c1ca75 100644 --- a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterfromvalue.md +++ b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterfromvalue.md @@ -3,15 +3,15 @@ ## Supported Types -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue = /* values here */ ``` diff --git a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfiltertovalue.md 
b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfiltertovalue.md index 0548f99b..e6cdf07d 100644 --- a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfiltertovalue.md +++ b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfiltertovalue.md @@ -3,15 +3,15 @@ ## Supported Types -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterToValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterToValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue = /* values here */ ``` diff --git a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfiltervalue.md b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfiltervalue.md index 1055b14f..40bdd496 100644 --- a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfiltervalue.md +++ b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfiltervalue.md @@ -3,15 +3,15 @@ ## Supported Types -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue = /* values here */ ``` diff --git a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarrayfilter.md b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarrayfilter.md index 838ee2ce..354bc3c7 100644 --- a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarrayfilter.md +++ b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarrayfilter.md @@ -3,27 +3,27 @@ ## Supported Types -### SourceGoogleAnalyticsDataAPIStringFilter +### `models.SourceGoogleAnalyticsDataAPIStringFilter` ```python 
-sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilter: models.SourceGoogleAnalyticsDataAPIStringFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPIStringFilter = /* values here */ ``` -### SourceGoogleAnalyticsDataAPIInListFilter +### `models.SourceGoogleAnalyticsDataAPIInListFilter` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilter: models.SourceGoogleAnalyticsDataAPIInListFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPIInListFilter = /* values here */ ``` -### SourceGoogleAnalyticsDataAPINumericFilter +### `models.SourceGoogleAnalyticsDataAPINumericFilter` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilter: models.SourceGoogleAnalyticsDataAPINumericFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPINumericFilter = /* values here */ ``` -### SourceGoogleAnalyticsDataAPIBetweenFilter +### `models.SourceGoogleAnalyticsDataAPIBetweenFilter` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilter: models.SourceGoogleAnalyticsDataAPIBetweenFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPIBetweenFilter = /* values here */ ``` diff --git a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarrayfromvalue.md b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarrayfromvalue.md index 8fd3cdec..46983443 100644 --- a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarrayfromvalue.md +++ b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarrayfromvalue.md @@ -3,15 +3,15 @@ ## Supported Types -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFromValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFromValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue = /* values here */ ``` diff --git a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfilterfilter.md b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfilterfilter.md index ee04b22c..5fd492ea 100644 --- a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfilterfilter.md +++ b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfilterfilter.md @@ -3,27 +3,27 @@ ## Supported Types -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilter: 
models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilter: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilter: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilter: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter = /* values here */ ``` diff --git a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfilterfromvalue.md b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfilterfromvalue.md index 51af0b3c..4f2ca495 100644 --- a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfilterfromvalue.md +++ b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfilterfromvalue.md @@ -3,15 +3,15 @@ ## Supported Types -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFromValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFromValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue = /* values here */ ``` diff --git a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilter3filter.md b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilter3filter.md 
index cf74e57a..61e5a861 100644 --- a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilter3filter.md +++ b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilter3filter.md @@ -3,27 +3,27 @@ ## Supported Types -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Filter: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Filter: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Filter: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Filter: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter = /* values here */ ``` diff --git a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilter3fromvalue.md b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilter3fromvalue.md index b83b5948..367aa964 100644 --- a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilter3fromvalue.md +++ b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilter3fromvalue.md @@ -3,15 +3,15 @@ ## Supported Types -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value = /* values here */ +value: 
models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue = /* values here */ ``` diff --git a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilter3tovalue.md b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilter3tovalue.md index 4919b9f1..94832df6 100644 --- a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilter3tovalue.md +++ b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilter3tovalue.md @@ -3,15 +3,15 @@ ## Supported Types -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue = /* values here */ ``` diff --git a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilter3value.md b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilter3value.md index 5f12d8dd..5d330b03 100644 --- a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilter3value.md +++ b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilter3value.md @@ -3,15 +3,15 @@ ## Supported Types -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value = /* values here */ ``` -### 
SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue = /* values here */ ``` diff --git a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilterfilter.md b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilterfilter.md index 982e0452..040a00cc 100644 --- a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilterfilter.md +++ b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilterfilter.md @@ -3,27 +3,27 @@ ## Supported Types -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilter: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilter: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilter: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilter: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter = /* values here */ ``` diff --git a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilterfromvalue.md b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilterfromvalue.md index d04850f9..b7d9eb38 100644 --- 
a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilterfromvalue.md +++ b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilterfromvalue.md @@ -3,15 +3,15 @@ ## Supported Types -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFromValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFromValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue = /* values here */ ``` diff --git a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfiltertovalue.md b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfiltertovalue.md index f4c2b0d7..ef0ed50e 100644 --- a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfiltertovalue.md +++ b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfiltertovalue.md @@ -3,15 +3,15 @@ ## Supported Types -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterToValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterToValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue = /* values here */ ``` diff --git a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfiltervalue.md b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfiltervalue.md index 750088fa..8eb513a9 100644 --- a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfiltervalue.md +++ 
b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfiltervalue.md @@ -3,15 +3,15 @@ ## Supported Types -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue = /* values here */ ``` diff --git a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltertovalue.md b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltertovalue.md index 77c03a22..13fafdea 100644 --- a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltertovalue.md +++ b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltertovalue.md @@ -3,15 +3,15 @@ ## Supported Types -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterToValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterToValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue = /* values here */ ``` diff --git a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltervalue.md b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltervalue.md index d17bc837..eb244863 100644 --- a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltervalue.md +++ b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltervalue.md @@ -3,15 +3,15 @@ ## Supported Types -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValue: 
models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue = /* values here */ ``` diff --git a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraytovalue.md b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraytovalue.md index a7d3ac8a..a2c39ca6 100644 --- a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraytovalue.md +++ b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraytovalue.md @@ -3,15 +3,15 @@ ## Supported Types -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayToValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayToValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue = /* values here */ ``` diff --git a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarrayvalue.md b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarrayvalue.md index c6554c86..73a01e8f 100644 --- a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarrayvalue.md +++ b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarrayvalue.md @@ -3,15 +3,15 @@ ## Supported Types -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue +### 
`models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue` ```python -sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue = /* values here */ ``` diff --git a/docs/models/sourcegoogleanalyticsdataapischemasfilter.md b/docs/models/sourcegoogleanalyticsdataapischemasfilter.md index 560f0b94..3f18bfbf 100644 --- a/docs/models/sourcegoogleanalyticsdataapischemasfilter.md +++ b/docs/models/sourcegoogleanalyticsdataapischemasfilter.md @@ -3,27 +3,27 @@ ## Supported Types -### StringFilter +### `models.StringFilter` ```python -sourceGoogleAnalyticsDataAPISchemasFilter: models.StringFilter = /* values here */ +value: models.StringFilter = /* values here */ ``` -### InListFilter +### `models.InListFilter` ```python -sourceGoogleAnalyticsDataAPISchemasFilter: models.InListFilter = /* values here */ +value: models.InListFilter = /* values here */ ``` -### NumericFilter +### `models.NumericFilter` ```python -sourceGoogleAnalyticsDataAPISchemasFilter: models.NumericFilter = /* values here */ +value: models.NumericFilter = /* values here */ ``` -### BetweenFilter +### `models.BetweenFilter` ```python -sourceGoogleAnalyticsDataAPISchemasFilter: models.BetweenFilter = /* values here */ +value: models.BetweenFilter = /* values here */ ``` diff --git a/docs/models/sourcegoogleanalyticsdataapischemasfromvalue.md b/docs/models/sourcegoogleanalyticsdataapischemasfromvalue.md index c3ff694e..adf154b9 100644 --- a/docs/models/sourcegoogleanalyticsdataapischemasfromvalue.md +++ b/docs/models/sourcegoogleanalyticsdataapischemasfromvalue.md @@ -3,15 +3,15 @@ ## Supported Types -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value` ```python -sourceGoogleAnalyticsDataAPISchemasFromValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue` ```python -sourceGoogleAnalyticsDataAPISchemasFromValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue = /* values here */ ``` diff --git a/docs/models/sourcegoogleanalyticsdataapischemastovalue.md b/docs/models/sourcegoogleanalyticsdataapischemastovalue.md index 2f5a91df..ba8b9236 100644 --- a/docs/models/sourcegoogleanalyticsdataapischemastovalue.md +++ b/docs/models/sourcegoogleanalyticsdataapischemastovalue.md @@ -3,15 +3,15 @@ ## Supported Types -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value +### 
`models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value` ```python -sourceGoogleAnalyticsDataAPISchemasToValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue` ```python -sourceGoogleAnalyticsDataAPISchemasToValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue = /* values here */ ``` diff --git a/docs/models/sourcegoogleanalyticsdataapischemasvalue.md b/docs/models/sourcegoogleanalyticsdataapischemasvalue.md index 8d675b40..fc1054f3 100644 --- a/docs/models/sourcegoogleanalyticsdataapischemasvalue.md +++ b/docs/models/sourcegoogleanalyticsdataapischemasvalue.md @@ -3,15 +3,15 @@ ## Supported Types -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value` ```python -sourceGoogleAnalyticsDataAPISchemasValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue` ```python -sourceGoogleAnalyticsDataAPISchemasValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue = /* values here */ ``` diff --git a/docs/models/docarrayhnswsearch.md b/docs/models/sourcegoogleanalyticsdataapiserviceaccountkeyauthentication.md similarity index 51% rename from docs/models/docarrayhnswsearch.md rename to docs/models/sourcegoogleanalyticsdataapiserviceaccountkeyauthentication.md index c66d5252..5e243d08 100644 --- a/docs/models/docarrayhnswsearch.md +++ b/docs/models/sourcegoogleanalyticsdataapiserviceaccountkeyauthentication.md @@ -1,11 +1,9 @@ -# DocArrayHnswSearch - -DocArrayHnswSearch is a lightweight Document Index implementation provided by Docarray that runs fully locally and is best suited for small- to medium-sized datasets. It stores vectors on disk in hnswlib, and stores all other data in SQLite. 
+# SourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication ## Fields -| Field | Type | Required | Description | Example | -| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `destination_path` | *str* | :heavy_check_mark: | Path to the directory where hnswlib and meta data files will be written. The files will be placed inside that local mount. All files in the specified destination directory will be deleted on each run. | /local/my_hnswlib_index | -| `mode` | [Optional[models.DestinationLangchainSchemasIndexingIndexingMode]](../models/destinationlangchainschemasindexingindexingmode.md) | :heavy_minus_sign: | N/A | | \ No newline at end of file +| Field | Type | Required | Description | Example | +| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `credentials_json` | *str* | :heavy_check_mark: | The JSON key linked to the service account used for authorization. For steps on obtaining this key, refer to the setup guide. | { "type": "service_account", "project_id": YOUR_PROJECT_ID, "private_key_id": YOUR_PRIVATE_KEY, ... 
} | +| `auth_type` | [Optional[models.SourceGoogleAnalyticsDataAPISchemasAuthType]](../models/sourcegoogleanalyticsdataapischemasauthtype.md) | :heavy_minus_sign: | N/A | | \ No newline at end of file diff --git a/docs/models/sourcegoogleanalyticsdataapitovalue.md b/docs/models/sourcegoogleanalyticsdataapitovalue.md index d96a3618..9f7de730 100644 --- a/docs/models/sourcegoogleanalyticsdataapitovalue.md +++ b/docs/models/sourcegoogleanalyticsdataapitovalue.md @@ -3,15 +3,15 @@ ## Supported Types -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value` ```python -sourceGoogleAnalyticsDataAPIToValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue` ```python -sourceGoogleAnalyticsDataAPIToValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue = /* values here */ ``` diff --git a/docs/models/sourcegoogleanalyticsdataapivalue.md b/docs/models/sourcegoogleanalyticsdataapivalue.md index 462821f3..b5f2b7d8 100644 --- a/docs/models/sourcegoogleanalyticsdataapivalue.md +++ b/docs/models/sourcegoogleanalyticsdataapivalue.md @@ -3,15 +3,15 @@ ## Supported Types -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value` ```python -sourceGoogleAnalyticsDataAPIValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue` ```python -sourceGoogleAnalyticsDataAPIValue: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue = /* values here */ ``` diff --git a/docs/models/sourcegoogleanalyticsv4serviceaccountonly.md b/docs/models/sourcegoogleanalyticsv4serviceaccountonly.md deleted file mode 100644 index cc7905c3..00000000 --- a/docs/models/sourcegoogleanalyticsv4serviceaccountonly.md +++ /dev/null @@ -1,14 +0,0 @@ -# SourceGoogleAnalyticsV4ServiceAccountOnly - - -## Fields - -| Field | Type | Required | Description | Example | -| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `start_date` | [datetime](https://docs.python.org/3/library/datetime.html#datetime-objects) | :heavy_check_mark: | The date in the format YYYY-MM-DD. Any data before this date will not be replicated. | 2020-06-01 | -| `view_id` | *str* | :heavy_check_mark: | The ID for the Google Analytics View you want to fetch data from. This can be found from the Google Analytics Account Explorer. | | -| `credentials` | [Optional[models.SourceGoogleAnalyticsV4ServiceAccountOnlyCredentials]](../models/sourcegoogleanalyticsv4serviceaccountonlycredentials.md) | :heavy_minus_sign: | Credentials for the service | | -| `custom_reports` | *Optional[str]* | :heavy_minus_sign: | A JSON array describing the custom reports you want to sync from Google Analytics. See the docs for more information about the exact format you can use to fill out this field. | | -| `end_date` | [datetime](https://docs.python.org/3/library/datetime.html#datetime-objects) | :heavy_minus_sign: | The date in the format YYYY-MM-DD. Any data after this date will not be replicated. 
| 2020-06-01 | -| `source_type` | [models.GoogleAnalyticsV4ServiceAccountOnly](../models/googleanalyticsv4serviceaccountonly.md) | :heavy_check_mark: | N/A | | -| `window_in_days` | *Optional[int]* | :heavy_minus_sign: | The time increment used by the connector when requesting data from the Google Analytics API. More information is available in the the docs. The bigger this value is, the faster the sync will be, but the more likely that sampling will be applied to your data, potentially causing inaccuracies in the returned results. We recommend setting this to 1 unless you have a hard requirement to make the sync faster at the expense of accuracy. The minimum allowed value for this field is 1, and the maximum is 364. | 30 | \ No newline at end of file diff --git a/docs/models/sourcegoogleanalyticsv4serviceaccountonlycredentials.md b/docs/models/sourcegoogleanalyticsv4serviceaccountonlycredentials.md deleted file mode 100644 index d7163194..00000000 --- a/docs/models/sourcegoogleanalyticsv4serviceaccountonlycredentials.md +++ /dev/null @@ -1,13 +0,0 @@ -# SourceGoogleAnalyticsV4ServiceAccountOnlyCredentials - -Credentials for the service - - -## Supported Types - -### SourceGoogleAnalyticsV4ServiceAccountOnlyServiceAccountKeyAuthentication - -```python -sourceGoogleAnalyticsV4ServiceAccountOnlyCredentials: models.SourceGoogleAnalyticsV4ServiceAccountOnlyServiceAccountKeyAuthentication = /* values here */ -``` - diff --git a/docs/models/sourcegoogleanalyticsv4serviceaccountonlyserviceaccountkeyauthentication.md b/docs/models/sourcegoogleanalyticsv4serviceaccountonlyserviceaccountkeyauthentication.md deleted file mode 100644 index 896dc9e5..00000000 --- a/docs/models/sourcegoogleanalyticsv4serviceaccountonlyserviceaccountkeyauthentication.md +++ /dev/null @@ -1,9 +0,0 @@ -# SourceGoogleAnalyticsV4ServiceAccountOnlyServiceAccountKeyAuthentication - - -## Fields - -| Field | Type | Required | Description | Example | -| ------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------ | -| `credentials_json` | *str* | :heavy_check_mark: | The JSON key of the service account to use for authorization | { "type": "service_account", "project_id": YOUR_PROJECT_ID, "private_key_id": YOUR_PRIVATE_KEY, ... } | -| `auth_type` | [Optional[models.SourceGoogleAnalyticsV4ServiceAccountOnlyAuthType]](../models/sourcegoogleanalyticsv4serviceaccountonlyauthtype.md) | :heavy_minus_sign: | N/A | | \ No newline at end of file diff --git a/docs/models/sourcegoogledirectorygooglecredentials.md b/docs/models/sourcegoogledirectorygooglecredentials.md index 946beef1..814c29ac 100644 --- a/docs/models/sourcegoogledirectorygooglecredentials.md +++ b/docs/models/sourcegoogledirectorygooglecredentials.md @@ -5,15 +5,15 @@ Google APIs use the OAuth 2.0 protocol for authentication and authorization. 
The ## Supported Types -### SignInViaGoogleOAuth +### `models.SignInViaGoogleOAuth` ```python -sourceGoogleDirectoryGoogleCredentials: models.SignInViaGoogleOAuth = /* values here */ +value: models.SignInViaGoogleOAuth = /* values here */ ``` -### ServiceAccountKey +### `models.ServiceAccountKey` ```python -sourceGoogleDirectoryGoogleCredentials: models.ServiceAccountKey = /* values here */ +value: models.ServiceAccountKey = /* values here */ ``` diff --git a/docs/models/sourcegoogledriveauthentication.md b/docs/models/sourcegoogledriveauthentication.md index 7f37c9b9..b3fdd7f8 100644 --- a/docs/models/sourcegoogledriveauthentication.md +++ b/docs/models/sourcegoogledriveauthentication.md @@ -5,15 +5,15 @@ Credentials for connecting to the Google Drive API ## Supported Types -### SourceGoogleDriveAuthenticateViaGoogleOAuth +### `models.SourceGoogleDriveAuthenticateViaGoogleOAuth` ```python -sourceGoogleDriveAuthentication: models.SourceGoogleDriveAuthenticateViaGoogleOAuth = /* values here */ +value: models.SourceGoogleDriveAuthenticateViaGoogleOAuth = /* values here */ ``` -### SourceGoogleDriveServiceAccountKeyAuthentication +### `models.SourceGoogleDriveServiceAccountKeyAuthentication` ```python -sourceGoogleDriveAuthentication: models.SourceGoogleDriveServiceAccountKeyAuthentication = /* values here */ +value: models.SourceGoogleDriveServiceAccountKeyAuthentication = /* values here */ ``` diff --git a/docs/models/sourcegoogledrivecsvheaderdefinition.md b/docs/models/sourcegoogledrivecsvheaderdefinition.md index 693e9d38..f5ac4156 100644 --- a/docs/models/sourcegoogledrivecsvheaderdefinition.md +++ b/docs/models/sourcegoogledrivecsvheaderdefinition.md @@ -5,21 +5,21 @@ How headers will be defined. `User Provided` assumes the CSV does not have a hea ## Supported Types -### SourceGoogleDriveFromCSV +### `models.SourceGoogleDriveFromCSV` ```python -sourceGoogleDriveCSVHeaderDefinition: models.SourceGoogleDriveFromCSV = /* values here */ +value: models.SourceGoogleDriveFromCSV = /* values here */ ``` -### SourceGoogleDriveAutogenerated +### `models.SourceGoogleDriveAutogenerated` ```python -sourceGoogleDriveCSVHeaderDefinition: models.SourceGoogleDriveAutogenerated = /* values here */ +value: models.SourceGoogleDriveAutogenerated = /* values here */ ``` -### SourceGoogleDriveUserProvided +### `models.SourceGoogleDriveUserProvided` ```python -sourceGoogleDriveCSVHeaderDefinition: models.SourceGoogleDriveUserProvided = /* values here */ +value: models.SourceGoogleDriveUserProvided = /* values here */ ``` diff --git a/docs/models/sourcegoogledriveformat.md b/docs/models/sourcegoogledriveformat.md index 5ed61033..09cffae4 100644 --- a/docs/models/sourcegoogledriveformat.md +++ b/docs/models/sourcegoogledriveformat.md @@ -5,33 +5,33 @@ The configuration options that are used to alter how to read incoming files that ## Supported Types -### SourceGoogleDriveAvroFormat +### `models.SourceGoogleDriveAvroFormat` ```python -sourceGoogleDriveFormat: models.SourceGoogleDriveAvroFormat = /* values here */ +value: models.SourceGoogleDriveAvroFormat = /* values here */ ``` -### SourceGoogleDriveCSVFormat +### `models.SourceGoogleDriveCSVFormat` ```python -sourceGoogleDriveFormat: models.SourceGoogleDriveCSVFormat = /* values here */ +value: models.SourceGoogleDriveCSVFormat = /* values here */ ``` -### SourceGoogleDriveJsonlFormat +### `models.SourceGoogleDriveJsonlFormat` ```python -sourceGoogleDriveFormat: models.SourceGoogleDriveJsonlFormat = /* values here */ +value: models.SourceGoogleDriveJsonlFormat 
= /* values here */ ``` -### SourceGoogleDriveParquetFormat +### `models.SourceGoogleDriveParquetFormat` ```python -sourceGoogleDriveFormat: models.SourceGoogleDriveParquetFormat = /* values here */ +value: models.SourceGoogleDriveParquetFormat = /* values here */ ``` -### SourceGoogleDriveDocumentFileTypeFormatExperimental +### `models.SourceGoogleDriveDocumentFileTypeFormatExperimental` ```python -sourceGoogleDriveFormat: models.SourceGoogleDriveDocumentFileTypeFormatExperimental = /* values here */ +value: models.SourceGoogleDriveDocumentFileTypeFormatExperimental = /* values here */ ``` diff --git a/docs/models/sourcegoogledriveprocessing.md b/docs/models/sourcegoogledriveprocessing.md index 669ac025..797900c0 100644 --- a/docs/models/sourcegoogledriveprocessing.md +++ b/docs/models/sourcegoogledriveprocessing.md @@ -5,9 +5,9 @@ Processing configuration ## Supported Types -### SourceGoogleDriveLocal +### `models.SourceGoogleDriveLocal` ```python -sourceGoogleDriveProcessing: models.SourceGoogleDriveLocal = /* values here */ +value: models.SourceGoogleDriveLocal = /* values here */ ``` diff --git a/docs/models/sourcegooglesheetsauthentication.md b/docs/models/sourcegooglesheetsauthentication.md index 7808a8e2..16c9e55c 100644 --- a/docs/models/sourcegooglesheetsauthentication.md +++ b/docs/models/sourcegooglesheetsauthentication.md @@ -5,15 +5,15 @@ Credentials for connecting to the Google Sheets API ## Supported Types -### SourceGoogleSheetsAuthenticateViaGoogleOAuth +### `models.SourceGoogleSheetsAuthenticateViaGoogleOAuth` ```python -sourceGoogleSheetsAuthentication: models.SourceGoogleSheetsAuthenticateViaGoogleOAuth = /* values here */ +value: models.SourceGoogleSheetsAuthenticateViaGoogleOAuth = /* values here */ ``` -### SourceGoogleSheetsServiceAccountKeyAuthentication +### `models.SourceGoogleSheetsServiceAccountKeyAuthentication` ```python -sourceGoogleSheetsAuthentication: models.SourceGoogleSheetsServiceAccountKeyAuthentication = /* values here */ +value: models.SourceGoogleSheetsServiceAccountKeyAuthentication = /* values here */ ``` diff --git a/docs/models/sourceharvestauthenticationmechanism.md b/docs/models/sourceharvestauthenticationmechanism.md index e4057dc4..74711065 100644 --- a/docs/models/sourceharvestauthenticationmechanism.md +++ b/docs/models/sourceharvestauthenticationmechanism.md @@ -5,15 +5,15 @@ Choose how to authenticate to Harvest. ## Supported Types -### AuthenticateViaHarvestOAuth +### `models.AuthenticateViaHarvestOAuth` ```python -sourceHarvestAuthenticationMechanism: models.AuthenticateViaHarvestOAuth = /* values here */ +value: models.AuthenticateViaHarvestOAuth = /* values here */ ``` -### SourceHarvestAuthenticateWithPersonalAccessToken +### `models.SourceHarvestAuthenticateWithPersonalAccessToken` ```python -sourceHarvestAuthenticationMechanism: models.SourceHarvestAuthenticateWithPersonalAccessToken = /* values here */ +value: models.SourceHarvestAuthenticateWithPersonalAccessToken = /* values here */ ``` diff --git a/docs/models/sourcehubspotauthentication.md b/docs/models/sourcehubspotauthentication.md index eb2bbccd..8c15359e 100644 --- a/docs/models/sourcehubspotauthentication.md +++ b/docs/models/sourcehubspotauthentication.md @@ -5,15 +5,15 @@ Choose how to authenticate to HubSpot. 
## Supported Types -### SourceHubspotOAuth +### `models.SourceHubspotOAuth` ```python -sourceHubspotAuthentication: models.SourceHubspotOAuth = /* values here */ +value: models.SourceHubspotOAuth = /* values here */ ``` -### PrivateApp +### `models.PrivateApp` ```python -sourceHubspotAuthentication: models.PrivateApp = /* values here */ +value: models.PrivateApp = /* values here */ ``` diff --git a/docs/models/sourceklarna.md b/docs/models/sourceklarna.md index 061b6138..c7ad7e50 100644 --- a/docs/models/sourceklarna.md +++ b/docs/models/sourceklarna.md @@ -6,7 +6,7 @@ | Field | Type | Required | Description | | -------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | `password` | *str* | :heavy_check_mark: | A string which is associated with your Merchant ID and is used to authorize use of Klarna's APIs (https://developers.klarna.com/api/#authentication) | -| `region` | [models.SourceKlarnaRegion](../models/sourceklarnaregion.md) | :heavy_check_mark: | Base url region (For playground eu https://docs.klarna.com/klarna-payments/api/payments-api/#tag/API-URLs). Supported 'eu', 'us', 'oc' | +| `region` | [models.SourceKlarnaRegion](../models/sourceklarnaregion.md) | :heavy_check_mark: | Base url region (For playground eu https://docs.klarna.com/klarna-payments/api/payments-api/#tag/API-URLs). Supported 'eu', 'na', 'oc' | | `username` | *str* | :heavy_check_mark: | Consists of your Merchant ID (eid) - a unique number that identifies your e-store, combined with a random string (https://developers.klarna.com/api/#authentication) | | `playground` | *Optional[bool]* | :heavy_minus_sign: | Propertie defining if connector is used against playground or production environment | | `source_type` | [models.Klarna](../models/klarna.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourceklarnaregion.md b/docs/models/sourceklarnaregion.md index 7bfa1769..f838a745 100644 --- a/docs/models/sourceklarnaregion.md +++ b/docs/models/sourceklarnaregion.md @@ -1,6 +1,6 @@ # SourceKlarnaRegion -Base url region (For playground eu https://docs.klarna.com/klarna-payments/api/payments-api/#tag/API-URLs). Supported 'eu', 'us', 'oc' +Base url region (For playground eu https://docs.klarna.com/klarna-payments/api/payments-api/#tag/API-URLs). Supported 'eu', 'na', 'oc' ## Values @@ -8,5 +8,5 @@ Base url region (For playground eu https://docs.klarna.com/klarna-payments/api/p | Name | Value | | ----- | ----- | | `EU` | eu | -| `US` | us | +| `NA` | na | | `OC` | oc | \ No newline at end of file diff --git a/docs/models/sourceleverhiringauthenticationmechanism.md b/docs/models/sourceleverhiringauthenticationmechanism.md index 9e39636d..b41af360 100644 --- a/docs/models/sourceleverhiringauthenticationmechanism.md +++ b/docs/models/sourceleverhiringauthenticationmechanism.md @@ -5,15 +5,15 @@ Choose how to authenticate to Lever Hiring. 
## Supported Types -### AuthenticateViaLeverOAuth +### `models.AuthenticateViaLeverOAuth` ```python -sourceLeverHiringAuthenticationMechanism: models.AuthenticateViaLeverOAuth = /* values here */ +value: models.AuthenticateViaLeverOAuth = /* values here */ ``` -### AuthenticateViaLeverAPIKey +### `models.AuthenticateViaLeverAPIKey` ```python -sourceLeverHiringAuthenticationMechanism: models.AuthenticateViaLeverAPIKey = /* values here */ +value: models.AuthenticateViaLeverAPIKey = /* values here */ ``` diff --git a/docs/models/sourcelinkedinadsauthentication.md b/docs/models/sourcelinkedinadsauthentication.md index 25e9bf84..1ef430fa 100644 --- a/docs/models/sourcelinkedinadsauthentication.md +++ b/docs/models/sourcelinkedinadsauthentication.md @@ -3,15 +3,15 @@ ## Supported Types -### SourceLinkedinAdsOAuth20 +### `models.SourceLinkedinAdsOAuth20` ```python -sourceLinkedinAdsAuthentication: models.SourceLinkedinAdsOAuth20 = /* values here */ +value: models.SourceLinkedinAdsOAuth20 = /* values here */ ``` -### AccessToken +### `models.AccessToken` ```python -sourceLinkedinAdsAuthentication: models.AccessToken = /* values here */ +value: models.AccessToken = /* values here */ ``` diff --git a/docs/models/sourcelinkedinpages.md b/docs/models/sourcelinkedinpages.md index 97fde45d..d2b8e3df 100644 --- a/docs/models/sourcelinkedinpages.md +++ b/docs/models/sourcelinkedinpages.md @@ -3,8 +3,10 @@ ## Fields -| Field | Type | Required | Description | Example | -| ---------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | -| `org_id` | *str* | :heavy_check_mark: | Specify the Organization ID | 123456789 | -| `credentials` | [Optional[models.SourceLinkedinPagesAuthentication]](../models/sourcelinkedinpagesauthentication.md) | :heavy_minus_sign: | N/A | | -| `source_type` | [models.LinkedinPages](../models/linkedinpages.md) | :heavy_check_mark: | N/A | | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `org_id` | *str* | :heavy_check_mark: | Specify the Organization ID | 123456789 | +| `credentials` | [Optional[models.SourceLinkedinPagesAuthentication]](../models/sourcelinkedinpagesauthentication.md) | :heavy_minus_sign: | N/A | | +| `source_type` | [models.LinkedinPages](../models/linkedinpages.md) | :heavy_check_mark: | N/A | | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | Start date for getting metrics per time period. Must be at most 12 months before the request date (UTC) and at least 2 days prior to the request date (UTC). See https://bit.ly/linkedin-pages-date-rules | | +| `time_granularity_type` | [Optional[models.TimeGranularityType]](../models/timegranularitytype.md) | :heavy_minus_sign: | Granularity of the statistics for metrics per time period. Must be either "DAY" or "MONTH" | | \ No newline at end of file diff --git a/docs/models/sourcelinkedinpagesauthentication.md b/docs/models/sourcelinkedinpagesauthentication.md index 542fa99e..abd67973 100644 --- a/docs/models/sourcelinkedinpagesauthentication.md +++ b/docs/models/sourcelinkedinpagesauthentication.md @@ -3,15 +3,15 @@ ## Supported Types -### SourceLinkedinPagesOAuth20 +### `models.SourceLinkedinPagesOAuth20` ```python -sourceLinkedinPagesAuthentication: models.SourceLinkedinPagesOAuth20 = /* values here */ +value: models.SourceLinkedinPagesOAuth20 = /* values here */ ``` -### SourceLinkedinPagesAccessToken +### `models.SourceLinkedinPagesAccessToken` ```python -sourceLinkedinPagesAuthentication: models.SourceLinkedinPagesAccessToken = /* values here */ +value: models.SourceLinkedinPagesAccessToken = /* values here */ ``` diff --git a/docs/models/sourcemailchimpauthentication.md b/docs/models/sourcemailchimpauthentication.md index 30a146cf..8c24beba 100644 --- a/docs/models/sourcemailchimpauthentication.md +++ b/docs/models/sourcemailchimpauthentication.md @@ -3,15 +3,15 @@ ## Supported Types -### SourceMailchimpOAuth20 +### `models.SourceMailchimpOAuth20` ```python -sourceMailchimpAuthentication: models.SourceMailchimpOAuth20 = /* values here */ +value: models.SourceMailchimpOAuth20 = /* values here */ ``` -### APIKey +### `models.APIKey` ```python -sourceMailchimpAuthentication: models.APIKey = /* values here */ +value: models.APIKey = /* values here */ ``` diff --git a/docs/models/sourcemicrosoftonedriveauthentication.md b/docs/models/sourcemicrosoftonedriveauthentication.md index 8a4611e1..7ff97fb9 100644 --- a/docs/models/sourcemicrosoftonedriveauthentication.md +++ b/docs/models/sourcemicrosoftonedriveauthentication.md @@ -5,15 +5,15 @@ Credentials for connecting to the One Drive API ## Supported Types -### AuthenticateViaMicrosoftOAuth +### `models.AuthenticateViaMicrosoftOAuth` ```python -sourceMicrosoftOnedriveAuthentication: models.AuthenticateViaMicrosoftOAuth = /* values here */ +value: models.AuthenticateViaMicrosoftOAuth = /* values here */ ``` -### ServiceKeyAuthentication +### `models.ServiceKeyAuthentication` ```python -sourceMicrosoftOnedriveAuthentication: models.ServiceKeyAuthentication = /* values here */ +value: models.ServiceKeyAuthentication = /* values here */ ``` diff --git a/docs/models/sourcemicrosoftonedrivecsvheaderdefinition.md 
b/docs/models/sourcemicrosoftonedrivecsvheaderdefinition.md index 5a1fb600..8f019fe7 100644 --- a/docs/models/sourcemicrosoftonedrivecsvheaderdefinition.md +++ b/docs/models/sourcemicrosoftonedrivecsvheaderdefinition.md @@ -5,21 +5,21 @@ How headers will be defined. `User Provided` assumes the CSV does not have a hea ## Supported Types -### SourceMicrosoftOnedriveFromCSV +### `models.SourceMicrosoftOnedriveFromCSV` ```python -sourceMicrosoftOnedriveCSVHeaderDefinition: models.SourceMicrosoftOnedriveFromCSV = /* values here */ +value: models.SourceMicrosoftOnedriveFromCSV = /* values here */ ``` -### SourceMicrosoftOnedriveAutogenerated +### `models.SourceMicrosoftOnedriveAutogenerated` ```python -sourceMicrosoftOnedriveCSVHeaderDefinition: models.SourceMicrosoftOnedriveAutogenerated = /* values here */ +value: models.SourceMicrosoftOnedriveAutogenerated = /* values here */ ``` -### SourceMicrosoftOnedriveUserProvided +### `models.SourceMicrosoftOnedriveUserProvided` ```python -sourceMicrosoftOnedriveCSVHeaderDefinition: models.SourceMicrosoftOnedriveUserProvided = /* values here */ +value: models.SourceMicrosoftOnedriveUserProvided = /* values here */ ``` diff --git a/docs/models/sourcemicrosoftonedriveformat.md b/docs/models/sourcemicrosoftonedriveformat.md index 3ccc0672..843a7c84 100644 --- a/docs/models/sourcemicrosoftonedriveformat.md +++ b/docs/models/sourcemicrosoftonedriveformat.md @@ -5,33 +5,33 @@ The configuration options that are used to alter how to read incoming files that ## Supported Types -### SourceMicrosoftOnedriveAvroFormat +### `models.SourceMicrosoftOnedriveAvroFormat` ```python -sourceMicrosoftOnedriveFormat: models.SourceMicrosoftOnedriveAvroFormat = /* values here */ +value: models.SourceMicrosoftOnedriveAvroFormat = /* values here */ ``` -### SourceMicrosoftOnedriveCSVFormat +### `models.SourceMicrosoftOnedriveCSVFormat` ```python -sourceMicrosoftOnedriveFormat: models.SourceMicrosoftOnedriveCSVFormat = /* values here */ +value: models.SourceMicrosoftOnedriveCSVFormat = /* values here */ ``` -### SourceMicrosoftOnedriveJsonlFormat +### `models.SourceMicrosoftOnedriveJsonlFormat` ```python -sourceMicrosoftOnedriveFormat: models.SourceMicrosoftOnedriveJsonlFormat = /* values here */ +value: models.SourceMicrosoftOnedriveJsonlFormat = /* values here */ ``` -### SourceMicrosoftOnedriveParquetFormat +### `models.SourceMicrosoftOnedriveParquetFormat` ```python -sourceMicrosoftOnedriveFormat: models.SourceMicrosoftOnedriveParquetFormat = /* values here */ +value: models.SourceMicrosoftOnedriveParquetFormat = /* values here */ ``` -### SourceMicrosoftOnedriveDocumentFileTypeFormatExperimental +### `models.UnstructuredDocumentFormat` ```python -sourceMicrosoftOnedriveFormat: models.SourceMicrosoftOnedriveDocumentFileTypeFormatExperimental = /* values here */ +value: models.UnstructuredDocumentFormat = /* values here */ ``` diff --git a/docs/models/sourcemicrosoftonedriveprocessing.md b/docs/models/sourcemicrosoftonedriveprocessing.md index 3bce263d..c96822f9 100644 --- a/docs/models/sourcemicrosoftonedriveprocessing.md +++ b/docs/models/sourcemicrosoftonedriveprocessing.md @@ -5,9 +5,9 @@ Processing configuration ## Supported Types -### SourceMicrosoftOnedriveLocal +### `models.SourceMicrosoftOnedriveLocal` ```python -sourceMicrosoftOnedriveProcessing: models.SourceMicrosoftOnedriveLocal = /* values here */ +value: models.SourceMicrosoftOnedriveLocal = /* values here */ ``` diff --git a/docs/models/sourcemicrosoftsharepointauthentication.md 
b/docs/models/sourcemicrosoftsharepointauthentication.md index 6f529926..45374585 100644 --- a/docs/models/sourcemicrosoftsharepointauthentication.md +++ b/docs/models/sourcemicrosoftsharepointauthentication.md @@ -5,15 +5,15 @@ Credentials for connecting to the One Drive API ## Supported Types -### SourceMicrosoftSharepointAuthenticateViaMicrosoftOAuth +### `models.SourceMicrosoftSharepointAuthenticateViaMicrosoftOAuth` ```python -sourceMicrosoftSharepointAuthentication: models.SourceMicrosoftSharepointAuthenticateViaMicrosoftOAuth = /* values here */ +value: models.SourceMicrosoftSharepointAuthenticateViaMicrosoftOAuth = /* values here */ ``` -### SourceMicrosoftSharepointServiceKeyAuthentication +### `models.SourceMicrosoftSharepointServiceKeyAuthentication` ```python -sourceMicrosoftSharepointAuthentication: models.SourceMicrosoftSharepointServiceKeyAuthentication = /* values here */ +value: models.SourceMicrosoftSharepointServiceKeyAuthentication = /* values here */ ``` diff --git a/docs/models/sourcemicrosoftsharepointcsvheaderdefinition.md b/docs/models/sourcemicrosoftsharepointcsvheaderdefinition.md index 0773bf5d..f4731787 100644 --- a/docs/models/sourcemicrosoftsharepointcsvheaderdefinition.md +++ b/docs/models/sourcemicrosoftsharepointcsvheaderdefinition.md @@ -5,21 +5,21 @@ How headers will be defined. `User Provided` assumes the CSV does not have a hea ## Supported Types -### SourceMicrosoftSharepointFromCSV +### `models.SourceMicrosoftSharepointFromCSV` ```python -sourceMicrosoftSharepointCSVHeaderDefinition: models.SourceMicrosoftSharepointFromCSV = /* values here */ +value: models.SourceMicrosoftSharepointFromCSV = /* values here */ ``` -### SourceMicrosoftSharepointAutogenerated +### `models.SourceMicrosoftSharepointAutogenerated` ```python -sourceMicrosoftSharepointCSVHeaderDefinition: models.SourceMicrosoftSharepointAutogenerated = /* values here */ +value: models.SourceMicrosoftSharepointAutogenerated = /* values here */ ``` -### SourceMicrosoftSharepointUserProvided +### `models.SourceMicrosoftSharepointUserProvided` ```python -sourceMicrosoftSharepointCSVHeaderDefinition: models.SourceMicrosoftSharepointUserProvided = /* values here */ +value: models.SourceMicrosoftSharepointUserProvided = /* values here */ ``` diff --git a/docs/models/sourcemicrosoftsharepointformat.md b/docs/models/sourcemicrosoftsharepointformat.md index 16995664..7d4a7850 100644 --- a/docs/models/sourcemicrosoftsharepointformat.md +++ b/docs/models/sourcemicrosoftsharepointformat.md @@ -5,33 +5,33 @@ The configuration options that are used to alter how to read incoming files that ## Supported Types -### SourceMicrosoftSharepointAvroFormat +### `models.SourceMicrosoftSharepointAvroFormat` ```python -sourceMicrosoftSharepointFormat: models.SourceMicrosoftSharepointAvroFormat = /* values here */ +value: models.SourceMicrosoftSharepointAvroFormat = /* values here */ ``` -### SourceMicrosoftSharepointCSVFormat +### `models.SourceMicrosoftSharepointCSVFormat` ```python -sourceMicrosoftSharepointFormat: models.SourceMicrosoftSharepointCSVFormat = /* values here */ +value: models.SourceMicrosoftSharepointCSVFormat = /* values here */ ``` -### SourceMicrosoftSharepointJsonlFormat +### `models.SourceMicrosoftSharepointJsonlFormat` ```python -sourceMicrosoftSharepointFormat: models.SourceMicrosoftSharepointJsonlFormat = /* values here */ +value: models.SourceMicrosoftSharepointJsonlFormat = /* values here */ ``` -### SourceMicrosoftSharepointParquetFormat +### `models.SourceMicrosoftSharepointParquetFormat` 
```python -sourceMicrosoftSharepointFormat: models.SourceMicrosoftSharepointParquetFormat = /* values here */ +value: models.SourceMicrosoftSharepointParquetFormat = /* values here */ ``` -### SourceMicrosoftSharepointDocumentFileTypeFormatExperimental +### `models.SourceMicrosoftSharepointUnstructuredDocumentFormat` ```python -sourceMicrosoftSharepointFormat: models.SourceMicrosoftSharepointDocumentFileTypeFormatExperimental = /* values here */ +value: models.SourceMicrosoftSharepointUnstructuredDocumentFormat = /* values here */ ``` diff --git a/docs/models/sourcemicrosoftsharepointprocessing.md b/docs/models/sourcemicrosoftsharepointprocessing.md index 7a7f5f58..594e87e0 100644 --- a/docs/models/sourcemicrosoftsharepointprocessing.md +++ b/docs/models/sourcemicrosoftsharepointprocessing.md @@ -5,9 +5,9 @@ Processing configuration ## Supported Types -### SourceMicrosoftSharepointLocal +### `models.SourceMicrosoftSharepointLocal` ```python -sourceMicrosoftSharepointProcessing: models.SourceMicrosoftSharepointLocal = /* values here */ +value: models.SourceMicrosoftSharepointLocal = /* values here */ ``` diff --git a/docs/models/sourcemicrosoftsharepointdocumentfiletypeformatexperimental.md b/docs/models/sourcemicrosoftsharepointunstructureddocumentformat.md similarity index 99% rename from docs/models/sourcemicrosoftsharepointdocumentfiletypeformatexperimental.md rename to docs/models/sourcemicrosoftsharepointunstructureddocumentformat.md index dfa84077..7622ee59 100644 --- a/docs/models/sourcemicrosoftsharepointdocumentfiletypeformatexperimental.md +++ b/docs/models/sourcemicrosoftsharepointunstructureddocumentformat.md @@ -1,4 +1,4 @@ -# SourceMicrosoftSharepointDocumentFileTypeFormatExperimental +# SourceMicrosoftSharepointUnstructuredDocumentFormat Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one record per file. 
diff --git a/docs/models/sourcemicrosoftteamsauthenticationmechanism.md b/docs/models/sourcemicrosoftteamsauthenticationmechanism.md index f020e44e..3a99bb20 100644 --- a/docs/models/sourcemicrosoftteamsauthenticationmechanism.md +++ b/docs/models/sourcemicrosoftteamsauthenticationmechanism.md @@ -5,15 +5,15 @@ Choose how to authenticate to Microsoft ## Supported Types -### AuthenticateViaMicrosoftOAuth20 +### `models.AuthenticateViaMicrosoftOAuth20` ```python -sourceMicrosoftTeamsAuthenticationMechanism: models.AuthenticateViaMicrosoftOAuth20 = /* values here */ +value: models.AuthenticateViaMicrosoftOAuth20 = /* values here */ ``` -### AuthenticateViaMicrosoft +### `models.AuthenticateViaMicrosoft` ```python -sourceMicrosoftTeamsAuthenticationMechanism: models.AuthenticateViaMicrosoft = /* values here */ +value: models.AuthenticateViaMicrosoft = /* values here */ ``` diff --git a/docs/models/sourcemondayauthorizationmethod.md b/docs/models/sourcemondayauthorizationmethod.md index b66cfb9b..1027b35e 100644 --- a/docs/models/sourcemondayauthorizationmethod.md +++ b/docs/models/sourcemondayauthorizationmethod.md @@ -3,15 +3,15 @@ ## Supported Types -### SourceMondayOAuth20 +### `models.SourceMondayOAuth20` ```python -sourceMondayAuthorizationMethod: models.SourceMondayOAuth20 = /* values here */ +value: models.SourceMondayOAuth20 = /* values here */ ``` -### APIToken +### `models.APIToken` ```python -sourceMondayAuthorizationMethod: models.APIToken = /* values here */ +value: models.APIToken = /* values here */ ``` diff --git a/docs/models/sourcemssqlsshtunnelmethod.md b/docs/models/sourcemssqlsshtunnelmethod.md index 6561e20e..b31dc03f 100644 --- a/docs/models/sourcemssqlsshtunnelmethod.md +++ b/docs/models/sourcemssqlsshtunnelmethod.md @@ -5,21 +5,21 @@ Whether to initiate an SSH tunnel before connecting to the database, and if so, ## Supported Types -### SourceMssqlNoTunnel +### `models.SourceMssqlNoTunnel` ```python -sourceMssqlSSHTunnelMethod: models.SourceMssqlNoTunnel = /* values here */ +value: models.SourceMssqlNoTunnel = /* values here */ ``` -### SourceMssqlSSHKeyAuthentication +### `models.SourceMssqlSSHKeyAuthentication` ```python -sourceMssqlSSHTunnelMethod: models.SourceMssqlSSHKeyAuthentication = /* values here */ +value: models.SourceMssqlSSHKeyAuthentication = /* values here */ ``` -### SourceMssqlPasswordAuthentication +### `models.SourceMssqlPasswordAuthentication` ```python -sourceMssqlSSHTunnelMethod: models.SourceMssqlPasswordAuthentication = /* values here */ +value: models.SourceMssqlPasswordAuthentication = /* values here */ ``` diff --git a/docs/models/sourcemssqlsslmethod.md b/docs/models/sourcemssqlsslmethod.md index aeda2ded..d8166280 100644 --- a/docs/models/sourcemssqlsslmethod.md +++ b/docs/models/sourcemssqlsslmethod.md @@ -5,21 +5,21 @@ The encryption method which is used when communicating with the database. 
## Supported Types -### Unencrypted +### `models.Unencrypted` ```python -sourceMssqlSSLMethod: models.Unencrypted = /* values here */ +value: models.Unencrypted = /* values here */ ``` -### SourceMssqlEncryptedTrustServerCertificate +### `models.SourceMssqlEncryptedTrustServerCertificate` ```python -sourceMssqlSSLMethod: models.SourceMssqlEncryptedTrustServerCertificate = /* values here */ +value: models.SourceMssqlEncryptedTrustServerCertificate = /* values here */ ``` -### SourceMssqlEncryptedVerifyCertificate +### `models.SourceMssqlEncryptedVerifyCertificate` ```python -sourceMssqlSSLMethod: models.SourceMssqlEncryptedVerifyCertificate = /* values here */ +value: models.SourceMssqlEncryptedVerifyCertificate = /* values here */ ``` diff --git a/docs/models/sourcemysqlsshtunnelmethod.md b/docs/models/sourcemysqlsshtunnelmethod.md index 587935db..e9637805 100644 --- a/docs/models/sourcemysqlsshtunnelmethod.md +++ b/docs/models/sourcemysqlsshtunnelmethod.md @@ -5,21 +5,21 @@ Whether to initiate an SSH tunnel before connecting to the database, and if so, ## Supported Types -### SourceMysqlNoTunnel +### `models.SourceMysqlNoTunnel` ```python -sourceMysqlSSHTunnelMethod: models.SourceMysqlNoTunnel = /* values here */ +value: models.SourceMysqlNoTunnel = /* values here */ ``` -### SourceMysqlSSHKeyAuthentication +### `models.SourceMysqlSSHKeyAuthentication` ```python -sourceMysqlSSHTunnelMethod: models.SourceMysqlSSHKeyAuthentication = /* values here */ +value: models.SourceMysqlSSHKeyAuthentication = /* values here */ ``` -### SourceMysqlPasswordAuthentication +### `models.SourceMysqlPasswordAuthentication` ```python -sourceMysqlSSHTunnelMethod: models.SourceMysqlPasswordAuthentication = /* values here */ +value: models.SourceMysqlPasswordAuthentication = /* values here */ ``` diff --git a/docs/models/sourcemysqlsslmodes.md b/docs/models/sourcemysqlsslmodes.md index c88d85f4..afb302c8 100644 --- a/docs/models/sourcemysqlsslmodes.md +++ b/docs/models/sourcemysqlsslmodes.md @@ -5,27 +5,27 @@ SSL connection modes. Read more docs for more information on how to obtain this key. | | +| `username` | *str* | :heavy_check_mark: | The username you created to allow Airbyte to access the database. 
| AIRBYTE_USER | +| `auth_type` | [Optional[models.SourceSnowflakeSchemasAuthType]](../models/sourcesnowflakeschemasauthtype.md) | :heavy_minus_sign: | N/A | | +| `private_key_password` | *Optional[str]* | :heavy_minus_sign: | Passphrase for private key | | \ No newline at end of file diff --git a/docs/models/sourcesnowflakeschemasauthtype.md b/docs/models/sourcesnowflakeschemasauthtype.md index 518e93bd..65f28e75 100644 --- a/docs/models/sourcesnowflakeschemasauthtype.md +++ b/docs/models/sourcesnowflakeschemasauthtype.md @@ -3,6 +3,6 @@ ## Values -| Name | Value | -| ------------------- | ------------------- | -| `USERNAME_PASSWORD` | username/password | \ No newline at end of file +| Name | Value | +| ------------------------- | ------------------------- | +| `KEY_PAIR_AUTHENTICATION` | Key Pair Authentication | \ No newline at end of file diff --git a/docs/models/sourcesnowflakeschemascredentialsauthtype.md b/docs/models/sourcesnowflakeschemascredentialsauthtype.md new file mode 100644 index 00000000..466bf25b --- /dev/null +++ b/docs/models/sourcesnowflakeschemascredentialsauthtype.md @@ -0,0 +1,8 @@ +# SourceSnowflakeSchemasCredentialsAuthType + + +## Values + +| Name | Value | +| ------------------- | ------------------- | +| `USERNAME_PASSWORD` | username/password | \ No newline at end of file diff --git a/docs/models/sourcesnowflakeusernameandpassword.md b/docs/models/sourcesnowflakeusernameandpassword.md index 4a867e75..6d3db2ab 100644 --- a/docs/models/sourcesnowflakeusernameandpassword.md +++ b/docs/models/sourcesnowflakeusernameandpassword.md @@ -3,8 +3,8 @@ ## Fields -| Field | Type | Required | Description | Example | -| ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | -| `password` | *str* | :heavy_check_mark: | The password associated with the username. | | -| `username` | *str* | :heavy_check_mark: | The username you created to allow Airbyte to access the database. | AIRBYTE_USER | -| `auth_type` | [models.SourceSnowflakeSchemasAuthType](../models/sourcesnowflakeschemasauthtype.md) | :heavy_check_mark: | N/A | | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ---------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------- | +| `password` | *str* | :heavy_check_mark: | The password associated with the username. | | +| `username` | *str* | :heavy_check_mark: | The username you created to allow Airbyte to access the database. 
| AIRBYTE_USER | +| `auth_type` | [models.SourceSnowflakeSchemasCredentialsAuthType](../models/sourcesnowflakeschemascredentialsauthtype.md) | :heavy_check_mark: | N/A | | \ No newline at end of file diff --git a/docs/models/sourcesquareauthentication.md b/docs/models/sourcesquareauthentication.md index 1a7f119e..e9050f38 100644 --- a/docs/models/sourcesquareauthentication.md +++ b/docs/models/sourcesquareauthentication.md @@ -5,15 +5,15 @@ Choose how to authenticate to Square. ## Supported Types -### OauthAuthentication +### `models.OauthAuthentication` ```python -sourceSquareAuthentication: models.OauthAuthentication = /* values here */ +value: models.OauthAuthentication = /* values here */ ``` -### SourceSquareAPIKey +### `models.SourceSquareAPIKey` ```python -sourceSquareAuthentication: models.SourceSquareAPIKey = /* values here */ +value: models.SourceSquareAPIKey = /* values here */ ``` diff --git a/docs/models/sourcetiktokmarketing.md b/docs/models/sourcetiktokmarketing.md index d6f5d986..a0397afe 100644 --- a/docs/models/sourcetiktokmarketing.md +++ b/docs/models/sourcetiktokmarketing.md @@ -8,6 +8,6 @@ | `attribution_window` | *Optional[int]* | :heavy_minus_sign: | The attribution window in days. | | `credentials` | [Optional[models.SourceTiktokMarketingAuthenticationMethod]](../models/sourcetiktokmarketingauthenticationmethod.md) | :heavy_minus_sign: | Authentication method | | `end_date` | [datetime](https://docs.python.org/3/library/datetime.html#datetime-objects) | :heavy_minus_sign: | The date until which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DD. All data generated between start_date and this date will be replicated. Not setting this option will result in always syncing the data till the current date. | -| `include_deleted` | *Optional[bool]* | :heavy_minus_sign: | Set to active if you want to include deleted data in reports. | +| `include_deleted` | *Optional[bool]* | :heavy_minus_sign: | Set to active if you want to include deleted data in report based streams and Ads, Ad Groups and Campaign streams. | | `source_type` | [Optional[models.SourceTiktokMarketingTiktokMarketing]](../models/sourcetiktokmarketingtiktokmarketing.md) | :heavy_minus_sign: | N/A | | `start_date` | [datetime](https://docs.python.org/3/library/datetime.html#datetime-objects) | :heavy_minus_sign: | The Start Date in format: YYYY-MM-DD. Any data before this date will not be replicated. If this parameter is not set, all data will be replicated. 
| \ No newline at end of file diff --git a/docs/models/sourcetiktokmarketingauthenticationmethod.md b/docs/models/sourcetiktokmarketingauthenticationmethod.md index 3eb09e1e..1e340e79 100644 --- a/docs/models/sourcetiktokmarketingauthenticationmethod.md +++ b/docs/models/sourcetiktokmarketingauthenticationmethod.md @@ -5,15 +5,15 @@ Authentication method ## Supported Types -### SourceTiktokMarketingOAuth20 +### `models.SourceTiktokMarketingOAuth20` ```python -sourceTiktokMarketingAuthenticationMethod: models.SourceTiktokMarketingOAuth20 = /* values here */ +value: models.SourceTiktokMarketingOAuth20 = /* values here */ ``` -### SandboxAccessToken +### `models.SandboxAccessToken` ```python -sourceTiktokMarketingAuthenticationMethod: models.SandboxAccessToken = /* values here */ +value: models.SandboxAccessToken = /* values here */ ``` diff --git a/docs/models/sourcetrustpilotauthorizationmethod.md b/docs/models/sourcetrustpilotauthorizationmethod.md index cc139377..b5d3e5c9 100644 --- a/docs/models/sourcetrustpilotauthorizationmethod.md +++ b/docs/models/sourcetrustpilotauthorizationmethod.md @@ -3,15 +3,15 @@ ## Supported Types -### SourceTrustpilotOAuth20 +### `models.SourceTrustpilotOAuth20` ```python -sourceTrustpilotAuthorizationMethod: models.SourceTrustpilotOAuth20 = /* values here */ +value: models.SourceTrustpilotOAuth20 = /* values here */ ``` -### SourceTrustpilotAPIKey +### `models.SourceTrustpilotAPIKey` ```python -sourceTrustpilotAuthorizationMethod: models.SourceTrustpilotAPIKey = /* values here */ +value: models.SourceTrustpilotAPIKey = /* values here */ ``` diff --git a/docs/models/sourcetypeformauthorizationmethod.md b/docs/models/sourcetypeformauthorizationmethod.md index 59dbcada..441b5907 100644 --- a/docs/models/sourcetypeformauthorizationmethod.md +++ b/docs/models/sourcetypeformauthorizationmethod.md @@ -3,15 +3,15 @@ ## Supported Types -### SourceTypeformOAuth20 +### `models.SourceTypeformOAuth20` ```python -sourceTypeformAuthorizationMethod: models.SourceTypeformOAuth20 = /* values here */ +value: models.SourceTypeformOAuth20 = /* values here */ ``` -### SourceTypeformPrivateToken +### `models.SourceTypeformPrivateToken` ```python -sourceTypeformAuthorizationMethod: models.SourceTypeformPrivateToken = /* values here */ +value: models.SourceTypeformPrivateToken = /* values here */ ``` diff --git a/docs/models/sourcewhiskyhunter.md b/docs/models/sourcewhiskyhunter.md index d056125c..7fe620b7 100644 --- a/docs/models/sourcewhiskyhunter.md +++ b/docs/models/sourcewhiskyhunter.md @@ -3,6 +3,6 @@ ## Fields -| Field | Type | Required | Description | -| ---------------------------------------------------------- | ---------------------------------------------------------- | ---------------------------------------------------------- | ---------------------------------------------------------- | -| `source_type` | [Optional[models.WhiskyHunter]](../models/whiskyhunter.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------ | ------------------------------------------------ | ------------------------------------------------ | ------------------------------------------------ | +| `source_type` | [models.WhiskyHunter](../models/whiskyhunter.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcewikipediapageviews.md b/docs/models/sourcewikipediapageviews.md index d399d5a6..9547a62c 100644 --- a/docs/models/sourcewikipediapageviews.md +++ 
b/docs/models/sourcewikipediapageviews.md @@ -11,5 +11,5 @@ | `country` | *str* | :heavy_check_mark: | The ISO 3166-1 alpha-2 code of a country for which to retrieve top articles. | FR | | `end` | *str* | :heavy_check_mark: | The date of the last day to include, in YYYYMMDD or YYYYMMDDHH format. | | | `project` | *str* | :heavy_check_mark: | If you want to filter by project, use the domain of any Wikimedia project. | en.wikipedia.org | -| `start` | *str* | :heavy_check_mark: | The date of the first day to include, in YYYYMMDD or YYYYMMDDHH format. | | +| `start` | *str* | :heavy_check_mark: | The date of the first day to include, in YYYYMMDD or YYYYMMDDHH format. Also serves as the date to retrieve data for the top articles. | | | `source_type` | [models.WikipediaPageviews](../models/wikipediapageviews.md) | :heavy_check_mark: | N/A | | \ No newline at end of file diff --git a/docs/models/sourcezendeskchatauthorizationmethod.md b/docs/models/sourcezendeskchatauthorizationmethod.md index 379e9572..e6db9a21 100644 --- a/docs/models/sourcezendeskchatauthorizationmethod.md +++ b/docs/models/sourcezendeskchatauthorizationmethod.md @@ -3,15 +3,15 @@ ## Supported Types -### SourceZendeskChatOAuth20 +### `models.SourceZendeskChatOAuth20` ```python -sourceZendeskChatAuthorizationMethod: models.SourceZendeskChatOAuth20 = /* values here */ +value: models.SourceZendeskChatOAuth20 = /* values here */ ``` -### SourceZendeskChatAccessToken +### `models.SourceZendeskChatAccessToken` ```python -sourceZendeskChatAuthorizationMethod: models.SourceZendeskChatAccessToken = /* values here */ +value: models.SourceZendeskChatAccessToken = /* values here */ ``` diff --git a/docs/models/sourcezendesksunshineauthorizationmethod.md b/docs/models/sourcezendesksunshineauthorizationmethod.md index c0efaa74..a43d75b3 100644 --- a/docs/models/sourcezendesksunshineauthorizationmethod.md +++ b/docs/models/sourcezendesksunshineauthorizationmethod.md @@ -3,15 +3,15 @@ ## Supported Types -### SourceZendeskSunshineOAuth20 +### `models.SourceZendeskSunshineOAuth20` ```python -sourceZendeskSunshineAuthorizationMethod: models.SourceZendeskSunshineOAuth20 = /* values here */ +value: models.SourceZendeskSunshineOAuth20 = /* values here */ ``` -### SourceZendeskSunshineAPIToken +### `models.SourceZendeskSunshineAPIToken` ```python -sourceZendeskSunshineAuthorizationMethod: models.SourceZendeskSunshineAPIToken = /* values here */ +value: models.SourceZendeskSunshineAPIToken = /* values here */ ``` diff --git a/docs/models/sourcezendesksupportauthentication.md b/docs/models/sourcezendesksupportauthentication.md index 75b7dd55..bd3891d4 100644 --- a/docs/models/sourcezendesksupportauthentication.md +++ b/docs/models/sourcezendesksupportauthentication.md @@ -5,15 +5,15 @@ Zendesk allows two authentication methods. 
We recommend using `OAuth2.0` for Air ## Supported Types -### SourceZendeskSupportOAuth20 +### `models.SourceZendeskSupportOAuth20` ```python -sourceZendeskSupportAuthentication: models.SourceZendeskSupportOAuth20 = /* values here */ +value: models.SourceZendeskSupportOAuth20 = /* values here */ ``` -### SourceZendeskSupportAPIToken +### `models.SourceZendeskSupportAPIToken` ```python -sourceZendeskSupportAuthentication: models.SourceZendeskSupportAPIToken = /* values here */ +value: models.SourceZendeskSupportAPIToken = /* values here */ ``` diff --git a/docs/models/sourcezendesktalkauthentication.md b/docs/models/sourcezendesktalkauthentication.md index 2136cd98..ac12a0b7 100644 --- a/docs/models/sourcezendesktalkauthentication.md +++ b/docs/models/sourcezendesktalkauthentication.md @@ -5,15 +5,15 @@ Zendesk service provides two authentication methods. Choose between: `OAuth2.0` ## Supported Types -### SourceZendeskTalkOAuth20 +### `models.SourceZendeskTalkOAuth20` ```python -sourceZendeskTalkAuthentication: models.SourceZendeskTalkOAuth20 = /* values here */ +value: models.SourceZendeskTalkOAuth20 = /* values here */ ``` -### SourceZendeskTalkAPIToken +### `models.SourceZendeskTalkAPIToken` ```python -sourceZendeskTalkAuthentication: models.SourceZendeskTalkAPIToken = /* values here */ +value: models.SourceZendeskTalkAPIToken = /* values here */ ``` diff --git a/docs/models/sshtunnelmethod.md b/docs/models/sshtunnelmethod.md index 35d628ed..7544569f 100644 --- a/docs/models/sshtunnelmethod.md +++ b/docs/models/sshtunnelmethod.md @@ -5,21 +5,21 @@ Whether to initiate an SSH tunnel before connecting to the database, and if so, ## Supported Types -### NoTunnel +### `models.NoTunnel` ```python -sshTunnelMethod: models.NoTunnel = /* values here */ +value: models.NoTunnel = /* values here */ ``` -### SSHKeyAuthentication +### `models.SSHKeyAuthentication` ```python -sshTunnelMethod: models.SSHKeyAuthentication = /* values here */ +value: models.SSHKeyAuthentication = /* values here */ ``` -### PasswordAuthentication +### `models.PasswordAuthentication` ```python -sshTunnelMethod: models.PasswordAuthentication = /* values here */ +value: models.PasswordAuthentication = /* values here */ ``` diff --git a/docs/models/sslmethod.md b/docs/models/sslmethod.md index 00e956df..fe584132 100644 --- a/docs/models/sslmethod.md +++ b/docs/models/sslmethod.md @@ -5,15 +5,15 @@ The encryption method which is used to communicate with the database. ## Supported Types -### EncryptedTrustServerCertificate +### `models.EncryptedTrustServerCertificate` ```python -sslMethod: models.EncryptedTrustServerCertificate = /* values here */ +value: models.EncryptedTrustServerCertificate = /* values here */ ``` -### EncryptedVerifyCertificate +### `models.EncryptedVerifyCertificate` ```python -sslMethod: models.EncryptedVerifyCertificate = /* values here */ +value: models.EncryptedVerifyCertificate = /* values here */ ``` diff --git a/docs/models/sslmodes.md b/docs/models/sslmodes.md index a2934b3b..8fbb7e6a 100644 --- a/docs/models/sslmodes.md +++ b/docs/models/sslmodes.md @@ -12,39 +12,39 @@ SSL connection modes. 
## Supported Types -### Disable +### `models.Disable` ```python -sslModes: models.Disable = /* values here */ +value: models.Disable = /* values here */ ``` -### Allow +### `models.Allow` ```python -sslModes: models.Allow = /* values here */ +value: models.Allow = /* values here */ ``` -### Prefer +### `models.Prefer` ```python -sslModes: models.Prefer = /* values here */ +value: models.Prefer = /* values here */ ``` -### Require +### `models.Require` ```python -sslModes: models.Require = /* values here */ +value: models.Require = /* values here */ ``` -### VerifyCa +### `models.VerifyCa` ```python -sslModes: models.VerifyCa = /* values here */ +value: models.VerifyCa = /* values here */ ``` -### VerifyFull +### `models.VerifyFull` ```python -sslModes: models.VerifyFull = /* values here */ +value: models.VerifyFull = /* values here */ ``` diff --git a/docs/models/standard.md b/docs/models/standard.md deleted file mode 100644 index 5d342dbe..00000000 --- a/docs/models/standard.md +++ /dev/null @@ -1,10 +0,0 @@ -# Standard - -(not recommended) Direct loading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In all other cases, you should use S3 uploading. - - -## Fields - -| Field | Type | Required | Description | -| ---------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------- | -| `method` | [models.DestinationRedshiftSchemasMethod](../models/destinationredshiftschemasmethod.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/storageprovider.md b/docs/models/storageprovider.md index eab78e94..e5323533 100644 --- a/docs/models/storageprovider.md +++ b/docs/models/storageprovider.md @@ -5,45 +5,45 @@ The storage Provider or Location of the file(s) which should be replicated. 
## Supported Types -### HTTPSPublicWeb +### `models.HTTPSPublicWeb` ```python -storageProvider: models.HTTPSPublicWeb = /* values here */ +value: models.HTTPSPublicWeb = /* values here */ ``` -### GCSGoogleCloudStorage +### `models.GCSGoogleCloudStorage` ```python -storageProvider: models.GCSGoogleCloudStorage = /* values here */ +value: models.GCSGoogleCloudStorage = /* values here */ ``` -### SourceFileS3AmazonWebServices +### `models.SourceFileS3AmazonWebServices` ```python -storageProvider: models.SourceFileS3AmazonWebServices = /* values here */ +value: models.SourceFileS3AmazonWebServices = /* values here */ ``` -### AzBlobAzureBlobStorage +### `models.AzBlobAzureBlobStorage` ```python -storageProvider: models.AzBlobAzureBlobStorage = /* values here */ +value: models.AzBlobAzureBlobStorage = /* values here */ ``` -### SSHSecureShell +### `models.SSHSecureShell` ```python -storageProvider: models.SSHSecureShell = /* values here */ +value: models.SSHSecureShell = /* values here */ ``` -### SCPSecureCopyProtocol +### `models.SCPSecureCopyProtocol` ```python -storageProvider: models.SCPSecureCopyProtocol = /* values here */ +value: models.SCPSecureCopyProtocol = /* values here */ ``` -### SFTPSecureFileTransferProtocol +### `models.SFTPSecureFileTransferProtocol` ```python -storageProvider: models.SFTPSecureFileTransferProtocol = /* values here */ +value: models.SFTPSecureFileTransferProtocol = /* values here */ ``` diff --git a/docs/models/testdestination.md b/docs/models/testdestination.md index 37c80457..0c251056 100644 --- a/docs/models/testdestination.md +++ b/docs/models/testdestination.md @@ -5,9 +5,9 @@ The type of destination to be used ## Supported Types -### Silent +### `models.Silent` ```python -testDestination: models.Silent = /* values here */ +value: models.Silent = /* values here */ ``` diff --git a/docs/models/textsplitter.md b/docs/models/textsplitter.md index f214577a..875d4820 100644 --- a/docs/models/textsplitter.md +++ b/docs/models/textsplitter.md @@ -5,21 +5,21 @@ Split text fields into chunks based on the specified method. ## Supported Types -### BySeparator +### `models.BySeparator` ```python -textSplitter: models.BySeparator = /* values here */ +value: models.BySeparator = /* values here */ ``` -### ByMarkdownHeader +### `models.ByMarkdownHeader` ```python -textSplitter: models.ByMarkdownHeader = /* values here */ +value: models.ByMarkdownHeader = /* values here */ ``` -### ByProgrammingLanguage +### `models.ByProgrammingLanguage` ```python -textSplitter: models.ByProgrammingLanguage = /* values here */ +value: models.ByProgrammingLanguage = /* values here */ ``` diff --git a/docs/models/timegranularitytype.md b/docs/models/timegranularitytype.md new file mode 100644 index 00000000..3e0038e4 --- /dev/null +++ b/docs/models/timegranularitytype.md @@ -0,0 +1,11 @@ +# TimeGranularityType + +Granularity of the statistics for metrics per time period. 
Must be either "DAY" or "MONTH" + + +## Values + +| Name | Value | +| ------- | ------- | +| `DAY` | DAY | +| `MONTH` | MONTH | \ No newline at end of file diff --git a/docs/models/tovalue.md b/docs/models/tovalue.md index 1cc3bce0..b37748a7 100644 --- a/docs/models/tovalue.md +++ b/docs/models/tovalue.md @@ -3,15 +3,15 @@ ## Supported Types -### SourceGoogleAnalyticsDataAPISchemasInt64Value +### `models.SourceGoogleAnalyticsDataAPISchemasInt64Value` ```python -toValue: models.SourceGoogleAnalyticsDataAPISchemasInt64Value = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasInt64Value = /* values here */ ``` -### SourceGoogleAnalyticsDataAPISchemasDoubleValue +### `models.SourceGoogleAnalyticsDataAPISchemasDoubleValue` ```python -toValue: models.SourceGoogleAnalyticsDataAPISchemasDoubleValue = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasDoubleValue = /* values here */ ``` diff --git a/docs/models/sourcemicrosoftonedrivedocumentfiletypeformatexperimental.md b/docs/models/unstructureddocumentformat.md similarity index 99% rename from docs/models/sourcemicrosoftonedrivedocumentfiletypeformatexperimental.md rename to docs/models/unstructureddocumentformat.md index f6603935..97f24e36 100644 --- a/docs/models/sourcemicrosoftonedrivedocumentfiletypeformatexperimental.md +++ b/docs/models/unstructureddocumentformat.md @@ -1,4 +1,4 @@ -# SourceMicrosoftOnedriveDocumentFileTypeFormatExperimental +# UnstructuredDocumentFormat Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one record per file. diff --git a/docs/models/updatemethod.md b/docs/models/updatemethod.md index f3450c7f..0a6f98d1 100644 --- a/docs/models/updatemethod.md +++ b/docs/models/updatemethod.md @@ -5,15 +5,15 @@ Configures how data is extracted from the database. ## Supported Types -### ReadChangesUsingChangeDataCaptureCDC +### `models.ReadChangesUsingChangeDataCaptureCDC` ```python -updateMethod: models.ReadChangesUsingChangeDataCaptureCDC = /* values here */ +value: models.ReadChangesUsingChangeDataCaptureCDC = /* values here */ ``` -### ScanChangesWithUserDefinedCursor +### `models.ScanChangesWithUserDefinedCursor` ```python -updateMethod: models.ScanChangesWithUserDefinedCursor = /* values here */ +value: models.ScanChangesWithUserDefinedCursor = /* values here */ ``` diff --git a/docs/models/uploadingmethod.md b/docs/models/uploadingmethod.md index 79127778..621c5cb5 100644 --- a/docs/models/uploadingmethod.md +++ b/docs/models/uploadingmethod.md @@ -5,15 +5,9 @@ The way data will be uploaded to Redshift. ## Supported Types -### AWSS3Staging +### `models.AWSS3Staging` ```python -uploadingMethod: models.AWSS3Staging = /* values here */ -``` - -### Standard - -```python -uploadingMethod: models.Standard = /* values here */ +value: models.AWSS3Staging = /* values here */ ``` diff --git a/docs/models/userresponse.md b/docs/models/userresponse.md index 057561f1..f42fd517 100644 --- a/docs/models/userresponse.md +++ b/docs/models/userresponse.md @@ -1,6 +1,6 @@ # UserResponse -Provides details of a single user. +Provides details of a single user in an organization. ## Fields @@ -8,5 +8,5 @@ Provides details of a single user. 
| Field | Type | Required | Description | | ------------------------ | ------------------------ | ------------------------ | ------------------------ | | `email` | *str* | :heavy_check_mark: | N/A | -| `user_id` | *str* | :heavy_check_mark: | Internal Airbyte user ID | -| `name` | *Optional[str]* | :heavy_minus_sign: | Name of the user | \ No newline at end of file +| `id` | *str* | :heavy_check_mark: | Internal Airbyte user ID | +| `name` | *str* | :heavy_check_mark: | Name of the user | \ No newline at end of file diff --git a/docs/models/usersresponse.md b/docs/models/usersresponse.md index 136d01e5..95a2513a 100644 --- a/docs/models/usersresponse.md +++ b/docs/models/usersresponse.md @@ -1,6 +1,6 @@ # UsersResponse -List/Array of multiple users +List/Array of multiple users in an organization ## Fields diff --git a/docs/models/value.md b/docs/models/value.md index 7a8093ea..42932758 100644 --- a/docs/models/value.md +++ b/docs/models/value.md @@ -3,13 +3,13 @@ ## Supported Types -### Int64Value +### `models.Int64Value` ```python value: models.Int64Value = /* values here */ ``` -### DoubleValue +### `models.DoubleValue` ```python value: models.DoubleValue = /* values here */ diff --git a/docs/sdks/organizations/README.md b/docs/sdks/organizations/README.md new file mode 100644 index 00000000..5ba43523 --- /dev/null +++ b/docs/sdks/organizations/README.md @@ -0,0 +1,44 @@ +# Organizations +(*organizations*) + +### Available Operations + +* [list_organizations_for_user](#list_organizations_for_user) - List all organizations for a user + +## list_organizations_for_user + +Lists a user's organizations. + +### Example Usage + +```python +import airbyte_api +from airbyte_api import models + +s = airbyte_api.AirbyteAPI( + security=models.Security( + basic_auth=models.SchemeBasicAuth( + password="", + username="", + ), + ), +) + + +res = s.organizations.list_organizations_for_user() + +if res.organizations_response is not None: + # handle response + pass + +``` + + +### Response + +**[api.ListOrganizationsForUserResponse](../../api/listorganizationsforuserresponse.md)** +### Errors + +| Error Object | Status Code | Content Type | | --------------- | --------------- | --------------- | | errors.SDKError | 4xx-5xx | */* | diff --git a/docs/sdks/users/README.md b/docs/sdks/users/README.md index 4f551ca9..795de797 100644 --- a/docs/sdks/users/README.md +++ b/docs/sdks/users/README.md @@ -3,11 +3,11 @@ ### Available Operations -* [list_users](#list_users) - List users +* [list_users_within_an_organization](#list_users_within_an_organization) - List all users within an organization -## list_users +## list_users_within_an_organization -Lists users based on provided filters. You can filter on either a list of IDs or a list of emails, but not both. If no filters provided we will list all users by default. +An Organization Admin user can list all users within the same organization. Filtering by a list of user IDs and/or a list of user emails is also supported. 
### Example Usage @@ -25,7 +25,9 @@ s = airbyte_api.AirbyteAPI( ) -res = s.users.list_users(request=api.ListUsersRequest()) +res = s.users.list_users_within_an_organization(request=api.ListUsersWithinAnOrganizationRequest( + organization_id='', +)) if res.users_response is not None: # handle response @@ -35,14 +37,14 @@ if res.users_response is not None: ### Parameters -| Parameter | Type | Required | Description | -| ----------------------------------------------------- | ----------------------------------------------------- | ----------------------------------------------------- | ----------------------------------------------------- | -| `request` | [api.ListUsersRequest](../../api/listusersrequest.md) | :heavy_check_mark: | The request object to use for the request. | +| Parameter | Type | Required | Description | +| --------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------- | +| `request` | [api.ListUsersWithinAnOrganizationRequest](../../api/listuserswithinanorganizationrequest.md) | :heavy_check_mark: | The request object to use for the request. | ### Response -**[api.ListUsersResponse](../../api/listusersresponse.md)** +**[api.ListUsersWithinAnOrganizationResponse](../../api/listuserswithinanorganizationresponse.md)** ### Errors | Error Object | Status Code | Content Type | diff --git a/docs/sdks/workspaces/README.md b/docs/sdks/workspaces/README.md index 33c665c6..4b8e97bd 100644 --- a/docs/sdks/workspaces/README.md +++ b/docs/sdks/workspaces/README.md @@ -13,7 +13,7 @@ ## create_or_update_workspace_o_auth_credentials Create/update a set of OAuth credentials to override the Airbyte-provided OAuth credentials used for source/destination OAuth. -In order to determine what the credential configuration needs to be, please see the connector specification of the relevant source/destination. +In order to determine what the credential configuration needs to be, please see the connector specification of the relevant source/destination. 
### Example Usage diff --git a/gen.yaml b/gen.yaml index f21c1141..a234723b 100644 --- a/gen.yaml +++ b/gen.yaml @@ -11,7 +11,7 @@ generation: auth: oAuth2ClientCredentialsEnabled: true python: - version: 0.50.1 + version: 0.51.0 additionalDependencies: dependencies: {} extraDependencies: @@ -35,3 +35,4 @@ python: packageName: airbyte-api projectUrls: {} responseFormat: envelope + templateVersion: v1 diff --git a/scripts/publish.sh b/scripts/publish.sh new file mode 100755 index 00000000..ed45d8a9 --- /dev/null +++ b/scripts/publish.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env bash + +export TWINE_USERNAME=__token__ +export TWINE_PASSWORD=${PYPI_TOKEN} + +python -m pip install --upgrade pip +pip install setuptools wheel twine +python setup.py sdist bdist_wheel +twine upload dist/* diff --git a/setup.py b/setup.py index 85e85a8a..ea758483 100644 --- a/setup.py +++ b/setup.py @@ -19,7 +19,7 @@ setuptools.setup( name='airbyte-api', - version='0.50.1', + version='0.51.0', author='Airbyte', description='Python Client SDK for Airbyte API', url='https://github.com/airbytehq/airbyte-api-python-sdk.git', diff --git a/src/airbyte_api/api/__init__.py b/src/airbyte_api/api/__init__.py index ceac95d1..358e2bcf 100644 --- a/src/airbyte_api/api/__init__.py +++ b/src/airbyte_api/api/__init__.py @@ -25,9 +25,10 @@ from .listconnections import * from .listdestinations import * from .listjobs import * +from .listorganizationsforuser import * from .listpermissions import * from .listsources import * -from .listusers import * +from .listuserswithinanorganization import * from .listworkspaces import * from .patchconnection import * from .patchdestination import * @@ -37,4 +38,4 @@ from .updatepermission import * from .updateworkspace import * -__all__ = ["CancelJobRequest","CancelJobResponse","CreateConnectionResponse","CreateDestinationResponse","CreateJobResponse","CreateOrUpdateWorkspaceOAuthCredentialsRequest","CreateOrUpdateWorkspaceOAuthCredentialsResponse","CreatePermissionResponse","CreateSourceResponse","CreateWorkspaceResponse","DeleteConnectionRequest","DeleteConnectionResponse","DeleteDestinationRequest","DeleteDestinationResponse","DeletePermissionRequest","DeletePermissionResponse","DeleteSourceRequest","DeleteSourceResponse","DeleteWorkspaceRequest","DeleteWorkspaceResponse","GetConnectionRequest","GetConnectionResponse","GetDestinationRequest","GetDestinationResponse","GetHealthCheckResponse","GetJobRequest","GetJobResponse","GetPermissionRequest","GetPermissionResponse","GetSourceRequest","GetSourceResponse","GetStreamPropertiesRequest","GetStreamPropertiesResponse","GetWorkspaceRequest","GetWorkspaceResponse","InitiateOAuthResponse","ListConnectionsRequest","ListConnectionsResponse","ListDestinationsRequest","ListDestinationsResponse","ListJobsRequest","ListJobsResponse","ListPermissionsRequest","ListPermissionsResponse","ListSourcesRequest","ListSourcesResponse","ListUsersRequest","ListUsersResponse","ListWorkspacesRequest","ListWorkspacesResponse","PatchConnectionRequest","PatchConnectionResponse","PatchDestinationRequest","PatchDestinationResponse","PatchSourceRequest","PatchSourceResponse","PutDestinationRequest","PutDestinationResponse","PutSourceRequest","PutSourceResponse","UpdatePermissionRequest","UpdatePermissionResponse","UpdateWorkspaceRequest","UpdateWorkspaceResponse"] +__all__ = 
["CancelJobRequest","CancelJobResponse","CreateConnectionResponse","CreateDestinationResponse","CreateJobResponse","CreateOrUpdateWorkspaceOAuthCredentialsRequest","CreateOrUpdateWorkspaceOAuthCredentialsResponse","CreatePermissionResponse","CreateSourceResponse","CreateWorkspaceResponse","DeleteConnectionRequest","DeleteConnectionResponse","DeleteDestinationRequest","DeleteDestinationResponse","DeletePermissionRequest","DeletePermissionResponse","DeleteSourceRequest","DeleteSourceResponse","DeleteWorkspaceRequest","DeleteWorkspaceResponse","GetConnectionRequest","GetConnectionResponse","GetDestinationRequest","GetDestinationResponse","GetHealthCheckResponse","GetJobRequest","GetJobResponse","GetPermissionRequest","GetPermissionResponse","GetSourceRequest","GetSourceResponse","GetStreamPropertiesRequest","GetStreamPropertiesResponse","GetWorkspaceRequest","GetWorkspaceResponse","InitiateOAuthResponse","ListConnectionsRequest","ListConnectionsResponse","ListDestinationsRequest","ListDestinationsResponse","ListJobsRequest","ListJobsResponse","ListOrganizationsForUserResponse","ListPermissionsRequest","ListPermissionsResponse","ListSourcesRequest","ListSourcesResponse","ListUsersWithinAnOrganizationRequest","ListUsersWithinAnOrganizationResponse","ListWorkspacesRequest","ListWorkspacesResponse","PatchConnectionRequest","PatchConnectionResponse","PatchDestinationRequest","PatchDestinationResponse","PatchSourceRequest","PatchSourceResponse","PutDestinationRequest","PutDestinationResponse","PutSourceRequest","PutSourceResponse","UpdatePermissionRequest","UpdatePermissionResponse","UpdateWorkspaceRequest","UpdateWorkspaceResponse"] diff --git a/src/airbyte_api/api/listorganizationsforuser.py b/src/airbyte_api/api/listorganizationsforuser.py new file mode 100644 index 00000000..d6719de9 --- /dev/null +++ b/src/airbyte_api/api/listorganizationsforuser.py @@ -0,0 +1,21 @@ +"""Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import requests as requests_http +from ..models import organizationsresponse as models_organizationsresponse +from typing import Optional + + +@dataclasses.dataclass +class ListOrganizationsForUserResponse: + content_type: str = dataclasses.field() + r"""HTTP response content type for this operation""" + status_code: int = dataclasses.field() + r"""HTTP response status code for this operation""" + raw_response: requests_http.Response = dataclasses.field() + r"""Raw HTTP response; suitable for custom response parsing""" + organizations_response: Optional[models_organizationsresponse.OrganizationsResponse] = dataclasses.field(default=None) + r"""List user's organizations.""" + + diff --git a/src/airbyte_api/api/listpermissions.py b/src/airbyte_api/api/listpermissions.py index ba662ebb..5f678a6c 100644 --- a/src/airbyte_api/api/listpermissions.py +++ b/src/airbyte_api/api/listpermissions.py @@ -9,6 +9,8 @@ @dataclasses.dataclass class ListPermissionsRequest: + organization_id: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'organizationId', 'style': 'form', 'explode': True }}) + r"""This is required if you want to read someone else's permissions, and you should have organization admin or a higher role.""" user_id: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'userId', 'style': 'form', 'explode': True }}) r"""User Id in permission.""" diff --git a/src/airbyte_api/api/listusers.py b/src/airbyte_api/api/listuserswithinanorganization.py similarity index 83% rename from src/airbyte_api/api/listusers.py rename to src/airbyte_api/api/listuserswithinanorganization.py index 044a560d..97ee8fdb 100644 --- a/src/airbyte_api/api/listusers.py +++ b/src/airbyte_api/api/listuserswithinanorganization.py @@ -8,18 +8,18 @@ @dataclasses.dataclass -class ListUsersRequest: +class ListUsersWithinAnOrganizationRequest: + organization_id: str = dataclasses.field(metadata={'query_param': { 'field_name': 'organizationId', 'style': 'form', 'explode': True }}) emails: Optional[List[str]] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'emails', 'style': 'form', 'explode': True }}) r"""List of user emails to filter by""" ids: Optional[List[str]] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'ids', 'style': 'form', 'explode': True }}) r"""List of user IDs to filter by""" - organization_id: Optional[str] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'organizationId', 'style': 'form', 'explode': True }}) @dataclasses.dataclass -class ListUsersResponse: +class ListUsersWithinAnOrganizationResponse: content_type: str = dataclasses.field() r"""HTTP response content type for this operation""" status_code: int = dataclasses.field() diff --git a/src/airbyte_api/models/__init__.py b/src/airbyte_api/models/__init__.py index 99102d65..c3d9e7d2 100644 --- a/src/airbyte_api/models/__init__.py +++ b/src/airbyte_api/models/__init__.py @@ -30,7 +30,6 @@ from .destination_firestore import * from .destination_gcs import * from .destination_google_sheets import * -from .destination_langchain import * from .destination_milvus import * from .destination_mongodb import * from .destination_mssql import * @@ -91,6 +90,8 @@ from .notion import * from .oauthactornames import * from .oauthcredentialsconfiguration import * +from .organizationresponse import * +from .organizationsresponse import * from 
.permissioncreaterequest import * from .permissionresponse import * from .permissionresponseread import * @@ -135,6 +136,7 @@ from .source_cart import * from .source_chargebee import * from .source_chartmogul import * +from .source_clazar import * from .source_clickhouse import * from .source_clickup_api import * from .source_clockify import * @@ -145,6 +147,7 @@ from .source_configcat import * from .source_confluence import * from .source_convex import * +from .source_datadog import * from .source_datascope import * from .source_delighted import * from .source_dixa import * @@ -159,6 +162,7 @@ from .source_fauna import * from .source_file import * from .source_firebolt import * +from .source_fleetio import * from .source_freshcaller import * from .source_freshdesk import * from .source_freshsales import * @@ -169,9 +173,9 @@ from .source_gitlab import * from .source_glassfrog import * from .source_gnews import * +from .source_goldcast import * from .source_google_ads import * from .source_google_analytics_data_api import * -from .source_google_analytics_v4_service_account_only import * from .source_google_directory import * from .source_google_drive import * from .source_google_pagespeed_insights import * @@ -241,7 +245,6 @@ from .source_posthog import * from .source_postmarkapp import * from .source_prestashop import * -from .source_punk_api import * from .source_pypi import * from .source_qualaroo import * from .source_railz import * @@ -334,4 +337,4 @@ from .zendesk_support import * from .zendesk_talk import * -__all__ = ["AESCBCEnvelopeEncryption","APIAccessToken","APIKey","APIKeyAuth","APIKeySecret","APIParameterConfigModel","APIPassword","APIToken","AWSEnvironment","AWSRegion","AWSS3Staging","AWSSellerPartnerAccountType","AccessToken","AccessTokenIsRequiredForAuthenticationRequests","AccountNames","ActionReportTime","ActorTypeEnum","AdAnalyticsReportConfiguration","Aha","AirbyteAPIConnectionSchedule","Aircall","Airtable","Allow","AmazonAds","AmazonS3","AmazonSellerPartner","AmazonSqs","Amplitude","AndGroup","ApifyDataset","Appfollow","Applications","Asana","AsanaCredentials","Astra","Auth0","AuthMethod","AuthType","AuthenticateViaAPIKey","AuthenticateViaAccessKeys","AuthenticateViaAsanaOauth","AuthenticateViaGoogleOauth","AuthenticateViaHarvestOAuth","AuthenticateViaLeverAPIKey","AuthenticateViaLeverOAuth","AuthenticateViaMicrosoft","AuthenticateViaMicrosoftOAuth","AuthenticateViaMicrosoftOAuth20","AuthenticateViaOAuth","AuthenticateViaOAuth20","AuthenticateViaOauth2","AuthenticateViaPassword","AuthenticateViaPrivateKey","AuthenticateViaRetentlyOAuth","AuthenticateViaStorageAccountKey","AuthenticateWithAPIToken","AuthenticateWithPersonalAccessToken","Authentication","AuthenticationMechanism","AuthenticationMethod","AuthenticationMode","AuthenticationType","AuthenticationViaGoogleOAuth","AuthenticationWildcard","Authorization","AuthorizationMethod","AuthorizationType","Autogenerated","Avro","AvroApacheAvro","AvroFormat","AwsCloudtrail","AwsDatalake","AzBlobAzureBlobStorage","AzureBlobStorage","AzureBlobStorageCredentials","AzureOpenAI","AzureTable","BambooHr","BaseURL","BetweenFilter","Bigquery","BingAds","BothUsernameAndPasswordIsRequiredForAuthenticationRequest","Braintree","Braze","ByMarkdownHeader","ByProgrammingLanguage","BySeparator","Bzip2","CSVCommaSeparatedValues","CSVFormat","CSVHeaderDefinition","CacheType","CaptureModeAdvanced","Cart","Categories","CentralAPIRouter","Chargebee","Chartmogul","ChooseHowToPartitionData","ChromaLocalPersistance","ClickWindowDays","Clickhouse",
"ClickupAPI","Clockify","CloseCom","ClusterType","Coda","Codec","Cohere","CohortReportSettings","CohortReports","Cohorts","CohortsRange","CoinAPI","Coinmarketcap","Collection","Compression","CompressionCodec","CompressionCodecOptional","CompressionType","Configcat","Confluence","ConnectBy","ConnectionCreateRequest","ConnectionPatchRequest","ConnectionResponse","ConnectionScheduleResponse","ConnectionStatusEnum","ConnectionSyncModeEnum","ConnectionType","ConnectionsResponse","ContentType","ContinuousFeed","ConversionReportTime","Convex","Country","Credential","CredentialType","Credentials","CredentialsTitle","Csv","CustomQueriesArray","CustomReportConfig","CustomerStatus","DataCenterLocation","DataFreshness","DataRegion","DataSource","DataSourceType","DataType","Databricks","Datascope","DatasetLocation","DateRange","DefaultVectorizer","Deflate","DeletionMode","Delighted","DestinationAstra","DestinationAstraLanguage","DestinationAstraMode","DestinationAstraSchemasEmbeddingEmbedding1Mode","DestinationAstraSchemasEmbeddingEmbeddingMode","DestinationAstraSchemasEmbeddingMode","DestinationAstraSchemasMode","DestinationAstraSchemasProcessingMode","DestinationAstraSchemasProcessingTextSplitterMode","DestinationAstraSchemasProcessingTextSplitterTextSplitterMode","DestinationAwsDatalake","DestinationAwsDatalakeCompressionCodecOptional","DestinationAwsDatalakeCredentialsTitle","DestinationAwsDatalakeFormatTypeWildcard","DestinationAzureBlobStorage","DestinationAzureBlobStorageAzureBlobStorage","DestinationAzureBlobStorageFormatType","DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON","DestinationBigquery","DestinationBigqueryCredentialType","DestinationBigqueryHMACKey","DestinationBigqueryMethod","DestinationClickhouse","DestinationClickhouseSchemasTunnelMethod","DestinationClickhouseTunnelMethod","DestinationConfiguration","DestinationConvex","DestinationCreateRequest","DestinationDatabricks","DestinationDatabricksAzureBlobStorage","DestinationDatabricksDataSourceType","DestinationDatabricksS3BucketRegion","DestinationDatabricksSchemasDataSourceType","DestinationDevNull","DestinationDuckdb","DestinationDynamodb","DestinationElasticsearch","DestinationElasticsearchMethod","DestinationElasticsearchSchemasMethod","DestinationFirebolt","DestinationFireboltLoadingMethod","DestinationFireboltMethod","DestinationFireboltSchemasMethod","DestinationFirestore","DestinationGcs","DestinationGcsCSVCommaSeparatedValues","DestinationGcsCodec","DestinationGcsCompression","DestinationGcsCompressionCodec","DestinationGcsCompressionType","DestinationGcsFormatType","DestinationGcsGZIP","DestinationGcsJSONLinesNewlineDelimitedJSON","DestinationGcsNoCompression","DestinationGcsOutputFormat","DestinationGcsParquetColumnarStorage","DestinationGcsSchemasCodec","DestinationGcsSchemasCompressionType","DestinationGcsSchemasFormatCodec","DestinationGcsSchemasFormatCompressionType","DestinationGcsSchemasFormatFormatType","DestinationGcsSchemasFormatOutputFormat1Codec","DestinationGcsSchemasFormatOutputFormatCodec","DestinationGcsSchemasFormatOutputFormatFormatType","DestinationGcsSchemasFormatType","DestinationGcsSchemasNoCompression","DestinationGoogleSheets","DestinationGoogleSheetsGoogleSheets","DestinationLangchain","DestinationLangchainEmbedding","DestinationLangchainFake","DestinationLangchainIndexing","DestinationLangchainMode","DestinationLangchainOpenAI","DestinationLangchainPinecone","DestinationLangchainProcessingConfigModel","DestinationLangchainSchemasIndexingIndexing3Mode","DestinationLangchainSchemasIndexing
IndexingMode","DestinationLangchainSchemasIndexingMode","DestinationLangchainSchemasMode","DestinationMilvus","DestinationMilvusAPIToken","DestinationMilvusAuthentication","DestinationMilvusAzureOpenAI","DestinationMilvusByMarkdownHeader","DestinationMilvusByProgrammingLanguage","DestinationMilvusBySeparator","DestinationMilvusCohere","DestinationMilvusEmbedding","DestinationMilvusFake","DestinationMilvusFieldNameMappingConfigModel","DestinationMilvusIndexing","DestinationMilvusLanguage","DestinationMilvusMode","DestinationMilvusOpenAI","DestinationMilvusOpenAICompatible","DestinationMilvusProcessingConfigModel","DestinationMilvusSchemasEmbeddingEmbedding5Mode","DestinationMilvusSchemasEmbeddingEmbeddingMode","DestinationMilvusSchemasEmbeddingMode","DestinationMilvusSchemasIndexingAuthAuthenticationMode","DestinationMilvusSchemasIndexingAuthMode","DestinationMilvusSchemasIndexingMode","DestinationMilvusSchemasMode","DestinationMilvusSchemasProcessingMode","DestinationMilvusSchemasProcessingTextSplitterMode","DestinationMilvusSchemasProcessingTextSplitterTextSplitterMode","DestinationMilvusTextSplitter","DestinationMilvusUsernamePassword","DestinationMongodb","DestinationMongodbAuthorization","DestinationMongodbInstance","DestinationMongodbNoTunnel","DestinationMongodbPasswordAuthentication","DestinationMongodbSSHKeyAuthentication","DestinationMongodbSSHTunnelMethod","DestinationMongodbSchemasAuthorization","DestinationMongodbSchemasInstance","DestinationMongodbSchemasTunnelMethod","DestinationMongodbSchemasTunnelMethodTunnelMethod","DestinationMongodbTunnelMethod","DestinationMssql","DestinationMssqlNoTunnel","DestinationMssqlPasswordAuthentication","DestinationMssqlSSHKeyAuthentication","DestinationMssqlSSHTunnelMethod","DestinationMssqlSchemasSslMethod","DestinationMssqlSchemasTunnelMethod","DestinationMssqlSchemasTunnelMethodTunnelMethod","DestinationMssqlSslMethod","DestinationMssqlTunnelMethod","DestinationMysql","DestinationMysqlNoTunnel","DestinationMysqlPasswordAuthentication","DestinationMysqlSSHKeyAuthentication","DestinationMysqlSSHTunnelMethod","DestinationMysqlSchemasTunnelMethod","DestinationMysqlSchemasTunnelMethodTunnelMethod","DestinationMysqlTunnelMethod","DestinationOracle","DestinationOracleNoTunnel","DestinationOraclePasswordAuthentication","DestinationOracleSSHKeyAuthentication","DestinationOracleSSHTunnelMethod","DestinationOracleSchemasTunnelMethod","DestinationOracleSchemasTunnelMethodTunnelMethod","DestinationOracleTunnelMethod","DestinationPatchRequest","DestinationPinecone","DestinationPineconeAzureOpenAI","DestinationPineconeByMarkdownHeader","DestinationPineconeByProgrammingLanguage","DestinationPineconeBySeparator","DestinationPineconeCohere","DestinationPineconeEmbedding","DestinationPineconeFake","DestinationPineconeFieldNameMappingConfigModel","DestinationPineconeIndexing","DestinationPineconeLanguage","DestinationPineconeMode","DestinationPineconeOpenAI","DestinationPineconeOpenAICompatible","DestinationPineconeProcessingConfigModel","DestinationPineconeSchemasEmbeddingEmbedding5Mode","DestinationPineconeSchemasEmbeddingEmbeddingMode","DestinationPineconeSchemasEmbeddingMode","DestinationPineconeSchemasMode","DestinationPineconeSchemasProcessingMode","DestinationPineconeSchemasProcessingTextSplitterMode","DestinationPineconeSchemasProcessingTextSplitterTextSplitterMode","DestinationPineconeTextSplitter","DestinationPostgres","DestinationPostgresMode","DestinationPostgresNoTunnel","DestinationPostgresPasswordAuthentication","DestinationPostgresSSHKeyAuthent
ication","DestinationPostgresSSHTunnelMethod","DestinationPostgresSchemasMode","DestinationPostgresSchemasSSLModeSSLModes6Mode","DestinationPostgresSchemasSSLModeSSLModesMode","DestinationPostgresSchemasSslModeMode","DestinationPostgresSchemasTunnelMethod","DestinationPostgresSchemasTunnelMethodTunnelMethod","DestinationPostgresTunnelMethod","DestinationPubsub","DestinationPutRequest","DestinationQdrant","DestinationQdrantAuthenticationMethod","DestinationQdrantAzureOpenAI","DestinationQdrantByMarkdownHeader","DestinationQdrantByProgrammingLanguage","DestinationQdrantBySeparator","DestinationQdrantCohere","DestinationQdrantEmbedding","DestinationQdrantFake","DestinationQdrantFieldNameMappingConfigModel","DestinationQdrantIndexing","DestinationQdrantLanguage","DestinationQdrantMode","DestinationQdrantNoAuth","DestinationQdrantOpenAI","DestinationQdrantOpenAICompatible","DestinationQdrantProcessingConfigModel","DestinationQdrantSchemasEmbeddingEmbedding5Mode","DestinationQdrantSchemasEmbeddingEmbeddingMode","DestinationQdrantSchemasEmbeddingMode","DestinationQdrantSchemasIndexingAuthMethodMode","DestinationQdrantSchemasIndexingMode","DestinationQdrantSchemasMode","DestinationQdrantSchemasProcessingMode","DestinationQdrantSchemasProcessingTextSplitterMode","DestinationQdrantSchemasProcessingTextSplitterTextSplitterMode","DestinationQdrantTextSplitter","DestinationRedis","DestinationRedisDisable","DestinationRedisMode","DestinationRedisNoTunnel","DestinationRedisPasswordAuthentication","DestinationRedisSSHKeyAuthentication","DestinationRedisSSHTunnelMethod","DestinationRedisSSLModes","DestinationRedisSchemasMode","DestinationRedisSchemasTunnelMethod","DestinationRedisSchemasTunnelMethodTunnelMethod","DestinationRedisTunnelMethod","DestinationRedisVerifyFull","DestinationRedshift","DestinationRedshiftEncryption","DestinationRedshiftEncryptionType","DestinationRedshiftMethod","DestinationRedshiftNoTunnel","DestinationRedshiftPasswordAuthentication","DestinationRedshiftS3BucketRegion","DestinationRedshiftSSHKeyAuthentication","DestinationRedshiftSSHTunnelMethod","DestinationRedshiftSchemasMethod","DestinationRedshiftSchemasTunnelMethod","DestinationRedshiftSchemasTunnelMethodTunnelMethod","DestinationRedshiftTunnelMethod","DestinationResponse","DestinationS3","DestinationS3AvroApacheAvro","DestinationS3Bzip2","DestinationS3CSVCommaSeparatedValues","DestinationS3Codec","DestinationS3Compression","DestinationS3CompressionCodec","DestinationS3CompressionType","DestinationS3Deflate","DestinationS3Flattening","DestinationS3FormatType","DestinationS3GZIP","DestinationS3Glue","DestinationS3GlueCompression","DestinationS3GlueCompressionType","DestinationS3GlueFormatType","DestinationS3GlueGZIP","DestinationS3GlueJSONLinesNewlineDelimitedJSON","DestinationS3GlueNoCompression","DestinationS3GlueOutputFormat","DestinationS3GlueS3BucketRegion","DestinationS3GlueSchemasCompressionType","DestinationS3JSONLinesNewlineDelimitedJSON","DestinationS3NoCompression","DestinationS3OutputFormat","DestinationS3ParquetColumnarStorage","DestinationS3S3BucketRegion","DestinationS3SchemasCodec","DestinationS3SchemasCompression","DestinationS3SchemasCompressionCodec","DestinationS3SchemasCompressionType","DestinationS3SchemasFlattening","DestinationS3SchemasFormatCodec","DestinationS3SchemasFormatCompressionType","DestinationS3SchemasFormatFormatType","DestinationS3SchemasFormatNoCompression","DestinationS3SchemasFormatOutputFormat3Codec","DestinationS3SchemasFormatOutputFormat3CompressionCodecCodec","DestinationS3SchemasForm
atOutputFormatCodec","DestinationS3SchemasFormatOutputFormatCompressionType","DestinationS3SchemasFormatOutputFormatFormatType","DestinationS3SchemasFormatType","DestinationS3SchemasGZIP","DestinationS3SchemasNoCompression","DestinationS3Snappy","DestinationS3Xz","DestinationS3Zstandard","DestinationSftpJSON","DestinationSnowflake","DestinationSnowflakeAuthType","DestinationSnowflakeCortex","DestinationSnowflakeCortexAzureOpenAI","DestinationSnowflakeCortexByMarkdownHeader","DestinationSnowflakeCortexByProgrammingLanguage","DestinationSnowflakeCortexBySeparator","DestinationSnowflakeCortexCohere","DestinationSnowflakeCortexCredentials","DestinationSnowflakeCortexEmbedding","DestinationSnowflakeCortexFake","DestinationSnowflakeCortexFieldNameMappingConfigModel","DestinationSnowflakeCortexIndexing","DestinationSnowflakeCortexLanguage","DestinationSnowflakeCortexMode","DestinationSnowflakeCortexOpenAI","DestinationSnowflakeCortexOpenAICompatible","DestinationSnowflakeCortexProcessingConfigModel","DestinationSnowflakeCortexSchemasEmbeddingEmbedding5Mode","DestinationSnowflakeCortexSchemasEmbeddingEmbeddingMode","DestinationSnowflakeCortexSchemasEmbeddingMode","DestinationSnowflakeCortexSchemasMode","DestinationSnowflakeCortexSchemasProcessingMode","DestinationSnowflakeCortexSchemasProcessingTextSplitterMode","DestinationSnowflakeCortexSchemasProcessingTextSplitterTextSplitterMode","DestinationSnowflakeCortexTextSplitter","DestinationSnowflakeOAuth20","DestinationSnowflakeSchemasAuthType","DestinationSnowflakeSchemasCredentialsAuthType","DestinationSnowflakeSnowflake","DestinationTeradata","DestinationTeradataAllow","DestinationTeradataDisable","DestinationTeradataMode","DestinationTeradataPrefer","DestinationTeradataRequire","DestinationTeradataSSLModes","DestinationTeradataSchemasMode","DestinationTeradataSchemasSSLModeSSLModes5Mode","DestinationTeradataSchemasSSLModeSSLModes6Mode","DestinationTeradataSchemasSSLModeSSLModesMode","DestinationTeradataSchemasSslModeMode","DestinationTeradataVerifyCa","DestinationTeradataVerifyFull","DestinationTypesense","DestinationVectara","DestinationWeaviate","DestinationWeaviateAPIToken","DestinationWeaviateAuthentication","DestinationWeaviateAzureOpenAI","DestinationWeaviateByMarkdownHeader","DestinationWeaviateByProgrammingLanguage","DestinationWeaviateBySeparator","DestinationWeaviateCohere","DestinationWeaviateEmbedding","DestinationWeaviateFake","DestinationWeaviateFieldNameMappingConfigModel","DestinationWeaviateIndexing","DestinationWeaviateLanguage","DestinationWeaviateMode","DestinationWeaviateOpenAI","DestinationWeaviateOpenAICompatible","DestinationWeaviateProcessingConfigModel","DestinationWeaviateSchemasEmbeddingEmbedding5Mode","DestinationWeaviateSchemasEmbeddingEmbedding6Mode","DestinationWeaviateSchemasEmbeddingEmbedding7Mode","DestinationWeaviateSchemasEmbeddingEmbeddingMode","DestinationWeaviateSchemasEmbeddingMode","DestinationWeaviateSchemasIndexingAuthAuthenticationMode","DestinationWeaviateSchemasIndexingAuthMode","DestinationWeaviateSchemasIndexingMode","DestinationWeaviateSchemasMode","DestinationWeaviateSchemasProcessingMode","DestinationWeaviateSchemasProcessingTextSplitterMode","DestinationWeaviateSchemasProcessingTextSplitterTextSplitterMode","DestinationWeaviateTextSplitter","DestinationWeaviateUsernamePassword","DestinationYellowbrick","DestinationYellowbrickAllow","DestinationYellowbrickDisable","DestinationYellowbrickMode","DestinationYellowbrickNoTunnel","DestinationYellowbrickPasswordAuthentication","DestinationYellowbrickPr
efer","DestinationYellowbrickRequire","DestinationYellowbrickSSHKeyAuthentication","DestinationYellowbrickSSHTunnelMethod","DestinationYellowbrickSSLModes","DestinationYellowbrickSchemasMode","DestinationYellowbrickSchemasSSLModeSSLModes5Mode","DestinationYellowbrickSchemasSSLModeSSLModes6Mode","DestinationYellowbrickSchemasSSLModeSSLModesMode","DestinationYellowbrickSchemasSslModeMode","DestinationYellowbrickSchemasTunnelMethod","DestinationYellowbrickSchemasTunnelMethodTunnelMethod","DestinationYellowbrickTunnelMethod","DestinationYellowbrickVerifyCa","DestinationYellowbrickVerifyFull","DestinationsResponse","DetailType","DetectChangesWithXminSystemColumn","DevNull","Dimension","DimensionsFilter","Disable","Disabled","DistanceMetric","Dixa","DocArrayHnswSearch","Dockerhub","DocumentFileTypeFormatExperimental","DomainRegionCode","DoubleValue","Dremio","Duckdb","DynamoDBRegion","Dynamodb","E2eTestCloud","EUBasedAccount","Elasticsearch","Emailoctopus","Embedding","Enabled","EncryptedTrustServerCertificate","EncryptedVerifyCertificate","Encryption","EncryptionAlgorithm","EncryptionMethod","EncryptionType","EngagementWindowDays","Environment","ExchangeRates","Expression","ExternalTableViaS3","FacebookMarketing","Fake","Faker","Fauna","FieldNameMappingConfigModel","File","FileBasedStreamConfig","FileFormat","Filetype","Filter","FilterName","FilterType","Firebolt","Firestore","Flattening","Format","FormatType","FormatTypeWildcard","Freshcaller","Freshdesk","Freshsales","FromCSV","FromField","FromValue","GCSBucketRegion","GCSGoogleCloudStorage","GCSStaging","GCSTmpFilesAfterwardProcessing","GainsightPx","Gcs","GeographyEnum","GeographyEnumNoDefault","Getlago","Github","GithubCredentials","Gitlab","GitlabCredentials","Glassfrog","GlobalAccount","Gnews","GoogleAds","GoogleAdsCredentials","GoogleAnalyticsDataAPI","GoogleAnalyticsDataAPICredentials","GoogleAnalyticsV4ServiceAccountOnly","GoogleCredentials","GoogleDirectory","GoogleDrive","GoogleDriveCredentials","GooglePagespeedInsights","GoogleSearchConsole","GoogleSheets","GoogleSheetsCredentials","GoogleWebfonts","Granularity","GranularityForGeoLocationRegion","GranularityForPeriodicReports","Greenhouse","Gridly","Gzip","HMACKey","HTTPSPublicWeb","Harvest","Header","HeaderDefinitionType","Hubplanner","Hubspot","HubspotCredentials","IAMRole","IAMUser","In","InListFilter","Indexing","InferenceType","InitiateOauthRequest","InsightConfig","Insightly","Instagram","Instance","Instatus","Int64Value","Intercom","InvalidCDCPositionBehaviorAdvanced","Ip2whois","IssuesStreamExpandWith","Iterable","JSONLinesNewlineDelimitedJSON","Jira","JobCreateRequest","JobResponse","JobStatusEnum","JobTypeEnum","JobsResponse","Jsonl","JsonlFormat","K6Cloud","KeyPairAuthentication","Klarna","Klaviyo","Kyve","LSNCommitBehaviour","Langchain","Language","Launchdarkly","Lemlist","Level","LeverHiring","LeverHiringCredentials","LinkedinAds","LinkedinAdsCredentials","LinkedinPages","Linnworks","LoadingMethod","Local","LoginPassword","Lokalise","Mailchimp","MailchimpCredentials","Mailgun","MailjetSms","Marketo","Metabase","Method","MetricsFilter","MicrosoftOnedrive","MicrosoftOnedriveCredentials","MicrosoftSharepoint","MicrosoftSharepointCredentials","MicrosoftTeams","MicrosoftTeamsCredentials","Milvus","Mixpanel","MockCatalog","Mode","Monday","MondayCredentials","MongoDBAtlas","MongoDBAtlasReplicaSet","MongoDbInstanceType","Mongodb","MongodbInternalPoc","MongodbV2","Mssql","MultiSchema","MyHours","Mysql","NamespaceDefinitionEnum","NamespaceDefinitionEnumNoDefault","NativeNetworkEn
cryptionNNE","Netsuite","NoAuth","NoAuthentication","NoCompression","NoEncryption","NoExternalEmbedding","NoTunnel","NonBreakingSchemaUpdatesBehaviorEnum","NonBreakingSchemaUpdatesBehaviorEnumNoDefault","NoneT","Normalization","NormalizationFlattening","NotExpression","Notion","NotionCredentials","Nullable","NumericFilter","Nytimes","OAuth","OAuth20","OAuth20Credentials","OAuth2AccessToken","OAuth2ConfidentialApplication","OAuthActorNames","OAuthCredentialsConfiguration","OauthAuthentication","Okta","Omnisend","Onesignal","OpenAI","OpenAICompatible","Operator","OptionTitle","OptionsList","OrGroup","Oracle","Orb","Orbit","OriginDatacenterOfTheSurveyMonkeyAccount","OutbrainAmplify","OutputFormat","OutputFormatWildcard","Outreach","Parquet","ParquetColumnarStorage","ParquetFormat","ParsingStrategy","PasswordAuthentication","PaypalTransaction","Paystack","Pendo","PeriodUsedForMostPopularStreams","PermissionCreateRequest","PermissionResponse","PermissionResponseRead","PermissionScope","PermissionType","PermissionUpdateRequest","PermissionsResponse","Persistiq","PersonalAccessToken","PexelsAPI","Pinecone","Pinterest","PinterestCredentials","Pipedrive","PivotCategory","Plugin","Pocket","Pokeapi","PokemonName","PolygonStockAPI","Postgres","Posthog","Postmarkapp","Prefer","Preferred","Prestashop","PrivateApp","PrivateToken","Processing","ProcessingConfigModel","ProductCatalog","ProjectSecret","PublicPermissionType","Pubsub","PunkAPI","Pypi","Qdrant","Qualaroo","Railz","ReadChangesUsingBinaryLogCDC","ReadChangesUsingChangeDataCaptureCDC","ReadChangesUsingWriteAheadLogCDC","Recharge","RecommendedManagedTables","Recreation","Recruitee","Recurly","Redis","Redshift","Region","ReplicaSet","ReportConfig","ReportName","ReportOptions","ReportRecordTypes","ReportingDataObject","Require","Required","Retently","RetentlyCredentials","RkiCovid","RoleBasedAuthentication","Rss","S3","S3AmazonWebServices","S3BucketRegion","S3Glue","SCPSecureCopyProtocol","SFTPSecureFileTransferProtocol","SQLInserts","SSHKeyAuthentication","SSHSecureShell","SSHTunnelMethod","SSLMethod","SSLModes","Salesforce","Salesloft","SandboxAccessToken","SapFieldglass","ScanChangesWithUserDefinedCursor","ScheduleTypeEnum","ScheduleTypeWithBasicEnum","SchemeBasicAuth","SchemeClientCredentials","SearchCriteria","SearchScope","Secoda","Security","SelectedFieldInfo","SelfManagedReplicaSet","Sendgrid","Sendinblue","Senseforce","Sentry","SerializationLibrary","ServiceAccount","ServiceAccountKey","ServiceAccountKeyAuthentication","ServiceKeyAuthentication","ServiceName","Sftp","SftpBulk","SftpJSON","ShareTypeUsedForMostPopularSharedStream","Shopify","ShopifyAuthorizationMethod","ShopifyCredentials","Shortio","SignInViaGoogleOAuth","SignInViaSlackOAuth","Silent","SingleSchema","SingleStoreAccessToken","Slack","SlackCredentials","Smaily","Smartengage","Smartsheets","SmartsheetsCredentials","SnapchatMarketing","Snappy","Snowflake","SnowflakeCortex","SnowflakeCredentials","SonarCloud","SortBy","SourceAha","SourceAircall","SourceAirtable","SourceAirtableAirtable","SourceAirtableAuthMethod","SourceAirtableAuthentication","SourceAirtableOAuth20","SourceAirtableSchemasAuthMethod","SourceAmazonAds","SourceAmazonAdsAmazonAds","SourceAmazonAdsAuthType","SourceAmazonSellerPartner","SourceAmazonSellerPartnerAmazonSellerPartner","SourceAmazonSellerPartnerAuthType","SourceAmazonSqs","SourceAmazonSqsAWSRegion","SourceAmplitude","SourceApifyDataset","SourceAppfollow","SourceAsana","SourceAsanaAsana","SourceAsanaCredentialsTitle","SourceAsanaSchemasCredentialsTitle","So
urceAuth0","SourceAuth0AuthenticationMethod","SourceAuth0SchemasAuthenticationMethod","SourceAuth0SchemasCredentialsAuthenticationMethod","SourceAwsCloudtrail","SourceAzureBlobStorage","SourceAzureBlobStorageAuthType","SourceAzureBlobStorageAuthentication","SourceAzureBlobStorageAzureBlobStorage","SourceAzureBlobStorageFiletype","SourceAzureBlobStorageHeaderDefinitionType","SourceAzureBlobStorageMode","SourceAzureBlobStorageSchemasAuthType","SourceAzureBlobStorageSchemasFiletype","SourceAzureBlobStorageSchemasHeaderDefinitionType","SourceAzureBlobStorageSchemasStreamsFiletype","SourceAzureBlobStorageSchemasStreamsFormatFiletype","SourceAzureBlobStorageSchemasStreamsFormatFormatFiletype","SourceAzureTable","SourceBambooHr","SourceBigquery","SourceBigqueryBigquery","SourceBingAds","SourceBingAdsBingAds","SourceBraintree","SourceBraintreeEnvironment","SourceBraze","SourceCart","SourceCartAuthType","SourceCartAuthorizationMethod","SourceCartSchemasAuthType","SourceChargebee","SourceChartmogul","SourceClickhouse","SourceClickhouseClickhouse","SourceClickhouseNoTunnel","SourceClickhousePasswordAuthentication","SourceClickhouseSSHKeyAuthentication","SourceClickhouseSSHTunnelMethod","SourceClickhouseSchemasTunnelMethod","SourceClickhouseSchemasTunnelMethodTunnelMethod","SourceClickhouseTunnelMethod","SourceClickupAPI","SourceClockify","SourceCloseCom","SourceCoda","SourceCoinAPI","SourceCoinmarketcap","SourceConfigcat","SourceConfiguration","SourceConfluence","SourceConvex","SourceConvexConvex","SourceCreateRequest","SourceDatascope","SourceDelighted","SourceDixa","SourceDockerhub","SourceDremio","SourceDynamodb","SourceDynamodbAuthType","SourceDynamodbCredentials","SourceDynamodbDynamodb","SourceDynamodbDynamodbRegion","SourceDynamodbSchemasAuthType","SourceE2eTestCloud","SourceE2eTestCloudSchemasType","SourceE2eTestCloudType","SourceEmailoctopus","SourceExchangeRates","SourceFacebookMarketing","SourceFacebookMarketingActionReportTime","SourceFacebookMarketingFacebookMarketing","SourceFacebookMarketingValidEnums","SourceFaker","SourceFauna","SourceFaunaDeletionMode","SourceFaunaSchemasDeletionMode","SourceFile","SourceFileS3AmazonWebServices","SourceFileSchemasProviderStorage","SourceFileSchemasProviderStorageProvider6Storage","SourceFileSchemasProviderStorageProvider7Storage","SourceFileSchemasProviderStorageProviderStorage","SourceFileSchemasStorage","SourceFileStorage","SourceFirebolt","SourceFireboltFirebolt","SourceFreshcaller","SourceFreshdesk","SourceFreshsales","SourceGCSStreamConfig","SourceGainsightPx","SourceGcs","SourceGcsAutogenerated","SourceGcsCSVFormat","SourceGcsCSVHeaderDefinition","SourceGcsFiletype","SourceGcsFormat","SourceGcsFromCSV","SourceGcsGcs","SourceGcsHeaderDefinitionType","SourceGcsInferenceType","SourceGcsSchemasHeaderDefinitionType","SourceGcsSchemasStreamsHeaderDefinitionType","SourceGcsUserProvided","SourceGcsValidationPolicy","SourceGetlago","SourceGithub","SourceGithubAuthentication","SourceGithubGithub","SourceGithubOptionTitle","SourceGithubPersonalAccessToken","SourceGitlab","SourceGitlabAuthType","SourceGitlabAuthorizationMethod","SourceGitlabGitlab","SourceGitlabOAuth20","SourceGitlabSchemasAuthType","SourceGlassfrog","SourceGnews","SourceGoogleAds","SourceGoogleAdsGoogleAds","SourceGoogleAnalyticsDataAPI","SourceGoogleAnalyticsDataAPIAndGroup","SourceGoogleAnalyticsDataAPIAuthType","SourceGoogleAnalyticsDataAPIBetweenFilter","SourceGoogleAnalyticsDataAPICredentials","SourceGoogleAnalyticsDataAPICustomReportConfig","SourceGoogleAnalyticsDataAPIDisabled","So
urceGoogleAnalyticsDataAPIDoubleValue","SourceGoogleAnalyticsDataAPIEnabled","SourceGoogleAnalyticsDataAPIExpression","SourceGoogleAnalyticsDataAPIFilter","SourceGoogleAnalyticsDataAPIFilterName","SourceGoogleAnalyticsDataAPIFilterType","SourceGoogleAnalyticsDataAPIFromValue","SourceGoogleAnalyticsDataAPIGoogleAnalyticsDataAPI","SourceGoogleAnalyticsDataAPIGranularity","SourceGoogleAnalyticsDataAPIInListFilter","SourceGoogleAnalyticsDataAPIInt64Value","SourceGoogleAnalyticsDataAPINotExpression","SourceGoogleAnalyticsDataAPINumericFilter","SourceGoogleAnalyticsDataAPIOrGroup","SourceGoogleAnalyticsDataAPISchemasAuthType","SourceGoogleAnalyticsDataAPISchemasBetweenFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ToValueValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Filter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ToValueValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilt
erDimensionsFilter2ExpressionsFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2FilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ToValueValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter"
,"SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterToValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayEnabled","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayExpression","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFromValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterExpression","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFromValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue"
,"SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ToValueValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2FilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ToValueValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Filter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterName
","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilter4ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterExpression","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFromValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterToValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterToValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilt
er","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayToValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValueType","SourceGoogleAnalyticsDataAPISchemasDoubleValue","SourceGoogleAnalyticsDataAPISchemasEnabled","SourceGoogleAnalyticsDataAPISchemasExpression","SourceGoogleAnalyticsDataAPISchemasFilter","SourceGoogleAnalyticsDataAPISchemasFilterName","SourceGoogleAnalyticsDataAPISchemasFilterType","SourceGoogleAnalyticsDataAPISchemasFromValue","SourceGoogleAnalyticsDataAPISchemasInListFilter","SourceGoogleAnalyticsDataAPISchemasInt64Value","SourceGoogleAnalyticsDataAPISchemasNumericFilter","SourceGoogleAnalyticsDataAPISchemasStringFilter","SourceGoogleAnalyticsDataAPISchemasToValue","SourceGoogleAnalyticsDataAPISchemasValidEnums","SourceGoogleAnalyticsDataAPISchemasValue","SourceGoogleAnalyticsDataAPISchemasValueType","SourceGoogleAnalyticsDataAPIStringFilter","SourceGoogleAnalyticsDataAPIToValue","SourceGoogleAnalyticsDataAPIValidEnums","SourceGoogleAnalyticsDataAPIValue","SourceGoogleAnalyticsDataAPIValueType","SourceGoogleAnalyticsV4ServiceAccountOnly","SourceGoogleAnalyticsV4ServiceAccountOnlyAuthType","SourceGoogleAnalyticsV4ServiceAccountOnlyCredentials","SourceGoogleAnalyticsV4ServiceAccountOnlyServiceAccountKeyAuthentication","SourceGoogleDirectory","SourceGoogleDirectoryCredentialsTitle","SourceGoogleDirectoryGoogleCredentials","SourceGoogleDirectorySchemasCredentialsTitle","SourceGoogleDrive","SourceGoogleDriveAuthType","SourceGoogleDriveAuthenticateViaGoogleOAuth","SourceGoogleDriveAuthentication","SourceGoogleDriveAutogenerated","SourceGoogleDriveAvroFormat","SourceGoogleDriveCSVFormat","SourceGoogleDriveCSVHeaderDefinition","SourceGoogleDriveDocumentFileTypeFormatExperimental","SourceGoogleDriveFileBasedStreamConfig","SourceGoogleDriveFiletype","SourceGoogleDriveFormat","SourceGoogleDriveFromCSV","SourceGoogleDriveGoogleDrive","SourceGoogleDriveHeaderDefinitionType","SourceGoogleDriveJsonlFormat","SourceGoogleDriveLocal","SourceGoogleDriveMode","SourceGoogleDriveParquetFormat","SourceGoogleDriveParsingStrategy","SourceGoogleDriveProcessing","SourceGoogleDriveSchemasAuthType","SourceGoogleDriveSchemasFiletype","SourceGoogleDriveSchemasHeaderDefinitionType","SourceGoogleDriveSchemasStreamsFiletype","SourceGoogleDriveSchemasStreamsFormatFiletype","SourceGoogleDriveSchemasStreamsFormatFormatFiletype","SourceGoogleDriveSchemasStreamsHeaderDefinitionType","SourceGoogleDriveServiceAccountKeyAuthentication","SourceGoogleDriveUserProvided","SourceGoogleDriveValidationPolicy","SourceGooglePagespeedInsights","SourceGoogleSearchConsole","SourceGoogleSearchConsoleAuthType","SourceGoogleSearchConsoleCustomReportConfig","SourceGoogleSearchConsoleGoogleSearchConsole","SourceGoogleSearchConsoleOAuth","SourceGoogleSearchConsoleSchemasAuthType","SourceGoogleSearchConsoleServiceAccountKeyAuthentication","SourceGoogleSearchConsoleValidEnums","SourceGoogleSheets","SourceGoogleSheetsAuthType","SourceGoogleSheetsAuthenticateViaGoogleOAuth","SourceGoogleSheetsAuthentication","SourceGoogleSheetsGoogleSheets","SourceGoogleSheetsSchemasAuthType","SourceGoogleSheetsServiceAccountKeyAuthentication","SourceGoogleWebfonts","SourceGreenhouse","SourceGridly","SourceHarvest","SourceHarvestAuthType","SourceHarvestAuthenticateWithPersonalAccessToken","SourceHarvestAuthenticationMechanism","SourceHarvestSchemasAuthType","SourceHubplanner","SourceHubspot","SourceHubspotAuthType"
,"SourceHubspotAuthentication","SourceHubspotHubspot","SourceHubspotOAuth","SourceHubspotSchemasAuthType","SourceInsightly","SourceInstagram","SourceInstagramInstagram","SourceInstatus","SourceIntercom","SourceIntercomIntercom","SourceIp2whois","SourceIterable","SourceJira","SourceK6Cloud","SourceKlarna","SourceKlarnaRegion","SourceKlaviyo","SourceKyve","SourceLaunchdarkly","SourceLemlist","SourceLeverHiring","SourceLeverHiringAuthType","SourceLeverHiringAuthenticationMechanism","SourceLeverHiringEnvironment","SourceLeverHiringLeverHiring","SourceLeverHiringSchemasAuthType","SourceLinkedinAds","SourceLinkedinAdsAuthMethod","SourceLinkedinAdsAuthentication","SourceLinkedinAdsLinkedinAds","SourceLinkedinAdsOAuth20","SourceLinkedinAdsSchemasAuthMethod","SourceLinkedinPages","SourceLinkedinPagesAccessToken","SourceLinkedinPagesAuthMethod","SourceLinkedinPagesAuthentication","SourceLinkedinPagesOAuth20","SourceLinkedinPagesSchemasAuthMethod","SourceLinnworks","SourceLokalise","SourceMailchimp","SourceMailchimpAuthType","SourceMailchimpAuthentication","SourceMailchimpMailchimp","SourceMailchimpOAuth20","SourceMailchimpSchemasAuthType","SourceMailgun","SourceMailjetSms","SourceMarketo","SourceMetabase","SourceMicrosoftOnedrive","SourceMicrosoftOnedriveAuthType","SourceMicrosoftOnedriveAuthentication","SourceMicrosoftOnedriveAutogenerated","SourceMicrosoftOnedriveAvroFormat","SourceMicrosoftOnedriveCSVFormat","SourceMicrosoftOnedriveCSVHeaderDefinition","SourceMicrosoftOnedriveDocumentFileTypeFormatExperimental","SourceMicrosoftOnedriveFileBasedStreamConfig","SourceMicrosoftOnedriveFiletype","SourceMicrosoftOnedriveFormat","SourceMicrosoftOnedriveFromCSV","SourceMicrosoftOnedriveHeaderDefinitionType","SourceMicrosoftOnedriveJsonlFormat","SourceMicrosoftOnedriveLocal","SourceMicrosoftOnedriveMicrosoftOnedrive","SourceMicrosoftOnedriveMode","SourceMicrosoftOnedriveParquetFormat","SourceMicrosoftOnedriveParsingStrategy","SourceMicrosoftOnedriveProcessing","SourceMicrosoftOnedriveSchemasAuthType","SourceMicrosoftOnedriveSchemasFiletype","SourceMicrosoftOnedriveSchemasHeaderDefinitionType","SourceMicrosoftOnedriveSchemasStreamsFiletype","SourceMicrosoftOnedriveSchemasStreamsFormatFiletype","SourceMicrosoftOnedriveSchemasStreamsFormatFormatFiletype","SourceMicrosoftOnedriveSchemasStreamsHeaderDefinitionType","SourceMicrosoftOnedriveUserProvided","SourceMicrosoftOnedriveValidationPolicy","SourceMicrosoftSharepoint","SourceMicrosoftSharepointAuthType","SourceMicrosoftSharepointAuthenticateViaMicrosoftOAuth","SourceMicrosoftSharepointAuthentication","SourceMicrosoftSharepointAutogenerated","SourceMicrosoftSharepointAvroFormat","SourceMicrosoftSharepointCSVFormat","SourceMicrosoftSharepointCSVHeaderDefinition","SourceMicrosoftSharepointDocumentFileTypeFormatExperimental","SourceMicrosoftSharepointFileBasedStreamConfig","SourceMicrosoftSharepointFiletype","SourceMicrosoftSharepointFormat","SourceMicrosoftSharepointFromCSV","SourceMicrosoftSharepointHeaderDefinitionType","SourceMicrosoftSharepointJsonlFormat","SourceMicrosoftSharepointLocal","SourceMicrosoftSharepointMicrosoftSharepoint","SourceMicrosoftSharepointMode","SourceMicrosoftSharepointParquetFormat","SourceMicrosoftSharepointParsingStrategy","SourceMicrosoftSharepointProcessing","SourceMicrosoftSharepointSchemasAuthType","SourceMicrosoftSharepointSchemasFiletype","SourceMicrosoftSharepointSchemasHeaderDefinitionType","SourceMicrosoftSharepointSchemasStreamsFiletype","SourceMicrosoftSharepointSchemasStreamsFormatFiletype","SourceMicrosoftSharepointSch
emasStreamsFormatFormatFiletype","SourceMicrosoftSharepointSchemasStreamsHeaderDefinitionType","SourceMicrosoftSharepointSearchScope","SourceMicrosoftSharepointServiceKeyAuthentication","SourceMicrosoftSharepointUserProvided","SourceMicrosoftSharepointValidationPolicy","SourceMicrosoftTeams","SourceMicrosoftTeamsAuthType","SourceMicrosoftTeamsAuthenticationMechanism","SourceMicrosoftTeamsMicrosoftTeams","SourceMicrosoftTeamsSchemasAuthType","SourceMixpanel","SourceMixpanelOptionTitle","SourceMixpanelRegion","SourceMixpanelSchemasOptionTitle","SourceMonday","SourceMondayAuthType","SourceMondayAuthorizationMethod","SourceMondayMonday","SourceMondayOAuth20","SourceMondaySchemasAuthType","SourceMongodbInternalPoc","SourceMongodbV2","SourceMongodbV2ClusterType","SourceMongodbV2SchemasClusterType","SourceMssql","SourceMssqlEncryptedTrustServerCertificate","SourceMssqlEncryptedVerifyCertificate","SourceMssqlInvalidCDCPositionBehaviorAdvanced","SourceMssqlMethod","SourceMssqlMssql","SourceMssqlNoTunnel","SourceMssqlPasswordAuthentication","SourceMssqlSSHKeyAuthentication","SourceMssqlSSHTunnelMethod","SourceMssqlSSLMethod","SourceMssqlSchemasMethod","SourceMssqlSchemasSSLMethodSSLMethodSSLMethod","SourceMssqlSchemasSslMethod","SourceMssqlSchemasSslMethodSslMethod","SourceMssqlSchemasTunnelMethod","SourceMssqlSchemasTunnelMethodTunnelMethod","SourceMssqlTunnelMethod","SourceMyHours","SourceMysql","SourceMysqlInvalidCDCPositionBehaviorAdvanced","SourceMysqlMethod","SourceMysqlMode","SourceMysqlMysql","SourceMysqlNoTunnel","SourceMysqlPasswordAuthentication","SourceMysqlSSHKeyAuthentication","SourceMysqlSSHTunnelMethod","SourceMysqlSSLModes","SourceMysqlScanChangesWithUserDefinedCursor","SourceMysqlSchemasMethod","SourceMysqlSchemasMode","SourceMysqlSchemasSSLModeSSLModesMode","SourceMysqlSchemasSslModeMode","SourceMysqlSchemasTunnelMethod","SourceMysqlSchemasTunnelMethodTunnelMethod","SourceMysqlTunnelMethod","SourceMysqlUpdateMethod","SourceMysqlVerifyCA","SourceNetsuite","SourceNotion","SourceNotionAccessToken","SourceNotionAuthType","SourceNotionAuthenticationMethod","SourceNotionNotion","SourceNotionOAuth20","SourceNotionSchemasAuthType","SourceNytimes","SourceOkta","SourceOktaAPIToken","SourceOktaAuthType","SourceOktaAuthorizationMethod","SourceOktaOAuth20","SourceOktaSchemasAuthType","SourceOmnisend","SourceOnesignal","SourceOracle","SourceOracleConnectionType","SourceOracleEncryptionMethod","SourceOracleNoTunnel","SourceOracleOracle","SourceOraclePasswordAuthentication","SourceOracleSSHKeyAuthentication","SourceOracleSSHTunnelMethod","SourceOracleSchemasTunnelMethod","SourceOracleSchemasTunnelMethodTunnelMethod","SourceOracleTunnelMethod","SourceOrb","SourceOrbit","SourceOutbrainAmplify","SourceOutbrainAmplifyAccessToken","SourceOutbrainAmplifyAuthenticationMethod","SourceOutbrainAmplifyUsernamePassword","SourceOutreach","SourcePatchRequest","SourcePaypalTransaction","SourcePaystack","SourcePendo","SourcePersistiq","SourcePexelsAPI","SourcePinterest","SourcePinterestAuthMethod","SourcePinterestLevel","SourcePinterestPinterest","SourcePinterestSchemasValidEnums","SourcePinterestValidEnums","SourcePipedrive","SourcePocket","SourcePocketSortBy","SourcePokeapi","SourcePolygonStockAPI","SourcePostgres","SourcePostgresAllow","SourcePostgresDisable","SourcePostgresInvalidCDCPositionBehaviorAdvanced","SourcePostgresMethod","SourcePostgresMode","SourcePostgresNoTunnel","SourcePostgresPasswordAuthentication","SourcePostgresPostgres","SourcePostgresPrefer","SourcePostgresRequire","SourcePostgresSSHKeyAut
hentication","SourcePostgresSSHTunnelMethod","SourcePostgresSSLModes","SourcePostgresScanChangesWithUserDefinedCursor","SourcePostgresSchemasMethod","SourcePostgresSchemasMode","SourcePostgresSchemasReplicationMethodMethod","SourcePostgresSchemasSSLModeSSLModes5Mode","SourcePostgresSchemasSSLModeSSLModes6Mode","SourcePostgresSchemasSSLModeSSLModesMode","SourcePostgresSchemasSslModeMode","SourcePostgresSchemasTunnelMethod","SourcePostgresSchemasTunnelMethodTunnelMethod","SourcePostgresTunnelMethod","SourcePostgresUpdateMethod","SourcePostgresVerifyCa","SourcePostgresVerifyFull","SourcePosthog","SourcePostmarkapp","SourcePrestashop","SourcePunkAPI","SourcePutRequest","SourcePypi","SourceQualaroo","SourceRailz","SourceRecharge","SourceRecreation","SourceRecruitee","SourceRecurly","SourceRedshift","SourceRedshiftRedshift","SourceResponse","SourceRetently","SourceRetentlyAuthType","SourceRetentlyAuthenticationMechanism","SourceRetentlyRetently","SourceRetentlySchemasAuthType","SourceRkiCovid","SourceRss","SourceS3","SourceS3Autogenerated","SourceS3AvroFormat","SourceS3CSVFormat","SourceS3CSVHeaderDefinition","SourceS3DocumentFileTypeFormatExperimental","SourceS3FileBasedStreamConfig","SourceS3FileFormat","SourceS3Filetype","SourceS3Format","SourceS3FromCSV","SourceS3HeaderDefinitionType","SourceS3InferenceType","SourceS3JsonlFormat","SourceS3Local","SourceS3Mode","SourceS3ParquetFormat","SourceS3ParsingStrategy","SourceS3Processing","SourceS3S3","SourceS3SchemasFiletype","SourceS3SchemasFormatFiletype","SourceS3SchemasHeaderDefinitionType","SourceS3SchemasStreamsFiletype","SourceS3SchemasStreamsFormatFiletype","SourceS3SchemasStreamsFormatFormat4Filetype","SourceS3SchemasStreamsFormatFormat5Filetype","SourceS3SchemasStreamsFormatFormatFiletype","SourceS3SchemasStreamsHeaderDefinitionType","SourceS3UserProvided","SourceS3ValidationPolicy","SourceSalesforce","SourceSalesforceSalesforce","SourceSalesloft","SourceSalesloftAuthType","SourceSalesloftCredentials","SourceSalesloftSchemasAuthType","SourceSapFieldglass","SourceSecoda","SourceSendgrid","SourceSendinblue","SourceSenseforce","SourceSentry","SourceSftp","SourceSftpAuthMethod","SourceSftpAuthentication","SourceSftpBulk","SourceSftpBulkAuthType","SourceSftpBulkAuthentication","SourceSftpBulkAutogenerated","SourceSftpBulkAvroFormat","SourceSftpBulkCSVFormat","SourceSftpBulkCSVHeaderDefinition","SourceSftpBulkDocumentFileTypeFormatExperimental","SourceSftpBulkFileBasedStreamConfig","SourceSftpBulkFiletype","SourceSftpBulkFormat","SourceSftpBulkFromCSV","SourceSftpBulkHeaderDefinitionType","SourceSftpBulkInferenceType","SourceSftpBulkJsonlFormat","SourceSftpBulkLocal","SourceSftpBulkMode","SourceSftpBulkParquetFormat","SourceSftpBulkParsingStrategy","SourceSftpBulkProcessing","SourceSftpBulkSchemasAuthType","SourceSftpBulkSchemasFiletype","SourceSftpBulkSchemasHeaderDefinitionType","SourceSftpBulkSchemasMode","SourceSftpBulkSchemasStreamsFiletype","SourceSftpBulkSchemasStreamsFormatFiletype","SourceSftpBulkSchemasStreamsFormatFormatFiletype","SourceSftpBulkSchemasStreamsHeaderDefinitionType","SourceSftpBulkUserProvided","SourceSftpBulkValidationPolicy","SourceSftpPasswordAuthentication","SourceSftpSSHKeyAuthentication","SourceSftpSchemasAuthMethod","SourceShopify","SourceShopifyAuthMethod","SourceShopifyOAuth20","SourceShopifySchemasAuthMethod","SourceShopifyShopify","SourceShortio","SourceSlack","SourceSlackAPIToken","SourceSlackAuthenticationMechanism","SourceSlackOptionTitle","SourceSlackSchemasOptionTitle","SourceSlackSlack","SourceSmaily","So
urceSmartengage","SourceSmartsheets","SourceSmartsheetsAuthType","SourceSmartsheetsAuthorizationMethod","SourceSmartsheetsOAuth20","SourceSmartsheetsSchemasAuthType","SourceSmartsheetsSmartsheets","SourceSnapchatMarketing","SourceSnapchatMarketingSnapchatMarketing","SourceSnowflake","SourceSnowflakeAuthType","SourceSnowflakeAuthorizationMethod","SourceSnowflakeOAuth20","SourceSnowflakeSchemasAuthType","SourceSnowflakeSnowflake","SourceSnowflakeUsernameAndPassword","SourceSonarCloud","SourceSpacexAPI","SourceSquare","SourceSquareAPIKey","SourceSquareAuthType","SourceSquareAuthentication","SourceSquareSchemasAuthType","SourceSquareSquare","SourceStrava","SourceStravaAuthType","SourceStravaStrava","SourceStripe","SourceSurveySparrow","SourceSurveySparrowURLBase","SourceSurveymonkey","SourceSurveymonkeyAuthMethod","SourceSurveymonkeySurveymonkey","SourceTempo","SourceTheGuardianAPI","SourceTiktokMarketing","SourceTiktokMarketingAuthType","SourceTiktokMarketingAuthenticationMethod","SourceTiktokMarketingOAuth20","SourceTiktokMarketingSchemasAuthType","SourceTiktokMarketingTiktokMarketing","SourceTrello","SourceTrustpilot","SourceTrustpilotAPIKey","SourceTrustpilotAuthType","SourceTrustpilotAuthorizationMethod","SourceTrustpilotOAuth20","SourceTrustpilotSchemasAuthType","SourceTvmazeSchedule","SourceTwilio","SourceTwilioTaskrouter","SourceTwitter","SourceTypeform","SourceTypeformAuthType","SourceTypeformAuthorizationMethod","SourceTypeformOAuth20","SourceTypeformPrivateToken","SourceTypeformSchemasAuthType","SourceTypeformTypeform","SourceUsCensus","SourceVantage","SourceWebflow","SourceWhiskyHunter","SourceWikipediaPageviews","SourceWoocommerce","SourceXkcd","SourceYandexMetrica","SourceYotpo","SourceYoutubeAnalytics","SourceYoutubeAnalyticsYoutubeAnalytics","SourceZendeskChat","SourceZendeskChatAccessToken","SourceZendeskChatAuthorizationMethod","SourceZendeskChatCredentials","SourceZendeskChatOAuth20","SourceZendeskChatSchemasCredentials","SourceZendeskChatZendeskChat","SourceZendeskSell","SourceZendeskSunshine","SourceZendeskSunshineAPIToken","SourceZendeskSunshineAuthMethod","SourceZendeskSunshineAuthorizationMethod","SourceZendeskSunshineOAuth20","SourceZendeskSunshineSchemasAuthMethod","SourceZendeskSunshineZendeskSunshine","SourceZendeskSupport","SourceZendeskSupportAPIToken","SourceZendeskSupportAuthentication","SourceZendeskSupportCredentials","SourceZendeskSupportOAuth20","SourceZendeskSupportSchemasCredentials","SourceZendeskSupportZendeskSupport","SourceZendeskTalk","SourceZendeskTalkAPIToken","SourceZendeskTalkAuthType","SourceZendeskTalkAuthentication","SourceZendeskTalkOAuth20","SourceZendeskTalkSchemasAuthType","SourceZendeskTalkZendeskTalk","SourceZenloop","SourceZohoCrm","SourceZohoCrmEnvironment","SourceZoom","SourcesResponse","SpacexAPI","Square","SquareCredentials","StandaloneMongoDbInstance","Standard","StandardInserts","State","StateFilter","Status","Storage","StorageProvider","Strategies","Strava","StreamConfiguration","StreamConfigurations","StreamProperties","StreamsCriteria","StringFilter","Stripe","SurveyMonkeyAuthorizationMethod","SurveySparrow","Surveymonkey","SurveymonkeyCredentials","SwipeUpAttributionWindow","SystemIDSID","TLSEncryptedVerifyCertificate","Tempo","Teradata","TestDestination","TestDestinationType","TextSplitter","TheGuardianAPI","TiktokMarketing","TiktokMarketingCredentials","TimeGranularity","ToValue","TopHeadlinesTopic","TransformationQueryRunType","Trello","Trustpilot","TunnelMethod","TvmazeSchedule","Twilio","TwilioTaskrouter","Twitter","Type","
Typeform","TypeformCredentials","Typesense","URLBase","Unencrypted","UnexpectedFieldBehavior","UpdateMethod","UploadingMethod","UsCensus","UserProvided","UserResponse","UsernameAndPassword","UsernamePassword","UsersResponse","ValidActionBreakdowns","ValidAdSetStatuses","ValidAdStatuses","ValidBreakdowns","ValidCampaignStatuses","ValidationPolicy","Validenums","Value","ValueType","Vantage","Vectara","VerifyCa","VerifyFull","VerifyIdentity","ViaAPI","ViewAttributionWindow","ViewWindowDays","Weaviate","Webflow","WhiskyHunter","WikipediaPageviews","Woocommerce","WorkspaceCreateRequest","WorkspaceOAuthCredentialsRequest","WorkspaceResponse","WorkspaceUpdateRequest","WorkspacesResponse","Xkcd","Xz","YandexMetrica","Yellowbrick","Yotpo","YoutubeAnalytics","YoutubeAnalyticsCredentials","ZendeskChat","ZendeskChatCredentials","ZendeskSell","ZendeskSunshine","ZendeskSunshineCredentials","ZendeskSupport","ZendeskSupportCredentials","ZendeskTalk","ZendeskTalkCredentials","Zenloop","ZohoCRMEdition","ZohoCrm","Zoom","Zstandard"] +__all__ = ["APIAccessToken","APIKey","APIKeyAuth","APIKeySecret","APIParameterConfigModel","APIPassword","APIToken","AWSEnvironment","AWSRegion","AWSS3Staging","AWSSellerPartnerAccountType","AccessToken","AccessTokenIsRequiredForAuthenticationRequests","AccountNames","ActionReportTime","ActorTypeEnum","AdAnalyticsReportConfiguration","Aha","AirbyteAPIConnectionSchedule","Aircall","Airtable","Allow","AmazonAds","AmazonS3","AmazonSellerPartner","AmazonSqs","Amplitude","AndGroup","ApifyDataset","Appfollow","Applications","Asana","AsanaCredentials","Astra","Auth0","AuthMethod","AuthType","AuthenticateViaAPIKey","AuthenticateViaAccessKeys","AuthenticateViaAsanaOauth","AuthenticateViaFacebookMarketingOauth","AuthenticateViaGoogleOauth","AuthenticateViaHarvestOAuth","AuthenticateViaLeverAPIKey","AuthenticateViaLeverOAuth","AuthenticateViaMicrosoft","AuthenticateViaMicrosoftOAuth","AuthenticateViaMicrosoftOAuth20","AuthenticateViaOAuth","AuthenticateViaOAuth20","AuthenticateViaOauth2","AuthenticateViaPassword","AuthenticateViaPrivateKey","AuthenticateViaRetentlyOAuth","AuthenticateViaStorageAccountKey","AuthenticateWithAPIToken","AuthenticateWithPersonalAccessToken","Authentication","AuthenticationMechanism","AuthenticationMethod","AuthenticationMode","AuthenticationType","AuthenticationViaGoogleOAuth","AuthenticationWildcard","Authorization","AuthorizationMethod","AuthorizationType","Autogenerated","Avro","AvroApacheAvro","AvroFormat","AwsCloudtrail","AwsDatalake","AzBlobAzureBlobStorage","AzureBlobStorage","AzureBlobStorageCredentials","AzureOpenAI","AzureTable","BambooHr","BaseURL","BetweenFilter","Bigquery","BingAds","BothUsernameAndPasswordIsRequiredForAuthenticationRequest","Braintree","Braze","ByMarkdownHeader","ByProgrammingLanguage","BySeparator","Bzip2","CSVCommaSeparatedValues","CSVFormat","CSVHeaderDefinition","CacheType","CaptureModeAdvanced","Cart","Categories","CentralAPIRouter","Chargebee","Chartmogul","ChooseHowToPartitionData","Clazar","ClickWindowDays","Clickhouse","ClickupAPI","Clockify","CloseCom","ClusterType","Coda","Codec","Cohere","CohortReportSettings","CohortReports","Cohorts","CohortsRange","CoinAPI","Coinmarketcap","Collection","Compression","CompressionCodec","CompressionCodecOptional","CompressionType","Configcat","Confluence","ConnectBy","ConnectionCreateRequest","ConnectionPatchRequest","ConnectionResponse","ConnectionScheduleResponse","ConnectionStatusEnum","ConnectionSyncModeEnum","ConnectionType","ConnectionsResponse","ContentType","ContinuousFeed","Co
nversionReportTime","Convex","Country","Credential","CredentialType","Credentials","CredentialsTitle","Csv","CustomQueriesArray","CustomReportConfig","CustomerStatus","DataCenterLocation","DataFreshness","DataRegion","DataSource","DataSourceType","DataType","Databricks","Datadog","Datascope","DatasetLocation","DateRange","DefaultVectorizer","Deflate","DeletionMode","Delighted","DestinationAstra","DestinationAstraLanguage","DestinationAstraMode","DestinationAstraSchemasEmbeddingEmbedding1Mode","DestinationAstraSchemasEmbeddingEmbeddingMode","DestinationAstraSchemasEmbeddingMode","DestinationAstraSchemasMode","DestinationAstraSchemasProcessingMode","DestinationAstraSchemasProcessingTextSplitterMode","DestinationAstraSchemasProcessingTextSplitterTextSplitterMode","DestinationAwsDatalake","DestinationAwsDatalakeCompressionCodecOptional","DestinationAwsDatalakeCredentialsTitle","DestinationAwsDatalakeFormatTypeWildcard","DestinationAzureBlobStorage","DestinationAzureBlobStorageAzureBlobStorage","DestinationAzureBlobStorageFormatType","DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON","DestinationBigquery","DestinationBigqueryCredentialType","DestinationBigqueryHMACKey","DestinationBigqueryMethod","DestinationClickhouse","DestinationClickhouseSchemasTunnelMethod","DestinationClickhouseTunnelMethod","DestinationConfiguration","DestinationConvex","DestinationCreateRequest","DestinationDatabricks","DestinationDatabricksAzureBlobStorage","DestinationDatabricksDataSourceType","DestinationDatabricksS3BucketRegion","DestinationDatabricksSchemasDataSourceType","DestinationDevNull","DestinationDuckdb","DestinationDynamodb","DestinationElasticsearch","DestinationElasticsearchMethod","DestinationElasticsearchSchemasMethod","DestinationFirebolt","DestinationFireboltLoadingMethod","DestinationFireboltMethod","DestinationFireboltSchemasMethod","DestinationFirestore","DestinationGcs","DestinationGcsCSVCommaSeparatedValues","DestinationGcsCodec","DestinationGcsCompression","DestinationGcsCompressionCodec","DestinationGcsCompressionType","DestinationGcsFormatType","DestinationGcsGZIP","DestinationGcsJSONLinesNewlineDelimitedJSON","DestinationGcsNoCompression","DestinationGcsOutputFormat","DestinationGcsParquetColumnarStorage","DestinationGcsSchemasCodec","DestinationGcsSchemasCompressionType","DestinationGcsSchemasFormatCodec","DestinationGcsSchemasFormatCompressionType","DestinationGcsSchemasFormatFormatType","DestinationGcsSchemasFormatOutputFormat1Codec","DestinationGcsSchemasFormatOutputFormatCodec","DestinationGcsSchemasFormatOutputFormatFormatType","DestinationGcsSchemasFormatType","DestinationGcsSchemasNoCompression","DestinationGoogleSheets","DestinationGoogleSheetsGoogleSheets","DestinationMilvus","DestinationMilvusAPIToken","DestinationMilvusAuthentication","DestinationMilvusAzureOpenAI","DestinationMilvusByMarkdownHeader","DestinationMilvusByProgrammingLanguage","DestinationMilvusBySeparator","DestinationMilvusCohere","DestinationMilvusEmbedding","DestinationMilvusFake","DestinationMilvusFieldNameMappingConfigModel","DestinationMilvusIndexing","DestinationMilvusLanguage","DestinationMilvusMode","DestinationMilvusOpenAI","DestinationMilvusOpenAICompatible","DestinationMilvusProcessingConfigModel","DestinationMilvusSchemasEmbeddingEmbedding5Mode","DestinationMilvusSchemasEmbeddingEmbeddingMode","DestinationMilvusSchemasEmbeddingMode","DestinationMilvusSchemasIndexingAuthAuthenticationMode","DestinationMilvusSchemasIndexingAuthMode","DestinationMilvusSchemasIndexingMode","DestinationMilvusSchemasM
ode","DestinationMilvusSchemasProcessingMode","DestinationMilvusSchemasProcessingTextSplitterMode","DestinationMilvusSchemasProcessingTextSplitterTextSplitterMode","DestinationMilvusTextSplitter","DestinationMilvusUsernamePassword","DestinationMongodb","DestinationMongodbAuthorization","DestinationMongodbInstance","DestinationMongodbNoTunnel","DestinationMongodbPasswordAuthentication","DestinationMongodbSSHKeyAuthentication","DestinationMongodbSSHTunnelMethod","DestinationMongodbSchemasAuthorization","DestinationMongodbSchemasInstance","DestinationMongodbSchemasTunnelMethod","DestinationMongodbSchemasTunnelMethodTunnelMethod","DestinationMongodbTunnelMethod","DestinationMssql","DestinationMssqlNoTunnel","DestinationMssqlPasswordAuthentication","DestinationMssqlSSHKeyAuthentication","DestinationMssqlSSHTunnelMethod","DestinationMssqlSchemasSslMethod","DestinationMssqlSchemasTunnelMethod","DestinationMssqlSchemasTunnelMethodTunnelMethod","DestinationMssqlSslMethod","DestinationMssqlTunnelMethod","DestinationMysql","DestinationMysqlNoTunnel","DestinationMysqlPasswordAuthentication","DestinationMysqlSSHKeyAuthentication","DestinationMysqlSSHTunnelMethod","DestinationMysqlSchemasTunnelMethod","DestinationMysqlSchemasTunnelMethodTunnelMethod","DestinationMysqlTunnelMethod","DestinationOracle","DestinationOracleNoTunnel","DestinationOraclePasswordAuthentication","DestinationOracleSSHKeyAuthentication","DestinationOracleSSHTunnelMethod","DestinationOracleSchemasTunnelMethod","DestinationOracleSchemasTunnelMethodTunnelMethod","DestinationOracleTunnelMethod","DestinationPatchRequest","DestinationPinecone","DestinationPineconeAzureOpenAI","DestinationPineconeByMarkdownHeader","DestinationPineconeByProgrammingLanguage","DestinationPineconeBySeparator","DestinationPineconeCohere","DestinationPineconeEmbedding","DestinationPineconeFake","DestinationPineconeFieldNameMappingConfigModel","DestinationPineconeIndexing","DestinationPineconeLanguage","DestinationPineconeMode","DestinationPineconeOpenAI","DestinationPineconeOpenAICompatible","DestinationPineconeProcessingConfigModel","DestinationPineconeSchemasEmbeddingEmbedding5Mode","DestinationPineconeSchemasEmbeddingEmbeddingMode","DestinationPineconeSchemasEmbeddingMode","DestinationPineconeSchemasMode","DestinationPineconeSchemasProcessingMode","DestinationPineconeSchemasProcessingTextSplitterMode","DestinationPineconeSchemasProcessingTextSplitterTextSplitterMode","DestinationPineconeTextSplitter","DestinationPostgres","DestinationPostgresMode","DestinationPostgresNoTunnel","DestinationPostgresPasswordAuthentication","DestinationPostgresSSHKeyAuthentication","DestinationPostgresSSHTunnelMethod","DestinationPostgresSchemasMode","DestinationPostgresSchemasSSLModeSSLModes6Mode","DestinationPostgresSchemasSSLModeSSLModesMode","DestinationPostgresSchemasSslModeMode","DestinationPostgresSchemasTunnelMethod","DestinationPostgresSchemasTunnelMethodTunnelMethod","DestinationPostgresTunnelMethod","DestinationPubsub","DestinationPutRequest","DestinationQdrant","DestinationQdrantAuthenticationMethod","DestinationQdrantAzureOpenAI","DestinationQdrantByMarkdownHeader","DestinationQdrantByProgrammingLanguage","DestinationQdrantBySeparator","DestinationQdrantCohere","DestinationQdrantEmbedding","DestinationQdrantFake","DestinationQdrantFieldNameMappingConfigModel","DestinationQdrantIndexing","DestinationQdrantLanguage","DestinationQdrantMode","DestinationQdrantNoAuth","DestinationQdrantOpenAI","DestinationQdrantOpenAICompatible","DestinationQdrantProcessingConfigModel","D
estinationQdrantSchemasEmbeddingEmbedding5Mode","DestinationQdrantSchemasEmbeddingEmbeddingMode","DestinationQdrantSchemasEmbeddingMode","DestinationQdrantSchemasIndexingAuthMethodMode","DestinationQdrantSchemasIndexingMode","DestinationQdrantSchemasMode","DestinationQdrantSchemasProcessingMode","DestinationQdrantSchemasProcessingTextSplitterMode","DestinationQdrantSchemasProcessingTextSplitterTextSplitterMode","DestinationQdrantTextSplitter","DestinationRedis","DestinationRedisDisable","DestinationRedisMode","DestinationRedisNoTunnel","DestinationRedisPasswordAuthentication","DestinationRedisSSHKeyAuthentication","DestinationRedisSSHTunnelMethod","DestinationRedisSSLModes","DestinationRedisSchemasMode","DestinationRedisSchemasTunnelMethod","DestinationRedisSchemasTunnelMethodTunnelMethod","DestinationRedisTunnelMethod","DestinationRedisVerifyFull","DestinationRedshift","DestinationRedshiftMethod","DestinationRedshiftNoTunnel","DestinationRedshiftPasswordAuthentication","DestinationRedshiftS3BucketRegion","DestinationRedshiftSSHKeyAuthentication","DestinationRedshiftSSHTunnelMethod","DestinationRedshiftSchemasTunnelMethod","DestinationRedshiftSchemasTunnelMethodTunnelMethod","DestinationRedshiftTunnelMethod","DestinationResponse","DestinationS3","DestinationS3AvroApacheAvro","DestinationS3Bzip2","DestinationS3CSVCommaSeparatedValues","DestinationS3Codec","DestinationS3Compression","DestinationS3CompressionCodec","DestinationS3CompressionType","DestinationS3Deflate","DestinationS3Flattening","DestinationS3FormatType","DestinationS3GZIP","DestinationS3Glue","DestinationS3GlueCompression","DestinationS3GlueCompressionType","DestinationS3GlueFormatType","DestinationS3GlueGZIP","DestinationS3GlueJSONLinesNewlineDelimitedJSON","DestinationS3GlueNoCompression","DestinationS3GlueOutputFormat","DestinationS3GlueS3BucketRegion","DestinationS3GlueSchemasCompressionType","DestinationS3JSONLinesNewlineDelimitedJSON","DestinationS3NoCompression","DestinationS3OutputFormat","DestinationS3ParquetColumnarStorage","DestinationS3S3BucketRegion","DestinationS3SchemasCodec","DestinationS3SchemasCompression","DestinationS3SchemasCompressionCodec","DestinationS3SchemasCompressionType","DestinationS3SchemasFlattening","DestinationS3SchemasFormatCodec","DestinationS3SchemasFormatCompressionType","DestinationS3SchemasFormatFormatType","DestinationS3SchemasFormatNoCompression","DestinationS3SchemasFormatOutputFormat3Codec","DestinationS3SchemasFormatOutputFormat3CompressionCodecCodec","DestinationS3SchemasFormatOutputFormatCodec","DestinationS3SchemasFormatOutputFormatCompressionType","DestinationS3SchemasFormatOutputFormatFormatType","DestinationS3SchemasFormatType","DestinationS3SchemasGZIP","DestinationS3SchemasNoCompression","DestinationS3Snappy","DestinationS3Xz","DestinationS3Zstandard","DestinationSftpJSON","DestinationSnowflake","DestinationSnowflakeAuthType","DestinationSnowflakeCortex","DestinationSnowflakeCortexAzureOpenAI","DestinationSnowflakeCortexByMarkdownHeader","DestinationSnowflakeCortexByProgrammingLanguage","DestinationSnowflakeCortexBySeparator","DestinationSnowflakeCortexCohere","DestinationSnowflakeCortexCredentials","DestinationSnowflakeCortexEmbedding","DestinationSnowflakeCortexFake","DestinationSnowflakeCortexFieldNameMappingConfigModel","DestinationSnowflakeCortexLanguage","DestinationSnowflakeCortexMode","DestinationSnowflakeCortexOpenAI","DestinationSnowflakeCortexOpenAICompatible","DestinationSnowflakeCortexProcessingConfigModel","DestinationSnowflakeCortexSchemasEmbeddingEmbedding5Mod
e","DestinationSnowflakeCortexSchemasEmbeddingEmbeddingMode","DestinationSnowflakeCortexSchemasEmbeddingMode","DestinationSnowflakeCortexSchemasMode","DestinationSnowflakeCortexSchemasProcessingMode","DestinationSnowflakeCortexSchemasProcessingTextSplitterMode","DestinationSnowflakeCortexSchemasProcessingTextSplitterTextSplitterMode","DestinationSnowflakeCortexTextSplitter","DestinationSnowflakeOAuth20","DestinationSnowflakeSchemasAuthType","DestinationSnowflakeSchemasCredentialsAuthType","DestinationSnowflakeSnowflake","DestinationTeradata","DestinationTeradataAllow","DestinationTeradataDisable","DestinationTeradataMode","DestinationTeradataPrefer","DestinationTeradataRequire","DestinationTeradataSSLModes","DestinationTeradataSchemasMode","DestinationTeradataSchemasSSLModeSSLModes5Mode","DestinationTeradataSchemasSSLModeSSLModes6Mode","DestinationTeradataSchemasSSLModeSSLModesMode","DestinationTeradataSchemasSslModeMode","DestinationTeradataVerifyCa","DestinationTeradataVerifyFull","DestinationTypesense","DestinationVectara","DestinationWeaviate","DestinationWeaviateAPIToken","DestinationWeaviateAuthentication","DestinationWeaviateAzureOpenAI","DestinationWeaviateByMarkdownHeader","DestinationWeaviateByProgrammingLanguage","DestinationWeaviateBySeparator","DestinationWeaviateCohere","DestinationWeaviateEmbedding","DestinationWeaviateFake","DestinationWeaviateFieldNameMappingConfigModel","DestinationWeaviateIndexing","DestinationWeaviateLanguage","DestinationWeaviateMode","DestinationWeaviateOpenAI","DestinationWeaviateOpenAICompatible","DestinationWeaviateProcessingConfigModel","DestinationWeaviateSchemasEmbeddingEmbedding5Mode","DestinationWeaviateSchemasEmbeddingEmbedding6Mode","DestinationWeaviateSchemasEmbeddingEmbedding7Mode","DestinationWeaviateSchemasEmbeddingEmbeddingMode","DestinationWeaviateSchemasEmbeddingMode","DestinationWeaviateSchemasIndexingAuthAuthenticationMode","DestinationWeaviateSchemasIndexingAuthMode","DestinationWeaviateSchemasIndexingMode","DestinationWeaviateSchemasMode","DestinationWeaviateSchemasProcessingMode","DestinationWeaviateSchemasProcessingTextSplitterMode","DestinationWeaviateSchemasProcessingTextSplitterTextSplitterMode","DestinationWeaviateTextSplitter","DestinationWeaviateUsernamePassword","DestinationYellowbrick","DestinationYellowbrickAllow","DestinationYellowbrickDisable","DestinationYellowbrickMode","DestinationYellowbrickNoTunnel","DestinationYellowbrickPasswordAuthentication","DestinationYellowbrickPrefer","DestinationYellowbrickRequire","DestinationYellowbrickSSHKeyAuthentication","DestinationYellowbrickSSHTunnelMethod","DestinationYellowbrickSSLModes","DestinationYellowbrickSchemasMode","DestinationYellowbrickSchemasSSLModeSSLModes5Mode","DestinationYellowbrickSchemasSSLModeSSLModes6Mode","DestinationYellowbrickSchemasSSLModeSSLModesMode","DestinationYellowbrickSchemasSslModeMode","DestinationYellowbrickSchemasTunnelMethod","DestinationYellowbrickSchemasTunnelMethodTunnelMethod","DestinationYellowbrickTunnelMethod","DestinationYellowbrickVerifyCa","DestinationYellowbrickVerifyFull","DestinationsResponse","DetailType","DetectChangesWithXminSystemColumn","DevNull","Dimension","DimensionsFilter","Disable","Disabled","DistanceMetric","Dixa","Dockerhub","DocumentFileTypeFormatExperimental","DomainRegionCode","DoubleValue","Dremio","Duckdb","DynamoDBRegion","Dynamodb","E2eTestCloud","EUBasedAccount","Elasticsearch","Emailoctopus","Embedding","Enabled","EncryptedTrustServerCertificate","EncryptedVerifyCertificate","Encryption","EncryptionAlgorithm",
"EncryptionMethod","EngagementWindowDays","Environment","ExchangeRates","Expression","ExternalTableViaS3","FacebookMarketing","FacebookMarketingCredentials","Fake","Faker","Fauna","FieldNameMappingConfigModel","File","FileBasedStreamConfig","FileFormat","Filetype","Filter","FilterAppliedWhileFetchingRecordsBasedOnAttributeKeyAndAttributeValueWhichWillBeAppendedOnTheRequestBody","FilterName","FilterType","Firebolt","Firestore","Flattening","Fleetio","Format","FormatType","FormatTypeWildcard","Freshcaller","Freshdesk","Freshsales","FromCSV","FromField","FromValue","GCSBucketRegion","GCSGoogleCloudStorage","GCSStaging","GCSTmpFilesAfterwardProcessing","GainsightPx","Gcs","GeographyEnum","GeographyEnumNoDefault","Getlago","Github","GithubCredentials","Gitlab","GitlabCredentials","Glassfrog","GlobalAccount","Gnews","Goldcast","GoogleAds","GoogleAdsCredentials","GoogleAnalyticsDataAPI","GoogleAnalyticsDataAPICredentials","GoogleCredentials","GoogleDirectory","GoogleDrive","GoogleDriveCredentials","GooglePagespeedInsights","GoogleSearchConsole","GoogleSheets","GoogleSheetsCredentials","GoogleWebfonts","Granularity","GranularityForGeoLocationRegion","GranularityForPeriodicReports","Greenhouse","Gridly","Gzip","HMACKey","HTTPSPublicWeb","Harvest","Header","HeaderDefinitionType","Hubplanner","Hubspot","HubspotCredentials","IAMRole","IAMUser","In","InListFilter","Indexing","InferenceType","InitiateOauthRequest","InsightConfig","Insightly","Instagram","Instance","Instatus","Int64Value","Intercom","InvalidCDCPositionBehaviorAdvanced","Ip2whois","IssuesStreamExpandWith","Iterable","JSONLinesNewlineDelimitedJSON","Jira","JobCreateRequest","JobResponse","JobStatusEnum","JobTypeEnum","JobsResponse","Jsonl","JsonlFormat","K6Cloud","KeyPairAuthentication","Klarna","Klaviyo","Kyve","LSNCommitBehaviour","Language","Launchdarkly","Lemlist","Level","LeverHiring","LeverHiringCredentials","LinkedinAds","LinkedinAdsCredentials","LinkedinPages","Linnworks","LoadingMethod","Local","LoginPassword","Lokalise","Mailchimp","MailchimpCredentials","Mailgun","MailjetSms","Marketo","Metabase","Method","MetricsFilter","MicrosoftOnedrive","MicrosoftOnedriveCredentials","MicrosoftSharepoint","MicrosoftSharepointCredentials","MicrosoftTeams","MicrosoftTeamsCredentials","Milvus","Mixpanel","MockCatalog","Mode","Monday","MondayCredentials","MongoDBAtlas","MongoDBAtlasReplicaSet","MongoDbInstanceType","Mongodb","MongodbInternalPoc","MongodbV2","Mssql","MultiSchema","MyHours","Mysql","NamespaceDefinitionEnum","NamespaceDefinitionEnumNoDefault","NativeNetworkEncryptionNNE","Netsuite","NoAuth","NoAuthentication","NoCompression","NoExternalEmbedding","NoTunnel","NonBreakingSchemaUpdatesBehaviorEnum","NonBreakingSchemaUpdatesBehaviorEnumNoDefault","NoneT","Normalization","NormalizationFlattening","NotExpression","Notion","NotionCredentials","Nullable","NumericFilter","Nytimes","OAuth","OAuth20","OAuth20Credentials","OAuth2AccessToken","OAuth2ConfidentialApplication","OAuthActorNames","OAuthCredentialsConfiguration","OauthAuthentication","Okta","Omnisend","Onesignal","OpenAI","OpenAICompatible","Operator","OptionTitle","OptionsList","OrGroup","Oracle","Orb","Orbit","OrganizationResponse","OrganizationsResponse","OriginDatacenterOfTheSurveyMonkeyAccount","OutbrainAmplify","OutputFormat","OutputFormatWildcard","Outreach","Parquet","ParquetColumnarStorage","ParquetFormat","ParsingStrategy","PasswordAuthentication","PaypalTransaction","Paystack","Pendo","PeriodUsedForMostPopularStreams","PermissionCreateRequest","PermissionResponse","Permissi
onResponseRead","PermissionScope","PermissionType","PermissionUpdateRequest","PermissionsResponse","Persistiq","PersonalAccessToken","PexelsAPI","Pinecone","Pinterest","PinterestCredentials","Pipedrive","PivotCategory","Plugin","Pocket","Pokeapi","PokemonName","PolygonStockAPI","Postgres","Posthog","Postmarkapp","Prefer","Preferred","Prestashop","PrivateApp","PrivateToken","Processing","ProcessingConfigModel","ProductCatalog","ProjectSecret","PublicPermissionType","Pubsub","Pypi","Qdrant","Qualaroo","Queries","Railz","ReadChangesUsingBinaryLogCDC","ReadChangesUsingChangeDataCaptureCDC","ReadChangesUsingWriteAheadLogCDC","Recharge","RecommendedManagedTables","Recreation","Recruitee","Recurly","Redis","Redshift","Region","ReplicaSet","ReportConfig","ReportName","ReportOptions","ReportRecordTypes","ReportingDataObject","Require","Required","Retently","RetentlyCredentials","RkiCovid","RoleBasedAuthentication","Rss","S3","S3AmazonWebServices","S3BucketRegion","S3Glue","SCPSecureCopyProtocol","SFTPSecureFileTransferProtocol","SQLInserts","SSHKeyAuthentication","SSHSecureShell","SSHTunnelMethod","SSLMethod","SSLModes","Salesforce","Salesloft","SandboxAccessToken","SapFieldglass","ScanChangesWithUserDefinedCursor","ScheduleTypeEnum","ScheduleTypeWithBasicEnum","SchemeBasicAuth","SchemeClientCredentials","SearchCriteria","SearchScope","Secoda","Security","SelectedFieldInfo","SelfManagedReplicaSet","Sendgrid","Sendinblue","Senseforce","Sentry","SerializationLibrary","ServiceAccount","ServiceAccountKey","ServiceAccountKeyAuthentication","ServiceKeyAuthentication","ServiceName","Sftp","SftpBulk","SftpJSON","ShareTypeUsedForMostPopularSharedStream","Shopify","ShopifyAuthorizationMethod","ShopifyCredentials","Shortio","SignInViaGoogleOAuth","SignInViaSlackOAuth","Silent","SingleSchema","SingleStoreAccessToken","Site","Slack","SlackCredentials","Smaily","Smartengage","Smartsheets","SmartsheetsCredentials","SnapchatMarketing","Snappy","Snowflake","SnowflakeConnection","SnowflakeCortex","SnowflakeCredentials","SonarCloud","SortBy","SourceAha","SourceAircall","SourceAirtable","SourceAirtableAirtable","SourceAirtableAuthMethod","SourceAirtableAuthentication","SourceAirtableOAuth20","SourceAirtableSchemasAuthMethod","SourceAmazonAds","SourceAmazonAdsAmazonAds","SourceAmazonAdsAuthType","SourceAmazonSellerPartner","SourceAmazonSellerPartnerAmazonSellerPartner","SourceAmazonSellerPartnerAuthType","SourceAmazonSqs","SourceAmazonSqsAWSRegion","SourceAmplitude","SourceApifyDataset","SourceAppfollow","SourceAsana","SourceAsanaAsana","SourceAsanaCredentialsTitle","SourceAsanaSchemasCredentialsTitle","SourceAuth0","SourceAuth0AuthenticationMethod","SourceAuth0SchemasAuthenticationMethod","SourceAuth0SchemasCredentialsAuthenticationMethod","SourceAwsCloudtrail","SourceAzureBlobStorage","SourceAzureBlobStorageAuthType","SourceAzureBlobStorageAuthentication","SourceAzureBlobStorageAzureBlobStorage","SourceAzureBlobStorageFiletype","SourceAzureBlobStorageHeaderDefinitionType","SourceAzureBlobStorageMode","SourceAzureBlobStorageSchemasAuthType","SourceAzureBlobStorageSchemasFiletype","SourceAzureBlobStorageSchemasHeaderDefinitionType","SourceAzureBlobStorageSchemasStreamsFiletype","SourceAzureBlobStorageSchemasStreamsFormatFiletype","SourceAzureBlobStorageSchemasStreamsFormatFormatFiletype","SourceAzureTable","SourceBambooHr","SourceBigquery","SourceBigqueryBigquery","SourceBingAds","SourceBingAdsBingAds","SourceBraintree","SourceBraintreeEnvironment","SourceBraze","SourceCart","SourceCartAuthType","SourceCartAuthorization
Method","SourceCartSchemasAuthType","SourceChargebee","SourceChartmogul","SourceClazar","SourceClickhouse","SourceClickhouseClickhouse","SourceClickhouseNoTunnel","SourceClickhousePasswordAuthentication","SourceClickhouseSSHKeyAuthentication","SourceClickhouseSSHTunnelMethod","SourceClickhouseSchemasTunnelMethod","SourceClickhouseSchemasTunnelMethodTunnelMethod","SourceClickhouseTunnelMethod","SourceClickupAPI","SourceClockify","SourceCloseCom","SourceCoda","SourceCoinAPI","SourceCoinmarketcap","SourceConfigcat","SourceConfiguration","SourceConfluence","SourceConvex","SourceConvexConvex","SourceCreateRequest","SourceDatadog","SourceDatadogDataSource","SourceDatascope","SourceDelighted","SourceDixa","SourceDockerhub","SourceDremio","SourceDynamodb","SourceDynamodbAuthType","SourceDynamodbCredentials","SourceDynamodbDynamodb","SourceDynamodbDynamodbRegion","SourceDynamodbSchemasAuthType","SourceE2eTestCloud","SourceE2eTestCloudSchemasType","SourceE2eTestCloudType","SourceEmailoctopus","SourceExchangeRates","SourceFacebookMarketing","SourceFacebookMarketingActionReportTime","SourceFacebookMarketingAuthType","SourceFacebookMarketingAuthentication","SourceFacebookMarketingFacebookMarketing","SourceFacebookMarketingSchemasAuthType","SourceFacebookMarketingValidEnums","SourceFaker","SourceFauna","SourceFaunaDeletionMode","SourceFaunaSchemasDeletionMode","SourceFile","SourceFileS3AmazonWebServices","SourceFileSchemasProviderStorage","SourceFileSchemasProviderStorageProvider6Storage","SourceFileSchemasProviderStorageProvider7Storage","SourceFileSchemasProviderStorageProviderStorage","SourceFileSchemasStorage","SourceFileStorage","SourceFirebolt","SourceFireboltFirebolt","SourceFleetio","SourceFreshcaller","SourceFreshdesk","SourceFreshsales","SourceGCSStreamConfig","SourceGainsightPx","SourceGcs","SourceGcsAutogenerated","SourceGcsCSVFormat","SourceGcsCSVHeaderDefinition","SourceGcsFiletype","SourceGcsFormat","SourceGcsFromCSV","SourceGcsGcs","SourceGcsHeaderDefinitionType","SourceGcsInferenceType","SourceGcsSchemasHeaderDefinitionType","SourceGcsSchemasStreamsHeaderDefinitionType","SourceGcsUserProvided","SourceGcsValidationPolicy","SourceGetlago","SourceGithub","SourceGithubAuthentication","SourceGithubGithub","SourceGithubOptionTitle","SourceGithubPersonalAccessToken","SourceGitlab","SourceGitlabAuthType","SourceGitlabAuthorizationMethod","SourceGitlabGitlab","SourceGitlabOAuth20","SourceGitlabSchemasAuthType","SourceGlassfrog","SourceGnews","SourceGoldcast","SourceGoogleAds","SourceGoogleAdsGoogleAds","SourceGoogleAnalyticsDataAPI","SourceGoogleAnalyticsDataAPIAndGroup","SourceGoogleAnalyticsDataAPIAuthType","SourceGoogleAnalyticsDataAPIBetweenFilter","SourceGoogleAnalyticsDataAPICredentials","SourceGoogleAnalyticsDataAPICustomReportConfig","SourceGoogleAnalyticsDataAPIDisabled","SourceGoogleAnalyticsDataAPIDoubleValue","SourceGoogleAnalyticsDataAPIEnabled","SourceGoogleAnalyticsDataAPIExpression","SourceGoogleAnalyticsDataAPIFilter","SourceGoogleAnalyticsDataAPIFilterName","SourceGoogleAnalyticsDataAPIFilterType","SourceGoogleAnalyticsDataAPIFromValue","SourceGoogleAnalyticsDataAPIGoogleAnalyticsDataAPI","SourceGoogleAnalyticsDataAPIGranularity","SourceGoogleAnalyticsDataAPIInListFilter","SourceGoogleAnalyticsDataAPIInt64Value","SourceGoogleAnalyticsDataAPINotExpression","SourceGoogleAnalyticsDataAPINumericFilter","SourceGoogleAnalyticsDataAPIOrGroup","SourceGoogleAnalyticsDataAPISchemasAuthType","SourceGoogleAnalyticsDataAPISchemasBetweenFilter","SourceGoogleAnalyticsDataAPISchemasCustomReport
sArrayBetweenFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ToValueValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Filter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ToValueValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2FilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFil
ter3ExpressionDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ToValueValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterToValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayEnabled","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayExpression","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterName","Sou
rceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFromValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterExpression","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFromValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ToValueValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsAr
rayMetricFilterMetricsFilter2ExpressionsFilterFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2FilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ToValueValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Filter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilte
r3ValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilter4ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterExpression","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFromValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterToValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterToValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayToValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValueType","SourceGoogleAnalyticsDataAPISchemasDoubleValue","SourceGoogleAnalyticsDataAPISchemasEnabled","SourceGoogleAnalyticsDataAPISchemasExpression","SourceGoogleAnalyticsDataAPISchemasFilter","SourceGoogleAnalyticsDataAPISchemasFilterName","SourceGoogleAnalyticsDataAPISchemasFilterType","SourceGoogleAnalyticsDataAPISchemasFromValue","SourceGoogleAnalyticsDataAPISchemasInListFilter","SourceGoogleAnalyticsDataAPISchemasInt64Value","SourceGoogleAnalyticsDataAPISchemasNumer
icFilter","SourceGoogleAnalyticsDataAPISchemasStringFilter","SourceGoogleAnalyticsDataAPISchemasToValue","SourceGoogleAnalyticsDataAPISchemasValidEnums","SourceGoogleAnalyticsDataAPISchemasValue","SourceGoogleAnalyticsDataAPISchemasValueType","SourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication","SourceGoogleAnalyticsDataAPIStringFilter","SourceGoogleAnalyticsDataAPIToValue","SourceGoogleAnalyticsDataAPIValidEnums","SourceGoogleAnalyticsDataAPIValue","SourceGoogleAnalyticsDataAPIValueType","SourceGoogleDirectory","SourceGoogleDirectoryCredentialsTitle","SourceGoogleDirectoryGoogleCredentials","SourceGoogleDirectorySchemasCredentialsTitle","SourceGoogleDrive","SourceGoogleDriveAuthType","SourceGoogleDriveAuthenticateViaGoogleOAuth","SourceGoogleDriveAuthentication","SourceGoogleDriveAutogenerated","SourceGoogleDriveAvroFormat","SourceGoogleDriveCSVFormat","SourceGoogleDriveCSVHeaderDefinition","SourceGoogleDriveDocumentFileTypeFormatExperimental","SourceGoogleDriveFileBasedStreamConfig","SourceGoogleDriveFiletype","SourceGoogleDriveFormat","SourceGoogleDriveFromCSV","SourceGoogleDriveGoogleDrive","SourceGoogleDriveHeaderDefinitionType","SourceGoogleDriveJsonlFormat","SourceGoogleDriveLocal","SourceGoogleDriveMode","SourceGoogleDriveParquetFormat","SourceGoogleDriveParsingStrategy","SourceGoogleDriveProcessing","SourceGoogleDriveSchemasAuthType","SourceGoogleDriveSchemasFiletype","SourceGoogleDriveSchemasHeaderDefinitionType","SourceGoogleDriveSchemasStreamsFiletype","SourceGoogleDriveSchemasStreamsFormatFiletype","SourceGoogleDriveSchemasStreamsFormatFormatFiletype","SourceGoogleDriveSchemasStreamsHeaderDefinitionType","SourceGoogleDriveServiceAccountKeyAuthentication","SourceGoogleDriveUserProvided","SourceGoogleDriveValidationPolicy","SourceGooglePagespeedInsights","SourceGoogleSearchConsole","SourceGoogleSearchConsoleAuthType","SourceGoogleSearchConsoleCustomReportConfig","SourceGoogleSearchConsoleGoogleSearchConsole","SourceGoogleSearchConsoleOAuth","SourceGoogleSearchConsoleSchemasAuthType","SourceGoogleSearchConsoleServiceAccountKeyAuthentication","SourceGoogleSearchConsoleValidEnums","SourceGoogleSheets","SourceGoogleSheetsAuthType","SourceGoogleSheetsAuthenticateViaGoogleOAuth","SourceGoogleSheetsAuthentication","SourceGoogleSheetsGoogleSheets","SourceGoogleSheetsSchemasAuthType","SourceGoogleSheetsServiceAccountKeyAuthentication","SourceGoogleWebfonts","SourceGreenhouse","SourceGridly","SourceHarvest","SourceHarvestAuthType","SourceHarvestAuthenticateWithPersonalAccessToken","SourceHarvestAuthenticationMechanism","SourceHarvestSchemasAuthType","SourceHubplanner","SourceHubspot","SourceHubspotAuthType","SourceHubspotAuthentication","SourceHubspotHubspot","SourceHubspotOAuth","SourceHubspotSchemasAuthType","SourceInsightly","SourceInstagram","SourceInstagramInstagram","SourceInstatus","SourceIntercom","SourceIntercomIntercom","SourceIp2whois","SourceIterable","SourceJira","SourceK6Cloud","SourceKlarna","SourceKlarnaRegion","SourceKlaviyo","SourceKyve","SourceLaunchdarkly","SourceLemlist","SourceLeverHiring","SourceLeverHiringAuthType","SourceLeverHiringAuthenticationMechanism","SourceLeverHiringEnvironment","SourceLeverHiringLeverHiring","SourceLeverHiringSchemasAuthType","SourceLinkedinAds","SourceLinkedinAdsAuthMethod","SourceLinkedinAdsAuthentication","SourceLinkedinAdsLinkedinAds","SourceLinkedinAdsOAuth20","SourceLinkedinAdsSchemasAuthMethod","SourceLinkedinPages","SourceLinkedinPagesAccessToken","SourceLinkedinPagesAuthMethod","SourceLinkedinPagesAuthentication","SourceL
inkedinPagesOAuth20","SourceLinkedinPagesSchemasAuthMethod","SourceLinnworks","SourceLokalise","SourceMailchimp","SourceMailchimpAuthType","SourceMailchimpAuthentication","SourceMailchimpMailchimp","SourceMailchimpOAuth20","SourceMailchimpSchemasAuthType","SourceMailgun","SourceMailjetSms","SourceMarketo","SourceMetabase","SourceMicrosoftOnedrive","SourceMicrosoftOnedriveAuthType","SourceMicrosoftOnedriveAuthentication","SourceMicrosoftOnedriveAutogenerated","SourceMicrosoftOnedriveAvroFormat","SourceMicrosoftOnedriveCSVFormat","SourceMicrosoftOnedriveCSVHeaderDefinition","SourceMicrosoftOnedriveFileBasedStreamConfig","SourceMicrosoftOnedriveFiletype","SourceMicrosoftOnedriveFormat","SourceMicrosoftOnedriveFromCSV","SourceMicrosoftOnedriveHeaderDefinitionType","SourceMicrosoftOnedriveJsonlFormat","SourceMicrosoftOnedriveLocal","SourceMicrosoftOnedriveMicrosoftOnedrive","SourceMicrosoftOnedriveMode","SourceMicrosoftOnedriveParquetFormat","SourceMicrosoftOnedriveParsingStrategy","SourceMicrosoftOnedriveProcessing","SourceMicrosoftOnedriveSchemasAuthType","SourceMicrosoftOnedriveSchemasFiletype","SourceMicrosoftOnedriveSchemasHeaderDefinitionType","SourceMicrosoftOnedriveSchemasStreamsFiletype","SourceMicrosoftOnedriveSchemasStreamsFormatFiletype","SourceMicrosoftOnedriveSchemasStreamsFormatFormatFiletype","SourceMicrosoftOnedriveSchemasStreamsHeaderDefinitionType","SourceMicrosoftOnedriveUserProvided","SourceMicrosoftOnedriveValidationPolicy","SourceMicrosoftSharepoint","SourceMicrosoftSharepointAuthType","SourceMicrosoftSharepointAuthenticateViaMicrosoftOAuth","SourceMicrosoftSharepointAuthentication","SourceMicrosoftSharepointAutogenerated","SourceMicrosoftSharepointAvroFormat","SourceMicrosoftSharepointCSVFormat","SourceMicrosoftSharepointCSVHeaderDefinition","SourceMicrosoftSharepointFileBasedStreamConfig","SourceMicrosoftSharepointFiletype","SourceMicrosoftSharepointFormat","SourceMicrosoftSharepointFromCSV","SourceMicrosoftSharepointHeaderDefinitionType","SourceMicrosoftSharepointJsonlFormat","SourceMicrosoftSharepointLocal","SourceMicrosoftSharepointMicrosoftSharepoint","SourceMicrosoftSharepointMode","SourceMicrosoftSharepointParquetFormat","SourceMicrosoftSharepointParsingStrategy","SourceMicrosoftSharepointProcessing","SourceMicrosoftSharepointSchemasAuthType","SourceMicrosoftSharepointSchemasFiletype","SourceMicrosoftSharepointSchemasHeaderDefinitionType","SourceMicrosoftSharepointSchemasStreamsFiletype","SourceMicrosoftSharepointSchemasStreamsFormatFiletype","SourceMicrosoftSharepointSchemasStreamsFormatFormatFiletype","SourceMicrosoftSharepointSchemasStreamsHeaderDefinitionType","SourceMicrosoftSharepointSearchScope","SourceMicrosoftSharepointServiceKeyAuthentication","SourceMicrosoftSharepointUnstructuredDocumentFormat","SourceMicrosoftSharepointUserProvided","SourceMicrosoftSharepointValidationPolicy","SourceMicrosoftTeams","SourceMicrosoftTeamsAuthType","SourceMicrosoftTeamsAuthenticationMechanism","SourceMicrosoftTeamsMicrosoftTeams","SourceMicrosoftTeamsSchemasAuthType","SourceMixpanel","SourceMixpanelOptionTitle","SourceMixpanelRegion","SourceMixpanelSchemasOptionTitle","SourceMonday","SourceMondayAuthType","SourceMondayAuthorizationMethod","SourceMondayMonday","SourceMondayOAuth20","SourceMondaySchemasAuthType","SourceMongodbInternalPoc","SourceMongodbV2","SourceMongodbV2ClusterType","SourceMongodbV2SchemasClusterType","SourceMssql","SourceMssqlEncryptedTrustServerCertificate","SourceMssqlEncryptedVerifyCertificate","SourceMssqlInvalidCDCPositionBehaviorAdvanced","SourceMs
sqlMethod","SourceMssqlMssql","SourceMssqlNoTunnel","SourceMssqlPasswordAuthentication","SourceMssqlSSHKeyAuthentication","SourceMssqlSSHTunnelMethod","SourceMssqlSSLMethod","SourceMssqlSchemasMethod","SourceMssqlSchemasSSLMethodSSLMethodSSLMethod","SourceMssqlSchemasSslMethod","SourceMssqlSchemasSslMethodSslMethod","SourceMssqlSchemasTunnelMethod","SourceMssqlSchemasTunnelMethodTunnelMethod","SourceMssqlTunnelMethod","SourceMyHours","SourceMysql","SourceMysqlInvalidCDCPositionBehaviorAdvanced","SourceMysqlMethod","SourceMysqlMode","SourceMysqlMysql","SourceMysqlNoTunnel","SourceMysqlPasswordAuthentication","SourceMysqlSSHKeyAuthentication","SourceMysqlSSHTunnelMethod","SourceMysqlSSLModes","SourceMysqlScanChangesWithUserDefinedCursor","SourceMysqlSchemasMethod","SourceMysqlSchemasMode","SourceMysqlSchemasSSLModeSSLModesMode","SourceMysqlSchemasSslModeMode","SourceMysqlSchemasTunnelMethod","SourceMysqlSchemasTunnelMethodTunnelMethod","SourceMysqlTunnelMethod","SourceMysqlUpdateMethod","SourceMysqlVerifyCA","SourceNetsuite","SourceNotion","SourceNotionAccessToken","SourceNotionAuthType","SourceNotionAuthenticationMethod","SourceNotionNotion","SourceNotionOAuth20","SourceNotionSchemasAuthType","SourceNytimes","SourceOkta","SourceOktaAPIToken","SourceOktaAuthType","SourceOktaAuthorizationMethod","SourceOktaOAuth20","SourceOktaSchemasAuthType","SourceOmnisend","SourceOnesignal","SourceOracle","SourceOracleConnectionType","SourceOracleEncryptionMethod","SourceOracleNoTunnel","SourceOracleOracle","SourceOraclePasswordAuthentication","SourceOracleSSHKeyAuthentication","SourceOracleSSHTunnelMethod","SourceOracleSchemasTunnelMethod","SourceOracleSchemasTunnelMethodTunnelMethod","SourceOracleTunnelMethod","SourceOrb","SourceOrbit","SourceOutbrainAmplify","SourceOutbrainAmplifyAccessToken","SourceOutbrainAmplifyAuthenticationMethod","SourceOutbrainAmplifyUsernamePassword","SourceOutreach","SourcePatchRequest","SourcePaypalTransaction","SourcePaystack","SourcePendo","SourcePersistiq","SourcePexelsAPI","SourcePinterest","SourcePinterestAuthMethod","SourcePinterestLevel","SourcePinterestPinterest","SourcePinterestSchemasValidEnums","SourcePinterestValidEnums","SourcePipedrive","SourcePocket","SourcePocketSortBy","SourcePokeapi","SourcePolygonStockAPI","SourcePostgres","SourcePostgresAllow","SourcePostgresDisable","SourcePostgresInvalidCDCPositionBehaviorAdvanced","SourcePostgresMethod","SourcePostgresMode","SourcePostgresNoTunnel","SourcePostgresPasswordAuthentication","SourcePostgresPostgres","SourcePostgresPrefer","SourcePostgresRequire","SourcePostgresSSHKeyAuthentication","SourcePostgresSSHTunnelMethod","SourcePostgresSSLModes","SourcePostgresScanChangesWithUserDefinedCursor","SourcePostgresSchemasMethod","SourcePostgresSchemasMode","SourcePostgresSchemasReplicationMethodMethod","SourcePostgresSchemasSSLModeSSLModes5Mode","SourcePostgresSchemasSSLModeSSLModes6Mode","SourcePostgresSchemasSSLModeSSLModesMode","SourcePostgresSchemasSslModeMode","SourcePostgresSchemasTunnelMethod","SourcePostgresSchemasTunnelMethodTunnelMethod","SourcePostgresTunnelMethod","SourcePostgresUpdateMethod","SourcePostgresVerifyCa","SourcePostgresVerifyFull","SourcePosthog","SourcePostmarkapp","SourcePrestashop","SourcePutRequest","SourcePypi","SourceQualaroo","SourceRailz","SourceRecharge","SourceRecreation","SourceRecruitee","SourceRecurly","SourceRedshift","SourceRedshiftRedshift","SourceResponse","SourceRetently","SourceRetentlyAuthType","SourceRetentlyAuthenticationMechanism","SourceRetentlyRetently","SourceRetentlySchema
sAuthType","SourceRkiCovid","SourceRss","SourceS3","SourceS3Autogenerated","SourceS3AvroFormat","SourceS3CSVFormat","SourceS3CSVHeaderDefinition","SourceS3FileBasedStreamConfig","SourceS3FileFormat","SourceS3Filetype","SourceS3Format","SourceS3FromCSV","SourceS3HeaderDefinitionType","SourceS3InferenceType","SourceS3JsonlFormat","SourceS3Local","SourceS3Mode","SourceS3ParquetFormat","SourceS3ParsingStrategy","SourceS3Processing","SourceS3S3","SourceS3SchemasFiletype","SourceS3SchemasFormatFiletype","SourceS3SchemasHeaderDefinitionType","SourceS3SchemasStreamsFiletype","SourceS3SchemasStreamsFormatFiletype","SourceS3SchemasStreamsFormatFormat4Filetype","SourceS3SchemasStreamsFormatFormat5Filetype","SourceS3SchemasStreamsFormatFormatFiletype","SourceS3SchemasStreamsHeaderDefinitionType","SourceS3UnstructuredDocumentFormat","SourceS3UserProvided","SourceS3ValidationPolicy","SourceSalesforce","SourceSalesforceSalesforce","SourceSalesloft","SourceSalesloftAuthType","SourceSalesloftCredentials","SourceSalesloftSchemasAuthType","SourceSapFieldglass","SourceSecoda","SourceSendgrid","SourceSendinblue","SourceSenseforce","SourceSentry","SourceSftp","SourceSftpAuthMethod","SourceSftpAuthentication","SourceSftpBulk","SourceSftpBulkAuthType","SourceSftpBulkAuthentication","SourceSftpBulkAutogenerated","SourceSftpBulkAvroFormat","SourceSftpBulkCSVFormat","SourceSftpBulkCSVHeaderDefinition","SourceSftpBulkDocumentFileTypeFormatExperimental","SourceSftpBulkFileBasedStreamConfig","SourceSftpBulkFiletype","SourceSftpBulkFormat","SourceSftpBulkFromCSV","SourceSftpBulkHeaderDefinitionType","SourceSftpBulkInferenceType","SourceSftpBulkJsonlFormat","SourceSftpBulkLocal","SourceSftpBulkMode","SourceSftpBulkParquetFormat","SourceSftpBulkParsingStrategy","SourceSftpBulkProcessing","SourceSftpBulkSchemasAuthType","SourceSftpBulkSchemasFiletype","SourceSftpBulkSchemasHeaderDefinitionType","SourceSftpBulkSchemasMode","SourceSftpBulkSchemasStreamsFiletype","SourceSftpBulkSchemasStreamsFormatFiletype","SourceSftpBulkSchemasStreamsFormatFormatFiletype","SourceSftpBulkSchemasStreamsHeaderDefinitionType","SourceSftpBulkUserProvided","SourceSftpBulkValidationPolicy","SourceSftpPasswordAuthentication","SourceSftpSSHKeyAuthentication","SourceSftpSchemasAuthMethod","SourceShopify","SourceShopifyAuthMethod","SourceShopifyOAuth20","SourceShopifySchemasAuthMethod","SourceShopifyShopify","SourceShortio","SourceSlack","SourceSlackAPIToken","SourceSlackAuthenticationMechanism","SourceSlackOptionTitle","SourceSlackSchemasOptionTitle","SourceSlackSlack","SourceSmaily","SourceSmartengage","SourceSmartsheets","SourceSmartsheetsAuthType","SourceSmartsheetsAuthorizationMethod","SourceSmartsheetsOAuth20","SourceSmartsheetsSchemasAuthType","SourceSmartsheetsSmartsheets","SourceSnapchatMarketing","SourceSnapchatMarketingSnapchatMarketing","SourceSnowflake","SourceSnowflakeAuthType","SourceSnowflakeAuthorizationMethod","SourceSnowflakeKeyPairAuthentication","SourceSnowflakeOAuth20","SourceSnowflakeSchemasAuthType","SourceSnowflakeSchemasCredentialsAuthType","SourceSnowflakeSnowflake","SourceSnowflakeUsernameAndPassword","SourceSonarCloud","SourceSpacexAPI","SourceSquare","SourceSquareAPIKey","SourceSquareAuthType","SourceSquareAuthentication","SourceSquareSchemasAuthType","SourceSquareSquare","SourceStrava","SourceStravaAuthType","SourceStravaStrava","SourceStripe","SourceSurveySparrow","SourceSurveySparrowURLBase","SourceSurveymonkey","SourceSurveymonkeyAuthMethod","SourceSurveymonkeySurveymonkey","SourceTempo","SourceTheGuardianAPI","SourceT
iktokMarketing","SourceTiktokMarketingAuthType","SourceTiktokMarketingAuthenticationMethod","SourceTiktokMarketingOAuth20","SourceTiktokMarketingSchemasAuthType","SourceTiktokMarketingTiktokMarketing","SourceTrello","SourceTrustpilot","SourceTrustpilotAPIKey","SourceTrustpilotAuthType","SourceTrustpilotAuthorizationMethod","SourceTrustpilotOAuth20","SourceTrustpilotSchemasAuthType","SourceTvmazeSchedule","SourceTwilio","SourceTwilioTaskrouter","SourceTwitter","SourceTypeform","SourceTypeformAuthType","SourceTypeformAuthorizationMethod","SourceTypeformOAuth20","SourceTypeformPrivateToken","SourceTypeformSchemasAuthType","SourceTypeformTypeform","SourceUsCensus","SourceVantage","SourceWebflow","SourceWhiskyHunter","SourceWikipediaPageviews","SourceWoocommerce","SourceXkcd","SourceYandexMetrica","SourceYotpo","SourceYoutubeAnalytics","SourceYoutubeAnalyticsYoutubeAnalytics","SourceZendeskChat","SourceZendeskChatAccessToken","SourceZendeskChatAuthorizationMethod","SourceZendeskChatCredentials","SourceZendeskChatOAuth20","SourceZendeskChatSchemasCredentials","SourceZendeskChatZendeskChat","SourceZendeskSell","SourceZendeskSunshine","SourceZendeskSunshineAPIToken","SourceZendeskSunshineAuthMethod","SourceZendeskSunshineAuthorizationMethod","SourceZendeskSunshineOAuth20","SourceZendeskSunshineSchemasAuthMethod","SourceZendeskSunshineZendeskSunshine","SourceZendeskSupport","SourceZendeskSupportAPIToken","SourceZendeskSupportAuthentication","SourceZendeskSupportCredentials","SourceZendeskSupportOAuth20","SourceZendeskSupportSchemasCredentials","SourceZendeskSupportZendeskSupport","SourceZendeskTalk","SourceZendeskTalkAPIToken","SourceZendeskTalkAuthType","SourceZendeskTalkAuthentication","SourceZendeskTalkOAuth20","SourceZendeskTalkSchemasAuthType","SourceZendeskTalkZendeskTalk","SourceZenloop","SourceZohoCrm","SourceZohoCrmEnvironment","SourceZoom","SourcesResponse","SpacexAPI","Square","SquareCredentials","StandaloneMongoDbInstance","StandardInserts","State","StateFilter","Status","Storage","StorageProvider","Strategies","Strava","StreamConfiguration","StreamConfigurations","StreamProperties","StreamsCriteria","StringFilter","Stripe","SurveyMonkeyAuthorizationMethod","SurveySparrow","Surveymonkey","SurveymonkeyCredentials","SwipeUpAttributionWindow","SystemIDSID","TLSEncryptedVerifyCertificate","Tempo","Teradata","TestDestination","TestDestinationType","TextSplitter","TheGuardianAPI","TiktokMarketing","TiktokMarketingCredentials","TimeGranularity","TimeGranularityType","ToValue","TopHeadlinesTopic","TransformationQueryRunType","Trello","Trustpilot","TunnelMethod","TvmazeSchedule","Twilio","TwilioTaskrouter","Twitter","Type","Typeform","TypeformCredentials","Typesense","URLBase","Unencrypted","UnexpectedFieldBehavior","UnstructuredDocumentFormat","UpdateMethod","UploadingMethod","UsCensus","UserProvided","UserResponse","UsernameAndPassword","UsernamePassword","UsersResponse","ValidActionBreakdowns","ValidAdSetStatuses","ValidAdStatuses","ValidBreakdowns","ValidCampaignStatuses","ValidationPolicy","Validenums","Value","ValueType","Vantage","Vectara","VerifyCa","VerifyFull","VerifyIdentity","ViaAPI","ViewAttributionWindow","ViewWindowDays","Weaviate","Webflow","WhiskyHunter","WikipediaPageviews","Woocommerce","WorkspaceCreateRequest","WorkspaceOAuthCredentialsRequest","WorkspaceResponse","WorkspaceUpdateRequest","WorkspacesResponse","Xkcd","Xz","YandexMetrica","Yellowbrick","Yotpo","YoutubeAnalytics","YoutubeAnalyticsCredentials","ZendeskChat","ZendeskChatCredentials","ZendeskSell","ZendeskSunshine",
"ZendeskSunshineCredentials","ZendeskSupport","ZendeskSupportCredentials","ZendeskTalk","ZendeskTalkCredentials","Zenloop","ZohoCRMEdition","ZohoCrm","Zoom","Zstandard"] diff --git a/src/airbyte_api/models/destination_astra.py b/src/airbyte_api/models/destination_astra.py index dc1f514e..7094a706 100644 --- a/src/airbyte_api/models/destination_astra.py +++ b/src/airbyte_api/models/destination_astra.py @@ -91,8 +91,6 @@ class OpenAI: -Embedding = Union['OpenAI', 'Cohere', 'Fake', 'AzureOpenAI', 'OpenAICompatible'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -187,8 +185,6 @@ class BySeparator: -TextSplitter = Union['BySeparator', 'ByMarkdownHeader', 'ByProgrammingLanguage'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -233,3 +229,7 @@ class DestinationAstra: r"""Do not store the text that gets embedded along with the vector and the metadata in the destination. If set to true, only the vector and the metadata will be stored - in this case raw text for LLM use cases needs to be retrieved from another source.""" + +Embedding = Union[OpenAI, Cohere, Fake, AzureOpenAI, OpenAICompatible] + +TextSplitter = Union[BySeparator, ByMarkdownHeader, ByProgrammingLanguage] diff --git a/src/airbyte_api/models/destination_aws_datalake.py b/src/airbyte_api/models/destination_aws_datalake.py index 54610eae..5b8c9e7f 100644 --- a/src/airbyte_api/models/destination_aws_datalake.py +++ b/src/airbyte_api/models/destination_aws_datalake.py @@ -41,8 +41,6 @@ class IAMRole: -AuthenticationMode = Union['IAMRole', 'IAMUser'] - class AwsDatalake(str, Enum): AWS_DATALAKE = 'aws-datalake' @@ -89,8 +87,6 @@ class JSONLinesNewlineDelimitedJSON: -OutputFormatWildcard = Union['JSONLinesNewlineDelimitedJSON', 'ParquetColumnarStorage'] - class ChooseHowToPartitionData(str, Enum): r"""Partition data by cursor fields when a cursor field is a date""" @@ -171,3 +167,7 @@ class DestinationAwsDatalake: r"""The region of the S3 bucket. 
See here for all region codes.""" + +AuthenticationMode = Union[IAMRole, IAMUser] + +OutputFormatWildcard = Union[JSONLinesNewlineDelimitedJSON, ParquetColumnarStorage] diff --git a/src/airbyte_api/models/destination_azure_blob_storage.py b/src/airbyte_api/models/destination_azure_blob_storage.py index 9a85fb44..72fb38eb 100644 --- a/src/airbyte_api/models/destination_azure_blob_storage.py +++ b/src/airbyte_api/models/destination_azure_blob_storage.py @@ -19,6 +19,8 @@ class DestinationAzureBlobStorageFormatType(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON: + file_extension: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('file_extension'), 'exclude': lambda f: f is None }}) + r"""Add file extensions to the output file.""" FORMAT_TYPE: Final[DestinationAzureBlobStorageFormatType] = dataclasses.field(default=DestinationAzureBlobStorageFormatType.JSONL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type') }}) @@ -37,14 +39,14 @@ class FormatType(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class CSVCommaSeparatedValues: + file_extension: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('file_extension'), 'exclude': lambda f: f is None }}) + r"""Add file extensions to the output file.""" flattening: Optional[NormalizationFlattening] = dataclasses.field(default=NormalizationFlattening.NO_FLATTENING, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('flattening'), 'exclude': lambda f: f is None }}) r"""Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details.""" FORMAT_TYPE: Final[FormatType] = dataclasses.field(default=FormatType.CSV, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type') }}) -OutputFormat = Union['CSVCommaSeparatedValues', 'DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -66,3 +68,5 @@ class DestinationAzureBlobStorage: DESTINATION_TYPE: Final[DestinationAzureBlobStorageAzureBlobStorage] = dataclasses.field(default=DestinationAzureBlobStorageAzureBlobStorage.AZURE_BLOB_STORAGE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }}) + +OutputFormat = Union[CSVCommaSeparatedValues, DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON] diff --git a/src/airbyte_api/models/destination_bigquery.py b/src/airbyte_api/models/destination_bigquery.py index 7562f327..67241e3a 100644 --- a/src/airbyte_api/models/destination_bigquery.py +++ b/src/airbyte_api/models/destination_bigquery.py @@ -88,8 +88,6 @@ class DestinationBigqueryHMACKey: -Credential = Union['DestinationBigqueryHMACKey'] - class GCSTmpFilesAfterwardProcessing(str, Enum): r"""This upload method is supposed to temporary store records in GCS bucket. By this select you can chose if these records should be removed from GCS when migration has finished. 
The default \\"Delete all tmp files from GCS\\" value is used if not set explicitly.""" @@ -117,8 +115,6 @@ class GCSStaging: -LoadingMethod = Union['GCSStaging', 'StandardInserts'] - class TransformationQueryRunType(str, Enum): r"""Interactive run type means that the query is executed as soon as possible, and these queries count towards concurrent rate limit and daily limit. Read more about interactive run type here. Batch queries are queued and started as soon as idle resources are available in the BigQuery shared resource pool, which usually occurs within a few minutes. Batch queries don’t count towards your concurrent rate limit. Read more about batch queries here. The default \\"interactive\\" value is used if not set explicitly.""" @@ -150,3 +146,7 @@ class DestinationBigquery: r"""Interactive run type means that the query is executed as soon as possible, and these queries count towards concurrent rate limit and daily limit. Read more about interactive run type here. Batch queries are queued and started as soon as idle resources are available in the BigQuery shared resource pool, which usually occurs within a few minutes. Batch queries don’t count towards your concurrent rate limit. Read more about batch queries here. The default \\"interactive\\" value is used if not set explicitly.""" + +Credential = Union[DestinationBigqueryHMACKey] + +LoadingMethod = Union[GCSStaging, StandardInserts] diff --git a/src/airbyte_api/models/destination_clickhouse.py b/src/airbyte_api/models/destination_clickhouse.py index b0c87cbf..348f237c 100644 --- a/src/airbyte_api/models/destination_clickhouse.py +++ b/src/airbyte_api/models/destination_clickhouse.py @@ -69,8 +69,6 @@ class NoTunnel: -SSHTunnelMethod = Union['NoTunnel', 'SSHKeyAuthentication', 'PasswordAuthentication'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -94,3 +92,5 @@ class DestinationClickhouse: r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.""" + +SSHTunnelMethod = Union[NoTunnel, SSHKeyAuthentication, PasswordAuthentication] diff --git a/src/airbyte_api/models/destination_databricks.py b/src/airbyte_api/models/destination_databricks.py index cd6447d3..71cd2813 100644 --- a/src/airbyte_api/models/destination_databricks.py +++ b/src/airbyte_api/models/destination_databricks.py @@ -93,8 +93,6 @@ class RecommendedManagedTables: -DataSource = Union['RecommendedManagedTables', 'AmazonS3', 'DestinationDatabricksAzureBlobStorage'] - class Databricks(str, Enum): DATABRICKS = 'databricks' @@ -126,3 +124,5 @@ class DestinationDatabricks: r"""The default schema tables are written. 
If not specified otherwise, the \\"default\\" will be used.""" + +DataSource = Union[RecommendedManagedTables, AmazonS3, DestinationDatabricksAzureBlobStorage] diff --git a/src/airbyte_api/models/destination_dev_null.py b/src/airbyte_api/models/destination_dev_null.py index 3db4beec..4ce10c71 100644 --- a/src/airbyte_api/models/destination_dev_null.py +++ b/src/airbyte_api/models/destination_dev_null.py @@ -23,8 +23,6 @@ class Silent: -TestDestination = Union['Silent'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -34,3 +32,5 @@ class DestinationDevNull: DESTINATION_TYPE: Final[DevNull] = dataclasses.field(default=DevNull.DEV_NULL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }}) + +TestDestination = Union[Silent] diff --git a/src/airbyte_api/models/destination_elasticsearch.py b/src/airbyte_api/models/destination_elasticsearch.py index 74fcd689..2a72e95e 100644 --- a/src/airbyte_api/models/destination_elasticsearch.py +++ b/src/airbyte_api/models/destination_elasticsearch.py @@ -41,8 +41,6 @@ class APIKeySecret: -AuthenticationMethod = Union['APIKeySecret', 'UsernamePassword'] - class Elasticsearch(str, Enum): ELASTICSEARCH = 'elasticsearch' @@ -62,3 +60,5 @@ class DestinationElasticsearch: r"""If a primary key identifier is defined in the source, an upsert will be performed using the primary key value as the elasticsearch doc id. Does not support composite primary keys.""" + +AuthenticationMethod = Union[APIKeySecret, UsernamePassword] diff --git a/src/airbyte_api/models/destination_firebolt.py b/src/airbyte_api/models/destination_firebolt.py index 868a6599..fba57756 100644 --- a/src/airbyte_api/models/destination_firebolt.py +++ b/src/airbyte_api/models/destination_firebolt.py @@ -43,8 +43,6 @@ class SQLInserts: -DestinationFireboltLoadingMethod = Union['SQLInserts', 'ExternalTableViaS3'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -66,3 +64,5 @@ class DestinationFirebolt: r"""Loading method used to select the way data will be uploaded to Firebolt""" + +DestinationFireboltLoadingMethod = Union[SQLInserts, ExternalTableViaS3] diff --git a/src/airbyte_api/models/destination_gcs.py b/src/airbyte_api/models/destination_gcs.py index 889f4b5e..68ec48be 100644 --- a/src/airbyte_api/models/destination_gcs.py +++ b/src/airbyte_api/models/destination_gcs.py @@ -23,8 +23,6 @@ class HMACKey: -Authentication = Union['HMACKey'] - class Gcs(str, Enum): GCS = 'gcs' @@ -88,8 +86,6 @@ class DestinationGcsSchemasNoCompression: -DestinationGcsCompression = Union['DestinationGcsSchemasNoCompression', 'DestinationGcsGZIP'] - class DestinationGcsSchemasFormatFormatType(str, Enum): JSONL = 'JSONL' @@ -128,8 +124,6 @@ class DestinationGcsNoCompression: -Compression = Union['DestinationGcsNoCompression', 'Gzip'] - class Normalization(str, Enum): r"""Whether the input JSON data should be normalized (flattened) in the output CSV. Please refer to docs for details.""" @@ -232,8 +226,6 @@ class NoCompression: -CompressionCodec = Union['NoCompression', 'Deflate', 'Bzip2', 'Xz', 'Zstandard', 'Snappy'] - class DestinationGcsFormatType(str, Enum): AVRO = 'Avro' @@ -248,8 +240,6 @@ class AvroApacheAvro: -DestinationGcsOutputFormat = Union['AvroApacheAvro', 'DestinationGcsCSVCommaSeparatedValues', 'DestinationGcsJSONLinesNewlineDelimitedJSON', 'DestinationGcsParquetColumnarStorage'] - class GCSBucketRegion(str, Enum): r"""Select a Region of the GCS Bucket. 
Read more here.""" @@ -306,3 +296,13 @@ class DestinationGcs: r"""Select a Region of the GCS Bucket. Read more here.""" + +Authentication = Union[HMACKey] + +DestinationGcsCompression = Union[DestinationGcsSchemasNoCompression, DestinationGcsGZIP] + +Compression = Union[DestinationGcsNoCompression, Gzip] + +CompressionCodec = Union[NoCompression, Deflate, Bzip2, Xz, Zstandard, Snappy] + +DestinationGcsOutputFormat = Union[AvroApacheAvro, DestinationGcsCSVCommaSeparatedValues, DestinationGcsJSONLinesNewlineDelimitedJSON, DestinationGcsParquetColumnarStorage] diff --git a/src/airbyte_api/models/destination_langchain.py b/src/airbyte_api/models/destination_langchain.py deleted file mode 100644 index bc2e7efd..00000000 --- a/src/airbyte_api/models/destination_langchain.py +++ /dev/null @@ -1,119 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" - -from __future__ import annotations -import dataclasses -from airbyte_api import utils -from dataclasses_json import Undefined, dataclass_json -from enum import Enum -from typing import Final, List, Optional, Union - - -class Langchain(str, Enum): - LANGCHAIN = 'langchain' - - -class DestinationLangchainSchemasMode(str, Enum): - FAKE = 'fake' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class DestinationLangchainFake: - r"""Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.""" - MODE: Final[Optional[DestinationLangchainSchemasMode]] = dataclasses.field(default=DestinationLangchainSchemasMode.FAKE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 'exclude': lambda f: f is None }}) - - - - -class DestinationLangchainMode(str, Enum): - OPENAI = 'openai' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class DestinationLangchainOpenAI: - r"""Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.""" - openai_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('openai_key') }}) - MODE: Final[Optional[DestinationLangchainMode]] = dataclasses.field(default=DestinationLangchainMode.OPENAI, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 'exclude': lambda f: f is None }}) - - - -DestinationLangchainEmbedding = Union['DestinationLangchainOpenAI', 'DestinationLangchainFake'] - - -class DestinationLangchainSchemasIndexingIndexing3Mode(str, Enum): - CHROMA_LOCAL = 'chroma_local' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class ChromaLocalPersistance: - r"""Chroma is a popular vector store that can be used to store and retrieve embeddings. It will build its index in memory and persist it to disk by the end of the sync.""" - destination_path: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destination_path') }}) - r"""Path to the directory where chroma files will be written. 
The files will be placed inside that local mount.""" - collection_name: Optional[str] = dataclasses.field(default='langchain', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('collection_name'), 'exclude': lambda f: f is None }}) - r"""Name of the collection to use.""" - MODE: Final[Optional[DestinationLangchainSchemasIndexingIndexing3Mode]] = dataclasses.field(default=DestinationLangchainSchemasIndexingIndexing3Mode.CHROMA_LOCAL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 'exclude': lambda f: f is None }}) - - - - -class DestinationLangchainSchemasIndexingIndexingMode(str, Enum): - DOC_ARRAY_HNSW_SEARCH = 'DocArrayHnswSearch' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class DocArrayHnswSearch: - r"""DocArrayHnswSearch is a lightweight Document Index implementation provided by Docarray that runs fully locally and is best suited for small- to medium-sized datasets. It stores vectors on disk in hnswlib, and stores all other data in SQLite.""" - destination_path: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destination_path') }}) - r"""Path to the directory where hnswlib and meta data files will be written. The files will be placed inside that local mount. All files in the specified destination directory will be deleted on each run.""" - MODE: Final[Optional[DestinationLangchainSchemasIndexingIndexingMode]] = dataclasses.field(default=DestinationLangchainSchemasIndexingIndexingMode.DOC_ARRAY_HNSW_SEARCH, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 'exclude': lambda f: f is None }}) - - - - -class DestinationLangchainSchemasIndexingMode(str, Enum): - PINECONE = 'pinecone' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class DestinationLangchainPinecone: - r"""Pinecone is a popular vector store that can be used to store and retrieve embeddings. It is a managed service and can also be queried from outside of langchain.""" - index: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('index') }}) - r"""Pinecone index to use""" - pinecone_environment: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('pinecone_environment') }}) - r"""Pinecone environment to use""" - pinecone_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('pinecone_key') }}) - MODE: Final[Optional[DestinationLangchainSchemasIndexingMode]] = dataclasses.field(default=DestinationLangchainSchemasIndexingMode.PINECONE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 'exclude': lambda f: f is None }}) - - - -DestinationLangchainIndexing = Union['DestinationLangchainPinecone', 'DocArrayHnswSearch', 'ChromaLocalPersistance'] - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class DestinationLangchainProcessingConfigModel: - chunk_size: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('chunk_size') }}) - r"""Size of chunks in tokens to store in vector store (make sure it is not too big for the context if your LLM)""" - text_fields: List[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('text_fields') }}) - r"""List of fields in the record that should be used to calculate the embedding. All other fields are passed along as meta fields. 
The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array.""" - chunk_overlap: Optional[int] = dataclasses.field(default=0, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('chunk_overlap'), 'exclude': lambda f: f is None }}) - r"""Size of overlap between chunks in tokens to store in vector store to better capture relevant context""" - - - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class DestinationLangchain: - embedding: DestinationLangchainEmbedding = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('embedding') }}) - r"""Embedding configuration""" - indexing: DestinationLangchainIndexing = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('indexing') }}) - r"""Indexing configuration""" - processing: DestinationLangchainProcessingConfigModel = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('processing') }}) - DESTINATION_TYPE: Final[Langchain] = dataclasses.field(default=Langchain.LANGCHAIN, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }}) - - diff --git a/src/airbyte_api/models/destination_milvus.py b/src/airbyte_api/models/destination_milvus.py index 4dc80719..b121a43e 100644 --- a/src/airbyte_api/models/destination_milvus.py +++ b/src/airbyte_api/models/destination_milvus.py @@ -91,8 +91,6 @@ class DestinationMilvusOpenAI: -DestinationMilvusEmbedding = Union['DestinationMilvusOpenAI', 'DestinationMilvusCohere', 'DestinationMilvusFake', 'DestinationMilvusAzureOpenAI', 'DestinationMilvusOpenAICompatible'] - class DestinationMilvusSchemasIndexingAuthAuthenticationMode(str, Enum): NO_AUTH = 'no_auth' @@ -138,8 +136,6 @@ class DestinationMilvusAPIToken: -DestinationMilvusAuthentication = Union['DestinationMilvusAPIToken', 'DestinationMilvusUsernamePassword', 'NoAuth'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -238,8 +234,6 @@ class DestinationMilvusBySeparator: -DestinationMilvusTextSplitter = Union['DestinationMilvusBySeparator', 'DestinationMilvusByMarkdownHeader', 'DestinationMilvusByProgrammingLanguage'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -284,3 +278,9 @@ class DestinationMilvus: r"""Do not store the text that gets embedded along with the vector and the metadata in the destination. 
If set to true, only the vector and the metadata will be stored - in this case raw text for LLM use cases needs to be retrieved from another source.""" + +DestinationMilvusEmbedding = Union[DestinationMilvusOpenAI, DestinationMilvusCohere, DestinationMilvusFake, DestinationMilvusAzureOpenAI, DestinationMilvusOpenAICompatible] + +DestinationMilvusAuthentication = Union[DestinationMilvusAPIToken, DestinationMilvusUsernamePassword, NoAuth] + +DestinationMilvusTextSplitter = Union[DestinationMilvusBySeparator, DestinationMilvusByMarkdownHeader, DestinationMilvusByProgrammingLanguage] diff --git a/src/airbyte_api/models/destination_mongodb.py b/src/airbyte_api/models/destination_mongodb.py index e06c26c6..d33e078a 100644 --- a/src/airbyte_api/models/destination_mongodb.py +++ b/src/airbyte_api/models/destination_mongodb.py @@ -37,8 +37,6 @@ class NoneT: -AuthorizationType = Union['NoneT', 'LoginPassword'] - class Mongodb(str, Enum): MONGODB = 'mongodb' @@ -89,8 +87,6 @@ class StandaloneMongoDbInstance: -MongoDbInstanceType = Union['StandaloneMongoDbInstance', 'ReplicaSet', 'MongoDBAtlas'] - class DestinationMongodbSchemasTunnelMethodTunnelMethod(str, Enum): r"""Connect through a jump server tunnel host using username and password authentication""" @@ -149,8 +145,6 @@ class DestinationMongodbNoTunnel: -DestinationMongodbSSHTunnelMethod = Union['DestinationMongodbNoTunnel', 'DestinationMongodbSSHKeyAuthentication', 'DestinationMongodbPasswordAuthentication'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -166,3 +160,9 @@ class DestinationMongodb: r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.""" + +AuthorizationType = Union[NoneT, LoginPassword] + +MongoDbInstanceType = Union[StandaloneMongoDbInstance, ReplicaSet, MongoDBAtlas] + +DestinationMongodbSSHTunnelMethod = Union[DestinationMongodbNoTunnel, DestinationMongodbSSHKeyAuthentication, DestinationMongodbPasswordAuthentication] diff --git a/src/airbyte_api/models/destination_mssql.py b/src/airbyte_api/models/destination_mssql.py index 7aead9de..82dc9971 100644 --- a/src/airbyte_api/models/destination_mssql.py +++ b/src/airbyte_api/models/destination_mssql.py @@ -39,8 +39,6 @@ class EncryptedTrustServerCertificate: -SSLMethod = Union['EncryptedTrustServerCertificate', 'EncryptedVerifyCertificate'] - class DestinationMssqlSchemasTunnelMethodTunnelMethod(str, Enum): r"""Connect through a jump server tunnel host using username and password authentication""" @@ -99,8 +97,6 @@ class DestinationMssqlNoTunnel: -DestinationMssqlSSHTunnelMethod = Union['DestinationMssqlNoTunnel', 'DestinationMssqlSSHKeyAuthentication', 'DestinationMssqlPasswordAuthentication'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -128,3 +124,7 @@ class DestinationMssql: r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.""" + +SSLMethod = Union[EncryptedTrustServerCertificate, EncryptedVerifyCertificate] + +DestinationMssqlSSHTunnelMethod = Union[DestinationMssqlNoTunnel, DestinationMssqlSSHKeyAuthentication, DestinationMssqlPasswordAuthentication] diff --git a/src/airbyte_api/models/destination_mysql.py b/src/airbyte_api/models/destination_mysql.py index c617f5f4..401cdfe6 100644 --- a/src/airbyte_api/models/destination_mysql.py +++ b/src/airbyte_api/models/destination_mysql.py @@ -69,8 +69,6 @@ class DestinationMysqlNoTunnel: -DestinationMysqlSSHTunnelMethod = 
Union['DestinationMysqlNoTunnel', 'DestinationMysqlSSHKeyAuthentication', 'DestinationMysqlPasswordAuthentication'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -96,3 +94,5 @@ class DestinationMysql: r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.""" + +DestinationMysqlSSHTunnelMethod = Union[DestinationMysqlNoTunnel, DestinationMysqlSSHKeyAuthentication, DestinationMysqlPasswordAuthentication] diff --git a/src/airbyte_api/models/destination_oracle.py b/src/airbyte_api/models/destination_oracle.py index 5aaf3897..e4dcf086 100644 --- a/src/airbyte_api/models/destination_oracle.py +++ b/src/airbyte_api/models/destination_oracle.py @@ -69,8 +69,6 @@ class DestinationOracleNoTunnel: -DestinationOracleSSHTunnelMethod = Union['DestinationOracleNoTunnel', 'DestinationOracleSSHKeyAuthentication', 'DestinationOraclePasswordAuthentication'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -96,3 +94,5 @@ class DestinationOracle: r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.""" + +DestinationOracleSSHTunnelMethod = Union[DestinationOracleNoTunnel, DestinationOracleSSHKeyAuthentication, DestinationOraclePasswordAuthentication] diff --git a/src/airbyte_api/models/destination_pinecone.py b/src/airbyte_api/models/destination_pinecone.py index 8015f671..65e50181 100644 --- a/src/airbyte_api/models/destination_pinecone.py +++ b/src/airbyte_api/models/destination_pinecone.py @@ -91,8 +91,6 @@ class DestinationPineconeOpenAI: -DestinationPineconeEmbedding = Union['DestinationPineconeOpenAI', 'DestinationPineconeCohere', 'DestinationPineconeFake', 'DestinationPineconeAzureOpenAI', 'DestinationPineconeOpenAICompatible'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -185,8 +183,6 @@ class DestinationPineconeBySeparator: -DestinationPineconeTextSplitter = Union['DestinationPineconeBySeparator', 'DestinationPineconeByMarkdownHeader', 'DestinationPineconeByProgrammingLanguage'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -231,3 +227,7 @@ class DestinationPinecone: r"""Do not store the text that gets embedded along with the vector and the metadata in the destination. 
If set to true, only the vector and the metadata will be stored - in this case raw text for LLM use cases needs to be retrieved from another source.""" + +DestinationPineconeEmbedding = Union[DestinationPineconeOpenAI, DestinationPineconeCohere, DestinationPineconeFake, DestinationPineconeAzureOpenAI, DestinationPineconeOpenAICompatible] + +DestinationPineconeTextSplitter = Union[DestinationPineconeBySeparator, DestinationPineconeByMarkdownHeader, DestinationPineconeByProgrammingLanguage] diff --git a/src/airbyte_api/models/destination_postgres.py b/src/airbyte_api/models/destination_postgres.py index 4968697c..2c692330 100644 --- a/src/airbyte_api/models/destination_postgres.py +++ b/src/airbyte_api/models/destination_postgres.py @@ -101,8 +101,6 @@ class Disable: -SSLModes = Union['Disable', 'Allow', 'Prefer', 'Require', 'VerifyCa', 'VerifyFull'] - class DestinationPostgresSchemasTunnelMethodTunnelMethod(str, Enum): r"""Connect through a jump server tunnel host using username and password authentication""" @@ -161,8 +159,6 @@ class DestinationPostgresNoTunnel: -DestinationPostgresSSHTunnelMethod = Union['DestinationPostgresNoTunnel', 'DestinationPostgresSSHKeyAuthentication', 'DestinationPostgresPasswordAuthentication'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -202,3 +198,7 @@ class DestinationPostgres: r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.""" + +SSLModes = Union[Disable, Allow, Prefer, Require, VerifyCa, VerifyFull] + +DestinationPostgresSSHTunnelMethod = Union[DestinationPostgresNoTunnel, DestinationPostgresSSHKeyAuthentication, DestinationPostgresPasswordAuthentication] diff --git a/src/airbyte_api/models/destination_qdrant.py b/src/airbyte_api/models/destination_qdrant.py index f68ef1e6..5a0ed0ff 100644 --- a/src/airbyte_api/models/destination_qdrant.py +++ b/src/airbyte_api/models/destination_qdrant.py @@ -91,8 +91,6 @@ class DestinationQdrantOpenAI: -DestinationQdrantEmbedding = Union['DestinationQdrantOpenAI', 'DestinationQdrantCohere', 'DestinationQdrantFake', 'DestinationQdrantAzureOpenAI', 'DestinationQdrantOpenAICompatible'] - class DestinationQdrantSchemasIndexingAuthMethodMode(str, Enum): NO_AUTH = 'no_auth' @@ -119,8 +117,6 @@ class APIKeyAuth: -DestinationQdrantAuthenticationMethod = Union['APIKeyAuth', 'DestinationQdrantNoAuth'] - class DistanceMetric(str, Enum): r"""The Distance metric used to measure similarities among vectors. This field is only used if the collection defined in the does not exist yet and is created automatically by the connector.""" @@ -226,8 +222,6 @@ class DestinationQdrantBySeparator: -DestinationQdrantTextSplitter = Union['DestinationQdrantBySeparator', 'DestinationQdrantByMarkdownHeader', 'DestinationQdrantByProgrammingLanguage'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -272,3 +266,9 @@ class DestinationQdrant: r"""Do not store the text that gets embedded along with the vector and the metadata in the destination. 
If set to true, only the vector and the metadata will be stored - in this case raw text for LLM use cases needs to be retrieved from another source.""" + +DestinationQdrantEmbedding = Union[DestinationQdrantOpenAI, DestinationQdrantCohere, DestinationQdrantFake, DestinationQdrantAzureOpenAI, DestinationQdrantOpenAICompatible] + +DestinationQdrantAuthenticationMethod = Union[APIKeyAuth, DestinationQdrantNoAuth] + +DestinationQdrantTextSplitter = Union[DestinationQdrantBySeparator, DestinationQdrantByMarkdownHeader, DestinationQdrantByProgrammingLanguage] diff --git a/src/airbyte_api/models/destination_redis.py b/src/airbyte_api/models/destination_redis.py index 5cc260e9..e58caa93 100644 --- a/src/airbyte_api/models/destination_redis.py +++ b/src/airbyte_api/models/destination_redis.py @@ -50,8 +50,6 @@ class DestinationRedisDisable: -DestinationRedisSSLModes = Union['DestinationRedisDisable', 'DestinationRedisVerifyFull'] - class DestinationRedisSchemasTunnelMethodTunnelMethod(str, Enum): r"""Connect through a jump server tunnel host using username and password authentication""" @@ -110,8 +108,6 @@ class DestinationRedisNoTunnel: -DestinationRedisSSHTunnelMethod = Union['DestinationRedisNoTunnel', 'DestinationRedisSSHKeyAuthentication', 'DestinationRedisPasswordAuthentication'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -137,3 +133,7 @@ class DestinationRedis: r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.""" + +DestinationRedisSSLModes = Union[DestinationRedisDisable, DestinationRedisVerifyFull] + +DestinationRedisSSHTunnelMethod = Union[DestinationRedisNoTunnel, DestinationRedisSSHKeyAuthentication, DestinationRedisPasswordAuthentication] diff --git a/src/airbyte_api/models/destination_redshift.py b/src/airbyte_api/models/destination_redshift.py index 1d6d8e89..331b595a 100644 --- a/src/airbyte_api/models/destination_redshift.py +++ b/src/airbyte_api/models/destination_redshift.py @@ -69,51 +69,6 @@ class DestinationRedshiftNoTunnel: -DestinationRedshiftSSHTunnelMethod = Union['DestinationRedshiftNoTunnel', 'DestinationRedshiftSSHKeyAuthentication', 'DestinationRedshiftPasswordAuthentication'] - - -class DestinationRedshiftSchemasMethod(str, Enum): - STANDARD = 'Standard' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class Standard: - r"""(not recommended) Direct loading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. 
In all other cases, you should use S3 uploading.""" - METHOD: Final[DestinationRedshiftSchemasMethod] = dataclasses.field(default=DestinationRedshiftSchemasMethod.STANDARD, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }}) - - - - -class DestinationRedshiftEncryptionType(str, Enum): - AES_CBC_ENVELOPE = 'aes_cbc_envelope' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class AESCBCEnvelopeEncryption: - r"""Staging data will be encrypted using AES-CBC envelope encryption.""" - ENCRYPTION_TYPE: Final[Optional[DestinationRedshiftEncryptionType]] = dataclasses.field(default=DestinationRedshiftEncryptionType.AES_CBC_ENVELOPE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption_type'), 'exclude': lambda f: f is None }}) - key_encrypting_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('key_encrypting_key'), 'exclude': lambda f: f is None }}) - r"""The key, base64-encoded. Must be either 128, 192, or 256 bits. Leave blank to have Airbyte generate an ephemeral key for each sync.""" - - - - -class EncryptionType(str, Enum): - NONE = 'none' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class NoEncryption: - r"""Staging data will be stored in plaintext.""" - ENCRYPTION_TYPE: Final[Optional[EncryptionType]] = dataclasses.field(default=EncryptionType.NONE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption_type'), 'exclude': lambda f: f is None }}) - - - -DestinationRedshiftEncryption = Union['NoEncryption', 'AESCBCEnvelopeEncryption'] - class DestinationRedshiftMethod(str, Enum): S3_STAGING = 'S3 Staging' @@ -167,8 +122,6 @@ class AWSS3Staging: r"""The name of the staging S3 bucket.""" secret_access_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('secret_access_key') }}) r"""The corresponding secret to the above access key id. 
See AWS docs on how to generate an access key ID and secret access key.""" - encryption: Optional[DestinationRedshiftEncryption] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption'), 'exclude': lambda f: f is None }}) - r"""How to encrypt the staging data""" file_name_pattern: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('file_name_pattern'), 'exclude': lambda f: f is None }}) r"""The pattern allows you to set the file-name format for the S3 staging file(s)""" METHOD: Final[DestinationRedshiftMethod] = dataclasses.field(default=DestinationRedshiftMethod.S3_STAGING, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }}) @@ -181,8 +134,6 @@ class AWSS3Staging: -UploadingMethod = Union['AWSS3Staging', 'Standard'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -214,3 +165,7 @@ class DestinationRedshift: r"""The way data will be uploaded to Redshift.""" + +DestinationRedshiftSSHTunnelMethod = Union[DestinationRedshiftNoTunnel, DestinationRedshiftSSHKeyAuthentication, DestinationRedshiftPasswordAuthentication] + +UploadingMethod = Union[AWSS3Staging] diff --git a/src/airbyte_api/models/destination_s3.py b/src/airbyte_api/models/destination_s3.py index 22e99b99..267d0b95 100644 --- a/src/airbyte_api/models/destination_s3.py +++ b/src/airbyte_api/models/destination_s3.py @@ -126,8 +126,6 @@ class DestinationS3SchemasFormatNoCompression: -DestinationS3CompressionCodec = Union['DestinationS3SchemasFormatNoCompression', 'DestinationS3Deflate', 'DestinationS3Bzip2', 'DestinationS3Xz', 'DestinationS3Zstandard', 'DestinationS3Snappy'] - class DestinationS3SchemasFormatFormatType(str, Enum): AVRO = 'Avro' @@ -166,8 +164,6 @@ class DestinationS3SchemasNoCompression: -DestinationS3SchemasCompression = Union['DestinationS3SchemasNoCompression', 'DestinationS3SchemasGZIP'] - class DestinationS3SchemasFlattening(str, Enum): r"""Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details.""" @@ -214,8 +210,6 @@ class DestinationS3NoCompression: -DestinationS3Compression = Union['DestinationS3NoCompression', 'DestinationS3GZIP'] - class DestinationS3Flattening(str, Enum): r"""Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details.""" @@ -238,8 +232,6 @@ class DestinationS3CSVCommaSeparatedValues: -DestinationS3OutputFormat = Union['DestinationS3CSVCommaSeparatedValues', 'DestinationS3JSONLinesNewlineDelimitedJSON', 'DestinationS3AvroApacheAvro', 'DestinationS3ParquetColumnarStorage'] - class DestinationS3S3BucketRegion(str, Enum): r"""The region of the S3 bucket. See here for all region codes.""" @@ -305,3 +297,11 @@ class DestinationS3: r"""The corresponding secret to the access key ID. 
Read more here""" + +DestinationS3CompressionCodec = Union[DestinationS3SchemasFormatNoCompression, DestinationS3Deflate, DestinationS3Bzip2, DestinationS3Xz, DestinationS3Zstandard, DestinationS3Snappy] + +DestinationS3SchemasCompression = Union[DestinationS3SchemasNoCompression, DestinationS3SchemasGZIP] + +DestinationS3Compression = Union[DestinationS3NoCompression, DestinationS3GZIP] + +DestinationS3OutputFormat = Union[DestinationS3CSVCommaSeparatedValues, DestinationS3JSONLinesNewlineDelimitedJSON, DestinationS3AvroApacheAvro, DestinationS3ParquetColumnarStorage] diff --git a/src/airbyte_api/models/destination_s3_glue.py b/src/airbyte_api/models/destination_s3_glue.py index 504d9d67..6b59ceaf 100644 --- a/src/airbyte_api/models/destination_s3_glue.py +++ b/src/airbyte_api/models/destination_s3_glue.py @@ -35,8 +35,6 @@ class DestinationS3GlueNoCompression: -DestinationS3GlueCompression = Union['DestinationS3GlueNoCompression', 'DestinationS3GlueGZIP'] - class Flattening(str, Enum): r"""Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details.""" @@ -59,8 +57,6 @@ class DestinationS3GlueJSONLinesNewlineDelimitedJSON: -DestinationS3GlueOutputFormat = Union['DestinationS3GlueJSONLinesNewlineDelimitedJSON'] - class SerializationLibrary(str, Enum): r"""The library that your query engine will use for reading and writing data in your lake.""" @@ -134,3 +130,7 @@ class DestinationS3Glue: r"""The corresponding secret to the access key ID. Read more here""" + +DestinationS3GlueCompression = Union[DestinationS3GlueNoCompression, DestinationS3GlueGZIP] + +DestinationS3GlueOutputFormat = Union[DestinationS3GlueJSONLinesNewlineDelimitedJSON] diff --git a/src/airbyte_api/models/destination_snowflake.py b/src/airbyte_api/models/destination_snowflake.py index e041dfae..bd94cb9b 100644 --- a/src/airbyte_api/models/destination_snowflake.py +++ b/src/airbyte_api/models/destination_snowflake.py @@ -57,8 +57,6 @@ class KeyPairAuthentication: -AuthorizationMethod = Union['KeyPairAuthentication', 'UsernameAndPassword', 'DestinationSnowflakeOAuth20'] - class DestinationSnowflakeSnowflake(str, Enum): SNOWFLAKE = 'snowflake' @@ -78,7 +76,7 @@ class DestinationSnowflake: username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) r"""Enter the name of the user you want to use to access the database""" warehouse: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('warehouse') }}) - r"""Enter the name of the warehouse that you want to sync data into""" + r"""Enter the name of the warehouse that you want to use as a compute cluster""" credentials: Optional[AuthorizationMethod] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }}) DESTINATION_TYPE: Final[DestinationSnowflakeSnowflake] = dataclasses.field(default=DestinationSnowflakeSnowflake.SNOWFLAKE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }}) disable_type_dedupe: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('disable_type_dedupe'), 'exclude': lambda f: f is None }}) @@ -91,3 +89,5 @@ class DestinationSnowflake: r"""The number of days of Snowflake Time Travel to enable on the tables. See Snowflake's documentation for more information. 
Setting a nonzero value will incur increased storage costs in your Snowflake instance.""" + +AuthorizationMethod = Union[KeyPairAuthentication, UsernameAndPassword, DestinationSnowflakeOAuth20] diff --git a/src/airbyte_api/models/destination_snowflake_cortex.py b/src/airbyte_api/models/destination_snowflake_cortex.py index 987e574b..99ea8c5b 100644 --- a/src/airbyte_api/models/destination_snowflake_cortex.py +++ b/src/airbyte_api/models/destination_snowflake_cortex.py @@ -91,8 +91,6 @@ class DestinationSnowflakeCortexOpenAI: -DestinationSnowflakeCortexEmbedding = Union['DestinationSnowflakeCortexOpenAI', 'DestinationSnowflakeCortexCohere', 'DestinationSnowflakeCortexFake', 'DestinationSnowflakeCortexAzureOpenAI', 'DestinationSnowflakeCortexOpenAICompatible'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -105,7 +103,7 @@ class DestinationSnowflakeCortexCredentials: @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass -class DestinationSnowflakeCortexIndexing: +class SnowflakeConnection: r"""Snowflake can be used to store vector data and retrieve embeddings.""" credentials: DestinationSnowflakeCortexCredentials = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials') }}) database: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('database') }}) @@ -119,7 +117,7 @@ class DestinationSnowflakeCortexIndexing: username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) r"""Enter the name of the user you want to use to access the database""" warehouse: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('warehouse') }}) - r"""Enter the name of the warehouse that you want to sync data into""" + r"""Enter the name of the warehouse that you want to use as a compute cluster""" @@ -201,8 +199,6 @@ class DestinationSnowflakeCortexBySeparator: -DestinationSnowflakeCortexTextSplitter = Union['DestinationSnowflakeCortexBySeparator', 'DestinationSnowflakeCortexByMarkdownHeader', 'DestinationSnowflakeCortexByProgrammingLanguage'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -239,7 +235,7 @@ class DestinationSnowflakeCortex: """ embedding: DestinationSnowflakeCortexEmbedding = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('embedding') }}) r"""Embedding configuration""" - indexing: DestinationSnowflakeCortexIndexing = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('indexing') }}) + indexing: SnowflakeConnection = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('indexing') }}) r"""Snowflake can be used to store vector data and retrieve embeddings.""" processing: DestinationSnowflakeCortexProcessingConfigModel = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('processing') }}) DESTINATION_TYPE: Final[SnowflakeCortex] = dataclasses.field(default=SnowflakeCortex.SNOWFLAKE_CORTEX, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }}) @@ -247,3 +243,7 @@ class DestinationSnowflakeCortex: r"""Do not store the text that gets embedded along with the vector and the metadata in the destination. 
If set to true, only the vector and the metadata will be stored - in this case raw text for LLM use cases needs to be retrieved from another source.""" + +DestinationSnowflakeCortexEmbedding = Union[DestinationSnowflakeCortexOpenAI, DestinationSnowflakeCortexCohere, DestinationSnowflakeCortexFake, DestinationSnowflakeCortexAzureOpenAI, DestinationSnowflakeCortexOpenAICompatible] + +DestinationSnowflakeCortexTextSplitter = Union[DestinationSnowflakeCortexBySeparator, DestinationSnowflakeCortexByMarkdownHeader, DestinationSnowflakeCortexByProgrammingLanguage] diff --git a/src/airbyte_api/models/destination_teradata.py b/src/airbyte_api/models/destination_teradata.py index 78ec7d21..d7c05aa1 100644 --- a/src/airbyte_api/models/destination_teradata.py +++ b/src/airbyte_api/models/destination_teradata.py @@ -97,8 +97,6 @@ class DestinationTeradataDisable: -DestinationTeradataSSLModes = Union['DestinationTeradataDisable', 'DestinationTeradataAllow', 'DestinationTeradataPrefer', 'DestinationTeradataRequire', 'DestinationTeradataVerifyCa', 'DestinationTeradataVerifyFull'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -128,3 +126,5 @@ class DestinationTeradata: """ + +DestinationTeradataSSLModes = Union[DestinationTeradataDisable, DestinationTeradataAllow, DestinationTeradataPrefer, DestinationTeradataRequire, DestinationTeradataVerifyCa, DestinationTeradataVerifyFull] diff --git a/src/airbyte_api/models/destination_weaviate.py b/src/airbyte_api/models/destination_weaviate.py index 66cb12ff..b7508d10 100644 --- a/src/airbyte_api/models/destination_weaviate.py +++ b/src/airbyte_api/models/destination_weaviate.py @@ -121,8 +121,6 @@ class NoExternalEmbedding: -DestinationWeaviateEmbedding = Union['NoExternalEmbedding', 'DestinationWeaviateAzureOpenAI', 'DestinationWeaviateOpenAI', 'DestinationWeaviateCohere', 'FromField', 'DestinationWeaviateFake', 'DestinationWeaviateOpenAICompatible'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -177,8 +175,6 @@ class DestinationWeaviateAPIToken: -DestinationWeaviateAuthentication = Union['DestinationWeaviateAPIToken', 'DestinationWeaviateUsernamePassword', 'NoAuthentication'] - class DefaultVectorizer(str, Enum): r"""The vectorizer to use if new classes need to be created""" @@ -291,8 +287,6 @@ class DestinationWeaviateBySeparator: -DestinationWeaviateTextSplitter = Union['DestinationWeaviateBySeparator', 'DestinationWeaviateByMarkdownHeader', 'DestinationWeaviateByProgrammingLanguage'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -337,3 +331,9 @@ class DestinationWeaviate: r"""Do not store the text that gets embedded along with the vector and the metadata in the destination. 
If set to true, only the vector and the metadata will be stored - in this case raw text for LLM use cases needs to be retrieved from another source.""" + +DestinationWeaviateEmbedding = Union[NoExternalEmbedding, DestinationWeaviateAzureOpenAI, DestinationWeaviateOpenAI, DestinationWeaviateCohere, FromField, DestinationWeaviateFake, DestinationWeaviateOpenAICompatible] + +DestinationWeaviateAuthentication = Union[DestinationWeaviateAPIToken, DestinationWeaviateUsernamePassword, NoAuthentication] + +DestinationWeaviateTextSplitter = Union[DestinationWeaviateBySeparator, DestinationWeaviateByMarkdownHeader, DestinationWeaviateByProgrammingLanguage] diff --git a/src/airbyte_api/models/destination_yellowbrick.py b/src/airbyte_api/models/destination_yellowbrick.py index c6e9fa43..1811e9f0 100644 --- a/src/airbyte_api/models/destination_yellowbrick.py +++ b/src/airbyte_api/models/destination_yellowbrick.py @@ -101,8 +101,6 @@ class DestinationYellowbrickDisable: -DestinationYellowbrickSSLModes = Union['DestinationYellowbrickDisable', 'DestinationYellowbrickAllow', 'DestinationYellowbrickPrefer', 'DestinationYellowbrickRequire', 'DestinationYellowbrickVerifyCa', 'DestinationYellowbrickVerifyFull'] - class DestinationYellowbrickSchemasTunnelMethodTunnelMethod(str, Enum): r"""Connect through a jump server tunnel host using username and password authentication""" @@ -161,8 +159,6 @@ class DestinationYellowbrickNoTunnel: -DestinationYellowbrickSSHTunnelMethod = Union['DestinationYellowbrickNoTunnel', 'DestinationYellowbrickSSHKeyAuthentication', 'DestinationYellowbrickPasswordAuthentication'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -198,3 +194,7 @@ class DestinationYellowbrick: r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.""" + +DestinationYellowbrickSSLModes = Union[DestinationYellowbrickDisable, DestinationYellowbrickAllow, DestinationYellowbrickPrefer, DestinationYellowbrickRequire, DestinationYellowbrickVerifyCa, DestinationYellowbrickVerifyFull] + +DestinationYellowbrickSSHTunnelMethod = Union[DestinationYellowbrickNoTunnel, DestinationYellowbrickSSHKeyAuthentication, DestinationYellowbrickPasswordAuthentication] diff --git a/src/airbyte_api/models/destinationconfiguration.py b/src/airbyte_api/models/destinationconfiguration.py index d1087234..e2841026 100644 --- a/src/airbyte_api/models/destinationconfiguration.py +++ b/src/airbyte_api/models/destinationconfiguration.py @@ -16,7 +16,6 @@ from .destination_firestore import DestinationFirestore from .destination_gcs import DestinationGcs from .destination_google_sheets import DestinationGoogleSheets -from .destination_langchain import DestinationLangchain from .destination_milvus import DestinationMilvus from .destination_mongodb import DestinationMongodb from .destination_mssql import DestinationMssql @@ -40,4 +39,4 @@ from .destination_yellowbrick import DestinationYellowbrick from typing import Union -DestinationConfiguration = Union[DestinationGoogleSheets, DestinationAstra, DestinationAwsDatalake, DestinationAzureBlobStorage, DestinationBigquery, DestinationClickhouse, DestinationConvex, DestinationDatabricks, DestinationDevNull, DestinationDuckdb, DestinationDynamodb, DestinationElasticsearch, DestinationFirebolt, DestinationFirestore, DestinationGcs, DestinationLangchain, DestinationMilvus, DestinationMongodb, DestinationMssql, DestinationMysql, DestinationOracle, DestinationPinecone, DestinationPostgres, DestinationPubsub, 
DestinationQdrant, DestinationRedis, DestinationRedshift, DestinationS3, DestinationS3Glue, DestinationSftpJSON, DestinationSnowflake, DestinationSnowflakeCortex, DestinationTeradata, DestinationTypesense, DestinationVectara, DestinationWeaviate, DestinationYellowbrick] +DestinationConfiguration = Union[DestinationGoogleSheets, DestinationAstra, DestinationAwsDatalake, DestinationAzureBlobStorage, DestinationBigquery, DestinationClickhouse, DestinationConvex, DestinationDatabricks, DestinationDevNull, DestinationDuckdb, DestinationDynamodb, DestinationElasticsearch, DestinationFirebolt, DestinationFirestore, DestinationGcs, DestinationMilvus, DestinationMongodb, DestinationMssql, DestinationMysql, DestinationOracle, DestinationPinecone, DestinationPostgres, DestinationPubsub, DestinationQdrant, DestinationRedis, DestinationRedshift, DestinationS3, DestinationS3Glue, DestinationSftpJSON, DestinationSnowflake, DestinationSnowflakeCortex, DestinationTeradata, DestinationTypesense, DestinationVectara, DestinationWeaviate, DestinationYellowbrick] diff --git a/src/airbyte_api/models/facebook_marketing.py b/src/airbyte_api/models/facebook_marketing.py index bc54fef5..e5f358de 100644 --- a/src/airbyte_api/models/facebook_marketing.py +++ b/src/airbyte_api/models/facebook_marketing.py @@ -9,10 +9,18 @@ @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass -class FacebookMarketing: +class FacebookMarketingCredentials: client_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id'), 'exclude': lambda f: f is None }}) r"""The Client Id for your OAuth app""" client_secret: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret'), 'exclude': lambda f: f is None }}) r"""The Client Secret for your OAuth app""" + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class FacebookMarketing: + credentials: Optional[FacebookMarketingCredentials] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }}) + + diff --git a/src/airbyte_api/models/jobtypeenum.py b/src/airbyte_api/models/jobtypeenum.py index 2bf10a65..65b66600 100644 --- a/src/airbyte_api/models/jobtypeenum.py +++ b/src/airbyte_api/models/jobtypeenum.py @@ -8,3 +8,5 @@ class JobTypeEnum(str, Enum): r"""Enum that describes the different types of jobs that the platform runs.""" SYNC = 'sync' RESET = 'reset' + REFRESH = 'refresh' + CLEAR = 'clear' diff --git a/src/airbyte_api/models/organizationresponse.py b/src/airbyte_api/models/organizationresponse.py new file mode 100644 index 00000000..e3cdbd40 --- /dev/null +++ b/src/airbyte_api/models/organizationresponse.py @@ -0,0 +1,17 @@ +"""Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class OrganizationResponse: + r"""Provides details of a single organization for a user.""" + email: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('email') }}) + organization_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('organizationId') }}) + organization_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('organizationName') }}) + + diff --git a/src/airbyte_api/models/organizationsresponse.py b/src/airbyte_api/models/organizationsresponse.py new file mode 100644 index 00000000..cd3de7fd --- /dev/null +++ b/src/airbyte_api/models/organizationsresponse.py @@ -0,0 +1,17 @@ +"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from .organizationresponse import OrganizationResponse +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from typing import List + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class OrganizationsResponse: + r"""List/Array of multiple organizations.""" + data: List[OrganizationResponse] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('data') }}) + + diff --git a/src/airbyte_api/models/schemeclientcredentials.py b/src/airbyte_api/models/schemeclientcredentials.py index 94409de8..a47347fa 100644 --- a/src/airbyte_api/models/schemeclientcredentials.py +++ b/src/airbyte_api/models/schemeclientcredentials.py @@ -9,6 +9,6 @@ class SchemeClientCredentials: client_id: str = dataclasses.field(metadata={'security': { 'field_name': 'clientID' }}) client_secret: str = dataclasses.field(metadata={'security': { 'field_name': 'clientSecret' }}) - TOKEN_URL: Final[str] = dataclasses.field(default='/api/v1/applications/token') + TOKEN_URL: Final[str] = dataclasses.field(default='/applications/token') diff --git a/src/airbyte_api/models/source_airtable.py b/src/airbyte_api/models/source_airtable.py index db776026..9b548285 100644 --- a/src/airbyte_api/models/source_airtable.py +++ b/src/airbyte_api/models/source_airtable.py @@ -45,8 +45,6 @@ class SourceAirtableOAuth20: -SourceAirtableAuthentication = Union['SourceAirtableOAuth20', 'PersonalAccessToken'] - class SourceAirtableAirtable(str, Enum): AIRTABLE = 'airtable' @@ -59,3 +57,5 @@ class SourceAirtable: SOURCE_TYPE: Final[Optional[SourceAirtableAirtable]] = dataclasses.field(default=SourceAirtableAirtable.AIRTABLE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType'), 'exclude': lambda f: f is None }}) + +SourceAirtableAuthentication = Union[SourceAirtableOAuth20, PersonalAccessToken] diff --git a/src/airbyte_api/models/source_amazon_seller_partner.py b/src/airbyte_api/models/source_amazon_seller_partner.py index 52b21892..68934e76 100644 --- a/src/airbyte_api/models/source_amazon_seller_partner.py +++ b/src/airbyte_api/models/source_amazon_seller_partner.py @@ -137,7 +137,7 @@ class SourceAmazonSellerPartner: replication_end_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('replication_end_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': 
dateutil.parser.isoparse, 'exclude': lambda f: f is None }}) r"""UTC date and time in the format 2017-01-25T00:00:00Z. Any data after this date will not be replicated.""" replication_start_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('replication_start_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'exclude': lambda f: f is None }}) - r"""UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. If start date is not provided, the date 2 years ago from today will be used.""" + r"""UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. If start date is not provided or older than 2 years ago from today, the date 2 years ago from today will be used.""" report_options_list: Optional[List[ReportOptions]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('report_options_list'), 'exclude': lambda f: f is None }}) r"""Additional information passed to reports. This varies by report type.""" SOURCE_TYPE: Final[SourceAmazonSellerPartnerAmazonSellerPartner] = dataclasses.field(default=SourceAmazonSellerPartnerAmazonSellerPartner.AMAZON_SELLER_PARTNER, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) diff --git a/src/airbyte_api/models/source_asana.py b/src/airbyte_api/models/source_asana.py index b7e13629..00a8ceb2 100644 --- a/src/airbyte_api/models/source_asana.py +++ b/src/airbyte_api/models/source_asana.py @@ -40,8 +40,6 @@ class AuthenticateViaAsanaOauth: -AuthenticationMechanism = Union['AuthenticateViaAsanaOauth', 'AuthenticateWithPersonalAccessToken'] - class SourceAsanaAsana(str, Enum): ASANA = 'asana' @@ -59,3 +57,5 @@ class SourceAsana: r"""This flag is used for testing purposes for certain streams that return a lot of data. This flag is not meant to be enabled for prod.""" + +AuthenticationMechanism = Union[AuthenticateViaAsanaOauth, AuthenticateWithPersonalAccessToken] diff --git a/src/airbyte_api/models/source_auth0.py b/src/airbyte_api/models/source_auth0.py index 122a2749..61bc9ca2 100644 --- a/src/airbyte_api/models/source_auth0.py +++ b/src/airbyte_api/models/source_auth0.py @@ -39,8 +39,6 @@ class OAuth2ConfidentialApplication: -SourceAuth0AuthenticationMethod = Union['OAuth2ConfidentialApplication', 'OAuth2AccessToken'] - class Auth0(str, Enum): AUTH0 = 'auth0' @@ -57,3 +55,5 @@ class SourceAuth0: r"""UTC date and time in the format 2017-01-25T00:00:00Z. 
Any data before this date will not be replicated.""" + +SourceAuth0AuthenticationMethod = Union[OAuth2ConfidentialApplication, OAuth2AccessToken] diff --git a/src/airbyte_api/models/source_aws_cloudtrail.py b/src/airbyte_api/models/source_aws_cloudtrail.py index 73d605f3..a2a19be4 100644 --- a/src/airbyte_api/models/source_aws_cloudtrail.py +++ b/src/airbyte_api/models/source_aws_cloudtrail.py @@ -2,7 +2,6 @@ from __future__ import annotations import dataclasses -import dateutil.parser from airbyte_api import utils from dataclasses_json import Undefined, dataclass_json from datetime import date @@ -10,6 +9,15 @@ from typing import Final, Optional +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class FilterAppliedWhileFetchingRecordsBasedOnAttributeKeyAndAttributeValueWhichWillBeAppendedOnTheRequestBody: + attribute_key: Optional[str] = dataclasses.field(default='EventName', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('attribute_key'), 'exclude': lambda f: f is None }}) + attribute_value: Optional[str] = dataclasses.field(default='ListInstanceAssociations', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('attribute_value'), 'exclude': lambda f: f is None }}) + + + + class AwsCloudtrail(str, Enum): AWS_CLOUDTRAIL = 'aws-cloudtrail' @@ -19,12 +27,13 @@ class AwsCloudtrail(str, Enum): class SourceAwsCloudtrail: aws_key_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('aws_key_id') }}) r"""AWS CloudTrail Access Key ID. See the docs for more information on how to obtain this key.""" - aws_region_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('aws_region_name') }}) - r"""The default AWS Region to use, for example, us-west-1 or us-west-2. When specifying a Region inline during client initialization, this property is named region_name.""" aws_secret_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('aws_secret_key') }}) r"""AWS CloudTrail Access Key ID. See the docs for more information on how to obtain this key.""" + aws_region_name: Optional[str] = dataclasses.field(default='us-east-1', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('aws_region_name'), 'exclude': lambda f: f is None }}) + r"""The default AWS Region to use, for example, us-west-1 or us-west-2. 
When specifying a Region inline during client initialization, this property is named region_name.""" + lookup_attributes_filter: Optional[FilterAppliedWhileFetchingRecordsBasedOnAttributeKeyAndAttributeValueWhichWillBeAppendedOnTheRequestBody] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('lookup_attributes_filter'), 'exclude': lambda f: f is None }}) SOURCE_TYPE: Final[AwsCloudtrail] = dataclasses.field(default=AwsCloudtrail.AWS_CLOUDTRAIL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) - start_date: Optional[date] = dataclasses.field(default=dateutil.parser.parse('1970-01-01').date(), metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.dateisoformat(True), 'decoder': utils.datefromisoformat, 'exclude': lambda f: f is None }}) + start_date: Optional[date] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.dateisoformat(True), 'decoder': utils.datefromisoformat, 'exclude': lambda f: f is None }}) r"""The date you would like to replicate data. Data in AWS CloudTrail is available for last 90 days only. Format: YYYY-MM-DD.""" diff --git a/src/airbyte_api/models/source_azure_blob_storage.py b/src/airbyte_api/models/source_azure_blob_storage.py index 43739075..243c137b 100644 --- a/src/airbyte_api/models/source_azure_blob_storage.py +++ b/src/airbyte_api/models/source_azure_blob_storage.py @@ -43,8 +43,6 @@ class AuthenticateViaOauth2: -SourceAzureBlobStorageAuthentication = Union['AuthenticateViaOauth2', 'AuthenticateViaStorageAccountKey'] - class SourceAzureBlobStorageAzureBlobStorage(str, Enum): AZURE_BLOB_STORAGE = 'azure-blob-storage' @@ -66,8 +64,6 @@ class Local: -Processing = Union['Local'] - class ParsingStrategy(str, Enum): r"""The strategy used to parse documents. `fast` extracts text directly from the document which doesn't work for all files. `ocr_only` is more reliable, but slower. `hi_res` is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf""" @@ -159,8 +155,6 @@ class FromCSV: -CSVHeaderDefinition = Union['FromCSV', 'Autogenerated', 'UserProvided'] - class InferenceType(str, Enum): r"""How to infer the types of the columns. If none, inference default to strings.""" @@ -217,8 +211,6 @@ class AvroFormat: -Format = Union['AvroFormat', 'CSVFormat', 'JsonlFormat', 'ParquetFormat', 'DocumentFileTypeFormatExperimental'] - class ValidationPolicy(str, Enum): r"""The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.""" @@ -273,3 +265,11 @@ class SourceAzureBlobStorage: r"""UTC date and time in the format 2017-01-25T00:00:00.000000Z. 
Any file modified before this date will not be replicated.""" + +SourceAzureBlobStorageAuthentication = Union[AuthenticateViaOauth2, AuthenticateViaStorageAccountKey] + +Processing = Union[Local] + +CSVHeaderDefinition = Union[FromCSV, Autogenerated, UserProvided] + +Format = Union[AvroFormat, CSVFormat, JsonlFormat, ParquetFormat, DocumentFileTypeFormatExperimental] diff --git a/src/airbyte_api/models/source_bamboo_hr.py b/src/airbyte_api/models/source_bamboo_hr.py index 86e4be92..96ceed9e 100644 --- a/src/airbyte_api/models/source_bamboo_hr.py +++ b/src/airbyte_api/models/source_bamboo_hr.py @@ -19,7 +19,7 @@ class SourceBambooHr: r"""Api key of bamboo hr""" subdomain: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('subdomain') }}) r"""Sub Domain of bamboo hr""" - custom_reports_fields: Optional[str] = dataclasses.field(default='', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('custom_reports_fields'), 'exclude': lambda f: f is None }}) + custom_reports_fields: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('custom_reports_fields'), 'exclude': lambda f: f is None }}) r"""Comma-separated list of fields to include in custom reports.""" custom_reports_include_default_fields: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('custom_reports_include_default_fields'), 'exclude': lambda f: f is None }}) r"""If true, the custom reports endpoint will include the default fields defined here: https://documentation.bamboohr.com/docs/list-of-field-names.""" diff --git a/src/airbyte_api/models/source_cart.py b/src/airbyte_api/models/source_cart.py index 73176c84..4bdc78f3 100644 --- a/src/airbyte_api/models/source_cart.py +++ b/src/airbyte_api/models/source_cart.py @@ -41,8 +41,6 @@ class CentralAPIRouter: -SourceCartAuthorizationMethod = Union['CentralAPIRouter', 'SingleStoreAccessToken'] - class Cart(str, Enum): CART = 'cart' @@ -57,3 +55,5 @@ class SourceCart: SOURCE_TYPE: Final[Cart] = dataclasses.field(default=Cart.CART, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + +SourceCartAuthorizationMethod = Union[CentralAPIRouter, SingleStoreAccessToken] diff --git a/src/airbyte_api/models/source_clazar.py b/src/airbyte_api/models/source_clazar.py new file mode 100644 index 00000000..fe4b5ae4 --- /dev/null +++ b/src/airbyte_api/models/source_clazar.py @@ -0,0 +1,22 @@ +"""Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final + + +class Clazar(str, Enum): + CLAZAR = 'clazar' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceClazar: + client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }}) + client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }}) + SOURCE_TYPE: Final[Clazar] = dataclasses.field(default=Clazar.CLAZAR, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_clickhouse.py b/src/airbyte_api/models/source_clickhouse.py index 8ce06891..4d5457cd 100644 --- a/src/airbyte_api/models/source_clickhouse.py +++ b/src/airbyte_api/models/source_clickhouse.py @@ -69,8 +69,6 @@ class SourceClickhouseNoTunnel: -SourceClickhouseSSHTunnelMethod = Union['SourceClickhouseNoTunnel', 'SourceClickhouseSSHKeyAuthentication', 'SourceClickhousePasswordAuthentication'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -92,3 +90,5 @@ class SourceClickhouse: r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.""" + +SourceClickhouseSSHTunnelMethod = Union[SourceClickhouseNoTunnel, SourceClickhouseSSHKeyAuthentication, SourceClickhousePasswordAuthentication] diff --git a/src/airbyte_api/models/source_datadog.py b/src/airbyte_api/models/source_datadog.py new file mode 100644 index 00000000..fa85effb --- /dev/null +++ b/src/airbyte_api/models/source_datadog.py @@ -0,0 +1,66 @@ +"""Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final, List, Optional + + +class SourceDatadogDataSource(str, Enum): + r"""A data source that is powered by the platform.""" + METRICS = 'metrics' + CLOUD_COST = 'cloud_cost' + LOGS = 'logs' + RUM = 'rum' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class Queries: + data_source: SourceDatadogDataSource = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('data_source') }}) + r"""A data source that is powered by the platform.""" + name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name') }}) + r"""The variable name for use in queries.""" + query: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('query') }}) + r"""A classic query string.""" + + + + +class Site(str, Enum): + r"""The site where Datadog data resides in.""" + DATADOGHQ_COM = 'datadoghq.com' + US3_DATADOGHQ_COM = 'us3.datadoghq.com' + US5_DATADOGHQ_COM = 'us5.datadoghq.com' + DATADOGHQ_EU = 'datadoghq.eu' + DDOG_GOV_COM = 'ddog-gov.com' + + +class Datadog(str, Enum): + DATADOG = 'datadog' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceDatadog: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + r"""Datadog API key""" + application_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('application_key') }}) + r"""Datadog application key""" + end_date: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date'), 'exclude': lambda f: f is None }}) + r"""UTC date and time in the format 2017-01-25T00:00:00Z. Data after this date will not be replicated. An empty value will represent the current datetime for each execution. This just applies to Incremental syncs.""" + max_records_per_request: Optional[int] = dataclasses.field(default=5000, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('max_records_per_request'), 'exclude': lambda f: f is None }}) + r"""Maximum number of records to collect per request.""" + queries: Optional[List[Queries]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('queries'), 'exclude': lambda f: f is None }}) + r"""List of queries to be run and used as inputs.""" + query: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('query'), 'exclude': lambda f: f is None }}) + r"""The search query. This just applies to Incremental syncs. If empty, it'll collect all logs.""" + site: Optional[Site] = dataclasses.field(default=Site.DATADOGHQ_COM, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('site'), 'exclude': lambda f: f is None }}) + r"""The site where Datadog data resides in.""" + SOURCE_TYPE: Final[Datadog] = dataclasses.field(default=Datadog.DATADOG, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + start_date: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'exclude': lambda f: f is None }}) + r"""UTC date and time in the format 2017-01-25T00:00:00Z. 
Any data before this date will not be replicated. This just applies to Incremental syncs.""" + + diff --git a/src/airbyte_api/models/source_dynamodb.py b/src/airbyte_api/models/source_dynamodb.py index b6f55174..842318a9 100644 --- a/src/airbyte_api/models/source_dynamodb.py +++ b/src/airbyte_api/models/source_dynamodb.py @@ -15,7 +15,6 @@ class SourceDynamodbSchemasAuthType(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class RoleBasedAuthentication: - UNSET='__SPEAKEASY_UNSET__' additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) AUTH_TYPE: Final[Optional[SourceDynamodbSchemasAuthType]] = dataclasses.field(default=SourceDynamodbSchemasAuthType.ROLE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }}) @@ -29,7 +28,6 @@ class SourceDynamodbAuthType(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class AuthenticateViaAccessKeys: - UNSET='__SPEAKEASY_UNSET__' access_key_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_key_id') }}) r"""The access key id to access Dynamodb. Airbyte requires read permissions to the database""" secret_access_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('secret_access_key') }}) @@ -39,8 +37,6 @@ class AuthenticateViaAccessKeys: -SourceDynamodbCredentials = Union['AuthenticateViaAccessKeys', 'RoleBasedAuthentication'] - class SourceDynamodbDynamodbRegion(str, Enum): r"""The region of the Dynamodb database""" @@ -101,3 +97,5 @@ class SourceDynamodb: SOURCE_TYPE: Final[Optional[SourceDynamodbDynamodb]] = dataclasses.field(default=SourceDynamodbDynamodb.DYNAMODB, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType'), 'exclude': lambda f: f is None }}) + +SourceDynamodbCredentials = Union[AuthenticateViaAccessKeys, RoleBasedAuthentication] diff --git a/src/airbyte_api/models/source_e2e_test_cloud.py b/src/airbyte_api/models/source_e2e_test_cloud.py index 9fcf304d..064ea73d 100644 --- a/src/airbyte_api/models/source_e2e_test_cloud.py +++ b/src/airbyte_api/models/source_e2e_test_cloud.py @@ -41,8 +41,6 @@ class SingleSchema: -MockCatalog = Union['SingleSchema', 'MultiSchema'] - class E2eTestCloud(str, Enum): E2E_TEST_CLOUD = 'e2e-test-cloud' @@ -55,7 +53,6 @@ class Type(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class ContinuousFeed: - UNSET='__SPEAKEASY_UNSET__' mock_catalog: MockCatalog = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mock_catalog') }}) additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) max_messages: Optional[int] = dataclasses.field(default=100, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('max_messages'), 'exclude': lambda f: f is None }}) @@ -69,4 +66,6 @@ class ContinuousFeed: -SourceE2eTestCloud = Union['ContinuousFeed'] +MockCatalog = Union[SingleSchema, MultiSchema] + +SourceE2eTestCloud = Union[ContinuousFeed] diff --git a/src/airbyte_api/models/source_facebook_marketing.py b/src/airbyte_api/models/source_facebook_marketing.py index dce2290d..86e8ccc4 100644 --- a/src/airbyte_api/models/source_facebook_marketing.py +++ b/src/airbyte_api/models/source_facebook_marketing.py @@ -7,7 +7,7 @@ from 
dataclasses_json import Undefined, dataclass_json from datetime import datetime from enum import Enum -from typing import Final, List, Optional +from typing import Final, List, Optional, Union class ValidAdStatuses(str, Enum): @@ -47,6 +47,38 @@ class ValidCampaignStatuses(str, Enum): WITH_ISSUES = 'WITH_ISSUES' +class SourceFacebookMarketingSchemasAuthType(str, Enum): + SERVICE = 'Service' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class ServiceAccountKeyAuthentication: + access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }}) + r"""The value of the generated access token. From your App’s Dashboard, click on \\"Marketing API\\" then \\"Tools\\". Select permissions ads_management, ads_read, read_insights, business_management. Then click on \\"Get token\\". See the docs for more information.""" + AUTH_TYPE: Final[Optional[SourceFacebookMarketingSchemasAuthType]] = dataclasses.field(default=SourceFacebookMarketingSchemasAuthType.SERVICE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }}) + + + + +class SourceFacebookMarketingAuthType(str, Enum): + CLIENT = 'Client' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class AuthenticateViaFacebookMarketingOauth: + client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }}) + r"""Client ID for the Facebook Marketing API""" + client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }}) + r"""Client Secret for the Facebook Marketing API""" + access_token: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token'), 'exclude': lambda f: f is None }}) + r"""The value of the generated access token. From your App’s Dashboard, click on \\"Marketing API\\" then \\"Tools\\". Select permissions ads_management, ads_read, read_insights, business_management. Then click on \\"Get token\\". 
See the docs for more information.""" + AUTH_TYPE: Final[Optional[SourceFacebookMarketingAuthType]] = dataclasses.field(default=SourceFacebookMarketingAuthType.CLIENT, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }}) + + + + class ValidActionBreakdowns(str, Enum): r"""An enumeration.""" ACTION_CANVAS_COMPONENT_NAME = 'action_canvas_component_name' @@ -130,7 +162,6 @@ class SourceFacebookMarketingValidEnums(str, Enum): ADSET_END = 'adset_end' ADSET_ID = 'adset_id' ADSET_NAME = 'adset_name' - ADSET_START = 'adset_start' AGE_TARGETING = 'age_targeting' ATTRIBUTION_SETTING = 'attribution_setting' AUCTION_BID = 'auction_bid' @@ -147,7 +178,6 @@ class SourceFacebookMarketingValidEnums(str, Enum): CATALOG_SEGMENT_VALUE_OMNI_PURCHASE_ROAS = 'catalog_segment_value_omni_purchase_roas' CATALOG_SEGMENT_VALUE_WEBSITE_PURCHASE_ROAS = 'catalog_segment_value_website_purchase_roas' CLICKS = 'clicks' - CONVERSION_LEAD_RATE = 'conversion_lead_rate' CONVERSION_RATE_RANKING = 'conversion_rate_ranking' CONVERSION_VALUES = 'conversion_values' CONVERSIONS = 'conversions' @@ -158,7 +188,6 @@ class SourceFacebookMarketingValidEnums(str, Enum): COST_PER_ACTION_TYPE = 'cost_per_action_type' COST_PER_AD_CLICK = 'cost_per_ad_click' COST_PER_CONVERSION = 'cost_per_conversion' - COST_PER_CONVERSION_LEAD = 'cost_per_conversion_lead' COST_PER_DDA_COUNTBY_CONVS = 'cost_per_dda_countby_convs' COST_PER_ESTIMATED_AD_RECALLERS = 'cost_per_estimated_ad_recallers' COST_PER_INLINE_LINK_CLICK = 'cost_per_inline_link_click' @@ -287,7 +316,7 @@ class InsightConfig: start_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'exclude': lambda f: f is None }}) r"""The date from which you'd like to replicate data for this stream, in the format YYYY-MM-DDT00:00:00Z.""" time_increment: Optional[int] = dataclasses.field(default=1, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('time_increment'), 'exclude': lambda f: f is None }}) - r"""Time window in days by which to aggregate statistics. The sync will be chunked into N day intervals, where N is the number of days you specified. For example, if you set this value to 7, then all statistics will be reported as 7-day aggregates by starting from the start_date. If the start and end dates are October 1st and October 30th, then the connector will output 5 records: 01 - 06, 07 - 13, 14 - 20, 21 - 27, and 28 - 30 (3 days only).""" + r"""Time window in days by which to aggregate statistics. The sync will be chunked into N day intervals, where N is the number of days you specified. For example, if you set this value to 7, then all statistics will be reported as 7-day aggregates by starting from the start_date. If the start and end dates are October 1st and October 30th, then the connector will output 5 records: 01 - 06, 07 - 13, 14 - 20, 21 - 27, and 28 - 30 (3 days only). The minimum allowed value for this field is 1, and the maximum is 89.""" @@ -299,10 +328,10 @@ class SourceFacebookMarketingFacebookMarketing(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class SourceFacebookMarketing: - access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }}) - r"""The value of the generated access token. 
From your App’s Dashboard, click on \\"Marketing API\\" then \\"Tools\\". Select permissions ads_management, ads_read, read_insights, business_management. Then click on \\"Get token\\". See the docs for more information.""" account_ids: List[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('account_ids') }}) r"""The Facebook Ad account ID(s) to pull data from. The Ad account ID number is in the account dropdown menu or in your browser's address bar of your Meta Ads Manager. See the docs for more information.""" + access_token: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token'), 'exclude': lambda f: f is None }}) + r"""The value of the generated access token. From your App’s Dashboard, click on \\"Marketing API\\" then \\"Tools\\". Select permissions ads_management, ads_read, read_insights, business_management. Then click on \\"Get token\\". See the docs for more information.""" action_breakdowns_allow_empty: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('action_breakdowns_allow_empty'), 'exclude': lambda f: f is None }}) r"""Allows action_breakdowns to be an empty list""" ad_statuses: Optional[List[ValidAdStatuses]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ad_statuses'), 'exclude': lambda f: f is None }}) @@ -315,6 +344,8 @@ class SourceFacebookMarketing: r"""The Client Id for your OAuth app""" client_secret: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret'), 'exclude': lambda f: f is None }}) r"""The Client Secret for your OAuth app""" + credentials: Optional[SourceFacebookMarketingAuthentication] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }}) + r"""Credentials for connecting to the Facebook Marketing API""" custom_insights: Optional[List[InsightConfig]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('custom_insights'), 'exclude': lambda f: f is None }}) r"""A list which contains ad statistics entries, each entry must have a name and can contains fields, breakdowns or action_breakdowns. Click on \\"add\\" to fill this field.""" end_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'exclude': lambda f: f is None }}) @@ -332,3 +363,5 @@ class SourceFacebookMarketing: r"""The date from which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DDT00:00:00Z. 
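For the source_facebook_marketing.py change above: the top-level access_token becomes optional and a new credentials field accepts either OAuth client credentials or a long-lived access token, via the SourceFacebookMarketingAuthentication union emitted at the end of the module (shown just below). A minimal, hypothetical construction using only names from this diff; the ID, secret, and token values are placeholders.

from airbyte_api.models.source_facebook_marketing import (
    AuthenticateViaFacebookMarketingOauth,
    SourceFacebookMarketing,
)

facebook_config = SourceFacebookMarketing(
    account_ids=['1234567890'],  # Ad account IDs remain a required list
    credentials=AuthenticateViaFacebookMarketingOauth(
        client_id='fb-client-id',
        client_secret='fb-client-secret',
        access_token='long-lived-access-token',  # optional for the OAuth variant
    ),
)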
If not set then all data will be replicated for usual streams and only last 2 years for insight streams.""" + +SourceFacebookMarketingAuthentication = Union[AuthenticateViaFacebookMarketingOauth, ServiceAccountKeyAuthentication] diff --git a/src/airbyte_api/models/source_fauna.py b/src/airbyte_api/models/source_fauna.py index d220ed56..a9a2c4cd 100644 --- a/src/airbyte_api/models/source_fauna.py +++ b/src/airbyte_api/models/source_fauna.py @@ -33,8 +33,6 @@ class Disabled: -DeletionMode = Union['Disabled', 'Enabled'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -75,3 +73,5 @@ class SourceFauna: SOURCE_TYPE: Final[Fauna] = dataclasses.field(default=Fauna.FAUNA, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + +DeletionMode = Union[Disabled, Enabled] diff --git a/src/airbyte_api/models/source_file.py b/src/airbyte_api/models/source_file.py index a1199383..275f4e9b 100644 --- a/src/airbyte_api/models/source_file.py +++ b/src/airbyte_api/models/source_file.py @@ -130,8 +130,6 @@ class HTTPSPublicWeb: -StorageProvider = Union['HTTPSPublicWeb', 'GCSGoogleCloudStorage', 'SourceFileS3AmazonWebServices', 'AzBlobAzureBlobStorage', 'SSHSecureShell', 'SCPSecureCopyProtocol', 'SFTPSecureFileTransferProtocol'] - class File(str, Enum): FILE = 'file' @@ -153,3 +151,5 @@ class SourceFile: SOURCE_TYPE: Final[File] = dataclasses.field(default=File.FILE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + +StorageProvider = Union[HTTPSPublicWeb, GCSGoogleCloudStorage, SourceFileS3AmazonWebServices, AzBlobAzureBlobStorage, SSHSecureShell, SCPSecureCopyProtocol, SFTPSecureFileTransferProtocol] diff --git a/src/airbyte_api/models/source_fleetio.py b/src/airbyte_api/models/source_fleetio.py new file mode 100644 index 00000000..cbdd2d93 --- /dev/null +++ b/src/airbyte_api/models/source_fleetio.py @@ -0,0 +1,22 @@ +"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final + + +class Fleetio(str, Enum): + FLEETIO = 'fleetio' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceFleetio: + account_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('account_token') }}) + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + SOURCE_TYPE: Final[Fleetio] = dataclasses.field(default=Fleetio.FLEETIO, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_gcs.py b/src/airbyte_api/models/source_gcs.py index 145fa353..e3a4207a 100644 --- a/src/airbyte_api/models/source_gcs.py +++ b/src/airbyte_api/models/source_gcs.py @@ -55,8 +55,6 @@ class SourceGcsFromCSV: -SourceGcsCSVHeaderDefinition = Union['SourceGcsFromCSV', 'SourceGcsAutogenerated', 'SourceGcsUserProvided'] - class SourceGcsInferenceType(str, Enum): r"""How to infer the types of the columns. 
If none, inference default to strings.""" @@ -99,8 +97,6 @@ class SourceGcsCSVFormat: -SourceGcsFormat = Union['SourceGcsCSVFormat'] - class SourceGcsValidationPolicy(str, Enum): r"""The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.""" @@ -152,3 +148,7 @@ class SourceGcs: r"""UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.""" + +SourceGcsCSVHeaderDefinition = Union[SourceGcsFromCSV, SourceGcsAutogenerated, SourceGcsUserProvided] + +SourceGcsFormat = Union[SourceGcsCSVFormat] diff --git a/src/airbyte_api/models/source_github.py b/src/airbyte_api/models/source_github.py index 0f677877..c852ce7e 100644 --- a/src/airbyte_api/models/source_github.py +++ b/src/airbyte_api/models/source_github.py @@ -41,8 +41,6 @@ class OAuth: -SourceGithubAuthentication = Union['OAuth', 'SourceGithubPersonalAccessToken'] - class SourceGithubGithub(str, Enum): GITHUB = 'github' @@ -70,3 +68,5 @@ class SourceGithub: r"""The date from which you'd like to replicate data from GitHub in the format YYYY-MM-DDT00:00:00Z. If the date is not set, all data will be replicated. For the streams which support this configuration, only data generated on or after the start date will be replicated. This field doesn't apply to all streams, see the docs for more info""" + +SourceGithubAuthentication = Union[OAuth, SourceGithubPersonalAccessToken] diff --git a/src/airbyte_api/models/source_gitlab.py b/src/airbyte_api/models/source_gitlab.py index 40ddcbc2..e4012cdd 100644 --- a/src/airbyte_api/models/source_gitlab.py +++ b/src/airbyte_api/models/source_gitlab.py @@ -45,8 +45,6 @@ class SourceGitlabOAuth20: -SourceGitlabAuthorizationMethod = Union['SourceGitlabOAuth20', 'PrivateToken'] - class SourceGitlabGitlab(str, Enum): GITLAB = 'gitlab' @@ -71,3 +69,5 @@ class SourceGitlab: r"""The date from which you'd like to replicate data for GitLab API, in the format YYYY-MM-DDT00:00:00Z. Optional. If not set, all data will be replicated. All data generated after this date will be replicated.""" + +SourceGitlabAuthorizationMethod = Union[SourceGitlabOAuth20, PrivateToken] diff --git a/src/airbyte_api/models/source_goldcast.py b/src/airbyte_api/models/source_goldcast.py new file mode 100644 index 00000000..55198b79 --- /dev/null +++ b/src/airbyte_api/models/source_goldcast.py @@ -0,0 +1,22 @@ +"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final + + +class Goldcast(str, Enum): + GOLDCAST = 'goldcast' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceGoldcast: + access_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_key') }}) + r"""Your API Access Key. See here. 
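The two small connector models introduced above (source_fleetio.py and source_goldcast.py) only take credential strings. A hypothetical construction sketch with placeholder values, using the class and field names from this diff:

from airbyte_api.models.source_fleetio import SourceFleetio
from airbyte_api.models.source_goldcast import SourceGoldcast

fleetio_config = SourceFleetio(
    account_token='fleetio-account-token',  # placeholder
    api_key='fleetio-api-key',              # placeholder
)
goldcast_config = SourceGoldcast(access_key='goldcast-api-access-key')  # placeholder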
The key is case sensitive.""" + SOURCE_TYPE: Final[Goldcast] = dataclasses.field(default=Goldcast.GOLDCAST, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_google_analytics_data_api.py b/src/airbyte_api/models/source_google_analytics_data_api.py index 19c5dbab..e60379e1 100644 --- a/src/airbyte_api/models/source_google_analytics_data_api.py +++ b/src/airbyte_api/models/source_google_analytics_data_api.py @@ -15,7 +15,7 @@ class SourceGoogleAnalyticsDataAPISchemasAuthType(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass -class ServiceAccountKeyAuthentication: +class SourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication: credentials_json: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials_json') }}) r"""The JSON key linked to the service account used for authorization. For steps on obtaining this key, refer to the setup guide.""" AUTH_TYPE: Final[Optional[SourceGoogleAnalyticsDataAPISchemasAuthType]] = dataclasses.field(default=SourceGoogleAnalyticsDataAPISchemasAuthType.SERVICE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }}) @@ -42,8 +42,6 @@ class AuthenticateViaGoogleOauth: -SourceGoogleAnalyticsDataAPICredentials = Union['AuthenticateViaGoogleOauth', 'ServiceAccountKeyAuthentication'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -129,8 +127,6 @@ class SourceGoogleAnalyticsDataAPIDisabled: -CohortReports = Union['SourceGoogleAnalyticsDataAPIDisabled', 'SourceGoogleAnalyticsDataAPISchemasEnabled'] - class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterName(str, Enum): BETWEEN_FILTER = 'betweenFilter' @@ -161,8 +157,6 @@ class SourceGoogleAnalyticsDataAPIInt64Value: -FromValue = Union['SourceGoogleAnalyticsDataAPIInt64Value', 'SourceGoogleAnalyticsDataAPIDoubleValue'] - class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValueType(str, Enum): DOUBLE_VALUE = 'doubleValue' @@ -189,8 +183,6 @@ class SourceGoogleAnalyticsDataAPISchemasInt64Value: -ToValue = Union['SourceGoogleAnalyticsDataAPISchemasInt64Value', 'SourceGoogleAnalyticsDataAPISchemasDoubleValue'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -240,8 +232,6 @@ class Int64Value: -Value = Union['Int64Value', 'DoubleValue'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -291,8 +281,6 @@ class StringFilter: -SourceGoogleAnalyticsDataAPISchemasFilter = Union['StringFilter', 'InListFilter', 'NumericFilter', 'BetweenFilter'] - class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterType(str, Enum): FILTER = 'filter' @@ -338,8 +326,6 @@ class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimens -SourceGoogleAnalyticsDataAPISchemasFromValue = Union['SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value', 'SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue'] - class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ToValueValueType(str, Enum): DOUBLE_VALUE = 'doubleValue' @@ -366,8 +352,6 @@ class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimens -SourceGoogleAnalyticsDataAPISchemasToValue = 
Union['SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value', 'SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -417,8 +401,6 @@ class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimens -SourceGoogleAnalyticsDataAPISchemasValue = Union['SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value', 'SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -468,8 +450,6 @@ class SourceGoogleAnalyticsDataAPISchemasStringFilter: -SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter = Union['SourceGoogleAnalyticsDataAPISchemasStringFilter', 'SourceGoogleAnalyticsDataAPISchemasInListFilter', 'SourceGoogleAnalyticsDataAPISchemasNumericFilter', 'SourceGoogleAnalyticsDataAPISchemasBetweenFilter'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -523,8 +503,6 @@ class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64V -SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValue = Union['SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value', 'SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue'] - class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ToValueValueType(str, Enum): DOUBLE_VALUE = 'doubleValue' @@ -551,8 +529,6 @@ class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimens -SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterToValue = Union['SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value', 'SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -602,8 +578,6 @@ class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimens -SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValue = Union['SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value', 'SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -653,8 +627,6 @@ class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterString -SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter = Union['SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter', 'SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter', 'SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter', 'SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -708,8 +680,6 @@ class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimens -SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFromValue = Union['SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value', 
'SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue'] - class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ToValueValueType(str, Enum): DOUBLE_VALUE = 'doubleValue' @@ -736,8 +706,6 @@ class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimens -SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayToValue = Union['SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value', 'SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -787,8 +755,6 @@ class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimens -SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValue = Union['SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value', 'SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -838,8 +804,6 @@ class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter: -SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Filter = Union['SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter', 'SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter', 'SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter', 'SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -863,8 +827,6 @@ class AndGroup: -DimensionsFilter = Union['AndGroup', 'OrGroup', 'NotExpression', 'Filter'] - class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterName(str, Enum): BETWEEN_FILTER = 'betweenFilter' @@ -895,8 +857,6 @@ class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Valu -SourceGoogleAnalyticsDataAPIFromValue = Union['SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value', 'SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue'] - class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilter4ValueType(str, Enum): DOUBLE_VALUE = 'doubleValue' @@ -923,8 +883,6 @@ class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFi -SourceGoogleAnalyticsDataAPIToValue = Union['SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value', 'SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -974,8 +932,6 @@ class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value: -SourceGoogleAnalyticsDataAPIValue = Union['SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value', 'SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -1025,8 +981,6 @@ class SourceGoogleAnalyticsDataAPIStringFilter: -SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilter = Union['SourceGoogleAnalyticsDataAPIStringFilter', 'SourceGoogleAnalyticsDataAPIInListFilter', 'SourceGoogleAnalyticsDataAPINumericFilter', 'SourceGoogleAnalyticsDataAPIBetweenFilter'] - class 
SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterType(str, Enum): FILTER = 'filter' @@ -1072,8 +1026,6 @@ class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFi -SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue = Union['SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value', 'SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue'] - class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ToValueValueType(str, Enum): DOUBLE_VALUE = 'doubleValue' @@ -1100,8 +1052,6 @@ class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFi -SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue = Union['SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value', 'SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -1151,8 +1101,6 @@ class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFi -SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Value = Union['SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value', 'SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -1202,8 +1150,6 @@ class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFi -SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Filter = Union['SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter', 'SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter', 'SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter', 'SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -1257,8 +1203,6 @@ class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFi -SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFromValue = Union['SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value', 'SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue'] - class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ToValueValueType(str, Enum): DOUBLE_VALUE = 'doubleValue' @@ -1285,8 +1229,6 @@ class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFi -SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterToValue = Union['SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value', 'SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -1336,8 +1278,6 @@ class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFi -SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValue = 
Union['SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value', 'SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -1387,8 +1327,6 @@ class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFi -SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilter = Union['SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter', 'SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter', 'SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter', 'SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -1442,8 +1380,6 @@ class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFi -SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFromValue = Union['SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value', 'SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue'] - class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterValueType(str, Enum): DOUBLE_VALUE = 'doubleValue' @@ -1470,8 +1406,6 @@ class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFi -SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterToValue = Union['SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value', 'SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -1521,8 +1455,6 @@ class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFi -SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValue = Union['SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value', 'SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -1572,8 +1504,6 @@ class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFil -SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilter = Union['SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter', 'SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter', 'SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter', 'SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -1597,8 +1527,6 @@ class SourceGoogleAnalyticsDataAPIAndGroup: -MetricsFilter = Union['SourceGoogleAnalyticsDataAPIAndGroup', 'SourceGoogleAnalyticsDataAPIOrGroup', 'SourceGoogleAnalyticsDataAPINotExpression', 'SourceGoogleAnalyticsDataAPIFilter'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -1638,8 +1566,82 @@ class SourceGoogleAnalyticsDataAPI: r"""The start date from which to replicate report data in the format YYYY-MM-DD. Data generated before this date will not be included in the report. 
Not applied to custom Cohort reports.""" keep_empty_rows: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('keep_empty_rows'), 'exclude': lambda f: f is None }}) r"""If false, each row with all metrics equal to 0 will not be returned. If true, these rows will be returned if they are not separately removed by a filter. More information is available in the documentation.""" + lookback_window: Optional[int] = dataclasses.field(default=2, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('lookback_window'), 'exclude': lambda f: f is None }}) + r"""Since attribution changes after the event date, and Google Analytics has a data processing latency, we should specify how many days in the past we should refresh the data in every run. So if you set it at 5 days, in every sync it will fetch the last bookmark date minus 5 days.""" SOURCE_TYPE: Final[SourceGoogleAnalyticsDataAPIGoogleAnalyticsDataAPI] = dataclasses.field(default=SourceGoogleAnalyticsDataAPIGoogleAnalyticsDataAPI.GOOGLE_ANALYTICS_DATA_API, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) window_in_days: Optional[int] = dataclasses.field(default=1, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('window_in_days'), 'exclude': lambda f: f is None }}) r"""The interval in days for each data request made to the Google Analytics API. A larger value speeds up data sync, but increases the chance of data sampling, which may result in inaccuracies. We recommend a value of 1 to minimize sampling, unless speed is an absolute priority over accuracy. Acceptable values range from 1 to 364. Does not apply to custom Cohort reports. More information is available in the documentation.""" + +SourceGoogleAnalyticsDataAPICredentials = Union[AuthenticateViaGoogleOauth, SourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication] + +CohortReports = Union[SourceGoogleAnalyticsDataAPIDisabled, SourceGoogleAnalyticsDataAPISchemasEnabled] + +FromValue = Union[SourceGoogleAnalyticsDataAPIInt64Value, SourceGoogleAnalyticsDataAPIDoubleValue] + +ToValue = Union[SourceGoogleAnalyticsDataAPISchemasInt64Value, SourceGoogleAnalyticsDataAPISchemasDoubleValue] + +Value = Union[Int64Value, DoubleValue] + +SourceGoogleAnalyticsDataAPISchemasFilter = Union[StringFilter, InListFilter, NumericFilter, BetweenFilter] + +SourceGoogleAnalyticsDataAPISchemasFromValue = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue] + +SourceGoogleAnalyticsDataAPISchemasToValue = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue] + +SourceGoogleAnalyticsDataAPISchemasValue = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue] + +SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter = Union[SourceGoogleAnalyticsDataAPISchemasStringFilter, SourceGoogleAnalyticsDataAPISchemasInListFilter, SourceGoogleAnalyticsDataAPISchemasNumericFilter, SourceGoogleAnalyticsDataAPISchemasBetweenFilter] + 
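On the lookback_window field added to SourceGoogleAnalyticsDataAPI above: it defaults to 2, and per its description each incremental run re-reads data from the last bookmark minus that many days to absorb late attribution changes and processing latency. A small worked example of that semantics (the dates are placeholders, and the bookmark handling itself lives in the connector, not in this SDK):

from datetime import date, timedelta

lookback_window = 5                  # value taken from the connector config
bookmark = date(2024, 6, 10)         # placeholder last-synced date
refresh_from = bookmark - timedelta(days=lookback_window)
assert refresh_from == date(2024, 6, 5)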
+SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValue = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue] + +SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterToValue = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue] + +SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValue = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue] + +SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter] + +SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFromValue = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue] + +SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayToValue = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue] + +SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValue = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue] + +SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Filter = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter] + +DimensionsFilter = Union[AndGroup, OrGroup, NotExpression, Filter] + +SourceGoogleAnalyticsDataAPIFromValue = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue] + +SourceGoogleAnalyticsDataAPIToValue = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue] + +SourceGoogleAnalyticsDataAPIValue = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue] + +SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilter = Union[SourceGoogleAnalyticsDataAPIStringFilter, SourceGoogleAnalyticsDataAPIInListFilter, SourceGoogleAnalyticsDataAPINumericFilter, SourceGoogleAnalyticsDataAPIBetweenFilter] + +SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue = 
Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue] + +SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue] + +SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Value = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue] + +SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Filter = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter] + +SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFromValue = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue] + +SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterToValue = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue] + +SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValue = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue] + +SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilter = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter] + +SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFromValue = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue] + +SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterToValue = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue] + +SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValue = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue] + 
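A note on the block of type aliases being added here, which matches the pattern applied across most modules in this change: the Union aliases previously sat above the classes they referenced and had to use quoted forward references; they are now emitted after all class definitions, so their members are the classes themselves. A small, hypothetical introspection sketch against the Google Analytics Data API module from this diff:

import typing

from airbyte_api.models import source_google_analytics_data_api as ga

# With the aliases relocated below the class definitions, get_args() yields the
# actual classes rather than unevaluated forward-reference strings.
credential_types = typing.get_args(ga.SourceGoogleAnalyticsDataAPICredentials)
assert ga.AuthenticateViaGoogleOauth in credential_types
assert ga.SourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication in credential_types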
+SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilter = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter] + +MetricsFilter = Union[SourceGoogleAnalyticsDataAPIAndGroup, SourceGoogleAnalyticsDataAPIOrGroup, SourceGoogleAnalyticsDataAPINotExpression, SourceGoogleAnalyticsDataAPIFilter] diff --git a/src/airbyte_api/models/source_google_analytics_v4_service_account_only.py b/src/airbyte_api/models/source_google_analytics_v4_service_account_only.py deleted file mode 100644 index 2bf0bb93..00000000 --- a/src/airbyte_api/models/source_google_analytics_v4_service_account_only.py +++ /dev/null @@ -1,49 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" - -from __future__ import annotations -import dataclasses -from airbyte_api import utils -from dataclasses_json import Undefined, dataclass_json -from datetime import date -from enum import Enum -from typing import Final, Optional, Union - - -class SourceGoogleAnalyticsV4ServiceAccountOnlyAuthType(str, Enum): - SERVICE = 'Service' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class SourceGoogleAnalyticsV4ServiceAccountOnlyServiceAccountKeyAuthentication: - credentials_json: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials_json') }}) - r"""The JSON key of the service account to use for authorization""" - AUTH_TYPE: Final[Optional[SourceGoogleAnalyticsV4ServiceAccountOnlyAuthType]] = dataclasses.field(default=SourceGoogleAnalyticsV4ServiceAccountOnlyAuthType.SERVICE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }}) - - - -SourceGoogleAnalyticsV4ServiceAccountOnlyCredentials = Union['SourceGoogleAnalyticsV4ServiceAccountOnlyServiceAccountKeyAuthentication'] - - -class GoogleAnalyticsV4ServiceAccountOnly(str, Enum): - GOOGLE_ANALYTICS_V4_SERVICE_ACCOUNT_ONLY = 'google-analytics-v4-service-account-only' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class SourceGoogleAnalyticsV4ServiceAccountOnly: - start_date: date = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.dateisoformat(False), 'decoder': utils.datefromisoformat }}) - r"""The date in the format YYYY-MM-DD. Any data before this date will not be replicated.""" - view_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('view_id') }}) - r"""The ID for the Google Analytics View you want to fetch data from. This can be found from the Google Analytics Account Explorer.""" - credentials: Optional[SourceGoogleAnalyticsV4ServiceAccountOnlyCredentials] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }}) - r"""Credentials for the service""" - custom_reports: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('custom_reports'), 'exclude': lambda f: f is None }}) - r"""A JSON array describing the custom reports you want to sync from Google Analytics. 
See the docs for more information about the exact format you can use to fill out this field.""" - end_date: Optional[date] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date'), 'encoder': utils.dateisoformat(True), 'decoder': utils.datefromisoformat, 'exclude': lambda f: f is None }}) - r"""The date in the format YYYY-MM-DD. Any data after this date will not be replicated.""" - SOURCE_TYPE: Final[GoogleAnalyticsV4ServiceAccountOnly] = dataclasses.field(default=GoogleAnalyticsV4ServiceAccountOnly.GOOGLE_ANALYTICS_V4_SERVICE_ACCOUNT_ONLY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) - window_in_days: Optional[int] = dataclasses.field(default=1, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('window_in_days'), 'exclude': lambda f: f is None }}) - r"""The time increment used by the connector when requesting data from the Google Analytics API. More information is available in the the docs. The bigger this value is, the faster the sync will be, but the more likely that sampling will be applied to your data, potentially causing inaccuracies in the returned results. We recommend setting this to 1 unless you have a hard requirement to make the sync faster at the expense of accuracy. The minimum allowed value for this field is 1, and the maximum is 364.""" - - diff --git a/src/airbyte_api/models/source_google_directory.py b/src/airbyte_api/models/source_google_directory.py index f3aceee5..cd10b222 100644 --- a/src/airbyte_api/models/source_google_directory.py +++ b/src/airbyte_api/models/source_google_directory.py @@ -47,8 +47,6 @@ class SignInViaGoogleOAuth: -SourceGoogleDirectoryGoogleCredentials = Union['SignInViaGoogleOAuth', 'ServiceAccountKey'] - class GoogleDirectory(str, Enum): GOOGLE_DIRECTORY = 'google-directory' @@ -62,3 +60,5 @@ class SourceGoogleDirectory: SOURCE_TYPE: Final[GoogleDirectory] = dataclasses.field(default=GoogleDirectory.GOOGLE_DIRECTORY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + +SourceGoogleDirectoryGoogleCredentials = Union[SignInViaGoogleOAuth, ServiceAccountKey] diff --git a/src/airbyte_api/models/source_google_drive.py b/src/airbyte_api/models/source_google_drive.py index 4986569e..e4591eba 100644 --- a/src/airbyte_api/models/source_google_drive.py +++ b/src/airbyte_api/models/source_google_drive.py @@ -41,8 +41,6 @@ class SourceGoogleDriveAuthenticateViaGoogleOAuth: -SourceGoogleDriveAuthentication = Union['SourceGoogleDriveAuthenticateViaGoogleOAuth', 'SourceGoogleDriveServiceAccountKeyAuthentication'] - class SourceGoogleDriveGoogleDrive(str, Enum): GOOGLE_DRIVE = 'google-drive' @@ -64,8 +62,6 @@ class SourceGoogleDriveLocal: -SourceGoogleDriveProcessing = Union['SourceGoogleDriveLocal'] - class SourceGoogleDriveParsingStrategy(str, Enum): r"""The strategy used to parse documents. `fast` extracts text directly from the document which doesn't work for all files. `ocr_only` is more reliable, but slower. `hi_res` is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. 
See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf""" @@ -157,8 +153,6 @@ class SourceGoogleDriveFromCSV: -SourceGoogleDriveCSVHeaderDefinition = Union['SourceGoogleDriveFromCSV', 'SourceGoogleDriveAutogenerated', 'SourceGoogleDriveUserProvided'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -207,8 +201,6 @@ class SourceGoogleDriveAvroFormat: -SourceGoogleDriveFormat = Union['SourceGoogleDriveAvroFormat', 'SourceGoogleDriveCSVFormat', 'SourceGoogleDriveJsonlFormat', 'SourceGoogleDriveParquetFormat', 'SourceGoogleDriveDocumentFileTypeFormatExperimental'] - class SourceGoogleDriveValidationPolicy(str, Enum): r"""The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.""" @@ -257,3 +249,11 @@ class SourceGoogleDrive: r"""UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.""" + +SourceGoogleDriveAuthentication = Union[SourceGoogleDriveAuthenticateViaGoogleOAuth, SourceGoogleDriveServiceAccountKeyAuthentication] + +SourceGoogleDriveProcessing = Union[SourceGoogleDriveLocal] + +SourceGoogleDriveCSVHeaderDefinition = Union[SourceGoogleDriveFromCSV, SourceGoogleDriveAutogenerated, SourceGoogleDriveUserProvided] + +SourceGoogleDriveFormat = Union[SourceGoogleDriveAvroFormat, SourceGoogleDriveCSVFormat, SourceGoogleDriveJsonlFormat, SourceGoogleDriveParquetFormat, SourceGoogleDriveDocumentFileTypeFormatExperimental] diff --git a/src/airbyte_api/models/source_google_search_console.py b/src/airbyte_api/models/source_google_search_console.py index ea695657..de914c53 100644 --- a/src/airbyte_api/models/source_google_search_console.py +++ b/src/airbyte_api/models/source_google_search_console.py @@ -45,8 +45,6 @@ class SourceGoogleSearchConsoleOAuth: -AuthenticationType = Union['SourceGoogleSearchConsoleOAuth', 'SourceGoogleSearchConsoleServiceAccountKeyAuthentication'] - class SourceGoogleSearchConsoleValidEnums(str, Enum): r"""An enumeration of dimensions.""" @@ -97,3 +95,5 @@ class SourceGoogleSearchConsole: r"""UTC date in the format YYYY-MM-DD. 
Any data before this date will not be replicated.""" + +AuthenticationType = Union[SourceGoogleSearchConsoleOAuth, SourceGoogleSearchConsoleServiceAccountKeyAuthentication] diff --git a/src/airbyte_api/models/source_google_sheets.py b/src/airbyte_api/models/source_google_sheets.py index 597ed6b1..2aab7418 100644 --- a/src/airbyte_api/models/source_google_sheets.py +++ b/src/airbyte_api/models/source_google_sheets.py @@ -39,8 +39,6 @@ class SourceGoogleSheetsAuthenticateViaGoogleOAuth: -SourceGoogleSheetsAuthentication = Union['SourceGoogleSheetsAuthenticateViaGoogleOAuth', 'SourceGoogleSheetsServiceAccountKeyAuthentication'] - class SourceGoogleSheetsGoogleSheets(str, Enum): GOOGLE_SHEETS = 'google-sheets' @@ -60,3 +58,5 @@ class SourceGoogleSheets: SOURCE_TYPE: Final[SourceGoogleSheetsGoogleSheets] = dataclasses.field(default=SourceGoogleSheetsGoogleSheets.GOOGLE_SHEETS, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + +SourceGoogleSheetsAuthentication = Union[SourceGoogleSheetsAuthenticateViaGoogleOAuth, SourceGoogleSheetsServiceAccountKeyAuthentication] diff --git a/src/airbyte_api/models/source_harvest.py b/src/airbyte_api/models/source_harvest.py index 3358e656..e8ce4127 100644 --- a/src/airbyte_api/models/source_harvest.py +++ b/src/airbyte_api/models/source_harvest.py @@ -17,7 +17,6 @@ class SourceHarvestSchemasAuthType(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class SourceHarvestAuthenticateWithPersonalAccessToken: - UNSET='__SPEAKEASY_UNSET__' api_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_token') }}) r"""Log into Harvest and then create new personal access token.""" additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) @@ -33,7 +32,6 @@ class SourceHarvestAuthType(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class AuthenticateViaHarvestOAuth: - UNSET='__SPEAKEASY_UNSET__' client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }}) r"""The Client ID of your Harvest developer application.""" client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }}) @@ -45,8 +43,6 @@ class AuthenticateViaHarvestOAuth: -SourceHarvestAuthenticationMechanism = Union['AuthenticateViaHarvestOAuth', 'SourceHarvestAuthenticateWithPersonalAccessToken'] - class Harvest(str, Enum): HARVEST = 'harvest' @@ -66,3 +62,5 @@ class SourceHarvest: SOURCE_TYPE: Final[Harvest] = dataclasses.field(default=Harvest.HARVEST, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + +SourceHarvestAuthenticationMechanism = Union[AuthenticateViaHarvestOAuth, SourceHarvestAuthenticateWithPersonalAccessToken] diff --git a/src/airbyte_api/models/source_hubspot.py b/src/airbyte_api/models/source_hubspot.py index 033450c0..56acecf4 100644 --- a/src/airbyte_api/models/source_hubspot.py +++ b/src/airbyte_api/models/source_hubspot.py @@ -45,8 +45,6 @@ class SourceHubspotOAuth: -SourceHubspotAuthentication = Union['SourceHubspotOAuth', 'PrivateApp'] - class SourceHubspotHubspot(str, Enum): HUBSPOT = 'hubspot' @@ -64,3 +62,5 @@ class SourceHubspot: r"""UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. 
If not set, \\"2006-06-01T00:00:00Z\\" (Hubspot creation date) will be used as start date. It's recommended to provide relevant to your data start date value to optimize synchronization.""" + +SourceHubspotAuthentication = Union[SourceHubspotOAuth, PrivateApp] diff --git a/src/airbyte_api/models/source_insightly.py b/src/airbyte_api/models/source_insightly.py index b01f5f7e..54bdd2bc 100644 --- a/src/airbyte_api/models/source_insightly.py +++ b/src/airbyte_api/models/source_insightly.py @@ -17,7 +17,7 @@ class Insightly(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class SourceInsightly: - start_date: Optional[datetime] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + start_date: Optional[datetime] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse }}) r"""The date from which you'd like to replicate data for Insightly in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated. Note that it will be used only for incremental streams.""" token: Optional[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('token') }}) r"""Your Insightly API token.""" diff --git a/src/airbyte_api/models/source_klarna.py b/src/airbyte_api/models/source_klarna.py index af4a0de7..090bce31 100644 --- a/src/airbyte_api/models/source_klarna.py +++ b/src/airbyte_api/models/source_klarna.py @@ -9,9 +9,9 @@ class SourceKlarnaRegion(str, Enum): - r"""Base url region (For playground eu https://docs.klarna.com/klarna-payments/api/payments-api/#tag/API-URLs). Supported 'eu', 'us', 'oc'""" + r"""Base url region (For playground eu https://docs.klarna.com/klarna-payments/api/payments-api/#tag/API-URLs). Supported 'eu', 'na', 'oc'""" EU = 'eu' - US = 'us' + NA = 'na' OC = 'oc' @@ -25,7 +25,7 @@ class SourceKlarna: password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password') }}) r"""A string which is associated with your Merchant ID and is used to authorize use of Klarna's APIs (https://developers.klarna.com/api/#authentication)""" region: SourceKlarnaRegion = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('region') }}) - r"""Base url region (For playground eu https://docs.klarna.com/klarna-payments/api/payments-api/#tag/API-URLs). Supported 'eu', 'us', 'oc'""" + r"""Base url region (For playground eu https://docs.klarna.com/klarna-payments/api/payments-api/#tag/API-URLs). 
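The Klarna region enum above replaces the previously accepted 'us' value with 'na'. Stored configurations that still carry 'us' will no longer map onto SourceKlarnaRegion; a defensive translation along these lines (a hypothetical helper, not part of the SDK) is one way to migrate them:

from airbyte_api.models.source_klarna import SourceKlarnaRegion

def parse_klarna_region(raw: str) -> SourceKlarnaRegion:
    # 'us' was removed from the enum in this change; map it to the new 'na' value.
    if raw == 'us':
        return SourceKlarnaRegion.NA
    return SourceKlarnaRegion(raw)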
Supported 'eu', 'na', 'oc'""" username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) r"""Consists of your Merchant ID (eid) - a unique number that identifies your e-store, combined with a random string (https://developers.klarna.com/api/#authentication)""" playground: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('playground'), 'exclude': lambda f: f is None }}) diff --git a/src/airbyte_api/models/source_lever_hiring.py b/src/airbyte_api/models/source_lever_hiring.py index 8c02f27a..8d013750 100644 --- a/src/airbyte_api/models/source_lever_hiring.py +++ b/src/airbyte_api/models/source_lever_hiring.py @@ -39,8 +39,6 @@ class AuthenticateViaLeverOAuth: -SourceLeverHiringAuthenticationMechanism = Union['AuthenticateViaLeverOAuth', 'AuthenticateViaLeverAPIKey'] - class SourceLeverHiringEnvironment(str, Enum): r"""The environment in which you'd like to replicate data for Lever. This is used to determine which Lever API endpoint to use.""" @@ -64,3 +62,5 @@ class SourceLeverHiring: SOURCE_TYPE: Final[SourceLeverHiringLeverHiring] = dataclasses.field(default=SourceLeverHiringLeverHiring.LEVER_HIRING, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + +SourceLeverHiringAuthenticationMechanism = Union[AuthenticateViaLeverOAuth, AuthenticateViaLeverAPIKey] diff --git a/src/airbyte_api/models/source_linkedin_ads.py b/src/airbyte_api/models/source_linkedin_ads.py index 51dd6665..307a1387 100644 --- a/src/airbyte_api/models/source_linkedin_ads.py +++ b/src/airbyte_api/models/source_linkedin_ads.py @@ -87,8 +87,6 @@ class SourceLinkedinAdsOAuth20: -SourceLinkedinAdsAuthentication = Union['SourceLinkedinAdsOAuth20', 'AccessToken'] - class SourceLinkedinAdsLinkedinAds(str, Enum): LINKEDIN_ADS = 'linkedin-ads' @@ -106,3 +104,5 @@ class SourceLinkedinAds: SOURCE_TYPE: Final[SourceLinkedinAdsLinkedinAds] = dataclasses.field(default=SourceLinkedinAdsLinkedinAds.LINKEDIN_ADS, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + +SourceLinkedinAdsAuthentication = Union[SourceLinkedinAdsOAuth20, AccessToken] diff --git a/src/airbyte_api/models/source_linkedin_pages.py b/src/airbyte_api/models/source_linkedin_pages.py index 1a7be320..c09265be 100644 --- a/src/airbyte_api/models/source_linkedin_pages.py +++ b/src/airbyte_api/models/source_linkedin_pages.py @@ -2,8 +2,10 @@ from __future__ import annotations import dataclasses +import dateutil.parser from airbyte_api import utils from dataclasses_json import Undefined, dataclass_json +from datetime import datetime from enum import Enum from typing import Final, Optional, Union @@ -39,13 +41,17 @@ class SourceLinkedinPagesOAuth20: -SourceLinkedinPagesAuthentication = Union['SourceLinkedinPagesOAuth20', 'SourceLinkedinPagesAccessToken'] - class LinkedinPages(str, Enum): LINKEDIN_PAGES = 'linkedin-pages' +class TimeGranularityType(str, Enum): + r"""Granularity of the statistics for metrics per time period. 
Must be either \\"DAY\\" or \\"MONTH\\" """ + DAY = 'DAY' + MONTH = 'MONTH' + + @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class SourceLinkedinPages: @@ -53,5 +59,11 @@ class SourceLinkedinPages: r"""Specify the Organization ID""" credentials: Optional[SourceLinkedinPagesAuthentication] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }}) SOURCE_TYPE: Final[LinkedinPages] = dataclasses.field(default=LinkedinPages.LINKEDIN_PAGES, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + start_date: Optional[datetime] = dataclasses.field(default=dateutil.parser.isoparse('2023-01-01T00:00:00Z'), metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'exclude': lambda f: f is None }}) + r"""Start date for getting metrics per time period. Must be atmost 12 months before the request date (UTC) and atleast 2 days prior to the request date (UTC). See https://bit.ly/linkedin-pages-date-rules {{\"{{\"}} \\"\n\\" }} {{\"{{\"}} response.errorDetails }}""" + time_granularity_type: Optional[TimeGranularityType] = dataclasses.field(default=TimeGranularityType.DAY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('time_granularity_type'), 'exclude': lambda f: f is None }}) + r"""Granularity of the statistics for metrics per time period. Must be either \\"DAY\\" or \\"MONTH\\" """ + +SourceLinkedinPagesAuthentication = Union[SourceLinkedinPagesOAuth20, SourceLinkedinPagesAccessToken] diff --git a/src/airbyte_api/models/source_mailchimp.py b/src/airbyte_api/models/source_mailchimp.py index 623867ba..31d5f6f3 100644 --- a/src/airbyte_api/models/source_mailchimp.py +++ b/src/airbyte_api/models/source_mailchimp.py @@ -41,8 +41,6 @@ class SourceMailchimpOAuth20: -SourceMailchimpAuthentication = Union['SourceMailchimpOAuth20', 'APIKey'] - class SourceMailchimpMailchimp(str, Enum): MAILCHIMP = 'mailchimp' @@ -59,3 +57,5 @@ class SourceMailchimp: r"""The date from which you want to start syncing data for Incremental streams. Only records that have been created or modified since this date will be synced. If left blank, all data will by synced.""" + +SourceMailchimpAuthentication = Union[SourceMailchimpOAuth20, APIKey] diff --git a/src/airbyte_api/models/source_microsoft_onedrive.py b/src/airbyte_api/models/source_microsoft_onedrive.py index bc788303..3231f892 100644 --- a/src/airbyte_api/models/source_microsoft_onedrive.py +++ b/src/airbyte_api/models/source_microsoft_onedrive.py @@ -55,8 +55,6 @@ class AuthenticateViaMicrosoftOAuth: -SourceMicrosoftOnedriveAuthentication = Union['AuthenticateViaMicrosoftOAuth', 'ServiceKeyAuthentication'] - class SearchScope(str, Enum): r"""Specifies the location(s) to search for files. Valid options are 'ACCESSIBLE_DRIVES' to search in the selected OneDrive drive, 'SHARED_ITEMS' for shared items the user has access to, and 'ALL' to search both.""" @@ -85,8 +83,6 @@ class SourceMicrosoftOnedriveLocal: -SourceMicrosoftOnedriveProcessing = Union['SourceMicrosoftOnedriveLocal'] - class SourceMicrosoftOnedriveParsingStrategy(str, Enum): r"""The strategy used to parse documents. `fast` extracts text directly from the document which doesn't work for all files. `ocr_only` is more reliable, but slower. 
`hi_res` is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf""" @@ -98,7 +94,7 @@ class SourceMicrosoftOnedriveParsingStrategy(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass -class SourceMicrosoftOnedriveDocumentFileTypeFormatExperimental: +class UnstructuredDocumentFormat: r"""Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one record per file.""" FILETYPE: Final[Optional[SourceMicrosoftOnedriveSchemasStreamsFormatFormatFiletype]] = dataclasses.field(default=SourceMicrosoftOnedriveSchemasStreamsFormatFormatFiletype.UNSTRUCTURED, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }}) processing: Optional[SourceMicrosoftOnedriveProcessing] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('processing'), 'exclude': lambda f: f is None }}) @@ -178,8 +174,6 @@ class SourceMicrosoftOnedriveFromCSV: -SourceMicrosoftOnedriveCSVHeaderDefinition = Union['SourceMicrosoftOnedriveFromCSV', 'SourceMicrosoftOnedriveAutogenerated', 'SourceMicrosoftOnedriveUserProvided'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -228,8 +222,6 @@ class SourceMicrosoftOnedriveAvroFormat: -SourceMicrosoftOnedriveFormat = Union['SourceMicrosoftOnedriveAvroFormat', 'SourceMicrosoftOnedriveCSVFormat', 'SourceMicrosoftOnedriveJsonlFormat', 'SourceMicrosoftOnedriveParquetFormat', 'SourceMicrosoftOnedriveDocumentFileTypeFormatExperimental'] - class SourceMicrosoftOnedriveValidationPolicy(str, Enum): r"""The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.""" @@ -282,3 +274,11 @@ class SourceMicrosoftOnedrive: r"""UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.""" + +SourceMicrosoftOnedriveAuthentication = Union[AuthenticateViaMicrosoftOAuth, ServiceKeyAuthentication] + +SourceMicrosoftOnedriveProcessing = Union[SourceMicrosoftOnedriveLocal] + +SourceMicrosoftOnedriveCSVHeaderDefinition = Union[SourceMicrosoftOnedriveFromCSV, SourceMicrosoftOnedriveAutogenerated, SourceMicrosoftOnedriveUserProvided] + +SourceMicrosoftOnedriveFormat = Union[SourceMicrosoftOnedriveAvroFormat, SourceMicrosoftOnedriveCSVFormat, SourceMicrosoftOnedriveJsonlFormat, SourceMicrosoftOnedriveParquetFormat, UnstructuredDocumentFormat] diff --git a/src/airbyte_api/models/source_microsoft_sharepoint.py b/src/airbyte_api/models/source_microsoft_sharepoint.py index 20d08a88..44ef6aba 100644 --- a/src/airbyte_api/models/source_microsoft_sharepoint.py +++ b/src/airbyte_api/models/source_microsoft_sharepoint.py @@ -55,8 +55,6 @@ class SourceMicrosoftSharepointAuthenticateViaMicrosoftOAuth: -SourceMicrosoftSharepointAuthentication = Union['SourceMicrosoftSharepointAuthenticateViaMicrosoftOAuth', 'SourceMicrosoftSharepointServiceKeyAuthentication'] - class SourceMicrosoftSharepointSearchScope(str, Enum): r"""Specifies the location(s) to search for files. 
Valid options are 'ACCESSIBLE_DRIVES' for all SharePoint drives the user can access, 'SHARED_ITEMS' for shared items the user has access to, and 'ALL' to search both.""" @@ -85,8 +83,6 @@ class SourceMicrosoftSharepointLocal: -SourceMicrosoftSharepointProcessing = Union['SourceMicrosoftSharepointLocal'] - class SourceMicrosoftSharepointParsingStrategy(str, Enum): r"""The strategy used to parse documents. `fast` extracts text directly from the document which doesn't work for all files. `ocr_only` is more reliable, but slower. `hi_res` is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf""" @@ -98,7 +94,7 @@ class SourceMicrosoftSharepointParsingStrategy(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass -class SourceMicrosoftSharepointDocumentFileTypeFormatExperimental: +class SourceMicrosoftSharepointUnstructuredDocumentFormat: r"""Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one record per file.""" FILETYPE: Final[Optional[SourceMicrosoftSharepointSchemasStreamsFormatFormatFiletype]] = dataclasses.field(default=SourceMicrosoftSharepointSchemasStreamsFormatFormatFiletype.UNSTRUCTURED, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }}) processing: Optional[SourceMicrosoftSharepointProcessing] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('processing'), 'exclude': lambda f: f is None }}) @@ -178,8 +174,6 @@ class SourceMicrosoftSharepointFromCSV: -SourceMicrosoftSharepointCSVHeaderDefinition = Union['SourceMicrosoftSharepointFromCSV', 'SourceMicrosoftSharepointAutogenerated', 'SourceMicrosoftSharepointUserProvided'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -228,8 +222,6 @@ class SourceMicrosoftSharepointAvroFormat: -SourceMicrosoftSharepointFormat = Union['SourceMicrosoftSharepointAvroFormat', 'SourceMicrosoftSharepointCSVFormat', 'SourceMicrosoftSharepointJsonlFormat', 'SourceMicrosoftSharepointParquetFormat', 'SourceMicrosoftSharepointDocumentFileTypeFormatExperimental'] - class SourceMicrosoftSharepointValidationPolicy(str, Enum): r"""The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.""" @@ -280,3 +272,11 @@ class SourceMicrosoftSharepoint: r"""UTC date and time in the format 2017-01-25T00:00:00.000000Z. 
Any file modified before this date will not be replicated.""" + +SourceMicrosoftSharepointAuthentication = Union[SourceMicrosoftSharepointAuthenticateViaMicrosoftOAuth, SourceMicrosoftSharepointServiceKeyAuthentication] + +SourceMicrosoftSharepointProcessing = Union[SourceMicrosoftSharepointLocal] + +SourceMicrosoftSharepointCSVHeaderDefinition = Union[SourceMicrosoftSharepointFromCSV, SourceMicrosoftSharepointAutogenerated, SourceMicrosoftSharepointUserProvided] + +SourceMicrosoftSharepointFormat = Union[SourceMicrosoftSharepointAvroFormat, SourceMicrosoftSharepointCSVFormat, SourceMicrosoftSharepointJsonlFormat, SourceMicrosoftSharepointParquetFormat, SourceMicrosoftSharepointUnstructuredDocumentFormat] diff --git a/src/airbyte_api/models/source_microsoft_teams.py b/src/airbyte_api/models/source_microsoft_teams.py index 0dac3b8f..2105c63d 100644 --- a/src/airbyte_api/models/source_microsoft_teams.py +++ b/src/airbyte_api/models/source_microsoft_teams.py @@ -45,8 +45,6 @@ class AuthenticateViaMicrosoftOAuth20: -SourceMicrosoftTeamsAuthenticationMechanism = Union['AuthenticateViaMicrosoftOAuth20', 'AuthenticateViaMicrosoft'] - class SourceMicrosoftTeamsMicrosoftTeams(str, Enum): MICROSOFT_TEAMS = 'microsoft-teams' @@ -62,3 +60,5 @@ class SourceMicrosoftTeams: SOURCE_TYPE: Final[SourceMicrosoftTeamsMicrosoftTeams] = dataclasses.field(default=SourceMicrosoftTeamsMicrosoftTeams.MICROSOFT_TEAMS, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + +SourceMicrosoftTeamsAuthenticationMechanism = Union[AuthenticateViaMicrosoftOAuth20, AuthenticateViaMicrosoft] diff --git a/src/airbyte_api/models/source_mixpanel.py b/src/airbyte_api/models/source_mixpanel.py index b30dec51..c981db6b 100644 --- a/src/airbyte_api/models/source_mixpanel.py +++ b/src/airbyte_api/models/source_mixpanel.py @@ -41,8 +41,6 @@ class ServiceAccount: -AuthenticationWildcard = Union['ServiceAccount', 'ProjectSecret'] - class SourceMixpanelRegion(str, Enum): r"""The region of mixpanel domain instance either US or EU.""" @@ -76,3 +74,5 @@ class SourceMixpanel: r"""The date in the format YYYY-MM-DD. Any data before this date will not be replicated. 
If this option is not set, the connector will replicate data from up to one year ago by default.""" + +AuthenticationWildcard = Union[ServiceAccount, ProjectSecret] diff --git a/src/airbyte_api/models/source_monday.py b/src/airbyte_api/models/source_monday.py index 938f8100..5edbcfff 100644 --- a/src/airbyte_api/models/source_monday.py +++ b/src/airbyte_api/models/source_monday.py @@ -41,8 +41,6 @@ class SourceMondayOAuth20: -SourceMondayAuthorizationMethod = Union['SourceMondayOAuth20', 'APIToken'] - class SourceMondayMonday(str, Enum): MONDAY = 'monday' @@ -55,3 +53,5 @@ class SourceMonday: SOURCE_TYPE: Final[SourceMondayMonday] = dataclasses.field(default=SourceMondayMonday.MONDAY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + +SourceMondayAuthorizationMethod = Union[SourceMondayOAuth20, APIToken] diff --git a/src/airbyte_api/models/source_mongodb_v2.py b/src/airbyte_api/models/source_mongodb_v2.py index 539cfb93..d31c1294 100644 --- a/src/airbyte_api/models/source_mongodb_v2.py +++ b/src/airbyte_api/models/source_mongodb_v2.py @@ -16,7 +16,6 @@ class SourceMongodbV2SchemasClusterType(str, Enum): @dataclasses.dataclass class SelfManagedReplicaSet: r"""MongoDB self-hosted cluster configured as a replica set""" - UNSET='__SPEAKEASY_UNSET__' connection_string: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('connection_string') }}) r"""The connection string of the cluster that you want to replicate. https://www.mongodb.com/docs/manual/reference/connection-string/#find-your-self-hosted-deployment-s-connection-string for more information.""" database: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('database') }}) @@ -43,7 +42,6 @@ class SourceMongodbV2ClusterType(str, Enum): @dataclasses.dataclass class MongoDBAtlasReplicaSet: r"""MongoDB Atlas-hosted cluster configured as a replica set""" - UNSET='__SPEAKEASY_UNSET__' connection_string: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('connection_string') }}) r"""The connection string of the cluster that you want to replicate.""" database: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('database') }}) @@ -61,8 +59,6 @@ class MongoDBAtlasReplicaSet: -ClusterType = Union['MongoDBAtlasReplicaSet', 'SelfManagedReplicaSet'] - class InvalidCDCPositionBehaviorAdvanced(str, Enum): r"""Determines whether Airbyte should fail or re-sync data in case of an stale/invalid cursor value into the WAL. If 'Fail sync' is chosen, a user will have to manually reset the connection before being able to continue syncing data. If 'Re-sync data' is chosen, Airbyte will automatically trigger a refresh but could lead to higher cloud costs and data loss.""" @@ -98,3 +94,5 @@ class SourceMongodbV2: r"""Determines how Airbyte looks up the value of an updated document. If 'Lookup' is chosen, the current value of the document will be read. If 'Post Image' is chosen, then the version of the document immediately after an update will be read. 
WARNING : Severe data loss will occur if this option is chosen and the appropriate settings are not set on your Mongo instance : https://www.mongodb.com/docs/manual/changeStreams/#change-streams-with-document-pre-and-post-images.""" + +ClusterType = Union[MongoDBAtlasReplicaSet, SelfManagedReplicaSet] diff --git a/src/airbyte_api/models/source_mssql.py b/src/airbyte_api/models/source_mssql.py index 2cd90531..174d6663 100644 --- a/src/airbyte_api/models/source_mssql.py +++ b/src/airbyte_api/models/source_mssql.py @@ -45,8 +45,6 @@ class ReadChangesUsingChangeDataCaptureCDC: -UpdateMethod = Union['ReadChangesUsingChangeDataCaptureCDC', 'ScanChangesWithUserDefinedCursor'] - class SourceMssqlMssql(str, Enum): MSSQL = 'mssql' @@ -94,8 +92,6 @@ class Unencrypted: -SourceMssqlSSLMethod = Union['Unencrypted', 'SourceMssqlEncryptedTrustServerCertificate', 'SourceMssqlEncryptedVerifyCertificate'] - class SourceMssqlSchemasTunnelMethodTunnelMethod(str, Enum): r"""Connect through a jump server tunnel host using username and password authentication""" @@ -154,8 +150,6 @@ class SourceMssqlNoTunnel: -SourceMssqlSSHTunnelMethod = Union['SourceMssqlNoTunnel', 'SourceMssqlSSHKeyAuthentication', 'SourceMssqlPasswordAuthentication'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -183,3 +177,9 @@ class SourceMssql: r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.""" + +UpdateMethod = Union[ReadChangesUsingChangeDataCaptureCDC, ScanChangesWithUserDefinedCursor] + +SourceMssqlSSLMethod = Union[Unencrypted, SourceMssqlEncryptedTrustServerCertificate, SourceMssqlEncryptedVerifyCertificate] + +SourceMssqlSSHTunnelMethod = Union[SourceMssqlNoTunnel, SourceMssqlSSHKeyAuthentication, SourceMssqlPasswordAuthentication] diff --git a/src/airbyte_api/models/source_mysql.py b/src/airbyte_api/models/source_mysql.py index a4422090..5a8fe128 100644 --- a/src/airbyte_api/models/source_mysql.py +++ b/src/airbyte_api/models/source_mysql.py @@ -35,6 +35,8 @@ class SourceMysqlMethod(str, Enum): @dataclasses.dataclass class ReadChangesUsingBinaryLogCDC: r"""Recommended - Incrementally reads new inserts, updates, and deletes using the MySQL binary log. This must be enabled on your database.""" + initial_load_timeout_hours: Optional[int] = dataclasses.field(default=8, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('initial_load_timeout_hours'), 'exclude': lambda f: f is None }}) + r"""The amount of time an initial load is allowed to continue for before catching up on CDC logs.""" initial_waiting_seconds: Optional[int] = dataclasses.field(default=300, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('initial_waiting_seconds'), 'exclude': lambda f: f is None }}) r"""The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. 
Read about initial waiting time.""" invalid_cdc_cursor_position_behavior: Optional[SourceMysqlInvalidCDCPositionBehaviorAdvanced] = dataclasses.field(default=SourceMysqlInvalidCDCPositionBehaviorAdvanced.FAIL_SYNC, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('invalid_cdc_cursor_position_behavior'), 'exclude': lambda f: f is None }}) @@ -45,8 +47,6 @@ class ReadChangesUsingBinaryLogCDC: -SourceMysqlUpdateMethod = Union['ReadChangesUsingBinaryLogCDC', 'SourceMysqlScanChangesWithUserDefinedCursor'] - class SourceMysqlMysql(str, Enum): MYSQL = 'mysql' @@ -119,8 +119,6 @@ class Preferred: -SourceMysqlSSLModes = Union['Preferred', 'Required', 'SourceMysqlVerifyCA', 'VerifyIdentity'] - class SourceMysqlSchemasTunnelMethodTunnelMethod(str, Enum): r"""Connect through a jump server tunnel host using username and password authentication""" @@ -179,8 +177,6 @@ class SourceMysqlNoTunnel: -SourceMysqlSSHTunnelMethod = Union['SourceMysqlNoTunnel', 'SourceMysqlSSHKeyAuthentication', 'SourceMysqlPasswordAuthentication'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -206,3 +202,9 @@ class SourceMysql: r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.""" + +SourceMysqlUpdateMethod = Union[ReadChangesUsingBinaryLogCDC, SourceMysqlScanChangesWithUserDefinedCursor] + +SourceMysqlSSLModes = Union[Preferred, Required, SourceMysqlVerifyCA, VerifyIdentity] + +SourceMysqlSSHTunnelMethod = Union[SourceMysqlNoTunnel, SourceMysqlSSHKeyAuthentication, SourceMysqlPasswordAuthentication] diff --git a/src/airbyte_api/models/source_notion.py b/src/airbyte_api/models/source_notion.py index 2b906fa5..38a2992d 100644 --- a/src/airbyte_api/models/source_notion.py +++ b/src/airbyte_api/models/source_notion.py @@ -41,8 +41,6 @@ class SourceNotionOAuth20: -SourceNotionAuthenticationMethod = Union['SourceNotionOAuth20', 'SourceNotionAccessToken'] - class SourceNotionNotion(str, Enum): NOTION = 'notion' @@ -58,3 +56,5 @@ class SourceNotion: r"""UTC date and time in the format YYYY-MM-DDTHH:MM:SS.000Z. During incremental sync, any data generated before this date will not be replicated. 
If left blank, the start date will be set to 2 years before the present date.""" + +SourceNotionAuthenticationMethod = Union[SourceNotionOAuth20, SourceNotionAccessToken] diff --git a/src/airbyte_api/models/source_nytimes.py b/src/airbyte_api/models/source_nytimes.py index 830d9be9..aa70b22a 100644 --- a/src/airbyte_api/models/source_nytimes.py +++ b/src/airbyte_api/models/source_nytimes.py @@ -4,7 +4,6 @@ import dataclasses from airbyte_api import utils from dataclasses_json import Undefined, dataclass_json -from datetime import date from enum import Enum from typing import Final, Optional @@ -32,9 +31,9 @@ class SourceNytimes: r"""API Key""" period: PeriodUsedForMostPopularStreams = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('period') }}) r"""Period of time (in days)""" - start_date: date = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.dateisoformat(False), 'decoder': utils.datefromisoformat }}) + start_date: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date') }}) r"""Start date to begin the article retrieval (format YYYY-MM)""" - end_date: Optional[date] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date'), 'encoder': utils.dateisoformat(True), 'decoder': utils.datefromisoformat, 'exclude': lambda f: f is None }}) + end_date: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date'), 'exclude': lambda f: f is None }}) r"""End date to stop the article retrieval (format YYYY-MM)""" share_type: Optional[ShareTypeUsedForMostPopularSharedStream] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('share_type'), 'exclude': lambda f: f is None }}) r"""Share Type""" diff --git a/src/airbyte_api/models/source_okta.py b/src/airbyte_api/models/source_okta.py index 63f5cc4b..e94740b9 100644 --- a/src/airbyte_api/models/source_okta.py +++ b/src/airbyte_api/models/source_okta.py @@ -41,8 +41,6 @@ class SourceOktaOAuth20: -SourceOktaAuthorizationMethod = Union['SourceOktaOAuth20', 'SourceOktaAPIToken'] - class Okta(str, Enum): OKTA = 'okta' @@ -59,3 +57,5 @@ class SourceOkta: r"""UTC date and time in the format YYYY-MM-DDTHH:MM:SSZ. 
Any data before this date will not be replicated.""" + +SourceOktaAuthorizationMethod = Union[SourceOktaOAuth20, SourceOktaAPIToken] diff --git a/src/airbyte_api/models/source_oracle.py b/src/airbyte_api/models/source_oracle.py index 7a6302a7..b28dfc4d 100644 --- a/src/airbyte_api/models/source_oracle.py +++ b/src/airbyte_api/models/source_oracle.py @@ -35,8 +35,6 @@ class ServiceName: -ConnectBy = Union['ServiceName', 'SystemIDSID'] - class SourceOracleEncryptionMethod(str, Enum): ENCRYPTED_VERIFY_CERTIFICATE = 'encrypted_verify_certificate' @@ -74,8 +72,6 @@ class NativeNetworkEncryptionNNE: -Encryption = Union['NativeNetworkEncryptionNNE', 'TLSEncryptedVerifyCertificate'] - class SourceOracleOracle(str, Enum): ORACLE = 'oracle' @@ -138,8 +134,6 @@ class SourceOracleNoTunnel: -SourceOracleSSHTunnelMethod = Union['SourceOracleNoTunnel', 'SourceOracleSSHKeyAuthentication', 'SourceOraclePasswordAuthentication'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -169,3 +163,9 @@ class SourceOracle: r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.""" + +ConnectBy = Union[ServiceName, SystemIDSID] + +Encryption = Union[NativeNetworkEncryptionNNE, TLSEncryptedVerifyCertificate] + +SourceOracleSSHTunnelMethod = Union[SourceOracleNoTunnel, SourceOracleSSHKeyAuthentication, SourceOraclePasswordAuthentication] diff --git a/src/airbyte_api/models/source_orb.py b/src/airbyte_api/models/source_orb.py index 1f8d8277..494ebf4d 100644 --- a/src/airbyte_api/models/source_orb.py +++ b/src/airbyte_api/models/source_orb.py @@ -2,8 +2,10 @@ from __future__ import annotations import dataclasses +import dateutil.parser from airbyte_api import utils from dataclasses_json import Undefined, dataclass_json +from datetime import datetime from enum import Enum from typing import Final, List, Optional @@ -17,7 +19,7 @@ class Orb(str, Enum): class SourceOrb: api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) r"""Orb API Key, issued from the Orb admin console.""" - start_date: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date') }}) + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) r"""UTC date and time in the format 2022-03-01T00:00:00Z. Any data with created_at before this data will not be synced. For Subscription Usage, this becomes the `timeframe_start` API parameter.""" end_date: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date'), 'exclude': lambda f: f is None }}) r"""UTC date and time in the format 2022-03-01T00:00:00Z. Any data with created_at after this data will not be synced. 
For Subscription Usage, this becomes the `timeframe_start` API parameter.""" diff --git a/src/airbyte_api/models/source_outbrain_amplify.py b/src/airbyte_api/models/source_outbrain_amplify.py index 85ba819c..58dcceb4 100644 --- a/src/airbyte_api/models/source_outbrain_amplify.py +++ b/src/airbyte_api/models/source_outbrain_amplify.py @@ -37,8 +37,6 @@ class SourceOutbrainAmplifyAccessToken: -SourceOutbrainAmplifyAuthenticationMethod = Union['SourceOutbrainAmplifyAccessToken', 'SourceOutbrainAmplifyUsernamePassword'] - class GranularityForGeoLocationRegion(str, Enum): r"""The granularity used for geo location data in reports.""" @@ -74,3 +72,5 @@ class SourceOutbrainAmplify: SOURCE_TYPE: Final[OutbrainAmplify] = dataclasses.field(default=OutbrainAmplify.OUTBRAIN_AMPLIFY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + +SourceOutbrainAmplifyAuthenticationMethod = Union[SourceOutbrainAmplifyAccessToken, SourceOutbrainAmplifyUsernamePassword] diff --git a/src/airbyte_api/models/source_postgres.py b/src/airbyte_api/models/source_postgres.py index 430d7dc1..3e323cab 100644 --- a/src/airbyte_api/models/source_postgres.py +++ b/src/airbyte_api/models/source_postgres.py @@ -28,7 +28,7 @@ class SourcePostgresSchemasMethod(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class DetectChangesWithXminSystemColumn: - r"""Recommended - Incrementally reads new inserts and updates via Postgres Xmin system column. Only recommended for tables up to 500GB.""" + r"""Recommended - Incrementally reads new inserts and updates via Postgres Xmin system column. Suitable for databases that have low transaction pressure.""" METHOD: Final[SourcePostgresSchemasMethod] = dataclasses.field(default=SourcePostgresSchemasMethod.XMIN, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }}) @@ -59,7 +59,6 @@ class Plugin(str, Enum): @dataclasses.dataclass class ReadChangesUsingWriteAheadLogCDC: r"""Recommended - Incrementally reads new inserts, updates, and deletes using the Postgres write-ahead log (WAL). This needs to be configured on the source database itself. Recommended for tables of any size.""" - UNSET='__SPEAKEASY_UNSET__' publication: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('publication') }}) r"""A Postgres publication used for consuming changes. Read about publications and replication identities.""" replication_slot: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('replication_slot') }}) @@ -81,8 +80,6 @@ class ReadChangesUsingWriteAheadLogCDC: -SourcePostgresUpdateMethod = Union['ReadChangesUsingWriteAheadLogCDC', 'DetectChangesWithXminSystemColumn', 'SourcePostgresScanChangesWithUserDefinedCursor'] - class SourcePostgresPostgres(str, Enum): POSTGRES = 'postgres' @@ -96,7 +93,6 @@ class SourcePostgresSchemasSSLModeSSLModes6Mode(str, Enum): @dataclasses.dataclass class SourcePostgresVerifyFull: r"""This is the most secure mode. 
Always require encryption and verifies the identity of the source database server.""" - UNSET='__SPEAKEASY_UNSET__' ca_certificate: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ca_certificate') }}) r"""CA certificate""" additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) @@ -119,7 +115,6 @@ class SourcePostgresSchemasSSLModeSSLModes5Mode(str, Enum): @dataclasses.dataclass class SourcePostgresVerifyCa: r"""Always require encryption and verifies that the source database server has a valid SSL certificate.""" - UNSET='__SPEAKEASY_UNSET__' ca_certificate: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ca_certificate') }}) r"""CA certificate""" additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) @@ -142,7 +137,6 @@ class SourcePostgresSchemasSSLModeSSLModesMode(str, Enum): @dataclasses.dataclass class SourcePostgresRequire: r"""Always require encryption. If the source database server does not support encryption, connection will fail.""" - UNSET='__SPEAKEASY_UNSET__' additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) MODE: Final[SourcePostgresSchemasSSLModeSSLModesMode] = dataclasses.field(default=SourcePostgresSchemasSSLModeSSLModesMode.REQUIRE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode') }}) @@ -157,7 +151,6 @@ class SourcePostgresSchemasSslModeMode(str, Enum): @dataclasses.dataclass class SourcePostgresPrefer: r"""Allows unencrypted connection only if the source database does not support encryption.""" - UNSET='__SPEAKEASY_UNSET__' additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) MODE: Final[SourcePostgresSchemasSslModeMode] = dataclasses.field(default=SourcePostgresSchemasSslModeMode.PREFER, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode') }}) @@ -172,7 +165,6 @@ class SourcePostgresSchemasMode(str, Enum): @dataclasses.dataclass class SourcePostgresAllow: r"""Enables encryption only when required by the source database.""" - UNSET='__SPEAKEASY_UNSET__' additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) MODE: Final[SourcePostgresSchemasMode] = dataclasses.field(default=SourcePostgresSchemasMode.ALLOW, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode') }}) @@ -187,14 +179,11 @@ class SourcePostgresMode(str, Enum): @dataclasses.dataclass class SourcePostgresDisable: r"""Disables encryption of communication between Airbyte and source database.""" - UNSET='__SPEAKEASY_UNSET__' additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) MODE: Final[SourcePostgresMode] = dataclasses.field(default=SourcePostgresMode.DISABLE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode') }}) -SourcePostgresSSLModes = Union['SourcePostgresDisable', 'SourcePostgresAllow', 'SourcePostgresPrefer', 'SourcePostgresRequire', 'SourcePostgresVerifyCa', 'SourcePostgresVerifyFull'] - class SourcePostgresSchemasTunnelMethodTunnelMethod(str, Enum): 
r"""Connect through a jump server tunnel host using username and password authentication""" @@ -253,8 +242,6 @@ class SourcePostgresNoTunnel: -SourcePostgresSSHTunnelMethod = Union['SourcePostgresNoTunnel', 'SourcePostgresSSHKeyAuthentication', 'SourcePostgresPasswordAuthentication'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -284,3 +271,9 @@ class SourcePostgres: r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.""" + +SourcePostgresUpdateMethod = Union[ReadChangesUsingWriteAheadLogCDC, DetectChangesWithXminSystemColumn, SourcePostgresScanChangesWithUserDefinedCursor] + +SourcePostgresSSLModes = Union[SourcePostgresDisable, SourcePostgresAllow, SourcePostgresPrefer, SourcePostgresRequire, SourcePostgresVerifyCa, SourcePostgresVerifyFull] + +SourcePostgresSSHTunnelMethod = Union[SourcePostgresNoTunnel, SourcePostgresSSHKeyAuthentication, SourcePostgresPasswordAuthentication] diff --git a/src/airbyte_api/models/source_punk_api.py b/src/airbyte_api/models/source_punk_api.py deleted file mode 100644 index 526c80cd..00000000 --- a/src/airbyte_api/models/source_punk_api.py +++ /dev/null @@ -1,26 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" - -from __future__ import annotations -import dataclasses -from airbyte_api import utils -from dataclasses_json import Undefined, dataclass_json -from enum import Enum -from typing import Final, Optional - - -class PunkAPI(str, Enum): - PUNK_API = 'punk-api' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class SourcePunkAPI: - brewed_after: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('brewed_after') }}) - r"""To extract specific data with Unique ID""" - brewed_before: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('brewed_before') }}) - r"""To extract specific data with Unique ID""" - id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('id'), 'exclude': lambda f: f is None }}) - r"""To extract specific data with Unique ID""" - SOURCE_TYPE: Final[PunkAPI] = dataclasses.field(default=PunkAPI.PUNK_API, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) - - diff --git a/src/airbyte_api/models/source_retently.py b/src/airbyte_api/models/source_retently.py index a5a9291e..c8af5db5 100644 --- a/src/airbyte_api/models/source_retently.py +++ b/src/airbyte_api/models/source_retently.py @@ -15,7 +15,6 @@ class SourceRetentlySchemasAuthType(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class AuthenticateWithAPIToken: - UNSET='__SPEAKEASY_UNSET__' api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) r"""Retently API Token. 
See the docs for more information on how to obtain this key.""" additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) @@ -31,7 +30,6 @@ class SourceRetentlyAuthType(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class AuthenticateViaRetentlyOAuth: - UNSET='__SPEAKEASY_UNSET__' client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }}) r"""The Client ID of your Retently developer application.""" client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }}) @@ -43,8 +41,6 @@ class AuthenticateViaRetentlyOAuth: -SourceRetentlyAuthenticationMechanism = Union['AuthenticateViaRetentlyOAuth', 'AuthenticateWithAPIToken'] - class SourceRetentlyRetently(str, Enum): RETENTLY = 'retently' @@ -58,3 +54,5 @@ class SourceRetently: SOURCE_TYPE: Final[Optional[SourceRetentlyRetently]] = dataclasses.field(default=SourceRetentlyRetently.RETENTLY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType'), 'exclude': lambda f: f is None }}) + +SourceRetentlyAuthenticationMechanism = Union[AuthenticateViaRetentlyOAuth, AuthenticateWithAPIToken] diff --git a/src/airbyte_api/models/source_s3.py b/src/airbyte_api/models/source_s3.py index b4e2abdf..a432421d 100644 --- a/src/airbyte_api/models/source_s3.py +++ b/src/airbyte_api/models/source_s3.py @@ -100,8 +100,6 @@ class Csv: -SourceS3FileFormat = Union['Csv', 'Parquet', 'Avro', 'Jsonl'] - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass @@ -147,8 +145,6 @@ class SourceS3Local: -SourceS3Processing = Union['SourceS3Local'] - class SourceS3ParsingStrategy(str, Enum): r"""The strategy used to parse documents. `fast` extracts text directly from the document which doesn't work for all files. `ocr_only` is more reliable, but slower. `hi_res` is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf""" @@ -160,7 +156,7 @@ class SourceS3ParsingStrategy(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass -class SourceS3DocumentFileTypeFormatExperimental: +class SourceS3UnstructuredDocumentFormat: r"""Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one record per file.""" FILETYPE: Final[Optional[SourceS3SchemasStreamsFormatFormat5Filetype]] = dataclasses.field(default=SourceS3SchemasStreamsFormatFormat5Filetype.UNSTRUCTURED, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }}) processing: Optional[SourceS3Processing] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('processing'), 'exclude': lambda f: f is None }}) @@ -240,8 +236,6 @@ class SourceS3FromCSV: -SourceS3CSVHeaderDefinition = Union['SourceS3FromCSV', 'SourceS3Autogenerated', 'SourceS3UserProvided'] - class SourceS3InferenceType(str, Enum): r"""How to infer the types of the columns. 
If none, inference default to strings.""" @@ -298,8 +292,6 @@ class SourceS3AvroFormat: -SourceS3Format = Union['SourceS3AvroFormat', 'SourceS3CSVFormat', 'SourceS3JsonlFormat', 'SourceS3ParquetFormat', 'SourceS3DocumentFileTypeFormatExperimental'] - class SourceS3ValidationPolicy(str, Enum): r"""The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.""" @@ -368,3 +360,11 @@ class SourceS3: r"""UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.""" + +SourceS3FileFormat = Union[Csv, Parquet, Avro, Jsonl] + +SourceS3Processing = Union[SourceS3Local] + +SourceS3CSVHeaderDefinition = Union[SourceS3FromCSV, SourceS3Autogenerated, SourceS3UserProvided] + +SourceS3Format = Union[SourceS3AvroFormat, SourceS3CSVFormat, SourceS3JsonlFormat, SourceS3ParquetFormat, SourceS3UnstructuredDocumentFormat] diff --git a/src/airbyte_api/models/source_salesloft.py b/src/airbyte_api/models/source_salesloft.py index 774efdd3..65298ffb 100644 --- a/src/airbyte_api/models/source_salesloft.py +++ b/src/airbyte_api/models/source_salesloft.py @@ -45,8 +45,6 @@ class AuthenticateViaOAuth: -SourceSalesloftCredentials = Union['AuthenticateViaOAuth', 'AuthenticateViaAPIKey'] - class Salesloft(str, Enum): SALESLOFT = 'salesloft' @@ -61,3 +59,5 @@ class SourceSalesloft: SOURCE_TYPE: Final[Salesloft] = dataclasses.field(default=Salesloft.SALESLOFT, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + +SourceSalesloftCredentials = Union[AuthenticateViaOAuth, AuthenticateViaAPIKey] diff --git a/src/airbyte_api/models/source_sftp.py b/src/airbyte_api/models/source_sftp.py index e8e72864..658764f1 100644 --- a/src/airbyte_api/models/source_sftp.py +++ b/src/airbyte_api/models/source_sftp.py @@ -39,8 +39,6 @@ class SourceSftpPasswordAuthentication: -SourceSftpAuthentication = Union['SourceSftpPasswordAuthentication', 'SourceSftpSSHKeyAuthentication'] - class Sftp(str, Enum): SFTP = 'sftp' @@ -66,3 +64,5 @@ class SourceSftp: SOURCE_TYPE: Final[Sftp] = dataclasses.field(default=Sftp.SFTP, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + +SourceSftpAuthentication = Union[SourceSftpPasswordAuthentication, SourceSftpSSHKeyAuthentication] diff --git a/src/airbyte_api/models/source_sftp_bulk.py b/src/airbyte_api/models/source_sftp_bulk.py index 38b2f469..3c3e6ac4 100644 --- a/src/airbyte_api/models/source_sftp_bulk.py +++ b/src/airbyte_api/models/source_sftp_bulk.py @@ -37,8 +37,6 @@ class AuthenticateViaPassword: -SourceSftpBulkAuthentication = Union['AuthenticateViaPassword', 'AuthenticateViaPrivateKey'] - class SftpBulk(str, Enum): SFTP_BULK = 'sftp-bulk' @@ -90,8 +88,6 @@ class SourceSftpBulkLocal: -SourceSftpBulkProcessing = Union['SourceSftpBulkLocal', 'ViaAPI'] - class SourceSftpBulkParsingStrategy(str, Enum): r"""The strategy used to parse documents. `fast` extracts text directly from the document which doesn't work for all files. `ocr_only` is more reliable, but slower. `hi_res` is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. 
See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf""" @@ -183,8 +179,6 @@ class SourceSftpBulkFromCSV: -SourceSftpBulkCSVHeaderDefinition = Union['SourceSftpBulkFromCSV', 'SourceSftpBulkAutogenerated', 'SourceSftpBulkUserProvided'] - class SourceSftpBulkInferenceType(str, Enum): r"""How to infer the types of the columns. If none, inference default to strings.""" @@ -241,8 +235,6 @@ class SourceSftpBulkAvroFormat: -SourceSftpBulkFormat = Union['SourceSftpBulkAvroFormat', 'SourceSftpBulkCSVFormat', 'SourceSftpBulkJsonlFormat', 'SourceSftpBulkParquetFormat', 'SourceSftpBulkDocumentFileTypeFormatExperimental'] - class SourceSftpBulkValidationPolicy(str, Enum): r"""The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.""" @@ -299,3 +291,11 @@ class SourceSftpBulk: r"""UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.""" + +SourceSftpBulkAuthentication = Union[AuthenticateViaPassword, AuthenticateViaPrivateKey] + +SourceSftpBulkProcessing = Union[SourceSftpBulkLocal, ViaAPI] + +SourceSftpBulkCSVHeaderDefinition = Union[SourceSftpBulkFromCSV, SourceSftpBulkAutogenerated, SourceSftpBulkUserProvided] + +SourceSftpBulkFormat = Union[SourceSftpBulkAvroFormat, SourceSftpBulkCSVFormat, SourceSftpBulkJsonlFormat, SourceSftpBulkParquetFormat, SourceSftpBulkDocumentFileTypeFormatExperimental] diff --git a/src/airbyte_api/models/source_shopify.py b/src/airbyte_api/models/source_shopify.py index 7af2d2dc..91127c3b 100644 --- a/src/airbyte_api/models/source_shopify.py +++ b/src/airbyte_api/models/source_shopify.py @@ -43,8 +43,6 @@ class SourceShopifyOAuth20: -ShopifyAuthorizationMethod = Union['SourceShopifyOAuth20', 'APIPassword'] - class SourceShopifyShopify(str, Enum): SHOPIFY = 'shopify' @@ -61,8 +59,12 @@ class SourceShopify: r"""The authorization method to use to retrieve data from Shopify""" fetch_transactions_user_id: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('fetch_transactions_user_id'), 'exclude': lambda f: f is None }}) r"""Defines which API type (REST/BULK) to use to fetch `Transactions` data. If you are a `Shopify Plus` user, leave the default value to speed up the fetch.""" + job_termination_threshold: Optional[int] = dataclasses.field(default=3600, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('job_termination_threshold'), 'exclude': lambda f: f is None }}) + r"""The max time in seconds, after which the single BULK Job should be `CANCELED` and retried. The bigger the value the longer the BULK Job is allowed to run.""" SOURCE_TYPE: Final[SourceShopifyShopify] = dataclasses.field(default=SourceShopifyShopify.SHOPIFY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) start_date: Optional[date] = dataclasses.field(default=dateutil.parser.parse('2020-01-01').date(), metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.dateisoformat(True), 'decoder': utils.datefromisoformat, 'exclude': lambda f: f is None }}) r"""The date you would like to replicate data from. Format: YYYY-MM-DD. 
Any data before this date will not be replicated.""" + +ShopifyAuthorizationMethod = Union[SourceShopifyOAuth20, APIPassword] diff --git a/src/airbyte_api/models/source_slack.py b/src/airbyte_api/models/source_slack.py index 3b0bdb27..cc2889e2 100644 --- a/src/airbyte_api/models/source_slack.py +++ b/src/airbyte_api/models/source_slack.py @@ -41,8 +41,6 @@ class SignInViaSlackOAuth: -SourceSlackAuthenticationMechanism = Union['SignInViaSlackOAuth', 'SourceSlackAPIToken'] - class SourceSlackSlack(str, Enum): SLACK = 'slack' @@ -66,3 +64,5 @@ class SourceSlack: SOURCE_TYPE: Final[SourceSlackSlack] = dataclasses.field(default=SourceSlackSlack.SLACK, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + +SourceSlackAuthenticationMechanism = Union[SignInViaSlackOAuth, SourceSlackAPIToken] diff --git a/src/airbyte_api/models/source_smartsheets.py b/src/airbyte_api/models/source_smartsheets.py index 0d146f5f..9dd65ec6 100644 --- a/src/airbyte_api/models/source_smartsheets.py +++ b/src/airbyte_api/models/source_smartsheets.py @@ -45,8 +45,6 @@ class SourceSmartsheetsOAuth20: -SourceSmartsheetsAuthorizationMethod = Union['SourceSmartsheetsOAuth20', 'APIAccessToken'] - class Validenums(str, Enum): SHEETCREATED_AT = 'sheetcreatedAt' @@ -85,3 +83,5 @@ class SourceSmartsheets: r"""Only rows modified after this date/time will be replicated. This should be an ISO 8601 string, for instance: `2000-01-01T13:00:00`""" + +SourceSmartsheetsAuthorizationMethod = Union[SourceSmartsheetsOAuth20, APIAccessToken] diff --git a/src/airbyte_api/models/source_snowflake.py b/src/airbyte_api/models/source_snowflake.py index a2555b58..cd13bc3d 100644 --- a/src/airbyte_api/models/source_snowflake.py +++ b/src/airbyte_api/models/source_snowflake.py @@ -8,7 +8,7 @@ from typing import Final, Optional, Union -class SourceSnowflakeSchemasAuthType(str, Enum): +class SourceSnowflakeSchemasCredentialsAuthType(str, Enum): USERNAME_PASSWORD = 'username/password' @@ -19,7 +19,25 @@ class SourceSnowflakeUsernameAndPassword: r"""The password associated with the username.""" username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) r"""The username you created to allow Airbyte to access the database.""" - AUTH_TYPE: Final[SourceSnowflakeSchemasAuthType] = dataclasses.field(default=SourceSnowflakeSchemasAuthType.USERNAME_PASSWORD, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }}) + AUTH_TYPE: Final[SourceSnowflakeSchemasCredentialsAuthType] = dataclasses.field(default=SourceSnowflakeSchemasCredentialsAuthType.USERNAME_PASSWORD, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }}) + + + + +class SourceSnowflakeSchemasAuthType(str, Enum): + KEY_PAIR_AUTHENTICATION = 'Key Pair Authentication' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceSnowflakeKeyPairAuthentication: + private_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('private_key') }}) + r"""RSA Private key to use for Snowflake connection. 
See the docs for more information on how to obtain this key.""" + username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) + r"""The username you created to allow Airbyte to access the database.""" + AUTH_TYPE: Final[Optional[SourceSnowflakeSchemasAuthType]] = dataclasses.field(default=SourceSnowflakeSchemasAuthType.KEY_PAIR_AUTHENTICATION, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }}) + private_key_password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('private_key_password'), 'exclude': lambda f: f is None }}) + r"""Passphrase for private key""" @@ -43,8 +61,6 @@ class SourceSnowflakeOAuth20: -SourceSnowflakeAuthorizationMethod = Union['SourceSnowflakeOAuth20', 'SourceSnowflakeUsernameAndPassword'] - class SourceSnowflakeSnowflake(str, Enum): SNOWFLAKE = 'snowflake' @@ -69,3 +85,5 @@ class SourceSnowflake: SOURCE_TYPE: Final[SourceSnowflakeSnowflake] = dataclasses.field(default=SourceSnowflakeSnowflake.SNOWFLAKE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + +SourceSnowflakeAuthorizationMethod = Union[SourceSnowflakeOAuth20, SourceSnowflakeKeyPairAuthentication, SourceSnowflakeUsernameAndPassword] diff --git a/src/airbyte_api/models/source_square.py b/src/airbyte_api/models/source_square.py index 1f636186..6c02bf9e 100644 --- a/src/airbyte_api/models/source_square.py +++ b/src/airbyte_api/models/source_square.py @@ -41,8 +41,6 @@ class OauthAuthentication: -SourceSquareAuthentication = Union['OauthAuthentication', 'SourceSquareAPIKey'] - class SourceSquareSquare(str, Enum): SQUARE = 'square' @@ -62,3 +60,5 @@ class SourceSquare: r"""UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. If not set, all data will be replicated.""" + +SourceSquareAuthentication = Union[OauthAuthentication, SourceSquareAPIKey] diff --git a/src/airbyte_api/models/source_survey_sparrow.py b/src/airbyte_api/models/source_survey_sparrow.py index 0ae751f7..46669887 100644 --- a/src/airbyte_api/models/source_survey_sparrow.py +++ b/src/airbyte_api/models/source_survey_sparrow.py @@ -31,8 +31,6 @@ class EUBasedAccount: -BaseURL = Union['EUBasedAccount', 'GlobalAccount'] - class SurveySparrow(str, Enum): SURVEY_SPARROW = 'survey-sparrow' @@ -50,3 +48,5 @@ class SourceSurveySparrow: r"""A List of your survey ids for survey-specific stream""" + +BaseURL = Union[EUBasedAccount, GlobalAccount] diff --git a/src/airbyte_api/models/source_tiktok_marketing.py b/src/airbyte_api/models/source_tiktok_marketing.py index 9f8a5faf..066614db 100644 --- a/src/airbyte_api/models/source_tiktok_marketing.py +++ b/src/airbyte_api/models/source_tiktok_marketing.py @@ -45,8 +45,6 @@ class SourceTiktokMarketingOAuth20: -SourceTiktokMarketingAuthenticationMethod = Union['SourceTiktokMarketingOAuth20', 'SandboxAccessToken'] - class SourceTiktokMarketingTiktokMarketing(str, Enum): TIKTOK_MARKETING = 'tiktok-marketing' @@ -62,9 +60,11 @@ class SourceTiktokMarketing: end_date: Optional[date] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date'), 'encoder': utils.dateisoformat(True), 'decoder': utils.datefromisoformat, 'exclude': lambda f: f is None }}) r"""The date until which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DD. 
All data generated between start_date and this date will be replicated. Not setting this option will result in always syncing the data till the current date.""" include_deleted: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('include_deleted'), 'exclude': lambda f: f is None }}) - r"""Set to active if you want to include deleted data in reports.""" + r"""Set to active if you want to include deleted data in report based streams and Ads, Ad Groups and Campaign streams.""" SOURCE_TYPE: Final[Optional[SourceTiktokMarketingTiktokMarketing]] = dataclasses.field(default=SourceTiktokMarketingTiktokMarketing.TIKTOK_MARKETING, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType'), 'exclude': lambda f: f is None }}) start_date: Optional[date] = dataclasses.field(default=dateutil.parser.parse('2016-09-01').date(), metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.dateisoformat(True), 'decoder': utils.datefromisoformat, 'exclude': lambda f: f is None }}) r"""The Start Date in format: YYYY-MM-DD. Any data before this date will not be replicated. If this parameter is not set, all data will be replicated.""" + +SourceTiktokMarketingAuthenticationMethod = Union[SourceTiktokMarketingOAuth20, SandboxAccessToken] diff --git a/src/airbyte_api/models/source_trustpilot.py b/src/airbyte_api/models/source_trustpilot.py index 36748a0e..76788abc 100644 --- a/src/airbyte_api/models/source_trustpilot.py +++ b/src/airbyte_api/models/source_trustpilot.py @@ -46,8 +46,6 @@ class SourceTrustpilotOAuth20: -SourceTrustpilotAuthorizationMethod = Union['SourceTrustpilotOAuth20', 'SourceTrustpilotAPIKey'] - class Trustpilot(str, Enum): TRUSTPILOT = 'trustpilot' @@ -64,3 +62,5 @@ class SourceTrustpilot: SOURCE_TYPE: Final[Trustpilot] = dataclasses.field(default=Trustpilot.TRUSTPILOT, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + +SourceTrustpilotAuthorizationMethod = Union[SourceTrustpilotOAuth20, SourceTrustpilotAPIKey] diff --git a/src/airbyte_api/models/source_typeform.py b/src/airbyte_api/models/source_typeform.py index efe7175c..6b660d8d 100644 --- a/src/airbyte_api/models/source_typeform.py +++ b/src/airbyte_api/models/source_typeform.py @@ -45,8 +45,6 @@ class SourceTypeformOAuth20: -SourceTypeformAuthorizationMethod = Union['SourceTypeformOAuth20', 'SourceTypeformPrivateToken'] - class SourceTypeformTypeform(str, Enum): TYPEFORM = 'typeform' @@ -63,3 +61,5 @@ class SourceTypeform: r"""The date from which you'd like to replicate data for Typeform API, in the format YYYY-MM-DDT00:00:00Z. 
All data generated after this date will be replicated.""" + +SourceTypeformAuthorizationMethod = Union[SourceTypeformOAuth20, SourceTypeformPrivateToken] diff --git a/src/airbyte_api/models/source_whisky_hunter.py b/src/airbyte_api/models/source_whisky_hunter.py index 932a6bf5..77b65815 100644 --- a/src/airbyte_api/models/source_whisky_hunter.py +++ b/src/airbyte_api/models/source_whisky_hunter.py @@ -5,7 +5,7 @@ from airbyte_api import utils from dataclasses_json import Undefined, dataclass_json from enum import Enum -from typing import Final, Optional +from typing import Final class WhiskyHunter(str, Enum): @@ -15,6 +15,6 @@ class WhiskyHunter(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class SourceWhiskyHunter: - SOURCE_TYPE: Final[Optional[WhiskyHunter]] = dataclasses.field(default=WhiskyHunter.WHISKY_HUNTER, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType'), 'exclude': lambda f: f is None }}) + SOURCE_TYPE: Final[WhiskyHunter] = dataclasses.field(default=WhiskyHunter.WHISKY_HUNTER, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) diff --git a/src/airbyte_api/models/source_wikipedia_pageviews.py b/src/airbyte_api/models/source_wikipedia_pageviews.py index f1c88e2e..73bd4f79 100644 --- a/src/airbyte_api/models/source_wikipedia_pageviews.py +++ b/src/airbyte_api/models/source_wikipedia_pageviews.py @@ -28,7 +28,7 @@ class SourceWikipediaPageviews: project: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('project') }}) r"""If you want to filter by project, use the domain of any Wikimedia project.""" start: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start') }}) - r"""The date of the first day to include, in YYYYMMDD or YYYYMMDDHH format.""" + r"""The date of the first day to include, in YYYYMMDD or YYYYMMDDHH format. 
Also serves as the date to retrieve data for the top articles.""" SOURCE_TYPE: Final[WikipediaPageviews] = dataclasses.field(default=WikipediaPageviews.WIKIPEDIA_PAGEVIEWS, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) diff --git a/src/airbyte_api/models/source_youtube_analytics.py b/src/airbyte_api/models/source_youtube_analytics.py index 02a00059..b5d3596e 100644 --- a/src/airbyte_api/models/source_youtube_analytics.py +++ b/src/airbyte_api/models/source_youtube_analytics.py @@ -11,7 +11,6 @@ @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class AuthenticateViaOAuth20: - UNSET='__SPEAKEASY_UNSET__' client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }}) r"""The Client ID of your developer application""" client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }}) diff --git a/src/airbyte_api/models/source_zendesk_chat.py b/src/airbyte_api/models/source_zendesk_chat.py index bfbaca47..5f38e511 100644 --- a/src/airbyte_api/models/source_zendesk_chat.py +++ b/src/airbyte_api/models/source_zendesk_chat.py @@ -43,8 +43,6 @@ class SourceZendeskChatOAuth20: -SourceZendeskChatAuthorizationMethod = Union['SourceZendeskChatOAuth20', 'SourceZendeskChatAccessToken'] - class SourceZendeskChatZendeskChat(str, Enum): ZENDESK_CHAT = 'zendesk-chat' @@ -61,3 +59,5 @@ class SourceZendeskChat: r"""Required if you access Zendesk Chat from a Zendesk Support subdomain.""" + +SourceZendeskChatAuthorizationMethod = Union[SourceZendeskChatOAuth20, SourceZendeskChatAccessToken] diff --git a/src/airbyte_api/models/source_zendesk_sunshine.py b/src/airbyte_api/models/source_zendesk_sunshine.py index 804479b0..69f7922f 100644 --- a/src/airbyte_api/models/source_zendesk_sunshine.py +++ b/src/airbyte_api/models/source_zendesk_sunshine.py @@ -43,8 +43,6 @@ class SourceZendeskSunshineOAuth20: -SourceZendeskSunshineAuthorizationMethod = Union['SourceZendeskSunshineOAuth20', 'SourceZendeskSunshineAPIToken'] - class SourceZendeskSunshineZendeskSunshine(str, Enum): ZENDESK_SUNSHINE = 'zendesk-sunshine' @@ -61,3 +59,5 @@ class SourceZendeskSunshine: SOURCE_TYPE: Final[SourceZendeskSunshineZendeskSunshine] = dataclasses.field(default=SourceZendeskSunshineZendeskSunshine.ZENDESK_SUNSHINE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + +SourceZendeskSunshineAuthorizationMethod = Union[SourceZendeskSunshineOAuth20, SourceZendeskSunshineAPIToken] diff --git a/src/airbyte_api/models/source_zendesk_support.py b/src/airbyte_api/models/source_zendesk_support.py index 0544a973..c9177f78 100644 --- a/src/airbyte_api/models/source_zendesk_support.py +++ b/src/airbyte_api/models/source_zendesk_support.py @@ -17,7 +17,6 @@ class SourceZendeskSupportSchemasCredentials(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class SourceZendeskSupportAPIToken: - UNSET='__SPEAKEASY_UNSET__' api_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_token') }}) r"""The value of the API token generated. 
See our full documentation for more information on generating this token.""" email: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('email') }}) @@ -35,7 +34,6 @@ class SourceZendeskSupportCredentials(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class SourceZendeskSupportOAuth20: - UNSET='__SPEAKEASY_UNSET__' access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }}) r"""The OAuth access token. See the Zendesk docs for more information on generating this token.""" additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) @@ -47,8 +45,6 @@ class SourceZendeskSupportOAuth20: -SourceZendeskSupportAuthentication = Union['SourceZendeskSupportOAuth20', 'SourceZendeskSupportAPIToken'] - class SourceZendeskSupportZendeskSupport(str, Enum): ZENDESK_SUPPORT = 'zendesk-support' @@ -68,3 +64,5 @@ class SourceZendeskSupport: r"""The UTC date and time from which you'd like to replicate data, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.""" + +SourceZendeskSupportAuthentication = Union[SourceZendeskSupportOAuth20, SourceZendeskSupportAPIToken] diff --git a/src/airbyte_api/models/source_zendesk_talk.py b/src/airbyte_api/models/source_zendesk_talk.py index 3f927ba6..9364a829 100644 --- a/src/airbyte_api/models/source_zendesk_talk.py +++ b/src/airbyte_api/models/source_zendesk_talk.py @@ -17,7 +17,6 @@ class SourceZendeskTalkSchemasAuthType(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class SourceZendeskTalkAPIToken: - UNSET='__SPEAKEASY_UNSET__' api_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_token') }}) r"""The value of the API token generated. See the docs for more information.""" email: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('email') }}) @@ -35,7 +34,6 @@ class SourceZendeskTalkAuthType(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class SourceZendeskTalkOAuth20: - UNSET='__SPEAKEASY_UNSET__' access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }}) r"""The value of the API token generated. 
See the docs for more information.""" additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) @@ -47,8 +45,6 @@ class SourceZendeskTalkOAuth20: -SourceZendeskTalkAuthentication = Union['SourceZendeskTalkOAuth20', 'SourceZendeskTalkAPIToken'] - class SourceZendeskTalkZendeskTalk(str, Enum): ZENDESK_TALK = 'zendesk-talk' @@ -66,3 +62,5 @@ class SourceZendeskTalk: SOURCE_TYPE: Final[SourceZendeskTalkZendeskTalk] = dataclasses.field(default=SourceZendeskTalkZendeskTalk.ZENDESK_TALK, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + +SourceZendeskTalkAuthentication = Union[SourceZendeskTalkOAuth20, SourceZendeskTalkAPIToken] diff --git a/src/airbyte_api/models/sourceconfiguration.py b/src/airbyte_api/models/sourceconfiguration.py index 36c14daa..95c415aa 100644 --- a/src/airbyte_api/models/sourceconfiguration.py +++ b/src/airbyte_api/models/sourceconfiguration.py @@ -23,6 +23,7 @@ from .source_cart import SourceCart from .source_chargebee import SourceChargebee from .source_chartmogul import SourceChartmogul +from .source_clazar import SourceClazar from .source_clickhouse import SourceClickhouse from .source_clickup_api import SourceClickupAPI from .source_clockify import SourceClockify @@ -33,6 +34,7 @@ from .source_configcat import SourceConfigcat from .source_confluence import SourceConfluence from .source_convex import SourceConvex +from .source_datadog import SourceDatadog from .source_datascope import SourceDatascope from .source_delighted import SourceDelighted from .source_dixa import SourceDixa @@ -47,6 +49,7 @@ from .source_fauna import SourceFauna from .source_file import SourceFile from .source_firebolt import SourceFirebolt +from .source_fleetio import SourceFleetio from .source_freshcaller import SourceFreshcaller from .source_freshdesk import SourceFreshdesk from .source_freshsales import SourceFreshsales @@ -57,9 +60,9 @@ from .source_gitlab import SourceGitlab from .source_glassfrog import SourceGlassfrog from .source_gnews import SourceGnews +from .source_goldcast import SourceGoldcast from .source_google_ads import SourceGoogleAds from .source_google_analytics_data_api import SourceGoogleAnalyticsDataAPI -from .source_google_analytics_v4_service_account_only import SourceGoogleAnalyticsV4ServiceAccountOnly from .source_google_directory import SourceGoogleDirectory from .source_google_drive import SourceGoogleDrive from .source_google_pagespeed_insights import SourceGooglePagespeedInsights @@ -129,7 +132,6 @@ from .source_posthog import SourcePosthog from .source_postmarkapp import SourcePostmarkapp from .source_prestashop import SourcePrestashop -from .source_punk_api import SourcePunkAPI from .source_pypi import SourcePypi from .source_qualaroo import SourceQualaroo from .source_railz import SourceRailz @@ -197,4 +199,4 @@ from .source_zoom import SourceZoom from typing import Union -SourceConfiguration = Union[SourceAha, SourceAircall, SourceAirtable, SourceAmazonAds, SourceAmazonSellerPartner, SourceAmazonSqs, SourceAmplitude, SourceApifyDataset, SourceAppfollow, SourceAsana, SourceAuth0, SourceAwsCloudtrail, SourceAzureBlobStorage, SourceAzureTable, SourceBambooHr, SourceBigquery, SourceBingAds, SourceBraintree, SourceBraze, SourceCart, SourceChargebee, SourceChartmogul, SourceClickhouse, SourceClickupAPI, SourceClockify, SourceCloseCom, SourceCoda, SourceCoinAPI, SourceCoinmarketcap, SourceConfigcat, SourceConfluence, SourceConvex, 
SourceDatascope, SourceDelighted, SourceDixa, SourceDockerhub, SourceDremio, SourceDynamodb, SourceE2eTestCloud, SourceEmailoctopus, SourceExchangeRates, SourceFacebookMarketing, SourceFaker, SourceFauna, SourceFile, SourceFirebolt, SourceFreshcaller, SourceFreshdesk, SourceFreshsales, SourceGainsightPx, SourceGcs, SourceGetlago, SourceGithub, SourceGitlab, SourceGlassfrog, SourceGnews, SourceGoogleAds, SourceGoogleAnalyticsDataAPI, SourceGoogleAnalyticsV4ServiceAccountOnly, SourceGoogleDirectory, SourceGoogleDrive, SourceGooglePagespeedInsights, SourceGoogleSearchConsole, SourceGoogleSheets, SourceGoogleWebfonts, SourceGreenhouse, SourceGridly, SourceHarvest, SourceHubplanner, SourceHubspot, SourceInsightly, SourceInstagram, SourceInstatus, SourceIntercom, SourceIp2whois, SourceIterable, SourceJira, SourceK6Cloud, SourceKlarna, SourceKlaviyo, SourceKyve, SourceLaunchdarkly, SourceLemlist, SourceLeverHiring, SourceLinkedinAds, SourceLinkedinPages, SourceLinnworks, SourceLokalise, SourceMailchimp, SourceMailgun, SourceMailjetSms, SourceMarketo, SourceMetabase, SourceMicrosoftOnedrive, SourceMicrosoftSharepoint, SourceMicrosoftTeams, SourceMixpanel, SourceMonday, SourceMongodbInternalPoc, SourceMongodbV2, SourceMssql, SourceMyHours, SourceMysql, SourceNetsuite, SourceNotion, SourceNytimes, SourceOkta, SourceOmnisend, SourceOnesignal, SourceOracle, SourceOrb, SourceOrbit, SourceOutbrainAmplify, SourceOutreach, SourcePaypalTransaction, SourcePaystack, SourcePendo, SourcePersistiq, SourcePexelsAPI, SourcePinterest, SourcePipedrive, SourcePocket, SourcePokeapi, SourcePolygonStockAPI, SourcePostgres, SourcePosthog, SourcePostmarkapp, SourcePrestashop, SourcePunkAPI, SourcePypi, SourceQualaroo, SourceRailz, SourceRecharge, SourceRecreation, SourceRecruitee, SourceRecurly, SourceRedshift, SourceRetently, SourceRkiCovid, SourceRss, SourceS3, SourceSalesforce, SourceSalesloft, SourceSapFieldglass, SourceSecoda, SourceSendgrid, SourceSendinblue, SourceSenseforce, SourceSentry, SourceSftp, SourceSftpBulk, SourceShopify, SourceShortio, SourceSlack, SourceSmaily, SourceSmartengage, SourceSmartsheets, SourceSnapchatMarketing, SourceSnowflake, SourceSonarCloud, SourceSpacexAPI, SourceSquare, SourceStrava, SourceStripe, SourceSurveySparrow, SourceSurveymonkey, SourceTempo, SourceTheGuardianAPI, SourceTiktokMarketing, SourceTrello, SourceTrustpilot, SourceTvmazeSchedule, SourceTwilio, SourceTwilioTaskrouter, SourceTwitter, SourceTypeform, SourceUsCensus, SourceVantage, SourceWebflow, SourceWhiskyHunter, SourceWikipediaPageviews, SourceWoocommerce, SourceXkcd, SourceYandexMetrica, SourceYotpo, SourceYoutubeAnalytics, SourceZendeskChat, SourceZendeskSell, SourceZendeskSunshine, SourceZendeskSupport, SourceZendeskTalk, SourceZenloop, SourceZohoCrm, SourceZoom] +SourceConfiguration = Union[SourceAha, SourceAircall, SourceAirtable, SourceAmazonAds, SourceAmazonSellerPartner, SourceAmazonSqs, SourceAmplitude, SourceApifyDataset, SourceAppfollow, SourceAsana, SourceAuth0, SourceAwsCloudtrail, SourceAzureBlobStorage, SourceAzureTable, SourceBambooHr, SourceBigquery, SourceBingAds, SourceBraintree, SourceBraze, SourceCart, SourceChargebee, SourceChartmogul, SourceClazar, SourceClickhouse, SourceClickupAPI, SourceClockify, SourceCloseCom, SourceCoda, SourceCoinAPI, SourceCoinmarketcap, SourceConfigcat, SourceConfluence, SourceConvex, SourceDatadog, SourceDatascope, SourceDelighted, SourceDixa, SourceDockerhub, SourceDremio, SourceDynamodb, SourceE2eTestCloud, SourceEmailoctopus, SourceExchangeRates, 
SourceFacebookMarketing, SourceFaker, SourceFauna, SourceFile, SourceFirebolt, SourceFleetio, SourceFreshcaller, SourceFreshdesk, SourceFreshsales, SourceGainsightPx, SourceGcs, SourceGetlago, SourceGithub, SourceGitlab, SourceGlassfrog, SourceGnews, SourceGoldcast, SourceGoogleAds, SourceGoogleAnalyticsDataAPI, SourceGoogleDirectory, SourceGoogleDrive, SourceGooglePagespeedInsights, SourceGoogleSearchConsole, SourceGoogleSheets, SourceGoogleWebfonts, SourceGreenhouse, SourceGridly, SourceHarvest, SourceHubplanner, SourceHubspot, SourceInsightly, SourceInstagram, SourceInstatus, SourceIntercom, SourceIp2whois, SourceIterable, SourceJira, SourceK6Cloud, SourceKlarna, SourceKlaviyo, SourceKyve, SourceLaunchdarkly, SourceLemlist, SourceLeverHiring, SourceLinkedinAds, SourceLinkedinPages, SourceLinnworks, SourceLokalise, SourceMailchimp, SourceMailgun, SourceMailjetSms, SourceMarketo, SourceMetabase, SourceMicrosoftOnedrive, SourceMicrosoftSharepoint, SourceMicrosoftTeams, SourceMixpanel, SourceMonday, SourceMongodbInternalPoc, SourceMongodbV2, SourceMssql, SourceMyHours, SourceMysql, SourceNetsuite, SourceNotion, SourceNytimes, SourceOkta, SourceOmnisend, SourceOnesignal, SourceOracle, SourceOrb, SourceOrbit, SourceOutbrainAmplify, SourceOutreach, SourcePaypalTransaction, SourcePaystack, SourcePendo, SourcePersistiq, SourcePexelsAPI, SourcePinterest, SourcePipedrive, SourcePocket, SourcePokeapi, SourcePolygonStockAPI, SourcePostgres, SourcePosthog, SourcePostmarkapp, SourcePrestashop, SourcePypi, SourceQualaroo, SourceRailz, SourceRecharge, SourceRecreation, SourceRecruitee, SourceRecurly, SourceRedshift, SourceRetently, SourceRkiCovid, SourceRss, SourceS3, SourceSalesforce, SourceSalesloft, SourceSapFieldglass, SourceSecoda, SourceSendgrid, SourceSendinblue, SourceSenseforce, SourceSentry, SourceSftp, SourceSftpBulk, SourceShopify, SourceShortio, SourceSlack, SourceSmaily, SourceSmartengage, SourceSmartsheets, SourceSnapchatMarketing, SourceSnowflake, SourceSonarCloud, SourceSpacexAPI, SourceSquare, SourceStrava, SourceStripe, SourceSurveySparrow, SourceSurveymonkey, SourceTempo, SourceTheGuardianAPI, SourceTiktokMarketing, SourceTrello, SourceTrustpilot, SourceTvmazeSchedule, SourceTwilio, SourceTwilioTaskrouter, SourceTwitter, SourceTypeform, SourceUsCensus, SourceVantage, SourceWebflow, SourceWhiskyHunter, SourceWikipediaPageviews, SourceWoocommerce, SourceXkcd, SourceYandexMetrica, SourceYotpo, SourceYoutubeAnalytics, SourceZendeskChat, SourceZendeskSell, SourceZendeskSunshine, SourceZendeskSupport, SourceZendeskTalk, SourceZenloop, SourceZohoCrm, SourceZoom] diff --git a/src/airbyte_api/models/userresponse.py b/src/airbyte_api/models/userresponse.py index b48fa3a3..17f41aea 100644 --- a/src/airbyte_api/models/userresponse.py +++ b/src/airbyte_api/models/userresponse.py @@ -4,17 +4,16 @@ import dataclasses from airbyte_api import utils from dataclasses_json import Undefined, dataclass_json -from typing import Optional @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class UserResponse: - r"""Provides details of a single user.""" + r"""Provides details of a single user in an organization.""" email: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('email') }}) - user_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('userId') }}) + id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('id') }}) r"""Internal Airbyte user ID""" - name: 
Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name'), 'exclude': lambda f: f is None }}) + name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name') }}) r"""Name of the user""" diff --git a/src/airbyte_api/models/usersresponse.py b/src/airbyte_api/models/usersresponse.py index 71bd378f..a9405110 100644 --- a/src/airbyte_api/models/usersresponse.py +++ b/src/airbyte_api/models/usersresponse.py @@ -11,7 +11,7 @@ @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class UsersResponse: - r"""List/Array of multiple users""" + r"""List/Array of multiple users in an organization""" data: List[UserResponse] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('data') }}) diff --git a/src/airbyte_api/organizations.py b/src/airbyte_api/organizations.py new file mode 100644 index 00000000..4862b919 --- /dev/null +++ b/src/airbyte_api/organizations.py @@ -0,0 +1,73 @@ +"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" + +import requests as requests_http +from .sdkconfiguration import SDKConfiguration +from airbyte_api import api, errors, models, utils +from airbyte_api._hooks import AfterErrorContext, AfterSuccessContext, BeforeRequestContext, HookContext +from typing import Optional + +class Organizations: + sdk_configuration: SDKConfiguration + + def __init__(self, sdk_config: SDKConfiguration) -> None: + self.sdk_configuration = sdk_config + + + + def list_organizations_for_user(self) -> api.ListOrganizationsForUserResponse: + r"""List all organizations for a user + Lists users organizations. + """ + hook_ctx = HookContext(operation_id='listOrganizationsForUser', oauth2_scopes=[], security_source=self.sdk_configuration.security) + base_url = utils.template_url(*self.sdk_configuration.get_server_details()) + + url = base_url + '/organizations' + + if callable(self.sdk_configuration.security): + headers, query_params = utils.get_security(self.sdk_configuration.security()) + else: + headers, query_params = utils.get_security(self.sdk_configuration.security) + + headers['Accept'] = 'application/json' + headers['user-agent'] = self.sdk_configuration.user_agent + client = self.sdk_configuration.client + + try: + req = client.prepare_request(requests_http.Request('GET', url, params=query_params, headers=headers)) + req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req) + http_res = client.send(req) + except Exception as e: + _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e) + if e is not None: + raise e + + if utils.match_status_codes(['403','404','4XX','5XX'], http_res.status_code): + result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None) + if e is not None: + raise e + if result is not None: + http_res = result + else: + http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res) + + + + res = api.ListOrganizationsForUserResponse(status_code=http_res.status_code, content_type=http_res.headers.get('Content-Type') or '', raw_response=http_res) + + if http_res.status_code == 200: + # pylint: disable=no-else-return + if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): + out = utils.unmarshal_json(http_res.text, Optional[models.OrganizationsResponse]) + res.organizations_response = out + else: + content_type = 
http_res.headers.get('Content-Type') + raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) + elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: + raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) + else: + raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) + + return res + + + diff --git a/src/airbyte_api/sdk.py b/src/airbyte_api/sdk.py index 11c58bfb..985e5c84 100644 --- a/src/airbyte_api/sdk.py +++ b/src/airbyte_api/sdk.py @@ -5,6 +5,7 @@ from .destinations import Destinations from .health import Health from .jobs import Jobs +from .organizations import Organizations from .permissions import Permissions from .sdkconfiguration import SDKConfiguration from .sources import Sources @@ -22,6 +23,7 @@ class AirbyteAPI: destinations: Destinations health: Health jobs: Jobs + organizations: Organizations permissions: Permissions sources: Sources streams: Streams @@ -87,6 +89,7 @@ def _init_sdks(self): self.destinations = Destinations(self.sdk_configuration) self.health = Health(self.sdk_configuration) self.jobs = Jobs(self.sdk_configuration) + self.organizations = Organizations(self.sdk_configuration) self.permissions = Permissions(self.sdk_configuration) self.sources = Sources(self.sdk_configuration) self.streams = Streams(self.sdk_configuration) diff --git a/src/airbyte_api/sdkconfiguration.py b/src/airbyte_api/sdkconfiguration.py index bfd0b0ff..cc9927ea 100644 --- a/src/airbyte_api/sdkconfiguration.py +++ b/src/airbyte_api/sdkconfiguration.py @@ -24,9 +24,9 @@ class SDKConfiguration: server_idx: Optional[int] = 0 language: str = 'python' openapi_doc_version: str = '1.0.0' - sdk_version: str = '0.50.1' - gen_version: str = '2.338.7' - user_agent: str = 'speakeasy-sdk/python 0.50.1 2.338.7 1.0.0 airbyte-api' + sdk_version: str = '0.51.0' + gen_version: str = '2.372.3' + user_agent: str = 'speakeasy-sdk/python 0.51.0 2.372.3 1.0.0 airbyte-api' retry_config: Optional[RetryConfig] = None def __post_init__(self): diff --git a/src/airbyte_api/users.py b/src/airbyte_api/users.py index 319a2997..a58f5f85 100644 --- a/src/airbyte_api/users.py +++ b/src/airbyte_api/users.py @@ -14,11 +14,11 @@ def __init__(self, sdk_config: SDKConfiguration) -> None: - def list_users(self, request: api.ListUsersRequest) -> api.ListUsersResponse: - r"""List users - Lists users based on provided filters. You can filter on either a list of IDs or a list of emails, but not both. If no filters provided we will list all users by default. + def list_users_within_an_organization(self, request: api.ListUsersWithinAnOrganizationRequest) -> api.ListUsersWithinAnOrganizationResponse: + r"""List all users within an organization + Organization Admin user can list all users within the same organization. Also provide filtering on a list of user IDs or/and a list of user emails. 
""" - hook_ctx = HookContext(operation_id='listUsers', oauth2_scopes=[], security_source=self.sdk_configuration.security) + hook_ctx = HookContext(operation_id='listUsersWithinAnOrganization', oauth2_scopes=[], security_source=self.sdk_configuration.security) base_url = utils.template_url(*self.sdk_configuration.get_server_details()) url = base_url + '/users' @@ -53,7 +53,7 @@ def list_users(self, request: api.ListUsersRequest) -> api.ListUsersResponse: - res = api.ListUsersResponse(status_code=http_res.status_code, content_type=http_res.headers.get('Content-Type') or '', raw_response=http_res) + res = api.ListUsersWithinAnOrganizationResponse(status_code=http_res.status_code, content_type=http_res.headers.get('Content-Type') or '', raw_response=http_res) if http_res.status_code == 200: # pylint: disable=no-else-return diff --git a/src/airbyte_api/utils/utils.py b/src/airbyte_api/utils/utils.py index 06f42113..a79b2f54 100644 --- a/src/airbyte_api/utils/utils.py +++ b/src/airbyte_api/utils/utils.py @@ -904,6 +904,9 @@ def bigintencode(val: int): def bigintdecoder(val): + if val is None: + return None + if isinstance(val, float): raise ValueError(f"{val} is a float") return int(val) @@ -918,6 +921,9 @@ def integerstrencode(val: int): def integerstrdecoder(val): + if val is None: + return None + if isinstance(val, float): raise ValueError(f"{val} is a float") return int(val) @@ -933,6 +939,9 @@ def numberstrencode(val: float): def numberstrdecoder(val): + if val is None: + return None + return float(val) @@ -950,6 +959,9 @@ def decimalencode(val: Decimal): def decimaldecoder(val): + if val is None: + return None + return Decimal(str(val)) diff --git a/src/airbyte_api/workspaces.py b/src/airbyte_api/workspaces.py index 020dab82..0b7898fb 100644 --- a/src/airbyte_api/workspaces.py +++ b/src/airbyte_api/workspaces.py @@ -17,7 +17,7 @@ def __init__(self, sdk_config: SDKConfiguration) -> None: def create_or_update_workspace_o_auth_credentials(self, request: api.CreateOrUpdateWorkspaceOAuthCredentialsRequest) -> api.CreateOrUpdateWorkspaceOAuthCredentialsResponse: r"""Create OAuth override credentials for a workspace and source type. Create/update a set of OAuth credentials to override the Airbyte-provided OAuth credentials used for source/destination OAuth. - In order to determine what the credential configuration needs to be, please see the connector specification of the relevant source/destination. + In order to determine what the credential configuration needs to be, please see the connector specification of the relevant source/destination. """ hook_ctx = HookContext(operation_id='createOrUpdateWorkspaceOAuthCredentials', oauth2_scopes=[], security_source=self.sdk_configuration.security) base_url = utils.template_url(*self.sdk_configuration.get_server_details())