Skip to content

Commit

Permalink
Merge branch 'master' into pnilan/workflows/update-automerge-rules
Browse files Browse the repository at this point in the history
  • Loading branch information
pnilan committed Jan 9, 2025
2 parents 3d04e34 + 7a196ae commit 62cf3e7
Show file tree
Hide file tree
Showing 68 changed files with 659 additions and 605 deletions.
37 changes: 24 additions & 13 deletions .github/actions/install-airbyte-ci/action.yml
Original file line number Diff line number Diff line change
Expand Up @@ -30,21 +30,26 @@ runs:
- name: "Determine how Airbyte CI should be installed"
shell: bash
id: determine-install-mode
# When the PR is from a fork, we always install from binary
if: inputs.is_fork == 'false'
run: |
if [[ "${{ github.ref }}" != "refs/heads/master" ]] && [[ "${{ steps.changes.outputs.pipelines_any_changed }}" == "true" ]]; then
echo "Making changes to Airbyte CI on a non-master branch. Airbyte-CI will be installed from source."
echo "install-mode=source" >> $GITHUB_OUTPUT
echo "SENTRY_ENVIRONMENT=dev" >> $GITHUB_ENV
else
echo "install-mode=binary" >> $GITHUB_OUTPUT
echo "SENTRY_ENVIRONMENT=production" >> $GITHUB_ENV
fi
echo "install-mode=source" >> $GITHUB_OUTPUT
echo "SENTRY_ENVIRONMENT=dev" >> $GITHUB_ENV
# When the PR is from a fork, we always install from binary
# if: inputs.is_fork == 'false'
# run: |
# if [[ "${{ github.ref }}" != "refs/heads/master" ]] && [[ "${{ steps.changes.outputs.pipelines_any_changed }}" == "true" ]]; then
# echo "Making changes to Airbyte CI on a non-master branch. Airbyte-CI will be installed from source."
# echo "install-mode=source" >> $GITHUB_OUTPUT
# echo "SENTRY_ENVIRONMENT=dev" >> $GITHUB_ENV
# else
# echo "install-mode=binary" >> $GITHUB_OUTPUT
# echo "SENTRY_ENVIRONMENT=production" >> $GITHUB_ENV
# fi

- name: Install Airbyte CI from binary
id: install-airbyte-ci-binary
if: steps.determine-install-mode.outputs.install-mode == 'binary' || ${{ inputs.is_fork }} == 'true'
if: false
# if: steps.determine-install-mode.outputs.install-mode == 'binary' || ${{ inputs.is_fork }} == 'true'
shell: bash
run: |
curl -sSL ${{ inputs.airbyte_ci_binary_url }} --output airbyte-ci-bin
Expand All @@ -54,21 +59,27 @@ runs:
- name: Install Python 3.10
id: install-python-3-10
uses: actions/setup-python@v4
if: steps.determine-install-mode.outputs.install-mode == 'source'
# if: steps.determine-install-mode.outputs.install-mode == 'source'
with:
python-version: "3.10"
token: ${{ inputs.github_token }}

- name: Install Airbyte CI from source
id: install-airbyte-ci-source
if: steps.determine-install-mode.outputs.install-mode == 'source'
if: true
# if: steps.determine-install-mode.outputs.install-mode == 'source'
shell: bash
run: |
pip install --upgrade pip
pip install pipx
pipx ensurepath
pipx install ${{ inputs.path_to_airbyte_ci_source }}
- name: Print installed `airbyte-ci` version
shell: bash
run: |
airbyte-ci --version
- name: Get dagger engine image name
id: get-dagger-engine-image-name
shell: bash
Expand Down
2 changes: 2 additions & 0 deletions .github/workflows/airbyte-ci-release.yml
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,8 @@ jobs:
- name: Install Poetry
id: install_poetry
uses: snok/install-poetry@v1
with:
version: 1.8.5

- name: Install Dependencies
id: install_dependencies
Expand Down
2 changes: 2 additions & 0 deletions .github/workflows/auto_merge.yml
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,8 @@ jobs:
python-version: "3.10"
- name: Install and configure Poetry
uses: snok/install-poetry@v1
with:
version: 1.8.5
- name: Run auto merge
shell: bash
working-directory: airbyte-ci/connectors/auto_merge
Expand Down
1 change: 1 addition & 0 deletions .github/workflows/connectors_insights.yml
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ jobs:
- name: Install Poetry
uses: snok/install-poetry@v1
with:
version: 1.8.5
virtualenvs-create: true
virtualenvs-in-project: true
installer-parallel: true
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/connectors_tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ jobs:
# If the condition is not met the job will be skipped (it will not fail)
if: (github.event_name == 'pull_request' && needs.changes.outputs.connectors == 'true' && github.event.pull_request.head.repo.fork != true) || github.event_name == 'workflow_dispatch'
name: Connectors CI
runs-on: connector-test-large
runs-on: linux-20.04-large # Custom runner, defined in GitHub org settings
timeout-minutes: 360 # 6 hours
steps:
- name: Checkout Airbyte
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/connectors_version_increment_check.yml
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ on:
jobs:
connectors_ci:
name: Connectors Version Increment Check
runs-on: connector-test-large
runs-on: linux-20.04-large # Custom runner, defined in GitHub org settings
if: github.event.pull_request.head.repo.fork != true
timeout-minutes: 22
steps:
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/gradle.yml
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ jobs:
# Any revision upwards should be based on a performance analysis of gradle scans.
# See https://github.com/airbytehq/airbyte/pull/36055 for an example of this,
# which explains why which we went down from 64 cores to 16.
runs-on: connector-test-large
runs-on: linux-20.04-large # Custom runner, defined in GitHub org settings
name: Gradle Check
timeout-minutes: 30
steps:
Expand Down
4 changes: 3 additions & 1 deletion .github/workflows/live_tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ on:
jobs:
live_tests:
name: Live Tests
runs-on: connector-test-large
runs-on: linux-20.04-large # Custom runner, defined in GitHub org settings
timeout-minutes: 360 # 6 hours
steps:
- name: Checkout Airbyte
Expand All @@ -63,6 +63,8 @@ jobs:
- name: Install Poetry
id: install_poetry
uses: snok/install-poetry@v1
with:
version: 1.8.5

- name: Make poetry venv in project
id: poetry_venv
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/publish-bulk-cdk.yml
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ env:
jobs:
publish-bulk-cdk:
name: Publish Bulk CDK
runs-on: connector-test-large
runs-on: linux-20.04-large # Custom runner, defined in GitHub org settings
timeout-minutes: 30
steps:
- name: Checkout Airbyte
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/publish-java-cdk-command.yml
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ env:
jobs:
publish-cdk:
name: Publish Java CDK
runs-on: connector-test-large
runs-on: linux-20.04-large # Custom runner, defined in GitHub org settings
timeout-minutes: 30
steps:
- name: Link comment to Workflow Run
Expand Down
4 changes: 3 additions & 1 deletion .github/workflows/regression_tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ on:
jobs:
regression_tests:
name: Regression Tests
runs-on: connector-test-large
runs-on: linux-20.04-large # Custom runner, defined in GitHub org settings
timeout-minutes: 360 # 6 hours
steps:
- name: Checkout Airbyte
Expand All @@ -63,6 +63,8 @@ jobs:
- name: Install Poetry
id: install_poetry
uses: snok/install-poetry@v1
with:
version: 1.8.5

- name: Make poetry venv in project
id: poetry_venv
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,8 @@ class DebeziumPropertiesBuilder(private val props: Properties = Properties()) {
// unless we set the following.
with("value.converter.replace.null.with.default", "false")
// Timeout for DebeziumEngine's close() method.
with("debezium.embedded.shutdown.pause.before.interrupt.ms", "10000")
// We find that in production, substantial time is in fact legitimately required here.
with("debezium.embedded.shutdown.pause.before.interrupt.ms", "60000")
// Unblock CDC syncs by skipping errors caused by unparseable DDLs
with("schema.history.internal.skip.unparseable.ddl", "true")
}
Expand Down
3 changes: 2 additions & 1 deletion airbyte-cdk/bulk/toolkits/load-s3/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -5,5 +5,6 @@ dependencies {
api project(':airbyte-cdk:bulk:toolkits:bulk-cdk-toolkit-load-object-storage')

testFixturesApi(testFixtures(project(":airbyte-cdk:bulk:toolkits:bulk-cdk-toolkit-load-object-storage")))
implementation("aws.sdk.kotlin:s3:1.3.94")
implementation("aws.sdk.kotlin:s3:1.3.98")
implementation("aws.smithy.kotlin:http-client-engine-okhttp:1.3.31")
}
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ import aws.sdk.kotlin.services.s3.model.PutObjectRequest
import aws.smithy.kotlin.runtime.auth.awscredentials.CredentialsProvider
import aws.smithy.kotlin.runtime.content.ByteStream
import aws.smithy.kotlin.runtime.content.toInputStream
import aws.smithy.kotlin.runtime.http.engine.okhttp.OkHttpEngine
import aws.smithy.kotlin.runtime.net.url.Url
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings
import io.airbyte.cdk.load.command.aws.AWSAccessKeyConfigurationProvider
Expand All @@ -38,6 +39,7 @@ import jakarta.inject.Singleton
import java.io.ByteArrayOutputStream
import java.io.InputStream
import java.io.OutputStream
import kotlin.time.Duration.Companion.milliseconds
import kotlinx.coroutines.flow.flow

data class S3Object(override val key: String, override val storageConfig: S3BucketConfiguration) :
Expand Down Expand Up @@ -244,6 +246,9 @@ class S3ClientFactory(
Url.parse(it)
} else null
}
// Fix for connection reset issue:
// https://github.com/awslabs/aws-sdk-kotlin/issues/1214#issuecomment-2464831817
httpClient(OkHttpEngine) { connectionIdlePollingInterval = 200.milliseconds }
}

return S3Client(
Expand Down
3 changes: 2 additions & 1 deletion airbyte-ci/connectors/pipelines/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -854,7 +854,8 @@ airbyte-ci connectors --language=low-code migrate-to-manifest-only

| Version | PR | Description |
| ------- | ---------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------- |
| 4.48.3 | [#49827](https://github.com/airbytehq/airbyte/pull/49827) | Bypasses CI checks for promoted release candidate PRs. |
| 4.48.4 | [#49827](https://github.com/airbytehq/airbyte/pull/49827) | Bypasses CI checks for promoted release candidate PRs. |
| 4.48.3 | [#50988](https://github.com/airbytehq/airbyte/pull/50988) | Remove deprecated `--no-update` flag from poetry commands |
| 4.48.2 | [#50871](https://github.com/airbytehq/airbyte/pull/50871) | Speed up connector modification detection. |
| 4.48.1 | [#50410](https://github.com/airbytehq/airbyte/pull/50410) | Java connector build: give ownership of built artifacts to the current image user. |
| 4.48.0 | [#49960](https://github.com/airbytehq/airbyte/pull/49960) | Deprecate airbyte-ci format command |
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -100,9 +100,7 @@ def _build_erd_container(self, connector_directory: Directory, discovered_catalo
.with_workdir("/app")
)

return container.with_exec(["poetry", "lock", "--no-update"], use_entrypoint=True).with_exec(
["poetry", "install"], use_entrypoint=True
)
return container.with_exec(["poetry", "lock"], use_entrypoint=True).with_exec(["poetry", "install"], use_entrypoint=True)


class UploadDbmlSchema(Step):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -828,7 +828,5 @@ async def _build_test_container(self, target_container_id: str) -> Container:
)
)

container = container.with_exec(["poetry", "lock", "--no-update"], use_entrypoint=True).with_exec(
["poetry", "install"], use_entrypoint=True
)
container = container.with_exec(["poetry", "lock"], use_entrypoint=True).with_exec(["poetry", "install"], use_entrypoint=True)
return container
Original file line number Diff line number Diff line change
Expand Up @@ -139,7 +139,7 @@ async def upgrade_cdk_version_for_python_connector(self, og_connector_dir: Direc
connector_container = base_container.with_mounted_directory("/connector", updated_connector_dir).with_workdir("/connector")

poetry_lock_file = await connector_container.file(POETRY_LOCK_FILENAME).contents()
updated_container = await connector_container.with_exec(["poetry", "lock", "--no-update"], use_entrypoint=True)
updated_container = await connector_container.with_exec(["poetry", "lock"], use_entrypoint=True)
updated_poetry_lock_file = await updated_container.file(POETRY_LOCK_FILENAME).contents()

if poetry_lock_file != updated_poetry_lock_file:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -239,7 +239,7 @@ def prepare_container_for_poe_tasks(
],
use_entrypoint=True,
)
.with_exec(["poetry", "lock", "--no-update"], use_entrypoint=True)
.with_exec(["poetry", "lock"], use_entrypoint=True)
)

# Install the poetry package
Expand Down
3 changes: 2 additions & 1 deletion airbyte-ci/connectors/pipelines/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,8 @@ types-requests = "^2.31"
sentry-sdk = "^2.12.0"
segment-analytics-python = "^2.2.3"
pygit2 = "^1.13.1"
asyncclick = "^8.1.3.4"
asyncclick = "^8.1.3.4"
certifi = ">=2024"
tomli = "^2.0.1"
tomli-w = "^1.0.0"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,19 +15,19 @@ data:
type: GSM
alias: airbyte-connector-testing-secret-store
connectorType: destination
definitionId: 37a928c1-2d5c-431a-a97d-ae236bd1ea0c
dockerImageTag: 0.2.5
dockerRepository: airbyte/destination-iceberg-v2
documentationUrl: https://docs.airbyte.com/integrations/destinations/s3
githubIssueLabel: destination-iceberg-v2
definitionId: 716ca874-520b-4902-9f80-9fad66754b89
dockerImageTag: 0.2.6
dockerRepository: airbyte/destination-s3-data-lake
documentationUrl: https://docs.airbyte.com/integrations/destinations/s3-data-lake
githubIssueLabel: destination-s3-data-lake
icon: icon.svg
license: ELv2
name: S3 Data Lake Destination
name: S3 Data Lake
registryOverrides:
cloud:
enabled: false
enabled: true
oss:
enabled: false
enabled: true
releaseStage: alpha
supportLevel: community
supportsRefreshes: true
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
{
"documentationUrl" : "https://docs.airbyte.com/integrations/destinations/s3",
"documentationUrl" : "https://docs.airbyte.com/integrations/destinations/s3-data-lake",
"connectionSpecification" : {
"$schema" : "http://json-schema.org/draft-07/schema#",
"title" : "Iceberg V2 Destination Specification",
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
{
"documentationUrl" : "https://docs.airbyte.com/integrations/destinations/s3",
"documentationUrl" : "https://docs.airbyte.com/integrations/destinations/s3-data-lake",
"connectionSpecification" : {
"$schema" : "http://json-schema.org/draft-07/schema#",
"title" : "Iceberg V2 Destination Specification",
Expand Down
2 changes: 1 addition & 1 deletion airbyte-integrations/connectors/source-mysql/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ application {
airbyteBulkConnector {
core = 'extract'
toolkits = ['extract-jdbc', 'extract-cdc']
cdk = '0.249'
cdk = '0.257'
}

dependencies {
Expand Down
2 changes: 1 addition & 1 deletion airbyte-integrations/connectors/source-mysql/metadata.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ data:
connectorSubtype: database
connectorType: source
definitionId: 435bb9a5-7887-4809-aa58-28c27df0d7ad
dockerImageTag: 3.10.0-rc.7
dockerImageTag: 3.10.0-rc.9
dockerRepository: airbyte/source-mysql
documentationUrl: https://docs.airbyte.com/integrations/sources/mysql
githubIssueLabel: source-mysql
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,27 +9,27 @@ import io.airbyte.cdk.read.cdc.NoConversion
import io.airbyte.cdk.read.cdc.NullFallThrough
import io.airbyte.cdk.read.cdc.PartialConverter
import io.airbyte.cdk.read.cdc.RelationalColumnCustomConverter
import io.debezium.spi.converter.RelationalColumn
import org.apache.kafka.connect.data.SchemaBuilder

class MySqlSourceCdcBooleanConverter : RelationalColumnCustomConverter {

override val debeziumPropertiesKey: String = "boolean"
override val handlers: List<RelationalColumnCustomConverter.Handler> = listOf(tinyint1Handler)

companion object {
val tinyint1Handler =
RelationalColumnCustomConverter.Handler(
predicate = {
it.typeName().equals("TINYINT", ignoreCase = true) &&
it.length().isPresent &&
it.length().asInt == 1
},
outputSchema = SchemaBuilder.bool(),
partialConverters =
listOf(
NullFallThrough,
PartialConverter { if (it is Number) Converted(it != 0) else NoConversion }
)
override val handlers: List<RelationalColumnCustomConverter.Handler> = listOf(TinyInt1Handler)

data object TinyInt1Handler : RelationalColumnCustomConverter.Handler {

override fun matches(column: RelationalColumn): Boolean =
column.typeName().equals("TINYINT", ignoreCase = true) &&
column.length().isPresent &&
column.length().asInt == 1

override fun outputSchemaBuilder(): SchemaBuilder = SchemaBuilder.bool()

override val partialConverters: List<PartialConverter> =
listOf(
NullFallThrough,
PartialConverter { if (it is Number) Converted(it != 0) else NoConversion }
)
}
}
Loading

0 comments on commit 62cf3e7

Please sign in to comment.