Merge branch 'master' into 6856-source-mysql---null-errors-on-nullable-field
rodireich authored Jan 17, 2025
2 parents 099ae4e + 49d28f0 commit 5f0a256
Showing 9 changed files with 32 additions and 19 deletions.
3 changes: 2 additions & 1 deletion airbyte-ci/connectors/pipelines/README.md
@@ -854,7 +854,8 @@ airbyte-ci connectors --language=low-code migrate-to-manifest-only

| Version | PR | Description |
| ------- | ---------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------- |
| 4.48.8 | [#51609](https://github.com/airbytehq/airbyte/pull/51609) | Fix typo in `migrate-to-inline-schemas` command |
| 4.48.9 | [#51609](https://github.com/airbytehq/airbyte/pull/51609) | Fix ownership of shared cache volume for non root connectors |
| 4.48.8 | [#51582](https://github.com/airbytehq/airbyte/pull/51582) | Fix typo in `migrate-to-inline-schemas` command |
| 4.48.7 | [#51579](https://github.com/airbytehq/airbyte/pull/51579) | Give back the ownership of /tmp to the original user on finalize build |
| 4.48.6 | [#51577](https://github.com/airbytehq/airbyte/pull/51577) | Run finalize build scripts as root |
| 4.48.5 | [#49827](https://github.com/airbytehq/airbyte/pull/49827) | Bypasses CI checks for promoted release candidate PRs. |
@@ -91,6 +91,8 @@ async def _build_from_base_image(self, platform: Platform) -> Container:

connector_container = build_customization.apply_airbyte_entrypoint(base_connector_container, self.context.connector)
customized_connector = await build_customization.post_install_hooks(self.context.connector, connector_container, self.logger)
# Make sure the user has access to /tmp
customized_connector = customized_connector.with_exec(["chown", "-R", f"{user}:{user}", "/tmp"])
return customized_connector.with_user(user)

async def _build_from_dockerfile(self, platform: Platform) -> Container:
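The hunk above hands /tmp to the connector's non-root user right before the build switches to that user. A minimal standalone sketch of the same pattern with the Dagger Python SDK (the helper name and default user below are illustrative, not part of this commit):

```python
from dagger import Container


def drop_to_user(container: Container, user: str = "airbyte") -> Container:
    """Illustrative helper: make /tmp writable by a non-root user, then switch to it."""
    return (
        container
        # Recursively hand /tmp to the connector user so it can write temp files at runtime.
        .with_exec(["chown", "-R", f"{user}:{user}", "/tmp"])
        # Every subsequent command (and the image entrypoint) runs as that user.
        .with_user(user)
    )
```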
@@ -83,7 +83,7 @@ async def _run(self, connector_under_test: Container) -> StepResult:
pytest_command = self.get_pytest_command(test_config_file_name)

if self.bind_to_docker_host:
test_environment = pipelines.dagger.actions.system.docker.with_bound_docker_host(self.context, test_environment)
test_environment = await pipelines.dagger.actions.system.docker.with_bound_docker_host(self.context, test_environment)

test_execution = test_environment.with_exec(pytest_command)

@@ -201,7 +201,7 @@ async def _run(self, *args: Any, **kwargs: Any) -> StepResult:
gradle_container = gradle_container.with_(await secrets.mounted_connector_secrets(self.context, secrets_dir, self.secrets))
if self.bind_to_docker_host:
# If this GradleTask subclass needs docker, then install it and bind it to the existing global docker host container.
gradle_container = pipelines.dagger.actions.system.docker.with_bound_docker_host(self.context, gradle_container)
gradle_container = await pipelines.dagger.actions.system.docker.with_bound_docker_host(self.context, gradle_container)
# This installation should be cheap, as the package has already been downloaded, and its dependencies are already installed.
gradle_container = gradle_container.with_exec(["yum", "install", "-y", "docker"], use_entrypoint=True)

@@ -3,8 +3,10 @@
#

import json
import logging
import platform
import uuid
from typing import Callable, Dict, List, Optional, Union
from typing import Any, Callable, Coroutine, Dict, List, Optional, Union

from dagger import Client, Container, File, Service
from dagger import Secret as DaggerSecret
@@ -56,13 +58,17 @@ def get_base_dockerd_container(dagger_client: Client) -> Container:
)
)
# Expose the docker host port.
.with_exec(["adduser", "-u", "1000", "-S", "-H", "airbyte"])
.with_exposed_port(DOCKER_HOST_PORT)
# We cache /tmp for file sharing between client and daemon.
.with_mounted_cache("/tmp", dagger_client.cache_volume(DOCKER_TMP_VOLUME_NAME))
.with_mounted_cache("/tmp", dagger_client.cache_volume(DOCKER_TMP_VOLUME_NAME), owner="airbyte")
.with_exec(["chmod", "777", "/tmp"])
)

# We cache /var/lib/docker to avoid downloading images and layers multiple times.
base_container = base_container.with_mounted_cache("/var/lib/docker", dagger_client.cache_volume(DOCKER_VAR_LIB_VOLUME_NAME))
base_container = base_container.with_mounted_cache(
"/var/lib/docker", dagger_client.cache_volume(DOCKER_VAR_LIB_VOLUME_NAME), owner="airbyte"
)
return base_container
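For reference, the `owner=` argument to `with_mounted_cache` is what keeps a shared cache volume writable once the containers that mount it stop running as root. A small sketch of that pattern (the helper and volume name are assumed for illustration):

```python
import dagger


def mount_shared_tmp(client: dagger.Client, container: dagger.Container, owner: str) -> dagger.Container:
    """Illustrative helper: mount a shared cache volume so a non-root user can write to it."""
    cache = client.cache_volume("shared-docker-tmp")  # assumed volume name
    # Without owner=..., the mounted cache belongs to root and non-root users hit
    # permission errors; owner=... chowns the mount for the given user.
    return container.with_mounted_cache("/tmp", cache, owner=owner)
```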


@@ -75,8 +81,10 @@ def get_daemon_config_json(registry_mirror_url: Optional[str] = None) -> str:
Returns:
str: The json representation of the docker daemon config.
"""
storage_driver = "vfs" if platform.system() == "Darwin" else STORAGE_DRIVER
logging.info(f"Using storage driver: {storage_driver}")
daemon_config: Dict[str, Union[List[str], str]] = {
"storage-driver": STORAGE_DRIVER,
"storage-driver": storage_driver,
}
if registry_mirror_url:
daemon_config["registry-mirrors"] = ["http://" + registry_mirror_url]
@@ -152,7 +160,7 @@ def with_global_dockerd_service(
).as_service()


def with_bound_docker_host(
async def with_bound_docker_host(
context: ConnectorContext,
container: Container,
) -> Container:
@@ -165,21 +173,22 @@ def with_bound_docker_host(
Container: The container bound to the docker host.
"""
assert context.dockerd_service is not None
current_user = (await container.with_exec(["whoami"]).stdout()).strip()
return (
container.with_env_variable("DOCKER_HOST", f"tcp://{DOCKER_HOST_NAME}:{DOCKER_HOST_PORT}")
.with_service_binding(DOCKER_HOST_NAME, context.dockerd_service)
.with_mounted_cache("/tmp", context.dagger_client.cache_volume(DOCKER_TMP_VOLUME_NAME))
.with_mounted_cache("/tmp", context.dagger_client.cache_volume(DOCKER_TMP_VOLUME_NAME), owner=current_user)
)


def bound_docker_host(context: ConnectorContext) -> Callable[[Container], Container]:
def bound_docker_host_inner(container: Container) -> Container:
return with_bound_docker_host(context, container)
def bound_docker_host(context: ConnectorContext) -> Callable[[Container], Coroutine[Any, Any, Container]]:
async def bound_docker_host_inner(container: Container) -> Container:
return await with_bound_docker_host(context, container)

return bound_docker_host_inner


def with_docker_cli(context: ConnectorContext) -> Container:
async def with_docker_cli(context: ConnectorContext) -> Container:
"""Create a container with the docker CLI installed and bound to a persistent docker host.
Args:
@@ -189,7 +198,7 @@ def with_docker_cli(context: ConnectorContext) -> Container:
Container: A docker cli container bound to a docker host.
"""
docker_cli = context.dagger_client.container().from_(consts.DOCKER_CLI_IMAGE)
return with_bound_docker_host(context, docker_cli)
return await with_bound_docker_host(context, docker_cli)


async def load_image_to_docker_host(context: ConnectorContext, tar_file: File, image_tag: str) -> str:
@@ -202,7 +211,7 @@ async def load_image_to_docker_host(context: ConnectorContext, tar_file: File, image_tag: str) -> str:
"""
# Hacky way to make sure the image is always loaded
tar_name = f"{str(uuid.uuid4())}.tar"
docker_cli = with_docker_cli(context).with_mounted_file(tar_name, tar_file)
docker_cli = (await with_docker_cli(context)).with_mounted_file(tar_name, tar_file)

image_load_output = await docker_cli.with_exec(["docker", "load", "--input", tar_name], use_entrypoint=True).stdout()
# Not tagged images only have a sha256 id the load output shares.
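Since `with_bound_docker_host` now runs `whoami` inside the target container to pick the cache owner, it and `with_docker_cli` became coroutines, which is why the call sites in the pytest and Gradle steps above gained `await`. A hedged sketch of a call site under the new contract (the function below is hypothetical, not part of this commit):

```python
from dagger import Container

import pipelines.dagger.actions.system.docker as docker


async def docker_info(context) -> str:  # `context` is a ConnectorContext
    """Hypothetical call site: the helpers are coroutines now and must be awaited before chaining."""
    cli: Container = await docker.with_docker_cli(context)
    return await cli.with_exec(["docker", "info"], use_entrypoint=True).stdout()
```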
2 changes: 1 addition & 1 deletion airbyte-ci/connectors/pipelines/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"

[tool.poetry]
name = "pipelines"
version = "4.48.8"
version = "4.48.9"
description = "Packaged maintained by the connector operations team to perform CI for connectors' pipelines"
authors = ["Airbyte <[email protected]>"]

@@ -7,7 +7,7 @@ data:
connectorSubtype: file
connectorType: source
definitionId: 31e3242f-dee7-4cdc-a4b8-8e06c5458517
dockerImageTag: 1.7.0-rc.1
dockerImageTag: 1.7.0
dockerRepository: airbyte/source-sftp-bulk
documentationUrl: https://docs.airbyte.com/integrations/sources/sftp-bulk
githubIssueLabel: source-sftp-bulk
@@ -26,7 +26,7 @@ data:
releaseStage: alpha
releases:
rolloutConfiguration:
enableProgressiveRollout: true
enableProgressiveRollout: false
breakingChanges:
1.0.0:
message: "This upgrade migrates the SFTP Bulk source to the Airbyte file-based CDK. This is the first necessary step of transitioning a file connector from community to Airbyte maintained."
@@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",]
build-backend = "poetry.core.masonry.api"

[tool.poetry]
version = "1.7.0-rc.1"
version = "1.7.0"
name = "source-sftp-bulk"
description = "Source implementation for SFTP Bulk."
authors = [ "Airbyte <[email protected]>",]
1 change: 1 addition & 0 deletions docs/integrations/sources/sftp-bulk.md
@@ -160,6 +160,7 @@ This source provides a single stream per file with a dynamic schema. The current

| Version | Date | Pull Request | Subject |
|:--------|:-----------|:---------------------------------------------------------|:------------------------------------------------------------|
| 1.7.0 | 2025-01-17 | [51611](https://github.com/airbytehq/airbyte/pull/51611) | Promoting release candidate 1.7.0-rc.1 to a main version. |
| 1.7.0-rc.1 | 2025-01-16 | [50972](https://github.com/airbytehq/airbyte/pull/50972) | Include option to not mirroring subdirectory structure. |
| 1.6.0 | 2024-12-17 | [49826](https://github.com/airbytehq/airbyte/pull/49826) | Increase individual file size limit. |
| 1.5.0 | 2024-12-02 | [48434](https://github.com/airbytehq/airbyte/pull/48434) | Add get_file method for file-transfer feature. |
