destination-s3: Use airbyte/java-connector-base:2.0.0 #51536

Open · wants to merge 2 commits into master
3 changes: 2 additions & 1 deletion airbyte-ci/connectors/pipelines/README.md
@@ -854,7 +854,8 @@ airbyte-ci connectors --language=low-code migrate-to-manifest-only

| Version | PR | Description |
| ------- | ---------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------- |
-| 4.48.8 | [#51609](https://github.com/airbytehq/airbyte/pull/51609) | Fix typo in `migrate-to-inline-schemas` command |
+| 4.48.9 | [#51609](https://github.com/airbytehq/airbyte/pull/51609) | Fix ownership of shared cache volume for non root connectors |
+| 4.48.8 | [#51582](https://github.com/airbytehq/airbyte/pull/51582) | Fix typo in `migrate-to-inline-schemas` command |
| 4.48.7 | [#51579](https://github.com/airbytehq/airbyte/pull/51579) | Give back the ownership of /tmp to the original user on finalize build |
| 4.48.6 | [#51577](https://github.com/airbytehq/airbyte/pull/51577) | Run finalize build scripts as root |
| 4.48.5 | [#49827](https://github.com/airbytehq/airbyte/pull/49827) | Bypasses CI checks for promoted release candidate PRs. |
@@ -91,6 +91,8 @@ async def _build_from_base_image(self, platform: Platform) -> Container:

connector_container = build_customization.apply_airbyte_entrypoint(base_connector_container, self.context.connector)
customized_connector = await build_customization.post_install_hooks(self.context.connector, connector_container, self.logger)
+# Make sure the user has access to /tmp
+customized_connector = customized_connector.with_exec(["chown", "-R", f"{user}:{user}", "/tmp"])
return customized_connector.with_user(user)

async def _build_from_dockerfile(self, platform: Platform) -> Container:
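The two added lines above hand /tmp to the connector's (possibly non-root) user before the build step drops privileges. A minimal sketch of the same pattern with the Dagger Python SDK, assuming `user` has already been resolved from the base image (the helper name below is illustrative, not part of the diff):

```python
from dagger import Container


def drop_to_connector_user(container: Container, user: str) -> Container:
    """Hand /tmp to `user`, then run everything else as that user.

    Sketch only: in the real pipeline this happens at the end of
    _build_from_base_image, after the post-install hooks.
    """
    # chown runs while the container is still root, so it can take over
    # files that earlier build steps left in /tmp.
    container = container.with_exec(["chown", "-R", f"{user}:{user}", "/tmp"])
    # From here on, the entrypoint and any later exec run as the non-root user.
    return container.with_user(user)
```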
@@ -83,7 +83,7 @@ async def _run(self, connector_under_test: Container) -> StepResult:
pytest_command = self.get_pytest_command(test_config_file_name)

if self.bind_to_docker_host:
-test_environment = pipelines.dagger.actions.system.docker.with_bound_docker_host(self.context, test_environment)
+test_environment = await pipelines.dagger.actions.system.docker.with_bound_docker_host(self.context, test_environment)

test_execution = test_environment.with_exec(pytest_command)

@@ -201,7 +201,7 @@ async def _run(self, *args: Any, **kwargs: Any) -> StepResult:
gradle_container = gradle_container.with_(await secrets.mounted_connector_secrets(self.context, secrets_dir, self.secrets))
if self.bind_to_docker_host:
# If this GradleTask subclass needs docker, then install it and bind it to the existing global docker host container.
-gradle_container = pipelines.dagger.actions.system.docker.with_bound_docker_host(self.context, gradle_container)
+gradle_container = await pipelines.dagger.actions.system.docker.with_bound_docker_host(self.context, gradle_container)
# This installation should be cheap, as the package has already been downloaded, and its dependencies are already installed.
gradle_container = gradle_container.with_exec(["yum", "install", "-y", "docker"], use_entrypoint=True)

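Both call sites above gain an `await` because `with_bound_docker_host` (changed further down) now has to inspect the container before it can mount the shared cache. A hedged sketch of the new calling convention; `prepare_test_container` and the untyped `context` argument are illustrative stand-ins for a step's ConnectorContext:

```python
import pipelines.dagger.actions.system.docker as docker
from dagger import Container


async def prepare_test_container(context, container: Container) -> Container:
    # The helper is now a coroutine: it runs `whoami` in the container to pick
    # the right owner for the shared /tmp cache volume, then binds DOCKER_HOST.
    container = await docker.with_bound_docker_host(context, container)
    # Ordinary lazy Dagger chaining still works on the awaited result
    # (assuming the image ships the docker CLI).
    return container.with_exec(["docker", "info"])
```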
@@ -3,8 +3,10 @@
#

import json
+import logging
+import platform
import uuid
-from typing import Callable, Dict, List, Optional, Union
+from typing import Any, Callable, Coroutine, Dict, List, Optional, Union

from dagger import Client, Container, File, Service
from dagger import Secret as DaggerSecret
@@ -56,13 +58,17 @@ def get_base_dockerd_container(dagger_client: Client) -> Container:
)
)
# Expose the docker host port.
.with_exec(["adduser", "-u", "1000", "-S", "-H", "airbyte"])
.with_exposed_port(DOCKER_HOST_PORT)
# We cache /tmp for file sharing between client and daemon.
.with_mounted_cache("/tmp", dagger_client.cache_volume(DOCKER_TMP_VOLUME_NAME))
.with_mounted_cache("/tmp", dagger_client.cache_volume(DOCKER_TMP_VOLUME_NAME), owner="airbyte")
.with_exec(["chmod", "777", "/tmp"])
)

# We cache /var/lib/docker to avoid downloading images and layers multiple times.
base_container = base_container.with_mounted_cache("/var/lib/docker", dagger_client.cache_volume(DOCKER_VAR_LIB_VOLUME_NAME))
base_container = base_container.with_mounted_cache(
"/var/lib/docker", dagger_client.cache_volume(DOCKER_VAR_LIB_VOLUME_NAME), owner="airbyte"
)
return base_container
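The daemon container now creates an unprivileged `airbyte` user and mounts both cache volumes with `owner="airbyte"`, so nothing written through the shared volumes ends up root-owned. A small sketch of the `owner=` pattern, with made-up volume names standing in for the module's `DOCKER_TMP_VOLUME_NAME` and `DOCKER_VAR_LIB_VOLUME_NAME` constants:

```python
from dagger import Client, Container


def mount_shared_caches(dagger_client: Client, dockerd: Container) -> Container:
    """Sketch: Dagger chowns a cache volume to `owner` when it mounts it."""
    return (
        dockerd
        # The docker daemon and the connector both read and write /tmp, so the
        # mount is owned by the unprivileged user and opened up with chmod 777
        # so any uid on the client side can still use it.
        .with_mounted_cache("/tmp", dagger_client.cache_volume("shared-tmp"), owner="airbyte")
        .with_exec(["chmod", "777", "/tmp"])
        # Image layers are cached across runs; only the daemon touches this one.
        .with_mounted_cache("/var/lib/docker", dagger_client.cache_volume("docker-var-lib"), owner="airbyte")
    )
```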


@@ -75,8 +81,10 @@ def get_daemon_config_json(registry_mirror_url: Optional[str] = None) -> str:
Returns:
str: The json representation of the docker daemon config.
"""
storage_driver = "vfs" if platform.system() == "Darwin" else STORAGE_DRIVER
logging.info(f"Using storage driver: {storage_driver}")
daemon_config: Dict[str, Union[List[str], str]] = {
"storage-driver": STORAGE_DRIVER,
"storage-driver": storage_driver,
}
if registry_mirror_url:
daemon_config["registry-mirrors"] = ["http://" + registry_mirror_url]
@@ -152,7 +160,7 @@ def with_global_dockerd_service(
).as_service()


-def with_bound_docker_host(
+async def with_bound_docker_host(
context: ConnectorContext,
container: Container,
) -> Container:
@@ -165,21 +173,22 @@ def with_bound_docker_host(
Container: The container bound to the docker host.
"""
assert context.dockerd_service is not None
+current_user = (await container.with_exec(["whoami"]).stdout()).strip()
return (
container.with_env_variable("DOCKER_HOST", f"tcp://{DOCKER_HOST_NAME}:{DOCKER_HOST_PORT}")
.with_service_binding(DOCKER_HOST_NAME, context.dockerd_service)
.with_mounted_cache("/tmp", context.dagger_client.cache_volume(DOCKER_TMP_VOLUME_NAME))
.with_mounted_cache("/tmp", context.dagger_client.cache_volume(DOCKER_TMP_VOLUME_NAME), owner=current_user)
)
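The key idea in the rewritten helper is that the /tmp cache must be mounted with an owner matching whoever the connector image runs as; since that user is only known after execing `whoami`, the function became a coroutine. A condensed sketch of just that part, with a hypothetical `volume_name` parameter in place of `DOCKER_TMP_VOLUME_NAME`:

```python
from dagger import Client, Container


async def mount_tmp_as_container_user(dagger_client: Client, container: Container, volume_name: str) -> Container:
    # Evaluating stdout() forces execution, which is why callers must await.
    current_user = (await container.with_exec(["whoami"]).stdout()).strip()
    # Mount the shared cache chowned to that user so a non-root connector can
    # write files that the dockerd service (which mounts the same volume) reads.
    return container.with_mounted_cache(
        "/tmp", dagger_client.cache_volume(volume_name), owner=current_user
    )
```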


-def bound_docker_host(context: ConnectorContext) -> Callable[[Container], Container]:
-def bound_docker_host_inner(container: Container) -> Container:
-return with_bound_docker_host(context, container)
+def bound_docker_host(context: ConnectorContext) -> Callable[[Container], Coroutine[Any, Any, Container]]:
+async def bound_docker_host_inner(container: Container) -> Container:
+return await with_bound_docker_host(context, container)

return bound_docker_host_inner


-def with_docker_cli(context: ConnectorContext) -> Container:
+async def with_docker_cli(context: ConnectorContext) -> Container:
"""Create a container with the docker CLI installed and bound to a persistent docker host.

Args:
@@ -189,7 +198,7 @@ def with_docker_cli(context: ConnectorContext) -> Container:
Container: A docker cli container bound to a docker host.
"""
docker_cli = context.dagger_client.container().from_(consts.DOCKER_CLI_IMAGE)
-return with_bound_docker_host(context, docker_cli)
+return await with_bound_docker_host(context, docker_cli)


async def load_image_to_docker_host(context: ConnectorContext, tar_file: File, image_tag: str) -> str:
@@ -202,7 +211,7 @@ async def load_image_to_docker_host(context: ConnectorContext, tar_file: File, image_tag: str) -> str:
"""
# Hacky way to make sure the image is always loaded
tar_name = f"{str(uuid.uuid4())}.tar"
-docker_cli = with_docker_cli(context).with_mounted_file(tar_name, tar_file)
+docker_cli = (await with_docker_cli(context)).with_mounted_file(tar_name, tar_file)

image_load_output = await docker_cli.with_exec(["docker", "load", "--input", tar_name], use_entrypoint=True).stdout()
# Not tagged images only have a sha256 id the load output shares.
2 changes: 1 addition & 1 deletion airbyte-ci/connectors/pipelines/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"

[tool.poetry]
name = "pipelines"
version = "4.48.8"
version = "4.48.9"
description = "Packaged maintained by the connector operations team to perform CI for connectors' pipelines"
authors = ["Airbyte <[email protected]>"]

@@ -2,14 +2,14 @@ data:
connectorSubtype: file
connectorType: destination
definitionId: 4816b78f-1489-44c1-9060-4b19d5fa9362
-dockerImageTag: 1.5.0-rc.10
+dockerImageTag: 1.5.0-rc.11
dockerRepository: airbyte/destination-s3
githubIssueLabel: destination-s3
icon: s3.svg
license: ELv2
name: S3
connectorBuildOptions:
-baseImage: docker.io/airbyte/java-connector-base:1.0.0@sha256:be86e5684e1e6d9280512d3d8071b47153698fe08ad990949c8eeff02803201a
+baseImage: docker.io/airbyte/java-connector-base:2.0.0@sha256:5a1a21c75c5e1282606de9fa539ba136520abe2fbd013058e988bb0297a9f454
registryOverrides:
cloud:
enabled: true
1 change: 1 addition & 0 deletions docs/integrations/destinations/s3.md
@@ -544,6 +544,7 @@ To see connector limitations, or troubleshoot your S3 connector, see more [in ou

| Version | Date | Pull Request | Subject |
|:------------|:-----------|:-----------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------|
+| 1.5.0-rc.11 | 2025-01-16 | [51536](https://github.com/airbytehq/airbyte/pull/51536) | Use latest version of the java connector base image to make the connector rootless |
| 1.5.0-rc.10 | 2025-01-15 | [50960](https://github.com/airbytehq/airbyte/pull/50960) | Bug fixes: tolerate repeated path variables; avro meta field schema matches old cdk |
| 1.5.0-rc.9 | 2025-01-10 | [50960](https://github.com/airbytehq/airbyte/pull/50960) | Bug fixes: variables respected in bucket path; sync does not hang on streams w/o state |
| 1.5.0-rc.8 | 2025-01-08 | [50960](https://github.com/airbytehq/airbyte/pull/50960) | Use `airbyte/java-connector-base` base image. |