diff --git a/airbyte-ci/connectors/pipelines/README.md b/airbyte-ci/connectors/pipelines/README.md
index e63b59c46136..7d92fc08cad2 100644
--- a/airbyte-ci/connectors/pipelines/README.md
+++ b/airbyte-ci/connectors/pipelines/README.md
@@ -644,7 +644,8 @@ E.G.: running Poe tasks on the modified internal packages of the current branch:
| Version | PR | Description |
| ------- | ---------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------- |
-| 4.3.2 | [#35536](https://github.com/airbytehq/airbyte/pull/35536) | Make QA checks run correctly on `*-strict-encrypt` connectors. |
+| 4.4.0   | [#35317](https://github.com/airbytehq/airbyte/pull/35317)  | Augment Java connector reports to include full logs and JUnit test results.                                                |
+| 4.3.2 | [#35536](https://github.com/airbytehq/airbyte/pull/35536) | Make QA checks run correctly on `*-strict-encrypt` connectors. |
| 4.3.1 | [#35437](https://github.com/airbytehq/airbyte/pull/35437) | Do not run QA checks on publish, just MetadataValidation. |
| 4.3.0 | [#35438](https://github.com/airbytehq/airbyte/pull/35438) | Optionally disable telemetry with environment variable. |
| 4.2.4 | [#35325](https://github.com/airbytehq/airbyte/pull/35325) | Use `connectors_qa` for QA checks and remove redundant checks. |
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/__init__.py
index 7a47568639a6..712062361242 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/__init__.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/__init__.py
@@ -47,7 +47,7 @@ async def run_connector_build_pipeline(context: ConnectorContext, semaphore: any
async with semaphore:
async with context:
build_result = await run_connector_build(context)
- per_platform_built_containers = build_result.output_artifact
+ per_platform_built_containers = build_result.output
step_results.append(build_result)
if context.is_local and build_result.status is StepStatus.SUCCESS:
load_image_result = await LoadContainerToLocalDockerHost(context, per_platform_built_containers, image_tag).run()
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/common.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/common.py
index 687b566f8fa5..f7ae65bffcfc 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/common.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/common.py
@@ -48,7 +48,7 @@ async def _run(self, *args: Any) -> StepResult:
f"The {self.context.connector.technical_name} docker image "
f"was successfully built for platform(s) {', '.join(self.build_platforms)}"
)
- return StepResult(step=self, status=StepStatus.SUCCESS, stdout=success_message, output_artifact=build_results_per_platform)
+ return StepResult(step=self, status=StepStatus.SUCCESS, stdout=success_message, output=build_results_per_platform)
async def _build_connector(self, platform: Platform, *args: Any, **kwargs: Any) -> Container:
"""Implement the generation of the image for the platform and return the corresponding container.
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/java_connectors.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/java_connectors.py
index aa0b3448ba68..8d31bd5a714a 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/java_connectors.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/java_connectors.py
@@ -59,7 +59,7 @@ async def run_connector_build(context: ConnectorContext) -> StepResult:
build_connector_tar_result = await BuildConnectorDistributionTar(context).run()
if build_connector_tar_result.status is not StepStatus.SUCCESS:
return build_connector_tar_result
- dist_dir = await build_connector_tar_result.output_artifact.directory(dist_tar_directory_path(context))
+ dist_dir = await build_connector_tar_result.output.directory(dist_tar_directory_path(context))
return await BuildConnectorImages(context).run(dist_dir)
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/normalization.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/normalization.py
index 0ac35aab7fa4..7a3bae6d0ac3 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/normalization.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/normalization.py
@@ -36,4 +36,4 @@ async def _run(self) -> StepResult:
build_normalization_container = normalization.with_normalization(self.context, self.build_platform)
else:
build_normalization_container = self.context.dagger_client.container().from_(self.normalization_image)
- return StepResult(step=self, status=StepStatus.SUCCESS, output_artifact=build_normalization_container)
+ return StepResult(step=self, status=StepStatus.SUCCESS, output=build_normalization_container)
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/bump_version/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/bump_version/pipeline.py
index 7b2fda58b3d5..329da37639b3 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/bump_version/pipeline.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/bump_version/pipeline.py
@@ -55,7 +55,7 @@ async def _run(self) -> StepResult:
step=self,
status=StepStatus.SKIPPED,
stdout="Connector does not have a documentation file.",
- output_artifact=self.repo_dir,
+ output=self.repo_dir,
)
try:
updated_doc = self.add_changelog_entry(doc_path.read_text())
@@ -64,14 +64,14 @@ async def _run(self) -> StepResult:
step=self,
status=StepStatus.FAILURE,
stdout=f"Could not add changelog entry: {e}",
- output_artifact=self.repo_dir,
+ output=self.repo_dir,
)
updated_repo_dir = self.repo_dir.with_new_file(str(doc_path), contents=updated_doc)
return StepResult(
step=self,
status=StepStatus.SUCCESS,
stdout=f"Added changelog entry to {doc_path}",
- output_artifact=updated_repo_dir,
+ output=updated_repo_dir,
)
def find_line_index_for_new_entry(self, markdown_text: str) -> int:
@@ -118,7 +118,7 @@ async def _run(self) -> StepResult:
step=self,
status=StepStatus.SKIPPED,
stdout="Can't retrieve the connector current version.",
- output_artifact=self.repo_dir,
+ output=self.repo_dir,
)
updated_metadata_str = self.get_metadata_with_bumped_version(current_version, self.new_version, current_metadata_str)
repo_dir_with_updated_metadata = metadata_change_helpers.get_repo_dir_with_updated_metadata_str(
@@ -134,7 +134,7 @@ async def _run(self) -> StepResult:
step=self,
status=StepStatus.SUCCESS,
stdout=f"Updated dockerImageTag from {current_version} to {self.new_version} in {metadata_path}",
- output_artifact=repo_dir_with_updated_metadata,
+ output=repo_dir_with_updated_metadata,
)
@@ -164,7 +164,7 @@ async def run_connector_version_bump_pipeline(
new_version,
)
update_docker_image_tag_in_metadata_result = await update_docker_image_tag_in_metadata.run()
- repo_dir_with_updated_metadata = update_docker_image_tag_in_metadata_result.output_artifact
+ repo_dir_with_updated_metadata = update_docker_image_tag_in_metadata_result.output
steps_results.append(update_docker_image_tag_in_metadata_result)
add_changelog_entry = AddChangelogEntry(
@@ -176,7 +176,7 @@ async def run_connector_version_bump_pipeline(
)
add_changelog_entry_result = await add_changelog_entry.run()
steps_results.append(add_changelog_entry_result)
- final_repo_dir = add_changelog_entry_result.output_artifact
+ final_repo_dir = add_changelog_entry_result.output
await og_repo_dir.diff(final_repo_dir).export(str(git.get_git_repo_path()))
report = ConnectorReport(context, steps_results, name="CONNECTOR VERSION BUMP RESULTS")
context.report = report
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_base_image/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_base_image/pipeline.py
index 47fba701ff99..cb1f6d357d3a 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_base_image/pipeline.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_base_image/pipeline.py
@@ -64,7 +64,7 @@ async def _run(self) -> StepResult:
step=self,
status=StepStatus.SKIPPED,
stdout="Could not find a base image for this connector language.",
- output_artifact=self.repo_dir,
+ output=self.repo_dir,
)
metadata_path = self.context.connector.metadata_file_path
@@ -76,7 +76,7 @@ async def _run(self) -> StepResult:
step=self,
status=StepStatus.SKIPPED,
stdout="Connector does not have a base image metadata field.",
- output_artifact=self.repo_dir,
+ output=self.repo_dir,
)
if current_base_image_address == latest_base_image_address:
@@ -84,7 +84,7 @@ async def _run(self) -> StepResult:
step=self,
status=StepStatus.SKIPPED,
stdout="Connector already uses latest base image",
- output_artifact=self.repo_dir,
+ output=self.repo_dir,
)
updated_metadata = self.update_base_image_in_metadata(current_metadata, latest_base_image_address)
updated_repo_dir = metadata_change_helpers.get_repo_dir_with_updated_metadata(self.repo_dir, metadata_path, updated_metadata)
@@ -93,7 +93,7 @@ async def _run(self) -> StepResult:
step=self,
status=StepStatus.SUCCESS,
stdout=f"Updated base image to {latest_base_image_address} in {metadata_path}",
- output_artifact=updated_repo_dir,
+ output=updated_repo_dir,
)
@@ -146,7 +146,7 @@ async def _run(self) -> StepResult:
step=self,
status=StepStatus.SKIPPED,
stdout="Connector does not have a documentation file.",
- output_artifact=self.repo_dir,
+ output=self.repo_dir,
)
current_readme = await (await self.context.get_connector_dir(include=["README.md"])).file("README.md").contents()
try:
@@ -156,14 +156,14 @@ async def _run(self) -> StepResult:
step=self,
status=StepStatus.FAILURE,
stdout=str(e),
- output_artifact=self.repo_dir,
+ output=self.repo_dir,
)
updated_repo_dir = await self.repo_dir.with_new_file(str(readme_path), contents=updated_readme)
return StepResult(
step=self,
status=StepStatus.SUCCESS,
stdout=f"Added build instructions to {readme_path}",
- output_artifact=updated_repo_dir,
+ output=updated_repo_dir,
)
def add_build_instructions(self, og_doc_content: str) -> str:
@@ -276,7 +276,7 @@ async def run_connector_base_image_upgrade_pipeline(context: ConnectorContext, s
)
update_base_image_in_metadata_result = await update_base_image_in_metadata.run()
steps_results.append(update_base_image_in_metadata_result)
- final_repo_dir = update_base_image_in_metadata_result.output_artifact
+ final_repo_dir = update_base_image_in_metadata_result.output
await og_repo_dir.diff(final_repo_dir).export(str(git.get_git_repo_path()))
report = ConnectorReport(context, steps_results, name="BASE IMAGE UPGRADE RESULTS")
context.report = report
@@ -324,7 +324,7 @@ async def run_connector_migration_to_base_image_pipeline(
new_version = get_bumped_version(context.connector.version, "patch")
bump_version_in_metadata = BumpDockerImageTagInMetadata(
context,
- update_base_image_in_metadata_result.output_artifact,
+ update_base_image_in_metadata_result.output,
new_version,
)
bump_version_in_metadata_result = await bump_version_in_metadata.run()
@@ -333,7 +333,7 @@ async def run_connector_migration_to_base_image_pipeline(
# ADD CHANGELOG ENTRY
add_changelog_entry = AddChangelogEntry(
context,
- bump_version_in_metadata_result.output_artifact,
+ bump_version_in_metadata_result.output,
new_version,
"Base image migration: remove Dockerfile and use the python-connector-base image",
pull_request_number,
@@ -344,13 +344,13 @@ async def run_connector_migration_to_base_image_pipeline(
# UPDATE DOC
add_build_instructions_to_doc = AddBuildInstructionsToReadme(
context,
- add_changelog_entry_result.output_artifact,
+ add_changelog_entry_result.output,
)
add_build_instructions_to_doc_results = await add_build_instructions_to_doc.run()
steps_results.append(add_build_instructions_to_doc_results)
# EXPORT MODIFIED FILES BACK TO HOST
- final_repo_dir = add_build_instructions_to_doc_results.output_artifact
+ final_repo_dir = add_build_instructions_to_doc_results.output
await og_repo_dir.diff(final_repo_dir).export(str(git.get_git_repo_path()))
report = ConnectorReport(context, steps_results, name="MIGRATE TO BASE IMAGE RESULTS")
context.report = report
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py
index 849a9348f115..88c917211946 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py
@@ -313,7 +313,7 @@ def create_connector_report(results: List[StepResult]) -> ConnectorReport:
if build_connector_results.status is not StepStatus.SUCCESS:
return create_connector_report(results)
- built_connector_platform_variants = list(build_connector_results.output_artifact.values())
+ built_connector_platform_variants = list(build_connector_results.output.values())
push_connector_image_results = await PushConnectorImageToRegistry(context).run(built_connector_platform_variants)
results.append(push_connector_image_results)
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/reports.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/reports.py
index b8265c4385a1..594b9573ee57 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/reports.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/reports.py
@@ -6,13 +6,15 @@
import json
import webbrowser
from dataclasses import dataclass
-from typing import TYPE_CHECKING
+from pathlib import Path
+from types import MappingProxyType
+from typing import TYPE_CHECKING, Dict, List
-from anyio import Path
from connector_ops.utils import console # type: ignore
from jinja2 import Environment, PackageLoader, select_autoescape
from pipelines.consts import GCS_PUBLIC_DOMAIN
from pipelines.helpers.utils import format_duration
+from pipelines.models.artifacts import Artifact
from pipelines.models.reports import Report
from pipelines.models.steps import StepStatus
from rich.console import Group
@@ -42,13 +44,19 @@ def report_output_prefix(self) -> str:
def html_report_file_name(self) -> str:
return self.filename + ".html"
+ def file_remote_storage_key(self, file_name: str) -> str:
+ return f"{self.report_output_prefix}/{file_name}"
+
@property
def html_report_remote_storage_key(self) -> str:
- return f"{self.report_output_prefix}/{self.html_report_file_name}"
+ return self.file_remote_storage_key(self.html_report_file_name)
+
+ def file_url(self, file_name: str) -> str:
+ return f"{GCS_PUBLIC_DOMAIN}/{self.pipeline_context.ci_report_bucket}/{self.file_remote_storage_key(file_name)}"
@property
def html_report_url(self) -> str:
- return f"{GCS_PUBLIC_DOMAIN}/{self.pipeline_context.ci_report_bucket}/{self.html_report_remote_storage_key}"
+ return self.file_url(self.html_report_file_name)
def to_json(self) -> str:
"""Create a JSON representation of the connector test report.
@@ -81,7 +89,7 @@ def to_json(self) -> str:
}
)
- async def to_html(self) -> str:
+ def to_html(self) -> str:
env = Environment(
loader=PackageLoader("pipelines.airbyte_ci.connectors.test.steps"),
autoescape=select_autoescape(),
@@ -91,7 +99,18 @@ async def to_html(self) -> str:
template = env.get_template("test_report.html.j2")
template.globals["StepStatus"] = StepStatus
template.globals["format_duration"] = format_duration
- local_icon_path = await Path(f"{self.pipeline_context.connector.code_directory}/icon.svg").resolve()
+ local_icon_path = Path(f"{self.pipeline_context.connector.code_directory}/icon.svg").resolve()
+ step_result_to_artifact_links: Dict[str, List[Dict]] = {}
+ for step_result in self.steps_results:
+ for artifact in step_result.artifacts:
+ if artifact.gcs_url:
+ url = artifact.gcs_url
+ elif artifact.local_path:
+ url = artifact.local_path.resolve().as_uri()
+ else:
+ continue
+ step_result_to_artifact_links.setdefault(step_result.step.title, []).append({"name": artifact.name, "url": url})
+
template_context = {
"connector_name": self.pipeline_context.connector.technical_name,
"step_results": self.steps_results,
@@ -104,6 +123,8 @@ async def to_html(self) -> str:
"git_revision": self.pipeline_context.git_revision,
"commit_url": None,
"icon_url": local_icon_path.as_uri(),
+ "report": self,
+ "step_result_to_artifact_links": MappingProxyType(step_result_to_artifact_links),
}
if self.pipeline_context.is_ci:
@@ -116,18 +137,32 @@ async def to_html(self) -> str:
] = f"https://raw.githubusercontent.com/airbytehq/airbyte/{self.pipeline_context.git_revision}/{self.pipeline_context.connector.code_directory}/icon.svg"
return template.render(template_context)
+ async def save_html_report(self) -> None:
+ """Save the report as HTML, upload it to GCS if the pipeline is running in CI"""
+
+ html_report_path = self.report_dir_path / self.html_report_file_name
+ report_dir = self.pipeline_context.dagger_client.host().directory(str(self.report_dir_path))
+ local_html_report_file = report_dir.with_new_file(self.html_report_file_name, self.to_html()).file(self.html_report_file_name)
+ html_report_artifact = Artifact(name="HTML Report", content_type="text/html", content=local_html_report_file)
+ await html_report_artifact.save_to_local_path(html_report_path)
+ absolute_path = html_report_path.absolute()
+ self.pipeline_context.logger.info(f"Report saved locally at {absolute_path}")
+ if self.remote_storage_enabled and self.pipeline_context.ci_gcs_credentials_secret and self.pipeline_context.ci_report_bucket:
+ gcs_url = await html_report_artifact.upload_to_gcs(
+ dagger_client=self.pipeline_context.dagger_client,
+ bucket=self.pipeline_context.ci_report_bucket,
+ key=self.html_report_remote_storage_key,
+ gcs_credentials=self.pipeline_context.ci_gcs_credentials_secret,
+ )
+ self.pipeline_context.logger.info(f"HTML report uploaded to {gcs_url}")
+
+ elif self.pipeline_context.enable_report_auto_open:
+ self.pipeline_context.logger.info("Opening HTML report in browser.")
+ webbrowser.open(absolute_path.as_uri())
+
async def save(self) -> None:
- local_html_path = await self.save_local(self.html_report_file_name, await self.to_html())
- absolute_path = await local_html_path.resolve()
- if self.pipeline_context.enable_report_auto_open:
- self.pipeline_context.logger.info(f"HTML report saved locally: {absolute_path}")
- if self.pipeline_context.enable_report_auto_open:
- self.pipeline_context.logger.info("Opening HTML report in browser.")
- webbrowser.open(absolute_path.as_uri())
- if self.remote_storage_enabled:
- await self.save_remote(local_html_path, self.html_report_remote_storage_key, "text/html")
- self.pipeline_context.logger.info(f"HTML report uploaded to {self.html_report_url}")
await super().save()
+ await self.save_html_report()
def print(self) -> None:
"""Print the test report to the console in a nice way."""
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/java_connectors.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/java_connectors.py
index c69a0ac4fb42..a4259b8f67c8 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/java_connectors.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/java_connectors.py
@@ -38,7 +38,7 @@ class IntegrationTests(GradleTask):
gradle_task_name = "integrationTestJava"
mount_connector_secrets = True
bind_to_docker_host = True
- with_test_report = True
+ with_test_artifacts = True
@property
def default_params(self) -> STEP_PARAMS:
@@ -80,7 +80,7 @@ class UnitTests(GradleTask):
title = "Java Connector Unit Tests"
gradle_task_name = "test"
bind_to_docker_host = True
- with_test_report = True
+ with_test_artifacts = True
def _create_integration_step_args_factory(context: ConnectorContext) -> Callable:
@@ -90,14 +90,14 @@ def _create_integration_step_args_factory(context: ConnectorContext) -> Callable
async def _create_integration_step_args(results: RESULTS_DICT) -> Dict[str, Optional[File]]:
- connector_container = results["build"].output_artifact[LOCAL_BUILD_PLATFORM]
+ connector_container = results["build"].output[LOCAL_BUILD_PLATFORM]
connector_image_tar_file, _ = await export_container_to_tarball(context, connector_container, LOCAL_BUILD_PLATFORM)
if context.connector.supports_normalization:
tar_file_name = f"{context.connector.normalization_repository}_{context.git_revision}.tar"
build_normalization_results = results["build_normalization"]
- normalization_container = build_normalization_results.output_artifact
+ normalization_container = build_normalization_results.output
normalization_tar_file, _ = await export_container_to_tarball(
context, normalization_container, LOCAL_BUILD_PLATFORM, tar_file_name=tar_file_name
)
@@ -138,9 +138,7 @@ def _get_acceptance_test_steps(context: ConnectorContext) -> List[StepToRun]:
StepToRun(
id=CONNECTOR_TEST_STEP_ID.ACCEPTANCE,
step=AcceptanceTests(context, True),
- args=lambda results: {
- "connector_under_test_container": results[CONNECTOR_TEST_STEP_ID.BUILD].output_artifact[LOCAL_BUILD_PLATFORM]
- },
+ args=lambda results: {"connector_under_test_container": results[CONNECTOR_TEST_STEP_ID.BUILD].output[LOCAL_BUILD_PLATFORM]},
depends_on=[CONNECTOR_TEST_STEP_ID.BUILD],
),
]
@@ -159,7 +157,7 @@ def get_test_steps(context: ConnectorContext) -> STEP_TREE:
id=CONNECTOR_TEST_STEP_ID.BUILD,
step=BuildConnectorImages(context),
args=lambda results: {
- "dist_dir": results[CONNECTOR_TEST_STEP_ID.BUILD_TAR].output_artifact.directory(dist_tar_directory_path(context))
+ "dist_dir": results[CONNECTOR_TEST_STEP_ID.BUILD_TAR].output.directory(dist_tar_directory_path(context))
},
depends_on=[CONNECTOR_TEST_STEP_ID.BUILD_TAR],
),
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/python_connectors.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/python_connectors.py
index c19c807fcea0..c7cc04cea7f3 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/python_connectors.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/python_connectors.py
@@ -253,7 +253,7 @@ def get_test_steps(context: ConnectorContext) -> STEP_TREE:
StepToRun(
id=CONNECTOR_TEST_STEP_ID.UNIT,
step=UnitTests(context),
- args=lambda results: {"connector_under_test": results[CONNECTOR_TEST_STEP_ID.BUILD].output_artifact[LOCAL_BUILD_PLATFORM]},
+ args=lambda results: {"connector_under_test": results[CONNECTOR_TEST_STEP_ID.BUILD].output[LOCAL_BUILD_PLATFORM]},
depends_on=[CONNECTOR_TEST_STEP_ID.BUILD],
)
],
@@ -261,21 +261,19 @@ def get_test_steps(context: ConnectorContext) -> STEP_TREE:
StepToRun(
id=CONNECTOR_TEST_STEP_ID.INTEGRATION,
step=IntegrationTests(context),
- args=lambda results: {"connector_under_test": results[CONNECTOR_TEST_STEP_ID.BUILD].output_artifact[LOCAL_BUILD_PLATFORM]},
+ args=lambda results: {"connector_under_test": results[CONNECTOR_TEST_STEP_ID.BUILD].output[LOCAL_BUILD_PLATFORM]},
depends_on=[CONNECTOR_TEST_STEP_ID.BUILD],
),
StepToRun(
id=CONNECTOR_TEST_STEP_ID.AIRBYTE_LIB_VALIDATION,
step=AirbyteLibValidation(context),
- args=lambda results: {"connector_under_test": results[CONNECTOR_TEST_STEP_ID.BUILD].output_artifact[LOCAL_BUILD_PLATFORM]},
+ args=lambda results: {"connector_under_test": results[CONNECTOR_TEST_STEP_ID.BUILD].output[LOCAL_BUILD_PLATFORM]},
depends_on=[CONNECTOR_TEST_STEP_ID.BUILD],
),
StepToRun(
id=CONNECTOR_TEST_STEP_ID.ACCEPTANCE,
step=AcceptanceTests(context, context.concurrent_cat),
- args=lambda results: {
- "connector_under_test_container": results[CONNECTOR_TEST_STEP_ID.BUILD].output_artifact[LOCAL_BUILD_PLATFORM]
- },
+ args=lambda results: {"connector_under_test_container": results[CONNECTOR_TEST_STEP_ID.BUILD].output[LOCAL_BUILD_PLATFORM]},
depends_on=[CONNECTOR_TEST_STEP_ID.BUILD],
),
],
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/templates/test_report.html.j2 b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/templates/test_report.html.j2
index 7835305a7535..d0027605d0bf 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/templates/test_report.html.j2
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/templates/test_report.html.j2
@@ -82,7 +82,7 @@
transition: max-height .25s ease-in-out;
}
.toggle:checked + .lbl-toggle + .collapsible-content {
- max-height: 100vh;
+ max-height: 70vh;
}
.toggle:checked + .lbl-toggle {
border-bottom-right-radius: 0;
@@ -110,6 +110,14 @@
}
+
{{ connector_name }} test report
block.
- # The java logging backend will have already taken care of masking any secrets.
- # Nothing to do in that regard.
- try:
- if testsuite := xmltodict.parse(junit_xml):
- testsuites.append(testsuite)
- except Exception as e:
- self.context.logger.error(str(e))
- self.context.logger.warn(f"Failed to parse junit xml file {file_name}.")
- except QueryError as e:
+ zip_file = await (
+ dagger_directory_as_zip_file(
+ self.dagger_client,
+ await gradle_container.directory(f"{self.context.connector.code_directory}/build/{test_results_dir_name_in_container}"),
+ test_results_dir_name_in_zip,
+ )
+ )
+ return Artifact(
+ name=f"{test_results_dir_name_in_zip}.zip",
+ content=zip_file,
+ content_type="application/zip",
+ to_upload=True,
+ )
+ except ExecError as e:
self.context.logger.error(str(e))
- self.context.logger.warn(f"Failed to retrieve junit test results from {junit_xml_path} gradle container.")
return None
- return render_junit_xml(testsuites)
-
-
-MAYBE_STARTS_WITH_XML_TAG = re.compile("^ *<")
-ESCAPED_ANSI_COLOR_PATTERN = re.compile(r"\?\[0?m|\?\[[34][0-9]m")
-
-
-def render_junit_xml(testsuites: List[Any]) -> str:
- """Renders the JUnit XML report as something readable in the HTML test report."""
- # Transform the dict contents.
- indent = " "
- for testsuite in testsuites:
- testsuite = testsuite.get("testsuite")
- massage_system_out_and_err(testsuite, indent, 4)
- if testcases := testsuite.get("testcase"):
- if not isinstance(testcases, list):
- testcases = [testcases]
- for testcase in testcases:
- massage_system_out_and_err(testcase, indent, 5)
- # Transform back to XML string.
- # Try to respect the JUnit XML test result schema.
- root = {"testsuites": {"testsuite": testsuites}}
- xml = xmltodict.unparse(root, pretty=True, short_empty_elements=True, indent=indent)
- # Escape < and > and so forth to make them render properly, but not in the log messages.
- # These lines will already have been escaped by xmltodict.unparse.
- lines = xml.splitlines()
- for idx, line in enumerate(lines):
- if MAYBE_STARTS_WITH_XML_TAG.match(line):
- lines[idx] = html.escape(line)
- return "\n".join(lines)
-
-
-def massage_system_out_and_err(d: dict, indent: str, indent_levels: int) -> None:
- """Makes the system-out and system-err text prettier."""
- if d:
- for key in ["system-out", "system-err"]:
- if s := d.get(key):
- lines = s.splitlines()
- s = ""
- for line in lines:
- stripped = line.strip()
- if stripped:
- s += "\n" + indent * indent_levels + ESCAPED_ANSI_COLOR_PATTERN.sub("", line.strip())
- s = s + "\n" + indent * (indent_levels - 1) if s else None
- d[key] = s
diff --git a/airbyte-ci/connectors/pipelines/pipelines/helpers/utils.py b/airbyte-ci/connectors/pipelines/pipelines/helpers/utils.py
index e275b42610bf..cd8d31dd4d40 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/helpers/utils.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/helpers/utils.py
@@ -19,7 +19,7 @@
import anyio
import asyncclick as click
import asyncer
-from dagger import Client, Config, Container, ExecError, File, ImageLayerCompression, Platform, Secret
+from dagger import Client, Config, Container, Directory, ExecError, File, ImageLayerCompression, Platform, Secret
from more_itertools import chunked
if TYPE_CHECKING:
@@ -353,3 +353,24 @@ def java_log_scrub_pattern(secrets_to_mask: List[str]) -> str:
":": ":",
},
)
+
+
+def dagger_directory_as_zip_file(dagger_client: Client, directory: Directory, directory_name: str) -> File:
+ """Compress a directory and return a File object representing the zip file.
+
+ Args:
+ dagger_client (Client): The dagger client.
+        directory (Directory): The dagger directory to compress.
+ directory_name (str): The name of the directory.
+
+ Returns:
+ File: The File object representing the zip file.
+ """
+ return (
+ dagger_client.container()
+ .from_("alpine:3.19.1")
+ .with_exec(sh_dash_c(["apk update", "apk add zip"]))
+ .with_mounted_directory(f"/{directory_name}", directory)
+ .with_exec(["zip", "-r", "/zipped.zip", f"/{directory_name}"])
+ .file("/zipped.zip")
+ )
diff --git a/airbyte-ci/connectors/pipelines/pipelines/models/artifacts.py b/airbyte-ci/connectors/pipelines/pipelines/models/artifacts.py
new file mode 100644
index 000000000000..f1deafd445e7
--- /dev/null
+++ b/airbyte-ci/connectors/pipelines/pipelines/models/artifacts.py
@@ -0,0 +1,47 @@
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Optional
+
+import dagger
+from pipelines.consts import GCS_PUBLIC_DOMAIN
+from pipelines.dagger.actions import remote_storage
+
+
+@dataclass(kw_only=True)
+class Artifact:
+ """A dataclass to represent an artifact produced by a pipeline execution."""
+
+ name: str
+ content_type: str
+ content: dagger.File
+ to_upload: bool = True
+ local_path: Optional[Path] = None
+ gcs_url: Optional[str] = None
+
+ async def save_to_local_path(self, path: Path) -> Path:
+ exported = await self.content.export(str(path))
+ if exported:
+ self.local_path = path
+ return path
+ else:
+ raise Exception(f"Failed to save artifact {self.name} to local path {path}")
+
+ async def upload_to_gcs(self, dagger_client: dagger.Client, bucket: str, key: str, gcs_credentials: dagger.Secret) -> str:
+ gcs_cp_flags = [f'--content-disposition=filename="{self.name}"']
+ if self.content_type is not None:
+ gcs_cp_flags = gcs_cp_flags + [f"--content-type={self.content_type}"]
+
+ report_upload_exit_code, _, _ = await remote_storage.upload_to_gcs(
+ dagger_client=dagger_client,
+ file_to_upload=self.content,
+ key=key,
+ bucket=bucket,
+ gcs_credentials=gcs_credentials,
+ flags=gcs_cp_flags,
+ )
+ if report_upload_exit_code != 0:
+ raise Exception(f"Failed to upload artifact {self.name} to GCS. Exit code: {report_upload_exit_code}.")
+ self.gcs_url = f"{GCS_PUBLIC_DOMAIN}/{bucket}/{key}"
+ return f"{GCS_PUBLIC_DOMAIN}/{bucket}/{key}"
diff --git a/airbyte-ci/connectors/pipelines/pipelines/models/reports.py b/airbyte-ci/connectors/pipelines/pipelines/models/reports.py
index 206e1c44e9e1..4cf5c33f8055 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/models/reports.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/models/reports.py
@@ -7,17 +7,17 @@
from __future__ import annotations
import json
+import time
import typing
from dataclasses import dataclass, field
from datetime import datetime, timedelta
+from pathlib import Path
from typing import List
-import anyio
-from anyio import Path
from connector_ops.utils import console # type: ignore
-from pipelines.consts import GCS_PUBLIC_DOMAIN, LOCAL_REPORTS_PATH_ROOT
-from pipelines.dagger.actions import remote_storage
-from pipelines.helpers.utils import format_duration
+from pipelines.consts import LOCAL_REPORTS_PATH_ROOT
+from pipelines.helpers.utils import format_duration, slugify
+from pipelines.models.artifacts import Artifact
from pipelines.models.steps import StepResult, StepStatus
from rich.console import Group
from rich.panel import Panel
@@ -44,6 +44,10 @@ class Report:
def report_output_prefix(self) -> str:
return self.pipeline_context.report_output_prefix
+ @property
+ def report_dir_path(self) -> Path:
+ return Path(f"{LOCAL_REPORTS_PATH_ROOT}/{self.report_output_prefix}")
+
@property
def json_report_file_name(self) -> str:
return self.filename + ".json"
@@ -84,42 +88,52 @@ def lead_duration(self) -> timedelta:
def remote_storage_enabled(self) -> bool:
return self.pipeline_context.is_ci
- async def save_local(self, filename: str, content: str) -> Path:
- """Save the report files locally."""
-        local_path = anyio.Path(f"{LOCAL_REPORTS_PATH_ROOT}/{self.report_output_prefix}/{filename}")
- await local_path.parents[0].mkdir(parents=True, exist_ok=True)
- await local_path.write_text(content)
- return local_path
-
- async def save_remote(self, local_path: Path, remote_key: str, content_type: str) -> int:
- assert self.pipeline_context.ci_report_bucket is not None, "The ci_report_bucket must be set to save reports."
-
- gcs_cp_flags = None if content_type is None else [f"--content-type={content_type}"]
- local_file = self.pipeline_context.dagger_client.host().directory(".", include=[str(local_path)]).file(str(local_path))
- report_upload_exit_code, _, _ = await remote_storage.upload_to_gcs(
- dagger_client=self.pipeline_context.dagger_client,
- file_to_upload=local_file,
- key=remote_key,
- bucket=self.pipeline_context.ci_report_bucket,
- gcs_credentials=self.pipeline_context.ci_gcs_credentials_secret,
- flags=gcs_cp_flags,
- )
- gcs_uri = "gs://" + self.pipeline_context.ci_report_bucket + "/" + remote_key
- public_url = f"{GCS_PUBLIC_DOMAIN}/{self.pipeline_context.ci_report_bucket}/{remote_key}"
- if report_upload_exit_code != 0:
- self.pipeline_context.logger.error(f"Uploading {local_path} to {gcs_uri} failed.")
- else:
- self.pipeline_context.logger.info(f"Uploading {local_path} to {gcs_uri} succeeded. Public URL: {public_url}")
- return report_upload_exit_code
-
async def save(self) -> None:
- """Save the report files."""
-
- local_json_path = await self.save_local(self.json_report_file_name, self.to_json())
- absolute_path = await local_json_path.absolute()
+ self.report_dir_path.mkdir(parents=True, exist_ok=True)
+ await self.save_json_report()
+ await self.save_step_result_artifacts()
+
+ async def save_json_report(self) -> None:
+ """Save the report as JSON, upload it to GCS if the pipeline is running in CI"""
+
+ json_report_path = self.report_dir_path / self.json_report_file_name
+ report_dir = self.pipeline_context.dagger_client.host().directory(str(self.report_dir_path))
+ local_json_report_file = report_dir.with_new_file(self.json_report_file_name, self.to_json()).file(self.json_report_file_name)
+ json_report_artifact = Artifact(name="JSON Report", content_type="application/json", content=local_json_report_file)
+ await json_report_artifact.save_to_local_path(json_report_path)
+ absolute_path = json_report_path.absolute()
self.pipeline_context.logger.info(f"Report saved locally at {absolute_path}")
- if self.remote_storage_enabled:
- await self.save_remote(local_json_path, self.json_report_remote_storage_key, "application/json")
+ if self.remote_storage_enabled and self.pipeline_context.ci_report_bucket and self.pipeline_context.ci_gcs_credentials_secret:
+ gcs_url = await json_report_artifact.upload_to_gcs(
+ dagger_client=self.pipeline_context.dagger_client,
+ bucket=self.pipeline_context.ci_report_bucket,
+ key=self.json_report_remote_storage_key,
+ gcs_credentials=self.pipeline_context.ci_gcs_credentials_secret,
+ )
+ self.pipeline_context.logger.info(f"JSON Report uploaded to {gcs_url}")
+
+ async def save_step_result_artifacts(self) -> None:
+ local_artifacts_dir = self.report_dir_path / "artifacts"
+ local_artifacts_dir.mkdir(parents=True, exist_ok=True)
+ # TODO: concurrent save and upload
+ for step_result in self.steps_results:
+ for artifact in step_result.artifacts:
+ step_artifacts_dir = local_artifacts_dir / slugify(step_result.step.title)
+ step_artifacts_dir.mkdir(parents=True, exist_ok=True)
+ await artifact.save_to_local_path(step_artifacts_dir / artifact.name)
+ if (
+ self.remote_storage_enabled
+ and self.pipeline_context.ci_report_bucket
+ and self.pipeline_context.ci_gcs_credentials_secret
+ ):
+ upload_time = int(time.time())
+ gcs_url = await artifact.upload_to_gcs(
+ dagger_client=self.pipeline_context.dagger_client,
+ bucket=self.pipeline_context.ci_report_bucket,
+ key=f"{self.report_output_prefix}/artifacts/{slugify(step_result.step.title)}/{upload_time}_{artifact.name}",
+ gcs_credentials=self.pipeline_context.ci_gcs_credentials_secret,
+ )
+ self.pipeline_context.logger.info(f"Artifact {artifact.name} for {step_result.step.title} uploaded to {gcs_url}")
def to_json(self) -> str:
"""Create a JSON representation of the report.
diff --git a/airbyte-ci/connectors/pipelines/pipelines/models/steps.py b/airbyte-ci/connectors/pipelines/pipelines/models/steps.py
index edc97079a32f..86be5550f713 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/models/steps.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/models/steps.py
@@ -19,6 +19,7 @@
from pipelines import main_logger
from pipelines.helpers import sentry_utils
from pipelines.helpers.utils import format_duration, get_exec_result
+from pipelines.models.artifacts import Artifact
if TYPE_CHECKING:
from typing import Any, ClassVar, Optional, Union
@@ -73,7 +74,8 @@ class Result:
stdout: Optional[str] = None
report: Optional[str] = None
exc_info: Optional[Exception] = None
- output_artifact: Any = None
+ output: Any = None
+ artifacts: List[Artifact] = field(default_factory=list)
@property
def success(self) -> bool:
@@ -387,7 +389,7 @@ def get_step_status_from_exit_code(
else:
return StepStatus.FAILURE
- async def get_step_result(self, container: Container) -> StepResult:
+ async def get_step_result(self, container: Container, *args: Any, **kwargs: Any) -> StepResult:
"""Concurrent retrieval of exit code, stdout and stdout of a container.
Create a StepResult object from these objects.
@@ -404,7 +406,7 @@ async def get_step_result(self, container: Container) -> StepResult:
status=self.get_step_status_from_exit_code(exit_code),
stderr=stderr,
stdout=stdout,
- output_artifact=container,
+ output=container,
)
def _get_timed_out_step_result(self) -> StepResult:
diff --git a/airbyte-ci/connectors/pipelines/pyproject.toml b/airbyte-ci/connectors/pipelines/pyproject.toml
index d6cca9affdc0..cc7abba8285d 100644
--- a/airbyte-ci/connectors/pipelines/pyproject.toml
+++ b/airbyte-ci/connectors/pipelines/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"
[tool.poetry]
name = "pipelines"
-version = "4.3.2"
+version = "4.4.0"
description = "Packaged maintained by the connector operations team to perform CI for connectors' pipelines"
 authors = ["Airbyte <contact@airbyte.io>"]
diff --git a/airbyte-ci/connectors/pipelines/tests/test_bases.py b/airbyte-ci/connectors/pipelines/tests/test_bases.py
index a109e1e33a0b..15808f2c88de 100644
--- a/airbyte-ci/connectors/pipelines/tests/test_bases.py
+++ b/airbyte-ci/connectors/pipelines/tests/test_bases.py
@@ -39,7 +39,7 @@ async def test_run_with_timeout(self, test_context):
assert step_result.status == timed_out_step_result.status
assert step_result.stdout == timed_out_step_result.stdout
assert step_result.stderr == timed_out_step_result.stderr
- assert step_result.output_artifact == timed_out_step_result.output_artifact
+ assert step_result.output == timed_out_step_result.output
assert step.retry_count == step.max_retries + 1
@pytest.mark.parametrize(
diff --git a/airbyte-ci/connectors/pipelines/tests/test_build_image/test_python_connectors.py b/airbyte-ci/connectors/pipelines/tests/test_build_image/test_python_connectors.py
index bb8ac23a10ea..084911d82cf1 100644
--- a/airbyte-ci/connectors/pipelines/tests/test_build_image/test_python_connectors.py
+++ b/airbyte-ci/connectors/pipelines/tests/test_build_image/test_python_connectors.py
@@ -117,14 +117,14 @@ async def test__run_using_base_image_with_mocks(self, mocker, test_context_with_
container_built_from_base.with_exec.assert_called_with(["spec"])
assert step_result.status is StepStatus.SUCCESS
for platform in all_platforms:
- assert step_result.output_artifact[platform] == container_built_from_base
+ assert step_result.output[platform] == container_built_from_base
@pytest.mark.slow
async def test_building_from_base_image_for_real(self, test_context_with_real_connector_using_base_image, current_platform):
step = python_connectors.BuildConnectorImages(test_context_with_real_connector_using_base_image)
step_result = await step._run()
step_result.status is StepStatus.SUCCESS
- built_container = step_result.output_artifact[current_platform]
+ built_container = step_result.output[current_platform]
assert await built_container.env_variable("AIRBYTE_ENTRYPOINT") == " ".join(
build_customization.get_entrypoint(step.context.connector)
)
@@ -146,7 +146,7 @@ async def test_building_from_base_image_with_customization_for_real(
step = python_connectors.BuildConnectorImages(test_context_with_real_connector_using_base_image_with_build_customization)
step_result = await step._run()
step_result.status is StepStatus.SUCCESS
- built_container = step_result.output_artifact[current_platform]
+ built_container = step_result.output[current_platform]
assert await built_container.env_variable("MY_PRE_BUILD_ENV_VAR") == "my_pre_build_env_var_value"
assert await built_container.env_variable("MY_POST_BUILD_ENV_VAR") == "my_post_build_env_var_value"
@@ -161,7 +161,7 @@ async def test__run_using_base_dockerfile_with_mocks(self, mocker, test_context_
container_built_from_dockerfile.with_exec.assert_called_with(["spec"])
assert step_result.status is StepStatus.SUCCESS
for platform in all_platforms:
- assert step_result.output_artifact[platform] == container_built_from_dockerfile
+ assert step_result.output[platform] == container_built_from_dockerfile
async def test_building_from_dockerfile_for_real(self, test_context_with_real_connector_without_base_image):
step = python_connectors.BuildConnectorImages(test_context_with_real_connector_without_base_image)
diff --git a/airbyte-ci/connectors/pipelines/tests/test_helpers/test_execution/test_run_steps.py b/airbyte-ci/connectors/pipelines/tests/test_helpers/test_execution/test_run_steps.py
index 37bfa991eee0..2d0193676b1b 100644
--- a/airbyte-ci/connectors/pipelines/tests/test_helpers/test_execution/test_run_steps.py
+++ b/airbyte-ci/connectors/pipelines/tests/test_helpers/test_execution/test_run_steps.py
@@ -299,7 +299,7 @@ async def test_run_steps_passes_results():
StepToRun(
id=CONNECTOR_TEST_STEP_ID.ACCEPTANCE,
step=AcceptanceTests(context, True),
- args=lambda results: {"connector_under_test_container": results[CONNECTOR_TEST_STEP_ID.BUILD].output_artifact[LOCAL_BUILD_PLATFORM]},
+ args=lambda results: {"connector_under_test_container": results[CONNECTOR_TEST_STEP_ID.BUILD].output[LOCAL_BUILD_PLATFORM]},
depends_on=[CONNECTOR_TEST_STEP_ID.BUILD],
),
@@ -309,23 +309,23 @@ class Simple(Step):
title = "Test Step"
async def _run(self, arg1, arg2) -> StepResult:
- output_artifact = f"{arg1}:{arg2}"
- return StepResult(step=self, status=StepStatus.SUCCESS, output_artifact=output_artifact)
+ output = f"{arg1}:{arg2}"
+ return StepResult(step=self, status=StepStatus.SUCCESS, output=output)
async def async_args(results):
- return {"arg1": results["step2"].output_artifact, "arg2": "4"}
+ return {"arg1": results["step2"].output, "arg2": "4"}
steps = [
[StepToRun(id="step1", step=Simple(test_context), args={"arg1": "1", "arg2": "2"})],
- [StepToRun(id="step2", step=Simple(test_context), args=lambda results: {"arg1": results["step1"].output_artifact, "arg2": "3"})],
+ [StepToRun(id="step2", step=Simple(test_context), args=lambda results: {"arg1": results["step1"].output, "arg2": "3"})],
[StepToRun(id="step3", step=Simple(test_context), args=async_args)],
]
results = await run_steps(steps)
- assert results["step1"].output_artifact == "1:2"
- assert results["step2"].output_artifact == "1:2:3"
- assert results["step3"].output_artifact == "1:2:3:4"
+ assert results["step1"].output == "1:2"
+ assert results["step2"].output == "1:2:3"
+ assert results["step3"].output == "1:2:3:4"
@pytest.mark.anyio
diff --git a/airbyte-ci/connectors/pipelines/tests/test_publish.py b/airbyte-ci/connectors/pipelines/tests/test_publish.py
index 471c89ae6ece..f5cb73d0ca9e 100644
--- a/airbyte-ci/connectors/pipelines/tests/test_publish.py
+++ b/airbyte-ci/connectors/pipelines/tests/test_publish.py
@@ -278,12 +278,12 @@ async def test_run_connector_publish_pipeline_when_image_does_not_exist(
name="check_connector_image_does_not_exist_result", status=StepStatus.SUCCESS
)
- # have output_artifact.values return []
+ # have output.values return []
built_connector_platform = mocker.Mock()
built_connector_platform.values.return_value = ["linux/amd64"]
publish_pipeline.steps.run_connector_build.return_value = mocker.Mock(
- name="build_connector_for_publish_result", status=build_step_status, output_artifact=built_connector_platform
+ name="build_connector_for_publish_result", status=build_step_status, output=built_connector_platform
)
publish_pipeline.PushConnectorImageToRegistry.return_value.run.return_value = mocker.Mock(
diff --git a/airbyte-ci/connectors/pipelines/tests/test_tests/test_python_connectors.py b/airbyte-ci/connectors/pipelines/tests/test_tests/test_python_connectors.py
index f6b718be42f3..f53b43ebe57a 100644
--- a/airbyte-ci/connectors/pipelines/tests/test_tests/test_python_connectors.py
+++ b/airbyte-ci/connectors/pipelines/tests/test_tests/test_python_connectors.py
@@ -48,7 +48,7 @@ def context_for_certified_connector_with_setup(self, mocker, certified_connector
@pytest.fixture
async def certified_container_with_setup(self, context_for_certified_connector_with_setup, current_platform):
result = await BuildConnectorImages(context_for_certified_connector_with_setup).run()
- return result.output_artifact[current_platform]
+ return result.output[current_platform]
@pytest.fixture
def context_for_connector_with_poetry(self, mocker, connector_with_poetry, dagger_client, current_platform):
@@ -69,7 +69,7 @@ def context_for_connector_with_poetry(self, mocker, connector_with_poetry, dagge
@pytest.fixture
async def container_with_poetry(self, context_for_connector_with_poetry, current_platform):
result = await BuildConnectorImages(context_for_connector_with_poetry).run()
- return result.output_artifact[current_platform]
+ return result.output[current_platform]
async def test__run_for_setup_py(self, context_for_certified_connector_with_setup, certified_container_with_setup):
# Assume that the tests directory is available
@@ -80,7 +80,7 @@ async def test__run_for_setup_py(self, context_for_certified_connector_with_setu
"Total coverage:" in result.stdout
), "The pytest-cov package should be installed in the test environment and test coverage report should be displayed."
assert "Required test coverage of" in result.stdout, "A test coverage threshold should be defined for certified connectors."
- pip_freeze_output = await result.output_artifact.with_exec(["pip", "freeze"], skip_entrypoint=True).stdout()
+ pip_freeze_output = await result.output.with_exec(["pip", "freeze"], skip_entrypoint=True).stdout()
assert (
context_for_certified_connector_with_setup.connector.technical_name in pip_freeze_output
), "The connector should be installed in the test environment."
@@ -93,7 +93,7 @@ async def test__run_for_poetry(self, context_for_connector_with_poetry, containe
assert isinstance(result, StepResult)
# We only check for the presence of "test session starts" because we have no guarantee that the tests will pass
assert "test session starts" in result.stdout or "test session starts" in result.stderr, "The pytest tests should have started."
- pip_freeze_output = await result.output_artifact.with_exec(["poetry", "run", "pip", "freeze"], skip_entrypoint=True).stdout()
+ pip_freeze_output = await result.output.with_exec(["poetry", "run", "pip", "freeze"], skip_entrypoint=True).stdout()
assert (
context_for_connector_with_poetry.connector.technical_name in pip_freeze_output