
Commit 371dd8c

stephane-airbyte authored and jatinyadav-cc committed
airbyte-ci: augment the report for java connectors (airbytehq#35317)
Today we're missing the logs (both JVM and container logs) in java connector reports. This change adds a link to the test artifacts for each step: in CI the link points to a zip file, while on a local run it points to a directory.

We also recently added the junit XML inlined with the test standard output and error, but that didn't work as well as we'd hoped: the reports were slow to load, the entries were not ordered by time, and the corresponding logs were missing. They may still prove useful, so rather than removing them altogether, they are now bundled into the log zip (or directory).

I'm also adding a button to copy a step's standard output or standard error to the clipboard. Finally, I'm reducing the max vertical size of an expanded step so it doesn't go over 70%, which looks much cleaner.

Here's an example of the result (from the child PR): https://storage.cloud.google.com/airbyte-ci-reports-multi/airbyte-ci/connectors/test/pull_request/stephane_02-09-add_background_thread_to_track_mssql_container_status/1708056420/d4683bfb7f90675c6b9e7c6d4bbad3f98c7a7550/source-mssql/3.7.0/output.html
1 parent 4a1d195 commit 371dd8c
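The links in the report resolve differently depending on where the pipeline ran, as the commit message describes: a GCS object (the zipped logs) in CI, a local directory on a developer machine. Below is a minimal sketch of that resolution logic, mirroring the loop added to `reports.py` further down; the function name `artifact_link` is made up for illustration.

```python
from pathlib import Path
from typing import Optional


def artifact_link(gcs_url: Optional[str], local_path: Optional[Path]) -> Optional[str]:
    """Pick the URL to show in the HTML report for one artifact.

    In CI the artifact was uploaded to GCS, so its public URL is used; on a local
    run only a path on disk exists, so a file:// URI is emitted instead.
    """
    if gcs_url:
        return gcs_url
    if local_path:
        return local_path.resolve().as_uri()
    return None  # nothing to link: the artifact was neither uploaded nor saved locally


# Example of a local run: artifact_link(None, Path("build/test-logs/2024-02-19T10-00-00"))
# returns something like "file:///home/dev/airbyte/build/test-logs/2024-02-19T10-00-00".
```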

File tree

26 files changed (+339, -209 lines)


airbyte-cdk/java/airbyte-cdk/dependencies/src/main/resources/log4j2-test.xml

+1-2
```diff
@@ -7,8 +7,7 @@
     <Property name="container-log-pattern">%d{yyyy-MM-dd'T'HH:mm:ss,SSS}{GMT+0}`%replace{%X{log_source}}{^ -}{} > %replace{%m}{$${env:LOG_SCRUB_PATTERN:-\*\*\*\*\*}}{*****}%n</Property>
     <!-- Always log INFO by default. -->
     <Property name="log-level">${sys:LOG_LEVEL:-${env:LOG_LEVEL:-INFO}}</Property>
-    <Property name="logSubDir">${env:AIRBYTE_LOG_SUBDIR:-${date:yyyy-MM-dd'T'HH:mm:ss}}</Property>
-    <Property name="logDir">build/test-logs/${logSubDir}</Property>
+    <Property name="logDir">build/test-logs/${date:yyyy-MM-dd'T'HH:mm:ss}</Property>
   </Properties>
 
   <Appenders>
```

airbyte-ci/connectors/pipelines/README.md

+2-1
```diff
@@ -644,7 +644,8 @@ E.G.: running Poe tasks on the modified internal packages of the current branch:
 
 | Version | PR | Description |
 | ------- | ---------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------- |
-| 4.3.2 | [#35536](https://github.com/airbytehq/airbyte/pull/35536) | Make QA checks run correctly on `*-strict-encrypt` connectors. |
+| 4.4.0 | [#35317](https://github.com/airbytehq/airbyte/pull/35317) | Augment java connector reports to include full logs and junit test results |
+| 4.3.2 | [#35536](https://github.com/airbytehq/airbyte/pull/35536) | Make QA checks run correctly on `*-strict-encrypt` connectors. |
 | 4.3.1 | [#35437](https://github.com/airbytehq/airbyte/pull/35437) | Do not run QA checks on publish, just MetadataValidation. |
 | 4.3.0 | [#35438](https://github.com/airbytehq/airbyte/pull/35438) | Optionally disable telemetry with environment variable. |
 | 4.2.4 | [#35325](https://github.com/airbytehq/airbyte/pull/35325) | Use `connectors_qa` for QA checks and remove redundant checks. |
```

airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/__init__.py

+1-1
```diff
@@ -47,7 +47,7 @@ async def run_connector_build_pipeline(context: ConnectorContext, semaphore: any
     async with semaphore:
         async with context:
             build_result = await run_connector_build(context)
-            per_platform_built_containers = build_result.output_artifact
+            per_platform_built_containers = build_result.output
             step_results.append(build_result)
             if context.is_local and build_result.status is StepStatus.SUCCESS:
                 load_image_result = await LoadContainerToLocalDockerHost(context, per_platform_built_containers, image_tag).run()
```
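Most of the changes below are a mechanical rename on the step result model: the generic value a step hands to the next step is now `output` instead of `output_artifact`, and the report additionally reads a separate `artifacts` list (used later in `reports.py`). Here is a trimmed-down sketch of what that model could look like; the exact fields and types of the real `StepResult` in `pipelines.models.steps` are assumptions.

```python
from dataclasses import dataclass, field
from typing import Any, List, Optional


@dataclass
class StepResult:
    # Simplified, assumed stand-in for pipelines.models.steps.StepResult.
    step: Any                      # the Step instance that produced this result
    status: Any                    # StepStatus.SUCCESS / FAILURE / SKIPPED
    stdout: Optional[str] = None
    stderr: Optional[str] = None
    output: Any = None             # renamed from `output_artifact`: value handed to the next step
    artifacts: List[Any] = field(default_factory=list)  # files (logs, junit XML, ...) linked from the report
```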

airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/common.py

+1-1
```diff
@@ -48,7 +48,7 @@ async def _run(self, *args: Any) -> StepResult:
             f"The {self.context.connector.technical_name} docker image "
             f"was successfully built for platform(s) {', '.join(self.build_platforms)}"
         )
-        return StepResult(step=self, status=StepStatus.SUCCESS, stdout=success_message, output_artifact=build_results_per_platform)
+        return StepResult(step=self, status=StepStatus.SUCCESS, stdout=success_message, output=build_results_per_platform)
 
     async def _build_connector(self, platform: Platform, *args: Any, **kwargs: Any) -> Container:
         """Implement the generation of the image for the platform and return the corresponding container.
```

airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/java_connectors.py

+1-1
```diff
@@ -59,7 +59,7 @@ async def run_connector_build(context: ConnectorContext) -> StepResult:
     build_connector_tar_result = await BuildConnectorDistributionTar(context).run()
     if build_connector_tar_result.status is not StepStatus.SUCCESS:
         return build_connector_tar_result
-    dist_dir = await build_connector_tar_result.output_artifact.directory(dist_tar_directory_path(context))
+    dist_dir = await build_connector_tar_result.output.directory(dist_tar_directory_path(context))
     return await BuildConnectorImages(context).run(dist_dir)
 
 
```

airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/normalization.py

+1-1
```diff
@@ -36,4 +36,4 @@ async def _run(self) -> StepResult:
             build_normalization_container = normalization.with_normalization(self.context, self.build_platform)
         else:
             build_normalization_container = self.context.dagger_client.container().from_(self.normalization_image)
-        return StepResult(step=self, status=StepStatus.SUCCESS, output_artifact=build_normalization_container)
+        return StepResult(step=self, status=StepStatus.SUCCESS, output=build_normalization_container)
```

airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/bump_version/pipeline.py

+7-7
```diff
@@ -55,7 +55,7 @@ async def _run(self) -> StepResult:
                 step=self,
                 status=StepStatus.SKIPPED,
                 stdout="Connector does not have a documentation file.",
-                output_artifact=self.repo_dir,
+                output=self.repo_dir,
             )
         try:
             updated_doc = self.add_changelog_entry(doc_path.read_text())
@@ -64,14 +64,14 @@
                 step=self,
                 status=StepStatus.FAILURE,
                 stdout=f"Could not add changelog entry: {e}",
-                output_artifact=self.repo_dir,
+                output=self.repo_dir,
             )
         updated_repo_dir = self.repo_dir.with_new_file(str(doc_path), contents=updated_doc)
         return StepResult(
             step=self,
             status=StepStatus.SUCCESS,
             stdout=f"Added changelog entry to {doc_path}",
-            output_artifact=updated_repo_dir,
+            output=updated_repo_dir,
         )
 
     def find_line_index_for_new_entry(self, markdown_text: str) -> int:
@@ -118,7 +118,7 @@
                 step=self,
                 status=StepStatus.SKIPPED,
                 stdout="Can't retrieve the connector current version.",
-                output_artifact=self.repo_dir,
+                output=self.repo_dir,
             )
         updated_metadata_str = self.get_metadata_with_bumped_version(current_version, self.new_version, current_metadata_str)
         repo_dir_with_updated_metadata = metadata_change_helpers.get_repo_dir_with_updated_metadata_str(
@@ -134,7 +134,7 @@
             step=self,
             status=StepStatus.SUCCESS,
             stdout=f"Updated dockerImageTag from {current_version} to {self.new_version} in {metadata_path}",
-            output_artifact=repo_dir_with_updated_metadata,
+            output=repo_dir_with_updated_metadata,
         )
 
 
@@ -164,7 +164,7 @@ async def run_connector_version_bump_pipeline(
         new_version,
     )
     update_docker_image_tag_in_metadata_result = await update_docker_image_tag_in_metadata.run()
-    repo_dir_with_updated_metadata = update_docker_image_tag_in_metadata_result.output_artifact
+    repo_dir_with_updated_metadata = update_docker_image_tag_in_metadata_result.output
    steps_results.append(update_docker_image_tag_in_metadata_result)
 
     add_changelog_entry = AddChangelogEntry(
@@ -176,7 +176,7 @@ async def run_connector_version_bump_pipeline(
     )
     add_changelog_entry_result = await add_changelog_entry.run()
     steps_results.append(add_changelog_entry_result)
-    final_repo_dir = add_changelog_entry_result.output_artifact
+    final_repo_dir = add_changelog_entry_result.output
     await og_repo_dir.diff(final_repo_dir).export(str(git.get_git_repo_path()))
     report = ConnectorReport(context, steps_results, name="CONNECTOR VERSION BUMP RESULTS")
     context.report = report
```
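The version-bump pipeline above illustrates the pattern these renames touch: each step returns a `StepResult`, and the repo directory it produced is read from `.output` and fed into the next step. A hedged usage sketch follows; the constructor signatures are simplified from what the diff shows and are not taken verbatim from the module.

```python
from pipelines.airbyte_ci.connectors.bump_version.pipeline import AddChangelogEntry, BumpDockerImageTagInMetadata
from pipelines.models.steps import StepStatus


async def bump_and_add_changelog(context, og_repo_dir, new_version, changelog_entry, pull_request_number):
    # Step 1: bump dockerImageTag in metadata.yaml.
    bump_result = await BumpDockerImageTagInMetadata(context, og_repo_dir, new_version).run()
    if bump_result.status is not StepStatus.SUCCESS:
        return [bump_result]

    # Step 2: feed the updated repo dir (previously read from `.output_artifact`,
    # now `.output`) into the changelog step.
    changelog_result = await AddChangelogEntry(
        context,
        bump_result.output,
        new_version,
        changelog_entry,
        pull_request_number,
    ).run()
    return [bump_result, changelog_result]
```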

airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_base_image/pipeline.py

+12-12
```diff
@@ -64,7 +64,7 @@ async def _run(self) -> StepResult:
                 step=self,
                 status=StepStatus.SKIPPED,
                 stdout="Could not find a base image for this connector language.",
-                output_artifact=self.repo_dir,
+                output=self.repo_dir,
             )
 
         metadata_path = self.context.connector.metadata_file_path
@@ -76,15 +76,15 @@
                 step=self,
                 status=StepStatus.SKIPPED,
                 stdout="Connector does not have a base image metadata field.",
-                output_artifact=self.repo_dir,
+                output=self.repo_dir,
             )
 
         if current_base_image_address == latest_base_image_address:
             return StepResult(
                 step=self,
                 status=StepStatus.SKIPPED,
                 stdout="Connector already uses latest base image",
-                output_artifact=self.repo_dir,
+                output=self.repo_dir,
             )
         updated_metadata = self.update_base_image_in_metadata(current_metadata, latest_base_image_address)
         updated_repo_dir = metadata_change_helpers.get_repo_dir_with_updated_metadata(self.repo_dir, metadata_path, updated_metadata)
@@ -93,7 +93,7 @@
             step=self,
             status=StepStatus.SUCCESS,
             stdout=f"Updated base image to {latest_base_image_address} in {metadata_path}",
-            output_artifact=updated_repo_dir,
+            output=updated_repo_dir,
         )
 
 
@@ -146,7 +146,7 @@
                 step=self,
                 status=StepStatus.SKIPPED,
                 stdout="Connector does not have a documentation file.",
-                output_artifact=self.repo_dir,
+                output=self.repo_dir,
             )
         current_readme = await (await self.context.get_connector_dir(include=["README.md"])).file("README.md").contents()
         try:
@@ -156,14 +156,14 @@
                 step=self,
                 status=StepStatus.FAILURE,
                 stdout=str(e),
-                output_artifact=self.repo_dir,
+                output=self.repo_dir,
             )
         updated_repo_dir = await self.repo_dir.with_new_file(str(readme_path), contents=updated_readme)
         return StepResult(
             step=self,
             status=StepStatus.SUCCESS,
             stdout=f"Added build instructions to {readme_path}",
-            output_artifact=updated_repo_dir,
+            output=updated_repo_dir,
         )
 
     def add_build_instructions(self, og_doc_content: str) -> str:
@@ -276,7 +276,7 @@ async def run_connector_base_image_upgrade_pipeline(context: ConnectorContext, s
     )
     update_base_image_in_metadata_result = await update_base_image_in_metadata.run()
     steps_results.append(update_base_image_in_metadata_result)
-    final_repo_dir = update_base_image_in_metadata_result.output_artifact
+    final_repo_dir = update_base_image_in_metadata_result.output
     await og_repo_dir.diff(final_repo_dir).export(str(git.get_git_repo_path()))
     report = ConnectorReport(context, steps_results, name="BASE IMAGE UPGRADE RESULTS")
     context.report = report
@@ -324,7 +324,7 @@ async def run_connector_migration_to_base_image_pipeline(
     new_version = get_bumped_version(context.connector.version, "patch")
     bump_version_in_metadata = BumpDockerImageTagInMetadata(
         context,
-        update_base_image_in_metadata_result.output_artifact,
+        update_base_image_in_metadata_result.output,
         new_version,
     )
     bump_version_in_metadata_result = await bump_version_in_metadata.run()
@@ -333,7 +333,7 @@
     # ADD CHANGELOG ENTRY
     add_changelog_entry = AddChangelogEntry(
         context,
-        bump_version_in_metadata_result.output_artifact,
+        bump_version_in_metadata_result.output,
         new_version,
         "Base image migration: remove Dockerfile and use the python-connector-base image",
         pull_request_number,
@@ -344,13 +344,13 @@
     # UPDATE DOC
     add_build_instructions_to_doc = AddBuildInstructionsToReadme(
         context,
-        add_changelog_entry_result.output_artifact,
+        add_changelog_entry_result.output,
     )
     add_build_instructions_to_doc_results = await add_build_instructions_to_doc.run()
     steps_results.append(add_build_instructions_to_doc_results)
 
     # EXPORT MODIFIED FILES BACK TO HOST
-    final_repo_dir = add_build_instructions_to_doc_results.output_artifact
+    final_repo_dir = add_build_instructions_to_doc_results.output
     await og_repo_dir.diff(final_repo_dir).export(str(git.get_git_repo_path()))
     report = ConnectorReport(context, steps_results, name="MIGRATE TO BASE IMAGE RESULTS")
     context.report = report
```

airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py

+1-1
```diff
@@ -313,7 +313,7 @@ def create_connector_report(results: List[StepResult]) -> ConnectorReport:
     if build_connector_results.status is not StepStatus.SUCCESS:
         return create_connector_report(results)
 
-    built_connector_platform_variants = list(build_connector_results.output_artifact.values())
+    built_connector_platform_variants = list(build_connector_results.output.values())
     push_connector_image_results = await PushConnectorImageToRegistry(context).run(built_connector_platform_variants)
     results.append(push_connector_image_results)
```

airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/reports.py

+51-16
```diff
@@ -6,13 +6,15 @@
 import json
 import webbrowser
 from dataclasses import dataclass
-from typing import TYPE_CHECKING
+from pathlib import Path
+from types import MappingProxyType
+from typing import TYPE_CHECKING, Dict
 
-from anyio import Path
 from connector_ops.utils import console  # type: ignore
 from jinja2 import Environment, PackageLoader, select_autoescape
 from pipelines.consts import GCS_PUBLIC_DOMAIN
 from pipelines.helpers.utils import format_duration
+from pipelines.models.artifacts import Artifact
 from pipelines.models.reports import Report
 from pipelines.models.steps import StepStatus
 from rich.console import Group
@@ -42,13 +44,19 @@ def report_output_prefix(self) -> str:
     def html_report_file_name(self) -> str:
         return self.filename + ".html"
 
+    def file_remote_storage_key(self, file_name: str) -> str:
+        return f"{self.report_output_prefix}/{file_name}"
+
     @property
     def html_report_remote_storage_key(self) -> str:
-        return f"{self.report_output_prefix}/{self.html_report_file_name}"
+        return self.file_remote_storage_key(self.html_report_file_name)
+
+    def file_url(self, file_name: str) -> str:
+        return f"{GCS_PUBLIC_DOMAIN}/{self.pipeline_context.ci_report_bucket}/{self.file_remote_storage_key(file_name)}"
 
     @property
     def html_report_url(self) -> str:
-        return f"{GCS_PUBLIC_DOMAIN}/{self.pipeline_context.ci_report_bucket}/{self.html_report_remote_storage_key}"
+        return self.file_url(self.html_report_file_name)
 
     def to_json(self) -> str:
         """Create a JSON representation of the connector test report.
@@ -81,7 +89,7 @@ def to_json(self) -> str:
             }
         )
 
-    async def to_html(self) -> str:
+    def to_html(self) -> str:
         env = Environment(
             loader=PackageLoader("pipelines.airbyte_ci.connectors.test.steps"),
             autoescape=select_autoescape(),
@@ -91,7 +99,18 @@ async def to_html(self) -> str:
         template = env.get_template("test_report.html.j2")
         template.globals["StepStatus"] = StepStatus
         template.globals["format_duration"] = format_duration
-        local_icon_path = await Path(f"{self.pipeline_context.connector.code_directory}/icon.svg").resolve()
+        local_icon_path = Path(f"{self.pipeline_context.connector.code_directory}/icon.svg").resolve()
+        step_result_to_artifact_links: Dict[str, List[Dict]] = {}
+        for step_result in self.steps_results:
+            for artifact in step_result.artifacts:
+                if artifact.gcs_url:
+                    url = artifact.gcs_url
+                elif artifact.local_path:
+                    url = artifact.local_path.resolve().as_uri()
+                else:
+                    continue
+                step_result_to_artifact_links.setdefault(step_result.step.title, []).append({"name": artifact.name, "url": url})
+
         template_context = {
             "connector_name": self.pipeline_context.connector.technical_name,
             "step_results": self.steps_results,
@@ -104,6 +123,8 @@ async def to_html(self) -> str:
             "git_revision": self.pipeline_context.git_revision,
             "commit_url": None,
             "icon_url": local_icon_path.as_uri(),
+            "report": self,
+            "step_result_to_artifact_links": MappingProxyType(step_result_to_artifact_links),
         }
 
         if self.pipeline_context.is_ci:
@@ -116,18 +137,32 @@ async def to_html(self) -> str:
             ] = f"https://raw.githubusercontent.com/airbytehq/airbyte/{self.pipeline_context.git_revision}/{self.pipeline_context.connector.code_directory}/icon.svg"
         return template.render(template_context)
 
+    async def save_html_report(self) -> None:
+        """Save the report as HTML, upload it to GCS if the pipeline is running in CI"""
+
+        html_report_path = self.report_dir_path / self.html_report_file_name
+        report_dir = self.pipeline_context.dagger_client.host().directory(str(self.report_dir_path))
+        local_html_report_file = report_dir.with_new_file(self.html_report_file_name, self.to_html()).file(self.html_report_file_name)
+        html_report_artifact = Artifact(name="HTML Report", content_type="text/html", content=local_html_report_file)
+        await html_report_artifact.save_to_local_path(html_report_path)
+        absolute_path = html_report_path.absolute()
+        self.pipeline_context.logger.info(f"Report saved locally at {absolute_path}")
+        if self.remote_storage_enabled and self.pipeline_context.ci_gcs_credentials_secret and self.pipeline_context.ci_report_bucket:
+            gcs_url = await html_report_artifact.upload_to_gcs(
+                dagger_client=self.pipeline_context.dagger_client,
+                bucket=self.pipeline_context.ci_report_bucket,
+                key=self.html_report_remote_storage_key,
+                gcs_credentials=self.pipeline_context.ci_gcs_credentials_secret,
+            )
+            self.pipeline_context.logger.info(f"HTML report uploaded to {gcs_url}")
+
+        elif self.pipeline_context.enable_report_auto_open:
+            self.pipeline_context.logger.info("Opening HTML report in browser.")
+            webbrowser.open(absolute_path.as_uri())
+
     async def save(self) -> None:
-        local_html_path = await self.save_local(self.html_report_file_name, await self.to_html())
-        absolute_path = await local_html_path.resolve()
-        if self.pipeline_context.enable_report_auto_open:
-            self.pipeline_context.logger.info(f"HTML report saved locally: {absolute_path}")
-        if self.pipeline_context.enable_report_auto_open:
-            self.pipeline_context.logger.info("Opening HTML report in browser.")
-            webbrowser.open(absolute_path.as_uri())
-        if self.remote_storage_enabled:
-            await self.save_remote(local_html_path, self.html_report_remote_storage_key, "text/html")
-            self.pipeline_context.logger.info(f"HTML report uploaded to {self.html_report_url}")
         await super().save()
+        await self.save_html_report()
 
     def print(self) -> None:
         """Print the test report to the console in a nice way."""
```
