diff --git a/instrumentation-genai/opentelemetry-instrumentation-vertexai/src/opentelemetry/instrumentation/vertexai/events.py b/instrumentation-genai/opentelemetry-instrumentation-vertexai/src/opentelemetry/instrumentation/vertexai/events.py
index 5d011006de..61bcc4a79e 100644
--- a/instrumentation-genai/opentelemetry-instrumentation-vertexai/src/opentelemetry/instrumentation/vertexai/events.py
+++ b/instrumentation-genai/opentelemetry-instrumentation-vertexai/src/opentelemetry/instrumentation/vertexai/events.py
@@ -20,6 +20,11 @@
 schematized in YAML and the Weaver tool supports it.
 """
 
+from __future__ import annotations
+
+from dataclasses import asdict, dataclass
+from typing import Literal, Sequence
+
 from opentelemetry._events import Event
 from opentelemetry.semconv._incubating.attributes import gen_ai_attributes
 from opentelemetry.util.types import AnyValue
@@ -89,3 +94,57 @@ def system_event(
         },
         body=body,
     )
+
+
+@dataclass
+class ChoiceMessage:
+    """The message field for a gen_ai.choice event"""
+
+    content: AnyValue = None
+    role: str = "assistant"
+
+
+@dataclass
+class ChoiceToolCall:
+    """The tool_calls field for a gen_ai.choice event"""
+
+    @dataclass
+    class Function:
+        name: str
+        arguments: AnyValue = None
+
+    function: Function
+    id: str
+    type: Literal["function"]
+
+
+FinishReason = Literal[
+    "content_filter", "error", "length", "stop", "tool_calls"
+]
+
+
+def choice_event(
+    *,
+    finish_reason: FinishReason | str,
+    index: int,
+    message: ChoiceMessage,
+    tool_calls: Sequence[ChoiceToolCall] | None = None,
+) -> Event:
+    """Creates a choice event, which describes the Gen AI response message.
+    https://github.com/open-telemetry/semantic-conventions/blob/v1.28.0/docs/gen-ai/gen-ai-events.md#event-gen_aichoice
+    """
+    body: dict[str, AnyValue] = {
+        "finish_reason": finish_reason,
+        "index": index,
+        "message": asdict(message),
+    }
+    if tool_calls:
+        body["tool_calls"] = [asdict(tool_call) for tool_call in tool_calls]
+
+    return Event(
+        name="gen_ai.choice",
+        attributes={
+            gen_ai_attributes.GEN_AI_SYSTEM: gen_ai_attributes.GenAiSystemValues.VERTEX_AI.value,
+        },
+        body=body,
+    )
diff --git a/instrumentation-genai/opentelemetry-instrumentation-vertexai/src/opentelemetry/instrumentation/vertexai/patch.py b/instrumentation-genai/opentelemetry-instrumentation-vertexai/src/opentelemetry/instrumentation/vertexai/patch.py
index 3c72a10023..c268400b96 100644
--- a/instrumentation-genai/opentelemetry-instrumentation-vertexai/src/opentelemetry/instrumentation/vertexai/patch.py
+++ b/instrumentation-genai/opentelemetry-instrumentation-vertexai/src/opentelemetry/instrumentation/vertexai/patch.py
@@ -29,6 +29,7 @@
     get_server_attributes,
     get_span_name,
     request_to_events,
+    response_to_events,
 )
 from opentelemetry.trace import SpanKind, Tracer
 
@@ -130,10 +131,11 @@ def traced_method(
             #     )
 
             span.set_attributes(get_genai_response_attributes(response))
-            # TODO: add response attributes and events
-            # _set_response_attributes(
-            #     span, result, event_logger, capture_content
-            # )
+            for event in response_to_events(
+                response=response, capture_content=capture_content
+            ):
+                event_logger.emit(event)
+
             return response
 
     return traced_method
diff --git a/instrumentation-genai/opentelemetry-instrumentation-vertexai/src/opentelemetry/instrumentation/vertexai/utils.py b/instrumentation-genai/opentelemetry-instrumentation-vertexai/src/opentelemetry/instrumentation/vertexai/utils.py
index 5db81a900a..f8d432ecb4 100644
--- a/instrumentation-genai/opentelemetry-instrumentation-vertexai/src/opentelemetry/instrumentation/vertexai/utils.py
+++ b/instrumentation-genai/opentelemetry-instrumentation-vertexai/src/opentelemetry/instrumentation/vertexai/utils.py
@@ -28,7 +28,10 @@
 
 from opentelemetry._events import Event
 from opentelemetry.instrumentation.vertexai.events import (
+    ChoiceMessage,
+    FinishReason,
     assistant_event,
+    choice_event,
     system_event,
     user_event,
 )
@@ -55,6 +58,9 @@
 )
 
 
+_MODEL = "model"
+
+
 @dataclass(frozen=True)
 class GenerateContentParams:
     model: str
@@ -204,7 +210,7 @@ def request_to_events(
 
     for content in params.contents or []:
         # Assistant message
-        if content.role == "model":
+        if content.role == _MODEL:
             request_content = _parts_to_any_value(
                 capture_content=capture_content, parts=content.parts
            )
@@ -218,6 +224,29 @@
         yield user_event(role=content.role, content=request_content)
 
 
+def response_to_events(
+    *,
+    response: prediction_service.GenerateContentResponse
+    | prediction_service_v1beta1.GenerateContentResponse,
+    capture_content: bool,
+) -> Iterable[Event]:
+    for index, candidate in enumerate(response.candidates):
+        yield choice_event(
+            finish_reason=_map_finish_reason(candidate.finish_reason),
+            index=index,
+            # default to "model" since Vertex uses that role instead of "assistant"
+            message=ChoiceMessage(
+                role=candidate.content.role or _MODEL,
+                content=_parts_to_any_value(
+                    capture_content=capture_content,
+                    parts=candidate.content.parts,
+                ),
+            ),
+            # TODO: tool calls
+            tool_calls=None,
+        )
+
+
 def _parts_to_any_value(
     *,
     capture_content: bool,
@@ -230,3 +259,26 @@
         cast("dict[str, AnyValue]", type(part).to_dict(part))  # type: ignore[reportUnknownMemberType]
         for part in parts
     ]
+
+
+def _map_finish_reason(
+    finish_reason: content.Candidate.FinishReason
+    | content_v1beta1.Candidate.FinishReason,
+) -> FinishReason | str:
+    EnumType = type(finish_reason)
+    if (
+        finish_reason is EnumType.FINISH_REASON_UNSPECIFIED
+        or finish_reason is EnumType.OTHER
+    ):
+        return "error"
+    if finish_reason is EnumType.STOP:
+        return "stop"
+    if finish_reason is EnumType.MAX_TOKENS:
+        return "length"
+
+    # There are a lot of specific enum values from Vertex that would map to "content_filter".
+    # Mapping them would likely obfuscate the telemetry because 1) it over-generalizes and
+    # 2) half of the values would come from the OTel enum and the other half from the Vertex
+    # enum. See for reference
+    # https://github.com/googleapis/python-aiplatform/blob/c5023698c7068e2f84523f91b824641c9ef2d694/google/cloud/aiplatform_v1/types/content.py#L786-L822
+    return finish_reason.name.lower()
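
For reference, a minimal usage sketch (not part of the diff) of the new helpers added to events.py. The literal message content and the event_logger reference are illustrative assumptions; in practice response_to_events() builds these arguments from the Vertex AI response candidates:

    from opentelemetry.instrumentation.vertexai.events import (
        ChoiceMessage,
        choice_event,
    )

    # Build the gen_ai.choice event for a single response candidate.
    event = choice_event(
        finish_reason="stop",  # mapped from the Vertex FinishReason enum by _map_finish_reason()
        index=0,  # position of the candidate in response.candidates
        message=ChoiceMessage(
            role="model",  # Vertex uses "model" rather than "assistant"
            content=[{"text": "Hello!"}],  # illustrative stand-in for the serialized parts
        ),
        tool_calls=None,  # tool call support is still a TODO in this change
    )
    # patch.py emits one such event per candidate: event_logger.emit(event)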