feat: Propagate prompt id to playground spans metadata #6224

Open · wants to merge 4 commits into base: prompts
2 changes: 2 additions & 0 deletions app/schema.graphql
@@ -110,6 +110,7 @@ input ChatCompletionInput {
   tools: [JSON!]
   apiKey: String = null
   template: TemplateOptions
+  promptId: GlobalID = null
Contributor: Do we want to also add this to the non-streaming mutation? That one does not carry over the prompt id.

}

input ChatCompletionMessageInput {
@@ -159,6 +160,7 @@ input ChatCompletionOverDatasetInput {
   experimentName: String = null
   experimentDescription: String = null
   experimentMetadata: JSON = {}
+  promptId: GlobalID = null
}

type ChatCompletionOverDatasetMutationExamplePayload {
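A side note on the GlobalID scalar used for the new field: it travels as an opaque relay-style ID, which is also what str(prompt_id) later stringifies into span metadata. A minimal standalone sketch using strawberry's relay helper (the "Prompt"/"1" values are made up for illustration):

    from strawberry.relay.types import GlobalID

    # A relay GlobalID serializes as the base64 encoding of "TypeName:node_id".
    prompt_gid = GlobalID("Prompt", "1")
    print(str(prompt_gid))  # e.g. "UHJvbXB0OjE=" (base64 of "Prompt:1")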

Some generated files are not rendered by default (4 files).

3 changes: 2 additions & 1 deletion app/src/pages/playground/playgroundUtils.ts
@@ -980,7 +980,8 @@ const getBaseChatCompletionInput = ({
       ? instance.tools.map((tool) => tool.definition)
       : undefined,
     apiKey: credentials[instance.model.provider] || null,
-  } as const;
+    promptId: instance.prompt?.id,
+  } satisfies Partial<ChatCompletionInput>;
};

/**
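Note on the change above: the previous "as const" assertion only froze the object's literal types and never checked them against the GraphQL input type; "satisfies Partial<ChatCompletionInput>" keeps the inferred type but makes the compiler verify each property, so a misspelled or wrongly typed field (including the new promptId) now fails at build time.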
8 changes: 8 additions & 0 deletions src/phoenix/server/api/helpers/playground_spans.py
@@ -26,6 +26,7 @@
 )
 from opentelemetry.sdk.trace.id_generator import RandomIdGenerator as DefaultOTelIDGenerator
 from opentelemetry.trace import StatusCode
+from strawberry.relay.types import GlobalID
 from strawberry.scalars import JSON as JSONScalarType
 from typing_extensions import Self, TypeAlias, assert_never

@@ -70,8 +71,10 @@ def __init__(
     ) -> None:
         self._input = input
         self._attributes: dict[str, Any] = attributes if attributes is not None else {}
+
         self._attributes.update(
             chain(
+                prompt_id_metadata(input.prompt_id),
                 llm_span_kind(),
                 llm_model_name(input.model.name),
                 llm_tools(input.tools or []),
@@ -264,6 +267,10 @@ def input_value_and_mime_type(
     yield INPUT_VALUE, safe_json_dumps(input_data)


+def prompt_id_metadata(prompt_id: Optional[GlobalID]) -> Iterator[tuple[str, Any]]:
+    yield METADATA, {"phoenix-prompt-id": str(prompt_id)}
Contributor (screenshot attached): Do we always want to set this even when no prompt id is set?
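The concern above is easy to reproduce in isolation. Below is a minimal standalone sketch (not Phoenix code: METADATA is assumed to resolve to the OpenInference "metadata" key, and a plain string stands in for GlobalID) showing that the helper stringifies a missing id into "None", plus one possible guard. Because __init__ feeds these generators into dict.update via chain, yielding nothing would simply omit the metadata key.

    from typing import Any, Iterator, Optional

    METADATA = "metadata"  # assumed value of SpanAttributes.METADATA

    def prompt_id_metadata(prompt_id: Optional[str]) -> Iterator[tuple[str, Any]]:
        # Behavior as diffed: always yields, even when prompt_id is None.
        yield METADATA, {"phoenix-prompt-id": str(prompt_id)}

    print(dict(prompt_id_metadata(None)))
    # -> {'metadata': {'phoenix-prompt-id': 'None'}}

    def guarded_prompt_id_metadata(prompt_id: Optional[str]) -> Iterator[tuple[str, Any]]:
        # One possible fix: emit no metadata entry when there is no prompt id.
        if prompt_id is not None:
            yield METADATA, {"phoenix-prompt-id": str(prompt_id)}

    print(dict(guarded_prompt_id_metadata(None)))  # -> {}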



def _merge_tool_call_chunks(
    chunks_by_id: defaultdict[str, list[ToolCallChunk]],
) -> list[dict[str, Any]]:
@@ -442,6 +449,7 @@ def _serialize_event(event: SpanEvent) -> dict[str, Any]:
 LLM_TOOLS = SpanAttributes.LLM_TOOLS
 LLM_TOKEN_COUNT_PROMPT = SpanAttributes.LLM_TOKEN_COUNT_PROMPT
 LLM_TOKEN_COUNT_COMPLETION = SpanAttributes.LLM_TOKEN_COUNT_COMPLETION
+METADATA = SpanAttributes.METADATA

MESSAGE_CONTENT = MessageAttributes.MESSAGE_CONTENT
MESSAGE_ROLE = MessageAttributes.MESSAGE_ROLE
2 changes: 2 additions & 0 deletions src/phoenix/server/api/input_types/ChatCompletionInput.py
@@ -21,6 +21,7 @@ class ChatCompletionInput:
     tools: Optional[list[JSON]] = UNSET
     api_key: Optional[str] = strawberry.field(default=None)
     template: Optional[TemplateOptions] = UNSET
+    prompt_id: Optional[GlobalID] = None


@strawberry.input
@@ -36,3 +37,4 @@ class ChatCompletionOverDatasetInput:
     experiment_name: Optional[str] = None
     experiment_description: Optional[str] = None
     experiment_metadata: Optional[JSON] = strawberry.field(default_factory=dict)
+    prompt_id: Optional[GlobalID] = None
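For context on the naming difference between this file and app/schema.graphql: strawberry camel-cases field names by default, which is why prompt_id here appears as promptId in the schema. A minimal standalone sketch (ExampleInput, Query, and echo are made-up names, not Phoenix code):

    from typing import Optional

    import strawberry
    from strawberry.relay.types import GlobalID


    @strawberry.input
    class ExampleInput:
        prompt_id: Optional[GlobalID] = None


    @strawberry.type
    class Query:
        @strawberry.field
        def echo(self, input: ExampleInput) -> str:
            return str(input.prompt_id)


    # Printing the schema shows the camel-cased SDL field, something like:
    #   input ExampleInput { promptId: GlobalID = null }
    print(strawberry.Schema(query=Query))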