Commit

fix: allow disabling tracing of prompts through config (#19)
nirga authored Dec 1, 2023
1 parent 006dec1 commit a616e97
Showing 6 changed files with 141 additions and 115 deletions.
7 changes: 0 additions & 7 deletions package-lock.json

Some generated files are not rendered by default.

227 changes: 121 additions & 106 deletions packages/instrumentation-openai/src/instrumentation.ts
@@ -38,10 +38,16 @@ import {
 } from "openai/resources";
 
 export class OpenAIInstrumentation extends InstrumentationBase<any> {
+  protected override _config!: OpenAIInstrumentationConfig;
+
   constructor(config: OpenAIInstrumentationConfig = {}) {
     super("@traceloop/instrumentation-openai", "0.0.17", config);
   }
 
+  public override setConfig(config: OpenAIInstrumentationConfig = {}) {
+    super.setConfig(config);
+  }
+
   public manuallyInstrument(
     module: typeof openai.OpenAI & { openLLMetryPatched?: boolean },
   ) {
@@ -177,7 +183,12 @@ export class OpenAIInstrumentation extends InstrumentationBase<any> {
       },
     );
 
-    const wrappedPromise = wrapPromise(type, version, span, execPromise);
+    const wrappedPromise = plugin._wrapPromise(
+      type,
+      version,
+      span,
+      execPromise,
+    );
 
     return context.bind(execContext, wrappedPromise as any);
   };
@@ -232,7 +243,7 @@ export class OpenAIInstrumentation extends InstrumentationBase<any> {
       });
     }
 
-    if (shouldSendPrompts()) {
+    if (this._shouldSendPrompts()) {
       if (type === "chat") {
         params.messages.forEach((message, index) => {
           attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.role`] =
@@ -253,124 +264,128 @@ export class OpenAIInstrumentation extends InstrumentationBase<any> {
       attributes,
     });
   }
-}
 
-function wrapPromise<T>(
-  type: "chat" | "completion",
-  version: "v3" | "v4",
-  span: Span,
-  promise: Promise<T>,
-): Promise<T> {
-  return promise
-    .then((result) => {
-      return new Promise<T>((resolve) => {
-        if (version === "v3") {
-          if (type === "chat") {
-            endSpan({
-              type,
-              span,
-              result: (result as any).data as ChatCompletion,
-            });
-          } else {
-            endSpan({ type, span, result: (result as any).data as Completion });
-          }
-        } else {
-          if (type === "chat") {
-            endSpan({ type, span, result: result as ChatCompletion });
-          } else {
-            endSpan({ type, span, result: result as Completion });
-          }
-        }
-        resolve(result);
-      });
-    })
-    .catch((error: Error) => {
-      return new Promise<T>((_, reject) => {
-        span.setStatus({
-          code: SpanStatusCode.ERROR,
-          message: error.message,
-        });
-        span.recordException(error);
-        span.end();
-
-        reject(error);
-      });
-    });
-}
-
-function endSpan({
-  span,
-  type,
-  result,
-}:
-  | { span: Span; type: "chat"; result: ChatCompletion }
-  | { span: Span; type: "completion"; result: Completion }) {
-  span.setAttribute(SpanAttributes.LLM_RESPONSE_MODEL, result.model);
-  if (result.usage) {
-    span.setAttribute(
-      SpanAttributes.LLM_USAGE_TOTAL_TOKENS,
-      result.usage?.total_tokens,
-    );
-    span.setAttribute(
-      SpanAttributes.LLM_USAGE_COMPLETION_TOKENS,
-      result.usage?.completion_tokens,
-    );
-    span.setAttribute(
-      SpanAttributes.LLM_USAGE_PROMPT_TOKENS,
-      result.usage?.prompt_tokens,
-    );
-  }
-
-  if (shouldSendPrompts()) {
-    if (type === "chat") {
-      result.choices.forEach((choice, index) => {
-        span.setAttribute(
-          `${SpanAttributes.LLM_COMPLETIONS}.${index}.finish_reason`,
-          choice.finish_reason,
-        );
-        span.setAttribute(
-          `${SpanAttributes.LLM_COMPLETIONS}.${index}.role`,
-          choice.message.role,
-        );
-        span.setAttribute(
-          `${SpanAttributes.LLM_COMPLETIONS}.${index}.content`,
-          choice.message.content ?? "",
-        );
-
-        if (choice.message.function_call) {
-          span.setAttribute(
-            `${SpanAttributes.LLM_COMPLETIONS}.${index}.function_call.name`,
-            choice.message.function_call.name,
-          );
-          span.setAttribute(
-            `${SpanAttributes.LLM_COMPLETIONS}.${index}.function_call.arguments`,
-            choice.message.function_call.arguments,
-          );
-        }
-      });
-    } else {
-      result.choices.forEach((choice, index) => {
-        span.setAttribute(
-          `${SpanAttributes.LLM_COMPLETIONS}.${index}.finish_reason`,
-          choice.finish_reason,
-        );
-        span.setAttribute(
-          `${SpanAttributes.LLM_COMPLETIONS}.${index}.role`,
-          "assistant",
-        );
-        span.setAttribute(
-          `${SpanAttributes.LLM_COMPLETIONS}.${index}.content`,
-          choice.text,
-        );
-      });
-    }
-  }
-
-  span.end();
-}
-
-function shouldSendPrompts() {
-  return (
-    (process.env.TRACELOOP_TRACE_CONTENT || "true").toLowerCase() === "true"
-  );
-}
+  private _wrapPromise<T>(
+    type: "chat" | "completion",
+    version: "v3" | "v4",
+    span: Span,
+    promise: Promise<T>,
+  ): Promise<T> {
+    return promise
+      .then((result) => {
+        return new Promise<T>((resolve) => {
+          if (version === "v3") {
+            if (type === "chat") {
+              this._endSpan({
+                type,
+                span,
+                result: (result as any).data as ChatCompletion,
+              });
+            } else {
+              this._endSpan({
+                type,
+                span,
+                result: (result as any).data as Completion,
+              });
+            }
+          } else {
+            if (type === "chat") {
+              this._endSpan({ type, span, result: result as ChatCompletion });
+            } else {
+              this._endSpan({ type, span, result: result as Completion });
+            }
+          }
+          resolve(result);
+        });
+      })
+      .catch((error: Error) => {
+        return new Promise<T>((_, reject) => {
+          span.setStatus({
+            code: SpanStatusCode.ERROR,
+            message: error.message,
+          });
+          span.recordException(error);
+          span.end();
+
+          reject(error);
+        });
+      });
+  }
+
+  private _endSpan({
+    span,
+    type,
+    result,
+  }:
+    | { span: Span; type: "chat"; result: ChatCompletion }
+    | { span: Span; type: "completion"; result: Completion }) {
+    span.setAttribute(SpanAttributes.LLM_RESPONSE_MODEL, result.model);
+    if (result.usage) {
+      span.setAttribute(
+        SpanAttributes.LLM_USAGE_TOTAL_TOKENS,
+        result.usage?.total_tokens,
+      );
+      span.setAttribute(
+        SpanAttributes.LLM_USAGE_COMPLETION_TOKENS,
+        result.usage?.completion_tokens,
+      );
+      span.setAttribute(
+        SpanAttributes.LLM_USAGE_PROMPT_TOKENS,
+        result.usage?.prompt_tokens,
+      );
+    }
+
+    if (this._shouldSendPrompts()) {
+      if (type === "chat") {
+        result.choices.forEach((choice, index) => {
+          span.setAttribute(
+            `${SpanAttributes.LLM_COMPLETIONS}.${index}.finish_reason`,
+            choice.finish_reason,
+          );
+          span.setAttribute(
+            `${SpanAttributes.LLM_COMPLETIONS}.${index}.role`,
+            choice.message.role,
+          );
+          span.setAttribute(
+            `${SpanAttributes.LLM_COMPLETIONS}.${index}.content`,
+            choice.message.content ?? "",
+          );
+
+          if (choice.message.function_call) {
+            span.setAttribute(
+              `${SpanAttributes.LLM_COMPLETIONS}.${index}.function_call.name`,
+              choice.message.function_call.name,
+            );
+            span.setAttribute(
+              `${SpanAttributes.LLM_COMPLETIONS}.${index}.function_call.arguments`,
+              choice.message.function_call.arguments,
+            );
+          }
+        });
+      } else {
+        result.choices.forEach((choice, index) => {
+          span.setAttribute(
+            `${SpanAttributes.LLM_COMPLETIONS}.${index}.finish_reason`,
+            choice.finish_reason,
+          );
+          span.setAttribute(
+            `${SpanAttributes.LLM_COMPLETIONS}.${index}.role`,
+            "assistant",
+          );
+          span.setAttribute(
+            `${SpanAttributes.LLM_COMPLETIONS}.${index}.content`,
+            choice.text,
+          );
+        });
+      }
+    }
+
+    span.end();
+  }
+
+  private _shouldSendPrompts() {
+    return this._config.traceContent !== undefined
+      ? this._config.traceContent
+      : true;
+  }
+}
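Net effect of this file: prompt capture is now an instance-level setting rather than a process-wide environment check. The module-scoped wrapPromise, endSpan, and shouldSendPrompts helpers become private class methods, and _shouldSendPrompts consults this._config.traceContent (defaulting to true when unset) instead of TRACELOOP_TRACE_CONTENT. A minimal sketch of driving the new switch directly, assuming standard registration via @opentelemetry/instrumentation (registration is not part of this diff):

import { registerInstrumentations } from "@opentelemetry/instrumentation";
import { OpenAIInstrumentation } from "@traceloop/instrumentation-openai";

// Construct with prompt/completion content capture disabled from the start.
const instrumentation = new OpenAIInstrumentation({ traceContent: false });
registerInstrumentations({ instrumentations: [instrumentation] });

// The overridden setConfig allows flipping the switch later, e.g. per environment.
instrumentation.setConfig({ traceContent: true });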
8 changes: 7 additions & 1 deletion packages/instrumentation-openai/src/types.ts
@@ -1,3 +1,9 @@
 import { InstrumentationConfig } from "@opentelemetry/instrumentation";
 
-export interface OpenAIInstrumentationConfig extends InstrumentationConfig {}
+export interface OpenAIInstrumentationConfig extends InstrumentationConfig {
+  /**
+   * Whether to log prompts, completions and embeddings on traces.
+   * @default true
+   */
+  traceContent?: boolean;
+}
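Because traceContent is optional, existing configurations keep their current behavior; only an explicit false disables prompt capture. A quick sketch of the three possible states (assuming the config type is re-exported from the package root):

import type { OpenAIInstrumentationConfig } from "@traceloop/instrumentation-openai";

const byDefault: OpenAIInstrumentationConfig = {}; // traceContent undefined -> prompts are traced
const enabled: OpenAIInstrumentationConfig = { traceContent: true }; // prompts are traced
const disabled: OpenAIInstrumentationConfig = { traceContent: false }; // prompt content omitted from spans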
1 change: 0 additions & 1 deletion packages/traceloop-sdk/package.json
@@ -34,7 +34,6 @@
   "dependencies": {
     "@opentelemetry/exporter-trace-otlp-proto": "^0.44.0",
     "@opentelemetry/sdk-node": "^0.44.0",
-    "@scarf/scarf": "^1.3.0",
     "@traceloop/instrumentation-openai": "^0.0.22",
     "@types/nunjucks": "^3.2.5",
     "fetch-retry": "^5.0.6",
6 changes: 6 additions & 0 deletions
@@ -35,6 +35,12 @@ export interface InitializeOptions {
    */
   suppressLogs?: boolean;
 
+  /**
+   * Whether to log prompts, completions and embeddings on traces. Optional.
+   * Defaults to true.
+   */
+  traceContent?: boolean;
+
   /**
    * The OpenTelemetry SpanExporter to be used for sending traces data. Optional.
    * Defaults to the OTLP exporter.
7 changes: 7 additions & 0 deletions packages/traceloop-sdk/src/lib/tracing/index.ts
@@ -32,6 +32,13 @@ export const initInstrumentations = () => {
  * @throws {InitializationError} if the configuration is invalid or if failed to fetch feature data.
  */
 export const startTracing = (options: InitializeOptions) => {
+  if (
+    options.traceContent === false ||
+    (process.env.TRACELOOP_TRACE_CONTENT || "true").toLowerCase() === "false"
+  ) {
+    openAIInstrumentation.setConfig({ traceContent: false });
+  }
+
   const traceExporter =
     options.exporter ??
     new OTLPTraceExporter({
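At the SDK layer, startTracing now honors either signal: an explicit traceContent: false in the options, or TRACELOOP_TRACE_CONTENT=false in the environment. Both push the result into the OpenAI instrumentation via setConfig. A sketch of both paths, assuming the SDK's public initialize entry point forwards these InitializeOptions to startTracing:

import * as traceloop from "@traceloop/node-server-sdk";

// Path 1: disable prompt/completion content from code.
traceloop.initialize({ traceContent: false });

// Path 2: leave the options alone and disable via the environment instead:
//   TRACELOOP_TRACE_CONTENT=false node app.js
// Either signal results in openAIInstrumentation.setConfig({ traceContent: false }).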
