feat: report custom pre/post processing
nirga committed Jan 16, 2024
1 parent 018b242 commit ad63d87
Showing 7 changed files with 79 additions and 10 deletions.
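
Taken together, this commit adds two public helpers, reportPreProcessing(input, output) and reportPostProcessing(input, output), exported from the SDK entry point and recorded as spans by the tracing layer. A minimal usage sketch, assuming the sample app's import convention and an already-initialized SDK; the workflow name, prompt handling, and the placeholder callModel helper below are illustrative, not part of this commit:

import * as traceloop from "@traceloop/node-server-sdk";

// Hypothetical stand-in for a real LLM call (e.g. the OpenAI call in sample_with.ts).
async function callModel(prompt: string): Promise<string> {
  return `Joke about: ${prompt}\nExplanation: ...`;
}

async function answer(rawPrompt: string) {
  return await traceloop.withWorkflow("sample_answer", async () => {
    // Pre-processing: normalize the prompt before it is sent to the model,
    // and report the before/after strings.
    const cleanedPrompt = rawPrompt.trim();
    traceloop.reportPreProcessing(rawPrompt, cleanedPrompt);

    const completion = await callModel(cleanedPrompt);

    // Post-processing: keep only the first line of the completion,
    // and report the full and trimmed outputs.
    const firstLine = completion.split("\n")[0];
    traceloop.reportPostProcessing(completion, firstLine);

    return firstLine;
  });
}
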
5 changes: 3 additions & 2 deletions package-lock.json

Some generated files are not rendered by default.

10 changes: 10 additions & 0 deletions packages/ai-semantic-conventions/src/SemanticAttributes.ts
@@ -40,6 +40,11 @@ export const SpanAttributes = {
TRACELOOP_WORKFLOW_NAME: "traceloop.workflow.name",
TRACELOOP_ENTITY_NAME: "traceloop.entity.name",
TRACELOOP_ASSOCIATION_PROPERTIES: "traceloop.association.properties",

// LLM Pre/Post Processing
TRACELOOP_CUSTOM_PROCESSING_STEP_TYPE: "traceloop.processing.type",
TRACELOOP_CUSTOM_PROCESSING_INPUT: "traceloop.processing.input",
TRACELOOP_CUSTOM_PROCESSING_OUTPUT: "traceloop.processing.output",
};

export enum LLMRequestTypeValues {
@@ -56,3 +61,8 @@ export enum TraceloopSpanKindValues {
TOOL = "tool",
UNKNOWN = "unknown",
}

export enum TraceloopCustomProcessingStepTypeValues {
PRE_PROCESSING = "pre_process",
POST_PROCESSING = "post_process",
}
16 changes: 14 additions & 2 deletions packages/sample-app/src/sample_with.ts
@@ -12,12 +12,24 @@ async function chat() {
return await traceloop.withWorkflow("sample_chat", async () => {
const chatCompletion = await openai.chat.completions.create({
messages: [
{ role: "user", content: "Tell me a joke about OpenTelemetry" },
{
role: "user",
content:
"Tell me a joke about OpenTelemetry and explain in a separate line",
},
],
model: "gpt-3.5-turbo",
});

return chatCompletion.choices[0].message.content;
const jokeWithExplanation = chatCompletion.choices[0].message.content;
const onlyExplanation = jokeWithExplanation?.split("Explanation:")[1];

traceloop.reportPostProcessing(
jokeWithExplanation || "",
onlyExplanation || "",
);

return onlyExplanation;
});
}

5 changes: 3 additions & 2 deletions packages/traceloop-sdk/package.json
@@ -34,8 +34,9 @@
"dependencies": {
"@opentelemetry/exporter-trace-otlp-proto": "^0.44.0",
"@opentelemetry/sdk-node": "^0.44.0",
"@traceloop/instrumentation-llamaindex": "^0.0.31",
"@traceloop/instrumentation-openai": "^0.0.31",
"@traceloop/ai-semantic-conventions": "*",
"@traceloop/instrumentation-llamaindex": "*",
"@traceloop/instrumentation-openai": "*",
"@types/nunjucks": "^3.2.5",
"fetch-retry": "^5.0.6",
"nunjucks": "^3.2.4",
6 changes: 5 additions & 1 deletion packages/traceloop-sdk/src/lib/node-server-sdk.ts
@@ -3,7 +3,11 @@ import { initInstrumentations } from "./tracing";
export * from "./errors";
export { InitializeOptions } from "./interfaces";
export { initialize } from "./configuration";
export { forceFlush } from "./tracing";
export {
forceFlush,
reportPreProcessing,
reportPostProcessing,
} from "./tracing";
export * from "./tracing/decorators";
export * from "./tracing/association";
export * from "./tracing/score";
27 changes: 25 additions & 2 deletions packages/traceloop-sdk/src/lib/tracing/index.ts
@@ -11,8 +11,15 @@ import { Instrumentation } from "@opentelemetry/instrumentation";
import { InitializeOptions } from "../interfaces";
import { OpenAIInstrumentation } from "@traceloop/instrumentation-openai";
import { LlamaIndexInstrumentation } from "@traceloop/instrumentation-llamaindex";
import { SpanAttributes } from "@traceloop/ai-semantic-conventions";
import { ASSOCATION_PROPERTIES_KEY, WORKFLOW_NAME_KEY } from "./tracing";
import {
SpanAttributes,
TraceloopCustomProcessingStepTypeValues,
} from "@traceloop/ai-semantic-conventions";
import {
ASSOCATION_PROPERTIES_KEY,
WORKFLOW_NAME_KEY,
reportCustomProcessing,
} from "./tracing";
import { Telemetry } from "../telemetry/telemetry";
import { TraceloopSampler } from "./sampler";

@@ -113,3 +120,19 @@ export const startTracing = (options: InitializeOptions) => {
export const forceFlush = async () => {
await _spanProcessor.forceFlush();
};

export const reportPreProcessing = (input: string, output: string) => {
reportCustomProcessing(
TraceloopCustomProcessingStepTypeValues.PRE_PROCESSING,
input,
output,
);
};

export const reportPostProcessing = (input: string, output: string) => {
reportCustomProcessing(
TraceloopCustomProcessingStepTypeValues.POST_PROCESSING,
input,
output,
);
};
20 changes: 19 additions & 1 deletion packages/traceloop-sdk/src/lib/tracing/tracing.ts
@@ -1,5 +1,8 @@
import { trace, createContextKey } from "@opentelemetry/api";

import {
SpanAttributes,
TraceloopCustomProcessingStepTypeValues,
} from "@traceloop/ai-semantic-conventions";
const TRACER_NAME = "traceloop.tracer";
export const WORKFLOW_NAME_KEY = createContextKey("workflow_name");
export const ASSOCATION_PROPERTIES_KEY = createContextKey(
@@ -9,3 +12,18 @@ export const ASSOCATION_PROPERTIES_KEY = createContextKey(
export const getTracer = () => {
return trace.getTracer(TRACER_NAME);
};

export const reportCustomProcessing = (
kind: TraceloopCustomProcessingStepTypeValues,
input: string,
output: string,
) => {
const s = getTracer().startSpan(`custom.${kind}`, {
attributes: {
[SpanAttributes.TRACELOOP_CUSTOM_PROCESSING_STEP_TYPE]: kind,
[SpanAttributes.TRACELOOP_CUSTOM_PROCESSING_INPUT]: input,
[SpanAttributes.TRACELOOP_CUSTOM_PROCESSING_OUTPUT]: output,
},
});
s.end();
};
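
For reference, the exported wrappers in tracing/index.ts funnel into this helper, so the sample app's reportPostProcessing(jokeWithExplanation, onlyExplanation) call should, per this diff, emit a span shaped roughly like the sketch below (attribute values abbreviated; inferred from the code above, not captured exporter output):

// name: custom.post_process
// attributes:
//   traceloop.processing.type:   "post_process"
//   traceloop.processing.input:  <the full joke including its explanation>
//   traceloop.processing.output: <the explanation only>
//
// The span is started and ended immediately, so it records the processing step
// as a point-in-time marker rather than a timed operation.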
