Skip to content

Commit

Permalink
Merge branch 'main' into tf/filter-nextjs
Browse files Browse the repository at this point in the history
  • Loading branch information
tomer-friedman committed Dec 7, 2023
2 parents 68a96dc + 8af9342 commit 0be033c
Show file tree
Hide file tree
Showing 9 changed files with 63 additions and 15 deletions.
6 changes: 3 additions & 3 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion packages/instrumentation-openai/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "@traceloop/instrumentation-openai",
"version": "0.0.23",
"version": "0.0.24",
"description": "OpenTelemetry ai-specific semantic conventions",
"main": "dist/src/index.js",
"types": "dist/src/index.d.ts",
Expand Down
9 changes: 7 additions & 2 deletions packages/instrumentation-openai/src/instrumentation.ts
Original file line number Diff line number Diff line change
Expand Up @@ -248,8 +248,13 @@ export class OpenAIInstrumentation extends InstrumentationBase<any> {
params.messages.forEach((message, index) => {
attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.role`] =
message.role;
attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.content`] =
(message.content as string) || "";
if (typeof message.content === "string") {
attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.content`] =
(message.content as string) || "";
} else {
attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.content`] =
JSON.stringify(message.content);
}
});
} else {
if (typeof params.prompt === "string") {
Expand Down
1 change: 1 addition & 0 deletions packages/sample-app/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
"run:decorators": "npm run build && node dist/src/sample_decorators.js",
"run:with": "npm run build && node dist/src/sample_with.js",
"run:prompt_mgmt": "npm run build && node dist/src/sample_prompt_mgmt.js",
"run:sample_vision": "npm run build && node dist/src/sample_vision_prompt.js",
"run:sampler": "npm run build && node dist/src/sample_sampler.js",
"lint": "eslint . --ext .ts",
"lint:fix": "eslint . --ext .ts --fix"
Expand Down
22 changes: 22 additions & 0 deletions packages/sample-app/src/sample_vision_prompt.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
import * as traceloop from "@traceloop/node-server-sdk";
import OpenAI from "openai";

/**
 * Sample: fetch the managed "vision" prompt from Traceloop and run it
 * through the OpenAI chat-completions API, printing the first choice.
 *
 * Requires TRACELOOP_API_KEY in the environment and a prompt named
 * "vision" (taking a `words` variable) registered in Traceloop.
 */
const main = async () => {
  traceloop.initialize({
    // Fix copy-paste error: this sample previously reported itself as
    // "sample_prompt_mgmt"; name it after this file / the run:sample_vision script.
    appName: "sample_vision_prompt",
    apiKey: process.env.TRACELOOP_API_KEY,
    disableBatch: true,
    // Prompt sync must be on so getPrompt() below can resolve managed prompts.
    traceloopSyncEnabled: true,
  });

  // Wait until the SDK has pulled down managed prompts before using them.
  await traceloop.waitForInitialization();

  const openai = new OpenAI();
  const prompt = traceloop.getPrompt("vision", { words: "4" }); // NOTE: ensure prompt exists
  console.log("Fetched prompt: ", prompt);

  const chatCompletion = await openai.chat.completions.create(prompt);
  console.log(chatCompletion.choices[0].message.content);
};

// Entry point is fire-and-forget: handle rejections explicitly so failures
// are reported and reflected in the exit code instead of surfacing as an
// unhandled-rejection warning.
main().catch((err: unknown) => {
  console.error(err);
  process.exitCode = 1;
});
4 changes: 2 additions & 2 deletions packages/traceloop-sdk/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "@traceloop/node-server-sdk",
"version": "0.0.26",
"version": "0.0.27",
"description": "Traceloop Software Development Kit (SDK) for Node.js",
"main": "dist/src/index.js",
"types": "dist/src/index.d.ts",
Expand Down Expand Up @@ -34,7 +34,7 @@
"dependencies": {
"@opentelemetry/exporter-trace-otlp-proto": "^0.44.0",
"@opentelemetry/sdk-node": "^0.44.0",
"@traceloop/instrumentation-openai": "^0.0.23",
"@traceloop/instrumentation-openai": "^0.0.24",
"@types/nunjucks": "^3.2.5",
"fetch-retry": "^5.0.6",
"nunjucks": "^3.2.4",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,16 +5,20 @@ export const TEMPLATING_ENGINE = {
export type TemplatingEngine =
(typeof TEMPLATING_ENGINE)[keyof typeof TEMPLATING_ENGINE];

export type Content =
| { type: "text"; text: string }
| { type: "image_url"; image_url: { url: string } };

export interface PromptMessage {
index: number;
template: string;
template: string | Content[];
role: string;
variables: string[];
}

export interface RenderedMessage {
role: string;
content: string;
content: string | Content[];
}

export interface PromptTarget {
Expand Down
2 changes: 1 addition & 1 deletion packages/traceloop-sdk/src/lib/prompts/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -52,13 +52,13 @@ export const getPrompt = (key: string, variables: Record<string, string>) => {
...promptVersion.llm_config,
prompt: message?.[0]?.content,
};
if (result?.["stop"].length === 0) delete result["stop"];
} else {
result = {
messages: renderMessages(promptVersion, variables),
...promptVersion.llm_config,
};
}
if (result?.["stop"].length === 0) delete result["stop"];
delete result["mode"];

result.extraAttributes = managedPromptTracingAttributes(
Expand Down
24 changes: 20 additions & 4 deletions packages/traceloop-sdk/src/lib/prompts/template.ts
Original file line number Diff line number Diff line change
Expand Up @@ -17,10 +17,26 @@ export const renderMessages = (
if (promptVersion.templating_engine === TEMPLATING_ENGINE.JINJA2) {
return promptVersion.messages.map((message) => {
try {
return {
content: env.renderString(message.template, variables),
role: message.role,
};
if (typeof message.template === "string") {
return {
content: env.renderString(message.template, variables),
role: message.role,
};
} else {
return {
content: message.template.map((content) => {
if (content.type === "text") {
return {
type: "text",
text: env.renderString(content.text, variables),
};
} else {
return content;
}
}),
role: message.role,
};
}
} catch (err) {
throw new TraceloopError(
`Failed to render message template. Missing variables?`,
Expand Down

0 comments on commit 0be033c

Please sign in to comment.