From f9250cb113400a10483a7869c4ce5c04a5432b90 Mon Sep 17 00:00:00 2001 From: Sam Lijin Date: Fri, 28 Jun 2024 18:28:01 -0700 Subject: [PATCH 1/2] try to fix typescript tracing --- engine/baml-runtime/src/tracing/mod.rs | 36 ++++-- .../src/typescript/generate_types.rs | 4 +- .../src/typescript/mod.rs | 2 +- .../src/typescript/templates/client.js.j2 | 76 ------------- .../typescript/templates/type_builder.js.j2 | 67 ------------ .../src/typescript/templates/types.js.j2 | 20 ---- .../async_context_vars.d.ts | 6 +- .../async_context_vars.d.ts.map | 2 +- .../async_context_vars.js | 60 +++++----- engine/language_client_typescript/index.d.ts | 4 +- .../language_client_typescript/index.d.ts.map | 2 +- engine/language_client_typescript/index.js | 2 +- .../typescript_src/async_context_vars.ts | 57 +++++----- .../typescript_src/index.ts | 11 +- integ-tests/baml_src/clients.baml | 25 +++-- integ-tests/python/baml_client/inlinedbaml.py | 2 +- integ-tests/ruby/baml_client/inlined.rb | 2 +- integ-tests/typescript/baml_client/client.ts | 6 +- .../typescript/baml_client/inlinedbaml.ts | 2 +- .../typescript/tests/integ-tests.test.ts | 103 +++++++++++++++++- root.code-workspace | 4 +- tools/build | 13 ++- 22 files changed, 238 insertions(+), 268 deletions(-) delete mode 100644 engine/language-client-codegen/src/typescript/templates/client.js.j2 delete mode 100644 engine/language-client-codegen/src/typescript/templates/type_builder.js.j2 delete mode 100644 engine/language-client-codegen/src/typescript/templates/types.js.j2 diff --git a/engine/baml-runtime/src/tracing/mod.rs b/engine/baml-runtime/src/tracing/mod.rs index c4e925c25..2934833b9 100644 --- a/engine/baml-runtime/src/tracing/mod.rs +++ b/engine/baml-runtime/src/tracing/mod.rs @@ -1,12 +1,9 @@ pub mod api_wrapper; -#[cfg(not(target_arch = "wasm32"))] -mod threaded_tracer; -#[cfg(target_arch = "wasm32")] -mod wasm_tracer; use crate::on_log_event::LogEventCallbackSync; use anyhow::Result; use baml_types::{BamlMap, BamlMediaType, BamlValue}; +use cfg_if::cfg_if; use colored::Colorize; use internal_baml_jinja::RenderedPrompt; use std::collections::HashMap; @@ -27,16 +24,17 @@ use self::api_wrapper::{ }, APIWrapper, }; -#[cfg(not(target_arch = "wasm32"))] -use self::threaded_tracer::ThreadedTracer; -#[cfg(target_arch = "wasm32")] -use self::wasm_tracer::NonThreadedTracer; +cfg_if! 
{ + if #[cfg(target_arch = "wasm32")] { + mod wasm_tracer; + use self::wasm_tracer::NonThreadedTracer as TracerImpl; + } else { + mod threaded_tracer; + use self::threaded_tracer::ThreadedTracer as TracerImpl; + } +} -#[cfg(not(target_arch = "wasm32"))] -type TracerImpl = ThreadedTracer; -#[cfg(target_arch = "wasm32")] -type TracerImpl = NonThreadedTracer; #[derive(Debug)] pub struct TracingSpan { span_id: Uuid, @@ -95,6 +93,7 @@ impl BamlTracer { params: &BamlMap, ) -> (Option, RuntimeContext) { let span_id = ctx.enter(function_name); + log::trace!("Entering span: {:#?}:::{:?}", span_id, function_name); if !self.enabled { return (None, ctx.create_ctx(tb)); } @@ -150,6 +149,12 @@ impl BamlTracer { ctx ); }; + log::trace!( + "Finishing span: {:#?} {}\nevent chain {:?}", + span, + span_id, + event_chain + ); if span.span_id != span_id { anyhow::bail!("Span ID mismatch: {} != {}", span.span_id, span_id); @@ -211,6 +216,13 @@ impl BamlTracer { anyhow::bail!("Attempting to finish a span without first starting one"); }; + log::trace!( + "Finishing baml span: {:#?} {}\nevent chain {:?}", + span, + span_id, + event_chain + ); + if span.span_id != span_id { anyhow::bail!("Span ID mismatch: {} != {}", span.span_id, span_id); } diff --git a/engine/language-client-codegen/src/typescript/generate_types.rs b/engine/language-client-codegen/src/typescript/generate_types.rs index 8038be2b9..8f67d1693 100644 --- a/engine/language-client-codegen/src/typescript/generate_types.rs +++ b/engine/language-client-codegen/src/typescript/generate_types.rs @@ -7,14 +7,14 @@ use crate::GeneratorArgs; use super::ToTypeReferenceInClientDefinition; #[derive(askama::Template)] -#[template(path = "type_builder.js.j2", escape = "none")] +#[template(path = "type_builder.ts.j2", escape = "none")] pub(crate) struct TypeBuilder<'ir> { enums: Vec>, classes: Vec>, } #[derive(askama::Template)] -#[template(path = "types.js.j2", escape = "none")] +#[template(path = "types.ts.j2", escape = "none")] pub(crate) struct TypescriptTypes<'ir> { enums: Vec>, classes: Vec>, diff --git a/engine/language-client-codegen/src/typescript/mod.rs b/engine/language-client-codegen/src/typescript/mod.rs index 64da56fa3..00b5226f4 100644 --- a/engine/language-client-codegen/src/typescript/mod.rs +++ b/engine/language-client-codegen/src/typescript/mod.rs @@ -12,7 +12,7 @@ use self::typescript_language_features::{ToTypescript, TypescriptLanguageFeature use crate::dir_writer::FileCollector; #[derive(askama::Template)] -#[template(path = "client.js.j2", escape = "none")] +#[template(path = "client.ts.j2", escape = "none")] struct TypescriptClient { funcs: Vec, types: Vec, diff --git a/engine/language-client-codegen/src/typescript/templates/client.js.j2 b/engine/language-client-codegen/src/typescript/templates/client.js.j2 deleted file mode 100644 index 4982c5809..000000000 --- a/engine/language-client-codegen/src/typescript/templates/client.js.j2 +++ /dev/null @@ -1,76 +0,0 @@ -import { BamlRuntime, FunctionResult, BamlCtxManager, BamlStream, Image } from "@boundaryml/baml" -import { - {%- for t in types %}{{ t }}{% if !loop.last %}, {% endif %}{% endfor -%} -} from "./types" -import TypeBuilder from "./type_builder" - -export type RecursivePartialNull = T extends object - ? 
{ - [P in keyof T]?: RecursivePartialNull; - } - : T | null; - -export class BamlClient { - private stream_client: BamlStreamClient - - constructor(private runtime: BamlRuntime, private ctx_manager: BamlCtxManager) { - this.stream_client = new BamlStreamClient(runtime, ctx_manager) - } - - get stream() { - return this.stream_client - } - - {% for fn in funcs %} - async {{ fn.name }}( - {% for (name, optional, type) in fn.args -%} - {{name}}{% if optional %}?{% endif %}: {{type}}, - {%- endfor %} - __baml_options__?: { tb?: TypeBuilder } - ): Promise<{{fn.return_type}}> { - const raw = await this.runtime.callFunction( - "{{fn.name}}", - { - {% for (name, optional, type) in fn.args -%} - "{{name}}": {{name}}{% if optional %}?? null{% endif %}{% if !loop.last %},{% endif %} - {%- endfor %} - }, - this.ctx_manager.get(), - __baml_options__?.tb?.__tb(), - ) - return raw.parsed() as {{fn.return_type}} - } - {% endfor %} -} - -class BamlStreamClient { - constructor(private runtime: BamlRuntime, private ctx_manager: BamlCtxManager) {} - - {% for fn in funcs %} - {{ fn.name }}( - {% for (name, optional, type) in fn.args -%} - {{name}}{% if optional %}?{% endif %}: {{type}}, - {%- endfor %} - __baml_options__?: { tb?: TypeBuilder } - ): BamlStream, {{ fn.return_type }}> { - const raw = this.runtime.streamFunction( - "{{fn.name}}", - { - {% for (name, optional, type) in fn.args -%} - "{{name}}": {{name}}{% if optional %} ?? null{% endif %}{% if !loop.last %},{% endif %} - {%- endfor %} - }, - undefined, - this.ctx_manager.get(), - __baml_options__?.tb?.__tb(), - ) - return new BamlStream, {{ fn.return_type }}>( - raw, - (a): a is RecursivePartialNull<{{ fn.return_type }}> => a, - (a): a is {{ fn.return_type }} => a, - this.ctx_manager.get(), - __baml_options__?.tb?.__tb(), - ) - } - {% endfor %} -} \ No newline at end of file diff --git a/engine/language-client-codegen/src/typescript/templates/type_builder.js.j2 b/engine/language-client-codegen/src/typescript/templates/type_builder.js.j2 deleted file mode 100644 index 703d0173c..000000000 --- a/engine/language-client-codegen/src/typescript/templates/type_builder.js.j2 +++ /dev/null @@ -1,67 +0,0 @@ -import { FieldType } from '@boundaryml/baml/native' -import { TypeBuilder as _TypeBuilder, EnumBuilder, ClassBuilder } from '@boundaryml/baml/type_builder' - -export default class TypeBuilder { - private tb: _TypeBuilder; - {% for cls in classes %}{% if cls.dynamic %} - {{cls.name}}: ClassBuilder<'{{cls.name}}' - {%- for (name, _, _) in cls.fields %}{% if loop.first %}, {%endif%}"{{name}}"{% if !loop.last %} | {% endif %}{% endfor -%} - >; - {% endif %}{% endfor %} - {% for enum in enums %}{% if enum.dynamic %} - {{enum.name}}: EnumBuilder<'{{enum.name}}'{%- for value in enum.values %}{% if loop.first %}, {%endif%}"{{value}}"{% if !loop.last %} | {% endif %}{% endfor -%}>; - {% endif %}{% endfor %} - - constructor() { - this.tb = new _TypeBuilder({ - classes: new Set([ - {% for cls in classes %}"{{cls.name}}",{% endfor %} - ]), - enums: new Set([ - {% for enum in enums %}"{{enum.name}}",{% endfor %} - ]) - }); - {% for cls in classes %}{% if cls.dynamic %} - this.{{cls.name}} = this.tb.classBuilder("{{cls.name}}", [ - {% for (name, _, _) in cls.fields %}"{{name}}",{% endfor %} - ]); - {% endif %}{% endfor %} - {% for enum in enums %}{% if enum.dynamic %} - this.{{enum.name}} = this.tb.enumBuilder("{{enum.name}}", [ - {% for value in enum.values %}"{{value}}",{% endfor %} - ]); - {% endif %}{% endfor %} - } - - __tb() { - return this.tb._tb(); - } - - 
string(): FieldType { - return this.tb.string() - } - - int(): FieldType { - return this.tb.int() - } - - float(): FieldType { - return this.tb.float() - } - - bool(): FieldType { - return this.tb.bool() - } - - list(type: FieldType): FieldType { - return this.tb.list(type) - } - - addClass(name: Name): ClassBuilder { - return this.tb.addClass(name); - } - - addEnum(name: Name): EnumBuilder { - return this.tb.addEnum(name); - } -} diff --git a/engine/language-client-codegen/src/typescript/templates/types.js.j2 b/engine/language-client-codegen/src/typescript/templates/types.js.j2 deleted file mode 100644 index f560952ce..000000000 --- a/engine/language-client-codegen/src/typescript/templates/types.js.j2 +++ /dev/null @@ -1,20 +0,0 @@ -import { Image } from "@boundaryml/baml" - -{%- for enum in enums %} -export enum {{enum.name}} { - {%- for value in enum.values %} - {{ value }} = "{{ value }}", - {%- endfor %} -} -{% endfor %} - -{%- for cls in classes %} -export interface {{cls.name}} { - {%- for (name, optional, type) in cls.fields %} - {{name}}{% if optional %}?{% endif %}: {{type}} - {%- endfor %} - {% if cls.dynamic %} - [key: string]: any; - {%- endif %} -} -{% endfor %} \ No newline at end of file diff --git a/engine/language_client_typescript/async_context_vars.d.ts b/engine/language_client_typescript/async_context_vars.d.ts index 61cf8de62..4c62996e6 100644 --- a/engine/language_client_typescript/async_context_vars.d.ts +++ b/engine/language_client_typescript/async_context_vars.d.ts @@ -1,12 +1,12 @@ import { BamlSpan, RuntimeContextManager, BamlRuntime, BamlLogEvent } from './native'; -export declare class CtxManager { +export declare class BamlCtxManager { private rt; private ctx; constructor(rt: BamlRuntime); upsertTags(tags: Record): void; get(): RuntimeContextManager; - startTraceSync(name: string, args: Record): BamlSpan; - startTraceAsync(name: string, args: Record): BamlSpan; + startTraceSync(name: string, args: Record): [RuntimeContextManager, BamlSpan]; + startTraceAsync(name: string, args: Record): [RuntimeContextManager, BamlSpan]; endTrace(span: BamlSpan, response: any): void; flush(): void; onLogEvent(callback: (event: BamlLogEvent) => void): void; diff --git a/engine/language_client_typescript/async_context_vars.d.ts.map b/engine/language_client_typescript/async_context_vars.d.ts.map index 3d7ae88ad..d622d4dc7 100644 --- a/engine/language_client_typescript/async_context_vars.d.ts.map +++ b/engine/language_client_typescript/async_context_vars.d.ts.map @@ -1 +1 @@ -{"version":3,"file":"async_context_vars.d.ts","sourceRoot":"","sources":["typescript_src/async_context_vars.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAE,qBAAqB,EAAE,WAAW,EAAE,YAAY,EAAE,MAAM,UAAU,CAAA;AAGrF,qBAAa,UAAU;IACrB,OAAO,CAAC,EAAE,CAAa;IACvB,OAAO,CAAC,GAAG,CAA0C;gBAEzC,EAAE,EAAE,WAAW;IAS3B,UAAU,CAAC,IAAI,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,GAAG,IAAI;IAK9C,GAAG,IAAI,qBAAqB;IAS5B,cAAc,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,QAAQ;IAOjE,eAAe,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,QAAQ;IAOlE,QAAQ,CAAC,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,GAAG,GAAG,IAAI;IAS7C,KAAK,IAAI,IAAI;IAIb,UAAU,CAAC,QAAQ,EAAE,CAAC,KAAK,EAAE,YAAY,KAAK,IAAI,GAAG,IAAI;IAQzD,WAAW,CAAC,UAAU,EAAE,CAAC,SAAS,CAAC,GAAG,IAAI,EAAE,GAAG,EAAE,KAAK,UAAU,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC,GAAG,CAAC;IAsB3F,WAAW,CAAC,UAAU,EAAE,CAAC,SAAS,CAAC,GAAG,IAAI,EAAE,GAAG,EAAE,KAAK,OAAO,CAAC,UAAU,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC,GAAG,CAAC;CAqBrG"} \ No newline at end of file 
+{"version":3,"file":"async_context_vars.d.ts","sourceRoot":"","sources":["typescript_src/async_context_vars.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAE,qBAAqB,EAAE,WAAW,EAAE,YAAY,EAAE,MAAM,UAAU,CAAA;AAGrF,qBAAa,cAAc;IACzB,OAAO,CAAC,EAAE,CAAa;IACvB,OAAO,CAAC,GAAG,CAA0C;gBAEzC,EAAE,EAAE,WAAW;IAS3B,UAAU,CAAC,IAAI,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,GAAG,IAAI;IAK9C,GAAG,IAAI,qBAAqB;IAS5B,cAAc,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,CAAC,qBAAqB,EAAE,QAAQ,CAAC;IAK1F,eAAe,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,CAAC,qBAAqB,EAAE,QAAQ,CAAC;IAK3F,QAAQ,CAAC,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,GAAG,GAAG,IAAI;IAS7C,KAAK,IAAI,IAAI;IAIb,UAAU,CAAC,QAAQ,EAAE,CAAC,KAAK,EAAE,YAAY,KAAK,IAAI,GAAG,IAAI;IAQzD,WAAW,CAAC,UAAU,EAAE,CAAC,SAAS,CAAC,GAAG,IAAI,EAAE,GAAG,EAAE,KAAK,UAAU,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC,GAAG,CAAC;IAuB3F,WAAW,CAAC,UAAU,EAAE,CAAC,SAAS,CAAC,GAAG,IAAI,EAAE,GAAG,EAAE,KAAK,OAAO,CAAC,UAAU,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC,GAAG,CAAC;CAuBrG"} \ No newline at end of file diff --git a/engine/language_client_typescript/async_context_vars.js b/engine/language_client_typescript/async_context_vars.js index 2c1a77f0d..85f218f6b 100644 --- a/engine/language_client_typescript/async_context_vars.js +++ b/engine/language_client_typescript/async_context_vars.js @@ -1,9 +1,9 @@ "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); -exports.CtxManager = void 0; +exports.BamlCtxManager = void 0; const native_1 = require("./native"); const async_hooks_1 = require("async_hooks"); -class CtxManager { +class BamlCtxManager { rt; ctx; constructor(rt) { @@ -28,15 +28,11 @@ class CtxManager { } startTraceSync(name, args) { const mng = this.get(); - // const clone = mng.deepClone() - // this.ctx.enterWith(clone) - return native_1.BamlSpan.new(this.rt, name, args, mng); + return [mng, native_1.BamlSpan.new(this.rt, name, args, mng)]; } startTraceAsync(name, args) { - const mng = this.get(); - const clone = mng.deepClone(); - this.ctx.enterWith(clone); - return native_1.BamlSpan.new(this.rt, name, args, clone); + const mng = this.get().deepClone(); + return [mng, native_1.BamlSpan.new(this.rt, name, args, mng)]; } endTrace(span, response) { const manager = this.ctx.getStore(); @@ -62,16 +58,18 @@ class CtxManager { ...acc, [`arg${i}`]: arg, // generic way to label args }), {}); - const span = this.startTraceSync(name, params); - try { - const response = func(...args); - this.endTrace(span, response); - return response; - } - catch (e) { - this.endTrace(span, e); - throw e; - } + const [mng, span] = this.startTraceSync(name, params); + this.ctx.run(mng, () => { + try { + const response = func(...args); + this.endTrace(span, response); + return response; + } + catch (e) { + this.endTrace(span, e); + throw e; + } + }); }); } traceFnAync(name, func) { @@ -81,17 +79,19 @@ class CtxManager { ...acc, [`arg${i}`]: arg, // generic way to label args }), {}); - const span = this.startTraceAsync(funcName, params); - try { - const response = await func(...args); - this.endTrace(span, response); - return response; - } - catch (e) { - this.endTrace(span, e); - throw e; - } + const [mng, span] = this.startTraceAsync(name, params); + await this.ctx.run(mng, async () => { + try { + const response = await func(...args); + this.endTrace(span, response); + return response; + } + catch (e) { + this.endTrace(span, e); + throw e; + } + }); }); } } -exports.CtxManager = CtxManager; +exports.BamlCtxManager = BamlCtxManager; diff --git 
a/engine/language_client_typescript/index.d.ts b/engine/language_client_typescript/index.d.ts index cb2886eea..d7fb1b238 100644 --- a/engine/language_client_typescript/index.d.ts +++ b/engine/language_client_typescript/index.d.ts @@ -1,4 +1,4 @@ -export { BamlRuntime, FunctionResult, FunctionResultStream, BamlImage as Image, BamlAudio as Audio, invoke_runtime_cli } from './native'; +export { BamlRuntime, FunctionResult, FunctionResultStream, BamlImage as Image, BamlAudio as Audio, invoke_runtime_cli, } from './native'; export { BamlStream } from './stream'; -export { CtxManager as BamlCtxManager } from './async_context_vars'; +export { BamlCtxManager } from './async_context_vars'; //# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/engine/language_client_typescript/index.d.ts.map b/engine/language_client_typescript/index.d.ts.map index bc7bb3b91..3640d5f40 100644 --- a/engine/language_client_typescript/index.d.ts.map +++ b/engine/language_client_typescript/index.d.ts.map @@ -1 +1 @@ -{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["typescript_src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,cAAc,EAAE,oBAAoB,EAAE,SAAS,IAAI,KAAK,EAAE,SAAS,IAAI,KAAK,EAAE,kBAAkB,EAAE,MAAM,UAAU,CAAA;AACxI,OAAO,EAAE,UAAU,EAAE,MAAM,UAAU,CAAA;AACrC,OAAO,EAAE,UAAU,IAAI,cAAc,EAAE,MAAM,sBAAsB,CAAA"} \ No newline at end of file +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["typescript_src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,WAAW,EACX,cAAc,EACd,oBAAoB,EACpB,SAAS,IAAI,KAAK,EAClB,SAAS,IAAI,KAAK,EAClB,kBAAkB,GACnB,MAAM,UAAU,CAAA;AACjB,OAAO,EAAE,UAAU,EAAE,MAAM,UAAU,CAAA;AACrC,OAAO,EAAE,cAAc,EAAE,MAAM,sBAAsB,CAAA"} \ No newline at end of file diff --git a/engine/language_client_typescript/index.js b/engine/language_client_typescript/index.js index 4379795f9..1aac1f0cf 100644 --- a/engine/language_client_typescript/index.js +++ b/engine/language_client_typescript/index.js @@ -11,4 +11,4 @@ Object.defineProperty(exports, "invoke_runtime_cli", { enumerable: true, get: fu var stream_1 = require("./stream"); Object.defineProperty(exports, "BamlStream", { enumerable: true, get: function () { return stream_1.BamlStream; } }); var async_context_vars_1 = require("./async_context_vars"); -Object.defineProperty(exports, "BamlCtxManager", { enumerable: true, get: function () { return async_context_vars_1.CtxManager; } }); +Object.defineProperty(exports, "BamlCtxManager", { enumerable: true, get: function () { return async_context_vars_1.BamlCtxManager; } }); diff --git a/engine/language_client_typescript/typescript_src/async_context_vars.ts b/engine/language_client_typescript/typescript_src/async_context_vars.ts index a33f5c90c..5481cbcc9 100644 --- a/engine/language_client_typescript/typescript_src/async_context_vars.ts +++ b/engine/language_client_typescript/typescript_src/async_context_vars.ts @@ -1,7 +1,7 @@ import { BamlSpan, RuntimeContextManager, BamlRuntime, BamlLogEvent } from './native' import { AsyncLocalStorage } from 'async_hooks' -export class CtxManager { +export class BamlCtxManager { private rt: BamlRuntime private ctx: AsyncLocalStorage @@ -28,18 +28,14 @@ export class CtxManager { return store } - startTraceSync(name: string, args: Record): BamlSpan { + startTraceSync(name: string, args: Record): [RuntimeContextManager, BamlSpan] { const mng = this.get() - // const clone = mng.deepClone() - // this.ctx.enterWith(clone) - return BamlSpan.new(this.rt, name, args, mng) + return [mng, BamlSpan.new(this.rt, name, args, mng)] } - 
startTraceAsync(name: string, args: Record): BamlSpan { - const mng = this.get() - const clone = mng.deepClone() - this.ctx.enterWith(clone) - return BamlSpan.new(this.rt, name, args, clone) + startTraceAsync(name: string, args: Record): [RuntimeContextManager, BamlSpan] { + const mng = this.get().deepClone() + return [mng, BamlSpan.new(this.rt, name, args, mng)] } endTrace(span: BamlSpan, response: any): void { @@ -72,16 +68,17 @@ export class CtxManager { }), {}, ) - const span = this.startTraceSync(name, params) - - try { - const response = func(...args) - this.endTrace(span, response) - return response - } catch (e) { - this.endTrace(span, e) - throw e - } + const [mng, span] = this.startTraceSync(name, params) + this.ctx.run(mng, () => { + try { + const response = func(...args) + this.endTrace(span, response) + return response + } catch (e) { + this.endTrace(span, e) + throw e + } + }) }) } @@ -95,15 +92,17 @@ export class CtxManager { }), {}, ) - const span = this.startTraceAsync(funcName, params) - try { - const response = await func(...args) - this.endTrace(span, response) - return response - } catch (e) { - this.endTrace(span, e) - throw e - } + const [mng, span] = this.startTraceAsync(name, params) + await this.ctx.run(mng, async () => { + try { + const response = await func(...args) + this.endTrace(span, response) + return response + } catch (e) { + this.endTrace(span, e) + throw e + } + }) }) } } diff --git a/engine/language_client_typescript/typescript_src/index.ts b/engine/language_client_typescript/typescript_src/index.ts index 3ee94afce..6e19e66b6 100644 --- a/engine/language_client_typescript/typescript_src/index.ts +++ b/engine/language_client_typescript/typescript_src/index.ts @@ -1,3 +1,10 @@ -export { BamlRuntime, FunctionResult, FunctionResultStream, BamlImage as Image, BamlAudio as Audio, invoke_runtime_cli } from './native' +export { + BamlRuntime, + FunctionResult, + FunctionResultStream, + BamlImage as Image, + BamlAudio as Audio, + invoke_runtime_cli, +} from './native' export { BamlStream } from './stream' -export { CtxManager as BamlCtxManager } from './async_context_vars' +export { BamlCtxManager } from './async_context_vars' diff --git a/integ-tests/baml_src/clients.baml b/integ-tests/baml_src/clients.baml index df695ede8..2b9a04030 100644 --- a/integ-tests/baml_src/clients.baml +++ b/integ-tests/baml_src/clients.baml @@ -84,18 +84,27 @@ client Gemini { } client AwsBedrock { - provider aws-bedrock + provider anthropic options { - inference_configuration { - max_tokens 100 - } - model_id "anthropic.claude-3-haiku-20240307-v1:0" - // model_id "meta.llama3-8b-instruct-v1:0" - // model_id "mistral.mistral-7b-instruct-v0:2" - api_key "" + model claude-3-haiku-20240307 + api_key env.ANTHROPIC_API_KEY + max_tokens 1000 } } +// client AwsBedrock { +// provider aws-bedrock +// options { +// inference_configuration { +// max_tokens 100 +// } +// model_id "anthropic.claude-3-haiku-20240307-v1:0" +// // model_id "meta.llama3-8b-instruct-v1:0" +// // model_id "mistral.mistral-7b-instruct-v0:2" +// api_key "" +// } +// } + client Claude { provider anthropic options { diff --git a/integ-tests/python/baml_client/inlinedbaml.py b/integ-tests/python/baml_client/inlinedbaml.py index c5fbb0df8..660688839 100644 --- a/integ-tests/python/baml_client/inlinedbaml.py +++ b/integ-tests/python/baml_client/inlinedbaml.py @@ -16,7 +16,7 @@ file_map = { - "clients.baml": "retry_policy Bar {\n max_retries 3\n strategy {\n type exponential_backoff\n }\n}\n\nretry_policy Foo {\n max_retries 
3\n strategy {\n type constant_delay\n delay_ms 100\n }\n}\n\nclient GPT4 {\n provider openai\n options {\n model gpt-4o\n api_key env.OPENAI_API_KEY\n }\n} \n\n\nclient GPT4o {\n provider openai\n options {\n model gpt-4o\n api_key env.OPENAI_API_KEY\n }\n} \n\n\nclient GPT4Turbo {\n retry_policy Bar\n provider openai\n options {\n model gpt-4-turbo\n api_key env.OPENAI_API_KEY\n }\n} \n\nclient GPT35 {\n provider openai\n options {\n model \"gpt-3.5-turbo\"\n api_key env.OPENAI_API_KEY\n }\n}\n\nclient GPT35LegacyProvider {\n provider openai\n options {\n model \"gpt-3.5-turbo\"\n api_key env.OPENAI_API_KEY\n }\n}\n\n\nclient Ollama {\n provider ollama\n options {\n model llama2\n }\n}\n\nclient GPT35Azure {\n provider azure-openai\n options {\n resource_name \"west-us-azure-baml\"\n deployment_id \"gpt-35-turbo-default\"\n // base_url \"https://west-us-azure-baml.openai.azure.com/openai/deployments/gpt-35-turbo-default\"\n api_version \"2024-02-01\"\n api_key env.AZURE_OPENAI_API_KEY\n }\n}\n\nclient Gemini {\n provider google-ai\n options {\n model \"gemini-1.5-pro-001\"\n api_key env.GOOGLE_API_KEY\n }\n}\n\nclient AwsBedrock {\n provider aws-bedrock\n options {\n inference_configuration {\n max_tokens 100\n }\n model_id \"anthropic.claude-3-haiku-20240307-v1:0\"\n // model_id \"meta.llama3-8b-instruct-v1:0\"\n // model_id \"mistral.mistral-7b-instruct-v0:2\"\n api_key \"\"\n }\n}\n\nclient Claude {\n provider anthropic\n options {\n model claude-3-haiku-20240307\n api_key env.ANTHROPIC_API_KEY\n max_tokens 1000\n }\n}\n\nclient Resilient_SimpleSyntax {\n retry_policy Foo\n provider baml-fallback\n options {\n strategy [\n GPT4Turbo\n GPT35\n Lottery_SimpleSyntax\n ]\n }\n} \n \nclient Lottery_SimpleSyntax {\n provider baml-round-robin\n options {\n start 0\n strategy [\n GPT35\n Claude\n ]\n }\n}\n", + "clients.baml": "retry_policy Bar {\n max_retries 3\n strategy {\n type exponential_backoff\n }\n}\n\nretry_policy Foo {\n max_retries 3\n strategy {\n type constant_delay\n delay_ms 100\n }\n}\n\nclient GPT4 {\n provider openai\n options {\n model gpt-4o\n api_key env.OPENAI_API_KEY\n }\n} \n\n\nclient GPT4o {\n provider openai\n options {\n model gpt-4o\n api_key env.OPENAI_API_KEY\n }\n} \n\n\nclient GPT4Turbo {\n retry_policy Bar\n provider openai\n options {\n model gpt-4-turbo\n api_key env.OPENAI_API_KEY\n }\n} \n\nclient GPT35 {\n provider openai\n options {\n model \"gpt-3.5-turbo\"\n api_key env.OPENAI_API_KEY\n }\n}\n\nclient GPT35LegacyProvider {\n provider openai\n options {\n model \"gpt-3.5-turbo\"\n api_key env.OPENAI_API_KEY\n }\n}\n\n\nclient Ollama {\n provider ollama\n options {\n model llama2\n }\n}\n\nclient GPT35Azure {\n provider azure-openai\n options {\n resource_name \"west-us-azure-baml\"\n deployment_id \"gpt-35-turbo-default\"\n // base_url \"https://west-us-azure-baml.openai.azure.com/openai/deployments/gpt-35-turbo-default\"\n api_version \"2024-02-01\"\n api_key env.AZURE_OPENAI_API_KEY\n }\n}\n\nclient Gemini {\n provider google-ai\n options {\n model \"gemini-1.5-pro-001\"\n api_key env.GOOGLE_API_KEY\n }\n}\n\nclient AwsBedrock {\n provider anthropic\n options {\n model claude-3-haiku-20240307\n api_key env.ANTHROPIC_API_KEY\n max_tokens 1000\n }\n}\n\n// client AwsBedrock {\n// provider aws-bedrock\n// options {\n// inference_configuration {\n// max_tokens 100\n// }\n// model_id \"anthropic.claude-3-haiku-20240307-v1:0\"\n// // model_id \"meta.llama3-8b-instruct-v1:0\"\n// // model_id \"mistral.mistral-7b-instruct-v0:2\"\n// api_key \"\"\n// }\n// 
}\n\nclient Claude {\n provider anthropic\n options {\n model claude-3-haiku-20240307\n api_key env.ANTHROPIC_API_KEY\n max_tokens 1000\n }\n}\n\nclient Resilient_SimpleSyntax {\n retry_policy Foo\n provider baml-fallback\n options {\n strategy [\n GPT4Turbo\n GPT35\n Lottery_SimpleSyntax\n ]\n }\n} \n \nclient Lottery_SimpleSyntax {\n provider baml-round-robin\n options {\n start 0\n strategy [\n GPT35\n Claude\n ]\n }\n}\n", "fiddle-examples/chain-of-thought.baml": "class Email {\n subject string\n body string\n from_address string\n}\n\nenum OrderStatus {\n ORDERED\n SHIPPED\n DELIVERED\n CANCELLED\n}\n\nclass OrderInfo {\n order_status OrderStatus\n tracking_number string?\n estimated_arrival_date string?\n}\n\nfunction GetOrderInfo(email: Email) -> OrderInfo {\n client GPT4\n prompt #\"\n Given the email below:\n\n ```\n from: {{email.from_address}}\n Email Subject: {{email.subject}}\n Email Body: {{email.body}}\n ```\n\n Extract this info from the email in JSON format:\n {{ ctx.output_format }}\n\n Before you output the JSON, please explain your\n reasoning step-by-step. Here is an example on how to do this:\n 'If we think step by step we can see that ...\n therefore the output JSON is:\n {\n ... the json schema ...\n }'\n \"#\n}", "fiddle-examples/chat-roles.baml": "// This will be available as an enum in your Python and Typescript code.\nenum Category2 {\n Refund\n CancelOrder\n TechnicalSupport\n AccountIssue\n Question\n}\n\nfunction ClassifyMessage2(input: string) -> Category {\n client GPT4\n\n prompt #\"\n {{ _.role(\"system\") }}\n // You can use _.role(\"system\") to indicate that this text should be a system message\n\n Classify the following INPUT into ONE\n of the following categories:\n\n {{ ctx.output_format }}\n\n {{ _.role(\"user\") }}\n // And _.role(\"user\") to indicate that this text should be a user message\n\n INPUT: {{ input }}\n\n Response:\n \"#\n}", "fiddle-examples/classify-message.baml": "// This will be available as an enum in your Python and Typescript code.\nenum Category {\n Refund\n CancelOrder\n TechnicalSupport\n AccountIssue\n Question\n}\n\nfunction ClassifyMessage(input: string) -> Category {\n client GPT4\n\n prompt #\"\n Classify the following INPUT into ONE\n of the following categories:\n\n INPUT: {{ input }}\n\n {{ ctx.output_format }}\n\n Response:\n \"#\n}", diff --git a/integ-tests/ruby/baml_client/inlined.rb b/integ-tests/ruby/baml_client/inlined.rb index 3afa00b71..dc2412925 100644 --- a/integ-tests/ruby/baml_client/inlined.rb +++ b/integ-tests/ruby/baml_client/inlined.rb @@ -16,7 +16,7 @@ module Baml module Inlined FILE_MAP = { - "clients.baml" => "retry_policy Bar {\n max_retries 3\n strategy {\n type exponential_backoff\n }\n}\n\nretry_policy Foo {\n max_retries 3\n strategy {\n type constant_delay\n delay_ms 100\n }\n}\n\nclient GPT4 {\n provider openai\n options {\n model gpt-4o\n api_key env.OPENAI_API_KEY\n }\n} \n\n\nclient GPT4o {\n provider openai\n options {\n model gpt-4o\n api_key env.OPENAI_API_KEY\n }\n} \n\n\nclient GPT4Turbo {\n retry_policy Bar\n provider openai\n options {\n model gpt-4-turbo\n api_key env.OPENAI_API_KEY\n }\n} \n\nclient GPT35 {\n provider openai\n options {\n model \"gpt-3.5-turbo\"\n api_key env.OPENAI_API_KEY\n }\n}\n\nclient GPT35LegacyProvider {\n provider openai\n options {\n model \"gpt-3.5-turbo\"\n api_key env.OPENAI_API_KEY\n }\n}\n\n\nclient Ollama {\n provider ollama\n options {\n model llama2\n }\n}\n\nclient GPT35Azure {\n provider azure-openai\n options {\n resource_name 
\"west-us-azure-baml\"\n deployment_id \"gpt-35-turbo-default\"\n // base_url \"https://west-us-azure-baml.openai.azure.com/openai/deployments/gpt-35-turbo-default\"\n api_version \"2024-02-01\"\n api_key env.AZURE_OPENAI_API_KEY\n }\n}\n\nclient Gemini {\n provider google-ai\n options {\n model \"gemini-1.5-pro-001\"\n api_key env.GOOGLE_API_KEY\n }\n}\n\nclient AwsBedrock {\n provider aws-bedrock\n options {\n inference_configuration {\n max_tokens 100\n }\n model_id \"anthropic.claude-3-haiku-20240307-v1:0\"\n // model_id \"meta.llama3-8b-instruct-v1:0\"\n // model_id \"mistral.mistral-7b-instruct-v0:2\"\n api_key \"\"\n }\n}\n\nclient Claude {\n provider anthropic\n options {\n model claude-3-haiku-20240307\n api_key env.ANTHROPIC_API_KEY\n max_tokens 1000\n }\n}\n\nclient Resilient_SimpleSyntax {\n retry_policy Foo\n provider baml-fallback\n options {\n strategy [\n GPT4Turbo\n GPT35\n Lottery_SimpleSyntax\n ]\n }\n} \n \nclient Lottery_SimpleSyntax {\n provider baml-round-robin\n options {\n start 0\n strategy [\n GPT35\n Claude\n ]\n }\n}\n", + "clients.baml" => "retry_policy Bar {\n max_retries 3\n strategy {\n type exponential_backoff\n }\n}\n\nretry_policy Foo {\n max_retries 3\n strategy {\n type constant_delay\n delay_ms 100\n }\n}\n\nclient GPT4 {\n provider openai\n options {\n model gpt-4o\n api_key env.OPENAI_API_KEY\n }\n} \n\n\nclient GPT4o {\n provider openai\n options {\n model gpt-4o\n api_key env.OPENAI_API_KEY\n }\n} \n\n\nclient GPT4Turbo {\n retry_policy Bar\n provider openai\n options {\n model gpt-4-turbo\n api_key env.OPENAI_API_KEY\n }\n} \n\nclient GPT35 {\n provider openai\n options {\n model \"gpt-3.5-turbo\"\n api_key env.OPENAI_API_KEY\n }\n}\n\nclient GPT35LegacyProvider {\n provider openai\n options {\n model \"gpt-3.5-turbo\"\n api_key env.OPENAI_API_KEY\n }\n}\n\n\nclient Ollama {\n provider ollama\n options {\n model llama2\n }\n}\n\nclient GPT35Azure {\n provider azure-openai\n options {\n resource_name \"west-us-azure-baml\"\n deployment_id \"gpt-35-turbo-default\"\n // base_url \"https://west-us-azure-baml.openai.azure.com/openai/deployments/gpt-35-turbo-default\"\n api_version \"2024-02-01\"\n api_key env.AZURE_OPENAI_API_KEY\n }\n}\n\nclient Gemini {\n provider google-ai\n options {\n model \"gemini-1.5-pro-001\"\n api_key env.GOOGLE_API_KEY\n }\n}\n\nclient AwsBedrock {\n provider anthropic\n options {\n model claude-3-haiku-20240307\n api_key env.ANTHROPIC_API_KEY\n max_tokens 1000\n }\n}\n\n// client AwsBedrock {\n// provider aws-bedrock\n// options {\n// inference_configuration {\n// max_tokens 100\n// }\n// model_id \"anthropic.claude-3-haiku-20240307-v1:0\"\n// // model_id \"meta.llama3-8b-instruct-v1:0\"\n// // model_id \"mistral.mistral-7b-instruct-v0:2\"\n// api_key \"\"\n// }\n// }\n\nclient Claude {\n provider anthropic\n options {\n model claude-3-haiku-20240307\n api_key env.ANTHROPIC_API_KEY\n max_tokens 1000\n }\n}\n\nclient Resilient_SimpleSyntax {\n retry_policy Foo\n provider baml-fallback\n options {\n strategy [\n GPT4Turbo\n GPT35\n Lottery_SimpleSyntax\n ]\n }\n} \n \nclient Lottery_SimpleSyntax {\n provider baml-round-robin\n options {\n start 0\n strategy [\n GPT35\n Claude\n ]\n }\n}\n", "fiddle-examples/chain-of-thought.baml" => "class Email {\n subject string\n body string\n from_address string\n}\n\nenum OrderStatus {\n ORDERED\n SHIPPED\n DELIVERED\n CANCELLED\n}\n\nclass OrderInfo {\n order_status OrderStatus\n tracking_number string?\n estimated_arrival_date string?\n}\n\nfunction GetOrderInfo(email: Email) -> 
OrderInfo {\n client GPT4\n prompt #\"\n Given the email below:\n\n ```\n from: {{email.from_address}}\n Email Subject: {{email.subject}}\n Email Body: {{email.body}}\n ```\n\n Extract this info from the email in JSON format:\n {{ ctx.output_format }}\n\n Before you output the JSON, please explain your\n reasoning step-by-step. Here is an example on how to do this:\n 'If we think step by step we can see that ...\n therefore the output JSON is:\n {\n ... the json schema ...\n }'\n \"#\n}", "fiddle-examples/chat-roles.baml" => "// This will be available as an enum in your Python and Typescript code.\nenum Category2 {\n Refund\n CancelOrder\n TechnicalSupport\n AccountIssue\n Question\n}\n\nfunction ClassifyMessage2(input: string) -> Category {\n client GPT4\n\n prompt #\"\n {{ _.role(\"system\") }}\n // You can use _.role(\"system\") to indicate that this text should be a system message\n\n Classify the following INPUT into ONE\n of the following categories:\n\n {{ ctx.output_format }}\n\n {{ _.role(\"user\") }}\n // And _.role(\"user\") to indicate that this text should be a user message\n\n INPUT: {{ input }}\n\n Response:\n \"#\n}", "fiddle-examples/classify-message.baml" => "// This will be available as an enum in your Python and Typescript code.\nenum Category {\n Refund\n CancelOrder\n TechnicalSupport\n AccountIssue\n Question\n}\n\nfunction ClassifyMessage(input: string) -> Category {\n client GPT4\n\n prompt #\"\n Classify the following INPUT into ONE\n of the following categories:\n\n INPUT: {{ input }}\n\n {{ ctx.output_format }}\n\n Response:\n \"#\n}", diff --git a/integ-tests/typescript/baml_client/client.ts b/integ-tests/typescript/baml_client/client.ts index 6ca5545da..a0b934713 100644 --- a/integ-tests/typescript/baml_client/client.ts +++ b/integ-tests/typescript/baml_client/client.ts @@ -26,9 +26,13 @@ export type RecursivePartialNull = T extends object : T | null; export class BamlClient { + private runtime: BamlRuntime + private ctx_manager: BamlCtxManager private stream_client: BamlStreamClient - constructor(private runtime: BamlRuntime, private ctx_manager: BamlCtxManager) { + constructor(runtime: BamlRuntime, ctx_manager: BamlCtxManager) { + this.runtime = runtime + this.ctx_manager = ctx_manager this.stream_client = new BamlStreamClient(runtime, ctx_manager) } diff --git a/integ-tests/typescript/baml_client/inlinedbaml.ts b/integ-tests/typescript/baml_client/inlinedbaml.ts index 4d3eabe3d..5cbc39bfc 100644 --- a/integ-tests/typescript/baml_client/inlinedbaml.ts +++ b/integ-tests/typescript/baml_client/inlinedbaml.ts @@ -17,7 +17,7 @@ $ pnpm add @boundaryml/baml /* eslint-disable */ const fileMap = { - "clients.baml": "retry_policy Bar {\n max_retries 3\n strategy {\n type exponential_backoff\n }\n}\n\nretry_policy Foo {\n max_retries 3\n strategy {\n type constant_delay\n delay_ms 100\n }\n}\n\nclient GPT4 {\n provider openai\n options {\n model gpt-4o\n api_key env.OPENAI_API_KEY\n }\n} \n\n\nclient GPT4o {\n provider openai\n options {\n model gpt-4o\n api_key env.OPENAI_API_KEY\n }\n} \n\n\nclient GPT4Turbo {\n retry_policy Bar\n provider openai\n options {\n model gpt-4-turbo\n api_key env.OPENAI_API_KEY\n }\n} \n\nclient GPT35 {\n provider openai\n options {\n model \"gpt-3.5-turbo\"\n api_key env.OPENAI_API_KEY\n }\n}\n\nclient GPT35LegacyProvider {\n provider openai\n options {\n model \"gpt-3.5-turbo\"\n api_key env.OPENAI_API_KEY\n }\n}\n\n\nclient Ollama {\n provider ollama\n options {\n model llama2\n }\n}\n\nclient GPT35Azure {\n provider azure-openai\n 
options {\n resource_name \"west-us-azure-baml\"\n deployment_id \"gpt-35-turbo-default\"\n // base_url \"https://west-us-azure-baml.openai.azure.com/openai/deployments/gpt-35-turbo-default\"\n api_version \"2024-02-01\"\n api_key env.AZURE_OPENAI_API_KEY\n }\n}\n\nclient Gemini {\n provider google-ai\n options {\n model \"gemini-1.5-pro-001\"\n api_key env.GOOGLE_API_KEY\n }\n}\n\nclient AwsBedrock {\n provider aws-bedrock\n options {\n inference_configuration {\n max_tokens 100\n }\n model_id \"anthropic.claude-3-haiku-20240307-v1:0\"\n // model_id \"meta.llama3-8b-instruct-v1:0\"\n // model_id \"mistral.mistral-7b-instruct-v0:2\"\n api_key \"\"\n }\n}\n\nclient Claude {\n provider anthropic\n options {\n model claude-3-haiku-20240307\n api_key env.ANTHROPIC_API_KEY\n max_tokens 1000\n }\n}\n\nclient Resilient_SimpleSyntax {\n retry_policy Foo\n provider baml-fallback\n options {\n strategy [\n GPT4Turbo\n GPT35\n Lottery_SimpleSyntax\n ]\n }\n} \n \nclient Lottery_SimpleSyntax {\n provider baml-round-robin\n options {\n start 0\n strategy [\n GPT35\n Claude\n ]\n }\n}\n", + "clients.baml": "retry_policy Bar {\n max_retries 3\n strategy {\n type exponential_backoff\n }\n}\n\nretry_policy Foo {\n max_retries 3\n strategy {\n type constant_delay\n delay_ms 100\n }\n}\n\nclient GPT4 {\n provider openai\n options {\n model gpt-4o\n api_key env.OPENAI_API_KEY\n }\n} \n\n\nclient GPT4o {\n provider openai\n options {\n model gpt-4o\n api_key env.OPENAI_API_KEY\n }\n} \n\n\nclient GPT4Turbo {\n retry_policy Bar\n provider openai\n options {\n model gpt-4-turbo\n api_key env.OPENAI_API_KEY\n }\n} \n\nclient GPT35 {\n provider openai\n options {\n model \"gpt-3.5-turbo\"\n api_key env.OPENAI_API_KEY\n }\n}\n\nclient GPT35LegacyProvider {\n provider openai\n options {\n model \"gpt-3.5-turbo\"\n api_key env.OPENAI_API_KEY\n }\n}\n\n\nclient Ollama {\n provider ollama\n options {\n model llama2\n }\n}\n\nclient GPT35Azure {\n provider azure-openai\n options {\n resource_name \"west-us-azure-baml\"\n deployment_id \"gpt-35-turbo-default\"\n // base_url \"https://west-us-azure-baml.openai.azure.com/openai/deployments/gpt-35-turbo-default\"\n api_version \"2024-02-01\"\n api_key env.AZURE_OPENAI_API_KEY\n }\n}\n\nclient Gemini {\n provider google-ai\n options {\n model \"gemini-1.5-pro-001\"\n api_key env.GOOGLE_API_KEY\n }\n}\n\nclient AwsBedrock {\n provider anthropic\n options {\n model claude-3-haiku-20240307\n api_key env.ANTHROPIC_API_KEY\n max_tokens 1000\n }\n}\n\n// client AwsBedrock {\n// provider aws-bedrock\n// options {\n// inference_configuration {\n// max_tokens 100\n// }\n// model_id \"anthropic.claude-3-haiku-20240307-v1:0\"\n// // model_id \"meta.llama3-8b-instruct-v1:0\"\n// // model_id \"mistral.mistral-7b-instruct-v0:2\"\n// api_key \"\"\n// }\n// }\n\nclient Claude {\n provider anthropic\n options {\n model claude-3-haiku-20240307\n api_key env.ANTHROPIC_API_KEY\n max_tokens 1000\n }\n}\n\nclient Resilient_SimpleSyntax {\n retry_policy Foo\n provider baml-fallback\n options {\n strategy [\n GPT4Turbo\n GPT35\n Lottery_SimpleSyntax\n ]\n }\n} \n \nclient Lottery_SimpleSyntax {\n provider baml-round-robin\n options {\n start 0\n strategy [\n GPT35\n Claude\n ]\n }\n}\n", "fiddle-examples/chain-of-thought.baml": "class Email {\n subject string\n body string\n from_address string\n}\n\nenum OrderStatus {\n ORDERED\n SHIPPED\n DELIVERED\n CANCELLED\n}\n\nclass OrderInfo {\n order_status OrderStatus\n tracking_number string?\n estimated_arrival_date string?\n}\n\nfunction 
GetOrderInfo(email: Email) -> OrderInfo {\n client GPT4\n prompt #\"\n Given the email below:\n\n ```\n from: {{email.from_address}}\n Email Subject: {{email.subject}}\n Email Body: {{email.body}}\n ```\n\n Extract this info from the email in JSON format:\n {{ ctx.output_format }}\n\n Before you output the JSON, please explain your\n reasoning step-by-step. Here is an example on how to do this:\n 'If we think step by step we can see that ...\n therefore the output JSON is:\n {\n ... the json schema ...\n }'\n \"#\n}", "fiddle-examples/chat-roles.baml": "// This will be available as an enum in your Python and Typescript code.\nenum Category2 {\n Refund\n CancelOrder\n TechnicalSupport\n AccountIssue\n Question\n}\n\nfunction ClassifyMessage2(input: string) -> Category {\n client GPT4\n\n prompt #\"\n {{ _.role(\"system\") }}\n // You can use _.role(\"system\") to indicate that this text should be a system message\n\n Classify the following INPUT into ONE\n of the following categories:\n\n {{ ctx.output_format }}\n\n {{ _.role(\"user\") }}\n // And _.role(\"user\") to indicate that this text should be a user message\n\n INPUT: {{ input }}\n\n Response:\n \"#\n}", "fiddle-examples/classify-message.baml": "// This will be available as an enum in your Python and Typescript code.\nenum Category {\n Refund\n CancelOrder\n TechnicalSupport\n AccountIssue\n Question\n}\n\nfunction ClassifyMessage(input: string) -> Category {\n client GPT4\n\n prompt #\"\n Classify the following INPUT into ONE\n of the following categories:\n\n INPUT: {{ input }}\n\n {{ ctx.output_format }}\n\n Response:\n \"#\n}", diff --git a/integ-tests/typescript/tests/integ-tests.test.ts b/integ-tests/typescript/tests/integ-tests.test.ts index 5cf5b6173..dedf5c77d 100644 --- a/integ-tests/typescript/tests/integ-tests.test.ts +++ b/integ-tests/typescript/tests/integ-tests.test.ts @@ -16,6 +16,7 @@ import TypeBuilder from '../baml_client/type_builder' import { RecursivePartialNull } from '../baml_client/client' import { config } from 'dotenv' import { BamlLogEvent } from '@boundaryml/baml/native' +import { AsyncLocalStorage } from 'async_hooks' config() describe('Integ tests', () => { @@ -246,8 +247,87 @@ describe('Integ tests', () => { traceSync('dummyFunc3', dummyFunc)('hi there') }) + it('test local trace async', async () => { + const s2 = new AsyncLocalStorage() + s2.enterWith(['first']) + + const localTraceAsync = Promise>( + name: string, + func: F, + ): F => { + const funcName = name + return (async (...args: any[]) => { + const params = args.reduce( + (acc, arg, i) => ({ + ...acc, + [`arg${i}`]: arg, // generic way to label args + }), + {}, + ) + + await s2.run([...(s2.getStore() ?? 
['no-parent']), funcName], async () => { + console.log('entering span', s2.getStore()) + try { + const response = await func(...args) + console.log('exiting span try', s2.getStore()) + return response + } catch (e) { + console.log('exiting span catch', s2.getStore()) + throw e + } + }) + }) + } + + const res = await localTraceAsync('parentAsync', async (firstArg: string, secondArg: number) => { + const res2 = await localTraceAsync('asyncDummyFunc', asyncDummyFunc)('secondDummyFuncArg') + + const llm_res = await Promise.all([ + await localTraceAsync('prom1', asyncDummyFunc)('arg1'), + await localTraceAsync('prom2', asyncDummyFunc)('arg2'), + await localTraceAsync('prom3', asyncDummyFunc)('arg3'), + await localTraceAsync('prom4', asyncDummyFunc)('arg4'), + ]) + + const res3 = await localTraceAsync('asyncDummyFunc', asyncDummyFunc)('thirdDummyFuncArg') + + return 'hello world' + })('hi', 10) + }) + + it('test asynclocalstorage semantics', async () => { + const s = new AsyncLocalStorage() + s.enterWith('first') + + await (async () => { + console.log('expected first, got ', s.getStore()) + s.enterWith('second') + await Promise.all([ + (async () => { + console.log('thirdA: expected second, got ', s.getStore()) + s.run('thirdA', async () => console.log('thirdA: expected thirdA, got ', s.getStore())) + })(), + (async () => { + console.log('thirdB: expected second, got ', s.getStore()) + //s.enterWith('thirdB') + s.run('thirdB', async () => console.log('thirdB: expected thirdB, got ', s.getStore())) + })(), + (async () => { + console.log('thirdC: expected second, got ', s.getStore()) + //s.enterWith('thirdC') + s.run('thirdC', async () => console.log('thirdC: expected thirdC, got ', s.getStore())) + })(), + (async () => { + console.log('thirdD: expected second, got ', s.getStore()) + //s.enterWith('thirdD') + s.run('thirdD', async () => console.log('thirdD: expected thirdD, got ', s.getStore())) + })(), + ]) + console.log('expected second (2), got ', s.getStore()) + })() + }) // Look at the dashboard to verify results. 
- it('supports tracing async', async () => { + it.only('supports tracing async', async () => { const res = await traceAsync('parentAsync', async (firstArg: string, secondArg: number) => { console.log('hello world') setTags({ myKey: 'myVal' }) @@ -256,18 +336,31 @@ describe('Integ tests', () => { const res2 = await traceAsync('asyncDummyFunc', asyncDummyFunc)('secondDummyFuncArg') - const llm_res = await b.TestFnNamedArgsSingleStringList(['a', 'b', 'c']) + const llm_res = await Promise.all([ + b.TestFnNamedArgsSingleStringList(['a1', 'b', 'c']), + b.TestFnNamedArgsSingleStringList(['a2', 'b', 'c']), + b.TestFnNamedArgsSingleStringList(['a3', 'b', 'c']), + b.TestFnNamedArgsSingleStringList(['a4', 'b', 'c']), + b.TestFnNamedArgsSingleStringList(['a5', 'b', 'c']), + ]) const res3 = await traceAsync('asyncDummyFunc', asyncDummyFunc)('thirdDummyFuncArg') return 'hello world' })('hi', 10) - const res2 = await traceAsync('parentAsync2', async (firstArg: string, secondArg: number) => { - console.log('hello world') + // const res2 = await traceAsync('parentAsync2', async (firstArg: string, secondArg: number) => { + // console.log('hello world') - const res1 = traceSync('dummyFunc', dummyFunc)('firstDummyFuncArg') + // const res1 = traceSync('dummyFunc', dummyFunc)('firstDummyFuncArg') + // return 'hello world' + // })('hi', 10) + }) + + it('supports tracing async sam test', async () => { + const res = await traceAsync('padreAsync', async (firstArg: string, secondArg: number) => { + console.log('hello world') return 'hello world' })('hi', 10) }) diff --git a/root.code-workspace b/root.code-workspace index 7c7b39cd1..1c3ac40a5 100644 --- a/root.code-workspace +++ b/root.code-workspace @@ -43,7 +43,9 @@ "editor.defaultFormatter": "biomejs.biome" }, "files.associations": { - "*.baml.j2": "jinja" + "*.baml.j2": "jinja", + "*.js.j2": "jinja-js", + "*.ts.j2": "jinja-js" }, "editor.colorDecoratorsLimit": 2000, "editor.formatOnSaveMode": "file", diff --git a/tools/build b/tools/build index cc8a00295..18d880558 100755 --- a/tools/build +++ b/tools/build @@ -226,14 +226,21 @@ case "$_path" in command="(cd ${_repo_root}/engine/language_client_typescript && pnpm build:debug)" command="${command} && pnpm baml-cli generate --from ${_repo_root}/integ-tests/baml_src" if [ "$_test_mode" -eq 1 ]; then - command="${command} && pnpm integ-tests" + #command="${command} && pnpm integ-tests" + command="${command} && BAML_LOG=baml_runtime::tracing=trace,baml_runtime=debug infisical run -- pnpm test tests/integ-tests.test.ts" fi if [ "$_watch_mode" -eq 1 ]; then + #--verbose \ npx nodemon \ - --ext py,pyi,rs,j2,toml \ + --delay 1.5 \ + --ext py,pyi,rs,j2,toml,test.ts \ --watch "${_repo_root}/engine" \ --watch . 
\ - --ignore baml_client \ + --ignore baml_client/** \ + --ignore dist/** \ + --ignore target/** \ + --ignore node_modules/** \ + --ignore *.d.ts \ --exec "${command}" else eval "${command}" From 07eac56bb1580137c9f26223b47026facf9ce20e Mon Sep 17 00:00:00 2001 From: Sam Lijin Date: Fri, 28 Jun 2024 18:28:42 -0700 Subject: [PATCH 2/2] js.t2 to ts.t2 --- .../src/typescript/templates/client.ts.j2 | 80 +++++++++++++++++++ .../typescript/templates/type_builder.ts.j2 | 67 ++++++++++++++++ .../src/typescript/templates/types.ts.j2 | 20 +++++ 3 files changed, 167 insertions(+) create mode 100644 engine/language-client-codegen/src/typescript/templates/client.ts.j2 create mode 100644 engine/language-client-codegen/src/typescript/templates/type_builder.ts.j2 create mode 100644 engine/language-client-codegen/src/typescript/templates/types.ts.j2 diff --git a/engine/language-client-codegen/src/typescript/templates/client.ts.j2 b/engine/language-client-codegen/src/typescript/templates/client.ts.j2 new file mode 100644 index 000000000..75f1a7090 --- /dev/null +++ b/engine/language-client-codegen/src/typescript/templates/client.ts.j2 @@ -0,0 +1,80 @@ +import { BamlRuntime, FunctionResult, BamlCtxManager, BamlStream, Image } from "@boundaryml/baml" +import { + {%- for t in types %}{{ t }}{% if !loop.last %}, {% endif %}{% endfor -%} +} from "./types" +import TypeBuilder from "./type_builder" + +export type RecursivePartialNull = T extends object + ? { + [P in keyof T]?: RecursivePartialNull; + } + : T | null; + +export class BamlClient { + private runtime: BamlRuntime + private ctx_manager: BamlCtxManager + private stream_client: BamlStreamClient + + constructor(runtime: BamlRuntime, ctx_manager: BamlCtxManager) { + this.runtime = runtime + this.ctx_manager = ctx_manager + this.stream_client = new BamlStreamClient(runtime, ctx_manager) + } + + get stream() { + return this.stream_client + } + + {% for fn in funcs %} + async {{ fn.name }}( + {% for (name, optional, type) in fn.args -%} + {{name}}{% if optional %}?{% endif %}: {{type}}, + {%- endfor %} + __baml_options__?: { tb?: TypeBuilder } + ): Promise<{{fn.return_type}}> { + const raw = await this.runtime.callFunction( + "{{fn.name}}", + { + {% for (name, optional, type) in fn.args -%} + "{{name}}": {{name}}{% if optional %}?? null{% endif %}{% if !loop.last %},{% endif %} + {%- endfor %} + }, + this.ctx_manager.get(), + __baml_options__?.tb?.__tb(), + ) + return raw.parsed() as {{fn.return_type}} + } + {% endfor %} +} + +class BamlStreamClient { + constructor(private runtime: BamlRuntime, private ctx_manager: BamlCtxManager) {} + + {% for fn in funcs %} + {{ fn.name }}( + {% for (name, optional, type) in fn.args -%} + {{name}}{% if optional %}?{% endif %}: {{type}}, + {%- endfor %} + __baml_options__?: { tb?: TypeBuilder } + ): BamlStream, {{ fn.return_type }}> { + const raw = this.runtime.streamFunction( + "{{fn.name}}", + { + {% for (name, optional, type) in fn.args -%} + "{{name}}": {{name}}{% if optional %} ?? 
null{% endif %}{% if !loop.last %},{% endif %} + {%- endfor %} + }, + undefined, + this.ctx_manager.get(), + __baml_options__?.tb?.__tb(), + ) + return new BamlStream, {{ fn.return_type }}>( + raw, + (a): a is RecursivePartialNull<{{ fn.return_type }}> => a, + (a): a is {{ fn.return_type }} => a, + this.ctx_manager.get(), + __baml_options__?.tb?.__tb(), + ) + } + {% endfor %} +} \ No newline at end of file diff --git a/engine/language-client-codegen/src/typescript/templates/type_builder.ts.j2 b/engine/language-client-codegen/src/typescript/templates/type_builder.ts.j2 new file mode 100644 index 000000000..703d0173c --- /dev/null +++ b/engine/language-client-codegen/src/typescript/templates/type_builder.ts.j2 @@ -0,0 +1,67 @@ +import { FieldType } from '@boundaryml/baml/native' +import { TypeBuilder as _TypeBuilder, EnumBuilder, ClassBuilder } from '@boundaryml/baml/type_builder' + +export default class TypeBuilder { + private tb: _TypeBuilder; + {% for cls in classes %}{% if cls.dynamic %} + {{cls.name}}: ClassBuilder<'{{cls.name}}' + {%- for (name, _, _) in cls.fields %}{% if loop.first %}, {%endif%}"{{name}}"{% if !loop.last %} | {% endif %}{% endfor -%} + >; + {% endif %}{% endfor %} + {% for enum in enums %}{% if enum.dynamic %} + {{enum.name}}: EnumBuilder<'{{enum.name}}'{%- for value in enum.values %}{% if loop.first %}, {%endif%}"{{value}}"{% if !loop.last %} | {% endif %}{% endfor -%}>; + {% endif %}{% endfor %} + + constructor() { + this.tb = new _TypeBuilder({ + classes: new Set([ + {% for cls in classes %}"{{cls.name}}",{% endfor %} + ]), + enums: new Set([ + {% for enum in enums %}"{{enum.name}}",{% endfor %} + ]) + }); + {% for cls in classes %}{% if cls.dynamic %} + this.{{cls.name}} = this.tb.classBuilder("{{cls.name}}", [ + {% for (name, _, _) in cls.fields %}"{{name}}",{% endfor %} + ]); + {% endif %}{% endfor %} + {% for enum in enums %}{% if enum.dynamic %} + this.{{enum.name}} = this.tb.enumBuilder("{{enum.name}}", [ + {% for value in enum.values %}"{{value}}",{% endfor %} + ]); + {% endif %}{% endfor %} + } + + __tb() { + return this.tb._tb(); + } + + string(): FieldType { + return this.tb.string() + } + + int(): FieldType { + return this.tb.int() + } + + float(): FieldType { + return this.tb.float() + } + + bool(): FieldType { + return this.tb.bool() + } + + list(type: FieldType): FieldType { + return this.tb.list(type) + } + + addClass(name: Name): ClassBuilder { + return this.tb.addClass(name); + } + + addEnum(name: Name): EnumBuilder { + return this.tb.addEnum(name); + } +} diff --git a/engine/language-client-codegen/src/typescript/templates/types.ts.j2 b/engine/language-client-codegen/src/typescript/templates/types.ts.j2 new file mode 100644 index 000000000..f560952ce --- /dev/null +++ b/engine/language-client-codegen/src/typescript/templates/types.ts.j2 @@ -0,0 +1,20 @@ +import { Image } from "@boundaryml/baml" + +{%- for enum in enums %} +export enum {{enum.name}} { + {%- for value in enum.values %} + {{ value }} = "{{ value }}", + {%- endfor %} +} +{% endfor %} + +{%- for cls in classes %} +export interface {{cls.name}} { + {%- for (name, optional, type) in cls.fields %} + {{name}}{% if optional %}?{% endif %}: {{type}} + {%- endfor %} + {% if cls.dynamic %} + [key: string]: any; + {%- endif %} +} +{% endfor %} \ No newline at end of file
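
Appendix (illustrative sketch, not part of the patch): the core tracing change above replaces AsyncLocalStorage.enterWith() with AsyncLocalStorage.run() over a cloned RuntimeContextManager, and startTraceSync/startTraceAsync now return a [RuntimeContextManager, BamlSpan] pair so the wrapper can run the traced function inside that scope. The standalone TypeScript below sketches that pattern with a hypothetical traceAsync helper standing in for the patched BamlCtxManager wrappers; it assumes nothing beyond Node's built-in async_hooks module, and the span-chain array is a simplification of the real context manager. It mirrors the behavior the new "test asynclocalstorage semantics" and "test local trace async" cases exercise: concurrent Promise.all() branches each see their own chain instead of clobbering a shared one.

import { AsyncLocalStorage } from 'async_hooks'

// Span chain for the current async scope (simplified stand-in for RuntimeContextManager).
const chain = new AsyncLocalStorage<string[]>()

// Hypothetical helper mirroring the patched wrapper: clone the parent chain,
// append this span's name, and run the wrapped function inside that scope via
// run() instead of mutating the ambient context with enterWith().
function traceAsync<Args extends any[], R>(
  name: string,
  func: (...args: Args) => Promise<R>,
): (...args: Args) => Promise<R> {
  return async (...args: Args) => {
    const parent = chain.getStore() ?? []
    return chain.run([...parent, name], async () => {
      console.log('entering span', chain.getStore())
      try {
        return await func(...args)
      } finally {
        console.log('exiting span', chain.getStore())
      }
    })
  }
}

async function main() {
  await traceAsync('parentAsync', async () => {
    // Each concurrent child sees ['parentAsync', <child>]; with enterWith()
    // they would all share (and overwrite) a single chain.
    await Promise.all([
      traceAsync('prom1', async () => 'a')(),
      traceAsync('prom2', async () => 'b')(),
    ])
    return 'hello world'
  })()
}

main().catch(console.error)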