From f82ebf0f28b22b409fef784c5cf314882a168e4e Mon Sep 17 00:00:00 2001
From: hellovai
Date: Tue, 5 Nov 2024 11:59:10 -0800
Subject: [PATCH] chore: Bump version to 0.67.0 (#1148)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Bump version to 0.67.0

❌ Typescript integ tests
❌ Python integ tests

Generated by bump-version script.

----

> [!IMPORTANT]
> Bump version from 0.66.0 to 0.67.0 across multiple configuration files and package manifests, with changelog updates but no integration tests.
>
> - **Version Bump**:
>   - Update version from `0.66.0` to `0.67.0` in `Cargo.toml`, `pyproject.toml`, `baml.gemspec`, `package.json`, and other related files.
>   - Update `current_version` to `0.67.0` in `engine.cfg`, `integ-tests.cfg`, `python.cfg`, `ruby.cfg`, `typescript.cfg`, and `vscode.cfg`.
> - **Changelog**:
>   - Add entries for version `0.67.0` in `CHANGELOG.md` with bug fixes and features.
> - **Misc**:
>   - No integration tests for TypeScript and Python included in this version bump.
>
> This description was created by [Ellipsis](https://www.ellipsis.dev?ref=BoundaryML%2Fbaml&utm_source=github&utm_medium=referral) for 39ab730af24bab32af3d7eb71d8fcafdf481697b. It will automatically update as commits are pushed.
---
 CHANGELOG.md | 9 +
 engine/Cargo.lock | 30 +-
 engine/Cargo.toml | 2 +-
 engine/language_client_python/pyproject.toml | 2 +-
 engine/language_client_ruby/baml.gemspec | 2 +-
 .../language_client_typescript/package.json | 2 +-
 integ-tests/baml_src/generators.baml | 8 +-
 integ-tests/python/baml_client/inlinedbaml.py | 2 +-
 integ-tests/python/report.html | 1604 ++++++++---------
 integ-tests/ruby/baml_client/inlined.rb | 2 +-
 .../typescript/baml_client/inlinedbaml.ts | 2 +-
 integ-tests/typescript/test-report.html | 33 +-
 tools/versions/engine.cfg | 2 +-
 tools/versions/integ-tests.cfg | 2 +-
 tools/versions/python.cfg | 2 +-
 tools/versions/ruby.cfg | 2 +-
 tools/versions/typescript.cfg | 2 +-
 tools/versions/vscode.cfg | 2 +-
 typescript/vscode-ext/packages/package.json | 2 +-
 19 files changed, 844 insertions(+), 868 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 0de5217b8..96c3800fc 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,6 +2,15 @@
 All notable changes to this project will be documented in this file. See [conventional commits](https://www.conventionalcommits.org/) for commit guidelines.

+## [0.67.0](https://github.com/boundaryml/baml/compare/0.66.0..0.67.0) - 2024-11-05
+
+### Bug Fixes
+- URGENT: fix generated typescript code (#1147) - ([bd9da16](https://github.com/boundaryml/baml/commit/bd9da1683112d849595580866382cba2c6bed8be)) - hellovai
+
+
+### Features
+- Parser improvement: handle code within backticks (`) (#1146) - ([3d8ef34](https://github.com/boundaryml/baml/commit/3d8ef34af15a7f2b957876ffa71314ce38da2a01)) - hellovai
+
 ## [0.66.0](https://github.com/boundaryml/baml/compare/0.65.0..0.66.0) - 2024-11-04

 ### Features
diff --git a/engine/Cargo.lock b/engine/Cargo.lock
index bec21783f..d81f19760 100644
--- a/engine/Cargo.lock
+++ b/engine/Cargo.lock
@@ -773,7 +773,7 @@ dependencies = [

 [[package]]
 name = "baml-cli"
-version = "0.66.0"
+version = "0.67.0"
 dependencies = [
  "ambassador",
  "anyhow",
@@ -860,7 +860,7 @@ dependencies = [

 [[package]]
 name = "baml-lib"
-version = "0.66.0"
+version = "0.67.0"
 dependencies = [
  "base64 0.13.1",
  "dissimilar",
@@ -899,7 +899,7 @@ dependencies = [

 [[package]]
 name = "baml-runtime"
-version = "0.66.0"
+version = "0.67.0"
 dependencies = [
  "ambassador",
  "anyhow",
@@ -992,7 +992,7 @@ dependencies = [

 [[package]]
 name = "baml-schema-build"
-version = "0.66.0"
+version = "0.67.0"
 dependencies = [
  "anyhow",
  "baml-runtime",
@@ -1028,7 +1028,7 @@ dependencies = [

 [[package]]
 name = "baml-types"
-version = "0.66.0"
+version = "0.67.0"
 dependencies = [
  "anyhow",
  "clap",
@@ -1155,7 +1155,7 @@ dependencies = [

 [[package]]
 name = "bstd"
-version = "0.66.0"
+version = "0.67.0"
 dependencies = [
  "anyhow",
  "assert_cmd",
@@ -2539,7 +2539,7 @@ dependencies = [

 [[package]]
 name = "internal-baml-codegen"
-version = "0.66.0"
+version = "0.67.0"
 dependencies = [
  "anyhow",
  "askama",
@@ -2564,7 +2564,7 @@ dependencies = [

 [[package]]
 name = "internal-baml-core"
-version = "0.66.0"
+version = "0.67.0"
 dependencies = [
  "anyhow",
  "baml-types",
@@ -2600,7 +2600,7 @@ dependencies = [

 [[package]]
 name = "internal-baml-diagnostics"
-version = "0.66.0"
+version = "0.67.0"
 dependencies = [
  "anyhow",
  "colored",
@@ -2613,7 +2613,7 @@ dependencies = [

 [[package]]
 name = "internal-baml-jinja"
-version = "0.66.0"
+version = "0.67.0"
 dependencies = [
  "anyhow",
  "askama",
@@ -2634,7 +2634,7 @@ dependencies = [

 [[package]]
 name = "internal-baml-jinja-types"
-version = "0.66.0"
+version = "0.67.0"
 dependencies = [
  "anyhow",
  "askama",
@@ -2653,7 +2653,7 @@ dependencies = [

 [[package]]
 name = "internal-baml-parser-database"
-version = "0.66.0"
+version = "0.67.0"
 dependencies = [
  "anyhow",
  "baml-types",
@@ -2676,7 +2676,7 @@ dependencies = [

 [[package]]
 name = "internal-baml-prompt-parser"
-version = "0.66.0"
+version = "0.67.0"
 dependencies = [
  "internal-baml-diagnostics",
  "internal-baml-schema-ast",
@@ -2688,7 +2688,7 @@ dependencies = [

 [[package]]
 name = "internal-baml-schema-ast"
-version = "0.66.0"
+version = "0.67.0"
 dependencies = [
  "baml-types",
  "either",
@@ -2793,7 +2793,7 @@ checksum = "9dbbfed4e59ba9750e15ba154fdfd9329cee16ff3df539c2666b70f58cc32105"

 [[package]]
 name = "jsonish"
-version = "0.66.0"
+version = "0.67.0"
 dependencies = [
  "anyhow",
  "assert-json-diff",
diff --git a/engine/Cargo.toml b/engine/Cargo.toml
index 69f3478f3..7d0ea7645 100644
--- a/engine/Cargo.toml
+++ b/engine/Cargo.toml
@@ -95,7 +95,7 @@ internal-baml-jinja = { path = "baml-lib/jinja" }
 internal-baml-schema-ast = { path = "baml-lib/schema-ast" }

 [workspace.package]
-version = "0.66.0"
+version = "0.67.0"
 authors = ["Boundary "]
 description = "BAML Toolchain"
diff --git 
a/engine/language_client_python/pyproject.toml b/engine/language_client_python/pyproject.toml index 85bd52838..8e8dcd6a4 100644 --- a/engine/language_client_python/pyproject.toml +++ b/engine/language_client_python/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "baml-py" -version = "0.66.0" +version = "0.67.0" description = "BAML python bindings (pyproject.toml)" readme = "README.md" authors = [["Boundary", "contact@boundaryml.com"]] diff --git a/engine/language_client_ruby/baml.gemspec b/engine/language_client_ruby/baml.gemspec index 93b3b97fa..87b69aa51 100644 --- a/engine/language_client_ruby/baml.gemspec +++ b/engine/language_client_ruby/baml.gemspec @@ -2,7 +2,7 @@ Gem::Specification.new do |spec| spec.name = "baml" - spec.version = "0.66.0" + spec.version = "0.67.0" spec.authors = ["BoundaryML"] spec.email = ["contact@boundaryml.com"] diff --git a/engine/language_client_typescript/package.json b/engine/language_client_typescript/package.json index 24e62efa2..82c4f787f 100644 --- a/engine/language_client_typescript/package.json +++ b/engine/language_client_typescript/package.json @@ -1,6 +1,6 @@ { "name": "@boundaryml/baml", - "version": "0.66.0", + "version": "0.67.0", "description": "BAML typescript bindings (package.json)", "repository": { "type": "git", diff --git a/integ-tests/baml_src/generators.baml b/integ-tests/baml_src/generators.baml index 94dec0f41..70513bb2b 100644 --- a/integ-tests/baml_src/generators.baml +++ b/integ-tests/baml_src/generators.baml @@ -1,24 +1,24 @@ generator lang_python { output_type python/pydantic output_dir "../python" - version "0.66.0" + version "0.67.0" } generator lang_typescript { output_type typescript output_dir "../typescript" - version "0.66.0" + version "0.67.0" } generator lang_ruby { output_type ruby/sorbet output_dir "../ruby" - version "0.66.0" + version "0.67.0" } // generator openapi { // output_type rest/openapi // output_dir "../openapi" -// version "0.66.0" +// version "0.67.0" // on_generate "rm .gitignore" // } diff --git a/integ-tests/python/baml_client/inlinedbaml.py b/integ-tests/python/baml_client/inlinedbaml.py index b5d5b1e32..b6ceb8714 100644 --- a/integ-tests/python/baml_client/inlinedbaml.py +++ b/integ-tests/python/baml_client/inlinedbaml.py @@ -25,7 +25,7 @@ "fiddle-examples/extract-receipt-info.baml": "class ReceiptItem {\n name string\n description string?\n quantity int\n price float\n}\n\nclass ReceiptInfo {\n items ReceiptItem[]\n total_cost float?\n venue \"barisa\" | \"ox_burger\"\n}\n\nfunction ExtractReceiptInfo(email: string, reason: \"curiosity\" | \"personal_finance\") -> ReceiptInfo {\n client GPT4o\n prompt #\"\n Given the receipt below:\n\n ```\n {{email}}\n ```\n\n {{ ctx.output_format }}\n \"#\n}\n\n", "fiddle-examples/images/image.baml": "function DescribeImage(img: image) -> string {\n client GPT4o\n prompt #\"\n {{ _.role(\"user\") }}\n\n\n Describe the image below in 20 words:\n {{ img }}\n \"#\n\n}\n\nclass FakeImage {\n url string\n}\n\nclass ClassWithImage {\n myImage image\n param2 string\n fake_image FakeImage\n}\n\n// chat role user present\nfunction DescribeImage2(classWithImage: ClassWithImage, img2: image) -> string { \n client GPT4Turbo\n prompt #\"\n {{ _.role(\"user\") }}\n You should return 2 answers that answer the following commands.\n\n 1. Describe this in 5 words:\n {{ classWithImage.myImage }}\n\n 2. 
Also tell me what's happening here in one sentence:\n {{ img2 }}\n \"#\n}\n\n// no chat role\nfunction DescribeImage3(classWithImage: ClassWithImage, img2: image) -> string {\n client GPT4Turbo\n prompt #\"\n Describe this in 5 words:\n {{ classWithImage.myImage }}\n\n Tell me also what's happening here in one sentence and relate it to the word {{ classWithImage.param2 }}:\n {{ img2 }}\n \"#\n}\n\n\n// system prompt and chat prompt\nfunction DescribeImage4(classWithImage: ClassWithImage, img2: image) -> string {\n client GPT4Turbo\n prompt #\"\n {{ _.role(\"system\")}}\n\n Describe this in 5 words:\n {{ classWithImage.myImage }}\n\n Tell me also what's happening here in one sentence and relate it to the word {{ classWithImage.param2 }}:\n {{ img2 }}\n \"#\n}\n\ntest TestName {\n functions [DescribeImage]\n args {\n img { url \"https://imgs.xkcd.com/comics/standards.png\"}\n }\n}\n", "fiddle-examples/symbol-tuning.baml": "enum Category3 {\n Refund @alias(\"k1\")\n @description(\"Customer wants to refund a product\")\n\n CancelOrder @alias(\"k2\")\n @description(\"Customer wants to cancel an order\")\n\n TechnicalSupport @alias(\"k3\")\n @description(\"Customer needs help with a technical issue unrelated to account creation or login\")\n\n AccountIssue @alias(\"k4\")\n @description(\"Specifically relates to account-login or account-creation\")\n\n Question @alias(\"k5\")\n @description(\"Customer has a question\")\n}\n\nfunction ClassifyMessage3(input: string) -> Category {\n client GPT4\n\n prompt #\"\n Classify the following INPUT into ONE\n of the following categories:\n\n INPUT: {{ input }}\n\n {{ ctx.output_format }}\n\n Response:\n \"#\n}", - "generators.baml": "generator lang_python {\n output_type python/pydantic\n output_dir \"../python\"\n version \"0.66.0\"\n}\n\ngenerator lang_typescript {\n output_type typescript\n output_dir \"../typescript\"\n version \"0.66.0\"\n}\n\ngenerator lang_ruby {\n output_type ruby/sorbet\n output_dir \"../ruby\"\n version \"0.66.0\"\n}\n\n// generator openapi {\n// output_type rest/openapi\n// output_dir \"../openapi\"\n// version \"0.66.0\"\n// on_generate \"rm .gitignore\"\n// }\n", + "generators.baml": "generator lang_python {\n output_type python/pydantic\n output_dir \"../python\"\n version \"0.67.0\"\n}\n\ngenerator lang_typescript {\n output_type typescript\n output_dir \"../typescript\"\n version \"0.67.0\"\n}\n\ngenerator lang_ruby {\n output_type ruby/sorbet\n output_dir \"../ruby\"\n version \"0.67.0\"\n}\n\n// generator openapi {\n// output_type rest/openapi\n// output_dir \"../openapi\"\n// version \"0.67.0\"\n// on_generate \"rm .gitignore\"\n// }\n", "test-files/aliases/aliased-inputs.baml": "\nclass InputClass {\n key string @alias(\"color\")\n key2 string\n}\n\n\nclass InputClassNested {\n key string\n nested InputClass @alias(\"interesting-key\")\n}\n \n\nfunction AliasedInputClass(input: InputClass) -> string {\n client GPT35\n prompt #\"\n\n {{input}}\n\n This is a test. What's the name of the first json key above? Remember, tell me the key, not value.\n \"#\n}\n \nfunction AliasedInputClass2(input: InputClass) -> string {\n client GPT35\n prompt #\"\n\n {# making sure we can still access the original key #}\n {%if input.key == \"tiger\"%}\n Repeat this value back to me, and nothing else: {{input.key}}\n {%endif%}\n \"#\n}\n \n function AliasedInputClassNested(input: InputClassNested) -> string {\n client GPT35\n prompt #\"\n {{ _.role(\"user\")}}\n\n {{input}}\n\n This is a test. What's the name of the second json key above? 
Remember, tell me the key, not value.\n \"#\n }\n\n\nenum AliasedEnum {\n KEY_ONE @alias(\"tiger\")\n KEY_TWO\n}\n\nfunction AliasedInputEnum(input: AliasedEnum) -> string {\n client GPT4o\n prompt #\"\n {{ _.role(\"user\")}}\n\n\n Write out this word only in your response, in lowercase:\n ---\n {{input}}\n ---\n Answer:\n \"#\n}\n\n\nfunction AliasedInputList(input: AliasedEnum[]) -> string {\n client GPT35\n prompt #\"\n {{ _.role(\"user\")}}\n Given this array:\n ---\n {{input}}\n ---\n\n Return the first element in the array:\n \"#\n}\n\n", "test-files/aliases/classes.baml": "class TestClassAlias {\n key string @alias(\"key-dash\") @description(#\"\n This is a description for key\n af asdf\n \"#)\n key2 string @alias(\"key21\")\n key3 string @alias(\"key with space\")\n key4 string //unaliased\n key5 string @alias(\"key.with.punctuation/123\")\n}\n\nfunction FnTestClassAlias(input: string) -> TestClassAlias {\n client GPT35\n prompt #\"\n {{ctx.output_format}}\n \"#\n}\n\ntest FnTestClassAlias {\n functions [FnTestClassAlias]\n args {\n input \"example input\"\n }\n}\n", "test-files/aliases/enums.baml": "enum TestEnum {\n A @alias(\"k1\") @description(#\"\n User is angry\n \"#)\n B @alias(\"k22\") @description(#\"\n User is happy\n \"#)\n // tests whether k1 doesnt incorrectly get matched with k11\n C @alias(\"k11\") @description(#\"\n User is sad\n \"#)\n D @alias(\"k44\") @description(\n User is confused\n )\n E @description(\n User is excited\n )\n F @alias(\"k5\") // only alias\n \n G @alias(\"k6\") @description(#\"\n User is bored\n With a long description\n \"#)\n \n @@alias(\"Category\")\n}\n\nfunction FnTestAliasedEnumOutput(input: string) -> TestEnum {\n client GPT35\n prompt #\"\n Classify the user input into the following category\n \n {{ ctx.output_format }}\n\n {{ _.role('user') }}\n {{input}}\n\n {{ _.role('assistant') }}\n Category ID:\n \"#\n}\n\ntest FnTestAliasedEnumOutput {\n functions [FnTestAliasedEnumOutput]\n args {\n input \"mehhhhh\"\n }\n}", diff --git a/integ-tests/python/report.html b/integ-tests/python/report.html index f99e905d2..26baf843b 100644 --- a/integ-tests/python/report.html +++ b/integ-tests/python/report.html @@ -3,11 +3,11 @@
Test Report

Summary

94
3 failed 91 passed

Tests

tests/test_functions.py 388 0:03:14.480245

PASSED test_env_vars_reset 0:00:03.537187

Setup

Call

Captured stdout call
Context depth is greater than 0!
+    
Test Report

Summary

94
2 failed 92 passed

Tests

tests/test_functions.py 289 0:03:00.139964

PASSED test_env_vars_reset 0:00:01.650679

Setup

Call

Captured stdout call
Context depth is greater than 0!
 Except but ending trace!
 Context depth is greater than 0!
-
Captured stderr call
[2024-11-04T16:53:21Z WARN  baml_events] Function ExtractPeople:
-    Client: GPT4 (<unknown>) - 202ms
+
Captured stderr call
[2024-11-05T19:54:24Z WARN  baml_events] Function ExtractPeople:
+    Client: GPT4 (<unknown>) - 192ms
     ---PROMPT---
     [chat] system: You are an expert extraction algorithm. Only extract relevant information from the text. If you do not know the value of an attribute asked to extract, return null for the attribute's value.
     
@@ -32,8 +32,8 @@
         }
     }
     
-[2024-11-04T16:53:24Z INFO  baml_events] Function ExtractPeople:
-    Client: GPT4 (gpt-4o-2024-08-06) - 2938ms. StopReason: stop. Tokens(in/out): 124/22
+[2024-11-05T19:54:25Z INFO  baml_events] Function ExtractPeople:
+    Client: GPT4 (gpt-4o-2024-08-06) - 1066ms. StopReason: stop. Tokens(in/out): 124/22
     ---PROMPT---
     [chat] system: You are an expert extraction algorithm. Only extract relevant information from the text. If you do not know the value of an attribute asked to extract, return null for the attribute's value.
     
@@ -60,11 +60,11 @@
         "hair_color": "BLACK"
       }
     ]
-

Teardown

PASSED test_sync 0:00:00.472642

Setup

Call

Captured stdout call
got response key
+

Teardown

PASSED test_sync 0:00:00.408052

Setup

Call

Captured stdout call
got response key
 true
 52
-
Captured stderr call
[2024-11-04T16:53:24Z INFO  baml_events] Function TestFnNamedArgsSingleClass:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 468ms. StopReason: stop. Tokens(in/out): 19/5
+
Captured stderr call
[2024-11-05T19:54:26Z INFO  baml_events] Function TestFnNamedArgsSingleClass:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 406ms. StopReason: stop. Tokens(in/out): 19/5
     ---PROMPT---
     [chat] system: Print these values back to me:
     key
@@ -77,8 +77,8 @@
     52
     ---Parsed Response (string)---
     "key\ntrue\n52"
-

Teardown

PASSED TestAllInputs::test_single_bool 0:00:00.477585

Setup

Call

Captured stderr call
[2024-11-04T16:53:25Z INFO  baml_events] Function TestFnNamedArgsSingleBool:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 469ms. StopReason: stop. Tokens(in/out): 15/1
+

Teardown

PASSED TestAllInputs::test_single_bool 0:00:00.370021

Setup

Call

Captured stderr call
[2024-11-05T19:54:26Z INFO  baml_events] Function TestFnNamedArgsSingleBool:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 368ms. StopReason: stop. Tokens(in/out): 15/1
     ---PROMPT---
     [chat] system: Return this value back to me: true
     
@@ -86,8 +86,8 @@
     true
     ---Parsed Response (string)---
     "true"
-

Teardown

PASSED TestAllInputs::test_single_string_list 0:00:00.467412

Setup

Call

Captured stderr call
[2024-11-04T16:53:25Z INFO  baml_events] Function TestFnNamedArgsSingleStringList:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 457ms. StopReason: stop. Tokens(in/out): 23/9
+

Teardown

PASSED TestAllInputs::test_single_string_list 0:00:00.706536

Setup

Call

Captured stderr call
[2024-11-05T19:54:27Z INFO  baml_events] Function TestFnNamedArgsSingleStringList:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 703ms. StopReason: stop. Tokens(in/out): 23/9
     ---PROMPT---
     [chat] system: Return this value back to me: ["a", "b", "c"]
     
@@ -95,74 +95,19 @@
     ["a", "b", "c"]
     ---Parsed Response (string)---
     "[\"a\", \"b\", \"c\"]"
-

Teardown

FAILED TestAllInputs::test_return_literal_union 0:00:00.428025

baml_py.internal_monkeypatch.BamlValidationError: BamlValidationError(message=Failed to parse LLM response: Failed to coerce value: : Failed to find any (1 | true | "string output") in 3 items
-  - : Expected 1, got Object([("status", Number(Number(1)))]).
-  - : Expected true, got Object([("status", Number(Number(1)))]).
-  - : Expected "string output", got Object([("status", Number(Number(1)))])., raw_output={
-    "status": 1
-}, prompt=[chat] system: Return one of these values: 
-Answer in JSON using any of these schemas:
-1 or true or "string output"
-)

Setup

Call

self = 
-
->   ???
-
-tests/test_functions.py:108: 
-_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
-
-self = , input = 'a'
-baml_options = {}
-
-    async def LiteralUnionsTest(
-        self,
-        input: str,
-        baml_options: BamlCallOptions = {},
-    ) -> Union[Literal[1], Literal[True], Literal["string output"]]:
-      __tb__ = baml_options.get("tb", None)
-      if __tb__ is not None:
-        tb = __tb__._tb
-      else:
-        tb = None
-      __cr__ = baml_options.get("client_registry", None)
-    
-      raw = await self.__runtime.call_function(
-        "LiteralUnionsTest",
-        {
-          "input": input,
-        },
-        self.__ctx_manager.get(),
-        tb,
-        __cr__,
-      )
->     return cast(Union[Literal[1], Literal[True], Literal["string output"]], raw.cast_to(types, types))
-E     baml_py.internal_monkeypatch.BamlValidationError: BamlValidationError(message=Failed to parse LLM response: Failed to coerce value: : Failed to find any (1 | true | "string output") in 3 items
-E       - : Expected 1, got Object([("status", Number(Number(1)))]).
-E       - : Expected true, got Object([("status", Number(Number(1)))]).
-E       - : Expected "string output", got Object([("status", Number(Number(1)))])., raw_output={
-E         "status": 1
-E     }, prompt=[chat] system: Return one of these values: 
-E     Answer in JSON using any of these schemas:
-E     1 or true or "string output"
-E     )
-
-baml_client/async_client.py:1270: BamlValidationError
Captured stderr call
[2024-11-04T16:53:26Z WARN  baml_events] Function LiteralUnionsTest:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 422ms. StopReason: stop. Tokens(in/out): 31/9
+

Teardown

PASSED TestAllInputs::test_return_literal_union 0:00:00.345808

Setup

Call

Captured stderr call
[2024-11-05T19:54:27Z INFO  baml_events] Function LiteralUnionsTest:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 343ms. StopReason: stop. Tokens(in/out): 31/1
     ---PROMPT---
     [chat] system: Return one of these values: 
     Answer in JSON using any of these schemas:
     1 or true or "string output"
     
     ---LLM REPLY---
-    {
-        "status": 1
-    }
-    ---Parsed Response (Error)---
-    Failed to coerce value: <root>: Failed to find any (1 | true | "string output") in 3 items
-      - <root>: Expected 1, got Object([("status", Number(Number(1)))]).
-      - <root>: Expected true, got Object([("status", Number(Number(1)))]).
-      - <root>: Expected "string output", got Object([("status", Number(Number(1)))]).
-

Teardown

PASSED TestAllInputs::test_constraints 0:00:00.959370

Setup

Call

Captured stderr call
[2024-11-04T16:53:27Z INFO  baml_events] Function PredictAge:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 948ms. StopReason: stop. Tokens(in/out): 116/36
+    1
+    ---Parsed Response (int)---
+    1
+

Teardown

PASSED TestAllInputs::test_constraints 0:00:00.781047

Setup

Call

Captured stderr call
[2024-11-05T19:54:28Z INFO  baml_events] Function PredictAge:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 774ms. StopReason: stop. Tokens(in/out): 116/36
     ---PROMPT---
     [chat] system: Using your understanding of the historical popularity
     of names, predict the age of a person with the name
@@ -185,7 +130,7 @@
     ---LLM REPLY---
     {
       "planetary_age": {
-        "age": 61
+        "age": 41
       },
       "certainty": 90,
       "species": "Homo sapiens"
@@ -194,7 +139,7 @@
     {
       "planetary_age": {
         "age": {
-          "value": 61,
+          "value": 41,
           "checks": {
             "young_enough": {
               "name": "young_enough",
@@ -217,6 +162,11 @@
       "species": {
         "value": "Homo sapiens",
         "checks": {
+          "trivial": {
+            "name": "trivial",
+            "expression": "this == \"Homo sapiens\"",
+            "status": "succeeded"
+          },
           "regex_bad": {
             "name": "regex_bad",
             "expression": "this|regex_match(\"neanderthalensis\")",
@@ -226,17 +176,12 @@
             "name": "regex_good",
             "expression": "this|regex_match(\"Homo\")",
             "status": "succeeded"
-          },
-          "trivial": {
-            "name": "trivial",
-            "expression": "this == \"Homo sapiens\"",
-            "status": "succeeded"
           }
         }
       }
     }
-

Teardown

PASSED TestAllInputs::test_constraint_union_variant_checking 0:00:00.891976

Setup

Call

Captured stderr call
[2024-11-04T16:53:28Z INFO  baml_events] Function ExtractContactInfo:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 871ms. StopReason: stop. Tokens(in/out): 98/39
+

Teardown

PASSED TestAllInputs::test_constraint_union_variant_checking 0:00:00.718112

Setup

Call

Captured stderr call
[2024-11-05T19:54:29Z INFO  baml_events] Function ExtractContactInfo:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 705ms. StopReason: stop. Tokens(in/out): 98/39
     ---PROMPT---
     [chat] system: Extract a primary contact info, and if possible a secondary contact
     info, from this document:
@@ -275,8 +220,8 @@
         "value": "111-222-3333"
       }
     }
-

Teardown

PASSED TestAllInputs::test_return_malformed_constraint 0:00:00.676887

Setup

Call

Captured stderr call
[2024-11-04T16:53:28Z WARN  baml_events] Function ReturnMalformedConstraints:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 665ms. StopReason: stop. Tokens(in/out): 28/9
+

Teardown

PASSED TestAllInputs::test_return_malformed_constraint 0:00:00.499815

Setup

Call

Captured stderr call
[2024-11-05T19:54:29Z WARN  baml_events] Function ReturnMalformedConstraints:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 495ms. StopReason: stop. Tokens(in/out): 28/9
     ---PROMPT---
     [chat] system: Return the integer after 1
     
@@ -293,10 +238,10 @@
     Failed to coerce value: <root>: Failed while parsing required fields: missing=0, unparsed=1
       - <root>: Failed to parse field foo: foo: Failed to evaluate constraints: unknown method: object has no method named length (in <string>:1)
         - foo: Failed to evaluate constraints: unknown method: object has no method named length (in <string>:1)
-

Teardown

PASSED TestAllInputs::test_use_malformed_constraint 0:00:00.001900

Setup

Call

Captured stderr call
[2024-11-04T16:53:28Z ERROR baml_runtime::tracing]   Error: a: Failed to evaluate assert: Error evaluating constraint: unknown method: object has no method named length (in <string>:1)
+

Teardown

PASSED TestAllInputs::test_use_malformed_constraint 0:00:00.002574

Setup

Call

Captured stderr call
[2024-11-05T19:54:29Z ERROR baml_runtime::tracing]   Error: a: Failed to evaluate assert: Error evaluating constraint: unknown method: object has no method named length (in <string>:1)
     
-

Teardown

PASSED TestAllInputs::test_single_class 0:00:00.512190

Setup

Call

Captured stderr call
[2024-11-04T16:53:29Z INFO  baml_events] Function TestFnNamedArgsSingleClass:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 509ms. StopReason: stop. Tokens(in/out): 19/5
+

Teardown

PASSED TestAllInputs::test_single_class 0:00:00.519443

Setup

Call

Captured stderr call
[2024-11-05T19:54:30Z INFO  baml_events] Function TestFnNamedArgsSingleClass:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 516ms. StopReason: stop. Tokens(in/out): 19/5
     ---PROMPT---
     [chat] system: Print these values back to me:
     key
@@ -309,8 +254,8 @@
     52
     ---Parsed Response (string)---
     "key\ntrue\n52"
-

Teardown

PASSED TestAllInputs::test_multiple_args 0:00:00.626104

Setup

Call

Captured stderr call
[2024-11-04T16:53:29Z INFO  baml_events] Function TestMulticlassNamedArgs:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 619ms. StopReason: stop. Tokens(in/out): 25/11
+

Teardown

PASSED TestAllInputs::test_multiple_args 0:00:00.527998

Setup

Call

Captured stderr call
[2024-11-05T19:54:30Z INFO  baml_events] Function TestMulticlassNamedArgs:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 522ms. StopReason: stop. Tokens(in/out): 25/11
     ---PROMPT---
     [chat] system: Print these values back to me:
     key
@@ -329,8 +274,8 @@
     64
     ---Parsed Response (string)---
     "key\ntrue\n52\nkey\ntrue\n64"
-

Teardown

PASSED TestAllInputs::test_single_enum_list 0:00:00.502776

Setup

Call

Captured stderr call
[2024-11-04T16:53:30Z INFO  baml_events] Function TestFnNamedArgsSingleEnumList:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 494ms. StopReason: stop. Tokens(in/out): 18/4
+

Teardown

PASSED TestAllInputs::test_single_enum_list 0:00:00.426740

Setup

Call

Captured stderr call
[2024-11-05T19:54:31Z INFO  baml_events] Function TestFnNamedArgsSingleEnumList:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 422ms. StopReason: stop. Tokens(in/out): 18/4
     ---PROMPT---
     [chat] system: Print these values back to me:
     ["TWO"]
@@ -339,8 +284,8 @@
     ["TWO"]
     ---Parsed Response (string)---
     "[\"TWO\"]"
-

Teardown

PASSED TestAllInputs::test_single_float 0:00:00.445423

Setup

Call

Captured stderr call
[2024-11-04T16:53:30Z INFO  baml_events] Function TestFnNamedArgsSingleFloat:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 438ms. StopReason: stop. Tokens(in/out): 18/3
+

Teardown

PASSED TestAllInputs::test_single_float 0:00:00.480304

Setup

Call

Captured stderr call
[2024-11-05T19:54:31Z INFO  baml_events] Function TestFnNamedArgsSingleFloat:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 477ms. StopReason: stop. Tokens(in/out): 18/3
     ---PROMPT---
     [chat] system: Return this value back to me: 3.12
     
@@ -348,8 +293,8 @@
     3.12
     ---Parsed Response (string)---
     "3.12"
-

Teardown

PASSED TestAllInputs::test_single_int 0:00:00.523071

Setup

Call

Captured stderr call
[2024-11-04T16:53:31Z INFO  baml_events] Function TestFnNamedArgsSingleInt:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 516ms. StopReason: stop. Tokens(in/out): 17/2
+

Teardown

PASSED TestAllInputs::test_single_int 0:00:00.662206

Setup

Call

Captured stderr call
[2024-11-05T19:54:32Z INFO  baml_events] Function TestFnNamedArgsSingleInt:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 659ms. StopReason: stop. Tokens(in/out): 17/2
     ---PROMPT---
     [chat] system: Return this value back to me: 3566
     
@@ -357,8 +302,8 @@
     3566
     ---Parsed Response (string)---
     "3566"
-

Teardown

PASSED TestAllInputs::test_single_literal_int 0:00:00.322169

Setup

Call

Captured stderr call
[2024-11-04T16:53:31Z INFO  baml_events] Function TestNamedArgsLiteralInt:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 315ms. StopReason: stop. Tokens(in/out): 16/1
+

Teardown

PASSED TestAllInputs::test_single_literal_int 0:00:00.402970

Setup

Call

Captured stderr call
[2024-11-05T19:54:32Z INFO  baml_events] Function TestNamedArgsLiteralInt:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 399ms. StopReason: stop. Tokens(in/out): 16/1
     ---PROMPT---
     [chat] system: Return this value back to me: 1
     
@@ -366,8 +311,8 @@
     1
     ---Parsed Response (string)---
     "1"
-

Teardown

PASSED TestAllInputs::test_single_literal_bool 0:00:00.328030

Setup

Call

Captured stderr call
[2024-11-04T16:53:32Z INFO  baml_events] Function TestNamedArgsLiteralBool:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 322ms. StopReason: stop. Tokens(in/out): 15/1
+

Teardown

PASSED TestAllInputs::test_single_literal_bool 0:00:00.644420

Setup

Call

Captured stderr call
[2024-11-05T19:54:33Z INFO  baml_events] Function TestNamedArgsLiteralBool:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 640ms. StopReason: stop. Tokens(in/out): 15/1
     ---PROMPT---
     [chat] system: Return this value back to me: true
     
@@ -375,8 +320,8 @@
     true
     ---Parsed Response (string)---
     "true"
-

Teardown

PASSED TestAllInputs::test_single_literal_string 0:00:00.467905

Setup

Call

Captured stderr call
[2024-11-04T16:53:32Z INFO  baml_events] Function TestNamedArgsLiteralString:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 459ms. StopReason: stop. Tokens(in/out): 16/2
+

Teardown

PASSED TestAllInputs::test_single_literal_string 0:00:00.521603

Setup

Call

Captured stderr call
[2024-11-05T19:54:33Z INFO  baml_events] Function TestNamedArgsLiteralString:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 519ms. StopReason: stop. Tokens(in/out): 16/2
     ---PROMPT---
     [chat] system: Return this value back to me: My String
     
@@ -384,8 +329,8 @@
     My String
     ---Parsed Response (string)---
     "My String"
-

Teardown

PASSED TestAllInputs::test_class_with_literal_prop 0:00:00.848156

Setup

Call

Captured stderr call
[2024-11-04T16:53:33Z INFO  baml_events] Function FnLiteralClassInputOutput:
-    Client: GPT4 (gpt-4o-2024-08-06) - 839ms. StopReason: stop. Tokens(in/out): 30/9
+

Teardown

PASSED TestAllInputs::test_class_with_literal_prop 0:00:01.109324

Setup

Call

Captured stderr call
[2024-11-05T19:54:35Z INFO  baml_events] Function FnLiteralClassInputOutput:
+    Client: GPT4 (gpt-4o-2024-08-06) - 1105ms. StopReason: stop. Tokens(in/out): 30/13
     ---PROMPT---
     [chat] system: Return the same object you were given.
     Answer in JSON using this schema:
@@ -394,25 +339,17 @@
     }
     
     ---LLM REPLY---
+    ```json
     {
       "prop": "hello"
     }
+    ```
     ---Parsed Response (class LiteralClassHello)---
     {
       "prop": "hello"
     }
-

Teardown

FAILED TestAllInputs::test_literal_classs_with_literal_union_prop 0:00:00.699651

AssertionError: assert False
- +  where False = isinstance(LiteralClassTwo(prop='two'), LiteralClassOne)

Setup

Call

self = 
-
-    @pytest.mark.asyncio
-    async def test_literal_classs_with_literal_union_prop(self):
-        res = await b.FnLiteralUnionClassInputOutput(input=LiteralClassOne(prop="one"))
->       assert isinstance(res, LiteralClassOne)
-E       AssertionError: assert False
-E        +  where False = isinstance(LiteralClassTwo(prop='two'), LiteralClassOne)
-
-tests/test_functions.py:205: AssertionError
Captured stderr call
[2024-11-04T16:53:34Z INFO  baml_events] Function FnLiteralUnionClassInputOutput:
-    Client: GPT4 (gpt-4o-2024-08-06) - 690ms. StopReason: stop. Tokens(in/out): 41/13
+

Teardown

PASSED TestAllInputs::test_literal_classs_with_literal_union_prop 0:00:00.628783

Setup

Call

Captured stderr call
[2024-11-05T19:54:35Z INFO  baml_events] Function FnLiteralUnionClassInputOutput:
+    Client: GPT4 (gpt-4o-2024-08-06) - 624ms. StopReason: stop. Tokens(in/out): 41/13
     ---PROMPT---
     [chat] system: Return the same object you were given.
     Answer in JSON using any of these schemas:
@@ -425,27 +362,27 @@
     ---LLM REPLY---
     ```json
     {
-      "prop": "two"
+      "prop": "one"
     }
     ```
-    ---Parsed Response (class LiteralClassTwo)---
+    ---Parsed Response (class LiteralClassOne)---
     {
-      "prop": "two"
+      "prop": "one"
     }
-

Teardown

PASSED TestAllInputs::test_single_map_string_to_string 0:00:00.593131

Setup

Call

Captured stderr call
[2024-11-04T16:53:34Z INFO  baml_events] Function TestFnNamedArgsSingleMapStringToString:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 588ms. StopReason: stop. Tokens(in/out): 29/15
+

Teardown

PASSED TestAllInputs::test_single_map_string_to_string 0:00:00.559410

Setup

Call

Captured stderr call
[2024-11-05T19:54:36Z INFO  baml_events] Function TestFnNamedArgsSingleMapStringToString:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 554ms. StopReason: stop. Tokens(in/out): 29/15
     ---PROMPT---
-    [chat] system: Return this value back to me: {"lorem": "ipsum", "dolor": "sit"}
+    [chat] system: Return this value back to me: {"dolor": "sit", "lorem": "ipsum"}
     
     ---LLM REPLY---
-    {"lorem": "ipsum", "dolor": "sit"}
+    {"dolor": "sit", "lorem": "ipsum"}
     ---Parsed Response (map<string, string>)---
     {
-      "lorem": "ipsum",
-      "dolor": "sit"
+      "dolor": "sit",
+      "lorem": "ipsum"
     }
-

Teardown

PASSED TestAllInputs::test_single_map_string_to_class 0:00:00.617705

Setup

Call

Captured stderr call
[2024-11-04T16:53:35Z INFO  baml_events] Function TestFnNamedArgsSingleMapStringToClass:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 608ms. StopReason: stop. Tokens(in/out): 28/18
+

Teardown

PASSED TestAllInputs::test_single_map_string_to_class 0:00:00.609054

Setup

Call

Captured stderr call
[2024-11-05T19:54:36Z INFO  baml_events] Function TestFnNamedArgsSingleMapStringToClass:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 602ms. StopReason: stop. Tokens(in/out): 28/18
     ---PROMPT---
     [chat] system: Return this value back to me: {"lorem": {
         "word": "ipsum",
@@ -463,8 +400,8 @@
         "word": "ipsum"
       }
     }
-

Teardown

PASSED TestAllInputs::test_single_map_string_to_map 0:00:00.550053

Setup

Call

Captured stderr call
[2024-11-04T16:53:35Z INFO  baml_events] Function TestFnNamedArgsSingleMapStringToMap:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 543ms. StopReason: stop. Tokens(in/out): 25/11
+

Teardown

PASSED TestAllInputs::test_single_map_string_to_map 0:00:00.579827

Setup

Call

Captured stderr call
[2024-11-05T19:54:37Z INFO  baml_events] Function TestFnNamedArgsSingleMapStringToMap:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 572ms. StopReason: stop. Tokens(in/out): 25/11
     ---PROMPT---
     [chat] system: Return this value back to me: {"lorem": {"word": "ipsum"}}
     
@@ -476,8 +413,13 @@
         "word": "ipsum"
       }
     }
-

Teardown

PASSED test_should_work_for_all_outputs 0:00:05.367930

Setup

Call

Captured stderr call
[2024-11-04T16:53:36Z INFO  baml_events] Function FnOutputBool:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 436ms. StopReason: stop. Tokens(in/out): 16/1
+

Teardown

FAILED test_should_work_for_all_outputs 0:00:02.672510

assert 0 > 0
+ +  where 0 = len([])

Setup

Call

>   ???
+E   assert 0 > 0
+E    +  where 0 = len([])
+
+tests/test_functions.py:260: AssertionError
Captured stderr call
[2024-11-05T19:54:37Z INFO  baml_events] Function FnOutputBool:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 416ms. StopReason: stop. Tokens(in/out): 16/1
     ---PROMPT---
     [chat] system: Return a true: Answer as a: bool
     
@@ -485,8 +427,8 @@
     True
     ---Parsed Response (bool)---
     true
-[2024-11-04T16:53:36Z INFO  baml_events] Function FnOutputInt:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 333ms. StopReason: stop. Tokens(in/out): 17/1
+[2024-11-05T19:54:38Z INFO  baml_events] Function FnOutputInt:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 488ms. StopReason: stop. Tokens(in/out): 17/1
     ---PROMPT---
     [chat] system: Return the integer 5 with no additional context.
     
@@ -494,18 +436,18 @@
     5
     ---Parsed Response (int)---
     5
-[2024-11-04T16:53:37Z INFO  baml_events] Function FnOutputLiteralInt:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 304ms. StopReason: stop. Tokens(in/out): 18/1
+[2024-11-05T19:54:38Z INFO  baml_events] Function FnOutputLiteralInt:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 427ms. StopReason: stop. Tokens(in/out): 18/6
     ---PROMPT---
     [chat] system: Return an integer: Answer using this specific value:
     5
     
     ---LLM REPLY---
-    5
+    The answer is 5.
     ---Parsed Response (int)---
     5
-[2024-11-04T16:53:37Z INFO  baml_events] Function FnOutputLiteralBool:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 331ms. StopReason: stop. Tokens(in/out): 18/1
+[2024-11-05T19:54:39Z INFO  baml_events] Function FnOutputLiteralBool:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 399ms. StopReason: stop. Tokens(in/out): 18/1
     ---PROMPT---
     [chat] system: Return a false: Answer using this specific value:
     false
@@ -514,18 +456,18 @@
     false
     ---Parsed Response (bool)---
     false
-[2024-11-04T16:53:37Z INFO  baml_events] Function FnOutputLiteralString:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 471ms. StopReason: stop. Tokens(in/out): 21/2
+[2024-11-05T19:54:39Z INFO  baml_events] Function FnOutputLiteralString:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 405ms. StopReason: stop. Tokens(in/out): 21/4
     ---PROMPT---
     [chat] system: Return a string: Answer using this specific value:
     "example output"
     
     ---LLM REPLY---
-    example output
+    "example output"
     ---Parsed Response (string)---
     "example output"
-[2024-11-04T16:53:38Z INFO  baml_events] Function FnOutputClassList:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 969ms. StopReason: stop. Tokens(in/out): 46/44
+[2024-11-05T19:54:40Z INFO  baml_events] Function FnOutputClassList:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 495ms. StopReason: stop. Tokens(in/out): 46/1
     ---PROMPT---
     [chat] system: Return a JSON array that follows this schema: 
     Answer with a JSON Array using this schema:
@@ -539,178 +481,67 @@
     JSON:
     
     ---LLM REPLY---
-    [
-      {
-        "prop1": "example1",
-        "prop2": 25
-      },
-      {
-        "prop1": "example2",
-        "prop2": 50
-      }
-    ]
-    ---Parsed Response (list<class TestOutputClass>)---
-    [
-      {
-        "prop1": "example1",
-        "prop2": 25
-      },
-      {
-        "prop1": "example2",
-        "prop2": 50
-      }
-    ]
-[2024-11-04T16:53:39Z INFO  baml_events] Function FnOutputClassWithEnum:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 639ms. StopReason: stop. Tokens(in/out): 48/21
-    ---PROMPT---
-    [chat] system: Return a made up json blob that matches this schema:
-    Answer in JSON using this schema:
-    {
-      prop1: string,
-      prop2: 'ONE' or 'TWO',
-    }
-    ---
-    
-    JSON:
-    
-    ---LLM REPLY---
-    {
-      "prop1": "Hello, world!",
-      "prop2": "TWO"
-    }
-    ---Parsed Response (class TestClassWithEnum)---
-    {
-      "prop1": "Hello, world!",
-      "prop2": "TWO"
-    }
-[2024-11-04T16:53:40Z INFO  baml_events] Function FnOutputClass:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 581ms. StopReason: stop. Tokens(in/out): 50/20
-    ---PROMPT---
-    [chat] system: Return a JSON blob with this schema: 
-    Answer in JSON using this schema:
-    {
-      prop1: string,
-      prop2: int,
-    }
-    
-    For the prop2, always return a 540
-    
-    JSON:
-    
-    ---LLM REPLY---
-    {
-      "prop1": "Hello, world!",
-      "prop2": 540
-    }
-    ---Parsed Response (class TestOutputClass)---
-    {
-      "prop1": "Hello, world!",
-      "prop2": 540
-    }
-[2024-11-04T16:53:40Z INFO  baml_events] Function FnEnumListOutput:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 552ms. StopReason: stop. Tokens(in/out): 51/12
-    ---PROMPT---
-    [chat] system: Print out two of these values randomly selected from the list below in a json array.
-    
-    Answer with a JSON Array using this schema:
-    [
-      'ONE' or 'TWO' or 'THREE'
-    ]
-    
-    Answer:
-    
-    ---LLM REPLY---
-    [
-      "TWO",
-      "THREE"
-    ]
-    ---Parsed Response (list<enum EnumOutput>)---
-    [
-      "TWO",
-      "THREE"
-    ]
-[2024-11-04T16:53:41Z INFO  baml_events] Function FnEnumOutput:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 685ms. StopReason: stop. Tokens(in/out): 42/24
-    ---PROMPT---
-    [chat] system: Choose one of these values randomly. Before you give the answer, write out an unrelated haiku about the ocean.
-    
-    VALUE_ENUM
-    ----
-    - ONE
-    - TWO
-    - THREE
-    
-    ---LLM REPLY---
-    A blue vast expanse
-    Whispers of waves in the wind
-    Ocean's mystery
+    []
     
-    VALUE_ENUM
-    ----
-    TWO
-    ---Parsed Response (enum EnumOutput)---
-    "TWO"
-

Teardown

PASSED test_should_work_with_image_url 0:00:01.184329

Setup

Call

Captured stderr call
[2024-11-04T16:53:42Z INFO  baml_events] Function TestImageInput:
-    Client: GPT4o (gpt-4o-2024-08-06) - 1177ms. StopReason: stop. Tokens(in/out): 275/6
+    ---Parsed Response (list<?>)---
+    []
+

Teardown

PASSED test_should_work_with_image_url 0:00:01.382067

Setup

Call

Captured stderr call
[2024-11-05T19:54:41Z INFO  baml_events] Function TestImageInput:
+    Client: GPT4o (gpt-4o-2024-08-06) - 1377ms. StopReason: stop. Tokens(in/out): 275/8
     ---PROMPT---
     [chat] user: Describe this in 4 words. One word must be the color<image_placeholder: https://upload.wikimedia.org/wikipedia/en/4/4d/Shrek_%28character%29.png>
     
     ---LLM REPLY---
-    Green ogre standing smiling.
+    Green, animated, smiling ogre.
     ---Parsed Response (string)---
-    "Green ogre standing smiling."
-

Teardown

PASSED test_should_work_with_image_list 0:00:03.228315

Setup

Call

Captured stderr call
[2024-11-04T16:53:45Z INFO  baml_events] Function TestImageListInput:
-    Client: GPT4o (gpt-4o-2024-08-06) - 3221ms. StopReason: stop. Tokens(in/out): 528/10
+    "Green, animated, smiling ogre."
+

Teardown

PASSED test_should_work_with_image_list 0:00:01.547280

Setup

Call

Captured stderr call
[2024-11-05T19:54:43Z INFO  baml_events] Function TestImageListInput:
+    Client: GPT4o (gpt-4o-2024-08-06) - 1540ms. StopReason: stop. Tokens(in/out): 528/7
     ---PROMPT---
     [chat] user: What colors do these have in common? [<image_placeholder: https://upload.wikimedia.org/wikipedia/en/4/4d/Shrek_%28character%29.png>,<image_placeholder: https://www.google.com/images/branding/googlelogo/2x/googlelogo_color_92x30dp.png>]
     
     ---LLM REPLY---
-    Both images have some shades of green in common.
+    Both images feature the color green.
     ---Parsed Response (string)---
-    "Both images have some shades of green in common."
-

Teardown

PASSED test_should_work_with_vertex 0:00:09.396058

Setup

Call

Captured stderr call
[2024-11-04T16:53:55Z INFO  baml_events] Function TestVertex:
-    Client: Vertex () - 9386ms. StopReason: "STOP". Tokens(in/out): 8/460
+    "Both images feature the color green."
+

Teardown

PASSED test_should_work_with_vertex 0:00:10.080527

Setup

Call

Captured stderr call
[2024-11-05T19:54:53Z INFO  baml_events] Function TestVertex:
+    Client: Vertex () - 10071ms. StopReason: "STOP". Tokens(in/out): 8/471
     ---PROMPT---
     [chat] user: Write a nice short story about donkey kong
     
     ---LLM REPLY---
-    The morning mist clung to the jungle leaves like sleepy primates. Donkey Kong, however, wasn't sleepy. He was famished. His stomach rumbled louder than a coconut avalanche. 
-    
-    He sniffed the air, his large nostrils twitching. Bananas. They were close. He followed the scent, his powerful knuckles thudding against the jungle floor. 
+    The jungle symphony was in full swing, a cacophony of chirps, croaks, and the rustling of leaves. Donkey Kong, however, paid it no mind. He sat on his favorite cliff, the setting sun painting the sky in fiery hues, a melancholy air about him. He missed Diddy.
     
-    He emerged into a clearing to find a sight that made his fur stand on end. A mountain of bananas, the likes of which he’d never seen, stood before him. They were piled high, glistening like golden treasure in the morning light.
+    The little monkey had been gone for days, on a quest to find the legendary Golden Banana, said to grant eternal joy. Donkey Kong had scoffed at the legend, content with his life of bananas and naps. But Diddy, always the adventurer, was determined.
     
-    And perched atop this glorious peak, munching on a particularly plump banana, sat Cranky Kong. 
+    A familiar chirp broke Donkey Kong from his thoughts. A small, blue bird landed on his knee, a tiny scroll tied to its leg. Donkey Kong carefully unfurled it. It was Diddy's scrawl: "Found it! Trapped. Need help! Hurry!" followed by a crude map.
     
-    Donkey Kong’s heart sank. Cranky was a legend, the grumpiest, most possessive Kong in the jungle. Approaching him, especially when he was surrounded by bananas, was like wrestling a tiger for its stripes.
+    Donkey Kong's heart pounded. Diddy was in trouble! He scooped up the bird, who affectionately nuzzled his finger. "Lead the way, little friend," he rumbled, his voice thick with worry.
     
-    But Donkey Kong’s stomach had other plans. It growled again, this time sounding suspiciously like Cranky’s name. 
+    He followed the bird through tangled undergrowth and across rushing rivers. The map led to a dormant volcano, its entrance sealed by fallen rocks. Donkey Kong felt a tremor of fear. He had to get to Diddy.
     
-    Cranky Kong’s one good eye snapped open. “Did someone say ‘banana’?” he rasped, his voice like gravel on a drum.
+    With a roar that shook the trees, he began to move the boulders, his immense strength making light work of the obstacle. The bird chirped encouragingly. Finally, Donkey Kong cleared a path and rushed inside.
     
-    Donkey Kong froze. He considered retreating, vanishing back into the foliage. Then, he remembered a trick his father had taught him. 
+    He found Diddy hanging precariously from a vine over a bubbling lava pit, the Golden Banana glinting nearby. Donkey Kong, with a mighty leap and a swing of his long arms, snatched Diddy from danger. 
     
-    He plucked a nearby flower, its petals a vibrant red, and held it out to Cranky. “For you, elder,” he mumbled, hoping Cranky wouldn't notice his rumbling belly.
+    Safe in Donkey Kong's arms, Diddy beamed, "I knew you'd come!"
     
-    Cranky squinted at him, then at the flower. A low, rumbling sound emerged from him, but it wasn’t a growl. It was a chuckle. 
+    Donkey Kong grunted, a smile tugging at his lips.  They retrieved the Golden Banana, its glow warming their fur. 
     
-    “Well, well,” Cranky croaked, accepting the flower with surprising gentleness. “It seems someone remembers their manners.” He gestured with a gnarled finger towards the banana mountain. “Help yourself, lad. But leave some for an old Kong, eh?”
-    
-    And so, Donkey Kong feasted, sharing the mountain of bananas with the grumpiest Kong in the jungle. He learned that day that even the toughest shells can crack with a little bit of kindness and a lot of bananas. 
+    As they made their way out, Donkey Kong realized the legend was true.  It wasn't the golden fruit that brought joy, but the love and loyalty shared between a grumpy old gorilla and his adventurous little buddy.  And that was a treasure more precious than any gold. 
     
     ---Parsed Response (string)---
-    "The morning mist clung to the jungle leaves like sleepy primates. Donkey Kong, however, wasn't sleepy. He was famished. His stomach rumbled louder than a coconut avalanche. \n\nHe sniffed the air, his large nostrils twitching. Bananas. They were close. He followed the scent, his powerful knuckles thudding against the jungle floor. \n\nHe emerged into a clearing to find a sight that made his fur stand on end. A mountain of bananas, the likes of which he’d never seen, stood before him. They were piled high, glistening like golden treasure in the morning light.\n\nAnd perched atop this glorious peak, munching on a particularly plump banana, sat Cranky Kong. \n\nDonkey Kong’s heart sank. Cranky was a legend, the grumpiest, most possessive Kong in the jungle. Approaching him, especially when he was surrounded by bananas, was like wrestling a tiger for its stripes.\n\nBut Donkey Kong’s stomach had other plans. It growled again, this time sounding suspiciously like Cranky’s name. \n\nCranky Kong’s one good eye snapped open. “Did someone say ‘banana’?” he rasped, his voice like gravel on a drum.\n\nDonkey Kong froze. He considered retreating, vanishing back into the foliage. Then, he remembered a trick his father had taught him. \n\nHe plucked a nearby flower, its petals a vibrant red, and held it out to Cranky. “For you, elder,” he mumbled, hoping Cranky wouldn't notice his rumbling belly.\n\nCranky squinted at him, then at the flower. A low, rumbling sound emerged from him, but it wasn’t a growl. It was a chuckle. \n\n“Well, well,” Cranky croaked, accepting the flower with surprising gentleness. “It seems someone remembers their manners.” He gestured with a gnarled finger towards the banana mountain. “Help yourself, lad. But leave some for an old Kong, eh?”\n\nAnd so, Donkey Kong feasted, sharing the mountain of bananas with the grumpiest Kong in the jungle. He learned that day that even the toughest shells can crack with a little bit of kindness and a lot of bananas. \n"
-

Teardown

PASSED test_should_work_with_image_base64 0:00:01.013122

Setup

Call

Captured stderr call
[2024-11-04T16:53:56Z INFO  baml_events] Function TestImageInput:
-    Client: GPT4o (gpt-4o-2024-08-06) - 985ms. StopReason: stop. Tokens(in/out): 275/6
+    "The jungle symphony was in full swing, a cacophony of chirps, croaks, and the rustling of leaves. Donkey Kong, however, paid it no mind. He sat on his favorite cliff, the setting sun painting the sky in fiery hues, a melancholy air about him. He missed Diddy.\n\nThe little monkey had been gone for days, on a quest to find the legendary Golden Banana, said to grant eternal joy. Donkey Kong had scoffed at the legend, content with his life of bananas and naps. But Diddy, always the adventurer, was determined.\n\nA familiar chirp broke Donkey Kong from his thoughts. A small, blue bird landed on his knee, a tiny scroll tied to its leg. Donkey Kong carefully unfurled it. It was Diddy's scrawl: \"Found it! Trapped. Need help! Hurry!\" followed by a crude map.\n\nDonkey Kong's heart pounded. Diddy was in trouble! He scooped up the bird, who affectionately nuzzled his finger. \"Lead the way, little friend,\" he rumbled, his voice thick with worry.\n\nHe followed the bird through tangled undergrowth and across rushing rivers. The map led to a dormant volcano, its entrance sealed by fallen rocks. Donkey Kong felt a tremor of fear. He had to get to Diddy.\n\nWith a roar that shook the trees, he began to move the boulders, his immense strength making light work of the obstacle. The bird chirped encouragingly. Finally, Donkey Kong cleared a path and rushed inside.\n\nHe found Diddy hanging precariously from a vine over a bubbling lava pit, the Golden Banana glinting nearby. Donkey Kong, with a mighty leap and a swing of his long arms, snatched Diddy from danger. \n\nSafe in Donkey Kong's arms, Diddy beamed, \"I knew you'd come!\"\n\nDonkey Kong grunted, a smile tugging at his lips.  They retrieved the Golden Banana, its glow warming their fur. \n\nAs they made their way out, Donkey Kong realized the legend was true.  It wasn't the golden fruit that brought joy, but the love and loyalty shared between a grumpy old gorilla and his adventurous little buddy.  And that was a treasure more precious than any gold. \n"
+

Teardown

PASSED test_should_work_with_image_base64 0:00:01.465171

Setup

Call

Captured stderr call
[2024-11-05T19:54:54Z INFO  baml_events] Function TestImageInput:
+    Client: GPT4o (gpt-4o-2024-08-06) - 1437ms. StopReason: stop. Tokens(in/out): 275/7
     ---PROMPT---
     [chat] user: Describe this in 4 words. One word must be the color<image_placeholder base64>
     
     ---LLM REPLY---
-    Green animated ogre smiling.
+    Green ogre, brown vest.
     ---Parsed Response (string)---
-    "Green animated ogre smiling."
-

Teardown

PASSED test_should_work_with_audio_base64 0:00:01.042699

Setup

Call

Captured stderr call
[2024-11-04T16:53:57Z INFO  baml_events] Function AudioInput:
-    Client: Gemini () - 1028ms. StopReason: "STOP". Tokens(in/out): 114/1
+    "Green ogre, brown vest."
+

Teardown

PASSED test_should_work_with_audio_base64 0:00:01.087347

Setup

Call

Captured stderr call
[2024-11-05T19:54:55Z INFO  baml_events] Function AudioInput:
+    Client: Gemini () - 1073ms. StopReason: "STOP". Tokens(in/out): 114/1
     ---PROMPT---
     [chat] user: Does this sound like a roar? Yes or no? One word no other characters.<audio_placeholder base64>
     
@@ -719,8 +550,8 @@
     
     ---Parsed Response (string)---
     "Yes \n"
-

Teardown

PASSED test_should_work_with_audio_url 0:00:01.113802

Setup

Call

Captured stderr call
[2024-11-04T16:53:58Z INFO  baml_events] Function AudioInput:
-    Client: Gemini () - 977ms. StopReason: "STOP". Tokens(in/out): 178/1
+

Teardown

PASSED test_should_work_with_audio_url 0:00:01.200185

Setup

Call

Captured stderr call
[2024-11-05T19:54:56Z INFO  baml_events] Function AudioInput:
+    Client: Gemini () - 1078ms. StopReason: "STOP". Tokens(in/out): 178/1
     ---PROMPT---
     [chat] user: Does this sound like a roar? Yes or no? One word no other characters.<audio_placeholder base64>
     
@@ -729,10 +560,10 @@
     
     ---Parsed Response (string)---
     "No \n"
-

Teardown

PASSED test_works_with_retries2 0:00:02.227588

Setup

Call

Captured stdout call
Expected error LLM call failed: LLMErrorResponse { client: "RetryClientExponential", model: None, prompt: Chat([RenderedChatMessage { role: "system", allow_duplicate_role: false, parts: [Text("Say a haiku")] }]), request_options: {"model": String("gpt-3.5-turbo")}, start_time: SystemTime { tv_sec: 1730739240, tv_nsec: 306947000 }, latency: 192.71975ms, message: "Request failed: {\n    \"error\": {\n        \"message\": \"Incorrect API key provided: blahh. You can find your API key at https://platform.openai.com/account/api-keys.\",\n        \"type\": \"invalid_request_error\",\n        \"param\": null,\n        \"code\": \"invalid_api_key\"\n    }\n}\n", code: InvalidAuthentication }
-
Captured stderr call
[2024-11-04T16:54:00Z WARN  baml_events] Function TestRetryExponential:
+

Teardown

PASSED test_works_with_retries2 0:00:02.380336

Setup

Call

Captured stdout call
Expected error LLM call failed: LLMErrorResponse { client: "RetryClientExponential", model: None, prompt: Chat([RenderedChatMessage { role: "system", allow_duplicate_role: false, parts: [Text("Say a haiku")] }]), request_options: {"model": String("gpt-3.5-turbo")}, start_time: SystemTime { tv_sec: 1730836499, tv_nsec: 22550000 }, latency: 240.442542ms, message: "Request failed: {\n    \"error\": {\n        \"message\": \"Incorrect API key provided: blahh. You can find your API key at https://platform.openai.com/account/api-keys.\",\n        \"type\": \"invalid_request_error\",\n        \"param\": null,\n        \"code\": \"invalid_api_key\"\n    }\n}\n", code: InvalidAuthentication }
+
Captured stderr call
[2024-11-05T19:54:59Z WARN  baml_events] Function TestRetryExponential:
     (3 other previous tries)
-    Client: RetryClientExponential (<unknown>) - 192ms
+    Client: RetryClientExponential (<unknown>) - 240ms
     ---PROMPT---
     [chat] system: Say a haiku
     
@@ -748,21 +579,21 @@
         }
     }
     
-

Teardown

PASSED test_works_with_fallbacks 0:00:02.143794

Setup

Call

Captured stderr call
[2024-11-04T16:54:02Z INFO  baml_events] Function TestFallbackClient:
+

Teardown

PASSED test_works_with_fallbacks 0:00:02.041390

Setup

Call

Captured stderr call
[2024-11-05T19:55:01Z INFO  baml_events] Function TestFallbackClient:
     (5 other previous tries)
-    Client: GPT35 (gpt-3.5-turbo-0125) - 597ms. StopReason: stop. Tokens(in/out): 14/14
+    Client: GPT35 (gpt-3.5-turbo-0125) - 639ms. StopReason: stop. Tokens(in/out): 14/16
     ---PROMPT---
     [chat] system: Say a haiku about mexico.
     
     ---LLM REPLY---
-    Colorful maracas
-    Mariachi serenades
-    Mexico's beauty
+    Vibrant colors shine
+    Mariachi music calls
+    Mexico's heart beats true
     ---Parsed Response (string)---
-    "Colorful maracas\nMariachi serenades\nMexico's beauty"
-

Teardown

PASSED test_works_with_failing_azure_fallback 0:00:00.001270

Setup

Call

Captured stderr call
[2024-11-04T16:54:02Z ERROR baml_runtime::tracing] Either base_url or (resource_name, deployment_id) must be provided
-

Teardown

PASSED test_claude 0:00:01.088193

Setup

Call

Captured stderr call
[2024-11-04T16:54:03Z INFO  baml_events] Function PromptTestClaude:
-    Client: Sonnet (claude-3-5-sonnet-20241022) - 1084ms. StopReason: "end_turn". Tokens(in/out): 19/42
+    "Vibrant colors shine\nMariachi music calls\nMexico's heart beats true"
+

Teardown

PASSED test_works_with_failing_azure_fallback 0:00:00.003004

Setup

Call

Captured stderr call
[2024-11-05T19:55:01Z ERROR baml_runtime::tracing] Either base_url or (resource_name, deployment_id) must be provided
+

Teardown

PASSED test_claude 0:00:01.141294

Setup

Call

Captured stderr call
[2024-11-05T19:55:02Z INFO  baml_events] Function PromptTestClaude:
+    Client: Sonnet (claude-3-5-sonnet-20241022) - 1135ms. StopReason: "end_turn". Tokens(in/out): 19/44
     ---PROMPT---
     [chat] user: Tell me a haiku about Mt Rainier is tall
     
@@ -770,346 +601,341 @@
     Here's a haiku about Mt. Rainier:
     
     Rainier stands proud, high
-    Fourteen thousand feet skyward
-    Snow-crowned mountain king
+    Piercing through clouds to the sky
+    Nature's crown of snow
     ---Parsed Response (string)---
-    "Here's a haiku about Mt. Rainier:\n\nRainier stands proud, high\nFourteen thousand feet skyward\nSnow-crowned mountain king"
-

Teardown

PASSED test_gemini 0:00:07.604349

Setup

Call

Captured stdout call
LLM output from Gemini: The diner was quiet, save for the rhythmic sizzle of the grill and the steady click of the old clock on the wall. A lone figure sat at the counter, hunched over a steaming mug. He was a peculiar man, with a kind face creased with age and eyes that sparkled with a mischievous glint. This was Dr. Pepper, though few knew his name.
+    "Here's a haiku about Mt. Rainier:\n\nRainier stands proud, high\nPiercing through clouds to the sky\nNature's crown of snow"
+

Teardown

PASSED test_gemini 0:00:07.400576

Setup

Call

Captured stdout call
LLM output from Gemini: Dr. Pepper wasn't a doctor at all, not in the traditional sense. He didn't wear a white coat or have a stethoscope slung around his neck. No, Dr. Pepper was a feeling, an effervescent sensation that lived inside a glass bottle.
 
-He wasn't a real doctor, not anymore. Once, he had been a renowned chemist, a master of flavors and concoctions. He had poured his heart and soul into creating a drink unlike any other, a symphony of 23 flavors that danced on the tongue. It was his masterpiece, his legacy. He named it after himself, a whimsical touch in an otherwise serious world.
+He spent his days on supermarket shelves, nestled between his bubbly brethren, dreaming of adventure. He yearned to escape the monotony of cardboard boxes and fluorescent lights, to quench a thirst that went beyond the physical.
 
-But fame was a fickle mistress, and soon the world forgot the man behind the drink. His creation, however, lived on, bottled and shipped to every corner of the globe.  It no longer belonged to him, not really. Yet, he took solace in the smiles it brought to faces, the small moments of joy it ignited.
+One Tuesday, a hand reached for him. It belonged to a girl with sunshine hair and eyes full of stories. Dr. Pepper felt a flutter of hope. This wasn't just any hand; this was the hand of an adventurer.
 
-He lifted the mug to his lips, the familiar aroma filling his senses. Each sip was a journey back in time, a reminder of his younger, ambitious self. It was bittersweet, this legacy of his. A legacy he could only observe from the sidelines, a silent observer to his own creation's success.
+The girl, whose name was Lily, took him on all sorts of escapades. He accompanied her to the park, where he soaked in the laughter of children and the warmth of the sun. He was there during her triumphs, like when she finally finished that challenging puzzle, and during her quiet moments of contemplation, sharing the silence as she stared at the night sky. 
 
-The bell above the diner door jingled as a young couple entered, their faces flushed from the cold. The girl, her eyes bright with excitement, ordered two Dr. Peppers. The doctor watched as they clinked glasses, their laughter echoing in the quiet diner.
+Lily treated Dr. Pepper differently than she did the other drinks. She savored each sip, her face lighting up with delight at the unique blend of flavors. It wasn't just his 23 flavors she appreciated; it was the feeling he evoked – a sense of joyful possibility and a reminder that even the smallest moments could sparkle.
 
-A small smile played on his lips. Forgotten, perhaps, but never truly gone. As long as people craved adventure in a bottle, his spirit would live on, one delicious sip at a time. 
+As the last drop was consumed, Dr. Pepper felt a pang of sadness. He knew his time with Lily was ending, but he also felt a sense of satisfaction. He had lived up to his name, not as a medical doctor, but as a doctor of delight, a dispenser of joy. He knew that somewhere out there, another adventure, another Lily, awaited. And he couldn't wait. 
 
-
Captured stderr call
[2024-11-04T16:54:11Z INFO  baml_events] Function TestGemini:
-    Client: Gemini () - 7595ms. StopReason: "STOP". Tokens(in/out): 10/388
+
Captured stderr call
[2024-11-05T19:55:09Z INFO  baml_events] Function TestGemini:
+    Client: Gemini () - 7392ms. StopReason: "STOP". Tokens(in/out): 10/375
     ---PROMPT---
     [chat] user: Write a nice short story about Dr. Pepper
     
     ---LLM REPLY---
-    The diner was quiet, save for the rhythmic sizzle of the grill and the steady click of the old clock on the wall. A lone figure sat at the counter, hunched over a steaming mug. He was a peculiar man, with a kind face creased with age and eyes that sparkled with a mischievous glint. This was Dr. Pepper, though few knew his name.
+    Dr. Pepper wasn't a doctor at all, not in the traditional sense. He didn't wear a white coat or have a stethoscope slung around his neck. No, Dr. Pepper was a feeling, an effervescent sensation that lived inside a glass bottle.
     
-    He wasn't a real doctor, not anymore. Once, he had been a renowned chemist, a master of flavors and concoctions. He had poured his heart and soul into creating a drink unlike any other, a symphony of 23 flavors that danced on the tongue. It was his masterpiece, his legacy. He named it after himself, a whimsical touch in an otherwise serious world.
+    He spent his days on supermarket shelves, nestled between his bubbly brethren, dreaming of adventure. He yearned to escape the monotony of cardboard boxes and fluorescent lights, to quench a thirst that went beyond the physical.
     
-    But fame was a fickle mistress, and soon the world forgot the man behind the drink. His creation, however, lived on, bottled and shipped to every corner of the globe.  It no longer belonged to him, not really. Yet, he took solace in the smiles it brought to faces, the small moments of joy it ignited.
+    One Tuesday, a hand reached for him. It belonged to a girl with sunshine hair and eyes full of stories. Dr. Pepper felt a flutter of hope. This wasn't just any hand; this was the hand of an adventurer.
     
-    He lifted the mug to his lips, the familiar aroma filling his senses. Each sip was a journey back in time, a reminder of his younger, ambitious self. It was bittersweet, this legacy of his. A legacy he could only observe from the sidelines, a silent observer to his own creation's success.
+    The girl, whose name was Lily, took him on all sorts of escapades. He accompanied her to the park, where he soaked in the laughter of children and the warmth of the sun. He was there during her triumphs, like when she finally finished that challenging puzzle, and during her quiet moments of contemplation, sharing the silence as she stared at the night sky. 
     
-    The bell above the diner door jingled as a young couple entered, their faces flushed from the cold. The girl, her eyes bright with excitement, ordered two Dr. Peppers. The doctor watched as they clinked glasses, their laughter echoing in the quiet diner.
+    Lily treated Dr. Pepper differently than she did the other drinks. She savored each sip, her face lighting up with delight at the unique blend of flavors. It wasn't just his 23 flavors she appreciated; it was the feeling he evoked – a sense of joyful possibility and a reminder that even the smallest moments could sparkle.
     
-    A small smile played on his lips. Forgotten, perhaps, but never truly gone. As long as people craved adventure in a bottle, his spirit would live on, one delicious sip at a time. 
+    As the last drop was consumed, Dr. Pepper felt a pang of sadness. He knew his time with Lily was ending, but he also felt a sense of satisfaction. He had lived up to his name, not as a medical doctor, but as a doctor of delight, a dispenser of joy. He knew that somewhere out there, another adventure, another Lily, awaited. And he couldn't wait. 
     
     ---Parsed Response (string)---
-    "The diner was quiet, save for the rhythmic sizzle of the grill and the steady click of the old clock on the wall. A lone figure sat at the counter, hunched over a steaming mug. He was a peculiar man, with a kind face creased with age and eyes that sparkled with a mischievous glint. This was Dr. Pepper, though few knew his name.\n\nHe wasn't a real doctor, not anymore. Once, he had been a renowned chemist, a master of flavors and concoctions. He had poured his heart and soul into creating a drink unlike any other, a symphony of 23 flavors that danced on the tongue. It was his masterpiece, his legacy. He named it after himself, a whimsical touch in an otherwise serious world.\n\nBut fame was a fickle mistress, and soon the world forgot the man behind the drink. His creation, however, lived on, bottled and shipped to every corner of the globe.  It no longer belonged to him, not really. Yet, he took solace in the smiles it brought to faces, the small moments of joy it ignited.\n\nHe lifted the mug to his lips, the familiar aroma filling his senses. Each sip was a journey back in time, a reminder of his younger, ambitious self. It was bittersweet, this legacy of his. A legacy he could only observe from the sidelines, a silent observer to his own creation's success.\n\nThe bell above the diner door jingled as a young couple entered, their faces flushed from the cold. The girl, her eyes bright with excitement, ordered two Dr. Peppers. The doctor watched as they clinked glasses, their laughter echoing in the quiet diner.\n\nA small smile played on his lips. Forgotten, perhaps, but never truly gone. As long as people craved adventure in a bottle, his spirit would live on, one delicious sip at a time. \n"
-

Teardown

PASSED test_gemini_streaming 0:00:09.095219

Setup

Call

Captured stdout call
LLM output from Gemini: Dr. Pepper wasn't a real doctor, though sometimes, late at night, when the city lights painted the bodega window in a neon glow, he felt like one. He wasn't stitching wounds, but mending spirits. At least, that's what he told himself as he rang up another pack of gummy bears and a lottery ticket, the usual order for Mrs. Garcia on a Tuesday.
+    "Dr. Pepper wasn't a doctor at all, not in the traditional sense. He didn't wear a white coat or have a stethoscope slung around his neck. No, Dr. Pepper was a feeling, an effervescent sensation that lived inside a glass bottle.\n\nHe spent his days on supermarket shelves, nestled between his bubbly brethren, dreaming of adventure. He yearned to escape the monotony of cardboard boxes and fluorescent lights, to quench a thirst that went beyond the physical.\n\nOne Tuesday, a hand reached for him. It belonged to a girl with sunshine hair and eyes full of stories. Dr. Pepper felt a flutter of hope. This wasn't just any hand; this was the hand of an adventurer.\n\nThe girl, whose name was Lily, took him on all sorts of escapades. He accompanied her to the park, where he soaked in the laughter of children and the warmth of the sun. He was there during her triumphs, like when she finally finished that challenging puzzle, and during her quiet moments of contemplation, sharing the silence as she stared at the night sky. \n\nLily treated Dr. Pepper differently than she did the other drinks. She savored each sip, her face lighting up with delight at the unique blend of flavors. It wasn't just his 23 flavors she appreciated; it was the feeling he evoked – a sense of joyful possibility and a reminder that even the smallest moments could sparkle.\n\nAs the last drop was consumed, Dr. Pepper felt a pang of sadness. He knew his time with Lily was ending, but he also felt a sense of satisfaction. He had lived up to his name, not as a medical doctor, but as a doctor of delight, a dispenser of joy. He knew that somewhere out there, another adventure, another Lily, awaited. And he couldn't wait. \n"
+

Teardown

PASSED test_gemini_streaming 0:00:07.756724

Setup

Call

Captured stdout call
LLM output from Gemini: Dr. Pepper wasn’t a doctor at all. Not the kind who wore a white coat and peered in ears, anyway. No, Dr. Pepper was a feeling. A tingle that started at the back of your throat, like a fizzy secret, and spread out in a wave of warm, comforting nostalgia. 
+
+Old Man Higgins, proprietor of Higgins’ Haberdashery and Purveyor of Peculiar Potions, knew this better than anyone. He watched folks cross his threshold, weary from the day, shadows clinging to their shoulders. He'd offer a smile and a wink, "What'll it be today? Tonic for the soul?"
 
-His real name was Daniel, but the nickname stuck ever since he started working at the corner store.  It started with the kids, their sticky fingers pointing at the soda fridge, yelling "Dr. Pepper, please!" He suspected their parents put them up to it, a little joke to brighten their day. But soon, it was everyone. Old Man Henderson with his crossword puzzle and lukewarm coffee, Sarah the florist with her bright smile and wilting carnations, even Mr. Kim, the usually stoic owner, would sometimes slip and call him "Doc."
+Inevitably, their gaze would fall on the old, oak barrel tucked away in the corner, the one with the faded, red lettering that whispered, "Dr. Pepper." And just like that, the shadows would lift, replaced by a flicker of something brighter. Memory, maybe. Or hope.
 
-Dr. Pepper. It had a ring to it, a whimsicality that belied the everyday ordinariness of his life. And maybe, just maybe, it gave him a little push to be more than just a guy behind the counter. He started remembering everyone's usual orders, asking about their day, offering a kind word or a sympathetic ear when needed. The bodega, once just a job, became a stage for these small acts of human connection.
+One day, a young girl named Lily shuffled in, clutching a worn teddy bear.  "Do you have something for a grumpy heart, mister?" she mumbled, lip trembling.
 
-One rainy afternoon, a young girl, no older than eight, stood at the counter, tears streaming down her face. She'd lost the five dollar bill her mother gave her for milk. Dr. Pepper knelt before her, his heart aching for her distress.  "Tell you what," he said, pulling out a can of Dr. Pepper from under the counter, "This one's on the house. Consider it a prescription for the blues." 
+Old Man Higgins chuckled, his own heart aching for her unspoken sadness. "Why, I believe I do," he said, pulling out a frosty mug. 
 
-The girl's eyes widened, then a smile broke through her tears. She hugged the can to her chest, the familiar logo a beacon of comfort.  As she skipped out, rain boots splashing in puddles, Dr. Pepper knew.  Maybe, just maybe, he wasn't a real doctor, but he dispensed something just as important: kindness, a listening ear, and the occasional can of fizzy, caramel-colored hope. 
+He filled it with the elixir from the whispering barrel, the rich, caramel scent filling the air. Lily took a sip, her eyes widening. The fizzy secret did its work. A smile bloomed on her face, chasing away the gloom. 
 
-
Captured stderr call
[2024-11-04T16:54:20Z INFO  baml_events] Function TestGemini:
-    Client: Gemini (gemini-1.5-pro-001) - 9075ms. StopReason: Stop. Tokens(in/out): unknown/unknown
+And in that moment, Old Man Higgins knew Dr. Pepper wasn't just a feeling. It was a reminder. A reminder that sometimes, all it takes to chase away the shadows is a little bit of magic, disguised as a drink, in an old oak barrel, whispering stories of joy. 
+
+
Captured stderr call
[2024-11-05T19:55:17Z INFO  baml_events] Function TestGemini:
+    Client: Gemini (gemini-1.5-pro-001) - 7741ms. StopReason: Stop. Tokens(in/out): unknown/unknown
     ---PROMPT---
     [chat] user: Write a nice short story about Dr. Pepper
     
     ---LLM REPLY---
-    Dr. Pepper wasn't a real doctor, though sometimes, late at night, when the city lights painted the bodega window in a neon glow, he felt like one. He wasn't stitching wounds, but mending spirits. At least, that's what he told himself as he rang up another pack of gummy bears and a lottery ticket, the usual order for Mrs. Garcia on a Tuesday.
+    Dr. Pepper wasn’t a doctor at all. Not the kind who wore a white coat and peered in ears, anyway. No, Dr. Pepper was a feeling. A tingle that started at the back of your throat, like a fizzy secret, and spread out in a wave of warm, comforting nostalgia. 
+    
+    Old Man Higgins, proprietor of Higgins’ Haberdashery and Purveyor of Peculiar Potions, knew this better than anyone. He watched folks cross his threshold, weary from the day, shadows clinging to their shoulders. He'd offer a smile and a wink, "What'll it be today? Tonic for the soul?"
     
-    His real name was Daniel, but the nickname stuck ever since he started working at the corner store.  It started with the kids, their sticky fingers pointing at the soda fridge, yelling "Dr. Pepper, please!" He suspected their parents put them up to it, a little joke to brighten their day. But soon, it was everyone. Old Man Henderson with his crossword puzzle and lukewarm coffee, Sarah the florist with her bright smile and wilting carnations, even Mr. Kim, the usually stoic owner, would sometimes slip and call him "Doc."
+    Inevitably, their gaze would fall on the old, oak barrel tucked away in the corner, the one with the faded, red lettering that whispered, "Dr. Pepper." And just like that, the shadows would lift, replaced by a flicker of something brighter. Memory, maybe. Or hope.
     
-    Dr. Pepper. It had a ring to it, a whimsicality that belied the everyday ordinariness of his life. And maybe, just maybe, it gave him a little push to be more than just a guy behind the counter. He started remembering everyone's usual orders, asking about their day, offering a kind word or a sympathetic ear when needed. The bodega, once just a job, became a stage for these small acts of human connection.
+    One day, a young girl named Lily shuffled in, clutching a worn teddy bear.  "Do you have something for a grumpy heart, mister?" she mumbled, lip trembling.
     
-    One rainy afternoon, a young girl, no older than eight, stood at the counter, tears streaming down her face. She'd lost the five dollar bill her mother gave her for milk. Dr. Pepper knelt before her, his heart aching for her distress.  "Tell you what," he said, pulling out a can of Dr. Pepper from under the counter, "This one's on the house. Consider it a prescription for the blues." 
+    Old Man Higgins chuckled, his own heart aching for her unspoken sadness. "Why, I believe I do," he said, pulling out a frosty mug. 
     
-    The girl's eyes widened, then a smile broke through her tears. She hugged the can to her chest, the familiar logo a beacon of comfort.  As she skipped out, rain boots splashing in puddles, Dr. Pepper knew.  Maybe, just maybe, he wasn't a real doctor, but he dispensed something just as important: kindness, a listening ear, and the occasional can of fizzy, caramel-colored hope. 
+    He filled it with the elixir from the whispering barrel, the rich, caramel scent filling the air. Lily took a sip, her eyes widening. The fizzy secret did its work. A smile bloomed on her face, chasing away the gloom. 
+    
+    And in that moment, Old Man Higgins knew Dr. Pepper wasn't just a feeling. It was a reminder. A reminder that sometimes, all it takes to chase away the shadows is a little bit of magic, disguised as a drink, in an old oak barrel, whispering stories of joy. 
     
     ---Parsed Response (string)---
-    "Dr. Pepper wasn't a real doctor, though sometimes, late at night, when the city lights painted the bodega window in a neon glow, he felt like one. He wasn't stitching wounds, but mending spirits. At least, that's what he told himself as he rang up another pack of gummy bears and a lottery ticket, the usual order for Mrs. Garcia on a Tuesday.\n\nHis real name was Daniel, but the nickname stuck ever since he started working at the corner store.  It started with the kids, their sticky fingers pointing at the soda fridge, yelling \"Dr. Pepper, please!\" He suspected their parents put them up to it, a little joke to brighten their day. But soon, it was everyone. Old Man Henderson with his crossword puzzle and lukewarm coffee, Sarah the florist with her bright smile and wilting carnations, even Mr. Kim, the usually stoic owner, would sometimes slip and call him \"Doc.\"\n\nDr. Pepper. It had a ring to it, a whimsicality that belied the everyday ordinariness of his life. And maybe, just maybe, it gave him a little push to be more than just a guy behind the counter. He started remembering everyone's usual orders, asking about their day, offering a kind word or a sympathetic ear when needed. The bodega, once just a job, became a stage for these small acts of human connection.\n\nOne rainy afternoon, a young girl, no older than eight, stood at the counter, tears streaming down her face. She'd lost the five dollar bill her mother gave her for milk. Dr. Pepper knelt before her, his heart aching for her distress.  \"Tell you what,\" he said, pulling out a can of Dr. Pepper from under the counter, \"This one's on the house. Consider it a prescription for the blues.\" \n\nThe girl's eyes widened, then a smile broke through her tears. She hugged the can to her chest, the familiar logo a beacon of comfort.  As she skipped out, rain boots splashing in puddles, Dr. Pepper knew.  Maybe, just maybe, he wasn't a real doctor, but he dispensed something just as important: kindness, a listening ear, and the occasional can of fizzy, caramel-colored hope. \n"
-

Teardown

PASSED test_aws 0:00:02.412382

Setup

Call

Captured stderr call
[2024-11-04T16:54:20Z WARN  aws_runtime::env_config::normalize] section [Connection 1] ignored; config must be in the AWS config file rather than the credentials file
-[2024-11-04T16:54:20Z INFO  aws_config::meta::region] load_region; provider=EnvironmentVariableRegionProvider { env: Env(Real) }
-[2024-11-04T16:54:22Z INFO  baml_events] Function TestAws:
-    Client: AwsBedrock (anthropic.claude-3-haiku-20240307-v1:0) - 2231ms. StopReason: max_tokens. Tokens(in/out): 19/100
+    "Dr. Pepper wasn’t a doctor at all. Not the kind who wore a white coat and peered in ears, anyway. No, Dr. Pepper was a feeling. A tingle that started at the back of your throat, like a fizzy secret, and spread out in a wave of warm, comforting nostalgia. \n\nOld Man Higgins, proprietor of Higgins’ Haberdashery and Purveyor of Peculiar Potions, knew this better than anyone. He watched folks cross his threshold, weary from the day, shadows clinging to their shoulders. He'd offer a smile and a wink, \"What'll it be today? Tonic for the soul?\"\n\nInevitably, their gaze would fall on the old, oak barrel tucked away in the corner, the one with the faded, red lettering that whispered, \"Dr. Pepper.\" And just like that, the shadows would lift, replaced by a flicker of something brighter. Memory, maybe. Or hope.\n\nOne day, a young girl named Lily shuffled in, clutching a worn teddy bear.  \"Do you have something for a grumpy heart, mister?\" she mumbled, lip trembling.\n\nOld Man Higgins chuckled, his own heart aching for her unspoken sadness. \"Why, I believe I do,\" he said, pulling out a frosty mug. \n\nHe filled it with the elixir from the whispering barrel, the rich, caramel scent filling the air. Lily took a sip, her eyes widening. The fizzy secret did its work. A smile bloomed on her face, chasing away the gloom. \n\nAnd in that moment, Old Man Higgins knew Dr. Pepper wasn't just a feeling. It was a reminder. A reminder that sometimes, all it takes to chase away the shadows is a little bit of magic, disguised as a drink, in an old oak barrel, whispering stories of joy. \n"
+

Teardown

PASSED test_aws 0:00:02.618416

Setup

Call

Captured stderr call
[2024-11-05T19:55:17Z WARN  aws_runtime::env_config::normalize] section [Connection 1] ignored; config must be in the AWS config file rather than the credentials file
+[2024-11-05T19:55:17Z INFO  aws_config::meta::region] load_region; provider=EnvironmentVariableRegionProvider { env: Env(Real) }
+[2024-11-05T19:55:20Z INFO  baml_events] Function TestAws:
+    Client: AwsBedrock (anthropic.claude-3-haiku-20240307-v1:0) - 2452ms. StopReason: max_tokens. Tokens(in/out): 19/100
     ---PROMPT---
     [chat] user: Write a nice short story about Mt Rainier is tall
     
     ---LLM REPLY---
-    Here is a short story about the majestic Mount Rainier:
+    Here is a short story about Mt. Rainier:
     
-    Towering over the Pacific Northwest, Mount Rainier stands as a colossal sentinel, its snow-capped peak piercing the azure sky. At an imposing elevation of 14,411 feet, this majestic volcanic mountain commands awe and reverence from all who gaze upon it.
+    Mt. Rainier Stands Tall
     
-    For the residents of the surrounding region, Mount Rainier is a constant and reassuring
+    The majestic peak of Mount Rainier stood tall, reaching towards the sky. Its snow-capped summit glistened in the sunlight, a testament to the sheer scale and grandeur of this iconic Pacific Northwest landmark.
+    
+    From a distance, the mountain appeared almost impossibly large, dwarfing the surrounding landscape with its impressive stature. Ne
     ---Parsed Response (string)---
-    "Here is a short story about the majestic Mount Rainier:\n\nTowering over the Pacific Northwest, Mount Rainier stands as a colossal sentinel, its snow-capped peak piercing the azure sky. At an imposing elevation of 14,411 feet, this majestic volcanic mountain commands awe and reverence from all who gaze upon it.\n\nFor the residents of the surrounding region, Mount Rainier is a constant and reassuring"
-

Teardown

PASSED test_openai_shorthand 0:00:15.723216

Setup

Call

Captured stderr call
[2024-11-04T16:54:38Z INFO  baml_events] Function TestOpenAIShorthand:
-    Client: openai/gpt-4o-mini (gpt-4o-mini-2024-07-18) - 15715ms. StopReason: stop. Tokens(in/out): 18/603
+    "Here is a short story about Mt. Rainier:\n\nMt. Rainier Stands Tall\n\nThe majestic peak of Mount Rainier stood tall, reaching towards the sky. Its snow-capped summit glistened in the sunlight, a testament to the sheer scale and grandeur of this iconic Pacific Northwest landmark.\n\nFrom a distance, the mountain appeared almost impossibly large, dwarfing the surrounding landscape with its impressive stature. Ne"
+

Teardown

PASSED test_openai_shorthand 0:00:09.476301

Setup

Call

Captured stderr call
[2024-11-05T19:55:29Z INFO  baml_events] Function TestOpenAIShorthand:
+    Client: openai/gpt-4o-mini (gpt-4o-mini-2024-07-18) - 9470ms. StopReason: stop. Tokens(in/out): 18/577
     ---PROMPT---
     [chat] system: Write a nice short story about Mt Rainier is tall
     
     ---LLM REPLY---
-    Once upon a time in the heart of the Pacific Northwest, there stood a majestic giant named Mount Rainier. Towering over the sprawling valleys and lush forests, Rainier was not just the highest peak in the Cascades; it was a sentinel of time and nature, watching over the land with a gentle yet imposing presence.
+    Once upon a time, in the heart of Washington State, there stood a giant known as Mount Rainier. Towering above the landscape at an impressive 14,411 feet, Rainier wasn’t just tall; it was a sentinel, watching over the lush forests and vibrant meadows that sprawled below.
+    
+    In a small town at the base of the mountain lived a curious girl named Lily. Every day after school, she would rush to the park where she could see the majestic peak shimmering in the sunlight. To her, it was more than just a mountain; it was a mystery waiting to be unraveled.
     
-    In a small town at the base of the mountain lived a curious boy named Eli. He was fascinated by the stories his grandmother told him about Rainier. She would sit by the fire, her voice filled with wonder, recounting how the mountain was a sleeping guardian, covered in a glistening white blanket of snow. “She protects the lands and waters, and on clear days, her beauty takes your breath away,” she would say, pointing toward the distant peak.
+    One bright Saturday morning, inspired by her dreams of adventure, Lily decided it was time to explore the trails that wound their way up Rainier’s slopes. Equipped with a backpack filled with snacks, a notebook, and a pencil, she set off on her journey.
     
-    Eli’s heart swelled with excitement every time he gazed up at the summit, especially on bright, sunny days when the sunlight kissed the snow, making it sparkle like a thousand diamonds. He yearned to see all that Mount Rainier had to offer, so one summer morning, armed with nothing but a backpack filled with sandwiches, a sketchbook, and a spirit full of adventure, he set off to hike the trails that crisscrossed the mountain.
+    As she walked, the trees whispered secrets and the wind sang gentle melodies. Lily marveled at the wildflowers that adorned her path, bursting with colors she had only seen in her art books—vivid blues, radiant yellows, and soft pinks danced in the sunlight. Each step brought her closer to the towering giant, and her excitement grew with every moment.
     
-    With each step, Eli felt the pulse of the earth beneath his feet. He wandered through fields ablaze with wildflowers and listened to the whisper of the wind through the towering pines. As he climbed higher, the air turned crisp, and he could see panoramic views of valleys stretching into infinity. The sight of the glaciers shimmering under the sun took his breath away, and it was at that moment Eli understood why the mountain was revered by so many.
+    After hours of hiking, she reached a clearing where she could see the mountain in all its glory. Clouds swirled around the summit like a fluffy crown, and the sunlight made the glacial ice sparkle like diamonds. Overwhelmed by the beauty, Lily sat down on a large rock and took out her notebook.
     
-    Eventually, after a long and glorious day of exploration, he reached a viewpoint overlooking a vast expanse of wilderness. The sun began to dip below the horizon, casting hues of orange and purple across the sky. Eli sat down, his heart brimming with joy as he took out his sketchbook to capture the moment. Every line he drew was filled with the feeling of awe that Rainier’s towering presence inspired within him.
+    “I want to capture this moment,” she thought, and began sketching the scene before her. As she drew, she felt a connection to the mountain, as if it was sharing its stories with her. It had stood there for thousands of years, witnessing the seasons change, the stars twinkle, and the world transform around it. 
     
-    As he sketched, Eli couldn’t help but think about how much he had learned from his journey. Mount Rainier wasn’t just tall; it was a symbol of resilience, permanence, and beauty. It stood through decades, weathering storms and sunshine alike, reminding everyone who gazed upon it that even the tallest of mountains started as mere stones beneath the earth.
+    Just then, a squirrel scurried by, stopping to peek at her with curious eyes. With a gentle laugh, Lily reached into her backpack for a snack and tossed a bit of granola. The squirrel chattered happily and scampered off, leaving Lily feeling even more enchanted by her surroundings.
     
-    In the twilight, with the first stars appearing, Eli knew he had forged a special bond with the mountain. He murmured a promise to return, to keep exploring its secrets, to share its beauty with others, and to remember the stories of his grandmother. As the silhouette of Mount Rainier stood silhouetted against the night sky, he made his way down the trail, heart full and spirit soaring, inspired by the abiding grandeur of the tall, timeless giant that would always watch over him.
+    As the sun began to dip below the horizon, painting the sky in hues of orange and pink, Lily knew it was time to head back. With her heart full of wonder and her notebook brimming with memories, she whispered a promise to the mountain. “I’ll come back to learn more about you, to explore your secrets.”
+    
+    With one last look at the majestic peak, she began her descent, already dreaming of the adventures that awaited her in the future. For in her heart, she knew that no matter how high Mt. Rainier stood, its true height was measured not in feet but in the wonder and inspiration it bestowed upon those who dared to climb.
     ---Parsed Response (string)---
-    "Once upon a time in the heart of the Pacific Northwest, there stood a majestic giant named Mount Rainier. Towering over the sprawling valleys and lush forests, Rainier was not just the highest peak in the Cascades; it was a sentinel of time and nature, watching over the land with a gentle yet imposing presence.\n\nIn a small town at the base of the mountain lived a curious boy named Eli. He was fascinated by the stories his grandmother told him about Rainier. She would sit by the fire, her voice filled with wonder, recounting how the mountain was a sleeping guardian, covered in a glistening white blanket of snow. “She protects the lands and waters, and on clear days, her beauty takes your breath away,” she would say, pointing toward the distant peak.\n\nEli’s heart swelled with excitement every time he gazed up at the summit, especially on bright, sunny days when the sunlight kissed the snow, making it sparkle like a thousand diamonds. He yearned to see all that Mount Rainier had to offer, so one summer morning, armed with nothing but a backpack filled with sandwiches, a sketchbook, and a spirit full of adventure, he set off to hike the trails that crisscrossed the mountain.\n\nWith each step, Eli felt the pulse of the earth beneath his feet. He wandered through fields ablaze with wildflowers and listened to the whisper of the wind through the towering pines. As he climbed higher, the air turned crisp, and he could see panoramic views of valleys stretching into infinity. The sight of the glaciers shimmering under the sun took his breath away, and it was at that moment Eli understood why the mountain was revered by so many.\n\nEventually, after a long and glorious day of exploration, he reached a viewpoint overlooking a vast expanse of wilderness. The sun began to dip below the horizon, casting hues of orange and purple across the sky. Eli sat down, his heart brimming with joy as he took out his sketchbook to capture the moment. Every line he drew was filled with the feeling of awe that Rainier’s towering presence inspired within him.\n\nAs he sketched, Eli couldn’t help but think about how much he had learned from his journey. Mount Rainier wasn’t just tall; it was a symbol of resilience, permanence, and beauty. It stood through decades, weathering storms and sunshine alike, reminding everyone who gazed upon it that even the tallest of mountains started as mere stones beneath the earth.\n\nIn the twilight, with the first stars appearing, Eli knew he had forged a special bond with the mountain. He murmured a promise to return, to keep exploring its secrets, to share its beauty with others, and to remember the stories of his grandmother. As the silhouette of Mount Rainier stood silhouetted against the night sky, he made his way down the trail, heart full and spirit soaring, inspired by the abiding grandeur of the tall, timeless giant that would always watch over him."
-

Teardown

PASSED test_openai_shorthand_streaming 0:00:13.107354

Setup

Call

Captured stderr call
[2024-11-04T16:54:51Z INFO  baml_events] Function TestOpenAIShorthand:
-    Client: openai/gpt-4o-mini (gpt-4o-mini-2024-07-18) - 13097ms. StopReason: stop. Tokens(in/out): 18/685
+    "Once upon a time, in the heart of Washington State, there stood a giant known as Mount Rainier. Towering above the landscape at an impressive 14,411 feet, Rainier wasn’t just tall; it was a sentinel, watching over the lush forests and vibrant meadows that sprawled below.\n\nIn a small town at the base of the mountain lived a curious girl named Lily. Every day after school, she would rush to the park where she could see the majestic peak shimmering in the sunlight. To her, it was more than just a mountain; it was a mystery waiting to be unraveled.\n\nOne bright Saturday morning, inspired by her dreams of adventure, Lily decided it was time to explore the trails that wound their way up Rainier’s slopes. Equipped with a backpack filled with snacks, a notebook, and a pencil, she set off on her journey.\n\nAs she walked, the trees whispered secrets and the wind sang gentle melodies. Lily marveled at the wildflowers that adorned her path, bursting with colors she had only seen in her art books—vivid blues, radiant yellows, and soft pinks danced in the sunlight. Each step brought her closer to the towering giant, and her excitement grew with every moment.\n\nAfter hours of hiking, she reached a clearing where she could see the mountain in all its glory. Clouds swirled around the summit like a fluffy crown, and the sunlight made the glacial ice sparkle like diamonds. Overwhelmed by the beauty, Lily sat down on a large rock and took out her notebook.\n\n“I want to capture this moment,” she thought, and began sketching the scene before her. As she drew, she felt a connection to the mountain, as if it was sharing its stories with her. It had stood there for thousands of years, witnessing the seasons change, the stars twinkle, and the world transform around it. \n\nJust then, a squirrel scurried by, stopping to peek at her with curious eyes. With a gentle laugh, Lily reached into her backpack for a snack and tossed a bit of granola. The squirrel chattered happily and scampered off, leaving Lily feeling even more enchanted by her surroundings.\n\nAs the sun began to dip below the horizon, painting the sky in hues of orange and pink, Lily knew it was time to head back. With her heart full of wonder and her notebook brimming with memories, she whispered a promise to the mountain. “I’ll come back to learn more about you, to explore your secrets.”\n\nWith one last look at the majestic peak, she began her descent, already dreaming of the adventures that awaited her in the future. For in her heart, she knew that no matter how high Mt. Rainier stood, its true height was measured not in feet but in the wonder and inspiration it bestowed upon those who dared to climb."
+

Teardown

PASSED test_openai_shorthand_streaming 0:00:06.649597

Setup

Call

Captured stderr call
[2024-11-05T19:55:36Z INFO  baml_events] Function TestOpenAIShorthand:
+    Client: openai/gpt-4o-mini (gpt-4o-mini-2024-07-18) - 6640ms. StopReason: stop. Tokens(in/out): 18/480
     ---PROMPT---
     [chat] system: Write a nice short story about Mt Rainier is tall
     
     ---LLM REPLY---
-    Once upon a time, in the lush, green foothills of the Pacific Northwest, there stood a majestic giant named Mt. Rainier. Towering high above the vibrant landscape, she was a guardian of the hills, a beacon of beauty encircled by ancient forests and sparkling rivers.
-    
-    Every morning, the sun would rise and cast its golden rays upon Mt. Rainier’s snow-capped peaks, illuminating the valleys below and igniting the sky with brilliant hues of orange and pink. The mountain had a personality all her own; among the locals, she was both humbling and inspiring, an embodiment of nature’s grandeur.
+    Once upon a time in the heart of the Pacific Northwest, there stood a majestic giant named Mt. Rainier. With its snow-capped peak piercing the sky and cloaked in a veil of clouds, the mountain reigned over the lush green forests and sparkling rivers that danced at its base. 
     
-    In a quaint little town at the base of the mountain, a young girl named Lily found solace beneath Mt. Rainier's watchful presence. She would often wander into the meadows where wildflowers danced in the breeze, dreaming of the adventures that awaited her. With each passing day, she nestled herself closer to the mountain, filling her heart with its stories: tales of brave climbers, swaying deer, and whispers from the ancient trees.
+    Every morning, as the first rays of sunlight kissed the mountain's summit, it seemed to stretch a little taller, shimmering in hues of pink and gold. The animals around, from the curious deer to the playful otters, would look up in awe. They whispered to one another about how, on some days, Mt. Rainier would appear to touch the sky itself.
     
-    One summer afternoon, Lily decided she wanted to see the world from the mountain’s summit. With a mix of excitement and trepidation, she shared her dream with her parents. Holding her hands, they said, "It may be tall, but if you truly wish to reach the top, you must prepare, take your time, and respect her beauty."
+    One summer, a young girl named Lila decided to embark on an adventure with her family to the foot of the great mountain. Her heart raced with excitement as they trekked through fields of wildflowers, their colors painting the landscape like a vibrant tapestry. With every step, the mountain seemed to grow larger, embodying a presence that felt almost magical.
     
-    Determined, Lily started her quest. She learned about the trails, practiced her climbing skills, and filled her backpack with essentials. Day after day, she hiked the lower terrains, each step unfurling her love for the mountain. She met fellow adventurers along the way who shared their stories, each climbing higher with every passing season.
+    At the base of Rainier, Lila’s father pointed up at the peak and said, “Look how tall it is! It feels like you could reach out and touch the clouds if you try hard enough!” Lila laughed, imagining climbing to the snowy top and having a tea party with the clouds. 
     
-    Finally, the day arrived—a perfect summer morning when the sky was clear and the air was fresh. With her heart racing and her determination high, Lily set out on the trail to the summit of Mt. Rainier. The climb was both exhilarating and daunting. As she ascended, the world around her transformed; lush greens and wildflowers receded, replaced by rocky paths and the serene beauty of glaciers.
+    As they continued their hike, Lila noticed how the mountain called to her. It whispered tales of ancient stories, of glacial rivers and legendary explorers who once stood where she stood. She imagined herself amidst those brave souls, gazing down at the vast expanse below.
     
-    As the hours wore on, fatigue set in, but Lily remembered her dream and the mountain's whispers of perseverance. Slowly, one foot in front of the other, she climbed higher until, at last, she reached the summit. Standing on the precipice of the world, she took a deep breath and gazed out at the sweeping vistas—the valleys, the rivers, and even the glimmering ocean in the distance.
+    That evening, as the sun dipped behind the mountain, painting the sky in shades of lavender and orange, Lila sat on a rock and gazed at Mt. Rainier. It towered majestically, a silent guardian of the land. In that moment, she understood that the mountain was not just tall, but it was also a symbol of adventure, wonder, and resilience. 
     
-    With the wind gently tousling her hair, Lily felt a profound connection to Mt. Rainier. It was not just a tall mountain; it was a testament to dreams, determination, and the spirit of adventure. At that moment, she realized that her journey was her own, a reflection of the beauty that awaited anyone who dared to reach for the sky.
-    
-    As she made her way back down, her heart brimmed with joy. Mt. Rainier would always be tall and mighty, but she now understood that its true strength lay in the inspiration it gave to all those who gazed upon it. And in that small town, beneath the watchful eye of the mountain, stories would continue to unfold for generations to come, inspiring dreamers like Lily to chase after the heights they wished to reach.
+    With a heart full of dreams, Lila vowed to climb it one day. Mt. Rainier might be an imposing giant, but in her eyes, it was an invitation to explore the heights of possibility. And from then on, every tall tree, high hill, and lofty dream reminded her of the giant that stood watch over her beloved home, inspiring her to reach for the stars.
     ---Parsed Response (string)---
-    "Once upon a time, in the lush, green foothills of the Pacific Northwest, there stood a majestic giant named Mt. Rainier. Towering high above the vibrant landscape, she was a guardian of the hills, a beacon of beauty encircled by ancient forests and sparkling rivers.\n\nEvery morning, the sun would rise and cast its golden rays upon Mt. Rainier’s snow-capped peaks, illuminating the valleys below and igniting the sky with brilliant hues of orange and pink. The mountain had a personality all her own; among the locals, she was both humbling and inspiring, an embodiment of nature’s grandeur.\n\nIn a quaint little town at the base of the mountain, a young girl named Lily found solace beneath Mt. Rainier's watchful presence. She would often wander into the meadows where wildflowers danced in the breeze, dreaming of the adventures that awaited her. With each passing day, she nestled herself closer to the mountain, filling her heart with its stories: tales of brave climbers, swaying deer, and whispers from the ancient trees.\n\nOne summer afternoon, Lily decided she wanted to see the world from the mountain’s summit. With a mix of excitement and trepidation, she shared her dream with her parents. Holding her hands, they said, \"It may be tall, but if you truly wish to reach the top, you must prepare, take your time, and respect her beauty.\"\n\nDetermined, Lily started her quest. She learned about the trails, practiced her climbing skills, and filled her backpack with essentials. Day after day, she hiked the lower terrains, each step unfurling her love for the mountain. She met fellow adventurers along the way who shared their stories, each climbing higher with every passing season.\n\nFinally, the day arrived—a perfect summer morning when the sky was clear and the air was fresh. With her heart racing and her determination high, Lily set out on the trail to the summit of Mt. Rainier. The climb was both exhilarating and daunting. As she ascended, the world around her transformed; lush greens and wildflowers receded, replaced by rocky paths and the serene beauty of glaciers.\n\nAs the hours wore on, fatigue set in, but Lily remembered her dream and the mountain's whispers of perseverance. Slowly, one foot in front of the other, she climbed higher until, at last, she reached the summit. Standing on the precipice of the world, she took a deep breath and gazed out at the sweeping vistas—the valleys, the rivers, and even the glimmering ocean in the distance.\n\nWith the wind gently tousling her hair, Lily felt a profound connection to Mt. Rainier. It was not just a tall mountain; it was a testament to dreams, determination, and the spirit of adventure. At that moment, she realized that her journey was her own, a reflection of the beauty that awaited anyone who dared to reach for the sky.\n\nAs she made her way back down, her heart brimmed with joy. Mt. Rainier would always be tall and mighty, but she now understood that its true strength lay in the inspiration it gave to all those who gazed upon it. And in that small town, beneath the watchful eye of the mountain, stories would continue to unfold for generations to come, inspiring dreamers like Lily to chase after the heights they wished to reach."
-

Teardown

PASSED test_anthropic_shorthand 0:00:03.309795

Setup

Call

Captured stderr call
[2024-11-04T16:54:54Z INFO  baml_events] Function TestAnthropicShorthand:
-    Client: anthropic/claude-3-haiku-20240307 (claude-3-haiku-20240307) - 3301ms. StopReason: "end_turn". Tokens(in/out): 19/396
+    "Once upon a time in the heart of the Pacific Northwest, there stood a majestic giant named Mt. Rainier. With its snow-capped peak piercing the sky and cloaked in a veil of clouds, the mountain reigned over the lush green forests and sparkling rivers that danced at its base. \n\nEvery morning, as the first rays of sunlight kissed the mountain's summit, it seemed to stretch a little taller, shimmering in hues of pink and gold. The animals around, from the curious deer to the playful otters, would look up in awe. They whispered to one another about how, on some days, Mt. Rainier would appear to touch the sky itself.\n\nOne summer, a young girl named Lila decided to embark on an adventure with her family to the foot of the great mountain. Her heart raced with excitement as they trekked through fields of wildflowers, their colors painting the landscape like a vibrant tapestry. With every step, the mountain seemed to grow larger, embodying a presence that felt almost magical.\n\nAt the base of Rainier, Lila’s father pointed up at the peak and said, “Look how tall it is! It feels like you could reach out and touch the clouds if you try hard enough!” Lila laughed, imagining climbing to the snowy top and having a tea party with the clouds. \n\nAs they continued their hike, Lila noticed how the mountain called to her. It whispered tales of ancient stories, of glacial rivers and legendary explorers who once stood where she stood. She imagined herself amidst those brave souls, gazing down at the vast expanse below.\n\nThat evening, as the sun dipped behind the mountain, painting the sky in shades of lavender and orange, Lila sat on a rock and gazed at Mt. Rainier. It towered majestically, a silent guardian of the land. In that moment, she understood that the mountain was not just tall, but it was also a symbol of adventure, wonder, and resilience. \n\nWith a heart full of dreams, Lila vowed to climb it one day. Mt. Rainier might be an imposing giant, but in her eyes, it was an invitation to explore the heights of possibility. And from then on, every tall tree, high hill, and lofty dream reminded her of the giant that stood watch over her beloved home, inspiring her to reach for the stars."
+

Teardown

PASSED test_anthropic_shorthand 0:00:03.363343

Setup

Call

Captured stderr call
[2024-11-05T19:55:39Z INFO  baml_events] Function TestAnthropicShorthand:
+    Client: anthropic/claude-3-haiku-20240307 (claude-3-haiku-20240307) - 3356ms. StopReason: "end_turn". Tokens(in/out): 19/444
     ---PROMPT---
     [chat] user: Write a nice short story about Mt Rainier is tall
     
     ---LLM REPLY---
     Here is a short story about Mount Rainier:
     
-    Towering High
+    The Majestic Mt. Rainier
     
-    The morning mist clung to the ancient slopes of Mount Rainier, its majestic peak piercing through the clouds like a frozen spear. Jayden gazed upwards in awe, feeling insignificant in the shadow of this colossal mountain.
+    As the sun began to peek over the distant horizon, its golden rays slowly illuminated the towering peak of Mount Rainier. Standing tall at 14,411 feet, the massive stratovolcano dominated the landscape, its snow-capped summit reaching up towards the heavens.
     
-    At 14,411 feet, Mount Rainier was the highest point in the Cascade Range, a sleeping giant that loomed over the lush forests and alpine meadows of the Pacific Northwest. Jayden had come to the national park many times, but each visit left him humbled by the mountain's sheer, daunting scale.
+    Sarah stood in awe, her eyes transfixed on the mountain's grandeur. She had hiked these trails many times, but the sheer scale and beauty of Mt. Rainier never ceased to amaze her. The mountain's rugged volcanic slopes, carved by glaciers over millennia, seemed to exude a primal power that stirred something deep within her soul.
     
-    As he began his hike, Jayden's legs burned from the steep incline, but his eyes remained fixed on the icy summit far above. With each step, the mountain's grandeur only seemed to grow, towering ever higher against the brilliant blue sky.
+    As she began her ascent, Sarah felt a sense of humility wash over her. Compared to the towering giant before her, she was but a speck, a mere mortal dwarfed by the majesty of nature. Each step brought her closer to the summit, but the peak seemed to recede further into the distance, a testament to the mountain's true enormity.
     
-    Pausing to catch his breath, Jayden turned and looked back at the way he had come. The world below had shrunk to a patchwork of green and gold, a reminded of how small he was in comparison to this ancient, majestic peak.
+    The climb was arduous, testing her physical and mental limits, but Sarah pressed on, driven by a relentless determination to reach the top. Finally, after hours of grueling effort, she emerged onto the breathtaking vista of the summit, her heart swelling with a profound sense of accomplishment.
     
-    Jayden knew he would never reach the top - few ever did. But simply being in the shadow of Mount Rainier, feeling its power and presence, was enough to fill him with a sense of wonder. This was a mountain that commanded respect, one that had stood sentinel over the land for millennia.
+    From this lofty perch, Sarah gazed out over the vast, untamed wilderness that surrounded her. In every direction, the landscape was dominated by the towering presence of Mount Rainier, a timeless and majestic sentinel that had borne witness to the ebb and flow of human history.
     
-    As he continued his ascent, Jayden couldn't help but marvel at the mountain's enduring strength. No matter how many years passed, Mount Rainier would continue to rise high above, a timeless symbol of the raw, untamed beauty of the natural world.
+    In that moment, Sarah felt a deep connection to the mountain, a kinship that transcended the boundaries of time and space. She knew that she would return to this place, again and again, drawn by the siren call of its grandeur and the transformative power of its presence.
     ---Parsed Response (string)---
-    "Here is a short story about Mount Rainier:\n\nTowering High\n\nThe morning mist clung to the ancient slopes of Mount Rainier, its majestic peak piercing through the clouds like a frozen spear. Jayden gazed upwards in awe, feeling insignificant in the shadow of this colossal mountain.\n\nAt 14,411 feet, Mount Rainier was the highest point in the Cascade Range, a sleeping giant that loomed over the lush forests and alpine meadows of the Pacific Northwest. Jayden had come to the national park many times, but each visit left him humbled by the mountain's sheer, daunting scale.\n\nAs he began his hike, Jayden's legs burned from the steep incline, but his eyes remained fixed on the icy summit far above. With each step, the mountain's grandeur only seemed to grow, towering ever higher against the brilliant blue sky.\n\nPausing to catch his breath, Jayden turned and looked back at the way he had come. The world below had shrunk to a patchwork of green and gold, a reminded of how small he was in comparison to this ancient, majestic peak.\n\nJayden knew he would never reach the top - few ever did. But simply being in the shadow of Mount Rainier, feeling its power and presence, was enough to fill him with a sense of wonder. This was a mountain that commanded respect, one that had stood sentinel over the land for millennia.\n\nAs he continued his ascent, Jayden couldn't help but marvel at the mountain's enduring strength. No matter how many years passed, Mount Rainier would continue to rise high above, a timeless symbol of the raw, untamed beauty of the natural world."
-

Teardown

PASSED test_anthropic_shorthand_streaming 0:00:03.229976

Setup

Call

Captured stderr call
[2024-11-04T16:54:58Z INFO  baml_events] Function TestAnthropicShorthand:
-    Client: anthropic/claude-3-haiku-20240307 (claude-3-haiku-20240307) - 3206ms. StopReason: "end_turn". Tokens(in/out): 19/383
+    "Here is a short story about Mount Rainier:\n\nThe Majestic Mt. Rainier\n\nAs the sun began to peek over the distant horizon, its golden rays slowly illuminated the towering peak of Mount Rainier. Standing tall at 14,411 feet, the massive stratovolcano dominated the landscape, its snow-capped summit reaching up towards the heavens.\n\nSarah stood in awe, her eyes transfixed on the mountain's grandeur. She had hiked these trails many times, but the sheer scale and beauty of Mt. Rainier never ceased to amaze her. The mountain's rugged volcanic slopes, carved by glaciers over millennia, seemed to exude a primal power that stirred something deep within her soul.\n\nAs she began her ascent, Sarah felt a sense of humility wash over her. Compared to the towering giant before her, she was but a speck, a mere mortal dwarfed by the majesty of nature. Each step brought her closer to the summit, but the peak seemed to recede further into the distance, a testament to the mountain's true enormity.\n\nThe climb was arduous, testing her physical and mental limits, but Sarah pressed on, driven by a relentless determination to reach the top. Finally, after hours of grueling effort, she emerged onto the breathtaking vista of the summit, her heart swelling with a profound sense of accomplishment.\n\nFrom this lofty perch, Sarah gazed out over the vast, untamed wilderness that surrounded her. In every direction, the landscape was dominated by the towering presence of Mount Rainier, a timeless and majestic sentinel that had borne witness to the ebb and flow of human history.\n\nIn that moment, Sarah felt a deep connection to the mountain, a kinship that transcended the boundaries of time and space. She knew that she would return to this place, again and again, drawn by the siren call of its grandeur and the transformative power of its presence."
+

Teardown

PASSED test_anthropic_shorthand_streaming 0:00:03.164320

Setup

Call

Captured stderr call
[2024-11-05T19:55:42Z INFO  baml_events] Function TestAnthropicShorthand:
+    Client: anthropic/claude-3-haiku-20240307 (claude-3-haiku-20240307) - 3155ms. StopReason: "end_turn". Tokens(in/out): 19/366
     ---PROMPT---
     [chat] user: Write a nice short story about Mt Rainier is tall
     
     ---LLM REPLY---
-    Here is a short story about Mount Rainier:
+    Here is a short story about the towering beauty of Mount Rainier:
     
-    The Majestic Presence of Mount Rainier
+    Reaching for the Sky
     
-    As I gazed up at the towering peak of Mount Rainier, I was struck by its sheer size and grandeur. Rising dramatically from the surrounding landscape, the mountain's snow-capped summit seemed to pierce the sky, dwarfing everything around it.  
+    The morning sun peeked above the horizon, bathing the landscape in a warm glow. As Sarah stepped outside, her eyes were immediately drawn to the magnificent peak that dominated the skyline. Mount Rainier stood tall and proud, its snow-capped summit reaching over 14,000 feet into the crisp, blue sky. 
     
-    I had seen pictures of Rainier before, but nothing could have prepared me for the awe-inspiring sight of this ancient, glaciated volcano in person. Standing over 14,000 feet tall, it commanded attention and respect, a true testament to the incredible power of nature.
+    Sarah had always been in awe of this natural wonder. Even from miles away, the sheer size and scale of the mountain was humbling. She remembered the first time she had laid eyes on it, how the sight had taken her breath away. There was something truly majestic about its presence - a timeless, stoic beauty that commanded respect.
     
-    As I hiked through the lush, verdant forests that clung to the mountain's lower slopes, I couldn't help but feel small and insignificant in comparison to Rainier's massive presence. Yet, there was also a sense of wonder and humility that washed over me - this was a landscape sculpted over eons, a natural wonder that had witnessed the passage of millennia.
+    As Sarah gazed up at the towering peak, she felt a sense of wonder wash over her. The mountain seemed to radiate an ancient power, a connection to the forces that had shaped the earth itself. Its glaciers and rocky crags told a story of eons of change, of volcanic activity and weathering that had formed this awe-inspiring natural wonder.
     
-    With each step, the mountain seemed to grow taller, its rugged, snow-covered peak reaching ever higher into the clouds. I found myself pausing frequently, craning my neck to take in the full scale and majesty of Rainier, mesmerized by its sheer, towering beauty.
-    
-    In that moment, I was reminded of just how grand and powerful our natural world can be. Mount Rainier stood as a testament to the enduring strength of the Earth, a monument to the forces that have shaped our planet over countless generations. And as I gazed upon its majestic form, I felt a profound sense of connection to the timeless cycles of nature that had given rise to this incredible natural wonder.
+    Slowly, Sarah began to make her way towards the mountain, her steps filled with a sense of reverence. She knew that she could never truly fathom the depth of its history or the grandeur of its form, but in that moment, she felt a deep appreciation for its place in the world. Mount Rainier was not just a mountain - it was a testament to the raw power and beauty of the natural world, a reminder of our own insignificance in the face of such a towering, magnificent presence.
     ---Parsed Response (string)---
-    "Here is a short story about Mount Rainier:\n\nThe Majestic Presence of Mount Rainier\n\nAs I gazed up at the towering peak of Mount Rainier, I was struck by its sheer size and grandeur. Rising dramatically from the surrounding landscape, the mountain's snow-capped summit seemed to pierce the sky, dwarfing everything around it.  \n\nI had seen pictures of Rainier before, but nothing could have prepared me for the awe-inspiring sight of this ancient, glaciated volcano in person. Standing over 14,000 feet tall, it commanded attention and respect, a true testament to the incredible power of nature.\n\nAs I hiked through the lush, verdant forests that clung to the mountain's lower slopes, I couldn't help but feel small and insignificant in comparison to Rainier's massive presence. Yet, there was also a sense of wonder and humility that washed over me - this was a landscape sculpted over eons, a natural wonder that had witnessed the passage of millennia.\n\nWith each step, the mountain seemed to grow taller, its rugged, snow-covered peak reaching ever higher into the clouds. I found myself pausing frequently, craning my neck to take in the full scale and majesty of Rainier, mesmerized by its sheer, towering beauty.\n\nIn that moment, I was reminded of just how grand and powerful our natural world can be. Mount Rainier stood as a testament to the enduring strength of the Earth, a monument to the forces that have shaped our planet over countless generations. And as I gazed upon its majestic form, I felt a profound sense of connection to the timeless cycles of nature that had given rise to this incredible natural wonder."
-

Teardown

PASSED test_fallback_to_shorthand 0:00:01.001753

Setup

Call

Captured stderr call
[2024-11-04T16:54:59Z INFO  baml_events] Function TestFallbackToShorthand:
+    "Here is a short story about the towering beauty of Mount Rainier:\n\nReaching for the Sky\n\nThe morning sun peeked above the horizon, bathing the landscape in a warm glow. As Sarah stepped outside, her eyes were immediately drawn to the magnificent peak that dominated the skyline. Mount Rainier stood tall and proud, its snow-capped summit reaching over 14,000 feet into the crisp, blue sky. \n\nSarah had always been in awe of this natural wonder. Even from miles away, the sheer size and scale of the mountain was humbling. She remembered the first time she had laid eyes on it, how the sight had taken her breath away. There was something truly majestic about its presence - a timeless, stoic beauty that commanded respect.\n\nAs Sarah gazed up at the towering peak, she felt a sense of wonder wash over her. The mountain seemed to radiate an ancient power, a connection to the forces that had shaped the earth itself. Its glaciers and rocky crags told a story of eons of change, of volcanic activity and weathering that had formed this awe-inspiring natural wonder.\n\nSlowly, Sarah began to make her way towards the mountain, her steps filled with a sense of reverence. She knew that she could never truly fathom the depth of its history or the grandeur of its form, but in that moment, she felt a deep appreciation for its place in the world. Mount Rainier was not just a mountain - it was a testament to the raw power and beauty of the natural world, a reminder of our own insignificance in the face of such a towering, magnificent presence."
+

Teardown

PASSED test_fallback_to_shorthand 0:00:00.880085

Setup

Call

Captured stderr call
[2024-11-05T19:55:43Z INFO  baml_events] Function TestFallbackToShorthand:
     (1 other previous tries)
-    Client: openai/gpt-4o-mini (gpt-4o-mini-2024-07-18) - 729ms. StopReason: stop. Tokens(in/out): 18/19
+    Client: openai/gpt-4o-mini (gpt-4o-mini-2024-07-18) - 633ms. StopReason: stop. Tokens(in/out): 18/20
     ---PROMPT---
     [chat] system: Say a haiku about Mt Rainier is tall.
     
     ---LLM REPLY---
-    Majestic and wide,  
-    Mt. Rainier crowns the sky,  
-    Nature’s sentinel.
+    Majestic and grand,  
+    Mt. Rainier kisses sky,  
+    Above all, it stands.
     ---Parsed Response (string)---
-    "Majestic and wide,  \nMt. Rainier crowns the sky,  \nNature’s sentinel."
-

Teardown

PASSED test_aws_streaming 0:00:01.930766

Setup

Call

Captured stderr call
[2024-11-04T16:54:59Z WARN  aws_runtime::env_config::normalize] section [Connection 1] ignored; config must be in the AWS config file rather than the credentials file
-[2024-11-04T16:54:59Z INFO  aws_config::meta::region] load_region; provider=EnvironmentVariableRegionProvider { env: Env(Real) }
-[2024-11-04T16:55:01Z INFO  baml_events] Function TestAws:
-    Client: AwsBedrock (anthropic.claude-3-haiku-20240307-v1:0) - 1920ms. StopReason: unknown. Tokens(in/out): 19/100
+    "Majestic and grand,  \nMt. Rainier kisses sky,  \nAbove all, it stands."
+

Teardown

PASSED test_aws_streaming 0:00:02.213066

Setup

Call

Captured stderr call
[2024-11-05T19:55:43Z WARN  aws_runtime::env_config::normalize] section [Connection 1] ignored; config must be in the AWS config file rather than the credentials file
+[2024-11-05T19:55:43Z INFO  aws_config::meta::region] load_region; provider=EnvironmentVariableRegionProvider { env: Env(Real) }
+[2024-11-05T19:55:45Z INFO  baml_events] Function TestAws:
+    Client: AwsBedrock (anthropic.claude-3-haiku-20240307-v1:0) - 2200ms. StopReason: unknown. Tokens(in/out): 19/100
     ---PROMPT---
     [chat] user: Write a nice short story about Mt Rainier is tall
     
     ---LLM REPLY---
     Here is a short story about Mt. Rainier:
     
-    The Towering Giant of the Cascades
-    
-    As the sun peeked over the horizon, its warm rays illuminated the majestic snow-capped peak of Mount Rainier. Standing tall at 14,411 feet, the dormant volcano cast a commanding presence over the surrounding landscape, dwarfing the evergreen forests and rolling hills that stretched out beneath it.
+    Rising high above the lush, green forests of the Pacific Northwest, Mt. Rainier stands tall and majestic. Its snow-capped peak, over 14,000 feet in elevation, commands awe and respect from all who gaze upon it.
     
-    For the people who live
+    For the hikers and mountaineers who attempt to reach the summit, the journey is both physically and mentally challenging. The climb is arduous,
     ---Parsed Response (string)---
-    "Here is a short story about Mt. Rainier:\n\nThe Towering Giant of the Cascades\n\nAs the sun peeked over the horizon, its warm rays illuminated the majestic snow-capped peak of Mount Rainier. Standing tall at 14,411 feet, the dormant volcano cast a commanding presence over the surrounding landscape, dwarfing the evergreen forests and rolling hills that stretched out beneath it.\n\nFor the people who live"
-

Teardown

PASSED test_streaming 0:00:05.029893

Setup

Call

Captured stderr call
[2024-11-04T16:55:06Z INFO  baml_events] Function PromptTestStreaming:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 5020ms. StopReason: stop. Tokens(in/out): 19/306
+    "Here is a short story about Mt. Rainier:\n\nRising high above the lush, green forests of the Pacific Northwest, Mt. Rainier stands tall and majestic. Its snow-capped peak, over 14,000 feet in elevation, commands awe and respect from all who gaze upon it.\n\nFor the hikers and mountaineers who attempt to reach the summit, the journey is both physically and mentally challenging. The climb is arduous,"
+

Teardown

PASSED test_streaming 0:00:03.561350

Setup

Call

Captured stderr call
[2024-11-05T19:55:49Z INFO  baml_events] Function PromptTestStreaming:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 3553ms. StopReason: stop. Tokens(in/out): 19/305
     ---PROMPT---
     [chat] system: Tell me a short story about Programming languages are fun to create
     
     ---LLM REPLY---
-    Once upon a time, in a bustling city filled with brilliant minds, a group of young programmers came together with a dream to create their very own programming language. They were inspired by the languages they had learned and used, but they wanted to build something entirely unique and special.
+    Once upon a time in a world filled with endless creativity and imagination, a group of young programmers decided to embark on a journey to create their very own programming language. They were inspired by the beauty and elegance of existing languages, but wanted to add their own unique twist to the coding world.
     
-    They started by brainstorming ideas, sketching out the syntax, and testing different features. As they worked tirelessly, they found that creating a programming language was not just an exercise in logic and problem-solving, but also a journey of creativity and imagination.
+    They spent countless hours brainstorming ideas, sketching out designs, and meticulously crafting each line of code. As they worked together, they discovered the joy of combining logic and creativity to bring their language to life. They experimented with new features, played with different syntaxes, and tested out various functions to see what worked best.
     
-    They experimented with different data types, control structures, and functions, weaving them together like artists painting a masterpiece. Each line of code was like a brushstroke on a canvas, adding depth and complexity to their creation.
+    As their language began to take shape, they marveled at how each decision they made had a ripple effect on the entire system. They learned to anticipate the needs of their users, to think from different perspectives, and to constantly refine and improve their creation.
     
-    As the language began to take shape, they tested it out on simple programs, watching in awe as their creation came to life. They marveled at how the language could bring order to chaos, turning lines of code into meaningful instructions that could be executed by a computer.
+    Finally, after many months of hard work and dedication, their programming language was complete. They released it to the world with a sense of pride and excitement, eager to see how others would use and build upon their creation.
     
-    Finally, after many long days and nights of hard work, they unveiled their programming language to the world. Programmers from far and wide flocked to try it out, amazed by its elegance and power.
-    
-    And so, the young programmers realized that creating a programming language was not just about building a tool for others to use, but also a joyful and fulfilling expression of their creativity and passion for programming. They knew that they had created something truly special, a language that would inspire and empower generations of coders to come.
+    And as developers around the globe began to adopt and use their language, the young programmers realized that the true joy of creating a programming language was not just in the act of coding itself, but in the knowledge that they had contributed something meaningful and innovative to the ever-evolving world of technology. And so, they continued to dream, to create, and to push the boundaries of what was possible, one line of code at a time.
     ---Parsed Response (string)---
-    "Once upon a time, in a bustling city filled with brilliant minds, a group of young programmers came together with a dream to create their very own programming language. They were inspired by the languages they had learned and used, but they wanted to build something entirely unique and special.\n\nThey started by brainstorming ideas, sketching out the syntax, and testing different features. As they worked tirelessly, they found that creating a programming language was not just an exercise in logic and problem-solving, but also a journey of creativity and imagination.\n\nThey experimented with different data types, control structures, and functions, weaving them together like artists painting a masterpiece. Each line of code was like a brushstroke on a canvas, adding depth and complexity to their creation.\n\nAs the language began to take shape, they tested it out on simple programs, watching in awe as their creation came to life. They marveled at how the language could bring order to chaos, turning lines of code into meaningful instructions that could be executed by a computer.\n\nFinally, after many long days and nights of hard work, they unveiled their programming language to the world. Programmers from far and wide flocked to try it out, amazed by its elegance and power.\n\nAnd so, the young programmers realized that creating a programming language was not just about building a tool for others to use, but also a joyful and fulfilling expression of their creativity and passion for programming. They knew that they had created something truly special, a language that would inspire and empower generations of coders to come."
-

Teardown

PASSED test_streaming_uniterated 0:00:04.365119

Setup

Call

Captured stderr call
[2024-11-04T16:55:10Z INFO  baml_events] Function PromptTestStreaming:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 4356ms. StopReason: stop. Tokens(in/out): 19/312
+    "Once upon a time in a world filled with endless creativity and imagination, a group of young programmers decided to embark on a journey to create their very own programming language. They were inspired by the beauty and elegance of existing languages, but wanted to add their own unique twist to the coding world.\n\nThey spent countless hours brainstorming ideas, sketching out designs, and meticulously crafting each line of code. As they worked together, they discovered the joy of combining logic and creativity to bring their language to life. They experimented with new features, played with different syntaxes, and tested out various functions to see what worked best.\n\nAs their language began to take shape, they marveled at how each decision they made had a ripple effect on the entire system. They learned to anticipate the needs of their users, to think from different perspectives, and to constantly refine and improve their creation.\n\nFinally, after many months of hard work and dedication, their programming language was complete. They released it to the world with a sense of pride and excitement, eager to see how others would use and build upon their creation.\n\nAnd as developers around the globe began to adopt and use their language, the young programmers realized that the true joy of creating a programming language was not just in the act of coding itself, but in the knowledge that they had contributed something meaningful and innovative to the ever-evolving world of technology. And so, they continued to dream, to create, and to push the boundaries of what was possible, one line of code at a time."
+

Teardown

PASSED test_streaming_uniterated 0:00:04.736298

Setup

Call

Captured stderr call
[2024-11-05T19:55:54Z INFO  baml_events] Function PromptTestStreaming:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 4726ms. StopReason: stop. Tokens(in/out): 19/316
     ---PROMPT---
     [chat] system: Tell me a short story about The color blue makes me sad
     
     ---LLM REPLY---
-    Once there was a girl named Lily who had always loved the color blue. The calming hue of the sky and the deep depths of the ocean always brought her comfort and joy. But as she grew older, something changed within her.
+    Once there was a little girl named Rosie who had always loved the color blue. It reminded her of the clear skies on a sunny day and the peaceful ocean waters. She used to wear blue dresses and paint her room in shades of blue.
     
-    Lily began to associate the color blue with sadness and loss. It reminded her of the day she had to say goodbye to her beloved pet dog, whose collar was a vibrant shade of blue. She couldn't bear to look at anything blue without feeling a pang of sorrow deep in her heart.
+    However, as Rosie grew older, she began to associate the color blue with sadness. It started when her beloved grandmother passed away and blue flowers were placed on her grave. Then, a close friend moved away, leaving Rosie feeling a deep sense of loss. The blue sky that she once found so peaceful now seemed to mock her with its vast emptiness.
     
-    As the years went by, Lily found herself avoiding anything blue. She refused to wear blue clothing or decorate her home with blue accents. The once beautiful color now brought only melancholy and pain.
+    Rosie found herself avoiding anything blue, unable to shake the feeling of sadness that engulfed her whenever she saw the color. She even stopped wearing her favorite blue dresses and started painting her room in warmer tones to chase away the melancholy that blue brought.
     
-    One day, as Lily was walking through a park, she came across a vibrant patch of blue flowers. Despite her best efforts to avoid them, the sight caught her eye and she couldn't help but feel a surge of emotion. Tears welled in her eyes as memories of her past flooded back, but this time, they were accompanied by a sense of healing and acceptance.
+    One day, as Rosie was walking in the park, she came across a beautiful blue butterfly fluttering among the flowers. At first, she felt a pang of sadness, but then she realized that the butterfly was a sign of hope and renewal. Just like the butterfly emerging from its cocoon, Rosie could emerge from her sadness and find joy in the color blue once again.
     
-    Lily realized that the color blue may always hold a tinge of sadness for her, but it was also a reminder of the love and happiness she had experienced in her life. She no longer needed to avoid it, for the color blue was a part of her story, a reminder of the bittersweet beauty of life. And so, with a newfound appreciation, Lily embraced the color blue once more, letting it bring both joy and sorrow in equal measure.
+    From that day on, Rosie made peace with the color blue. She embraced it as a reminder that sadness is just a part of life, but that there is always beauty and hope waiting to be discovered. And so, the color blue no longer made Rosie sad, but instead served as a source of strength and resilience in the face of life's challenges.
     ---Parsed Response (string)---
-    "Once there was a girl named Lily who had always loved the color blue. The calming hue of the sky and the deep depths of the ocean always brought her comfort and joy. But as she grew older, something changed within her.\n\nLily began to associate the color blue with sadness and loss. It reminded her of the day she had to say goodbye to her beloved pet dog, whose collar was a vibrant shade of blue. She couldn't bear to look at anything blue without feeling a pang of sorrow deep in her heart.\n\nAs the years went by, Lily found herself avoiding anything blue. She refused to wear blue clothing or decorate her home with blue accents. The once beautiful color now brought only melancholy and pain.\n\nOne day, as Lily was walking through a park, she came across a vibrant patch of blue flowers. Despite her best efforts to avoid them, the sight caught her eye and she couldn't help but feel a surge of emotion. Tears welled in her eyes as memories of her past flooded back, but this time, they were accompanied by a sense of healing and acceptance.\n\nLily realized that the color blue may always hold a tinge of sadness for her, but it was also a reminder of the love and happiness she had experienced in her life. She no longer needed to avoid it, for the color blue was a part of her story, a reminder of the bittersweet beauty of life. And so, with a newfound appreciation, Lily embraced the color blue once more, letting it bring both joy and sorrow in equal measure."
-

Teardown

PASSED test_streaming_sync 0:00:03.859453

Setup

Call

Captured stderr call
[2024-11-04T16:55:14Z INFO  baml_events] Function PromptTestStreaming:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 3852ms. StopReason: stop. Tokens(in/out): 19/269
+    "Once there was a little girl named Rosie who had always loved the color blue. It reminded her of the clear skies on a sunny day and the peaceful ocean waters. She used to wear blue dresses and paint her room in shades of blue.\n\nHowever, as Rosie grew older, she began to associate the color blue with sadness. It started when her beloved grandmother passed away and blue flowers were placed on her grave. Then, a close friend moved away, leaving Rosie feeling a deep sense of loss. The blue sky that she once found so peaceful now seemed to mock her with its vast emptiness.\n\nRosie found herself avoiding anything blue, unable to shake the feeling of sadness that engulfed her whenever she saw the color. She even stopped wearing her favorite blue dresses and started painting her room in warmer tones to chase away the melancholy that blue brought.\n\nOne day, as Rosie was walking in the park, she came across a beautiful blue butterfly fluttering among the flowers. At first, she felt a pang of sadness, but then she realized that the butterfly was a sign of hope and renewal. Just like the butterfly emerging from its cocoon, Rosie could emerge from her sadness and find joy in the color blue once again.\n\nFrom that day on, Rosie made peace with the color blue. She embraced it as a reminder that sadness is just a part of life, but that there is always beauty and hope waiting to be discovered. And so, the color blue no longer made Rosie sad, but instead served as a source of strength and resilience in the face of life's challenges."
+

Teardown

PASSED test_streaming_sync 0:00:04.580829

Setup

Call

Captured stderr call
[2024-11-05T19:55:58Z INFO  baml_events] Function PromptTestStreaming:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 4574ms. StopReason: stop. Tokens(in/out): 19/313
     ---PROMPT---
     [chat] system: Tell me a short story about Programming languages are fun to create
     
     ---LLM REPLY---
-    Once upon a time, in a land where creativity and innovation flourished, a group of ambitious programmers gathered to create their own programming language. They were excited by the challenge of designing a language that was powerful, efficient, and easy to use.
-    
-    They started by brainstorming ideas for syntax, data structures, and operations. They debated the merits of different programming paradigms and incorporated elements from their favorite languages. As they worked, they encountered obstacles and setbacks, but they never lost their enthusiasm for the project.
+    Once upon a time, in a land where technology and innovation flourished, there lived a group of passionate programmers who loved to push the boundaries of what was possible. One day, a young programmer named Alex had a brilliant idea - to create their very own programming language.
     
-    After many long days and late nights of coding and testing, they finally had a working prototype of their new language. They named it "Codeland" and released it to the world with great fanfare.
+    With excitement and determination, Alex set out on the journey of creating their own programming language from scratch. They began by defining the syntax and structure of the language, carefully designing each feature to be both powerful and easy to use. They drew inspiration from their favorite languages and added unique twists of their own, creating a language that was truly one-of-a-kind.
     
-    To their delight, Codeland quickly gained popularity among programmers for its simplicity and versatility. Developers praised its elegant syntax and powerful features, and soon it was being used to create a wide range of applications, from simple scripts to complex algorithms.
+    As the days turned into weeks, Alex poured their heart and soul into perfecting every aspect of the language. They tested and debugged tirelessly, overcoming countless challenges along the way. But with each obstacle overcome, Alex's passion only grew stronger, fueled by the joy of seeing their creation come to life.
     
-    As word spread about Codeland, more and more programmers joined the community, contributing ideas and improvements to the language. Together, they continued to refine and expand Codeland, making it even better and more functional with each new release.
+    Finally, after many long hours and late nights, Alex's programming language was complete. They proudly named it "InnovateScript" and released it into the world for others to use and enjoy. And to their delight, programmers from far and wide embraced the language, finding it to be a joy to work with and a powerful tool for their projects.
     
-    And so, the programmers of Codeland lived happily ever after, united by their love of coding and the joy of creating something truly amazing.
+    And so, Alex's programming language lived on, inspiring new generations of programmers to dream big, create boldly, and push the boundaries of what was possible in the world of technology. And they all lived happily ever after, knowing that programming languages truly were a fun and magical creation.
     ---Parsed Response (string)---
-    "Once upon a time, in a land where creativity and innovation flourished, a group of ambitious programmers gathered to create their own programming language. They were excited by the challenge of designing a language that was powerful, efficient, and easy to use.\n\nThey started by brainstorming ideas for syntax, data structures, and operations. They debated the merits of different programming paradigms and incorporated elements from their favorite languages. As they worked, they encountered obstacles and setbacks, but they never lost their enthusiasm for the project.\n\nAfter many long days and late nights of coding and testing, they finally had a working prototype of their new language. They named it \"Codeland\" and released it to the world with great fanfare.\n\nTo their delight, Codeland quickly gained popularity among programmers for its simplicity and versatility. Developers praised its elegant syntax and powerful features, and soon it was being used to create a wide range of applications, from simple scripts to complex algorithms.\n\nAs word spread about Codeland, more and more programmers joined the community, contributing ideas and improvements to the language. Together, they continued to refine and expand Codeland, making it even better and more functional with each new release.\n\nAnd so, the programmers of Codeland lived happily ever after, united by their love of coding and the joy of creating something truly amazing."
-

Teardown

PASSED test_streaming_uniterated_sync 0:00:04.015893

Setup

Call

Captured stderr call
[2024-11-04T16:55:18Z INFO  baml_events] Function PromptTestStreaming:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 4006ms. StopReason: stop. Tokens(in/out): 19/289
+    "Once upon a time, in a land where technology and innovation flourished, there lived a group of passionate programmers who loved to push the boundaries of what was possible. One day, a young programmer named Alex had a brilliant idea - to create their very own programming language.\n\nWith excitement and determination, Alex set out on the journey of creating their own programming language from scratch. They began by defining the syntax and structure of the language, carefully designing each feature to be both powerful and easy to use. They drew inspiration from their favorite languages and added unique twists of their own, creating a language that was truly one-of-a-kind.\n\nAs the days turned into weeks, Alex poured their heart and soul into perfecting every aspect of the language. They tested and debugged tirelessly, overcoming countless challenges along the way. But with each obstacle overcome, Alex's passion only grew stronger, fueled by the joy of seeing their creation come to life.\n\nFinally, after many long hours and late nights, Alex's programming language was complete. They proudly named it \"InnovateScript\" and released it into the world for others to use and enjoy. And to their delight, programmers from far and wide embraced the language, finding it to be a joy to work with and a powerful tool for their projects.\n\nAnd so, Alex's programming language lived on, inspiring new generations of programmers to dream big, create boldly, and push the boundaries of what was possible in the world of technology. And they all lived happily ever after, knowing that programming languages truly were a fun and magical creation."
+

Teardown

PASSED test_streaming_uniterated_sync 0:00:04.282157

Setup

Call

Captured stderr call
[2024-11-05T19:56:03Z INFO  baml_events] Function PromptTestStreaming:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 4274ms. StopReason: stop. Tokens(in/out): 19/399
     ---PROMPT---
     [chat] system: Tell me a short story about The color blue makes me sad
     
     ---LLM REPLY---
-    Once there was a girl named Emily who had a deep connection to the color blue. She found it to be the most beautiful and calming hue, and it reminded her of the vastness of the ocean and the endless sky.
+    Once upon a time, there was a young girl named Lily who always felt a deep sadness whenever she saw the color blue. It reminded her of the endless ocean that took her father away from her when she was just a little girl.
     
-    However, as she grew older, Emily started associating the color blue with her sadness. It reminded her of the times she felt lost and alone, of moments when she cried under the open sky, feeling small and insignificant.
+    Lily's father was a sailor who loved the sea, but one stormy night, his ship was lost at sea, leaving Lily and her mother to grieve his loss. Since then, the color blue became a constant reminder of her father's absence, and every time she saw it, her heart ached with sorrow.
     
-    Despite her efforts to shake off the feeling, the color blue continued to weigh heavy on her heart. She would avoid anything blue, whether it be the sky, the ocean, or even a simple piece of clothing.
+    Despite her sadness, Lily found solace in painting. She would spend hours in her room, creating beautiful works of art that expressed her emotions and helped her cope with her feelings of loss. She painted vibrant landscapes and colorful flowers, but she always avoided the color blue.
     
-    One day, while taking a walk by the sea, Emily encountered a stunning blue flower blooming amidst the rocks. At first, she turned away, unable to bear the sadness that the color brought her. But as she looked closer, she realized that the flower was a symbol of hope and resilience.
+    One day, a wise old artist visited Lily and saw her struggle with the color blue. He encouraged her to embrace her sadness and channel it into her art. He told her that sadness could be a source of inspiration and that the color blue could be a symbol of healing and growth.
     
-    In that moment, Emily made peace with the color blue. She understood that while it may bring back painful memories, it also held the power to soothe, to heal, and to remind her that even in the midst of sadness, beauty can still bloom.
+    Inspired by the artist's words, Lily decided to paint a new masterpiece. She picked up her paintbrush and bravely dipped it into the deep blue paint. As she worked, she poured her grief and love into the canvas, creating a stunning portrait of the ocean at dusk.
     
-    From then on, whenever Emily saw the color blue, she felt a twinge of sadness but also a glimmer of hope, knowing that she could conquer her sadness and find solace in the beauty that surrounded her.
+    When she finished, Lily looked at her painting and felt a sense of peace wash over her. The color blue no longer made her sad, but instead, it brought her comfort and a sense of connection to her father. She realized that through her art, she could keep his memory alive and find beauty in the color that once brought her pain.
+    
+    From that day on, Lily embraced the color blue as a reminder of her father's love and the healing power of art. And whenever she felt sad, she would paint her emotions onto the canvas, transforming her sorrow into something beautiful and meaningful.
     ---Parsed Response (string)---
-    "Once there was a girl named Emily who had a deep connection to the color blue. She found it to be the most beautiful and calming hue, and it reminded her of the vastness of the ocean and the endless sky.\n\nHowever, as she grew older, Emily started associating the color blue with her sadness. It reminded her of the times she felt lost and alone, of moments when she cried under the open sky, feeling small and insignificant.\n\nDespite her efforts to shake off the feeling, the color blue continued to weigh heavy on her heart. She would avoid anything blue, whether it be the sky, the ocean, or even a simple piece of clothing.\n\nOne day, while taking a walk by the sea, Emily encountered a stunning blue flower blooming amidst the rocks. At first, she turned away, unable to bear the sadness that the color brought her. But as she looked closer, she realized that the flower was a symbol of hope and resilience.\n\nIn that moment, Emily made peace with the color blue. She understood that while it may bring back painful memories, it also held the power to soothe, to heal, and to remind her that even in the midst of sadness, beauty can still bloom.\n\nFrom then on, whenever Emily saw the color blue, she felt a twinge of sadness but also a glimmer of hope, knowing that she could conquer her sadness and find solace in the beauty that surrounded her."
-

Teardown

PASSED test_streaming_claude 0:00:01.150139

Setup

Call

Captured stdout call
msgs:
+    "Once upon a time, there was a young girl named Lily who always felt a deep sadness whenever she saw the color blue. It reminded her of the endless ocean that took her father away from her when she was just a little girl.\n\nLily's father was a sailor who loved the sea, but one stormy night, his ship was lost at sea, leaving Lily and her mother to grieve his loss. Since then, the color blue became a constant reminder of her father's absence, and every time she saw it, her heart ached with sorrow.\n\nDespite her sadness, Lily found solace in painting. She would spend hours in her room, creating beautiful works of art that expressed her emotions and helped her cope with her feelings of loss. She painted vibrant landscapes and colorful flowers, but she always avoided the color blue.\n\nOne day, a wise old artist visited Lily and saw her struggle with the color blue. He encouraged her to embrace her sadness and channel it into her art. He told her that sadness could be a source of inspiration and that the color blue could be a symbol of healing and growth.\n\nInspired by the artist's words, Lily decided to paint a new masterpiece. She picked up her paintbrush and bravely dipped it into the deep blue paint. As she worked, she poured her grief and love into the canvas, creating a stunning portrait of the ocean at dusk.\n\nWhen she finished, Lily looked at her painting and felt a sense of peace wash over her. The color blue no longer made her sad, but instead, it brought her comfort and a sense of connection to her father. She realized that through her art, she could keep his memory alive and find beauty in the color that once brought her pain.\n\nFrom that day on, Lily embraced the color blue as a reminder of her father's love and the healing power of art. And whenever she felt sad, she would paint her emotions onto the canvas, transforming her sorrow into something beautiful and meaningful."
+

Teardown

PASSED test_streaming_claude 0:00:01.036682

Setup

Call

Captured stdout call
msgs:
 Here's a haiku about Mt. Rainier:
 
-Rainier stands so high
-Piercing clouds with snowy peak
-Guardian of sky
+Rainier stands supreme
+Above Northwest clouds and pines
+Ancient ice gleaming
 final:
 Here's a haiku about Mt. Rainier:
 
-Rainier stands so high
-Piercing clouds with snowy peak
-Guardian of sky
-
Captured stderr call
[2024-11-04T16:55:19Z INFO  baml_events] Function PromptTestClaude:
-    Client: Sonnet (claude-3-5-sonnet-20241022) - 1117ms. StopReason: "end_turn". Tokens(in/out): 19/41
+Rainier stands supreme
+Above Northwest clouds and pines
+Ancient ice gleaming
+
Captured stderr call
[2024-11-05T19:56:04Z INFO  baml_events] Function PromptTestClaude:
+    Client: Sonnet (claude-3-5-sonnet-20241022) - 1016ms. StopReason: "end_turn". Tokens(in/out): 19/34
     ---PROMPT---
     [chat] user: Tell me a haiku about Mt Rainier is tall
     
     ---LLM REPLY---
     Here's a haiku about Mt. Rainier:
     
-    Rainier stands so high
-    Piercing clouds with snowy peak
-    Guardian of sky
+    Rainier stands supreme
+    Above Northwest clouds and pines
+    Ancient ice gleaming
     ---Parsed Response (string)---
-    "Here's a haiku about Mt. Rainier:\n\nRainier stands so high\nPiercing clouds with snowy peak\nGuardian of sky"
-

Teardown

PASSED test_streaming_gemini 0:00:06.320319

Setup

Call

Captured stdout call
msgs:
-Dr. Pepper hadn't always been Dr. Pepper. He'd started life as Perry P. Pepper, a bright-eyed boy fascinated by the fizz and pop of soda. Perry experimented in his basement lab, mixing concoctions of fruit extracts and bubbly water. His dream? To create a soda unlike any other, a flavor symphony that would dance on the tongue.
-
-Years passed, filled with bubbling beakers and countless taste tests. Perry, now a young man, grew disheartened. His creations were good, but not groundbreaking. Then, one rainy afternoon, as lightning crackled outside, he stumbled upon a forgotten ingredient – a dusty vial labeled "23." 
-
-Hesitant but intrigued, Perry added a single drop to his latest concoction. The mixture fizzed violently, spewing aromatic steam. When it settled, the air was thick with the scent of cherry, licorice, and something undefinably unique. He took a sip.
-
-His eyes widened. It was perfect. A kaleidoscope of 23 distinct flavors, harmonizing in a sweet, slightly spicy crescendo. The soda was more than just a drink; it was an experience, a burst of pure joy.
-
-From that day forward, Perry P. Pepper was no more. He became Dr. Pepper, the eccentric inventor of the world's most unique soda. His creation, a testament to perseverance and a pinch of serendipity, became a global sensation, proving that sometimes, the best things in life are worth the wait – and a little bit of magic. 
-
-final:
-Dr. Pepper hadn't always been Dr. Pepper. He'd started life as Perry P. Pepper, a bright-eyed boy fascinated by the fizz and pop of soda. Perry experimented in his basement lab, mixing concoctions of fruit extracts and bubbly water. His dream? To create a soda unlike any other, a flavor symphony that would dance on the tongue.
-
-Years passed, filled with bubbling beakers and countless taste tests. Perry, now a young man, grew disheartened. His creations were good, but not groundbreaking. Then, one rainy afternoon, as lightning crackled outside, he stumbled upon a forgotten ingredient – a dusty vial labeled "23." 
+    "Here's a haiku about Mt. Rainier:\n\nRainier stands supreme\nAbove Northwest clouds and pines\nAncient ice gleaming"
+

Teardown

FAILED test_streaming_gemini 0:00:02.539860

baml_py.BamlClientHttpError: LLM call failed: LLMErrorResponse { client: "Gemini", model: Some("gemini-1.5-pro-001"), prompt: Chat([RenderedChatMessage { role: "user", allow_duplicate_role: false, parts: [Text("Write a nice short story about Dr.Pepper")] }]), request_options: {"safetySettings": Object {"threshold": String("BLOCK_LOW_AND_ABOVE"), "category": String("HARM_CATEGORY_HATE_SPEECH")}}, start_time: SystemTime { tv_sec: 1730836564, tv_nsec: 188563000 }, latency: 2.535240917s, message: "Failed to parse event: Error(\"missing field `content`\", line: 1, column: 359)", code: UnsupportedResponse(2) }

Setup

Call

@pytest.mark.asyncio
+    async def test_streaming_gemini():
+        stream = b.stream.TestGemini(input="Dr.Pepper")
+        msgs: list[str] = []
+        async for msg in stream:
+            if msg is not None:
+                msgs.append(msg)
+>       final = await stream.get_final_response()
 
-Hesitant but intrigued, Perry added a single drop to his latest concoction. The mixture fizzed violently, spewing aromatic steam. When it settled, the air was thick with the scent of cherry, licorice, and something undefinably unique. He took a sip.
+tests/test_functions.py:543: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+../../engine/language_client_python/python_src/baml_py/stream.py:81: in get_final_response
+    return self.__final_coerce((await asyncio.wrap_future(final)))
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
 
-His eyes widened. It was perfect. A kaleidoscope of 23 distinct flavors, harmonizing in a sweet, slightly spicy crescendo. The soda was more than just a drink; it was an experience, a burst of pure joy.
+x = 
 
-From that day forward, Perry P. Pepper was no more. He became Dr. Pepper, the eccentric inventor of the world's most unique soda. His creation, a testament to perseverance and a pinch of serendipity, became a global sensation, proving that sometimes, the best things in life are worth the wait – and a little bit of magic. 
+>     lambda x: cast(str, x.cast_to(types, types)),
+      self.__ctx_manager.get(),
+    )
+E   baml_py.BamlClientHttpError: LLM call failed: LLMErrorResponse { client: "Gemini", model: Some("gemini-1.5-pro-001"), prompt: Chat([RenderedChatMessage { role: "user", allow_duplicate_role: false, parts: [Text("Write a nice short story about Dr.Pepper")] }]), request_options: {"safetySettings": Object {"threshold": String("BLOCK_LOW_AND_ABOVE"), "category": String("HARM_CATEGORY_HATE_SPEECH")}}, start_time: SystemTime { tv_sec: 1730836564, tv_nsec: 188563000 }, latency: 2.535240917s, message: "Failed to parse event: Error(\"missing field `content`\", line: 1, column: 359)", code: UnsupportedResponse(2) }
 
-
Captured stderr call
[2024-11-04T16:55:25Z INFO  baml_events] Function TestGemini:
-    Client: Gemini (gemini-1.5-pro-001) - 6305ms. StopReason: Stop. Tokens(in/out): unknown/unknown
+baml_client/async_client.py:5406: BamlClientHttpError
Captured stderr call
[2024-11-05T19:56:06Z WARN  baml_events] Function TestGemini:
+    Client: Gemini (gemini-1.5-pro-001) - 2535ms
     ---PROMPT---
     [chat] user: Write a nice short story about Dr.Pepper
     
-    ---LLM REPLY---
-    Dr. Pepper hadn't always been Dr. Pepper. He'd started life as Perry P. Pepper, a bright-eyed boy fascinated by the fizz and pop of soda. Perry experimented in his basement lab, mixing concoctions of fruit extracts and bubbly water. His dream? To create a soda unlike any other, a flavor symphony that would dance on the tongue.
-    
-    Years passed, filled with bubbling beakers and countless taste tests. Perry, now a young man, grew disheartened. His creations were good, but not groundbreaking. Then, one rainy afternoon, as lightning crackled outside, he stumbled upon a forgotten ingredient – a dusty vial labeled "23." 
-    
-    Hesitant but intrigued, Perry added a single drop to his latest concoction. The mixture fizzed violently, spewing aromatic steam. When it settled, the air was thick with the scent of cherry, licorice, and something undefinably unique. He took a sip.
-    
-    His eyes widened. It was perfect. A kaleidoscope of 23 distinct flavors, harmonizing in a sweet, slightly spicy crescendo. The soda was more than just a drink; it was an experience, a burst of pure joy.
-    
-    From that day forward, Perry P. Pepper was no more. He became Dr. Pepper, the eccentric inventor of the world's most unique soda. His creation, a testament to perseverance and a pinch of serendipity, became a global sensation, proving that sometimes, the best things in life are worth the wait – and a little bit of magic. 
-    
-    ---Parsed Response (string)---
-    "Dr. Pepper hadn't always been Dr. Pepper. He'd started life as Perry P. Pepper, a bright-eyed boy fascinated by the fizz and pop of soda. Perry experimented in his basement lab, mixing concoctions of fruit extracts and bubbly water. His dream? To create a soda unlike any other, a flavor symphony that would dance on the tongue.\n\nYears passed, filled with bubbling beakers and countless taste tests. Perry, now a young man, grew disheartened. His creations were good, but not groundbreaking. Then, one rainy afternoon, as lightning crackled outside, he stumbled upon a forgotten ingredient – a dusty vial labeled \"23.\" \n\nHesitant but intrigued, Perry added a single drop to his latest concoction. The mixture fizzed violently, spewing aromatic steam. When it settled, the air was thick with the scent of cherry, licorice, and something undefinably unique. He took a sip.\n\nHis eyes widened. It was perfect. A kaleidoscope of 23 distinct flavors, harmonizing in a sweet, slightly spicy crescendo. The soda was more than just a drink; it was an experience, a burst of pure joy.\n\nFrom that day forward, Perry P. Pepper was no more. He became Dr. Pepper, the eccentric inventor of the world's most unique soda. His creation, a testament to perseverance and a pinch of serendipity, became a global sensation, proving that sometimes, the best things in life are worth the wait – and a little bit of magic. \n"
-

Teardown

PASSED test_tracing_async_only 0:00:05.660904

Setup

Call

Captured stdout call
STATS TraceStats(failed=0, started=15, finalized=15, submitted=15, sent=15, done=15)
-
Captured stderr call
[2024-11-04T16:55:27Z INFO  baml_events] Function FnOutputClass:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 930ms. StopReason: stop. Tokens(in/out): 50/18
+    ---REQUEST OPTIONS---
+    safetySettings: {"threshold":"BLOCK_LOW_AND_ABOVE","category":"HARM_CATEGORY_HATE_SPEECH"}
+    ---ERROR (BadResponse 2)---
+    Failed to parse event: Error("missing field `content`", line: 1, column: 359)
+

Teardown

PASSED test_tracing_async_only 0:00:04.998515

Setup

Call

Captured stdout call
STATS TraceStats(failed=0, started=15, finalized=15, submitted=15, sent=15, done=15)
+
Captured stderr call
[2024-11-05T19:56:07Z INFO  baml_events] Function FnOutputClass:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 678ms. StopReason: stop. Tokens(in/out): 50/20
     ---PROMPT---
     [chat] system: Return a JSON blob with this schema: 
     Answer in JSON using this schema:
@@ -1124,16 +950,16 @@
     
     ---LLM REPLY---
     {
-      "prop1": "example",
+      "prop1": "Hello, JSON!",
       "prop2": 540
     }
     ---Parsed Response (class TestOutputClass)---
     {
-      "prop1": "example",
+      "prop1": "Hello, JSON!",
       "prop2": 540
     }
-[2024-11-04T16:55:28Z INFO  baml_events] Function FnOutputClass:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 864ms. StopReason: stop. Tokens(in/out): 50/20
+[2024-11-05T19:56:08Z INFO  baml_events] Function FnOutputClass:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 796ms. StopReason: stop. Tokens(in/out): 50/20
     ---PROMPT---
     [chat] system: Return a JSON blob with this schema: 
     Answer in JSON using this schema:
@@ -1148,16 +974,16 @@
     
     ---LLM REPLY---
     {
-      "prop1": "Hello, world!",
+      "prop1": "Hello, World!",
       "prop2": 540
     }
     ---Parsed Response (class TestOutputClass)---
     {
-      "prop1": "Hello, world!",
+      "prop1": "Hello, World!",
       "prop2": 540
     }
-[2024-11-04T16:55:29Z INFO  baml_events] Function FnOutputClass:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 1065ms. StopReason: stop. Tokens(in/out): 50/20
+[2024-11-05T19:56:09Z INFO  baml_events] Function FnOutputClass:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 588ms. StopReason: stop. Tokens(in/out): 50/19
     ---PROMPT---
     [chat] system: Return a JSON blob with this schema: 
     Answer in JSON using this schema:
@@ -1172,16 +998,16 @@
     
     ---LLM REPLY---
     {
-      "prop1": "Hello, world!",
+      "prop1": "String data",
       "prop2": 540
     }
     ---Parsed Response (class TestOutputClass)---
     {
-      "prop1": "Hello, world!",
+      "prop1": "String data",
       "prop2": 540
     }
-[2024-11-04T16:55:31Z INFO  baml_events] Function FnOutputClass:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 833ms. StopReason: stop. Tokens(in/out): 50/19
+[2024-11-05T19:56:11Z INFO  baml_events] Function FnOutputClass:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 705ms. StopReason: stop. Tokens(in/out): 50/19
     ---PROMPT---
     [chat] system: Return a JSON blob with this schema: 
     Answer in JSON using this schema:
@@ -1196,17 +1022,17 @@
     
     ---LLM REPLY---
     {
-      "prop1": "Example string",
+      "prop1": "Sample string",
       "prop2": 540
     }
     ---Parsed Response (class TestOutputClass)---
     {
-      "prop1": "Example string",
+      "prop1": "Sample string",
       "prop2": 540
     }
-

Teardown

PASSED test_tracing_sync 0:00:00.001787

Setup

Call

Teardown

PASSED test_tracing_thread_pool 0:00:01.393800

Setup

Call

Teardown

PASSED test_tracing_thread_pool_async 0:00:14.123566

Setup

Call

Teardown

PASSED test_tracing_async_gather 0:00:01.477560

Setup

Call

Teardown

PASSED test_tracing_async_gather_top_level 0:00:01.139670

Setup

Call

Teardown

PASSED test_dynamic 0:00:01.428467

Setup

Call

Captured stdout call
{'name': 'Harrison', 'hair_color': <Color.BLACK: 'BLACK'>, 'last_name': [], 'height': 1.83, 'hobbies': [<Hobby.SPORTS: 'SPORTS'>]}
-
Captured stderr call
[2024-11-04T16:55:51Z INFO  baml_events] Function ExtractPeople:
-    Client: GPT4 (gpt-4o-2024-08-06) - 1415ms. StopReason: stop. Tokens(in/out): 177/49
+

Teardown

PASSED test_tracing_sync 0:00:00.001328

Setup

Call

Teardown

PASSED test_tracing_thread_pool 0:00:01.421198

Setup

Call

Teardown

PASSED test_tracing_thread_pool_async 0:00:12.931142

Setup

Call

Teardown

PASSED test_tracing_async_gather 0:00:01.480602

Setup

Call

Teardown

PASSED test_tracing_async_gather_top_level 0:00:01.143204

Setup

Call

Teardown

PASSED test_dynamic 0:00:01.624562

Setup

Call

Captured stdout call
{'name': 'Harrison', 'hair_color': <Color.BLACK: 'BLACK'>, 'last_name': [], 'height': 1.83, 'hobbies': [<Hobby.SPORTS: 'SPORTS'>]}
+
Captured stderr call
[2024-11-05T19:56:30Z INFO  baml_events] Function ExtractPeople:
+    Client: GPT4 (gpt-4o-2024-08-06) - 1619ms. StopReason: stop. Tokens(in/out): 177/53
     ---PROMPT---
     [chat] system: You are an expert extraction algorithm. Only extract relevant information from the text. If you do not know the value of an attribute asked to extract, return null for the attribute's value.
     
@@ -1234,7 +1060,9 @@
         "hair_color": "BLACK",
         "last_name": [],
         "height": 1.83,
-        "hobbies": ["sports"]
+        "hobbies": [
+          "sports"
+        ]
       }
     ]
     ```
@@ -1250,10 +1078,10 @@
         ]
       }
     ]
-

Teardown

PASSED test_dynamic_class_output 0:00:00.957732

Setup

Call

Captured stdout call
[]
+

Teardown

PASSED test_dynamic_class_output 0:00:01.183615

Setup

Call

Captured stdout call
[]
 {"hair_color":"black"}
-
Captured stderr call
[2024-11-04T16:55:51Z INFO  baml_events] Function MyFunc:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 430ms. StopReason: stop. Tokens(in/out): 49/10
+
Captured stderr call
[2024-11-05T19:56:31Z INFO  baml_events] Function MyFunc:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 660ms. StopReason: stop. Tokens(in/out): 49/10
     ---PROMPT---
     [chat] system: Given a string, extract info using the schema:
     
@@ -1272,7 +1100,7 @@
     {
       "hair_color": "black"
     }
-[2024-11-04T16:55:52Z INFO  baml_events] Function MyFunc:
+[2024-11-05T19:56:31Z INFO  baml_events] Function MyFunc:
     Client: GPT35 (gpt-3.5-turbo-0125) - 513ms. StopReason: stop. Tokens(in/out): 49/10
     ---PROMPT---
     [chat] system: Given a string, extract info using the schema:
@@ -1292,9 +1120,9 @@
     {
       "hair_color": "black"
     }
-

Teardown

PASSED test_dynamic_class_nested_output_no_stream 0:00:01.558718

Setup

Call

Captured stdout call
{"name":{"first_name":"Mark","last_name":"Gonzalez","middle_name":null},"address":null,"hair_color":"black","height":6.0}
-
Captured stderr call
[2024-11-04T16:55:53Z INFO  baml_events] Function MyFunc:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 1544ms. StopReason: stop. Tokens(in/out): 97/57
+

Teardown

PASSED test_dynamic_class_nested_output_no_stream 0:00:01.248148

Setup

Call

Captured stdout call
{"name":{"first_name":"Mark","last_name":"Gonzalez","middle_name":null},"address":null,"hair_color":"black","height":6.0}
+
Captured stderr call
[2024-11-05T19:56:32Z INFO  baml_events] Function MyFunc:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 1234ms. StopReason: stop. Tokens(in/out): 97/55
     ---PROMPT---
     [chat] system: Given a string, extract info using the schema:
     
@@ -1322,7 +1150,7 @@
       },
       "address": null,
       "hairColor": "black",
-      "height": 6.0
+      "height": 6
     }
     ---Parsed Response (class DynamicOutput)---
     {
@@ -1335,7 +1163,7 @@
       "hair_color": "black",
       "height": 6.0
     }
-

Teardown

PASSED test_dynamic_class_nested_output_stream 0:00:00.941964

Setup

Call

Captured stdout call
streamed  name=None hair_color=None
+

Teardown

PASSED test_dynamic_class_nested_output_stream 0:00:00.946501

Setup

Call

Captured stdout call
streamed  name=None hair_color=None
 streamed  {'name': None, 'hair_color': None}
 streamed  name=None hair_color=None
 streamed  {'name': None, 'hair_color': None}
@@ -1412,8 +1240,8 @@
 streamed  name={'first_name': 'Mark', 'last_name': 'Gonzalez'} hair_color='black'
 streamed  {'name': {'first_name': 'Mark', 'last_name': 'Gonzalez'}, 'hair_color': 'black'}
 {"name":{"first_name":"Mark","last_name":"Gonzalez"},"hair_color":"black"}
-
Captured stderr call
[2024-11-04T16:55:54Z INFO  baml_events] Function MyFunc:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 932ms. StopReason: stop. Tokens(in/out): 73/35
+
Captured stderr call
[2024-11-05T19:56:33Z INFO  baml_events] Function MyFunc:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 934ms. StopReason: stop. Tokens(in/out): 73/35
     ---PROMPT---
     [chat] system: Given a string, extract info using the schema:
     
@@ -1444,24 +1272,25 @@
       },
       "hair_color": "black"
     }
-

Teardown

PASSED test_stream_dynamic_class_output 0:00:00.610370

Setup

Call

Captured stdout call
[]
-streamed  {'hair_color': None}
-streamed  {'hair_color': None}
-streamed  {'hair_color': None}
-streamed  {'hair_color': None}
-streamed  {'hair_color': None}
-streamed  {'hair_color': None}
+

Teardown

PASSED test_stream_dynamic_class_output 0:00:00.853403

Setup

Call

Captured stdout call
[]
+streamed  {'hair_color': '{'}
+streamed  {'hair_color': '{'}
+streamed  {'hair_color': '{\n  "'}
+streamed  {'hair_color': '{\n  "hair'}
+streamed  {'hair_color': '{\n  "hair_color'}
+streamed  {'hair_color': '{\n  "hair_color":'}
 streamed  {'hair_color': ''}
 streamed  {'hair_color': 'black'}
 streamed  {'hair_color': 'black'}
 streamed  {'hair_color': 'black'}
 streamed  {'hair_color': 'black'}
 streamed  {'hair_color': 'black'}
+streamed  {'hair_color': 'black'}
 final  hair_color='black'
 final  {'hair_color': 'black'}
 final  {"hair_color":"black"}
-
Captured stderr call
[2024-11-04T16:55:55Z INFO  baml_events] Function MyFunc:
-    Client: MyClient (gpt-4o-mini-2024-07-18) - 602ms. StopReason: stop. Tokens(in/out): 48/10
+
Captured stderr call
[2024-11-05T19:56:34Z INFO  baml_events] Function MyFunc:
+    Client: MyClient (gpt-4o-mini-2024-07-18) - 845ms. StopReason: stop. Tokens(in/out): 48/14
     ---PROMPT---
     [chat] system: Given a string, extract info using the schema:
     
@@ -1473,15 +1302,17 @@
     }
     
     ---LLM REPLY---
+    ```json
     {
       "hair_color": "black"
     }
+    ```
     ---Parsed Response (class DynamicOutput)---
     {
       "hair_color": "black"
     }
-

Teardown

PASSED test_dynamic_inputs_list2 0:00:01.483164

Setup

Call

Captured stderr call
[2024-11-04T16:55:56Z INFO  baml_events] Function DynamicListInputOutput:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 1472ms. StopReason: stop. Tokens(in/out): 135/79
+

Teardown

PASSED test_dynamic_inputs_list2 0:00:01.775472

Setup

Call

Captured stderr call
[2024-11-05T19:56:36Z INFO  baml_events] Function DynamicListInputOutput:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 1764ms. StopReason: stop. Tokens(in/out): 135/79
     ---PROMPT---
     [chat] system: Here is some input data:
     ----
@@ -1546,8 +1377,8 @@
         }
       }
     ]
-

Teardown

PASSED test_dynamic_types_new_enum 0:00:00.940453

Setup

Call

Captured stderr call
[2024-11-04T16:55:57Z INFO  baml_events] Function ExtractPeople:
-    Client: GPT4 (gpt-4o-2024-08-06) - 931ms. StopReason: stop. Tokens(in/out): 149/36
+

Teardown

PASSED test_dynamic_types_new_enum 0:00:00.854973

Setup

Call

Captured stderr call
[2024-11-05T19:56:37Z INFO  baml_events] Function ExtractPeople:
+    Client: GPT4 (gpt-4o-2024-08-06) - 849ms. StopReason: stop. Tokens(in/out): 149/32
     ---PROMPT---
     [chat] system: You are an expert extraction algorithm. Only extract relevant information from the text. If you do not know the value of an attribute asked to extract, return null for the attribute's value.
     
@@ -1562,7 +1393,6 @@
     user: My name is Harrison. My hair is black and I'm 6 feet tall. I'm pretty good around the hoop. I like giraffes.
     
     ---LLM REPLY---
-    ```json
     [
       {
         "name": "Harrison",
@@ -1570,7 +1400,6 @@
         "animalLiked": "GIRAFFE"
       }
     ]
-    ```
     ---Parsed Response (list<class Person>)---
     [
       {
@@ -1579,8 +1408,8 @@
         "animalLiked": "GIRAFFE"
       }
     ]
-

Teardown

PASSED test_dynamic_types_existing_enum 0:00:00.672058

Setup

Call

Captured stderr call
[2024-11-04T16:55:58Z INFO  baml_events] Function ExtractHobby:
-    Client: GPT4 (gpt-4o-2024-08-06) - 655ms. StopReason: stop. Tokens(in/out): 65/12
+

Teardown

PASSED test_dynamic_types_existing_enum 0:00:00.523412

Setup

Call

Captured stderr call
[2024-11-05T19:56:37Z INFO  baml_events] Function ExtractHobby:
+    Client: GPT4 (gpt-4o-2024-08-06) - 515ms. StopReason: stop. Tokens(in/out): 65/12
     ---PROMPT---
     [chat] system: Answer with a JSON Array using this schema:
     [
@@ -1598,8 +1427,8 @@
       "Golfing",
       "MUSIC"
     ]
-

Teardown

PASSED test_dynamic_literals 0:00:00.896486

Setup

Call

Captured stderr call
[2024-11-04T16:55:59Z INFO  baml_events] Function ExtractPeople:
-    Client: GPT4 (gpt-4o-2024-08-06) - 887ms. StopReason: stop. Tokens(in/out): 149/32
+

Teardown

PASSED test_dynamic_literals 0:00:01.460618

Setup

Call

Captured stderr call
[2024-11-05T19:56:39Z INFO  baml_events] Function ExtractPeople:
+    Client: GPT4 (gpt-4o-2024-08-06) - 1451ms. StopReason: stop. Tokens(in/out): 149/32
     ---PROMPT---
     [chat] system: You are an expert extraction algorithm. Only extract relevant information from the text. If you do not know the value of an attribute asked to extract, return null for the attribute's value.
     
@@ -1629,17 +1458,17 @@
         "animalLiked": "GIRAFFE"
       }
     ]
-

Teardown

PASSED test_dynamic_inputs_list 0:00:01.420476

Setup

Call

Captured stderr call
[2024-11-04T16:56:00Z INFO  baml_events] Function DynamicListInputOutput:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 1408ms. StopReason: stop. Tokens(in/out): 134/78
+

Teardown

PASSED test_dynamic_inputs_list 0:00:01.127203

Setup

Call

Captured stderr call
[2024-11-05T19:56:40Z INFO  baml_events] Function DynamicListInputOutput:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 1114ms. StopReason: stop. Tokens(in/out): 134/78
     ---PROMPT---
     [chat] system: Here is some input data:
     ----
     [{
-        "testKey": "myTest",
         "blah": {
             "nestedKey1": "nestedVal",
         },
         "new_key": "hi",
+        "testKey": "myTest",
     }, {
         "new_key": "hi",
         "blah": {
@@ -1695,12 +1524,12 @@
         }
       }
     ]
-

Teardown

PASSED test_dynamic_output_map 0:00:00.950461

Setup

Call

Captured stdout call
[]
-final  hair_color='black' attributes={'height': '6 feet', 'eye_color': 'blue', 'facial_hair': 'beard'}
-final  {'hair_color': 'black', 'attributes': {'height': '6 feet', 'eye_color': 'blue', 'facial_hair': 'beard'}}
-final  {"hair_color":"black","attributes":{"height":"6 feet","eye_color":"blue","facial_hair":"beard"}}
-
Captured stderr call
[2024-11-04T16:56:01Z INFO  baml_events] Function MyFunc:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 943ms. StopReason: stop. Tokens(in/out): 80/44
+

Teardown

PASSED test_dynamic_output_map 0:00:01.220837

Setup

Call

Captured stdout call
[]
+final  hair_color='black' attributes={'eye_color': 'blue', 'facial_hair': 'beard', 'height': '6 feet'}
+final  {'hair_color': 'black', 'attributes': {'eye_color': 'blue', 'facial_hair': 'beard', 'height': '6 feet'}}
+final  {"hair_color":"black","attributes":{"eye_color":"blue","facial_hair":"beard","height":"6 feet"}}
+
Captured stderr call
[2024-11-05T19:56:41Z INFO  baml_events] Function MyFunc:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 1213ms. StopReason: stop. Tokens(in/out): 80/44
     ---PROMPT---
     [chat] system: Given a string, extract info using the schema:
     
@@ -1717,29 +1546,29 @@
     {
       "hair_color": "black",
       "attributes": {
-        "height": "6 feet",
         "eye_color": "blue",
-        "facial_hair": "beard"
+        "facial_hair": "beard",
+        "height": "6 feet"
       }
     }
     ---Parsed Response (class DynamicOutput)---
     {
       "hair_color": "black",
       "attributes": {
-        "height": "6 feet",
         "eye_color": "blue",
-        "facial_hair": "beard"
+        "facial_hair": "beard",
+        "height": "6 feet"
       }
     }
-

Teardown

PASSED test_dynamic_output_union 0:00:02.446937

Setup

Call

Captured stdout call
[]
-final  hair_color='black' attributes={'eye_color': 'blue', 'facial_hair': 'beard'} height={'feet': 6.0, 'inches': None}
-final  {'hair_color': 'black', 'attributes': {'eye_color': 'blue', 'facial_hair': 'beard'}, 'height': {'feet': 6.0, 'inches': None}}
-final  {"hair_color":"black","attributes":{"eye_color":"blue","facial_hair":"beard"},"height":{"feet":6.0,"inches":null}}
+

Teardown

PASSED test_dynamic_output_union 0:00:02.389659

Setup

Call

Captured stdout call
[]
+final  hair_color='black' attributes={'eye_color': 'blue', 'facial_hair': 'beard', 'age': '30'} height={'feet': 6.0, 'inches': None}
+final  {'hair_color': 'black', 'attributes': {'eye_color': 'blue', 'facial_hair': 'beard', 'age': '30'}, 'height': {'feet': 6.0, 'inches': None}}
+final  {"hair_color":"black","attributes":{"eye_color":"blue","facial_hair":"beard","age":"30"},"height":{"feet":6.0,"inches":null}}
 final  hair_color='black' attributes={'eye_color': 'blue', 'facial_hair': 'beard'} height={'meters': 1.8}
 final  {'hair_color': 'black', 'attributes': {'eye_color': 'blue', 'facial_hair': 'beard'}, 'height': {'meters': 1.8}}
 final  {"hair_color":"black","attributes":{"eye_color":"blue","facial_hair":"beard"},"height":{"meters":1.8}}
-
Captured stderr call
[2024-11-04T16:56:02Z INFO  baml_events] Function MyFunc:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 1145ms. StopReason: stop. Tokens(in/out): 114/58
+
Captured stderr call
[2024-11-05T19:56:42Z INFO  baml_events] Function MyFunc:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 1028ms. StopReason: stop. Tokens(in/out): 114/65
     ---PROMPT---
     [chat] system: Given a string, extract info using the schema:
     
@@ -1763,7 +1592,8 @@
       "hair_color": "black",
       "attributes": {
         "eye_color": "blue",
-        "facial_hair": "beard"
+        "facial_hair": "beard",
+        "age": "30"
       },
       "height": {
         "feet": 6,
@@ -1775,15 +1605,16 @@
       "hair_color": "black",
       "attributes": {
         "eye_color": "blue",
-        "facial_hair": "beard"
+        "facial_hair": "beard",
+        "age": "30"
       },
       "height": {
         "feet": 6.0,
         "inches": null
       }
     }
-[2024-11-04T16:56:04Z INFO  baml_events] Function MyFunc:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 1273ms. StopReason: stop. Tokens(in/out): 116/53
+[2024-11-05T19:56:43Z INFO  baml_events] Function MyFunc:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 1329ms. StopReason: stop. Tokens(in/out): 116/53
     ---PROMPT---
     [chat] system: Given a string, extract info using the schema:
     
@@ -1824,32 +1655,135 @@
         "meters": 1.8
       }
     }
-

Teardown

FAILED test_nested_class_streaming 0:00:00.008714

baml_py.BamlClientError: Something went wrong with the LLM client: LLM call failed: LLMErrorResponse { client: "Ollama", model: None, prompt: Chat([RenderedChatMessage { role: "system", allow_duplicate_role: false, parts: [Text("Return a made up json blob that matches this schema:\nAnswer in JSON using this schema:\n{\n  prop1: string,\n  prop2: {\n    prop1: string,\n    prop2: string,\n    inner: {\n      prop2: int,\n      prop3: float,\n    },\n  },\n}\n---\n\nJSON:")] }]), request_options: {"model": String("llama2")}, start_time: SystemTime { tv_sec: 1730739364, tv_nsec: 41948000 }, latency: 1.718083ms, message: "reqwest::Error { kind: Request, url: Url { scheme: \"http\", cannot_be_a_base: false, username: \"\", password: None, host: Some(Domain(\"localhost\")), port: Some(11434), path: \"/v1/chat/completions\", query: None, fragment: None }, source: hyper_util::client::legacy::Error(Connect, ConnectError(\"tcp connect error\", Os { code: 61, kind: ConnectionRefused, message: \"Connection refused\" })) }", code: Other(2) }

Setup

Call

@pytest.mark.asyncio
-    async def test_nested_class_streaming():
-        stream = b.stream.FnOutputClassNested(
-            input="My name is Harrison. My hair is black and I'm 6 feet tall."
-        )
-        msgs: List[partial_types.TestClassNested] = []
-        async for msg in stream:
-            print("streamed ", msg.model_dump(mode="json"))
-            msgs.append(msg)
->       final = await stream.get_final_response()
-
-tests/test_functions.py:1039: 
-_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
-../../engine/language_client_python/python_src/baml_py/stream.py:81: in get_final_response
-    return self.__final_coerce((await asyncio.wrap_future(final)))
-_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
-
-x = 
-
->     lambda x: cast(types.TestClassNested, x.cast_to(types, types)),
-      self.__ctx_manager.get(),
-    )
-E   baml_py.BamlClientError: Something went wrong with the LLM client: LLM call failed: LLMErrorResponse { client: "Ollama", model: None, prompt: Chat([RenderedChatMessage { role: "system", allow_duplicate_role: false, parts: [Text("Return a made up json blob that matches this schema:\nAnswer in JSON using this schema:\n{\n  prop1: string,\n  prop2: {\n    prop1: string,\n    prop2: string,\n    inner: {\n      prop2: int,\n      prop3: float,\n    },\n  },\n}\n---\n\nJSON:")] }]), request_options: {"model": String("llama2")}, start_time: SystemTime { tv_sec: 1730739364, tv_nsec: 41948000 }, latency: 1.718083ms, message: "reqwest::Error { kind: Request, url: Url { scheme: \"http\", cannot_be_a_base: false, username: \"\", password: None, host: Some(Domain(\"localhost\")), port: Some(11434), path: \"/v1/chat/completions\", query: None, fragment: None }, source: hyper_util::client::legacy::Error(Connect, ConnectError(\"tcp connect error\", Os { code: 61, kind: ConnectionRefused, message: \"Connection refused\" })) }", code: Other(2) }
-
-baml_client/async_client.py:3816: BamlClientError
Captured stderr call
[2024-11-04T16:56:04Z WARN  baml_events] Function FnOutputClassNested:
-    Client: Ollama (<unknown>) - 1ms
+

Teardown

PASSED test_nested_class_streaming 0:00:09.939487

Setup

Call

Captured stdout call
streamed  {'prop1': None, 'prop2': None}
+streamed  {'prop1': None, 'prop2': None}
+streamed  {'prop1': None, 'prop2': None}
+streamed  {'prop1': None, 'prop2': None}
+streamed  {'prop1': None, 'prop2': None}
+streamed  {'prop1': None, 'prop2': None}
+streamed  {'prop1': None, 'prop2': None}
+streamed  {'prop1': None, 'prop2': None}
+streamed  {'prop1': None, 'prop2': None}
+streamed  {'prop1': None, 'prop2': None}
+streamed  {'prop1': None, 'prop2': None}
+streamed  {'prop1': None, 'prop2': None}
+streamed  {'prop1': None, 'prop2': None}
+streamed  {'prop1': None, 'prop2': None}
+streamed  {'prop1': None, 'prop2': None}
+streamed  {'prop1': None, 'prop2': None}
+streamed  {'prop1': None, 'prop2': None}
+streamed  {'prop1': None, 'prop2': None}
+streamed  {'prop1': None, 'prop2': None}
+streamed  {'prop1': None, 'prop2': None}
+streamed  {'prop1': None, 'prop2': None}
+streamed  {'prop1': None, 'prop2': None}
+streamed  {'prop1': None, 'prop2': None}
+streamed  {'prop1': None, 'prop2': None}
+streamed  {'prop1': '', 'prop2': None}
+streamed  {'prop1': 'example', 'prop2': None}
+streamed  {'prop1': 'example', 'prop2': None}
+streamed  {'prop1': 'example', 'prop2': None}
+streamed  {'prop1': 'example', 'prop2': None}
+streamed  {'prop1': 'example', 'prop2': None}
+streamed  {'prop1': 'example', 'prop2': None}
+streamed  {'prop1': 'example', 'prop2': None}
+streamed  {'prop1': 'example', 'prop2': None}
+streamed  {'prop1': 'example', 'prop2': {'prop1': None, 'prop2': None, 'inner': None}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': None, 'prop2': None, 'inner': None}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': None, 'prop2': None, 'inner': None}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': None, 'prop2': None, 'inner': None}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': None, 'prop2': None, 'inner': None}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': None, 'prop2': None, 'inner': None}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': None, 'prop2': None, 'inner': None}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': '', 'prop2': None, 'inner': None}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value', 'prop2': None, 'inner': None}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': None, 'inner': None}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': None, 'inner': None}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': None, 'inner': None}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': None, 'inner': None}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': None, 'inner': None}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': None, 'inner': None}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': None, 'inner': None}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': None, 'inner': None}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': '', 'inner': None}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value', 'inner': None}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': None}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': None}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': None}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': None}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': None}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': None}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': None}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': None, 'prop3': None}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': None, 'prop3': None}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': None, 'prop3': None}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': None, 'prop3': None}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': None, 'prop3': None}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': None, 'prop3': None}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': None, 'prop3': None}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': None, 'prop3': None}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': None, 'prop3': None}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': None, 'prop3': None}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': None}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': None}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': None}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': None}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': None}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': None, 'prop3': None}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': None}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': None}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': None}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': None}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': None}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': None}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': None}}}
+streamed  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': None}}}
+final  {'prop1': 'example', 'prop2': {'prop1': 'value1', 'prop2': 'value2', 'inner': {'prop2': 42, 'prop3': 3.14}}}
+
Captured stderr call
[2024-11-05T19:56:53Z INFO  baml_events] Function FnOutputClassNested:
+    Client: Ollama (llama2) - 9920ms. StopReason: stop. Tokens(in/out): unknown/unknown
     ---PROMPT---
     [chat] system: Return a made up json blob that matches this schema:
     Answer in JSON using this schema:
@@ -1868,21 +1802,45 @@
     
     JSON:
     
-    ---REQUEST OPTIONS---
-    model: "llama2"
-    ---ERROR (Unspecified error code: 2)---
-    reqwest::Error { kind: Request, url: Url { scheme: "http", cannot_be_a_base: false, username: "", password: None, host: Some(Domain("localhost")), port: Some(11434), path: "/v1/chat/completions", query: None, fragment: None }, source: hyper_util::client::legacy::Error(Connect, ConnectError("tcp connect error", Os { code: 61, kind: ConnectionRefused, message: "Connection refused" })) }
-

Teardown

PASSED test_dynamic_client_with_openai 0:00:00.330723

Setup

Call

Captured stderr call
[2024-11-04T16:56:04Z INFO  baml_events] Function ExpectFailure:
-    Client: MyClient (gpt-3.5-turbo-0125) - 324ms. StopReason: stop. Tokens(in/out): 14/1
+    ---LLM REPLY---
+    Sure! Here is a JSON blob that matches the schema you provided:
+    
+    {
+      "prop1": "example",
+      "prop2": {
+        "prop1": "value1",
+        "prop2": "value2",
+        "inner": {
+          "prop2": 42,
+          "prop3": 3.14,
+        },
+      },
+    }
+    
+    Note that I've added a few additional fields to the inner object in the JSON blob to demonstrate how you can use the schema to generate a more complex structure.
+    ---Parsed Response (class TestClassNested)---
+    {
+      "prop1": "example",
+      "prop2": {
+        "prop1": "value1",
+        "prop2": "value2",
+        "inner": {
+          "prop2": 42,
+          "prop3": 3.14
+        }
+      }
+    }
+

Teardown

PASSED test_dynamic_client_with_openai 0:00:00.525763

Setup

Call

Captured stderr call
[2024-11-05T19:56:54Z INFO  baml_events] Function ExpectFailure:
+    Client: MyClient (gpt-3.5-turbo-0125) - 519ms. StopReason: stop. Tokens(in/out): 14/7
     ---PROMPT---
     [chat] system: What is the capital of England?
     
     ---LLM REPLY---
-    London
+    The capital of England is London.
     ---Parsed Response (string)---
-    "London"
-

Teardown

PASSED test_dynamic_client_with_vertex_json_str_creds 0:00:01.160190

Setup

Call

Captured stderr call
[2024-11-04T16:56:05Z INFO  baml_events] Function ExpectFailure:
-    Client: MyClient () - 1153ms. StopReason: "STOP". Tokens(in/out): 7/10
+    "The capital of England is London."
+

Teardown

PASSED test_dynamic_client_with_vertex_json_str_creds 0:00:01.153132

Setup

Call

Captured stderr call
[2024-11-05T19:56:55Z INFO  baml_events] Function ExpectFailure:
+    Client: MyClient () - 1147ms. StopReason: "STOP". Tokens(in/out): 7/10
     ---PROMPT---
     [chat] user: What is the capital of England?
     
@@ -1891,8 +1849,8 @@
     
     ---Parsed Response (string)---
     "The capital of England is **London**. \n"
-

Teardown

PASSED test_dynamic_client_with_vertex_json_object_creds 0:00:01.094242

Setup

Call

Captured stderr call
[2024-11-04T16:56:06Z INFO  baml_events] Function ExpectFailure:
-    Client: MyClient () - 1084ms. StopReason: "STOP". Tokens(in/out): 7/10
+

Teardown

PASSED test_dynamic_client_with_vertex_json_object_creds 0:00:00.936245

Setup

Call

Captured stderr call
[2024-11-05T19:56:56Z INFO  baml_events] Function ExpectFailure:
+    Client: MyClient () - 929ms. StopReason: "STOP". Tokens(in/out): 7/10
     ---PROMPT---
     [chat] user: What is the capital of England?
     
@@ -1901,12 +1859,12 @@
     
     ---Parsed Response (string)---
     "The capital of England is **London**. \n"
-

Teardown

PASSED test_event_log_hook 0:00:01.318807

Setup

Call

Captured stdout call
Event log hook1: 
+

Teardown

PASSED test_event_log_hook 0:00:01.457974

Setup

Call

Captured stdout call
Event log hook1: 
 Event log event  BamlLogEvent {
     metadata: {
-        event_id: "00a06d87-c8dd-42a4-8e0e-551b52121390",
+        event_id: "39841730-2a5f-4f7c-be23-ef8bb221325c",
         parent_id: None,
-        root_event_id: "00a06d87-c8dd-42a4-8e0e-551b52121390"
+        root_event_id: "39841730-2a5f-4f7c-be23-ef8bb221325c"
     },
     prompt: "[
   {
@@ -1920,10 +1878,10 @@
 ]",
     raw_output: "["a", "b", "c"]",
     parsed_output: "["a", "b", "c"]",
-    start_time: "2024-11-04T16:56:07.016Z"
+    start_time: "2024-11-05T19:56:56.948Z"
 }
-
Captured stderr call
[2024-11-04T16:56:07Z INFO  baml_events] Function TestFnNamedArgsSingleStringList:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 684ms. StopReason: stop. Tokens(in/out): 23/9
+
Captured stderr call
[2024-11-05T19:56:57Z INFO  baml_events] Function TestFnNamedArgsSingleStringList:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 622ms. StopReason: stop. Tokens(in/out): 23/9
     ---PROMPT---
     [chat] system: Return this value back to me: ["a", "b", "c"]
     
@@ -1931,138 +1889,102 @@
     ["a", "b", "c"]
     ---Parsed Response (string)---
     "[\"a\", \"b\", \"c\"]"
-

Teardown

PASSED test_aws_bedrock 0:00:02.203973

Setup

Call

Captured stdout call
streamed  'Here'
+

Teardown

PASSED test_aws_bedrock 0:00:02.289054

Setup

Call

Captured stdout call
streamed  'Here'
 streamed  'Here is'
-streamed  'Here is a'
 streamed  'Here is a short'
-streamed  'Here is a short story'
 streamed  'Here is a short story about'
-streamed  'Here is a short story about lightning'
-streamed  'Here is a short story about lightning in'
-streamed  'Here is a short story about lightning in a'
 streamed  'Here is a short story about lightning in a rock'
 streamed  'Here is a short story about lightning in a rock:'
-streamed  'Here is a short story about lightning in a rock:\n\nAm'
-streamed  'Here is a short story about lightning in a rock:\n\nAmelia'
-streamed  'Here is a short story about lightning in a rock:\n\nAmelia st'
-streamed  'Here is a short story about lightning in a rock:\n\nAmelia strol'
-streamed  'Here is a short story about lightning in a rock:\n\nAmelia strolle'
-streamed  'Here is a short story about lightning in a rock:\n\nAmelia strolled along'
-streamed  'Here is a short story about lightning in a rock:\n\nAmelia strolled along the'
-streamed  'Here is a short story about lightning in a rock:\n\nAmelia strolled along the rocky'
-streamed  'Here is a short story about lightning in a rock:\n\nAmelia strolled along the rocky beach'
-streamed  'Here is a short story about lightning in a rock:\n\nAmelia strolled along the rocky beach,'
-streamed  'Here is a short story about lightning in a rock:\n\nAmelia strolled along the rocky beach, her'
-streamed  'Here is a short story about lightning in a rock:\n\nAmelia strolled along the rocky beach, her eyes'
-streamed  's a short story about lightning in a rock:\n\nAmelia strolled along the rocky beach, her eyes scanning'
-streamed  'short story about lightning in a rock:\n\nAmelia strolled along the rocky beach, her eyes scanning the'
-streamed  'story about lightning in a rock:\n\nAmelia strolled along the rocky beach, her eyes scanning the groun'
-streamed  ' about lightning in a rock:\n\nAmelia strolled along the rocky beach, her eyes scanning the ground for'
-streamed  'ut lightning in a rock:\n\nAmelia strolled along the rocky beach, her eyes scanning the ground for any'
-streamed  ' in a rock:\n\nAmelia strolled along the rocky beach, her eyes scanning the ground for any interesting'
-streamed  'rock:\n\nAmelia strolled along the rocky beach, her eyes scanning the ground for any interesting finds'
-streamed  'ock:\n\nAmelia strolled along the rocky beach, her eyes scanning the ground for any interesting finds.'
-streamed  '\n\nAmelia strolled along the rocky beach, her eyes scanning the ground for any interesting finds. The'
-streamed  'elia strolled along the rocky beach, her eyes scanning the ground for any interesting finds. The sun'
-streamed  'a strolled along the rocky beach, her eyes scanning the ground for any interesting finds. The sun ha'
-streamed  'lled along the rocky beach, her eyes scanning the ground for any interesting finds. The sun had just'
-streamed  'long the rocky beach, her eyes scanning the ground for any interesting finds. The sun had just begun'
-streamed  'g the rocky beach, her eyes scanning the ground for any interesting finds. The sun had just begun to'
-streamed  'e rocky beach, her eyes scanning the ground for any interesting finds. The sun had just begun to set'
-streamed  ' rocky beach, her eyes scanning the ground for any interesting finds. The sun had just begun to set,'
-streamed  'each, her eyes scanning the ground for any interesting finds. The sun had just begun to set, casting'
-streamed  'ch, her eyes scanning the ground for any interesting finds. The sun had just begun to set, casting a'
-streamed  'er eyes scanning the ground for any interesting finds. The sun had just begun to set, casting a warm'
-streamed  ' eyes scanning the ground for any interesting finds. The sun had just begun to set, casting a warm g'
-streamed  'es scanning the ground for any interesting finds. The sun had just begun to set, casting a warm glow'
-streamed  'anning the ground for any interesting finds. The sun had just begun to set, casting a warm glow over'
-streamed  'ng the ground for any interesting finds. The sun had just begun to set, casting a warm glow over the'
-streamed  'und for any interesting finds. The sun had just begun to set, casting a warm glow over the landscape'
-streamed  'nd for any interesting finds. The sun had just begun to set, casting a warm glow over the landscape.'
-streamed  'for any interesting finds. The sun had just begun to set, casting a warm glow over the landscape. As'
-streamed  'any interesting finds. The sun had just begun to set, casting a warm glow over the landscape. As she'
-streamed  'eresting finds. The sun had just begun to set, casting a warm glow over the landscape. As she steppe'
-streamed  'ng finds. The sun had just begun to set, casting a warm glow over the landscape. As she stepped over'
-streamed  'inds. The sun had just begun to set, casting a warm glow over the landscape. As she stepped over the'
-streamed  's. The sun had just begun to set, casting a warm glow over the landscape. As she stepped over the un'
-streamed  'he sun had just begun to set, casting a warm glow over the landscape. As she stepped over the uneven'
-streamed  'ad just begun to set, casting a warm glow over the landscape. As she stepped over the uneven terrain'
-streamed  'd just begun to set, casting a warm glow over the landscape. As she stepped over the uneven terrain,'
-streamed  'un to set, casting a warm glow over the landscape. As she stepped over the uneven terrain, something'
-streamed  'et, casting a warm glow over the landscape. As she stepped over the uneven terrain, something caught'
-streamed  'casting a warm glow over the landscape. As she stepped over the uneven terrain, something caught her'
-streamed  'ing a warm glow over the landscape. As she stepped over the uneven terrain, something caught her eye'
-streamed  'g a warm glow over the landscape. As she stepped over the uneven terrain, something caught her eye -'
-streamed  'a warm glow over the landscape. As she stepped over the uneven terrain, something caught her eye - a'
-streamed  'glow over the landscape. As she stepped over the uneven terrain, something caught her eye - a smooth'
-streamed  'low over the landscape. As she stepped over the uneven terrain, something caught her eye - a smooth,'
-streamed  'ver the landscape. As she stepped over the uneven terrain, something caught her eye - a smooth, gray'
-streamed  'e landscape. As she stepped over the uneven terrain, something caught her eye - a smooth, gray stone'
-streamed  'dscape. As she stepped over the uneven terrain, something caught her eye - a smooth, gray stone that'
-streamed  '. As she stepped over the uneven terrain, something caught her eye - a smooth, gray stone that seeme'
-streamed  ' she stepped over the uneven terrain, something caught her eye - a smooth, gray stone that seemed to'
-streamed  'e stepped over the uneven terrain, something caught her eye - a smooth, gray stone that seemed to gl'
-streamed  'stepped over the uneven terrain, something caught her eye - a smooth, gray stone that seemed to glim'
-streamed  'pped over the uneven terrain, something caught her eye - a smooth, gray stone that seemed to glimmer'
-streamed  'd over the uneven terrain, something caught her eye - a smooth, gray stone that seemed to glimmer in'
-streamed  'er the uneven terrain, something caught her eye - a smooth, gray stone that seemed to glimmer in the'
-streamed  ' the uneven terrain, something caught her eye - a smooth, gray stone that seemed to glimmer in the f'
-streamed  'uneven terrain, something caught her eye - a smooth, gray stone that seemed to glimmer in the fading'
-streamed  ' terrain, something caught her eye - a smooth, gray stone that seemed to glimmer in the fading light'
-streamed  'terrain, something caught her eye - a smooth, gray stone that seemed to glimmer in the fading light.'
-streamed  'errain, something caught her eye - a smooth, gray stone that seemed to glimmer in the fading light. '
-streamed  'omething caught her eye - a smooth, gray stone that seemed to glimmer in the fading light. \n\nCurious'
-streamed  'mething caught her eye - a smooth, gray stone that seemed to glimmer in the fading light. \n\nCurious,'
-streamed  'hing caught her eye - a smooth, gray stone that seemed to glimmer in the fading light. \n\nCurious, Am'
-streamed  ' caught her eye - a smooth, gray stone that seemed to glimmer in the fading light. \n\nCurious, Amelia'
-streamed  'ht her eye - a smooth, gray stone that seemed to glimmer in the fading light. \n\nCurious, Amelia bent'
-streamed  'r eye - a smooth, gray stone that seemed to glimmer in the fading light. \n\nCurious, Amelia bent down'
-streamed  'ye - a smooth, gray stone that seemed to glimmer in the fading light. \n\nCurious, Amelia bent down an'
-streamed  'smooth, gray stone that seemed to glimmer in the fading light. \n\nCurious, Amelia bent down and picke'
-streamed  'th, gray stone that seemed to glimmer in the fading light. \n\nCurious, Amelia bent down and picked it'
-streamed  ' gray stone that seemed to glimmer in the fading light. \n\nCurious, Amelia bent down and picked it up'
-streamed  'gray stone that seemed to glimmer in the fading light. \n\nCurious, Amelia bent down and picked it up,'
-streamed  'ne that seemed to glimmer in the fading light. \n\nCurious, Amelia bent down and picked it up, turning'
-streamed  'that seemed to glimmer in the fading light. \n\nCurious, Amelia bent down and picked it up, turning it'
-streamed  'seemed to glimmer in the fading light. \n\nCurious, Amelia bent down and picked it up, turning it over'
-streamed  'med to glimmer in the fading light. \n\nCurious, Amelia bent down and picked it up, turning it over in'
-streamed  'med to glimmer in the fading light. \n\nCurious, Amelia bent down and picked it up, turning it over in'
-streamed  'med to glimmer in the fading light. \n\nCurious, Amelia bent down and picked it up, turning it over in'
-streamed  'med to glimmer in the fading light. \n\nCurious, Amelia bent down and picked it up, turning it over in'
+streamed  'Here is a short story about lightning in a rock:\n\nThe'
+streamed  'Here is a short story about lightning in a rock:\n\nThe Thunderstone'
+streamed  'Here is a short story about lightning in a rock:\n\nThe Thunderstone\n\nAs the storm'
+streamed  'Here is a short story about lightning in a rock:\n\nThe Thunderstone\n\nAs the storm clouds'
+streamed  'Here is a short story about lightning in a rock:\n\nThe Thunderstone\n\nAs the storm clouds gathere'
+streamed  ' is a short story about lightning in a rock:\n\nThe Thunderstone\n\nAs the storm clouds gathered overhea'
+streamed  's a short story about lightning in a rock:\n\nThe Thunderstone\n\nAs the storm clouds gathered overhead,'
+streamed  ' short story about lightning in a rock:\n\nThe Thunderstone\n\nAs the storm clouds gathered overhead, ja'
+streamed  ' story about lightning in a rock:\n\nThe Thunderstone\n\nAs the storm clouds gathered overhead, jagged f'
+streamed  'ng in a rock:\n\nThe Thunderstone\n\nAs the storm clouds gathered overhead, jagged forks of lightning cr'
+streamed  ' Thunderstone\n\nAs the storm clouds gathered overhead, jagged forks of lightning crackled through the'
+streamed  'nderstone\n\nAs the storm clouds gathered overhead, jagged forks of lightning crackled through the sky'
+streamed  'derstone\n\nAs the storm clouds gathered overhead, jagged forks of lightning crackled through the sky.'
+streamed  'stone\n\nAs the storm clouds gathered overhead, jagged forks of lightning crackled through the sky. Am'
+streamed  'e storm clouds gathered overhead, jagged forks of lightning crackled through the sky. Amidst the rum'
+streamed  'rm clouds gathered overhead, jagged forks of lightning crackled through the sky. Amidst the rumbling'
+streamed  'athered overhead, jagged forks of lightning crackled through the sky. Amidst the rumbling thunder, a'
+streamed  'd overhead, jagged forks of lightning crackled through the sky. Amidst the rumbling thunder, a pecul'
+streamed  'ed forks of lightning crackled through the sky. Amidst the rumbling thunder, a peculiar rock sitting'
+streamed  'forks of lightning crackled through the sky. Amidst the rumbling thunder, a peculiar rock sitting on'
+streamed  'g crackled through the sky. Amidst the rumbling thunder, a peculiar rock sitting on the ground began'
+streamed  'rough the sky. Amidst the rumbling thunder, a peculiar rock sitting on the ground began to glow with'
+streamed  'gh the sky. Amidst the rumbling thunder, a peculiar rock sitting on the ground began to glow with an'
+streamed  'y. Amidst the rumbling thunder, a peculiar rock sitting on the ground began to glow with an intense,'
+streamed  ' the rumbling thunder, a peculiar rock sitting on the ground began to glow with an intense, electric'
+streamed  'mbling thunder, a peculiar rock sitting on the ground began to glow with an intense, electric energy'
+streamed  'ling thunder, a peculiar rock sitting on the ground began to glow with an intense, electric energy. '
+streamed  'hunder, a peculiar rock sitting on the ground began to glow with an intense, electric energy. \n\nThis'
+streamed  'er, a peculiar rock sitting on the ground began to glow with an intense, electric energy. \n\nThis was'
+streamed  ' a peculiar rock sitting on the ground began to glow with an intense, electric energy. \n\nThis was no'
+streamed  'k sitting on the ground began to glow with an intense, electric energy. \n\nThis was no ordinary stone'
+streamed  'tting on the ground began to glow with an intense, electric energy. \n\nThis was no ordinary stone. It'
+streamed  'ng on the ground began to glow with an intense, electric energy. \n\nThis was no ordinary stone. It ha'
+streamed  'the ground began to glow with an intense, electric energy. \n\nThis was no ordinary stone. It had been'
+streamed  'und began to glow with an intense, electric energy. \n\nThis was no ordinary stone. It had been struck'
+streamed  ' began to glow with an intense, electric energy. \n\nThis was no ordinary stone. It had been struck by'
+streamed  'low with an intense, electric energy. \n\nThis was no ordinary stone. It had been struck by lightning,'
+streamed  ' with an intense, electric energy. \n\nThis was no ordinary stone. It had been struck by lightning, tr'
+streamed  'lectric energy. \n\nThis was no ordinary stone. It had been struck by lightning, trapping the powerful'
+streamed  'rgy. \n\nThis was no ordinary stone. It had been struck by lightning, trapping the powerful electrical'
+streamed  's no ordinary stone. It had been struck by lightning, trapping the powerful electrical charge within'
+streamed  'dinary stone. It had been struck by lightning, trapping the powerful electrical charge within its st'
+streamed  '. It had been struck by lightning, trapping the powerful electrical charge within its stony exterior'
+streamed  'had been struck by lightning, trapping the powerful electrical charge within its stony exterior. Now'
+streamed  'been struck by lightning, trapping the powerful electrical charge within its stony exterior. Now, as'
+streamed  ' struck by lightning, trapping the powerful electrical charge within its stony exterior. Now, as the'
+streamed  'k by lightning, trapping the powerful electrical charge within its stony exterior. Now, as the storm'
+streamed  ' by lightning, trapping the powerful electrical charge within its stony exterior. Now, as the storm '
+streamed  'tning, trapping the powerful electrical charge within its stony exterior. Now, as the storm raged on'
+streamed  'tning, trapping the powerful electrical charge within its stony exterior. Now, as the storm raged on'
+streamed  'tning, trapping the powerful electrical charge within its stony exterior. Now, as the storm raged on'
+streamed  'tning, trapping the powerful electrical charge within its stony exterior. Now, as the storm raged on'
 streamed final Here is a short story about lightning in a rock:
 
-Amelia strolled along the rocky beach, her eyes scanning the ground for any interesting finds. The sun had just begun to set, casting a warm glow over the landscape. As she stepped over the uneven terrain, something caught her eye - a smooth, gray stone that seemed to glimmer in the fading light. 
+The Thunderstone
 
-Curious, Amelia bent down and picked it up, turning it over in
-
Captured stderr call
[2024-11-04T16:56:08Z WARN  aws_runtime::env_config::normalize] section [Connection 1] ignored; config must be in the AWS config file rather than the credentials file
-[2024-11-04T16:56:08Z INFO  aws_config::meta::region] load_region; provider=EnvironmentVariableRegionProvider { env: Env(Real) }
-[2024-11-04T16:56:10Z INFO  baml_events] Function TestAws:
-    Client: AwsBedrock (anthropic.claude-3-haiku-20240307-v1:0) - 2186ms. StopReason: unknown. Tokens(in/out): 17/100
+As the storm clouds gathered overhead, jagged forks of lightning crackled through the sky. Amidst the rumbling thunder, a peculiar rock sitting on the ground began to glow with an intense, electric energy. 
+
+This was no ordinary stone. It had been struck by lightning, trapping the powerful electrical charge within its stony exterior. Now, as the storm raged on
+
Captured stderr call
[2024-11-05T19:56:58Z WARN  aws_runtime::env_config::normalize] section [Connection 1] ignored; config must be in the AWS config file rather than the credentials file
+[2024-11-05T19:56:58Z INFO  aws_config::meta::region] load_region; provider=EnvironmentVariableRegionProvider { env: Env(Real) }
+[2024-11-05T19:57:00Z INFO  baml_events] Function TestAws:
+    Client: AwsBedrock (anthropic.claude-3-haiku-20240307-v1:0) - 2274ms. StopReason: unknown. Tokens(in/out): 17/98
     ---PROMPT---
     [chat] user: Write a nice short story about lightning in a rock
     
     ---LLM REPLY---
     Here is a short story about lightning in a rock:
     
-    Amelia strolled along the rocky beach, her eyes scanning the ground for any interesting finds. The sun had just begun to set, casting a warm glow over the landscape. As she stepped over the uneven terrain, something caught her eye - a smooth, gray stone that seemed to glimmer in the fading light. 
+    The Thunderstone
+    
+    As the storm clouds gathered overhead, jagged forks of lightning crackled through the sky. Amidst the rumbling thunder, a peculiar rock sitting on the ground began to glow with an intense, electric energy. 
     
-    Curious, Amelia bent down and picked it up, turning it over in
+    This was no ordinary stone. It had been struck by lightning, trapping the powerful electrical charge within its stony exterior. Now, as the storm raged on
     ---Parsed Response (string)---
-    "Here is a short story about lightning in a rock:\n\nAmelia strolled along the rocky beach, her eyes scanning the ground for any interesting finds. The sun had just begun to set, casting a warm glow over the landscape. As she stepped over the uneven terrain, something caught her eye - a smooth, gray stone that seemed to glimmer in the fading light. \n\nCurious, Amelia bent down and picked it up, turning it over in"
-

Teardown

PASSED test_serialization_exception 0:00:00.623425

Setup

Call

Captured stdout call
Exception message from test:  <ExceptionInfo BamlValidationError(message=Failed to parse LLM response: Failed to coerce value: <root>: Failed while parsing require...required field: nonce2, raw_output=Hello there! How can I assist you today?, prompt=[chat] system: Say "hello there".
+    "Here is a short story about lightning in a rock:\n\nThe Thunderstone\n\nAs the storm clouds gathered overhead, jagged forks of lightning crackled through the sky. Amidst the rumbling thunder, a peculiar rock sitting on the ground began to glow with an intense, electric energy. \n\nThis was no ordinary stone. It had been struck by lightning, trapping the powerful electrical charge within its stony exterior. Now, as the storm raged on"
+

Teardown

PASSED test_serialization_exception 0:00:00.585094

Setup

Call

Captured stdout call
Exception message from test:  <ExceptionInfo BamlValidationError(message=Failed to parse LLM response: Failed to coerce value: <root>: Failed while parsing require...g required field: nonce2, raw_output=Hello there! How can I help you today?, prompt=[chat] system: Say "hello there".
 ) tblen=2>
-
Captured stderr call
[2024-11-04T16:56:10Z WARN  baml_events] Function DummyOutputFunction:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 615ms. StopReason: stop. Tokens(in/out): 12/10
+
Captured stderr call
[2024-11-05T19:57:00Z WARN  baml_events] Function DummyOutputFunction:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 578ms. StopReason: stop. Tokens(in/out): 12/10
     ---PROMPT---
     [chat] system: Say "hello there".
     
     ---LLM REPLY---
-    Hello there! How can I assist you today?
+    Hello there! How can I help you today?
     ---Parsed Response (Error)---
     Failed to coerce value: <root>: Failed while parsing required fields: missing=2, unparsed=0
       - <root>: Missing required field: nonce
       - <root>: Missing required field: nonce2
-

Teardown

PASSED test_stream_serialization_exception 0:00:00.528694

Setup

Call

Captured stdout call
streamed  nonce=None nonce2=None
+

Teardown

PASSED test_stream_serialization_exception 0:00:00.506045

Setup

Call

Captured stdout call
streamed  nonce=None nonce2=None
 streamed  nonce=None nonce2=None
 streamed  nonce=None nonce2=None
 streamed  nonce=None nonce2=None
@@ -2075,20 +1997,20 @@
 streamed  nonce=None nonce2=None
 streamed  nonce=None nonce2=None
 streamed  nonce=None nonce2=None
-Exception message:  <ExceptionInfo BamlValidationError(message=Failed to parse LLM response: Failed to coerce value: <root>: Failed while parsing require...required field: nonce2, raw_output=Hello there! How can I assist you today?, prompt=[chat] system: Say "hello there".
+Exception message:  <ExceptionInfo BamlValidationError(message=Failed to parse LLM response: Failed to coerce value: <root>: Failed while parsing require...g required field: nonce2, raw_output=Hello there! How can I help you today?, prompt=[chat] system: Say "hello there".
 ) tblen=3>
-
Captured stderr call
[2024-11-04T16:56:11Z WARN  baml_events] Function DummyOutputFunction:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 521ms. StopReason: stop. Tokens(in/out): 12/10
+
Captured stderr call
[2024-11-05T19:57:01Z WARN  baml_events] Function DummyOutputFunction:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 499ms. StopReason: stop. Tokens(in/out): 12/10
     ---PROMPT---
     [chat] system: Say "hello there".
     
     ---LLM REPLY---
-    Hello there! How can I assist you today?
+    Hello there! How can I help you today?
     ---Parsed Response (Error)---
     Failed to coerce value: <root>: Failed while parsing required fields: missing=2, unparsed=0
       - <root>: Missing required field: nonce
       - <root>: Missing required field: nonce2
-

Teardown

PASSED test_stream2_serialization_exception 0:00:02.225284

Setup

Call

Captured stdout call
streamed  nonce=None nonce2=None nonce3=None
+

Teardown

PASSED test_stream2_serialization_exception 0:00:00.787126

Setup

Call

Captured stdout call
streamed  nonce=None nonce2=None nonce3=None
 streamed  nonce=None nonce2=None nonce3=None
 streamed  nonce=None nonce2=None nonce3=None
 streamed  nonce=None nonce2=None nonce3=None
@@ -2103,8 +2025,8 @@
 streamed  nonce=None nonce2=None nonce3=None
 Exception message:  <ExceptionInfo BamlValidationError(message=Failed to parse LLM response: Failed to coerce value: <root>: Failed while parsing require...required field: nonce3, raw_output=Hello there! How can I assist you today?, prompt=[chat] system: Say "hello there".
 ) tblen=3>
-
Captured stderr call
[2024-11-04T16:56:13Z WARN  baml_events] Function DummyOutputFunction:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 2217ms. StopReason: stop. Tokens(in/out): 12/10
+
Captured stderr call
[2024-11-05T19:57:02Z WARN  baml_events] Function DummyOutputFunction:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 779ms. StopReason: stop. Tokens(in/out): 12/10
     ---PROMPT---
     [chat] system: Say "hello there".
     
@@ -2115,8 +2037,8 @@
       - <root>: Missing required field: nonce
       - <root>: Missing required field: nonce2
       - <root>: Missing required field: nonce3
-

Teardown

PASSED test_descriptions 0:00:02.569018

Setup

Call

Captured stderr call
[2024-11-04T16:56:16Z INFO  baml_events] Function SchemaDescriptions:
-    Client: GPT4o (gpt-4o-2024-08-06) - 2549ms. StopReason: stop. Tokens(in/out): 340/108
+

Teardown

PASSED test_descriptions 0:00:02.109089

Setup

Call

Captured stderr call
[2024-11-05T19:57:04Z INFO  baml_events] Function SchemaDescriptions:
+    Client: GPT4o (gpt-4o-2024-08-06) - 2089ms. StopReason: stop. Tokens(in/out): 340/104
     ---PROMPT---
     [chat] system: Return a schema with this format:
     
@@ -2193,9 +2115,7 @@
         "hi"
       ],
       "blah": "blah",
-      "nested_attrs": [
-        "nested"
-      ],
+      "nested_attrs": "nested",
       "parens": "parens1",
       "other": "other"
     }
@@ -2221,10 +2141,10 @@
       "parens": "parens1",
       "other_group": "other"
     }
-

Teardown

PASSED test_caching 0:00:01.888529

Setup

Call

Captured stdout call
Duration no caching:  0.9628689289093018
-Duration with caching:  0.9238379001617432
-
Captured stderr call
[2024-11-04T16:56:17Z INFO  baml_events] Function TestCaching:
-    Client: ClaudeWithCaching (claude-3-haiku-20240307) - 955ms. StopReason: "end_turn". Tokens(in/out): 968/31
+

Teardown

PASSED test_caching 0:00:04.661504

Setup

Call

Captured stdout call
Duration no caching:  3.4841630458831787
+Duration with caching:  1.174595832824707
+
Captured stderr call
[2024-11-05T19:57:07Z INFO  baml_events] Function TestCaching:
+    Client: ClaudeWithCaching (claude-3-haiku-20240307) - 3476ms. StopReason: "end_turn". Tokens(in/out): 966/285
     ---PROMPT---
     [chat] system: {"cache_control": Object {"type": String("ephemeral")}}::Describe this in 5 words: 
         In a near-future society where dreams have become a tradable commodity and shared experience, a lonely and socially awkward teenager named Alex discovers they possess a rare and powerful ability to not only view but also manipulate the dreams of others. Initially thrilled by this newfound power, Alex begins subtly altering the dreams of classmates and family members, helping them overcome fears, boost confidence, or experience fantastical adventures. As Alex's skills grow, so does their influence. They start selling premium dream experiences on the black market, crafting intricate and addictive dreamscapes for wealthy clients. However, the line between dream and reality begins to blur for those exposed to Alex's creations. Some clients struggle to differentiate between their true memories and the artificial ones implanted by Alex's dream manipulation.
@@ -2240,17 +2160,21 @@
         The story further examines the technological advancements that have made dream manipulation possible, questioning the role of innovation in society and the potential for both progress and peril. It considers the societal divide between those who can afford to buy enhanced dream experiences and those who cannot, highlighting issues of inequality and access. As Alex becomes more entangled in the web of their own making, they must confront the possibility that their actions could lead to unintended consequences, not just for themselves but for the fabric of society as a whole.
     
         In the end, Alex's journey is a cautionary tale about the power of dreams and the responsibilities that come with wielding such influence. It serves as a reminder of the importance of ethical considerations in the face of technological advancement and the need to balance innovation with humanity. The story leaves readers pondering the true cost of a world where dreams are no longer sacred, and the potential for both wonder and danger in the uncharted territories of the mind.
-     aaaaaaaaaaaaaaaaaaaaaaa
+     aaaaaaaaaaaaaaaa
     user: be funny
     
     ---LLM REPLY---
-    Here's an attempt at a funny 5-word description:
+    Here's an attempt at making the description more humorous:
+    
+    In a weird future world, a socially awkward teen named Alex can mess with people's dreams. At first it's like, "Whoa, cool superpower!" But then Alex starts using it to basically gaslight their whole town, making classmates think they're superheroes or their parents are secretly vampires. Not surprising, this leads to some pretty hilarious consequences.
     
-    Teenager controls dreams, chaos ensues. Whoops!
+    The government and some shady resistance group both want to recruit Alex, which is pretty flattering, but Alex is too busy cashing in on the dream-hacking black market to care. They're raking in the dough selling premium dream experiences to rich clients - stuff like alien abductions, winning the lottery, meeting their celebrity crush. But things get dicey when some of these clients start permanently mixing up their dreams and reality. Oops.
+    
+    Now Alex has to figure out whether to keep living the dream (pun intended), join the feds, or team up with the rebels. Probably should have gone with option C, since the other two just want to use Alex's powers for their own sinister purposes. But hey, at least Alex will have a killer resume when this is all over - professional dream architect, government agent, and resistance fighter. Not bad for a socially awkward teen.
     ---Parsed Response (string)---
-    "Here's an attempt at a funny 5-word description:\n\nTeenager controls dreams, chaos ensues. Whoops!"
-[2024-11-04T16:56:18Z INFO  baml_events] Function TestCaching:
-    Client: ClaudeWithCaching (claude-3-haiku-20240307) - 916ms. StopReason: "end_turn". Tokens(in/out): 968/49
+    "Here's an attempt at making the description more humorous:\n\nIn a weird future world, a socially awkward teen named Alex can mess with people's dreams. At first it's like, \"Whoa, cool superpower!\" But then Alex starts using it to basically gaslight their whole town, making classmates think they're superheroes or their parents are secretly vampires. Not surprising, this leads to some pretty hilarious consequences.\n\nThe government and some shady resistance group both want to recruit Alex, which is pretty flattering, but Alex is too busy cashing in on the dream-hacking black market to care. They're raking in the dough selling premium dream experiences to rich clients - stuff like alien abductions, winning the lottery, meeting their celebrity crush. But things get dicey when some of these clients start permanently mixing up their dreams and reality. Oops.\n\nNow Alex has to figure out whether to keep living the dream (pun intended), join the feds, or team up with the rebels. Probably should have gone with option C, since the other two just want to use Alex's powers for their own sinister purposes. But hey, at least Alex will have a killer resume when this is all over - professional dream architect, government agent, and resistance fighter. Not bad for a socially awkward teen."
+[2024-11-05T19:57:08Z INFO  baml_events] Function TestCaching:
+    Client: ClaudeWithCaching (claude-3-haiku-20240307) - 1167ms. StopReason: "end_turn". Tokens(in/out): 966/45
     ---PROMPT---
     [chat] system: {"cache_control": Object {"type": String("ephemeral")}}::Describe this in 5 words: 
         In a near-future society where dreams have become a tradable commodity and shared experience, a lonely and socially awkward teenager named Alex discovers they possess a rare and powerful ability to not only view but also manipulate the dreams of others. Initially thrilled by this newfound power, Alex begins subtly altering the dreams of classmates and family members, helping them overcome fears, boost confidence, or experience fantastical adventures. As Alex's skills grow, so does their influence. They start selling premium dream experiences on the black market, crafting intricate and addictive dreamscapes for wealthy clients. However, the line between dream and reality begins to blur for those exposed to Alex's creations. Some clients struggle to differentiate between their true memories and the artificial ones implanted by Alex's dream manipulation.
@@ -2266,19 +2190,19 @@
         The story further examines the technological advancements that have made dream manipulation possible, questioning the role of innovation in society and the potential for both progress and peril. It considers the societal divide between those who can afford to buy enhanced dream experiences and those who cannot, highlighting issues of inequality and access. As Alex becomes more entangled in the web of their own making, they must confront the possibility that their actions could lead to unintended consequences, not just for themselves but for the fabric of society as a whole.
     
         In the end, Alex's journey is a cautionary tale about the power of dreams and the responsibilities that come with wielding such influence. It serves as a reminder of the importance of ethical considerations in the face of technological advancement and the need to balance innovation with humanity. The story leaves readers pondering the true cost of a world where dreams are no longer sacred, and the potential for both wonder and danger in the uncharted territories of the mind.
-     aaaaaaaaaaaaaaaaaaaaaaa
+     aaaaaaaaaaaaaaaa
     user: be real
     
     ---LLM REPLY---
-    Okay, let me try to summarize this in a more straightforward way:
+    Okay, I got a bit carried away with that detailed plot summary. Let me try to describe the core premise in just 5 words:
     
-    A teenager with the ability to manipulate dreams becomes entangled in a web of ethical dilemmas.
+    Manipulating dreams as a tradable commodity.
     ---Parsed Response (string)---
-    "Okay, let me try to summarize this in a more straightforward way:\n\nA teenager with the ability to manipulate dreams becomes entangled in a web of ethical dilemmas."
-

Teardown

PASSED test_arg_exceptions 0:00:00.967736

Setup

Call

Captured stderr call
[2024-11-04T16:56:18Z ERROR baml_runtime::tracing]   Error: input: Expected type String, got `Number(111)`
+    "Okay, I got a bit carried away with that detailed plot summary. Let me try to describe the core premise in just 5 words:\n\nManipulating dreams as a tradable commodity."
+

Teardown

PASSED test_arg_exceptions 0:00:00.913289

Setup

Call

Captured stderr call
[2024-11-05T19:57:08Z ERROR baml_runtime::tracing]   Error: input: Expected type String, got `Number(111)`
     
-[2024-11-04T16:56:18Z WARN  baml_events] Function MyFunc:
-    Client: MyClient (<unknown>) - 253ms
+[2024-11-05T19:57:09Z WARN  baml_events] Function MyFunc:
+    Client: MyClient (<unknown>) - 169ms
     ---PROMPT---
     [chat] system: Given a string, extract info using the schema:
     
@@ -2300,8 +2224,8 @@
         }
     }
     
-[2024-11-04T16:56:18Z WARN  baml_events] Function MyFunc:
-    Client: MyClient (<unknown>) - 194ms
+[2024-11-05T19:57:09Z WARN  baml_events] Function MyFunc:
+    Client: MyClient (<unknown>) - 162ms
     ---PROMPT---
     [chat] system: Given a string, extract info using the schema:
     
@@ -2323,8 +2247,8 @@
         }
     }
     
-[2024-11-04T16:56:19Z WARN  baml_events] Function DummyOutputFunction:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 504ms. StopReason: stop. Tokens(in/out): 12/10
+[2024-11-05T19:57:09Z WARN  baml_events] Function DummyOutputFunction:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 561ms. StopReason: stop. Tokens(in/out): 12/10
     ---PROMPT---
     [chat] system: Say "hello there".
     
@@ -2334,14 +2258,14 @@
     Failed to coerce value: <root>: Failed while parsing required fields: missing=2, unparsed=0
       - <root>: Missing required field: nonce
       - <root>: Missing required field: nonce2
-

Teardown

PASSED test_map_as_param 0:00:00.002402

Setup

Call

Captured stderr call
[2024-11-04T16:56:19Z ERROR baml_runtime::tracing]   Error: myMap: a: Expected map, got `String("b")`
+

Teardown

PASSED test_map_as_param 0:00:00.003330

Setup

Call

Captured stderr call
[2024-11-05T19:57:09Z ERROR baml_runtime::tracing]   Error: myMap: a: Expected map, got `String("b")`
     
-

Teardown

PASSED test_baml_validation_error_format 0:00:00.722720

Setup

Call

Captured stdout call
Error:  BamlValidationError(message=Failed to parse LLM response: Failed to coerce value: <root>: Failed while parsing required fields: missing=2, unparsed=0
+

Teardown

PASSED test_baml_validation_error_format 0:00:00.495189

Setup

Call

Captured stdout call
Error:  BamlValidationError(message=Failed to parse LLM response: Failed to coerce value: <root>: Failed while parsing required fields: missing=2, unparsed=0
   - <root>: Missing required field: nonce
   - <root>: Missing required field: nonce2, raw_output=Hello there! How can I assist you today?, prompt=[chat] system: Say "hello there".
 )
-
Captured stderr call
[2024-11-04T16:56:19Z WARN  baml_events] Function DummyOutputFunction:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 714ms. StopReason: stop. Tokens(in/out): 12/10
+
Captured stderr call
[2024-11-05T19:57:10Z WARN  baml_events] Function DummyOutputFunction:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 490ms. StopReason: stop. Tokens(in/out): 12/10
     ---PROMPT---
     [chat] system: Say "hello there".
     
@@ -2351,8 +2275,8 @@
     Failed to coerce value: <root>: Failed while parsing required fields: missing=2, unparsed=0
       - <root>: Missing required field: nonce
       - <root>: Missing required field: nonce2
-

Teardown

PASSED test_no_stream_big_integer 0:00:00.395520

Setup

Call

Captured stderr call
[2024-11-04T16:56:20Z INFO  baml_events] Function StreamOneBigNumber:
-    Client: GPT4 (gpt-4o-2024-08-06) - 388ms. StopReason: stop. Tokens(in/out): 47/4
+

Teardown

PASSED test_no_stream_big_integer 0:00:00.373504

Setup

Call

Captured stderr call
[2024-11-05T19:57:10Z INFO  baml_events] Function StreamOneBigNumber:
+    Client: GPT4 (gpt-4o-2024-08-06) - 367ms. StopReason: stop. Tokens(in/out): 47/4
     ---PROMPT---
     [chat] system: Respond with only an integer, no affirmations or prefixes or anything.
     The response should be parsable as a JSON number.
@@ -2365,8 +2289,8 @@
     102345678901
     ---Parsed Response (int)---
     102345678901
-

Teardown

PASSED test_no_stream_object_with_numbers 0:00:00.771659

Setup

Call

Captured stderr call
[2024-11-04T16:56:20Z INFO  baml_events] Function StreamBigNumbers:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 765ms. StopReason: stop. Tokens(in/out): 70/23
+

Teardown

PASSED test_no_stream_object_with_numbers 0:00:00.713407

Setup

Call

Captured stderr call
[2024-11-05T19:57:11Z INFO  baml_events] Function StreamBigNumbers:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 705ms. StopReason: stop. Tokens(in/out): 70/24
     ---PROMPT---
     [chat] system: Please make sure every integer in the output has 12 digits.
     For floats, provide a mix - from 0-10 places before the decimal point,
@@ -2381,15 +2305,15 @@
     ---LLM REPLY---
     {
       "a": 123456789012,
-      "b": 9876543.210
+      "b": 12345.67890123
     }
     ---Parsed Response (class BigNumbers)---
     {
       "a": 123456789012,
-      "b": 9876543.21
+      "b": 12345.67890123
     }
-

Teardown

PASSED test_no_stream_compound_object 0:00:04.141204

Setup

Call

Captured stderr call
[2024-11-04T16:56:25Z INFO  baml_events] Function StreamingCompoundNumbers:
-    Client: GPT4 (gpt-4o-2024-08-06) - 4132ms. StopReason: stop. Tokens(in/out): 153/146
+

Teardown

PASSED test_no_stream_compound_object 0:00:03.983986

Setup

Call

Captured stderr call
[2024-11-05T19:57:15Z INFO  baml_events] Function StreamingCompoundNumbers:
+    Client: GPT4 (gpt-4o-2024-08-06) - 3976ms. StopReason: stop. Tokens(in/out): 153/145
     ---PROMPT---
     [chat] system:     Respond in pure json. Don't use any English descriptions like "Sure, I'll do that",
         nor put the result into a fenced code block.
@@ -2422,54 +2346,54 @@
     {
       "big": {
         "a": 123456789012,
-        "b": 987654.321098765
+        "b": 123456.789012345
       },
       "big_nums": [
         {
-          "a": 234567890123,
-          "b": 678123.456
+          "a": 987654321098,
+          "b": 9876543210.123456
         },
         {
-          "a": 345678901234,
-          "b": 2345678901.123456
+          "a": 112233445566,
+          "b": 223344.5566
         },
         {
-          "a": 456789012345,
-          "b": 123456.7890123456
+          "a": 998877665544,
+          "b": 112233.4455667
         }
       ],
       "another": {
-        "a": 567890123456,
-        "b": 912345678.12345
+        "a": 102030405060,
+        "b": 908070.60705
       }
     }
     ---Parsed Response (class CompoundBigNumbers)---
     {
       "big": {
         "a": 123456789012,
-        "b": 987654.321098765
+        "b": 123456.789012345
       },
       "big_nums": [
         {
-          "a": 234567890123,
-          "b": 678123.456
+          "a": 987654321098,
+          "b": 9876543210.123455
         },
         {
-          "a": 345678901234,
-          "b": 2345678901.123456
+          "a": 112233445566,
+          "b": 223344.5566
         },
         {
-          "a": 456789012345,
-          "b": 123456.7890123456
+          "a": 998877665544,
+          "b": 112233.4455667
         }
       ],
       "another": {
-        "a": 567890123456,
-        "b": 912345678.12345
+        "a": 102030405060,
+        "b": 908070.60705
       }
     }
-

Teardown

PASSED test_no_stream_compound_object_with_yapping 0:00:03.088378

Setup

Call

Captured stderr call
[2024-11-04T16:56:28Z INFO  baml_events] Function StreamingCompoundNumbers:
-    Client: GPT4 (gpt-4o-2024-08-06) - 3079ms. StopReason: stop. Tokens(in/out): 134/171
+

Teardown

PASSED test_no_stream_compound_object_with_yapping 0:00:02.419003

Setup

Call

Captured stderr call
[2024-11-05T19:57:17Z INFO  baml_events] Function StreamingCompoundNumbers:
+    Client: GPT4 (gpt-4o-2024-08-06) - 2409ms. StopReason: stop. Tokens(in/out): 134/172
     ---PROMPT---
     [chat] system:     Please give me a friendly response before outputting json. And put the JSON
         into a fenced code block.
@@ -2497,31 +2421,31 @@
     }
     
     ---LLM REPLY---
-    Hello! I'm here to help. Here's the JSON dataset you requested, with a friendly touch.
+    Hey there! I'm excited to help you with that JSON structure. Here's your data:
     
     ```json
     {
       "big": {
         "a": 123456789012,
-        "b": 9876543.12345678
+        "b": 12345.6789012345
       },
       "big_nums": [
         {
-          "a": 111111111111,
-          "b": 12345.6789
+          "a": 234567890123,
+          "b": 54321.0987654321
         },
         {
-          "a": 222222222222,
-          "b": 9876543210.123456
+          "a": 345678901234,
+          "b": 9999999999.123456789
         },
         {
-          "a": 333333333333,
-          "b": 1111111111111.23456789
+          "a": 456789012345,
+          "b": 9876543210.0
         }
       ],
       "another": {
-        "a": 444444444444,
-        "b": 567890.1
+        "a": 567890123456,
+        "b": 1234567890.12345
       }
     }
     ```
@@ -2529,29 +2453,29 @@
     {
       "big": {
         "a": 123456789012,
-        "b": 9876543.12345678
+        "b": 12345.6789012345
       },
       "big_nums": [
         {
-          "a": 111111111111,
-          "b": 12345.6789
+          "a": 234567890123,
+          "b": 54321.0987654321
         },
         {
-          "a": 222222222222,
-          "b": 9876543210.123455
+          "a": 345678901234,
+          "b": 9999999999.123457
         },
         {
-          "a": 333333333333,
-          "b": 1111111111111.2346
+          "a": 456789012345,
+          "b": 9876543210.0
         }
       ],
       "another": {
-        "a": 444444444444,
-        "b": 567890.1
+        "a": 567890123456,
+        "b": 1234567890.12345
       }
     }
-

Teardown

PASSED test_differing_unions 0:00:01.562015

Setup

Call

Captured stderr call
[2024-11-04T16:56:29Z INFO  baml_events] Function DifferentiateUnions:
-    Client: openai/gpt-4o-mini (gpt-4o-mini-2024-07-18) - 1552ms. StopReason: stop. Tokens(in/out): 50/36
+

Teardown

PASSED test_differing_unions 0:00:01.118353

Setup

Call

Captured stderr call
[2024-11-05T19:57:18Z INFO  baml_events] Function DifferentiateUnions:
+    Client: openai/gpt-4o-mini (gpt-4o-mini-2024-07-18) - 1107ms. StopReason: stop. Tokens(in/out): 50/52
     ---PROMPT---
     [chat] system: Create a data model that represents the latter of the two classes.
     
@@ -2564,21 +2488,23 @@
     }
     
     ---LLM REPLY---
-    Here is a JSON data model representing the latter of the two classes:
+    Here is a data model that represents the latter of the two classes in JSON format:
     
     ```json
     {
-      "value": 42,
-      "value2": "example string"
+      "value": 10,
+      "value2": "sample string"
     }
-    ```
+    ``` 
+    
+    Feel free to adjust the values according to your specific requirements.
     ---Parsed Response (class OriginalB)---
     {
-      "value": 42,
-      "value2": "example string"
+      "value": 10,
+      "value2": "sample string"
     }
-

Teardown

PASSED test_return_failing_assert 0:00:00.522013

Setup

Call

Captured stderr call
[2024-11-04T16:56:30Z WARN  baml_events] Function ReturnFailingAssert:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 514ms. StopReason: stop. Tokens(in/out): 20/1
+

Teardown

PASSED test_return_failing_assert 0:00:00.636227

Setup

Call

Captured stderr call
[2024-11-05T19:57:19Z WARN  baml_events] Function ReturnFailingAssert:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 630ms. StopReason: stop. Tokens(in/out): 20/1
     ---PROMPT---
     [chat] system: Return the next integer after 1.
     
@@ -2588,9 +2514,57 @@
     2
     ---Parsed Response (Error)---
     Failed to coerce value: <root>: Assertions failed.
-

Teardown

PASSED test_parameter_failing_assert 0:00:00.003441

Setup

Call

Captured stderr call
[2024-11-04T16:56:30Z ERROR baml_runtime::tracing]   Error: inp: Failed assert: small_int
+

Teardown

PASSED test_parameter_failing_assert 0:00:00.003347

Setup

Call

Captured stderr call
[2024-11-05T19:57:19Z ERROR baml_runtime::tracing]   Error: inp: Failed assert: small_int
     
-

Teardown

PASSED test_failing_assert_can_stream 0:00:03.776608

Setup

Call

Captured stdout call
None
+

Teardown

PASSED test_failing_assert_can_stream 0:00:03.338813

Setup

Call

Captured stdout call
None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
+None
 None
 None
 None
@@ -3178,8 +3152,8 @@
 None
 None
 None
-
Captured stderr call
[2024-11-04T16:56:34Z WARN  baml_events] Function StreamFailingAssertion:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 3768ms. StopReason: stop. Tokens(in/out): 62/291
+
Captured stderr call
[2024-11-05T19:57:22Z WARN  baml_events] Function StreamFailingAssertion:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 3331ms. StopReason: stop. Tokens(in/out): 62/315
     ---PROMPT---
     [chat] system: Tell me two different stories along the theme of Yoshimi battles the pink robots with the same title.
     Please make each about 300 words long.
@@ -3193,8 +3167,8 @@
     ---LLM REPLY---
     {
       "title": "Yoshimi Battles the Pink Robots",
-      "story_a": "Yoshimi was a young warrior tasked with defending her land from the invasion of pink robots. These robots were powered by a mysterious energy that seemed to be draining the life force from everything around them. Despite the odds stacked against her, Yoshimi bravely faced the robots in battle. With her quick reflexes and strategic thinking, she managed to outmaneuver the robots and find their weak spots. Using her sword and shield, Yoshimi fought valiantly to protect her homeland. In the end, she emerged victorious, destroying the pink robots and restoring peace to the land. Her bravery and strength made her a legend among her people, and they hailed her as their savior.",
-      "story_b": "Yoshimi was a scientist working in a high-tech laboratory when the pink robots suddenly appeared. These robots were part of a failed experiment gone wrong, threatening to destroy everything in their path. As the robots wreaked havoc, Yoshimi realized that she was the only one who could stop them. Using her knowledge of technology and robotics, she developed a plan to deactivate the pink robots and save her city. With her quick thinking and ingenuity, Yoshimi managed to hack into the robots' programming and shut them down one by one. Her heroic actions saved the city from destruction and earned her recognition as a brilliant scientist and hero."
+      "story_a": "Yoshimi was an ordinary girl living in a small town until one day, pink robots suddenly invaded her peaceful community. Armed with only her courage and determination, Yoshimi took it upon herself to battle these menacing robots. With quick thinking and resourcefulness, she devised clever strategies to outsmart the pink robots and protect her town. Despite the overwhelming odds, Yoshimi never gave up and eventually emerged victorious, earning the admiration and gratitude of her fellow townspeople. The story of Yoshimi's fearless battle against the pink robots became a legend, inspiring others to stand up against any threat that comes their way.",
+      "story_b": "In a futuristic world where pink robots ruled with an iron fist, Yoshimi was a rebellious young woman who dared to challenge their authority. These robots, programmed to enforce strict rules and suppress any form of creativity, saw Yoshimi as a threat to their regime. Determined to fight for freedom and individuality, Yoshimi led a brave rebellion against the pink robots. With her unmatched skills in combat and unwavering spirit, she took on the robots in fierce battles that shook the foundations of their oppressive society. In the final showdown, Yoshimi faced the leader of the pink robots in a dramatic confrontation that would determine the fate of their world. Through her courage and defiance, Yoshimi emerged victorious, bringing an end to the era of the pink robots and ushering in a new era of freedom and self-expression for all." 
     }
     ---Parsed Response (Error)---
     Failed to coerce value: <root>: Failed while parsing required fields: missing=0, unparsed=2
@@ -3202,8 +3176,8 @@
         - <root>: Assertions failed.
       - <root>: Failed to parse field story_b: <root>: Assertions failed.
         - <root>: Assertions failed.
-

Teardown

PASSED test_block_constraints 0:00:00.620434

Setup

Call

Captured stderr call
[2024-11-04T16:56:34Z INFO  baml_events] Function MakeBlockConstraint:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 613ms. StopReason: stop. Tokens(in/out): 42/19
+

Teardown

PASSED test_block_constraints 0:00:00.559182

Setup

Call

Captured stderr call
[2024-11-05T19:57:23Z INFO  baml_events] Function MakeBlockConstraint:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 552ms. StopReason: stop. Tokens(in/out): 42/22
     ---PROMPT---
     [chat] system: Generate an output in the following schema with a short string and a large int.
     
@@ -3216,13 +3190,13 @@
     ---LLM REPLY---
     {
       "foo": 1000000,
-      "bar": "Large number"
+      "bar": "This is a short string"
     }
     ---Parsed Response (class BlockConstraint)---
     {
       "value": {
         "foo": 1000000,
-        "bar": "Large number"
+        "bar": "This is a short string"
       },
       "checks": {
         "cross_field": {
@@ -3232,9 +3206,9 @@
         }
       }
     }
-

Teardown

PASSED test_nested_block_constraints 0:00:00.934504

Setup

Call

Captured stdout call
nbc=Checked[BlockConstraint, Literal['cross_field']](value=BlockConstraint(foo=1, bar='hello'), checks={'cross_field': Check(name='cross_field', expression='this.bar|length > this.foo', status='succeeded')})
-
Captured stderr call
[2024-11-04T16:56:35Z INFO  baml_events] Function MakeNestedBlockConstraint:
-    Client: GPT35 (gpt-3.5-turbo-0125) - 925ms. StopReason: stop. Tokens(in/out): 52/24
+

Teardown

PASSED test_nested_block_constraints 0:00:01.027956

Setup

Call

Captured stdout call
nbc=Checked[BlockConstraint, Literal['cross_field']](value=BlockConstraint(foo=1, bar='hello'), checks={'cross_field': Check(name='cross_field', expression='this.bar|length > this.foo', status='succeeded')})
+
Captured stderr call
[2024-11-05T19:57:24Z INFO  baml_events] Function MakeNestedBlockConstraint:
+    Client: GPT35 (gpt-3.5-turbo-0125) - 1019ms. StopReason: stop. Tokens(in/out): 52/24
     ---PROMPT---
     [chat] system: Generate an output where the inner foo is 1 and the inner bar is "hello".
       Answer in JSON using this schema:
@@ -3268,8 +3242,8 @@
         }
       }
     }
-

Teardown

PASSED test_block_constraint_arguments 0:00:00.003363

Setup

Call

Captured stderr call
[2024-11-04T16:56:35Z ERROR baml_runtime::tracing]   Error: inp: Failed assert: hi
+

Teardown

PASSED test_block_constraint_arguments 0:00:00.004030

Setup

Call

Captured stderr call
[2024-11-05T19:57:24Z ERROR baml_runtime::tracing]   Error: inp: Failed assert: hi
     
-[2024-11-04T16:56:35Z ERROR baml_runtime::tracing]   Error: inp: Failed assert: hi
+[2024-11-05T19:57:24Z ERROR baml_runtime::tracing]   Error: inp: Failed assert: hi
     
-

Teardown

tests/test_pydantic.py 3 0:00:00.001939

PASSED test_model_validate_success 0:00:00.000897

Setup

Call

Teardown

PASSED test_model_validate_failure 0:00:00.000591

Setup

Call

Teardown

PASSED test_model_dump 0:00:00.000452

Setup

Call

Teardown

\ No newline at end of file
+

Teardown

tests/test_pydantic.py 3 0:00:00.003597

PASSED test_model_validate_success 0:00:00.000850

Setup

Call

Teardown

PASSED test_model_validate_failure 0:00:00.001981

Setup

Call

Teardown

PASSED test_model_dump 0:00:00.000766

Setup

Call

Teardown

\ No newline at end of file diff --git a/integ-tests/ruby/baml_client/inlined.rb b/integ-tests/ruby/baml_client/inlined.rb index f08e4427b..004189384 100644 --- a/integ-tests/ruby/baml_client/inlined.rb +++ b/integ-tests/ruby/baml_client/inlined.rb @@ -25,7 +25,7 @@ module Inlined "fiddle-examples/extract-receipt-info.baml" => "class ReceiptItem {\n name string\n description string?\n quantity int\n price float\n}\n\nclass ReceiptInfo {\n items ReceiptItem[]\n total_cost float?\n venue \"barisa\" | \"ox_burger\"\n}\n\nfunction ExtractReceiptInfo(email: string, reason: \"curiosity\" | \"personal_finance\") -> ReceiptInfo {\n client GPT4o\n prompt #\"\n Given the receipt below:\n\n ```\n {{email}}\n ```\n\n {{ ctx.output_format }}\n \"#\n}\n\n", "fiddle-examples/images/image.baml" => "function DescribeImage(img: image) -> string {\n client GPT4o\n prompt #\"\n {{ _.role(\"user\") }}\n\n\n Describe the image below in 20 words:\n {{ img }}\n \"#\n\n}\n\nclass FakeImage {\n url string\n}\n\nclass ClassWithImage {\n myImage image\n param2 string\n fake_image FakeImage\n}\n\n// chat role user present\nfunction DescribeImage2(classWithImage: ClassWithImage, img2: image) -> string { \n client GPT4Turbo\n prompt #\"\n {{ _.role(\"user\") }}\n You should return 2 answers that answer the following commands.\n\n 1. Describe this in 5 words:\n {{ classWithImage.myImage }}\n\n 2. Also tell me what's happening here in one sentence:\n {{ img2 }}\n \"#\n}\n\n// no chat role\nfunction DescribeImage3(classWithImage: ClassWithImage, img2: image) -> string {\n client GPT4Turbo\n prompt #\"\n Describe this in 5 words:\n {{ classWithImage.myImage }}\n\n Tell me also what's happening here in one sentence and relate it to the word {{ classWithImage.param2 }}:\n {{ img2 }}\n \"#\n}\n\n\n// system prompt and chat prompt\nfunction DescribeImage4(classWithImage: ClassWithImage, img2: image) -> string {\n client GPT4Turbo\n prompt #\"\n {{ _.role(\"system\")}}\n\n Describe this in 5 words:\n {{ classWithImage.myImage }}\n\n Tell me also what's happening here in one sentence and relate it to the word {{ classWithImage.param2 }}:\n {{ img2 }}\n \"#\n}\n\ntest TestName {\n functions [DescribeImage]\n args {\n img { url \"https://imgs.xkcd.com/comics/standards.png\"}\n }\n}\n", "fiddle-examples/symbol-tuning.baml" => "enum Category3 {\n Refund @alias(\"k1\")\n @description(\"Customer wants to refund a product\")\n\n CancelOrder @alias(\"k2\")\n @description(\"Customer wants to cancel an order\")\n\n TechnicalSupport @alias(\"k3\")\n @description(\"Customer needs help with a technical issue unrelated to account creation or login\")\n\n AccountIssue @alias(\"k4\")\n @description(\"Specifically relates to account-login or account-creation\")\n\n Question @alias(\"k5\")\n @description(\"Customer has a question\")\n}\n\nfunction ClassifyMessage3(input: string) -> Category {\n client GPT4\n\n prompt #\"\n Classify the following INPUT into ONE\n of the following categories:\n\n INPUT: {{ input }}\n\n {{ ctx.output_format }}\n\n Response:\n \"#\n}", - "generators.baml" => "generator lang_python {\n output_type python/pydantic\n output_dir \"../python\"\n version \"0.66.0\"\n}\n\ngenerator lang_typescript {\n output_type typescript\n output_dir \"../typescript\"\n version \"0.66.0\"\n}\n\ngenerator lang_ruby {\n output_type ruby/sorbet\n output_dir \"../ruby\"\n version \"0.66.0\"\n}\n\n// generator openapi {\n// output_type rest/openapi\n// output_dir \"../openapi\"\n// version \"0.66.0\"\n// on_generate \"rm .gitignore\"\n// }\n", 
+ "generators.baml" => "generator lang_python {\n output_type python/pydantic\n output_dir \"../python\"\n version \"0.67.0\"\n}\n\ngenerator lang_typescript {\n output_type typescript\n output_dir \"../typescript\"\n version \"0.67.0\"\n}\n\ngenerator lang_ruby {\n output_type ruby/sorbet\n output_dir \"../ruby\"\n version \"0.67.0\"\n}\n\n// generator openapi {\n// output_type rest/openapi\n// output_dir \"../openapi\"\n// version \"0.67.0\"\n// on_generate \"rm .gitignore\"\n// }\n", "test-files/aliases/aliased-inputs.baml" => "\nclass InputClass {\n key string @alias(\"color\")\n key2 string\n}\n\n\nclass InputClassNested {\n key string\n nested InputClass @alias(\"interesting-key\")\n}\n \n\nfunction AliasedInputClass(input: InputClass) -> string {\n client GPT35\n prompt #\"\n\n {{input}}\n\n This is a test. What's the name of the first json key above? Remember, tell me the key, not value.\n \"#\n}\n \nfunction AliasedInputClass2(input: InputClass) -> string {\n client GPT35\n prompt #\"\n\n {# making sure we can still access the original key #}\n {%if input.key == \"tiger\"%}\n Repeat this value back to me, and nothing else: {{input.key}}\n {%endif%}\n \"#\n}\n \n function AliasedInputClassNested(input: InputClassNested) -> string {\n client GPT35\n prompt #\"\n {{ _.role(\"user\")}}\n\n {{input}}\n\n This is a test. What's the name of the second json key above? Remember, tell me the key, not value.\n \"#\n }\n\n\nenum AliasedEnum {\n KEY_ONE @alias(\"tiger\")\n KEY_TWO\n}\n\nfunction AliasedInputEnum(input: AliasedEnum) -> string {\n client GPT4o\n prompt #\"\n {{ _.role(\"user\")}}\n\n\n Write out this word only in your response, in lowercase:\n ---\n {{input}}\n ---\n Answer:\n \"#\n}\n\n\nfunction AliasedInputList(input: AliasedEnum[]) -> string {\n client GPT35\n prompt #\"\n {{ _.role(\"user\")}}\n Given this array:\n ---\n {{input}}\n ---\n\n Return the first element in the array:\n \"#\n}\n\n", "test-files/aliases/classes.baml" => "class TestClassAlias {\n key string @alias(\"key-dash\") @description(#\"\n This is a description for key\n af asdf\n \"#)\n key2 string @alias(\"key21\")\n key3 string @alias(\"key with space\")\n key4 string //unaliased\n key5 string @alias(\"key.with.punctuation/123\")\n}\n\nfunction FnTestClassAlias(input: string) -> TestClassAlias {\n client GPT35\n prompt #\"\n {{ctx.output_format}}\n \"#\n}\n\ntest FnTestClassAlias {\n functions [FnTestClassAlias]\n args {\n input \"example input\"\n }\n}\n", "test-files/aliases/enums.baml" => "enum TestEnum {\n A @alias(\"k1\") @description(#\"\n User is angry\n \"#)\n B @alias(\"k22\") @description(#\"\n User is happy\n \"#)\n // tests whether k1 doesnt incorrectly get matched with k11\n C @alias(\"k11\") @description(#\"\n User is sad\n \"#)\n D @alias(\"k44\") @description(\n User is confused\n )\n E @description(\n User is excited\n )\n F @alias(\"k5\") // only alias\n \n G @alias(\"k6\") @description(#\"\n User is bored\n With a long description\n \"#)\n \n @@alias(\"Category\")\n}\n\nfunction FnTestAliasedEnumOutput(input: string) -> TestEnum {\n client GPT35\n prompt #\"\n Classify the user input into the following category\n \n {{ ctx.output_format }}\n\n {{ _.role('user') }}\n {{input}}\n\n {{ _.role('assistant') }}\n Category ID:\n \"#\n}\n\ntest FnTestAliasedEnumOutput {\n functions [FnTestAliasedEnumOutput]\n args {\n input \"mehhhhh\"\n }\n}", diff --git a/integ-tests/typescript/baml_client/inlinedbaml.ts b/integ-tests/typescript/baml_client/inlinedbaml.ts index 25fd0f636..3fcafb3b3 100644 --- 
a/integ-tests/typescript/baml_client/inlinedbaml.ts +++ b/integ-tests/typescript/baml_client/inlinedbaml.ts @@ -26,7 +26,7 @@ const fileMap = { "fiddle-examples/extract-receipt-info.baml": "class ReceiptItem {\n name string\n description string?\n quantity int\n price float\n}\n\nclass ReceiptInfo {\n items ReceiptItem[]\n total_cost float?\n venue \"barisa\" | \"ox_burger\"\n}\n\nfunction ExtractReceiptInfo(email: string, reason: \"curiosity\" | \"personal_finance\") -> ReceiptInfo {\n client GPT4o\n prompt #\"\n Given the receipt below:\n\n ```\n {{email}}\n ```\n\n {{ ctx.output_format }}\n \"#\n}\n\n", "fiddle-examples/images/image.baml": "function DescribeImage(img: image) -> string {\n client GPT4o\n prompt #\"\n {{ _.role(\"user\") }}\n\n\n Describe the image below in 20 words:\n {{ img }}\n \"#\n\n}\n\nclass FakeImage {\n url string\n}\n\nclass ClassWithImage {\n myImage image\n param2 string\n fake_image FakeImage\n}\n\n// chat role user present\nfunction DescribeImage2(classWithImage: ClassWithImage, img2: image) -> string { \n client GPT4Turbo\n prompt #\"\n {{ _.role(\"user\") }}\n You should return 2 answers that answer the following commands.\n\n 1. Describe this in 5 words:\n {{ classWithImage.myImage }}\n\n 2. Also tell me what's happening here in one sentence:\n {{ img2 }}\n \"#\n}\n\n// no chat role\nfunction DescribeImage3(classWithImage: ClassWithImage, img2: image) -> string {\n client GPT4Turbo\n prompt #\"\n Describe this in 5 words:\n {{ classWithImage.myImage }}\n\n Tell me also what's happening here in one sentence and relate it to the word {{ classWithImage.param2 }}:\n {{ img2 }}\n \"#\n}\n\n\n// system prompt and chat prompt\nfunction DescribeImage4(classWithImage: ClassWithImage, img2: image) -> string {\n client GPT4Turbo\n prompt #\"\n {{ _.role(\"system\")}}\n\n Describe this in 5 words:\n {{ classWithImage.myImage }}\n\n Tell me also what's happening here in one sentence and relate it to the word {{ classWithImage.param2 }}:\n {{ img2 }}\n \"#\n}\n\ntest TestName {\n functions [DescribeImage]\n args {\n img { url \"https://imgs.xkcd.com/comics/standards.png\"}\n }\n}\n", "fiddle-examples/symbol-tuning.baml": "enum Category3 {\n Refund @alias(\"k1\")\n @description(\"Customer wants to refund a product\")\n\n CancelOrder @alias(\"k2\")\n @description(\"Customer wants to cancel an order\")\n\n TechnicalSupport @alias(\"k3\")\n @description(\"Customer needs help with a technical issue unrelated to account creation or login\")\n\n AccountIssue @alias(\"k4\")\n @description(\"Specifically relates to account-login or account-creation\")\n\n Question @alias(\"k5\")\n @description(\"Customer has a question\")\n}\n\nfunction ClassifyMessage3(input: string) -> Category {\n client GPT4\n\n prompt #\"\n Classify the following INPUT into ONE\n of the following categories:\n\n INPUT: {{ input }}\n\n {{ ctx.output_format }}\n\n Response:\n \"#\n}", - "generators.baml": "generator lang_python {\n output_type python/pydantic\n output_dir \"../python\"\n version \"0.66.0\"\n}\n\ngenerator lang_typescript {\n output_type typescript\n output_dir \"../typescript\"\n version \"0.66.0\"\n}\n\ngenerator lang_ruby {\n output_type ruby/sorbet\n output_dir \"../ruby\"\n version \"0.66.0\"\n}\n\n// generator openapi {\n// output_type rest/openapi\n// output_dir \"../openapi\"\n// version \"0.66.0\"\n// on_generate \"rm .gitignore\"\n// }\n", + "generators.baml": "generator lang_python {\n output_type python/pydantic\n output_dir \"../python\"\n version \"0.67.0\"\n}\n\ngenerator 
lang_typescript {\n output_type typescript\n output_dir \"../typescript\"\n version \"0.67.0\"\n}\n\ngenerator lang_ruby {\n output_type ruby/sorbet\n output_dir \"../ruby\"\n version \"0.67.0\"\n}\n\n// generator openapi {\n// output_type rest/openapi\n// output_dir \"../openapi\"\n// version \"0.67.0\"\n// on_generate \"rm .gitignore\"\n// }\n", "test-files/aliases/aliased-inputs.baml": "\nclass InputClass {\n key string @alias(\"color\")\n key2 string\n}\n\n\nclass InputClassNested {\n key string\n nested InputClass @alias(\"interesting-key\")\n}\n \n\nfunction AliasedInputClass(input: InputClass) -> string {\n client GPT35\n prompt #\"\n\n {{input}}\n\n This is a test. What's the name of the first json key above? Remember, tell me the key, not value.\n \"#\n}\n \nfunction AliasedInputClass2(input: InputClass) -> string {\n client GPT35\n prompt #\"\n\n {# making sure we can still access the original key #}\n {%if input.key == \"tiger\"%}\n Repeat this value back to me, and nothing else: {{input.key}}\n {%endif%}\n \"#\n}\n \n function AliasedInputClassNested(input: InputClassNested) -> string {\n client GPT35\n prompt #\"\n {{ _.role(\"user\")}}\n\n {{input}}\n\n This is a test. What's the name of the second json key above? Remember, tell me the key, not value.\n \"#\n }\n\n\nenum AliasedEnum {\n KEY_ONE @alias(\"tiger\")\n KEY_TWO\n}\n\nfunction AliasedInputEnum(input: AliasedEnum) -> string {\n client GPT4o\n prompt #\"\n {{ _.role(\"user\")}}\n\n\n Write out this word only in your response, in lowercase:\n ---\n {{input}}\n ---\n Answer:\n \"#\n}\n\n\nfunction AliasedInputList(input: AliasedEnum[]) -> string {\n client GPT35\n prompt #\"\n {{ _.role(\"user\")}}\n Given this array:\n ---\n {{input}}\n ---\n\n Return the first element in the array:\n \"#\n}\n\n", "test-files/aliases/classes.baml": "class TestClassAlias {\n key string @alias(\"key-dash\") @description(#\"\n This is a description for key\n af asdf\n \"#)\n key2 string @alias(\"key21\")\n key3 string @alias(\"key with space\")\n key4 string //unaliased\n key5 string @alias(\"key.with.punctuation/123\")\n}\n\nfunction FnTestClassAlias(input: string) -> TestClassAlias {\n client GPT35\n prompt #\"\n {{ctx.output_format}}\n \"#\n}\n\ntest FnTestClassAlias {\n functions [FnTestClassAlias]\n args {\n input \"example input\"\n }\n}\n", "test-files/aliases/enums.baml": "enum TestEnum {\n A @alias(\"k1\") @description(#\"\n User is angry\n \"#)\n B @alias(\"k22\") @description(#\"\n User is happy\n \"#)\n // tests whether k1 doesnt incorrectly get matched with k11\n C @alias(\"k11\") @description(#\"\n User is sad\n \"#)\n D @alias(\"k44\") @description(\n User is confused\n )\n E @description(\n User is excited\n )\n F @alias(\"k5\") // only alias\n \n G @alias(\"k6\") @description(#\"\n User is bored\n With a long description\n \"#)\n \n @@alias(\"Category\")\n}\n\nfunction FnTestAliasedEnumOutput(input: string) -> TestEnum {\n client GPT35\n prompt #\"\n Classify the user input into the following category\n \n {{ ctx.output_format }}\n\n {{ _.role('user') }}\n {{input}}\n\n {{ _.role('assistant') }}\n Category ID:\n \"#\n}\n\ntest FnTestAliasedEnumOutput {\n functions [FnTestAliasedEnumOutput]\n args {\n input \"mehhhhh\"\n }\n}", diff --git a/integ-tests/typescript/test-report.html b/integ-tests/typescript/test-report.html index 21632f5cd..9d1d68fd3 100644 --- a/integ-tests/typescript/test-report.html +++ b/integ-tests/typescript/test-report.html @@ -257,18 +257,11 @@ font-size: 1rem; padding: 0 0.5rem; } -

Test Report

Started: 2024-11-04 08:11:51
Suites (1)
0 passed
1 failed
0 pending
Tests (61)
58 passed
3 failed
0 pending
Integ tests > should work for all inputs
single bool
passed
0.777s
Integ tests > should work for all inputs
single string list
passed
1.036s
Integ tests > should work for all inputs
return literal union
failed
0.608s
BamlValidationError: BamlValidationError: Failed to parse LLM response: Failed to coerce value: <root>: Failed to find any (1 | true | "string output") in 3 items
-  - <root>: Expected 1, got Object([("answer", Boolean(true))]).
-  - <root>: Expected true, got Object([("answer", Boolean(true))]).
-  - <root>: Expected "string output", got Object([("answer", Boolean(true))]).
-    at Function.from (/Users/vbv/repos/gloo-lang/engine/language_client_typescript/index.js:33:28)
-    at from (/Users/vbv/repos/gloo-lang/engine/language_client_typescript/index.js:58:32)
-    at BamlAsyncClient.LiteralUnionsTest (/Users/vbv/repos/gloo-lang/integ-tests/typescript/baml_client/async_client.ts:1362:50)
-    at Object.<anonymous> (/Users/vbv/repos/gloo-lang/integ-tests/typescript/tests/integ-tests.test.ts:42:19)
Integ tests > should work for all inputs
single class
passed
0.928s
Integ tests > should work for all inputs
multiple classes
passed
0.578s
Integ tests > should work for all inputs
single enum list
passed
0.994s
Integ tests > should work for all inputs
single float
passed
0.519s
Integ tests > should work for all inputs
single int
passed
0.337s
Integ tests > should work for all inputs
single literal int
passed
0.359s
Integ tests > should work for all inputs
single literal bool
passed
0.449s
Integ tests > should work for all inputs
single literal string
passed
0.419s
Integ tests > should work for all inputs
single class with literal prop
passed
0.523s
Integ tests > should work for all inputs
single class with literal union prop
passed
0.613s
Integ tests > should work for all inputs
single optional string
passed
0.434s
Integ tests > should work for all inputs
single map string to string
passed
0.706s
Integ tests > should work for all inputs
single map string to class
passed
1.031s
Integ tests > should work for all inputs
single map string to map
passed
0.741s
Integ tests
should work for all outputs
passed
6.504s
Integ tests
works with retries1
passed
1.339s
Integ tests
works with retries2
passed
2.254s
Integ tests
works with fallbacks
passed
2.21s
Integ tests
should work with image from url
passed
2.462s
Integ tests
should work with image from base 64
passed
0.95s
Integ tests
should work with audio base 64
passed
1.031s
Integ tests
should work with audio from url
passed
1.082s
Integ tests
should support streaming in OpenAI
passed
3.648s
Integ tests
should support streaming in Gemini
failed
0.914s
Error: BamlError: BamlClientError: BamlClientHttpError: LLM call failed: LLMErrorResponse { client: "Gemini", model: Some("gemini-1.5-pro-001"), prompt: Chat([RenderedChatMessage { role: "user", allow_duplicate_role: false, parts: [Text("Write a nice short story about Dr. Pepper")] }]), request_options: {"safetySettings": Object {"category": String("HARM_CATEGORY_HATE_SPEECH"), "threshold": String("BLOCK_LOW_AND_ABOVE")}}, start_time: SystemTime { tv_sec: 1730736744, tv_nsec: 711125000 }, latency: 829.885458ms, message: "Failed to parse event: Error(\"missing field `content`\", line: 1, column: 359)", code: UnsupportedResponse(2) }
+

Test Report

Started: 2024-11-05 11:52:36
Suites (1)
0 passed
1 failed
0 pending
Tests (61)
59 passed
2 failed
0 pending
Integ tests > should work for all inputs
single bool
passed
0.965s
Integ tests > should work for all inputs
single string list
passed
0.527s
Integ tests > should work for all inputs
return literal union
passed
0.424s
Integ tests > should work for all inputs
single class
passed
0.464s
Integ tests > should work for all inputs
multiple classes
passed
0.512s
Integ tests > should work for all inputs
single enum list
passed
0.426s
Integ tests > should work for all inputs
single float
passed
0.436s
Integ tests > should work for all inputs
single int
passed
0.513s
Integ tests > should work for all inputs
single literal int
passed
0.623s
Integ tests > should work for all inputs
single literal bool
passed
0.318s
Integ tests > should work for all inputs
single literal string
passed
0.514s
Integ tests > should work for all inputs
single class with literal prop
passed
0.768s
Integ tests > should work for all inputs
single class with literal union prop
passed
0.579s
Integ tests > should work for all inputs
single optional string
passed
0.413s
Integ tests > should work for all inputs
single map string to string
passed
0.591s
Integ tests > should work for all inputs
single map string to class
passed
0.948s
Integ tests > should work for all inputs
single map string to map
passed
0.659s
Integ tests
should work for all outputs
passed
6.114s
Integ tests
works with retries1
passed
1.226s
Integ tests
works with retries2
passed
2.28s
Integ tests
works with fallbacks
passed
1.975s
Integ tests
should work with image from url
passed
1.146s
Integ tests
should work with image from base 64
passed
1.098s
Integ tests
should work with audio base 64
passed
1.181s
Integ tests
should work with audio from url
passed
1.339s
Integ tests
should support streaming in OpenAI
passed
3.181s
Integ tests
should support streaming in Gemini
failed
8.795s
Error: BamlError: BamlClientError: BamlClientHttpError: LLM call failed: LLMErrorResponse { client: "Gemini", model: Some("gemini-1.5-pro-001"), prompt: Chat([RenderedChatMessage { role: "user", allow_duplicate_role: false, parts: [Text("Write a nice short story about Dr. Pepper")] }]), request_options: {"safetySettings": Object {"threshold": String("BLOCK_LOW_AND_ABOVE"), "category": String("HARM_CATEGORY_HATE_SPEECH")}}, start_time: SystemTime { tv_sec: 1730836385, tv_nsec: 647343000 }, latency: 8.766259167s, message: "Failed to parse event: Error(\"missing field `parts`\", line: 1, column: 45)", code: UnsupportedResponse(2) }
     at BamlStream.parsed [as getFinalResponse] (/Users/vbv/repos/gloo-lang/engine/language_client_typescript/stream.js:58:39)
-    at Object.<anonymous> (/Users/vbv/repos/gloo-lang/integ-tests/typescript/tests/integ-tests.test.ts:253:19)
Integ tests
should support AWS
passed
2.313s
Integ tests
should support streaming in AWS
passed
2.232s
Integ tests
should support OpenAI shorthand
passed
11.846s
Integ tests
should support OpenAI shorthand streaming
passed
9.93s
Integ tests
should support anthropic shorthand
passed
3.186s
Integ tests
should support anthropic shorthand streaming
passed
2.896s
Integ tests
should support streaming without iterating
passed
5.531s
Integ tests
should support streaming in Claude
passed
1.112s
Integ tests
should support vertex
passed
7.336s
Integ tests
supports tracing sync
passed
0.006s
Integ tests
supports tracing async
passed
2.815s
Integ tests
should work with dynamic types single
passed
1.203s
Integ tests
should work with dynamic types enum
passed
1.234s
Integ tests
should work with dynamic literals
passed
0.929s
Integ tests
should work with dynamic types class
passed
1.722s
Integ tests
should work with dynamic inputs class
passed
0.628s
Integ tests
should work with dynamic inputs list
passed
0.724s
Integ tests
should work with dynamic output map
passed
1.09s
Integ tests
should work with dynamic output union
passed
4.278s
Integ tests
should work with nested classes
failed
0.103s
Error: BamlError: BamlClientError: Something went wrong with the LLM client: LLM call failed: LLMErrorResponse { client: "Ollama", model: None, prompt: Chat([RenderedChatMessage { role: "system", allow_duplicate_role: false, parts: [Text("Return a made up json blob that matches this schema:\nAnswer in JSON using this schema:\n{\n  prop1: string,\n  prop2: {\n    prop1: string,\n    prop2: string,\n    inner: {\n      prop2: int,\n      prop3: float,\n    },\n  },\n}\n---\n\nJSON:")] }]), request_options: {"model": String("llama2")}, start_time: SystemTime { tv_sec: 1730736806, tv_nsec: 665727000 }, latency: 1.319041ms, message: "reqwest::Error { kind: Request, url: Url { scheme: \"http\", cannot_be_a_base: false, username: \"\", password: None, host: Some(Domain(\"localhost\")), port: Some(11434), path: \"/v1/chat/completions\", query: None, fragment: None }, source: hyper_util::client::legacy::Error(Connect, ConnectError(\"tcp connect error\", Os { code: 61, kind: ConnectionRefused, message: \"Connection refused\" })) }", code: Other(2) }
+    at Object.<anonymous> (/Users/vbv/repos/gloo-lang/integ-tests/typescript/tests/integ-tests.test.ts:253:19)
Integ tests
should support AWS
passed
2.918s
Integ tests
should support streaming in AWS
passed
2.129s
Integ tests
should support OpenAI shorthand
passed
8.881s
Integ tests
should support OpenAI shorthand streaming
passed
7.341s
Integ tests
should support anthropic shorthand
passed
2.162s
Integ tests
should support anthropic shorthand streaming
passed
2.855s
Integ tests
should support streaming without iterating
passed
3.209s
Integ tests
should support streaming in Claude
passed
1.316s
Integ tests
should support vertex
passed
9.33s
Integ tests
supports tracing sync
passed
0.008s
Integ tests
supports tracing async
passed
3.627s
Integ tests
should work with dynamic types single
passed
1.212s
Integ tests
should work with dynamic types enum
passed
2.493s
Integ tests
should work with dynamic literals
passed
0.913s
Integ tests
should work with dynamic types class
passed
1.325s
Integ tests
should work with dynamic inputs class
passed
0.656s
Integ tests
should work with dynamic inputs list
passed
0.616s
Integ tests
should work with dynamic output map
passed
0.999s
Integ tests
should work with dynamic output union
passed
2.587s
Integ tests
should work with nested classes
failed
0.103s
Error: BamlError: BamlClientError: Something went wrong with the LLM client: LLM call failed: LLMErrorResponse { client: "Ollama", model: None, prompt: Chat([RenderedChatMessage { role: "system", allow_duplicate_role: false, parts: [Text("Return a made up json blob that matches this schema:\nAnswer in JSON using this schema:\n{\n  prop1: string,\n  prop2: {\n    prop1: string,\n    prop2: string,\n    inner: {\n      prop2: int,\n      prop3: float,\n    },\n  },\n}\n---\n\nJSON:")] }]), request_options: {"model": String("llama2")}, start_time: SystemTime { tv_sec: 1730836449, tv_nsec: 45236000 }, latency: 1.251542ms, message: "reqwest::Error { kind: Request, url: Url { scheme: \"http\", cannot_be_a_base: false, username: \"\", password: None, host: Some(Domain(\"localhost\")), port: Some(11434), path: \"/v1/chat/completions\", query: None, fragment: None }, source: hyper_util::client::legacy::Error(Connect, ConnectError(\"tcp connect error\", Os { code: 61, kind: ConnectionRefused, message: \"Connection refused\" })) }", code: Other(2) }
     at BamlStream.parsed [as getFinalResponse] (/Users/vbv/repos/gloo-lang/engine/language_client_typescript/stream.js:58:39)
-    at Object.<anonymous> (/Users/vbv/repos/gloo-lang/integ-tests/typescript/tests/integ-tests.test.ts:578:19)
Integ tests
should work with dynamic client
passed
0.458s
Integ tests
should work with 'onLogEvent'
passed
2.049s
Integ tests
should work with a sync client
passed
0.547s
Integ tests
should raise an error when appropriate
passed
0.926s
Integ tests
should raise a BAMLValidationError
passed
0.508s
Integ tests
should reset environment variables correctly
passed
1.144s
Integ tests
should use aliases when serializing input objects - classes
passed
0.949s
Integ tests
should use aliases when serializing, but still have original keys in jinja
passed
1.129s
Integ tests
should use aliases when serializing input objects - enums
passed
0.539s
Integ tests
should use aliases when serializing input objects - lists
passed
0.438s
Integ tests
constraints: should handle checks in return types
passed
0.844s
Integ tests
constraints: should handle checks in returned unions
passed
0.893s
Integ tests
constraints: should handle block-level checks
passed
0.737s
Integ tests
constraints: should handle nested-block-level checks
passed
0.928s
Console Log
    at Object.log (/Users/vbv/repos/gloo-lang/integ-tests/typescript/tests/integ-tests.test.ts:47:15)
+    at Object.<anonymous> (/Users/vbv/repos/gloo-lang/integ-tests/typescript/tests/integ-tests.test.ts:578:19)
Integ tests
should work with dynamic client
passed
0.547s
Integ tests
should work with 'onLogEvent'
passed
2.007s
Integ tests
should work with a sync client
passed
0.623s
Integ tests
should raise an error when appropriate
passed
0.967s
Integ tests
should raise a BAMLValidationError
passed
0.44s
Integ tests
should reset environment variables correctly
passed
1.044s
Integ tests
should use aliases when serializing input objects - classes
passed
1.024s
Integ tests
should use aliases when serializing, but still have original keys in jinja
passed
0.975s
Integ tests
should use aliases when serializing input objects - enums
passed
0.423s
Integ tests
should use aliases when serializing input objects - lists
passed
0.469s
Integ tests
constraints: should handle checks in return types
passed
0.846s
Integ tests
constraints: should handle checks in returned unions
passed
1.086s
Integ tests
constraints: should handle block-level checks
passed
0.703s
Integ tests
constraints: should handle nested-block-level checks
passed
0.816s
Console Log
    at Object.log (/Users/vbv/repos/gloo-lang/integ-tests/typescript/tests/integ-tests.test.ts:47:15)
     at Promise.then.completed (/Users/vbv/repos/gloo-lang/integ-tests/typescript/node_modules/.pnpm/jest-circus@29.7.0/node_modules/jest-circus/build/utils.js:298:28)
     at new Promise (<anonymous>)
     at callAsyncCircusFn (/Users/vbv/repos/gloo-lang/integ-tests/typescript/node_modules/.pnpm/jest-circus@29.7.0/node_modules/jest-circus/build/utils.js:231:10)
@@ -283,11 +276,11 @@
     at runTestInternal (/Users/vbv/repos/gloo-lang/integ-tests/typescript/node_modules/.pnpm/jest-runner@29.7.0/node_modules/jest-runner/build/runTest.js:367:16)
     at runTest (/Users/vbv/repos/gloo-lang/integ-tests/typescript/node_modules/.pnpm/jest-runner@29.7.0/node_modules/jest-runner/build/runTest.js:444:34)
calling with class
    at Object.log (/Users/vbv/repos/gloo-lang/integ-tests/typescript/tests/integ-tests.test.ts:53:15)
got response key
 true
-52
    at Object.log (/Users/vbv/repos/gloo-lang/integ-tests/typescript/tests/integ-tests.test.ts:176:15)
Expected error Error: BamlError: BamlClientError: BamlClientHttpError: LLM call failed: LLMErrorResponse { client: "RetryClientConstant", model: None, prompt: Chat([RenderedChatMessage { role: "system", allow_duplicate_role: false, parts: [Text("Say a haiku")] }]), request_options: {"model": String("gpt-3.5-turbo")}, start_time: SystemTime { tv_sec: 1730736730, tv_nsec: 756666000 }, latency: 275.386333ms, message: "Request failed: {\n    \"error\": {\n        \"message\": \"Incorrect API key provided: blah. You can find your API key at https://platform.openai.com/account/api-keys.\",\n        \"type\": \"invalid_request_error\",\n        \"param\": null,\n        \"code\": \"invalid_api_key\"\n    }\n}\n", code: InvalidAuthentication }
+52
    at Object.log (/Users/vbv/repos/gloo-lang/integ-tests/typescript/tests/integ-tests.test.ts:176:15)
Expected error Error: BamlError: BamlClientError: BamlClientHttpError: LLM call failed: LLMErrorResponse { client: "RetryClientConstant", model: None, prompt: Chat([RenderedChatMessage { role: "system", allow_duplicate_role: false, parts: [Text("Say a haiku")] }]), request_options: {"model": String("gpt-3.5-turbo")}, start_time: SystemTime { tv_sec: 1730836373, tv_nsec: 121068000 }, latency: 277.371875ms, message: "Request failed: {\n    \"error\": {\n        \"message\": \"Incorrect API key provided: blah. You can find your API key at https://platform.openai.com/account/api-keys.\",\n        \"type\": \"invalid_request_error\",\n        \"param\": null,\n        \"code\": \"invalid_api_key\"\n    }\n}\n", code: InvalidAuthentication }
     at BamlAsyncClient.parsed [as TestRetryConstant] (/Users/vbv/repos/gloo-lang/integ-tests/typescript/baml_client/async_client.ts:2635:18)
     at Object.<anonymous> (/Users/vbv/repos/gloo-lang/integ-tests/typescript/tests/integ-tests.test.ts:173:7) {
   code: 'GenericFailure'
-}
    at Object.log (/Users/vbv/repos/gloo-lang/integ-tests/typescript/tests/integ-tests.test.ts:185:15)
Expected error Error: BamlError: BamlClientError: BamlClientHttpError: LLM call failed: LLMErrorResponse { client: "RetryClientExponential", model: None, prompt: Chat([RenderedChatMessage { role: "system", allow_duplicate_role: false, parts: [Text("Say a haiku")] }]), request_options: {"model": String("gpt-3.5-turbo")}, start_time: SystemTime { tv_sec: 1730736733, tv_nsec: 77889000 }, latency: 243.340167ms, message: "Request failed: {\n    \"error\": {\n        \"message\": \"Incorrect API key provided: blahh. You can find your API key at https://platform.openai.com/account/api-keys.\",\n        \"type\": \"invalid_request_error\",\n        \"param\": null,\n        \"code\": \"invalid_api_key\"\n    }\n}\n", code: InvalidAuthentication }
+}
    at Object.log (/Users/vbv/repos/gloo-lang/integ-tests/typescript/tests/integ-tests.test.ts:185:15)
Expected error Error: BamlError: BamlClientError: BamlClientHttpError: LLM call failed: LLMErrorResponse { client: "RetryClientExponential", model: None, prompt: Chat([RenderedChatMessage { role: "system", allow_duplicate_role: false, parts: [Text("Say a haiku")] }]), request_options: {"model": String("gpt-3.5-turbo")}, start_time: SystemTime { tv_sec: 1730836375, tv_nsec: 506548000 }, latency: 207.725542ms, message: "Request failed: {\n    \"error\": {\n        \"message\": \"Incorrect API key provided: blahh. You can find your API key at https://platform.openai.com/account/api-keys.\",\n        \"type\": \"invalid_request_error\",\n        \"param\": null,\n        \"code\": \"invalid_api_key\"\n    }\n}\n", code: InvalidAuthentication }
     at BamlAsyncClient.parsed [as TestRetryExponential] (/Users/vbv/repos/gloo-lang/integ-tests/typescript/baml_client/async_client.ts:2660:18)
     at Object.<anonymous> (/Users/vbv/repos/gloo-lang/integ-tests/typescript/tests/integ-tests.test.ts:182:7) {
   code: 'GenericFailure'
@@ -685,7 +678,7 @@
     at runTestInternal (/Users/vbv/repos/gloo-lang/integ-tests/typescript/node_modules/.pnpm/jest-runner@29.7.0/node_modules/jest-runner/build/runTest.js:367:16)
     at runTest (/Users/vbv/repos/gloo-lang/integ-tests/typescript/node_modules/.pnpm/jest-runner@29.7.0/node_modules/jest-runner/build/runTest.js:444:34)
Property: height
    at Object.log (/Users/vbv/repos/gloo-lang/integ-tests/typescript/tests/integ-tests.test.ts:550:13)
final  {
   hair_color: 'black',
-  attributes: { eye_color: 'blue', facial_hair: 'beard', age: '30' },
+  attributes: { eye_color: 'blue', facial_hair: 'beard', age: '30 years old' },
   height: { feet: 6, inches: null }
 }
    at Object.log (/Users/vbv/repos/gloo-lang/integ-tests/typescript/tests/integ-tests.test.ts:561:13)
final  {
   hair_color: 'black',
@@ -694,8 +687,8 @@
 }
    at log (/Users/vbv/repos/gloo-lang/integ-tests/typescript/tests/integ-tests.test.ts:599:15)
     at callback (/Users/vbv/repos/gloo-lang/engine/language_client_typescript/async_context_vars.js:70:17)
onLogEvent {
   metadata: {
-    eventId: '2c5c90ba-407d-4dd4-b842-8fd3a322d57c',
-    rootEventId: '2c5c90ba-407d-4dd4-b842-8fd3a322d57c'
+    eventId: 'af275568-a67a-4a71-8dd8-85b25aadeb8b',
+    rootEventId: 'af275568-a67a-4a71-8dd8-85b25aadeb8b'
   },
   prompt: '[\n' +
     '  {\n' +
@@ -709,12 +702,12 @@
     ']',
   rawOutput: '["a", "b", "c"]',
   parsedOutput: '["a", "b", "c"]',
-  startTime: '2024-11-04T16:13:27.643Z'
+  startTime: '2024-11-05T19:54:10.425Z'
 }
    at log (/Users/vbv/repos/gloo-lang/integ-tests/typescript/tests/integ-tests.test.ts:599:15)
     at callback (/Users/vbv/repos/gloo-lang/engine/language_client_typescript/async_context_vars.js:70:17)
onLogEvent {
   metadata: {
-    eventId: '8a1e0133-67e0-4b95-b355-ae06f1b9d111',
-    rootEventId: '8a1e0133-67e0-4b95-b355-ae06f1b9d111'
+    eventId: 'fdb2defa-da6c-419b-91f7-6964ab73b424',
+    rootEventId: 'fdb2defa-da6c-419b-91f7-6964ab73b424'
   },
   prompt: '[\n' +
     '  {\n' +
@@ -728,8 +721,8 @@
     ']',
   rawOutput: '["d", "e", "f"]',
   parsedOutput: '["d", "e", "f"]',
-  startTime: '2024-11-04T16:13:28.168Z'
-}
    at Object.log (/Users/vbv/repos/gloo-lang/integ-tests/typescript/tests/integ-tests.test.ts:633:15)
Error: Error: BamlError: BamlClientError: BamlClientHttpError: LLM call failed: LLMErrorResponse { client: "MyClient", model: None, prompt: Chat([RenderedChatMessage { role: "system", allow_duplicate_role: false, parts: [Text("Given a string, extract info using the schema:\n\nMy name is Harrison. My hair is black and I'm 6 feet tall.\n\nAnswer in JSON using this schema:\n{\n}")] }]), request_options: {"model": String("gpt-4o-mini")}, start_time: SystemTime { tv_sec: 1730736809, tv_nsec: 990070000 }, latency: 215.395209ms, message: "Request failed: {\n    \"error\": {\n        \"message\": \"Incorrect API key provided: INVALID_KEY. You can find your API key at https://platform.openai.com/account/api-keys.\",\n        \"type\": \"invalid_request_error\",\n        \"param\": null,\n        \"code\": \"invalid_api_key\"\n    }\n}\n", code: InvalidAuthentication }
+  startTime: '2024-11-05T19:54:10.913Z'
+}
    at Object.log (/Users/vbv/repos/gloo-lang/integ-tests/typescript/tests/integ-tests.test.ts:633:15)
Error: Error: BamlError: BamlClientError: BamlClientHttpError: LLM call failed: LLMErrorResponse { client: "MyClient", model: None, prompt: Chat([RenderedChatMessage { role: "system", allow_duplicate_role: false, parts: [Text("Given a string, extract info using the schema:\n\nMy name is Harrison. My hair is black and I'm 6 feet tall.\n\nAnswer in JSON using this schema:\n{\n}")] }]), request_options: {"model": String("gpt-4o-mini")}, start_time: SystemTime { tv_sec: 1730836452, tv_nsec: 600615000 }, latency: 183.738125ms, message: "Request failed: {\n    \"error\": {\n        \"message\": \"Incorrect API key provided: INVALID_KEY. You can find your API key at https://platform.openai.com/account/api-keys.\",\n        \"type\": \"invalid_request_error\",\n        \"param\": null,\n        \"code\": \"invalid_api_key\"\n    }\n}\n", code: InvalidAuthentication }
     at BamlAsyncClient.parsed (/Users/vbv/repos/gloo-lang/integ-tests/typescript/baml_client/async_client.ts:1435:18)
     at Object.<anonymous> (/Users/vbv/repos/gloo-lang/integ-tests/typescript/tests/integ-tests.test.ts:630:7) {
   code: 'GenericFailure'
diff --git a/tools/versions/engine.cfg b/tools/versions/engine.cfg
index 8c6554577..c4d42f58a 100644
--- a/tools/versions/engine.cfg
+++ b/tools/versions/engine.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.66.0
+current_version = 0.67.0
 commit = False
 tag = False
 parse = ^(?P<major>\d+)\.(?P<minor>\d+).(?P<patch>\d+)$
diff --git a/tools/versions/integ-tests.cfg b/tools/versions/integ-tests.cfg
index ffe84afed..34a52998d 100644
--- a/tools/versions/integ-tests.cfg
+++ b/tools/versions/integ-tests.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.66.0
+current_version = 0.67.0
 commit = False
 tag = False
 parse = ^(?P<major>\d+)\.(?P<minor>\d+).(?P<patch>\d+)$
diff --git a/tools/versions/python.cfg b/tools/versions/python.cfg
index c0cd2f44d..fd5c233de 100644
--- a/tools/versions/python.cfg
+++ b/tools/versions/python.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.66.0
+current_version = 0.67.0
 commit = False
 tag = False
 parse = ^(?P<major>\d+)\.(?P<minor>\d+).(?P<patch>\d+)$
diff --git a/tools/versions/ruby.cfg b/tools/versions/ruby.cfg
index b0cac4d1a..f3865972b 100644
--- a/tools/versions/ruby.cfg
+++ b/tools/versions/ruby.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.66.0
+current_version = 0.67.0
 commit = False
 tag = False
 parse = ^(?P<major>\d+)\.(?P<minor>\d+).(?P<patch>\d+)$
diff --git a/tools/versions/typescript.cfg b/tools/versions/typescript.cfg
index d31f1433e..7b16b2bf1 100644
--- a/tools/versions/typescript.cfg
+++ b/tools/versions/typescript.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.66.0
+current_version = 0.67.0
 commit = False
 tag = False
 parse = ^(?P<major>\d+)\.(?P<minor>\d+).(?P<patch>\d+)$
diff --git a/tools/versions/vscode.cfg b/tools/versions/vscode.cfg
index a72828835..d81286081 100644
--- a/tools/versions/vscode.cfg
+++ b/tools/versions/vscode.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.66.0
+current_version = 0.67.0
 commit = False
 tag = False
 parse = ^(?P<major>\d+)\.(?P<minor>\d+).(?P<patch>\d+)$
diff --git a/typescript/vscode-ext/packages/package.json b/typescript/vscode-ext/packages/package.json
index 65b0e4c47..4d0f27c6c 100644
--- a/typescript/vscode-ext/packages/package.json
+++ b/typescript/vscode-ext/packages/package.json
@@ -2,7 +2,7 @@
   "name": "baml-extension",
   "displayName": "Baml",
   "description": "BAML is a DSL for AI applications.",
-  "version": "0.66.0",
+  "version": "0.67.0",
   "publisher": "Boundary",
   "repository": "https://github.com/BoundaryML/baml",
   "homepage": "https://www.boundaryml.com",