Showing 4 changed files with 84 additions and 1 deletion.
39 changes: 39 additions & 0 deletions
...ontend/public/_examples/prompt-engineering/all-prompting-techniques/baml_src/clients.baml
@@ -0,0 +1,39 @@
// These are LLM clients you can use in your functions. We currently support Anthropic, OpenAI / Azure, and Ollama as providers but are expanding to many more.

// For this playground, we have set up a few clients for you to use already with some free credits.

client<llm> GPT4 {
  // Use one of the following: https://docs.boundaryml.com/v3/syntax/client/client#providers
  provider openai
  // You can pass in any parameters from the OpenAI Python documentation into the options block.
  options {
    model gpt-4
    api_key env.OPENAI_API_KEY
  }
}

client<llm> GPT4Turbo {
  provider openai
  options {
    model gpt-4-turbo
    api_key env.OPENAI_API_KEY
  }
}

client<llm> GPT35 {
  provider openai
  options {
    model gpt-3.5-turbo
    api_key env.OPENAI_API_KEY
  }
}

client<llm> Claude {
  provider anthropic
  options {
    model claude-3-haiku-20240307
    api_key env.ANTHROPIC_API_KEY
    max_tokens 1000
  }
}
42 changes: 42 additions & 0 deletions
...-frontend/public/_examples/prompt-engineering/all-prompting-techniques/baml_src/main.baml
@@ -0,0 +1,42 @@
class Email {
  subject string
  body string
  from_address string
}

enum OrderStatus {
  ORDERED
  SHIPPED
  DELIVERED
  CANCELLED
}

class OrderInfo {
  order_status OrderStatus
  tracking_number string?
  estimated_arrival_date string?
}

function GetOrderInfo(email: Email) -> OrderInfo {
  client GPT4Turbo
  prompt #"
    Given the email below:

    ```
    from: {{email.from_address}}
    Email Subject: {{email.subject}}
    Email Body: {{email.body}}
    ```

    Extract this info from the email in JSON format:
    {{ ctx.output_format }}

    Before you output the JSON, please explain your
    reasoning step-by-step. Here is an example of how to do this:
    'If we think step by step we can see that ...
    therefore the output JSON is:
    {
      ... the json schema ...
    }'
  "#
}
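
As a quick sanity check for GetOrderInfo, a BAML test block could sit next to the function and be run from the playground. This is only a sketch: the test name and email values below are made up, and the args layout assumes the test syntax described in the BAML docs.

// Hypothetical test for GetOrderInfo; the email values are illustrative only.
test GetOrderInfoShippedEmail {
  functions [GetOrderInfo]
  args {
    email {
      subject "Your order has shipped"
      body "Good news! Order #1234 shipped today. Tracking number: 1Z999AA10123456784. Estimated arrival: June 5."
      from_address "orders@store.example.com"
    }
  }
}

If the prompt behaves as intended, the parsed OrderInfo should come back with order_status SHIPPED and the tracking number and arrival date filled in, preceded by the model's step-by-step reasoning.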