From abe6ebcabf429f394199aa8a9635fcd574e1f132 Mon Sep 17 00:00:00 2001 From: Jason Date: Sun, 9 Jul 2023 12:46:46 +0800 Subject: [PATCH] more docs --- docs/chat-completion.md | 2 +- docs/help.md | 3 -- docs/index.md | 80 +++++++++++++++++++++++----------------- docs/multitask.md | 15 +++++++- docs/openai_schema.md | 11 +++--- docs/pipeline-example.md | 36 ++++++++++++++++-- mkdocs.yml | 6 +-- 7 files changed, 102 insertions(+), 51 deletions(-) delete mode 100644 docs/help.md diff --git a/docs/chat-completion.md b/docs/chat-completion.md index c5652797f..edeca4247 100644 --- a/docs/chat-completion.md +++ b/docs/chat-completion.md @@ -1,4 +1,4 @@ -# Using the Chatcompletion +# Using the Prompt Pipeline To get started with this api we must first instantiate a `ChatCompletion` object and build the api call by piping messages and functions to it. diff --git a/docs/help.md b/docs/help.md deleted file mode 100644 index fd64e12d3..000000000 --- a/docs/help.md +++ /dev/null @@ -1,3 +0,0 @@ -# Docs are incomplete - -Help wanted! \ No newline at end of file diff --git a/docs/index.md b/docs/index.md index 53c79b4d3..b847b998c 100644 --- a/docs/index.md +++ b/docs/index.md @@ -1,6 +1,19 @@ # Welcome to OpenAI Function Call -We try to provides a powerful and efficient approach to output parsing when interacting with OpenAI's Function Call API. One that is framework agnostic and minimizes any dependencies. It leverages the data validation capabilities of the Pydantic library to handle output parsing in a more structured and reliable manner. If you have any feedback, leave an issue or hit me up on [twitter](https://twitter.com/jxnlco). +We offer a minimally invasive extension of `Pydantic.BaseModel` named `OpenAISchema`. It only has two methods, one to generate the correct schema, and one to produce the class from the completion. +

+This library is more so a list of examples and a helper class so I'll keep the example as just structured extraction.
+

+If OpenAI is a chef's knife of code, I hope to sell you a nice handle which comes with a little pamphlet of cutting techniques.

+

+It leverages the data validation capabilities of the Pydantic library to handle output parsing in a more structured and reliable manner.

+

+If you have any feedback, leave an issue or hit me up on [twitter](https://twitter.com/jxnlco).

+

+If you're looking for something more batteries included I strongly recommend [MarvinAI](https://www.askmarvin.ai/) which offers a high level api but does not provide as much access to prompting.

+

+!!! tip "Just rip it out!"

+    If you don't want to install dependencies, I recommend literally ripping the `function_calls.py` into your own codebase. [[source code]](https://github.com/jxnl/openai_function_call/blob/main/openai_function_call/function_calls.py)

## Installation

@@ -10,9 +23,39 @@ pip install openai_function_call

## Usage

-This module simplifies the interaction with the OpenAI API, enabling a more structured outputs. Below are examples showcasing the use of function calls and schemas with OpenAI and Pydantic. In later modoules we'll go over a wide array of more creative uses.

+Below are examples showcasing the use of function calls and schemas with OpenAI and Pydantic. In later docs we'll go over a wide array of more creative uses.

-### Example 1: Function Calls

+### Example 1: Extraction

+

+!!! Tip

+    Prompts are now sourced from docstrings and descriptions, so write clear and descriptive documentation!
+ +```python +import openai +from openai_function_call import OpenAISchema + +from pydantic import Field + +class UserDetails(OpenAISchema): + """Details of a user""" + name: str = Field(..., description="users's full name") + age: int + +completion = openai.ChatCompletion.create( + model="gpt-3.5-turbo-0613", + functions=[UserDetails.openai_schema], + function_call={"name": UserDetails.openai_schema["name"]}, + messages=[ + {"role": "system", "content": "Extract user details from my requests"}, + {"role": "user", "content": "My name is John Doe and I'm 30 years old."}, + ], +) + +user_details = UserDetails.from_response(completion) +print(user_details) # name="John Doe", age=30 +``` + +### Example 2: Function Calls ```python import openai @@ -43,34 +86,3 @@ completion = openai.ChatCompletion.create( result = sum.from_response(completion) print(result) # 9 ``` - -### Example 2: Schema Extraction - -```python -import openai -from openai_function_call import OpenAISchema - -from pydantic import Field - -class UserDetails(OpenAISchema): - """Details of a user""" - name: str = Field(..., description="users's full name") - age: int - -completion = openai.ChatCompletion.create( - model="gpt-3.5-turbo-0613", - functions=[UserDetails.openai_schema], - function_call={"name": UserDetails.openai_schema["name"]}, - messages=[ - {"role": "system", "content": "Extract user details from my requests"}, - {"role": "user", "content": "My name is John Doe and I'm 30 years old."}, - ], -) - -user_details = UserDetails.from_response(completion) -print(user_details) # name="John Doe", age=30 -``` - -# Code - -::: openai_function_call \ No newline at end of file diff --git a/docs/multitask.md b/docs/multitask.md index d5a5172d5..83c413375 100644 --- a/docs/multitask.md +++ b/docs/multitask.md @@ -1,5 +1,18 @@ # MultiTask -We define a helper function `MultiTask` that dynamitcally creates a new schema that has a task attribute defined as a list of the task subclass, it including some 
prebuild prompts and allows us to avoid writing some extra code.

+Defining a task and creating a list of classes is a common enough pattern that we define a helper function `MultiTask` that dynamically creates a new schema that has a task attribute defined as a list of the task subclass, including some prebuilt prompts and allows us to avoid writing some extra code.

+

+!!! example "Extending user details"

+

+    Using the previous example with extracting `UserDetails` we might want to extract multiple users rather than a single user, `MultiTask` makes it easy!

+

+    ```python

+    class UserDetails(OpenAISchema):

+        """Details of a user"""

+        name: str = Field(..., description="users's full name")

+        age: int

+

+    MultiUserDetails = MultiTask(UserDetails)

+    ```

::: openai_function_call.dsl.multitask \ No newline at end of file diff --git a/docs/openai_schema.md b/docs/openai_schema.md index bbc7fc8bd..178e631bf 100644 --- a/docs/openai_schema.md +++ b/docs/openai_schema.md @@ -1,15 +1,16 @@ # OpenAI Schema

-The most generic helper is a light weight extention of Pydantic's BaseModel `OpenAISchema`.

-It has a method to help you produce the schema and parse the result of function calls

-

-This library is moreso a list of examples and a helper class so I'll keep the example as just structured extraction.

+We offer a minimally invasive extension of `Pydantic.BaseModel` named `OpenAISchema`. It only has two methods, one to generate the correct schema, and one to produce the class from the completion.

!!! note "Where does the prompt go?"

-    Instead of defining your prompts in the messages the prompts you would usually use are now defined as part of the dostring of your class and the field descriptions. This is nice since it allows you to colocate the schema with the class you use to represent the structure.

+    Our philosophy is that the prompt should live beside the code. Prompting is done via docstrings and field descriptions which allows you to colocate prompts with your schema.
## Structured Extraction

+You can directly use the class in your `openai` create calls by passing in the class's `openai_schema` and extract the class out with `from_response`.

+

+With this style of usage you get as close to the api call as possible giving you full control over configuration and prompting.

+

```python

import openai

from openai_function_call import OpenAISchema

diff --git a/docs/pipeline-example.md b/docs/pipeline-example.md index 244c2bc4e..697b9fd9c 100644 --- a/docs/pipeline-example.md +++ b/docs/pipeline-example.md @@ -1,8 +1,12 @@ -# Using the pipeline

+# Using the ChatCompletion pipeline

-The pipeapi is some syntactic sugar to help build prompts in a readable way that avoids having to remember best practices around wording and structure. Examples include adding tips, tagging data with xml, or even including the chain of thought prompt as an assistant message.

+The pipeline api is just syntactic sugar to help build prompts in a readable way that avoids having to remember best practices around wording and structure. Examples include adding tips, tagging data with xml, or even including the chain of thought prompt as an assistant message.

-### Example Pipeline

+## Example Pipeline

+

+Here we'll define a task to segment queries and add some more instructions via the prompt pipeline api.

+

+### Designing the schema

```python

from openai_function_call import OpenAISchema, dsl

@@ -19,10 +23,31 @@ class SearchQuery(OpenAISchema):

SearchResponse = dsl.MultiTask(

    subtask_class=SearchQuery,

)

+```

+

+!!! tip "MultiTask"

+    To learn more about what multi task does, check out the [MultiTask](multitask.md) documentation

+

+

+### Building our prompts

+We don't deal with prompt templates and treat chat, message, output schema as first class citizens and then pipe them into a completion object.

+!!! note "What's that?"

+    The pipe `|` is an overloaded operator that lets us cleanly compose our prompts.
+

+    `ChatCompletion` contains all the configuration for the model while we use `|` to build our prompt

+

+    We can then chain `|` together to add `Messages` or `OpenAISchema` and `ChatCompletion` will build our query for us while giving us a readable block of code to look at

+

+    To see what 'message templates' are available check out our [docs](chat-completion.md)

+

+```python

task = (

-    dsl.ChatCompletion(name="Segmenting Search requests example")

+    dsl.ChatCompletion(

+        name="Segmenting Search requests example",

+        model='gpt-3.5-turbo-0613',

+        max_tokens=1000)

    | dsl.SystemTask(task="Segment search results")

    | dsl.TaggedMessage(

        content="can you send me the data about the video investment and the one about spot the dog?",

@@ -42,6 +67,9 @@ assert isinstance(search_request, SearchResponse)

print(search_request.json(indent=2))

```

+!!! tip

+    If you want to see what is actually sent to OpenAI scroll to the bottom of the page!

+

Output

```json

diff --git a/mkdocs.yml b/mkdocs.yml index fbcfdfbde..586680853 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -35,8 +35,8 @@ nav:

- Home: 'index.md'

- API Reference:

  - 'OpenAISchema': 'openai_schema.md'

-  - "Helper: MultiTask": "multitask.md"

-  - "Example: Pipeline API": "pipeline-example.md"

-  - "Docs": "chat-completion.md"

+  - "MultiTask Schema": "multitask.md"

+  - "Introduction: Pipeline API": "pipeline-example.md"

+  - "Message Templates": "chat-completion.md"

- Examples:

  - 'Segmented Search': 'examples/search.md' \ No newline at end of file