From 53e5e24c8331f88731224043b5d99b276340363a Mon Sep 17 00:00:00 2001 From: Jeff Zhang Date: Mon, 16 Dec 2024 17:57:59 +0800 Subject: [PATCH] [MINOR] Doc fix --- adalflow/adalflow/core/prompt_builder.py | 6 +++--- docs/source/tutorials/tool_helper.rst | 8 +++----- 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/adalflow/adalflow/core/prompt_builder.py b/adalflow/adalflow/core/prompt_builder.py index eca455578..0d998b63e 100644 --- a/adalflow/adalflow/core/prompt_builder.py +++ b/adalflow/adalflow/core/prompt_builder.py @@ -1,4 +1,4 @@ -"""Class prompt builder for LightRAG system prompt.""" +"""Class prompt builder for AdalFlow system prompt.""" from typing import Dict, Any, Optional, List, TypeVar import logging @@ -20,10 +20,10 @@ class Prompt(Component): __doc__ = r"""Renders a text string(prompt) from a Jinja2 template string. - In default, we use the :ref:`DEFAULT_LIGHTRAG_SYSTEM_PROMPT` as the template. + By default, we use the :ref:`DEFAULT_ADALFLOW_SYSTEM_PROMPT` as the template. Args: - template (str, optional): The Jinja2 template string. Defaults to DEFAULT_LIGHTRAG_SYSTEM_PROMPT. + template (str, optional): The Jinja2 template string. Defaults to DEFAULT_ADALFLOW_SYSTEM_PROMPT. preset_prompt_kwargs (Optional[Dict], optional): The preset prompt kwargs to fill in the variables in the prompt. Defaults to {}. 
Examples: diff --git a/docs/source/tutorials/tool_helper.rst b/docs/source/tutorials/tool_helper.rst index 4b607a26c..a9eadeb3b 100644 --- a/docs/source/tutorials/tool_helper.rst +++ b/docs/source/tutorials/tool_helper.rst @@ -510,8 +510,8 @@ We will use :class:`components.output_parsers.outputs.JsonOutputParser` to strea from adalflow.components.output_parsers import JsonOutputParser - func_parser = JsonOutputParser(data_class=Function) - instructions = func_parser.format_instructions(exclude=["thought", "args"]) + func_parser = JsonOutputParser(data_class=Function, exclude_fields=["thought", "args"]) + instructions = func_parser.format_instructions() print(instructions) The output is: @@ -542,9 +542,7 @@ Now, let's prepare our generator with the above prompt, ``Function`` data class, model_kwargs = {"model": "gpt-3.5-turbo"} prompt_kwargs = { "tools": tool_manager.yaml_definitions, - "output_format_str": func_parser.format_instructions( - exclude=["thought", "args"] - ), + "output_format_str": func_parser.format_instructions(), } generator = Generator( model_client=ModelClientType.OPENAI(),