test: introduced doctest and doc examples
MoritzLaurer committed Nov 17, 2024
1 parent 8835d88 commit 2d50d71
Showing 9 changed files with 322 additions and 39 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/test.yml
@@ -54,7 +54,7 @@ jobs:

- name: Run tests with pytest
continue-on-error: true # Won't fail the PR
-       run: poetry run pytest --cov=hf_hub_prompts --cov-report=xml
+       run: poetry run pytest --doctest-modules --cov=hf_hub_prompts --cov-report=xml

- name: Upload results to Codecov
uses: codecov/codecov-action@v4
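The one-line change above adds pytest's --doctest-modules flag, which makes pytest collect the >>> examples embedded in module docstrings and run them as tests, so the doc examples introduced below become executable checks. A minimal sketch of the kind of docstring this flag picks up (the module and function here are hypothetical, not part of hf_hub_prompts):

    # example_math.py -- hypothetical module, for illustration only
    def add(a: int, b: int) -> int:
        """Add two integers.

        Examples:
            >>> add(2, 3)
            5
        """
        return a + b

Running poetry run pytest --doctest-modules would then execute the add(2, 3) example and fail the test run if the actual output no longer matched the expected 5.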
24 changes: 11 additions & 13 deletions hf_hub_prompts/hub_api.py
@@ -26,26 +26,24 @@ def download_prompt(
>>> # Inspect the template
>>> template.template
'Translate the following text to {language}:\\n{text}'
>>> # Populate the template
>>> prompt = template.populate_template(
... language="French",
... text="Hello world!"
... )
>>> template.input_variables
['language', 'text']
>>> template.metadata['name']
'Simple Translator'
Download and use a chat prompt template:
>>> # Download code teaching prompt
>>> chat_template = download_prompt(
>>> template = download_prompt(
... repo_id="MoritzLaurer/example_prompts",
... filename="code_teacher.yaml"
... )
>>> # Inspect the template
>>> chat_template.messages[1]["content"]
'Explain what {concept} is in {programming_language}.'
>>> # Populate the template
>>> chat_prompt = chat_template.populate_template(
... concept="list comprehension",
... programming_language="Python"
... )
>>> template.messages
[{'role': 'system', 'content': 'You are a coding assistant who explains concepts clearly and provides short examples.'}, {'role': 'user', 'content': 'Explain what {concept} is in {programming_language}.'}]
>>> template.input_variables
['concept', 'programming_language']
>>> template.metadata['version']
'0.0.1'
Args:
repo_id (str): The repository ID on Hugging Face Hub (e.g., 'username/repo_name').
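Besides running in CI, the new docstring examples in this module can be exercised locally with the standard-library doctest module. A minimal sketch, assuming hf_hub_prompts is installed and the MoritzLaurer/example_prompts repository on the Hub is reachable:

    # run_hub_api_doctests.py -- illustrative sketch, not part of this commit
    import doctest

    import hf_hub_prompts.hub_api as hub_api

    # Collect and run every >>> example found in the module's docstrings.
    results = doctest.testmod(hub_api, verbose=True)
    print(f"{results.attempted} examples attempted, {results.failed} failed")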
50 changes: 39 additions & 11 deletions hf_hub_prompts/populated_prompt.py
@@ -7,31 +7,59 @@

@dataclass
class PopulatedPrompt:
"""A class representing a populated prompt.
Examples:
>>> # For standard prompts
>>> prompt = template.populate_template(name="Alice")
>>> text = prompt.content
>>>
>>> # For chat prompts
>>> prompt = chat_template.populate_template(name="Alice")
>>> messages = prompt.format_for_client(client="anthropic")
"""A class representing a populated prompt that can be formatted to be compatible with different LLM clients.
This class serves two main purposes:
1. Store populated prompts (both text and chat formats)
2. Convert chat prompts between different LLM client formats (e.g., OpenAI, Anthropic)
The class handles two types of content:
* **Text prompts**: Simple strings that can be used directly with any LLM
* **Chat prompts**: Lists or Dicts of messages that are compatible with the format expected by different LLM clients
For examples of converting between client formats, see the [`format_for_client()`][hf_hub_prompts.populated_prompt.PopulatedPrompt.format_for_client] method.
"""

content: Union[str, List[Dict[str, Any]]]

def format_for_client(self, client: str = "openai") -> Union[List[Dict[str, Any]], Dict[str, Any]]:
"""Format the prompt content for a specific client.
Examples:
Format chat messages for different clients:
>>> from hf_hub_prompts import download_prompt
>>> template = download_prompt(
... repo_id="MoritzLaurer/example_prompts",
... filename="code_teacher.yaml"
... )
>>> prompt = template.populate_template(
... concept="list comprehension",
... programming_language="Python"
... )
>>> prompt.content
[{'role': 'system', 'content': 'You are a coding assistant who explains concepts clearly and provides short examples.'}, {'role': 'user', 'content': 'Explain what list comprehension is in Python.'}]
>>> # By default, the populated prompt.content is in the OpenAI messages format
>>> messages_openai = prompt.format_for_client("openai")
>>> messages_openai == prompt.content
True
>>> # We can also convert the populated prompt to other formats
>>> messages_anthropic = prompt.format_for_client("anthropic")
>>> messages_anthropic == prompt.content
False
>>> messages_anthropic
{'system': 'You are a coding assistant who explains concepts clearly and provides short examples.', 'messages': [{'role': 'user', 'content': 'Explain what list comprehension is in Python.'}]}
Args:
client (str): The client format to use ('openai', 'anthropic'). Defaults to 'openai'.
Returns:
Union[List[Dict[str, Any]], Dict[str, Any]]: Formatted prompt content suitable for the specified client.
Raises:
ValueError: If an unsupported client format is specified.
ValueError: If an unsupported client format is specified or if trying to format a text prompt.
"""
if isinstance(self.content, str):
# For standard prompts, format_for_client does not add value
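The expected outputs in the format_for_client docstring imply a simple conversion between the two client formats: the system message is lifted out of the OpenAI-style message list into a top-level 'system' field, and the remaining messages are passed through unchanged. A rough sketch of that conversion under those assumptions (not the library's actual format_for_client implementation):

    from typing import Any, Dict, List

    def to_anthropic(messages: List[Dict[str, Any]]) -> Dict[str, Any]:
        # Anthropic's Messages API takes the system prompt as a separate
        # top-level field rather than as a message with role "system".
        system = "\n".join(m["content"] for m in messages if m["role"] == "system")
        other = [m for m in messages if m["role"] != "system"]
        return {"system": system, "messages": other}

Applied to prompt.content from the example above, this would reproduce the dict shown for messages_anthropic.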
