Skip to content

Commit

Permalink
Add Saiga chat format. (abetlen#1050)
Browse files Browse the repository at this point in the history
  • Loading branch information
femoiseev authored Jan 4, 2024
1 parent f766b70 commit 907b9e9
Showing 1 changed file with 22 additions and 0 deletions.
22 changes: 22 additions & 0 deletions llama_cpp/llama_chat_format.py
Original file line number Diff line number Diff line change
Expand Up @@ -734,6 +734,28 @@ def format_openchat(
return ChatFormatterResponse(prompt=_prompt, stop=_sep)


# Chat format for Saiga models, see more details and available models:
# https://huggingface.co/collections/IlyaGusev/saiga2-saigamistral-6505d4ccc3d1e53166b636cd
@register_chat_format("saiga")
def format_saiga(
    messages: list[llama_types.ChatCompletionRequestMessage],
    **kwargs,
) -> ChatFormatterResponse:
    """Render a conversation into the Saiga prompt layout.

    Each turn becomes ``<s>{role}`` followed by a newline, the content,
    and a closing ``</s>``. The prompt ends with an open ``<s>bot``
    segment for the model to complete.
    """
    _message_template = "<s>{role}\n{content}</s>"
    # FIX: incoming OpenAI-style messages use the role "assistant"; without
    # an assistant -> "bot" entry, _map_roles silently dropped every
    # assistant turn from the prompt. "bot" is kept as a key too for
    # backward compatibility with callers that already pass that role.
    _roles = dict(user="user", bot="bot", assistant="bot", system="system")
    _messages = _map_roles(messages, _roles)

    _prompt = ""
    for role, content in _messages:
        if content:
            _prompt += _message_template.format(role=role, content=content)
        else:
            # Turn with no string content: emit just the open role header.
            _prompt += f"<s>{role}\n"
    # Response template: open bot turn that generation will continue.
    _prompt += "<s>bot"
    return ChatFormatterResponse(prompt=_prompt.strip())


@register_chat_completion_handler("functionary")
def functionary_chat_handler(
llama: llama.Llama,
Expand Down

0 comments on commit 907b9e9

Please sign in to comment.