fix: Pass raise_exception and add_generation_prompt to jinja2 chat template
abetlen committed Jan 31, 2024
1 parent 4114947 commit 078cca0
Showing 1 changed file with 9 additions and 8 deletions.

llama_cpp/llama_chat_format.py (9 additions, 8 deletions)
@@ -185,16 +185,17 @@ def __call__(
         messages: List[llama_types.ChatCompletionRequestMessage],
         **kwargs: Any,
     ) -> ChatFormatterResponse:
-        if self.add_generation_prompt:
-            messages = [
-                *messages,
-                llama_types.ChatCompletionRequestAssistantMessage(
-                    role="assistant", content=""
-                ),
-            ]
+        def raise_exception(message: str):
+            raise ValueError(message)

         prompt = self._environment.render(
-            messages=messages, eos_token=self.eos_token, bos_token=self.bos_token
+            messages=messages,
+            eos_token=self.eos_token,
+            bos_token=self.bos_token,
+            raise_exception=raise_exception,
+            add_generation_prompt=self.add_generation_prompt
         )

         return ChatFormatterResponse(prompt=prompt, stop=[self.eos_token])

     def to_chat_handler(self) -> LlamaChatCompletionHandler:
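For context, below is a minimal sketch of how a Jinja2 chat template can consume the two variables the formatter now passes in. The ChatML-style template string, the sample message, and the token values are illustrative assumptions rather than code from this repository; only the raise_exception/add_generation_prompt wiring mirrors the diff above.

# Illustrative sketch (assumed example, not this repo's code): a Jinja2 chat
# template that uses raise_exception and add_generation_prompt at render time.
from jinja2 import Environment

CHAT_TEMPLATE = (
    "{% for message in messages %}"
    "{% if message['role'] not in ['system', 'user', 'assistant'] %}"
    "{{ raise_exception('Unsupported role: ' + message['role']) }}"
    "{% endif %}"
    "<|im_start|>{{ message['role'] }}\n{{ message['content'] }}<|im_end|>\n"
    "{% endfor %}"
    "{% if add_generation_prompt %}<|im_start|>assistant\n{% endif %}"
)

def raise_exception(message: str):
    # Mirrors the helper added in the diff: surface template errors as ValueError.
    raise ValueError(message)

template = Environment().from_string(CHAT_TEMPLATE)
prompt = template.render(
    messages=[{"role": "user", "content": "Hello!"}],
    bos_token="<s>",
    eos_token="</s>",
    raise_exception=raise_exception,
    # The template itself decides how to open the assistant turn, instead of the
    # formatter appending an empty assistant message as the old code did.
    add_generation_prompt=True,
)
print(prompt)

With add_generation_prompt=True this renders the user turn followed by the "<|im_start|>assistant" header, which is the behavior the commit delegates to the template.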
