
Commit

Add Bumblebee Phi-4 (#233)
This commit adds the prompt template for the Phi-4 model.

Tested with quantized Phi-4 on a 40 GB A100 (17.7 GB of VRAM and 100 GB of RAM to quantize).

template_format: :phi_4
marcnnn authored Jan 13, 2025
1 parent ead0e55 commit 92ad647
Showing 2 changed files with 31 additions and 1 deletion.
9 changes: 8 additions & 1 deletion lib/chat_models/chat_bumblebee.ex
@@ -123,7 +123,14 @@ defmodule LangChain.ChatModels.ChatBumblebee do
# # more focused and deterministic.
# field :temperature, :float, default: 1.0

field :template_format, Ecto.Enum, values: [:inst, :im_start, :zephyr, :llama_2, :llama_3]
field :template_format, Ecto.Enum, values: [
:inst,
:im_start,
:zephyr,
:phi_4,
:llama_2,
:llama_3
]

# The bumblebee model may compile differently based on the stream true/false
# option on the serving. Therefore, streaming should be enabled on the
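With :phi_4 added to the template_format enum, a caller can select it when configuring the chat model. A minimal sketch, assuming the model's usual new/1 constructor and an already-running Phi-4 serving (the serving name below is illustrative, not part of this commit):

# A minimal sketch (not part of this commit): selecting the new template format
# when building the chat model. Assumes a Bumblebee serving for Phi-4 is already
# started and registered under the illustrative name :phi4_serving.
alias LangChain.ChatModels.ChatBumblebee

{:ok, chat_model} =
  ChatBumblebee.new(%{
    serving: :phi4_serving,
    template_format: :phi_4
  })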
23 changes: 23 additions & 0 deletions lib/utils/chat_templates.ex
@@ -261,6 +261,29 @@ defmodule LangChain.Utils.ChatTemplates do
)
end

def apply_chat_template!(messages, :phi_4, _opts) do
# Translated from https://huggingface.co/microsoft/phi-4/blob/main/tokenizer_config.json#L774 to Elixir via Claude 3.5 Sonnet (Copilot)
# {% for message in messages %}{% if (message['role'] == 'system') %}{{'<|im_start|>system<|im_sep|>' + message['content'] + '<|im_end|>'}}{% elif (message['role'] == 'user') %}{{'<|im_start|>user<|im_sep|>' + message['content'] + '<|im_end|><|im_start|>assistant<|im_sep|>'}}{% elif (message['role'] == 'assistant') %}{{message['content'] + '<|im_end|>'}}{% endif %}{% endfor %}
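# Each user turn is immediately followed by an opened assistant turn
# (<|im_start|>assistant<|im_sep|>); completed assistant replies are closed
# with <|im_end|>.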
{system, first_user, rest} = prep_and_validate_messages(messages)

text = """
<%= if @system != nil do %><|im_start|>system<|im_sep|><%= @system.content %><|im_end|><% end %>\
<%= if @first_user != nil do %><|im_start|>user<|im_sep|><%= @first_user.content %><|im_end|><|im_start|>assistant<|im_sep|><% end %>\
<%= for m <- @rest do %>\
<%= if m.role == :user do %><|im_start|>user<|im_sep|><%= m.content %><|im_end|><|im_start|>assistant<|im_sep|>\
<% else %><%= m.content %><|im_end|><% end %>\
<% end %>
"""

EEx.eval_string(text,
assigns: [
system: system,
first_user: first_user,
rest: rest
]
)
end
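For reference, a sketch of exercising the new clause directly. The conversation and the message constructors (LangChain.Message.new_system!/1 and new_user!/1) are illustrative and not part of this commit:

alias LangChain.Message
alias LangChain.Utils.ChatTemplates

# Illustrative conversation: one system message and one user message.
messages = [
  Message.new_system!("You are a helpful assistant."),
  Message.new_user!("Write a haiku about Elixir.")
]

text = ChatTemplates.apply_chat_template!(messages, :phi_4, [])
# `text` is one continuous string (wrapped here for readability), roughly:
#   <|im_start|>system<|im_sep|>You are a helpful assistant.<|im_end|>
#   <|im_start|>user<|im_sep|>Write a haiku about Elixir.<|im_end|>
#   <|im_start|>assistant<|im_sep|>
# ending with an open assistant turn for the model to complete.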

# Does LLaMa 2 formatted text
def apply_chat_template!(messages, :llama_2, _opts) do
# https://huggingface.co/blog/llama2#how-to-prompt-llama-2
