
Commit 0aa1228
remove unnecessary changes
yangw1234 committed Oct 2, 2023
1 parent 41c4be1 commit 0aa1228
Showing 1 changed file with 3 additions and 3 deletions.

python/llm/src/bigdl/llm/transformers/models/llama.py
@@ -94,11 +94,11 @@ def llama_attention_forward_4_31(
      value_states = self.v_proj(hidden_states)

      query_states = query_states.view(bsz, q_len,
-                                      self.num_heads, self.head_dim).transpose(1, 2)
+                                      self.num_heads, self.head_dim).transpose(1, 2)
      key_states = key_states.view(bsz, q_len,
-                                  self.num_key_value_heads, self.head_dim).transpose(1, 2)
+                                  self.num_key_value_heads, self.head_dim).transpose(1, 2)
      value_states = value_states.view(bsz, q_len,
-                                      self.num_key_value_heads, self.head_dim).transpose(1, 2)
+                                      self.num_key_value_heads, self.head_dim).transpose(1, 2)

      kv_seq_len = key_states.shape[-2]
      if past_key_value is not None:
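For context, the reshaping touched by this hunk splits the projected Q/K/V tensors into per-head layout, (bsz, num_heads, q_len, head_dim), before attention is computed. A minimal standalone sketch of that view/transpose pattern follows; the sizes and the q_proj/k_proj stand-ins are illustrative assumptions, not values taken from the BigDL module.

    import torch

    # Illustrative sizes only (not from the BigDL source).
    bsz, q_len, num_heads, num_key_value_heads, head_dim = 2, 8, 32, 32, 128

    hidden_states = torch.randn(bsz, q_len, num_heads * head_dim)

    # Stand-ins for self.q_proj / self.k_proj in the real attention module.
    q_proj = torch.nn.Linear(num_heads * head_dim, num_heads * head_dim)
    k_proj = torch.nn.Linear(num_heads * head_dim, num_key_value_heads * head_dim)

    # Same view + transpose(1, 2) pattern as in the diff above.
    query_states = q_proj(hidden_states).view(bsz, q_len,
                                              num_heads, head_dim).transpose(1, 2)
    key_states = k_proj(hidden_states).view(bsz, q_len,
                                            num_key_value_heads, head_dim).transpose(1, 2)

    print(query_states.shape)  # torch.Size([2, 32, 8, 128]) -> (bsz, num_heads, q_len, head_dim)
    print(key_states.shape)    # torch.Size([2, 32, 8, 128]) -> (bsz, num_key_value_heads, q_len, head_dim)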
