Skip to content

Commit

Permalink
fix style
Browse files Browse the repository at this point in the history
  • Loading branch information
rnwang04 committed Dec 26, 2024
1 parent 2a414f5 commit 9f8f981
Show file tree
Hide file tree
Showing 2 changed files with 7 additions and 5 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -433,12 +433,13 @@ def convert_fused_llama_layer(model, fused_layers, n_splits_linear, n_splits_dow
weights.append((torch.stack(l_weights, axis=0), torch.stack(scales, axis=0),
torch.stack(zeros, axis=0)))
else:
weights.append((torch.stack(l_weights, axis=0), torch.stack(scales, axis=0)))
weights.append((torch.stack(l_weights, axis=0),
torch.stack(scales, axis=0)))
else:
for layer in [attn_layer.q_proj, attn_layer.k_proj,
attn_layer.v_proj, attn_layer.o_proj,
mlp_layer.gate_proj, mlp_layer.up_proj,
mlp_layer.down_proj]:
attn_layer.v_proj, attn_layer.o_proj,
mlp_layer.gate_proj, mlp_layer.up_proj,
mlp_layer.down_proj]:
if layer.zero is not None:
weights.append((layer.weight, layer.scale, layer.zero))
else:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -458,7 +458,8 @@ def convert_fused_minicpm_layer(model, fused_layers, n_splits_linear, n_splits_d
weights.append((torch.stack(l_weights, axis=0), torch.stack(scales, axis=0),
torch.stack(zeros, axis=0)))
else:
weights.append((torch.stack(l_weights, axis=0), torch.stack(scales, axis=0)))
weights.append((torch.stack(l_weights, axis=0),
torch.stack(scales, axis=0)))
else:
for layer in [attn_layer.q_proj, attn_layer.k_proj,
attn_layer.v_proj, attn_layer.o_proj,
Expand Down

0 comments on commit 9f8f981

Please sign in to comment.