From f0ca1637401a8c9a18e9cbf105686b0049f4a54b Mon Sep 17 00:00:00 2001
From: Ahmed Ahmed
Date: Wed, 23 Oct 2024 12:13:23 -0700
Subject: [PATCH] update for v4 so we don't crash

---
 config/llama_7b_tulu.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/config/llama_7b_tulu.yaml b/config/llama_7b_tulu.yaml
index 2cd9bf5a2..48af18b2a 100644
--- a/config/llama_7b_tulu.yaml
+++ b/config/llama_7b_tulu.yaml
@@ -14,7 +14,7 @@ model: # 7B class model
   num_heads: 32
   num_kv_heads: 32
   use_flash_attention: True
-  flash_attention_block_size: 1024
+  flash_attention_block_size: 512
   use_bias: false
   use_layer_norm_weight: false
 trainer:
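
Reviewer note: below is a minimal sketch of how the model block in config/llama_7b_tulu.yaml reads after this patch, reconstructed from the hunk above. Only the fields visible in the hunk are shown; the inline comment on the changed line is an assumption inferred from the subject line (the 1024 block size presumably crashing on TPU v4), not something stated in the config itself.

model: # 7B class model
  num_heads: 32
  num_kv_heads: 32
  use_flash_attention: True
  flash_attention_block_size: 512  # was 1024; assumed too large for the v4 attention kernel, per the subject line
  use_bias: false
  use_layer_norm_weight: false
trainer: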