From b83e9a6cd2d0ba3e8e2eaf7465e2e36e3c373a5e Mon Sep 17 00:00:00 2001
From: Gabe Goodhart
Date: Thu, 12 Dec 2024 15:02:38 -0700
Subject: [PATCH] fix: Remove unused LLM_KV_ATTENTION_LAYER_COUNT

I'd added this at one point, but it's not actually needed

Branch: BambaArchitecture

Signed-off-by: Gabe Goodhart
---
 src/llama.cpp | 1 -
 1 file changed, 1 deletion(-)

diff --git a/src/llama.cpp b/src/llama.cpp
index 04a01b253058e..8fd054c0e29c6 100644
--- a/src/llama.cpp
+++ b/src/llama.cpp
@@ -310,7 +310,6 @@ enum llm_kv {
     LLM_KV_ATTENTION_RELATIVE_BUCKETS_COUNT,
     LLM_KV_ATTENTION_SLIDING_WINDOW,
     LLM_KV_ATTENTION_SCALE,
-    LLM_KV_ATTENTION_LAYER_COUNT,
     LLM_KV_ATTENTION_LAYER_INDICES,
 
     LLM_KV_ROPE_DIMENSION_COUNT,
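
Context for reviewers: an llm_kv enumerator in src/llama.cpp only does work if it is
paired with a GGUF metadata key string and read somewhere during model loading;
LLM_KV_ATTENTION_LAYER_COUNT had neither, so it can be deleted outright. The sketch
below is a minimal, self-contained illustration of that enum-to-key pattern, not
llama.cpp's actual tables: the key strings shown and the kv_name() helper are
assumptions made for the example.

// Minimal sketch of the enum-to-key-string pattern used for GGUF metadata.
// The key strings and the kv_name() helper are illustrative assumptions,
// not llama.cpp's actual implementation.
#include <cstdio>
#include <map>
#include <string>

enum llm_kv {
    LLM_KV_ATTENTION_SLIDING_WINDOW,
    LLM_KV_ATTENTION_SCALE,
    LLM_KV_ATTENTION_LAYER_INDICES,
};

// Each enumerator earns its keep by appearing here (and being read during
// model load). An enumerator with no mapping and no reader is dead code,
// which is what this patch removes.
static const std::map<llm_kv, const char *> LLM_KV_NAMES = {
    { LLM_KV_ATTENTION_SLIDING_WINDOW, "%s.attention.sliding_window" },
    { LLM_KV_ATTENTION_SCALE,          "%s.attention.scale"          },
    { LLM_KV_ATTENTION_LAYER_INDICES,  "%s.attention.layer_indices"  },
};

// Hypothetical helper: expand the "%s" architecture placeholder into a
// concrete metadata key for a given architecture name.
static std::string kv_name(llm_kv kv, const std::string & arch) {
    char buf[256];
    snprintf(buf, sizeof(buf), LLM_KV_NAMES.at(kv), arch.c_str());
    return buf;
}

int main() {
    // Prints "bamba.attention.layer_indices" for the architecture "bamba".
    printf("%s\n", kv_name(LLM_KV_ATTENTION_LAYER_INDICES, "bamba").c_str());
    return 0;
}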