Skip to content

Commit cd0dc98

Browse files
committed
feat: Add C++-side constants for the attention layer indices hparam
Branch: GraniteFour
1 parent c678901 commit cd0dc98

File tree

2 files changed

+2
-0
lines changed

2 files changed

+2
-0
lines changed

src/llama-arch.cpp

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -144,6 +144,7 @@ static const std::map<llm_kv, const char *> LLM_KV_NAMES = {
144144
{ LLM_KV_ATTENTION_SCALE, "%s.attention.scale" },
145145
{ LLM_KV_ATTENTION_KEY_LENGTH_MLA, "%s.attention.key_length_mla" },
146146
{ LLM_KV_ATTENTION_VALUE_LENGTH_MLA, "%s.attention.value_length_mla" },
147+
{ LLM_KV_ATTENTION_LAYER_INDICES, "%s.attention.layer_indices" },
147148

148149
{ LLM_KV_ROPE_DIMENSION_COUNT, "%s.rope.dimension_count" },
149150
{ LLM_KV_ROPE_DIMENSION_SECTIONS, "%s.rope.dimension_sections" },

src/llama-arch.h

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -148,6 +148,7 @@ enum llm_kv {
148148
LLM_KV_ATTENTION_SCALE,
149149
LLM_KV_ATTENTION_KEY_LENGTH_MLA,
150150
LLM_KV_ATTENTION_VALUE_LENGTH_MLA,
151+
LLM_KV_ATTENTION_LAYER_INDICES,
151152

152153
LLM_KV_ROPE_DIMENSION_COUNT,
153154
LLM_KV_ROPE_DIMENSION_SECTIONS,

0 commit comments

Comments
 (0)