2 files changed: +20 -4 lines changed
File 1 of 2 (llama_hparams implementation):

```diff
@@ -49,7 +49,10 @@ uint32_t llama_hparams::n_embd_v_gqa(uint32_t il) const {
     return n_embd_head_v * n_head_kv;
 }
 
-uint32_t llama_hparams::n_embd_k_s() const {
+uint32_t llama_hparams::n_embd_k_s(uint32_t il) const {
+    if (!recurrent_layer(il)) {
+        return 0;
+    }
     if (wkv_head_size != 0) {
         // for RWKV models
         return token_shift_count * n_embd;
@@ -60,7 +63,10 @@ uint32_t llama_hparams::n_embd_k_s() const {
     return (ssm_d_conv > 0 ? ssm_d_conv - 1 : 0) * ssm_d_inner;
 }
 
-uint32_t llama_hparams::n_embd_v_s() const {
+uint32_t llama_hparams::n_embd_v_s(uint32_t il) const {
+    if (!recurrent_layer(il)) {
+        return 0;
+    }
     if (wkv_head_size != 0) {
         // corresponds to RWKV's wkv_states size
         return n_embd * wkv_head_size;
@@ -70,6 +76,10 @@ uint32_t llama_hparams::n_embd_v_s() const {
     return ssm_d_state * ssm_d_inner;
 }
 
+bool llama_hparams::recurrent_layer(uint32_t il) const {
+    return recurrent_layer_arr[il];
+}
+
 bool llama_hparams::is_swa(uint32_t il) const {
     if (il < n_layer) {
         return n_swa > 0 && n_swa_pattern > 0 && il % n_swa_pattern < (n_swa_pattern - 1);
```
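With these per-layer overloads, code that sizes the recurrent-state buffers of a hybrid model can sum over all layers and let non-recurrent (attention) layers contribute zero, since `n_embd_k_s(il)` and `n_embd_v_s(il)` now return 0 when `recurrent_layer(il)` is false. A minimal sketch is below; the helper name `total_recurrent_state`, the include path, and the surrounding usage are illustrative assumptions, not part of this PR.

```cpp
#include <cstddef>
#include <cstdint>
// #include "llama-hparams.h"  // header changed in this PR (path assumed)

// Sketch only: sums the recurrent-state widths over all layers of a hybrid
// model using the per-layer overloads added in this diff. Attention layers
// contribute 0 because the overloads return 0 when recurrent_layer(il) is
// false.
static size_t total_recurrent_state(const llama_hparams & hparams) {
    size_t total = 0;
    for (uint32_t il = 0; il < hparams.n_layer; ++il) {
        total += hparams.n_embd_k_s(il);  // conv / token-shift state width
        total += hparams.n_embd_v_s(il);  // SSM / wkv state width
    }
    return total;
}
```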
File 2 of 2 (llama_hparams header):

```diff
@@ -102,6 +102,9 @@ struct llama_hparams {
     uint32_t ssm_d_state = 0;
     uint32_t ssm_dt_rank = 0;
 
+    // for hybrid state space models
+    std::array<bool, LLAMA_MAX_LAYERS> recurrent_layer_arr;
+
     bool ssm_dt_b_c_rms = false;
 
     float f_clamp_kqv = 0.0f;
@@ -149,10 +152,13 @@ struct llama_hparams {
 
     // dimension of the rolling state embeddings
    // corresponds to Mamba's conv_states size or RWKV's token_shift states size
-    uint32_t n_embd_k_s() const;
+    uint32_t n_embd_k_s(uint32_t il = 0) const;
 
     // dimension of the recurrent state embeddings
-    uint32_t n_embd_v_s() const;
+    uint32_t n_embd_v_s(uint32_t il = 0) const;
+
+    // whether or not the given layer is recurrent (for hybrid models)
+    bool recurrent_layer(uint32_t il) const;
 
     bool is_swa(uint32_t il) const;
 };
```
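The diff adds the `recurrent_layer_arr` flag array but does not show where it is populated. A hypothetical loader-side sketch is below; the interleaving pattern (three recurrent layers followed by one attention layer) and the function name `mark_hybrid_layers` are illustrative assumptions, not taken from this PR.

```cpp
#include <algorithm>
#include <cstdint>
// #include "llama-hparams.h"  // header changed in this PR (path assumed)

// Hypothetical sketch: filling recurrent_layer_arr while loading a hybrid
// model. A real loader would read the layer layout from model metadata
// rather than hard-coding a pattern.
static void mark_hybrid_layers(llama_hparams & hparams) {
    for (uint32_t il = 0; il < hparams.n_layer; ++il) {
        const bool is_attention_layer = (il % 4 == 3);  // assumed pattern
        hparams.recurrent_layer_arr[il] = !is_attention_layer;
    }
}

// Purely recurrent architectures (e.g. Mamba or RWKV) could simply mark
// every layer:
//   std::fill(hparams.recurrent_layer_arr.begin(),
//             hparams.recurrent_layer_arr.end(), true);
```

Note that the declarations keep a default argument of `il = 0`, so existing call sites that treat the model as uniformly recurrent continue to compile unchanged, while hybrid-aware code passes an explicit layer index.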