Commit ccd757a

convert : fix mistakes from refactoring
1 parent: c2f407e


convert-hf-to-gguf.py

Lines changed: 3 additions & 5 deletions
@@ -81,13 +81,13 @@ def set_gguf_parameters(self):
         self.gguf_writer.add_name(self.dir_model.name)
         self.gguf_writer.add_block_count(self.block_count)
 
-        if (n_ctx := self.hparams.get("max_position_embeddings")) is not None:
+        if (n_ctx := self.find_hparam(["max_position_embeddings", "n_ctx"], optional=True)) is not None:
             self.gguf_writer.add_context_length(n_ctx)
 
         n_embd = self.find_hparam(["hidden_size", "n_embd"])
         self.gguf_writer.add_embedding_length(n_embd)
 
-        if (n_ff := self.find_hparam(["intermediate_size", "n_inner"])) is not None:
+        if (n_ff := self.find_hparam(["intermediate_size", "n_inner"], optional=True)) is not None:
             self.gguf_writer.add_feed_forward_length(n_ff)
 
         n_head = self.find_hparam(["num_attention_heads", "n_head"])
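
All three replacements lean on find_hparam's optional flag. For reference, a minimal sketch of the assumed semantics (the real helper is defined elsewhere in convert-hf-to-gguf.py and is not part of this diff; treat the body below as an illustration, not the actual code):

    # Assumed behavior of find_hparam: return the value of the first key
    # that is present in hparams; with optional=True a complete miss yields
    # None instead of raising, which lets the `is not None` walrus checks
    # in the diff above simply skip fields an architecture doesn't define.
    def find_hparam(hparams: dict, keys: list, optional: bool = False):
        key = next((k for k in keys if k in hparams), None)
        if key is not None:
            return hparams[key]
        if optional:
            return None
        raise KeyError(f"could not find any of: {keys}")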
@@ -98,7 +98,7 @@ def set_gguf_parameters(self):
 
         if (f_rms_eps := self.hparams.get("rms_norm_eps")) is not None:
             self.gguf_writer.add_layer_norm_rms_eps(f_rms_eps)
-        if (f_norm_eps := self.hparams.get("layer_norm_eps")) is not None:
+        if (f_norm_eps := self.find_hparam(["layer_norm_eps", "layer_norm_epsilon"], optional=True)) is not None:
             self.gguf_writer.add_layer_norm_eps(f_norm_eps)
         if (n_experts := self.hparams.get("num_local_experts")) is not None:
             self.gguf_writer.add_expert_count(n_experts)
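
The alias lists matter for architectures whose config.json uses GPT-2-style key names. As an illustration, a hypothetical hparams dict with those keys (the values are made up; the key names match GPT-2's published config):

    # With only GPT-2-style keys present, the old lookups
    # hparams.get("max_position_embeddings") and hparams.get("layer_norm_eps")
    # returned None, silently dropping these fields from the GGUF metadata;
    # the new alias lists pick them up.
    hparams = {
        "n_ctx": 1024,               # found via the new "n_ctx" alias
        "n_embd": 768,
        "n_head": 12,
        "layer_norm_epsilon": 1e-5,  # found via the new "layer_norm_epsilon" alias
    }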
@@ -1750,9 +1750,7 @@ def __init__(self, *args, **kwargs):
 
     def set_gguf_parameters(self):
         super().set_gguf_parameters()
-        self.gguf_writer.add_causal_attention(self.hparams["causal"])
         self.gguf_writer.add_rope_freq_base(self.hparams["rotary_emb_base"])
-        self.gguf_writer.add_pooling_layer(True)
 
     def get_tensors(self):
         assert self.vocab_size is not None
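
The two deletions read as refactor fallout: since the method starts with super().set_gguf_parameters(), the likely intent is that the parent class now owns the causal-attention and pooling writes, so repeating them here would write the same GGUF keys twice (and the direct index self.hparams["causal"] would raise KeyError for configs that omit the key). A sketch of that assumed division of labor (the class names and the parent body are inferred, not shown in this diff):

    class ParentModel:  # hypothetical stand-in for the real base class
        def set_gguf_parameters(self):
            # assumed to now handle add_causal_attention(...) and
            # add_pooling_layer(...) after the refactoring
            ...

    class ThisModel(ParentModel):  # hypothetical name for the class in the hunk
        def set_gguf_parameters(self):
            super().set_gguf_parameters()
            self.gguf_writer.add_rope_freq_base(self.hparams["rotary_emb_base"])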
