We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
2 parents e67f42c + 3b2aa49 — commit 8451aba (Copy full SHA for 8451aba)
examples/models/llama/source_transformation/attention_sink.py
@@ -266,7 +266,7 @@ def _replace_attention(
266
for _, child_module in module._modules.items():
267
if len(list(child_module.children())) > 0: # pyre-ignore [16]
268
_replace_attention(
269
- module=child_module,
+ module=child_module, # pyre-ignore [6]
270
rope_with_attention_sink=rope_with_attention_sink,
271
sink_size=sink_size,
272
window_size=window_size,
0 commit comments