1 parent 71612a6 commit fc6d097
extension/llm/modules/test/test_attention.py
@@ -146,6 +146,7 @@ def test_attention_export(self):
         assert_close(et_res, tt_res)
 
+    @unittest.skip(reason="TODO(T207740932): test is flaky")
     def test_attention_aoti(self):
         # Self attention.
extension/llm/modules/test/test_position_embeddings.py
@@ -163,6 +163,7 @@ def test_tiled_token_positional_embedding_export(self):
         assert_close(y, ref_y)
 
+    @unittest.skip(reason="TODO(T207740932): test is flaky")
     def test_tiled_token_positional_embedding_aoti(self):
         tpe_ep = torch.export.export(
             self.tpe,
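Both hunks use the standard-library @unittest.skip decorator, which prevents the decorated test method from running at all and reports it as skipped rather than passed or failed. A minimal, self-contained sketch of the pattern (the class name, test names, and task ID below are illustrative placeholders, not from this commit):

import unittest


class FlakyExampleTest(unittest.TestCase):
    # Hypothetical test class; names and task ID are illustrative only.

    def test_stable(self):
        # Runs normally and is reported as a pass or a failure.
        self.assertEqual(1 + 1, 2)

    @unittest.skip(reason="TODO(T000000000): test is flaky")
    def test_flaky(self):
        # Never executed: the runner marks it as skipped,
        # so its nondeterministic failure cannot break CI.
        raise RuntimeError("nondeterministic failure")


if __name__ == "__main__":
    unittest.main()

Skipping with a task ID in the reason string keeps CI green while leaving a searchable marker for re-enabling the test once the flakiness is fixed.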