Skip to content

Commit 1235029

Browse files
ngxson authored and NeoZhangJianyu committed
llama : remove check flash_attn with lora (ggml-org#11104)
1 parent adfcd72 commit 1235029

File tree

1 file changed

+0
-6
lines changed

1 file changed

+0
-6
lines changed

src/llama.cpp

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -11528,13 +11528,7 @@ int32_t llama_lora_adapter_set(
             struct llama_context * ctx,
             struct llama_lora_adapter * adapter,
             float scale) {
-    if (ctx->cparams.flash_attn) {
-        LLAMA_LOG_ERROR("%s: flash_attn is not compatible with LoRA\n", __func__);
-        return -1;
-    }
-
     ctx->lora_adapters[adapter] = scale;
-
     return 0;
 }

0 commit comments

Comments (0)