commit 1235029 (1 parent: adfcd72)
src/llama.cpp
@@ -11528,13 +11528,7 @@ int32_t llama_lora_adapter_set(
struct llama_context * ctx,
struct llama_lora_adapter * adapter,
float scale) {
- if (ctx->cparams.flash_attn) {
- LLAMA_LOG_ERROR("%s: flash_attn is not compatible with LoRA\n", __func__);
- return -1;
- }
-
ctx->lora_adapters[adapter] = scale;

return 0;
}
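
For context, a minimal sketch of how this code path is exercised: after this change, llama_lora_adapter_set no longer rejects a context that was created with flash_attn enabled. The surrounding calls (llama_lora_adapter_init, the flash_attn flag in llama_context_params, and the usual model/context setup) are assumed from the llama.h of this period, not shown in the commit itself.

// Sketch, not part of the commit: apply a LoRA adapter to a flash-attn context.
#include "llama.h"
#include <cstdio>

int main(int argc, char ** argv) {
    if (argc < 3) {
        fprintf(stderr, "usage: %s <model.gguf> <lora.gguf>\n", argv[0]);
        return 1;
    }

    llama_backend_init();

    llama_model_params mparams = llama_model_default_params();
    llama_model * model = llama_load_model_from_file(argv[1], mparams);

    llama_context_params cparams = llama_context_default_params();
    cparams.flash_attn = true; // before this commit, this made the set call fail

    llama_context * ctx = llama_new_context_with_model(model, cparams);

    // load the LoRA adapter for this model
    llama_lora_adapter * adapter = llama_lora_adapter_init(model, argv[2]);

    // with this commit, the call succeeds even though flash_attn is enabled
    int32_t res = llama_lora_adapter_set(ctx, adapter, 1.0f);
    fprintf(stderr, "llama_lora_adapter_set returned %d\n", res);

    llama_free(ctx);
    llama_free_model(model); // adapters are freed together with the model
    llama_backend_free();
    return res == 0 ? 0 : 1;
}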