Skip to content

Commit a49299b

Browse files
committed
Remove alignment_prevents_mmap, which is no longer needed.
1 parent 73bcc5b commit a49299b

File tree

1 file changed

+0
-13
lines changed

1 file changed

+0
-13
lines changed

llama.cpp

Lines changed: 0 additions & 13 deletions
Original file line number | Diff line number | Diff line change
@@ -581,22 +581,9 @@ struct llama_model_loader {
581581
if (!llama_mmap::SUPPORTED) {
582582
use_mmap = false;
583583
}
584-
if (use_mmap && alignment_prevents_mmap()) {
585-
fprintf(stderr, "llama.cpp: can't use mmap because tensors are not aligned; convert to new format to avoid this\n");
586-
use_mmap = false;
587-
}
588584
this->use_mmap = use_mmap;
589585
}
590586

591-
bool alignment_prevents_mmap() {
592-
for (const llama_load_tensor & lt : tensors_map.tensors) {
593-
if (lt.file_off & 3) {
594-
return true;
595-
}
596-
}
597-
return false;
598-
}
599-
600587
void calc_sizes(size_t * ctx_size_p, size_t * mmapped_size_p) const {
601588
*ctx_size_p = *mmapped_size_p = 0;
602589
for (const llama_load_tensor & lt : tensors_map.tensors) {

0 commit comments

Comments (0)