File tree Expand file tree Collapse file tree 2 files changed +18
-0
lines changed Expand file tree Collapse file tree 2 files changed +18
-0
lines changed Original file line number Diff line number Diff line change @@ -50,6 +50,7 @@ llama_build_and_test_executable(test-grad0.cpp)
50
50
llama_build_and_test_executable(test-backend-ops.cpp)
51
51
52
52
llama_build_and_test_executable(test-rope.cpp)
53
+ llama_build_and_test_executable(test-model-load-cancel.cpp)
53
54
54
55
# dummy executable - not installed
55
56
get_filename_component(TEST_TARGET test-c.c NAME_WE)
Original file line number Diff line number Diff line change
1
+ #include " llama.h"
2
+
3
+ #include < cstdlib>
4
+ #include < tuple>
5
+
6
+ int main (void ) {
7
+ llama_backend_init (false );
8
+ auto params = llama_model_params{};
9
+ params.use_mmap = false ;
10
+ params.progress_callback = [](float progress, void * ctx){
11
+ std::ignore = ctx;
12
+ return progress > 0.50 ;
13
+ };
14
+ auto * model = llama_load_model_from_file (" ../models/7B/ggml-model-f16.gguf" , params);
15
+ llama_backend_free ();
16
+ return model == nullptr ? EXIT_SUCCESS : EXIT_FAILURE;
17
+ }
You can’t perform that action at this time.
0 commit comments