@@ -32,30 +32,30 @@ export type LocalApp = {
 			/**
 			 * If the app supports deeplink, URL to open.
 			 */
-			deeplink: (model: ModelData) => URL;
+			deeplink: (model: ModelData, filepath?: string) => URL;
 	  }
 	| {
 			/**
 			 * And if not (mostly llama.cpp), snippet to copy/paste in your terminal
 			 * Support the placeholder {{GGUF_FILE}} that will be replaced by the gguf file path or the list of available files.
 			 */
-			snippet: (model: ModelData) => string | string[];
+			snippet: (model: ModelData, filepath?: string) => string | string[];
 	  }
 );

 function isGgufModel(model: ModelData) {
 	return model.tags.includes("gguf");
 }

-const snippetLlamacpp = (model: ModelData): string[] => {
+const snippetLlamacpp = (model: ModelData, filepath?: string): string[] => {
 	return [
 		`# Option 1: use llama.cpp with brew
 brew install llama.cpp

 # Load and run the model
 llama \\
 	--hf-repo "${model.id}" \\
-	--hf-file {{GGUF_FILE}} \\
+	--hf-file ${filepath ?? "{{GGUF_FILE}}"} \\
 	-p "I believe the meaning of life is" \\
 	-n 128`,
 		`# Option 2: build llama.cpp from source with curl support
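
For orientation, a minimal sketch (not part of this diff) of how a caller might thread a user-selected file through the widened signatures. `launchOrSnippet` and `pickedFile` are hypothetical names; `LocalApp` and `ModelData` are the types from this module, and the import path is assumed:

import type { LocalApp, ModelData } from "./local-apps"; // assumed path

// Dispatch on the union: apps with a deeplink get a URL, the rest
// (mostly llama.cpp) get a copy/paste snippet. The optional filepath
// is threaded through unchanged; both union members accept it after
// this change.
function launchOrSnippet(app: LocalApp, model: ModelData, pickedFile?: string): URL | string | string[] {
	return "deeplink" in app ? app.deeplink(model, pickedFile) : app.snippet(model, pickedFile);
}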
@@ -66,7 +66,7 @@ LLAMA_CURL=1 make
 # Load and run the model
 ./main \\
 	--hf-repo "${model.id}" \\
-	-m {{GGUF_FILE}} \\
+	-m ${filepath ?? "{{GGUF_FILE}}"} \\
 	-p "I believe the meaning of life is" \\
 	-n 128`,
 	];
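
A note on the `filepath ?? "{{GGUF_FILE}}"` pattern used in both snippets: when a concrete file is known it is embedded directly, otherwise the literal placeholder survives for later substitution. A tiny standalone sketch of that behavior (made-up file name):

// `??` falls back only on null/undefined, so a concrete path wins and
// the placeholder is kept otherwise.
const hfFileArg = (filepath?: string): string => filepath ?? "{{GGUF_FILE}}";

hfFileArg("mistral-7b-v0.1.Q4_K_M.gguf"); // -> "mistral-7b-v0.1.Q4_K_M.gguf"
hfFileArg();                              // -> "{{GGUF_FILE}}"

Because `??` does not fall back on an empty string, passing `""` would embed an empty argument verbatim; callers should pass either a real path or nothing.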
@@ -96,7 +96,8 @@ export const LOCAL_APPS = {
 		docsUrl: "https://lmstudio.ai",
 		mainTask: "text-generation",
 		displayOnModelPage: isGgufModel,
-		deeplink: (model) => new URL(`lmstudio://open_from_hf?model=${model.id}`),
+		deeplink: (model, filepath) =>
+			new URL(`lmstudio://open_from_hf?model=${model.id}` + (filepath ? `&file=${filepath}` : "")),
 	},
 	jan: {
 		prettyLabel: "Jan",
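
The parentheses around the ternary in the new LM Studio deeplink are load-bearing: `+` binds tighter than `?:`, so without them the concatenated string itself becomes the condition and only the `&file=...` fragment survives. A small standalone sketch (hypothetical model id):

const id = "TheBloke/Mistral-7B-v0.1-GGUF"; // made-up example id

function deeplinkFor(filepath?: string): string {
	const base = `lmstudio://open_from_hf?model=${id}`;
	// Pitfall: without parentheses, base + filepath ? "&file=..." : ""
	// parses as (base + filepath) ? ... : "" — the condition is always a
	// truthy string, so only the "&file=..." fragment would be returned
	// and new URL(...) would throw on it.
	return base + (filepath ? `&file=${filepath}` : "");
}

deeplinkFor();             // -> "lmstudio://open_from_hf?model=TheBloke/Mistral-7B-v0.1-GGUF"
deeplinkFor("model.gguf"); // -> same URL with "&file=model.gguf" appended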