@@ -1,6 +1,17 @@
 import type { ModelData } from "./model-data";
 import type { PipelineType } from "./pipelines";
 
+/**
+ * Code snippet to display.
+ * Supports placeholders of the form {{key}} in `content`,
+ * where key is a key in the `parameters` object.
+ */
+export interface Snippet {
+	content: string;
+	language?: "javascript" | "json" | "python" | "bash" | "jboss-cli" | "markdown" | "xml";
+	parameters?: Record<string, string[]>;
+}
+
 /**
  * Elements configurable by a local app.
  */
@@ -38,28 +49,32 @@ export type LocalApp = {
 	/**
 	 * And if not (mostly llama.cpp), snippet to copy/paste in your terminal
 	 */
-	snippet: (model: ModelData) => string | string[];
+	snippet: (model: ModelData) => Snippet | Snippet[];
 }
 );
 
 function isGgufModel(model: ModelData) {
 	return model.tags.includes("gguf");
 }
 
-const snippetLlamacpp = (model: ModelData): string[] => {
+const snippetLlamacpp = (model: ModelData): Snippet[] => {
 	return [
-		`
-## Install and build llama.cpp with curl support
+		{
+			content: `## Install and build llama.cpp with curl support
 git clone https://github.com/ggerganov/llama.cpp.git
 cd llama.cpp
 LLAMA_CURL=1 make
 `,
-		`## Load and run the model
+		},
+		{
+			content: `## Load and run the model
 ./main \\
 	--hf-repo "${model.id}" \\
 	-m {{GGUF_FILE}} \\
 	-p "I believe the meaning of life is" \\
 	-n 128`,
+			parameters: { GGUF_FILE: model.ggufFilePaths ?? [] },
+		},
 	];
 };
 
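For context, here is a minimal sketch of how a caller might resolve the `{{key}}` placeholders declared in `parameters`. The `interpolateSnippet` helper, its import path, and the "first candidate" fallback are illustrative assumptions, not part of this diff.

```ts
import type { Snippet } from "./local-apps"; // assumed import path for the new interface

/**
 * Hypothetical helper: replace each {{key}} placeholder in `content` with an
 * explicitly chosen value, fall back to the first candidate listed in
 * `parameters`, and leave the placeholder untouched if neither exists.
 */
function interpolateSnippet(snippet: Snippet, chosen: Record<string, string> = {}): string {
	return snippet.content.replace(/\{\{(\w+)\}\}/g, (placeholder, key: string) => {
		return chosen[key] ?? snippet.parameters?.[key]?.[0] ?? placeholder;
	});
}

// Usage sketch: render the llama.cpp snippets, picking the first GGUF file when available.
// const rendered = snippetLlamacpp(model).map((s) => interpolateSnippet(s));
```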