import type { ModelData } from "./model-data";
import type { PipelineType } from "./pipelines";
+import { parseGGUFQuantLabel } from "@huggingface/gguf";

export interface LocalAppSnippet {
	/**
@@ -53,6 +54,7 @@ export type LocalApp = {
	/**
	 * And if not (mostly llama.cpp), snippet to copy/paste in your terminal
	 * Support the placeholder {{GGUF_FILE}} that will be replaced by the gguf file path or the list of available files.
+	 * Supports the placeholder {{OLLAMA_TAG}}, which will be replaced by the list of available quant tags, or removed if the repo does not contain multiple quant files.
	 */
	snippet: (model: ModelData, filepath?: string) => string | string[] | LocalAppSnippet | LocalAppSnippet[];
}
@@ -143,6 +145,15 @@ const snippetNodeLlamaCppCli = (model: ModelData, filepath?: string): LocalAppSn
	];
};

+const snippetOllama = (model: ModelData, filepath?: string): string => {
+	if (filepath) {
+		const quantLabel = parseGGUFQuantLabel(filepath);
+		const ollamaTag = quantLabel ? `:${quantLabel}` : "";
+		return `ollama run hf.co/${model.id}${ollamaTag}`;
+	}
+	return `ollama run hf.co/${model.id}{{OLLAMA_TAG}}`;
+};
+
const snippetLocalAI = (model: ModelData, filepath?: string): LocalAppSnippet[] => {
	const command = (binary: string) =>
		["# Load and run the model:", `${binary} huggingface://${model.id}/${filepath ?? "{{GGUF_FILE}}"}`].join("\n");
@@ -389,6 +400,13 @@ export const LOCAL_APPS = {
		displayOnModelPage: (model) => model.library_name === "diffusers" && model.pipeline_tag === "text-to-image",
		deeplink: (model) => new URL(`https://models.invoke.ai/huggingface/${model.id}`),
	},
+	ollama: {
+		prettyLabel: "Ollama",
+		docsUrl: "https://ollama.com",
+		mainTask: "text-generation",
+		displayOnModelPage: isLlamaCppGgufModel,
+		snippet: snippetOllama,
+	},
} satisfies Record<string, LocalApp>;

export type LocalAppKey = keyof typeof LOCAL_APPS;
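For reference, this is roughly how the new entry behaves end to end: a minimal sketch, assuming `LOCAL_APPS` and `ModelData` are imported from `@huggingface/tasks`. The repo id and filename below are hypothetical stand-ins, and the object is cast because `ModelData` carries many more optional fields than the snippet needs.

```ts
import { LOCAL_APPS, type ModelData } from "@huggingface/tasks";

// Hypothetical GGUF repo; only `id` matters for this snippet.
const model = { id: "bartowski/Llama-3.2-1B-Instruct-GGUF" } as ModelData;

// With a concrete file, parseGGUFQuantLabel extracts the quant label
// ("Q4_K_M") from the filename, so the command gets a tag:
// → "ollama run hf.co/bartowski/Llama-3.2-1B-Instruct-GGUF:Q4_K_M"
console.log(LOCAL_APPS.ollama.snippet(model, "Llama-3.2-1B-Instruct-Q4_K_M.gguf"));

// Without a file, the {{OLLAMA_TAG}} placeholder is left in place for the
// caller (e.g. the Hub UI) to replace or strip:
// → "ollama run hf.co/bartowski/Llama-3.2-1B-Instruct-GGUF{{OLLAMA_TAG}}"
console.log(LOCAL_APPS.ollama.snippet(model));
```

The `hf.co/{user}/{repo}:{quant}` form matches Ollama's own syntax for pulling GGUF models directly from the Hub, which is why the tag is derived from the quant label rather than the full filename.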