|
1 |
| -import type { ProviderMapping } from "./types"; |
2 |
| - |
/** Base URL for all requests to the Hyperbolic inference API. */
export const HYPERBOLIC_API_BASE_URL = "https://api.hyperbolic.xyz";
|
4 | 2 |
|
5 |
/**
 * A model ID as expected by the Hyperbolic API.
 * May differ from the canonical HF model ID (e.g. "SDXL1.0-base", "-BF16" suffixed variants).
 */
type HyperbolicId = string;
6 |
| - |
7 | 3 | /**
|
8 |
| - * https://huggingface.co/api/partners/hyperbolic/models |
| 4 | + * See the registered mapping of HF model ID => Hyperbolic model ID here: |
| 5 | + * |
| 6 | + * https://huggingface.co/api/partners/hyperbolic/models |
| 7 | + * |
| 8 | + * This is a publicly available mapping. |
| 9 | + * |
| 10 | + * If you want to try to run inference for a new model locally before it's registered on huggingface.co, |
| 11 | + * you can add it to the dictionary "HARDCODED_MODEL_ID_MAPPING" in consts.ts, for dev purposes. |
| 12 | + * |
| 13 | + * - If you work at Hyperbolic and want to update this mapping, please use the model mapping API we provide on huggingface.co |
| 14 | + * - If you're a community member and want to add a new supported HF model to Hyperbolic, please open an issue on the present repo |
| 15 | + * and we will tag Hyperbolic team members. |
| 16 | + * |
| 17 | + * Thanks! |
9 | 18 | */
|
10 |
/**
 * Per-task mapping of HF model ID => Hyperbolic model ID.
 *
 * Keys are Hugging Face model IDs; values are the IDs the Hyperbolic API expects.
 * Most map 1:1, but several differ (e.g. "SDXL1.0-base" and the "-BF16"-suffixed
 * generation/chat models), so always resolve through this table.
 */
export const HYPERBOLIC_SUPPORTED_MODEL_IDS: ProviderMapping<HyperbolicId> = {
	// Image-generation models.
	"text-to-image": {
		"black-forest-labs/FLUX.1-dev": "black-forest-labs/FLUX.1-dev",
		// Hyperbolic uses a short alias for SDXL base rather than the HF repo ID.
		"stabilityai/stable-diffusion-xl-base-1.0": "SDXL1.0-base",
		"stable-diffusion-v1-5/stable-diffusion-v1-5": "stable-diffusion-v1-5/stable-diffusion-v1-5",
		"segmind/SSD-1B": "segmind/SSD-1B",
		"stabilityai/stable-diffusion-2": "stabilityai/stable-diffusion-2",
		"stabilityai/sdxl-turbo": "stabilityai/sdxl-turbo",
	},
	// Vision-language models (image + text in, text out).
	"image-text-to-text": {
		"Qwen/Qwen2-VL-72B-Instruct": "Qwen/Qwen2-VL-72B-Instruct",
		"mistralai/Pixtral-12B-2409": "mistralai/Pixtral-12B-2409",
		"Qwen/Qwen2-VL-7B-Instruct": "Qwen/Qwen2-VL-7B-Instruct",
	},
	// Raw text-completion (non-chat) models.
	"text-generation": {
		"meta-llama/Llama-3.1-405B-BASE-BF16": "meta-llama/Llama-3.1-405B-BASE-BF16",
		"meta-llama/Llama-3.1-405B-BASE-FP8": "meta-llama/Llama-3.1-405B-BASE-FP8",
		"Qwen/Qwen2.5-72B-Instruct": "Qwen/Qwen2.5-72B-Instruct-BF16",
	},
	// Text-to-audio models (MeloTTS is a speech synthesizer).
	"text-to-audio": {
		"myshell-ai/MeloTTS-English-v3": "myshell-ai/MeloTTS-English-v3",
	},
	// Chat-completion models.
	conversational: {
		"deepseek-ai/DeepSeek-R1": "deepseek-ai/DeepSeek-R1",
		"deepseek-ai/DeepSeek-R1-Zero": "deepseek-ai/DeepSeek-R1-Zero",
		"deepseek-ai/DeepSeek-V3": "deepseek-ai/DeepSeek-V3",
		"meta-llama/Llama-3.2-3B-Instruct": "meta-llama/Llama-3.2-3B-Instruct",
		"meta-llama/Llama-3.3-70B-Instruct": "meta-llama/Llama-3.3-70B-Instruct",
		// The following Hyperbolic IDs diverge from the HF IDs (precision-suffixed
		// or renamed deployments) — do not assume a 1:1 mapping here.
		"meta-llama/Llama-3.1-70B-Instruct": "meta-llama/Llama-3.1-70B-Instruct-BF16",
		"meta-llama/Meta-Llama-3-70B-Instruct": "meta-llama/Llama-3-70b-BF16",
		"meta-llama/Llama-3.1-8B-Instruct": "meta-llama/Llama-3.1-8B-Instruct-BF16",
		"NousResearch/Hermes-3-Llama-3.1-70B": "NousResearch/Hermes-3-Llama-3.1-70B-BF16",
		"Qwen/Qwen2.5-72B-Instruct": "Qwen/Qwen2.5-72B-Instruct-BF16",
		"Qwen/Qwen2.5-Coder-32B-Instruct": "Qwen/Qwen2.5-Coder-32B-Instruct-BF16",
		"Qwen/QwQ-32B-Preview": "Qwen/QwQ-32B-Preview-BF16",
	},
};
0 commit comments