@@ -3,11 +3,12 @@
 """
 from abc import ABC, abstractmethod
 from typing import Optional
+from langchain_aws import BedrockEmbeddings
 from langchain_openai import AzureOpenAIEmbeddings, OpenAIEmbeddings
-from langchain_community.embeddings import HuggingFaceHubEmbeddings, OllamaEmbeddings, BedrockEmbeddings
+from langchain_community.embeddings import HuggingFaceHubEmbeddings, OllamaEmbeddings
 from langchain_google_genai import GoogleGenerativeAIEmbeddings
 from ..helpers import models_tokens
-from ..models import AzureOpenAI, Bedrock, Gemini, Groq, HuggingFace, Ollama, OpenAI, Anthropic, Claude, DeepSeek
+from ..models import AzureOpenAI, Bedrock, Gemini, Groq, HuggingFace, Ollama, OpenAI, Anthropic, DeepSeek


 class AbstractGraph(ABC):
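The first hunk moves BedrockEmbeddings from langchain_community, whose integration classes LangChain has been splitting out into partner packages, to the dedicated langchain_aws package. A minimal usage sketch of the relocated import, assuming AWS credentials are configured; the model_id and region_name values are illustrative assumptions, not taken from this commit:

from langchain_aws import BedrockEmbeddings  # new import path used by this commit

# model_id and region_name are illustrative assumptions, not part of the diff.
embedder = BedrockEmbeddings(
    model_id="amazon.titan-embed-text-v1",
    region_name="us-east-1",
)
vector = embedder.embed_query("what is web scraping?")  # returns a list of floats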
@@ -145,12 +146,12 @@ def _create_llm(self, llm_config: dict, chat=False) -> object:
             except KeyError as exc:
                 raise KeyError("Model not supported") from exc
             return Gemini(llm_params)
-        elif "claude" in llm_params["model"]:
+        elif llm_params["model"].startswith("claude"):
             try:
                 self.model_token = models_tokens["claude"][llm_params["model"]]
             except KeyError as exc:
                 raise KeyError("Model not supported") from exc
-            return Claude(llm_params)
+            return Anthropic(llm_params)
         elif "ollama" in llm_params["model"]:
             llm_params["model"] = llm_params["model"].split("/")[-1]

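The second hunk tightens the Claude check from a substring test to startswith and returns the Anthropic wrapper instead of the Claude class, which also disappears from the ..models import above. A minimal, self-contained sketch of the resulting branch; Anthropic, models_tokens, and create_llm below are simplified stand-ins for the scrapegraphai objects, and the token count is an illustrative assumption:

# Stand-ins for the real scrapegraphai objects -- illustrative only.
models_tokens = {"claude": {"claude-3-opus-20240229": 200000}}

class Anthropic:
    """Simplified stand-in for scrapegraphai's Anthropic model wrapper."""
    def __init__(self, llm_params: dict):
        self.llm_params = llm_params

def create_llm(llm_params: dict) -> object:
    # startswith() only fires for names that begin with "claude"; the old
    # substring test could also match ids that merely contain "claude"
    # (e.g. Bedrock-style ids such as "anthropic.claude-v2") that are
    # meant for other branches of the elif chain.
    if llm_params["model"].startswith("claude"):
        try:
            # The token-limit lookup doubles as a supported-model check.
            model_token = models_tokens["claude"][llm_params["model"]]
        except KeyError as exc:
            raise KeyError("Model not supported") from exc
        return Anthropic(llm_params)
    raise ValueError(f"Unhandled model: {llm_params['model']}")

llm = create_llm({"model": "claude-3-opus-20240229"})  # -> Anthropic instance

Anchoring the match at the start of the name is the safer default here: it keeps this branch from shadowing provider-prefixed model ids that other branches are responsible for.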