@@ -8,6 +8,7 @@
 from pydantic import BaseModel

 from langchain_community.chat_models import ChatOllama
+from langchain_openai import ChatOpenAI

 from langchain_aws import BedrockEmbeddings
 from langchain_community.embeddings import HuggingFaceHubEmbeddings, OllamaEmbeddings
@@ -21,7 +22,6 @@
 from ..models import (
     Anthropic,
     AzureOpenAI,
-    OpenAI,
     Bedrock,
     Gemini,
     Groq,
@@ -37,7 +37,7 @@
 from ..utils.logging import set_verbosity_debug, set_verbosity_warning, set_verbosity_info

 from ..helpers import models_tokens
-from ..models import AzureOpenAI, OpenAI, Bedrock, Gemini, Groq, HuggingFace, Anthropic, DeepSeek
+from ..models import AzureOpenAI, Bedrock, Gemini, Groq, HuggingFace, Anthropic, DeepSeek


 class AbstractGraph(ABC):
@@ -311,7 +311,7 @@ def _create_default_embedder(self, llm_config=None) -> object:
             return GoogleGenerativeAIEmbeddings(
                 google_api_key=llm_config["api_key"], model="models/embedding-001"
             )
-        if isinstance(self.llm_model, OpenAI):
+        if isinstance(self.llm_model, ChatOpenAI):
             return OpenAIEmbeddings(api_key=self.llm_model.openai_api_key,
                                     base_url=self.llm_model.openai_api_base)
         elif isinstance(self.llm_model, DeepSeek):
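With the in-package OpenAI wrapper dropped, the default-embedder branch now keys off langchain_openai.ChatOpenAI. Below is a minimal sketch of what that branch amounts to, assuming OpenAIEmbeddings is also imported from langchain_openai; the model name and API key are placeholders, not values taken from this diff.

from langchain_openai import ChatOpenAI, OpenAIEmbeddings

# Placeholder model name and key, for illustration only.
llm_model = ChatOpenAI(model="gpt-4o-mini", api_key="YOUR_API_KEY")

if isinstance(llm_model, ChatOpenAI):
    # Reuse the chat model's credentials and base URL for the embedder,
    # mirroring the updated isinstance branch in _create_default_embedder.
    embedder = OpenAIEmbeddings(api_key=llm_model.openai_api_key,
                                base_url=llm_model.openai_api_base)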