
Commit 2ef182a

fix!: increases output token limit for claude-3-7 to 12k (#895)
# Motivation
<!-- Why is this change necessary? -->

# Content
<!-- Please include a summary of the change -->

# Testing
<!-- How was the change tested? -->

# Please check the following before marking your PR as ready for review
- [ ] I have added tests for my changes
- [ ] I have updated the documentation or added new documentation as needed
1 parent 0ffdde0 commit 2ef182a

File tree

  • src/codegen/extensions/langchain

1 file changed: +3 -2 lines

src/codegen/extensions/langchain/llm.py

Lines changed: 3 additions & 2 deletions
```diff
@@ -89,7 +89,8 @@ def _get_model(self) -> BaseChatModel:
             if not os.getenv("ANTHROPIC_API_KEY"):
                 msg = "ANTHROPIC_API_KEY not found in environment. Please set it in your .env file or environment variables."
                 raise ValueError(msg)
-            return ChatAnthropic(**self._get_model_kwargs(), max_tokens=8192, max_retries=10, timeout=1000)
+            max_tokens = 12000 if "claude-3-7" in self.model_name else 8192
+            return ChatAnthropic(**self._get_model_kwargs(), max_tokens=max_tokens, max_retries=10, timeout=1000)

         elif self.model_provider == "openai":
             if not os.getenv("OPENAI_API_KEY"):
@@ -101,7 +102,7 @@ def _get_model(self) -> BaseChatModel:
             if not os.getenv("XAI_API_KEY"):
                 msg = "XAI_API_KEY not found in environment. Please set it in your .env file or environment variables."
                 raise ValueError(msg)
-            return ChatXAI(**self._get_model_kwargs(), max_tokens=8192)
+            return ChatXAI(**self._get_model_kwargs(), max_tokens=12000)

         msg = f"Unknown model provider: {self.model_provider}. Must be one of: anthropic, openai, xai"
         raise ValueError(msg)
```
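For quick reference, the behavioral core of this change is the model-name check in the Anthropic branch: models whose name contains "claude-3-7" now get a 12,000-token output limit, everything else keeps 8192. Below is a minimal, self-contained sketch of that selection logic; `pick_max_tokens` is a hypothetical helper named only for this illustration (in the actual code the expression lives inline in `_get_model`), and the model name strings are just example inputs.

```python
# Sketch of the token-limit selection introduced in this commit.
# `pick_max_tokens` is a hypothetical helper for illustration only.
def pick_max_tokens(model_name: str) -> int:
    # claude-3-7 models get the raised 12k output limit; other models keep 8192.
    return 12000 if "claude-3-7" in model_name else 8192


assert pick_max_tokens("claude-3-7-sonnet-latest") == 12000
assert pick_max_tokens("claude-3-5-sonnet-latest") == 8192
```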

0 commit comments
