Skip to content

Commit 35b994a

Browse files
authored
fix model_tokens not being used for ollama
I am passing the explicit model_tokens value from the user config as the default_token, so it will correctly fall back to the user's setting if the model is not found
1 parent 88e76ce commit 35b994a

File tree

1 file changed

+4
-3
lines changed

1 file changed

+4
-3
lines changed

scrapegraphai/graphs/abstract_graph.py

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -174,8 +174,9 @@ def handle_model(model_name, provider, token_key, default_token=8192):
174174

175175
elif "ollama" in llm_params["model"]:
176176
model_name = llm_params["model"].split("ollama/")[-1]
177-
token_key = model_name if "model_tokens" not in llm_params else llm_params["model_tokens"]
178-
return handle_model(model_name, "ollama", token_key)
177+
token_key = model_name if "model_tokens" not in llm_params else None
178+
explicit_model_tokens = 8192 if "model_tokens" not in llm_params else llm_params["model_tokens"]
179+
return handle_model(model_name, "ollama", token_key, explicit_model_tokens)
179180

180181
elif "claude-3-" in llm_params["model"]:
181182
return handle_model(llm_params["model"], "anthropic", "claude3")
@@ -271,4 +272,4 @@ def _create_graph(self):
271272
def run(self) -> str:
272273
"""
273274
Abstract method to execute the graph and return the result.
274-
"""
275+
"""

0 commit comments

Comments
 (0)