Skip to content

Commit a53e95c

Browse files
committed
Corrected graphs to use common params
1 parent 8d0e109 commit a53e95c

File tree

7 files changed

+69
-17
lines changed

7 files changed

+69
-17
lines changed

scrapegraphai/graphs/csv_scraper_graph.py

Lines changed: 11 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -36,14 +36,24 @@ def _create_graph(self):
3636
parse_node = ParseNode(
3737
input="doc",
3838
output=["parsed_doc"],
39+
node_config={
40+
"chunk_size": self.model_token,
41+
}
3942
)
4043
rag_node = RAGNode(
4144
input="user_prompt & (parsed_doc | doc)",
4245
output=["relevant_chunks"],
46+
node_config={
47+
"llm_model": self.llm_model,
48+
"embedder_model": self.embedder_model,
49+
}
4350
)
4451
generate_answer_node = GenerateAnswerCSVNode(
4552
input="user_prompt & (relevant_chunks | parsed_doc | doc)",
4653
output=["answer"],
54+
node_config={
55+
"llm_model": self.llm_model,
56+
}
4757
)
4858

4959
return BaseGraph(
@@ -68,4 +78,4 @@ def run(self) -> str:
6878
inputs = {"user_prompt": self.prompt, self.input_key: self.source}
6979
self.final_state, self.execution_info = self.graph.execute(inputs)
7080

71-
return self.final_state.get("answer", "No answer found.")
81+
return self.final_state.get("answer", "No answer found.")

scrapegraphai/graphs/json_scraper_graph.py

Lines changed: 9 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -61,16 +61,23 @@ def _create_graph(self) -> BaseGraph:
6161
input="doc",
6262
output=["parsed_doc"],
6363
node_config={
64-
"chunk_size": self.model_token,
64+
"chunk_size": self.model_token
6565
}
6666
)
6767
rag_node = RAGNode(
6868
input="user_prompt & (parsed_doc | doc)",
6969
output=["relevant_chunks"],
70+
node_config={
71+
"llm_model": self.llm_model,
72+
"embedder_model": self.embedder_model
73+
}
7074
)
7175
generate_answer_node = GenerateAnswerNode(
7276
input="user_prompt & (relevant_chunks | parsed_doc | doc)",
7377
output=["answer"],
78+
node_config={
79+
"llm": self.llm_model
80+
}
7481
)
7582

7683
return BaseGraph(
@@ -99,4 +106,4 @@ def run(self) -> str:
99106
inputs = {"user_prompt": self.prompt, self.input_key: self.source}
100107
self.final_state, self.execution_info = self.graph.execute(inputs)
101108

102-
return self.final_state.get("answer", "No answer found.")
109+
return self.final_state.get("answer", "No answer found.")

scrapegraphai/graphs/script_creator_graph.py

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -71,10 +71,15 @@ def _create_graph(self) -> BaseGraph:
7171
rag_node = RAGNode(
7272
input="user_prompt & (parsed_doc | doc)",
7373
output=["relevant_chunks"],
74+
node_config={
75+
"llm_model": self.llm_model,
76+
"embedder_model": self.embedder_model
77+
}
7478
)
7579
generate_scraper_node = GenerateScraperNode(
7680
input="user_prompt & (relevant_chunks | parsed_doc | doc)",
7781
output=["answer"],
82+
node_config={"llm_model": self.llm_model},
7883
library=self.library,
7984
website=self.source
8085
)
@@ -105,4 +110,4 @@ def run(self) -> str:
105110
inputs = {"user_prompt": self.prompt, self.input_key: self.source}
106111
self.final_state, self.execution_info = self.graph.execute(inputs)
107112

108-
return self.final_state.get("answer", "No answer found.")
113+
return self.final_state.get("answer", "No answer found.")

scrapegraphai/graphs/search_graph.py

Lines changed: 13 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -49,25 +49,35 @@ def _create_graph(self) -> BaseGraph:
4949
search_internet_node = SearchInternetNode(
5050
input="user_prompt",
5151
output=["url"],
52+
node_config={
53+
"llm_model": self.llm_model
54+
}
5255
)
5356
fetch_node = FetchNode(
5457
input="url | local_dir",
55-
output=["doc"],
58+
output=["doc"]
5659
)
5760
parse_node = ParseNode(
5861
input="doc",
5962
output=["parsed_doc"],
6063
node_config={
61-
"chunk_size": self.model_token,
64+
"chunk_size": self.model_token
6265
}
6366
)
6467
rag_node = RAGNode(
6568
input="user_prompt & (parsed_doc | doc)",
6669
output=["relevant_chunks"],
70+
node_config={
71+
"llm_model": self.llm_model,
72+
"embedder_model": self.embedder_model
73+
}
6774
)
6875
generate_answer_node = GenerateAnswerNode(
6976
input="user_prompt & (relevant_chunks | parsed_doc | doc)",
7077
output=["answer"],
78+
node_config={
79+
"llm_model": self.llm_model
80+
}
7181
)
7282

7383
return BaseGraph(
@@ -98,4 +108,4 @@ def run(self) -> str:
98108
inputs = {"user_prompt": self.prompt}
99109
self.final_state, self.execution_info = self.graph.execute(inputs)
100110

101-
return self.final_state.get("answer", "No answer found.")
111+
return self.final_state.get("answer", "No answer found.")

scrapegraphai/graphs/smart_scraper_graph.py

Lines changed: 10 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -57,22 +57,29 @@ def _create_graph(self) -> BaseGraph:
5757
"""
5858
fetch_node = FetchNode(
5959
input="url | local_dir",
60-
output=["doc"],
60+
output=["doc"]
6161
)
6262
parse_node = ParseNode(
6363
input="doc",
6464
output=["parsed_doc"],
6565
node_config={
66-
"chunk_size": self.model_token,
66+
"chunk_size": self.model_token
6767
}
6868
)
6969
rag_node = RAGNode(
7070
input="user_prompt & (parsed_doc | doc)",
7171
output=["relevant_chunks"],
72+
node_config={
73+
"llm_model": self.llm_model,
74+
"embedder_model": self.embedder_model
75+
}
7276
)
7377
generate_answer_node = GenerateAnswerNode(
7478
input="user_prompt & (relevant_chunks | parsed_doc | doc)",
7579
output=["answer"],
80+
node_config={
81+
"llm_model": self.llm_model
82+
}
7683
)
7784

7885
return BaseGraph(
@@ -101,4 +108,4 @@ def run(self) -> str:
101108
inputs = {"user_prompt": self.prompt, self.input_key: self.source}
102109
self.final_state, self.execution_info = self.graph.execute(inputs)
103110

104-
return self.final_state.get("answer", "No answer found.")
111+
return self.final_state.get("answer", "No answer found.")

scrapegraphai/graphs/speech_graph.py

Lines changed: 10 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -56,28 +56,34 @@ def _create_graph(self) -> BaseGraph:
5656

5757
fetch_node = FetchNode(
5858
input="url | local_dir",
59-
output=["doc"],
59+
output=["doc"]
6060
)
6161
parse_node = ParseNode(
6262
input="doc",
6363
output=["parsed_doc"],
6464
node_config={
65-
"chunk_size": self.model_token,
65+
"chunk_size": self.model_token
6666
}
6767
)
6868
rag_node = RAGNode(
6969
input="user_prompt & (parsed_doc | doc)",
7070
output=["relevant_chunks"],
71+
node_config={
72+
"llm_model": self.llm_model,
73+
"embedder_model": self.embedder_model }
7174
)
7275
generate_answer_node = GenerateAnswerNode(
7376
input="user_prompt & (relevant_chunks | parsed_doc | doc)",
7477
output=["answer"],
78+
node_config={
79+
"llm_model": self.llm_model
80+
}
7581
)
7682
text_to_speech_node = TextToSpeechNode(
7783
input="answer",
7884
output=["audio"],
7985
node_config={
80-
"tts_model": OpenAITextToSpeech(self.config["tts_model"]),
86+
"tts_model": OpenAITextToSpeech(self.config["tts_model"])
8187
}
8288
)
8389

@@ -116,4 +122,4 @@ def run(self) -> str:
116122
"output_path", "output.mp3"))
117123
print(f"Audio saved to {self.config.get('output_path', 'output.mp3')}")
118124

119-
return self.final_state.get("answer", "No answer found.")
125+
return self.final_state.get("answer", "No answer found.")

scrapegraphai/graphs/xml_scraper_graph.py

Lines changed: 10 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -57,22 +57,29 @@ def _create_graph(self) -> BaseGraph:
5757

5858
fetch_node = FetchNode(
5959
input="xml_dir",
60-
output=["doc"],
60+
output=["doc"]
6161
)
6262
parse_node = ParseNode(
6363
input="doc",
6464
output=["parsed_doc"],
6565
node_config={
66-
"chunk_size": self.model_token,
66+
"chunk_size": self.model_token
6767
}
6868
)
6969
rag_node = RAGNode(
7070
input="user_prompt & (parsed_doc | doc)",
7171
output=["relevant_chunks"],
72+
node_config={
73+
"llm_model": self.llm_model,
74+
"embedder_model": self.embedder_model
75+
}
7276
)
7377
generate_answer_node = GenerateAnswerNode(
7478
input="user_prompt & (relevant_chunks | parsed_doc | doc)",
7579
output=["answer"],
80+
node_config={
81+
"llm_model": self.llm_model
82+
}
7683
)
7784

7885
return BaseGraph(
@@ -101,4 +108,4 @@ def run(self) -> str:
101108
inputs = {"user_prompt": self.prompt, self.input_key: self.source}
102109
self.final_state, self.execution_info = self.graph.execute(inputs)
103110

104-
return self.final_state.get("answer", "No answer found.")
111+
return self.final_state.get("answer", "No answer found.")

0 commit comments

Comments (0)