
Commit a724968

removed rag node
1 parent d3e63d9 commit a724968

11 files changed: +21 -91 lines changed

scrapegraphai/graphs/csv_scraper_graph.py

Lines changed: 2 additions & 12 deletions
@@ -10,7 +10,6 @@
 
 from ..nodes import (
     FetchNode,
-    RAGNode,
     GenerateAnswerCSVNode
 )
 
@@ -37,14 +36,7 @@ def _create_graph(self):
             input="csv | csv_dir",
             output=["doc"],
         )
-        rag_node = RAGNode(
-            input="user_prompt & doc",
-            output=["relevant_chunks"],
-            node_config={
-                "llm_model": self.llm_model,
-                "embedder_model": self.embedder_model,
-            }
-        )
+
         generate_answer_node = GenerateAnswerCSVNode(
             input="user_prompt & (relevant_chunks | doc)",
             output=["answer"],
@@ -58,12 +50,10 @@ def _create_graph(self):
         return BaseGraph(
             nodes=[
                 fetch_node,
-                rag_node,
                 generate_answer_node,
             ],
             edges=[
-                (fetch_node, rag_node),
-                (rag_node, generate_answer_node)
+                (fetch_node, generate_answer_node)
             ],
             entry_point=fetch_node,
             graph_name=self.__class__.__name__
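
The same rewiring repeats in every graph file below: with the RAGNode gone, the fetch/parse output feeds the generate-answer node directly. A minimal sketch of the resulting CSV pipeline, reconstructed from the "+" side of this hunk (imports and the node_config passed to GenerateAnswerCSVNode are assumptions, since the hunk cuts off before them):

# Sketch only: the simplified two-node CSV pipeline after this commit.
# The node_config shape for GenerateAnswerCSVNode is assumed, not shown in the hunk.
from scrapegraphai.graphs.base_graph import BaseGraph
from scrapegraphai.nodes import FetchNode, GenerateAnswerCSVNode

def build_csv_pipeline(llm_model):
    # FetchNode loads the CSV (or a directory of CSVs) into "doc".
    fetch_node = FetchNode(input="csv | csv_dir", output=["doc"])
    # With no RAGNode producing "relevant_chunks", the answer node falls back to "doc".
    generate_answer_node = GenerateAnswerCSVNode(
        input="user_prompt & (relevant_chunks | doc)",
        output=["answer"],
        node_config={"llm_model": llm_model},  # assumed config keys
    )
    return BaseGraph(
        nodes=[fetch_node, generate_answer_node],
        edges=[(fetch_node, generate_answer_node)],
        entry_point=fetch_node,
        graph_name="CSVScraperGraph",
    )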

scrapegraphai/graphs/json_scraper_graph.py

Lines changed: 2 additions & 12 deletions
@@ -10,7 +10,6 @@
 
 from ..nodes import (
     FetchNode,
-    RAGNode,
     GenerateAnswerNode
 )
 
@@ -62,14 +61,7 @@ def _create_graph(self) -> BaseGraph:
             input="json | json_dir",
             output=["doc", "link_urls", "img_urls"],
         )
-        rag_node = RAGNode(
-            input="user_prompt & (parsed_doc | doc)",
-            output=["relevant_chunks"],
-            node_config={
-                "llm_model": self.llm_model,
-                "embedder_model": self.embedder_model
-            }
-        )
+
         generate_answer_node = GenerateAnswerNode(
             input="user_prompt & (relevant_chunks | parsed_doc | doc)",
             output=["answer"],
@@ -83,12 +75,10 @@ def _create_graph(self) -> BaseGraph:
         return BaseGraph(
             nodes=[
                 fetch_node,
-                rag_node,
                 generate_answer_node,
             ],
             edges=[
-                (fetch_node, rag_node),
-                (rag_node, generate_answer_node)
+                (fetch_node, generate_answer_node)
             ],
             entry_point=fetch_node,
             graph_name=self.__class__.__name__

scrapegraphai/graphs/markdown_scraper_graph.py

Lines changed: 2 additions & 12 deletions
@@ -3,7 +3,7 @@
 from pydantic import BaseModel
 from .base_graph import BaseGraph
 from .abstract_graph import AbstractGraph
-from ..nodes import FetchNode, ParseNode, RAGNode, GenerateAnswerNode
+from ..nodes import FetchNode, ParseNode, GenerateAnswerNode
 
 class MDScraperGraph(AbstractGraph):
     """
@@ -63,14 +63,6 @@ def _create_graph(self) -> BaseGraph:
                 "chunk_size": self.model_token
             }
         )
-        rag_node = RAGNode(
-            input="user_prompt & (parsed_doc | doc)",
-            output=["relevant_chunks"],
-            node_config={
-                "llm_model": self.llm_model,
-                "embedder_model": self.embedder_model
-            }
-        )
         generate_answer_node = GenerateAnswerNode(
             input="user_prompt & (relevant_chunks | parsed_doc | doc)",
             output=["answer"],
@@ -86,13 +78,11 @@ def _create_graph(self) -> BaseGraph:
             nodes=[
                 fetch_node,
                 parse_node,
-                rag_node,
                 generate_answer_node,
             ],
             edges=[
                 (fetch_node, parse_node),
-                (parse_node, rag_node),
-                (rag_node, generate_answer_node)
+                (parse_node, generate_answer_node)
             ],
             entry_point=fetch_node,
             graph_name=self.__class__.__name__

scrapegraphai/graphs/omni_scraper_graph.py

Lines changed: 3 additions & 13 deletions
@@ -12,7 +12,6 @@
     FetchNode,
     ParseNode,
     ImageToTextNode,
-    RAGNode,
     GenerateAnswerOmniNode
 )
 
@@ -89,14 +88,7 @@ def _create_graph(self) -> BaseGraph:
                 "max_images": self.max_images
             }
         )
-        rag_node = RAGNode(
-            input="user_prompt & (parsed_doc | doc)",
-            output=["relevant_chunks"],
-            node_config={
-                "llm_model": self.llm_model,
-                "embedder_model": self.embedder_model
-            }
-        )
+
         generate_answer_omni_node = GenerateAnswerOmniNode(
             input="user_prompt & (relevant_chunks | parsed_doc | doc) & img_desc",
             output=["answer"],
@@ -112,14 +104,12 @@ def _create_graph(self) -> BaseGraph:
                 fetch_node,
                 parse_node,
                 image_to_text_node,
-                rag_node,
                 generate_answer_omni_node,
             ],
             edges=[
                 (fetch_node, parse_node),
                 (parse_node, image_to_text_node),
-                (image_to_text_node, rag_node),
-                (rag_node, generate_answer_omni_node)
+                (image_to_text_node, generate_answer_omni_node)
             ],
             entry_point=fetch_node,
             graph_name=self.__class__.__name__
@@ -136,4 +126,4 @@ def run(self) -> str:
         inputs = {"user_prompt": self.prompt, self.input_key: self.source}
         self.final_state, self.execution_info = self.graph.execute(inputs)
 
-        return self.final_state.get("answer", "No answer found.")
+        return self.final_state.get("answer", "No answer found.")

scrapegraphai/graphs/pdf_scraper_graph.py

Lines changed: 1 addition & 12 deletions
@@ -12,7 +12,6 @@
 from ..nodes import (
     FetchNode,
     ParseNode,
-    RAGNode,
     GenerateAnswerPDFNode
 )
 
@@ -76,14 +75,6 @@ def _create_graph(self) -> BaseGraph:
             }
         )
 
-        rag_node = RAGNode(
-            input="user_prompt & (parsed_doc | doc)",
-            output=["relevant_chunks"],
-            node_config={
-                "llm_model": self.llm_model,
-                "embedder_model": self.embedder_model
-            }
-        )
         generate_answer_node_pdf = GenerateAnswerPDFNode(
             input="user_prompt & (relevant_chunks | doc)",
             output=["answer"],
@@ -98,13 +89,11 @@ def _create_graph(self) -> BaseGraph:
             nodes=[
                 fetch_node,
                 parse_node,
-                rag_node,
                 generate_answer_node_pdf,
             ],
             edges=[
                 (fetch_node, parse_node),
-                (parse_node, rag_node),
-                (rag_node, generate_answer_node_pdf)
+                (parse_node, generate_answer_node_pdf)
             ],
             entry_point=fetch_node,
             graph_name=self.__class__.__name__

scrapegraphai/graphs/smart_scraper_graph.py

Lines changed: 2 additions & 11 deletions
@@ -78,14 +78,7 @@ def _create_graph(self) -> BaseGraph:
                 "chunk_size": self.model_token
             }
         )
-        rag_node = RAGNode(
-            input="user_prompt & (parsed_doc | doc)",
-            output=["relevant_chunks"],
-            node_config={
-                "llm_model": self.llm_model,
-                "embedder_model": self.embedder_model
-            }
-        )
+
         generate_answer_node = GenerateAnswerNode(
             input="user_prompt & (relevant_chunks | parsed_doc | doc)",
             output=["answer"],
@@ -100,13 +93,11 @@ def _create_graph(self) -> BaseGraph:
             nodes=[
                 fetch_node,
                 parse_node,
-                rag_node,
                 generate_answer_node,
            ],
             edges=[
                 (fetch_node, parse_node),
-                (parse_node, rag_node),
-                (rag_node, generate_answer_node)
+                (parse_node, generate_answer_node)
             ],
             entry_point=fetch_node,
             graph_name=self.__class__.__name__

scrapegraphai/graphs/xml_scraper_graph.py

Lines changed: 2 additions & 12 deletions
@@ -10,7 +10,6 @@
 
 from ..nodes import (
     FetchNode,
-    RAGNode,
     GenerateAnswerNode
 )
 
@@ -64,14 +63,7 @@ def _create_graph(self) -> BaseGraph:
             input="xml | xml_dir",
             output=["doc", "link_urls", "img_urls"]
         )
-        rag_node = RAGNode(
-            input="user_prompt & doc",
-            output=["relevant_chunks"],
-            node_config={
-                "llm_model": self.llm_model,
-                "embedder_model": self.embedder_model
-            }
-        )
+
         generate_answer_node = GenerateAnswerNode(
             input="user_prompt & (relevant_chunks | doc)",
             output=["answer"],
@@ -85,12 +77,10 @@ def _create_graph(self) -> BaseGraph:
         return BaseGraph(
             nodes=[
                 fetch_node,
-                rag_node,
                 generate_answer_node,
             ],
             edges=[
-                (fetch_node, rag_node),
-                (rag_node, generate_answer_node)
+                (fetch_node, generate_answer_node)
             ],
             entry_point=fetch_node,
             graph_name=self.__class__.__name__

scrapegraphai/nodes/generate_answer_csv_node.py

Lines changed: 2 additions & 2 deletions
@@ -125,7 +125,7 @@ def execute(self, state):
                 template=template_no_chunks_csv_prompt,
                 input_variables=["question"],
                 partial_variables={
-                    "context": chunk.page_content,
+                    "context": chunk,
                     "format_instructions": format_instructions,
                 },
             )
@@ -137,7 +137,7 @@ def execute(self, state):
                 template=template_chunks_csv_prompt,
                 input_variables=["question"],
                 partial_variables={
-                    "context": chunk.page_content,
+                    "context": chunk,
                     "chunk_id": i + 1,
                     "format_instructions": format_instructions,
                 },

scrapegraphai/nodes/generate_answer_node.py

Lines changed: 2 additions & 2 deletions
@@ -115,7 +115,7 @@ def execute(self, state: dict) -> dict:
             prompt = PromptTemplate(
                 template=template_no_chunks_prompt,
                 input_variables=["question"],
-                partial_variables={"context": chunk.page_content,
+                partial_variables={"context": chunk,
                                    "format_instructions": format_instructions})
             chain = prompt | self.llm_model | output_parser
             answer = chain.invoke({"question": user_prompt})
@@ -124,7 +124,7 @@ def execute(self, state: dict) -> dict:
             prompt = PromptTemplate(
                 template=template_chunks_prompt,
                 input_variables=["question"],
-                partial_variables={"context": chunk.page_content,
+                partial_variables={"context": chunk,
                                    "chunk_id": i + 1,
                                    "format_instructions": format_instructions})
             # Dynamically name the chains based on their index
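
Because relevant chunks no longer arrive as langchain Document objects produced by a RAGNode, the generate-answer nodes now interpolate each chunk as a plain string instead of reading chunk.page_content. A hedged sketch of the new prompt construction (the template text and values below are illustrative, not the library's actual prompts):

# Illustrative only: the real templates live in scrapegraphai's prompt modules.
from langchain_core.prompts import PromptTemplate

template_chunks_prompt = (
    "Use only this content (chunk {chunk_id}) to answer:\n"
    "{context}\n{format_instructions}\nQuestion: {question}"
)

chunk = "Plain text produced by FetchNode/ParseNode; no Document wrapper."
prompt = PromptTemplate(
    template=template_chunks_prompt,
    input_variables=["question"],
    partial_variables={
        "context": chunk,          # was chunk.page_content before this commit
        "chunk_id": 1,
        "format_instructions": "Answer in JSON.",
    },
)
print(prompt.format(question="What does the chunk describe?"))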

scrapegraphai/nodes/generate_answer_omni_node.py

Lines changed: 2 additions & 2 deletions
@@ -110,7 +110,7 @@ def execute(self, state: dict) -> dict:
                 template=template_no_chunk_omni_prompt,
                 input_variables=["question"],
                 partial_variables={
-                    "context": chunk.page_content,
+                    "context": chunk,
                     "format_instructions": format_instructions,
                     "img_desc": imag_desc,
                 },
@@ -123,7 +123,7 @@ def execute(self, state: dict) -> dict:
                 template=template_chunks_omni_prompt,
                 input_variables=["question"],
                 partial_variables={
-                    "context": chunk.page_content,
+                    "context": chunk,
                     "chunk_id": i + 1,
                     "format_instructions": format_instructions,
                 },

scrapegraphai/nodes/generate_answer_pdf_node.py

Lines changed: 1 addition & 1 deletion
@@ -124,7 +124,7 @@ def execute(self, state):
                 template=template_no_chunks_pdf_prompt,
                 input_variables=["question"],
                 partial_variables={
-                    "context":chunk.page_content,
+                    "context":chunk,
                     "format_instructions": format_instructions,
                 },
             )
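
None of these changes touch the graphs' public interface; callers still construct a graph and call run(). A usage sketch (the config keys and model name are assumptions based on scrapegraphai's documented configuration, not part of this diff):

# Hedged usage sketch: "llm" config keys and the model name are assumptions;
# check the project's README/examples for the exact configuration format.
from scrapegraphai.graphs import SmartScraperGraph

graph_config = {
    "llm": {
        "api_key": "YOUR_OPENAI_API_KEY",  # placeholder
        "model": "gpt-4o-mini",            # assumed model name
    },
}

smart_scraper = SmartScraperGraph(
    prompt="List all the article titles on the page",
    source="https://example.com",
    config=graph_config,
)

# Runs fetch -> parse -> generate_answer; there is no RAG step after this commit.
result = smart_scraper.run()
print(result)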
