Skip to content

Commit ab00f23

Browse files
committed
fix(node): fixed generate answer node pydantic schema
1 parent 15421ef commit ab00f23

File tree

1 file changed

+4
-19
lines changed

1 file changed

+4
-19
lines changed

scrapegraphai/nodes/generate_answer_node.py

Lines changed: 4 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -93,35 +93,20 @@ def execute(self, state: dict) -> dict:
9393

9494
# Use tqdm to add progress bar
9595
for i, chunk in enumerate(tqdm(doc, desc="Processing chunks", disable=not self.verbose)):
96-
if self.node_config.get("schema", None) is None and len(doc) == 1:
96+
if len(doc) == 1:
9797
prompt = PromptTemplate(
9898
template=template_no_chunks,
9999
input_variables=["question"],
100100
partial_variables={"context": chunk.page_content,
101101
"format_instructions": format_instructions})
102-
elif self.node_config.get("schema", None) is not None and len(doc) == 1:
103-
prompt = PromptTemplate(
104-
template=template_no_chunks_with_schema,
105-
input_variables=["question"],
106-
partial_variables={"context": chunk.page_content,
107-
"format_instructions": format_instructions,
108-
"schema": self.node_config.get("schema", None)
109-
})
110-
elif self.node_config.get("schema", None) is None and len(doc) > 1:
102+
103+
else:
111104
prompt = PromptTemplate(
112105
template=template_chunks,
113106
input_variables=["question"],
114107
partial_variables={"context": chunk.page_content,
115108
"chunk_id": i + 1,
116109
"format_instructions": format_instructions})
117-
elif self.node_config.get("schema", None) is not None and len(doc) > 1:
118-
prompt = PromptTemplate(
119-
template=template_chunks_with_schema,
120-
input_variables=["question"],
121-
partial_variables={"context": chunk.page_content,
122-
"chunk_id": i + 1,
123-
"format_instructions": format_instructions,
124-
"schema": self.node_config.get("schema", None)})
125110

126111
# Dynamically name the chains based on their index
127112
chain_name = f"chunk{i+1}"
@@ -147,4 +132,4 @@ def execute(self, state: dict) -> dict:
147132

148133
# Update the state with the generated answer
149134
state.update({self.output[0]: answer})
150-
return state
135+
return state

0 commit comments

Comments (0)