Skip to content

Commit 8d6c0b7

Browse files
committed
Update generate_answer_node.py
1 parent 8b8d8f0 commit 8d6c0b7

File tree

1 file changed

+11
-11
lines changed

1 file changed

+11
-11
lines changed

scrapegraphai/nodes/generate_answer_node.py

Lines changed: 11 additions & 11 deletions
Original file line number · Diff line number · Diff line change
@@ -94,22 +94,22 @@ def execute(self, state: dict) -> dict:
9494
format_instructions = output_parser.get_format_instructions()
9595

9696
if isinstance(self.llm_model, ChatOpenAI) and not self.script_creator or self.force and not self.script_creator or self.is_md_scraper:
97-
TEMPLATE_NO_CHUNKS_prompt = TEMPLATE_NO_CHUNKS_MD
98-
TEMPLATE_CHUNKS_prompt = TEMPLATE_CHUNKS_MD
99-
TEMPLATE_MERGE_prompt = TEMPLATE_MERGE_MD
97+
template_no_chunks_prompt = TEMPLATE_NO_CHUNKS_MD
98+
template_chunks_prompt = TEMPLATE_CHUNKS_MD
99+
template_merge_prompt = TEMPLATE_MERGE_MD
100100
else:
101-
TEMPLATE_NO_CHUNKS_prompt = TEMPLATE_NO_CHUNKS
102-
TEMPLATE_CHUNKS_prompt = TEMPLATE_CHUNKS
103-
TEMPLATE_MERGE_prompt = TEMPLATE_MERGE
101+
template_no_chunks_prompt = TEMPLATE_NO_CHUNKS
102+
template_chunks_prompt = TEMPLATE_CHUNKS
103+
template_merge_prompt = TEMPLATE_MERGE
104104

105105
if self.additional_info is not None:
106-
TEMPLATE_NO_CHUNKS_prompt = self.additional_info + TEMPLATE_NO_CHUNKS_prompt
107-
TEMPLATE_CHUNKS_prompt = self.additional_info + TEMPLATE_CHUNKS_prompt
108-
TEMPLATE_MERGE_prompt = self.additional_info + TEMPLATE_MERGE_prompt
106+
template_no_chunks_prompt = self.additional_info + template_no_chunks_prompt
107+
template_chunks_prompt = self.additional_info + template_chunks_prompt
108+
template_merge_prompt = self.additional_info + template_merge_prompt
109109

110110
if len(doc) == 1:
111111
prompt = PromptTemplate(
112-
template=TEMPLATE_NO_CHUNKS_prompt,
112+
template=template_no_chunks_prompt ,
113113
input_variables=["question"],
114114
partial_variables={"context": doc,
115115
"format_instructions": format_instructions})
@@ -136,7 +136,7 @@ def execute(self, state: dict) -> dict:
136136
batch_results = async_runner.invoke({"question": user_prompt})
137137

138138
merge_prompt = PromptTemplate(
139-
template = TEMPLATE_MERGE_prompt,
139+
template = template_merge_prompt ,
140140
input_variables=["context", "question"],
141141
partial_variables={"format_instructions": format_instructions},
142142
)

0 commit comments

Comments (0)