@@ -57,7 +57,7 @@ def __init__(
         self.is_md_scraper = node_config.get("is_md_scraper", False)
         self.additional_info = node_config.get("additional_info")
 
-    async def execute(self, state: dict) -> dict:
+    def execute(self, state: dict) -> dict:
         """
         Executes the GenerateAnswerNode.
 
@@ -123,7 +123,7 @@ async def execute(self, state: dict) -> dict:
         chain = prompt | self.llm_model
         if output_parser:
             chain = chain | output_parser
-        answer = await chain.ainvoke({"question": user_prompt})
+        answer = chain.invoke({"question": user_prompt})
 
         state.update({self.output[0]: answer})
         return state
@@ -143,7 +143,7 @@ async def execute(self, state: dict) -> dict:
             chains_dict[chain_name] = chains_dict[chain_name] | output_parser
 
         async_runner = RunnableParallel(**chains_dict)
-        batch_results = await async_runner.ainvoke({"question": user_prompt})
+        batch_results = async_runner.invoke({"question": user_prompt})
 
         merge_prompt = PromptTemplate(
             template=template_merge_prompt,
@@ -154,7 +154,7 @@ async def execute(self, state: dict) -> dict:
         merge_chain = merge_prompt | self.llm_model
         if output_parser:
             merge_chain = merge_chain | output_parser
-        answer = await merge_chain.ainvoke({"context": batch_results, "question": user_prompt})
+        answer = merge_chain.invoke({"context": batch_results, "question": user_prompt})
 
         state.update({self.output[0]: answer})
         return state
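
For context, here is a minimal, self-contained sketch of the synchronous LangChain pattern this diff moves to. FakeListLLM stands in for the node's real llm_model, and the prompt strings, chunk count, and question are illustrative placeholders, not ScrapeGraphAI's actual templates.

    # Sketch of the sync pattern adopted above; FakeListLLM and the prompts
    # are placeholders, not the real model or templates of GenerateAnswerNode.
    from langchain_community.llms.fake import FakeListLLM
    from langchain_core.output_parsers import StrOutputParser
    from langchain_core.prompts import PromptTemplate
    from langchain_core.runnables import RunnableParallel

    llm_model = FakeListLLM(responses=["stubbed model output"])
    output_parser = StrOutputParser()

    # One chain per document chunk, composed with `|` exactly as in the node;
    # only the call site changes from `await ...ainvoke()` to `.invoke()`.
    chains_dict = {
        f"chunk{i}": PromptTemplate.from_template(
            "Answer using chunk " + str(i) + ": {question}"
        )
        | llm_model
        | output_parser
        for i in range(2)
    }

    # RunnableParallel still fans out over the chunks; the blocking .invoke()
    # removes the need for an event loop, so execute() can be a plain method.
    batch_results = RunnableParallel(**chains_dict).invoke(
        {"question": "What does the page say?"}
    )

    # The merge step keeps the same prompt | model | parser shape.
    merge_chain = (
        PromptTemplate.from_template("Merge {context} to answer: {question}")
        | llm_model
        | output_parser
    )
    answer = merge_chain.invoke(
        {"context": batch_results, "question": "What does the page say?"}
    )
    print(answer)  # -> "stubbed model output"

Note that RunnableParallel.invoke() still runs the per-chunk chains concurrently (LangChain dispatches the steps on a thread pool), so dropping async here trades the event loop for threads rather than serializing the chunk calls.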