7 files changed: +69 -17 lines changed
[file 1/7: filename not shown]

@@ -36,14 +36,24 @@ def _create_graph(self):
         parse_node = ParseNode(
             input="doc",
             output=["parsed_doc"],
+            node_config={
+                "chunk_size": self.model_token,
+            }
         )
         rag_node = RAGNode(
             input="user_prompt & (parsed_doc | doc)",
             output=["relevant_chunks"],
+            node_config={
+                "llm_model": self.llm_model,
+                "embedder_model": self.embedder_model,
+            }
         )
         generate_answer_node = GenerateAnswerCSVNode(
             input="user_prompt & (relevant_chunks | parsed_doc | doc)",
             output=["answer"],
+            node_config={
+                "llm_model": self.llm_model,
+            }
         )

         return BaseGraph(
@@ -68,4 +78,4 @@ def run(self) -> str:
         inputs = {"user_prompt": self.prompt, self.input_key: self.source}
         self.final_state, self.execution_info = self.graph.execute(inputs)

-        return self.final_state.get("answer", "No answer found.")
+        return self.final_state.get("answer", "No answer found.")
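The hunk above is apparently the CSV scraper graph (note GenerateAnswerCSVNode). The change threads the graph's shared model handles into each node through a node_config dict at construction time, rather than having nodes reach back into the parent graph. A minimal sketch of the pattern, using a hypothetical MiniNode stand-in rather than the library's real base class:

    # MiniNode is a hypothetical stand-in for the library's node classes;
    # it illustrates the node_config pattern, not ScrapeGraphAI's implementation.
    class MiniNode:
        def __init__(self, input: str, output: list, node_config: dict = None):
            self.input = input
            self.output = output
            # Each node keeps its own reference to the shared configuration.
            self.node_config = node_config or {}

    llm_model = {"model": "gpt-3.5-turbo"}                # assumed shape
    embedder_model = {"model": "text-embedding-ada-002"}  # assumed shape

    rag_node = MiniNode(
        input="user_prompt & (parsed_doc | doc)",
        output=["relevant_chunks"],
        node_config={"llm_model": llm_model, "embedder_model": embedder_model},
    )
    print(rag_node.node_config["llm_model"])  # {'model': 'gpt-3.5-turbo'}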
[file 2/7: filename not shown]

@@ -61,16 +61,23 @@ def _create_graph(self) -> BaseGraph:
             input="doc",
             output=["parsed_doc"],
             node_config={
-                "chunk_size": self.model_token,
+                "chunk_size": self.model_token
             }
         )
         rag_node = RAGNode(
             input="user_prompt & (parsed_doc | doc)",
             output=["relevant_chunks"],
+            node_config={
+                "llm_model": self.llm_model,
+                "embedder_model": self.embedder_model
+            }
         )
         generate_answer_node = GenerateAnswerNode(
             input="user_prompt & (relevant_chunks | parsed_doc | doc)",
             output=["answer"],
+            node_config={
+                "llm": self.llm_model
+            }
         )

         return BaseGraph(
@@ -99,4 +106,4 @@ def run(self) -> str:
         inputs = {"user_prompt": self.prompt, self.input_key: self.source}
         self.final_state, self.execution_info = self.graph.execute(inputs)

-        return self.final_state.get("answer", "No answer found.")
+        return self.final_state.get("answer", "No answer found.")
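One detail worth noting: this file passes the model under the key "llm", while every other hunk in the change uses "llm_model". If the receiving node looks the value up under a single fixed key, one of the two spellings will be silently ignored. A hedged sketch of a lookup that tolerates both (illustrative only, not library code):

    def get_llm(node_config: dict):
        # Return the model handle under either spelling of the key.
        for key in ("llm_model", "llm"):
            if key in node_config:
                return node_config[key]
        raise KeyError("node_config needs 'llm_model' (or the 'llm' variant)")

    print(get_llm({"llm": "some-model-handle"}))  # some-model-handle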
[file 3/7: filename not shown]

@@ -71,10 +71,15 @@ def _create_graph(self) -> BaseGraph:
         rag_node = RAGNode(
             input="user_prompt & (parsed_doc | doc)",
             output=["relevant_chunks"],
+            node_config={
+                "llm_model": self.llm_model,
+                "embedder_model": self.embedder_model
+            }
         )
         generate_scraper_node = GenerateScraperNode(
             input="user_prompt & (relevant_chunks | parsed_doc | doc)",
             output=["answer"],
+            node_config={"llm_model": self.llm_model},
             library=self.library,
             website=self.source
         )
@@ -105,4 +110,4 @@ def run(self) -> str:
         inputs = {"user_prompt": self.prompt, self.input_key: self.source}
         self.final_state, self.execution_info = self.graph.execute(inputs)

-        return self.final_state.get("answer", "No answer found.")
+        return self.final_state.get("answer", "No answer found.")
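This hunk is apparently the script-creator graph (note GenerateScraperNode with library and website arguments). Shared settings travel in node_config while node-specific values stay as ordinary keyword arguments. A sketch of how such a signature might look, with hypothetical names:

    class GenerateScraperSketch:
        # Hypothetical node mixing shared config with node-specific kwargs.
        def __init__(self, input: str, output: list, node_config: dict = None,
                     library: str = None, website: str = None):
            self.node_config = node_config or {}  # shared: model handles, etc.
            self.library = library                # node-specific: scraping library
            self.website = website                # node-specific: target page

    node = GenerateScraperSketch(
        input="user_prompt & (relevant_chunks | parsed_doc | doc)",
        output=["answer"],
        node_config={"llm_model": "gpt-3.5-turbo"},  # assumed handle
        library="beautifulsoup4",
        website="https://example.com",
    )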
[file 4/7: filename not shown]

@@ -49,25 +49,35 @@ def _create_graph(self) -> BaseGraph:
         search_internet_node = SearchInternetNode(
             input="user_prompt",
             output=["url"],
+            node_config={
+                "llm_model": self.llm_model
+            }
         )
         fetch_node = FetchNode(
             input="url | local_dir",
-            output=["doc"],
+            output=["doc"]
         )
         parse_node = ParseNode(
             input="doc",
             output=["parsed_doc"],
             node_config={
-                "chunk_size": self.model_token,
+                "chunk_size": self.model_token
             }
         )
         rag_node = RAGNode(
             input="user_prompt & (parsed_doc | doc)",
             output=["relevant_chunks"],
+            node_config={
+                "llm_model": self.llm_model,
+                "embedder_model": self.embedder_model
+            }
         )
         generate_answer_node = GenerateAnswerNode(
             input="user_prompt & (relevant_chunks | parsed_doc | doc)",
             output=["answer"],
+            node_config={
+                "llm_model": self.llm_model
+            }
         )

         return BaseGraph(
@@ -98,4 +108,4 @@ def run(self) -> str:
         inputs = {"user_prompt": self.prompt}
         self.final_state, self.execution_info = self.graph.execute(inputs)

-        return self.final_state.get("answer", "No answer found.")
+        return self.final_state.get("answer", "No answer found.")
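This hunk is apparently the search graph (note SearchInternetNode, and that run() builds its inputs from the prompt alone): a five-stage pipeline of search, fetch, parse, retrieve, and answer, each stage now carrying its own node_config. A toy linear runner showing the data flow (stand-in lambdas, not the library's BaseGraph):

    # Each stage maps state -> state, mirroring
    # search -> fetch -> parse -> rag -> generate_answer.
    pipeline = [
        lambda s: {**s, "url": "https://search.example/?q=" + s["user_prompt"]},  # SearchInternetNode
        lambda s: {**s, "doc": "fetched page for " + s["url"]},                   # FetchNode
        lambda s: {**s, "parsed_doc": s["doc"].split()},                          # ParseNode (chunking stand-in)
        lambda s: {**s, "relevant_chunks": s["parsed_doc"][:3]},                  # RAGNode (retrieval stand-in)
        lambda s: {**s, "answer": " ".join(s["relevant_chunks"])},                # GenerateAnswerNode
    ]

    state = {"user_prompt": "scrapegraphai"}
    for stage in pipeline:
        state = stage(state)
    print(state["answer"])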
[file 5/7: filename not shown]

@@ -57,22 +57,29 @@ def _create_graph(self) -> BaseGraph:
         """
         fetch_node = FetchNode(
             input="url | local_dir",
-            output=["doc"],
+            output=["doc"]
         )
         parse_node = ParseNode(
             input="doc",
             output=["parsed_doc"],
             node_config={
-                "chunk_size": self.model_token,
+                "chunk_size": self.model_token
             }
         )
         rag_node = RAGNode(
             input="user_prompt & (parsed_doc | doc)",
             output=["relevant_chunks"],
+            node_config={
+                "llm_model": self.llm_model,
+                "embedder_model": self.embedder_model
+            }
         )
         generate_answer_node = GenerateAnswerNode(
             input="user_prompt & (relevant_chunks | parsed_doc | doc)",
             output=["answer"],
+            node_config={
+                "llm_model": self.llm_model
+            }
         )

         return BaseGraph(
@@ -101,4 +108,4 @@ def run(self) -> str:
         inputs = {"user_prompt": self.prompt, self.input_key: self.source}
         self.final_state, self.execution_info = self.graph.execute(inputs)

-        return self.final_state.get("answer", "No answer found.")
+        return self.final_state.get("answer", "No answer found.")
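Across all of these graphs a node's input is an expression such as "user_prompt & (parsed_doc | doc)". By appearance, "&" joins required groups and "|" falls back to the first key present in the state, which lets the RAG node consume the parsed document when it exists and the raw one otherwise. A toy resolver under that assumed semantics (the library's real parser may differ):

    def resolve(expr: str, state: dict) -> list:
        # Resolve one level of 'a & (b | c)': '&' separates required groups;
        # within a group, '|' picks the first key found in the state.
        # Assumed semantics, not library code.
        keys = []
        for group in expr.split("&"):
            alternatives = group.strip().strip("()").split("|")
            for alt in (a.strip() for a in alternatives):
                if alt in state:
                    keys.append(alt)
                    break
        return keys

    state = {"user_prompt": "q", "doc": "raw page"}
    print(resolve("user_prompt & (parsed_doc | doc)", state))  # ['user_prompt', 'doc']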
[file 6/7: filename not shown]

@@ -56,28 +56,34 @@ def _create_graph(self) -> BaseGraph:

         fetch_node = FetchNode(
             input="url | local_dir",
-            output=["doc"],
+            output=["doc"]
         )
         parse_node = ParseNode(
             input="doc",
             output=["parsed_doc"],
             node_config={
-                "chunk_size": self.model_token,
+                "chunk_size": self.model_token
             }
         )
         rag_node = RAGNode(
             input="user_prompt & (parsed_doc | doc)",
             output=["relevant_chunks"],
+            node_config={
+                "llm_model": self.llm_model,
+                "embedder_model": self.embedder_model }
         )
         generate_answer_node = GenerateAnswerNode(
             input="user_prompt & (relevant_chunks | parsed_doc | doc)",
             output=["answer"],
+            node_config={
+                "llm_model": self.llm_model
+            }
         )
         text_to_speech_node = TextToSpeechNode(
             input="answer",
             output=["audio"],
             node_config={
-                "tts_model": OpenAITextToSpeech(self.config["tts_model"]),
+                "tts_model": OpenAITextToSpeech(self.config["tts_model"])
             }
         )

@@ -116,4 +122,4 @@ def run(self) -> str:
             "output_path", "output.mp3"))
         print(f"Audio saved to {self.config.get('output_path', 'output.mp3')}")

-        return self.final_state.get("answer", "No answer found.")
+        return self.final_state.get("answer", "No answer found.")
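This is apparently the speech graph: the TextToSpeechNode wraps an OpenAITextToSpeech instance built from the user's config, and the second hunk shows run() saving the audio to config.get('output_path', 'output.mp3'). A sketch of that save step with a hypothetical save_audio helper; "tts_model" and "output_path" are the config keys visible in the diff, everything else is assumed:

    def save_audio(audio: bytes, config: dict) -> str:
        # Write the generated audio to the configured path, mirroring the
        # tail of run() above (hypothetical helper, not library code).
        path = config.get("output_path", "output.mp3")
        with open(path, "wb") as f:
            f.write(audio)
        print(f"Audio saved to {path}")
        return path

    save_audio(b"\x00fake-mp3-bytes", {"tts_model": "tts-1"})  # writes output.mp3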
[file 7/7: filename not shown]

@@ -57,22 +57,29 @@ def _create_graph(self) -> BaseGraph:

         fetch_node = FetchNode(
             input="xml_dir",
-            output=["doc"],
+            output=["doc"]
         )
         parse_node = ParseNode(
             input="doc",
             output=["parsed_doc"],
             node_config={
-                "chunk_size": self.model_token,
+                "chunk_size": self.model_token
             }
         )
         rag_node = RAGNode(
             input="user_prompt & (parsed_doc | doc)",
             output=["relevant_chunks"],
+            node_config={
+                "llm_model": self.llm_model,
+                "embedder_model": self.embedder_model
+            }
         )
         generate_answer_node = GenerateAnswerNode(
             input="user_prompt & (relevant_chunks | parsed_doc | doc)",
             output=["answer"],
+            node_config={
+                "llm_model": self.llm_model
+            }
         )

         return BaseGraph(
@@ -101,4 +108,4 @@ def run(self) -> str:
         inputs = {"user_prompt": self.prompt, self.input_key: self.source}
         self.final_state, self.execution_info = self.graph.execute(inputs)

-        return self.final_state.get("answer", "No answer found.")
+        return self.final_state.get("answer", "No answer found.")
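Finally, the XML scraper graph (note the xml_dir input on FetchNode). Every run() in this change follows the same contract: build the inputs dict from the prompt plus the source key, execute the graph, and return final_state.get("answer", "No answer found."). A toy mirror of that contract with a hypothetical class name and a stubbed execute step:

    class XMLScraperGraphSketch:
        # Hypothetical stand-in for the graph class this file defines.
        input_key = "xml_dir"  # assumed from the FetchNode input above

        def __init__(self, prompt: str, source: str):
            self.prompt = prompt
            self.source = source

        def run(self) -> str:
            inputs = {"user_prompt": self.prompt, self.input_key: self.source}
            # Stand-in for self.graph.execute(inputs):
            final_state = {"answer": "stub answer for " + inputs[self.input_key]}
            return final_state.get("answer", "No answer found.")

    print(XMLScraperGraphSketch("List the book titles", "books.xml").run())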