Skip to content

Commit 16f53c5

Browse files
committed
add example custom search graph
1 parent 84fcb44 commit 16f53c5

File tree

1 file changed

+105
-0
lines changed

1 file changed

+105
-0
lines changed
Lines changed: 105 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,105 @@
1+
"""
2+
Example of custom graph using existing nodes
3+
"""
4+
5+
import os
6+
from dotenv import load_dotenv
7+
from langchain_openai import OpenAIEmbeddings
8+
from scrapegraphai.models import OpenAI
9+
from scrapegraphai.graphs import BaseGraph
10+
from scrapegraphai.nodes import FetchNode, ParseNode, RAGNode, GenerateAnswerNode, SearchInternetNode
11+
load_dotenv()
12+
13+
# ************************************************
# Define the configuration for the graph
# ************************************************

# API key is expected in the environment (loaded above via load_dotenv()).
openai_key = os.getenv("OPENAI_APIKEY")
if not openai_key:
    # Fail fast with an actionable message instead of letting the OpenAI
    # client fail later with an opaque authentication error on a None key.
    raise ValueError(
        "OPENAI_APIKEY environment variable is not set; "
        "add it to your environment or .env file"
    )

graph_config = {
    "llm": {
        "api_key": openai_key,
        "model": "gpt-3.5-turbo",
    },
}

# ************************************************
# Define the graph nodes
# ************************************************

# LLM shared by the search, RAG and answer-generation nodes.
llm_model = OpenAI(graph_config["llm"])
# Embedding model used by the RAG node; reuses the same OpenAI API key.
embedder = OpenAIEmbeddings(api_key=llm_model.openai_api_key)
32+
33+
search_internet_node = SearchInternetNode(
34+
input="user_prompt",
35+
output=["url"],
36+
node_config={
37+
"llm_model": llm_model
38+
}
39+
)
40+
fetch_node = FetchNode(
41+
input="url | local_dir",
42+
output=["doc"],
43+
node_config={
44+
"verbose": True,
45+
"headless": True,
46+
}
47+
)
48+
parse_node = ParseNode(
49+
input="doc",
50+
output=["parsed_doc"],
51+
node_config={
52+
"chunk_size": 4096,
53+
"verbose": True,
54+
}
55+
)
56+
rag_node = RAGNode(
57+
input="user_prompt & (parsed_doc | doc)",
58+
output=["relevant_chunks"],
59+
node_config={
60+
"llm_model": llm_model,
61+
"embedder_model": embedder,
62+
"verbose": True,
63+
}
64+
)
65+
generate_answer_node = GenerateAnswerNode(
66+
input="user_prompt & (relevant_chunks | parsed_doc | doc)",
67+
output=["answer"],
68+
node_config={
69+
"llm_model": llm_model,
70+
"verbose": True,
71+
}
72+
)
73+
74+
# ************************************************
75+
# Create the graph by defining the connections
76+
# ************************************************
77+
78+
graph = BaseGraph(
79+
nodes=[
80+
search_internet_node,
81+
fetch_node,
82+
parse_node,
83+
rag_node,
84+
generate_answer_node,
85+
],
86+
edges=[
87+
(search_internet_node, fetch_node),
88+
(fetch_node, parse_node),
89+
(parse_node, rag_node),
90+
(rag_node, generate_answer_node)
91+
],
92+
entry_point=search_internet_node
93+
)
94+
95+
# ************************************************
96+
# Execute the graph
97+
# ************************************************
98+
99+
result, execution_info = graph.execute({
100+
"user_prompt": "List me all the typical Chioggia dishes."
101+
})
102+
103+
# get the answer from the result
104+
result = result.get("answer", "No answer found.")
105+
print(result)

0 commit comments

Comments
 (0)