Skip to content

Commit 6cbd84f

Browse files
committed
feat(burr-bridge): BurrBridge class to integrate inside BaseGraph
1 parent 0b5cdd4 commit 6cbd84f

File tree

11 files changed

+668
-173
lines changed

11 files changed

+668
-173
lines changed

.python-version

Lines changed: 0 additions & 1 deletion
This file was deleted.
Lines changed: 109 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,109 @@
"""
Example of custom graph using existing nodes.

Builds a four-stage BaseGraph (fetch -> parse -> RAG -> answer) against
OpenAI, with Burr tracking enabled, and runs it once on an example URL.
"""

import os

from dotenv import load_dotenv
from langchain_openai import OpenAIEmbeddings

from scrapegraphai.models import OpenAI
from scrapegraphai.graphs import BaseGraph
from scrapegraphai.nodes import FetchNode, ParseNode, RAGNode, GenerateAnswerNode

load_dotenv()

# ************************************************
# Define the configuration for the graph
# ************************************************

openai_key = os.getenv("OPENAI_APIKEY")
# Fail fast with a clear message instead of letting the OpenAI client
# raise a confusing auth error later when the key is missing.
if not openai_key:
    raise RuntimeError(
        "OPENAI_APIKEY environment variable is not set; "
        "export it or add it to your .env file"
    )

graph_config = {
    "llm": {
        "api_key": openai_key,
        "model": "gpt-3.5-turbo",
        "temperature": 0,
        "streaming": False
    },
}

# ************************************************
# Define the graph nodes
# ************************************************

llm_model = OpenAI(graph_config["llm"])
# Reuse the same API key for the embeddings client as for the chat model.
embedder = OpenAIEmbeddings(api_key=llm_model.openai_api_key)

# define the nodes for the graph

fetch_node = FetchNode(
    input="url | local_dir",
    output=["doc", "link_urls", "img_urls"],
    node_config={
        "verbose": True,
        "headless": True,
    }
)
parse_node = ParseNode(
    input="doc",
    output=["parsed_doc"],
    node_config={
        "chunk_size": 4096,
        "verbose": True,
    }
)
rag_node = RAGNode(
    # Falls back to the raw doc when no parsed version is available.
    input="user_prompt & (parsed_doc | doc)",
    output=["relevant_chunks"],
    node_config={
        "llm_model": llm_model,
        "embedder_model": embedder,
        "verbose": True,
    }
)
generate_answer_node = GenerateAnswerNode(
    input="user_prompt & (relevant_chunks | parsed_doc | doc)",
    output=["answer"],
    node_config={
        "llm_model": llm_model,
        "verbose": True,
    }
)

# ************************************************
# Create the graph by defining the connections
# ************************************************

graph = BaseGraph(
    nodes=[
        fetch_node,
        parse_node,
        rag_node,
        generate_answer_node,
    ],
    edges=[
        (fetch_node, parse_node),
        (parse_node, rag_node),
        (rag_node, generate_answer_node)
    ],
    entry_point=fetch_node,
    # Enable Burr-based execution tracking (see BurrBridge in BaseGraph).
    use_burr=True,
    burr_config={
        "app_instance_id": "custom_graph_openai",
        "inputs": {
            "llm_model": graph_config["llm"].get("model", "gpt-3.5-turbo"),
        }
    }
)

# ************************************************
# Execute the graph
# ************************************************

result, execution_info = graph.execute({
    "user_prompt": "Describe the content",
    "url": "https://example.com/"
})

# get the answer from the result
result = result.get("answer", "No answer found.")
print(result)

pyproject.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -29,6 +29,7 @@ dependencies = [
2929
"playwright==1.43.0",
3030
"google==3.0.0",
3131
"yahoo-search-py==0.3",
32+
"burr[start]"
3233
]
3334

3435
license = "MIT"

0 commit comments

Comments
 (0)