Commit dccb893

Merge pull request #456 from ScrapeGraphAI/refactoring-of-search_link_node
fix: search link node
2 parents 2fa04b5 + 830daee

16 files changed: +683 −49 lines changed
Lines changed: 57 additions & 0 deletions
@@ -0,0 +1,57 @@
"""
Example of Search Graph
"""
import os
from dotenv import load_dotenv
from scrapegraphai.graphs import SearchGraph
from scrapegraphai.utils import convert_to_csv, convert_to_json, prettify_exec_info
from langchain_openai import AzureChatOpenAI
from langchain_openai import AzureOpenAIEmbeddings

# ************************************************
# Define the configuration for the graph
# ************************************************

load_dotenv()

llm_model_instance = AzureChatOpenAI(
    openai_api_version=os.environ["AZURE_OPENAI_API_VERSION"],
    azure_deployment=os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT_NAME"]
)

embedder_model_instance = AzureOpenAIEmbeddings(
    azure_deployment=os.environ["AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT_NAME"],
    openai_api_version=os.environ["AZURE_OPENAI_API_VERSION"],
)

# ************************************************
# Define the graph configuration with the model instances
# ************************************************

graph_config = {
    "llm": {"model_instance": llm_model_instance},
    "embeddings": {"model_instance": embedder_model_instance}
}

# ************************************************
# Create the SearchGraph instance and run it
# ************************************************

search_graph = SearchGraph(
    prompt="List me the best excursions near Trento",
    config=graph_config
)

result = search_graph.run()
print(result)

# ************************************************
# Get graph execution info
# ************************************************

graph_exec_info = search_graph.get_execution_info()
print(prettify_exec_info(graph_exec_info))

# Save to json and csv
convert_to_csv(result, "result")
convert_to_json(result, "result")
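The Azure example above reads its deployment names and API version from the environment. A minimal pre-flight check (a sketch, not part of the diff) that the expected variables are set; AZURE_OPENAI_ENDPOINT and AZURE_OPENAI_API_KEY are assumptions based on what langchain-openai's Azure clients normally require, not values read explicitly in this file:

import os

required_vars = [
    "AZURE_OPENAI_API_VERSION",
    "AZURE_OPENAI_CHAT_DEPLOYMENT_NAME",
    "AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT_NAME",
    "AZURE_OPENAI_ENDPOINT",   # assumption: typically required by AzureChatOpenAI
    "AZURE_OPENAI_API_KEY",    # assumption: typically required by AzureChatOpenAI
]
missing = [name for name in required_vars if not os.getenv(name)]
if missing:
    raise SystemExit(f"Missing environment variables: {', '.join(missing)}")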
Lines changed: 52 additions & 0 deletions
@@ -0,0 +1,52 @@
"""
Example of Search Graph
"""
import os
from dotenv import load_dotenv
from scrapegraphai.graphs import SearchGraph
from scrapegraphai.utils import convert_to_csv, convert_to_json, prettify_exec_info

# ************************************************
# Define the configuration for the graph
# ************************************************

load_dotenv()

groq_key = os.getenv("GROQ_APIKEY")

graph_config = {
    "llm": {
        "model": "groq/gemma-7b-it",
        "api_key": groq_key,
        "temperature": 0
    },
    "embeddings": {
        "model": "ollama/nomic-embed-text",
        "temperature": 0,
        # "base_url": "http://localhost:11434",  # set the Ollama URL if it is not the default
    },
    "headless": False
}

# ************************************************
# Create the SearchGraph instance and run it
# ************************************************

search_graph = SearchGraph(
    prompt="List me the best excursions near Trento",
    config=graph_config
)

result = search_graph.run()
print(result)

# ************************************************
# Get graph execution info
# ************************************************

graph_exec_info = search_graph.get_execution_info()
print(prettify_exec_info(graph_exec_info))

# Save to json and csv
convert_to_csv(result, "result")
convert_to_json(result, "result")
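The embeddings in this example are served by a local Ollama instance. An optional check (a sketch, assuming the default address shown in the commented-out base_url above) to confirm the server is reachable before running the graph:

import urllib.request

try:
    # assumed default Ollama address; adjust if base_url was changed in the config
    urllib.request.urlopen("http://localhost:11434", timeout=5)
except OSError as exc:
    raise SystemExit(f"Ollama does not appear to be reachable: {exc}")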
Lines changed: 45 additions & 0 deletions
@@ -0,0 +1,45 @@
"""
Example of Search Graph
"""
import os
from dotenv import load_dotenv
from scrapegraphai.graphs import SearchGraph
from scrapegraphai.utils import convert_to_csv, convert_to_json, prettify_exec_info

# ************************************************
# Define the configuration for the graph
# ************************************************

graph_config = {
    "llm": {
        "client": "client_name",
        "model": "bedrock/anthropic.claude-3-sonnet-20240229-v1:0",
        "temperature": 0.0
    },
    "embeddings": {
        "model": "bedrock/cohere.embed-multilingual-v3"
    }
}

# ************************************************
# Create the SearchGraph instance and run it
# ************************************************

search_graph = SearchGraph(
    prompt="List me the best excursions near Trento",
    config=graph_config
)

result = search_graph.run()
print(result)

# ************************************************
# Get graph execution info
# ************************************************

graph_exec_info = search_graph.get_execution_info()
print(prettify_exec_info(graph_exec_info))

# Save to json and csv
convert_to_csv(result, "result")
convert_to_json(result, "result")
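The "client": "client_name" entry in the Bedrock config above is a placeholder. A hedged sketch of how one might construct a real Bedrock runtime client with boto3 and pass it in; whether SearchGraph accepts a boto3 client object under this key is an assumption not confirmed by this diff, and boto3 plus configured AWS credentials are assumed:

import boto3

# assumption: a "bedrock-runtime" client in a region where the Claude 3 Sonnet model is available
bedrock_client = boto3.client("bedrock-runtime", region_name="us-east-1")
graph_config["llm"]["client"] = bedrock_client  # assumed to replace the "client_name" placeholder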
Lines changed: 52 additions & 0 deletions
@@ -0,0 +1,52 @@
"""
Example of Search Graph
"""
import os
from dotenv import load_dotenv
from scrapegraphai.graphs import SearchGraph
from scrapegraphai.utils import convert_to_csv, convert_to_json, prettify_exec_info

# ************************************************
# Define the configuration for the graph
# ************************************************

load_dotenv()

deepseek_key = os.getenv("DEEPSEEK_APIKEY")

graph_config = {
    "llm": {
        "model": "deepseek-chat",
        "openai_api_key": deepseek_key,
        "openai_api_base": 'https://api.deepseek.com/v1',
    },
    "embeddings": {
        "model": "ollama/nomic-embed-text",
        "temperature": 0,
        # "base_url": "http://localhost:11434",  # set the Ollama URL if it is not the default
    },
    "verbose": True,
}

# ************************************************
# Create the SearchGraph instance and run it
# ************************************************

search_graph = SearchGraph(
    prompt="List me the best excursions near Trento",
    config=graph_config
)

result = search_graph.run()
print(result)

# ************************************************
# Get graph execution info
# ************************************************

graph_exec_info = search_graph.get_execution_info()
print(prettify_exec_info(graph_exec_info))

# Save to json and csv
convert_to_csv(result, "result")
convert_to_json(result, "result")
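DeepSeek is driven here through its OpenAI-compatible endpoint. An optional connectivity check (a sketch, not part of the diff) using the openai client library with the same key and base URL as the config above:

import os
from openai import OpenAI

client = OpenAI(
    api_key=os.getenv("DEEPSEEK_APIKEY"),
    base_url="https://api.deepseek.com/v1",
)
reply = client.chat.completions.create(
    model="deepseek-chat",
    messages=[{"role": "user", "content": "ping"}],
    max_tokens=5,
)
print(reply.choices[0].message.content)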

examples/ernie/search_graph_ernie.py

Lines changed: 10 additions & 7 deletions
@@ -12,15 +12,18 @@
 # Define the configuration for the graph
 # ************************************************
 
-openai_key = os.getenv("OPENAI_APIKEY")
-
 graph_config = {
     "llm": {
-        "api_key": openai_key,
-        "model": "gpt-3.5-turbo",
-    },
-    "max_results": 2,
-    "verbose": True,
+        "model": "ernie-bot-turbo",
+        "ernie_client_id": "<ernie_client_id>",
+        "ernie_client_secret": "<ernie_client_secret>",
+        "temperature": 0.1
+    },
+    "embeddings": {
+        "model": "ollama/nomic-embed-text",
+        "temperature": 0,
+        "base_url": "http://localhost:11434"},
+    "library": "beautifulsoup"
 }
 
 # ************************************************
Lines changed: 46 additions & 0 deletions
@@ -0,0 +1,46 @@
"""
Example of Search Graph
"""
from scrapegraphai.graphs import SearchGraph
from scrapegraphai.utils import convert_to_csv, convert_to_json, prettify_exec_info

# ************************************************
# Define the configuration for the graph
# ************************************************

graph_config = {
    "llm": {
        "model": "ernie-bot-turbo",
        "ernie_client_id": "<ernie_client_id>",
        "ernie_client_secret": "<ernie_client_secret>",
        "temperature": 0.1
    },
    "embeddings": {
        "model": "ollama/nomic-embed-text",
        "temperature": 0,
        "base_url": "http://localhost:11434"},
    "library": "beautifulsoup"
}

# ************************************************
# Create the SearchGraph instance and run it
# ************************************************

search_graph = SearchGraph(
    prompt="List me the best excursions near Trento",
    config=graph_config
)

result = search_graph.run()
print(result)

# ************************************************
# Get graph execution info
# ************************************************

graph_exec_info = search_graph.get_execution_info()
print(prettify_exec_info(graph_exec_info))

# Save to json and csv
convert_to_csv(result, "result")
convert_to_json(result, "result")
Lines changed: 52 additions & 0 deletions
@@ -0,0 +1,52 @@
"""
Example of Search Graph
"""
import os
from dotenv import load_dotenv
from scrapegraphai.graphs import SearchGraph
from scrapegraphai.utils import convert_to_csv, convert_to_json, prettify_exec_info

# ************************************************
# Define the configuration for the graph
# ************************************************

load_dotenv()

fireworks_api_key = os.getenv("FIREWORKS_APIKEY")

graph_config = {
    "llm": {
        "api_key": fireworks_api_key,
        "model": "fireworks/accounts/fireworks/models/mixtral-8x7b-instruct"
    },
    "embeddings": {
        "model": "ollama/nomic-embed-text",
        "temperature": 0,
        # "base_url": "http://localhost:11434",  # set the Ollama URL if it is not the default
    },
    "max_results": 2,
    "verbose": True,
    "headless": False,
}

# ************************************************
# Create the SearchGraph instance and run it
# ************************************************

search_graph = SearchGraph(
    prompt="List me the best excursions near Trento",
    config=graph_config
)

result = search_graph.run()
print(result)

# ************************************************
# Get graph execution info
# ************************************************

graph_exec_info = search_graph.get_execution_info()
print(prettify_exec_info(graph_exec_info))

# Save to json and csv
convert_to_csv(result, "result")
convert_to_json(result, "result")
Lines changed: 44 additions & 0 deletions
@@ -0,0 +1,44 @@
"""
Example of Search Graph
"""
import os
from dotenv import load_dotenv
from scrapegraphai.graphs import SearchGraph
from scrapegraphai.utils import convert_to_csv, convert_to_json, prettify_exec_info

# ************************************************
# Define the configuration for the graph
# ************************************************

load_dotenv()

gemini_key = os.getenv("GOOGLE_APIKEY")

graph_config = {
    "llm": {
        "api_key": gemini_key,
        "model": "gemini-pro",
    },
}

# ************************************************
# Create the SearchGraph instance and run it
# ************************************************

search_graph = SearchGraph(
    prompt="List me the best excursions near Trento",
    config=graph_config
)

result = search_graph.run()
print(result)

# ************************************************
# Get graph execution info
# ************************************************

graph_exec_info = search_graph.get_execution_info()
print(prettify_exec_info(graph_exec_info))

# Save to json and csv
convert_to_csv(result, "result")
convert_to_json(result, "result")
