
Commit 5fb9115

feat(version): python 3.12 is now supported 🚀
1 parent e43b801 · commit 5fb9115

5 files changed (+5, -45 lines)

pyproject.toml

Lines changed: 1 addition & 2 deletions
@@ -30,7 +30,6 @@ dependencies = [
     "free-proxy==1.1.1",
     "playwright==1.43.0",
     "google==3.0.0",
-    "yahoo-search-py==0.3",
     "undetected-playwright==0.3.0",
 ]
 
@@ -64,7 +63,7 @@ classifiers = [
     "Programming Language :: Python :: 3",
     "Operating System :: OS Independent",
 ]
-requires-python = ">=3.9,<3.12"
+requires-python = ">=3.9,<4.0"
 
 [project.optional-dependencies]
 burr = ["burr[start]==0.19.1"]
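The net effect on supported interpreters can be seen by evaluating the old and new requires-python specifiers. A minimal sketch, assuming the third-party packaging library is available (it is not a dependency declared by this commit):

# Sketch: check whether the running interpreter satisfies the old vs. new bound.
# Assumes the third-party "packaging" library is installed (not part of this commit).
import platform
from packaging.specifiers import SpecifierSet

old_bound = SpecifierSet(">=3.9,<3.12")
new_bound = SpecifierSet(">=3.9,<4.0")

version = platform.python_version()  # e.g. "3.12.3"
print(f"{version}: old bound {version in old_bound}, new bound {version in new_bound}")

On a 3.12 interpreter this prints False for the old bound and True for the new one, which is exactly what the commit message advertises.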

requirements-dev.lock

Lines changed: 0 additions & 18 deletions
@@ -30,9 +30,6 @@ anyio==4.3.0
     # via openai
     # via starlette
     # via watchfiles
-async-timeout==4.0.3
-    # via aiohttp
-    # via langchain
 attrs==23.2.0
     # via aiohttp
     # via jsonschema
@@ -93,9 +90,6 @@ docutils==0.19
     # via sphinx
 email-validator==2.1.1
     # via fastapi
-exceptiongroup==1.2.1
-    # via anyio
-    # via pytest
 faiss-cpu==1.8.0
     # via scrapegraphai
 fastapi==0.111.0
@@ -175,7 +169,6 @@ httpx==0.27.0
     # via fastapi
     # via groq
     # via openai
-    # via yahoo-search-py
 huggingface-hub==0.23.1
     # via tokenizers
 idna==3.7
@@ -330,7 +323,6 @@ pydantic==2.7.1
     # via langchain-core
     # via langsmith
     # via openai
-    # via yahoo-search-py
 pydantic-core==2.18.2
     # via pydantic
 pydeck==0.9.1
@@ -390,8 +382,6 @@ rsa==4.9
     # via google-auth
 s3transfer==0.10.1
     # via boto3
-selectolax==0.3.21
-    # via yahoo-search-py
 sf-hamilton==1.63.0
     # via burr
 shellingham==1.5.4
@@ -447,8 +437,6 @@ tokenizers==0.19.1
     # via anthropic
 toml==0.10.2
     # via streamlit
-tomli==2.0.1
-    # via pytest
 toolz==0.12.1
     # via altair
 tornado==6.4
@@ -461,9 +449,7 @@ tqdm==4.66.4
 typer==0.12.3
     # via fastapi-cli
 typing-extensions==4.12.0
-    # via altair
     # via anthropic
-    # via anyio
     # via fastapi
     # via fastapi-pagination
     # via google-generativeai
@@ -478,7 +464,6 @@ typing-extensions==4.12.0
     # via streamlit
     # via typer
     # via typing-inspect
-    # via uvicorn
 typing-inspect==0.9.0
     # via dataclasses-json
     # via sf-hamilton
@@ -493,7 +478,6 @@ uritemplate==4.1.1
 urllib3==2.2.1
     # via botocore
     # via requests
-    # via yahoo-search-py
 uvicorn==0.29.0
     # via burr
     # via fastapi
@@ -505,7 +489,5 @@ websockets==12.0
     # via uvicorn
 win32-setctime==1.1.0
     # via loguru
-yahoo-search-py==0.3
-    # via scrapegraphai
 yarl==1.9.4
     # via aiohttp
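Most of the entries that disappear here besides the yahoo-search-py chain (async-timeout, exceptiongroup, tomli) are backports of functionality that recent interpreters provide natively, so a lock file resolved on a newer Python no longer pulls them in. The sketch below illustrates that relationship; tying it to this lock-file change is an assumption, not something the commit states:

# Sketch of why the backport packages become unnecessary on newer interpreters.
# The version mapping is factual for CPython; its role in this lock change is an assumption.
import sys

if sys.version_info >= (3, 11):
    import tomllib                      # tomli's TOML parser is stdlib since 3.11
    from asyncio import timeout         # replaces the async-timeout backport since 3.11
    group = ExceptionGroup              # built-in since 3.11, no exceptiongroup needed
else:
    import tomli as tomllib             # backports still required on 3.9/3.10
    from async_timeout import timeout
    from exceptiongroup import ExceptionGroup as group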

requirements.lock

Lines changed: 0 additions & 13 deletions
@@ -22,9 +22,6 @@ anyio==4.3.0
     # via groq
     # via httpx
     # via openai
-async-timeout==4.0.3
-    # via aiohttp
-    # via langchain
 attrs==23.2.0
     # via aiohttp
 beautifulsoup4==4.12.3
@@ -54,8 +51,6 @@ distro==1.9.0
     # via anthropic
     # via groq
     # via openai
-exceptiongroup==1.2.1
-    # via anyio
 faiss-cpu==1.8.0
     # via scrapegraphai
 filelock==3.14.0
@@ -115,7 +110,6 @@ httpx==0.27.0
     # via anthropic
     # via groq
     # via openai
-    # via yahoo-search-py
 huggingface-hub==0.23.1
     # via tokenizers
 idna==3.7
@@ -215,7 +209,6 @@ pydantic==2.7.1
     # via langchain-core
     # via langsmith
     # via openai
-    # via yahoo-search-py
 pydantic-core==2.18.2
     # via pydantic
 pyee==11.1.0
@@ -248,8 +241,6 @@ rsa==4.9
     # via google-auth
 s3transfer==0.10.1
     # via boto3
-selectolax==0.3.21
-    # via yahoo-search-py
 six==1.16.0
     # via python-dateutil
 sniffio==1.3.1
@@ -279,7 +270,6 @@ tqdm==4.66.4
     # via scrapegraphai
 typing-extensions==4.12.0
     # via anthropic
-    # via anyio
     # via google-generativeai
     # via groq
     # via huggingface-hub
@@ -300,8 +290,5 @@ uritemplate==4.1.1
 urllib3==2.2.1
     # via botocore
     # via requests
-    # via yahoo-search-py
-yahoo-search-py==0.3
-    # via scrapegraphai
 yarl==1.9.4
     # via aiohttp
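A quick way to confirm the removals took effect is to check that none of the dropped distributions remain importable after re-syncing the environment. A minimal verification sketch; the module names are taken from the diffs above, and running it assumes a freshly synced virtualenv:

# Verification sketch: all of these modules were removed from the lock files,
# so none of them should resolve in a freshly synced environment.
import importlib.util

removed = ("yahoo_search", "selectolax", "async_timeout", "exceptiongroup", "tomli")
for name in removed:
    spec = importlib.util.find_spec(name)
    print(f"{name}: {'still installed' if spec else 'absent'}")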

scrapegraphai/integrations/burr_bridge.py

Lines changed: 4 additions & 0 deletions
@@ -5,6 +5,7 @@
 
 import re
 from typing import Any, Dict, List, Tuple
+import inspect
 
 try:
     import burr
@@ -54,6 +55,9 @@ def writes(self) -> list[str]:
 
     def update(self, result: dict, state: State) -> State:
         return state.update(**result)
+
+    def get_source(self) -> str:
+        return inspect.getsource(self.node.__class__)
 
 
 def parse_boolean_expression(expression: str) -> List[str]:
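The new get_source helper is a thin wrapper around inspect.getsource, which returns the source text of the file region where an object's class is defined. A standalone sketch of the same pattern; the Greeter class is hypothetical and only exists to show the call's behaviour:

# Standalone sketch of the pattern behind get_source(); Greeter is a hypothetical class.
# Run it from a file (not an interactive prompt) so the source is retrievable.
import inspect

class Greeter:
    def greet(self, name: str) -> str:
        return f"hello, {name}"

instance = Greeter()
# Mirrors inspect.getsource(self.node.__class__): prints the class definition above.
print(inspect.getsource(instance.__class__))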

scrapegraphai/utils/research_web.py

Lines changed: 0 additions & 12 deletions
@@ -5,7 +5,6 @@
 from typing import List
 from langchain_community.tools import DuckDuckGoSearchResults
 from googlesearch import search as google_search
-from yahoo_search import search as yahoo_search
 
 
 def search_on_web(query: str, search_engine: str = "Google", max_results: int = 10) -> List[str]:
@@ -43,16 +42,5 @@ def search_on_web(query: str, search_engine: str = "Google", max_results: int =
         links = re.findall(r'https?://[^\s,\]]+', res)
 
         return links
-    elif search_engine.lower() == "yahoo":
-        list_result = yahoo_search(query)
-        results = []
-        for page in list_result.pages:
-            if len(results) >= max_results:  # Check if max_results has already been reached
-                break  # Exit loop if max_results has been reached
-            try:
-                results.append(page.link)
-            except AttributeError:
-                continue
-        return results
     raise ValueError(
         "The only search engines available are DuckDuckGo or Google")
