
Commit 12428e5

Merge pull request #6 from ScrapeGraphAI/pre/beta

Enhanced Docs

2 parents a9c1fa5 + ee9efa6, commit 12428e5

4 files changed: +36 -245 lines

scrapegraph-py/.pre-commit-config.yaml

Lines changed: 0 additions & 7 deletions

@@ -21,10 +21,3 @@ repos:
       - id: end-of-file-fixer
       - id: check-yaml
         exclude: mkdocs.yml
-
-  - repo: https://github.com/gitguardian/ggshield
-    rev: v1.33.0
-    hooks:
-      - id: ggshield
-        language_version: python3
-        stages: [commit]

scrapegraph-py/README.md

Lines changed: 23 additions & 22 deletions

@@ -25,17 +25,19 @@ Official Python SDK for the ScrapeGraph AI API - Smart web scraping powered by AI
 pip install scrapegraph-py
 ```
 
-### Using Poetry (Recommended)
+### Using uv
+
+We recommend using [uv](https://docs.astral.sh/uv/) to install the dependencies and pre-commit hooks.
 
 ```
-# Install poetry if you haven't already
-pip install poetry
+# Install uv if you haven't already
+pip install uv
 
 # Install dependencies
-poetry install
+uv sync
 
 # Install pre-commit hooks
-poetry run pre-commit install
+uv run pre-commit install
 ```
 
 ## 🔧 Quick Start
@@ -51,15 +53,15 @@ from scrapegraph_py.logger import get_logger
 logger = get_logger(level="DEBUG")
 
 # Initialize client
-client = SyncClient(api_key="sgai-your-api-key")
+sgai_client = SyncClient(api_key="your-api-key-here")
 
 # Make a request
-response = client.smartscraper(
+response = sgai_client.smartscraper(
     website_url="https://example.com",
     user_prompt="Extract the main heading and description"
 )
 
-print(response)
+print(response["result"])
 ```
 
 ## 🎯 Examples
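For reference, here is the Quick Start as it reads after this change, assembled into one runnable sketch. The `from scrapegraph_py import SyncClient` import is an assumption taken from the surrounding README (it is not shown in this hunk), and the API key is a placeholder.

```python
from scrapegraph_py import SyncClient  # assumed import, not shown in this hunk
from scrapegraph_py.logger import get_logger

# Enable debug logging (context line from the hunk above)
logger = get_logger(level="DEBUG")

# Initialize client
sgai_client = SyncClient(api_key="your-api-key-here")

# Make a request
response = sgai_client.smartscraper(
    website_url="https://example.com",
    user_prompt="Extract the main heading and description"
)

# The extracted payload now lives under the "result" key
print(response["result"])
```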
@@ -71,12 +73,12 @@ import asyncio
 from scrapegraph_py import AsyncClient
 
 async def main():
-    async with AsyncClient(api_key="sgai-your-api-key") as client:
-        response = await client.smartscraper(
+    async with AsyncClient(api_key="your-api-key-here") as sgai_client:
+        response = await sgai_client.smartscraper(
             website_url="https://example.com",
-            user_prompt="Extract the main heading"
+            user_prompt="Summarize the main content"
         )
-        print(response)
+        print(response["result"])
 
 asyncio.run(main())
 ```
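The async example after this change, assembled into one runnable sketch; the imports come from the hunk context and the API key is a placeholder.

```python
import asyncio

from scrapegraph_py import AsyncClient


async def main():
    # The client is renamed to sgai_client and used as an async context manager
    async with AsyncClient(api_key="your-api-key-here") as sgai_client:
        response = await sgai_client.smartscraper(
            website_url="https://example.com",
            user_prompt="Summarize the main content"
        )
        # Print only the extracted result
        print(response["result"])

asyncio.run(main())
```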
@@ -92,12 +94,14 @@ class WebsiteData(BaseModel):
     title: str = Field(description="The page title")
     description: str = Field(description="The meta description")
 
-client = SyncClient(api_key="sgai-your-api-key")
-response = client.smartscraper(
+sgai_client = SyncClient(api_key="your-api-key-here")
+response = sgai_client.smartscraper(
     website_url="https://example.com",
     user_prompt="Extract the title and description",
     output_schema=WebsiteData
 )
+
+print(response["result"])
 ```
 </details>
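The output-schema example after this change, assembled into one runnable sketch. The `pydantic` and `SyncClient` imports are assumptions (not shown in this hunk); everything else comes from the hunk above.

```python
from pydantic import BaseModel, Field  # assumed import
from scrapegraph_py import SyncClient  # assumed import


class WebsiteData(BaseModel):
    title: str = Field(description="The page title")
    description: str = Field(description="The meta description")


sgai_client = SyncClient(api_key="your-api-key-here")
response = sgai_client.smartscraper(
    website_url="https://example.com",
    user_prompt="Extract the title and description",
    output_schema=WebsiteData,
)

# The structured result is printed from the "result" key, as in the other examples
print(response["result"])
```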

@@ -112,27 +116,24 @@ For detailed documentation, visit [docs.scrapegraphai.com](https://docs.scrapegraphai.com)
 1. Clone the repository:
 ```
 git clone https://github.com/ScrapeGraphAI/scrapegraph-sdk.git
-cd scrapegraph-sdk
+cd scrapegraph-sdk/scrapegraph-py
 ```
 
 2. Install dependencies:
 ```
-poetry install
+uv sync
 ```
 
 3. Install pre-commit hooks:
 ```
-poetry run pre-commit install
+uv run pre-commit install
 ```
 
 ### Running Tests
 
 ```
 # Run all tests
-poetry run pytest
-
-# Run with coverage
-poetry run pytest --cov=scrapegraph_py
+uv run pytest
 
 # Run specific test file
 poetry run pytest tests/test_client.py
@@ -167,4 +168,4 @@ Contributions are welcome! Please feel free to submit a Pull Request. For major
 
 ---
 
-Made with ❤️ by [ScrapeGraph AI](https://scrapegraphai.com)
+Made with ❤️ by [ScrapeGraph AI](https://scrapegraphai.com)

scrapegraph-py/pyproject.toml

Lines changed: 12 additions & 12 deletions

@@ -8,15 +8,6 @@ authors = [
     { name = "Lorenzo Padoan", email = "[email protected]" }
 ]
 
-dependencies = [
-    "requests>=2.32.3",
-    "pydantic>=2.10.2",
-    "python-dotenv>=1.0.1",
-    "aiohttp>=3.11.8",
-    "requests>=2.32.3",
-    "validators>=0.34.0",
-]
-
 license = "MIT"
 readme = "README.md"
 homepage = "https://scrapegraphai.com/"
@@ -43,7 +34,16 @@ classifiers = [
     "Programming Language :: Python :: 3",
     "Operating System :: OS Independent",
 ]
-requires-python = ">=3.9,<4.0"
+requires-python = ">=3.10,<4.0"
+
+dependencies = [
+    "requests>=2.32.3",
+    "pydantic>=2.10.2",
+    "python-dotenv>=1.0.1",
+    "aiohttp>=3.11.8",
+    "requests>=2.32.3",
+    "validators>=0.34.0",
+]
 
 [project.optional-dependencies]
 docs = ["sphinx==6.0", "furo==2024.5.6"]
@@ -73,7 +73,7 @@ dev-dependencies = [
 
 [tool.black]
 line-length = 88
-target-version = ["py39"]
+target-version = ["py310"]
 
 [tool.isort]
 profile = "black"
@@ -86,7 +86,7 @@ select = ["F", "E", "W", "C"]
 ignore = ["E203", "E501"] # Ignore conflicts with Black
 
 [tool.mypy]
-python_version = "3.9"
+python_version = "3.10"
 strict = true
 disallow_untyped_calls = true
 ignore_missing_imports = true
