
Commit a9c1fa5

docs: improved examples
1 parent 5913d5f commit a9c1fa5

4 files changed: +93 −102 lines changed

Lines changed: 37 additions & 38 deletions

@@ -1,44 +1,43 @@
 import asyncio
+
 from scrapegraph_py import AsyncClient
-from scrapegraph_py.exceptions import APIError
+from scrapegraph_py.logger import get_logger
+
+get_logger(level="DEBUG")
+
 
 async def main():
-    sgai_client = AsyncClient(api_key="sgai-your-api-key-here")
-
-    try:
-        # Concurrent scraping requests
-        urls = [
-            "https://scrapegraphai.com/",
-            "https://github.com/ScrapeGraphAI/Scrapegraph-ai"
-        ]
-
-        tasks = [
-            sgai_client.smartscraper(
-                website_url=url,
-                user_prompt="Summarize the main content"
-            ) for url in urls
-        ]
-
-        # Execute requests concurrently
-        responses = await asyncio.gather(*tasks, return_exceptions=True)
-
-        # Process results
-        for i, response in enumerate(responses):
-            if isinstance(response, Exception):
-                print(f"\nError for {urls[i]}: {response}")
-            else:
-                print(f"\nPage {i+1} Summary:")
-                print(f"URL: {urls[i]}")
-                print(f"Result: {response['result']}")
-
-        # Check credits
-        credits = await sgai_client.get_credits()
-        print(f"Credits Info: {credits}")
-
-    except APIError as e:
-        print(f"Error: {e}")
-    finally:
-        await sgai_client.close()
+
+    # Initialize async client
+    sgai_client = AsyncClient(api_key="your-api-key-here")
+
+    # Concurrent scraping requests
+    urls = [
+        "https://scrapegraphai.com/",
+        "https://github.com/ScrapeGraphAI/Scrapegraph-ai",
+    ]
+
+    tasks = [
+        sgai_client.smartscraper(
+            website_url=url, user_prompt="Summarize the main content"
+        )
+        for url in urls
+    ]
+
+    # Execute requests concurrently
+    responses = await asyncio.gather(*tasks, return_exceptions=True)
+
+    # Process results
+    for i, response in enumerate(responses):
+        if isinstance(response, Exception):
+            print(f"\nError for {urls[i]}: {response}")
+        else:
+            print(f"\nPage {i+1} Summary:")
+            print(f"URL: {urls[i]}")
+            print(f"Result: {response['result']}")
+
+    await sgai_client.close()
+
 
 if __name__ == "__main__":
-    asyncio.run(main())
+    asyncio.run(main())
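
Because the reworked async example relies on return_exceptions=True, failed requests come back from asyncio.gather as exception objects instead of raising, which is why the try/except APIError block could be dropped. A minimal sketch of per-URL error handling, reusing the APIError class that the previous version of this file imported from scrapegraph_py.exceptions (assumed to still be available in the SDK):

# Sketch only: distinguish API errors from other failures after
# asyncio.gather(..., return_exceptions=True). APIError is the exception
# class the old example imported; urls and responses come from the diff above.
from scrapegraph_py.exceptions import APIError

for url, response in zip(urls, responses):
    if isinstance(response, APIError):
        print(f"API error for {url}: {response}")
    elif isinstance(response, Exception):
        print(f"Unexpected error for {url}: {response}")
    else:
        print(f"{url}: {response['result']}")
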
Lines changed: 28 additions & 0 deletions

@@ -0,0 +1,28 @@
+from scrapegraph_py import SyncClient
+from scrapegraph_py.logger import get_logger
+
+get_logger(level="DEBUG")
+
+# Initialize the client
+sgai_client = SyncClient(api_key="your-api-key-here")
+
+# Example request_id (replace with an actual request_id from a previous request)
+request_id = "your-request-id-here"
+
+# Check remaining credits
+credits = sgai_client.get_credits()
+print(f"Credits Info: {credits}")
+
+# Submit feedback for a previous request
+feedback_response = sgai_client.submit_feedback(
+    request_id=request_id,
+    rating=5,  # Rating from 1-5
+    feedback_text="The extraction was accurate and exactly what I needed!",
+)
+print(f"\nFeedback Response: {feedback_response}")
+
+# Get previous results using get_smartscraper
+previous_result = sgai_client.get_smartscraper(request_id=request_id)
+print(f"\nRetrieved Previous Result: {previous_result}")
+
+sgai_client.close()
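
The request_id placeholder above would normally come from an earlier smartscraper call. A minimal sketch, using the request_id field that the other examples in this commit read from the smartscraper response:

# Sketch only: obtain a real request_id from a prior smartscraper call,
# then reuse it for submit_feedback and get_smartscraper as shown above.
response = sgai_client.smartscraper(
    website_url="https://example.com",
    user_prompt="Extract the main heading of the page",
)
request_id = response["request_id"]
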
Lines changed: 15 additions & 34 deletions

@@ -1,46 +1,27 @@
-from scrapegraph_py import SyncClient
-from scrapegraph_py.exceptions import APIError
 from pydantic import BaseModel, Field
 
+from scrapegraph_py import SyncClient
+
+
 # Define a Pydantic model for the output schema
 class WebpageSchema(BaseModel):
     title: str = Field(description="The title of the webpage")
     description: str = Field(description="The description of the webpage")
     summary: str = Field(description="A brief summary of the webpage")
 
+
 # Initialize the client
-sgai_client = SyncClient(api_key="sgai-your-api-key-here")
+sgai_client = SyncClient(api_key="your-api-key-here")
 
-try:
-    # SmartScraper request with output schema
-    response = sgai_client.smartscraper(
+# SmartScraper request with output schema
+response = sgai_client.smartscraper(
     website_url="https://example.com",
     user_prompt="Extract webpage information",
-        output_schema=WebpageSchema
-    )
-
-    # Print the response
-    print(f"Request ID: {response['request_id']}")
-    print(f"Result: {response['result']}")
-
-    # Check remaining credits
-    credits = sgai_client.get_credits()
-    print(f"\nCredits Info: {credits}")
-
-    # Submit feedback
-    # feedback = sgai_client.submit_feedback(
-    #     request_id=response['request_id'],
-    #     rating=5,
-    #     feedback_text="Great results!"
-    # )
-    # print(f"\nFeedback Response: {feedback}")
-
-    # Get previous results using get_smartscraper
-    # result = sgai_client.get_smartscraper(request_id=response['request_id'])
-    # print(f"\nRetrieved Result: {result}")
-
-except APIError as e:
-    print(f"API Error: {e}")
-
-finally:
-    sgai_client.close() # Optional cleanup
+    output_schema=WebpageSchema,
+)
+
+# Print the response
+print(f"Request ID: {response['request_id']}")
+print(f"Result: {response['result']}")
+
+sgai_client.close()
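
If response['result'] comes back as a plain dict shaped by WebpageSchema, it can be re-validated into the Pydantic model for typed access. A minimal sketch, assuming Pydantic v2 and a schema-shaped result:

# Sketch only: turn the returned dict back into a typed WebpageSchema
# instance (assumes Pydantic v2 and that the result matches the schema).
webpage = WebpageSchema.model_validate(response["result"])
print(webpage.title, webpage.summary)
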
Lines changed: 13 additions & 30 deletions

@@ -1,40 +1,23 @@
 from scrapegraph_py import SyncClient
-from scrapegraph_py.exceptions import APIError
 from scrapegraph_py.logger import get_logger
 
 get_logger(level="DEBUG")
 
 # Initialize the client
-sgai_client = SyncClient(api_key="sgai-your-api-key-here")
+sgai_client = SyncClient(api_key="your-api-key-here")
 
-try:
-    # SmartScraper request
-    response = sgai_client.smartscraper(
-        website_url="https://example.com",
-        user_prompt="Extract the main heading, description, and summary of the webpage",
-    )
+# SmartScraper request
+response = sgai_client.smartscraper(
+    website_url="https://example.com",
+    user_prompt="Extract the main heading, description, and summary of the webpage",
+)
 
-    # Print the response
-    print(f"Request ID: {response['request_id']}")
-    print(f"Result: {response['result']}")
+# Print the response
+print(f"Request ID: {response['request_id']}")
+print(f"Result: {response['result']}")
 
-    # Check remaining credits
-    credits = sgai_client.get_credits()
-    print(f"\nCredits Info: {credits}")
+# Get previous results using get_smartscraper
+# result = sgai_client.get_smartscraper(request_id=response['request_id'])
+# print(f"\nRetrieved Result: {result}")
 
-    # Submit feedback
-    # feedback = sgai_client.submit_feedback(
-    #     request_id=response['request_id'],
-    #     rating=5,
-    #     feedback_text="Great results!"
-    # )
-    # print(f"\nFeedback Response: {feedback}")
-
-    # Get previous results using get_smartscraper
-    # result = sgai_client.get_smartscraper(request_id=response['request_id'])
-    # print(f"\nRetrieved Result: {result}")
-
-except APIError as e:
-    print(f"Error: {e}")
-finally:
-    sgai_client.close()
+sgai_client.close()
