Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

feat: add api integration #827

Open
wants to merge 1 commit into
base: pre/beta
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
44 changes: 44 additions & 0 deletions examples/scrapegraph-api/smart_scraper_api.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
"""
Basic example of scraping pipeline using SmartScraper.

Runs a SmartScraperGraph against a live URL using the hosted
scrapegraphai smart-scraper model, prints the extracted result as
JSON, then prints the graph execution info.

Requires SCRAPEGRAPH_API_KEY in the environment (or a .env file).
"""
import os
import json

from dotenv import load_dotenv

from scrapegraphai.graphs import SmartScraperGraph
from scrapegraphai.utils import prettify_exec_info

# Pull SCRAPEGRAPH_API_KEY (and anything else) from a local .env file.
load_dotenv()

# ************************************************
# Define the configuration for the graph
# ************************************************

api_key = os.getenv("SCRAPEGRAPH_API_KEY")
if not api_key:
    # Fail fast with a clear message instead of passing a None key
    # downstream, where it would surface as an opaque auth error.
    raise RuntimeError(
        "SCRAPEGRAPH_API_KEY is not set; export it or add it to a .env file."
    )

graph_config = {
    "llm": {
        "model": "scrapegraphai/smart-scraper",
        "api_key": api_key,
    },
    "verbose": True,
    "headless": False,
}

# ************************************************
# Create the SmartScraperGraph instance and run it
# ************************************************

smart_scraper_graph = SmartScraperGraph(
    prompt="Extract me all the articles",
    source="https://www.wired.com",
    config=graph_config,
)

result = smart_scraper_graph.run()
print(json.dumps(result, indent=4))

# ************************************************
# Get graph execution info
# ************************************************

graph_exec_info = smart_scraper_graph.get_execution_info()
print(prettify_exec_info(graph_exec_info))
3 changes: 2 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,8 @@ dependencies = [
"transformers>=4.44.2",
"googlesearch-python>=1.2.5",
"simpleeval>=1.0.0",
"async_timeout>=4.0.3"
"async_timeout>=4.0.3",
"scrapegraph-py>=0.0.3"
]

license = "MIT"
Expand Down
9 changes: 7 additions & 2 deletions requirements-dev.lock
Original file line number Diff line number Diff line change
Expand Up @@ -353,7 +353,7 @@ pyasn1==0.6.0
# via rsa
pyasn1-modules==0.4.0
# via google-auth
pydantic==2.8.2
pydantic==2.10.1
# via burr
# via fastapi
# via fastapi-pagination
Expand All @@ -368,7 +368,8 @@ pydantic==2.8.2
# via openai
# via pydantic-settings
# via qdrant-client
pydantic-core==2.20.1
# via scrapegraph-py
pydantic-core==2.27.1
# via pydantic
pydantic-settings==2.5.2
# via langchain-community
Expand Down Expand Up @@ -396,6 +397,7 @@ python-dateutil==2.9.0.post0
# via pandas
python-dotenv==1.0.1
# via pydantic-settings
# via scrapegraph-py
# via scrapegraphai
pytz==2024.1
# via pandas
Expand Down Expand Up @@ -424,6 +426,7 @@ requests==2.32.3
# via langchain-community
# via langsmith
# via mistral-common
# via scrapegraph-py
# via sphinx
# via streamlit
# via tiktoken
Expand All @@ -439,6 +442,8 @@ s3transfer==0.10.2
# via boto3
safetensors==0.4.5
# via transformers
scrapegraph-py==0.0.3
# via scrapegraphai
semchunk==2.2.0
# via scrapegraphai
sentencepiece==0.2.0
Expand Down
9 changes: 7 additions & 2 deletions requirements.lock
Original file line number Diff line number Diff line change
Expand Up @@ -257,7 +257,7 @@ pyasn1==0.6.0
# via rsa
pyasn1-modules==0.4.0
# via google-auth
pydantic==2.8.2
pydantic==2.10.1
# via google-generativeai
# via langchain
# via langchain-aws
Expand All @@ -269,7 +269,8 @@ pydantic==2.8.2
# via openai
# via pydantic-settings
# via qdrant-client
pydantic-core==2.20.1
# via scrapegraph-py
pydantic-core==2.27.1
# via pydantic
pydantic-settings==2.5.2
# via langchain-community
Expand All @@ -286,6 +287,7 @@ python-dateutil==2.9.0.post0
# via pandas
python-dotenv==1.0.1
# via pydantic-settings
# via scrapegraph-py
# via scrapegraphai
pytz==2024.1
# via pandas
Expand Down Expand Up @@ -313,6 +315,7 @@ requests==2.32.3
# via langchain-community
# via langsmith
# via mistral-common
# via scrapegraph-py
# via tiktoken
# via transformers
rpds-py==0.20.0
Expand All @@ -324,6 +327,8 @@ s3transfer==0.10.2
# via boto3
safetensors==0.4.5
# via transformers
scrapegraph-py==0.0.3
# via scrapegraphai
semchunk==2.2.0
# via scrapegraphai
sentencepiece==0.2.0
Expand Down
6 changes: 6 additions & 0 deletions scrapegraphai/graphs/smart_scraper_graph.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
ConditionalNode
)
from ..prompts import REGEN_ADDITIONAL_INFO
from scrapegraph_py import ScrapeGraphClient, smart_scraper

class SmartScraperGraph(AbstractGraph):
"""
Expand Down Expand Up @@ -59,6 +60,11 @@ def _create_graph(self) -> BaseGraph:
Returns:
BaseGraph: A graph instance representing the web scraping workflow.
"""
if self.llm_model == "scrapegraphai/smart-scraper":
client = ScrapeGraphClient(self.config.get("api_key"))

result = smart_scraper(client, self.source, self.prompt)
return result

fetch_node = FetchNode(
input="url| local_dir",
Expand Down
Loading