88"""
99
1010import os
11- import asyncio
12- from typing import Any , Dict , Optional
11+ from typing import Any , Dict
1312
13+ import httpx
1414from mcp .server .fastmcp import FastMCP
15- from scrapegraph_py import AsyncClient
1615
1716
18- class ScapeGraphAsyncClient :
19- """Async wrapper for the ScapeGraph Python SDK."""
17+ class ScapeGraphClient :
18+ """Client for interacting with the ScapeGraph API."""
19+
20+ BASE_URL = "https://api.scrapegraphai.com/v1"
2021
2122 def __init__ (self , api_key : str ):
2223 """
23- Initialize the ScapeGraph async client.
24+ Initialize the ScapeGraph API client.
2425
2526 Args:
2627 api_key: API key for ScapeGraph API
2728 """
28- self .client = AsyncClient (api_key = api_key )
29-
30- async def markdownify (self , website_url : str ) -> Dict [str , Any ]:
29+ self .api_key = api_key
30+ self .headers = {
31+ "SGAI-APIKEY" : api_key ,
32+ "Content-Type" : "application/json"
33+ }
34+ self .client = httpx .Client (timeout = 60.0 )
35+
36+ def markdownify (self , website_url : str ) -> Dict [str , Any ]:
3137 """
3238 Convert a webpage into clean, formatted markdown.
3339
@@ -37,13 +43,20 @@ async def markdownify(self, website_url: str) -> Dict[str, Any]:
3743 Returns:
3844 Dictionary containing the markdown result
3945 """
40- return await self .client .markdownify (website_url = website_url )
46+ url = f"{ self .BASE_URL } /markdownify"
47+ data = {
48+ "website_url" : website_url
49+ }
50+
51+ response = self .client .post (url , headers = self .headers , json = data )
52+
53+ if response .status_code != 200 :
54+ error_msg = f"Error { response .status_code } : { response .text } "
55+ raise Exception (error_msg )
4156
42- async def smartscraper (
43- self ,
44- user_prompt : str ,
45- website_url : str
46- ) -> Dict [str , Any ]:
57+ return response .json ()
58+
59+ def smartscraper (self , user_prompt : str , website_url : str ) -> Dict [str , Any ]:
4760 """
4861 Extract structured data from a webpage using AI.
4962
@@ -54,15 +67,21 @@ async def smartscraper(
5467 Returns:
5568 Dictionary containing the extracted data
5669 """
57- return await self .client .smartscraper (
58- user_prompt = user_prompt ,
59- website_url = website_url
60- )
61-
62- async def searchscraper (
63- self ,
64- user_prompt : str
65- ) -> Dict [str , Any ]:
70+ url = f"{ self .BASE_URL } /smartscraper"
71+ data = {
72+ "user_prompt" : user_prompt ,
73+ "website_url" : website_url
74+ }
75+
76+ response = self .client .post (url , headers = self .headers , json = data )
77+
78+ if response .status_code != 200 :
79+ error_msg = f"Error { response .status_code } : { response .text } "
80+ raise Exception (error_msg )
81+
82+ return response .json ()
83+
84+ def searchscraper (self , user_prompt : str ) -> Dict [str , Any ]:
6685 """
6786 Perform AI-powered web searches with structured results.
6887
@@ -72,26 +91,35 @@ async def searchscraper(
7291 Returns:
7392 Dictionary containing search results and reference URLs
7493 """
75- return await self .client .searchscraper (
76- user_prompt = user_prompt
77- )
94+ url = f"{ self .BASE_URL } /searchscraper"
95+ data = {
96+ "user_prompt" : user_prompt
97+ }
98+
99+ response = self .client .post (url , headers = self .headers , json = data )
100+
101+ if response .status_code != 200 :
102+ error_msg = f"Error { response .status_code } : { response .text } "
103+ raise Exception (error_msg )
78104
79- async def close (self ) -> None :
80- """Close the client to free up resources."""
81- await self .client .close ()
105+ return response .json ()
82106
107+ def close (self ) -> None :
108+ """Close the HTTP client."""
109+ self .client .close ()
83110
84- # Create MCP server and AsyncScapeGraphWrapper at module level
111+
112+ # Create MCP server
85113mcp = FastMCP ("ScapeGraph API MCP Server" )
86114
87115# Default API key (will be overridden in main or by direct assignment)
88116default_api_key = os .environ .get ("SGAI_API_KEY" )
89- scrapegraph_wrapper = ScapeGraphAsyncClient (default_api_key ) if default_api_key else None
117+ scrapegraph_client = ScapeGraphClient (default_api_key ) if default_api_key else None
90118
91119
92- # Add tools for markdownify
120+ # Add tool for markdownify
93121@mcp .tool ()
94- async def markdownify (website_url : str ) -> Dict [str , Any ]:
122+ def markdownify (website_url : str ) -> Dict [str , Any ]:
95123 """
96124 Convert a webpage into clean, formatted markdown.
97125
@@ -101,18 +129,18 @@ async def markdownify(website_url: str) -> Dict[str, Any]:
101129 Returns:
102130 Dictionary containing the markdown result
103131 """
104- if scrapegraph_wrapper is None :
132+ if scrapegraph_client is None :
105133 return {"error" : "ScapeGraph client not initialized. Please provide an API key." }
106134
107135 try :
108- return await scrapegraph_wrapper .markdownify (website_url )
136+ return scrapegraph_client .markdownify (website_url )
109137 except Exception as e :
110138 return {"error" : str (e )}
111139
112140
113- # Add tools for smartscraper
141+ # Add tool for smartscraper
114142@mcp .tool ()
115- async def smartscraper (
143+ def smartscraper (
116144 user_prompt : str ,
117145 website_url : str
118146) -> Dict [str , Any ]:
@@ -126,18 +154,18 @@ async def smartscraper(
126154 Returns:
127155 Dictionary containing the extracted data
128156 """
129- if scrapegraph_wrapper is None :
157+ if scrapegraph_client is None :
130158 return {"error" : "ScapeGraph client not initialized. Please provide an API key." }
131159
132160 try :
133- return await scrapegraph_wrapper .smartscraper (user_prompt , website_url )
161+ return scrapegraph_client .smartscraper (user_prompt , website_url )
134162 except Exception as e :
135163 return {"error" : str (e )}
136164
137165
138- # Add tools for searchscraper
166+ # Add tool for searchscraper
139167@mcp .tool ()
140- async def searchscraper (
168+ def searchscraper (
141169 user_prompt : str
142170) -> Dict [str , Any ]:
143171 """
@@ -149,21 +177,15 @@ async def searchscraper(
149177 Returns:
150178 Dictionary containing search results and reference URLs
151179 """
152- if scrapegraph_wrapper is None :
180+ if scrapegraph_client is None :
153181 return {"error" : "ScapeGraph client not initialized. Please provide an API key." }
154182
155183 try :
156- return await scrapegraph_wrapper .searchscraper (user_prompt )
184+ return scrapegraph_client .searchscraper (user_prompt )
157185 except Exception as e :
158186 return {"error" : str (e )}
159187
160188
161- async def cleanup () -> None :
162- """Clean up resources when the server is shutting down."""
163- if scrapegraph_wrapper is not None :
164- await scrapegraph_wrapper .close ()
165-
166-
167189def main () -> None :
168190 """Run the ScapeGraph MCP server."""
169191 print ("Starting ScapeGraph MCP server!" )
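Since the refactor replaces the async SDK wrapper with a plain blocking httpx client, the new class can also be exercised directly, outside the MCP tools. A minimal sketch, assuming `SGAI_API_KEY` is exported and that the import path used here (`scrapegraph_mcp.server`) matches the package layout; the URL and prompt are placeholders, not values from this PR:

```python
# Minimal usage sketch for the refactored synchronous ScapeGraphClient.
# Assumes SGAI_API_KEY is set; the import path, URL, and prompt are hypothetical.
import os

from scrapegraph_mcp.server import ScapeGraphClient  # hypothetical module path

client = ScapeGraphClient(os.environ["SGAI_API_KEY"])

try:
    # Each call issues a blocking POST via httpx and returns the parsed JSON body,
    # raising on any non-200 response (mirroring the error handling in the diff).
    markdown = client.markdownify("https://example.com")
    extracted = client.smartscraper(
        user_prompt="List the page's main headings",
        website_url="https://example.com",
    )
    print(markdown)
    print(extracted)
finally:
    # Close the underlying httpx.Client now that the async cleanup() hook is gone.
    client.close()
```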