Skip to content

Commit 80c577a

Browse files
VinciGit00 and claude committed
fix: remove deprecated LlmConfig params from all tools
Remove llm_model, llm_temperature, llm_max_tokens from smartscraper, searchscraper, and monitor_create tools. Remove _llm_config helper and llm_config_dict from all client methods. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
1 parent c6037dc commit 80c577a

1 file changed

Lines changed: 4 additions & 53 deletions

File tree

src/scrapegraph_mcp/server.py

Lines changed: 4 additions & 53 deletions
Original file line numberDiff line numberDiff line change
@@ -152,22 +152,6 @@ def _fetch_config(
152152
cfg["mock"] = mock
153153
return cfg or None
154154

155-
@staticmethod
156-
def _llm_config(
157-
*,
158-
model: Optional[str] = None,
159-
temperature: Optional[float] = None,
160-
max_tokens: Optional[int] = None,
161-
) -> Optional[Dict[str, Any]]:
162-
cfg: Dict[str, Any] = {}
163-
if model is not None:
164-
cfg["model"] = model
165-
if temperature is not None:
166-
cfg["temperature"] = temperature
167-
if max_tokens is not None:
168-
cfg["max_tokens"] = max_tokens
169-
return cfg or None
170-
171155
def scrape_v2(
172156
self,
173157
website_url: str,
@@ -218,15 +202,12 @@ def extract(
218202
website_url: str,
219203
output_schema: Optional[Dict[str, Any]] = None,
220204
fetch_config_dict: Optional[Dict[str, Any]] = None,
221-
llm_config_dict: Optional[Dict[str, Any]] = None,
222205
) -> Dict[str, Any]:
223206
body: Dict[str, Any] = {"url": website_url, "prompt": user_prompt}
224207
if output_schema is not None:
225208
body["output_schema"] = output_schema
226209
if fetch_config_dict:
227210
body["fetch_config"] = fetch_config_dict
228-
if llm_config_dict:
229-
body["llm_config"] = llm_config_dict
230211
return self._request("POST", "/extract", json_body=body)
231212

232213
def smartscraper(
@@ -235,34 +216,29 @@ def smartscraper(
235216
website_url: str,
236217
output_schema: Optional[Dict[str, Any]] = None,
237218
fetch_config_dict: Optional[Dict[str, Any]] = None,
238-
llm_config_dict: Optional[Dict[str, Any]] = None,
239219
) -> Dict[str, Any]:
240-
return self.extract(user_prompt, website_url, output_schema, fetch_config_dict, llm_config_dict)
220+
return self.extract(user_prompt, website_url, output_schema, fetch_config_dict)
241221

242222
def search_api(
243223
self,
244224
query: str,
245225
num_results: Optional[int] = None,
246226
output_schema: Optional[Dict[str, Any]] = None,
247-
llm_config_dict: Optional[Dict[str, Any]] = None,
248227
) -> Dict[str, Any]:
249228
n = 5 if num_results is None else num_results
250229
n = max(3, min(20, n))
251230
body: Dict[str, Any] = {"query": query, "num_results": n}
252231
if output_schema is not None:
253232
body["output_schema"] = output_schema
254-
if llm_config_dict:
255-
body["llm_config"] = llm_config_dict
256233
return self._request("POST", "/search", json_body=body)
257234

258235
def searchscraper(
259236
self,
260237
user_prompt: str,
261238
num_results: Optional[int] = None,
262239
output_schema: Optional[Dict[str, Any]] = None,
263-
llm_config_dict: Optional[Dict[str, Any]] = None,
264240
) -> Dict[str, Any]:
265-
return self.search_api(user_prompt, num_results=num_results, output_schema=output_schema, llm_config_dict=llm_config_dict)
241+
return self.search_api(user_prompt, num_results=num_results, output_schema=output_schema)
266242

267243
def scrape(
268244
self,
@@ -345,7 +321,6 @@ def monitor_create(
345321
cron: str,
346322
output_schema: Optional[Dict[str, Any]] = None,
347323
fetch_config_dict: Optional[Dict[str, Any]] = None,
348-
llm_config_dict: Optional[Dict[str, Any]] = None,
349324
) -> Dict[str, Any]:
350325
body: Dict[str, Any] = {
351326
"name": name,
@@ -357,8 +332,6 @@ def monitor_create(
357332
body["output_schema"] = output_schema
358333
if fetch_config_dict:
359334
body["fetch_config"] = fetch_config_dict
360-
if llm_config_dict:
361-
body["llm_config"] = llm_config_dict
362335
return self._request("POST", "/monitor", json_body=body)
363336

364337
def monitor_list(self) -> Dict[str, Any]:
@@ -1308,9 +1281,6 @@ def smartscraper(
13081281
wait: Optional[int] = None,
13091282
scrolls: Optional[int] = None,
13101283
mock: Optional[bool] = None,
1311-
llm_model: Optional[str] = None,
1312-
llm_temperature: Optional[float] = None,
1313-
llm_max_tokens: Optional[int] = None,
13141284
) -> Dict[str, Any]:
13151285
"""
13161286
Extract structured data from a webpage using AI (API v2 POST /extract).
@@ -1328,9 +1298,6 @@ def smartscraper(
13281298
wait: Milliseconds to wait after page load (0-30000).
13291299
scrolls: Number of scrolls to perform (0-100).
13301300
mock: Use mock mode for testing.
1331-
llm_model: LLM model to use for extraction.
1332-
llm_temperature: Sampling temperature (0.0-2.0).
1333-
llm_max_tokens: Maximum tokens in the response.
13341301
"""
13351302
try:
13361303
api_key = get_api_key(ctx)
@@ -1358,14 +1325,12 @@ def smartscraper(
13581325
mode=mode, timeout=timeout, wait=wait, headers=headers,
13591326
cookies=cookies, country=country, scrolls=scrolls, mock=mock,
13601327
)
1361-
lc = client._llm_config(model=llm_model, temperature=llm_temperature, max_tokens=llm_max_tokens)
13621328

13631329
return client.smartscraper(
13641330
user_prompt=user_prompt,
13651331
website_url=website_url,
13661332
output_schema=normalized_schema,
13671333
fetch_config_dict=fc,
1368-
llm_config_dict=lc,
13691334
)
13701335
except Exception as e:
13711336
return {"error": str(e)}
@@ -1387,9 +1352,6 @@ def searchscraper(
13871352
),
13881353
]
13891354
] = None,
1390-
llm_model: Optional[str] = None,
1391-
llm_temperature: Optional[float] = None,
1392-
llm_max_tokens: Optional[int] = None,
13931355
) -> Dict[str, Any]:
13941356
"""
13951357
AI-powered web search with structured data extraction (API v2 POST /search).
@@ -1398,9 +1360,6 @@ def searchscraper(
13981360
user_prompt: Search query or natural language instructions.
13991361
num_results: Number of search results (3-20, default 5).
14001362
output_schema: JSON schema (dict or JSON string) for structured output.
1401-
llm_model: LLM model to use for extraction.
1402-
llm_temperature: Sampling temperature (0.0-2.0).
1403-
llm_max_tokens: Maximum tokens in the response.
14041363
"""
14051364
try:
14061365
api_key = get_api_key(ctx)
@@ -1419,8 +1378,7 @@ def searchscraper(
14191378
except json.JSONDecodeError as e:
14201379
return {"error": f"Invalid JSON for output_schema: {e}"}
14211380

1422-
lc = client._llm_config(model=llm_model, temperature=llm_temperature, max_tokens=llm_max_tokens)
1423-
return client.searchscraper(user_prompt, num_results=num_results, output_schema=normalized_schema, llm_config_dict=lc)
1381+
return client.searchscraper(user_prompt, num_results=num_results, output_schema=normalized_schema)
14241382
except Exception as e:
14251383
return {"error": str(e)}
14261384

@@ -1608,9 +1566,6 @@ def monitor_create(
16081566
wait: Optional[int] = None,
16091567
scrolls: Optional[int] = None,
16101568
mock: Optional[bool] = None,
1611-
llm_model: Optional[str] = None,
1612-
llm_temperature: Optional[float] = None,
1613-
llm_max_tokens: Optional[int] = None,
16141569
) -> Dict[str, Any]:
16151570
"""
16161571
Create a scheduled monitor job (API v2 POST /monitor).
@@ -1629,9 +1584,6 @@ def monitor_create(
16291584
wait: Milliseconds to wait after page load (0-30000).
16301585
scrolls: Number of scrolls to perform (0-100).
16311586
mock: Use mock mode for testing.
1632-
llm_model: LLM model to use.
1633-
llm_temperature: Sampling temperature (0.0-2.0).
1634-
llm_max_tokens: Maximum tokens in the response.
16351587
"""
16361588
try:
16371589
api_key = get_api_key(ctx)
@@ -1653,10 +1605,9 @@ def monitor_create(
16531605
mode=mode, timeout=timeout, wait=wait, headers=headers,
16541606
cookies=cookies, country=country, scrolls=scrolls, mock=mock,
16551607
)
1656-
lc = client._llm_config(model=llm_model, temperature=llm_temperature, max_tokens=llm_max_tokens)
16571608
return client.monitor_create(
16581609
name=name, url=url, prompt=prompt, cron=cron,
1659-
output_schema=normalized_schema, fetch_config_dict=fc, llm_config_dict=lc,
1610+
output_schema=normalized_schema, fetch_config_dict=fc,
16601611
)
16611612
except Exception as e:
16621613
return {"error": str(e)}

0 commit comments

Comments (0)