Skip to content

Commit 1ba88db

Browse files
VinciGit00 and claude committed
docs: update JS SDK docs to match scrapegraph-js PR #13
Rewrite all JavaScript code examples to match the new v2 SDK API from ScrapeGraphAI/scrapegraph-js#13. Key changes: - Replace factory pattern (scrapegraphai({ apiKey })) with direct imports - All functions use (apiKey, params) signature - scrape() uses formats array instead of single format string - Return type is ApiResult<T> with status check, not throw-on-error - crawl.status() renamed to crawl.get(), crawl.delete() added - monitor.create() uses formats array, not prompt - Restore generateSchema and checkHealth in docs - Schema params use JSON objects, not Zod instances - history is now history.list() and history.get() Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
1 parent c2f904c commit 1ba88db

File tree

24 files changed

+620
-499
lines changed

24 files changed

+620
-499
lines changed

api-reference/errors.mdx

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -139,17 +139,17 @@ except APIError as e:
139139
```
140140

141141
```javascript JavaScript
142-
import { scrapegraphai } from 'scrapegraph-js';
142+
import { extract } from 'scrapegraph-js';
143143

144-
const sgai = scrapegraphai({ apiKey: 'your-api-key' });
144+
const result = await extract('your-api-key', {
145+
url: 'https://example.com',
146+
prompt: 'Extract data',
147+
});
145148

146-
try {
147-
const { data } = await sgai.extract('https://example.com', {
148-
prompt: 'Extract data',
149-
});
150-
console.log('Data:', data);
151-
} catch (error) {
152-
console.error('Error:', error.message);
149+
if (result.status === 'success') {
150+
console.log('Data:', result.data);
151+
} else {
152+
console.error('Error:', result.error);
153153
}
154154
```
155155
</CodeGroup>

cookbook/examples/pagination.mdx

Lines changed: 8 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -349,20 +349,17 @@ if __name__ == "__main__":
349349
## JavaScript SDK Example
350350

351351
```javascript
352-
import { scrapegraphai } from 'scrapegraph-js';
352+
import { extract } from 'scrapegraph-js';
353353
import 'dotenv/config';
354354

355-
const sgai = scrapegraphai({ apiKey: process.env.SGAI_APIKEY });
355+
const result = await extract(process.env.SGAI_APIKEY, {
356+
url: 'https://www.amazon.in/s?k=tv&crid=1TEF1ZFVLU8R8&sprefix=t%2Caps%2C390&ref=nb_sb_noss_2',
357+
prompt: 'Extract all product info including name, price, rating, and image_url',
358+
});
356359

357-
const { data } = await sgai.extract(
358-
'https://www.amazon.in/s?k=tv&crid=1TEF1ZFVLU8R8&sprefix=t%2Caps%2C390&ref=nb_sb_noss_2',
359-
{
360-
prompt: 'Extract all product info including name, price, rating, and image_url',
361-
totalPages: 3,
362-
}
363-
);
364-
365-
console.log('Response:', JSON.stringify(data, null, 2));
360+
if (result.status === 'success') {
361+
console.log('Response:', JSON.stringify(result.data?.json, null, 2));
362+
}
366363
```
367364
368365
## Example Output

developer-guides/llm-sdks-and-frameworks/anthropic.mdx

Lines changed: 12 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -27,16 +27,17 @@ If using Node < 20, install `dotenv` and add `import 'dotenv/config'` to your co
2727
This example demonstrates a simple workflow: scrape a website and summarize the content using Claude.
2828

2929
```typescript
30-
import { scrapegraphai } from 'scrapegraph-js';
30+
import { extract } from 'scrapegraph-js';
3131
import Anthropic from '@anthropic-ai/sdk';
3232

33-
const sgai = scrapegraphai({ apiKey: process.env.SGAI_APIKEY });
3433
const anthropic = new Anthropic({ apiKey: process.env.ANTHROPIC_API_KEY });
3534

36-
const { data } = await sgai.extract('https://scrapegraphai.com', {
35+
const result = await extract(process.env.SGAI_APIKEY!, {
36+
url: 'https://scrapegraphai.com',
3737
prompt: 'Extract all content from this page',
3838
});
3939

40+
const data = result.data?.json;
4041
console.log('Scraped content length:', JSON.stringify(data).length);
4142

4243
const message = await anthropic.messages.create({
@@ -55,12 +56,11 @@ console.log('Response:', message);
5556
This example shows how to use Claude's tool use feature to let the model decide when to scrape websites based on user requests.
5657

5758
```typescript
58-
import { scrapegraphai } from 'scrapegraph-js';
59+
import { extract } from 'scrapegraph-js';
5960
import { Anthropic } from '@anthropic-ai/sdk';
6061
import { z } from 'zod';
6162
import { zodToJsonSchema } from 'zod-to-json-schema';
6263

63-
const sgai = scrapegraphai({ apiKey: process.env.SGAI_APIKEY });
6464
const anthropic = new Anthropic({
6565
apiKey: process.env.ANTHROPIC_API_KEY
6666
});
@@ -90,10 +90,12 @@ if (toolUse && toolUse.type === 'tool_use') {
9090
const input = toolUse.input as { url: string };
9191
console.log(`Calling tool: ${toolUse.name} | URL: ${input.url}`);
9292

93-
const { data } = await sgai.extract(input.url, {
93+
const result = await extract(process.env.SGAI_APIKEY!, {
94+
url: input.url,
9495
prompt: 'Extract all content from this page',
9596
});
9697

98+
const data = result.data?.json;
9799
console.log(`Scraped content preview: ${JSON.stringify(data)?.substring(0, 300)}...`);
98100
// Continue with the conversation or process the scraped content as needed
99101
}
@@ -104,11 +106,10 @@ if (toolUse && toolUse.type === 'tool_use') {
104106
This example demonstrates how to use Claude to extract structured data from scraped website content.
105107

106108
```typescript
107-
import { scrapegraphai } from 'scrapegraph-js';
109+
import { extract } from 'scrapegraph-js';
108110
import Anthropic from '@anthropic-ai/sdk';
109111
import { z } from 'zod';
110112

111-
const sgai = scrapegraphai({ apiKey: process.env.SGAI_APIKEY });
112113
const anthropic = new Anthropic({ apiKey: process.env.ANTHROPIC_API_KEY });
113114

114115
const CompanyInfoSchema = z.object({
@@ -117,9 +118,11 @@ const CompanyInfoSchema = z.object({
117118
description: z.string().optional()
118119
});
119120

120-
const { data } = await sgai.extract('https://stripe.com', {
121+
const result = await extract(process.env.SGAI_APIKEY!, {
122+
url: 'https://stripe.com',
121123
prompt: 'Extract all content from this page',
122124
});
125+
const data = result.data?.json;
123126

124127
const prompt = `Extract company information from this website content.
125128

developer-guides/llm-sdks-and-frameworks/gemini.mdx

Lines changed: 12 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -27,15 +27,16 @@ If using Node < 20, install `dotenv` and add `import 'dotenv/config'` to your co
2727
This example demonstrates a simple workflow: scrape a website and summarize the content using Gemini.
2828

2929
```typescript
30-
import { scrapegraphai } from 'scrapegraph-js';
30+
import { extract } from 'scrapegraph-js';
3131
import { GoogleGenAI } from '@google/genai';
3232

33-
const sgai = scrapegraphai({ apiKey: process.env.SGAI_APIKEY });
3433
const ai = new GoogleGenAI({ apiKey: process.env.GEMINI_API_KEY });
3534

36-
const { data } = await sgai.extract('https://scrapegraphai.com', {
35+
const result = await extract(process.env.SGAI_APIKEY!, {
36+
url: 'https://scrapegraphai.com',
3737
prompt: 'Extract all content from this page',
3838
});
39+
const data = result.data?.json;
3940

4041
console.log('Scraped content length:', JSON.stringify(data).length);
4142

@@ -52,15 +53,16 @@ console.log('Summary:', response.text);
5253
This example shows how to analyze website content using Gemini's multi-turn conversation capabilities.
5354

5455
```typescript
55-
import { scrapegraphai } from 'scrapegraph-js';
56+
import { extract } from 'scrapegraph-js';
5657
import { GoogleGenAI } from '@google/genai';
5758

58-
const sgai = scrapegraphai({ apiKey: process.env.SGAI_APIKEY });
5959
const ai = new GoogleGenAI({ apiKey: process.env.GEMINI_API_KEY });
6060

61-
const { data } = await sgai.extract('https://news.ycombinator.com/', {
61+
const result = await extract(process.env.SGAI_APIKEY!, {
62+
url: 'https://news.ycombinator.com/',
6263
prompt: 'Extract all content from this page',
6364
});
65+
const data = result.data?.json;
6466

6567
console.log('Scraped content length:', JSON.stringify(data).length);
6668

@@ -86,15 +88,16 @@ console.log('4th and 5th Stories:', result2.text);
8688
This example demonstrates how to extract structured data using Gemini's JSON mode from scraped website content.
8789

8890
```typescript
89-
import { scrapegraphai } from 'scrapegraph-js';
91+
import { extract } from 'scrapegraph-js';
9092
import { GoogleGenAI, Type } from '@google/genai';
9193

92-
const sgai = scrapegraphai({ apiKey: process.env.SGAI_APIKEY });
9394
const ai = new GoogleGenAI({ apiKey: process.env.GEMINI_API_KEY });
9495

95-
const { data } = await sgai.extract('https://stripe.com', {
96+
const result = await extract(process.env.SGAI_APIKEY!, {
97+
url: 'https://stripe.com',
9698
prompt: 'Extract all content from this page',
9799
});
100+
const data = result.data?.json;
98101

99102
console.log('Scraped content length:', JSON.stringify(data).length);
100103

integrations/vercel_ai.mdx

Lines changed: 22 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -57,21 +57,7 @@ The ScrapeGraph SDK can be used like any other tool. See [Vercel AI tool calling
5757
import { z } from "zod";
5858
import { generateText, tool } from "ai";
5959
import { openai } from "@ai-sdk/openai";
60-
import { scrapegraphai } from "scrapegraph-js";
61-
62-
const sgai = scrapegraphai({ apiKey: process.env.SGAI_API_KEY });
63-
64-
const ArticleSchema = z.object({
65-
title: z.string().describe("The article title"),
66-
author: z.string().describe("The author's name"),
67-
publishDate: z.string().describe("Article publication date"),
68-
content: z.string().describe("Main article content"),
69-
category: z.string().describe("Article category"),
70-
});
71-
72-
const ArticlesArraySchema = z
73-
.array(ArticleSchema)
74-
.describe("Array of articles");
60+
import { extract } from "scrapegraph-js";
7561

7662
const result = await generateText({
7763
model: openai("gpt-4.1-mini"),
@@ -82,11 +68,29 @@ const result = await generateText({
8268
url: z.string().describe("The exact URL."),
8369
}),
8470
execute: async ({ url }) => {
85-
const response = await sgai.extract(url, {
71+
const response = await extract(process.env.SGAI_API_KEY!, {
72+
url,
8673
prompt: "Extract the article information",
87-
schema: ArticlesArraySchema,
74+
schema: {
75+
type: "object",
76+
properties: {
77+
articles: {
78+
type: "array",
79+
items: {
80+
type: "object",
81+
properties: {
82+
title: { type: "string" },
83+
author: { type: "string" },
84+
publishDate: { type: "string" },
85+
content: { type: "string" },
86+
category: { type: "string" },
87+
},
88+
},
89+
},
90+
},
91+
},
8892
});
89-
return response.data;
93+
return response.data?.json;
9094
},
9195
}),
9296
},

knowledge-base/account/api-keys.mdx

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -24,10 +24,12 @@ client = Client(api_key="your-api-key")
2424
```
2525

2626
```javascript JavaScript
27-
import { scrapegraphai } from "scrapegraph-js";
27+
import { extract } from "scrapegraph-js";
2828

29-
const sgai = scrapegraphai({ apiKey: "your-api-key" });
30-
const { data } = await sgai.extract(url, { prompt });
29+
const result = await extract("your-api-key", {
30+
url: "https://example.com",
31+
prompt: "Extract the title",
32+
});
3133
```
3234

3335
```bash cURL

knowledge-base/ai-tools/cursor.mdx

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -53,12 +53,11 @@ Ask Cursor:
5353
> Write a JavaScript function using scrapegraph-js that extracts product details from an e-commerce page.
5454
5555
```javascript
56-
import { scrapegraphai } from "scrapegraph-js";
56+
import { extract } from "scrapegraph-js";
5757

58-
const sgai = scrapegraphai({ apiKey: "your-api-key" });
59-
60-
async function extractProduct(url) {
61-
return await sgai.extract(url, {
58+
async function extractProduct(apiKey, url) {
59+
return await extract(apiKey, {
60+
url,
6261
prompt: "Extract the product name, price, and availability",
6362
});
6463
}

knowledge-base/scraping/custom-headers.mdx

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -26,10 +26,10 @@ response = client.smartscraper(
2626
```
2727

2828
```javascript
29-
import { scrapegraphai } from "scrapegraph-js";
29+
import { extract } from "scrapegraph-js";
3030

31-
const sgai = scrapegraphai({ apiKey: "your-api-key" });
32-
const { data } = await sgai.extract("https://example.com/protected-page", {
31+
const result = await extract("your-api-key", {
32+
url: "https://example.com/protected-page",
3333
prompt: "Extract the main content",
3434
fetchConfig: {
3535
headers: {

knowledge-base/scraping/javascript-rendering.mdx

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -26,10 +26,10 @@ response = client.smartscraper(
2626
```
2727

2828
```javascript
29-
import { scrapegraphai } from "scrapegraph-js";
29+
import { extract } from "scrapegraph-js";
3030

31-
const sgai = scrapegraphai({ apiKey: "your-api-key" });
32-
const { data } = await sgai.extract("https://example.com/products", {
31+
const result = await extract("your-api-key", {
32+
url: "https://example.com/products",
3333
prompt: "Extract all product names and prices",
3434
fetchConfig: { wait: 2000 },
3535
});

knowledge-base/scraping/pagination.mdx

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -43,17 +43,19 @@ print(f"Total products extracted: {len(all_results)}")
4343
```
4444

4545
```javascript
46-
import { scrapegraphai } from "scrapegraph-js";
46+
import { extract } from "scrapegraph-js";
4747

48-
const sgai = scrapegraphai({ apiKey: "your-api-key" });
4948
const allResults = [];
5049

5150
for (let page = 1; page <= 5; page++) {
5251
const url = `https://example.com/products?page=${page}`;
53-
const { data } = await sgai.extract(url, {
52+
const result = await extract("your-api-key", {
53+
url,
5454
prompt: "Extract all product names and prices on this page",
5555
});
56-
allResults.push(...(data?.products ?? []));
56+
if (result.status === "success") {
57+
allResults.push(...(result.data?.json?.products ?? []));
58+
}
5759
}
5860
```
5961

0 commit comments

Comments (0)