Created
April 30, 2026 09:23
-
-
Save manthankool/81cd61b6d0374ebeedf15b0d485e4dd7 to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// Dependencies and run configuration.
const axios = require('axios');
const fs = require('fs');

// Prefer the environment variable so the API key never has to live in source
// control; the literal fallback preserves the original quick-start behavior.
const API_KEY = process.env.SCRAPINGDOG_API_KEY || 'your-api-key';
const QUERY = 'scraping';
// ─── FUNCTION 1: Scrape a single page ───────────────────────────────────────
/**
 * Fetch a single page of Google results for `query` from the Scrapingdog API.
 *
 * @param {string} query - search term to look up
 * @returns {Promise<Array<{rank, title, link, snippet}>>} trimmed organic results
 */
async function scrapeGoogleSearch(query) {
  const params = {
    api_key: API_KEY,
    query,
    results: 10,
    country: 'us',
  };
  const { data } = await axios.get('https://api.scrapingdog.com/google', { params });
  // Keep only the four fields downstream code (CSV export) cares about.
  return data.organic_results.map(({ rank, title, link, snippet }) => ({
    rank,
    title,
    link,
    snippet,
  }));
}
// ─── FUNCTION 2: Scrape multiple pages ──────────────────────────────────────
/**
 * Scrape `pages` consecutive result pages for `query` and flatten them into
 * one array, preserving page order.
 *
 * The per-page requests are independent of each other, so they are issued
 * concurrently via Promise.all instead of being awaited one at a time;
 * Promise.all resolves in input order, so result ordering is unchanged.
 *
 * @param {string} query - search term to look up
 * @param {number} [pages=3] - number of result pages to fetch (0-indexed API pages)
 * @returns {Promise<Array<{rank, title, link, snippet}>>} all pages' results, in page order
 */
async function scrapeMultiplePages(query, pages = 3) {
  const requests = Array.from({ length: pages }, (_, page) => {
    console.log(`Scraping page ${page + 1}...`);
    return axios.get('https://api.scrapingdog.com/google', {
      params: {
        api_key: API_KEY,
        query: query,
        results: 10,
        country: 'us',
        page: page,
      },
    });
  });
  const responses = await Promise.all(requests);
  // Flatten each page's organic results into a single list.
  return responses.flatMap((response) =>
    response.data.organic_results.map(({ rank, title, link, snippet }) => ({
      rank,
      title,
      link,
      snippet,
    }))
  );
}
// ─── FUNCTION 3: Convert results to CSV ─────────────────────────────────────
/**
 * Serialize an array of result objects into CSV text.
 *
 * Fixes over the original: a missing title/snippet no longer throws
 * (TypeError on `.replace` of undefined — fields are coerced to '' first),
 * and rank/link values containing a comma, quote, or newline are now quoted
 * instead of silently corrupting the row (RFC 4180 quoting).
 * Title and snippet stay always-quoted, as before.
 *
 * @param {Array<{rank, title, link, snippet}>} results
 * @returns {string} CSV text: header row plus one row per result, joined by '\n'
 */
function convertToCSV(results) {
  // Always wrap in quotes, doubling any embedded quotes (RFC 4180).
  const quote = (value) => `"${String(value ?? '').replace(/"/g, '""')}"`;
  // Quote only when the raw value would break the row structure.
  const quoteIfNeeded = (value) => {
    const s = String(value ?? '');
    return /[",\n\r]/.test(s) ? quote(s) : s;
  };
  const headers = ['Rank', 'Title', 'Link', 'Snippet'];
  const rows = results.map((r) =>
    [quoteIfNeeded(r.rank), quote(r.title), quoteIfNeeded(r.link), quote(r.snippet)].join(',')
  );
  return [headers.join(','), ...rows].join('\n');
}
// ─── FUNCTION 4: Save results to CSV file ───────────────────────────────────
/**
 * Write the scraped results to `<query>_results.csv` in the current directory.
 *
 * @param {Array<{rank, title, link, snippet}>} results - rows to persist
 * @param {string} query - search term, used to derive the filename
 */
function saveToCSV(results, query) {
  const csv = convertToCSV(results);
  // Spaces in the query become underscores so the filename is shell-friendly.
  const filename = `${query.replace(/\s+/g, '_')}_results.csv`;
  fs.writeFileSync(filename, csv, 'utf8');
  // Bug fix: the original logged the literal text "$(unknown)" — a broken
  // interpolation — instead of the actual filename.
  console.log(`Saved ${results.length} results to ${filename}`);
}
// ─── MAIN: Run everything ────────────────────────────────────────────────────
/**
 * Demo driver: scrape one page, then three pages, then persist the
 * multi-page results to a CSV file.
 */
async function main() {
  // Section headers share one format; emit them through a single helper.
  const banner = (label) => console.log(`\n--- ${label} ---`);

  // Step 1: Scrape single page
  banner('Single Page Results');
  const firstPage = await scrapeGoogleSearch(QUERY);
  console.log(JSON.stringify(firstPage, null, 2));

  // Step 2: Scrape multiple pages
  banner('Multiple Pages Results');
  const combined = await scrapeMultiplePages(QUERY, 3);
  console.log(`Total results scraped: ${combined.length}`);

  // Step 3: Save to CSV
  banner('Saving to CSV');
  saveToCSV(combined, QUERY);
}

main().catch(console.error);
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment