Customize JavaScript-Rendered Pages via ScraperAPI in NodeJS
Learn to scrape JavaScript-rendered pages using ScraperAPI in NodeJS. Enable headless browser rendering with render=true for dynamic content, SPAs, and JS-heavy sites.
If you are crawling a page that requires JavaScript to be rendered before the data you need appears, you can fetch it using a headless browser.
To render JavaScript, simply set render=true
and we will use a headless Google Chrome instance to fetch the page. This feature is available on all plans.
Pass the JavaScript rendering parameter within the URL:
import fetch from 'node-fetch';

// render=true tells ScraperAPI to load the page in a headless browser before returning it
fetch('https://api.scraperapi.com/?api_key=APIKEY&url=http://httpbin.org/ip&render=true')
  .then(response => {
    console.log(response)
  })
  .catch(error => {
    console.log(error)
  })
const axios = require('axios');

// Route the request through the ScraperAPI proxy port; render=true is appended to the proxy username
axios.get('http://httpbin.org/ip', {
  proxy: {
    protocol: 'http',
    host: 'proxy-server.scraperapi.com',
    port: 8001,
    auth: {
      username: 'scraperapi.render=true',
      password: 'APIKEY'
    }
  }
})
.then(response => {
  console.log(response)
})
.catch(error => {
  console.log(error)
});
const request = require('request-promise');

const options = {
  method: 'GET',
  url: 'http://httpbin.org/ip',
  // render=true is passed as part of the proxy username
  proxy: 'http://scraperapi.render=true:APIKEY@proxy-server.scraperapi.com:8001'
}

request(options)
  .then(response => {
    console.log(response)
  })
  .catch(error => {
    console.log(error)
  })
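Each of the snippets above logs the raw response object rather than the page contents. To work with the rendered HTML itself, read the response body; the following is a minimal node-fetch sketch (using the same APIKEY placeholder) that awaits response.text():

import fetch from 'node-fetch';

async function getRenderedPage() {
  // render=true asks ScraperAPI to load the page in a headless Chrome instance
  const response = await fetch('https://api.scraperapi.com/?api_key=APIKEY&url=http://httpbin.org/ip&render=true');
  const html = await response.text(); // rendered page source as a string
  console.log(html);
}

getRenderedPage().catch(error => console.log(error));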
Pass the parameter in the headers:
import fetch from 'node-fetch';
const headers = {
'Content-Type': 'application/json',
'x-sapi-api_key': '<YOUR_API_KEY>',
'x-sapi-render': 'true'
};
fetch('https://api.scraperapi.com?url=http://httpbin.org/ip', {
method: 'GET',
headers: headers
})
.then(response => {
console.log(response);
})
.catch(error => {
console.error('Error fetching data:', error);
});
const axios = require('axios');

const headers = {
  'x-sapi-render': 'true'
};

// Authentication goes through the proxy credentials; rendering is enabled via the header above
axios.get('http://httpbin.org/ip', {
  headers: headers,
  proxy: {
    protocol: 'http',
    host: 'proxy-server.scraperapi.com',
    port: 8001,
    auth: {
      username: 'scraperapi',
      password: '<YOUR_API_KEY>'
    }
  }
})
.then(response => {
  console.log(response)
})
.catch(error => {
  console.log(error)
});
const request = require('request-promise');
const headers = {
'x-sapi-render': 'true'
};
const options = {
method: 'GET',
headers: headers,
url: 'http://httpbin.org/ip',
proxy: 'http://scraperapi:<YOUR_API_KEY>@proxy-server.scraperapi.com:8001'
}
request(options)
.then(response => {
console.log(response)
})
.catch(error => {
console.log(error)
})
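With axios, the body of the response (the rendered HTML) is exposed as response.data. Below is a minimal async/await sketch of the header-based approach above, with <YOUR_API_KEY> as a placeholder:

const axios = require('axios');

async function scrapeRenderedPage() {
  // x-sapi-render: 'true' enables headless browser rendering for this request
  const response = await axios.get('https://api.scraperapi.com', {
    params: { url: 'http://httpbin.org/ip' },
    headers: {
      'x-sapi-api_key': '<YOUR_API_KEY>',
      'x-sapi-render': 'true'
    }
  });
  console.log(response.data); // rendered body of the target page
}

scrapeRenderedPage().catch(error => console.log(error));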