Getting started
ScrapingBee is a simple API that lets you extract the HTML of any website with a single API call.
If you want to use our custom options, such as JavaScript rendering or our Premium Proxies, take a look at our full documentation.
To get your API key, simply create an account here.
Of course, don't forget to replace "YOUR-URL" with the URL of the page you want to scrape.
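If the URL of the page you want to scrape contains query parameters or other special characters, percent-encode it before embedding it in the API call. Clients that take the URL as a separate parameter (such as the Python library or Axios below) typically handle this for you; when you build the query string by hand, as in the cURL, Java, Ruby, PHP, and Go examples, encode it first. A minimal sketch in Python (the target URL is just an illustration):
# Percent-encode a target URL before embedding it in the API call
from urllib.parse import quote_plus

# Illustrative target URL that contains a query string of its own
target = 'https://example.com/search?q=scraping+bees&page=2'

api_call = ('https://app.scrapingbee.com/api/v1/'
            '?api_key=YOUR-API-KEY'
            '&url=' + quote_plus(target))
print(api_call)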
cURL
curl "https://app.scrapingbee.com/api/v1/?api_key=YOUR-API-KEY&url=YOUR-URL"
Python
# Install the Python ScrapingBee library:
# pip install scrapingbee
from scrapingbee import ScrapingBeeClient
client = ScrapingBeeClient(api_key='YOUR-API-KEY')
response = client.get('YOUR-URL')
print('Response HTTP Status Code: ', response.status_code)
print('Response HTTP Response Body: ', response.content)
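To use custom options such as JavaScript rendering or our Premium Proxies from the Python library, pass them through the get() method's params argument. A short sketch, assuming the render_js and premium_proxy parameters described in the full documentation:
from scrapingbee import ScrapingBeeClient

client = ScrapingBeeClient(api_key='YOUR-API-KEY')

# render_js toggles headless-browser rendering; premium_proxy routes the
# request through a premium proxy (both described in the full documentation)
response = client.get('YOUR-URL', params={
    'render_js': 'true',
    'premium_proxy': 'true',
})
print('Response HTTP Status Code: ', response.status_code)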
Node.js
// Install the Axios HTTP library:
// npm install axios
const axios = require('axios');

axios.get('https://app.scrapingbee.com/api/v1', {
    params: {
        'api_key': 'YOUR-API-KEY',
        'url': 'YOUR-URL',
    }
}).then(function (response) {
    // handle success: print the returned HTML
    console.log(response.data);
}).catch(function (error) {
    // handle error
    console.log(error);
});
Java
import java.io.IOException;
import org.apache.http.client.fluent.*;

public class SendRequest {
    public static void main(String[] args) {
        sendRequest();
    }

    private static void sendRequest() {
        // Classic GET request
        try {
            // Create the request, execute it and return its content
            Content content = Request.Get("https://app.scrapingbee.com/api/v1/?api_key=YOUR-API-KEY&url=YOUR-URL")
                    .execute().returnContent();
            // Print the response body
            System.out.println(content);
        } catch (IOException e) {
            System.out.println(e);
        }
    }
}
Ruby
require 'net/http'
require 'net/https'

# Classic GET request
def send_request
  uri = URI('https://app.scrapingbee.com/api/v1/?api_key=YOUR-API-KEY&url=YOUR-URL')

  # Create the HTTPS client
  http = Net::HTTP.new(uri.host, uri.port)
  http.use_ssl = true
  http.verify_mode = OpenSSL::SSL::VERIFY_PEER

  # Create and send the request
  req = Net::HTTP::Get.new(uri)
  res = http.request(req)

  puts "Response HTTP Status Code: #{res.code}"
  puts "Response HTTP Response Body: #{res.body}"
rescue StandardError => e
  puts "HTTP Request failed (#{e.message})"
end

send_request
PHP
<?php
// Get a cURL resource
$ch = curl_init();
// Set the URL
curl_setopt($ch, CURLOPT_URL, 'https://app.scrapingbee.com/api/v1/?api_key=YOUR-API-KEY&url=YOUR-URL');
// Use a GET request
curl_setopt($ch, CURLOPT_CUSTOMREQUEST, 'GET');
// Return the transfer as a string instead of printing it
curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
// Send the request and save the response to $response
$response = curl_exec($ch);
// Stop if the request failed
if ($response === false) {
    die('Error: "' . curl_error($ch) . '" - Code: ' . curl_errno($ch));
}
echo 'HTTP Status Code: ' . curl_getinfo($ch, CURLINFO_HTTP_CODE) . PHP_EOL;
echo 'Response Body: ' . $response . PHP_EOL;
// Close the cURL resource to free up system resources
curl_close($ch);
?>
Go
package main

import (
    "fmt"
    "io/ioutil"
    "net/http"
)

func sendClassic() {
    // Create the HTTP client
    client := &http.Client{}

    // Create the request
    req, err := http.NewRequest("GET", "https://app.scrapingbee.com/api/v1/?api_key=YOUR-API-KEY&url=YOUR-URL", nil)
    if err != nil {
        fmt.Println("Failure : ", err)
        return
    }

    // Send the request
    resp, err := client.Do(req)
    if err != nil {
        fmt.Println("Failure : ", err)
        return
    }
    defer resp.Body.Close()

    // Read the response body
    respBody, err := ioutil.ReadAll(resp.Body)
    if err != nil {
        fmt.Println("Failure : ", err)
        return
    }

    // Display the results
    fmt.Println("response Status : ", resp.Status)
    fmt.Println("response Headers : ", resp.Header)
    fmt.Println("response Body : ", string(respBody))
}

func main() {
    sendClassic()
}
Updated on: 15/09/2021