If you would like to add custom/additional headers (user agents, cookies, etc.), simply add them as parameters to the request, with the “Wsa-” prefix.
Only use this feature to get customized results, do not use this feature to avoid blocks. Shifter's Scraping API has a powerful internal engine that takes care of everything for you.
On this page, you will find an example request used to scrape the URL http://httpbin.org/headers, which will mirror the headers sent.
GET https://scrape.shifter.io/v1?api_key=api_key&url=http%3A%2F%2Fhttpbin.org%2Fheaders
cURL NodeJS Python PHP Golang Java .NET Ruby
# GET through the Scraping API; "Wsa-" prefixed headers are forwarded
# to the target URL with the prefix stripped.
curl --request GET \
  --url "https://scrape.shifter.io/v1?api_key=api_key&url=http%3A%2F%2Fhttpbin.org%2Fheaders" \
  --header "Wsa-My-header: test" \
  --header "Wsa-User-Agent: potato"
// Example: send a GET request through the Scraping API with custom
// "Wsa-" prefixed headers (forwarded to the target with the prefix stripped).
const https = require("https");

const options = {
  "method": "GET",
  "hostname": "scrape.shifter.io",
  "port": null,
  "path": "/v1?api_key=api_key&url=http%3A%2F%2Fhttpbin.org%2Fheaders",
  "headers": {
    "Wsa-My-header": "test",
    "Wsa-User-Agent": "potato"
  }
};

const req = https.request(options, function (res) {
  const chunks = [];

  // Accumulate the streamed response body.
  res.on("data", function (chunk) {
    chunks.push(chunk);
  });

  res.on("end", function () {
    const body = Buffer.concat(chunks);
    console.log(body.toString());
  });
});

// Without an 'error' listener, a network failure crashes the process.
req.on("error", function (err) {
  console.error(err);
});

// BUG FIX: the original snippet never called end(), so the request
// was never actually sent.
req.end();
# Example: call the Scraping API with custom "Wsa-" prefixed headers.
import http.client

conn = http.client.HTTPSConnection("scrape.shifter.io")

# "Wsa-" prefixed headers are forwarded to the target URL with the
# prefix stripped (see the mirrored JSON response in this doc).
headers = {
    'Wsa-My-header': "test",
    'Wsa-User-Agent': "potato"
}

try:
    conn.request("GET", "/v1?api_key=api_key&url=http%3A%2F%2Fhttpbin.org%2Fheaders", headers=headers)
    res = conn.getresponse()
    data = res.read()
    print(data.decode("utf-8"))
finally:
    # FIX: close the connection so the underlying socket is not leaked.
    conn.close()
<?php
// Example: call the Scraping API with custom "Wsa-" prefixed headers via cURL.
$ch = curl_init("https://scrape.shifter.io/v1?api_key=api_key&url=http%3A%2F%2Fhttpbin.org%2Fheaders");

curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
curl_setopt($ch, CURLOPT_ENCODING, "");
curl_setopt($ch, CURLOPT_MAXREDIRS, 10);
curl_setopt($ch, CURLOPT_TIMEOUT, 30);
curl_setopt($ch, CURLOPT_HTTP_VERSION, CURL_HTTP_VERSION_1_1);
curl_setopt($ch, CURLOPT_CUSTOMREQUEST, "GET");

// "Wsa-" prefixed headers are forwarded to the target with the prefix stripped.
curl_setopt($ch, CURLOPT_HTTPHEADER, [
    "Wsa-My-header: test",
    "Wsa-User-Agent: potato",
]);

$response = curl_exec($ch);
$err = curl_error($ch);
curl_close($ch);

if ($err) {
    echo "cURL Error #:" . $err;
} else {
    echo $response;
}
package main

import (
	"fmt"
	"net/http"
	"io/ioutil"
)

// main sends a GET request through the Scraping API, adding custom
// "Wsa-" prefixed headers that are forwarded to the target URL.
func main() {
	url := "https://scrape.shifter.io/v1?api_key=api_key&url=http%3A%2F%2Fhttpbin.org%2Fheaders"

	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		fmt.Println(err)
		return
	}

	req.Header.Add("Wsa-My-header", "test")
	req.Header.Add("Wsa-User-Agent", "potato")

	// FIX: the original ignored this error; on failure res is nil and
	// the deferred res.Body.Close() would panic with a nil pointer.
	res, err := http.DefaultClient.Do(req)
	if err != nil {
		fmt.Println(err)
		return
	}
	defer res.Body.Close()

	body, err := ioutil.ReadAll(res.Body)
	if err != nil {
		fmt.Println(err)
		return
	}

	fmt.Println(res)
	fmt.Println(string(body))
}
// Example (Unirest): GET through the Scraping API with custom "Wsa-" headers.
// The API strips the "Wsa-" prefix before forwarding the header to the target
// (the mirrored response in this doc shows "My-Header: test").
HttpResponse<String> response = Unirest.get("https://scrape.shifter.io/v1?api_key=api_key&url=http%3A%2F%2Fhttpbin.org%2Fheaders")
.header("Wsa-My-header", "test")
.header("Wsa-User-Agent", "potato")
.asString();
// Example (RestSharp): GET through the Scraping API with custom "Wsa-" headers.
var client = new RestClient("https://scrape.shifter.io/v1?api_key=api_key&url=http%3A%2F%2Fhttpbin.org%2Fheaders");
var request = new RestRequest(Method.GET);
// "Wsa-" prefixed headers are forwarded to the target with the prefix stripped.
request.AddHeader("Wsa-My-header", "test");
request.AddHeader("Wsa-User-Agent", "potato");
IRestResponse response = client.Execute(request);
# Example: call the Scraping API with custom "Wsa-" prefixed headers.
require 'uri'
require 'net/http'
require 'openssl'

url = URI("https://scrape.shifter.io/v1?api_key=api_key&url=http%3A%2F%2Fhttpbin.org%2Fheaders")

http = Net::HTTP.new(url.host, url.port)
http.use_ssl = true
# SECURITY FIX: the original used VERIFY_NONE, which disables TLS
# certificate validation and exposes the API key to man-in-the-middle
# attacks. VERIFY_PEER validates the server certificate.
http.verify_mode = OpenSSL::SSL::VERIFY_PEER

request = Net::HTTP::Get.new(url)
# "Wsa-" prefixed headers are forwarded to the target with the prefix stripped.
request["Wsa-My-header"] = 'test'
request["Wsa-User-Agent"] = 'potato'

response = http.request(request)
puts response.read_body
Copy {
"headers": {
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
"Accept-Encoding": "gzip, deflate, br",
"Host": "httpbin.org",
"My-Header": "test",
"Upgrade-Insecure-Requests": "1",
"User-Agent": "potato",
"X-Amzn-Trace-Id": "Root=1-6267c064-44fa9993017cfcc32e41dfc1"
}
}