Proxies

Shifter's Scraping API draws on a pool of millions of IP addresses worldwide, making your requests extremely difficult to block. We maintain two separate pools: one of private data center IPs and one of residential and mobile IPs.

Across both data center and residential proxies, Shifter's Web Scraping API supports more than 195 geolocations worldwide from which your scraping requests can be sent.

Specify the proxy type for your request with the proxy_type parameter: proxy_type=datacenter for data center proxies, or proxy_type=residential for residential proxies.
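
For example, here is a minimal sketch using Python's requests library (an assumption; any HTTP client works) that passes api_key, url, and proxy_type as query parameters rather than as a pre-encoded query string:

import requests

# Sketch only: "api_key" below is a placeholder for your actual API key.
params = {
    "api_key": "api_key",
    "url": "https://httpbin.org/get",
    "proxy_type": "datacenter",  # or "residential"
}

# requests URL-encodes the target URL in the query string for you.
response = requests.get("https://scrape.shifter.io/v1", params=params)
print(response.text)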

What are Data Center Proxies?

Data center proxies are proxies that are not affiliated with an Internet Service Provider (ISP). They come from secondary corporations and provide you with completely private IP authentication and anonymity.

However, data center proxies usually come from cloud service providers and are shared by many users at the same time. Because these IPs are not registered to ISPs, some targets may have already flagged them and may apply precautionary measures against them. Keep in mind, however, that this is not the case with Shifter's Scraping API data center proxies.

All of Shifter's Web Scraping API data center proxies are private proxies that ensure little to no IP blacklisting.

What are Residential Proxies?

A residential proxy is an IP address that an ISP (Internet Service Provider) assigns to a homeowner. It is a real IP address attached to a physical location: whenever you move to a new home and set up your internet connection, your ISP assigns you such an IP address.

While these are the most common proxies used on the internet, they are also much more likely to be blocked on an IP basis when attempting to scrape data.

The main difference between data center and residential proxies lies in the source of the IP. All other differences stem from the differences between household machines and servers.

Proxy examples

Datacenter proxies

Datacenter proxies can be selected by passing the parameter proxy_type=datacenter.

GET https://scrape.shifter.io/v1?api_key=api_key&url=https://httpbin.org/get&proxy_type=datacenter

⇡ Input

cURL
curl --request GET --url "https://scrape.shifter.io/v1?api_key=api_key&url=https%3A%2F%2Fhttpbin.org%2Fget&proxy_type=datacenter"

Node.js
const http = require("https");

const options = {
  "method": "GET",
  "hostname": "scrape.shifter.io",
  "port": null,
  "path": "/v1?api_key=api_key&url=https%3A%2F%2Fhttpbin.org%2Fget&proxy_type=datacenter",
  "headers": {}
};

const req = http.request(options, function (res) {
  const chunks = [];

  res.on("data", function (chunk) {
    chunks.push(chunk);
  });

  res.on("end", function () {
    const body = Buffer.concat(chunks);
    console.log(body.toString());
  });
});

req.end();

Python
import http.client

conn = http.client.HTTPSConnection("scrape.shifter.io")

conn.request("GET", "/v1?api_key=api_key&url=https%3A%2F%2Fhttpbin.org%2Fget&proxy_type=datacenter")

res = conn.getresponse()
data = res.read()

print(data.decode("utf-8"))

PHP
<?php

$curl = curl_init();

curl_setopt_array($curl, [
  CURLOPT_URL => "https://scrape.shifter.io/v1?api_key=api_key&url=https%3A%2F%2Fhttpbin.org%2Fget&proxy_type=datacenter",
  CURLOPT_RETURNTRANSFER => true,
  CURLOPT_ENCODING => "",
  CURLOPT_MAXREDIRS => 10,
  CURLOPT_TIMEOUT => 30,
  CURLOPT_HTTP_VERSION => CURL_HTTP_VERSION_1_1,
  CURLOPT_CUSTOMREQUEST => "GET",
]);

$response = curl_exec($curl);
$err = curl_error($curl);

curl_close($curl);

if ($err) {
  echo "cURL Error #:" . $err;
} else {
  echo $response;
}

Go
package main

import (
	"fmt"
	"net/http"
	"io/ioutil"
)

func main() {

	url := "https://scrape.shifter.io/v1?api_key=api_key&url=https%3A%2F%2Fhttpbin.org%2Fget&proxy_type=datacenter"

	req, _ := http.NewRequest("GET", url, nil)

	res, _ := http.DefaultClient.Do(req)

	defer res.Body.Close()
	body, _ := ioutil.ReadAll(res.Body)

	fmt.Println(res)
	fmt.Println(string(body))

}

Java
HttpResponse<String> response = Unirest.get("https://scrape.shifter.io/v1?api_key=api_key&url=https%3A%2F%2Fhttpbin.org%2Fget&proxy_type=datacenter")
  .asString();

C#
var client = new RestClient("https://scrape.shifter.io/v1?api_key=api_key&url=https%3A%2F%2Fhttpbin.org%2Fget&proxy_type=datacenter");
var request = new RestRequest(Method.GET);
IRestResponse response = client.Execute(request);

Ruby
require 'uri'
require 'net/http'
require 'openssl'

url = URI("https://scrape.shifter.io/v1?api_key=api_key&url=https%3A%2F%2Fhttpbin.org%2Fget&proxy_type=datacenter")

http = Net::HTTP.new(url.host, url.port)
http.use_ssl = true
http.verify_mode = OpenSSL::SSL::VERIFY_NONE

request = Net::HTTP::Get.new(url)

response = http.request(request)
puts response.read_body

⇣ Output

{
    "args": {},
    "headers": {
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
        "Accept-Encoding": "gzip, deflate, br",
        "Host": "httpbin.org",
        "Upgrade-Insecure-Requests": "1",
        "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36",
        "X-Amzn-Trace-Id": "Root=1-6267d94c-6139009e7aa96c016119654f"
    },
    "origin": "5.153.235.233",
    "url": "https://httpbin.org/get"
}

Residential proxies

Residential proxies can be selected by passing the parameter proxy_type=residential.

GET https://scrape.shifter.io/v1?api_key=api_key&url=https://httpbin.org/get&proxy_type=residential

⇡ Input

cURL
curl --request GET --url "https://scrape.shifter.io/v1?api_key=api_key&url=https%3A%2F%2Fhttpbin.org%2Fget&proxy_type=residential"

Node.js
const http = require("https");

const options = {
  "method": "GET",
  "hostname": "scrape.shifter.io",
  "port": null,
  "path": "/v1?api_key=api_key&url=https%3A%2F%2Fhttpbin.org%2Fget&proxy_type=residential",
  "headers": {}
};

const req = http.request(options, function (res) {
  const chunks = [];

  res.on("data", function (chunk) {
    chunks.push(chunk);
  });

  res.on("end", function () {
    const body = Buffer.concat(chunks);
    console.log(body.toString());
  });
});

req.end();

Python
import http.client

conn = http.client.HTTPSConnection("scrape.shifter.io")

conn.request("GET", "/v1?api_key=api_key&url=https%3A%2F%2Fhttpbin.org%2Fget&proxy_type=residential")

res = conn.getresponse()
data = res.read()

print(data.decode("utf-8"))

PHP
<?php

$curl = curl_init();

curl_setopt_array($curl, [
  CURLOPT_URL => "https://scrape.shifter.io/v1?api_key=api_key&url=https%3A%2F%2Fhttpbin.org%2Fget&proxy_type=residential",
  CURLOPT_RETURNTRANSFER => true,
  CURLOPT_ENCODING => "",
  CURLOPT_MAXREDIRS => 10,
  CURLOPT_TIMEOUT => 30,
  CURLOPT_HTTP_VERSION => CURL_HTTP_VERSION_1_1,
  CURLOPT_CUSTOMREQUEST => "GET",
]);

$response = curl_exec($curl);
$err = curl_error($curl);

curl_close($curl);

if ($err) {
  echo "cURL Error #:" . $err;
} else {
  echo $response;
}

Go
package main

import (
	"fmt"
	"net/http"
	"io/ioutil"
)

func main() {

	url := "https://scrape.shifter.io/v1?api_key=api_key&url=https%3A%2F%2Fhttpbin.org%2Fget&proxy_type=residential"

	req, _ := http.NewRequest("GET", url, nil)

	res, _ := http.DefaultClient.Do(req)

	defer res.Body.Close()
	body, _ := ioutil.ReadAll(res.Body)

	fmt.Println(res)
	fmt.Println(string(body))

}

Java
HttpResponse<String> response = Unirest.get("https://scrape.shifter.io/v1?api_key=api_key&url=https%3A%2F%2Fhttpbin.org%2Fget&proxy_type=residential")
  .asString();

C#
var client = new RestClient("https://scrape.shifter.io/v1?api_key=api_key&url=https%3A%2F%2Fhttpbin.org%2Fget&proxy_type=residential");
var request = new RestRequest(Method.GET);
IRestResponse response = client.Execute(request);

Ruby
require 'uri'
require 'net/http'
require 'openssl'

url = URI("https://scrape.shifter.io/v1?api_key=api_key&url=https%3A%2F%2Fhttpbin.org%2Fget&proxy_type=residential")

http = Net::HTTP.new(url.host, url.port)
http.use_ssl = true
http.verify_mode = OpenSSL::SSL::VERIFY_NONE

request = Net::HTTP::Get.new(url)

response = http.request(request)
puts response.read_body

⇣ Output

{
    "args": {},
    "headers": {
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
        "Accept-Encoding": "gzip, deflate, br",
        "Host": "httpbin.org",
        "Upgrade-Insecure-Requests": "1",
        "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/99.0.4844.82 Safari/537.36",
        "X-Amzn-Trace-Id": "Root=1-6267d9e9-7b7217ea171b487901c4bfab"
    },
    "origin": "71.167.129.216",
    "url": "https://httpbin.org/get"
}
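
Since https://httpbin.org/get echoes the caller's IP in its origin field, you can confirm which pool a request went through by comparing that field across proxy types. A minimal sketch, again assuming Python's requests library:

import requests

for proxy_type in ("datacenter", "residential"):
    res = requests.get(
        "https://scrape.shifter.io/v1",
        params={
            "api_key": "api_key",  # placeholder for your actual API key
            "url": "https://httpbin.org/get",
            "proxy_type": proxy_type,
        },
    )
    # httpbin reports the exit IP of the proxy in the "origin" field.
    print(proxy_type, res.json().get("origin"))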
