Sessions
To reuse the same proxy for multiple requests, use the session parameter (e.g. session=123).
The session value can be any integer; sending a new integer creates a new session, and every request that carries the same session number is routed through the same proxy.
Sessions expire 5 minutes after their last use.
Sessions examples
To reuse the same proxy (and therefore the same IP address), specify a session by passing the session parameter.
GET https://scrape.shifter.io/v1?api_key=api_key&url=https://httpbin.org/get&country=us&proxy_type=datacenter&session=100
⇡ Input
cURL
curl --request GET --url "https://scrape.shifter.io/v1?api_key=api_key&url=https%3A%2F%2Fhttpbin.org%2Fget&country=us&proxy_type=datacenter&session=100"

Node.js
const http = require("https");

const options = {
  "method": "GET",
  "hostname": "scrape.shifter.io",
  "port": null,
  "path": "/v1?api_key=api_key&url=https%3A%2F%2Fhttpbin.org%2Fget&country=us&proxy_type=datacenter&session=100",
  "headers": {}
};

const req = http.request(options, function (res) {
  const chunks = [];

  res.on("data", function (chunk) {
    chunks.push(chunk);
  });

  res.on("end", function () {
    const body = Buffer.concat(chunks);
    console.log(body.toString());
  });
});

req.end();

Python
import http.client
conn = http.client.HTTPSConnection("scrape.shifter.io")
conn.request("GET", "/v1?api_key=api_key&url=https%3A%2F%2Fhttpbin.org%2Fget&country=us&proxy_type=datacenter&session=100")
res = conn.getresponse()
data = res.read()
print(data.decode("utf-8"))

PHP
<?php

$curl = curl_init();

curl_setopt_array($curl, [
  CURLOPT_URL => "https://scrape.shifter.io/v1?api_key=api_key&url=https%3A%2F%2Fhttpbin.org%2Fget&country=us&proxy_type=datacenter&session=100",
  CURLOPT_RETURNTRANSFER => true,
  CURLOPT_ENCODING => "",
  CURLOPT_MAXREDIRS => 10,
  CURLOPT_TIMEOUT => 30,
  CURLOPT_HTTP_VERSION => CURL_HTTP_VERSION_1_1,
  CURLOPT_CUSTOMREQUEST => "GET",
]);

$response = curl_exec($curl);
$err = curl_error($curl);

curl_close($curl);

if ($err) {
  echo "cURL Error #:" . $err;
} else {
  echo $response;
}

Go
package main

import (
  "fmt"
  "net/http"
  "io/ioutil"
)

func main() {
  url := "https://scrape.shifter.io/v1?api_key=api_key&url=https%3A%2F%2Fhttpbin.org%2Fget&country=us&proxy_type=datacenter&session=100"

  req, _ := http.NewRequest("GET", url, nil)
  res, _ := http.DefaultClient.Do(req)

  defer res.Body.Close()
  body, _ := ioutil.ReadAll(res.Body)

  fmt.Println(res)
  fmt.Println(string(body))
}

Java
HttpResponse<String> response = Unirest.get("https://scrape.shifter.io/v1?api_key=api_key&url=https%3A%2F%2Fhttpbin.org%2Fget&country=us&proxy_type=datacenter&session=100")
  .asString();

C#
var client = new RestClient("https://scrape.shifter.io/v1?api_key=api_key&url=https%3A%2F%2Fhttpbin.org%2Fget&country=us&proxy_type=datacenter&session=100");
var request = new RestRequest(Method.GET);
IRestResponse response = client.Execute(request);

Ruby
require 'uri'
require 'net/http'
require 'openssl'
url = URI("https://scrape.shifter.io/v1?api_key=api_key&url=https%3A%2F%2Fhttpbin.org%2Fget&country=us&proxy_type=datacenter&session=100")
http = Net::HTTP.new(url.host, url.port)
http.use_ssl = true
http.verify_mode = OpenSSL::SSL::VERIFY_NONE
request = Net::HTTP::Get.new(url)
response = http.request(request)
puts response.read_body

⇣ Output
{
"args": {},
"headers": {
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
"Accept-Encoding": "gzip, deflate, br",
"Host": "httpbin.org",
"Upgrade-Insecure-Requests": "1",
"User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/100.0.4889.0 Safari/537.36",
"X-Amzn-Trace-Id": "Root=1-6267dd3f-42cb5973084b3ac25f46af1e"
},
"origin": "192.241.96.150",
"url": "https://httpbin.org/get"
}
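The origin field in the output is the IP address of the proxy that served the request. As an illustrative sketch (not part of the official examples), the Python snippet below assumes the third-party requests package and reuses the same query parameters as the example above: it sends two requests with session=100 and compares the reported origins, which should match as long as the second request arrives within the 5-minute session lifetime, while a different session value (101 here, chosen arbitrarily) creates a new session and may be served from a different proxy.

Python
# Illustrative sketch only: check that requests sharing a session value go through
# the same proxy. Assumes the `requests` package and that the API returns the
# scraped httpbin.org/get body as shown in the output above.
import requests

API_ENDPOINT = "https://scrape.shifter.io/v1"
API_KEY = "api_key"  # replace with your real API key

def fetch_origin(session_id):
    # fetch_origin is a hypothetical helper name, not part of the API.
    params = {
        "api_key": API_KEY,
        "url": "https://httpbin.org/get",
        "country": "us",
        "proxy_type": "datacenter",
        "session": session_id,
    }
    response = requests.get(API_ENDPOINT, params=params, timeout=60)
    response.raise_for_status()
    return response.json()["origin"]  # the IP httpbin.org saw, i.e. the proxy IP

first = fetch_origin(100)
second = fetch_origin(100)  # same session number -> same proxy while the session is alive
print("same proxy reused:", first == second)

print("new session origin:", fetch_origin(101))  # a new integer creates a new session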