Sessions
Save and reuse a session on multiple requests.
Sessions are the way websites recognise multiple requests coming from the same address. In WebScrapingAPI, you can create and use a session in order to scrape multiple pages of the same website while reusing the same proxy (IP address) for each request.
The value of the session parameter can be any integer and is used to identify the session you create. To create a new session, simply add session=<YOUR_SESSION_ID> to the request, then reuse the same parameter and value on subsequent requests.
Residential proxies switch the IP address once every 5 minutes, so sessions set on this kind of proxy will expire at most 5 minutes after their last use.
Your full GET request should then be sent to the following address:
https://api.webscrapingapi.com/v1?api_key=<YOUR_API_KEY>&url=<TARGETED_URL>&session=<YOUR_SESSION_ID>
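For instance, here is a minimal Python sketch of session reuse (assuming a valid API key and that the API returns the target page body unchanged, as in the response example below; httpbin.org is used as the target because it echoes the caller's IP in its origin field). It sends two requests with the same session value and checks that both went out through the same proxy IP:

import requests

API_KEY = '<YOUR_API_KEY>'
SCRAPER_URL = 'https://api.webscrapingapi.com/v1'
SESSION_ID = 100  # any integer; reuse it to keep the same proxy IP

def fetch_origin(session_id):
    # httpbin.org/get returns the requesting IP in its "origin" field
    params = {
        "api_key": API_KEY,
        "url": "https://httpbin.org/get",
        "country": "us",
        "session": session_id
    }
    return requests.get(SCRAPER_URL, params=params).json()["origin"]

first = fetch_origin(SESSION_ID)
second = fetch_origin(SESSION_ID)
print(first, second, first == second)  # expected: the same IP for both requests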
Sessions Integration Examples
Examples are provided below for cURL, NodeJS, Python, PHP, Go, Java, .NET, and Ruby.

cURL

curl --request GET --url "https://api.webscrapingapi.com/v1?api_key=<YOUR_API_KEY>&url=https://httpbin.org/get&country=us&session=100"
NodeJS

const https = require("https");

const options = {
  "method": "GET",
  "hostname": "api.webscrapingapi.com",
  "port": null,
  "path": "/v1?api_key=<YOUR_API_KEY>&url=https://httpbin.org/get&country=us&session=100",
  "headers": {}
};

const req = https.request(options, function (res) {
  const chunks = [];

  res.on("data", function (chunk) {
    chunks.push(chunk);
  });

  res.on("end", function () {
    const body = Buffer.concat(chunks);
    console.log(body.toString());
  });
});

req.end();
Python

import requests

API_KEY = '<YOUR_API_KEY>'
SCRAPER_URL = 'https://api.webscrapingapi.com/v1'
TARGET_URL = 'https://httpbin.org/get'

PARAMS = {
    "api_key": API_KEY,
    "url": TARGET_URL,
    "country": "us",
    "session": 100
}

response = requests.get(SCRAPER_URL, params=PARAMS)

print(response.text)
PHP

<?php

$curl = curl_init();

curl_setopt_array($curl, [
  CURLOPT_URL => "https://api.webscrapingapi.com/v1?api_key=<YOUR_API_KEY>&url=https://httpbin.org/get&country=us&session=100",
  CURLOPT_RETURNTRANSFER => true,
  CURLOPT_ENCODING => "",
  CURLOPT_MAXREDIRS => 10,
  CURLOPT_TIMEOUT => 30,
  CURLOPT_HTTP_VERSION => CURL_HTTP_VERSION_1_1,
  CURLOPT_CUSTOMREQUEST => "GET",
]);

$response = curl_exec($curl);
$err = curl_error($curl);

curl_close($curl);

if ($err) {
  echo "cURL Error #:" . $err;
} else {
  echo $response;
}
Go

package main

import (
    "fmt"
    "io/ioutil"
    "net/http"
)

func main() {
    url := "https://api.webscrapingapi.com/v1?api_key=<YOUR_API_KEY>&url=https://httpbin.org/get&country=us&session=100"

    req, _ := http.NewRequest("GET", url, nil)
    res, _ := http.DefaultClient.Do(req)
    defer res.Body.Close()

    body, _ := ioutil.ReadAll(res.Body)

    fmt.Println(res)
    fmt.Println(string(body))
}
Java

HttpResponse<String> response = Unirest.get("https://api.webscrapingapi.com/v1?api_key=<YOUR_API_KEY>&url=https://httpbin.org/get&country=us&session=100")
  .asString();
.NET

var client = new RestClient("https://api.webscrapingapi.com/v1?api_key=<YOUR_API_KEY>&url=https://httpbin.org/get&country=us&session=100");
var request = new RestRequest(Method.GET);
IRestResponse response = client.Execute(request);
Ruby

require 'uri'
require 'net/http'
require 'openssl'

url = URI("https://api.webscrapingapi.com/v1?api_key=<YOUR_API_KEY>&url=https://httpbin.org/get&country=us&session=100")

http = Net::HTTP.new(url.host, url.port)
http.use_ssl = true
http.verify_mode = OpenSSL::SSL::VERIFY_NONE

request = Net::HTTP::Get.new(url)

response = http.request(request)
puts response.read_body
Important! The url parameter has to be URL-encoded (e.g. &url=https%3A%2F%2Fwww.webscrapingapi.com%2F).
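Clients such as Python's requests encode query parameters automatically when you pass them via params, as in the Python example above. If you build the query string yourself, you can encode the target URL explicitly; here is a small sketch using Python's standard urllib.parse.quote:

from urllib.parse import quote

target = "https://www.webscrapingapi.com/"
encoded = quote(target, safe="")  # -> https%3A%2F%2Fwww.webscrapingapi.com%2F

request_url = (
    "https://api.webscrapingapi.com/v1"
    "?api_key=<YOUR_API_KEY>"
    "&session=100"
    "&url=" + encoded
)
print(request_url)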
Response Example
{
  "args": {},
  "headers": {
    "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
    "Accept-Encoding": "gzip, deflate, br",
    "Host": "httpbin.org",
    "Upgrade-Insecure-Requests": "1",
    "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/100.0.4889.0 Safari/537.36",
    "X-Amzn-Trace-Id": "Root=1-6267dd3f-42cb5973084b3ac25f46af1e"
  },
  "origin": "192.241.96.150",
  "url": "https://httpbin.org/get"
}