Java Code Examples
The following are code examples on how to integrate the ScrapeOps Residential Proxy Aggregator with your Java scrapers using the Java HttpClient library.
Authorisation - API Key
To use the ScrapeOps proxy, you first need an API key which you can get by signing up for a free account here.
Your API key must be included with every request as the password parameter of the proxy port, otherwise the proxy will return a 403 Forbidden status code.
Basic Request
The following is some example code to send a URL to the ScrapeOps Proxy port http://scrapeops:YOUR_API_KEY@residential-proxy.scrapeops.io:8181:
import java.io.IOException;
import java.net.*;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.util.Base64;
import java.util.List;
public class Main {
    /**
     * Sends a GET request to a target URL through the ScrapeOps residential
     * proxy port (residential-proxy.scrapeops.io:8181) and prints the
     * response body.
     *
     * <p>The proxy endpoint is supplied via a {@link ProxySelector}; the API
     * key is sent as HTTP Basic credentials ("scrapeops:&lt;API key&gt;") in
     * the {@code Proxy-Authorization} header. This is equivalent to the proxy
     * URL {@code http://scrapeops:YOUR_API_KEY@residential-proxy.scrapeops.io:8181}.
     */
    public static void main(String[] args) {
        String apiKey = "YOUR_API_KEY";
        String targetUrl = "https://httpbin.org/ip";

        // Route every outgoing request through the ScrapeOps proxy endpoint.
        ProxySelector proxySelector = new ProxySelector() {
            @Override
            public List<Proxy> select(URI uri) {
                return List.of(new Proxy(Proxy.Type.HTTP,
                        new InetSocketAddress("residential-proxy.scrapeops.io", 8181)));
            }

            @Override
            public void connectFailed(URI uri, SocketAddress sa, IOException ioe) {
                System.err.println("Proxy connection failed: " + ioe.getMessage());
            }
        };

        HttpClient client = HttpClient.newBuilder()
                .connectTimeout(Duration.ofSeconds(120))
                .proxy(proxySelector)
                .build();

        // Encode the Basic credentials with an explicit charset; a bare
        // getBytes() would use the platform default encoding, which is not
        // portable across environments.
        String credentials = Base64.getEncoder()
                .encodeToString(("scrapeops:" + apiKey).getBytes(StandardCharsets.UTF_8));

        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create(targetUrl))
                .timeout(Duration.ofSeconds(120))
                .header("Proxy-Authorization", "Basic " + credentials)
                .GET()
                .build();

        try {
            HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
            System.out.println("Body: " + response.body());
        } catch (IOException e) {
            System.out.println("Error: " + e.getMessage());
        } catch (InterruptedException e) {
            // Restore the interrupt flag so enclosing code can observe the
            // interruption instead of having it silently swallowed.
            Thread.currentThread().interrupt();
            System.out.println("Error: " + e.getMessage());
        }
    }
}
ScrapeOps will take care of the proxy selection and rotation for you so you just need to send us the URL you want to scrape.