Installation
npm / yarn
npm install @zentislabs/proxy-sdk
# or
yarn add @zentislabs/proxy-sdk

pip
pip install zentislabs-proxy
# or
poetry add zentislabs-proxy

go
go get github.com/zentislabs/proxy-go@latest
Official SDKs for Python, Node.js, and Go with built-in retry logic, connection pooling, and type-safe configuration. Integrate ZentisLabs proxy infrastructure into any stack in minutes.
npm install @zentislabs/proxy-sdk
# or
yarn add @zentislabs/proxy-sdk

pip install zentislabs-proxy
# or
poetry add zentislabs-proxy

go get github.com/zentislabs/proxy-go@latest

from zentislabs import ZentisLabsClient
# Initialize the SDK client with an API key.
client = ZentisLabsClient(
    api_key="YOUR_API_KEY",
    timeout=30,       # Request timeout in seconds
    max_retries=3,    # Auto-retry on transient errors
    base_url="https://api.zentislabs.com/v1",  # Optional override
)

# Resolve a proxy from a pool
proxy = client.proxies.resolve(
    pool="residential-us",
    country="US",
    session="scrape-session-42",
)

print(f"Proxy: {proxy.host}:{proxy.port}")
print(f"Username: {proxy.username}")
print(f"Password: {proxy.password}")
import requests

from zentislabs import ZentisLabsClient

client = ZentisLabsClient(api_key="YOUR_API_KEY")

# Get proxy credentials
proxy = client.proxies.resolve(pool="residential-eu", country="DE")

# Use with the requests library: same credentials for http and https traffic.
proxies = {
    "http": f"http://{proxy.username}:{proxy.password}@{proxy.host}:{proxy.port}",
    "https": f"http://{proxy.username}:{proxy.password}@{proxy.host}:{proxy.port}",
}

response = requests.get("https://httpbin.org/ip", proxies=proxies, timeout=30)
print("Your IP:", response.json()["origin"])
import asyncio

import aiohttp

from zentislabs import ZentisLabsClient

client = ZentisLabsClient(api_key="YOUR_API_KEY")


async def scrape_urls(urls: list[str]):
    """Scrape multiple URLs concurrently with rotating proxies.

    Each URL is fetched through its own proxy session (session=f"batch-{i}"),
    so every request exits from a different IP.
    """
    async with aiohttp.ClientSession() as session:
        tasks = []
        for i, url in enumerate(urls):
            # Each request gets a different proxy session
            proxy = client.proxies.resolve(
                pool="residential-us",
                country="US",
                session=f"batch-{i}",
            )
            proxy_url = f"http://{proxy.username}:{proxy.password}@{proxy.host}:{proxy.port}"
            tasks.append(fetch(session, url, proxy_url))
        # Gather while the session is still open; return_exceptions keeps
        # one failed URL from cancelling the whole batch.
        results = await asyncio.gather(*tasks, return_exceptions=True)
    success = sum(1 for r in results if not isinstance(r, Exception))
    print(f"Completed: {success}/{len(urls)}")
    return results


async def fetch(session, url, proxy_url):
    """Fetch one URL through the given proxy and return the response body."""
    async with session.get(url, proxy=proxy_url, timeout=aiohttp.ClientTimeout(total=30)) as resp:
        return await resp.text()


# Run
urls = [f"https://example.com/page/{i}" for i in range(100)]
asyncio.run(scrape_urls(urls))
# List all pools
pools = client.pools.list()
for pool in pools:
    print(f"{pool.name} — {pool.type} — {pool.country} — {pool.active_sessions}")

# Create a new pool
new_pool = client.pools.create(
    name="ecommerce-scraper-us",
    type="residential",
    rotation="per_request",
    country="US",
    max_concurrency=1000,
)

# Update pool settings
client.pools.update("ecommerce-scraper-us", max_concurrency=2000)

# Delete a pool
client.pools.delete("old-test-pool")
const client = new ZentisLabsClient({
apiKey: process.env.ZENTISLABS_API_KEY!,
timeoutMs: 10000,
maxRetries: 3,
});
// Resolve a proxy
const proxy = await client.proxies.resolve({
pool: "residential-eu",
country: "DE",
session: "cart-sync-19",
});
console.log(`Proxy: ${proxy.host}:${proxy.port}`);import axios from "axios";
import { HttpsProxyAgent } from "https-proxy-agent";
import { ZentisLabsClient } from "@zentislabs/proxy-sdk";
const client = new ZentisLabsClient({
apiKey: process.env.ZENTISLABS_API_KEY!,
});
async function scrapeWithProxy(url: string, session: string) {
const proxy = await client.proxies.resolve({
pool: "residential-us",
country: "US",
session,
});
const agent = new HttpsProxyAgent(
`http://${proxy.username}:${proxy.password}@${proxy.host}:${proxy.port}`
);
const { data } = await axios.get(url, {
httpsAgent: agent,
timeout: 30000,
});
return data;
}
// Usage
const result = await scrapeWithProxy("https://example.com", "session-001");import { ZentisLabsClient } from "@zentislabs/proxy-sdk";
import { HttpsProxyAgent } from "https-proxy-agent";
import pLimit from "p-limit";
const client = new ZentisLabsClient({
apiKey: process.env.ZENTISLABS_API_KEY!,
});
// Limit concurrency to 20 simultaneous requests
const limit = pLimit(20);
async function scrapeAll(urls: string[]) {
const results = await Promise.allSettled(
urls.map((url, i) =>
limit(async () => {
const proxy = await client.proxies.resolve({
pool: "residential-us",
country: "US",
session: `batch-${i}`,
});
const agent = new HttpsProxyAgent(
`http://${proxy.username}:${proxy.password}@${proxy.host}:${proxy.port}`
);
const response = await fetch(url, {
// @ts-ignore - agent type
agent,
signal: AbortSignal.timeout(15000),
});
return response.text();
})
)
);
const fulfilled = results.filter((r) => r.status === "fulfilled").length;
console.log(`Success: ${fulfilled}/${urls.length}`);
return results;
}
// Scrape 500 URLs with 20 concurrent connections
const urls = Array.from({ length: 500 }, (_, i) => `https://example.com/page/${i}`);
await scrapeAll(urls);package main
import (
"context"
"fmt"
"log"
"time"
zentislabs "github.com/zentislabs/proxy-go"
)
func main() {
client := zentislabs.NewClient("YOUR_API_KEY", zentislabs.WithTimeout(10*time.Second))
ctx := context.Background()
// Resolve a proxy
proxy, err := client.Resolve(ctx, zentislabs.ResolveInput{
Pool: "residential-us",
Country: "US",
Session: "go-scraper-001",
})
if err != nil {
log.Fatal(err)
}
fmt.Printf("Proxy: %s:%d\n", proxy.Host, proxy.Port)
fmt.Printf("Auth: %s:%s\n", proxy.Username, proxy.Password)
}package main
import (
"context"
"fmt"
"io"
"net/http"
"net/url"
"time"
zentislabs "github.com/zentislabs/proxy-go"
)
func main() {
client := zentislabs.NewClient("YOUR_API_KEY")
ctx := context.Background()
proxy, _ := client.Resolve(ctx, zentislabs.ResolveInput{
Pool: "residential-eu",
Country: "GB",
Session: "uk-scraper",
})
// Build proxy URL
proxyURL, _ := url.Parse(fmt.Sprintf(
"http://%s:%s@%s:%d",
proxy.Username, proxy.Password, proxy.Host, proxy.Port,
))
// Create HTTP client with proxy transport
httpClient := &http.Client{
Timeout: 30 * time.Second,
Transport: &http.Transport{
Proxy: http.ProxyURL(proxyURL),
},
}
resp, err := httpClient.Get("https://httpbin.org/ip")
if err != nil {
panic(err)
}
defer resp.Body.Close()
body, _ := io.ReadAll(resp.Body)
fmt.Println(string(body))
}All SDKs throw typed exceptions for common error scenarios:
from zentislabs import ZentisLabsClient
from zentislabs.exceptions import (
    AuthenticationError,   # 401 — invalid API key
    ForbiddenError,        # 403 — insufficient scopes
    RateLimitError,        # 429 — too many requests
    PoolExhaustedError,    # 503 — no available proxies
    ZentisLabsError,       # Base exception
)

client = ZentisLabsClient(api_key="YOUR_API_KEY")

# Catch the most specific exception first; ZentisLabsError is the
# catch-all base for any other API failure.
try:
    proxy = client.proxies.resolve(pool="residential-us", country="US")
except AuthenticationError:
    print("Check your API key — it may be expired or revoked")
except RateLimitError as e:
    print(f"Rate limited. Retry after {e.retry_after}s")
except PoolExhaustedError:
    print("No proxies available. Try a different pool or country")
except ZentisLabsError as e:
    print(f"API error: {e.status_code} — {e.message}")
const client = new ZentisLabsClient({ apiKey: process.env.ZENTISLABS_API_KEY! });
try {
const proxy = await client.proxies.resolve({
pool: "residential-eu",
country: "DE",
});
} catch (error) {
if (error instanceof AuthError) {
console.error("Invalid API key:", error.message);
} else if (error instanceof RateLimitError) {
console.error(`Rate limited. Retry in ${error.retryAfterMs}ms`);
await new Promise((r) => setTimeout(r, error.retryAfterMs));
} else if (error instanceof PoolExhaustedError) {
console.error("No proxies available — switch pool or country");
} else {
throw error;
}
}from zentislabs import ZentisLabsClient
from concurrent.futures import ThreadPoolExecutor, as_completed
import requests
client = ZentisLabsClient(api_key="YOUR_API_KEY")
def scrape_with_proxy(url: str, session_id: str) -> dict:
"""Scrape a single URL with a dedicated proxy session."""
proxy = client.proxies.resolve(
pool="residential-us",
country="US",
session=session_id,
)
proxy_url = f"http://{proxy.username}:{proxy.password}@{proxy.host}:{proxy.port}"
response = requests.get(
url,
proxies={"http": proxy_url, "https": proxy_url},
timeout=20,
)
return {"url": url, "status": response.status_code, "size": len(response.content)}
# Scrape 200 URLs with 25 concurrent threads
urls = [f"https://example.com/product/{i}" for i in range(200)]
with ThreadPoolExecutor(max_workers=25) as executor:
futures = {
executor.submit(scrape_with_proxy, url, f"batch-{i}"): url
for i, url in enumerate(urls)
}
success = 0
for future in as_completed(futures):
try:
result = future.result()
success += 1
except Exception as e:
print(f"Failed: {futures[future]} — {e}")
print(f"Completed: {success}/{len(urls)}")session IDs for sticky sessions, omit for auto-rotationtimeout values (15-30s for scraping)X-RateLimit-Remaining headerszentislabs-proxy
pypi.org/project/zentislabs-proxy
@zentislabs/proxy-sdk
npmjs.com/package/@zentislabs/proxy-sdk
github.com/zentislabs/proxy-go
pkg.go.dev/github.com/zentislabs/proxy-go
Explore the full REST API documentation with all endpoints and parameters.