Python Integration
NinjasProxy works with every Python HTTP client. This guide covers the most common libraries with complete, copy-paste examples.
All examples use r.ninjasproxy.com:8080 (residential rotating). Swap in dc.ninjasproxy.com:8080 (datacenter) or m.ninjasproxy.com:8080 (mobile).

requests
import requests

PROXY_URL = "http://USERNAME:API_KEY@r.ninjasproxy.com:8080"
PROXIES = {"http": PROXY_URL, "https": PROXY_URL}

# ── Single request ──────────────────────────────────────────────
resp = requests.get("https://api.ipify.org?format=json", proxies=PROXIES, timeout=30)
print(resp.json()["ip"])

# ── Session — reuses TCP connections ────────────────────────────
session = requests.Session()
session.proxies = PROXIES
session.headers.update({
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36",
})

for url in ["https://api.ipify.org?format=json"] * 5:
    print(session.get(url, timeout=30).json())

# ── Sticky session — the 'session-run01' tag pins one exit IP ────
sticky_url = "http://USERNAME-session-run01:API_KEY@r.ninjasproxy.com:8080"
sticky_sess = requests.Session()
sticky_sess.proxies = {"http": sticky_url, "https": sticky_url}
for i in range(3):
    r = sticky_sess.get("https://api.ipify.org?format=json", timeout=30)
    print(f"Request {i+1}:", r.json()["ip"])  # Same IP every time

httpx — Synchronous
import httpx

PROXY = "http://USERNAME:API_KEY@r.ninjasproxy.com:8080"

# Note: httpx takes a single 'proxy' keyword, not requests-style 'proxies'
with httpx.Client(proxy=PROXY, timeout=30) as client:
    r = client.get("https://api.ipify.org?format=json")
    print(r.json()["ip"])

    # POST with a JSON body
    r = client.post(
        "https://httpbin.org/post",
        json={"key": "value"},
        headers={"Accept": "application/json"},
    )
    print(r.status_code, r.json()["json"])

httpx — Async
import asyncio
import httpx

PROXY = "http://USERNAME:API_KEY@r.ninjasproxy.com:8080"


async def fetch(client: httpx.AsyncClient, url: str) -> dict:
    """GET *url* through the shared client and return the decoded JSON body.

    Raises httpx.HTTPStatusError for non-2xx responses.
    """
    r = await client.get(url)
    r.raise_for_status()
    return r.json()


async def main():
    # Fix: the original wrote this as an f-string with no placeholders.
    urls = ["https://api.ipify.org?format=json"] * 10
    async with httpx.AsyncClient(proxy=PROXY, timeout=30) as client:
        tasks = [fetch(client, url) for url in urls]
        # return_exceptions=True keeps one failed request from cancelling the rest
        results = await asyncio.gather(*tasks, return_exceptions=True)
        for url, result in zip(urls, results):
            if isinstance(result, Exception):
                print(f"Error: {result}")
            else:
                print(result["ip"])
asyncio.run(main())

Playwright (Browser Automation)
from playwright.sync_api import sync_playwright

# Proxy credentials go in the launch config — Playwright handles the
# Proxy-Authorization handshake for you.
PROXY_CONFIG = {
    "server": "http://r.ninjasproxy.com:8080",
    "username": "USERNAME",
    "password": "API_KEY",
}

with sync_playwright() as pw:
    browser = pw.chromium.launch(proxy=PROXY_CONFIG, headless=True)

    page = browser.new_page()
    page.goto("https://api.ipify.org?format=json", wait_until="networkidle")
    print("Exit IP:", page.text_content("body"))

    # With custom user-agent
    context = browser.new_context(
        proxy=PROXY_CONFIG,
        user_agent="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36",
        locale="en-US",
    )
    page = context.new_page()
    page.goto("https://httpbin.org/ip")
    print(page.text_content("body"))
    browser.close()

Async Playwright
import asyncio
from playwright.async_api import async_playwright


async def main():
    # Same credentials shape as the sync API's launch(proxy=...) config.
    proxy_config = {
        "server": "http://r.ninjasproxy.com:8080",
        "username": "USERNAME",
        "password": "API_KEY",
    }
    async with async_playwright() as pw:
        browser = await pw.chromium.launch(proxy=proxy_config)
        page = await browser.new_page()
        await page.goto("https://api.ipify.org?format=json")
        print(await page.text_content("body"))
        await browser.close()
asyncio.run(main())

Selenium
from selenium import webdriver
from selenium.webdriver.chrome.options import Options

# Selenium cannot pass proxy credentials natively — authenticate via IP
# whitelist, or run a local forwarding proxy that injects the credentials
# before handing off to NinjasProxy.
PROXY_HOST = "r.ninjasproxy.com"
PROXY_PORT = 8080

opts = Options()
opts.add_argument(f"--proxy-server=http://{PROXY_HOST}:{PROXY_PORT}")
opts.add_argument("--headless=new")

driver = webdriver.Chrome(options=opts)
driver.get("https://api.ipify.org?format=json")
print(driver.page_source)
driver.quit()

Selenium WebDriver does not support proxy username/password in the proxy URL. Use IP whitelist authentication for Selenium, or run a local forward proxy (e.g. mitmproxy) that adds credentials before forwarding to NinjasProxy.
Scrapy Middleware
# settings.py
# Register the rotating-proxy middleware and the NINJASPROXY_* settings it
# reads back out in from_crawler(). 610 is the middleware's priority slot.
DOWNLOADER_MIDDLEWARES = {
"myspider.middlewares.RotatingProxyMiddleware": 610,
}
# Credentials and endpoint for NinjasProxy — replace the placeholders.
NINJASPROXY_USERNAME = "your_username"
NINJASPROXY_API_KEY = "your_api_key"
NINJASPROXY_HOST = "r.ninjasproxy.com"
NINJASPROXY_PORT = 8080

# middlewares.py
import random, string
from scrapy import signals
class RotatingProxyMiddleware:
    """Scrapy downloader middleware that sends every request through
    NinjasProxy with a freshly generated session id, so each request
    exits from a different IP (rotating behaviour)."""

    def __init__(self, username, api_key, host, port):
        self.username = username
        self.api_key = api_key
        self.host = host
        self.port = port

    @classmethod
    def from_crawler(cls, crawler):
        """Alternate constructor: pull the NINJASPROXY_* values from settings."""
        settings = crawler.settings
        return cls(
            username=settings.get("NINJASPROXY_USERNAME"),
            api_key=settings.get("NINJASPROXY_API_KEY"),
            host=settings.get("NINJASPROXY_HOST"),
            port=settings.getint("NINJASPROXY_PORT"),
        )

    def _session_id(self) -> str:
        """Return eight random characters drawn from [a-z0-9]."""
        alphabet = string.ascii_lowercase + string.digits
        return ''.join(random.choices(alphabet, k=8))

    def process_request(self, request, spider):
        # Unique session per request for rotating behaviour
        user = f"{self.username}-session-{self._session_id()}"
        request.meta["proxy"] = (
            f"http://{user}:{self.api_key}@{self.host}:{self.port}"
        )
        request.headers["Proxy-Authorization"] = ""  # let the URL carry auth
        return None

Retry with tenacity
import requests
from tenacity import (
    retry, stop_after_attempt, wait_exponential,
    retry_if_exception_type, retry_if_result,
)

_PROXY_URL = "http://USERNAME:API_KEY@r.ninjasproxy.com:8080"
PROXIES = {"http": _PROXY_URL, "https": _PROXY_URL}


# Retry on transient transport failures and on retryable responses
# (gateway 5xx plus 407 proxy-auth), with exponential backoff, up to
# five attempts; reraise=True surfaces the last error to the caller.
@retry(
    retry=(
        retry_if_exception_type((requests.ConnectionError, requests.Timeout))
        | retry_if_result(lambda r: r.status_code in {500, 502, 503, 504, 407})
    ),
    wait=wait_exponential(multiplier=1, min=2, max=30),
    stop=stop_after_attempt(5),
    reraise=True,
)
def safe_get(url: str, session: requests.Session) -> requests.Response:
    """GET *url* through the proxy, retrying transient failures."""
    return session.get(url, proxies=PROXIES, timeout=30)


with requests.Session() as s:
    r = safe_get("https://api.ipify.org?format=json", s)
    print(r.json())

Session Management for Sticky Proxies
import requests
import uuid


class StickySession:
    """Maintains a single sticky proxy session for its lifetime.

    The generated session id is baked into the proxy username, so every
    request made through this object exits from the same proxy IP.

    Fix: the documented usage is ``with StickySession(...) as sess:``, but
    the class previously defined no ``__enter__``/``__exit__`` and the
    with-statement raised TypeError. Context-manager support is added
    below; ``close()`` still works for manual lifetime management.
    """

    def __init__(self, username: str, api_key: str,
                 host: str = "r.ninjasproxy.com", port: int = 8080,
                 country: str | None = None):
        self._session_id = uuid.uuid4().hex[:12]
        user = username
        if country:
            # Country targeting is encoded in the username, e.g. "-country-US".
            user += f"-country-{country}"
        user += f"-session-{self._session_id}"
        proxy_url = f"http://{user}:{api_key}@{host}:{port}"
        self._http_session = requests.Session()
        self._http_session.proxies = {"http": proxy_url, "https": proxy_url}

    @property
    def session_id(self) -> str:
        """The 12-character hex id embedded in the proxy username."""
        return self._session_id

    def get(self, url: str, **kwargs) -> requests.Response:
        """GET *url* through the sticky proxy.

        Fix: default the timeout instead of always passing it, so a caller
        supplying ``timeout=`` no longer triggers a duplicate-kwarg TypeError.
        """
        kwargs.setdefault("timeout", 30)
        return self._http_session.get(url, **kwargs)

    def close(self):
        """Release the underlying connection pool."""
        self._http_session.close()

    def __enter__(self) -> "StickySession":
        return self

    def __exit__(self, exc_type, exc, tb) -> None:
        self.close()
# Usage
with StickySession("myuser", "mykey", country="US") as sess:
for i in range(5):
r = sess.get("https://api.ipify.org?format=json")
        print(f"[{sess.session_id}] Request {i+1}: {r.json()['ip']}")

Next Steps
- Node.js integration — axios, Puppeteer, TypeScript
- Go integration — goroutine pool, Colly
- Rate limits — concurrency tuning
- API Reference — usage stats endpoint