Python Requests

Python Requests Proxy Setup — a simple HTTP client with mobile IPs

The requests library is the most popular Python HTTP client. Configure SOCKS5 and HTTP proxies for web scraping, API calls, and data collection.

Quick Start

import requests

# Reusable proxy map: requests picks the entry matching the target URL scheme.
proxies = {
    "http": "socks5://user:pass@proxy.proxies.sx:10001",
    "https": "socks5://user:pass@proxy.proxies.sx:10001"
}

# Always pass a timeout: requests has none by default and would wait forever.
response = requests.get(
    "https://httpbin.org/ip",
    proxies=proxies,
    timeout=30
)
print(response.json())

Configuration Examples

1. Installation

# Install requests with SOCKS support
pip install requests[socks]

# Or install PySocks separately
pip install requests pysocks

2. SOCKS5 Proxy

import requests

# One credentialed SOCKS5 endpoint handles both plain and TLS traffic.
proxy_url = "socks5://your_username:your_password@proxy.proxies.sx:10001"
proxies = {scheme: proxy_url for scheme in ("http", "https")}

# Single request
ip_check = requests.get("https://httpbin.org/ip", proxies=proxies)
print(ip_check.json())

# With timeout
page = requests.get(
    "https://example.com",
    proxies=proxies,
    timeout=30
)

3. SOCKS5h (DNS through Proxy)

import requests

# socks5h:// hands hostname resolution to the proxy (recommended);
# plain socks5:// resolves DNS locally and may leak your real IP.
endpoint = "socks5h://your_username:your_password@proxy.proxies.sx:10001"
proxies = dict.fromkeys(("http", "https"), endpoint)

response = requests.get("https://httpbin.org/ip", proxies=proxies)

# Prefer socks5h whenever privacy matters.

4. Session-Based (Recommended)

import requests

# A Session reuses TCP connections and carries cookies across calls.
session = requests.Session()

proxy_url = "socks5h://your_username:your_password@proxy.proxies.sx:10001"
session.proxies = {"http": proxy_url, "https": proxy_url}

# Every call below is tunneled through the same proxy.
response1 = session.get("https://httpbin.org/ip")
response2 = session.get("https://httpbin.org/headers")
response3 = session.post("https://httpbin.org/post", data={"key": "value"})

# Cookies accumulated by the session are inspectable at any time.
print(session.cookies.get_dict())

5. HTTP Proxy

import requests

# Plain HTTP proxy — drop-in alternative to SOCKS5.
credentials = "your_username:your_password"
proxy_url = f"http://{credentials}@proxy.proxies.sx:10001"
proxies = {"http": proxy_url, "https": proxy_url}

response = requests.get("https://httpbin.org/ip", proxies=proxies)
print(response.json())

6. Environment Variables

import os
import requests

# requests honours HTTP_PROXY / HTTPS_PROXY automatically,
# so no proxies= argument is needed on each call.
proxy_url = "socks5h://user:pass@proxy.proxies.sx:10001"
os.environ.update({"HTTP_PROXY": proxy_url, "HTTPS_PROXY": proxy_url})

response = requests.get("https://httpbin.org/ip")
print(response.json())

# Equivalent shell setup before launching Python:
# export HTTP_PROXY="socks5h://user:pass@proxy.proxies.sx:10001"
# export HTTPS_PROXY="socks5h://user:pass@proxy.proxies.sx:10001"
# python script.py

7. Proxy Rotation

import requests
import random
from typing import List, Dict

class ProxyRotator:
    """Rotate requests through a pool of proxy ports.

    Builds socks5h:// URLs so DNS is resolved by the proxy
    (prevents local DNS leaks).
    """

    def __init__(self, ports: List[int], username: str, password: str):
        if not ports:
            # Fail fast: random.choice / the modulo below would otherwise
            # raise an opaque IndexError / ZeroDivisionError on an empty pool.
            raise ValueError("ports must not be empty")
        self.ports = ports
        self.username = username
        self.password = password
        # Position for round-robin selection (rotate=False).
        self.current_index = 0

    def get_proxy(self, rotate: bool = True) -> Dict[str, str]:
        """Return a requests-style proxies dict.

        rotate=True picks a random port from the pool; rotate=False walks
        the pool round-robin, advancing current_index each call.
        """
        if rotate:
            port = random.choice(self.ports)
        else:
            port = self.ports[self.current_index]
            self.current_index = (self.current_index + 1) % len(self.ports)

        proxy_url = f"socks5h://{self.username}:{self.password}@proxy.proxies.sx:{port}"
        return {"http": proxy_url, "https": proxy_url}

    def get(self, url: str, **kwargs) -> requests.Response:
        """GET *url* through a freshly (randomly) selected proxy port."""
        proxies = self.get_proxy()
        return requests.get(url, proxies=proxies, **kwargs)

# Usage: one rotator over five sibling ports of the same gateway.
rotator = ProxyRotator(
    ports=[10001, 10002, 10003, 10004, 10005],
    username="your_username",
    password="your_password"
)

# Each request goes out through a randomly chosen port, so the exit IP varies.
for i in range(10):
    response = rotator.get("https://httpbin.org/ip")
    print(f"Request {i+1}: {response.json()['origin']}")

8. With Retry Logic

import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

session = requests.Session()

# Retry up to 3 times with exponential backoff on transient HTTP errors.
retries = Retry(
    total=3,
    backoff_factor=1,
    status_forcelist=[429, 500, 502, 503, 504],
)

for scheme in ("http://", "https://"):
    session.mount(scheme, HTTPAdapter(max_retries=retries))

# Route everything through the SOCKS5 proxy (DNS resolved remotely).
proxy_url = "socks5h://user:pass@proxy.proxies.sx:10001"
session.proxies = {"http": proxy_url, "https": proxy_url}

# Failed attempts are retried transparently by the mounted adapters.
response = session.get("https://httpbin.org/ip", timeout=30)

9. Async with aiohttp

# pip install aiohttp aiohttp-socks
import asyncio
import aiohttp
from aiohttp_socks import ProxyConnector

async def fetch_with_proxy(url: str) -> str:
    """Fetch *url* through the SOCKS5 proxy and return the response body.

    rdns=True makes the proxy resolve hostnames (same effect as the
    socks5h:// scheme in requests), preventing local DNS leaks.
    """
    connector = ProxyConnector.from_url(
        "socks5://your_username:your_password@proxy.proxies.sx:10001",
        rdns=True,
    )

    async with aiohttp.ClientSession(connector=connector) as session:
        async with session.get(url) as response:
            return await response.text()

async def main():
    # Fan out: all three fetches run concurrently through the proxy.
    urls = [
        "https://httpbin.org/ip",
        "https://httpbin.org/headers",
        "https://httpbin.org/user-agent"
    ]

    tasks = [fetch_with_proxy(url) for url in urls]
    results = await asyncio.gather(*tasks)

    # Print a 100-character preview of each response body.
    for url, result in zip(urls, results):
        print(f"{url}: {result[:100]}...")

asyncio.run(main())
Pro Tip: Use socks5h:// instead of socks5:// to resolve DNS through the proxy. This prevents DNS leaks that could reveal your real IP.

Python Requests Best Practices

Use Sessions

Sessions maintain connection pools and cookies. More efficient than creating new connections for each request.

Always Set Timeouts

requests has no default timeout, so a stalled connection can hang forever. Always set an explicit timeout to prevent hung requests from blocking your application.

Use SOCKS5h

The 'h' suffix routes DNS through the proxy. Essential for privacy and accessing geo-restricted content.

Handle Exceptions

Catch requests.exceptions.RequestException (and specific subclasses such as ConnectTimeout and ProxyError) for network errors. Implement retry logic with proxy rotation for reliability.

Start Scraping with Python

Get mobile proxies for reliable HTTP requests.