CrewAI orchestrates teams of AI agents that work together on complex tasks. When your crews need web access for research, data collection, or verification, mobile proxies ensure every agent gets reliable, unblocked connectivity.
Understanding how CrewAI agents collaborate helps optimize proxy allocation.
Agents: Role-based workers with specific goals, backstories, and tools. Each agent can have its own proxy port for identity isolation.
Tasks: Specific work items assigned to agents, with expected outputs. Sequential tasks can share a sticky session for context.
Crews: Teams of agents working together on complex objectives. Crews benefit from proxy pools for parallel execution.
Set environment variables before importing CrewAI to proxy all web requests.
import os
# Set the proxy before importing CrewAI so all tool traffic inherits it
os.environ["HTTP_PROXY"] = "socks5://user:pass@proxy.proxies.sx:10001"
os.environ["HTTPS_PROXY"] = "socks5://user:pass@proxy.proxies.sx:10001"
from crewai import Agent, Task, Crew, Process
from crewai_tools import SerperDevTool, WebsiteSearchTool
# Initialize tools - they'll use the proxy automatically
search_tool = SerperDevTool()
web_tool = WebsiteSearchTool()
# Create agent with web tools
researcher = Agent(
    role="Senior Research Analyst",
    goal="Find comprehensive market intelligence",
    backstory="Expert at web research and data synthesis",
    tools=[search_tool, web_tool],
    verbose=True
)

For crews with different roles, assign unique proxy ports to each agent.
import os
from crewai import Agent, Task, Crew
from crewai_tools import SerperDevTool
import requests
class ProxiedSerperTool(SerperDevTool):
    """Custom Serper tool with a configurable per-agent proxy"""
    proxy: str = ""  # declared as a pydantic field so assignment in __init__ is allowed

    def __init__(self, proxy_port: int, **kwargs):
        super().__init__(**kwargs)
        self.proxy = f"socks5://user:pass@proxy.proxies.sx:{proxy_port}"

    def _run(self, query: str) -> str:
        # Override to route the search through this agent's dedicated port
        session = requests.Session()
        session.proxies = {"http": self.proxy, "https": self.proxy}
        # ... implement search with proxied session
# Assign different ports to different agents
researcher_tool = ProxiedSerperTool(proxy_port=10001)
competitor_tool = ProxiedSerperTool(proxy_port=10002)
validator_tool = ProxiedSerperTool(proxy_port=10003)
# Create agents with isolated identities
researcher = Agent(
    role="Market Researcher",
    goal="Research market trends",
    tools=[researcher_tool]
)
competitor_analyst = Agent(
    role="Competitor Analyst",
    goal="Analyze competitor strategies",
    tools=[competitor_tool]
)
fact_checker = Agent(
    role="Fact Checker",
    goal="Verify all claims",
    tools=[validator_tool]
)

For agents that need full browser capabilities, integrate with Browser Use.
from crewai import Agent, Task, Crew
from crewai.tools import BaseTool
from browser_use import Agent as BrowserAgent, Browser, BrowserConfig
from pydantic import Field
class BrowserUseTool(BaseTool):
    name: str = "browser_use"
    description: str = "Browse websites and interact with web pages"
    proxy_server: str = Field(default="socks5://proxy.proxies.sx:10001")

    def _run(self, task: str) -> str:
        import asyncio
        return asyncio.run(self._browse(task))

    async def _browse(self, task: str) -> str:
        browser = Browser(
            config=BrowserConfig(
                proxy={
                    "server": self.proxy_server,
                    "username": "your_username",
                    "password": "your_password"
                }
            )
        )
        agent = BrowserAgent(
            task=task,
            browser=browser,
            llm=your_llm  # Your LLM instance
        )
        result = await agent.run()
        return str(result)  # coerce the run history to the tool's string interface
# Create agent with browser capabilities
web_navigator = Agent(
    role="Web Navigator",
    goal="Navigate complex websites and extract data",
    backstory="Expert at interacting with dynamic web applications",
    tools=[BrowserUseTool(proxy_server="socks5://proxy.proxies.sx:10001")]
)

Here is a complete example that brings these pieces together: a three-agent research crew running behind a single proxied session.

import os
os.environ["HTTP_PROXY"] = "socks5://user:pass@proxy.proxies.sx:10001"
os.environ["HTTPS_PROXY"] = "socks5://user:pass@proxy.proxies.sx:10001"
from crewai import Agent, Task, Crew, Process
from crewai_tools import SerperDevTool, WebsiteSearchTool, ScrapeWebsiteTool
# Initialize proxied tools
search = SerperDevTool()
website_search = WebsiteSearchTool()
scraper = ScrapeWebsiteTool()
# Define agents
lead_researcher = Agent(
    role="Lead Market Researcher",
    goal="Identify key market trends and opportunities",
    backstory="20 years of market research experience",
    tools=[search, website_search],
    verbose=True
)
data_analyst = Agent(
    role="Data Analyst",
    goal="Extract and structure data from sources",
    backstory="Expert at web scraping and data processing",
    tools=[scraper, website_search],
    verbose=True
)
report_writer = Agent(
    role="Report Writer",
    goal="Synthesize findings into actionable reports",
    backstory="Former journalist with business expertise",
    tools=[],
    verbose=True
)
# Define tasks
research_task = Task(
    description="Research the current state of {topic}",
    expected_output="Detailed findings with sources",
    agent=lead_researcher
)
data_task = Task(
    description="Extract specific data points from identified sources",
    expected_output="Structured data in JSON format",
    agent=data_analyst
)
report_task = Task(
    description="Create executive summary of findings",
    expected_output="2-page executive report",
    agent=report_writer
)
# Create and run crew
crew = Crew(
    agents=[lead_researcher, data_analyst, report_writer],
    tasks=[research_task, data_task, report_task],
    process=Process.sequential,
    verbose=True
)
result = crew.kickoff(inputs={"topic": "AI automation market 2025"})

For crews doing competitive research, assign unique proxy ports to each agent. This prevents correlation between agent activities.
When tasks build on each other, use Process.sequential with sticky sessions. This maintains login state and context across tasks.
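A minimal sketch of this setup, assuming your provider keeps the same IP on a given port for the life of the session (the port and credentials are placeholders):

import os

# One sticky port for the whole crew: sequential tasks then share
# the same exit IP, so logins and cookies persist across tasks.
STICKY_PORT = 10001
proxy = f"socks5://user:pass@proxy.proxies.sx:{STICKY_PORT}"
os.environ["HTTP_PROXY"] = proxy
os.environ["HTTPS_PROXY"] = proxy

from crewai import Crew, Process

crew = Crew(
    agents=[lead_researcher, data_analyst],  # agents from the example above
    tasks=[research_task, data_task],        # data_task continues research_task's session
    process=Process.sequential,              # run in order on the same IP
)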
Running multiple crews simultaneously? Use a proxy pool and rotate assignments to distribute load and avoid rate limits.
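One way to do that, sketched below using the ProxiedSerperTool class from earlier (the port range 10001-10008 is a placeholder for whatever your plan includes):

import itertools
from crewai import Agent, Task, Crew

# Round-robin over the pool so concurrent crews never share an exit IP
port_pool = itertools.cycle(range(10001, 10009))

def build_crew(topic: str) -> Crew:
    tool = ProxiedSerperTool(proxy_port=next(port_pool))  # defined earlier
    researcher = Agent(
        role="Researcher",
        goal=f"Research {topic}",
        backstory="Focused web researcher",
        tools=[tool],
    )
    task = Task(
        description=f"Research the current state of {topic}",
        expected_output="Detailed findings with sources",
        agent=researcher,
    )
    return Crew(agents=[researcher], tasks=[task])

crews = [build_crew(t) for t in ["AI agents", "mobile proxies"]]

Each crew can then be kicked off in its own thread or process; because the ports differ, their traffic stays uncorrelated.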
Implement retry logic in custom tools. If a request fails, rotate to a different proxy port before retrying.
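For example, a small helper like this could back a custom tool's _run method (a sketch, assuming failures surface as requests exceptions; SOCKS support needs the requests[socks] extra):

import requests

def fetch_with_rotation(url: str, ports: list, attempts: int = 3) -> str:
    # Each attempt uses the next port in the list, so a blocked or
    # stale IP rotates out instead of failing the whole task.
    last_error = None
    for i in range(attempts):
        port = ports[i % len(ports)]
        proxy = f"socks5://user:pass@proxy.proxies.sx:{port}"
        try:
            response = requests.get(
                url,
                proxies={"http": proxy, "https": proxy},
                timeout=30,
            )
            response.raise_for_status()
            return response.text
        except requests.RequestException as error:
            last_error = error  # rotate to the next port and retry
    raise last_error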
Market research, competitive intelligence, trend analysis. Need: multiple ports, high bandwidth.
Blog writing, social media, SEO content generation. Need: sticky sessions for research continuity.
Lead generation, data entry, form submissions. Need: rotating IPs, browser tools.
Price tracking, stock monitoring, news alerts. Need: geo-targeted proxies, 24/7 uptime.
Code review, documentation, testing workflows. Need: API access, moderate bandwidth.
Web scraping, ETL pipelines, data enrichment. Need: high concurrency, bulk bandwidth.
Get reliable web access for CrewAI multi-agent systems with real mobile IPs.