Y Phung Nguyen committed
Commit · e6bba1f
Parent(s): 5096447

Upd MCP search max global
agent.py
CHANGED
@@ -36,6 +36,8 @@ except ImportError:
     print("Error: google-genai not installed. Install with: pip install google-genai", file=sys.stderr)
     sys.exit(1)
 
+from supervisor import MAX_SEARCH_STRATEGIES
+
 # Configure logging
 logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
 logger = logging.getLogger(__name__)
@@ -203,6 +205,25 @@ async def list_tools() -> list[Tool]:
                 },
                 "required": ["text"]
             }
+        ),
+        Tool(
+            name="search_web",
+            description="Search the web for information. Returns search results with titles, URLs, and content snippets.",
+            inputSchema={
+                "type": "object",
+                "properties": {
+                    "query": {
+                        "type": "string",
+                        "description": "Search query string (required)"
+                    },
+                    "max_results": {
+                        "type": "integer",
+                        "description": "Maximum number of results to return (optional, defaults to 5)",
+                        "default": MAX_SEARCH_STRATEGIES
+                    }
+                },
+                "required": ["query"]
+            }
         )
     ]
     return tools
@@ -426,6 +447,82 @@ async def call_tool(name: str, arguments: dict) -> Sequence[TextContent | ImageC
         except Exception as e:
             logger.error(f"Error in text_to_speech: {e}")
             return [TextContent(type="text", text=f"Error: {str(e)}")]
+    elif name == "search_web":
+        try:
+            query = arguments.get("query")
+            if not query:
+                logger.error("❌ query is required but missing")
+                return [TextContent(type="text", text="Error: query is required")]
+
+            max_results = int(arguments.get("max_results", MAX_SEARCH_STRATEGIES))
+
+            # Use DuckDuckGo for web search
+            try:
+                from ddgs import DDGS
+                import requests
+                from bs4 import BeautifulSoup
+            except ImportError:
+                logger.error("DuckDuckGo dependencies not available (ddgs, requests, beautifulsoup4)")
+                return [TextContent(type="text", text="Error: Web search dependencies not available")]
+
+            logger.info(f"🔵 Performing web search for: {query[:100]}...")
+
+            try:
+                with DDGS() as ddgs:
+                    results = list(ddgs.text(query, max_results=max_results))
+                    web_content = []
+                    for result in results:
+                        try:
+                            url = result.get('href', '')
+                            title = result.get('title', '')
+                            snippet = result.get('body', '')
+
+                            try:
+                                response = requests.get(url, timeout=5, headers={'User-Agent': 'Mozilla/5.0'})
+                                if response.status_code == 200:
+                                    soup = BeautifulSoup(response.content, 'html.parser')
+                                    for script in soup(["script", "style"]):
+                                        script.decompose()
+                                    text = soup.get_text()
+                                    lines = (line.strip() for line in text.splitlines())
+                                    chunks = (phrase.strip() for line in lines for phrase in line.split(" "))
+                                    text = ' '.join(chunk for chunk in chunks if chunk)
+                                    if len(text) > 1000:
+                                        text = text[:1000] + "..."
+                                    web_content.append({
+                                        'title': title,
+                                        'url': url,
+                                        'content': snippet + "\n" + text[:500] if text else snippet
+                                    })
+                                else:
+                                    web_content.append({
+                                        'title': title,
+                                        'url': url,
+                                        'content': snippet
+                                    })
+                            except:
+                                web_content.append({
+                                    'title': title,
+                                    'url': url,
+                                    'content': snippet
+                                })
+                        except Exception as e:
+                            logger.error(f"Error processing search result: {e}")
+                            continue
+
+                    # Return results as JSON string
+                    results_json = json.dumps(web_content, indent=2)
+                    logger.info(f"✅ Web search completed: {len(web_content)} results")
+                    return [TextContent(type="text", text=results_json)]
+            except Exception as e:
+                logger.error(f"❌ Web search error: {type(e).__name__}: {e}")
+                import traceback
+                logger.debug(f"Full traceback: {traceback.format_exc()}")
+                return [TextContent(type="text", text=f"Error: {str(e)}")]
+
+        except Exception as e:
+            logger.error(f"Error in search_web: {e}")
+            return [TextContent(type="text", text=f"Error: {str(e)}")]
     else:
         return [TextContent(type="text", text=f"Unknown tool: {name}")]
 
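As a quick sanity check (not part of this commit), here is a sketch of how an MCP client could call the newly registered search_web tool. The stdio launch command (`python agent.py`), the example query, and the result handling are assumptions layered on the standard `mcp` Python client API.

```python
# Hypothetical client-side check for the new search_web tool (not part of this commit).
# Assumes the server above can be launched as `python agent.py` over stdio.
import asyncio

from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client


async def main() -> None:
    params = StdioServerParameters(command="python", args=["agent.py"])
    async with stdio_client(params) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()
            # "max_results" is optional; if omitted, the server falls back to
            # MAX_SEARCH_STRATEGIES imported from supervisor.
            result = await session.call_tool(
                "search_web",
                {"query": "Model Context Protocol", "max_results": 3},
            )
            print(result.content[0].text)  # JSON list of {title, url, content}


if __name__ == "__main__":
    asyncio.run(main())
```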
search.py
CHANGED
@@ -11,8 +11,9 @@ try:
 except ImportError:
     nest_asyncio = None
 
+from supervisor import MAX_SEARCH_STRATEGIES
 
-async def search_web_mcp_tool(query: str, max_results: int = 5) -> list:
+async def search_web_mcp_tool(query: str, max_results: int = MAX_SEARCH_STRATEGIES) -> list:
     """Search web using MCP web search tool (e.g., DuckDuckGo MCP server)"""
     if not MCP_AVAILABLE:
         return []
@@ -97,7 +98,7 @@ async def search_web_mcp_tool(query: str, max_results: int = 5) -> list:
         return []
 
 
-async def search_web_mcp(query: str, max_results: int = 5) -> list:
+async def search_web_mcp(query: str, max_results: int = MAX_SEARCH_STRATEGIES) -> list:
     """Search web using MCP tools - tries web search MCP tool first, then falls back to direct search"""
     results = await search_web_mcp_tool(query, max_results)
     if results:
@@ -108,7 +109,7 @@ async def search_web_mcp(query: str, max_results: int = 5) -> list:
     return search_web_fallback(query, max_results)
 
 
-def search_web_fallback(query: str, max_results: int = 5) -> list:
+def search_web_fallback(query: str, max_results: int = MAX_SEARCH_STRATEGIES) -> list:
     """Fallback web search using DuckDuckGo directly (when MCP is not available)"""
     logger.info(f"🔍 [Direct API] Performing web search using DuckDuckGo API for: {query[:100]}...")
     try:
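The commit imports MAX_SEARCH_STRATEGIES from supervisor but does not show its definition. Below is a minimal sketch of what that shared default could look like, assuming the value stays at the previous hard-coded 5; the environment override and the name SEARCH_MAX_RESULTS are purely hypothetical.

```python
# supervisor.py (illustrative sketch only -- not shown in this diff).
# Assumption: the shared cap keeps the old default of 5; the environment
# override below is a hypothetical convenience, not part of the commit.
import os

MAX_SEARCH_STRATEGIES: int = int(os.environ.get("SEARCH_MAX_RESULTS", "5"))
```

With the constant defined in one place, agent.py's tool schema and all three helpers in search.py pick up the same cap whenever max_results is omitted.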