PhoneWork/router/routing_agent.py
Yuyao Huang (Sam) 09b63341cd refactor: 统一使用现代类型注解替代传统类型注解
- 将 Dict、List 等传统类型注解替换为 dict、list 等现代类型注解
- 更新类型注解以更精确地反映变量类型
- 修复部分类型注解与实际使用不匹配的问题
- 优化部分代码逻辑以提高类型安全性
2026-03-28 14:27:21 +08:00

134 lines
4.3 KiB
Python

"""Routing LLM for deciding which node to forward messages to.
This is a lightweight, one-shot LLM call that decides routing.
No history, no multi-step loop. Single call with one tool.
"""
from __future__ import annotations
import json
import logging
from typing import List, Optional
from langchain_core.messages import HumanMessage, SystemMessage
from langchain_openai import ChatOpenAI
from pydantic import SecretStr
from config import OPENAI_API_KEY, OPENAI_BASE_URL, OPENAI_MODEL
from router.nodes import NodeConnection, get_node_registry
logger = logging.getLogger(__name__)
# One-shot routing prompt. `{nodes_info}` is filled in by route() via str.format();
# the doubled braces `{{...}}` survive formatting as literal JSON braces so the
# model sees a concrete example of the expected response shape.
ROUTING_SYSTEM_PROMPT = """You are a routing assistant. A user has sent a message. \
Choose which node to forward it to.
Connected nodes for this user:
{nodes_info}
Rules:
- If the message references an active session on a node, route to that node.
- If the user names a machine explicitly ("on work-server", "@home-pc"), route there.
- If only one node is connected, route there without asking.
- If ambiguous with multiple idle nodes, ask the user to clarify.
- For meta commands (/nodes, /help, /status), respond with "meta" as the node_id.
Respond with a JSON object:
{{"node_id": "<node_id>", "reason": "<brief reason>"}}
"""
def _format_nodes_info(nodes: list[NodeConnection], active_node_id: Optional[str] = None) -> str:
"""Format node information for the routing prompt."""
lines = []
for node in nodes:
marker = " [ACTIVE]" if node.node_id == active_node_id else ""
sessions = ", ".join(
s.get("working_dir", "unknown") for s in node.active_sessions[:3]
) or "none"
lines.append(
f"- {node.display_name or node.node_id}{marker}: "
f"sessions=[{sessions}], capabilities={node.capabilities}"
)
return "\n".join(lines)
async def route(user_id: str, chat_id: str, text: str) -> tuple[str | None, str]:
    """Determine which node to route a message to.

    Args:
        user_id: User's Feishu open_id
        chat_id: Chat ID for context (currently unused; kept for interface stability)
        text: User's message text

    Returns:
        Tuple of (node_id, reason). node_id is None if no suitable node is
        found, or the sentinel "meta" for meta commands (/nodes, /help, ...).
    """
    registry = get_node_registry()
    nodes = registry.get_nodes_for_user(user_id)
    if not nodes:
        return None, "No nodes available for this user"
    online_nodes = [n for n in nodes if n.is_online]
    if not online_nodes:
        return None, "All nodes for this user are offline"
    # Meta commands must be detected BEFORE the single-node shortcut;
    # otherwise "/help" on a one-node setup would be forwarded to the node
    # instead of being handled as a meta command.
    if text.strip().startswith("/"):
        return "meta", "Meta command"
    if len(online_nodes) == 1:
        return online_nodes[0].node_id, "Only one node available"
    active_node = registry.get_active_node(user_id)
    active_node_id = active_node.node_id if active_node else None
    nodes_info = _format_nodes_info(online_nodes, active_node_id)
    try:
        llm = ChatOpenAI(
            model=OPENAI_MODEL,
            api_key=SecretStr(OPENAI_API_KEY),
            base_url=OPENAI_BASE_URL,
            temperature=0,  # deterministic routing decisions
        )
        prompt = ROUTING_SYSTEM_PROMPT.format(nodes_info=nodes_info)
        messages = [
            SystemMessage(content=prompt),
            HumanMessage(content=text),
        ]
        response = await llm.ainvoke(messages)
        content = response.content
        content = content.strip() if isinstance(content, str) else str(content).strip()
        # Strip a markdown code fence if the model wrapped the JSON in one.
        # Guard against a fence with no newline (original split("\n", 1)[1]
        # raised IndexError there, triggering the fallback unnecessarily).
        if content.startswith("```"):
            first_break = content.find("\n")
            if first_break != -1:
                content = content[first_break + 1:]
            content = content.rsplit("```", 1)[0]
        result = json.loads(content)
        node_id = result.get("node_id")
        reason = result.get("reason", "")
        if node_id == "meta":
            return "meta", reason
        # Accept either a node_id or a display_name back from the LLM.
        for node in online_nodes:
            if node.node_id == node_id or node.display_name == node_id:
                return node.node_id, reason
        # LLM named a node we don't know about: degrade gracefully.
        if active_node:
            return active_node.node_id, f"Defaulting to active node (LLM suggested unavailable: {node_id})"
        return online_nodes[0].node_id, f"Defaulting to first available node (LLM suggested: {node_id})"
    except Exception as e:
        # Broad catch is deliberate: routing must degrade, never crash the bot.
        logger.warning("Routing LLM failed: %s, falling back to active node", e)
        if active_node:
            return active_node.node_id, "Fallback to active node"
        return online_nodes[0].node_id, "Fallback to first available node"