新增路由器、主机客户端和共享协议模块,支持多主机部署模式: - 路由器作为中央节点管理主机连接和消息路由 - 主机客户端作为工作节点运行本地代理 - 共享协议定义通信消息格式 - 新增独立运行模式standalone.py - 更新配置系统支持路由模式
129 lines
4.2 KiB
Python
129 lines
4.2 KiB
Python
"""Routing LLM for deciding which node to forward messages to.
|
|
|
|
This is a lightweight, one-shot LLM call that decides routing.
|
|
No history, no multi-step loop. Single call with one tool.
|
|
"""
|
|
|
|
from __future__ import annotations
|
|
|
|
import json
|
|
import logging
|
|
from typing import List, Optional
|
|
|
|
from langchain_core.messages import HumanMessage, SystemMessage
|
|
from langchain_openai import ChatOpenAI
|
|
|
|
from config import OPENAI_API_KEY, OPENAI_BASE_URL, OPENAI_MODEL
|
|
from router.nodes import NodeConnection, get_node_registry
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
ROUTING_SYSTEM_PROMPT = """You are a routing assistant. A user has sent a message. \
|
|
Choose which node to forward it to.
|
|
|
|
Connected nodes for this user:
|
|
{nodes_info}
|
|
|
|
Rules:
|
|
- If the message references an active session on a node, route to that node.
|
|
- If the user names a machine explicitly ("on work-server", "@home-pc"), route there.
|
|
- If only one node is connected, route there without asking.
|
|
- If ambiguous with multiple idle nodes, ask the user to clarify.
|
|
- For meta commands (/nodes, /help, /status), respond with "meta" as the node_id.
|
|
|
|
Respond with a JSON object:
|
|
{{"node_id": "<node_id>", "reason": "<brief reason>"}}
|
|
"""
|
|
|
|
|
|
def _format_nodes_info(nodes: List[NodeConnection], active_node_id: Optional[str] = None) -> str:
|
|
"""Format node information for the routing prompt."""
|
|
lines = []
|
|
for node in nodes:
|
|
marker = " [ACTIVE]" if node.node_id == active_node_id else ""
|
|
sessions = ", ".join(
|
|
s.get("working_dir", "unknown") for s in node.active_sessions[:3]
|
|
) or "none"
|
|
lines.append(
|
|
f"- {node.display_name or node.node_id}{marker}: "
|
|
f"sessions=[{sessions}], capabilities={node.capabilities}"
|
|
)
|
|
return "\n".join(lines)
|
|
|
|
|
|
async def route(user_id: str, chat_id: str, text: str) -> tuple[Optional[str], str]:
    """Determine which node to route a message to.

    Args:
        user_id: User's Feishu open_id
        chat_id: Chat ID for context
        text: User's message text

    Returns:
        Tuple of (node_id, reason). node_id is None if no suitable node
        found; the sentinel "meta" is returned for meta commands.
    """
    # Meta commands (/nodes, /help, /status, ...) are handled by the router
    # itself and do not depend on node state, so check them first. Previously
    # this check ran *after* the single-online-node shortcut, which forwarded
    # "/help" to the node whenever exactly one node was online.
    if text.strip().startswith("/"):
        return "meta", "Meta command"

    registry = get_node_registry()
    nodes = registry.get_nodes_for_user(user_id)

    if not nodes:
        return None, "No nodes available for this user"

    online_nodes = [n for n in nodes if n.is_online]
    if not online_nodes:
        return None, "All nodes for this user are offline"

    # With a single online node there is nothing to decide — skip the LLM.
    if len(online_nodes) == 1:
        return online_nodes[0].node_id, "Only one node available"

    active_node = registry.get_active_node(user_id)
    active_node_id = active_node.node_id if active_node else None

    nodes_info = _format_nodes_info(online_nodes, active_node_id)

    try:
        llm = ChatOpenAI(
            model=OPENAI_MODEL,
            openai_api_key=OPENAI_API_KEY,
            openai_api_base=OPENAI_BASE_URL,
            temperature=0,
        )

        prompt = ROUTING_SYSTEM_PROMPT.format(nodes_info=nodes_info)
        messages = [
            SystemMessage(content=prompt),
            HumanMessage(content=text),
        ]

        response = await llm.ainvoke(messages)
        content = response.content.strip()

        # Strip a Markdown code fence if the model wrapped its JSON in one.
        # Guard the split: a bare "```" with no newline previously raised
        # IndexError and was silently swallowed by the broad except below.
        if content.startswith("```"):
            parts = content.split("\n", 1)
            content = parts[1] if len(parts) > 1 else ""
            content = content.rsplit("```", 1)[0]

        result = json.loads(content)
        node_id = result.get("node_id")
        reason = result.get("reason", "")

        if node_id == "meta":
            return "meta", reason

        # Accept either the canonical node_id or the human-facing display
        # name — the model may echo whichever appeared in the prompt.
        for node in online_nodes:
            if node.node_id == node_id or node.display_name == node_id:
                return node.node_id, reason

        # The model named an unknown/offline node: degrade deterministically.
        if active_node:
            return active_node.node_id, f"Defaulting to active node (LLM suggested unavailable: {node_id})"

        return online_nodes[0].node_id, f"Defaulting to first available node (LLM suggested: {node_id})"

    except Exception as e:
        # Best-effort routing: any LLM/parse failure falls back to a sane
        # default rather than surfacing an error to the user.
        logger.warning("Routing LLM failed: %s, falling back to active node", e)

        if active_node:
            return active_node.node_id, "Fallback to active node"
        return online_nodes[0].node_id, "Fallback to first available node"
|