feat(config): 实现深度搜索与网络搜索的互斥选择机制,以避免功能冲突 [优化:根据用户选型动态加载对应工具配置]
This commit is contained in:
parent
d80b17050d
commit
bd60a15acb
|
|
@ -43,7 +43,13 @@ async def chat_handler(body: dict):
|
|||
stream = body.get("stream", True)
|
||||
temperature = body.get("temperature", 0.7)
|
||||
max_tokens = body.get("max_tokens", body.get("maxTokens", 2000))
|
||||
web_search = body.get("webSearch", False) or body.get("deepSearch", False)
|
||||
# 区分搜索模式:深度搜索 > 简单搜索 > 不搜索
|
||||
if body.get("deepSearch", False):
|
||||
web_search = "deep"
|
||||
elif body.get("webSearch", False):
|
||||
web_search = "simple"
|
||||
else:
|
||||
web_search = False
|
||||
deep_think = body.get("deepThinking", False)
|
||||
files = body.get("files", [])
|
||||
|
||||
|
|
|
|||
|
|
@ -234,6 +234,40 @@ def build_glm_messages(messages: list, files: list | None = None) -> tuple[list,
|
|||
return glm_messages, has_vision
|
||||
|
||||
|
||||
# ── 网络搜索 tool 构建 ──────────────────────────────────────────────
|
||||
def _build_web_search_tool(mode: str | bool) -> dict:
|
||||
"""
|
||||
根据搜索模式构建 web_search tool 配置。
|
||||
|
||||
mode:
|
||||
- True / "simple" : 简单搜索(search_std + medium, 10条)
|
||||
- "deep" : 深度搜索(search_pro + high, 20条)
|
||||
"""
|
||||
if mode == "deep":
|
||||
# 深度搜索:高阶搜索引擎 + 详细内容 + 更多结果
|
||||
return {
|
||||
"type": "web_search",
|
||||
"web_search": {
|
||||
"enable": True,
|
||||
"search_result": True,
|
||||
"search_engine": "search_pro",
|
||||
"content_size": "high",
|
||||
"count": 20,
|
||||
},
|
||||
}
|
||||
# 简单搜索(默认):基础搜索引擎 + 摘要内容
|
||||
return {
|
||||
"type": "web_search",
|
||||
"web_search": {
|
||||
"enable": True,
|
||||
"search_result": True,
|
||||
"search_engine": "search_std",
|
||||
"content_size": "medium",
|
||||
"count": 10,
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
# ── 哨兵对象 ─────────────────────────────────────────────────────────
|
||||
_SENTINEL = object()
|
||||
|
||||
|
|
@ -245,13 +279,18 @@ async def glm_stream_generator(
|
|||
temperature: float,
|
||||
max_tokens: int,
|
||||
files: list | None = None,
|
||||
web_search: bool = False,
|
||||
web_search: str | bool = False,
|
||||
deep_thinking: bool = False,
|
||||
) -> AsyncGenerator[str, None]:
|
||||
"""
|
||||
GLM 流式 SSE 生成器。
|
||||
使用 queue.Queue + 专用线程(生产者)+ asyncio 消费者模式,
|
||||
让 zai-sdk 同步迭代器在单一线程内安全运行。
|
||||
|
||||
web_search:
|
||||
- False / "" : 不启用联网搜索
|
||||
- True / "simple" : 简单搜索(search_std + medium)
|
||||
- "deep" : 深度搜索(search_pro + high + 更多结果)
|
||||
"""
|
||||
import asyncio
|
||||
import queue
|
||||
|
|
@ -263,12 +302,7 @@ async def glm_stream_generator(
|
|||
|
||||
extra_kwargs: dict = {}
|
||||
if web_search:
|
||||
extra_kwargs["tools"] = [
|
||||
{
|
||||
"type": "web_search",
|
||||
"web_search": {"enable": True, "search_result": True},
|
||||
}
|
||||
]
|
||||
extra_kwargs["tools"] = [_build_web_search_tool(web_search)]
|
||||
if not deep_thinking:
|
||||
# 智普默认开启思考模式,所以要用非门(不知道“非门”描述是否准确。前端选择开启思考模式,这里不做变动。前端选择关闭思考模式,这里关闭。)
|
||||
extra_kwargs["thinking"] = {"type": "disabled"}
|
||||
|
|
@ -412,20 +446,21 @@ def glm_chat_sync(
|
|||
temperature: float,
|
||||
max_tokens: int,
|
||||
files: list | None = None,
|
||||
web_search: bool = False,
|
||||
web_search: str | bool = False,
|
||||
deep_thinking: bool = False,
|
||||
) -> dict:
|
||||
"""
|
||||
web_search:
|
||||
- False / "" : 不启用联网搜索
|
||||
- True / "simple" : 简单搜索(search_std + medium)
|
||||
- "deep" : 深度搜索(search_pro + high + 更多结果)
|
||||
"""
|
||||
glm_msgs, has_vision = build_glm_messages(messages, files)
|
||||
actual_model = resolve_model(model, has_vision)
|
||||
|
||||
extra_kwargs: dict = {}
|
||||
if web_search:
|
||||
extra_kwargs["tools"] = [
|
||||
{
|
||||
"type": "web_search",
|
||||
"web_search": {"enable": True, "search_result": True},
|
||||
}
|
||||
]
|
||||
extra_kwargs["tools"] = [_build_web_search_tool(web_search)]
|
||||
if deep_thinking:
|
||||
extra_kwargs["thinking"] = {"type": "enabled"}
|
||||
|
||||
|
|
|
|||
|
|
@ -389,9 +389,13 @@ function removeAttachment(id: string) {
|
|||
}
|
||||
}
|
||||
|
||||
// 切换功能
|
||||
// 切换功能(深度搜索与联网搜索互斥)
|
||||
// Toggle deep search; mutually exclusive with plain web search.
function toggleDeepSearch() {
  const enabled = !isDeepSearch.value;
  isDeepSearch.value = enabled;
  if (enabled) {
    // Turning deep search on forces plain web search off, in state and storage.
    isWebSearch.value = false;
    localStorage.setItem("isWebSearch", "false");
  }
  localStorage.setItem("isDeepSearch", String(enabled));
}
|
||||
|
||||
|
|
@ -402,6 +406,10 @@ function toggleDeepThink() {
|
|||
|
||||
// Toggle plain web search; mutually exclusive with deep search.
function toggleWebSearch() {
  const enabled = !isWebSearch.value;
  isWebSearch.value = enabled;
  if (enabled) {
    // Turning web search on forces deep search off, in state and storage.
    isDeepSearch.value = false;
    localStorage.setItem("isDeepSearch", "false");
  }
  localStorage.setItem("isWebSearch", String(enabled));
}
|
||||
|
||||
|
|
|
|||
Loading…
Reference in New Issue