add Ollama function/tool calling support similar to OpenAI-compatible providers
This commit is contained in:
parent
d4fdcbb54c
commit
264a0367b6
@@ -89,6 +89,8 @@ export TUSHARE_TOKEN="<your-token>"
|
||||
|
||||
- 通过 Provider 管理供应商连接参数(Base URL、API Key、默认温度/超时/Prompt 模板),并支持在界面内一键调用 `client.models.list()` 拉取可用模型列表,便于扩展本地 Ollama 或各类云端服务(DeepSeek、文心一言、OpenAI 等)。
|
||||
- 全局与部门配置直接选择 Provider,并根据需要覆盖模型、温度、Prompt 模板、投票策略;保存后写入 `app/data/config.json`,下次启动自动加载。
|
||||
|
||||
Note: Ollama's `/api/chat` endpoint supports function/tool calling. When a provider is configured as `ollama`, the client forwards `tools` and an optional `tool_choice` in the request, enabling function calls and tool-based workflows comparable to those of OpenAI-compatible providers.
|
||||
- Streamlit “数据与设置” 页提供 Provider/全局/部门三栏编辑界面,保存后即时生效,并通过 `llm_config_snapshot()` 输出脱敏检查信息。
|
||||
- 支持使用环境变量注入敏感信息:`TUSHARE_TOKEN`、`LLM_API_KEY`。
|
||||
|
||||
|
||||
@@ -231,14 +231,20 @@ def call_endpoint_with_messages(
|
||||
)
|
||||
|
||||
if mode == "ollama":
|
||||
if tools:
|
||||
raise LLMError("当前 provider 不支持函数调用/工具模式")
|
||||
# Ollama supports function/tool calling via the /api/chat endpoint.
|
||||
# Include `tools` and optional `tool_choice` in the payload when provided.
|
||||
payload = {
|
||||
"model": model,
|
||||
"messages": messages,
|
||||
"stream": False,
|
||||
"options": {"temperature": temperature},
|
||||
}
|
||||
if tools:
|
||||
# Ollama expects `tools` at the top level similar to OpenAI-compatible API
|
||||
payload["tools"] = tools
|
||||
if tool_choice is not None:
|
||||
payload["tool_choice"] = tool_choice
|
||||
|
||||
start_time = time.perf_counter()
|
||||
response = requests.post(
|
||||
f"{base_url.rstrip('/')}/api/chat",
|
||||
@@ -252,6 +258,9 @@ def call_endpoint_with_messages(
|
||||
record_call(provider_key, model, duration=duration)
|
||||
if enforce_cost and cost_controller:
|
||||
cost_controller.record_usage(model or provider_key, 0, 0)
|
||||
# Ollama may return `tool_calls` under message.tool_calls when tools are used.
|
||||
# Return the raw response so callers can handle either OpenAI-like responses or
|
||||
# Ollama's message structure with `tool_calls`.
|
||||
return data
|
||||
|
||||
if not api_key:
|
||||
|
||||
Loading…
Reference in New Issue
Block a user