humanus.cpp/config/config_llm.toml

[default]
model = "deepseek-reasoner"
base_url = "https://api.deepseek.com"
endpoint = "/v1/chat/completions"
api_key = "YOUR_API_KEY"  # set your own DeepSeek API key; never commit a real key
max_tokens = 8192
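# with oai_tool_support disabled, tool calls are exchanged as plain text between
# the delimiters below rather than via the OpenAI-style tools API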
oai_tool_support = false
tool_start = "<tool_call>"
tool_end = "</tool_call>"
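For reference, below is a minimal sketch of how an agent might read this `[default]` profile at startup. The use of toml++ (tomlplusplus), the `LLMSettings` struct, and the fallback defaults are assumptions for illustration only; the actual humanus.cpp loader may differ.

```cpp
// Sketch: load config/config_llm.toml and map the [default] table to a struct.
// Assumes toml++ with exceptions enabled (its default build configuration).
#include <toml++/toml.hpp>

#include <cstdint>
#include <iostream>
#include <string>

struct LLMSettings {
    std::string  model;
    std::string  base_url;
    std::string  endpoint;
    std::string  api_key;
    std::int64_t max_tokens = 4096;
    bool         oai_tool_support = true;
    std::string  tool_start;
    std::string  tool_end;
};

int main() {
    try {
        const toml::table tbl = toml::parse_file("config/config_llm.toml");
        const toml::table* def = tbl["default"].as_table();
        if (!def) {
            std::cerr << "missing [default] section\n";
            return 1;
        }

        LLMSettings cfg;
        cfg.model            = (*def)["model"].value_or(std::string{});
        cfg.base_url         = (*def)["base_url"].value_or(std::string{});
        cfg.endpoint         = (*def)["endpoint"].value_or(std::string{"/v1/chat/completions"});
        cfg.api_key          = (*def)["api_key"].value_or(std::string{});
        cfg.max_tokens       = (*def)["max_tokens"].value_or(std::int64_t{4096});
        cfg.oai_tool_support = (*def)["oai_tool_support"].value_or(true);
        cfg.tool_start       = (*def)["tool_start"].value_or(std::string{});
        cfg.tool_end         = (*def)["tool_end"].value_or(std::string{});

        std::cout << "model="      << cfg.model
                  << " url="       << cfg.base_url << cfg.endpoint
                  << " max_tokens=" << cfg.max_tokens << '\n';
    } catch (const toml::parse_error& err) {
        std::cerr << "failed to parse config: " << err << '\n';
        return 1;
    }
    return 0;
}
```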