diff --git a/CMakeLists.txt b/CMakeLists.txt
index 35a43fd..0dd252a 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -42,23 +42,18 @@ else()
message(FATAL_ERROR "OpenSSL not found. Please install OpenSSL development libraries.")
endif()
-find_package(Python3 COMPONENTS Development)
-if(Python3_FOUND)
- message(STATUS "Python3 found: ${Python3_VERSION}")
- message(STATUS "Python3 include directory: ${Python3_INCLUDE_DIRS}")
- message(STATUS "Python3 libraries: ${Python3_LIBRARIES}")
- include_directories(${Python3_INCLUDE_DIRS})
- add_compile_definitions(PYTHON_FOUND)
-else()
- message(WARNING "Python3 development libraries not found. Python interpreter will not be available.")
-endif()
-
# mcp
add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/mcp)
# server
add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/server)
+# tokenizer
+add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/tokenizer)
+
+# tests
+add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/tests)
+
# include
include_directories(${CMAKE_CURRENT_SOURCE_DIR})
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/include)
@@ -68,6 +63,11 @@ include_directories(${CMAKE_CURRENT_SOURCE_DIR}/mcp/common)
find_package(Threads REQUIRED)
+file(GLOB BASIC_SOURCES
+ "src/*.cpp"
+ "src/*.cc"
+)
+
file(GLOB AGENT_SOURCES
"agent/*.cpp"
"agent/*.cc"
@@ -88,28 +88,24 @@ file(GLOB MEMORY_SOURCES
"memory/*.cc"
"memory/*/*.cpp"
"memory/*/*.cc"
- "memory/*/*/*.cpp"
- "memory/*/*/*.cc"
+)
+
+file(GLOB TOKENIZER_SOURCES
+ "tokenizer/*.cpp"
+ "tokenizer/*.cc"
)
# humanus core
add_library(humanus
- src/config.cpp
- src/llm.cpp
- src/prompt.cpp
- src/logger.cpp
- src/schema.cpp
+ ${BASIC_SOURCES}
${AGENT_SOURCES}
${TOOL_SOURCES}
${FLOW_SOURCES}
${MEMORY_SOURCES}
+ ${TOKENIZER_SOURCES}
)
target_link_libraries(humanus PUBLIC Threads::Threads mcp ${OPENSSL_LIBRARIES})
-if(Python3_FOUND)
- target_link_libraries(humanus PUBLIC ${Python3_LIBRARIES})
-endif()
-
# examples
add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/examples)
\ No newline at end of file
diff --git a/README.md b/README.md
index d1731f0..d428749 100644
--- a/README.md
+++ b/README.md
@@ -1,48 +1,14 @@
-## Introduction
+<p align="center">
+    <img src="assets/logo.png" alt="humanus.cpp logo">
+</p>
-Humanus (meaning "human" in Latin) is a lightweight framework inspired by OpenManus, integrated with the Model Context Protocol (MCP). `humanus.cpp` enables more flexible tool choices, and provides a foundation for building powerful local LLM agents.
+# humanus.cpp
-Let's embrace local LLM agents w/ humanus.cpp!
+Humanus (meaning "human" in Latin) is a lightweight framework inspired by [OpenManus](https://github.com/mannaandpoem/OpenManus) and [mem0](https://github.com/mem0ai/mem0), integrated with the Model Context Protocol (MCP). `humanus.cpp` enables more flexible tool choices, and provides a foundation for building powerful local LLM agents.
-## Overview
-```bash
-humanus.cpp/
-├── 📄 config.cpp/.h # Configuration system
-├── 📄 llm.cpp/.h # Main LLM integration implementation
-├── 📄 logger.cpp/.h # Logging system implementation
-├── 📄 main.cpp # Program entry point
-├── 📄 prompt.cpp/.h # Predefined prompts
-├── 📄 schema.cpp/.h # Data structure definitions
-├── 📄 toml.hpp # TOML configuration parsing library
-├── 📂 agent/ # Agent module
-│ ├── 📄 base.h # Base agent interface
-│ ├── 📄 humanus.h # Core Humanus agent implementation
-│ ├── 📄 react.h # ReAct agent implementation
-│ └── 📄 toolcall.cpp/.h # Tool-call agent implementation
-├── 📂 flow/ # Workflow module
-│ ├── 📄 base.h # Base flow interface
-│ ├── 📄 flow_factory.h # Flow factory class
-│ └── 📄 planning.cpp/.h # Planning flow implementation
-├── 📂 mcp/ # Model Context Protocol (MCP) implementation
-├── 📂 memory/ # Memory module
-│ ├── 📄 base.h # Base memory interface
-│ └── 📂 mem0/ # TODO: mem0-style memory implementation
-├── 📂 server/ # Server module
-│ ├── 📄 mcp_server_main.cpp # MCP server entry point
-│ └── 📄 python_execute.cpp # Python execution environment integration
-├── 📂 spdlog/ # Third-party logging library
-└── 📂 tool/ # Tool module
- ├── 📄 base.h # Base tool interface
- ├── 📄 filesystem.h # Filesystem tools
- ├── 📄 planning.cpp/.h # Planning tool implementation
- ├── 📄 puppeteer.h # Puppeteer browser automation tool
- ├── 📄 python_execute.h # Python execution tool
- ├── 📄 terminate.h # Terminate tool
- └── 📄 tool_collection.h # Tool collection definition
-```
+Let's embrace local LLM agents with humanus.cpp!
-
-## Features
+## Project Demo
## How to Build
diff --git a/agent/base.h b/agent/base.h
index 764fb69..f009d54 100644
--- a/agent/base.h
+++ b/agent/base.h
@@ -38,8 +38,6 @@ struct BaseAgent : std::enable_shared_from_this<BaseAgent> {
int duplicate_threshold; // Threshold for duplicate messages
- std::string current_request; // Current request from user
-
BaseAgent(
const std::string& name,
const std::string& description,
@@ -47,9 +45,7 @@ struct BaseAgent : std::enable_shared_from_this {
const std::string& next_step_prompt,
const std::shared_ptr<LLM>& llm = nullptr,
const std::shared_ptr<BaseMemory>& memory = nullptr,
- AgentState state = AgentState::IDLE,
int max_steps = 10,
- int current_step = 0,
int duplicate_threshold = 2
) : name(name),
description(description),
@@ -57,9 +53,7 @@ struct BaseAgent : std::enable_shared_from_this {
next_step_prompt(next_step_prompt),
llm(llm),
memory(memory),
- state(state),
max_steps(max_steps),
- current_step(current_step),
duplicate_threshold(duplicate_threshold) {
initialize_agent();
}
@@ -70,8 +64,9 @@ struct BaseAgent : std::enable_shared_from_this {
llm = LLM::get_instance("default");
}
if (!memory) {
- memory = std::make_shared<Memory>(max_steps);
+ memory = std::make_shared<Memory>(MemoryConfig());
}
+ reset(true);
}
// Add a message to the agent's memory
@@ -92,7 +87,7 @@ struct BaseAgent : std::enable_shared_from_this {
// Execute the agent's main loop asynchronously
virtual std::string run(const std::string& request = "") {
- current_request = request;
+ memory->current_request = request;
if (state != AgentState::IDLE) {
throw std::runtime_error("Cannot run agent from state " + agent_state_map[state]);
@@ -118,7 +113,7 @@ struct BaseAgent : std::enable_shared_from_this {
}
if (is_stuck()) {
- this->handle_stuck_state();
+ handle_stuck_state();
}
results.push_back("Step " + std::to_string(current_step) + ": " + step_result);
@@ -157,13 +152,24 @@ struct BaseAgent : std::enable_shared_from_this {
// Handle stuck state by adding a prompt to change strategy
void handle_stuck_state() {
- std::string stuck_prompt = "\
- Observed duplicate responses. Consider new strategies and avoid repeating ineffective paths already attempted.";
- next_step_prompt = stuck_prompt + "\n" + next_step_prompt;
- if (!current_request.empty()) {
- next_step_prompt += "\nAnd don't for get your current task: " + current_request;
- }
+ std::string stuck_prompt = "Observed duplicate responses. Consider new strategies and avoid repeating ineffective paths already attempted.";
logger->warn("Agent detected stuck state. Added prompt: " + stuck_prompt);
+ memory->add_message(Message::user_message(stuck_prompt));
+ }
+
+ // O(nm) LCS algorithm; cheap enough for typical LLM context sizes
+ size_t get_lcs_length(const std::string& s1, const std::string& s2) {
+ std::vector<std::vector<size_t>> dp(s1.size() + 1, std::vector<size_t>(s2.size() + 1));
+ for (size_t i = 1; i <= s1.size(); i++) {
+ for (size_t j = 1; j <= s2.size(); j++) {
+ if (s1[i - 1] == s2[j - 1]) {
+ dp[i][j] = dp[i - 1][j - 1] + 1;
+ } else {
+ dp[i][j] = std::max(dp[i - 1][j], dp[i][j - 1]);
+ }
+ }
+ }
+ return dp[s1.size()][s2.size()];
}
// Check if the agent is stuck in a loop by detecting duplicate content
@@ -175,18 +181,24 @@ struct BaseAgent : std::enable_shared_from_this {
}
const Message& last_message = messages.back();
- if (last_message.content.empty() || last_message.content.is_null()) {
+ if (last_message.content.empty() || last_message.role != "assistant") {
return false;
}
// Count identical content occurrences
int duplicate_count = 0;
+ int duplicate_lcs_length = 0.6 * last_message.content.get<std::string>().size(); // TODO: make this threshold configurable
for (auto r_it = messages.rbegin(); r_it != messages.rend(); ++r_it) {
+ if (r_it == messages.rbegin()) {
+ continue;
+ }
const Message& message = *r_it;
- if (message.role == "assistant" && message.content == last_message.content) {
- duplicate_count++;
- if (duplicate_count >= duplicate_threshold) {
- break;
+ if (message.role == "assistant" && !message.content.empty()) {
+ if (get_lcs_length(message.content, last_message.content) > duplicate_lcs_length) {
+ duplicate_count++;
+ if (duplicate_count >= duplicate_threshold) {
+ break;
+ }
}
}
}
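Reviewer note: `is_stuck()` now flags near-duplicate assistant replies instead of exact matches, treating two replies as duplicates when their longest common subsequence covers more than 60% of the newer one. A minimal standalone sketch of the same check (helper names here are illustrative, not part of the patch):

```cpp
#include <algorithm>
#include <string>
#include <vector>

// Longest common subsequence length of s1 and s2, O(n*m) time and space.
static size_t lcs_length(const std::string& s1, const std::string& s2) {
    std::vector<std::vector<size_t>> dp(s1.size() + 1, std::vector<size_t>(s2.size() + 1));
    for (size_t i = 1; i <= s1.size(); i++)
        for (size_t j = 1; j <= s2.size(); j++)
            dp[i][j] = (s1[i - 1] == s2[j - 1]) ? dp[i - 1][j - 1] + 1
                                                : std::max(dp[i - 1][j], dp[i][j - 1]);
    return dp[s1.size()][s2.size()];
}

// Mirrors the 0.6 threshold in BaseAgent::is_stuck(): a previous reply counts
// as a duplicate when the LCS covers more than 60% of the latest reply.
static bool near_duplicate(const std::string& latest, const std::string& previous) {
    return lcs_length(previous, latest) > static_cast<size_t>(0.6 * latest.size());
}
```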
diff --git a/agent/humanus.h b/agent/humanus.h
index 4b70996..d42fb56 100644
--- a/agent/humanus.h
+++ b/agent/humanus.h
@@ -8,6 +8,7 @@
#include "tool/python_execute.h"
#include "tool/terminate.h"
#include "tool/puppeteer.h"
+#include "tool/playwright.h"
#include "tool/filesystem.h"
namespace humanus {
@@ -25,6 +26,7 @@ struct Humanus : ToolCallAgent {
{
std::make_shared(),
std::make_shared(),
+ std::make_shared(),
std::make_shared()
}
),
@@ -36,9 +38,7 @@ struct Humanus : ToolCallAgent {
const std::string& next_step_prompt = prompt::humanus::NEXT_STEP_PROMPT,
const std::shared_ptr& llm = nullptr,
const std::shared_ptr& memory = nullptr,
- AgentState state = AgentState::IDLE,
int max_steps = 30,
- int current_step = 0,
int duplicate_threshold = 2
) : ToolCallAgent(
available_tools,
@@ -50,11 +50,37 @@ struct Humanus : ToolCallAgent {
next_step_prompt,
llm,
memory,
- state,
max_steps,
- current_step,
duplicate_threshold
) {}
+
+ std::string run(const std::string& request = "") override {
+ memory->current_request = request;
+
+ auto tmp_next_step_prompt = next_step_prompt;
+
+ size_t pos = next_step_prompt.find("{current_date}");
+ if (pos != std::string::npos) {
+ // format: %Y-%m-%d (local time)
+ auto current_date = std::chrono::system_clock::now();
+ auto in_time_t = std::chrono::system_clock::to_time_t(current_date);
+ std::stringstream ss;
+ std::tm tm_info = *std::localtime(&in_time_t);
+ ss << std::put_time(&tm_info, "%Y-%m-%d");
+ std::string formatted_date = ss.str(); // YYYY-MM-DD
+ next_step_prompt.replace(pos, 14, formatted_date);
+ }
+
+ pos = next_step_prompt.find("{current_request}");
+ if (pos != std::string::npos) {
+ next_step_prompt.replace(pos, 17, request);
+ }
+
+ auto result = BaseAgent::run(request);
+ next_step_prompt = tmp_next_step_prompt; // restore the original prompt
+
+ return result;
+ }
};
}
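Reviewer note: this `run()` override and the identical one in `agent/mcp.h` below both splice `{current_date}` and `{current_request}` into `next_step_prompt` with hard-coded placeholder lengths (14 and 17). A shared helper would remove the duplication and the magic numbers; a sketch under that assumption (`replace_placeholder` and `current_date_string` are hypothetical, not in the patch):

```cpp
#include <chrono>
#include <ctime>
#include <iomanip>
#include <sstream>
#include <string>

// Replace the first occurrence of `placeholder` in `text` with `value`.
static void replace_placeholder(std::string& text, const std::string& placeholder,
                                const std::string& value) {
    size_t pos = text.find(placeholder);
    if (pos != std::string::npos) {
        text.replace(pos, placeholder.size(), value); // no hard-coded lengths
    }
}

// Local date formatted as YYYY-MM-DD, as both run() overrides build it.
static std::string current_date_string() {
    auto t = std::chrono::system_clock::to_time_t(std::chrono::system_clock::now());
    std::tm tm_info = *std::localtime(&t);
    std::stringstream ss;
    ss << std::put_time(&tm_info, "%Y-%m-%d");
    return ss.str();
}
```

Both overrides could then reduce to `replace_placeholder(next_step_prompt, "{current_date}", current_date_string());` followed by the `{current_request}` substitution.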
diff --git a/agent/mcp.h b/agent/mcp.h
new file mode 100644
index 0000000..8200caf
--- /dev/null
+++ b/agent/mcp.h
@@ -0,0 +1,77 @@
+#ifndef HUMANUS_AGENT_MCP_H
+#define HUMANUS_AGENT_MCP_H
+
+#include "base.h"
+#include "toolcall.h"
+#include "prompt.h"
+
+namespace humanus {
+
+struct MCPAgent : ToolCallAgent {
+ MCPAgent(
+ const std::vector& mcp_servers,
+ const ToolCollection& available_tools = ToolCollection(
+ {
+ std::make_shared<Terminate>()
+ }
+ ),
+ const std::string& tool_choice = "auto",
+ const std::set<std::string>& special_tool_names = {"terminate"},
+ const std::string& name = "mcp_agent",
+ const std::string& description = "an agent that can execute tool calls.",
+ const std::string& system_prompt = prompt::toolcall::SYSTEM_PROMPT,
+ const std::string& next_step_prompt = prompt::toolcall::NEXT_STEP_PROMPT,
+ const std::shared_ptr<LLM>& llm = nullptr,
+ const std::shared_ptr<BaseMemory>& memory = nullptr,
+ int max_steps = 30,
+ int duplicate_threshold = 2
+ ) : ToolCallAgent(
+ available_tools,
+ tool_choice,
+ special_tool_names,
+ name,
+ description,
+ system_prompt,
+ next_step_prompt,
+ llm,
+ memory,
+ max_steps,
+ duplicate_threshold
+ ) {
+ for (const auto& server_name : mcp_servers) {
+ this->available_tools.add_mcp_tools(server_name);
+ }
+ }
+
+ std::string run(const std::string& request = "") override {
+ memory->current_request = request;
+
+ auto tmp_next_step_prompt = next_step_prompt;
+
+ size_t pos = next_step_prompt.find("{current_date}");
+ if (pos != std::string::npos) {
+ // format: %Y-%m-%d (local time)
+ auto current_date = std::chrono::system_clock::now();
+ auto in_time_t = std::chrono::system_clock::to_time_t(current_date);
+ std::stringstream ss;
+ std::tm tm_info = *std::localtime(&in_time_t);
+ ss << std::put_time(&tm_info, "%Y-%m-%d");
+ std::string formatted_date = ss.str(); // YYYY-MM-DD
+ next_step_prompt.replace(pos, 14, formatted_date);
+ }
+
+ pos = next_step_prompt.find("{current_request}");
+ if (pos != std::string::npos) {
+ next_step_prompt.replace(pos, 17, request);
+ }
+
+ auto result = BaseAgent::run(request);
+ next_step_prompt = tmp_next_step_prompt; // restore the original prompt
+
+ return result;
+ }
+};
+
+}
+
+#endif // HUMANUS_AGENT_MCP_H
\ No newline at end of file
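Reviewer note: `MCPAgent` pulls its tool list from the named MCP servers at construction time via `add_mcp_tools`. A minimal usage sketch, assuming the names passed in must match `[section]` names in `config/config_mcp.toml` (e.g. `playwright`, `filesystem`):

```cpp
#include "agent/mcp.h"

using namespace humanus;

int main() {
    // Server names must match [section] names in config/config_mcp.toml.
    MCPAgent agent({"playwright", "filesystem"});
    agent.run("Open https://example.com and save the page title to a file.");
    return 0;
}
```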
diff --git a/agent/react.h b/agent/react.h
index 1edd641..5d7451e 100644
--- a/agent/react.h
+++ b/agent/react.h
@@ -13,9 +13,7 @@ struct ReActAgent : BaseAgent {
const std::string& next_step_prompt,
const std::shared_ptr<LLM>& llm = nullptr,
const std::shared_ptr<BaseMemory>& memory = nullptr,
- AgentState state = AgentState::IDLE,
int max_steps = 10,
- int current_step = 0,
int duplicate_threshold = 2
) : BaseAgent(
name,
@@ -24,9 +22,7 @@ struct ReActAgent : BaseAgent {
next_step_prompt,
llm,
memory,
- state,
- max_steps,
- current_step,
+ max_steps,
duplicate_threshold
) {}
@@ -39,6 +35,7 @@ struct ReActAgent : BaseAgent {
// Execute a single step: think and act.
virtual std::string step() {
bool should_act = think();
+ logger->info("Prompt tokens: " + std::to_string(llm->total_prompt_tokens()) + ", Completion tokens: " + std::to_string(llm->total_completion_tokens()));
if (!should_act) {
return "Thinking complete - no action needed";
}
diff --git a/agent/toolcall.cpp b/agent/toolcall.cpp
index 89c17b9..ead9b6a 100644
--- a/agent/toolcall.cpp
+++ b/agent/toolcall.cpp
@@ -6,7 +6,7 @@ namespace humanus {
bool ToolCallAgent::think() {
// Get response with tool options
auto response = llm->ask_tool(
- memory->get_messages(current_request),
+ memory->get_messages(memory->current_request),
system_prompt,
next_step_prompt,
available_tools.to_params(),
@@ -59,9 +59,6 @@ bool ToolCallAgent::think() {
return !tool_calls.empty();
} catch (const std::exception& e) {
logger->error("🚨 Oops! The " + name + "'s thinking process hit a snag: " + std::string(e.what()));
- memory->add_message(Message::assistant_message(
- "Error encountered while processing: " + std::string(e.what())
- ));
return false;
}
}
diff --git a/agent/toolcall.h b/agent/toolcall.h
index eea39c1..c2c9701 100644
--- a/agent/toolcall.h
+++ b/agent/toolcall.h
@@ -29,9 +29,7 @@ struct ToolCallAgent : ReActAgent {
const std::string& next_step_prompt = prompt::toolcall::NEXT_STEP_PROMPT,
const std::shared_ptr<LLM>& llm = nullptr,
const std::shared_ptr<BaseMemory>& memory = nullptr,
- AgentState state = AgentState::IDLE,
int max_steps = 30,
- int current_step = 0,
int duplicate_threshold = 2
) : ReActAgent(
name,
@@ -40,14 +38,16 @@ struct ToolCallAgent : ReActAgent {
next_step_prompt,
llm,
memory,
- state,
max_steps,
- current_step,
duplicate_threshold
),
available_tools(available_tools),
tool_choice(tool_choice),
- special_tool_names(special_tool_names) {}
+ special_tool_names(special_tool_names) {
+ if (available_tools.tools_map.find("terminate") == available_tools.tools_map.end()) {
+ throw std::runtime_error("terminate tool must be present in available_tools");
+ }
+ }
// Process current state and decide next actions using tools
bool think() override;
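Reviewer note: the constructor now fails fast when no `terminate` tool is registered instead of misbehaving mid-run. A sketch of the new invariant (tool class names assumed from the headers in `tool/`):

```cpp
// Constructing a ToolCallAgent whose ToolCollection lacks `terminate`
// now throws std::runtime_error at construction time.
try {
    ToolCallAgent agent(ToolCollection({std::make_shared<PythonExecute>()}));
} catch (const std::runtime_error& e) {
    // "terminate tool must be present in available_tools"
}
```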
diff --git a/assets/logo.png b/assets/logo.png
new file mode 100644
index 0000000..423dce1
Binary files /dev/null and b/assets/logo.png differ
diff --git a/config/config_embd.toml b/config/config_embd.toml
index 34f2d4e..518ca43 100644
--- a/config/config_embd.toml
+++ b/config/config_embd.toml
@@ -1,8 +1,17 @@
-[default]
+[nomic-embed-text-v1.5]
provider = "oai"
base_url = "http://localhost:8080"
endpoint = "/v1/embeddings"
model = "nomic-embed-text-v1.5.f16.gguf"
api_key = ""
embeddings_dim = 768
+max_retries = 3
+
+[default]
+provider = "oai"
+base_url = "https://dashscope.aliyuncs.com"
+endpoint = "/compatible-mode/v1/embeddings"
+model = "text-embedding-v3"
+api_key = "sk-cb1bb2a240d84182bb93f6dd0fe03600"
+embeddings_dim = 1024
max_retries = 3
\ No newline at end of file
diff --git a/config/config_llm.toml b/config/config_llm.toml
index 017c635..337350e 100644
--- a/config/config_llm.toml
+++ b/config/config_llm.toml
@@ -2,4 +2,36 @@
model = "qwen-max"
base_url = "https://dashscope.aliyuncs.com"
endpoint = "/compatible-mode/v1/chat/completions"
-api_key = "sk-cb1bb2a240d84182bb93f6dd0fe03600"
\ No newline at end of file
+api_key = "sk-cb1bb2a240d84182bb93f6dd0fe03600"
+
+[glm-4-plus]
+model = "glm-4-plus"
+base_url = "https://open.bigmodel.cn"
+endpoint = "/api/paas/v4/chat/completions"
+api_key = "7e12e1cb8fe5786d83c74d2ef48db511.xPVWzEZt8RvIciW9"
+
+[qwen-vl-max]
+model = "qwen-vl-max"
+base_url = "https://dashscope.aliyuncs.com"
+endpoint = "/compatible-mode/v1/chat/completions"
+api_key = "sk-cb1bb2a240d84182bb93f6dd0fe03600"
+
+[claude-3.5-sonnet]
+model = "anthropic/claude-3.5-sonnet"
+base_url = "https://openrouter.ai"
+endpoint = "/api/v1/chat/completions"
+api_key = "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"
+max_tokens = 8192
+
+[deepseek-chat]
+model = "deepseek-chat"
+base_url = "https://api.deepseek.com"
+endpoint = "/v1/chat/completions"
+api_key = "sk-93c5bfcb920c4a8aa345791d429b8536"
+
+[deepseek-r1]
+model = "deepseek-reasoner"
+base_url = "https://api.deepseek.com"
+endpoint = "/v1/chat/completions"
+api_key = "sk-93c5bfcb920c4a8aa345791d429b8536"
+oai_tool_support = false
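Reviewer note: each `[section]` added here is a named LLM configuration. Presumably any section name can be passed to `LLM::get_instance` (the patch only shows `"default"` being used, in `agent/base.h`):

```cpp
// Assumption: get_instance accepts any section name from config_llm.toml,
// not just "default".
auto llm = LLM::get_instance("deepseek-chat");
```

`oai_tool_support = false` on `deepseek-r1` suggests tool calls for that model are emulated rather than sent in the OpenAI tools format.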
diff --git a/config/config_mcp.toml b/config/config_mcp.toml
index f2ea211..4eecf3e 100644
--- a/config/config_mcp.toml
+++ b/config/config_mcp.toml
@@ -1,7 +1,7 @@
[python_execute]
type = "sse"
host = "localhost"
-port = 8818
+port = 8896
sse_endpoint = "/sse"
[puppeteer]
@@ -9,6 +9,11 @@ type = "stdio"
command = "npx"
args = ["-y", "@modelcontextprotocol/server-puppeteer"]
+[playwright]
+type = "stdio"
+command = "npx"
+args = ["-y", "@executeautomation/playwright-mcp-server"]
+
[filesystem]
type = "stdio"
command = "npx"
diff --git a/examples/chat/CMakeLists.txt b/examples/chat/CMakeLists.txt
new file mode 100644
index 0000000..1838281
--- /dev/null
+++ b/examples/chat/CMakeLists.txt
@@ -0,0 +1,6 @@
+set(target humanus_chat)
+
+add_executable(${target} humanus_chat.cpp)
+
+# Link against the core library
+target_link_libraries(${target} PRIVATE humanus)
\ No newline at end of file
diff --git a/examples/chat_mem0/chat_mem0.cpp b/examples/chat/humanus_chat.cpp
similarity index 54%
rename from examples/chat_mem0/chat_mem0.cpp
rename to examples/chat/humanus_chat.cpp
index f85292c..b1e069d 100644
--- a/examples/chat_mem0/chat_mem0.cpp
+++ b/examples/chat/humanus_chat.cpp
@@ -2,7 +2,7 @@
#include "logger.h"
#include "prompt.h"
#include "flow/flow_factory.h"
-#include "memory/mem0/base.h"
+#include "memory/base.h"
#if defined (__unix__) || (defined (__APPLE__) && defined (__MACH__))
#include <signal.h>
@@ -21,49 +21,14 @@ using namespace humanus;
#if defined (__unix__) || (defined (__APPLE__) && defined (__MACH__)) || defined (_WIN32)
static void sigint_handler(int signo) {
if (signo == SIGINT) {
+ // make sure all logs are flushed
logger->info("Interrupted by user\n");
- exit(0);
+ logger->flush();
+ _exit(130);
}
}
#endif
-static bool readline_utf8(std::string & line, bool multiline_input) {
-#if defined(_WIN32)
- std::wstring wline;
- if (!std::getline(std::wcin, wline)) {
- // Input stream is bad or EOF received
- line.clear();
- GenerateConsoleCtrlEvent(CTRL_C_EVENT, 0);
- return false;
- }
-
- int size_needed = WideCharToMultiByte(CP_UTF8, 0, &wline[0], (int)wline.size(), NULL, 0, NULL, NULL);
- line.resize(size_needed);
- WideCharToMultiByte(CP_UTF8, 0, &wline[0], (int)wline.size(), &line[0], size_needed, NULL, NULL);
-#else
- if (!std::getline(std::cin, line)) {
- // Input stream is bad or EOF received
- line.clear();
- return false;
- }
-#endif
- if (!line.empty()) {
- char last = line.back();
- if (last == '/') { // Always return control on '/' symbol
- line.pop_back();
- return false;
- }
- if (last == '\\') { // '\\' changes the default action
- line.pop_back();
- multiline_input = !multiline_input;
- }
- }
- line += '\n';
-
- // By default, continue input if multiline_input is set
- return multiline_input;
-}
-
int main() {
// ctrl+C handling
@@ -85,16 +50,10 @@ int main() {
#endif
}
- auto memory_config = mem0::MemoryConfig();
-
- memory_config.max_messages = 1;
- memory_config.retrieval_limit = 10;
-
- auto memory = std::make_shared<mem0::Memory>(memory_config);
- memory->current_request = "Chat with the user";
+ auto memory = std::make_shared<Memory>(MemoryConfig());
Chatbot chatbot{
- "chat_mem0", // name
+ "chatbot", // name
"A chatbot agent that uses memory to remember conversation history", // description
"You are a helpful assistant.", // system_prompt
nullptr, // llm
@@ -103,13 +62,19 @@ int main() {
while (true) {
std::cout << "> ";
+
std::string prompt;
readline_utf8(prompt, false);
- if (prompt == "exit" || prompt == "exit\n") {
+
+ if (prompt == "exit") {
logger->info("Goodbye!");
break;
}
+
+ logger->info("Processing your request: " + prompt);
auto response = chatbot.run(prompt);
- std::cout << response << std::endl;
+ logger->info("✨ " + chatbot.name + "'s response: " + response);
}
+
+ return 0;
}
\ No newline at end of file
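Reviewer note: the per-example `readline_utf8` copies are deleted here and in `main.cpp`/`humanus_plan.cpp`, but the call sites remain, so the helper presumably moved into a shared header; the new `#include "utils.h"` in `include/config.h` points that way, and the simplified `prompt == "exit"` check suggests the shared version no longer appends a trailing `'\n'`. Assumed shape of the shared declaration (location and guard name are guesses):

```cpp
#ifndef HUMANUS_UTILS_H
#define HUMANUS_UTILS_H

#include <string>

// Read one line of UTF-8 input (wide-character aware on Windows).
// Returns true when multi-line input should continue.
bool readline_utf8(std::string& line, bool multiline_input);

#endif // HUMANUS_UTILS_H
```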
diff --git a/examples/main/main.cpp b/examples/main/main.cpp
index 7be8944..4cc5450 100644
--- a/examples/main/main.cpp
+++ b/examples/main/main.cpp
@@ -20,49 +20,14 @@ using namespace humanus;
#if defined (__unix__) || (defined (__APPLE__) && defined (__MACH__)) || defined (_WIN32)
static void sigint_handler(int signo) {
if (signo == SIGINT) {
+ // make sure all logs are flushed
logger->info("Interrupted by user\n");
- exit(0);
+ logger->flush();
+ _exit(130);
}
}
#endif
-static bool readline_utf8(std::string & line, bool multiline_input) {
-#if defined(_WIN32)
- std::wstring wline;
- if (!std::getline(std::wcin, wline)) {
- // Input stream is bad or EOF received
- line.clear();
- GenerateConsoleCtrlEvent(CTRL_C_EVENT, 0);
- return false;
- }
-
- int size_needed = WideCharToMultiByte(CP_UTF8, 0, &wline[0], (int)wline.size(), NULL, 0, NULL, NULL);
- line.resize(size_needed);
- WideCharToMultiByte(CP_UTF8, 0, &wline[0], (int)wline.size(), &line[0], size_needed, NULL, NULL);
-#else
- if (!std::getline(std::cin, line)) {
- // Input stream is bad or EOF received
- line.clear();
- return false;
- }
-#endif
- if (!line.empty()) {
- char last = line.back();
- if (last == '/') { // Always return control on '/' symbol
- line.pop_back();
- return false;
- }
- if (last == '\\') { // '\\' changes the default action
- line.pop_back();
- multiline_input = !multiline_input;
- }
- }
- line += '\n';
-
- // By default, continue input if multiline_input is set
- return multiline_input;
-}
-
int main() {
// ctrl+C handling
@@ -85,6 +50,7 @@ int main() {
}
Humanus agent = Humanus();
+
while (true) {
if (agent.current_step == agent.max_steps) {
std::cout << "Automatically paused after " << agent.max_steps << " steps." << std::endl;
@@ -93,13 +59,17 @@ int main() {
} else {
std::cout << "Enter your prompt (or 'exit' to quit): ";
}
+
std::string prompt;
readline_utf8(prompt, false);
- if (prompt == "exit" || prompt == "exit\n") {
+ if (prompt == "exit") {
logger->info("Goodbye!");
break;
}
- logger->info("Processing your request...");
+
+ logger->info("Processing your request: " + prompt);
agent.run(prompt);
}
+
+ return 0;
}
\ No newline at end of file
diff --git a/examples/chat_mem0/CMakeLists.txt b/examples/mcp/CMakeLists.txt
similarity index 73%
rename from examples/chat_mem0/CMakeLists.txt
rename to examples/mcp/CMakeLists.txt
index 9377c5f..d1868b4 100644
--- a/examples/chat_mem0/CMakeLists.txt
+++ b/examples/mcp/CMakeLists.txt
@@ -1,6 +1,6 @@
-set(target humanus_chat_mem0)
+set(target humanus_cli_mcp)
-add_executable(${target} chat_mem0.cpp)
+add_executable(${target} humanus_mcp.cpp)
# Link against the core library
target_link_libraries(${target} PRIVATE humanus)
diff --git a/examples/mcp/humanus_mcp.cpp b/examples/mcp/humanus_mcp.cpp
new file mode 100644
index 0000000..01d66ce
--- /dev/null
+++ b/examples/mcp/humanus_mcp.cpp
@@ -0,0 +1,85 @@
+#include "agent/mcp.h"
+#include "logger.h"
+#include "prompt.h"
+#include "flow/flow_factory.h"
+
+#if defined (__unix__) || (defined (__APPLE__) && defined (__MACH__))
+#include <signal.h>
+#include <unistd.h>
+#elif defined (_WIN32)
+#define WIN32_LEAN_AND_MEAN
+#ifndef NOMINMAX
+#define NOMINMAX
+#endif
+#include <windows.h>
+#include <io.h>
+#endif
+
+using namespace humanus;
+
+#if defined (__unix__) || (defined (__APPLE__) && defined (__MACH__)) || defined (_WIN32)
+static void sigint_handler(int signo) {
+ if (signo == SIGINT) {
+ // make sure all logs are flushed
+ logger->info("Interrupted by user\n");
+ logger->flush();
+ _exit(130);
+ }
+}
+#endif
+
+int main(int argc, char* argv[]) {
+
+ // ctrl+C handling
+ {
+#if defined (__unix__) || (defined (__APPLE__) && defined (__MACH__))
+ struct sigaction sigint_action;
+ sigint_action.sa_handler = sigint_handler;
+ sigemptyset (&sigint_action.sa_mask);
+ sigint_action.sa_flags = 0;
+ sigaction(SIGINT, &sigint_action, NULL);
+#elif defined (_WIN32)
+ auto console_ctrl_handler = +[](DWORD ctrl_type) -> BOOL {
+ return (ctrl_type == CTRL_C_EVENT) ? (sigint_handler(SIGINT), true) : false;
+ };
+ SetConsoleCtrlHandler(reinterpret_cast<PHANDLER_ROUTINE>(console_ctrl_handler), true);
+ SetConsoleCP(CP_UTF8);
+ SetConsoleOutputCP(CP_UTF8);
+ _setmode(_fileno(stdin), _O_WTEXT); // wide character input mode
+#endif
+ }
+
+ if (argc <= 1) {
+ std::cout << "Usage: " << argv[0] << " ..." << std::endl;
+ return 0;
+ }
+
+ std::vector<std::string> mcp_servers;
+ for (int i = 1; i < argc; i++) {
+ mcp_servers.emplace_back(argv[i]);
+ }
+
+ MCPAgent agent = MCPAgent(
+ mcp_servers
+ );
+
+ while (true) {
+ if (agent.current_step == agent.max_steps) {
+ std::cout << "Automatically paused after " << agent.max_steps << " steps." << std::endl;
+ std::cout << "Enter your prompt (enter an empty line to resume or 'exit' to quit): ";
+ agent.reset(false);
+ } else {
+ std::cout << "Enter your prompt (or 'exit' to quit): ";
+ }
+
+ std::string prompt;
+ readline_utf8(prompt, false);
+ if (prompt == "exit") {
+ logger->info("Goodbye!");
+ break;
+ }
+
+ logger->info("Processing your request: " + prompt);
+ agent.run(prompt);
+ }
+}
\ No newline at end of file
diff --git a/examples/plan/humanus_plan.cpp b/examples/plan/humanus_plan.cpp
index 0e8f61a..ab68e33 100644
--- a/examples/plan/humanus_plan.cpp
+++ b/examples/plan/humanus_plan.cpp
@@ -20,49 +20,14 @@ using namespace humanus;
#if defined (__unix__) || (defined (__APPLE__) && defined (__MACH__)) || defined (_WIN32)
static void sigint_handler(int signo) {
if (signo == SIGINT) {
+ // make sure all logs are flushed
logger->info("Interrupted by user\n");
- exit(0);
+ logger->flush();
+ _exit(130);
}
}
#endif
-static bool readline_utf8(std::string & line, bool multiline_input) {
-#if defined(_WIN32)
- std::wstring wline;
- if (!std::getline(std::wcin, wline)) {
- // Input stream is bad or EOF received
- line.clear();
- GenerateConsoleCtrlEvent(CTRL_C_EVENT, 0);
- return false;
- }
-
- int size_needed = WideCharToMultiByte(CP_UTF8, 0, &wline[0], (int)wline.size(), NULL, 0, NULL, NULL);
- line.resize(size_needed);
- WideCharToMultiByte(CP_UTF8, 0, &wline[0], (int)wline.size(), &line[0], size_needed, NULL, NULL);
-#else
- if (!std::getline(std::cin, line)) {
- // Input stream is bad or EOF received
- line.clear();
- return false;
- }
-#endif
- if (!line.empty()) {
- char last = line.back();
- if (last == '/') { // Always return control on '/' symbol
- line.pop_back();
- return false;
- }
- if (last == '\\') { // '\\' changes the default action
- line.pop_back();
- multiline_input = !multiline_input;
- }
- }
- line += '\n';
-
- // By default, continue input if multiline_input is set
- return multiline_input;
-}
-
int main() {
// ctrl+C handling
@@ -104,23 +69,22 @@ int main() {
std::cout << "Automatically paused after " << agent_ptr->current_step << " steps." << std::endl;
std::cout << "Enter your prompt (enter an empty line to resume or 'exit' to quit): ";
agent_ptr->reset(false);
+ } else if (agent_ptr->state != AgentState::IDLE) {
+ std::cout << "Enter your prompt (enter an empty line to retry or 'exit' to quit): ";
+ agent_ptr->reset(false);
} else {
std::cout << "Enter your prompt (or 'exit' to quit): ";
}
-
- if (agent_ptr->state != AgentState::IDLE) {
- break;
- }
std::string prompt;
readline_utf8(prompt, false);
- if (prompt == "exit" || prompt == "exit\n") {
+ if (prompt == "exit") {
logger->info("Goodbye!");
break;
}
- std::cout << "Processing your request..." << std::endl;
+ logger->info("Processing your request: " + prompt);
auto result = flow->execute(prompt);
- std::cout << result << std::endl;
+ logger->info("🌟 " + agent_ptr->name + "'s summary: " + result);
}
}
\ No newline at end of file
diff --git a/examples/plan_mem0/CMakeLists.txt b/examples/plan_mem0/CMakeLists.txt
deleted file mode 100644
index 52c8c15..0000000
--- a/examples/plan_mem0/CMakeLists.txt
+++ /dev/null
@@ -1,12 +0,0 @@
-set(target humanus_cli_plan_mem0)
-
-add_executable(${target} humanus_plan_mem0.cpp)
-
-# Link against the core library
-target_link_libraries(${target} PRIVATE humanus)
-
-# Set the output directory
-set_target_properties(${target}
- PROPERTIES
- RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/bin"
-)
\ No newline at end of file
diff --git a/examples/plan_mem0/humanus_plan_mem0.cpp b/examples/plan_mem0/humanus_plan_mem0.cpp
deleted file mode 100644
index a7b653d..0000000
--- a/examples/plan_mem0/humanus_plan_mem0.cpp
+++ /dev/null
@@ -1,148 +0,0 @@
-#include "agent/humanus.h"
-#include "logger.h"
-#include "prompt.h"
-#include "flow/flow_factory.h"
-#include "memory/mem0/base.h"
-
-#if defined (__unix__) || (defined (__APPLE__) && defined (__MACH__))
-#include <signal.h>
-#include <unistd.h>
-#elif defined (_WIN32)
-#define WIN32_LEAN_AND_MEAN
-#ifndef NOMINMAX
-#define NOMINMAX
-#endif
-#include <windows.h>
-#include <io.h>
-#endif
-
-using namespace humanus;
-
-#if defined (__unix__) || (defined (__APPLE__) && defined (__MACH__)) || defined (_WIN32)
-static void sigint_handler(int signo) {
- if (signo == SIGINT) {
- logger->info("Interrupted by user\n");
- exit(0);
- }
-}
-#endif
-
-static bool readline_utf8(std::string & line, bool multiline_input) {
-#if defined(_WIN32)
- std::wstring wline;
- if (!std::getline(std::wcin, wline)) {
- // Input stream is bad or EOF received
- line.clear();
- GenerateConsoleCtrlEvent(CTRL_C_EVENT, 0);
- return false;
- }
-
- int size_needed = WideCharToMultiByte(CP_UTF8, 0, &wline[0], (int)wline.size(), NULL, 0, NULL, NULL);
- line.resize(size_needed);
- WideCharToMultiByte(CP_UTF8, 0, &wline[0], (int)wline.size(), &line[0], size_needed, NULL, NULL);
-#else
- if (!std::getline(std::cin, line)) {
- // Input stream is bad or EOF received
- line.clear();
- return false;
- }
-#endif
- if (!line.empty()) {
- char last = line.back();
- if (last == '/') { // Always return control on '/' symbol
- line.pop_back();
- return false;
- }
- if (last == '\\') { // '\\' changes the default action
- line.pop_back();
- multiline_input = !multiline_input;
- }
- }
- line += '\n';
-
- // By default, continue input if multiline_input is set
- return multiline_input;
-}
-
-int main() {
-
- // ctrl+C handling
- {
-#if defined (__unix__) || (defined (__APPLE__) && defined (__MACH__))
- struct sigaction sigint_action;
- sigint_action.sa_handler = sigint_handler;
- sigemptyset (&sigint_action.sa_mask);
- sigint_action.sa_flags = 0;
- sigaction(SIGINT, &sigint_action, NULL);
-#elif defined (_WIN32)
- auto console_ctrl_handler = +[](DWORD ctrl_type) -> BOOL {
- return (ctrl_type == CTRL_C_EVENT) ? (sigint_handler(SIGINT), true) : false;
- };
- SetConsoleCtrlHandler(reinterpret_cast<PHANDLER_ROUTINE>(console_ctrl_handler), true);
- SetConsoleCP(CP_UTF8);
- SetConsoleOutputCP(CP_UTF8);
- _setmode(_fileno(stdin), _O_WTEXT); // wide character input mode
-#endif
- }
-
- auto memory = std::make_shared<mem0::Memory>(mem0::MemoryConfig());
-
- std::shared_ptr<BaseAgent> agent_ptr = std::make_shared<Humanus>(
- ToolCollection( // Add general-purpose tools to the tool collection
- {
- std::make_shared<PythonExecute>(),
- std::make_shared<Puppeteer>(), // for web browsing
- std::make_shared<Filesystem>(),
- std::make_shared<Terminate>()
- }
- ),
- "auto",
- std::set{"terminate"},
- "humanus_mem0",
- "A versatile agent that can solve various tasks using multiple tools",
- prompt::humanus::SYSTEM_PROMPT,
- prompt::humanus::NEXT_STEP_PROMPT,
- nullptr,
- memory
- );
-
- std::map<std::string, std::shared_ptr<BaseAgent>> agents;
- agents["default"] = agent_ptr;
-
- auto flow = FlowFactory::create_flow(
- FlowType::PLANNING,
- nullptr, // llm
- nullptr, // planning_tool
- std::vector<std::string>{}, // executor_keys
- "", // active_plan_id
- agents, // agents
- std::vector<std::shared_ptr<BaseTool>>{}, // tools
- "default" // primary_agent_key
- );
-
- while (true) {
- if (agent_ptr->current_step == agent_ptr->max_steps) {
- std::cout << "Automatically paused after " << agent_ptr->current_step << " steps." << std::endl;
- std::cout << "Enter your prompt (enter an empty line to resume or 'exit' to quit): ";
- agent_ptr->reset(false);
- } else {
- std::cout << "Enter your prompt (or 'exit' to quit): ";
- }
-
- if (agent_ptr->state != AgentState::IDLE) {
- break;
- }
-
- std::string prompt;
- readline_utf8(prompt, false);
- if (prompt == "exit" || prompt == "exit\n") {
- logger->info("Goodbye!");
- break;
- }
-
- std::cout << "Processing your request..." << std::endl;
- memory->current_request = prompt;
- auto result = flow->execute(prompt);
- std::cout << result << std::endl;
- }
-}
\ No newline at end of file
diff --git a/flow/planning.cpp b/flow/planning.cpp
index 6ade389..678db97 100644
--- a/flow/planning.cpp
+++ b/flow/planning.cpp
@@ -63,16 +63,16 @@ std::string PlanningFlow::execute(const std::string& input) {
// Refactor memory
std::string prefix_sum = _summarize_plan(executor->memory->get_messages(step_result));
- executor->reset(true); // TODO: More fine-grained memory reset?
+ executor->reset(false);
executor->update_memory("assistant", prefix_sum);
if (!input.empty()) {
executor->update_memory("user", "Continue to accomplish the task: " + input);
}
- result += step_info.value("type", "Step " + std::to_string(current_step_index)) + ":\n" + prefix_sum + "\n\n";
+ result += "##" + step_info.value("type", "Step " + std::to_string(current_step_index)) + ":\n" + prefix_sum + "\n\n";
}
- reset(true); // Clear memory and state for next plan
+ reset(true); // Clear short-term memory and state for the next plan
return result;
} catch (const std::exception& e) {
@@ -89,13 +89,19 @@ void PlanningFlow::_create_initial_plan(const std::string& request) {
std::string system_prompt = "You are a planning assistant. Your task is to create a detailed plan with clear steps.";
// Create a user message with the request
- Message user_message = Message::user_message(
- "Create a detailed plan to accomplish this task: " + request
- );
+ std::string user_prompt = "Please provide a detailed plan to accomplish this task: " + request + "\n\n";
+ user_prompt += "**Note**: The following executors will be used to accomplish the plan.\n\n";
+ for (const auto& [key, agent] : agents) {
+ auto tool_call_agent = std::dynamic_pointer_cast<ToolCallAgent>(agent);
+ if (tool_call_agent) {
+ user_prompt += "Available tools for executor `" + key + "`:\n";
+ user_prompt += tool_call_agent->available_tools.to_params().dump(2) + "\n\n";
+ }
+ }
// Call LLM with PlanningTool
auto response = llm->ask_tool(
- {user_message},
+ {Message::user_message(user_prompt)},
system_prompt,
"", // No next_step_prompt for initial plan creation
json::array({planning_tool->to_param()}),
@@ -236,7 +242,7 @@ std::string PlanningFlow::_execute_step(const std::shared_ptr<BaseAgent>& execut
step_prompt += plan_status.dump(2);
step_prompt += "\n\nYOUR CURRENT TASK:\n";
step_prompt += "You are now working on step " + std::to_string(current_step_index) + ": \"" + step_text + "\"\n";
- step_prompt += "Please execute this step using the appropriate tools. When you're done, provide a summary of what you accomplished.";
+ step_prompt += "Please execute this step using the appropriate tools. When you're done, provide a summary of what you accomplished and call `terminate` to trigger the next step.";
// Use agent.run() to execute the step
try {
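Reviewer note: `_create_initial_plan` now shows the planner each executor's tools by dumping `available_tools.to_params()` into the prompt. Assuming `to_params()` emits OpenAI-style function schemas (consistent with the same value being passed to `llm->ask_tool`), the JSON embedded in the planning prompt looks roughly like:

```cpp
#include <nlohmann/json.hpp>
using json = nlohmann::json;

// Illustrative output shape only; the real schemas come from each tool.
json tools = json::array({
    {
        {"type", "function"},
        {"function", {
            {"name", "python_execute"},
            {"description", "Execute a Python snippet"},
            {"parameters", {
                {"type", "object"},
                {"properties", {{"code", {{"type", "string"}}}}},
                {"required", json::array({"code"})}
            }}
        }}
    }
});
```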
diff --git a/flow/planning.h b/flow/planning.h
index d727007..0974b54 100644
--- a/flow/planning.h
+++ b/flow/planning.h
@@ -3,6 +3,7 @@
#include "base.h"
#include "agent/base.h"
+#include "agent/toolcall.h"
#include "llm.h"
#include "logger.h"
#include "schema.h"
diff --git a/include/config.h b/include/config.h
index 6e1bddf..86d021a 100644
--- a/include/config.h
+++ b/include/config.h
@@ -5,23 +5,15 @@
#include "prompt.h"
#include "logger.h"
#include "toml.hpp"
+#include "utils.h"
#include
#include