main
hkr04 2025-03-17 16:35:11 +08:00
parent de1b7e3566
commit be606c1020
17 changed files with 160 additions and 187 deletions

View File

@ -4,7 +4,6 @@ project(humanus.cpp VERSION 0.1.0)
set(CMAKE_CXX_STANDARD 17)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
# OpenSSL
find_package(OpenSSL 3.0.0 REQUIRED)
if(OPENSSL_FOUND)
message(STATUS "OpenSSL found: ${OPENSSL_VERSION}")
@ -16,7 +15,6 @@ else()
message(FATAL_ERROR "OpenSSL not found. Please install OpenSSL development libraries.")
endif()
# Python
find_package(Python3 COMPONENTS Development)
if(Python3_FOUND)
message(STATUS "Python3 found: ${Python3_VERSION}")
@ -28,21 +26,19 @@ else()
message(WARNING "Python3 development libraries not found. Python interpreter will not be available.")
endif()
# MCP
# mcp
add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/mcp)
# server
add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/server)
# include
include_directories(${CMAKE_CURRENT_SOURCE_DIR})
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/mcp/include)
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/mcp/common)
find_package(Threads REQUIRED)
file(GLOB AGENT_SOURCES
"agent/*.cpp"
"agent/*.cc"
@ -58,7 +54,6 @@ file(GLOB FLOW_SOURCES
"flow/*.cc"
)
add_executable(humanus_cpp
main.cpp
config.cpp
@ -71,19 +66,9 @@ add_executable(humanus_cpp
${FLOW_SOURCES}
)
target_link_libraries(humanus_cpp PRIVATE Threads::Threads mcp server ${OPENSSL_LIBRARIES})
target_link_libraries(humanus_cpp PRIVATE Threads::Threads mcp ${OPENSSL_LIBRARIES})
if(Python3_FOUND)
target_link_libraries(humanus_cpp PRIVATE ${Python3_LIBRARIES})
endif()
# add_executable(humanus_simple main_simple.cpp logger.cpp schema.cpp)
# target_link_libraries(humanus_simple PRIVATE Threads::Threads ${OPENSSL_LIBRARIES})
# if(Python3_FOUND)
# target_link_libraries(humanus_simple PRIVATE ${Python3_LIBRARIES})
# endif()
install(TARGETS humanus_cpp DESTINATION bin)
# install(TARGETS humanus_simple DESTINATION bin)
install(TARGETS humanus_cpp DESTINATION bin)
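
The python_execute tool later in this commit guards all interpreter calls with #ifdef PYTHON_FOUND. A minimal sketch of how that define would typically be wired into the setup above (an assumption; the actual project may define it in a part of the file not shown here):

if(Python3_FOUND)
    target_compile_definitions(humanus_cpp PRIVATE PYTHON_FOUND)
    target_include_directories(humanus_cpp PRIVATE ${Python3_INCLUDE_DIRS})
endif()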

View File

@ -1,5 +1,5 @@
#include "planning.h"
#include <iomanip> // 添加iomanip头文件用于std::setprecision
#include <iomanip>
namespace humanus {

View File

@ -6,7 +6,7 @@
namespace humanus {
// 初始化静态成员
// Initialize static members
Config* Config::_instance = nullptr;
std::mutex Config::_mutex;
@ -17,7 +17,7 @@ void Config::_load_initial_config() {
const auto& data = toml::parse_file(config_path.string());
// 检查工具配置是否存在
// Check if tool configuration exists
if (!data.contains("llm") || !data["llm"].is_table()) {
throw std::runtime_error("llm config not found");
}
@ -59,7 +59,7 @@ void Config::_load_initial_config() {
_config.llm["default"] = llm_settings;
} catch (const std::exception& e) {
std::cerr << "Failed to load config file: " << e.what() << std::endl;
// 设置默认配置
// Set default configuration
LLMSettings default_settings;
default_settings.model = "gpt-3.5-turbo";
default_settings.api_key = "sk-";
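
For reference, a minimal config/config.toml sketch that this loader would accept (hedged: only the two fields used in the fallback above are shown; LLMSettings may define more):

[llm]
model = "gpt-3.5-turbo"
api_key = "sk-..."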

View File

@ -13,17 +13,14 @@
namespace humanus {
// Get project root directory
static std::filesystem::path get_project_root() {
// 获取项目根目录
return std::filesystem::path(__FILE__).parent_path();
}
inline const std::filesystem::path PROJECT_ROOT = get_project_root();
inline const std::filesystem::path WORKSPACE_ROOT = PROJECT_ROOT / "workspace";
/**
* @brief LLM settings
*/
struct LLMSettings {
std::string model;
std::string api_key;
@ -65,19 +62,17 @@ private:
bool _initialized = false;
AppConfig _config;
// Private constructor
Config() {
_load_initial_config();
_initialized = true;
}
// Disallow copy and assignment
Config(const Config&) = delete;
Config& operator=(const Config&) = delete;
/**
* @brief Get the config path
* @return The config path
*/
static std::filesystem::path _get_config_path() {
auto root = PROJECT_ROOT;
@ -89,18 +84,18 @@ private:
if (std::filesystem::exists(example_path)) {
return example_path;
}
throw std::runtime_error("无法找到配置文件");
throw std::runtime_error("Config file not found");
}
/**
* @brief Load the initial config
*/
void _load_initial_config();
public:
/**
* @brief Get the singleton instance
* @return The config instance
*/
static Config& get_instance() {
if (_instance == nullptr) {
@ -113,16 +108,16 @@ public:
}
/**
* @brief Get the LLM settings
* @return The LLM settings map
*/
const std::map<std::string, LLMSettings>& llm() const {
return _config.llm;
}
/**
* @brief Get the app config
* @return The app config
*/
const AppConfig& get_config() const {
return _config;

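A short usage sketch for the accessors above (assuming the LLMSettings fields shown earlier in this file):

#include "config.h"

int main() {
    const auto& settings = humanus::Config::get_instance().llm().at("default");
    // settings.model and settings.api_key are populated from config/config.toml
    return 0;
}
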
llm.h
View File

@ -24,7 +24,7 @@ private:
std::shared_ptr<LLMSettings> llm_config_;
public:
// 构造函数
// Constructor
LLM(const std::string& config_name, const std::shared_ptr<LLMSettings>& llm_config = nullptr) : llm_config_(llm_config) {
if (!llm_config_) {
if (Config::get_instance().llm().find(config_name) == Config::get_instance().llm().end()) {
@ -38,7 +38,7 @@ public:
});
}
// 单例模式获取实例
// Get the singleton instance
static std::shared_ptr<LLM> get_instance(const std::string& config_name = "default", const std::shared_ptr<LLMSettings>& llm_config = nullptr) {
if (_instances.find(config_name) == _instances.end()) {
_instances[config_name] = std::make_shared<LLM>(config_name, llm_config);
@ -47,11 +47,11 @@ public:
}
/**
* @brief Format the message list to the format that LLM can accept
* @param messages The list of Message objects
* @return The formatted message list
* @throws std::invalid_argument If the message format is invalid or missing necessary fields
* @throws std::runtime_error If the message type is not supported
*/
static std::vector<json> format_messages(const std::vector<Message>& messages) {
std::vector<json> formatted_messages;
@ -73,18 +73,18 @@ public:
}
/**
* @brief Format the message list to the format that LLM can accept
* @param messages The list of json message objects
* @return The formatted message list
* @throws std::invalid_argument If the message format is invalid or missing necessary fields
* @throws std::runtime_error If the message type is not supported
*/
static std::vector<json> format_messages(const std::vector<json>& messages) {
std::vector<json> formatted_messages;
for (const auto& message : messages) {
if (!message.contains("role")) {
throw std::invalid_argument("消息缺少必要字段: role");
throw std::invalid_argument("Message missing necessary field: role");
}
formatted_messages.push_back(message);
}
@ -102,13 +102,13 @@ public:
}
/**
* @brief Send a request to the LLM and get the reply
* @param messages The conversation message list
* @param system_msgs Optional system messages
* @param max_retries The maximum number of retries
* @return The generated assistant content
* @throws std::invalid_argument If the message is invalid or the reply is empty
* @throws std::runtime_error If the API call fails
*/
std::string ask(
const std::vector<Message>& messages,
@ -169,16 +169,16 @@ public:
}
/**
* @brief Send a request to the LLM with tool functions
* @param messages The conversation message list
* @param system_msgs Optional system messages
* @param timeout The request timeout (seconds)
* @param tools The tool list
* @param tool_choice The tool choice strategy
* @param max_retries The maximum number of retries
* @return The generated assistant message (content, tool_calls)
* @throws std::invalid_argument If the tool, tool choice or message is invalid
* @throws std::runtime_error If the API call fails
*/
json ask_tool(
const std::vector<Message>& messages,

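A usage sketch for the ask() interface documented above (hedged: Message construction helpers are not shown in this diff):

auto llm = humanus::LLM::get_instance("default");
std::vector<humanus::Message> messages;
// ... populate messages (user/assistant turns) ...
std::string reply = llm->ask(messages); // throws std::runtime_error if the API call fails
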
View File

@ -20,18 +20,16 @@ std::shared_ptr<spdlog::logger> define_log_level(spdlog::level::level_enum print
std::string log_name = name.empty() ? formatted_date : name + "_" + formatted_date;
std::string log_file_path = (PROJECT_ROOT / "logs" / (log_name + ".log")).string();
// 确保日志目录存在
// Ensure the log directory exists
std::filesystem::create_directories(PROJECT_ROOT / "logs");
// 重置日志输出
// Reset the log output
std::shared_ptr<spdlog::logger> _logger = std::make_shared<spdlog::logger>(log_name);
// Add stderr sink (equivalent to sys.stderr in Python)
auto stderr_sink = std::make_shared<spdlog::sinks::stderr_color_sink_mt>();
stderr_sink->set_level(print_level);
_logger->sinks().push_back(stderr_sink);
// Add file sink (equivalent to PROJECT_ROOT / f"logs/{log_name}.log" in Python)
auto file_sink = std::make_shared<spdlog::sinks::basic_file_sink_mt>(log_file_path, false);
file_sink->set_level(logfile_level);
_logger->sinks().push_back(file_sink);

View File

@ -14,17 +14,14 @@
namespace humanus {
// Use PROJECT_ROOT defined in config.h
// static const std::filesystem::path PROJECT_ROOT = std::filesystem::current_path();
static spdlog::level::level_enum _print_level = spdlog::level::info;
/**
* @brief Adjust the log level
* @param print_level The console output log level
* @param logfile_level The file record log level
* @param name The log file name prefix
* @return The log record instance
*/
extern std::shared_ptr<spdlog::logger> define_log_level(spdlog::level::level_enum print_level = spdlog::level::info,
spdlog::level::level_enum logfile_level = spdlog::level::debug,

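A usage sketch for this declaration (hedged: the trailing name parameter is assumed from the implementation shown earlier):

auto logger = humanus::define_log_level(spdlog::level::info,
                                        spdlog::level::debug,
                                        "humanus");
logger->info("goes to stderr and to logs/humanus_<date>.log");
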
mcp

@ -1 +1 @@
Subproject commit 5e9ff48b070a11ba20529feb22c68d0e9ef46f3d
Subproject commit 7f9862f91ca82118f31834570ee409381574eba0

View File

@ -1,9 +1,9 @@
/**
* @file mcp_server_main.cpp
* @brief OpenManus MCP Server Implementation
*
* This file implements the OpenManus MCP server that provides tool invocation functionality.
* Currently implements the PythonExecute tool.
*/
#include "../mcp/include/mcp_server.h"
@ -15,27 +15,27 @@
#include <memory>
#include <filesystem>
// 导入Python执行工具
// Import Python execution tool
extern void register_python_execute_tool(mcp::server& server);
int main() {
// 创建并配置服务器
// Create and configure server
mcp::server server("localhost", 8818);
server.set_server_info("OpenManusMCPServer", "0.0.1");
// 设置服务器能力
// Set server capabilities
mcp::json capabilities = {
{"tools", mcp::json::object()}
};
server.set_capabilities(capabilities);
// 注册Python执行工具
// Register Python execution tool
register_python_execute_tool(server);
// 启动服务器
std::cout << "启动OpenManus MCP服务器地址: localhost:8818..." << std::endl;
std::cout << "按Ctrl+C停止服务器" << std::endl;
server.start(true); // 阻塞模式
// Start server
std::cout << "Starting OpenManus MCP server at localhost:8818..." << std::endl;
std::cout << "Press Ctrl+C to stop server" << std::endl;
server.start(true); // Blocking mode
return 0;
}

View File

@ -1,8 +1,8 @@
/**
* @file python_execute.cpp
* @brief OpenManus Python execution tool implementation
*
* This file implements the OpenManus Python execution tool, using Python.h to directly call the Python interpreter.
*/
#include "mcp/include/mcp_server.h"
@ -15,24 +15,24 @@
#include <stdexcept>
#include <mutex>
// 检查是否找到Python
// Check if Python is found
#ifdef PYTHON_FOUND
#include <Python.h>
#endif
/**
* @class python_interpreter
* @brief Python interpreter class for executing Python code
*/
class python_interpreter {
private:
// 互斥锁确保Python解释器的线程安全
// Mutex to ensure thread safety of Python interpreter
mutable std::mutex py_mutex;
bool is_initialized;
public:
/**
* @brief Constructor, initializes Python interpreter
*/
python_interpreter() : is_initialized(false) {
#ifdef PYTHON_FOUND
@ -40,21 +40,21 @@ public:
Py_Initialize();
if (Py_IsInitialized()) {
is_initialized = true;
// 初始化线程支持
// Initialize thread support
PyEval_InitThreads();
// 释放GIL允许其他线程获取
// Release GIL to allow other threads to acquire
PyThreadState *_save = PyEval_SaveThread();
} else {
std::cerr << "Python解释器初始化失败" << std::endl;
std::cerr << "Failed to initialize Python interpreter" << std::endl;
}
} catch (const std::exception& e) {
std::cerr << "Python解释器初始化异常: " << e.what() << std::endl;
std::cerr << "Python interpreter initialization exception: " << e.what() << std::endl;
}
#endif
}
/**
* @brief Destructor, releases Python interpreter
*/
~python_interpreter() {
#ifdef PYTHON_FOUND
@ -67,17 +67,17 @@ public:
}
/**
* @brief Execute Python code
* @param input JSON object containing Python code
* @return JSON object with execution results
*/
mcp::json forward(const mcp::json& input) const {
#ifdef PYTHON_FOUND
if (!is_initialized) {
return mcp::json{{"error", "Python解释器未正确初始化"}};
return mcp::json{{"error", "Python interpreter not properly initialized"}};
}
// 获取GIL锁
// Acquire GIL lock
std::lock_guard<std::mutex> lock(py_mutex);
PyGILState_STATE gstate = PyGILState_Ensure();
@ -87,25 +87,25 @@ public:
if (input.contains("code") && input["code"].is_string()) {
std::string code = input["code"].get<std::string>();
// 获取主模块和字典
// Get main module and dictionary
PyObject *main_module = PyImport_AddModule("__main__");
if (!main_module) {
PyGILState_Release(gstate);
return mcp::json{{"error", "无法获取Python主模块"}};
return mcp::json{{"error", "Failed to get Python main module"}};
}
PyObject *main_dict = PyModule_GetDict(main_module);
if (!main_dict) {
PyGILState_Release(gstate);
return mcp::json{{"error", "无法获取Python主模块字典"}};
return mcp::json{{"error", "Failed to get Python main module dictionary"}};
}
// 导入sys和io模块
// Import sys and io modules
PyObject *sys_module = PyImport_ImportModule("sys");
if (!sys_module) {
PyErr_Print();
PyGILState_Release(gstate);
return mcp::json{{"error", "无法导入sys模块"}};
return mcp::json{{"error", "Failed to import sys module"}};
}
PyObject *io_module = PyImport_ImportModule("io");
@ -113,20 +113,20 @@ public:
Py_DECREF(sys_module);
PyErr_Print();
PyGILState_Release(gstate);
return mcp::json{{"error", "无法导入io模块"}};
return mcp::json{{"error", "Failed to import io module"}};
}
// 获取StringIO类
// Get StringIO class
PyObject *string_io = PyObject_GetAttrString(io_module, "StringIO");
if (!string_io) {
Py_DECREF(io_module);
Py_DECREF(sys_module);
PyErr_Print();
PyGILState_Release(gstate);
return mcp::json{{"error", "无法获取StringIO类"}};
return mcp::json{{"error", "Failed to get StringIO class"}};
}
// 创建StringIO对象
// Create StringIO objects
PyObject *sys_stdout = PyObject_CallObject(string_io, nullptr);
if (!sys_stdout) {
Py_DECREF(string_io);
@ -134,7 +134,7 @@ public:
Py_DECREF(sys_module);
PyErr_Print();
PyGILState_Release(gstate);
return mcp::json{{"error", "无法创建stdout StringIO对象"}};
return mcp::json{{"error", "Failed to create stdout StringIO object"}};
}
PyObject *sys_stderr = PyObject_CallObject(string_io, nullptr);
@ -145,17 +145,17 @@ public:
Py_DECREF(sys_module);
PyErr_Print();
PyGILState_Release(gstate);
return mcp::json{{"error", "无法创建stderr StringIO对象"}};
return mcp::json{{"error", "Failed to create stderr StringIO object"}};
}
// 保存原始的stdout和stderr
// Save original stdout and stderr
PyObject *old_stdout = PySys_GetObject("stdout");
PyObject *old_stderr = PySys_GetObject("stderr");
if (old_stdout) Py_INCREF(old_stdout);
if (old_stderr) Py_INCREF(old_stderr);
// 替换sys.stdout和sys.stderr
// Replace sys.stdout and sys.stderr
if (PySys_SetObject("stdout", sys_stdout) != 0 ||
PySys_SetObject("stderr", sys_stderr) != 0) {
Py_DECREF(sys_stderr);
@ -165,23 +165,23 @@ public:
Py_DECREF(sys_module);
PyErr_Print();
PyGILState_Release(gstate);
return mcp::json{{"error", "无法设置stdout/stderr重定向"}};
return mcp::json{{"error", "Failed to set stdout/stderr redirection"}};
}
// 执行Python代码
// Execute Python code
PyObject *result = PyRun_String(code.c_str(), Py_file_input, main_dict, main_dict);
if (!result) {
PyErr_Print();
}
Py_XDECREF(result);
// 获取输出和错误
// Get output and errors
PyObject *out_value = PyObject_CallMethod(sys_stdout, "getvalue", nullptr);
PyObject *err_value = PyObject_CallMethod(sys_stderr, "getvalue", nullptr);
std::string output, error;
// 安全地转换Python字符串到C++字符串
// Safely convert Python strings to C++ strings
if (out_value && PyUnicode_Check(out_value)) {
output = PyUnicode_AsUTF8(out_value);
}
@ -190,7 +190,7 @@ public:
error = PyUnicode_AsUTF8(err_value);
}
// 恢复原始的stdout和stderr
// Restore original stdout and stderr
if (old_stdout) {
PySys_SetObject("stdout", old_stdout);
Py_DECREF(old_stdout);
@ -201,7 +201,7 @@ public:
Py_DECREF(old_stderr);
}
// 清理
// Cleanup
Py_XDECREF(out_value);
Py_XDECREF(err_value);
Py_DECREF(sys_stdout);
@ -210,7 +210,7 @@ public:
Py_DECREF(io_module);
Py_DECREF(sys_module);
// 准备JSON输出
// Prepare JSON output
if (!output.empty()) {
result_json["output"] = output;
}
@ -237,10 +237,10 @@ public:
result_json["error"] = "Invalid parameters or code not provided";
}
} catch (const std::exception& e) {
result_json["error"] = std::string("Python执行异常: ") + e.what();
result_json["error"] = std::string("Python execution exception: ") + e.what();
}
// 释放GIL
// Release GIL
PyGILState_Release(gstate);
return result_json;
#else
@ -249,17 +249,17 @@ public:
}
};
// 全局Python解释器实例
// Global Python interpreter instance
static python_interpreter interpreter;
// Python执行工具处理函数
// Python execution tool handler function
mcp::json python_execute_handler(const mcp::json& args) {
if (!args.contains("code")) {
throw mcp::mcp_exception(mcp::error_code::invalid_params, "缺少'code'参数");
throw mcp::mcp_exception(mcp::error_code::invalid_params, "Missing 'code' parameter");
}
try {
// 使用Python解释器执行代码
// Use Python interpreter to execute code
mcp::json result = interpreter.forward(args);
return {{
@ -268,7 +268,7 @@ mcp::json python_execute_handler(const mcp::json& args) {
}};
} catch (const std::exception& e) {
throw mcp::mcp_exception(mcp::error_code::internal_error,
"执行Python代码失败: " + std::string(e.what()));
"Failed to execute Python code: " + std::string(e.what()));
}
}
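
For illustration, a hedged sketch of invoking the handler directly (the exact shape of the wrapped return value is elided in this diff):

mcp::json args = {{"code", "print(1 + 1)"}};
mcp::json out = python_execute_handler(args);
// the interpreter result carries an "output" field ("2\n" here) and,
// if anything was written to stderr, an "error" field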

View File

@ -11,11 +11,11 @@
namespace humanus {
// 从config_mcp.toml中读取工具配置
// Read tool configuration from config_mcp.toml
struct MCPToolConfig {
std::string type;
std::string host;
int port;
std::string url;
std::string command;
std::vector<std::string> args;
@ -25,36 +25,36 @@ struct MCPToolConfig {
MCPToolConfig config;
try {
// 获取配置文件路径
// Get config file path
auto config_path = PROJECT_ROOT / "config" / "config_mcp.toml";
if (!std::filesystem::exists(config_path)) {
throw std::runtime_error("找不到MCP配置文件: " + config_path.string());
throw std::runtime_error("MCP config file not found: " + config_path.string());
}
// 解析TOML文件
// Parse TOML file
const auto& data = toml::parse_file(config_path.string());
// 检查工具配置是否存在
// Check if tool config exists
if (!data.contains(tool_name) || !data[tool_name].is_table()) {
throw std::runtime_error("MCP配置文件中找不到工具配置: " + tool_name);
throw std::runtime_error("Tool configuration not found in MCP config file: " + tool_name);
}
const auto& tool_table = *data[tool_name].as_table();
// 读取类型
// Read type
if (!tool_table.contains("type") || !tool_table["type"].is_string()) {
throw std::runtime_error("工具配置缺少type字段: " + tool_name);
throw std::runtime_error("Tool configuration missing type field: " + tool_name);
}
config.type = tool_table["type"].as_string()->get();
if (config.type == "stdio") {
// 读取命令
// Read command
if (!tool_table.contains("command") || !tool_table["command"].is_string()) {
throw std::runtime_error("stdio类型工具配置缺少command字段: " + tool_name);
throw std::runtime_error("stdio type tool configuration missing command field: " + tool_name);
}
config.command = tool_table["command"].as_string()->get();
// 读取参数(如果有)
// Read arguments (if any)
if (tool_table.contains("args") && tool_table["args"].is_array()) {
const auto& args_array = *tool_table["args"].as_array();
for (const auto& arg : args_array) {
@ -64,7 +64,7 @@ struct MCPToolConfig {
}
}
// 读取环境变量
// Read environment variables
std::string env_section = tool_name + ".env";
if (data.contains(env_section) && data[env_section].is_table()) {
const auto& env_table = *data[env_section].as_table();
@ -81,25 +81,25 @@ struct MCPToolConfig {
}
}
} else if (config.type == "sse") {
// 读取host和port或url
// Read host and port or url
if (tool_table.contains("url") && tool_table["url"].is_string()) {
config.url = tool_table["url"].as_string()->get();
} else {
if (!tool_table.contains("host") || !tool_table["host"].is_string()) {
throw std::runtime_error("sse类型工具配置缺少host字段: " + tool_name);
throw std::runtime_error("sse type tool configuration missing host field: " + tool_name);
}
config.host = tool_table["host"].as_string()->get();
if (!tool_table.contains("port") || !tool_table["port"].is_integer()) {
throw std::runtime_error("sse类型工具配置缺少port字段: " + tool_name);
throw std::runtime_error("sse type tool configuration missing port field: " + tool_name);
}
config.port = tool_table["port"].as_integer()->get();
}
} else {
throw std::runtime_error("不支持的工具类型: " + config.type);
throw std::runtime_error("Unsupported tool type: " + config.type);
}
} catch (const std::exception& e) {
std::cerr << "加载MCP工具配置失败: " << e.what() << std::endl;
std::cerr << "Failed to load MCP tool configuration: " << e.what() << std::endl;
throw;
}
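
For reference, hypothetical config_mcp.toml entries matching the fields parsed above (tool names and values are illustrative only):

[example_sse_tool]
type = "sse"
host = "localhost"
port = 8818

[example_stdio_tool]
type = "stdio"
command = "npx"
args = ["-y", "some-mcp-server"]

[example_stdio_tool.env]
API_KEY = "..."
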
@ -182,7 +182,7 @@ struct BaseTool {
if (special_tool_name.find(name) != special_tool_name.end()) {
return;
}
// 从配置文件加载工具配置
// Load tool configuration from config file
auto _config = MCPToolConfig::load_from_toml(name);
if (_config.type == "stdio") {
@ -199,7 +199,7 @@ struct BaseTool {
} else if (!_config.url.empty()) {
_client = std::make_unique<mcp::sse_client>(_config.url, "/sse");
} else {
throw std::runtime_error("MCP SSE 配置缺少 host 或 port 或 url");
throw std::runtime_error("MCP SSE configuration missing host or port or url");
}
}
@ -215,11 +215,11 @@ struct BaseTool {
virtual ToolResult execute(const json& arguments) {
try {
if (!_client) {
throw std::runtime_error("MCP 客户端未初始化");
throw std::runtime_error("MCP client not initialized");
}
json result = _client->call_tool(name, arguments);
bool is_error = result.value("isError", false);
// 根据是否有错误返回不同的ToolResult
// Return different ToolResult based on whether there is an error
if (is_error) {
return ToolError(result.value("content", json::array()));
} else {

View File

@ -22,7 +22,7 @@ json CreateChatCompletion::_build_parameters() const {
}
json CreateChatCompletion::_create_type_schema(const std::string& type_hint) const {
// 处理基本类型
// Handle basic types
if (type_mapping.find(type_hint) != type_mapping.end()) {
return {
{"type", "object"},
@ -36,9 +36,9 @@ json CreateChatCompletion::_create_type_schema(const std::string& type_hint) con
};
}
// 处理数组类型
// Handle array types
if (type_hint.find("array") == 0) {
std::string item_type = "string"; // 默认项类型
std::string item_type = "string"; // Default item type
return {
{"type", "object"},
{"properties", {
@ -51,7 +51,7 @@ json CreateChatCompletion::_create_type_schema(const std::string& type_hint) con
};
}
// 处理字典类型
// Handle dictionary type
if (type_hint.find("object") == 0) {
return {
{"type", "object"},
@ -65,7 +65,7 @@ json CreateChatCompletion::_create_type_schema(const std::string& type_hint) con
};
}
// 默认返回字符串类型
// Default return string type
return {
{"type", "object"},
{"properties", {

View File

@ -14,7 +14,7 @@ struct CreateChatCompletion : BaseTool {
inline static const std::string name_ = "create_chat_completion";
inline static const std::string description_ = "Creates a structured completion with specified output formatting.";
// 类型映射表,用于JSON schema
// Type mapping table, used for JSON schema
inline static std::unordered_map<std::string, std::string> type_mapping = {
{"string", "string"},
{"int", "integer"},

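A small sketch of how this table is meant to be used when building a schema (entries beyond those shown are assumed to follow the same pattern):

std::string json_type = CreateChatCompletion::type_mapping.at("int"); // "integer"
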
View File

@ -83,7 +83,6 @@ struct FileSystem : BaseTool {
return ToolError("Failed to initialize shell client");
}
// Handle command parameters
std::string tool;
if (args.contains("tool")) {
if (args["tool"].is_string()) {
@ -107,7 +106,7 @@ struct FileSystem : BaseTool {
bool is_error = result.value("isError", false);
// 根据是否有错误返回不同的ToolResult
// Return different ToolResult based on whether there is an error
if (is_error) {
return ToolError(result.value("content", json::array()));
} else {

View File

@ -76,7 +76,6 @@ struct Puppeteer : BaseTool {
return ToolError("Failed to initialize puppeteer client");
}
// Handle command parameters
std::string tool;
if (args.contains("tool")) {
if (args["tool"].is_string()) {
@ -100,7 +99,7 @@ struct Puppeteer : BaseTool {
bool is_error = result.value("isError", false);
// 根据是否有错误返回不同的ToolResult
// Return different ToolResult based on whether there is an error
if (is_error) {
return ToolError(result.value("content", json::array()));
} else {

View File

@ -58,12 +58,12 @@ struct Shell : BaseTool {
ToolResult execute(const json& args) override {
try {
// 确保客户端已初始化
// Ensure client is initialized
if (!_client) {
return ToolError("Failed to initialize shell client");
}
// 处理命令参数
// Handle command parameters
std::string command;
if (args.contains("command")) {
if (args["command"].is_string()) {
@ -78,12 +78,12 @@ struct Shell : BaseTool {
json tool_args = args;
tool_args.erase("command");
// 调用shell工具
// Call shell tool
json result = _client->call_tool("shell_" + command, tool_args);
bool is_error = result.value("isError", false);
// 根据是否有错误返回不同的ToolResult
// Return different ToolResult based on whether there is an error
if (is_error) {
return ToolError(result.value("content", json::array()));
} else {

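To make the dispatch above concrete, a hedged sketch of the argument flow (the "script" key is hypothetical):

json args = {{"command", "execute"}, {"script", "ls -la"}};
// after erasing "command", the call becomes:
// _client->call_tool("shell_execute", {{"script", "ls -la"}});
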
View File

@ -35,19 +35,19 @@ struct ToolCollection {
}
}
// // Execute all tools in the collection sequentially.
// std::vector<ToolResult> execute_all(const json& args) const { // No reference now
// std::vector<ToolResult> results;
// for (auto tool : tools) {
// try {
// auto result = tool->execute(args);
// results.push_back(result);
// } catch (const std::exception& e) {
// results.push_back(ToolError(e.what()));
// }
// }
// return results;
// }
// Execute all tools in the collection sequentially.
std::vector<ToolResult> execute_all(const json& args) const { // No reference now
std::vector<ToolResult> results;
for (auto tool : tools) {
try {
auto result = tool->execute(args);
results.push_back(result);
} catch (const std::exception& e) {
results.push_back(ToolError(e.what()));
}
}
return results;
}
void add_tool(const std::shared_ptr<BaseTool>& tool) {
tools.push_back(tool);
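
Finally, a usage sketch for the collection interface above (hedged: tool constructors and their arguments are not shown in this diff):

humanus::ToolCollection collection;
collection.add_tool(std::make_shared<humanus::Shell>()); // assumes Shell is default-constructible
auto results = collection.execute_all({{"command", "execute"}});
// any tool that throws contributes ToolError(e.what()) to results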