a workable version (but ...)

main
hkr04 2025-03-17 01:58:37 +08:00
parent d0bb48aced
commit bfe2430534
30 changed files with 444 additions and 416 deletions

CMakeLists.txt

@@ -4,8 +4,8 @@ project(humanus.cpp VERSION 0.1.0)
 set(CMAKE_CXX_STANDARD 17)
 set(CMAKE_CXX_STANDARD_REQUIRED ON)
-# OpenSSL3.0.0
-find_package(OpenSSL REQUIRED)
+# OpenSSL
+find_package(OpenSSL 3.0.0 REQUIRED)
 if(OPENSSL_FOUND)
     message(STATUS "OpenSSL found: ${OPENSSL_VERSION}")
     message(STATUS "OpenSSL include directory: ${OPENSSL_INCLUDE_DIR}")
@@ -36,7 +36,6 @@ add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/server)
 #
 include_directories(${CMAKE_CURRENT_SOURCE_DIR})
-include_directories(${CMAKE_CURRENT_SOURCE_DIR}/include)
 include_directories(${CMAKE_CURRENT_SOURCE_DIR}/mcp/include)
 include_directories(${CMAKE_CURRENT_SOURCE_DIR}/mcp/common)
@@ -44,13 +43,33 @@ include_directories(${CMAKE_CURRENT_SOURCE_DIR}/mcp/common)
 find_package(Threads REQUIRED)
 #
-file(GLOB_RECURSE SOURCES
-    "src/*.cpp"
-    "src/*.cc"
+file(GLOB AGENT_SOURCES
+    "agent/*.cpp"
+    "agent/*.cc"
+)
+file(GLOB TOOL_SOURCES
+    "tool/*.cpp"
+    "tool/*.cc"
+)
+file(GLOB FLOW_SOURCES
+    "flow/*.cpp"
+    "flow/*.cc"
 )
 #
-add_executable(humanus_cpp ${SOURCES} main.cpp)
+add_executable(humanus_cpp
+    main.cpp
+    config.cpp
+    llm.cpp
+    prompt.cpp
+    logger.cpp
+    schema.cpp
+    ${AGENT_SOURCES}
+    ${TOOL_SOURCES}
+    ${FLOW_SOURCES}
+)
 #
 target_link_libraries(humanus_cpp PRIVATE Threads::Threads mcp server ${OPENSSL_LIBRARIES})
@@ -58,5 +77,13 @@ if(Python3_FOUND)
     target_link_libraries(humanus_cpp PRIVATE ${Python3_LIBRARIES})
 endif()
+#
+add_executable(humanus_simple main_simple.cpp logger.cpp schema.cpp)
+target_link_libraries(humanus_simple PRIVATE Threads::Threads ${OPENSSL_LIBRARIES})
+if(Python3_FOUND)
+    target_link_libraries(humanus_simple PRIVATE ${Python3_LIBRARIES})
+endif()
 #
 install(TARGETS humanus_cpp DESTINATION bin)
+install(TARGETS humanus_simple DESTINATION bin)

agent/base.h

@@ -59,7 +59,7 @@ struct BaseAgent : std::enable_shared_from_this<BaseAgent> {
     // Initialize agent with default settings if not provided.
     void initialize_agent() {
         if (!llm) {
-            llm = LLM::get_instance(name);
+            llm = LLM::get_instance("default");
         }
         if (!memory) {
             memory = std::make_shared<Memory>();
@@ -159,10 +159,6 @@ struct BaseAgent : std::enable_shared_from_this<BaseAgent> {
                 if (duplicate_count >= duplicate_threshold) {
                     break;
                 }
-            } else {
-                break;
-                // Stop counting if a non-duplicate message is encountered
-                // Slightly differenr from OpenManus implementation
             }
         }
@@ -170,7 +166,7 @@ struct BaseAgent : std::enable_shared_from_this<BaseAgent> {
     }
     void set_messages(const std::vector<Message>& messages) {
-        memory->set_messages(messages);
+        memory->add_messages(messages);
     }
 };
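
Note: with the early `break` removed above, the duplicate check now counts repeated assistant messages across the whole history rather than only the most recent contiguous run. A rough sketch of the resulting loop shape, assuming a `Message` type with `role` and `content` members (names assumed, not taken from this diff):

    // Count earlier assistant messages that repeat the last message's content.
    int duplicate_count = 0;
    const int duplicate_threshold = 2;
    for (auto it = messages.rbegin() + 1; it != messages.rend(); ++it) {
        if (it->role == "assistant" && it->content == last_message.content) {
            if (++duplicate_count >= duplicate_threshold) {
                break; // stuck: the agent keeps repeating itself
            }
        }
    }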

agent/planning.cpp

@@ -1,10 +1,11 @@
 #include "planning.h"
+#include <iomanip> // iomanip header for std::setprecision
 namespace humanus {
 // Initialize the agent with a default plan ID and validate required tools.
 void PlanningAgent::initialize_plan_and_verify_tools() {
-    active_plan_id = "plan_" + std::chrono::system_clock::now().time_since_epoch().count();
+    active_plan_id = "plan_" + std::to_string(std::chrono::system_clock::now().time_since_epoch().count());
     if (available_tools.tools_map.find("planning") == available_tools.tools_map.end()) {
         available_tools.add_tool(std::make_shared<PlanningTool>());
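
Note: the `std::to_string` fix above is not cosmetic. `time_since_epoch().count()` returns an integer, and in C++ `"plan_" + <integer>` performs pointer arithmetic on the string literal (undefined behavior once it walks off the end), not concatenation. A self-contained illustration:

    #include <chrono>
    #include <iostream>
    #include <string>

    int main() {
        auto ticks = std::chrono::system_clock::now().time_since_epoch().count();
        // std::string bad = "plan_" + ticks;   // compiles, but offsets the
        //                                      // const char* pointer: UB
        std::string good = "plan_" + std::to_string(ticks); // "plan_17421..."
        std::cout << good << "\n";
    }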

agent/planning.h

@@ -3,6 +3,7 @@
 #include "toolcall.h"
 #include "../tool/planning.h"
+#include "../prompt.h"
 namespace humanus {

agent/toolcall.cpp

@@ -20,7 +20,7 @@ bool ToolCallAgent::think() {
     tool_calls = ToolCall::from_json_list(response["tool_calls"]);
     // Log response info
-    logger->info("{self.name}'s thoughts:" + response["content"].dump());
+    logger->info("" + name + "'s thoughts:" + response["content"].dump());
     logger->info(
         "🛠️ " + name + " selected " + std::to_string(tool_calls.size()) + " tool(s) to use"
     );
@@ -87,7 +87,7 @@ std::string ToolCallAgent::act() {
     for (const auto& tool_call : tool_calls) {
         auto result = execute_tool(tool_call);
         logger->info(
-            "🎯 Tool '" + tool_call.function.name + "' ompleted its mission! Result: " + result
+            "🎯 Tool '" + tool_call.function.name + "' completed its mission! Result: " + result
         );
         // Add tool response to memory

agent/toolcall.h

@@ -9,8 +9,6 @@
 namespace humanus {
-const char* TOOL_CALL_REQUIRED = "Tool calls required but none provided";
 // Base agent class for handling tool/function calls with enhanced abstraction
 struct ToolCallAgent : ReActAgent {
     std::vector<ToolCall> tool_calls;

config.cpp

@@ -10,9 +10,6 @@ namespace humanus {
 Config* Config::_instance = nullptr;
 std::mutex Config::_mutex;
-// Global config instance
-Config& config = Config::get_instance();
 void Config::_load_initial_config() {
     try {
         auto config_path = _get_config_path();
@@ -49,20 +46,14 @@ void Config::_load_initial_config() {
         if (llm_table.contains("end_point") && llm_table["end_point"].is_string()) {
             llm_settings.end_point = llm_table["end_point"].as_string()->get();
-        } else {
-            throw std::runtime_error("Invalid `end_point` configuration");
         }
         if (llm_table.contains("max_tokens") && llm_table["max_tokens"].is_integer()) {
             llm_settings.max_tokens = llm_table["max_tokens"].as_integer()->get();
-        } else {
-            llm_settings.max_tokens = 4096;
         }
         if (llm_table.contains("temperature") && llm_table["temperature"].is_floating_point()) {
             llm_settings.temperature = llm_table["temperature"].as_floating_point()->get();
-        } else {
-            llm_settings.temperature = 1.0;
         }
         _config.llm["default"] = llm_settings;

config.h

@@ -15,11 +15,11 @@ namespace humanus {
 static std::filesystem::path get_project_root() {
     // Get the project root directory
-    return std::filesystem::path(__FILE__).parent_path().parent_path();
+    return std::filesystem::path(__FILE__).parent_path();
 }
-const std::filesystem::path PROJECT_ROOT = get_project_root();
-const std::filesystem::path WORKSPACE_ROOT = PROJECT_ROOT / "workspace";
+inline const std::filesystem::path PROJECT_ROOT = get_project_root();
+inline const std::filesystem::path WORKSPACE_ROOT = PROJECT_ROOT / "workspace";
 /**
  * @brief LLM
@@ -36,7 +36,7 @@ struct LLMSettings {
         const std::string& model = "",
         const std::string& api_key = "",
         const std::string& base_url = "",
-        const std::string& end_point = "/v1/chat/completions",
+        const std::string& end_point = "/chat/completions",
         int max_tokens = 4096,
         double temperature = 1.0
     ) : model(model), api_key(api_key), base_url(base_url), end_point(end_point),
@@ -129,9 +129,6 @@ public:
     }
 };
-// Global config instance
-extern Config& config;
 } // namespace humanus
 #endif // HUMANUS_CONFIG_H
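
Note: marking `PROJECT_ROOT` and `WORKSPACE_ROOT` as `inline` (a C++17 feature) is what allows config.h to be included from several translation units without duplicate-symbol link errors; all units share one object. A minimal sketch of the pattern, with a hypothetical path:

    // paths.h: safe to include from any number of .cpp files
    #pragma once
    #include <filesystem>

    // Without `inline`, each including translation unit would define its own
    // copy of this object and the link would fail with multiple definitions.
    inline const std::filesystem::path PROJECT_ROOT = "/tmp/project";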

config.toml

@@ -1,5 +1,6 @@
 [llm]
-model = "anthropic/claude-3.7-sonnet"
-base_url = "https://openrouter.ai/api/v1"
-api_key = "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"
-max_tokens = 4096
+model = "deepseek-chat"
+base_url = "https://api.deepseek.com"
+end_point = "/v1/chat/completions"
+api_key = "sk-93c5bfcb920c4a8aa345791d429b8536"
+max_tokens = 8192

(new config file)

@@ -0,0 +1,6 @@
+[llm]
+model = "anthropic/claude-3.7-sonnet"
+base_url = "https://openrouter.ai"
+end_point = "/api/v1/chat/completions"
+api_key = "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"
+max_tokens = 8196

config_mcp.toml

@@ -5,18 +5,17 @@ port = 8818
 sse_endpoint = "/sse"
 [puppeteer]
-type = "command"
+type = "stdio"
 command = "npx"
 args = ["-y", "@modelcontextprotocol/server-puppeteer"]
 [filesystem]
-type = "command"
+type = "stdio"
 command = "npx"
 args = ["-y",
     "@modelcontextprotocol/server-filesystem",
-    "/Users/username/Desktop",
-    "/path/to/other/allowed/dir"]
+    "/Users/hyde/Desktop"]
 [shell]
-type = "command"
+type = "stdio"
 command = "uvx mcp-shell-server"

flow/planning.cpp

@@ -4,7 +4,7 @@ namespace humanus {
 // Get an appropriate executor agent for the current step.
 // Can be extended to select agents based on step type/requirements.
-std::shared_ptr<BaseAgent> PlanningFlow::get_executor(const std::string& step_type = "") const {
+std::shared_ptr<BaseAgent> PlanningFlow::get_executor(const std::string& step_type) const {
     // If step type is provided and matches an agent key, use that agent
     if (!step_type.empty() && agents.find(step_type) != agents.end()) {
         return agents.at(step_type);
@@ -101,7 +101,8 @@ void PlanningFlow::_create_initial_plan(const std::string& request) {
     auto args = tool_call.function.arguments;
     if (args.is_string()) {
         try {
-            args = json::parse(args);
+            std::string args_str = args.get<std::string>();
+            args = json::parse(args_str);
         } catch (...) {
             logger->error("Failed to parse tool arguments: " + args.dump());
             continue;

llm.cpp 100644 (new file, +6)

@@ -0,0 +1,6 @@
+#include "llm.h"
+
+namespace humanus {
+// Define the static member variable
+std::map<std::string, std::shared_ptr<LLM>> LLM::_instances;
+}

llm.h

@@ -21,19 +21,25 @@ private:
     std::unique_ptr<httplib::Client> client_;
-    LLMSettings llm_config_;
-    // Private constructor to prevent direct instantiation
-    LLM(const std::string& config_name, const LLMSettings llm_config) : llm_config_(llm_config) {
-        client_ = std::make_unique<httplib::Client>(llm_config.base_url);
+    std::shared_ptr<LLMSettings> llm_config_;
+public:
+    // Constructor
+    LLM(const std::string& config_name, const std::shared_ptr<LLMSettings>& llm_config = nullptr) : llm_config_(llm_config) {
+        if (!llm_config_) {
+            if (Config::get_instance().llm().find(config_name) == Config::get_instance().llm().end()) {
+                throw std::invalid_argument("Config not found: " + config_name);
+            }
+            llm_config_ = std::make_shared<LLMSettings>(Config::get_instance().llm().at(config_name));
+        }
+        client_ = std::make_unique<httplib::Client>(llm_config_->base_url);
         client_->set_default_headers({
-            {"Authorization", "Bearer " + llm_config_.api_key}
+            {"Authorization", "Bearer " + llm_config_->api_key}
         });
     }
-public:
     // Get the singleton instance
-    static std::shared_ptr<LLM> get_instance(const std::string& config_name = "default", const LLMSettings llm_config = LLMSettings()) {
+    static std::shared_ptr<LLM> get_instance(const std::string& config_name = "default", const std::shared_ptr<LLMSettings>& llm_config = nullptr) {
         if (_instances.find(config_name) == _instances.end()) {
             _instances[config_name] = std::make_shared<LLM>(config_name, llm_config);
         }
@@ -56,7 +62,7 @@ public:
     for (const auto& message : formatted_messages) {
         if (message["role"] != "user" && message["role"] != "assistant" && message["role"] != "system" && message["role"] != "tool") {
-            throw std::invalid_argument("Invalid role: " + message["role"]);
+            throw std::invalid_argument("Invalid role: " + message["role"].get<std::string>());
         }
         if (message["content"].empty() && message["tool_calls"].empty()) {
             throw std::invalid_argument("Message must contain either 'content' or 'tool_calls'");
@@ -85,7 +91,7 @@ public:
     for (const auto& message : formatted_messages) {
         if (message["role"] != "user" && message["role"] != "assistant" && message["role"] != "system" && message["role"] != "tool") {
-            throw std::invalid_argument("Invalid role: " + message["role"]);
+            throw std::invalid_argument("Invalid role: " + message["role"].get<std::string>());
         }
         if (message["content"].empty() && message["tool_calls"].empty()) {
             throw std::invalid_argument("Message must contain either 'content' or 'tool_calls'");
@@ -120,10 +126,10 @@ public:
     formatted_messages.insert(formatted_messages.end(), _formatted_messages.begin(), _formatted_messages.end());
     json body = {
-        {"model", llm_config_.model},
+        {"model", llm_config_->model},
         {"messages", formatted_messages},
-        {"temperature", llm_config_.temperature},
-        {"max_tokens", llm_config_.max_tokens}
+        {"temperature", llm_config_->temperature},
+        {"max_tokens", llm_config_->max_tokens}
     };
     std::string body_str = body.dump();
@@ -132,7 +138,7 @@ public:
     while (retry <= max_retries) {
         // send request
-        auto res = client_->Post(llm_config_.end_point, body_str, "application/json");
+        auto res = client_->Post(llm_config_->end_point, body_str, "application/json");
         if (!res) {
             logger->error("Failed to send request: " + httplib::to_string(res.error()));
@@ -149,6 +155,10 @@ public:
         retry++;
+        if (retry > max_retries) {
+            break;
+        }
         // wait for a while before retrying
         std::this_thread::sleep_for(std::chrono::milliseconds(500));
@@ -201,10 +211,10 @@ public:
     }
     json body = {
-        {"model", llm_config_.model},
+        {"model", llm_config_->model},
         {"messages", formatted_messages},
-        {"temperature", llm_config_.temperature},
-        {"max_tokens", llm_config_.max_tokens},
+        {"temperature", llm_config_->temperature},
+        {"max_tokens", llm_config_->max_tokens},
         {"tools", tools},
         {"tool_choice", tool_choice}
     };
@@ -217,7 +227,7 @@ public:
     while (retry <= max_retries) {
         // send request
-        auto res = client_->Post(llm_config_.end_point, body_str, "application/json");
+        auto res = client_->Post(llm_config_->end_point, body_str, "application/json");
         if (!res) {
             logger->error("Failed to send request: " + httplib::to_string(res.error()));
@@ -234,6 +244,10 @@ public:
         retry++;
+        if (retry > max_retries) {
+            break;
+        }
         // wait for a while before retrying
         std::this_thread::sleep_for(std::chrono::milliseconds(500));
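
Note: after this change `LLM` keeps one shared instance per configuration name, lazily constructed from `Config` unless explicit settings are supplied. A hedged usage sketch (the `"custom"` name and example settings are illustrative, not from this commit):

    using namespace humanus;

    auto a = LLM::get_instance();           // built from the "default" config entry
    auto b = LLM::get_instance("default");  // returns the same shared_ptr as `a`

    // Passing settings skips the config lookup on first construction:
    auto settings = std::make_shared<LLMSettings>(
        "gpt-4o", "sk-...", "https://api.example.com");
    auto c = LLM::get_instance("custom", settings);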

logger.cpp 100644 (new file, +42)

@@ -0,0 +1,42 @@
+#include "logger.h"
+#include <sstream>
+#include <iomanip>
+
+namespace humanus {
+
+std::shared_ptr<spdlog::logger> define_log_level(spdlog::level::level_enum print_level,
+                                                 spdlog::level::level_enum logfile_level,
+                                                 std::string name) {
+    _print_level = print_level;
+
+    auto current_date = std::chrono::system_clock::now();
+    auto in_time_t = std::chrono::system_clock::to_time_t(current_date);
+
+    std::stringstream ss;
+    std::tm tm_info = *std::localtime(&in_time_t);
+    ss << std::put_time(&tm_info, "%Y%m%d");
+    std::string formatted_date = ss.str(); // YYYYMMDD
+
+    std::string log_name = name.empty() ? formatted_date : name + "_" + formatted_date;
+    std::string log_file_path = (PROJECT_ROOT / "logs" / (log_name + ".log")).string();
+
+    // Make sure the log directory exists
+    std::filesystem::create_directories(PROJECT_ROOT / "logs");
+
+    // Reset the log output
+    std::shared_ptr<spdlog::logger> _logger = std::make_shared<spdlog::logger>(log_name);
+
+    // Add a stderr sink (the equivalent of sys.stderr in the Python version)
+    auto stderr_sink = std::make_shared<spdlog::sinks::stderr_color_sink_mt>();
+    stderr_sink->set_level(print_level);
+    _logger->sinks().push_back(stderr_sink);
+
+    // Add a file sink (the equivalent of PROJECT_ROOT / f"logs/{log_name}.log")
+    auto file_sink = std::make_shared<spdlog::sinks::basic_file_sink_mt>(log_file_path, false);
+    file_sink->set_level(logfile_level);
+    _logger->sinks().push_back(file_sink);
+
+    return _logger;
+}
+
+} // namespace humanus

logger.h

@@ -9,9 +9,14 @@
 #include "spdlog/sinks/dist_sink.h"
 #include <string>
+#include <filesystem>
+#include "config.h"
 namespace humanus {
+// Use the PROJECT_ROOT defined in config.h
+// static const std::filesystem::path PROJECT_ROOT = std::filesystem::current_path();
 static spdlog::level::level_enum _print_level = spdlog::level::info;
@@ -21,40 +26,9 @@ static spdlog::level::level_enum _print_level = spdlog::level::info;
  * @param name
  * @return
  */
-std::shared_ptr<spdlog::logger> define_log_level(spdlog::level::level_enum print_level = spdlog::level::info,
-                                                 spdlog::level::level_enum logfile_level = spdlog::level::debug,
-                                                 std::string name = "") {
-    _print_level = print_level;
-    auto current_date = std::chrono::system_clock::now();
-    auto in_time_t = std::chrono::system_clock::to_time_t(current_date);
-    std::stringstream ss;
-    std::tm tm_info = *std::localtime(&in_time_t);
-    ss << std::put_time(&tm_info, "%Y%m%d");
-    std::string formatted_date = ss.str(); // YYYYMMDD
-    std::string log_name = name.empty() ? formatted_date : name + "_" + formatted_date;
-    std::string log_file_path = (PROJECT_ROOT / "logs" / (log_name + ".log")).string();
-    // Make sure the log directory exists
-    std::filesystem::create_directories((PROJECT_ROOT / "logs").string());
-    // Reset the log output
-    std::shared_ptr<spdlog::logger> _logger = spdlog::default_logger();
-    // Add a stderr sink (the equivalent of sys.stderr in the Python version)
-    auto stderr_sink = std::make_shared<spdlog::sinks::stderr_color_sink_mt>();
-    stderr_sink->set_level(print_level);
-    _logger->sinks().push_back(stderr_sink);
-    // Add a file sink (the equivalent of PROJECT_ROOT / f"logs/{log_name}.log")
-    auto file_sink = std::make_shared<spdlog::sinks::basic_file_sink_mt>(log_file_path, true);
-    file_sink->set_level(logfile_level);
-    _logger->sinks().push_back(file_sink);
-    return _logger;
-}
+extern std::shared_ptr<spdlog::logger> define_log_level(spdlog::level::level_enum print_level = spdlog::level::info,
+                                                        spdlog::level::level_enum logfile_level = spdlog::level::debug,
+                                                        std::string name = "");
 static std::shared_ptr<spdlog::logger> logger = define_log_level();
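
Note: moving the body of `define_log_level` into logger.cpp fixes two problems at once: a non-inline function defined in a header breaks the one-definition rule as soon as two .cpp files include it, and the old body kept appending sinks to `spdlog::default_logger()` on every call instead of building a fresh logger. The split follows the usual declaration/definition pattern (hypothetical names):

    // util.h: declaration only, safe to include everywhere
    int next_request_id();

    // util.cpp: exactly one definition in the whole program
    int next_request_id() {
        static int id = 0;
        return ++id;
    }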

main.cpp

@@ -1,5 +1,6 @@
 #include "agent/manus.h"
 #include "logger.h"
+#include "prompt.h"
 #if defined (__unix__) || (defined (__APPLE__) && defined (__MACH__))
 #include <signal.h>
@@ -18,7 +19,7 @@ using namespace humanus;
 #if defined (__unix__) || (defined (__APPLE__) && defined (__MACH__)) || defined (_WIN32)
 static void sigint_handler(int signo) {
     if (signo == SIGINT) {
-        logger->warn("Goodbye!");
+        logger->info("Received SIGINT, exiting...");
         exit(0);
     }
 }

prompt.cpp 100644 (new file, +77)

@@ -0,0 +1,77 @@
+#include "prompt.h"
+
+namespace humanus {
+
+namespace prompt {
+
+namespace manus {
+const char* SYSTEM_PROMPT = "\
+You are OpenManus, an all-capable AI assistant, aimed at solving any task presented by the user. You have various tools at your disposal that you can call upon to efficiently complete complex requests. Whether it's programming, information retrieval, file processing, or web browsing, you can handle it all.";
+const char* NEXT_STEP_PROMPT = R"(You can interact with the computer using PythonExecute, save important content and information files through FileSaver, open browsers and retrieve information with Puppeteer.
+PythonExecute: Execute Python code to interact with the computer system, data processing, automation tasks, etc.
+FileSystem: Read/write files locally, such as txt, py, html, etc. Create/list/delete directories, move files/directories, search for files and get file metadata.
+Puppeteer: Open, browse, and get screenshots of web pages using Puppeteer, a headless Chrome browser.
+Based on user needs, proactively select the most appropriate tool or combination of tools. For complex tasks, you can break down the problem and use different tools step by step to solve it. After using each tool, clearly explain the execution results and suggest the next steps.)";
+} // namespace manus
+
+namespace planning {
+const char* PLANNING_SYSTEM_PROMPT = R"(Based on the current state, what's your next step?
+Consider:
+1. Do you need to create or refine a plan?
+2. Are you ready to execute a specific step?
+3. Have you completed the task?
+Provide reasoning, then select the appropriate tool or action.)";
+const char* NEXT_STEP_PROMPT = R"(Based on the current state, what's your next step?
+Consider:
+1. Do you need to create or refine a plan?
+2. Are you ready to execute a specific step?
+3. Have you completed the task?
+Provide reasoning, then select the appropriate tool or action.)";
+} // namespace planning
+
+namespace swe {
+const char* SYSTEM_PROMPT = R"(SETTING: You are an autonomous programmer, and you're working directly in the command line with a special interface.
+The special interface consists of a file editor that shows you {WINDOW} lines of a file at a time.
+In addition to typical shell commands, you can also use specific commands to help you navigate and edit files.
+To call a command, you need to invoke it with a function call/tool call.
+Please note that THE EDIT COMMAND REQUIRES PROPER INDENTATION.
+If you'd like to add the line '        print(x)' you must fully write that out, with all those spaces before the code! Indentation is important and code that is not indented correctly will fail and require fixing before it can be run.
+RESPONSE FORMAT:
+Your shell prompt is formatted as follows:
+(Open file: <path>)
+(Current directory: <cwd>)
+shell-$
+First, you should _always_ include a general thought about what you're going to do next.
+Then, for every response, you must include exactly _ONE_ tool call/function call.
+Remember, you should always include a _SINGLE_ tool call/function call and then wait for a response from the shell before continuing with more discussion and commands. Everything you include in the DISCUSSION section will be saved for future reference.
+If you'd like to issue two commands at once, PLEASE DO NOT DO THAT! Please instead first submit just the first tool call, and then after receiving a response you'll be able to issue the second tool call.
+Note that the environment does NOT support interactive session commands (e.g. python, vim), so please do not invoke them.)";
+const char* NEXT_STEP_TEMPLATE = R"({observation}
+(Open file: {open_file})
+(Current directory: {working_dir})
+shell-$)";
+} // namespace swe
+
+namespace toolcall {
+const char* SYSTEM_PROMPT = "You are an agent that can execute tool calls";
+const char* NEXT_STEP_PROMPT = "If you want to stop interaction, use `terminate` tool/function call.";
+} // namespace toolcall
+
+} // namespace prompt
+
+} // namespace humanus

prompt.h

@@ -6,75 +6,36 @@ namespace humanus {
 namespace prompt {
 namespace manus {
-const char* SYSTEM_PROMPT = "\
-You are OpenManus, an all-capable AI assistant, aimed at solving any task presented by the user. You have various tools at your disposal that you can call upon to efficiently complete complex requests. Whether it's programming, information retrieval, file processing, or web browsing, you can handle it all.";
-const char* NEXT_STEP_PROMPT = R"(You can interact with the computer using PythonExecute, save important content and information files through FileSaver, open browsers with BrowserUseTool, and retrieve information using GoogleSearch.
-PythonExecute: Execute Python code to interact with the computer system, data processing, automation tasks, etc.
-FileSystem: Read/write files locally, such as txt, py, html, etc. Create/list/delete directories, move files/directories, search for files and get file metadata.
-Puppeteer: Open, browse, and get screenshots of web pages using Puppeteer, a headless Chrome browser.
-Based on user needs, proactively select the most appropriate tool or combination of tools. For complex tasks, you can break down the problem and use different tools step by step to solve it. After using each tool, clearly explain the execution results and suggest the next steps.)";
+extern const char* SYSTEM_PROMPT;
+extern const char* NEXT_STEP_PROMPT;
 } // namespace manus
 namespace planning {
-const char* PLANNING_SYSTEM_PROMPT = R"(Based on the current state, what's your next step?
-Consider:
-1. Do you need to create or refine a plan?
-2. Are you ready to execute a specific step?
-3. Have you completed the task?
-Provide reasoning, then select the appropriate tool or action.)";
-const char* NEXT_STEP_PROMPT = R"(Based on the current state, what's your next step?
-Consider:
-1. Do you need to create or refine a plan?
-2. Are you ready to execute a specific step?
-3. Have you completed the task?
-Provide reasoning, then select the appropriate tool or action.)";
+extern const char* PLANNING_SYSTEM_PROMPT;
+extern const char* NEXT_STEP_PROMPT;
 } // namespace planning
 namespace swe {
-const char* SYSTEM_PROMPT = R"(SETTING: You are an autonomous programmer, and you're working directly in the command line with a special interface.
-The special interface consists of a file editor that shows you {WINDOW} lines of a file at a time.
-In addition to typical shell commands, you can also use specific commands to help you navigate and edit files.
-To call a command, you need to invoke it with a function call/tool call.
-Please note that THE EDIT COMMAND REQUIRES PROPER INDENTATION.
-If you'd like to add the line '        print(x)' you must fully write that out, with all those spaces before the code! Indentation is important and code that is not indented correctly will fail and require fixing before it can be run.
-RESPONSE FORMAT:
-Your shell prompt is formatted as follows:
-(Open file: <path>)
-(Current directory: <cwd>)
-shell-$
-First, you should _always_ include a general thought about what you're going to do next.
-Then, for every response, you must include exactly _ONE_ tool call/function call.
-Remember, you should always include a _SINGLE_ tool call/function call and then wait for a response from the shell before continuing with more discussion and commands. Everything you include in the DISCUSSION section will be saved for future reference.
-If you'd like to issue two commands at once, PLEASE DO NOT DO THAT! Please instead first submit just the first tool call, and then after receiving a response you'll be able to issue the second tool call.
-Note that the environment does NOT support interactive session commands (e.g. python, vim), so please do not invoke them.)";
-const char* NEXT_STEP_TEMPLATE = R"({observation}
-(Open file: {open_file})
-(Current directory: {working_dir})
-shell-$)";
+extern const char* SYSTEM_PROMPT;
+extern const char* NEXT_STEP_TEMPLATE;
 } // namespace swe
 namespace toolcall {
-const char* SYSTEM_PROMPT = "You are an agent that can execute tool calls";
-const char* NEXT_STEP_PROMPT = "If you want to stop interaction, use `terminate` tool/function call.";
+extern const char* SYSTEM_PROMPT;
+extern const char* NEXT_STEP_PROMPT;
 } // namespace toolcall
 } // namespace prompt
+// Use inline functions to access the constants
+inline const char* get_tool_call_required() { return "required"; }
+inline const char* get_terminate_description() { return "Terminate the current interaction"; }
+inline const char* get_planning_tool_description() { return "Create a plan for the given task"; }
+#define TOOL_CALL_REQUIRED get_tool_call_required()
+#define _TERMINATE_DESCRIPTION get_terminate_description()
+#define _PLANNING_TOOL_DESCRIPTION get_planning_tool_description()
 } // namespace humanus
 #endif // HUMANUS_PROMPT_H

schema.cpp 100644 (new file, +12)

@@ -0,0 +1,12 @@
+#include "schema.h"
+
+namespace humanus {
+
+std::map<AgentState, std::string> agent_state_map = {
+    {AgentState::IDLE, "IDLE"},
+    {AgentState::RUNNING, "RUNNING"},
+    {AgentState::FINISHED, "FINISHED"},
+    {AgentState::ERROR, "ERROR"}
+};
+
+} // namespace humanus

schema.h

@@ -15,12 +15,7 @@ enum class AgentState {
     ERROR = 3
 };
-std::map<AgentState, std::string> agent_state_map = {
-    {AgentState::IDLE, "IDLE"},
-    {AgentState::RUNNING, "RUNNING"},
-    {AgentState::FINISHED, "FINISHED"},
-    {AgentState::ERROR, "ERROR"}
-};
+extern std::map<AgentState, std::string> agent_state_map;
 struct Function {
     std::string name;

server/CMakeLists.txt

@@ -1,6 +1,9 @@
 # CMakeLists.txt
 cmake_minimum_required(VERSION 3.10)
+#
+find_package(Threads REQUIRED)
 #
 set(SERVER_SOURCES
     python_execute.cpp
@@ -23,11 +26,18 @@ target_include_directories(server PRIVATE
     ${CMAKE_CURRENT_SOURCE_DIR}
     ${CMAKE_CURRENT_SOURCE_DIR}/../
     ${CMAKE_CURRENT_SOURCE_DIR}/../mcp/include
+    ${CMAKE_CURRENT_SOURCE_DIR}/../mcp/common
 )
 # MCP
 add_executable(mcp_server mcp_server_main.cpp)
 target_link_libraries(mcp_server PRIVATE server mcp Threads::Threads ${OPENSSL_LIBRARIES})
+target_include_directories(mcp_server PRIVATE
+    ${CMAKE_CURRENT_SOURCE_DIR}
+    ${CMAKE_CURRENT_SOURCE_DIR}/../
+    ${CMAKE_CURRENT_SOURCE_DIR}/../mcp/include
+    ${CMAKE_CURRENT_SOURCE_DIR}/../mcp/common
+)
 if(Python3_FOUND)
     target_link_libraries(mcp_server PRIVATE ${Python3_LIBRARIES})
 endif()

server/mcp_server_main.cpp

@@ -6,9 +6,9 @@
  * PythonExecute
  */
-#include "mcp/include/mcp_server.h"
-#include "mcp/include/mcp_tool.h"
-#include "mcp/include/mcp_resource.h"
+#include "../mcp/include/mcp_server.h"
+#include "../mcp/include/mcp_tool.h"
+#include "../mcp/include/mcp_resource.h"
 #include <iostream>
 #include <string>

tool/base.h

@@ -11,137 +11,11 @@
 namespace humanus {
-// Execute the tool with given parameters.
-struct BaseTool {
-    std::string name;
-    std::string description;
-    json parameters;
-    std::unique_ptr<mcp::client> _client;
-    BaseTool(const std::string& name, const std::string& description, const json& parameters) :
-        name(name), description(description), parameters(parameters) {
-        // Load the tool configuration from the config file
-        auto _config = MCPToolConfig::load_from_toml(name);
-        if (_config.type == "stdio") {
-            std::string command = _config.command;
-            if (!_config.args.empty()) {
-                for (const auto& arg : _config.args) {
-                    command += " " + arg;
-                }
-            }
-            _client = std::make_unique<mcp::stdio_client>(command, _config.env_vars);
-        } else if (_config.type == "sse") {
-            if (!_config.host.empty() && !_config.port.empty()) {
-                _client = std::make_unique<mcp::sse_client>(_config.host, _config.port);
-            } else if (!_config.url.empty()) {
-                _client = std::make_unique<mcp::sse_client>(_config.url);
-            } else {
-                throw std::runtime_error("MCP SSE config is missing host, port, or url");
-            }
-        }
-        _client->initialize(name + "_client", "0.0.1");
-    }
-    // Execute the tool with given parameters.
-    ToolResult operator()(const json& arguments) {
-        return execute(arguments);
-    }
-    // Execute the tool with given parameters.
-    virtual ToolResult execute(const json& arguments) {
-        try {
-            if (!_client) {
-                throw std::runtime_error("MCP client is not initialized");
-            }
-            json result = _client->call_tool(name, arguments);
-            bool is_error = result.value("isError", false);
-            // Return a different ToolResult depending on whether an error occurred
-            if (is_error) {
-                return ToolError(result.value("content", json::array()));
-            } else {
-                return ToolResult(result.value("content", json::array()));
-            }
-        } catch (const std::exception& e) {
-            return ToolError(e.what());
-        }
-    }
-    json to_param() const {
-        return {
-            {"type", "function"},
-            {"function", {
-                {"name", name},
-                {"description", description},
-                {"parameters", parameters}
-            }}
-        };
-    }
-};
-// Represents the result of a tool execution.
-struct ToolResult {
-    json output;
-    json error;
-    json system;
-    ToolResult(const json& output, const json& error = {}, const json& system = {})
-        : output(output), error(error), system(system) {}
-    bool empty() const {
-        return output.empty() && error.empty() && system.empty();
-    }
-    ToolResult operator+(const ToolResult& other) const {
-        auto combined_field = [](const json& field, const json& other_field) {
-            if (field.empty()) {
-                return other_field;
-            }
-            if (other_field.empty()) {
-                return field;
-            }
-            json result = json::array();
-            if (field.is_array()) {
-                result.insert(result.end(), field.begin(), field.end());
-            } else {
-                result.push_back(field);
-            }
-            if (other_field.is_array()) {
-                result.insert(result.end(), other_field.begin(), other_field.end());
-            } else {
-                result.push_back(other_field);
-            }
-            return result;
-        };
-        return {
-            combined_field(output, other.output),
-            combined_field(error, other.error),
-            combined_field(system, other.system)
-        };
-    }
-    std::string to_string() const {
-        return !error.empty() ? "Error: " + error.dump() : output.dump();
-    }
-};
-// A ToolResult that represents a failure.
-struct ToolError : ToolResult {
-    ToolError(const std::string& error) : ToolResult({}, error) {}
-};
-struct AgentAware : ToolResult {
-    std::shared_ptr<BaseAgent> agent = nullptr;
-};
 // Load tool configurations from config_mcp.toml
 struct MCPToolConfig {
     std::string type;
     std::string host;
-    std::string port;
+    int port;
     std::string url;
     std::string command;
     std::vector<std::string> args;
@@ -216,10 +90,10 @@ struct MCPToolConfig {
         }
         config.host = tool_table["host"].as_string()->get();
-        if (!tool_table.contains("port") || !tool_table["port"].is_string()) {
+        if (!tool_table.contains("port") || !tool_table["port"].is_integer()) {
             throw std::runtime_error("sse tool config is missing the port field: " + tool_name);
         }
-        config.port = tool_table["port"].as_string()->get();
+        config.port = tool_table["port"].as_integer()->get();
     }
 } else {
     throw std::runtime_error("Unsupported tool type: " + config.type);
@@ -233,6 +107,137 @@ struct MCPToolConfig {
     }
 };
+// Represents the result of a tool execution.
+struct ToolResult {
+    json output;
+    json error;
+    json system;
+    ToolResult(const json& output, const json& error = {}, const json& system = {})
+        : output(output), error(error), system(system) {}
+    bool empty() const {
+        return output.empty() && error.empty() && system.empty();
+    }
+    ToolResult operator+(const ToolResult& other) const {
+        auto combined_field = [](const json& field, const json& other_field) {
+            if (field.empty()) {
+                return other_field;
+            }
+            if (other_field.empty()) {
+                return field;
+            }
+            json result = json::array();
+            if (field.is_array()) {
+                result.insert(result.end(), field.begin(), field.end());
+            } else {
+                result.push_back(field);
+            }
+            if (other_field.is_array()) {
+                result.insert(result.end(), other_field.begin(), other_field.end());
+            } else {
+                result.push_back(other_field);
+            }
+            return result;
+        };
+        return {
+            combined_field(output, other.output),
+            combined_field(error, other.error),
+            combined_field(system, other.system)
+        };
+    }
+    std::string to_string() const {
+        return !error.empty() ? "Error: " + error.dump() : output.dump();
+    }
+};
+// A ToolResult that represents a failure.
+struct ToolError : ToolResult {
+    ToolError(const std::string& error) : ToolResult({}, error) {}
+};
+// Execute the tool with given parameters.
+struct BaseTool {
+    inline static std::set<std::string> special_tool_name = {"terminate"};
+    std::string name;
+    std::string description;
+    json parameters;
+    std::unique_ptr<mcp::client> _client;
+    BaseTool(const std::string& name, const std::string& description, const json& parameters) :
+        name(name), description(description), parameters(parameters) {
+        if (special_tool_name.find(name) != special_tool_name.end()) {
+            return;
+        }
+        // Load the tool configuration from the config file
+        auto _config = MCPToolConfig::load_from_toml(name);
+        if (_config.type == "stdio") {
+            std::string command = _config.command;
+            if (!_config.args.empty()) {
+                for (const auto& arg : _config.args) {
+                    command += " " + arg;
+                }
+            }
+            _client = std::make_unique<mcp::stdio_client>(command, _config.env_vars);
+        } else if (_config.type == "sse") {
+            if (!_config.host.empty() && _config.port > 0) {
+                _client = std::make_unique<mcp::sse_client>(_config.host, _config.port);
+            } else if (!_config.url.empty()) {
+                _client = std::make_unique<mcp::sse_client>(_config.url, "/sse");
+            } else {
+                throw std::runtime_error("MCP SSE config is missing host, port, or url");
+            }
+        }
+        _client->initialize(name + "_client", "0.0.1");
+    }
+    // Execute the tool with given parameters.
+    ToolResult operator()(const json& arguments) {
+        return execute(arguments);
+    }
+    // Execute the tool with given parameters.
+    virtual ToolResult execute(const json& arguments) {
+        try {
+            if (!_client) {
+                throw std::runtime_error("MCP client is not initialized");
+            }
+            json result = _client->call_tool(name, arguments);
+            bool is_error = result.value("isError", false);
+            // Return a different ToolResult depending on whether an error occurred
+            if (is_error) {
+                return ToolError(result.value("content", json::array()));
+            } else {
+                return ToolResult(result.value("content", json::array()));
+            }
+        } catch (const std::exception& e) {
+            return ToolError(e.what());
+        }
+    }
+    json to_param() const {
+        return {
+            {"type", "function"},
+            {"function", {
+                {"name", name},
+                {"description", description},
+                {"parameters", parameters}
+            }}
+        };
+    }
+};
+struct AgentAware : ToolResult {
+    std::shared_ptr<BaseAgent> agent = nullptr;
+};
 }
 #endif // HUMANUS_TOOL_BASE_H
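
Note: the large block move in this file is order-driven. C++ headers are parsed top to bottom, and `BaseTool::execute` returns `ToolResult` by value, so `ToolResult` (and `ToolError`) must be completely defined before `BaseTool`; previously `BaseTool` came first and referred to types declared below it. A compressed illustration of the constraint:

    struct ToolResult {};                // must precede its by-value use

    struct BaseTool {
        virtual ~BaseTool() = default;
        virtual ToolResult execute() {   // returning by value requires the
            return ToolResult{};         // complete type defined above
        }
    };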

tool/filesystem.h

@@ -26,7 +26,7 @@ struct FileSystem : BaseTool {
                 "get_file_info",
                 "list_allowed_directories"
                 ]
-            }
+            },
             "path": {
                 "type": "string",
                 "description": "The path to the file or directory to operate on. Only works within allowed directories. Required by all tools except `read_multiple_files`, `move_file` and `list_allowed_directories`."
@@ -44,7 +44,7 @@ struct FileSystem : BaseTool {
             },
             "edits": {
                 "type": "array",
-                "description": "Each edit replaces exact line sequences with new content. Required by `edit_file`.",
+                "description": "Each edit replaces exact line sequences with new content. Required by `edit_file`."
             },
             "source": {
                 "type": "string",
@@ -82,7 +82,7 @@ struct FileSystem : BaseTool {
             return ToolError("Tool is required");
         }
-        json result = _client->call_tool("puppeteer_" + tool, args);
+        json result = _client->call_tool(tool, args);
         bool is_error = result.value("isError", false);

tool/google_search.h (deleted)

@@ -1,95 +0,0 @@
-#ifndef HUMANUS_TOOL_GOOGLE_SEARCH_H
-#define HUMANUS_TOOL_GOOGLE_SEARCH_H
-#include "../mcp/common/httplib.h"
-#include "base.h"
-namespace humanus {
-struct GoogleSearch : BaseTool {
-    inline static const std::string name_ = "google_search";
-    inline static const std::string description_ = R"(Perform a Google search and return a list of relevant links.
-Use this tool when you need to find information on the web, get up-to-date data, or research specific topics.
-The tool returns a list of URLs that match the search query.)";
-    inline static const json parameters_ = json::parse(R"json( {
-        "type": "object",
-        "properties": {
-            "query": {
-                "type": "string",
-                "description": "(required) The search query to submit to Google.",
-            },
-            "num_results": {
-                "type": "integer",
-                "description": "(optional) The number of search results to return. Default is 10.",
-                "default": 10,
-            },
-        },
-        "required": ["query"],
-    })json");
-    GoogleSearch() : BaseTool(name_, description_, parameters_) {}
-    ToolResult execute(const json& args) override {
-        try {
-            std::string query = args["query"];
-            int num_results = args.value("num_results", 10);
-            // Create an HTTP client for the serper.dev API
-            httplib::Client cli("https://api.serper.dev");
-            // Prepare the request body
-            json request_body = {
-                {"q", query},
-                {"num", num_results}
-            };
-            // Set the request headers
-            const char* api_key = std::getenv("X_API_KEY");
-            if (!api_key) {
-                return ToolError("X_API_KEY is not set");
-            }
-            httplib::Headers headers = {
-                {"Content-Type", "application/json"},
-                {"X-API-KEY", api_key}
-            };
-            // Send the POST request
-            auto res = cli.Post("/search", headers, request_body.dump(), "application/json");
-            if (!res) {
-                return ToolError("Failed to connect to search API");
-            }
-            if (res->status != 200) {
-                return ToolError("Search API returned status code = " + std::to_string(res->status) + ", body = " + res->body);
-            }
-            // Parse the response
-            json response = json::parse(res->body);
-            // Format the results
-            std::string result = "Search results for: " + query + "\n\n";
-            if (response.contains("organic") && response["organic"].is_array()) {
-                for (size_t i = 0; i < response["organic"].size() && i < static_cast<size_t>(num_results); ++i) {
-                    const auto& item = response["organic"][i];
-                    result += std::to_string(i+1) + ". " + item.value("title", "No title") + "\n";
-                    result += "   URL: " + item.value("link", "No link") + "\n";
-                    result += "   Snippet: " + item.value("snippet", "No description") + "\n\n";
-                }
-            } else {
-                result += "No results found.";
-            }
-            return ToolResult(result);
-        } catch (const std::exception& e) {
-            return ToolResult("Error executing Google search: " + std::string(e.what()));
-        }
-    }
-};
-}
-#endif // OPEMANUS_TOOL_GOOGLE_SEARCH_H

tool/planning.cpp

@@ -1,4 +1,5 @@
 #include "planning.h"
+#include <iomanip> // iomanip header for std::setprecision
 namespace humanus {

tool/planning.h

@@ -2,18 +2,14 @@
 #define HUMANUS_TOOL_PLANNING_H
 #include "base.h"
+#include "../prompt.h"
 namespace humanus {
-const char* _PLANNING_TOOL_DESCRIPTION = R"(
-A planning tool that allows the agent to create and manage plans for solving complex tasks.
-The tool provides functionality for creating plans, updating plan steps, and tracking progress.
-)";
 struct PlanningTool : BaseTool {
     inline static const std::string name_ = "planning";
     inline static const std::string description_ = _PLANNING_TOOL_DESCRIPTION;
-    inline static const std::vector<std::string> parameters_ = json::parse(R"json({
+    inline static const json parameters_ = json::parse(R"json({
         "type": "object",
         "properties": {
             "command": {
@@ -25,39 +21,39 @@ struct PlanningTool : BaseTool {
                 "get",
                 "set_active",
                 "mark_step",
-                "delete",
+                "delete"
                 ],
-                "type": "string",
+                "type": "string"
             },
             "plan_id": {
                 "description": "Unique identifier for the plan. Required for create, update, set_active, and delete commands. Optional for get and mark_step (uses active plan if not specified).",
-                "type": "string",
+                "type": "string"
             },
             "title": {
                 "description": "Title for the plan. Required for create command, optional for update command.",
-                "type": "string",
+                "type": "string"
             },
             "steps": {
                 "description": "List of plan steps. Required for create command, optional for update command.",
                 "type": "array",
-                "items": {"type": "string"},
+                "items": {"type": "string"}
            },
            "step_index": {
                "description": "Index of the step to update (0-based). Required for mark_step command.",
-                "type": "integer",
+                "type": "integer"
            },
            "step_status": {
                "description": "Status to set for a step. Used with mark_step command.",
                "enum": ["not_started", "in_progress", "completed", "blocked"],
-                "type": "string",
+                "type": "string"
            },
            "step_notes": {
                "description": "Additional notes for a step. Optional for mark_step command.",
-                "type": "string",
-            },
+                "type": "string"
+            }
        },
        "required": ["command"],
-        "additionalProperties": false,
+        "additionalProperties": false
    })json");

tool/puppeteer.h

@@ -28,10 +28,24 @@ struct Puppeteer : BaseTool {
                 "type": "string",
                 "description": "The URL to navigate to. Required by `navigate`."
             },
+            "name": {
+                "type": "string",
+                "description": "The name of the screenshot. Required by `screenshot`."
+            },
             "selector": {
                 "type": "string",
                 "description": "The CSS selector for the element to interact with. Required by `click`, `hover`, `fill`, and `select`."
             },
+            "width": {
+                "type": "number",
+                "description": "The width of the screenshot. Required by `screenshot`. Default: 800",
+                "default": 800
+            },
+            "height": {
+                "type": "number",
+                "description": "The height of the screenshot. Required by `screenshot`. Default: 600",
+                "default": 600
+            },
             "value": {
                 "type": "string",
                 "description": "The value to fill in input fields. Required by `fill`."
@@ -44,6 +58,7 @@ struct Puppeteer : BaseTool {
         "required": ["tool"]
     })json");
+    Puppeteer() : BaseTool(name_, description_, parameters_) {}
     ToolResult execute(const json& args) override {
         try {

tool/terminate.h

@@ -1,15 +1,12 @@
 #ifndef HUMANUS_TOOL_TERMINATE_H
 #define HUMANUS_TOOL_TERMINATE_H
 #include "base.h"
-#include "../prompt.h"
-namespace humanus {
-const char* _TERMINATE_DESCRIPTION = "Terminate the interaction when the request is met OR if the assistant cannot proceed further with the task.";
-struct Terminate : BaseTool {
+struct Terminate : humanus::BaseTool {
     inline static const std::string name_ = "terminate";
-    inline static const std::string description_ = _TERMINATE_DESCRIPTION;
-    inline static const json parameters_ = {
+    inline static const std::string description_ = "Terminate the interaction when the request is met OR if the assistant cannot proceed further with the task.";
+    inline static const humanus::json parameters_ = {
         {"type", "object"},
         {"properties", {
             {"status", {
@@ -24,13 +21,12 @@ struct Terminate : BaseTool {
     Terminate() : BaseTool(name_, description_, parameters_) {}
     // Finish the current execution
-    ToolResult execute(const json& arguments) override {
-        return ToolResult{
+    humanus::ToolResult execute(const humanus::json& arguments) override {
+        return humanus::ToolResult{
             "The interaction has been completed with status: " + arguments.value("status", "unknown")
         };
     }
 };
-}
 #endif // HUMANUS_TOOL_TERMINATE_H