// openmanus.cpp — main.cpp (CLI entry point)
// Last modified: 2025-03-10 02:38:39 +08:00
#include <iostream>
#include <string>
#include <memory>
#include "include/manus.h"
#include "include/flow/flow_factory.h"
#include "include/config.h"
/// Program entry point: load configuration, wire up the Manus agent's LLM
/// backend, then run an interactive read-eval-print loop where each user
/// prompt is executed through a planning flow.
///
/// @param argc argument count; argv[1] (optional) overrides the config path.
/// @return 0 on normal exit, 1 if an exception escapes setup or the loop.
int main(int argc, char* argv[]) {
    try {
        // Determine the config file path (first CLI argument overrides default).
        std::string config_file = "config.toml";
        if (argc > 1) {
            config_file = argv[1];
        }
        std::cout << "使用配置文件: " << config_file << std::endl;

        // Create the Manus agent instance.
        auto agent = std::make_shared<openmanus::Manus>(config_file);

        // Read the configuration to decide which LLM backend to use.
        openmanus::Config config(config_file);
        std::string provider = config.getString("llm", "provider", "local");

        // Configure the LLM API parameters according to the provider.
        if (provider == "local") {
            std::string host = config.getString("llm", "host", "localhost");
            int port = config.getInt("llm", "port", 8000);
            std::string path = config.getString("llm", "path", "/chat/completions");
            agent->setLLMAPIParams(host, port, path);
        } else {
            std::string base_url = config.getString("llm", "base_url", "");
            std::string path = config.getString("llm", "path", "/v1/chat/completions");
            agent->setCloudLLMAPIParams(provider, base_url, path);
        }

        // Set the API key only when one is actually configured.
        std::string api_key = config.getString("llm", "api_key", "");
        if (!api_key.empty()) {
            agent->setLLMAPIKey(api_key);
        }

        // Interactive loop: read a prompt, run the planning flow, print result.
        while (true) {
            std::string prompt;
            std::cout << "输入你的提示 (或输入 'exit' 退出): ";
            // BUGFIX: the original ignored getline's return value, so once
            // stdin hit EOF (e.g. piped input, Ctrl-D) the stream stayed in a
            // failed state and the loop spun forever. Exit cleanly instead.
            if (!std::getline(std::cin, prompt)) {
                std::cout << std::endl;
                break;
            }
            if (prompt == "exit") {
                std::cout << "再见!" << std::endl;
                break;
            }
            // Ignore blank input instead of dispatching an empty request.
            if (prompt.empty()) {
                continue;
            }
            std::cout << "正在处理你的请求..." << std::endl;

            // Create a fresh planning flow per request so no planning state
            // carries over between prompts.
            auto flow = openmanus::FlowFactory::createFlow(
                openmanus::FlowType::PLANNING,
                agent
            );
            // Execute the flow with the user's prompt and report the result.
            std::string result = flow->execute(prompt);
            std::cout << "处理结果: " << result << std::endl;
        }
    } catch (const std::exception& e) {
        std::cerr << "错误: " << e.what() << std::endl;
        return 1;
    }
    return 0;
}