#include "config.h" #include "logger.h" #include "toml.hpp" #include #include namespace humanus { // 初始化静态成员 Config* Config::_instance = nullptr; std::mutex Config::_mutex; void Config::_load_initial_config() { try { auto config_path = _get_config_path(); std::cout << "加载配置文件: " << config_path.string() << std::endl; const auto& data = toml::parse_file(config_path.string()); // 检查工具配置是否存在 if (!data.contains("llm") || !data["llm"].is_table()) { throw std::runtime_error("MCP配置文件中找不到llm配置: "); } const auto& llm_table = *data["llm"].as_table(); LLMSettings llm_settings; if (llm_table.contains("model") && llm_table["model"].is_string()) { llm_settings.model = llm_table["model"].as_string()->get(); } else { throw std::runtime_error("Invalid `model` configuration"); } if (llm_table.contains("api_key") && llm_table["api_key"].is_string()) { llm_settings.api_key = llm_table["api_key"].as_string()->get(); } else { throw std::runtime_error("Invalid `api_key` configuration"); } if (llm_table.contains("base_url") && llm_table["base_url"].is_string()) { llm_settings.base_url = llm_table["base_url"].as_string()->get(); } else { throw std::runtime_error("Invalid `base_url` configuration"); } if (llm_table.contains("end_point") && llm_table["end_point"].is_string()) { llm_settings.end_point = llm_table["end_point"].as_string()->get(); } if (llm_table.contains("max_tokens") && llm_table["max_tokens"].is_integer()) { llm_settings.max_tokens = llm_table["max_tokens"].as_integer()->get(); } if (llm_table.contains("temperature") && llm_table["temperature"].is_floating_point()) { llm_settings.temperature = llm_table["temperature"].as_floating_point()->get(); } _config.llm["default"] = llm_settings; } catch (const std::exception& e) { std::cerr << "加载配置文件失败: " << e.what() << std::endl; // 设置默认配置 LLMSettings default_settings; default_settings.model = "gpt-3.5-turbo"; default_settings.api_key = "sk-"; default_settings.base_url = "https://api.openai.com"; default_settings.end_point = "/v1/chat/completions"; default_settings.max_tokens = 4096; default_settings.temperature = 1.0; _config.llm["default"] = default_settings; } } } // namespace humanus