// config.cpp — configuration loading for the humanus project.
#include "config.h"

#include "logger.h"
#include "toml.hpp"

#include <filesystem>
#include <iostream>
#include <stdexcept>
#include <string>
namespace humanus {
// Initialize static members: the instance pointer and its mutex —
// presumably a lazily-created, mutex-guarded singleton (see config.h).
Config* Config::_instance = nullptr;
std::mutex Config::_mutex;
void Config::_load_initial_config() {
try {
auto config_path = _get_config_path();
2025-03-17 14:24:03 +08:00
std::cout << "Loading config file from: " << config_path.string() << std::endl;
2025-03-16 17:17:01 +08:00
2025-03-16 22:56:03 +08:00
const auto& data = toml::parse_file(config_path.string());
// 检查工具配置是否存在
if (!data.contains("llm") || !data["llm"].is_table()) {
2025-03-17 14:24:03 +08:00
throw std::runtime_error("找不到llm配置: ");
2025-03-16 22:56:03 +08:00
}
2025-03-16 17:17:01 +08:00
2025-03-16 22:56:03 +08:00
const auto& llm_table = *data["llm"].as_table();
LLMSettings llm_settings;
if (llm_table.contains("model") && llm_table["model"].is_string()) {
llm_settings.model = llm_table["model"].as_string()->get();
} else {
throw std::runtime_error("Invalid `model` configuration");
}
if (llm_table.contains("api_key") && llm_table["api_key"].is_string()) {
llm_settings.api_key = llm_table["api_key"].as_string()->get();
} else {
throw std::runtime_error("Invalid `api_key` configuration");
}
2025-03-16 17:17:01 +08:00
2025-03-16 22:56:03 +08:00
if (llm_table.contains("base_url") && llm_table["base_url"].is_string()) {
llm_settings.base_url = llm_table["base_url"].as_string()->get();
} else {
throw std::runtime_error("Invalid `base_url` configuration");
}
if (llm_table.contains("end_point") && llm_table["end_point"].is_string()) {
llm_settings.end_point = llm_table["end_point"].as_string()->get();
2025-03-16 17:17:01 +08:00
}
2025-03-16 22:56:03 +08:00
if (llm_table.contains("max_tokens") && llm_table["max_tokens"].is_integer()) {
llm_settings.max_tokens = llm_table["max_tokens"].as_integer()->get();
}
if (llm_table.contains("temperature") && llm_table["temperature"].is_floating_point()) {
llm_settings.temperature = llm_table["temperature"].as_floating_point()->get();
}
_config.llm["default"] = llm_settings;
2025-03-16 17:17:01 +08:00
} catch (const std::exception& e) {
std::cerr << "加载配置文件失败: " << e.what() << std::endl;
// 设置默认配置
LLMSettings default_settings;
default_settings.model = "gpt-3.5-turbo";
default_settings.api_key = "sk-";
default_settings.base_url = "https://api.openai.com";
default_settings.end_point = "/v1/chat/completions";
default_settings.max_tokens = 4096;
default_settings.temperature = 1.0;
_config.llm["default"] = default_settings;
}
}
} // namespace humanus