// humanus/config.cpp — configuration loading for the humanus project

#include "config.h"
#include "logger.h"
#include "toml.hpp"
#include <iostream>
#include <filesystem>
namespace humanus {
// Out-of-class definitions for Config's static members (declared in config.h).
// _mutex presumably guards creation/access of the singleton _instance —
// TODO(review): confirm against the accessor in config.h.
Config* Config::_instance = nullptr;
std::mutex Config::_mutex;
/// @brief Parse the TOML config file and populate _config.llm / _config.tool_helper.
///
/// Each top-level table in the file describes one named LLM endpoint; its key
/// becomes the map key in _config.llm. For endpoints without native OpenAI-style
/// tool support, an optional [<name>.tool_helper] table is read as well.
/// On any failure a "default" entry with default-constructed values is installed,
/// so callers can always rely on _config.llm["default"] existing.
void Config::_load_initial_config() {
try {
auto config_path = _get_config_path();
std::cout << "Loading config file from: " << config_path.string() << std::endl;
const auto& data = toml::parse_file(config_path.string());
// Load LLM configuration
for (const auto& [key, value] : data) {
// Skip non-table top-level entries: as_table() returns nullptr for
// scalars/arrays, and the original unconditional dereference was UB.
const auto* llm_table_ptr = value.as_table();
if (llm_table_ptr == nullptr) {
continue;
}
const auto& llm_table = *llm_table_ptr;
LLMConfig llm_config;
// Every field is optional; type-check before reading so a mistyped
// value falls back to the LLMConfig default instead of crashing.
if (llm_table.contains("model") && llm_table["model"].is_string()) {
llm_config.model = llm_table["model"].as_string()->get();
}
if (llm_table.contains("api_key") && llm_table["api_key"].is_string()) {
llm_config.api_key = llm_table["api_key"].as_string()->get();
}
if (llm_table.contains("base_url") && llm_table["base_url"].is_string()) {
llm_config.base_url = llm_table["base_url"].as_string()->get();
}
if (llm_table.contains("end_point") && llm_table["end_point"].is_string()) {
llm_config.end_point = llm_table["end_point"].as_string()->get();
}
if (llm_table.contains("max_tokens") && llm_table["max_tokens"].is_integer()) {
llm_config.max_tokens = llm_table["max_tokens"].as_integer()->get();
}
if (llm_table.contains("timeout") && llm_table["timeout"].is_integer()) {
llm_config.timeout = llm_table["timeout"].as_integer()->get();
}
if (llm_table.contains("temperature") && llm_table["temperature"].is_floating_point()) {
llm_config.temperature = llm_table["temperature"].as_floating_point()->get();
}
if (llm_table.contains("oai_tool_support") && llm_table["oai_tool_support"].is_boolean()) {
llm_config.oai_tool_support = llm_table["oai_tool_support"].as_boolean()->get();
}
_config.llm[std::string(key.str())] = llm_config;
if (!llm_config.oai_tool_support) {
// Load tool helper configuration
ToolHelper tool_helper;
if (llm_table.contains("tool_helper") && llm_table["tool_helper"].is_table()) {
const auto& tool_helper_table = *llm_table["tool_helper"].as_table();
// is_string() guards added: as_string() returns nullptr for
// non-string values, and ->get() on it was a null dereference.
if (tool_helper_table.contains("tool_start") && tool_helper_table["tool_start"].is_string()) {
tool_helper.tool_start = tool_helper_table["tool_start"].as_string()->get();
}
if (tool_helper_table.contains("tool_end") && tool_helper_table["tool_end"].is_string()) {
tool_helper.tool_end = tool_helper_table["tool_end"].as_string()->get();
}
if (tool_helper_table.contains("tool_hint_template") && tool_helper_table["tool_hint_template"].is_string()) {
tool_helper.tool_hint_template = tool_helper_table["tool_hint_template"].as_string()->get();
}
}
_config.tool_helper[std::string(key.str())] = tool_helper;
}
}
if (_config.llm.empty()) {
throw std::runtime_error("No LLM configuration found");
} else if (_config.llm.find("default") == _config.llm.end()) {
// No explicit [default]: alias the first parsed endpoint as "default".
_config.llm["default"] = _config.llm.begin()->second;
}
if (_config.tool_helper.find("default") == _config.tool_helper.end()) {
_config.tool_helper["default"] = ToolHelper();
}
} catch (const std::exception& e) {
std::cerr << "Loading config file failed: " << e.what() << std::endl;
// Set default configuration
_config.llm["default"] = LLMConfig();
_config.tool_helper["default"] = ToolHelper();
}
}
} // namespace humanus