#ifndef HUMANUS_CONFIG_H
#define HUMANUS_CONFIG_H

#include <filesystem>
#include <fstream>
#include <map>
#include <memory>
#include <mutex>
#include <sstream>
#include <stdexcept>
#include <string>
#include <utility>

#include "schema.h"
namespace humanus {
/**
 * @brief Resolve the project root directory.
 * @return The directory two levels above this header's location.
 * @note Relies on __FILE__, which may be a relative path depending on how
 *       the compiler was invoked — TODO(review): confirm builds pass
 *       absolute paths so this resolves correctly at runtime.
 */
inline std::filesystem::path get_project_root() {
    // `inline` (C++17) instead of `static` so every translation unit that
    // includes this header shares one definition instead of a private copy.
    return std::filesystem::path(__FILE__).parent_path().parent_path();
}
// Project root (two levels above this header) and the workspace directory
// used for runtime artifacts. `inline` (C++17) gives each constant a single
// instance — and a single dynamic initialization — across all translation
// units, instead of one internal-linkage copy per TU.
inline const std::filesystem::path PROJECT_ROOT = get_project_root();
inline const std::filesystem::path WORKSPACE_ROOT = PROJECT_ROOT / "workspace";
/**
|
||
* @brief LLM设置结构体
|
||
*/
|
||
struct LLMSettings {
|
||
std::string model;
|
||
std::string api_key;
|
||
std::string base_url;
|
||
std::string end_point;
|
||
int max_tokens;
|
||
double temperature;
|
||
|
||
LLMSettings(
|
||
std::string model = "",
|
||
std::string api_key = "",
|
||
std::string base_url = "",
|
||
std::string end_point = "/v1/chat/completions",
|
||
int max_tokens = 4096,
|
||
double temperature = 1.0
|
||
) : model(model), api_key(api_key), base_url(base_url), end_point(end_point),
|
||
max_tokens(max_tokens), temperature(temperature) {}
|
||
|
||
json to_json() const {
|
||
json j;
|
||
j["model"] = model;
|
||
j["api_key"] = api_key;
|
||
j["base_url"] = base_url;
|
||
j["end_point"] = end_point;
|
||
j["max_tokens"] = max_tokens;
|
||
j["temperature"] = temperature;
|
||
return j;
|
||
}
|
||
};
|
||
|
||
/**
|
||
* @brief 应用配置结构体
|
||
*/
|
||
struct AppConfig {
|
||
std::map<std::string, LLMSettings> llm;
|
||
|
||
json to_json() const {
|
||
json j;
|
||
json llm_json;
|
||
for (const auto& [name, settings] : llm) {
|
||
llm_json[name] = settings.to_json();
|
||
}
|
||
j["llm"] = llm_json;
|
||
return j;
|
||
}
|
||
};
|
||
|
||
/**
|
||
* @class Config
|
||
* @brief 配置单例类,用于读取TOML格式的配置文件
|
||
*/
|
||
class Config {
|
||
private:
|
||
static Config* _instance;
|
||
static std::mutex _mutex;
|
||
bool _initialized = false;
|
||
AppConfig _config;
|
||
|
||
// 私有构造函数
|
||
Config() {
|
||
_load_initial_config();
|
||
_initialized = true;
|
||
}
|
||
|
||
// 禁止拷贝和赋值
|
||
Config(const Config&) = delete;
|
||
Config& operator=(const Config&) = delete;
|
||
|
||
/**
|
||
* @brief 获取配置文件路径
|
||
* @return 配置文件路径
|
||
*/
|
||
static std::filesystem::path _get_config_path() {
|
||
auto root = PROJECT_ROOT;
|
||
auto config_path = root / "config" / "config.toml";
|
||
if (std::filesystem::exists(config_path)) {
|
||
return config_path;
|
||
}
|
||
auto example_path = root / "config" / "config.example.toml";
|
||
if (std::filesystem::exists(example_path)) {
|
||
return example_path;
|
||
}
|
||
throw std::runtime_error("无法找到配置文件");
|
||
}
|
||
|
||
/**
|
||
* @brief 加载配置文件
|
||
*/
|
||
void _load_initial_config();
|
||
|
||
public:
|
||
/**
|
||
* @brief 获取单例实例
|
||
* @return 配置实例
|
||
*/
|
||
static Config& getInstance() {
|
||
if (_instance == nullptr) {
|
||
std::lock_guard<std::mutex> lock(_mutex);
|
||
if (_instance == nullptr) {
|
||
_instance = new Config();
|
||
}
|
||
}
|
||
return *_instance;
|
||
}
|
||
|
||
/**
|
||
* @brief 获取LLM设置
|
||
* @return LLM设置映射
|
||
*/
|
||
const std::map<std::string, LLMSettings>& getLLMSettings() const {
|
||
return _config.llm;
|
||
}
|
||
|
||
/**
|
||
* @brief 获取应用配置
|
||
* @return 应用配置
|
||
*/
|
||
const AppConfig& getConfig() const {
|
||
return _config;
|
||
}
|
||
};
|
||
|
||
// Global configuration instance
extern Config& config;

} // namespace humanus

#endif // HUMANUS_CONFIG_H