#ifndef HUMANUS_CONFIG_H
#define HUMANUS_CONFIG_H

#include <string>
#include <map>
#include <fstream>
#include <sstream>
#include <mutex>
#include <filesystem>
#include <memory>
#include <stdexcept> // for std::runtime_error thrown below

#include "schema.h"

namespace humanus {

// Get the project root directory (the directory containing this header)
static std::filesystem::path get_project_root() {
    return std::filesystem::path(__FILE__).parent_path();
}

inline const std::filesystem::path PROJECT_ROOT = get_project_root();
inline const std::filesystem::path WORKSPACE_ROOT = PROJECT_ROOT / "workspace";

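// For example (illustrative path only): if this header lives at
// /opt/humanus.cpp/config.h, then PROJECT_ROOT resolves to /opt/humanus.cpp
// and WORKSPACE_ROOT to /opt/humanus.cpp/workspace.
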
/**
 * @brief LLM settings
 */
struct LLMSettings {
    std::string model;
    std::string api_key;
    std::string base_url;
    std::string end_point;
    int max_tokens;
    double temperature;

    LLMSettings(
        const std::string& model = "",
        const std::string& api_key = "",
        const std::string& base_url = "",
        const std::string& end_point = "/chat/completions",
        int max_tokens = 4096,
        double temperature = 1.0
    ) : model(model), api_key(api_key), base_url(base_url), end_point(end_point),
        max_tokens(max_tokens), temperature(temperature) {}

    json to_json() const {
        json j;
        j["model"] = model;
        j["api_key"] = api_key;
        j["base_url"] = base_url;
        j["end_point"] = end_point;
        j["max_tokens"] = max_tokens;
        j["temperature"] = temperature;
        return j;
    }
};

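// Usage sketch (illustrative only; the model name, key, and URL below are
// placeholder values, not defaults shipped by the project):
//
//   LLMSettings settings("deepseek-chat", "sk-xxx", "https://api.deepseek.com");
//   json j = settings.to_json();
//   // Defaults carry through: j["end_point"] == "/chat/completions",
//   // j["max_tokens"] == 4096, j["temperature"] == 1.0
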
struct AppConfig {
    std::map<std::string, LLMSettings> llm;
};

class Config {
private:
    static Config* _instance;
    static std::mutex _mutex;
    bool _initialized = false;
    AppConfig _config;

    // Private constructor
    Config() {
        _load_initial_config();
        _initialized = true;
    }

    // Disallow copy and assignment
    Config(const Config&) = delete;
    Config& operator=(const Config&) = delete;

    /**
     * @brief Get the configuration file path, falling back to the bundled
     *        example if no user config exists
     * @return Path to the configuration file
     */
    static std::filesystem::path _get_config_path() {
        auto root = PROJECT_ROOT;
        auto config_path = root / "config" / "config.toml";
        if (std::filesystem::exists(config_path)) {
            return config_path;
        }
        auto example_path = root / "config" / "config.example.toml";
        if (std::filesystem::exists(example_path)) {
            return example_path;
        }
        throw std::runtime_error("Could not find the configuration file");
    }

    /**
     * @brief Load the initial configuration from the file found by
     *        _get_config_path()
     */
    void _load_initial_config();

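    // Sketch of the expected file shape (an assumption; the authoritative
    // schema is whatever _load_initial_config parses in its .cpp definition).
    // Key names mirror the LLMSettings fields above:
    //
    //   [llm]
    //   model = "..."
    //   api_key = "..."
    //   base_url = "..."
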
public:
    /**
     * @brief Get the singleton instance
     * @return The configuration instance
     */
    static Config& get_instance() {
        // Lock before checking: double-checked locking on a plain pointer
        // is a data race in C++, so take the mutex on every call instead
        std::lock_guard<std::mutex> lock(_mutex);
        if (_instance == nullptr) {
            _instance = new Config();
        }
        return *_instance;
    }

    /**
     * @brief Get the LLM settings
     * @return Map of named LLM settings
     */
    const std::map<std::string, LLMSettings>& llm() const {
        return _config.llm;
    }

    /**
     * @brief Get the application config
     * @return The application config
     */
    const AppConfig& get_config() const {
        return _config;
    }
};

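// Usage sketch (illustrative; the section name "default" is an assumption
// about how entries are keyed in config.toml, not something this header
// guarantees):
//
//   auto& config = Config::get_instance();
//   const LLMSettings& llm_settings = config.llm().at("default");
//   int budget = llm_settings.max_tokens;
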
} // namespace humanus

#endif // HUMANUS_CONFIG_H