// config.h — configuration loading for the humanus project.
#ifndef HUMANUS_CONFIG_H
#define HUMANUS_CONFIG_H
#include <filesystem>
#include <fstream>
#include <map>
#include <memory>
#include <mutex>
#include <sstream>
#include <stdexcept>
#include <string>

#include "schema.h"
namespace humanus {
/**
 * @brief Resolve the project root directory from this header's location.
 *
 * Uses __FILE__ at compile time, so the result is the directory containing
 * config.h. NOTE(review): __FILE__ may be a relative path depending on how
 * the compiler is invoked — confirm the build always uses absolute paths if
 * callers rely on an absolute PROJECT_ROOT.
 *
 * @return Path of the directory containing this header.
 */
static std::filesystem::path get_project_root() {
    return std::filesystem::path(__FILE__).parent_path();
}
2025-03-17 01:58:37 +08:00
inline const std::filesystem::path PROJECT_ROOT = get_project_root();
inline const std::filesystem::path WORKSPACE_ROOT = PROJECT_ROOT / "workspace";
2025-03-16 17:17:01 +08:00
/**
 * @brief Connection and sampling settings for one LLM endpoint.
 */
struct LLMSettings {
    std::string model;      // model identifier sent to the API
    std::string api_key;    // credential for the API
    std::string base_url;   // server base URL
    std::string end_point;  // request path appended to base_url
    int max_tokens;         // completion token cap sent to the API
    double temperature;     // sampling temperature sent to the API

    /**
     * @brief Construct settings; all parameters have usable defaults.
     * @param model       Model name (default empty).
     * @param api_key     API key (default empty).
     * @param base_url    Server base URL (default empty).
     * @param end_point   Endpoint path (default "/chat/completions").
     * @param max_tokens  Token limit (default 4096).
     * @param temperature Sampling temperature (default 1.0).
     */
    LLMSettings(
        const std::string& model = "",
        const std::string& api_key = "",
        const std::string& base_url = "",
        const std::string& end_point = "/chat/completions",
        int max_tokens = 4096,
        double temperature = 1.0
    ) : model(model), api_key(api_key), base_url(base_url), end_point(end_point),
        max_tokens(max_tokens), temperature(temperature) {}

    /// @brief Serialize every field into a JSON object (keys match member names).
    json to_json() const {
        json j;
        j["model"] = model;
        j["api_key"] = api_key;
        j["base_url"] = base_url;
        j["end_point"] = end_point;
        j["max_tokens"] = max_tokens;
        j["temperature"] = temperature;
        return j;
    }
};
/**
 * @brief Aggregate application configuration.
 */
struct AppConfig {
    // LLM settings keyed by name — presumably config-file section names;
    // verify against _load_initial_config in the .cpp.
    std::map<std::string, LLMSettings> llm;
};
class Config {
private:
static Config* _instance;
static std::mutex _mutex;
bool _initialized = false;
AppConfig _config;
// 私有构造函数
Config() {
_load_initial_config();
_initialized = true;
}
// 禁止拷贝和赋值
Config(const Config&) = delete;
Config& operator=(const Config&) = delete;
/**
* @brief
* @return
*/
static std::filesystem::path _get_config_path() {
auto root = PROJECT_ROOT;
auto config_path = root / "config" / "config.toml";
if (std::filesystem::exists(config_path)) {
return config_path;
}
auto example_path = root / "config" / "config.example.toml";
if (std::filesystem::exists(example_path)) {
return example_path;
}
throw std::runtime_error("无法找到配置文件");
}
/**
* @brief
*/
void _load_initial_config();
public:
/**
* @brief
* @return
*/
2025-03-16 22:56:03 +08:00
static Config& get_instance() {
2025-03-16 17:17:01 +08:00
if (_instance == nullptr) {
std::lock_guard<std::mutex> lock(_mutex);
if (_instance == nullptr) {
_instance = new Config();
}
}
return *_instance;
}
/**
* @brief LLM
* @return LLM
*/
2025-03-16 22:56:03 +08:00
const std::map<std::string, LLMSettings>& llm() const {
2025-03-16 17:17:01 +08:00
return _config.llm;
}
/**
* @brief
* @return
*/
2025-03-16 22:56:03 +08:00
const AppConfig& get_config() const {
2025-03-16 17:17:01 +08:00
return _config;
}
};
} // namespace humanus
#endif // HUMANUS_CONFIG_H