#ifndef HUMANUS_CONFIG_H
#define HUMANUS_CONFIG_H

#include <string>
#include <map>
#include <unordered_map>
#include <fstream>
#include <sstream>
#include <mutex>
#include <filesystem>
#include <memory>
#include <chrono>
#include <algorithm>
#include <cctype>
#include <stdexcept>
#include "schema.h"
#include "prompt.h"

namespace humanus {

// Get the project root directory
static std::filesystem::path get_project_root() {
    return std::filesystem::path(__FILE__).parent_path();
}
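// NOTE: __FILE__ expands at compile time, so PROJECT_ROOT points into the source
// tree as seen by the compiler, not the directory of the running executable.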
static const std::filesystem::path PROJECT_ROOT = get_project_root();
struct LLMConfig {
    std::string model;
    std::string api_key;
    std::string base_url;
    std::string endpoint;
    std::string vision_details;
    int max_tokens;
    int timeout;
    double temperature;
    bool enable_vision;
    bool oai_tool_support;

    LLMConfig(
        const std::string& model = "deepseek-chat",
        const std::string& api_key = "sk-",
        const std::string& base_url = "https://api.deepseek.com",
        const std::string& endpoint = "/v1/chat/completions",
        const std::string& vision_details = "auto",
        int max_tokens = -1,     // -1 for default
        int timeout = 120,
        double temperature = -1, // -1 for default
        bool enable_vision = false,
        bool oai_tool_support = true
    ) : model(model), api_key(api_key), base_url(base_url), endpoint(endpoint), vision_details(vision_details),
        max_tokens(max_tokens), timeout(timeout), temperature(temperature), enable_vision(enable_vision), oai_tool_support(oai_tool_support) {}

    json to_json() const {
        json j;
        j["model"] = model;
        j["api_key"] = api_key;
        j["base_url"] = base_url;
        j["endpoint"] = endpoint;
        j["max_tokens"] = max_tokens;
        j["temperature"] = temperature;
        return j;
    }
};
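
// Usage sketch (illustrative values):
//   LLMConfig cfg("deepseek-chat", "sk-...");  // remaining fields keep their defaults
//   json j = cfg.to_json();
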
struct ToolParser {
    std::string tool_start;
    std::string tool_end;
    std::string tool_hint_template;

    ToolParser(const std::string& tool_start = "<tool_call>",
               const std::string& tool_end = "</tool_call>",
               const std::string& tool_hint_template = prompt::toolcall::TOOL_HINT_TEMPLATE)
        : tool_start(tool_start), tool_end(tool_end), tool_hint_template(tool_hint_template) {}

    // Return the shared default parser by reference (returning by value would
    // hand out a fresh copy on every call)
    static const ToolParser& get_instance() {
        static ToolParser instance;
        return instance;
    }

    // Replace every occurrence of `from` with `to` in `str` (in place) and return the result
    static std::string str_replace(std::string& str, const std::string& from, const std::string& to) {
        size_t start_pos = 0;
        while ((start_pos = str.find(from, start_pos)) != std::string::npos) {
            str.replace(start_pos, from.length(), to);
            start_pos += to.length(); // In case 'to' contains 'from', like replacing 'x' with 'yx'
        }
        return str;
    }

    // Build the tool hint by substituting the markers and tool list into the template
    std::string hint(const std::string& tool_list) const {
        std::string hint_str = tool_hint_template;
        hint_str = str_replace(hint_str, "{tool_start}", tool_start);
        hint_str = str_replace(hint_str, "{tool_end}", tool_end);
        hint_str = str_replace(hint_str, "{tool_list}", tool_list);
        return hint_str;
    }
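
    // Example: with a template like "Use {tool_start}...{tool_end}.\nTools:\n{tool_list}"
    // (illustrative only; the real template is prompt::toolcall::TOOL_HINT_TEMPLATE from prompt.h),
    // hint("- search") yields "Use <tool_call>...</tool_call>.\nTools:\n- search".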

    // Extract tool calls wrapped in tool_start/tool_end from `content`.
    // Returns {"content": <text with the calls spliced out>, "tool_calls": [...]}.
    json parse(const std::string& content) const {
        std::string new_content = content;
        json tool_calls = json::array();
        size_t pos_start = new_content.find(tool_start);
        size_t pos_end = pos_start == std::string::npos ? std::string::npos : new_content.find(tool_end, pos_start + tool_start.size());
        if (pos_start != std::string::npos && pos_end == std::string::npos) { // Some models might not emit tool_end
            pos_end = new_content.size();
        }
        while (pos_start != std::string::npos) {
            std::string tool_content = new_content.substr(pos_start + tool_start.size(), pos_end - pos_start - tool_start.size());
            if (!tool_content.empty()) {
                try {
                    tool_calls.push_back({
                        {"type", "function"},
                        {"function", json::parse(tool_content)}
                    });
                    tool_calls.back()["id"] = "call_" + std::to_string(std::chrono::system_clock::now().time_since_epoch().count());
                } catch (const json::exception& /* e */) {
                    throw std::runtime_error("Invalid tool call: " + tool_content);
                }
            }
            auto trim = [](const std::string& str) -> std::string {
                auto not_space = [](unsigned char ch) { return !std::isspace(ch); };
                auto start = std::find_if(str.begin(), str.end(), not_space);
                auto end = std::find_if(str.rbegin(), str.rend(), not_space).base();
                if (start >= end) return "";
                return std::string(start, end);
            };
            std::string lhs = trim(new_content.substr(0, pos_start));
            std::string rhs = trim(new_content.substr(std::min(pos_end + tool_end.size(), new_content.size())));
            new_content = lhs + rhs;
            pos_start = new_content.find(tool_start); // The previous call was spliced out, so search again from the start
            pos_end = pos_start == std::string::npos ? std::string::npos : new_content.find(tool_end, pos_start + tool_start.size());
            if (pos_start != std::string::npos && pos_end == std::string::npos) { // Some models might not emit tool_end
                pos_end = new_content.size();
            }
        }
        return {
            {"content", new_content},
            {"tool_calls", tool_calls} // Empty if no tool calls were found
        };
    }

    // Serialize tool calls back into marker-wrapped text
    json dump(const json& tool_calls) const {
        std::string content;
        if (!tool_calls.is_array()) {
            throw std::runtime_error("Tool calls should be an array");
        }
        for (const auto& tool_call : tool_calls) {
            content += tool_start;
            content += tool_call[tool_call["type"].get<std::string>()].dump(2);
            content += tool_end;
        }
        return content; // Implicitly wrapped in a JSON string value
    }
};
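
// ToolParser round-trip sketch (hypothetical model output; real payloads depend on the model):
//   ToolParser parser;
//   json res = parser.parse("Sure. <tool_call>{\"name\":\"search\",\"arguments\":{\"q\":\"rain\"}}</tool_call>");
//   // res["content"] == "Sure."; res["tool_calls"] holds one {"type":"function",...} entry
//   json text = parser.dump(res["tool_calls"]); // wraps each call back in the markers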

enum class EmbeddingType {
    ADD = 0,
    SEARCH = 1,
    UPDATE = 2
};

struct EmbeddingModelConfig {
    std::string provider = "oai";
    std::string base_url = "http://localhost:8080";
    std::string endpoint = "/v1/embeddings";
    std::string model = "nomic-embed-text-v1.5.f16.gguf";
    std::string api_key = "";
    int embedding_dims = 768;
    int max_retries = 3;
};

struct VectorStoreConfig {
    std::string provider = "hnswlib";
    int dim = 16;              // Dimension of the elements
    int max_elements = 10000;  // Maximum number of elements, should be known beforehand
    int M = 16;                // Tightly connected with internal dimensionality of the data;
                               // strongly affects the memory consumption
    int ef_construction = 200; // Controls index search speed/build speed tradeoff
    enum class Metric {
        L2,
        IP
    };
    Metric metric = Metric::L2;
};
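
// With provider "hnswlib", these fields line up with the stock hnswlib index
// parameters (a sketch, assuming the usual hnswlib API):
//   hnswlib::L2Space space(dim); // or hnswlib::InnerProductSpace for Metric::IP
//   hnswlib::HierarchicalNSW<float> index(&space, max_elements, M, ef_construction);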

namespace mem0 {

struct MemoryConfig {
    // Base config
    int max_messages = 16;   // Short-term memory capacity
    int retrieval_limit = 8; // Number of results to retrieve from long-term memory

    // Prompt config
    std::string fact_extraction_prompt = prompt::mem0::FACT_EXTRACTION_PROMPT;
    std::string update_memory_prompt = prompt::mem0::UPDATE_MEMORY_PROMPT;

    // Database config
    // std::string history_db_path = ":memory:";

    // Embedding model config
    std::shared_ptr<EmbeddingModelConfig> embedding_model_config = nullptr;

    // Vector store config
    std::shared_ptr<VectorStoreConfig> vector_store_config = nullptr;

    FilterFunc filter = nullptr; // Filter to apply to search results

    // Optional: LLM config
    std::shared_ptr<LLMConfig> llm_config = nullptr;
};

} // namespace mem0

struct AppConfig {
    std::unordered_map<std::string, LLMConfig> llm;
    std::unordered_map<std::string, ToolParser> tool_parser;
    std::unordered_map<std::string, EmbeddingModelConfig> embedding_model;
    std::unordered_map<std::string, VectorStoreConfig> vector_store;
};

class Config {
private:
    static Config* _instance;
    static std::mutex _mutex;
    bool _initialized = false;
    AppConfig _config;

    Config() {
        _load_initial_llm_config();
        _load_initial_embedding_model_config();
        _load_initial_vector_store_config();
        _initialized = true;
    }

    Config(const Config&) = delete;
    Config& operator=(const Config&) = delete;

    /**
     * @brief Get the LLM config file path
     * @return The config path
     */
    static std::filesystem::path _get_llm_config_path() {
        auto root = PROJECT_ROOT;
        auto config_path = root / "config" / "config_llm.toml";
        if (std::filesystem::exists(config_path)) {
            return config_path;
        }
        throw std::runtime_error("LLM Config file not found");
    }

    static std::filesystem::path _get_embedding_model_config_path() {
        auto root = PROJECT_ROOT;
        auto config_path = root / "config" / "config_embd.toml";
        if (std::filesystem::exists(config_path)) {
            return config_path;
        }
        throw std::runtime_error("Embedding Model Config file not found");
    }

    static std::filesystem::path _get_vector_store_config_path() {
        auto root = PROJECT_ROOT;
        auto config_path = root / "config" / "config_vec.toml";
        if (std::filesystem::exists(config_path)) {
            return config_path;
        }
        throw std::runtime_error("Vector Store Config file not found");
    }

    /**
     * @brief Load the initial LLM config
     */
    void _load_initial_llm_config();

    /**
     * @brief Load the initial embedding model config
     */
    void _load_initial_embedding_model_config();

    /**
     * @brief Load the initial vector store config
     */
    void _load_initial_vector_store_config();

public:
    /**
     * @brief Get the singleton instance
     * @return The config instance
     */
    static Config& get_instance() {
        // Take the lock before the first check: the classic unlocked
        // double-checked read of _instance is a data race in C++
        std::lock_guard<std::mutex> lock(_mutex);
        if (_instance == nullptr) {
            _instance = new Config();
        }
        return *_instance;
    }
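
    // Usage sketch:
    //   auto& config = Config::get_instance();
    //   const auto& llm_settings = config.llm(); // named LLMConfig entries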

    /**
     * @brief Get the LLM settings
     * @return The LLM settings map
     */
    const std::unordered_map<std::string, LLMConfig>& llm() const {
        return _config.llm;
    }

    /**
     * @brief Get the tool parsers
     * @return The tool parsers map
     */
    const std::unordered_map<std::string, ToolParser>& tool_parser() const {
        return _config.tool_parser;
    }

    /**
     * @brief Get the embedding model settings
     * @return The embedding model settings map
     */
    const std::unordered_map<std::string, EmbeddingModelConfig>& embedding_model() const {
        return _config.embedding_model;
    }

    /**
     * @brief Get the vector store settings
     * @return The vector store settings map
     */
    const std::unordered_map<std::string, VectorStoreConfig>& vector_store() const {
        return _config.vector_store;
    }

    /**
     * @brief Get the app config
     * @return The app config
     */
    const AppConfig& get_config() const {
        return _config;
    }
};

} // namespace humanus
#endif // HUMANUS_CONFIG_H