Sikuwa first commit

so陈
2026-02-20 23:53:48 +08:00
commit 13a1072c6f
57 changed files with 13519 additions and 0 deletions

37
cpp_cache/CMakeLists.txt Normal file
View File

@@ -0,0 +1,37 @@
# sikuwa/cpp_cache/CMakeLists.txt
# Build the C++ smart cache system with CMake
cmake_minimum_required(VERSION 3.15)
project(SmartCache LANGUAGES CXX)
# Set the C++ standard
set(CMAKE_CXX_STANDARD 17)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
# Find the Python development libraries
find_package(Python3 REQUIRED COMPONENTS Development)
# Add Python's include directories
include_directories(${Python3_INCLUDE_DIRS})
# Create the Python extension module
add_library(pysmartcache MODULE smart_cache.cpp pysmartcache.cpp)
# Link against the Python libraries
target_link_libraries(pysmartcache PRIVATE ${Python3_LIBRARIES})
# Set the output directories
set_target_properties(pysmartcache PROPERTIES
    LIBRARY_OUTPUT_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
    ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
    RUNTIME_OUTPUT_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
)
# On Windows, make sure the module is emitted as a .pyd file
if(WIN32)
    set_target_properties(pysmartcache PROPERTIES SUFFIX ".pyd")
endif()
# Build a small test executable; it compiles the cache sources directly
add_executable(test_smart_cache test_smart_cache.cpp smart_cache.cpp)

241
cpp_cache/__init__.py Normal file
View File

@@ -0,0 +1,241 @@
# sikuwa/cpp_cache/__init__.py
# Python wrapper module for the C++ smart cache system
import os
import sys
import json
import time
import hashlib
from pathlib import Path

# Try to import the C++ extension module
try:
    from .pysmartcache import (
        lru_cache_new,
        lru_cache_contains,
        lru_cache_put,
        lru_cache_get,
        lru_cache_remove,
        lru_cache_clear
    )
    cpp_extension_loaded = True
except ImportError as e:
    print(f"Warning: pysmartcache C++ extension not found. Using fallback implementation. Error: {e}")
    cpp_extension_loaded = False

# Pure-Python fallback implementation
if not cpp_extension_loaded:
    class FallbackLRUCache:
        """LRU cache implemented in pure Python."""

        def __init__(self, max_size=1000):
            self.max_size = max_size
            self.cache = {}
            self.usage_order = []

        def contains(self, key):
            return key in self.cache

        def put(self, key, value):
            if key in self.cache:
                # Mark as most recently used
                self.usage_order.remove(key)
            elif len(self.cache) >= self.max_size:
                # Evict the least recently used entry
                oldest = self.usage_order.pop(0)
                del self.cache[oldest]
            self.cache[key] = value
            self.usage_order.append(key)
            return True

        def get(self, key):
            if key not in self.cache:
                return ""
            # Mark as most recently used
            self.usage_order.remove(key)
            self.usage_order.append(key)
            return self.cache[key]

        def remove(self, key):
            if key in self.cache:
                del self.cache[key]
                self.usage_order.remove(key)
                return True
            return False

        def clear(self):
            self.cache.clear()
            self.usage_order.clear()
            return True

    # Functions that mimic the C++ extension interface
    def lru_cache_new(max_size=1000):
        return FallbackLRUCache(max_size)

    def lru_cache_contains(cache, key):
        return cache.contains(key)

    def lru_cache_put(cache, key, value):
        return cache.put(key, value)

    def lru_cache_get(cache, key):
        return cache.get(key)

    def lru_cache_remove(cache, key):
        return cache.remove(key)

    def lru_cache_clear(cache):
        return cache.clear()
# Python wrapper around the LRU cache
class LRUCache:
    """Python wrapper for the LRU (Least Recently Used) cache."""

    def __init__(self, max_size=1000):
        """Create a new LRU cache."""
        self.cache = lru_cache_new(max_size)

    def contains(self, key):
        """Check whether the cache contains the given key."""
        return lru_cache_contains(self.cache, key)

    def put(self, key, value):
        """Insert a key-value pair into the cache."""
        return lru_cache_put(self.cache, key, value)

    def get(self, key):
        """Get the value stored for the given key."""
        return lru_cache_get(self.cache, key)

    def remove(self, key):
        """Remove the given key from the cache."""
        return lru_cache_remove(self.cache, key)

    def clear(self):
        """Clear the cache."""
        lru_cache_clear(self.cache)
# Pure-Python implementation of the build cache
class BuildCache:
    """Build cache system."""

    def __init__(self, cache_dir=".cache", max_size=1000000000):
        """Create a new build cache."""
        self.cache_dir = Path(cache_dir)
        self.cache_dir.mkdir(parents=True, exist_ok=True)
        self.max_size = max_size
        self.cache_file = self.cache_dir / "build_cache.json"
        self.cache = self._load_cache()

    def _load_cache(self):
        """Load the cache from disk."""
        if self.cache_file.exists():
            try:
                with open(self.cache_file, 'r', encoding='utf-8') as f:
                    return json.load(f)
            except Exception:
                pass
        return {}

    def _save_cache(self):
        """Save the cache to disk."""
        try:
            with open(self.cache_file, 'w', encoding='utf-8') as f:
                json.dump(self.cache, f, indent=2, ensure_ascii=False)
        except Exception as e:
            print(f"Error saving cache: {e}")

    def set_cache_strategy(self, strategy):
        """Set the cache strategy ("lru" or "lfu")."""
        # The Python implementation does not support switching strategies;
        # this method exists only for interface compatibility.
        pass

    def cache_build_result(self, target, command, dependencies, result):
        """Cache a build result."""
        cache_key = self._generate_cache_key(target, command, dependencies)
        self.cache[cache_key] = {
            "result": result,
            "dependencies": dependencies,
            "command": command,
            "timestamp": time.time()
        }
        self._save_cache()
        return True

    def get_cached_build_result(self, target, command, dependencies):
        """Get a cached build result."""
        cache_key = self._generate_cache_key(target, command, dependencies)
        if cache_key in self.cache:
            return self.cache[cache_key]["result"]
        return ""

    def needs_rebuild(self, target, command, dependencies):
        """Check whether a rebuild is needed."""
        cache_key = self._generate_cache_key(target, command, dependencies)
        if cache_key not in self.cache:
            return True
        # The cache key already covers all dependencies, so a hit means no rebuild.
        # A real implementation could additionally check whether dependency files changed.
        return False

    def _generate_cache_key(self, target, command, dependencies):
        """Generate a cache key."""
        # Combine all information into one unique key
        all_info = f"{target}|{command}|{json.dumps(dependencies, sort_keys=True)}"
        return hashlib.sha256(all_info.encode()).hexdigest()

    def clean_all_cache(self):
        """Clear all cached entries."""
        self.cache.clear()
        self._save_cache()
        return True

    def dump_stats(self):
        """Print cache statistics."""
        print("Build Cache Statistics:")
        print(f"  Cache directory: {self.cache_dir}")
        print(f"  Number of cached items: {len(self.cache)}")
# Function-style interface for BuildCache, mirroring the C++ extension
def build_cache_new(cache_dir=".cache", max_size=1000000000):
    return BuildCache(cache_dir, max_size)

def build_cache_set_cache_strategy(cache, strategy):
    return cache.set_cache_strategy(strategy)

def build_cache_cache_build_result(cache, target, command, dependencies, result):
    # If dependencies is a string, wrap it in a list
    if isinstance(dependencies, str):
        dependencies = [dependencies]
    return cache.cache_build_result(target, command, dependencies, result)

def build_cache_get_cached_build_result(cache, target, command, dependencies):
    # If dependencies is a string, wrap it in a list
    if isinstance(dependencies, str):
        dependencies = [dependencies]
    return cache.get_cached_build_result(target, command, dependencies)

def build_cache_needs_rebuild(cache, target, command, dependencies):
    # If dependencies is a string, wrap it in a list
    if isinstance(dependencies, str):
        dependencies = [dependencies]
    return cache.needs_rebuild(target, command, dependencies)

def build_cache_clean_all_cache(cache):
    return cache.clean_all_cache()

def build_cache_dump_build_cache_stats(cache):
    return cache.dump_stats()

# Export all functions (they are already module-level; this mapping just makes the set explicit)
globals().update({
    'build_cache_new': build_cache_new,
    'build_cache_set_cache_strategy': build_cache_set_cache_strategy,
    'build_cache_cache_build_result': build_cache_cache_build_result,
    'build_cache_get_cached_build_result': build_cache_get_cached_build_result,
    'build_cache_needs_rebuild': build_cache_needs_rebuild,
    'build_cache_clean_all_cache': build_cache_clean_all_cache,
    'build_cache_dump_build_cache_stats': build_cache_dump_build_cache_stats
})
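A quick usage sketch of the two wrappers above (illustrative only, and assuming the package is importable as cpp_cache, for example from the repository root); it behaves the same whether the C++ extension or the pure-Python fallback is loaded:

# Hypothetical usage sketch for the wrappers above (not part of the commit).
from cpp_cache import LRUCache, BuildCache

lru = LRUCache(max_size=2)
lru.put("a", "1")
lru.put("b", "2")
print(lru.get("b"))  # -> "2"

bc = BuildCache(cache_dir=".cache")
deps = ["src/main.c"]
cmd = "gcc -o app src/main.c"
if bc.needs_rebuild("app", cmd, deps):
    bc.cache_build_result("app", cmd, deps, "ok")
print(bc.get_cached_build_result("app", cmd, deps))  # -> "ok"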

80
cpp_cache/pysmartcache.cpp Normal file
View File

@@ -0,0 +1,80 @@
// sikuwa/cpp_cache/pysmartcache.cpp
// Python extension module that exposes the C++ smart cache system
#include <Python.h>
#include "smart_cache.h"
#include <string>
// Python wrappers around the LRUCache class
static void py_lru_cache_dealloc(PyObject* capsule);
static PyObject* py_lru_cache_new(PyObject* self, PyObject* args) {
unsigned long max_size = 1000;
if (!PyArg_ParseTuple(args, "|k", &max_size)) {
return NULL;
}
LRUCache* cache = new LRUCache(max_size);
// Register the deallocator so the C++ object is freed when the capsule is collected
return PyCapsule_New(cache, "LRUCache", py_lru_cache_dealloc);
}
static void py_lru_cache_dealloc(PyObject* capsule) {
LRUCache* cache = (LRUCache*)PyCapsule_GetPointer(capsule, "LRUCache");
if (cache) {
delete cache;
}
}
static PyObject* py_lru_cache_put(PyObject* self, PyObject* args) {
PyObject* capsule;
const char* key;
const char* value;
if (!PyArg_ParseTuple(args, "Oss", &capsule, &key, &value)) {
return NULL;
}
LRUCache* cache = (LRUCache*)PyCapsule_GetPointer(capsule, "LRUCache");
if (!cache) {
PyErr_SetString(PyExc_RuntimeError, "Invalid LRUCache pointer");
return NULL;
}
bool result = cache->put(key, value);
return PyBool_FromLong(result);
}
static PyObject* py_lru_cache_get(PyObject* self, PyObject* args) {
PyObject* capsule;
const char* key;
if (!PyArg_ParseTuple(args, "Os", &capsule, &key)) {
return NULL;
}
LRUCache* cache = (LRUCache*)PyCapsule_GetPointer(capsule, "LRUCache");
if (!cache) {
PyErr_SetString(PyExc_RuntimeError, "Invalid LRUCache pointer");
return NULL;
}
std::string result = cache->get(key);
return PyUnicode_FromString(result.c_str());
}
// Method table for the Python module
static PyMethodDef pysmartcache_methods[] = {
{"lru_cache_new", py_lru_cache_new, METH_VARARGS, "Create a new LRUCache"},
{"lru_cache_put", py_lru_cache_put, METH_VARARGS, "Put a key-value pair into LRUCache"},
{"lru_cache_get", py_lru_cache_get, METH_VARARGS, "Get a value from LRUCache"},
{NULL, NULL, 0, NULL} // Sentinel
};
// Module definition and initialization
static struct PyModuleDef pysmartcache_module = {
PyModuleDef_HEAD_INIT,
"pysmartcache", // module name
"C++ Smart Cache Python Extension", // module docstring
-1, // per-module state size (-1 means global state)
pysmartcache_methods // method table
};
PyMODINIT_FUNC PyInit_pysmartcache(void) {
return PyModule_Create(&pysmartcache_module);
}
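If this extension builds, the exported functions operate on an opaque PyCapsule handle rather than a class. A minimal sketch of that calling convention (values are illustrative; only the new/put/get functions registered above are assumed to exist):

# Sketch of the raw capsule-based API (assumes the extension was compiled).
import pysmartcache

cache = pysmartcache.lru_cache_new(100)            # PyCapsule wrapping an LRUCache*
pysmartcache.lru_cache_put(cache, "key", "value")  # -> True
print(pysmartcache.lru_cache_get(cache, "key"))    # -> "value"
print(pysmartcache.lru_cache_get(cache, "nope"))   # -> "" (this variant returns an empty string on a miss)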

150
cpp_cache/pysmartcache_minimal.cpp Normal file
View File

@@ -0,0 +1,150 @@
// pysmartcache_minimal.cpp
// Minimal Python extension for smart cache system
#include <Python.h>
#include "smart_cache_minimal.h"
// LRUCache functions
static PyObject* py_lru_cache_new(PyObject* self, PyObject* args) {
int max_size = 1000;
if (!PyArg_ParseTuple(args, "|i", &max_size)) {
return NULL;
}
LRUCache* cache = new LRUCache(max_size);
return PyCapsule_New(cache, "LRUCache", NULL);
}
static PyObject* py_lru_cache_put(PyObject* self, PyObject* args) {
PyObject* capsule;
const char* key;
const char* value = NULL;  // optional argument, default to NULL
if (!PyArg_ParseTuple(args, "Os|s", &capsule, &key, &value)) {
return NULL;
}
LRUCache* cache = (LRUCache*)PyCapsule_GetPointer(capsule, "LRUCache");
if (cache == NULL) {
return NULL;
}
const char* val = value ? value : "";
bool success = cache->put(key, val);
return PyBool_FromLong(success);
}
static PyObject* py_lru_cache_get(PyObject* self, PyObject* args) {
PyObject* capsule;
const char* key;
if (!PyArg_ParseTuple(args, "Os", &capsule, &key)) {
return NULL;
}
LRUCache* cache = (LRUCache*)PyCapsule_GetPointer(capsule, "LRUCache");
if (cache == NULL) {
return NULL;
}
std::string result = cache->get(key);
if (result.empty()) {
Py_RETURN_NONE;
}
return PyUnicode_FromString(result.c_str());
}
// BuildCache functions
static PyObject* py_build_cache_new(PyObject* self, PyObject* args) {
const char* cache_dir = ".cache";
if (!PyArg_ParseTuple(args, "|s", &cache_dir)) {
return NULL;
}
BuildCache* cache = new BuildCache(cache_dir);
return PyCapsule_New(cache, "BuildCache", NULL);
}
static PyObject* py_build_cache_result(PyObject* self, PyObject* args) {
PyObject* capsule;
const char* target;
const char* command;
const char* result = NULL;  // optional argument, default to NULL
if (!PyArg_ParseTuple(args, "Oss|s", &capsule, &target, &command, &result)) {
return NULL;
}
}
BuildCache* cache = (BuildCache*)PyCapsule_GetPointer(capsule, "BuildCache");
if (cache == NULL) {
return NULL;
}
const char* res = result ? result : "";
bool success = cache->cache_result(target, command, res);
return PyBool_FromLong(success);
}
static PyObject* py_build_cache_get(PyObject* self, PyObject* args) {
PyObject* capsule;
const char* target;
const char* command;
if (!PyArg_ParseTuple(args, "Oss", &capsule, &target, &command)) {
return NULL;
}
BuildCache* cache = (BuildCache*)PyCapsule_GetPointer(capsule, "BuildCache");
if (cache == NULL) {
return NULL;
}
std::string res = cache->get_result(target, command);
if (res.empty()) {
Py_RETURN_NONE;
}
return PyUnicode_FromString(res.c_str());
}
static PyObject* py_build_cache_needs_rebuild(PyObject* self, PyObject* args) {
PyObject* capsule;
const char* target;
const char* command;
if (!PyArg_ParseTuple(args, "Oss", &capsule, &target, &command)) {
return NULL;
}
BuildCache* cache = (BuildCache*)PyCapsule_GetPointer(capsule, "BuildCache");
if (cache == NULL) {
return NULL;
}
bool needs = cache->needs_rebuild(target, command);
return PyBool_FromLong(needs);
}
// Method definitions
static PyMethodDef PySmartCacheMethods[] = {
{"lru_cache_new", py_lru_cache_new, METH_VARARGS, "Create LRUCache"},
{"lru_cache_put", py_lru_cache_put, METH_VARARGS, "Put to LRUCache"},
{"lru_cache_get", py_lru_cache_get, METH_VARARGS, "Get from LRUCache"},
{"build_cache_new", py_build_cache_new, METH_VARARGS, "Create BuildCache"},
{"build_cache_result", py_build_cache_result, METH_VARARGS, "Cache build result"},
{"build_cache_get", py_build_cache_get, METH_VARARGS, "Get cached build result"},
{"build_cache_needs_rebuild", py_build_cache_needs_rebuild, METH_VARARGS, "Check if rebuild needed"},
{NULL, NULL, 0, NULL}
};
// Module definition
static struct PyModuleDef pysmartcachemodule = {
PyModuleDef_HEAD_INIT,
"pysmartcache",
"Sikuwa Smart Cache Python Extension",
-1,
PySmartCacheMethods
};
// Module initialization
PyMODINIT_FUNC PyInit_pysmartcache(void) {
return PyModule_Create(&pysmartcachemodule);
}
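The minimal build cache keys results on target and command only, with no dependency hashing. A hedged sketch of the intended call pattern, assuming this minimal variant is the one compiled into pysmartcache:

# Sketch of the minimal BuildCache API (target + command only, no dependency list).
import pysmartcache

bc = pysmartcache.build_cache_new(".cache")
cmd = "gcc -o app main.c"
if pysmartcache.build_cache_needs_rebuild(bc, "app", cmd):
    pysmartcache.build_cache_result(bc, "app", cmd, "built app")
print(pysmartcache.build_cache_get(bc, "app", cmd))  # -> "built app", or None on a miss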

221
cpp_cache/pysmartcache_simple.cpp Normal file
View File

@@ -0,0 +1,221 @@
// sikuwa/cpp_cache/pysmartcache_simple.cpp
// Python extension for the simplified smart cache system
#include <Python.h>
#include "smart_cache_simple.h"
// Capsule-based wrappers for LRUCache and BuildCache
// This simple extension only exposes the basic functionality
// LRUCache functions
static PyObject* py_lru_cache_new(PyObject* self, PyObject* args) {
int max_size = 1000;
if (!PyArg_ParseTuple(args, "|i", &max_size)) {
return nullptr;
}
LRUCache* cache = new LRUCache(max_size);
return PyCapsule_New(cache, "LRUCache", [](PyObject* capsule) {
LRUCache* cache = static_cast<LRUCache*>(PyCapsule_GetPointer(capsule, "LRUCache"));
delete cache;
});
}
static PyObject* py_lru_cache_put(PyObject* self, PyObject* args) {
PyObject* capsule;
const char* key;
const char* value = nullptr;  // optional argument, default to nullptr
if (!PyArg_ParseTuple(args, "Os|s", &capsule, &key, &value)) {
return nullptr;
}
LRUCache* cache = static_cast<LRUCache*>(PyCapsule_GetPointer(capsule, "LRUCache"));
if (cache == nullptr) {
return nullptr;
}
bool success = cache->put(key, value ? value : "");
return PyBool_FromLong(success);
}
static PyObject* py_lru_cache_get(PyObject* self, PyObject* args) {
PyObject* capsule;
const char* key;
if (!PyArg_ParseTuple(args, "Os", &capsule, &key)) {
return nullptr;
}
LRUCache* cache = static_cast<LRUCache*>(PyCapsule_GetPointer(capsule, "LRUCache"));
if (cache == nullptr) {
return nullptr;
}
std::string result = cache->get(key);
if (result.empty()) {
Py_RETURN_NONE;
}
return PyUnicode_FromString(result.c_str());
}
// BuildCache functions
static PyObject* py_build_cache_new(PyObject* self, PyObject* args) {
const char* cache_dir = ".cache";
int max_size = 1000000000;
if (!PyArg_ParseTuple(args, "|si", &cache_dir, &max_size)) {
return nullptr;
}
BuildCache* cache = new BuildCache(cache_dir, max_size);
return PyCapsule_New(cache, "BuildCache", [](PyObject* capsule) {
BuildCache* cache = static_cast<BuildCache*>(PyCapsule_GetPointer(capsule, "BuildCache"));
delete cache;
});
}
static PyObject* py_cache_build_result(PyObject* self, PyObject* args) {
PyObject* capsule;
const char* target;
const char* command;
PyObject* dependencies_obj;
const char* result = nullptr;  // optional argument, default to nullptr
if (!PyArg_ParseTuple(args, "OssO|s", &capsule, &target, &command, &dependencies_obj, &result)) {
return nullptr;
}
BuildCache* cache = static_cast<BuildCache*>(PyCapsule_GetPointer(capsule, "BuildCache"));
if (cache == nullptr) {
return nullptr;
}
// Convert Python list to C++ vector
std::vector<std::string> dependencies;
if (!PyList_Check(dependencies_obj)) {
PyErr_SetString(PyExc_TypeError, "dependencies must be a list");
return nullptr;
}
Py_ssize_t len = PyList_Size(dependencies_obj);
for (Py_ssize_t i = 0; i < len; i++) {
PyObject* item = PyList_GetItem(dependencies_obj, i);
if (!PyUnicode_Check(item)) {
PyErr_SetString(PyExc_TypeError, "dependencies must contain strings");
return nullptr;
}
dependencies.push_back(PyUnicode_AsUTF8(item));
}
bool success = cache->cache_build_result(target, command, dependencies, result ? result : "");
return PyBool_FromLong(success);
}
static PyObject* py_get_cached_build_result(PyObject* self, PyObject* args) {
PyObject* capsule;
const char* target;
const char* command;
PyObject* dependencies_obj;
if (!PyArg_ParseTuple(args, "OssO", &capsule, &target, &command, &dependencies_obj)) {
return nullptr;
}
BuildCache* cache = static_cast<BuildCache*>(PyCapsule_GetPointer(capsule, "BuildCache"));
if (cache == nullptr) {
return nullptr;
}
// Convert Python list to C++ vector
std::vector<std::string> dependencies;
if (!PyList_Check(dependencies_obj)) {
PyErr_SetString(PyExc_TypeError, "dependencies must be a list");
return nullptr;
}
Py_ssize_t len = PyList_Size(dependencies_obj);
for (Py_ssize_t i = 0; i < len; i++) {
PyObject* item = PyList_GetItem(dependencies_obj, i);
if (!PyUnicode_Check(item)) {
PyErr_SetString(PyExc_TypeError, "dependencies must contain strings");
return nullptr;
}
dependencies.push_back(PyUnicode_AsUTF8(item));
}
std::string cached_result = cache->get_cached_build_result(target, command, dependencies);
if (cached_result.empty()) {
Py_RETURN_NONE;
}
return PyUnicode_FromString(cached_result.c_str());
}
static PyObject* py_needs_rebuild(PyObject* self, PyObject* args) {
PyObject* capsule;
const char* target;
const char* command;
PyObject* dependencies_obj;
if (!PyArg_ParseTuple(args, "OssO", &capsule, &target, &command, &dependencies_obj)) {
return nullptr;
}
BuildCache* cache = static_cast<BuildCache*>(PyCapsule_GetPointer(capsule, "BuildCache"));
if (cache == nullptr) {
return nullptr;
}
// Convert Python list to C++ vector
std::vector<std::string> dependencies;
if (!PyList_Check(dependencies_obj)) {
PyErr_SetString(PyExc_TypeError, "dependencies must be a list");
return nullptr;
}
Py_ssize_t len = PyList_Size(dependencies_obj);
for (Py_ssize_t i = 0; i < len; i++) {
PyObject* item = PyList_GetItem(dependencies_obj, i);
if (!PyUnicode_Check(item)) {
PyErr_SetString(PyExc_TypeError, "dependencies must contain strings");
return nullptr;
}
dependencies.push_back(PyUnicode_AsUTF8(item));
}
bool needs = cache->needs_rebuild(target, command, dependencies);
return PyBool_FromLong(needs);
}
// Module method definitions
static PyMethodDef PySmartCacheMethods[] = {
// LRUCache methods
{"lru_cache_new", py_lru_cache_new, METH_VARARGS, "Create a new LRUCache instance"},
{"lru_cache_put", py_lru_cache_put, METH_VARARGS, "Put a key-value pair into the LRUCache"},
{"lru_cache_get", py_lru_cache_get, METH_VARARGS, "Get a value from the LRUCache"},
// BuildCache methods
{"build_cache_new", py_build_cache_new, METH_VARARGS, "Create a new BuildCache instance"},
{"cache_build_result", py_cache_build_result, METH_VARARGS, "Cache a build result"},
{"get_cached_build_result", py_get_cached_build_result, METH_VARARGS, "Get a cached build result"},
{"needs_rebuild", py_needs_rebuild, METH_VARARGS, "Check if a build needs to be redone"},
{nullptr, nullptr, 0, nullptr}
};
// Module definition
static struct PyModuleDef pysmartcachemodule = {
PyModuleDef_HEAD_INIT,
"pysmartcache",
"Sikuwa Smart Cache Python Extension",
-1,
PySmartCacheMethods
};
// Module initialization function
PyMODINIT_FUNC PyInit_pysmartcache(void) {
return PyModule_Create(&pysmartcachemodule);
}
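Unlike the minimal variant, this simple extension takes an explicit dependency list whose file hashes feed into the cache key. A sketch of how it would be called (paths and commands are placeholders, and this assumes the simple variant is the compiled pysmartcache module):

# Sketch of the dependency-aware API exposed by the simple extension (illustrative paths).
import pysmartcache

bc = pysmartcache.build_cache_new(".cache", 1000000)
deps = ["src/a.cpp", "include/a.h"]   # each file is hashed into the cache key
cmd = "g++ -c src/a.cpp -o a.o"
if pysmartcache.needs_rebuild(bc, "a.o", cmd, deps):
    pysmartcache.cache_build_result(bc, "a.o", cmd, deps, "built a.o")
print(pysmartcache.get_cached_build_result(bc, "a.o", cmd, deps))  # -> "built a.o", or None on a miss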

30
cpp_cache/setup.py Normal file
View File

@@ -0,0 +1,30 @@
# sikuwa/cpp_cache/setup.py
# Setup script for building and installing the C++ smart cache extension module
from setuptools import setup, Extension
import sys
# Get Python's include directory
py_include_dirs = [sys.prefix + '/include']
# Define the extension module
smart_cache_extension = Extension(
    'pysmartcache',  # extension module name
    sources=['smart_cache_minimal.cpp', 'pysmartcache_minimal.cpp'],  # source files
    include_dirs=[".", *py_include_dirs],  # include directories
    language='c++',  # compile as C++
    # Use the MSVC flag on Windows and the GCC/Clang flag elsewhere
    extra_compile_args=['/std:c++17'] if sys.platform == 'win32' else ['-std=c++17'],
)
# setup() configuration
setup(
    name='sikuwa_cpp_cache',
    version='0.1',
    description='C++ Smart Cache System for Sikuwa',
    author='Sikuwa Team',
    author_email='',
    packages=['sikuwa.cpp_cache'],
    package_dir={'sikuwa.cpp_cache': '.'},
    ext_modules=[smart_cache_extension],
    zip_safe=False,
)
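For an in-place build of the extension next to the sources, a small driver like the following should be equivalent to running "python setup.py build_ext --inplace" inside cpp_cache/ (a convenience sketch, not part of the commit):

# Hypothetical helper that builds the extension in place using this setup.py.
import subprocess
import sys

subprocess.check_call(
    [sys.executable, "setup.py", "build_ext", "--inplace"],
    cwd="cpp_cache",  # assumes the script is run from the repository root
)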

393
cpp_cache/smart_cache.cpp Normal file
View File

@@ -0,0 +1,393 @@
// sikuwa/cpp_cache/smart_cache.cpp
// Implementation of the smart cache strategies and the build cache system
#include "smart_cache.h"
#include <iostream>
#include <fstream>
#include <sstream>
#include <algorithm>
#include <cstddef>
#include <filesystem>
#include <functional>
// LRU Cache Implementation
LRUCache::LRUCache(size_t max_size) : max_size_(max_size) {
}
LRUCache::~LRUCache() {
clear();
}
bool LRUCache::contains(const std::string& key) const {
std::lock_guard<std::mutex> lock(mutex_);
return cache_.find(key) != cache_.end();
}
bool LRUCache::put(const std::string& key, const std::string& value) {
std::lock_guard<std::mutex> lock(mutex_);
// Check if key already exists
auto it = cache_.find(key);
if (it != cache_.end()) {
// Update value and move to front
it->second.first = value;
usage_order_.erase(it->second.second);
usage_order_.push_front(key);
it->second.second = usage_order_.begin();
return true;
}
// Check if cache is full
if (cache_.size() >= max_size_) {
// Remove least recently used item
std::string lru_key = usage_order_.back();
usage_order_.pop_back();
cache_.erase(lru_key);
}
// Add new item
usage_order_.push_front(key);
cache_[key] = std::make_pair(value, usage_order_.begin());
return true;
}
std::string LRUCache::get(const std::string& key) {
std::lock_guard<std::mutex> lock(mutex_);
auto it = cache_.find(key);
if (it == cache_.end()) {
return "";
}
// Move accessed key to front
usage_order_.erase(it->second.second);
usage_order_.push_front(key);
it->second.second = usage_order_.begin();
return it->second.first;
}
bool LRUCache::remove(const std::string& key) {
std::lock_guard<std::mutex> lock(mutex_);
auto it = cache_.find(key);
if (it == cache_.end()) {
return false;
}
usage_order_.erase(it->second.second);
cache_.erase(it);
return true;
}
void LRUCache::clear() {
std::lock_guard<std::mutex> lock(mutex_);
cache_.clear();
usage_order_.clear();
}
size_t LRUCache::size() const {
std::lock_guard<std::mutex> lock(mutex_);
return cache_.size();
}
size_t LRUCache::max_size() const {
return max_size_;
}
void LRUCache::set_max_size(size_t max_size) {
std::lock_guard<std::mutex> lock(mutex_);
max_size_ = max_size;
// Evict items if necessary
while (cache_.size() > max_size_) {
std::string lru_key = usage_order_.back();
usage_order_.pop_back();
cache_.erase(lru_key);
}
}
void LRUCache::dump_cache_stats() const {
std::lock_guard<std::mutex> lock(mutex_);
std::cout << "LRU Cache Statistics:" << std::endl;
std::cout << " Current size: " << cache_.size() << std::endl;
std::cout << " Maximum size: " << max_size_ << std::endl;
std::cout << " Item hit ratio: -" << std::endl; // 需要实现命中计数器
}
// LFU Cache Implementation
LFUCache::LFUCache(size_t max_size) : max_size_(max_size), min_frequency_(0) {
}
LFUCache::~LFUCache() {
clear();
}
bool LFUCache::contains(const std::string& key) const {
std::lock_guard<std::mutex> lock(mutex_);
return cache_.find(key) != cache_.end();
}
bool LFUCache::put(const std::string& key, const std::string& value) {
std::lock_guard<std::mutex> lock(mutex_);
// Check if key already exists
auto it = cache_.find(key);
if (it != cache_.end()) {
// Update value and frequency
it->second.value = value;
// Increment frequency and update position
size_t old_freq = it->second.frequency;
size_t new_freq = old_freq + 1;
// Remove from old frequency list
freq_map_[old_freq].erase(it->second.usage_iter);
if (freq_map_[old_freq].empty() && old_freq == min_frequency_) {
min_frequency_++;
}
// Add to new frequency list
freq_map_[new_freq].push_front(key);
it->second.frequency = new_freq;
it->second.usage_iter = freq_map_[new_freq].begin();
return true;
}
// Check if cache is full
if (cache_.size() >= max_size_) {
// Remove least frequently used item
std::string lfu_key = freq_map_[min_frequency_].back();
freq_map_[min_frequency_].pop_back();
cache_.erase(lfu_key);
}
// Add new item
min_frequency_ = 1;
freq_map_[1].push_front(key);
cache_[key] = {value, 1, freq_map_[1].begin()};
return true;
}
std::string LFUCache::get(const std::string& key) {
std::lock_guard<std::mutex> lock(mutex_);
auto it = cache_.find(key);
if (it == cache_.end()) {
return "";
}
// Increment frequency
size_t old_freq = it->second.frequency;
size_t new_freq = old_freq + 1;
// Remove from old frequency list
freq_map_[old_freq].erase(it->second.usage_iter);
if (freq_map_[old_freq].empty() && old_freq == min_frequency_) {
min_frequency_++;
}
// Add to new frequency list
freq_map_[new_freq].push_front(key);
it->second.frequency = new_freq;
it->second.usage_iter = freq_map_[new_freq].begin();
return it->second.value;
}
bool LFUCache::remove(const std::string& key) {
std::lock_guard<std::mutex> lock(mutex_);
auto it = cache_.find(key);
if (it == cache_.end()) {
return false;
}
// Remove from frequency list
size_t freq = it->second.frequency;
freq_map_[freq].erase(it->second.usage_iter);
if (freq_map_[freq].empty() && freq == min_frequency_) {
min_frequency_++;
}
// Remove from cache
cache_.erase(it);
return true;
}
void LFUCache::clear() {
std::lock_guard<std::mutex> lock(mutex_);
cache_.clear();
freq_map_.clear();
min_frequency_ = 0;
}
size_t LFUCache::size() const {
std::lock_guard<std::mutex> lock(mutex_);
return cache_.size();
}
size_t LFUCache::max_size() const {
return max_size_;
}
void LFUCache::set_max_size(size_t max_size) {
std::lock_guard<std::mutex> lock(mutex_);
max_size_ = max_size;
// Evict items if necessary
while (cache_.size() > max_size_) {
std::string lfu_key = freq_map_[min_frequency_].back();
freq_map_[min_frequency_].pop_back();
cache_.erase(lfu_key);
}
}
void LFUCache::dump_cache_stats() const {
std::lock_guard<std::mutex> lock(mutex_);
std::cout << "LFU Cache Statistics:" << std::endl;
std::cout << " Current size: " << cache_.size() << std::endl;
std::cout << " Maximum size: " << max_size_ << std::endl;
std::cout << " Minimum frequency: " << min_frequency_ << std::endl;
std::cout << " Frequency distribution:" << std::endl;
for (const auto& freq_entry : freq_map_) {
if (!freq_entry.second.empty()) {
std::cout << " Frequency " << freq_entry.first << ": " << freq_entry.second.size() << " items" << std::endl;
}
}
}
// BuildCache Implementation
// Hash helper implemented with the C++ standard library
std::string calculate_string_hash(const std::string& input) {
std::hash<std::string> hasher;
size_t hash_val = hasher(input);
std::stringstream ss;
ss << std::hex << hash_val;
return ss.str();
}
BuildCache::BuildCache(const std::string& cache_dir, size_t max_size) : cache_dir_(cache_dir) {
// Create cache directory if it doesn't exist
std::filesystem::create_directories(cache_dir_);
// Default to LRU cache
cache_ = std::make_unique<LRUCache>(max_size);
}
BuildCache::~BuildCache() {
}
void BuildCache::set_cache_strategy(const std::string& strategy) {
std::lock_guard<std::mutex> lock(mutex_);
size_t max_size = cache_->max_size();
// Note: switching strategies creates a fresh cache and discards existing entries
if (strategy == "lfu") {
cache_ = std::make_unique<LFUCache>(max_size);
} else {
// Default to LRU
cache_ = std::make_unique<LRUCache>(max_size);
}
}
std::string BuildCache::calculate_file_hash(const std::string& file_path) {
std::ifstream file(file_path, std::ios::binary);
if (!file) {
return "";
}
std::string content((std::istreambuf_iterator<char>(file)), std::istreambuf_iterator<char>());
return calculate_string_hash(content);
}
std::string BuildCache::calculate_command_hash(const std::string& command) {
return calculate_string_hash(command);
}
bool BuildCache::has_file_changed(const std::string& file_path, const std::string& last_hash) {
std::string current_hash = calculate_file_hash(file_path);
return current_hash != last_hash;
}
bool BuildCache::cache_build_result(const std::string& target,
const std::string& command,
const std::vector<std::string>& dependencies,
const std::string& result) {
std::lock_guard<std::mutex> lock(mutex_);
// Create cache key: target + command + dependencies hashes
std::stringstream key_stream;
key_stream << "target=" << target << ";";
key_stream << "command=" << calculate_command_hash(command) << ";";
for (const auto& dep : dependencies) {
key_stream << "dep=" << dep << ":" << calculate_file_hash(dep) << ";";
}
std::string cache_key = calculate_string_hash(key_stream.str());
// Cache the result
return cache_->put(cache_key, result);
}
std::string BuildCache::get_cached_build_result(const std::string& target,
const std::string& command,
const std::vector<std::string>& dependencies) {
std::lock_guard<std::mutex> lock(mutex_);
// Create cache key (same as in cache_build_result)
std::stringstream key_stream;
key_stream << "target=" << target << ";";
key_stream << "command=" << calculate_command_hash(command) << ";";
for (const auto& dep : dependencies) {
key_stream << "dep=" << dep << ":" << calculate_file_hash(dep) << ";";
}
std::string cache_key = calculate_string_hash(key_stream.str());
// Get cached result
return cache_->get(cache_key);
}
bool BuildCache::needs_rebuild(const std::string& target,
const std::string& command,
const std::vector<std::string>& dependencies) {
std::string cached_result = get_cached_build_result(target, command, dependencies);
return cached_result.empty();
}
void BuildCache::clean_expired_cache(std::chrono::duration<int> max_age) {
// For simplicity, we're not implementing this yet
// In a real implementation, we would track item creation times and remove old items
std::cout << "Cleaning expired cache not implemented yet." << std::endl;
}
void BuildCache::clean_target_cache(const std::string& target) {
// For simplicity, we're not implementing this yet
// In a real implementation, we would find all cache entries related to the target and remove them
std::cout << "Cleaning target cache not implemented yet." << std::endl;
}
void BuildCache::clean_all_cache() {
std::lock_guard<std::mutex> lock(mutex_);
cache_->clear();
// Also clean the cache directory
if (std::filesystem::exists(cache_dir_)) {
std::filesystem::remove_all(cache_dir_);
std::filesystem::create_directories(cache_dir_);
}
}
void BuildCache::dump_build_cache_stats() const {
std::lock_guard<std::mutex> lock(mutex_);
std::cout << "Build Cache Statistics:" << std::endl;
std::cout << " Cache directory: " << cache_dir_ << std::endl;
cache_->dump_cache_stats();
}
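For reference, the cache key built in cache_build_result and get_cached_build_result concatenates the target name, the command hash, and one name:hash pair per dependency, then hashes the whole string again. A rough Python mirror of that scheme (using hashlib instead of std::hash, so the resulting key values will differ from the C++ ones):

# Rough Python mirror of the C++ cache-key scheme above (different hash, so values won't match).
import hashlib

def _digest(data: bytes) -> str:
    return hashlib.sha256(data).hexdigest()

def build_cache_key(target, command, dependencies):
    parts = [f"target={target};", f"command={_digest(command.encode())};"]
    for dep in dependencies:
        try:
            with open(dep, "rb") as f:
                dep_hash = _digest(f.read())
        except OSError:
            dep_hash = ""  # mirrors calculate_file_hash() returning "" for unreadable files
        parts.append(f"dep={dep}:{dep_hash};")
    return _digest("".join(parts).encode())

print(build_cache_key("app", "gcc -o app main.c", ["main.c"]))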

153
cpp_cache/smart_cache.h Normal file
View File

@@ -0,0 +1,153 @@
// sikuwa/cpp_cache/smart_cache.h
// Smart cache strategies and build cache system
// Uses only the C++ standard library and the Python C API
#ifndef SMART_CACHE_H
#define SMART_CACHE_H
#include <iostream>
#include <unordered_map>
#include <list>
#include <string>
#include <chrono>
#include <vector>
#include <memory>
#include <mutex>
#include <functional>
// Metadata attached to a cache item
struct CacheItemMetadata {
std::chrono::time_point<std::chrono::system_clock> created_at;
std::chrono::time_point<std::chrono::system_clock> last_accessed;
size_t size_in_bytes;
int access_count;
std::vector<std::string> dependencies;
};
// A cache item
template<typename T>
struct CacheItem {
T value;
CacheItemMetadata metadata;
};
// Base cache interface
class BaseCache {
public:
virtual ~BaseCache() = default;
virtual bool contains(const std::string& key) const = 0;
virtual bool put(const std::string& key, const std::string& value) = 0;
virtual std::string get(const std::string& key) = 0;
virtual bool remove(const std::string& key) = 0;
virtual void clear() = 0;
virtual size_t size() const = 0;
virtual size_t max_size() const = 0;
virtual void set_max_size(size_t max_size) = 0;
virtual void dump_cache_stats() const = 0;
};
// LRU (Least Recently Used) cache implementation
class LRUCache : public BaseCache {
private:
size_t max_size_;
std::unordered_map<std::string, std::pair<std::string, std::list<std::string>::iterator>> cache_;
std::list<std::string> usage_order_;
mutable std::mutex mutex_;
public:
explicit LRUCache(size_t max_size = 1000);
~LRUCache() override;
bool contains(const std::string& key) const override;
bool put(const std::string& key, const std::string& value) override;
std::string get(const std::string& key) override;
bool remove(const std::string& key) override;
void clear() override;
size_t size() const override;
size_t max_size() const override;
void set_max_size(size_t max_size) override;
void dump_cache_stats() const override;
};
// LFU (Least Frequently Used) cache implementation
class LFUCache : public BaseCache {
private:
size_t max_size_;
struct Node {
std::string value;
size_t frequency;
std::list<std::string>::iterator usage_iter;
};
std::unordered_map<std::string, Node> cache_;
std::unordered_map<size_t, std::list<std::string>> freq_map_;
size_t min_frequency_;
mutable std::mutex mutex_;
public:
explicit LFUCache(size_t max_size = 1000);
~LFUCache() override;
bool contains(const std::string& key) const override;
bool put(const std::string& key, const std::string& value) override;
std::string get(const std::string& key) override;
bool remove(const std::string& key) override;
void clear() override;
size_t size() const override;
size_t max_size() const override;
void set_max_size(size_t max_size) override;
void dump_cache_stats() const override;
};
// Build cache system
class BuildCache {
private:
std::unique_ptr<BaseCache> cache_;
std::string cache_dir_;
mutable std::mutex mutex_;
// Compute the hash of a file's contents
std::string calculate_file_hash(const std::string& file_path);
// Compute the hash of a build command
std::string calculate_command_hash(const std::string& command);
// Check whether a file has changed
bool has_file_changed(const std::string& file_path, const std::string& last_hash);
public:
BuildCache(const std::string& cache_dir = ".cache", size_t max_size = 1000000000);
~BuildCache();
// Set the cache strategy ("lru" or "lfu")
void set_cache_strategy(const std::string& strategy);
// Cache a build result
bool cache_build_result(const std::string& target,
const std::string& command,
const std::vector<std::string>& dependencies,
const std::string& result);
// Get a cached build result
std::string get_cached_build_result(const std::string& target,
const std::string& command,
const std::vector<std::string>& dependencies);
// Check whether a rebuild is needed
bool needs_rebuild(const std::string& target,
const std::string& command,
const std::vector<std::string>& dependencies);
// Clean expired cache entries
void clean_expired_cache(std::chrono::duration<int> max_age);
// Clean cache entries for a specific target
void clean_target_cache(const std::string& target);
// Clean all cache entries
void clean_all_cache();
// Dump cache statistics
void dump_build_cache_stats() const;
};
#endif // SMART_CACHE_H

69
cpp_cache/smart_cache_minimal.cpp Normal file
View File

@@ -0,0 +1,69 @@
// smart_cache_minimal.cpp
// Minimal implementation of smart cache system
#include <iostream>
#include <string>
#include <unordered_map>
#include <fstream>
#include <ctime>
#include <cstdlib>
#include "smart_cache_minimal.h"
// LRUCache implementation
// Note: this minimal variant stores entries in an unbounded map; max_size_ is
// kept for interface compatibility but no LRU eviction is performed.
LRUCache::LRUCache(size_t max_size) : max_size_(max_size) {}
bool LRUCache::put(const std::string& key, const std::string& value) {
std::lock_guard<std::mutex> lock(mutex_);
cache_[key] = value;
return true;
}
std::string LRUCache::get(const std::string& key) {
std::lock_guard<std::mutex> lock(mutex_);
auto it = cache_.find(key);
if (it == cache_.end()) {
return "";
}
return it->second;
}
bool LRUCache::contains(const std::string& key) {
std::lock_guard<std::mutex> lock(mutex_);
return cache_.find(key) != cache_.end();
}
void LRUCache::clear() {
std::lock_guard<std::mutex> lock(mutex_);
cache_.clear();
}
// Helper function to create directories
// Note: shells out to the platform "mkdir" command; it does not create parent
// directories and fails if the directory already exists.
bool create_directory_if_not_exists(const std::string& path) {
std::string cmd = "mkdir " + path;
int result = system(cmd.c_str());
return result == 0;
}
// BuildCache implementation
BuildCache::BuildCache(const std::string& cache_dir) : cache_(10000), cache_dir_(cache_dir) {
create_directory_if_not_exists(cache_dir_);
}
bool BuildCache::cache_result(const std::string& target, const std::string& command, const std::string& result) {
std::lock_guard<std::mutex> lock(mutex_);
std::string key = target + "|" + command;
cache_.put(key, result);
return true;
}
std::string BuildCache::get_result(const std::string& target, const std::string& command) {
std::lock_guard<std::mutex> lock(mutex_);
std::string key = target + "|" + command;
return cache_.get(key);
}
bool BuildCache::needs_rebuild(const std::string& target, const std::string& command) {
std::lock_guard<std::mutex> lock(mutex_);
std::string key = target + "|" + command;
return !cache_.contains(key);
}

46
cpp_cache/smart_cache_minimal.h Normal file
View File

@@ -0,0 +1,46 @@
// sikuwa/cpp_cache/smart_cache_minimal.h
// Minimal version of the smart cache system
#ifndef SMART_CACHE_MINIMAL_H
#define SMART_CACHE_MINIMAL_H
#include <iostream>
#include <unordered_map>
#include <string>
#include <vector>
#include <mutex>
// Simple LRU cache implementation
class LRUCache {
private:
size_t max_size_;
std::unordered_map<std::string, std::string> cache_;
std::mutex mutex_;
public:
LRUCache(size_t max_size = 1000);
bool put(const std::string& key, const std::string& value);
std::string get(const std::string& key);
bool contains(const std::string& key);
void clear();
};
// Simple build cache system
class BuildCache {
private:
LRUCache cache_;
std::string cache_dir_;
std::mutex mutex_;
public:
BuildCache(const std::string& cache_dir = ".cache");
bool cache_result(const std::string& target,
const std::string& command,
const std::string& result);
std::string get_result(const std::string& target,
const std::string& command);
bool needs_rebuild(const std::string& target,
const std::string& command);
};
#endif // SMART_CACHE_MINIMAL_H

214
cpp_cache/smart_cache_simple.cpp Normal file
View File

@@ -0,0 +1,214 @@
// sikuwa/cpp_cache/smart_cache_simple.cpp
// Implementation of the simplified smart cache system
#include "smart_cache_simple.h"
#include <iostream>
#include <fstream>
#include <sstream>
#include <algorithm>
#include <cstddef>
#include <filesystem>
#include <functional>
// LRU Cache Implementation
LRUCache::LRUCache(size_t max_size) : max_size_(max_size) {
}
LRUCache::~LRUCache() {
clear();
}
bool LRUCache::contains(const std::string& key) {
std::lock_guard<std::mutex> lock(mutex_);
return cache_.find(key) != cache_.end();
}
bool LRUCache::put(const std::string& key, const std::string& value) {
std::lock_guard<std::mutex> lock(mutex_);
// Check if key already exists
auto it = cache_.find(key);
if (it != cache_.end()) {
// Update value and move to front
it->second.first = value;
usage_order_.erase(it->second.second);
usage_order_.push_front(key);
it->second.second = usage_order_.begin();
return true;
}
// Check if cache is full
if (cache_.size() >= max_size_) {
// Remove least recently used item
std::string lru_key = usage_order_.back();
usage_order_.pop_back();
cache_.erase(lru_key);
}
// Add new item
usage_order_.push_front(key);
cache_[key] = std::make_pair(value, usage_order_.begin());
return true;
}
std::string LRUCache::get(const std::string& key) {
std::lock_guard<std::mutex> lock(mutex_);
auto it = cache_.find(key);
if (it == cache_.end()) {
return "";
}
// Move accessed key to front
usage_order_.erase(it->second.second);
usage_order_.push_front(key);
it->second.second = usage_order_.begin();
return it->second.first;
}
bool LRUCache::remove(const std::string& key) {
std::lock_guard<std::mutex> lock(mutex_);
auto it = cache_.find(key);
if (it == cache_.end()) {
return false;
}
usage_order_.erase(it->second.second);
cache_.erase(it);
return true;
}
void LRUCache::clear() {
std::lock_guard<std::mutex> lock(mutex_);
cache_.clear();
usage_order_.clear();
}
size_t LRUCache::size() {
std::lock_guard<std::mutex> lock(mutex_);
return cache_.size();
}
size_t LRUCache::max_size() {
return max_size_;
}
void LRUCache::set_max_size(size_t max_size) {
std::lock_guard<std::mutex> lock(mutex_);
max_size_ = max_size;
// Evict items if necessary
while (cache_.size() > max_size_) {
std::string lru_key = usage_order_.back();
usage_order_.pop_back();
cache_.erase(lru_key);
}
}
void LRUCache::dump_cache_stats() {
std::lock_guard<std::mutex> lock(mutex_);
std::cout << "LRU Cache Statistics:" << std::endl;
std::cout << " Current size: " << cache_.size() << std::endl;
std::cout << " Maximum size: " << max_size_ << std::endl;
}
// BuildCache Implementation
// Simple hash helper based on std::hash
std::string BuildCache::calculate_hash(const std::string& input) {
std::hash<std::string> hasher;
size_t hash_val = hasher(input);
std::stringstream ss;
ss << std::hex << hash_val;
return ss.str();
}
std::string BuildCache::calculate_file_hash(const std::string& file_path) {
std::ifstream file(file_path, std::ios::binary);
if (!file) {
return "";
}
std::string content((std::istreambuf_iterator<char>(file)), std::istreambuf_iterator<char>());
return calculate_hash(content);
}
BuildCache::BuildCache(const std::string& cache_dir, size_t max_size) : cache_dir_(cache_dir) {
// Create cache directory if it doesn't exist
std::filesystem::create_directories(cache_dir_);
// Use LRU cache
cache_ = std::make_unique<LRUCache>(max_size);
}
BuildCache::~BuildCache() {
}
bool BuildCache::cache_build_result(const std::string& target,
const std::string& command,
const std::vector<std::string>& dependencies,
const std::string& result) {
std::lock_guard<std::mutex> lock(mutex_);
// Create cache key: target + command + dependencies hashes
std::stringstream key_stream;
key_stream << "target=" << target << ";";
key_stream << "command=" << calculate_hash(command) << ";";
for (const auto& dep : dependencies) {
key_stream << "dep=" << dep << ":" << calculate_file_hash(dep) << ";";
}
std::string cache_key = calculate_hash(key_stream.str());
// Cache the result
return cache_->put(cache_key, result);
}
std::string BuildCache::get_cached_build_result(const std::string& target,
const std::string& command,
const std::vector<std::string>& dependencies) {
std::lock_guard<std::mutex> lock(mutex_);
// Create cache key (same as in cache_build_result)
std::stringstream key_stream;
key_stream << "target=" << target << ";";
key_stream << "command=" << calculate_hash(command) << ";";
for (const auto& dep : dependencies) {
key_stream << "dep=" << dep << ":" << calculate_file_hash(dep) << ";";
}
std::string cache_key = calculate_hash(key_stream.str());
// Get cached result
return cache_->get(cache_key);
}
bool BuildCache::needs_rebuild(const std::string& target,
const std::string& command,
const std::vector<std::string>& dependencies) {
std::string cached_result = get_cached_build_result(target, command, dependencies);
return cached_result.empty();
}
void BuildCache::clean_all_cache() {
std::lock_guard<std::mutex> lock(mutex_);
cache_->clear();
// Also clean the cache directory
if (std::filesystem::exists(cache_dir_)) {
std::filesystem::remove_all(cache_dir_);
std::filesystem::create_directories(cache_dir_);
}
}
void BuildCache::dump_build_cache_stats() {
std::lock_guard<std::mutex> lock(mutex_);
std::cout << "Build Cache Statistics:" << std::endl;
std::cout << " Cache directory: " << cache_dir_ << std::endl;
cache_->dump_cache_stats();
}

77
cpp_cache/smart_cache_simple.h Normal file
View File

@@ -0,0 +1,77 @@
// sikuwa/cpp_cache/smart_cache_simple.h
// Simplified smart cache system
#ifndef SMART_CACHE_SIMPLE_H
#define SMART_CACHE_SIMPLE_H
#include <iostream>
#include <unordered_map>
#include <list>
#include <string>
#include <vector>
#include <memory>
#include <mutex>
// LRU (Least Recently Used) cache implementation
class LRUCache {
private:
size_t max_size_;
std::unordered_map<std::string, std::pair<std::string, std::list<std::string>::iterator>> cache_;
std::list<std::string> usage_order_;
std::mutex mutex_;
public:
LRUCache(size_t max_size = 1000);
~LRUCache();
bool contains(const std::string& key);
bool put(const std::string& key, const std::string& value);
std::string get(const std::string& key);
bool remove(const std::string& key);
void clear();
size_t size();
size_t max_size();
void set_max_size(size_t max_size);
void dump_cache_stats();
};
// Build cache system
class BuildCache {
private:
std::unique_ptr<LRUCache> cache_;
std::string cache_dir_;
std::mutex mutex_;
// Compute the hash of a string
std::string calculate_hash(const std::string& input);
// Compute the hash of a file's contents
std::string calculate_file_hash(const std::string& file_path);
public:
BuildCache(const std::string& cache_dir = ".cache", size_t max_size = 1000000000);
~BuildCache();
// Cache a build result
bool cache_build_result(const std::string& target,
const std::string& command,
const std::vector<std::string>& dependencies,
const std::string& result);
// Get a cached build result
std::string get_cached_build_result(const std::string& target,
const std::string& command,
const std::vector<std::string>& dependencies);
// Check whether a rebuild is needed
bool needs_rebuild(const std::string& target,
const std::string& command,
const std::vector<std::string>& dependencies);
// Clean all cache entries
void clean_all_cache();
// Dump cache statistics
void dump_build_cache_stats();
};
#endif // SMART_CACHE_SIMPLE_H