Skip to content

Cache

TTL and LRU caches with persistent storage support.

Auto-saving cache implementations with TTL and LRU eviction strategies.

LRUCache

Bases: PresistentMixin

A key-value cache with least-recently-used eviction.

Keys are hashed with SHA-256 before storage. When the cache exceeds max_size, the least recently accessed entries are evicted.

Attributes:

Name Type Description
max_size

Maximum number of entries to keep.

Source code in shutils/cache.py
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
class LRUCache(PresistentMixin):
    """A key-value cache with least-recently-used eviction.

    Keys are hashed with SHA-256 before storage. When the cache exceeds
    ``max_size``, the least recently accessed entries are evicted.

    Attributes:
        max_size: Maximum number of entries to keep.
    """

    def __init__(self, max_size=10000, cache_file: str | None = None, save_step: int = 0, save_interval: float = 0):
        """Initialize the LRU cache.

        Args:
            max_size: Maximum number of entries before eviction.
            cache_file: Path to the persistence file.
            save_step: Auto-save after this many writes.
            save_interval: Auto-save after this many seconds.
        """
        # Create the OrderedDict before the mixin runs: the mixin only
        # falls back to a plain dict when no container exists yet.
        self.cache = OrderedDict()
        super().__init__(cache_file, save_step, save_interval)
        self.max_size = max_size
        self.load_cache()
        self.cleanup()

    def get(self, key):
        """Retrieve a value by key, promoting it to the most-recent position.

        Args:
            key: The cache key.

        Returns:
            The cached value, or None if the key does not exist.
        """
        hashed = self._get_hash_str(key)
        try:
            # Promote on hit; raises KeyError on a miss.
            self.cache.move_to_end(hashed)
        except KeyError:
            return None
        return self.cache[hashed]

    def set(self, key, value):
        """Store a key-value pair, moving it to the most-recent position.

        Args:
            key: The cache key.
            value: The value to store.
        """
        hashed = self._get_hash_str(key)
        if hashed in self.cache:
            self.cache.move_to_end(hashed)
        self.cache[hashed] = value
        self.cleanup()  # may evict the least-recent entries
        self._trigger_auto_save()  # run the auto-save check

    def delete(self, key):
        """Remove a key from the cache.

        Args:
            key: The cache key to delete.
        """
        hashed = self._get_hash_str(key)
        if hashed not in self.cache:
            return
        del self.cache[hashed]
        self._trigger_auto_save()

    def clear(self):
        """Remove all entries from the cache and persist immediately."""
        self.cache.clear()
        self.save_cache()

    def cleanup(self):
        """Evict the least-recently-used entries until size is within max_size."""
        # The oldest entry sits at the front of the OrderedDict.
        while len(self.cache) > self.max_size:
            self.cache.popitem(last=False)

__init__(max_size=10000, cache_file=None, save_step=0, save_interval=0)

Initialize the LRU cache.

Parameters:

Name Type Description Default
max_size

Maximum number of entries before eviction.

10000
cache_file str | None

Path to the persistence file.

None
save_step int

Auto-save after this many writes.

0
save_interval float

Auto-save after this many seconds.

0
Source code in shutils/cache.py
282
283
284
285
286
287
288
289
290
291
292
293
294
295
def __init__(self, max_size=10000, cache_file: str | None = None, save_step: int = 0, save_interval: float = 0):
    """Initialize the LRU cache.

    Args:
        max_size: Maximum number of entries before eviction.
        cache_file: Path to the persistence file.
        save_step: Auto-save after this many writes.
        save_interval: Auto-save after this many seconds.
    """
    self.cache = OrderedDict()
    super().__init__(cache_file, save_step, save_interval)
    self.max_size = max_size
    self.load_cache()
    self.cleanup()

cleanup()

Evict the least-recently-used entries until size is within max_size.

Source code in shutils/cache.py
342
343
344
345
def cleanup(self):
    """Evict the least-recently-used entries until size is within max_size."""
    while len(self.cache) > self.max_size:
        self.cache.popitem(last=False)

clear()

Remove all entries from the cache and persist immediately.

Source code in shutils/cache.py
337
338
339
340
def clear(self):
    """Remove all entries from the cache and persist immediately."""
    self.cache.clear()
    self.save_cache()

delete(key)

Remove a key from the cache.

Parameters:

Name Type Description Default
key

The cache key to delete.

required
Source code in shutils/cache.py
326
327
328
329
330
331
332
333
334
335
def delete(self, key):
    """Remove a key from the cache.

    Args:
        key: The cache key to delete.
    """
    hash_key = self._get_hash_str(key)
    if hash_key in self.cache:
        del self.cache[hash_key]
        self._trigger_auto_save()

get(key)

Retrieve a value by key, promoting it to the most-recent position.

Parameters:

Name Type Description Default
key

The cache key.

required

Returns:

Type Description

The cached value, or None if the key does not exist.

Source code in shutils/cache.py
297
298
299
300
301
302
303
304
305
306
307
308
309
310
def get(self, key):
    """Retrieve a value by key, promoting it to the most-recent position.

    Args:
        key: The cache key.

    Returns:
        The cached value, or None if the key does not exist.
    """
    hash_key = self._get_hash_str(key)
    if hash_key not in self.cache:
        return None
    self.cache.move_to_end(hash_key)
    return self.cache[hash_key]

set(key, value)

Store a key-value pair, moving it to the most-recent position.

Parameters:

Name Type Description Default
key

The cache key.

required
value

The value to store.

required
Source code in shutils/cache.py
312
313
314
315
316
317
318
319
320
321
322
323
324
def set(self, key, value):
    """Store a key-value pair, moving it to the most-recent position.

    Args:
        key: The cache key.
        value: The value to store.
    """
    hash_key = self._get_hash_str(key)
    if hash_key in self.cache:
        self.cache.move_to_end(hash_key)
    self.cache[hash_key] = value
    self.cleanup() # cleanup may evict entries
    self._trigger_auto_save() # run the auto-save check

PresistentMixin

Mixin that adds auto-saving persistence to cache stores.

Data is serialized with pickle and compressed with lzma (.pkl.xz). Supports step-based and interval-based auto-save, plus graceful shutdown via atexit and signal handlers.

Attributes:

Name Type Description
cache_file_path

Absolute path to the .pkl.xz persistence file.

save_step

Number of write operations between auto-saves. 0 disables.

save_interval

Seconds between auto-saves. 0 disables.

Source code in shutils/cache.py
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
class PresistentMixin:
    """Mixin that adds auto-saving persistence to cache stores.

    Data is serialized with pickle and compressed with lzma (``.pkl.xz``).
    Supports step-based and interval-based auto-save, plus graceful shutdown
    via ``atexit`` and signal handlers.

    Attributes:
        cache_file_path: Absolute path to the ``.pkl.xz`` persistence file.
        save_step: Number of write operations between auto-saves. 0 disables.
        save_interval: Seconds between auto-saves. 0 disables.
    """

    def __init__(self, cache_file: str | None = None, save_step: int = 0, save_interval: float = 0):
        """Initialize the persistence mixin.

        Args:
            cache_file: Path to the cache file. If provided, a ``.pkl.xz`` suffix
                is appended automatically.
            save_step: Save automatically after every *save_step* write operations.
                0 disables step-based auto-save.
            save_interval: Save automatically when this many seconds have elapsed
                since the last save. 0 disables interval-based auto-save.
        """
        self.cache_file_path = None
        if cache_file:
            self._init_path(cache_file)

        # Auto-save config
        self.save_step = save_step
        self.save_interval = save_interval

        # Internal state
        self._write_count = 0
        self._last_save_time = time.time()

        # Data container (Managed by subclasses, but initialized here for safety)
        if not hasattr(self, 'cache'):
            self.cache = {}

        # Handle Normal Exit & SIGINT/Ctrl+C
        atexit.register(self._handle_exit)
        signal.signal(signal.SIGTERM, self._signal_handler)
        signal.signal(signal.SIGINT, self._signal_handler)

    def _init_path(self, cache_file: str):
        path = Path(cache_file).absolute()
        # 强制添加 .pkl.xz 后缀,防止误操作
        if not path.name.endswith(".pkl.xz"):
            path = path.parent / f"{path.name}.pkl.xz"
        self.cache_file_path = path
        self.cache_file_path.parent.mkdir(parents=True, exist_ok=True)

    def load_cache(self):
        """Load cache data from disk if the persistence file exists.

        Merges loaded data into the existing ``cache`` dict. Resets the
        save timer on success.
        """
        if not self.cache_file_path or not self.cache_file_path.exists():
            return

        self.cache_file_md5 = ""
        try:
            self.cache_file_md5 = calculate_md5(self.cache_file_path)
            with lzma.open(self.cache_file_path, "rb") as f:
                loaded_data = pickle.load(f)
                # 兼容处理:确保加载的数据能正确update到当前实例
                if isinstance(self.cache, OrderedDict) and isinstance(loaded_data, dict):
                    self.cache.update(loaded_data)
                    # 如果是LRU,加载后可能需要重新move_to_end? 暂时简单update
                else:
                    self.cache = loaded_data

            self._last_save_time = time.time() # 重置计时器
            logger.info(f"[Cache]: Loaded from {self.cache_file_path}")
        except Exception as e:
            logger.error(f"[Cache]: Load failed: {e}")

    def save_cache(self):
        """Persist the cache to disk with optimistic-lock merge.

        If the file was modified externally (detected via MD5), disk data is
        merged with in-memory data before writing.
        """
        if not self.cache_file_path:
            return

        try:
            # 简单的乐观锁逻辑:检查文件是否被外部修改
            if self.cache_file_path.exists():
                current_md5 = calculate_md5(self.cache_file_path)
                if hasattr(self, "cache_file_md5") and current_md5 != self.cache_file_md5:
                    logger.warning("[Cache]: File changed on disk, merging...")
                    with lzma.open(self.cache_file_path, "rb") as f:
                        disk_cache = pickle.load(f)
                        # 保留内存中较新的修改,合并磁盘上的旧Key
                        disk_cache.update(self.cache)
                        self.cache = disk_cache

            with lzma.open(self.cache_file_path, "wb") as f:
                pickle.dump(self.cache, f)

            # 更新状态
            self.cache_file_md5 = calculate_md5(self.cache_file_path)
            self._last_save_time = time.time()
            self._write_count = 0
            logger.debug(f"[Cache]: Saved to {self.cache_file_path}")
        except Exception as e:
            logger.error(f"[Cache]: Save failed: {e}")

    def _trigger_auto_save(self):
        """Check whether auto-save conditions are met after a write operation."""
        if not self.cache_file_path:
            return

        should_save = False

        # 1. Check Step
        self._write_count += 1
        if self.save_step > 0 and self._write_count >= self.save_step:
            should_save = True
            logger.debug("[Cache]: Auto-save triggered by step count")

        # 2. Check Interval (Time)
        if not should_save and self.save_interval > 0 and time.time() - self._last_save_time >= self.save_interval:
            should_save = True
            logger.debug("[Cache]: Auto-save triggered by time interval")

        if should_save:
            self.save_cache()

    def _signal_handler(self, signum, frame):
        """Handle termination signals by saving the cache before exit."""
        logger.info(f"[Cache]: Received signal {signum}, saving to {self.cache_file_path}...")
        self.save_cache()
        sys.exit(0)

    def _handle_exit(self):
        logger.info(f"[Cache]: Program will exit, saving to {self.cache_file_path}...")
        self.save_cache()

    def _get_hash_str(self, key_str: str) -> str:
        # 直接生成 64位长度的字符串
        return hashlib.sha256(key_str.encode('utf-8')).hexdigest()

__init__(cache_file=None, save_step=0, save_interval=0)

Initialize the persistence mixin.

Parameters:

Name Type Description Default
cache_file str | None

Path to the cache file. If provided, a .pkl.xz suffix is appended automatically.

None
save_step int

Save automatically after every save_step write operations. 0 disables step-based auto-save.

0
save_interval float

Save automatically when this many seconds have elapsed since the last save. 0 disables interval-based auto-save.

0
Source code in shutils/cache.py
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
def __init__(self, cache_file: str | None = None, save_step: int = 0, save_interval: float = 0):
    """Initialize the persistence mixin.

    Args:
        cache_file: Path to the cache file. If provided, a ``.pkl.xz`` suffix
            is appended automatically.
        save_step: Save automatically after every *save_step* write operations.
            0 disables step-based auto-save.
        save_interval: Save automatically when this many seconds have elapsed
            since the last save. 0 disables interval-based auto-save.
    """
    self.cache_file_path = None
    if cache_file:
        self._init_path(cache_file)

    # Auto-save config
    self.save_step = save_step
    self.save_interval = save_interval

    # Internal state
    self._write_count = 0
    self._last_save_time = time.time()

    # Data container (Managed by subclasses, but initialized here for safety)
    if not hasattr(self, 'cache'):
        self.cache = {}

    # Handle Normal Exit & SIGINT/Ctrl+C
    atexit.register(self._handle_exit)
    signal.signal(signal.SIGTERM, self._signal_handler)
    signal.signal(signal.SIGINT, self._signal_handler)

load_cache()

Load cache data from disk if the persistence file exists.

Merges loaded data into the existing cache dict. Resets the save timer on success.

Source code in shutils/cache.py
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
def load_cache(self):
    """Load cache data from disk if the persistence file exists.

    Merges loaded data into the existing ``cache`` dict. Resets the
    save timer on success.
    """
    if not self.cache_file_path or not self.cache_file_path.exists():
        return

    self.cache_file_md5 = ""
    try:
        self.cache_file_md5 = calculate_md5(self.cache_file_path)
        with lzma.open(self.cache_file_path, "rb") as f:
            loaded_data = pickle.load(f)
            # Compatibility: make sure the loaded data merges correctly into this instance
            if isinstance(self.cache, OrderedDict) and isinstance(loaded_data, dict):
                self.cache.update(loaded_data)
                # NOTE: for LRU the recency order may need re-establishing via move_to_end; plain update for now
            else:
                self.cache = loaded_data

        self._last_save_time = time.time() # reset the save timer
        logger.info(f"[Cache]: Loaded from {self.cache_file_path}")
    except Exception as e:
        logger.error(f"[Cache]: Load failed: {e}")

save_cache()

Persist the cache to disk with optimistic-lock merge.

If the file was modified externally (detected via MD5), disk data is merged with in-memory data before writing.

Source code in shutils/cache.py
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
def save_cache(self):
    """Persist the cache to disk with optimistic-lock merge.

    If the file was modified externally (detected via MD5), disk data is
    merged with in-memory data before writing.
    """
    if not self.cache_file_path:
        return

    try:
        # Simple optimistic-lock check: has the file been modified externally?
        if self.cache_file_path.exists():
            current_md5 = calculate_md5(self.cache_file_path)
            if hasattr(self, "cache_file_md5") and current_md5 != self.cache_file_md5:
                logger.warning("[Cache]: File changed on disk, merging...")
                with lzma.open(self.cache_file_path, "rb") as f:
                    disk_cache = pickle.load(f)
                    # Keep the newer in-memory values; merge in keys that only exist on disk
                    disk_cache.update(self.cache)
                    self.cache = disk_cache

        with lzma.open(self.cache_file_path, "wb") as f:
            pickle.dump(self.cache, f)

        # Refresh bookkeeping state
        self.cache_file_md5 = calculate_md5(self.cache_file_path)
        self._last_save_time = time.time()
        self._write_count = 0
        logger.debug(f"[Cache]: Saved to {self.cache_file_path}")
    except Exception as e:
        logger.error(f"[Cache]: Save failed: {e}")

StableCacheEncoder

Bases: JSONEncoder

JSON encoder that handles dataclasses, Pydantic models, enums, and datetimes.

Produces stable, deterministic output by serializing objects into dictionary representations without memory addresses.

Source code in shutils/cache.py
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
class StableCacheEncoder(json.JSONEncoder):
    """JSON encoder that handles dataclasses, Pydantic models, enums, and datetimes.

    Produces stable, deterministic output by serializing objects into
    dictionary representations without memory addresses.
    """

    def default(self, o: Any) -> Any:
        """Serialize a non-standard object to a JSON-compatible type.

        Args:
            o: The object to serialize.

        Returns:
            A JSON-serializable representation of the object.
        """
        # 1. Dataclass instances. is_dataclass() is also True for dataclass
        # *types*, and asdict() raises TypeError on those, so exclude them.
        if dataclasses.is_dataclass(o) and not isinstance(o, type):
            return asdict(o)

        # 2. Pydantic models (compatible with both v1 and v2)
        if hasattr(o, "model_dump"):  # Pydantic v2
            return o.model_dump()
        if hasattr(o, "dict") and callable(o.dict):  # Pydantic v1
            return o.dict()

        # 3. Enums serialize as their underlying value
        if isinstance(o, Enum):
            return o.value

        # 4. Dates/datetimes as ISO-8601 strings
        if isinstance(o, (date, datetime)):
            return o.isoformat()

        # 5. Generic objects carrying __dict__: use attribute state only, so
        # the output never embeds a memory address ("object at 0x...").
        if hasattr(o, "__dict__"):
            return o.__dict__

        # 6. Last resort: defer to the base encoder; if it cannot serialize
        # the object either, fall back to str() (may still be unstable for
        # complex objects whose repr embeds addresses).
        try:
            return super().default(o)
        except TypeError:
            return str(o)

default(o)

Serialize a non-standard object to a JSON-compatible type.

Parameters:

Name Type Description Default
o Any

The object to serialize.

required

Returns:

Type Description
Any

A JSON-serializable representation of the object.

Source code in shutils/cache.py
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
def default(self, o: Any) -> Any:
    """Serialize a non-standard object to a JSON-compatible type.

    Args:
        o: The object to serialize.

    Returns:
        A JSON-serializable representation of the object.
    """
    # 1. Dataclass instances
    if dataclasses.is_dataclass(o):
        return asdict(o)

    # 2. Pydantic models (compatible with both v1 and v2)
    if hasattr(o, "model_dump"):  # Pydantic v2
        return o.model_dump()
    if hasattr(o, "dict") and callable(o.dict):  # Pydantic v1
        return o.dict()

    # 3. Enums serialize as their underlying value
    if isinstance(o, Enum):
        return o.value

    # 4. Dates/datetimes as ISO-8601 strings
    if isinstance(o, (date, datetime)):
        return o.isoformat()

    # 5. Generic objects carrying __dict__ (strips "object at 0x...")
    # This is what makes arbitrary objects like a "Chat object" serializable
    if hasattr(o, "__dict__"):
        # Take only the attribute state, ignoring the memory address
        return o.__dict__

    # 6. Last resort: defer to the base encoder, then fall back to str()
    # (could regex-strip addresses, but str(o) may still be unstable for complex objects)
    try:
        return super().default(o)
    except TypeError:
        return str(o)

TTLCache

Bases: PresistentMixin

A key-value cache with per-entry time-to-live expiration.

Keys are hashed with SHA-256 before storage.

Attributes:

Name Type Description
ttl

Default time-to-live in seconds for cache entries.

Source code in shutils/cache.py
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
class TTLCache(PresistentMixin):
    """A key-value cache with per-entry time-to-live expiration.

    Keys are hashed with SHA-256 before storage.

    Attributes:
        ttl: Default time-to-live in seconds for cache entries.
    """

    def __init__(self, ttl: float = 300, cache_file: str | None = None, save_step: int = 0, save_interval: float = 0):
        """Initialize the TTL cache.

        Args:
            ttl: Default time-to-live in seconds for entries.
            cache_file: Path to the persistence file.
            save_step: Auto-save after this many writes.
            save_interval: Auto-save after this many seconds.
        """
        self.cache: dict[str, tuple[Any, float]] = {}
        # Initialize the mixin first so the path and auto-save params are set.
        super().__init__(cache_file, save_step, save_interval)
        self.ttl = ttl
        self.load_cache()  # load persisted entries at startup
        self.cleanup()

    def get(self, key: str) -> Any | None:
        """Retrieve a value by key, returning None if missing or expired.

        Args:
            key: The cache key.

        Returns:
            The cached value, or None if the key does not exist or has expired.
        """
        hash_key = self._get_hash_str(key)
        if hash_key not in self.cache:
            return None
        value, expiry = self.cache[hash_key]
        if time.time() > expiry:
            self.delete(key)  # expiry-triggered delete also runs the auto-save check
            return None
        return value

    def set(self, key: str, value: Any, ttl: float | None = None) -> None:
        """Store a value with an optional per-key TTL.

        Args:
            key: The cache key.
            value: The value to store.
            ttl: Time-to-live in seconds. Falls back to the instance default.
        """
        ttl = ttl if ttl is not None else self.ttl
        expiry = time.time() + ttl
        hash_key = self._get_hash_str(key)
        self.cache[hash_key] = (value, expiry)
        self._trigger_auto_save()  # run the auto-save check

    def delete(self, key: str) -> None:
        """Remove a key from the cache.

        Args:
            key: The cache key to delete.
        """
        hash_key = self._get_hash_str(key)
        if hash_key in self.cache:
            del self.cache[hash_key]
            self._trigger_auto_save()

    def clear(self) -> None:
        """Remove all entries from the cache and persist immediately."""
        self.cache.clear()
        self.save_cache()  # clearing is a major change; force an immediate save

    def cleanup(self) -> None:
        """Remove all expired entries from the cache."""
        # Snapshot the clock once so every entry is judged against the same
        # instant, instead of re-reading time.time() per entry.
        now = time.time()
        expired_keys = [k for k, (_, exp) in self.cache.items() if now > exp]
        for k in expired_keys:
            del self.cache[k]
        if expired_keys:
            self._trigger_auto_save()  # evictions count as writes

__init__(ttl=300, cache_file=None, save_step=0, save_interval=0)

Initialize the TTL cache.

Parameters:

Name Type Description Default
ttl float

Default time-to-live in seconds for entries.

300
cache_file str | None

Path to the persistence file.

None
save_step int

Auto-save after this many writes.

0
save_interval float

Auto-save after this many seconds.

0
Source code in shutils/cache.py
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
def __init__(self, ttl: float = 300, cache_file: str | None = None, save_step: int = 0, save_interval: float = 0):
    """Initialize the TTL cache.

    Args:
        ttl: Default time-to-live in seconds for entries.
        cache_file: Path to the persistence file.
        save_step: Auto-save after this many writes.
        save_interval: Auto-save after this many seconds.
    """
    self.cache: dict[str, tuple[Any, float]] = {}
    # Initialize the mixin first so the path and auto-save params are set
    super().__init__(cache_file, save_step, save_interval)
    self.ttl = ttl
    self.load_cache() # load persisted entries at startup
    self.cleanup()

cleanup()

Remove all expired entries from the cache.

Source code in shutils/cache.py
263
264
265
266
267
268
269
def cleanup(self) -> None:
    """Remove all expired entries from the cache."""
    expired_keys = [k for k, (_, exp) in self.cache.items() if time.time() > exp]
    for k in expired_keys:
        del self.cache[k]
    if expired_keys:
        self._trigger_auto_save() # evictions count as writes

clear()

Remove all entries from the cache and persist immediately.

Source code in shutils/cache.py
258
259
260
261
def clear(self) -> None:
    """Remove all entries from the cache and persist immediately."""
    self.cache.clear()
    self.save_cache() # clearing is a major change; force an immediate save

delete(key)

Remove a key from the cache.

Parameters:

Name Type Description Default
key str

The cache key to delete.

required
Source code in shutils/cache.py
247
248
249
250
251
252
253
254
255
256
def delete(self, key: str) -> None:
    """Remove a key from the cache.

    Args:
        key: The cache key to delete.
    """
    hash_key = self._get_hash_str(key)
    if hash_key in self.cache:
        del self.cache[hash_key]
        self._trigger_auto_save()

get(key)

Retrieve a value by key, returning None if missing or expired.

Parameters:

Name Type Description Default
key str

The cache key.

required

Returns:

Type Description
Any | None

The cached value, or None if the key does not exist or has expired.

Source code in shutils/cache.py
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
def get(self, key: str) -> Any | None:
    """Retrieve a value by key, returning None if missing or expired.

    Args:
        key: The cache key.

    Returns:
        The cached value, or None if the key does not exist or has expired.
    """
    hash_key = self._get_hash_str(key)
    if hash_key not in self.cache:
        return None
    value, expiry = self.cache[hash_key]
    if time.time() > expiry:
        self.delete(key) # expiry-triggered delete also runs the auto-save check
        return None
    return value

set(key, value, ttl=None)

Store a value with an optional per-key TTL.

Parameters:

Name Type Description Default
key str

The cache key.

required
value Any

The value to store.

required
ttl float | None

Time-to-live in seconds. Falls back to the instance default.

None
Source code in shutils/cache.py
233
234
235
236
237
238
239
240
241
242
243
244
245
def set(self, key: str, value: Any, ttl: float | None = None) -> None:
    """Store a value with an optional per-key TTL.

    Args:
        key: The cache key.
        value: The value to store.
        ttl: Time-to-live in seconds. Falls back to the instance default.
    """
    ttl = ttl if ttl is not None else self.ttl
    expiry = time.time() + ttl
    hash_key = self._get_hash_str(key)
    self.cache[hash_key] = (value, expiry)
    self._trigger_auto_save() # run the auto-save check

cache_async_wrapper(cache, func, key=None, ignore_self=True)

Wrap an async function with cache look-aside logic.

Parameters:

Name Type Description Default
cache TTLCache | LRUCache

The cache backend instance.

required
func Callable[T_ParamSpec, Awaitable[T_Retval]]

The async function to wrap.

required
key str | None

Optional fixed cache key. If None, a key is derived from arguments.

None
ignore_self bool

If True, exclude the first argument when building the key.

True

Returns:

Type Description
Callable[T_ParamSpec, Awaitable[T_Retval]]

The wrapped async function.

Source code in shutils/cache.py
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
def cache_async_wrapper[**T_ParamSpec, T_Retval](
    cache: TTLCache | LRUCache,
    func: Callable[T_ParamSpec, Awaitable[T_Retval]],
    key: str | None = None,
    ignore_self: bool = True,
) -> Callable[T_ParamSpec, Awaitable[T_Retval]]:
    """Wrap an async function with cache look-aside logic.

    Args:
        cache: The cache backend instance.
        func: The async function to wrap.
        key: Optional fixed cache key. If None, a key is derived from arguments.
        ignore_self: If True, exclude the first argument when building the key.

    Returns:
        The wrapped async function.
    """
    @wraps(func)
    async def async_wrapper(*args: T_ParamSpec.args, **kwargs: T_ParamSpec.kwargs) -> T_Retval:
        if os.environ.get("DISABLE_CACHE"):
            return await func(*args, **kwargs)
        cache_key = key if key is not None else make_stable_key(
            get_callable_info(func), args, kwargs, ignore_self=ignore_self
        )
        cache_result = cache.get(cache_key)
        logger.debug(f"[Cache]: async_wrapper func={func} hit={cache_result is not None}")
        if cache_result is not None:
            return cache_result
        result = await func(*args, **kwargs)
        cache.set(cache_key, result)
        return result
    return async_wrapper

cache_sync_wrapper(cache, func, key=None, ignore_self=False)

Wrap a synchronous function with cache look-aside logic.

Parameters:

Name Type Description Default
cache TTLCache | LRUCache

The cache backend instance.

required
func Callable[T_ParamSpec, T_Retval]

The synchronous function to wrap.

required
key str | None

Optional fixed cache key. If None, a key is derived from arguments.

None
ignore_self bool

If True, exclude the first argument when building the key.

False

Returns:

Type Description
Callable[T_ParamSpec, T_Retval]

The wrapped function.

Source code in shutils/cache.py
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
def cache_sync_wrapper[**T_ParamSpec, T_Retval](
    cache: TTLCache | LRUCache, func: Callable[T_ParamSpec, T_Retval], key: str | None = None, ignore_self: bool = False
) -> Callable[T_ParamSpec, T_Retval]:
    """Wrap a synchronous function with cache look-aside logic.

    Args:
        cache: The cache backend instance.
        func: The synchronous function to wrap.
        key: Optional fixed cache key. If None, a key is derived from arguments.
        ignore_self: If True, exclude the first argument when building the key.

    Returns:
        The wrapped function.
    """
    @wraps(func)
    def sync_wrapper(*args: T_ParamSpec.args, **kwargs: T_ParamSpec.kwargs) -> T_Retval:
        if os.environ.get("DISABLE_CACHE"):
            return func(*args, **kwargs)
        cache_key = key if key is not None else make_stable_key(
            get_callable_info(func), args, kwargs, ignore_self=ignore_self
        )
        cache_result = cache.get(cache_key)
        logger.debug(f"[Cache]: sync_wrapper func={func} hit={cache_result is not None}")
        if cache_result is not None:
            return cache_result
        result = func(*args, **kwargs)
        cache.set(cache_key, result)
        return result
    return sync_wrapper

cached(backend='ttl', ignore_self=False, **kwargs)

Decorator factory that caches function results using a TTL or LRU backend.

Parameters:

Name Type Description Default
backend Literal['ttl', 'lru']

Cache backend type, either "ttl" or "lru".

'ttl'
ignore_self bool

If True, exclude the first argument when building cache keys.

False
**kwargs

Additional keyword arguments forwarded to the cache constructor. Common options include ttl, max_size, cache_file, save_step, and save_interval.

{}

Returns:

Type Description

A decorator that wraps the target function with caching.

Source code in shutils/cache.py
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
def cached(
    backend: Literal["ttl", "lru"] = "ttl",
    ignore_self: bool = False,
    **kwargs
):
    """Decorator factory that caches function results using a TTL or LRU backend.

    Args:
        backend: Cache backend type, either ``"ttl"`` or ``"lru"``.
        ignore_self: If True, exclude the first argument when building cache keys.
        **kwargs: Additional keyword arguments forwarded to the cache constructor.
            Common options include ``ttl``, ``max_size``, ``cache_file``,
            ``save_step``, and ``save_interval``.

    Returns:
        A decorator that wraps the target function with caching.
    """

    # 统一将 auto-save 参数放入 kwargs 传给 Cache 构造函数
    cache_instance: TTLCache | LRUCache

    if backend == "ttl":
        cache_instance = TTLCache(**kwargs)
    elif backend == "lru":
        cache_instance = LRUCache(**kwargs)
    else:
        raise ValueError(f"Unsupported backend: {backend}")

    def decorator(func: Callable[T_ParamSpec, T_Retval] | Callable[T_ParamSpec, Awaitable[T_Retval]]) -> Any:
        wrapper: Callable
        if inspect.iscoroutinefunction(func):
            wrapper = cache_async_wrapper(cache_instance, func, ignore_self=ignore_self) # type: ignore
        else:
            wrapper = cache_sync_wrapper(cache_instance, func, ignore_self=ignore_self) # type: ignore

        wrapper.cache = cache_instance
        return wrapper

    return decorator

make_stable_key(func_info, args, kwargs, ignore_self=False)

Generate a deterministic cache key from function info and arguments.

Parameters:

Name Type Description Default
func_info str

Fully qualified callable name.

required
args tuple

Positional arguments to the function.

required
kwargs dict

Keyword arguments to the function.

required
ignore_self bool

If True, drop the first positional argument (typically self).

False

Returns:

Type Description
str

A JSON string that uniquely and deterministically identifies the call.

Source code in shutils/cache.py
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
def make_stable_key(func_info: str, args: tuple, kwargs: dict, ignore_self: bool = False) -> str:
    """Generate a deterministic cache key from function info and arguments.

    Args:
        func_info: Fully qualified callable name.
        args: Positional arguments to the function.
        kwargs: Keyword arguments to the function.
        ignore_self: If True, drop the first positional argument (typically ``self``).

    Returns:
        A JSON string that uniquely and deterministically identifies the call.
    """
    if ignore_self and args:
        # Drop the first positional argument (usually ``self``).
        args = args[1:]

    # Bundle everything that identifies the call into one structure.
    key_data = {
        "func": func_info,
        "args": args,
        "kwargs": kwargs,
    }

    # Core idea: JSON-serialize with sort_keys=True so dict ordering can
    # never change the key; compact separators keep the key short.
    try:
        json_str = json.dumps(
            key_data,
            cls=StableCacheEncoder,
            sort_keys=True,
            separators=(',', ':'),
            ensure_ascii=False,
        )
    except Exception as e:
        # Last-resort fallback: repr-based keys are not guaranteed stable
        # across runs, so a hit may be missed. (logger.warn is a deprecated
        # alias of logger.warning.)
        logger.warning("[Cache Warning]: Key serialization failed: %s", e)
        json_str = f"{func_info}-{args!s}-{kwargs!s}"

    return json_str