PACS System 0.1.0
PACS DICOM system library
Loading...
Searching...
No Matches
simple_lru_cache.h
Go to the documentation of this file.
1// BSD 3-Clause License
2// Copyright (c) 2021-2025, 🍀☀🌕🌥 🌊
3// See the LICENSE file in the project root for full license information.
4
22#pragma once
23
24#include <atomic>
25#include <chrono>
26#include <cstdint>
27#include <functional>
28#include <list>
29#include <memory>
30#include <mutex>
31#include <optional>
32#include <shared_mutex>
33#include <string>
34#include <unordered_map>
35
37
38// ─────────────────────────────────────────────────────
39// Forward Declarations
40// ─────────────────────────────────────────────────────
41
42template <typename Key, typename Value, typename Hash, typename KeyEqual>
43class simple_lru_cache;
44
45// ─────────────────────────────────────────────────────
46// Cache Configuration
47// ─────────────────────────────────────────────────────
48
55 std::size_t max_size{1000};
56
58 std::chrono::seconds ttl{300};
59
61 bool enable_metrics{true};
62
64 std::string cache_name{"lru_cache"};
65};
66
67// ─────────────────────────────────────────────────────
68// Cache Statistics
69// ─────────────────────────────────────────────────────
70
79 std::atomic<std::uint64_t> hits{0};
80 std::atomic<std::uint64_t> misses{0};
81 std::atomic<std::uint64_t> insertions{0};
82 std::atomic<std::uint64_t> evictions{0};
83 std::atomic<std::uint64_t> expirations{0};
84 std::atomic<std::size_t> current_size{0};
85
90 [[nodiscard]] double hit_rate() const noexcept {
91 const auto total_hits = hits.load(std::memory_order_relaxed);
92 const auto total_misses = misses.load(std::memory_order_relaxed);
93 const auto total = total_hits + total_misses;
94 if (total == 0) {
95 return 0.0;
96 }
97 return (static_cast<double>(total_hits) / static_cast<double>(total)) * 100.0;
98 }
99
104 [[nodiscard]] std::uint64_t total_accesses() const noexcept {
105 return hits.load(std::memory_order_relaxed) +
106 misses.load(std::memory_order_relaxed);
107 }
108
112 void reset() noexcept {
113 hits.store(0, std::memory_order_relaxed);
114 misses.store(0, std::memory_order_relaxed);
115 insertions.store(0, std::memory_order_relaxed);
116 evictions.store(0, std::memory_order_relaxed);
117 expirations.store(0, std::memory_order_relaxed);
118 // Note: current_size is not reset as it reflects actual cache state
119 }
120};
121
122// ─────────────────────────────────────────────────────
123// LRU Cache Implementation
124// ─────────────────────────────────────────────────────
125
173template <typename Key,
174 typename Value,
175 typename Hash = std::hash<Key>,
176 typename KeyEqual = std::equal_to<Key>>
178public:
    // Public type aliases following standard-container naming conventions.
    using key_type = Key;
    using value_type = Value;
    using size_type = std::size_t;
    // steady_clock is monotonic, so TTL checks are immune to wall-clock jumps.
    using clock_type = std::chrono::steady_clock;
    using time_point = clock_type::time_point;
184
185 // =========================================================================
186 // Construction
187 // =========================================================================
188
197
203 simple_lru_cache(size_type max_size, std::chrono::seconds ttl)
204 : max_size_(max_size > 0 ? max_size : 1) {
206 config_.ttl = ttl;
207 }
208
212
215 std::unique_lock lock(other.mutex_);
216 config_ = std::move(other.config_);
217 max_size_ = other.max_size_;
218 lru_list_ = std::move(other.lru_list_);
219 cache_map_ = std::move(other.cache_map_);
220 // Stats are not moved (fresh start for moved-to instance)
221 }
222
224 if (this != &other) {
225 std::unique_lock lock1(mutex_, std::defer_lock);
226 std::unique_lock lock2(other.mutex_, std::defer_lock);
227 std::lock(lock1, lock2);
228
229 config_ = std::move(other.config_);
230 max_size_ = other.max_size_;
231 lru_list_ = std::move(other.lru_list_);
232 cache_map_ = std::move(other.cache_map_);
233 }
234 return *this;
235 }
236
    /// Default destructor: RAII members (list, map, mutex) clean up themselves.
    ~simple_lru_cache() = default;
238
239 // =========================================================================
240 // Cache Operations
241 // =========================================================================
242
253 [[nodiscard]] std::optional<Value> get(const Key& key) {
254 std::unique_lock lock(mutex_);
255
256 auto it = cache_map_.find(key);
257 if (it == cache_map_.end()) {
258 // Cache miss
259 stats_.misses.fetch_add(1, std::memory_order_relaxed);
260 return std::nullopt;
261 }
262
263 auto& entry = it->second;
264
265 // Check TTL expiration
266 if (is_expired(entry->expiry_time)) {
267 // Entry expired - remove it
268 stats_.expirations.fetch_add(1, std::memory_order_relaxed);
269 stats_.misses.fetch_add(1, std::memory_order_relaxed);
270 remove_entry(it);
271 return std::nullopt;
272 }
273
274 // Cache hit - move to front (most recently used)
275 stats_.hits.fetch_add(1, std::memory_order_relaxed);
276 lru_list_.splice(lru_list_.begin(), lru_list_, entry);
277
278 return entry->value;
279 }
280
290 void put(const Key& key, const Value& value) {
291 std::unique_lock lock(mutex_);
292
293 auto it = cache_map_.find(key);
294 if (it != cache_map_.end()) {
295 // Update existing entry
296 auto& entry = it->second;
297 entry->value = value;
298 entry->expiry_time = calculate_expiry();
299 lru_list_.splice(lru_list_.begin(), lru_list_, entry);
300 return;
301 }
302
303 // Evict if at capacity
304 while (cache_map_.size() >= max_size_ && !lru_list_.empty()) {
305 evict_oldest();
306 }
307
308 // Insert new entry at front
309 lru_list_.emplace_front(cache_entry{key, value, calculate_expiry()});
310 cache_map_[key] = lru_list_.begin();
311
312 stats_.insertions.fetch_add(1, std::memory_order_relaxed);
313 stats_.current_size.store(cache_map_.size(), std::memory_order_relaxed);
314 }
315
322 void put(const Key& key, Value&& value) {
323 std::unique_lock lock(mutex_);
324
325 auto it = cache_map_.find(key);
326 if (it != cache_map_.end()) {
327 // Update existing entry
328 auto& entry = it->second;
329 entry->value = std::move(value);
330 entry->expiry_time = calculate_expiry();
331 lru_list_.splice(lru_list_.begin(), lru_list_, entry);
332 return;
333 }
334
335 // Evict if at capacity
336 while (cache_map_.size() >= max_size_ && !lru_list_.empty()) {
337 evict_oldest();
338 }
339
340 // Insert new entry at front
341 lru_list_.emplace_front(cache_entry{key, std::move(value), calculate_expiry()});
342 cache_map_[key] = lru_list_.begin();
343
344 stats_.insertions.fetch_add(1, std::memory_order_relaxed);
345 stats_.current_size.store(cache_map_.size(), std::memory_order_relaxed);
346 }
347
357 [[nodiscard]] bool contains(const Key& key) const {
358 std::shared_lock lock(mutex_);
359 return cache_map_.find(key) != cache_map_.end();
360 }
361
368 bool invalidate(const Key& key) {
369 std::unique_lock lock(mutex_);
370
371 auto it = cache_map_.find(key);
372 if (it == cache_map_.end()) {
373 return false;
374 }
375
376 remove_entry(it);
377 return true;
378 }
379
405 template <typename Predicate>
406 size_type invalidate_if(Predicate pred) {
407 std::unique_lock lock(mutex_);
408
409 size_type removed = 0;
410 auto it = lru_list_.begin();
411
412 while (it != lru_list_.end()) {
413 if (pred(it->key, it->value)) {
414 cache_map_.erase(it->key);
415 it = lru_list_.erase(it);
416 ++removed;
417 } else {
418 ++it;
419 }
420 }
421
422 stats_.current_size.store(cache_map_.size(), std::memory_order_relaxed);
423 return removed;
424 }
425
429 void clear() {
430 std::unique_lock lock(mutex_);
431
432 lru_list_.clear();
433 cache_map_.clear();
434 stats_.current_size.store(0, std::memory_order_relaxed);
435 }
436
446 std::unique_lock lock(mutex_);
447
448 size_type removed = 0;
449 const auto now = clock_type::now();
450
451 auto it = lru_list_.begin();
452 while (it != lru_list_.end()) {
453 if (is_expired(it->expiry_time, now)) {
454 cache_map_.erase(it->key);
455 it = lru_list_.erase(it);
456 ++removed;
457 stats_.expirations.fetch_add(1, std::memory_order_relaxed);
458 } else {
459 ++it;
460 }
461 }
462
463 stats_.current_size.store(cache_map_.size(), std::memory_order_relaxed);
464 return removed;
465 }
466
467 // =========================================================================
468 // Cache Information
469 // =========================================================================
470
475 [[nodiscard]] size_type size() const {
476 std::shared_lock lock(mutex_);
477 return cache_map_.size();
478 }
479
484 [[nodiscard]] bool empty() const {
485 std::shared_lock lock(mutex_);
486 return cache_map_.empty();
487 }
488
493 [[nodiscard]] size_type max_size() const noexcept {
494 return max_size_;
495 }
496
501 [[nodiscard]] std::chrono::seconds ttl() const noexcept {
502 return config_.ttl;
503 }
504
509 [[nodiscard]] const std::string& name() const noexcept {
510 return config_.cache_name;
511 }
512
517 [[nodiscard]] const cache_config& config() const noexcept {
518 return config_;
519 }
520
521 // =========================================================================
522 // Statistics
523 // =========================================================================
524
529 [[nodiscard]] const cache_stats& stats() const noexcept {
530 return stats_;
531 }
532
537 [[nodiscard]] double hit_rate() const noexcept {
538 return stats_.hit_rate();
539 }
540
546 void reset_stats() noexcept {
547 stats_.reset();
548 }
549
550private:
551 // =========================================================================
552 // Internal Types
553 // =========================================================================
554
560
    // Doubly-linked recency list: front = most recently used, back = LRU.
    using list_type = std::list<cache_entry>;
    // std::list iterators stay valid across splice and erasure of OTHER
    // nodes, which is what allows the map to store iterators into the list.
    using list_iterator = typename list_type::iterator;
    using map_type = std::unordered_map<Key, list_iterator, Hash, KeyEqual>;
564
565 // =========================================================================
566 // Internal Helpers
567 // =========================================================================
568
569 [[nodiscard]] time_point calculate_expiry() const {
570 return clock_type::now() + config_.ttl;
571 }
572
573 [[nodiscard]] bool is_expired(time_point expiry) const {
574 return clock_type::now() > expiry;
575 }
576
577 [[nodiscard]] bool is_expired(time_point expiry, time_point now) const {
578 return now > expiry;
579 }
580
582 // Evict from the back (least recently used)
583 if (lru_list_.empty()) {
584 return;
585 }
586
587 auto& oldest = lru_list_.back();
588 cache_map_.erase(oldest.key);
589 lru_list_.pop_back();
590
591 stats_.evictions.fetch_add(1, std::memory_order_relaxed);
592 stats_.current_size.store(cache_map_.size(), std::memory_order_relaxed);
593 }
594
595 void remove_entry(typename map_type::iterator map_it) {
596 lru_list_.erase(map_it->second);
597 cache_map_.erase(map_it);
598 stats_.current_size.store(cache_map_.size(), std::memory_order_relaxed);
599 }
600
601 // =========================================================================
602 // Member Variables
603 // =========================================================================
604
607
608 mutable std::shared_mutex mutex_;
611
613};
614
615// ─────────────────────────────────────────────────────
616// Type Aliases for Common Use Cases
617// ─────────────────────────────────────────────────────
618
625template <typename Value>
627
628} // namespace kcenon::pacs::services::cache
Thread-safe LRU cache with TTL support.
std::chrono::seconds ttl() const noexcept
Get the TTL duration.
size_type max_size() const noexcept
Get the maximum cache size.
void put(const Key &key, const Value &value)
Store a value in the cache.
const cache_config & config() const noexcept
Get the cache configuration.
const cache_stats & stats() const noexcept
Get cache statistics.
const std::string & name() const noexcept
Get the cache name (for metrics identification)
void clear()
Remove all entries from the cache.
bool is_expired(time_point expiry, time_point now) const
simple_lru_cache(const cache_config &config=cache_config{})
Construct a cache with the given configuration.
bool invalidate(const Key &key)
Remove a specific entry from the cache.
std::unordered_map< Key, list_iterator, Hash, KeyEqual > map_type
bool empty() const
Check if the cache is empty.
void remove_entry(typename map_type::iterator map_it)
bool contains(const Key &key) const
Check if a key exists in the cache (without affecting LRU order)
double hit_rate() const noexcept
Get the cache hit rate.
size_type purge_expired()
Remove all expired entries from the cache.
size_type size() const
Get the current number of entries in the cache.
void put(const Key &key, Value &&value)
Store a value in the cache (move semantics)
simple_lru_cache(simple_lru_cache &&other) noexcept
Movable.
simple_lru_cache(const simple_lru_cache &)=delete
Non-copyable.
std::optional< Value > get(const Key &key)
Retrieve a value from the cache.
void reset_stats() noexcept
Reset cache statistics.
simple_lru_cache(size_type max_size, std::chrono::seconds ttl)
Construct a cache with size and TTL.
simple_lru_cache & operator=(const simple_lru_cache &)=delete
simple_lru_cache & operator=(simple_lru_cache &&other) noexcept
Configuration options for the LRU cache.
bool enable_metrics
Enable metrics collection for hit/miss tracking.
std::size_t max_size
Maximum number of entries in the cache.
std::string cache_name
Name for metrics identification (e.g., "query_cache", "study_cache")
std::chrono::seconds ttl
Time-To-Live for cache entries in seconds (default: 300 = 5 minutes)
Statistics for cache performance monitoring.
void reset() noexcept
Reset all statistics to zero.
std::atomic< std::uint64_t > hits
Number of cache hits.
std::atomic< std::uint64_t > evictions
Number of LRU evictions.
double hit_rate() const noexcept
Calculate the cache hit rate.
std::atomic< std::size_t > current_size
Current number of entries.
std::uint64_t total_accesses() const noexcept
Get total number of cache accesses (hits + misses)
std::atomic< std::uint64_t > expirations
Number of TTL expirations.
std::atomic< std::uint64_t > misses
Number of cache misses.
std::atomic< std::uint64_t > insertions
Number of insertions.
Key key
Value value
time_point expiry_time