|
| 1 | +// Copyright (c) 2021 Veil developers |
| 2 | +// Distributed under the MIT software license, see the accompanying |
| 3 | +// file COPYING or http://www.opensource.org/licenses/mit-license.php. |
| 4 | + |
| 5 | +#ifndef VEIL_LRU_CACHE_H |
| 6 | +#define VEIL_LRU_CACHE_H |
| 7 | + |
#include "sync.h"

#include <cstddef>
#include <list>
#include <unordered_map>
#include <utility>
| 13 | + |
| 14 | +namespace veil { |
| 15 | + |
| 16 | +/** |
| 17 | + * The SimpleLRUCache is a fixed-size key-value map that automatically |
| 18 | + * evicts the least-recently-used item when a new item is added while the cache is full. |
| 19 | + * |
| 20 | + * This is a naive, non-optimized implementation of an LRU cache that uses an |
| 21 | + * internal mutex to prevent concurrent access. |
| 22 | + * |
| 23 | + * K must be a hashable type, but you can define your own Hash for it, or equivalently implement |
| 24 | + * std::hash<K> for your type. It must be a default-constructible struct or class |
| 25 | + * defining std::size_t operator()(const K&) const, e.g. |
| 26 | + * |
| 27 | + * namespace std { |
| 28 | + * template<> struct hash<MyType> |
| 29 | + * { |
| 30 | + * std::size_t operator()(const MyType& m) const { |
| 31 | + * return std::hash<std::string>()(m.ImportantString()) ^ m.ImportantInteger; |
| 32 | + * } |
| 33 | + * }; |
| 34 | + * } |
| 35 | + * SimpleLRUCache<MyType, MyValue> cache(100); |
| 36 | + */ |
| 37 | +template<typename K, typename V = K, class Hash = std::hash<K>> |
| 38 | +class SimpleLRUCache |
| 39 | +{ |
| 40 | + |
| 41 | +private: |
| 42 | + std::list<K> items; |
| 43 | + std::unordered_map<K, std::pair<V, typename std::list<K>::iterator>, Hash> keyValuesMap; |
| 44 | + int csize; |
| 45 | + CCriticalSection cs_mycache; |
| 46 | + |
| 47 | +public: |
| 48 | + SimpleLRUCache(int s) : csize(s < 1 ? 10 : s), keyValuesMap(csize) {} |
| 49 | + |
| 50 | + void set(const K key, const V value) { |
| 51 | + LOCK(cs_mycache); |
| 52 | + auto pos = keyValuesMap.find(key); |
| 53 | + if (pos == keyValuesMap.end()) { |
| 54 | + items.push_front(key); |
| 55 | + keyValuesMap[key] = { value, items.begin() }; |
| 56 | + if (keyValuesMap.size() > csize) { |
| 57 | + keyValuesMap.erase(items.back()); |
| 58 | + items.pop_back(); |
| 59 | + } |
| 60 | + } else { |
| 61 | + items.erase(pos->second.second); |
| 62 | + items.push_front(key); |
| 63 | + keyValuesMap[key] = { value, items.begin() }; |
| 64 | + } |
| 65 | + } |
| 66 | + |
| 67 | + bool get(const K key, V &value) { |
| 68 | + LOCK(cs_mycache); |
| 69 | + auto pos = keyValuesMap.find(key); |
| 70 | + if (pos == keyValuesMap.end()) |
| 71 | + return false; |
| 72 | + items.erase(pos->second.second); |
| 73 | + items.push_front(key); |
| 74 | + keyValuesMap[key] = { pos->second.first, items.begin() }; |
| 75 | + value = pos->second.first; |
| 76 | + return true; |
| 77 | + } |
| 78 | +}; |
| 79 | + |
| 80 | +} // namespace veil |
| 81 | + |
| 82 | +#endif // VEIL_LRU_CACHE_H |
0 commit comments