least_recently_used.hpp

// Andrew Naplavkov
#ifndef STEP20_LEAST_RECENTLY_USED_HPP
#define STEP20_LEAST_RECENTLY_USED_HPP
#include <cstddef>
#include <functional>
#include <list>
#include <memory>
#include <unordered_map>
#include <utility>
namespace step20::least_recently_used {
namespace detail {
/// Hash map layered over a std::list of entries: the list keeps usage order,
/// the unordered_map stores list iterators for O(1) lookup by key.
template <class Key,
          class T,
          class Hash = std::hash<Key>,
          class KeyEqual = std::equal_to<Key>>
class linked_hash_map {
public:
    using value_type = std::pair<const Key, T>;
    using list_type = std::list<value_type>;
    using iterator = typename list_type::iterator;

    linked_hash_map() = default;
    linked_hash_map(linked_hash_map&&) = default;
    linked_hash_map& operator=(linked_hash_map&&) = default;
    linked_hash_map(const linked_hash_map&) = delete;
    linked_hash_map& operator=(const linked_hash_map&) = delete;
    virtual ~linked_hash_map() = default;

    auto size() const { return list_.size(); }
    iterator begin() { return list_.begin(); }
    iterator end() { return list_.end(); }

    /// Moves the element at @p from to the position just before @p to;
    /// no iterators are invalidated.
    void transfer(iterator from, iterator to) { list_.splice(to, list_, from); }

    iterator find(const Key& key)
    {
        auto it = map_.find(key);
        return it == map_.end() ? list_.end() : it->second;
    }

    iterator erase(iterator it)
    {
        map_.erase(it->first);
        return list_.erase(it);
    }

    /// Inserts a new entry before @p it; does nothing if the key already exists.
    /// The list insertion is rolled back if the hash map insertion throws.
    template <class M>
    std::pair<iterator, bool> emplace(iterator it, const Key& key, M&& val)
    {
        if (auto pos = find(key); pos != end())
            return {pos, false};
        it = list_.emplace(it, key, std::forward<M>(val));
        try {
            map_.emplace(key, it);
            return {it, true};
        }
        catch (...) {
            list_.erase(it);
            throw;
        }
    }

private:
    list_type list_;
    std::unordered_map<Key, iterator, Hash, KeyEqual> map_;
};
} // namespace detail
/// An O(1) algorithm for implementing the LRU cache eviction scheme
template <class Key,
          class T,
          class Hash = std::hash<Key>,
          class KeyEqual = std::equal_to<Key>>
class cache {
    detail::linked_hash_map<Key, T, Hash, KeyEqual> map_;
    std::size_t capacity_;

public:
    explicit cache(std::size_t capacity) : capacity_(capacity) {}
    cache(cache&&) = default;
    cache& operator=(cache&&) = default;
    cache(const cache&) = delete;
    cache& operator=(const cache&) = delete;
    virtual ~cache() = default;

    /// @return nullptr if key is not found
    const T* find(const Key& key)
    {
        auto it = map_.find(key);
        if (it == map_.end())
            return nullptr;
        map_.transfer(it, map_.end());  // mark as most recently used
        return std::addressof(it->second);
    }

    /// Basic exception guarantee.
    /// Inserted item remains if an exception occurs.
    void insert_or_assign(const Key& key, const T& val)
    {
        auto [it, success] = map_.emplace(map_.end(), key, val);
        if (!success) {
            map_.transfer(it, map_.end());  // mark as most recently used
            it->second = val;
            return;
        }
        while (map_.size() > capacity_)
            map_.erase(map_.begin());  // evict least recently used entries
    }
};
} // namespace step20::least_recently_used
#endif // STEP20_LEAST_RECENTLY_USED_HPP
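
A minimal usage sketch (not part of the original header): it assumes the file above is saved as least_recently_used.hpp next to the example and compiled as C++17 or later.

#include "least_recently_used.hpp"
#include <iostream>
#include <string>

int main()
{
    // Cache holding at most two entries; the least recently used entry is evicted.
    step20::least_recently_used::cache<int, std::string> lru(2);
    lru.insert_or_assign(1, "one");
    lru.insert_or_assign(2, "two");
    lru.find(1);                       // touching key 1 makes it the most recently used
    lru.insert_or_assign(3, "three");  // exceeds capacity: key 2 is evicted
    std::cout << (lru.find(2) ? "hit" : "miss") << '\n';  // prints "miss"
    if (const std::string* found = lru.find(1))
        std::cout << *found << '\n';   // prints "one"
}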