#pragma once
///@file

#include <cassert>
#include <map>
#include <list>
#include <optional>

namespace nix {

/**
 * A simple least-recently-used (LRU) cache. Not thread-safe.
 */
template<typename Key, typename Value>
class LRUCache
{
private:

    size_t capacity;

    // Wrapper to work around the circular dependency between Data
    // and LRU.
    struct LRUIterator;

    using Data = std::map<Key, std::pair<LRUIterator, Value>>;
    using LRU = std::list<typename Data::iterator>;

    struct LRUIterator { typename LRU::iterator it; };
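
    /* 'data' maps each key to its value plus that entry's current
       position in 'lru'; 'lru' orders the map entries from least
       recently used (front) to most recently used (back). */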
    Data data;
    LRU lru;

public:

    LRUCache(size_t capacity) : capacity(capacity) { }

    /**
     * Insert an item, or update it if the key is already present (upsert).
     */
    void upsert(const Key & key, const Value & value)
    {
        if (capacity == 0) return;

        erase(key);

        if (data.size() >= capacity) {
            /* Retire the oldest (least recently used) item. */
            auto oldest = lru.begin();
            data.erase(*oldest);
            lru.erase(oldest);
        }
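
        /* Add the new entry to the map, then append its map iterator to
           the back of the LRU list (most recently used) and record the
           resulting list position in the map entry. */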
        auto res = data.emplace(key, std::make_pair(LRUIterator(), value));
        assert(res.second);
        auto & i(res.first);

        auto j = lru.insert(lru.end(), i);

        i->second.first.it = j;
    }

    bool erase(const Key & key)
    {
        auto i = data.find(key);
        if (i == data.end()) return false;
        lru.erase(i->second.first.it);
        data.erase(i);
        return true;
    }

    /**
     * Look up an item in the cache. If it exists, it becomes the most
     * recently used item.
     */
    std::optional<Value> get(const Key & key)
    {
        auto i = data.find(key);
        if (i == data.end()) return {};

        /* Move this item to the back of the LRU list. */
        lru.erase(i->second.first.it);
        auto j = lru.insert(lru.end(), i);
        i->second.first.it = j;

        return i->second.second;
    }

    size_t size()
    {
        return data.size();
    }

    void clear()
    {
        data.clear();
        lru.clear();
    }
};
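
/*
 * Example (illustrative sketch): with a capacity of 2, inserting a third
 * item evicts the least recently used entry.
 *
 *     LRUCache<int, std::string> cache(2);
 *     cache.upsert(1, "one");
 *     cache.upsert(2, "two");
 *     cache.get(1);             // entry 1 becomes the most recently used
 *     cache.upsert(3, "three"); // evicts entry 2, the least recently used
 *     assert(!cache.get(2));    // entry 2 is gone
 *     assert(cache.get(1));     // entry 1 survived
 */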
}