ACloudViewer  3.9.4
A Modern Library for 3D Data Processing
cache.h
Go to the documentation of this file.
1 // ----------------------------------------------------------------------------
2 // - CloudViewer: www.cloudViewer.org -
3 // ----------------------------------------------------------------------------
4 // Copyright (c) 2018-2024 www.cloudViewer.org
5 // SPDX-License-Identifier: MIT
6 // ----------------------------------------------------------------------------
7 
8 #pragma once
9 
10 #include <functional>
11 #include <future>
12 #include <list>
13 #include <memory>
14 #include <shared_mutex>
15 #include <unordered_map>
16 
17 #include "util/logging.h"
18 
19 namespace colmap {
20 
// Least Recently Used cache implementation. Whenever the cache size is
// exceeded, the least recently used element (by Get) is deleted.
template <typename key_t, typename value_t>
class LRUCache {
 public:
  // Function used to compute a value for a key on a cache miss.
  using LoadFn = std::function<std::shared_ptr<value_t>(const key_t&)>;

  LRUCache(size_t max_num_elems, LoadFn load_fn);

  // The number of elements in the cache.
  size_t NumElems() const;
  size_t MaxNumElems() const;

  // Check whether the element with the given key exists.
  bool Exists(const key_t& key) const;

  // Get the value of an element either from the cache or compute the new
  // value via the load function.
  std::shared_ptr<value_t> Get(const key_t& key);

  // Manually evict an element from the cache.
  // Returns true if the element was evicted.
  bool Evict(const key_t& key);

  // Pop least recently used element from cache.
  void Pop();

  // Clear all elements from cache.
  void Clear();

 private:
  typedef typename std::pair<key_t, std::shared_ptr<value_t>>
      key_value_pair_t;
  typedef typename std::list<key_value_pair_t>::iterator list_iterator_t;

  // Maximum number of least-recently-used elements the cache remembers.
  const size_t max_num_elems_;

  // List to keep track of the least-recently-used elements; the most
  // recently used element is at the front.
  std::list<key_value_pair_t> elems_list_;

  // Mapping from key to location in the list.
  std::unordered_map<key_t, list_iterator_t> elems_map_;

  // Function to compute new values if not in the cache.
  const LoadFn load_fn_;
};
68 
69 // Thread-safe Least Recently Used cache implementation.
70 template <typename key_t, typename value_t>
72 public:
73  using LoadFn = std::function<std::shared_ptr<value_t>(const key_t&)>;
74 
75  ThreadSafeLRUCache(size_t max_num_elems, LoadFn load_fn);
76 
77  // The number of elements in the cache.
78  size_t NumElems() const;
79  size_t MaxNumElems() const;
80 
81  // Check whether the element with the given key exists.
82  bool Exists(const key_t& key) const;
83 
84  // Get the value of an element either from the cache or compute the new
85  // value.
86  std::shared_ptr<value_t> Get(const key_t& key);
87 
88  // Manually evict an element from the cache.
89  // Returns true if the element was evicted.
90  bool Evict(const key_t& key);
91 
92  // Pop least recently used element from cache.
93  void Pop();
94 
95  // Clear all elements from cache.
96  void Clear();
97 
98 protected:
99  struct Entry {
100  Entry() : future(promise.get_future()) {}
101  std::promise<std::shared_ptr<value_t>> promise;
102  std::shared_future<std::shared_ptr<value_t>> future;
103  bool is_loading = false;
104  };
105 
106  // Function to compute new values if not in the cache.
108 
109  mutable std::shared_mutex cache_mutex_;
111 };
112 
// Least Recently Used cache implementation that is constrained by a maximum
// memory limitation of its elements. Whenever the memory limit is exceeded,
// the least recently used (by Get) is deleted. Each element must implement a
// `size_t NumBytes()` method that returns its size in memory.
// NOTE(review): the class head was lost in the page extraction and is
// reconstructed from the constructor and member index — verify.
template <typename key_t, typename value_t>
class MemoryConstrainedLRUCache {
 public:
  using LoadFn = std::function<std::shared_ptr<value_t>(const key_t&)>;

  MemoryConstrainedLRUCache(size_t max_num_bytes, LoadFn load_fn);

  // The size in bytes of the elements in the cache.
  size_t NumBytes() const;
  size_t MaxNumBytes() const;
  // Re-read an element's NumBytes() after it was mutated externally.
  void UpdateNumBytes(const key_t& key);

  // The number of elements in the cache.
  size_t NumElems() const;
  size_t MaxNumElems() const;

  // Check whether the element with the given key exists.
  bool Exists(const key_t& key) const;

  // Get the value of an element either from the cache or compute the new
  // value.
  std::shared_ptr<value_t> Get(const key_t& key);

  // Manually evict an element from the cache.
  // Returns true if the element was evicted.
  bool Evict(const key_t& key);

  // Pop least recently used element from cache.
  void Pop();

  // Clear all elements from cache.
  void Clear();

 private:
  typedef typename std::pair<key_t, std::shared_ptr<value_t>>
      key_value_pair_t;
  typedef typename std::list<key_value_pair_t>::iterator list_iterator_t;

  const size_t max_num_bytes_;
  size_t num_bytes_;

  // List to keep track of the least-recently-used elements.
  std::list<key_value_pair_t> elems_list_;

  // Mapping from key to (location in list, num_bytes).
  std::unordered_map<key_t, std::pair<list_iterator_t, size_t>> elems_map_;

  // Function to compute new values if not in the cache.
  const LoadFn load_fn_;
};
167 
169 // Implementation
171 
172 template <typename key_t, typename value_t>
173 LRUCache<key_t, value_t>::LRUCache(const size_t max_num_elems, LoadFn load_fn)
174  : max_num_elems_(max_num_elems), load_fn_(std::move(load_fn)) {
175  CHECK(load_fn_);
176  CHECK_GT(max_num_elems, 0);
177 }
178 
179 template <typename key_t, typename value_t>
181  return elems_map_.size();
182 }
183 
184 template <typename key_t, typename value_t>
186  return max_num_elems_;
187 }
188 
189 template <typename key_t, typename value_t>
190 bool LRUCache<key_t, value_t>::Exists(const key_t& key) const {
191  return elems_map_.find(key) != elems_map_.end();
192 }
193 
194 template <typename key_t, typename value_t>
195 std::shared_ptr<value_t> LRUCache<key_t, value_t>::Get(const key_t& key) {
196  const auto it = elems_map_.find(key);
197  if (it == elems_map_.end()) {
198  auto it = elems_map_.find(key);
199  elems_list_.emplace_front(key, load_fn_(key));
200  if (it != elems_map_.end()) {
201  elems_list_.erase(it->second);
202  elems_map_.erase(it);
203  }
204  it = elems_map_.emplace_hint(it, key, elems_list_.begin());
205  if (elems_map_.size() > max_num_elems_) {
206  Pop();
207  }
208  return it->second->second;
209  } else {
210  elems_list_.splice(elems_list_.begin(), elems_list_, it->second);
211  return it->second->second;
212  }
213 }
214 
215 template <typename key_t, typename value_t>
216 bool LRUCache<key_t, value_t>::Evict(const key_t& key) {
217  const auto it = elems_map_.find(key);
218  if (it != elems_map_.end()) {
219  elems_list_.erase(it->second);
220  elems_map_.erase(it);
221  return true;
222  }
223  return false;
224 }
225 
226 template <typename key_t, typename value_t>
228  if (!elems_list_.empty()) {
229  auto last = elems_list_.end();
230  --last;
231  elems_map_.erase(last->first);
232  elems_list_.pop_back();
233  }
234 }
235 
236 template <typename key_t, typename value_t>
238  elems_list_.clear();
239  elems_map_.clear();
240 }
241 
242 template <typename key_t, typename value_t>
244  const size_t max_num_elems, LoadFn load_fn)
245  : load_fn_(load_fn), cache_(max_num_elems, [](const key_t&) {
246  return std::make_shared<Entry>();
247  }) {
248  CHECK(load_fn_);
249 }
250 
251 template <typename key_t, typename value_t>
253  std::shared_lock lock(cache_mutex_);
254  return cache_.NumElems();
255 }
256 
257 template <typename key_t, typename value_t>
259  std::shared_lock lock(cache_mutex_);
260  return cache_.MaxNumElems();
261 }
262 
263 template <typename key_t, typename value_t>
264 bool ThreadSafeLRUCache<key_t, value_t>::Exists(const key_t& key) const {
265  std::shared_lock lock(cache_mutex_);
266  return cache_.Exists(key);
267 }
268 
269 template <typename key_t, typename value_t>
271  const key_t& key) {
272  bool should_load = false;
273  std::shared_ptr<Entry> entry;
274  std::shared_future<std::shared_ptr<value_t>> shared_future;
275 
276  {
277  std::unique_lock lock(cache_mutex_);
278  entry = cache_.Get(key);
279  if (entry == nullptr) {
280  return nullptr;
281  }
282  shared_future = entry->future;
283  if (!entry->is_loading) {
284  should_load = true;
285  entry->is_loading = true;
286  }
287  }
288 
289  if (should_load) {
290  try {
291  entry->promise.set_value(CHECK_NOTNULL(load_fn_(key)));
292  } catch (...) {
293  // Evict the cache entry after load failed and set the exception.
294  std::unique_lock lock(cache_mutex_);
295  cache_.Evict(key);
296  entry->promise.set_exception(std::current_exception());
297  }
298  }
299 
300  return shared_future.get();
301 }
302 
303 template <typename key_t, typename value_t>
305  std::unique_lock lock(cache_mutex_);
306  return cache_.Evict(key);
307 }
308 
309 template <typename key_t, typename value_t>
311  std::unique_lock lock(cache_mutex_);
312  return cache_.Pop();
313 }
314 
315 template <typename key_t, typename value_t>
317  std::unique_lock lock(cache_mutex_);
318  return cache_.Clear();
319 }
320 
321 template <typename key_t, typename value_t>
323  const size_t max_num_bytes, LoadFn load_fn)
324  : max_num_bytes_(max_num_bytes),
325  num_bytes_(0),
326  load_fn_(std::move(load_fn)) {
327  CHECK(load_fn_);
328  CHECK_GT(max_num_bytes, 0);
329 }
330 
331 template <typename key_t, typename value_t>
333  return elems_map_.size();
334 }
335 
336 template <typename key_t, typename value_t>
338  return num_bytes_;
339 }
340 
341 template <typename key_t, typename value_t>
343  return max_num_bytes_;
344 }
345 
346 template <typename key_t, typename value_t>
348  return elems_map_.find(key) != elems_map_.end();
349 }
350 
351 template <typename key_t, typename value_t>
353  const key_t& key) {
354  const auto it = elems_map_.find(key);
355  if (it == elems_map_.end()) {
356  std::shared_ptr<value_t> value = load_fn_(key);
357  const size_t num_bytes = value->NumBytes();
358  auto it = elems_map_.find(key);
359  elems_list_.emplace_front(key, std::move(value));
360  if (it != elems_map_.end()) {
361  elems_list_.erase(it->second.first);
362  elems_map_.erase(it);
363  }
364  it = elems_map_.emplace_hint(
365  it, key, std::make_pair(elems_list_.begin(), num_bytes));
366 
367  num_bytes_ += num_bytes;
368  while (num_bytes_ > max_num_bytes_ && elems_map_.size() > 1) {
369  Pop();
370  }
371 
372  return it->second.first->second;
373  } else {
374  elems_list_.splice(elems_list_.begin(), elems_list_, it->second.first);
375  return it->second.first->second;
376  }
377 }
378 
379 template <typename key_t, typename value_t>
381  const auto it = elems_map_.find(key);
382  if (it != elems_map_.end()) {
383  num_bytes_ -= it->second.second;
384  elems_list_.erase(it->second.first);
385  elems_map_.erase(it);
386  return true;
387  }
388  return false;
389 }
390 
391 template <typename key_t, typename value_t>
393  if (!elems_list_.empty()) {
394  auto last = elems_list_.end();
395  --last;
396  const auto it = elems_map_.find(last->first);
397  num_bytes_ -= it->second.second;
398  CHECK_GE(num_bytes_, 0);
399  elems_map_.erase(it);
400  elems_list_.pop_back();
401  }
402 }
403 
404 template <typename key_t, typename value_t>
406  const key_t& key) {
407  size_t& num_bytes = elems_map_.at(key).second;
408  num_bytes_ -= num_bytes;
409  CHECK_GE(num_bytes_, 0);
410  num_bytes = Get(key)->NumBytes();
411  num_bytes_ += num_bytes;
412 
413  while (num_bytes_ > max_num_bytes_ && elems_map_.size() > 1) {
414  Pop();
415  }
416 }
417 
418 template <typename key_t, typename value_t>
420  elems_list_.clear();
421  elems_map_.clear();
422  num_bytes_ = 0;
423 }
424 
425 } // namespace colmap
void Clear()
Definition: cache.h:237
void Pop()
Definition: cache.h:227
LRUCache(size_t max_num_elems, LoadFn load_fn)
Definition: cache.h:173
std::function< std::shared_ptr< value_t >(const key_t &)> LoadFn
Definition: cache.h:26
std::shared_ptr< value_t > Get(const key_t &key)
Definition: cache.h:195
size_t MaxNumElems() const
Definition: cache.h:185
bool Evict(const key_t &key)
Definition: cache.h:216
bool Exists(const key_t &key) const
Definition: cache.h:190
size_t NumElems() const
Definition: cache.h:180
std::shared_ptr< value_t > Get(const key_t &key)
Definition: cache.h:352
void UpdateNumBytes(const key_t &key)
Definition: cache.h:405
bool Evict(const key_t &key)
Definition: cache.h:380
std::function< std::shared_ptr< value_t >(const key_t &)> LoadFn
Definition: cache.h:120
bool Exists(const key_t &key) const
Definition: cache.h:347
size_t MaxNumBytes() const
Definition: cache.h:342
MemoryConstrainedLRUCache(size_t max_num_bytes, LoadFn load_fn)
Definition: cache.h:322
std::shared_ptr< value_t > Get(const key_t &key)
Definition: cache.h:270
bool Exists(const key_t &key) const
Definition: cache.h:264
std::shared_mutex cache_mutex_
Definition: cache.h:109
std::function< std::shared_ptr< value_t >(const key_t &)> LoadFn
Definition: cache.h:73
size_t NumElems() const
Definition: cache.h:252
ThreadSafeLRUCache(size_t max_num_elems, LoadFn load_fn)
Definition: cache.h:243
const LoadFn load_fn_
Definition: cache.h:107
bool Evict(const key_t &key)
Definition: cache.h:304
LRUCache< key_t, Entry > cache_
Definition: cache.h:110
size_t MaxNumElems() const
Definition: cache.h:258
CLOUDVIEWER_HOST_DEVICE Pair< First, Second > make_pair(const First &_first, const Second &_second)
Definition: SlabTraits.h:49
Definition: Eigen.h:85
std::promise< std::shared_ptr< value_t > > promise
Definition: cache.h:101
std::shared_future< std::shared_ptr< value_t > > future
Definition: cache.h:102