DenseMap.h (LLVM 8.0.1)
1 //===- llvm/ADT/DenseMap.h - Dense probed hash table ------------*- C++ -*-===//
2 //
3 // The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 //
10 // This file defines the DenseMap class.
11 //
12 //===----------------------------------------------------------------------===//
13 
14 #ifndef LLVM_ADT_DENSEMAP_H
15 #define LLVM_ADT_DENSEMAP_H
16 
17 #include "llvm/ADT/DenseMapInfo.h"
18 #include "llvm/ADT/EpochTracker.h"
19 #include "llvm/Support/AlignOf.h"
20 #include "llvm/Support/Compiler.h"
21 #include "llvm/Support/MathExtras.h"
22 #include "llvm/Support/ReverseIteration.h"
23 #include "llvm/Support/type_traits.h"
24 #include <algorithm>
25 #include <cassert>
26 #include <cstddef>
27 #include <cstring>
28 #include <initializer_list>
29 #include <iterator>
30 #include <new>
31 #include <type_traits>
32 #include <utility>
33 
34 namespace llvm {
35 
36 namespace detail {
37 
38 // We extend a pair to allow users to override the bucket type with their own
39 // implementation without requiring two members.
40 template <typename KeyT, typename ValueT>
41 struct DenseMapPair : public std::pair<KeyT, ValueT> {
42 
43  // FIXME: Switch to inheriting constructors when we drop support for older
44  // clang versions.
45  // NOTE: This default constructor is declared with '{}' rather than
46  // '= default' to work around a separate bug in clang-3.8. This can
47  // also go when we switch to inheriting constructors.
48  DenseMapPair() {}
49 
50  DenseMapPair(const KeyT &Key, const ValueT &Value)
51  : std::pair<KeyT, ValueT>(Key, Value) {}
52 
53  DenseMapPair(KeyT &&Key, ValueT &&Value)
54  : std::pair<KeyT, ValueT>(std::move(Key), std::move(Value)) {}
55 
56  template <typename AltKeyT, typename AltValueT>
57  DenseMapPair(AltKeyT &&AltKey, AltValueT &&AltValue,
58  typename std::enable_if<
59  std::is_convertible<AltKeyT, KeyT>::value &&
60  std::is_convertible<AltValueT, ValueT>::value>::type * = 0)
61  : std::pair<KeyT, ValueT>(std::forward<AltKeyT>(AltKey),
62  std::forward<AltValueT>(AltValue)) {}
63 
64  template <typename AltPairT>
65  DenseMapPair(AltPairT &&AltPair,
66  typename std::enable_if<std::is_convertible<
67  AltPairT, std::pair<KeyT, ValueT>>::value>::type * = 0)
68  : std::pair<KeyT, ValueT>(std::forward<AltPairT>(AltPair)) {}
69 
70  KeyT &getFirst() { return std::pair<KeyT, ValueT>::first; }
71  const KeyT &getFirst() const { return std::pair<KeyT, ValueT>::first; }
72  ValueT &getSecond() { return std::pair<KeyT, ValueT>::second; }
73  const ValueT &getSecond() const { return std::pair<KeyT, ValueT>::second; }
74 };
75 
76 } // end namespace detail
77 
78 template <typename KeyT, typename ValueT,
79  typename KeyInfoT = DenseMapInfo<KeyT>,
80  typename Bucket = llvm::detail::DenseMapPair<KeyT, ValueT>,
81  bool IsConst = false>
82 class DenseMapIterator;
83 
84 template <typename DerivedT, typename KeyT, typename ValueT, typename KeyInfoT,
85  typename BucketT>
86 class DenseMapBase : public DebugEpochBase {
87  template <typename T>
88  using const_arg_type_t = typename const_pointer_or_const_ref<T>::type;
89 
90 public:
91  using size_type = unsigned;
92  using key_type = KeyT;
93  using mapped_type = ValueT;
94  using value_type = BucketT;
95 
96  using iterator = DenseMapIterator<KeyT, ValueT, KeyInfoT, BucketT>;
97  using const_iterator =
98  DenseMapIterator<KeyT, ValueT, KeyInfoT, BucketT, true>;
99 
100  inline iterator begin() {
101  // When the map is empty, avoid the overhead of advancing/retreating past
102  // empty buckets.
103  if (empty())
104  return end();
105  if (shouldReverseIterate<KeyT>())
106  return makeIterator(getBucketsEnd() - 1, getBuckets(), *this);
107  return makeIterator(getBuckets(), getBucketsEnd(), *this);
108  }
109  inline iterator end() {
110  return makeIterator(getBucketsEnd(), getBucketsEnd(), *this, true);
111  }
112  inline const_iterator begin() const {
113  if (empty())
114  return end();
115  if (shouldReverseIterate<KeyT>())
116  return makeConstIterator(getBucketsEnd() - 1, getBuckets(), *this);
117  return makeConstIterator(getBuckets(), getBucketsEnd(), *this);
118  }
119  inline const_iterator end() const {
120  return makeConstIterator(getBucketsEnd(), getBucketsEnd(), *this, true);
121  }
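 // Editorial usage sketch (not part of the original header; assumes
 // llvm/Support/raw_ostream.h): iteration visits only buckets holding real
 // entries, skipping empty and tombstone buckets, in an unspecified order
 // (possibly reversed under shouldReverseIterate<KeyT>()).
 //
 //   llvm::DenseMap<int, const char *> M;
 //   M.insert({1, "one"});
 //   M.insert({2, "two"});
 //   for (const auto &KV : M)      // value_type is the pair-like bucket
 //     llvm::errs() << KV.first << " -> " << KV.second << "\n";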
122 
123  LLVM_NODISCARD bool empty() const {
124  return getNumEntries() == 0;
125  }
126  unsigned size() const { return getNumEntries(); }
127 
128  /// Grow the densemap so that it can contain at least \p NumEntries items
129  /// before resizing again.
130  void reserve(size_type NumEntries) {
131  auto NumBuckets = getMinBucketToReserveForEntries(NumEntries);
132  incrementEpoch();
133  if (NumBuckets > getNumBuckets())
134  grow(NumBuckets);
135  }
136 
137  void clear() {
138  incrementEpoch();
139  if (getNumEntries() == 0 && getNumTombstones() == 0) return;
140 
141  // If the capacity of the array is huge, and the # elements used is small,
142  // shrink the array.
143  if (getNumEntries() * 4 < getNumBuckets() && getNumBuckets() > 64) {
144  shrink_and_clear();
145  return;
146  }
147 
148  const KeyT EmptyKey = getEmptyKey(), TombstoneKey = getTombstoneKey();
149  if (isPodLike<KeyT>::value && isPodLike<ValueT>::value) {
150  // Use a simpler loop when these are trivial types.
151  for (BucketT *P = getBuckets(), *E = getBucketsEnd(); P != E; ++P)
152  P->getFirst() = EmptyKey;
153  } else {
154  unsigned NumEntries = getNumEntries();
155  for (BucketT *P = getBuckets(), *E = getBucketsEnd(); P != E; ++P) {
156  if (!KeyInfoT::isEqual(P->getFirst(), EmptyKey)) {
157  if (!KeyInfoT::isEqual(P->getFirst(), TombstoneKey)) {
158  P->getSecond().~ValueT();
159  --NumEntries;
160  }
161  P->getFirst() = EmptyKey;
162  }
163  }
164  assert(NumEntries == 0 && "Node count imbalance!");
165  }
166  setNumEntries(0);
167  setNumTombstones(0);
168  }
169 
170  /// Return 1 if the specified key is in the map, 0 otherwise.
171  size_type count(const_arg_type_t<KeyT> Val) const {
172  const BucketT *TheBucket;
173  return LookupBucketFor(Val, TheBucket) ? 1 : 0;
174  }
175 
176  iterator find(const_arg_type_t<KeyT> Val) {
177  BucketT *TheBucket;
178  if (LookupBucketFor(Val, TheBucket))
179  return makeIterator(TheBucket, getBucketsEnd(), *this, true);
180  return end();
181  }
182  const_iterator find(const_arg_type_t<KeyT> Val) const {
183  const BucketT *TheBucket;
184  if (LookupBucketFor(Val, TheBucket))
185  return makeConstIterator(TheBucket, getBucketsEnd(), *this, true);
186  return end();
187  }
188 
189  /// Alternate version of find() which allows a different, and possibly
190  /// less expensive, key type.
191  /// The DenseMapInfo is responsible for supplying methods
192  /// getHashValue(LookupKeyT) and isEqual(LookupKeyT, KeyT) for each key
193  /// type used.
194  template<class LookupKeyT>
195  iterator find_as(const LookupKeyT &Val) {
196  BucketT *TheBucket;
197  if (LookupBucketFor(Val, TheBucket))
198  return makeIterator(TheBucket, getBucketsEnd(), *this, true);
199  return end();
200  }
201  template<class LookupKeyT>
202  const_iterator find_as(const LookupKeyT &Val) const {
203  const BucketT *TheBucket;
204  if (LookupBucketFor(Val, TheBucket))
205  return makeConstIterator(TheBucket, getBucketsEnd(), *this, true);
206  return end();
207  }
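 // Editorial sketch of how find_as() is meant to be used (all names below are
 // hypothetical; only the required DenseMapInfo member set is real; assumes
 // llvm/ADT/Hashing.h and llvm/ADT/StringRef.h): a key-info type that also
 // supplies getHashValue()/isEqual() overloads for a cheaper lookup type, so
 // a map keyed by std::string can be queried with a const char *.
 //
 //   struct StringKeyInfo {
 //     static inline std::string getEmptyKey() { return std::string("\0E", 2); }
 //     static inline std::string getTombstoneKey() { return std::string("\0T", 2); }
 //     static unsigned getHashValue(const std::string &S) {
 //       return llvm::hash_value(llvm::StringRef(S));
 //     }
 //     static bool isEqual(const std::string &L, const std::string &R) {
 //       return L == R;
 //     }
 //     // Extra overloads that enable find_as<const char *>():
 //     static unsigned getHashValue(const char *S) {
 //       return llvm::hash_value(llvm::StringRef(S));
 //     }
 //     static bool isEqual(const char *L, const std::string &R) { return R == L; }
 //   };
 //
 //   llvm::DenseMap<std::string, int, StringKeyInfo> M;
 //   auto It = M.find_as("some key");  // no std::string temporary on lookup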
208 
209  /// lookup - Return the entry for the specified key, or a default
210  /// constructed value if no such entry exists.
211  ValueT lookup(const_arg_type_t<KeyT> Val) const {
212  const BucketT *TheBucket;
213  if (LookupBucketFor(Val, TheBucket))
214  return TheBucket->getSecond();
215  return ValueT();
216  }
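 // Editorial usage note: lookup() returns the mapped value by copy and never
 // inserts, find() returns an iterator and never inserts, while operator[]
 // (below) default-constructs and inserts a value for a missing key.
 //
 //   llvm::DenseMap<int, int> M;
 //   int A = M.lookup(7);   // A == 0, M is still empty
 //   auto It = M.find(7);   // It == M.end(), M is still empty
 //   int &B = M[7];         // inserts {7, 0}; M.size() == 1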
217 
218  // Inserts key,value pair into the map if the key isn't already in the map.
219  // If the key is already in the map, it returns false and doesn't update the
220  // value.
221  std::pair<iterator, bool> insert(const std::pair<KeyT, ValueT> &KV) {
222  return try_emplace(KV.first, KV.second);
223  }
224 
225  // Inserts key,value pair into the map if the key isn't already in the map.
226  // If the key is already in the map, it returns false and doesn't update the
227  // value.
228  std::pair<iterator, bool> insert(std::pair<KeyT, ValueT> &&KV) {
229  return try_emplace(std::move(KV.first), std::move(KV.second));
230  }
231 
232  // Inserts key,value pair into the map if the key isn't already in the map.
233  // The value is constructed in-place if the key is not in the map, otherwise
234  // it is not moved.
235  template <typename... Ts>
236  std::pair<iterator, bool> try_emplace(KeyT &&Key, Ts &&... Args) {
237  BucketT *TheBucket;
238  if (LookupBucketFor(Key, TheBucket))
239  return std::make_pair(
240  makeIterator(TheBucket, getBucketsEnd(), *this, true),
241  false); // Already in map.
242 
243  // Otherwise, insert the new element.
244  TheBucket =
245  InsertIntoBucket(TheBucket, std::move(Key), std::forward<Ts>(Args)...);
246  return std::make_pair(
247  makeIterator(TheBucket, getBucketsEnd(), *this, true),
248  true);
249  }
250 
251  // Inserts key,value pair into the map if the key isn't already in the map.
252  // The value is constructed in-place if the key is not in the map, otherwise
253  // it is not moved.
254  template <typename... Ts>
255  std::pair<iterator, bool> try_emplace(const KeyT &Key, Ts &&... Args) {
256  BucketT *TheBucket;
257  if (LookupBucketFor(Key, TheBucket))
258  return std::make_pair(
259  makeIterator(TheBucket, getBucketsEnd(), *this, true),
260  false); // Already in map.
261 
262  // Otherwise, insert the new element.
263  TheBucket = InsertIntoBucket(TheBucket, Key, std::forward<Ts>(Args)...);
264  return std::make_pair(
265  makeIterator(TheBucket, getBucketsEnd(), *this, true),
266  true);
267  }
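 // Editorial sketch (assumes <memory> and llvm/ADT/STLExtras.h for
 // llvm::make_unique): try_emplace() forwards Args to ValueT's constructor
 // only when the key is absent, so the mapped value is never constructed in
 // the bucket, and a moved-from argument is not consumed, when the key is
 // already present.
 //
 //   llvm::DenseMap<int, std::unique_ptr<int>> M;
 //   auto P = llvm::make_unique<int>(42);
 //   auto Res = M.try_emplace(1, std::move(P));   // inserted; P is now null
 //   auto Res2 = M.try_emplace(1, llvm::make_unique<int>(7));
 //   // Res2.second == false; the existing value for key 1 is untouched.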
268 
269  /// Alternate version of insert() which allows a different, and possibly
270  /// less expensive, key type.
271  /// The DenseMapInfo is responsible for supplying methods
272  /// getHashValue(LookupKeyT) and isEqual(LookupKeyT, KeyT) for each key
273  /// type used.
274  template <typename LookupKeyT>
275  std::pair<iterator, bool> insert_as(std::pair<KeyT, ValueT> &&KV,
276  const LookupKeyT &Val) {
277  BucketT *TheBucket;
278  if (LookupBucketFor(Val, TheBucket))
279  return std::make_pair(
280  makeIterator(TheBucket, getBucketsEnd(), *this, true),
281  false); // Already in map.
282 
283  // Otherwise, insert the new element.
284  TheBucket = InsertIntoBucketWithLookup(TheBucket, std::move(KV.first),
285  std::move(KV.second), Val);
286  return std::make_pair(
287  makeIterator(TheBucket, getBucketsEnd(), *this, true),
288  true);
289  }
290 
291  /// insert - Range insertion of pairs.
292  template<typename InputIt>
293  void insert(InputIt I, InputIt E) {
294  for (; I != E; ++I)
295  insert(*I);
296  }
297 
298  bool erase(const KeyT &Val) {
299  BucketT *TheBucket;
300  if (!LookupBucketFor(Val, TheBucket))
301  return false; // not in map.
302 
303  TheBucket->getSecond().~ValueT();
304  TheBucket->getFirst() = getTombstoneKey();
305  decrementNumEntries();
306  incrementNumTombstones();
307  return true;
308  }
309  void erase(iterator I) {
310  BucketT *TheBucket = &*I;
311  TheBucket->getSecond().~ValueT();
312  TheBucket->getFirst() = getTombstoneKey();
313  decrementNumEntries();
314  incrementNumTombstones();
315  }
316 
317  value_type& FindAndConstruct(const KeyT &Key) {
318  BucketT *TheBucket;
319  if (LookupBucketFor(Key, TheBucket))
320  return *TheBucket;
321 
322  return *InsertIntoBucket(TheBucket, Key);
323  }
324 
325  ValueT &operator[](const KeyT &Key) {
326  return FindAndConstruct(Key).second;
327  }
328 
329  value_type& FindAndConstruct(KeyT &&Key) {
330  BucketT *TheBucket;
331  if (LookupBucketFor(Key, TheBucket))
332  return *TheBucket;
333 
334  return *InsertIntoBucket(TheBucket, std::move(Key));
335  }
336 
337  ValueT &operator[](KeyT &&Key) {
338  return FindAndConstruct(std::move(Key)).second;
339  }
340 
341  /// isPointerIntoBucketsArray - Return true if the specified pointer points
342  /// somewhere into the DenseMap's array of buckets (i.e. either to a key or
343  /// value in the DenseMap).
344  bool isPointerIntoBucketsArray(const void *Ptr) const {
345  return Ptr >= getBuckets() && Ptr < getBucketsEnd();
346  }
347 
348  /// getPointerIntoBucketsArray() - Return an opaque pointer into the buckets
349  /// array. In conjunction with the previous method, this can be used to
350  /// determine whether an insertion caused the DenseMap to reallocate.
351  const void *getPointerIntoBucketsArray() const { return getBuckets(); }
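 // Editorial sketch of the intended use of the two methods above: detect
 // whether an insertion caused the bucket array to be reallocated (which
 // invalidates iterators and pointers into the map).
 //
 //   llvm::DenseMap<int, int> M;
 //   const void *OldBuckets = M.getPointerIntoBucketsArray();
 //   M.insert({1, 2});
 //   bool Reallocated = M.getPointerIntoBucketsArray() != OldBuckets;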
352 
353 protected:
354  DenseMapBase() = default;
355 
356  void destroyAll() {
357  if (getNumBuckets() == 0) // Nothing to do.
358  return;
359 
360  const KeyT EmptyKey = getEmptyKey(), TombstoneKey = getTombstoneKey();
361  for (BucketT *P = getBuckets(), *E = getBucketsEnd(); P != E; ++P) {
362  if (!KeyInfoT::isEqual(P->getFirst(), EmptyKey) &&
363  !KeyInfoT::isEqual(P->getFirst(), TombstoneKey))
364  P->getSecond().~ValueT();
365  P->getFirst().~KeyT();
366  }
367  }
368 
369  void initEmpty() {
370  setNumEntries(0);
371  setNumTombstones(0);
372 
373  assert((getNumBuckets() & (getNumBuckets()-1)) == 0 &&
374  "# initial buckets must be a power of two!");
375  const KeyT EmptyKey = getEmptyKey();
376  for (BucketT *B = getBuckets(), *E = getBucketsEnd(); B != E; ++B)
377  ::new (&B->getFirst()) KeyT(EmptyKey);
378  }
379 
380  /// Returns the number of buckets to allocate to ensure that the DenseMap can
381  /// accommodate \p NumEntries without need to grow().
382  unsigned getMinBucketToReserveForEntries(unsigned NumEntries) {
383  // Ensure that "NumEntries * 4 < NumBuckets * 3"
384  if (NumEntries == 0)
385  return 0;
386  // +1 is required because of the strict inequality above: for NumEntries == 48,
387  // 48 * 4 / 3 == 64 would give 64 buckets, but 48 * 4 < 64 * 3 fails, so we need 128.
388  return NextPowerOf2(NumEntries * 4 / 3 + 1);
389  }
390 
391  void moveFromOldBuckets(BucketT *OldBucketsBegin, BucketT *OldBucketsEnd) {
392  initEmpty();
393 
394  // Insert all the old elements.
395  const KeyT EmptyKey = getEmptyKey();
396  const KeyT TombstoneKey = getTombstoneKey();
397  for (BucketT *B = OldBucketsBegin, *E = OldBucketsEnd; B != E; ++B) {
398  if (!KeyInfoT::isEqual(B->getFirst(), EmptyKey) &&
399  !KeyInfoT::isEqual(B->getFirst(), TombstoneKey)) {
400  // Insert the key/value into the new table.
401  BucketT *DestBucket;
402  bool FoundVal = LookupBucketFor(B->getFirst(), DestBucket);
403  (void)FoundVal; // silence warning.
404  assert(!FoundVal && "Key already in new map?");
405  DestBucket->getFirst() = std::move(B->getFirst());
406  ::new (&DestBucket->getSecond()) ValueT(std::move(B->getSecond()));
407  incrementNumEntries();
408 
409  // Free the value.
410  B->getSecond().~ValueT();
411  }
412  B->getFirst().~KeyT();
413  }
414  }
415 
416  template <typename OtherBaseT>
417  void copyFrom(
418  const DenseMapBase<OtherBaseT, KeyT, ValueT, KeyInfoT, BucketT> &other) {
419  assert(&other != this);
420  assert(getNumBuckets() == other.getNumBuckets());
421 
422  setNumEntries(other.getNumEntries());
423  setNumTombstones(other.getNumTombstones());
424 
425  if (isPodLike<KeyT>::value && isPodLike<ValueT>::value)
426  memcpy(reinterpret_cast<void *>(getBuckets()), other.getBuckets(),
427  getNumBuckets() * sizeof(BucketT));
428  else
429  for (size_t i = 0; i < getNumBuckets(); ++i) {
430  ::new (&getBuckets()[i].getFirst())
431  KeyT(other.getBuckets()[i].getFirst());
432  if (!KeyInfoT::isEqual(getBuckets()[i].getFirst(), getEmptyKey()) &&
433  !KeyInfoT::isEqual(getBuckets()[i].getFirst(), getTombstoneKey()))
434  ::new (&getBuckets()[i].getSecond())
435  ValueT(other.getBuckets()[i].getSecond());
436  }
437  }
438 
439  static unsigned getHashValue(const KeyT &Val) {
440  return KeyInfoT::getHashValue(Val);
441  }
442 
443  template<typename LookupKeyT>
444  static unsigned getHashValue(const LookupKeyT &Val) {
445  return KeyInfoT::getHashValue(Val);
446  }
447 
448  static const KeyT getEmptyKey() {
449  static_assert(std::is_base_of<DenseMapBase, DerivedT>::value,
450  "Must pass the derived type to this template!");
451  return KeyInfoT::getEmptyKey();
452  }
453 
454  static const KeyT getTombstoneKey() {
455  return KeyInfoT::getTombstoneKey();
456  }
457 
458 private:
459  iterator makeIterator(BucketT *P, BucketT *E,
460  DebugEpochBase &Epoch,
461  bool NoAdvance=false) {
462  if (shouldReverseIterate<KeyT>()) {
463  BucketT *B = P == getBucketsEnd() ? getBuckets() : P + 1;
464  return iterator(B, E, Epoch, NoAdvance);
465  }
466  return iterator(P, E, Epoch, NoAdvance);
467  }
468 
469  const_iterator makeConstIterator(const BucketT *P, const BucketT *E,
470  const DebugEpochBase &Epoch,
471  const bool NoAdvance=false) const {
472  if (shouldReverseIterate<KeyT>()) {
473  const BucketT *B = P == getBucketsEnd() ? getBuckets() : P + 1;
474  return const_iterator(B, E, Epoch, NoAdvance);
475  }
476  return const_iterator(P, E, Epoch, NoAdvance);
477  }
478 
479  unsigned getNumEntries() const {
480  return static_cast<const DerivedT *>(this)->getNumEntries();
481  }
482 
483  void setNumEntries(unsigned Num) {
484  static_cast<DerivedT *>(this)->setNumEntries(Num);
485  }
486 
487  void incrementNumEntries() {
488  setNumEntries(getNumEntries() + 1);
489  }
490 
491  void decrementNumEntries() {
492  setNumEntries(getNumEntries() - 1);
493  }
494 
495  unsigned getNumTombstones() const {
496  return static_cast<const DerivedT *>(this)->getNumTombstones();
497  }
498 
499  void setNumTombstones(unsigned Num) {
500  static_cast<DerivedT *>(this)->setNumTombstones(Num);
501  }
502 
503  void incrementNumTombstones() {
504  setNumTombstones(getNumTombstones() + 1);
505  }
506 
507  void decrementNumTombstones() {
508  setNumTombstones(getNumTombstones() - 1);
509  }
510 
511  const BucketT *getBuckets() const {
512  return static_cast<const DerivedT *>(this)->getBuckets();
513  }
514 
515  BucketT *getBuckets() {
516  return static_cast<DerivedT *>(this)->getBuckets();
517  }
518 
519  unsigned getNumBuckets() const {
520  return static_cast<const DerivedT *>(this)->getNumBuckets();
521  }
522 
523  BucketT *getBucketsEnd() {
524  return getBuckets() + getNumBuckets();
525  }
526 
527  const BucketT *getBucketsEnd() const {
528  return getBuckets() + getNumBuckets();
529  }
530 
531  void grow(unsigned AtLeast) {
532  static_cast<DerivedT *>(this)->grow(AtLeast);
533  }
534 
535  void shrink_and_clear() {
536  static_cast<DerivedT *>(this)->shrink_and_clear();
537  }
538 
539  template <typename KeyArg, typename... ValueArgs>
540  BucketT *InsertIntoBucket(BucketT *TheBucket, KeyArg &&Key,
541  ValueArgs &&... Values) {
542  TheBucket = InsertIntoBucketImpl(Key, Key, TheBucket);
543 
544  TheBucket->getFirst() = std::forward<KeyArg>(Key);
545  ::new (&TheBucket->getSecond()) ValueT(std::forward<ValueArgs>(Values)...);
546  return TheBucket;
547  }
548 
549  template <typename LookupKeyT>
550  BucketT *InsertIntoBucketWithLookup(BucketT *TheBucket, KeyT &&Key,
551  ValueT &&Value, LookupKeyT &Lookup) {
552  TheBucket = InsertIntoBucketImpl(Key, Lookup, TheBucket);
553 
554  TheBucket->getFirst() = std::move(Key);
555  ::new (&TheBucket->getSecond()) ValueT(std::move(Value));
556  return TheBucket;
557  }
558 
559  template <typename LookupKeyT>
560  BucketT *InsertIntoBucketImpl(const KeyT &Key, const LookupKeyT &Lookup,
561  BucketT *TheBucket) {
562  incrementEpoch();
563 
564  // If the load of the hash table is more than 3/4, or if fewer than 1/8 of
565  // the buckets are empty (meaning that many are filled with tombstones),
566  // grow the table.
567  //
568  // The latter case is tricky. For example, if we had one empty bucket with
569  // tons of tombstones, failing lookups (e.g. for insertion) would have to
570  // probe almost the entire table until they found the empty bucket. If the
571  // table were completely filled with tombstones, no lookup would ever
572  // succeed, causing infinite loops in lookup. (See the worked example after this function.)
573  unsigned NewNumEntries = getNumEntries() + 1;
574  unsigned NumBuckets = getNumBuckets();
575  if (LLVM_UNLIKELY(NewNumEntries * 4 >= NumBuckets * 3)) {
576  this->grow(NumBuckets * 2);
577  LookupBucketFor(Lookup, TheBucket);
578  NumBuckets = getNumBuckets();
579  } else if (LLVM_UNLIKELY(NumBuckets-(NewNumEntries+getNumTombstones()) <=
580  NumBuckets/8)) {
581  this->grow(NumBuckets);
582  LookupBucketFor(Lookup, TheBucket);
583  }
584  assert(TheBucket);
585 
586  // Only update the state after we've grown our bucket space appropriately
587  // so that when growing buckets we have self-consistent entry count.
588  incrementNumEntries();
589 
590  // If we are writing over a tombstone, remember this.
591  const KeyT EmptyKey = getEmptyKey();
592  if (!KeyInfoT::isEqual(TheBucket->getFirst(), EmptyKey))
593  decrementNumTombstones();
594 
595  return TheBucket;
596  }
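 // Editorial worked example of the growth policy above, for a table of 64
 // buckets: grow to 128 as soon as NewNumEntries * 4 >= 64 * 3, i.e. when the
 // 48th entry is inserted; independently, once 64 - (NewNumEntries +
 // NumTombstones) <= 64 / 8, i.e. when 8 or fewer buckets are truly empty,
 // grow(64) rehashes at the same size, which clears accumulated tombstones.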
597 
598  /// LookupBucketFor - Lookup the appropriate bucket for Val, returning it in
599  /// FoundBucket. If the bucket contains the key and a value, this returns
600  /// true, otherwise it returns a bucket with an empty marker or tombstone and
601  /// returns false.
602  template<typename LookupKeyT>
603  bool LookupBucketFor(const LookupKeyT &Val,
604  const BucketT *&FoundBucket) const {
605  const BucketT *BucketsPtr = getBuckets();
606  const unsigned NumBuckets = getNumBuckets();
607 
608  if (NumBuckets == 0) {
609  FoundBucket = nullptr;
610  return false;
611  }
612 
613  // FoundTombstone - Keep track of whether we find a tombstone while probing.
614  const BucketT *FoundTombstone = nullptr;
615  const KeyT EmptyKey = getEmptyKey();
616  const KeyT TombstoneKey = getTombstoneKey();
617  assert(!KeyInfoT::isEqual(Val, EmptyKey) &&
618  !KeyInfoT::isEqual(Val, TombstoneKey) &&
619  "Empty/Tombstone value shouldn't be inserted into map!");
620 
621  unsigned BucketNo = getHashValue(Val) & (NumBuckets-1);
622  unsigned ProbeAmt = 1;
623  while (true) {
624  const BucketT *ThisBucket = BucketsPtr + BucketNo;
625  // Found Val's bucket? If so, return it.
626  if (LLVM_LIKELY(KeyInfoT::isEqual(Val, ThisBucket->getFirst()))) {
627  FoundBucket = ThisBucket;
628  return true;
629  }
630 
631  // If we found an empty bucket, the key doesn't exist in the map. Return
632  // the empty bucket (or the first tombstone seen) so the caller can insert.
633  if (LLVM_LIKELY(KeyInfoT::isEqual(ThisBucket->getFirst(), EmptyKey))) {
634  // If we've already seen a tombstone while probing, fill it in instead
635  // of the empty bucket we eventually probed to.
636  FoundBucket = FoundTombstone ? FoundTombstone : ThisBucket;
637  return false;
638  }
639 
640  // If this is a tombstone, remember it. If Val ends up not in the map, we
641  // prefer to return it rather than something that would require more probing.
642  if (KeyInfoT::isEqual(ThisBucket->getFirst(), TombstoneKey) &&
643  !FoundTombstone)
644  FoundTombstone = ThisBucket; // Remember the first tombstone found.
645 
646  // Otherwise, it's a hash collision or a tombstone, continue quadratic
647  // probing.
648  BucketNo += ProbeAmt++;
649  BucketNo &= (NumBuckets-1);
650  }
651  }
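 // Editorial note on the probe sequence above: with ProbeAmt starting at 1
 // and NumBuckets a power of two, a key hashing to bucket H visits
 // H, H+1, H+3, H+6, H+10, ... (mod NumBuckets), i.e. triangular-number
 // offsets. For power-of-two table sizes this sequence reaches every bucket
 // before repeating, so the loop terminates as long as at least one bucket is
 // empty, which the grow policy in InsertIntoBucketImpl guarantees.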
652 
653  template <typename LookupKeyT>
654  bool LookupBucketFor(const LookupKeyT &Val, BucketT *&FoundBucket) {
655  const BucketT *ConstFoundBucket;
656  bool Result = const_cast<const DenseMapBase *>(this)
657  ->LookupBucketFor(Val, ConstFoundBucket);
658  FoundBucket = const_cast<BucketT *>(ConstFoundBucket);
659  return Result;
660  }
661 
662 public:
663  /// Return the approximate size (in bytes) of the actual map.
664  /// This is just the raw memory used by DenseMap.
665  /// If entries are pointers to objects, the size of the referenced objects
666  /// are not included.
667  size_t getMemorySize() const {
668  return getNumBuckets() * sizeof(BucketT);
669  }
670 };
671 
672 /// Equality comparison for DenseMap.
673 ///
674 /// Iterates over elements of LHS confirming that each (key, value) pair in LHS
675 /// is also in RHS, and that no additional pairs are in RHS.
676 /// Equivalent to N calls to RHS.find and N value comparisons. Amortized
677 /// complexity is linear, worst case is O(N^2) (if every hash collides).
678 template <typename DerivedT, typename KeyT, typename ValueT, typename KeyInfoT,
679  typename BucketT>
680 bool operator==(
681  const DenseMapBase<DerivedT, KeyT, ValueT, KeyInfoT, BucketT> &LHS,
682  const DenseMapBase<DerivedT, KeyT, ValueT, KeyInfoT, BucketT> &RHS) {
683  if (LHS.size() != RHS.size())
684  return false;
685 
686  for (auto &KV : LHS) {
687  auto I = RHS.find(KV.first);
688  if (I == RHS.end() || I->second != KV.second)
689  return false;
690  }
691 
692  return true;
693 }
694 
695 /// Inequality comparison for DenseMap.
696 ///
697 /// Equivalent to !(LHS == RHS). See operator== for performance notes.
698 template <typename DerivedT, typename KeyT, typename ValueT, typename KeyInfoT,
699  typename BucketT>
700 bool operator!=(
701  const DenseMapBase<DerivedT, KeyT, ValueT, KeyInfoT, BucketT> &LHS,
702  const DenseMapBase<DerivedT, KeyT, ValueT, KeyInfoT, BucketT> &RHS) {
703  return !(LHS == RHS);
704 }
705 
706 template <typename KeyT, typename ValueT,
707  typename KeyInfoT = DenseMapInfo<KeyT>,
708  typename BucketT = llvm::detail::DenseMapPair<KeyT, ValueT>>
709 class DenseMap : public DenseMapBase<DenseMap<KeyT, ValueT, KeyInfoT, BucketT>,
710  KeyT, ValueT, KeyInfoT, BucketT> {
711  friend class DenseMapBase<DenseMap, KeyT, ValueT, KeyInfoT, BucketT>;
712 
713  // Lift some types from the dependent base class into this class for
714  // simplicity of referring to them.
715  using BaseT = DenseMapBase<DenseMap, KeyT, ValueT, KeyInfoT, BucketT>;
716 
717  BucketT *Buckets;
718  unsigned NumEntries;
719  unsigned NumTombstones;
720  unsigned NumBuckets;
721 
722 public:
723  /// Create a DenseMap with an optional \p InitialReserve that guarantees that
724  /// this number of elements can be inserted in the map without grow().
725  explicit DenseMap(unsigned InitialReserve = 0) { init(InitialReserve); }
726 
727  DenseMap(const DenseMap &other) : BaseT() {
728  init(0);
729  copyFrom(other);
730  }
731 
732  DenseMap(DenseMap &&other) : BaseT() {
733  init(0);
734  swap(other);
735  }
736 
737  template<typename InputIt>
738  DenseMap(const InputIt &I, const InputIt &E) {
739  init(std::distance(I, E));
740  this->insert(I, E);
741  }
742 
743  DenseMap(std::initializer_list<typename BaseT::value_type> Vals) {
744  init(Vals.size());
745  this->insert(Vals.begin(), Vals.end());
746  }
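 // Editorial usage sketch for the constructors above (assumes <vector> and
 // llvm/ADT/StringRef.h; shown only for illustration):
 //
 //   llvm::DenseMap<int, llvm::StringRef> M(/*InitialReserve=*/16);
 //   M.insert({1, "one"});
 //
 //   llvm::DenseMap<int, int> N = {{1, 2}, {3, 4}};     // initializer_list
 //   std::vector<std::pair<int, int>> V = {{5, 6}};
 //   llvm::DenseMap<int, int> R(V.begin(), V.end());    // range constructor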
747 
748  ~DenseMap() {
749  this->destroyAll();
750  operator delete(Buckets);
751  }
752 
753  void swap(DenseMap& RHS) {
754  this->incrementEpoch();
755  RHS.incrementEpoch();
756  std::swap(Buckets, RHS.Buckets);
757  std::swap(NumEntries, RHS.NumEntries);
758  std::swap(NumTombstones, RHS.NumTombstones);
759  std::swap(NumBuckets, RHS.NumBuckets);
760  }
761 
762  DenseMap& operator=(const DenseMap& other) {
763  if (&other != this)
764  copyFrom(other);
765  return *this;
766  }
767 
768  DenseMap& operator=(DenseMap &&other) {
769  this->destroyAll();
770  operator delete(Buckets);
771  init(0);
772  swap(other);
773  return *this;
774  }
775 
776  void copyFrom(const DenseMap& other) {
777  this->destroyAll();
778  operator delete(Buckets);
779  if (allocateBuckets(other.NumBuckets)) {
780  this->BaseT::copyFrom(other);
781  } else {
782  NumEntries = 0;
783  NumTombstones = 0;
784  }
785  }
786 
787  void init(unsigned InitNumEntries) {
788  auto InitBuckets = BaseT::getMinBucketToReserveForEntries(InitNumEntries);
789  if (allocateBuckets(InitBuckets)) {
790  this->BaseT::initEmpty();
791  } else {
792  NumEntries = 0;
793  NumTombstones = 0;
794  }
795  }
796 
797  void grow(unsigned AtLeast) {
798  unsigned OldNumBuckets = NumBuckets;
799  BucketT *OldBuckets = Buckets;
800 
801  allocateBuckets(std::max<unsigned>(64, static_cast<unsigned>(NextPowerOf2(AtLeast-1))));
802  assert(Buckets);
803  if (!OldBuckets) {
804  this->BaseT::initEmpty();
805  return;
806  }
807 
808  this->moveFromOldBuckets(OldBuckets, OldBuckets+OldNumBuckets);
809 
810  // Free the old table.
811  operator delete(OldBuckets);
812  }
813 
814  void shrink_and_clear() {
815  unsigned OldNumEntries = NumEntries;
816  this->destroyAll();
817 
818  // Reduce the number of buckets.
819  unsigned NewNumBuckets = 0;
820  if (OldNumEntries)
821  NewNumBuckets = std::max(64, 1 << (Log2_32_Ceil(OldNumEntries) + 1));
822  if (NewNumBuckets == NumBuckets) {
823  this->BaseT::initEmpty();
824  return;
825  }
826 
827  operator delete(Buckets);
828  init(NewNumBuckets);
829  }
830 
831 private:
832  unsigned getNumEntries() const {
833  return NumEntries;
834  }
835 
836  void setNumEntries(unsigned Num) {
837  NumEntries = Num;
838  }
839 
840  unsigned getNumTombstones() const {
841  return NumTombstones;
842  }
843 
844  void setNumTombstones(unsigned Num) {
845  NumTombstones = Num;
846  }
847 
848  BucketT *getBuckets() const {
849  return Buckets;
850  }
851 
852  unsigned getNumBuckets() const {
853  return NumBuckets;
854  }
855 
856  bool allocateBuckets(unsigned Num) {
857  NumBuckets = Num;
858  if (NumBuckets == 0) {
859  Buckets = nullptr;
860  return false;
861  }
862 
863  Buckets = static_cast<BucketT*>(operator new(sizeof(BucketT) * NumBuckets));
864  return true;
865  }
866 };
867 
868 template <typename KeyT, typename ValueT, unsigned InlineBuckets = 4,
869  typename KeyInfoT = DenseMapInfo<KeyT>,
870  typename BucketT = llvm::detail::DenseMapPair<KeyT, ValueT>>
871 class SmallDenseMap
872  : public DenseMapBase<
873  SmallDenseMap<KeyT, ValueT, InlineBuckets, KeyInfoT, BucketT>, KeyT,
874  ValueT, KeyInfoT, BucketT> {
875  friend class DenseMapBase<SmallDenseMap, KeyT, ValueT, KeyInfoT, BucketT>;
876 
877  // Lift some types from the dependent base class into this class for
878  // simplicity of referring to them.
879  using BaseT = DenseMapBase<SmallDenseMap, KeyT, ValueT, KeyInfoT, BucketT>;
880 
881  static_assert(isPowerOf2_64(InlineBuckets),
882  "InlineBuckets must be a power of 2.");
883 
884  unsigned Small : 1;
885  unsigned NumEntries : 31;
886  unsigned NumTombstones;
887 
888  struct LargeRep {
889  BucketT *Buckets;
890  unsigned NumBuckets;
891  };
892 
893  /// A "union" of an inline bucket array and the struct representing
894  /// a large bucket. This union will be discriminated by the 'Small' bit.
895  AlignedCharArrayUnion<BucketT[InlineBuckets], LargeRep> storage;
896 
897 public:
898  explicit SmallDenseMap(unsigned NumInitBuckets = 0) {
899  init(NumInitBuckets);
900  }
901 
902  SmallDenseMap(const SmallDenseMap &other) : BaseT() {
903  init(0);
904  copyFrom(other);
905  }
906 
907  SmallDenseMap(SmallDenseMap &&other) : BaseT() {
908  init(0);
909  swap(other);
910  }
911 
912  template<typename InputIt>
913  SmallDenseMap(const InputIt &I, const InputIt &E) {
914  init(NextPowerOf2(std::distance(I, E)));
915  this->insert(I, E);
916  }
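 // Editorial usage sketch: SmallDenseMap behaves like DenseMap but keeps up
 // to InlineBuckets buckets (4 by default, must be a power of two) inside the
 // object itself, switching to a heap allocation only once it grows past that.
 //
 //   llvm::SmallDenseMap<int, int, 8> M;   // 8 inline buckets, no heap yet
 //   for (int i = 0; i != 4; ++i)
 //     M[i] = i * i;                       // still within the inline storage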
917 
918  ~SmallDenseMap() {
919  this->destroyAll();
920  deallocateBuckets();
921  }
922 
923  void swap(SmallDenseMap& RHS) {
924  unsigned TmpNumEntries = RHS.NumEntries;
925  RHS.NumEntries = NumEntries;
926  NumEntries = TmpNumEntries;
927  std::swap(NumTombstones, RHS.NumTombstones);
928 
929  const KeyT EmptyKey = this->getEmptyKey();
930  const KeyT TombstoneKey = this->getTombstoneKey();
931  if (Small && RHS.Small) {
932  // If we're swapping inline bucket arrays, we have to cope with some of
933  // the tricky bits of DenseMap's storage system: the buckets are not
934  // fully initialized. Thus we swap every key, but we may have
935  // a one-directional move of the value.
936  for (unsigned i = 0, e = InlineBuckets; i != e; ++i) {
937  BucketT *LHSB = &getInlineBuckets()[i],
938  *RHSB = &RHS.getInlineBuckets()[i];
939  bool hasLHSValue = (!KeyInfoT::isEqual(LHSB->getFirst(), EmptyKey) &&
940  !KeyInfoT::isEqual(LHSB->getFirst(), TombstoneKey));
941  bool hasRHSValue = (!KeyInfoT::isEqual(RHSB->getFirst(), EmptyKey) &&
942  !KeyInfoT::isEqual(RHSB->getFirst(), TombstoneKey));
943  if (hasLHSValue && hasRHSValue) {
944  // Swap together if we can...
945  std::swap(*LHSB, *RHSB);
946  continue;
947  }
948  // Swap separately and handle any asymmetry.
949  std::swap(LHSB->getFirst(), RHSB->getFirst());
950  if (hasLHSValue) {
951  ::new (&RHSB->getSecond()) ValueT(std::move(LHSB->getSecond()));
952  LHSB->getSecond().~ValueT();
953  } else if (hasRHSValue) {
954  ::new (&LHSB->getSecond()) ValueT(std::move(RHSB->getSecond()));
955  RHSB->getSecond().~ValueT();
956  }
957  }
958  return;
959  }
960  if (!Small && !RHS.Small) {
961  std::swap(getLargeRep()->Buckets, RHS.getLargeRep()->Buckets);
962  std::swap(getLargeRep()->NumBuckets, RHS.getLargeRep()->NumBuckets);
963  return;
964  }
965 
966  SmallDenseMap &SmallSide = Small ? *this : RHS;
967  SmallDenseMap &LargeSide = Small ? RHS : *this;
968 
969  // First stash the large side's rep and move the small side across.
970  LargeRep TmpRep = std::move(*LargeSide.getLargeRep());
971  LargeSide.getLargeRep()->~LargeRep();
972  LargeSide.Small = true;
973  // This is similar to the standard move-from-old-buckets, but the bucket
974  // count hasn't actually rotated in this case. So we have to carefully
975  // move construct the keys and values into their new locations, but there
976  // is no need to re-hash things.
977  for (unsigned i = 0, e = InlineBuckets; i != e; ++i) {
978  BucketT *NewB = &LargeSide.getInlineBuckets()[i],
979  *OldB = &SmallSide.getInlineBuckets()[i];
980  ::new (&NewB->getFirst()) KeyT(std::move(OldB->getFirst()));
981  OldB->getFirst().~KeyT();
982  if (!KeyInfoT::isEqual(NewB->getFirst(), EmptyKey) &&
983  !KeyInfoT::isEqual(NewB->getFirst(), TombstoneKey)) {
984  ::new (&NewB->getSecond()) ValueT(std::move(OldB->getSecond()));
985  OldB->getSecond().~ValueT();
986  }
987  }
988 
989  // The hard part of moving the small buckets across is done, just move
990  // the TmpRep into its new home.
991  SmallSide.Small = false;
992  new (SmallSide.getLargeRep()) LargeRep(std::move(TmpRep));
993  }
994 
995  SmallDenseMap& operator=(const SmallDenseMap& other) {
996  if (&other != this)
997  copyFrom(other);
998  return *this;
999  }
1000 
1001  SmallDenseMap& operator=(SmallDenseMap &&other) {
1002  this->destroyAll();
1003  deallocateBuckets();
1004  init(0);
1005  swap(other);
1006  return *this;
1007  }
1008 
1009  void copyFrom(const SmallDenseMap& other) {
1010  this->destroyAll();
1011  deallocateBuckets();
1012  Small = true;
1013  if (other.getNumBuckets() > InlineBuckets) {
1014  Small = false;
1015  new (getLargeRep()) LargeRep(allocateBuckets(other.getNumBuckets()));
1016  }
1017  this->BaseT::copyFrom(other);
1018  }
1019 
1020  void init(unsigned InitBuckets) {
1021  Small = true;
1022  if (InitBuckets > InlineBuckets) {
1023  Small = false;
1024  new (getLargeRep()) LargeRep(allocateBuckets(InitBuckets));
1025  }
1026  this->BaseT::initEmpty();
1027  }
1028 
1029  void grow(unsigned AtLeast) {
1030  if (AtLeast >= InlineBuckets)
1031  AtLeast = std::max<unsigned>(64, NextPowerOf2(AtLeast-1));
1032 
1033  if (Small) {
1034  if (AtLeast < InlineBuckets)
1035  return; // Nothing to do.
1036 
1037  // First move the inline buckets into a temporary storage.
1038  AlignedCharArrayUnion<BucketT[InlineBuckets], LargeRep> TmpStorage;
1039  BucketT *TmpBegin = reinterpret_cast<BucketT *>(TmpStorage.buffer);
1040  BucketT *TmpEnd = TmpBegin;
1041 
1042  // Loop over the buckets, moving non-empty, non-tombstones into the
1043  // temporary storage. Have the loop move the TmpEnd forward as it goes.
1044  const KeyT EmptyKey = this->getEmptyKey();
1045  const KeyT TombstoneKey = this->getTombstoneKey();
1046  for (BucketT *P = getBuckets(), *E = P + InlineBuckets; P != E; ++P) {
1047  if (!KeyInfoT::isEqual(P->getFirst(), EmptyKey) &&
1048  !KeyInfoT::isEqual(P->getFirst(), TombstoneKey)) {
1049  assert(size_t(TmpEnd - TmpBegin) < InlineBuckets &&
1050  "Too many inline buckets!");
1051  ::new (&TmpEnd->getFirst()) KeyT(std::move(P->getFirst()));
1052  ::new (&TmpEnd->getSecond()) ValueT(std::move(P->getSecond()));
1053  ++TmpEnd;
1054  P->getSecond().~ValueT();
1055  }
1056  P->getFirst().~KeyT();
1057  }
1058 
1059  // Now make this map use the large rep, and move all the entries back
1060  // into it.
1061  Small = false;
1062  new (getLargeRep()) LargeRep(allocateBuckets(AtLeast));
1063  this->moveFromOldBuckets(TmpBegin, TmpEnd);
1064  return;
1065  }
1066 
1067  LargeRep OldRep = std::move(*getLargeRep());
1068  getLargeRep()->~LargeRep();
1069  if (AtLeast <= InlineBuckets) {
1070  Small = true;
1071  } else {
1072  new (getLargeRep()) LargeRep(allocateBuckets(AtLeast));
1073  }
1074 
1075  this->moveFromOldBuckets(OldRep.Buckets, OldRep.Buckets+OldRep.NumBuckets);
1076 
1077  // Free the old table.
1078  operator delete(OldRep.Buckets);
1079  }
1080 
1081  void shrink_and_clear() {
1082  unsigned OldSize = this->size();
1083  this->destroyAll();
1084 
1085  // Reduce the number of buckets.
1086  unsigned NewNumBuckets = 0;
1087  if (OldSize) {
1088  NewNumBuckets = 1 << (Log2_32_Ceil(OldSize) + 1);
1089  if (NewNumBuckets > InlineBuckets && NewNumBuckets < 64u)
1090  NewNumBuckets = 64;
1091  }
1092  if ((Small && NewNumBuckets <= InlineBuckets) ||
1093  (!Small && NewNumBuckets == getLargeRep()->NumBuckets)) {
1094  this->BaseT::initEmpty();
1095  return;
1096  }
1097 
1098  deallocateBuckets();
1099  init(NewNumBuckets);
1100  }
1101 
1102 private:
1103  unsigned getNumEntries() const {
1104  return NumEntries;
1105  }
1106 
1107  void setNumEntries(unsigned Num) {
1108  // NumEntries is hardcoded to be 31 bits wide.
1109  assert(Num < (1U << 31) && "Cannot support more than 1<<31 entries");
1110  NumEntries = Num;
1111  }
1112 
1113  unsigned getNumTombstones() const {
1114  return NumTombstones;
1115  }
1116 
1117  void setNumTombstones(unsigned Num) {
1118  NumTombstones = Num;
1119  }
1120 
1121  const BucketT *getInlineBuckets() const {
1122  assert(Small);
1123  // Note that this cast does not violate aliasing rules as we assert that
1124  // the memory's dynamic type is the small, inline bucket buffer, and the
1125  // 'storage.buffer' static type is 'char *'.
1126  return reinterpret_cast<const BucketT *>(storage.buffer);
1127  }
1128 
1129  BucketT *getInlineBuckets() {
1130  return const_cast<BucketT *>(
1131  const_cast<const SmallDenseMap *>(this)->getInlineBuckets());
1132  }
1133 
1134  const LargeRep *getLargeRep() const {
1135  assert(!Small);
1136  // Note, same rule about aliasing as with getInlineBuckets.
1137  return reinterpret_cast<const LargeRep *>(storage.buffer);
1138  }
1139 
1140  LargeRep *getLargeRep() {
1141  return const_cast<LargeRep *>(
1142  const_cast<const SmallDenseMap *>(this)->getLargeRep());
1143  }
1144 
1145  const BucketT *getBuckets() const {
1146  return Small ? getInlineBuckets() : getLargeRep()->Buckets;
1147  }
1148 
1149  BucketT *getBuckets() {
1150  return const_cast<BucketT *>(
1151  const_cast<const SmallDenseMap *>(this)->getBuckets());
1152  }
1153 
1154  unsigned getNumBuckets() const {
1155  return Small ? InlineBuckets : getLargeRep()->NumBuckets;
1156  }
1157 
1158  void deallocateBuckets() {
1159  if (Small)
1160  return;
1161 
1162  operator delete(getLargeRep()->Buckets);
1163  getLargeRep()->~LargeRep();
1164  }
1165 
1166  LargeRep allocateBuckets(unsigned Num) {
1167  assert(Num > InlineBuckets && "Must allocate more buckets than are inline");
1168  LargeRep Rep = {
1169  static_cast<BucketT*>(operator new(sizeof(BucketT) * Num)), Num
1170  };
1171  return Rep;
1172  }
1173 };
1174 
1175 template <typename KeyT, typename ValueT, typename KeyInfoT, typename Bucket,
1176  bool IsConst>
1177 class DenseMapIterator : DebugEpochBase::HandleBase {
1178  friend class DenseMapIterator<KeyT, ValueT, KeyInfoT, Bucket, true>;
1179  friend class DenseMapIterator<KeyT, ValueT, KeyInfoT, Bucket, false>;
1180 
1181  using ConstIterator = DenseMapIterator<KeyT, ValueT, KeyInfoT, Bucket, true>;
1182 
1183 public:
1184  using difference_type = ptrdiff_t;
1185  using value_type =
1186  typename std::conditional<IsConst, const Bucket, Bucket>::type;
1187  using pointer = value_type *;
1188  using reference = value_type &;
1189  using iterator_category = std::forward_iterator_tag;
1190 
1191 private:
1192  pointer Ptr = nullptr;
1193  pointer End = nullptr;
1194 
1195 public:
1196  DenseMapIterator() = default;
1197 
1198  DenseMapIterator(pointer Pos, pointer E, const DebugEpochBase &Epoch,
1199  bool NoAdvance = false)
1200  : DebugEpochBase::HandleBase(&Epoch), Ptr(Pos), End(E) {
1201  assert(isHandleInSync() && "invalid construction!");
1202 
1203  if (NoAdvance) return;
1204  if (shouldReverseIterate<KeyT>()) {
1205  RetreatPastEmptyBuckets();
1206  return;
1207  }
1208  AdvancePastEmptyBuckets();
1209  }
1210 
1211  // Converting ctor from non-const iterators to const iterators. SFINAE'd out
1212  // for const iterator destinations so it doesn't end up as a user defined copy
1213  // constructor.
1214  template <bool IsConstSrc,
1215  typename = typename std::enable_if<!IsConstSrc && IsConst>::type>
1216  DenseMapIterator(
1217  const DenseMapIterator<KeyT, ValueT, KeyInfoT, Bucket, IsConstSrc> &I)
1218  : DebugEpochBase::HandleBase(I), Ptr(I.Ptr), End(I.End) {}
1219 
1220  reference operator*() const {
1221  assert(isHandleInSync() && "invalid iterator access!");
1222  if (shouldReverseIterate<KeyT>())
1223  return Ptr[-1];
1224  return *Ptr;
1225  }
1226  pointer operator->() const {
1227  assert(isHandleInSync() && "invalid iterator access!");
1228  if (shouldReverseIterate<KeyT>())
1229  return &(Ptr[-1]);
1230  return Ptr;
1231  }
1232 
1233  bool operator==(const ConstIterator &RHS) const {
1234  assert((!Ptr || isHandleInSync()) && "handle not in sync!");
1235  assert((!RHS.Ptr || RHS.isHandleInSync()) && "handle not in sync!");
1236  assert(getEpochAddress() == RHS.getEpochAddress() &&
1237  "comparing incomparable iterators!");
1238  return Ptr == RHS.Ptr;
1239  }
1240  bool operator!=(const ConstIterator &RHS) const {
1241  assert((!Ptr || isHandleInSync()) && "handle not in sync!");
1242  assert((!RHS.Ptr || RHS.isHandleInSync()) && "handle not in sync!");
1243  assert(getEpochAddress() == RHS.getEpochAddress() &&
1244  "comparing incomparable iterators!");
1245  return Ptr != RHS.Ptr;
1246  }
1247 
1248  inline DenseMapIterator& operator++() { // Preincrement
1249  assert(isHandleInSync() && "invalid iterator access!");
1250  if (shouldReverseIterate<KeyT>()) {
1251  --Ptr;
1252  RetreatPastEmptyBuckets();
1253  return *this;
1254  }
1255  ++Ptr;
1256  AdvancePastEmptyBuckets();
1257  return *this;
1258  }
1259  DenseMapIterator operator++(int) { // Postincrement
1260  assert(isHandleInSync() && "invalid iterator access!");
1261  DenseMapIterator tmp = *this; ++*this; return tmp;
1262  }
1263 
1264 private:
1265  void AdvancePastEmptyBuckets() {
1266  assert(Ptr <= End);
1267  const KeyT Empty = KeyInfoT::getEmptyKey();
1268  const KeyT Tombstone = KeyInfoT::getTombstoneKey();
1269 
1270  while (Ptr != End && (KeyInfoT::isEqual(Ptr->getFirst(), Empty) ||
1271  KeyInfoT::isEqual(Ptr->getFirst(), Tombstone)))
1272  ++Ptr;
1273  }
1274 
1275  void RetreatPastEmptyBuckets() {
1276  assert(Ptr >= End);
1277  const KeyT Empty = KeyInfoT::getEmptyKey();
1278  const KeyT Tombstone = KeyInfoT::getTombstoneKey();
1279 
1280  while (Ptr != End && (KeyInfoT::isEqual(Ptr[-1].getFirst(), Empty) ||
1281  KeyInfoT::isEqual(Ptr[-1].getFirst(), Tombstone)))
1282  --Ptr;
1283  }
1284 };
1285 
1286 template <typename KeyT, typename ValueT, typename KeyInfoT>
1287 inline size_t capacity_in_bytes(const DenseMap<KeyT, ValueT, KeyInfoT> &X) {
1288  return X.getMemorySize();
1289 }
1290 
1291 } // end namespace llvm
1292 
1293 #endif // LLVM_ADT_DENSEMAP_H