Coverage Summary for Class: CompactLinkedHashMap (com.google.common.collect)

Class                               Method, %    Line, %
CompactLinkedHashMap                0% (0/24)    0% (0/56)
CompactLinkedHashMap$1EntrySetImpl  0% (0/2)     0% (0/2)
CompactLinkedHashMap$1KeySetImpl    0% (0/4)     0% (0/4)
CompactLinkedHashMap$1ValuesImpl    0% (0/4)     0% (0/4)
Total                               0% (0/34)    0% (0/66)


/*
 * Copyright (C) 2012 The Guava Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.common.collect;

import com.google.common.annotations.GwtIncompatible;
import com.google.common.annotations.VisibleForTesting;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import com.google.j2objc.annotations.WeakOuter;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import java.util.Spliterator;
import java.util.Spliterators;
import org.checkerframework.checker.nullness.qual.Nullable;

/**
 * CompactLinkedHashMap is an implementation of a Map with insertion or LRU iteration order,
 * maintained with a doubly linked list through the entries. All optional operations (put and
 * remove) are supported. Null keys and values are supported.
 *
 * <p>{@code containsKey(k)}, {@code put(k, v)} and {@code remove(k)} are all (expected and
 * amortized) constant time operations. Expected in the hashtable sense (depends on the hash
 * function doing a good job of distributing the elements to the buckets to a distribution not far
 * from uniform), and amortized since some operations can trigger a hash table resize.
 *
 * <p>As compared with {@link java.util.LinkedHashMap}, this structure places significantly reduced
 * load on the garbage collector by only using a constant number of internal objects.
 *
 * <p>This class should not be assumed to be universally superior to {@code
 * java.util.LinkedHashMap}. Generally speaking, this class reduces object allocation and memory
 * consumption at the price of moderately increased constant factors of CPU. Only use this class
 * when there is a specific reason to prioritize memory over CPU.
 *
 * @author Louis Wasserman
 */
@GwtIncompatible // not worth using in GWT for now
class CompactLinkedHashMap<K, V> extends CompactHashMap<K, V> {
  // TODO(lowasser): implement removeEldestEntry so this can be used as a drop-in replacement

  /** Creates an empty {@code CompactLinkedHashMap} instance. */
  public static <K, V> CompactLinkedHashMap<K, V> create() {
    return new CompactLinkedHashMap<>();
  }

  /**
   * Creates a {@code CompactLinkedHashMap} instance, with a high enough "initial capacity" that it
   * <i>should</i> hold {@code expectedSize} elements without rebuilding internal data structures.
   *
   * @param expectedSize the number of elements you expect to add to the returned map
   * @return a new, empty {@code CompactLinkedHashMap} with enough capacity to hold {@code
   *     expectedSize} elements without resizing
   * @throws IllegalArgumentException if {@code expectedSize} is negative
   */
  public static <K, V> CompactLinkedHashMap<K, V> createWithExpectedSize(int expectedSize) {
    return new CompactLinkedHashMap<>(expectedSize);
  }

  private static final int ENDPOINT = -2;

  /**
   * Contains the link pointers corresponding with the entries, in the range of [0, size()). The
   * high 32 bits of each long is the "prev" pointer, whereas the low 32 bits is the "succ" pointer
   * (pointing to the next entry in the linked list). The pointers in [size(), entries.length) are
   * all "null" (UNSET).
   *
   * <p>A node with "prev" pointer equal to {@code ENDPOINT} is the first node in the linked list,
   * and a node with "next" pointer equal to {@code ENDPOINT} is the last node.
   */
  @VisibleForTesting transient long @Nullable [] links;

  /** Pointer to the first node in the linked list, or {@code ENDPOINT} if there are no entries. */
  private transient int firstEntry;

  /** Pointer to the last node in the linked list, or {@code ENDPOINT} if there are no entries. */
  private transient int lastEntry;

  private final boolean accessOrder;

  CompactLinkedHashMap() {
    this(CompactHashing.DEFAULT_SIZE);
  }

  CompactLinkedHashMap(int expectedSize) {
    this(expectedSize, false);
  }

  CompactLinkedHashMap(int expectedSize, boolean accessOrder) {
    super(expectedSize);
    this.accessOrder = accessOrder;
  }

  @Override
  void init(int expectedSize) {
    super.init(expectedSize);
    this.firstEntry = ENDPOINT;
    this.lastEntry = ENDPOINT;
  }

  @Override
  int allocArrays() {
    int expectedSize = super.allocArrays();
    this.links = new long[expectedSize];
    return expectedSize;
  }

  @Override
  Map<K, V> createHashFloodingResistantDelegate(int tableSize) {
    return new LinkedHashMap<K, V>(tableSize, 1.0f, accessOrder);
  }

  @Override
  @CanIgnoreReturnValue
  Map<K, V> convertToHashFloodingResistantImplementation() {
    Map<K, V> result = super.convertToHashFloodingResistantImplementation();
    links = null;
    return result;
  }

  private int getPredecessor(int entry) {
    return ((int) (links[entry] >>> 32)) - 1;
  }

  @Override
  int getSuccessor(int entry) {
    return ((int) links[entry]) - 1;
  }

  private void setSuccessor(int entry, int succ) {
    long succMask = (~0L) >>> 32;
    links[entry] = (links[entry] & ~succMask) | ((succ + 1) & succMask);
  }

  private void setPredecessor(int entry, int pred) {
    long predMask = ~0L << 32;
    links[entry] = (links[entry] & ~predMask) | ((long) (pred + 1) << 32);
  }

  private void setSucceeds(int pred, int succ) {
    if (pred == ENDPOINT) {
      firstEntry = succ;
    } else {
      setSuccessor(pred, succ);
    }

    if (succ == ENDPOINT) {
      lastEntry = pred;
    } else {
      setPredecessor(succ, pred);
    }
  }

  @Override
  void insertEntry(int entryIndex, @Nullable K key, @Nullable V value, int hash, int mask) {
    super.insertEntry(entryIndex, key, value, hash, mask);
    setSucceeds(lastEntry, entryIndex);
    setSucceeds(entryIndex, ENDPOINT);
  }

  @Override
  void accessEntry(int index) {
    if (accessOrder) {
      // delete from previous position...
      setSucceeds(getPredecessor(index), getSuccessor(index));
      // ...and insert at the end.
      setSucceeds(lastEntry, index);
      setSucceeds(index, ENDPOINT);
      incrementModCount();
    }
  }

  @Override
  void moveLastEntry(int dstIndex, int mask) {
    int srcIndex = size() - 1;
    super.moveLastEntry(dstIndex, mask);

    setSucceeds(getPredecessor(dstIndex), getSuccessor(dstIndex));
    if (dstIndex < srcIndex) {
      setSucceeds(getPredecessor(srcIndex), dstIndex);
      setSucceeds(dstIndex, getSuccessor(srcIndex));
    }
    links[srcIndex] = 0;
  }

  @Override
  void resizeEntries(int newCapacity) {
    super.resizeEntries(newCapacity);
    links = Arrays.copyOf(links, newCapacity);
  }

  @Override
  int firstEntryIndex() {
    return firstEntry;
  }

  @Override
  int adjustAfterRemove(int indexBeforeRemove, int indexRemoved) {
    return (indexBeforeRemove >= size()) ? indexRemoved : indexBeforeRemove;
  }

  @Override
  Set<Entry<K, V>> createEntrySet() {
    @WeakOuter
    class EntrySetImpl extends EntrySetView {
      @Override
      public Spliterator<Entry<K, V>> spliterator() {
        return Spliterators.spliterator(this, Spliterator.ORDERED | Spliterator.DISTINCT);
      }
    }
    return new EntrySetImpl();
  }

  @Override
  Set<K> createKeySet() {
    @WeakOuter
    class KeySetImpl extends KeySetView {
      @Override
      public Object[] toArray() {
        return ObjectArrays.toArrayImpl(this);
      }

      @Override
      public <T> T[] toArray(T[] a) {
        return ObjectArrays.toArrayImpl(this, a);
      }

      @Override
      public Spliterator<K> spliterator() {
        return Spliterators.spliterator(this, Spliterator.ORDERED | Spliterator.DISTINCT);
      }
    }
    return new KeySetImpl();
  }

  @Override
  Collection<V> createValues() {
    @WeakOuter
    class ValuesImpl extends ValuesView {
      @Override
      public Object[] toArray() {
        return ObjectArrays.toArrayImpl(this);
      }

      @Override
      public <T> T[] toArray(T[] a) {
        return ObjectArrays.toArrayImpl(this, a);
      }

      @Override
      public Spliterator<V> spliterator() {
        return Spliterators.spliterator(this, Spliterator.ORDERED);
      }
    }
    return new ValuesImpl();
  }

  @Override
  public void clear() {
    if (needsAllocArrays()) {
      return;
    }
    this.firstEntry = ENDPOINT;
    this.lastEntry = ENDPOINT;
    if (links != null) {
      Arrays.fill(links, 0, size(), 0);
    }
    super.clear();
  }
}
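
Every method above is reported as uncovered. The sketch below is not part of the Guava source; it is a minimal, hypothetical example of code that would exercise both iteration modes. It assumes it is compiled in the same package (com.google.common.collect) so the package-private access-order constructor CompactLinkedHashMap(int, boolean) is visible, that the class name CompactLinkedHashMapExample is invented for illustration, and that the inherited CompactHashMap.get(...) calls accessEntry(...) for keys it finds, which is not shown in this listing.

package com.google.common.collect;

// Hypothetical usage sketch; not part of the original source file.
final class CompactLinkedHashMapExample {
  public static void main(String[] args) {
    // Insertion-order map via the public factory method.
    CompactLinkedHashMap<String, Integer> byInsertion = CompactLinkedHashMap.create();
    byInsertion.put("a", 1);
    byInsertion.put("b", 2);
    byInsertion.put("c", 3);
    System.out.println(byInsertion.keySet()); // expected: [a, b, c] (insertion order)

    // Access-order (LRU) map via the package-private constructor; the expected size 8 is arbitrary.
    CompactLinkedHashMap<String, Integer> lru = new CompactLinkedHashMap<>(8, true);
    lru.put("a", 1);
    lru.put("b", 2);
    lru.put("c", 3);
    lru.get("a"); // accessEntry(...) relinks "a" to the tail of the doubly linked list
    System.out.println(lru.keySet()); // expected: [b, c, a] (least recently used first)
  }
}

For reference, each element of the links array packs both pointers of the doubly linked list into one long: for an entry with prev = 3 and succ = 7, the high 32 bits hold prev + 1 = 4 and the low 32 bits hold succ + 1 = 8. The one-based offset keeps 0 free for cleared slots, which is why moveLastEntry and clear can simply write 0 into unused positions.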