in src/main/java/org/apache/commons/collections4/map/ConcurrentReferenceHashMap.java [945:1001]
/**
 * Doubles this segment's table capacity and transfers all live entries into the
 * new table, without disturbing concurrent readers that may still be traversing
 * the old table (old bins are never nulled out; entry chains are structure-shared
 * where possible).
 * <p>
 * Entries whose keys have been reclaimed by the garbage collector (i.e.
 * {@code key() == null}) are dropped during the transfer and counted, so the
 * caller can adjust the segment's size accordingly.
 *
 * @return the number of entries dropped because their keys were GC-reclaimed
 *         during the transfer; {@code 0} if the table is already at
 *         {@code MAXIMUM_CAPACITY} and no rehash was performed
 */
int rehash() {
final HashEntry<K, V>[] oldTable = table;
final int oldCapacity = oldTable.length;
// Already at the cap: nothing to do, nothing reclaimed.
if (oldCapacity >= MAXIMUM_CAPACITY) {
return 0;
}
//
// Reclassify nodes in each list to new Map. Because we are using power-of-two expansion, the elements from each bin must either stay at the same
// index, or move with a power of two offset. We eliminate unnecessary node creation by catching cases where old nodes can be reused because their
// next fields won't change. Statistically, at the default threshold, only about one-sixth of them need cloning when a table doubles. The nodes they
// replace will be garbage collectable as soon as they are no longer referenced by any reader thread that may be in the midst of traversing table
// right now.
//
final HashEntry<K, V>[] newTable = HashEntry.newArray(oldCapacity << 1);
threshold = (int) (newTable.length * loadFactor);
// Power-of-two length, so (hash & sizeMask) is the new bucket index.
final int sizeMask = newTable.length - 1;
// Count of entries dropped because their keys were GC-cleared.
int reduce = 0;
for (int i = 0; i < oldCapacity; i++) {
// We need to guarantee that any existing reads of old Map can
// proceed. So we cannot yet null out each bin.
final HashEntry<K, V> e = oldTable[i];
if (e != null) {
final HashEntry<K, V> next = e.next;
final int idx = e.hash & sizeMask;
// Single node on list
if (next == null) {
newTable[idx] = e;
} else {
// Reuse trailing consecutive sequence at same slot
// Find the longest suffix of the chain whose entries all map to the
// same new bucket; that whole suffix can be moved as-is (its next
// links already point at each other, so no cloning is needed).
HashEntry<K, V> lastRun = e;
int lastIdx = idx;
for (HashEntry<K, V> last = next; last != null; last = last.next) {
final int k = last.hash & sizeMask;
if (k != lastIdx) {
lastIdx = k;
lastRun = last;
}
}
newTable[lastIdx] = lastRun;
// Clone all remaining nodes
// Entries before lastRun must be cloned (their next fields would
// otherwise point into the wrong bucket's chain).
for (HashEntry<K, V> p = e; p != lastRun; p = p.next) {
// Skip GC'd weak refs
// A null key means the reference was cleared by the GC; drop the
// entry and count it so the caller can shrink the segment's size.
// NOTE(review): cleared entries inside the reused lastRun suffix are
// NOT counted here — presumably they are purged elsewhere; confirm.
final K key = p.key();
if (key == null) {
reduce++;
continue;
}
final int k = p.hash & sizeMask;
final HashEntry<K, V> n = newTable[k];
// Prepend a fresh clone of p onto its new bucket's chain.
newTable[k] = newHashEntry(key, p.hash, n, p.value());
}
}
}
}
// Publish the new table only after it is fully populated.
table = newTable;
return reduce;
}