Java HashMap Source Code Analysis


HashMap is an important class in the Java collections framework. It is backed by a hash table and provides fast insertion, deletion, and lookup.

Here are some key points:

  1. Serialization: the HashMap class implements the Serializable interface, which means instances can be serialized and deserialized.

  2. Initial capacity and load factor: HashMap has an initial capacity (16 by default) and a load factor (0.75 by default); these two parameters determine when the hash table is resized.

  3. Hash function: HashMap applies a hash function to each key's hashCode and maps the result to an index in the bucket array.

  4. Linked lists and red-black trees: when several entries land in the same bucket, they form a linked list. Once the list grows beyond a threshold (8 by default), it is converted into a red-black tree to speed up lookups.

  5. Dynamic resizing: when the number of entries exceeds the threshold, the table is resized; every entry is re-indexed and redistributed into the new bucket array.

  6. Fail-fast iterators: the iterators returned by HashMap can throw ConcurrentModificationException when the map is modified concurrently, so thread safety needs special attention while iterating.

  7. Methods: HashMap offers a rich API, including put, get, remove, and clear, for manipulating the entries in the map.

  8. Inner classes: HashMap relies on several inner classes to implement its behavior, such as Node, TreeNode, and HashIterator.
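To ground these points before diving into the source, here is a minimal usage sketch (the class name and data are made up for illustration); it exercises the default constructor, which uses capacity 16 and load factor 0.75, plus the basic put/get/remove operations:

import java.util.HashMap;
import java.util.Map;

public class HashMapBasicsDemo {
    public static void main(String[] args) {
        Map<String, Integer> ages = new HashMap<>();        // default capacity 16, load factor 0.75
        ages.put("alice", 30);
        ages.put("bob", 25);
        ages.put("alice", 31);                               // put overwrites and returns the old value (30)

        System.out.println(ages.get("alice"));               // 31
        System.out.println(ages.getOrDefault("carol", -1));  // -1, key absent
        ages.remove("bob");
        System.out.println(ages.containsKey("bob"));          // false
        System.out.println(ages.size());                      // 1
    }
}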

HashMap source code

//
// Source code recreated from a .class file by IntelliJ IDEA
// (powered by Fernflower decompiler)
//

package java.util;

import java.io.IOException;
import java.io.InvalidObjectException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.Map.Entry;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.function.Function;public class HashMap<K, V> extends AbstractMap<K, V> implements Map<K, V>, Cloneable, Serializable {private static final long serialVersionUID = 362498820763181265L;static final int DEFAULT_INITIAL_CAPACITY = 16;static final int MAXIMUM_CAPACITY = 1073741824;static final float DEFAULT_LOAD_FACTOR = 0.75F;static final int TREEIFY_THRESHOLD = 8;static final int UNTREEIFY_THRESHOLD = 6;static final int MIN_TREEIFY_CAPACITY = 64;transient HashMap.Node<K, V>[] table;transient Set<Entry<K, V>> entrySet;transient int size;transient int modCount;int threshold;final float loadFactor;static final int hash(Object var0) {int var1;return var0 == null ? 0 : (var1 = var0.hashCode()) ^ var1 >>> 16;}static Class<?> comparableClassFor(Object var0) {if (var0 instanceof Comparable) {Class var1;if ((var1 = var0.getClass()) == String.class) {return var1;}Type[] var2;if ((var2 = var1.getGenericInterfaces()) != null) {for(int var6 = 0; var6 < var2.length; ++var6) {Type[] var3;Type var4;ParameterizedType var5;if ((var4 = var2[var6]) instanceof ParameterizedType && (var5 = (ParameterizedType)var4).getRawType() == Comparable.class && (var3 = var5.getActualTypeArguments()) != null && var3.length == 1 && var3[0] == var1) {return var1;}}}}return null;}static int compareComparables(Class<?> var0, Object var1, Object var2) {return var2 != null && var2.getClass() == var0 ? ((Comparable)var1).compareTo(var2) : 0;}static final int tableSizeFor(int var0) {int var1 = var0 - 1;var1 |= var1 >>> 1;var1 |= var1 >>> 2;var1 |= var1 >>> 4;var1 |= var1 >>> 8;var1 |= var1 >>> 16;return var1 < 0 ? 1 : (var1 >= 1073741824 ? 1073741824 : var1 + 1);}public HashMap(int var1, float var2) {if (var1 < 0) {throw new IllegalArgumentException("Illegal initial capacity: " + var1);} else {if (var1 > 1073741824) {var1 = 1073741824;}if (var2 > 0.0F && !Float.isNaN(var2)) {this.loadFactor = var2;this.threshold = tableSizeFor(var1);} else {throw new IllegalArgumentException("Illegal load factor: " + var2);}}}public HashMap(int var1) {this(var1, 0.75F);}public HashMap() {this.loadFactor = 0.75F;}public HashMap(Map<? extends K, ? extends V> var1) {this.loadFactor = 0.75F;this.putMapEntries(var1, false);}final void putMapEntries(Map<? extends K, ? extends V> var1, boolean var2) {int var3 = var1.size();if (var3 > 0) {if (this.table == null) {float var4 = (float)var3 / this.loadFactor + 1.0F;int var5 = var4 < 1.07374182E9F ? (int)var4 : 1073741824;if (var5 > this.threshold) {this.threshold = tableSizeFor(var5);}} else if (var3 > this.threshold) {this.resize();}Iterator var8 = var1.entrySet().iterator();while(var8.hasNext()) {Entry var9 = (Entry)var8.next();Object var6 = var9.getKey();Object var7 = var9.getValue();this.putVal(hash(var6), var6, var7, false, var2);}}}public int size() {return this.size;}public boolean isEmpty() {return this.size == 0;}public V get(Object var1) {HashMap.Node var2;return (var2 = this.getNode(hash(var1), var1)) == null ? 
null : var2.value;}final HashMap.Node<K, V> getNode(int var1, Object var2) {HashMap.Node[] var3;HashMap.Node var4;int var6;if ((var3 = this.table) != null && (var6 = var3.length) > 0 && (var4 = var3[var6 - 1 & var1]) != null) {Object var7;if (var4.hash == var1 && ((var7 = var4.key) == var2 || var2 != null && var2.equals(var7))) {return var4;}HashMap.Node var5;if ((var5 = var4.next) != null) {if (var4 instanceof HashMap.TreeNode) {return ((HashMap.TreeNode)var4).getTreeNode(var1, var2);}do {if (var5.hash == var1 && ((var7 = var5.key) == var2 || var2 != null && var2.equals(var7))) {return var5;}} while((var5 = var5.next) != null);}}return null;}public boolean containsKey(Object var1) {return this.getNode(hash(var1), var1) != null;}public V put(K var1, V var2) {return this.putVal(hash(var1), var1, var2, false, true);}final V putVal(int var1, K var2, V var3, boolean var4, boolean var5) {HashMap.Node[] var6;int var8;if ((var6 = this.table) == null || (var8 = var6.length) == 0) {var8 = (var6 = this.resize()).length;}Object var7;int var9;if ((var7 = var6[var9 = var8 - 1 & var1]) == null) {var6[var9] = this.newNode(var1, var2, var3, (HashMap.Node)null);} else {Object var10;Object var11;if (((HashMap.Node)var7).hash == var1 && ((var11 = ((HashMap.Node)var7).key) == var2 || var2 != null && var2.equals(var11))) {var10 = var7;} else if (var7 instanceof HashMap.TreeNode) {var10 = ((HashMap.TreeNode)var7).putTreeVal(this, var6, var1, var2, var3);} else {int var12 = 0;while(true) {if ((var10 = ((HashMap.Node)var7).next) == null) {((HashMap.Node)var7).next = this.newNode(var1, var2, var3, (HashMap.Node)null);if (var12 >= 7) {this.treeifyBin(var6, var1);}break;}if (((HashMap.Node)var10).hash == var1 && ((var11 = ((HashMap.Node)var10).key) == var2 || var2 != null && var2.equals(var11))) {break;}var7 = var10;++var12;}}if (var10 != null) {Object var13 = ((HashMap.Node)var10).value;if (!var4 || var13 == null) {((HashMap.Node)var10).value = var3;}this.afterNodeAccess((HashMap.Node)var10);return var13;}}++this.modCount;if (++this.size > this.threshold) {this.resize();}this.afterNodeInsertion(var5);return null;}final HashMap.Node<K, V>[] resize() {HashMap.Node[] var1 = this.table;int var2 = var1 == null ? 0 : var1.length;int var3 = this.threshold;int var5 = 0;int var4;if (var2 > 0) {if (var2 >= 1073741824) {this.threshold = 2147483647;return var1;}if ((var4 = var2 << 1) < 1073741824 && var2 >= 16) {var5 = var3 << 1;}} else if (var3 > 0) {var4 = var3;} else {var4 = 16;var5 = 12;}if (var5 == 0) {float var6 = (float)var4 * this.loadFactor;var5 = var4 < 1073741824 && var6 < 1.07374182E9F ? 
(int)var6 : 2147483647;}this.threshold = var5;HashMap.Node[] var14 = (HashMap.Node[])(new HashMap.Node[var4]);this.table = var14;if (var1 != null) {for(int var7 = 0; var7 < var2; ++var7) {HashMap.Node var8;if ((var8 = var1[var7]) != null) {var1[var7] = null;if (var8.next == null) {var14[var8.hash & var4 - 1] = var8;} else if (var8 instanceof HashMap.TreeNode) {((HashMap.TreeNode)var8).split(this, var14, var7, var2);} else {HashMap.Node var9 = null;HashMap.Node var10 = null;HashMap.Node var11 = null;HashMap.Node var12 = null;HashMap.Node var13;do {var13 = var8.next;if ((var8.hash & var2) == 0) {if (var10 == null) {var9 = var8;} else {var10.next = var8;}var10 = var8;} else {if (var12 == null) {var11 = var8;} else {var12.next = var8;}var12 = var8;}var8 = var13;} while(var13 != null);if (var10 != null) {var10.next = null;var14[var7] = var9;}if (var12 != null) {var12.next = null;var14[var7 + var2] = var11;}}}}}return var14;}final void treeifyBin(HashMap.Node<K, V>[] var1, int var2) {int var3;if (var1 != null && (var3 = var1.length) >= 64) {int var4;HashMap.Node var5;if ((var5 = var1[var4 = var3 - 1 & var2]) != null) {HashMap.TreeNode var6 = null;HashMap.TreeNode var7 = null;do {HashMap.TreeNode var8 = this.replacementTreeNode(var5, (HashMap.Node)null);if (var7 == null) {var6 = var8;} else {var8.prev = var7;var7.next = var8;}var7 = var8;} while((var5 = var5.next) != null);if ((var1[var4] = var6) != null) {var6.treeify(var1);}}} else {this.resize();}}public void putAll(Map<? extends K, ? extends V> var1) {this.putMapEntries(var1, true);}public V remove(Object var1) {HashMap.Node var2;return (var2 = this.removeNode(hash(var1), var1, (Object)null, false, true)) == null ? null : var2.value;}final HashMap.Node<K, V> removeNode(int var1, Object var2, Object var3, boolean var4, boolean var5) {HashMap.Node[] var6;HashMap.Node var7;int var8;int var9;if ((var6 = this.table) != null && (var8 = var6.length) > 0 && (var7 = var6[var9 = var8 - 1 & var1]) != null) {Object var10 = null;Object var12;if (var7.hash == var1 && ((var12 = var7.key) == var2 || var2 != null && var2.equals(var12))) {var10 = var7;} else {HashMap.Node var11;if ((var11 = var7.next) != null) {if (var7 instanceof HashMap.TreeNode) {var10 = ((HashMap.TreeNode)var7).getTreeNode(var1, var2);} else {label88: {while(var11.hash != var1 || (var12 = var11.key) != var2 && (var2 == null || !var2.equals(var12))) {var7 = var11;if ((var11 = var11.next) == null) {break label88;}}var10 = var11;}}}}Object var13;if (var10 != null && (!var4 || (var13 = ((HashMap.Node)var10).value) == var3 || var3 != null && var3.equals(var13))) {if (var10 instanceof HashMap.TreeNode) {((HashMap.TreeNode)var10).removeTreeNode(this, var6, var5);} else if (var10 == var7) {var6[var9] = ((HashMap.Node)var10).next;} else {var7.next = ((HashMap.Node)var10).next;}++this.modCount;--this.size;this.afterNodeRemoval((HashMap.Node)var10);return (HashMap.Node)var10;}}return null;}public void clear() {++this.modCount;HashMap.Node[] var1;if ((var1 = this.table) != null && this.size > 0) {this.size = 0;for(int var2 = 0; var2 < var1.length; ++var2) {var1[var2] = null;}}}public boolean containsValue(Object var1) {HashMap.Node[] var2;if ((var2 = this.table) != null && this.size > 0) {for(int var4 = 0; var4 < var2.length; ++var4) {for(HashMap.Node var5 = var2[var4]; var5 != null; var5 = var5.next) {Object var3;if ((var3 = var5.value) == var1 || var1 != null && var1.equals(var3)) {return true;}}}}return false;}public Set<K> keySet() {Set var1;return (var1 = this.keySet) == null ? 
(this.keySet = new HashMap.KeySet()) : var1;}public Collection<V> values() {Collection var1;return (var1 = this.values) == null ? (this.values = new HashMap.Values()) : var1;}public Set<Entry<K, V>> entrySet() {Set var1;return (var1 = this.entrySet) == null ? (this.entrySet = new HashMap.EntrySet()) : var1;}public V getOrDefault(Object var1, V var2) {HashMap.Node var3;return (var3 = this.getNode(hash(var1), var1)) == null ? var2 : var3.value;}public V putIfAbsent(K var1, V var2) {return this.putVal(hash(var1), var1, var2, true, true);}public boolean remove(Object var1, Object var2) {return this.removeNode(hash(var1), var1, var2, true, true) != null;}public boolean replace(K var1, V var2, V var3) {HashMap.Node var4;Object var5;if ((var4 = this.getNode(hash(var1), var1)) == null || (var5 = var4.value) != var2 && (var5 == null || !var5.equals(var2))) {return false;} else {var4.value = var3;this.afterNodeAccess(var4);return true;}}public V replace(K var1, V var2) {HashMap.Node var3;if ((var3 = this.getNode(hash(var1), var1)) != null) {Object var4 = var3.value;var3.value = var2;this.afterNodeAccess(var3);return var4;} else {return null;}}public V computeIfAbsent(K var1, Function<? super K, ? extends V> var2) {if (var2 == null) {throw new NullPointerException();} else {int var3 = hash(var1);int var8 = 0;HashMap.TreeNode var9 = null;Object var10 = null;HashMap.Node[] var4;int var6;if (this.size > this.threshold || (var4 = this.table) == null || (var6 = var4.length) == 0) {var6 = (var4 = this.resize()).length;}HashMap.Node var5;int var7;Object var13;if ((var5 = var4[var7 = var6 - 1 & var3]) != null) {if (var5 instanceof HashMap.TreeNode) {var10 = (var9 = (HashMap.TreeNode)var5).getTreeNode(var3, var1);} else {label69: {HashMap.Node var11 = var5;Object var12;while(var11.hash != var3 || (var12 = var11.key) != var1 && (var1 == null || !var1.equals(var12))) {++var8;if ((var11 = var11.next) == null) {break label69;}}var10 = var11;}}if (var10 != null && (var13 = ((HashMap.Node)var10).value) != null) {this.afterNodeAccess((HashMap.Node)var10);return var13;}}var13 = var2.apply(var1);if (var13 == null) {return null;} else if (var10 != null) {((HashMap.Node)var10).value = var13;this.afterNodeAccess((HashMap.Node)var10);return var13;} else {if (var9 != null) {var9.putTreeVal(this, var4, var3, var1, var13);} else {var4[var7] = this.newNode(var3, var1, var13, var5);if (var8 >= 7) {this.treeifyBin(var4, var3);}}++this.modCount;++this.size;this.afterNodeInsertion(true);return var13;}}}public V computeIfPresent(K var1, BiFunction<? super K, ? super V, ? extends V> var2) {if (var2 == null) {throw new NullPointerException();} else {int var5 = hash(var1);HashMap.Node var3;Object var4;if ((var3 = this.getNode(var5, var1)) != null && (var4 = var3.value) != null) {Object var6 = var2.apply(var1, var4);if (var6 != null) {var3.value = var6;this.afterNodeAccess(var3);return var6;}this.removeNode(var5, var1, (Object)null, false, true);}return null;}}public V compute(K var1, BiFunction<? super K, ? super V, ? 
extends V> var2) {if (var2 == null) {throw new NullPointerException();} else {int var3 = hash(var1);int var8 = 0;HashMap.TreeNode var9 = null;Object var10 = null;HashMap.Node[] var4;int var6;if (this.size > this.threshold || (var4 = this.table) == null || (var6 = var4.length) == 0) {var6 = (var4 = this.resize()).length;}HashMap.Node var5;int var7;Object var12;if ((var5 = var4[var7 = var6 - 1 & var3]) != null) {if (var5 instanceof HashMap.TreeNode) {var10 = (var9 = (HashMap.TreeNode)var5).getTreeNode(var3, var1);} else {label67: {HashMap.Node var11 = var5;while(var11.hash != var3 || (var12 = var11.key) != var1 && (var1 == null || !var1.equals(var12))) {++var8;if ((var11 = var11.next) == null) {break label67;}}var10 = var11;}}}Object var13 = var10 == null ? null : ((HashMap.Node)var10).value;var12 = var2.apply(var1, var13);if (var10 != null) {if (var12 != null) {((HashMap.Node)var10).value = var12;this.afterNodeAccess((HashMap.Node)var10);} else {this.removeNode(var3, var1, (Object)null, false, true);}} else if (var12 != null) {if (var9 != null) {var9.putTreeVal(this, var4, var3, var1, var12);} else {var4[var7] = this.newNode(var3, var1, var12, var5);if (var8 >= 7) {this.treeifyBin(var4, var3);}}++this.modCount;++this.size;this.afterNodeInsertion(true);}return var12;}}public V merge(K var1, V var2, BiFunction<? super V, ? super V, ? extends V> var3) {if (var2 == null) {throw new NullPointerException();} else if (var3 == null) {throw new NullPointerException();} else {int var4 = hash(var1);int var9 = 0;HashMap.TreeNode var10 = null;Object var11 = null;HashMap.Node[] var5;int var7;if (this.size > this.threshold || (var5 = this.table) == null || (var7 = var5.length) == 0) {var7 = (var5 = this.resize()).length;}HashMap.Node var6;int var8;if ((var6 = var5[var8 = var7 - 1 & var4]) != null) {if (var6 instanceof HashMap.TreeNode) {var11 = (var10 = (HashMap.TreeNode)var6).getTreeNode(var4, var1);} else {label74: {HashMap.Node var12 = var6;Object var13;while(var12.hash != var4 || (var13 = var12.key) != var1 && (var1 == null || !var1.equals(var13))) {++var9;if ((var12 = var12.next) == null) {break label74;}}var11 = var12;}}}if (var11 != null) {Object var14;if (((HashMap.Node)var11).value != null) {var14 = var3.apply(((HashMap.Node)var11).value, var2);} else {var14 = var2;}if (var14 != null) {((HashMap.Node)var11).value = var14;this.afterNodeAccess((HashMap.Node)var11);} else {this.removeNode(var4, var1, (Object)null, false, true);}return var14;} else {if (var2 != null) {if (var10 != null) {var10.putTreeVal(this, var5, var4, var1, var2);} else {var5[var8] = this.newNode(var4, var1, var2, var6);if (var9 >= 7) {this.treeifyBin(var5, var4);}}++this.modCount;++this.size;this.afterNodeInsertion(true);}return var2;}}}public void forEach(BiConsumer<? super K, ? super V> var1) {if (var1 == null) {throw new NullPointerException();} else {HashMap.Node[] var2;if (this.size > 0 && (var2 = this.table) != null) {int var3 = this.modCount;for(int var4 = 0; var4 < var2.length; ++var4) {for(HashMap.Node var5 = var2[var4]; var5 != null; var5 = var5.next) {var1.accept(var5.key, var5.value);}}if (this.modCount != var3) {throw new ConcurrentModificationException();}}}}public void replaceAll(BiFunction<? super K, ? super V, ? 
extends V> var1) {if (var1 == null) {throw new NullPointerException();} else {HashMap.Node[] var2;if (this.size > 0 && (var2 = this.table) != null) {int var3 = this.modCount;for(int var4 = 0; var4 < var2.length; ++var4) {for(HashMap.Node var5 = var2[var4]; var5 != null; var5 = var5.next) {var5.value = var1.apply(var5.key, var5.value);}}if (this.modCount != var3) {throw new ConcurrentModificationException();}}}}public Object clone() {HashMap var1;try {var1 = (HashMap)super.clone();} catch (CloneNotSupportedException var3) {throw new InternalError(var3);}var1.reinitialize();var1.putMapEntries(this, false);return var1;}final float loadFactor() {return this.loadFactor;}final int capacity() {return this.table != null ? this.table.length : (this.threshold > 0 ? this.threshold : 16);}private void writeObject(ObjectOutputStream var1) throws IOException {int var2 = this.capacity();var1.defaultWriteObject();var1.writeInt(var2);var1.writeInt(this.size);this.internalWriteEntries(var1);}private void readObject(ObjectInputStream var1) throws IOException, ClassNotFoundException {var1.defaultReadObject();this.reinitialize();if (this.loadFactor > 0.0F && !Float.isNaN(this.loadFactor)) {var1.readInt();int var2 = var1.readInt();if (var2 < 0) {throw new InvalidObjectException("Illegal mappings count: " + var2);} else {if (var2 > 0) {float var3 = Math.min(Math.max(0.25F, this.loadFactor), 4.0F);float var4 = (float)var2 / var3 + 1.0F;int var5 = var4 < 16.0F ? 16 : (var4 >= 1.07374182E9F ? 1073741824 : tableSizeFor((int)var4));float var6 = (float)var5 * var3;this.threshold = var5 < 1073741824 && var6 < 1.07374182E9F ? (int)var6 : 2147483647;HashMap.Node[] var7 = (HashMap.Node[])(new HashMap.Node[var5]);this.table = var7;for(int var8 = 0; var8 < var2; ++var8) {Object var9 = var1.readObject();Object var10 = var1.readObject();this.putVal(hash(var9), var9, var10, false, false);}}}} else {throw new InvalidObjectException("Illegal load factor: " + this.loadFactor);}}HashMap.Node<K, V> newNode(int var1, K var2, V var3, HashMap.Node<K, V> var4) {return new HashMap.Node(var1, var2, var3, var4);}HashMap.Node<K, V> replacementNode(HashMap.Node<K, V> var1, HashMap.Node<K, V> var2) {return new HashMap.Node(var1.hash, var1.key, var1.value, var2);}HashMap.TreeNode<K, V> newTreeNode(int var1, K var2, V var3, HashMap.Node<K, V> var4) {return new HashMap.TreeNode(var1, var2, var3, var4);}HashMap.TreeNode<K, V> replacementTreeNode(HashMap.Node<K, V> var1, HashMap.Node<K, V> var2) {return new HashMap.TreeNode(var1.hash, var1.key, var1.value, var2);}void reinitialize() {this.table = null;this.entrySet = null;this.keySet = null;this.values = null;this.modCount = 0;this.threshold = 0;this.size = 0;}void afterNodeAccess(HashMap.Node<K, V> var1) {}void afterNodeInsertion(boolean var1) {}void afterNodeRemoval(HashMap.Node<K, V> var1) {}void internalWriteEntries(ObjectOutputStream var1) throws IOException {HashMap.Node[] var2;if (this.size > 0 && (var2 = this.table) != null) {for(int var3 = 0; var3 < var2.length; ++var3) {for(HashMap.Node var4 = var2[var3]; var4 != null; var4 = var4.next) {var1.writeObject(var4.key);var1.writeObject(var4.value);}}}}final class EntryIterator extends HashMap<K, V>.HashIterator implements Iterator<Entry<K, V>> {EntryIterator() {super();}public final Entry<K, V> next() {return this.nextNode();}}final class EntrySet extends AbstractSet<Entry<K, V>> {EntrySet() {}public final int size() {return HashMap.this.size;}public final void clear() {HashMap.this.clear();}public final Iterator<Entry<K, V>> iterator() 
{return HashMap.this.new EntryIterator();}public final boolean contains(Object var1) {if (!(var1 instanceof Entry)) {return false;} else {Entry var2 = (Entry)var1;Object var3 = var2.getKey();HashMap.Node var4 = HashMap.this.getNode(HashMap.hash(var3), var3);return var4 != null && var4.equals(var2);}}public final boolean remove(Object var1) {if (var1 instanceof Entry) {Entry var2 = (Entry)var1;Object var3 = var2.getKey();Object var4 = var2.getValue();return HashMap.this.removeNode(HashMap.hash(var3), var3, var4, true, true) != null;} else {return false;}}public final Spliterator<Entry<K, V>> spliterator() {return new HashMap.EntrySpliterator(HashMap.this, 0, -1, 0, 0);}public final void forEach(Consumer<? super Entry<K, V>> var1) {if (var1 == null) {throw new NullPointerException();} else {HashMap.Node[] var2;if (HashMap.this.size > 0 && (var2 = HashMap.this.table) != null) {int var3 = HashMap.this.modCount;for(int var4 = 0; var4 < var2.length; ++var4) {for(HashMap.Node var5 = var2[var4]; var5 != null; var5 = var5.next) {var1.accept(var5);}}if (HashMap.this.modCount != var3) {throw new ConcurrentModificationException();}}}}}static final class EntrySpliterator<K, V> extends HashMap.HashMapSpliterator<K, V> implements Spliterator<Entry<K, V>> {EntrySpliterator(HashMap<K, V> var1, int var2, int var3, int var4, int var5) {super(var1, var2, var3, var4, var5);}public HashMap.EntrySpliterator<K, V> trySplit() {int var1 = this.getFence();int var2 = this.index;int var3 = var2 + var1 >>> 1;return var2 < var3 && this.current == null ? new HashMap.EntrySpliterator(this.map, var2, this.index = var3, this.est >>>= 1, this.expectedModCount) : null;}public void forEachRemaining(Consumer<? super Entry<K, V>> var1) {if (var1 == null) {throw new NullPointerException();} else {HashMap var5 = this.map;HashMap.Node[] var6 = var5.table;int var3;int var4;if ((var3 = this.fence) < 0) {var4 = this.expectedModCount = var5.modCount;var3 = this.fence = var6 == null ? 0 : var6.length;} else {var4 = this.expectedModCount;}int var2;if (var6 != null && var6.length >= var3 && (var2 = this.index) >= 0 && (var2 < (this.index = var3) || this.current != null)) {HashMap.Node var7 = this.current;this.current = null;do {do {if (var7 == null) {var7 = var6[var2++];} else {var1.accept(var7);var7 = var7.next;}} while(var7 != null);} while(var2 < var3);if (var5.modCount != var4) {throw new ConcurrentModificationException();}}}}public boolean tryAdvance(Consumer<? super Entry<K, V>> var1) {if (var1 == null) {throw new NullPointerException();} else {HashMap.Node[] var3 = this.map.table;int var2;if (var3 != null && var3.length >= (var2 = this.getFence()) && this.index >= 0) {while(this.current != null || this.index < var2) {if (this.current != null) {HashMap.Node var4 = this.current;this.current = this.current.next;var1.accept(var4);if (this.map.modCount != this.expectedModCount) {throw new ConcurrentModificationException();}return true;}this.current = var3[this.index++];}}return false;}}public int characteristics() {return (this.fence >= 0 && this.est != this.map.size ? 
0 : 64) | 1;}}abstract class HashIterator {HashMap.Node<K, V> next;HashMap.Node<K, V> current;int expectedModCount;int index;HashIterator() {this.expectedModCount = HashMap.this.modCount;HashMap.Node[] var2 = HashMap.this.table;this.current = this.next = null;this.index = 0;if (var2 != null && HashMap.this.size > 0) {while(this.index < var2.length && (this.next = var2[this.index++]) == null) {}}}public final boolean hasNext() {return this.next != null;}final HashMap.Node<K, V> nextNode() {HashMap.Node var2 = this.next;if (HashMap.this.modCount != this.expectedModCount) {throw new ConcurrentModificationException();} else if (var2 == null) {throw new NoSuchElementException();} else {HashMap.Node[] var1;if ((this.next = (this.current = var2).next) == null && (var1 = HashMap.this.table) != null) {while(this.index < var1.length && (this.next = var1[this.index++]) == null) {}}return var2;}}public final void remove() {HashMap.Node var1 = this.current;if (var1 == null) {throw new IllegalStateException();} else if (HashMap.this.modCount != this.expectedModCount) {throw new ConcurrentModificationException();} else {this.current = null;Object var2 = var1.key;HashMap.this.removeNode(HashMap.hash(var2), var2, (Object)null, false, false);this.expectedModCount = HashMap.this.modCount;}}}static class HashMapSpliterator<K, V> {final HashMap<K, V> map;HashMap.Node<K, V> current;int index;int fence;int est;int expectedModCount;HashMapSpliterator(HashMap<K, V> var1, int var2, int var3, int var4, int var5) {this.map = var1;this.index = var2;this.fence = var3;this.est = var4;this.expectedModCount = var5;}final int getFence() {int var1;if ((var1 = this.fence) < 0) {HashMap var2 = this.map;this.est = var2.size;this.expectedModCount = var2.modCount;HashMap.Node[] var3 = var2.table;var1 = this.fence = var3 == null ? 0 : var3.length;}return var1;}public final long estimateSize() {this.getFence();return (long)this.est;}}final class KeyIterator extends HashMap<K, V>.HashIterator implements Iterator<K> {KeyIterator() {super();}public final K next() {return this.nextNode().key;}}final class KeySet extends AbstractSet<K> {KeySet() {}public final int size() {return HashMap.this.size;}public final void clear() {HashMap.this.clear();}public final Iterator<K> iterator() {return HashMap.this.new KeyIterator();}public final boolean contains(Object var1) {return HashMap.this.containsKey(var1);}public final boolean remove(Object var1) {return HashMap.this.removeNode(HashMap.hash(var1), var1, (Object)null, false, true) != null;}public final Spliterator<K> spliterator() {return new HashMap.KeySpliterator(HashMap.this, 0, -1, 0, 0);}public final void forEach(Consumer<? super K> var1) {if (var1 == null) {throw new NullPointerException();} else {HashMap.Node[] var2;if (HashMap.this.size > 0 && (var2 = HashMap.this.table) != null) {int var3 = HashMap.this.modCount;for(int var4 = 0; var4 < var2.length; ++var4) {for(HashMap.Node var5 = var2[var4]; var5 != null; var5 = var5.next) {var1.accept(var5.key);}}if (HashMap.this.modCount != var3) {throw new ConcurrentModificationException();}}}}}static final class KeySpliterator<K, V> extends HashMap.HashMapSpliterator<K, V> implements Spliterator<K> {KeySpliterator(HashMap<K, V> var1, int var2, int var3, int var4, int var5) {super(var1, var2, var3, var4, var5);}public HashMap.KeySpliterator<K, V> trySplit() {int var1 = this.getFence();int var2 = this.index;int var3 = var2 + var1 >>> 1;return var2 < var3 && this.current == null ? 
new HashMap.KeySpliterator(this.map, var2, this.index = var3, this.est >>>= 1, this.expectedModCount) : null;}public void forEachRemaining(Consumer<? super K> var1) {if (var1 == null) {throw new NullPointerException();} else {HashMap var5 = this.map;HashMap.Node[] var6 = var5.table;int var3;int var4;if ((var3 = this.fence) < 0) {var4 = this.expectedModCount = var5.modCount;var3 = this.fence = var6 == null ? 0 : var6.length;} else {var4 = this.expectedModCount;}int var2;if (var6 != null && var6.length >= var3 && (var2 = this.index) >= 0 && (var2 < (this.index = var3) || this.current != null)) {HashMap.Node var7 = this.current;this.current = null;do {do {if (var7 == null) {var7 = var6[var2++];} else {var1.accept(var7.key);var7 = var7.next;}} while(var7 != null);} while(var2 < var3);if (var5.modCount != var4) {throw new ConcurrentModificationException();}}}}public boolean tryAdvance(Consumer<? super K> var1) {if (var1 == null) {throw new NullPointerException();} else {HashMap.Node[] var3 = this.map.table;int var2;if (var3 != null && var3.length >= (var2 = this.getFence()) && this.index >= 0) {while(this.current != null || this.index < var2) {if (this.current != null) {Object var4 = this.current.key;this.current = this.current.next;var1.accept(var4);if (this.map.modCount != this.expectedModCount) {throw new ConcurrentModificationException();}return true;}this.current = var3[this.index++];}}return false;}}public int characteristics() {return (this.fence >= 0 && this.est != this.map.size ? 0 : 64) | 1;}}static class Node<K, V> implements Entry<K, V> {final int hash;final K key;V value;HashMap.Node<K, V> next;Node(int var1, K var2, V var3, HashMap.Node<K, V> var4) {this.hash = var1;this.key = var2;this.value = var3;this.next = var4;}public final K getKey() {return this.key;}public final V getValue() {return this.value;}public final String toString() {return this.key + "=" + this.value;}public final int hashCode() {return Objects.hashCode(this.key) ^ Objects.hashCode(this.value);}public final V setValue(V var1) {Object var2 = this.value;this.value = var1;return var2;}public final boolean equals(Object var1) {if (var1 == this) {return true;} else {if (var1 instanceof Entry) {Entry var2 = (Entry)var1;if (Objects.equals(this.key, var2.getKey()) && Objects.equals(this.value, var2.getValue())) {return true;}}return false;}}}static final class TreeNode<K, V> extends java.util.LinkedHashMap.Entry<K, V> {HashMap.TreeNode<K, V> parent;HashMap.TreeNode<K, V> left;HashMap.TreeNode<K, V> right;HashMap.TreeNode<K, V> prev;boolean red;TreeNode(int var1, K var2, V var3, HashMap.Node<K, V> var4) {super(var1, var2, var3, var4);}final HashMap.TreeNode<K, V> root() {HashMap.TreeNode var1;HashMap.TreeNode var2;for(var1 = this; (var2 = var1.parent) != null; var1 = var2) {}return var1;}static <K, V> void moveRootToFront(HashMap.Node<K, V>[] var0, HashMap.TreeNode<K, V> var1) {int var2;if (var1 != null && var0 != null && (var2 = var0.length) > 0) {int var3 = var2 - 1 & var1.hash;HashMap.TreeNode var4 = (HashMap.TreeNode)var0[var3];if (var1 != var4) {var0[var3] = var1;HashMap.TreeNode var6 = var1.prev;HashMap.Node var5;if ((var5 = var1.next) != null) {((HashMap.TreeNode)var5).prev = var6;}if (var6 != null) {var6.next = var5;}if (var4 != null) {var4.prev = var1;}var1.next = var4;var1.prev = null;}assert checkInvariants(var1);}}final HashMap.TreeNode<K, V> find(int var1, Object var2, Class<?> var3) {HashMap.TreeNode var4 = this;do {HashMap.TreeNode var8 = var4.left;HashMap.TreeNode var9 = var4.right;int var5;if ((var5 = 
var4.hash) > var1) {var4 = var8;} else if (var5 < var1) {var4 = var9;} else {Object var7;if ((var7 = var4.key) == var2 || var2 != null && var2.equals(var7)) {return var4;}if (var8 == null) {var4 = var9;} else if (var9 == null) {var4 = var8;} else {int var6;if ((var3 != null || (var3 = HashMap.comparableClassFor(var2)) != null) && (var6 = HashMap.compareComparables(var3, var2, var7)) != 0) {var4 = var6 < 0 ? var8 : var9;} else {HashMap.TreeNode var10;if ((var10 = var9.find(var1, var2, var3)) != null) {return var10;}var4 = var8;}}}} while(var4 != null);return null;}final HashMap.TreeNode<K, V> getTreeNode(int var1, Object var2) {return (this.parent != null ? this.root() : this).find(var1, var2, (Class)null);}static int tieBreakOrder(Object var0, Object var1) {int var2;if (var0 == null || var1 == null || (var2 = var0.getClass().getName().compareTo(var1.getClass().getName())) == 0) {var2 = System.identityHashCode(var0) <= System.identityHashCode(var1) ? -1 : 1;}return var2;}final void treeify(HashMap.Node<K, V>[] var1) {HashMap.TreeNode var2 = null;HashMap.TreeNode var4;for(HashMap.TreeNode var3 = this; var3 != null; var3 = var4) {var4 = (HashMap.TreeNode)var3.next;var3.left = var3.right = null;if (var2 == null) {var3.parent = null;var3.red = false;var2 = var3;} else {Object var5 = var3.key;int var6 = var3.hash;Class var7 = null;HashMap.TreeNode var8 = var2;int var9;HashMap.TreeNode var12;do {Object var11 = var8.key;int var10;if ((var10 = var8.hash) > var6) {var9 = -1;} else if (var10 < var6) {var9 = 1;} else if (var7 == null && (var7 = HashMap.comparableClassFor(var5)) == null || (var9 = HashMap.compareComparables(var7, var5, var11)) == 0) {var9 = tieBreakOrder(var5, var11);}var12 = var8;} while((var8 = var9 <= 0 ? var8.left : var8.right) != null);var3.parent = var12;if (var9 <= 0) {var12.left = var3;} else {var12.right = var3;}var2 = balanceInsertion(var2, var3);}}moveRootToFront(var1, var2);}final HashMap.Node<K, V> untreeify(HashMap<K, V> var1) {HashMap.Node var2 = null;HashMap.Node var3 = null;for(Object var4 = this; var4 != null; var4 = ((HashMap.Node)var4).next) {HashMap.Node var5 = var1.replacementNode((HashMap.Node)var4, (HashMap.Node)null);if (var3 == null) {var2 = var5;} else {var3.next = var5;}var3 = var5;}return var2;}final HashMap.TreeNode<K, V> putTreeVal(HashMap<K, V> var1, HashMap.Node<K, V>[] var2, int var3, K var4, V var5) {Class var6 = null;boolean var7 = false;HashMap.TreeNode var8 = this.parent != null ? this.root() : this;HashMap.TreeNode var9 = var8;int var10;HashMap.TreeNode var13;do {int var11;if ((var11 = var9.hash) > var3) {var10 = -1;} else if (var11 < var3) {var10 = 1;} else {Object var12;if ((var12 = var9.key) == var4 || var4 != null && var4.equals(var12)) {return var9;}if (var6 == null && (var6 = HashMap.comparableClassFor(var4)) == null || (var10 = HashMap.compareComparables(var6, var4, var12)) == 0) {if (!var7) {var7 = true;HashMap.TreeNode var14;if ((var14 = var9.left) != null && (var13 = var14.find(var3, var4, var6)) != null || (var14 = var9.right) != null && (var13 = var14.find(var3, var4, var6)) != null) {return var13;}}var10 = tieBreakOrder(var4, var12);}}var13 = var9;} while((var9 = var10 <= 0 ? 
var9.left : var9.right) != null);HashMap.Node var16 = var13.next;HashMap.TreeNode var15 = var1.newTreeNode(var3, var4, var5, var16);if (var10 <= 0) {var13.left = var15;} else {var13.right = var15;}var13.next = var15;var15.parent = var15.prev = var13;if (var16 != null) {((HashMap.TreeNode)var16).prev = var15;}moveRootToFront(var2, balanceInsertion(var8, var15));return null;}final void removeTreeNode(HashMap<K, V> var1, HashMap.Node<K, V>[] var2, boolean var3) {int var4;if (var2 != null && (var4 = var2.length) != 0) {int var5 = var4 - 1 & this.hash;HashMap.TreeNode var6 = (HashMap.TreeNode)var2[var5];HashMap.TreeNode var7 = var6;HashMap.TreeNode var9 = (HashMap.TreeNode)this.next;HashMap.TreeNode var10 = this.prev;if (var10 == null) {var6 = var9;var2[var5] = var9;} else {var10.next = var9;}if (var9 != null) {var9.prev = var10;}if (var6 != null) {if (var7.parent != null) {var7 = var7.root();}HashMap.TreeNode var8;if (var7 != null && var7.right != null && (var8 = var7.left) != null && var8.left != null) {HashMap.TreeNode var12 = this.left;HashMap.TreeNode var13 = this.right;HashMap.TreeNode var14;HashMap.TreeNode var15;HashMap.TreeNode var16;if (var12 != null && var13 != null) {for(var15 = var13; (var16 = var15.left) != null; var15 = var16) {}boolean var17 = var15.red;var15.red = this.red;this.red = var17;HashMap.TreeNode var18 = var15.right;HashMap.TreeNode var19 = this.parent;if (var15 == var13) {this.parent = var15;var15.right = this;} else {HashMap.TreeNode var20 = var15.parent;if ((this.parent = var20) != null) {if (var15 == var20.left) {var20.left = this;} else {var20.right = this;}}if ((var15.right = var13) != null) {var13.parent = var15;}}this.left = null;if ((this.right = var18) != null) {var18.parent = this;}if ((var15.left = var12) != null) {var12.parent = var15;}if ((var15.parent = var19) == null) {var7 = var15;} else if (this == var19.left) {var19.left = var15;} else {var19.right = var15;}if (var18 != null) {var14 = var18;} else {var14 = this;}} else if (var12 != null) {var14 = var12;} else if (var13 != null) {var14 = var13;} else {var14 = this;}if (var14 != this) {var15 = var14.parent = this.parent;if (var15 == null) {var7 = var14;} else if (this == var15.left) {var15.left = var14;} else {var15.right = var14;}this.left = this.right = this.parent = null;}var15 = this.red ? 
var7 : balanceDeletion(var7, var14);if (var14 == this) {var16 = this.parent;this.parent = null;if (var16 != null) {if (this == var16.left) {var16.left = null;} else if (this == var16.right) {var16.right = null;}}}if (var3) {moveRootToFront(var2, var15);}} else {var2[var5] = var6.untreeify(var1);}}}}final void split(HashMap<K, V> var1, HashMap.Node<K, V>[] var2, int var3, int var4) {HashMap.TreeNode var6 = null;HashMap.TreeNode var7 = null;HashMap.TreeNode var8 = null;HashMap.TreeNode var9 = null;int var10 = 0;int var11 = 0;HashMap.TreeNode var13;for(HashMap.TreeNode var12 = this; var12 != null; var12 = var13) {var13 = (HashMap.TreeNode)var12.next;var12.next = null;if ((var12.hash & var4) == 0) {if ((var12.prev = var7) == null) {var6 = var12;} else {var7.next = var12;}var7 = var12;++var10;} else {if ((var12.prev = var9) == null) {var8 = var12;} else {var9.next = var12;}var9 = var12;++var11;}}if (var6 != null) {if (var10 <= 6) {var2[var3] = var6.untreeify(var1);} else {var2[var3] = var6;if (var8 != null) {var6.treeify(var2);}}}if (var8 != null) {if (var11 <= 6) {var2[var3 + var4] = var8.untreeify(var1);} else {var2[var3 + var4] = var8;if (var6 != null) {var8.treeify(var2);}}}}static <K, V> HashMap.TreeNode<K, V> rotateLeft(HashMap.TreeNode<K, V> var0, HashMap.TreeNode<K, V> var1) {HashMap.TreeNode var2;if (var1 != null && (var2 = var1.right) != null) {HashMap.TreeNode var4;if ((var4 = var1.right = var2.left) != null) {var4.parent = var1;}HashMap.TreeNode var3;if ((var3 = var2.parent = var1.parent) == null) {var0 = var2;var2.red = false;} else if (var3.left == var1) {var3.left = var2;} else {var3.right = var2;}var2.left = var1;var1.parent = var2;}return var0;}static <K, V> HashMap.TreeNode<K, V> rotateRight(HashMap.TreeNode<K, V> var0, HashMap.TreeNode<K, V> var1) {HashMap.TreeNode var2;if (var1 != null && (var2 = var1.left) != null) {HashMap.TreeNode var4;if ((var4 = var1.left = var2.right) != null) {var4.parent = var1;}HashMap.TreeNode var3;if ((var3 = var2.parent = var1.parent) == null) {var0 = var2;var2.red = false;} else if (var3.right == var1) {var3.right = var2;} else {var3.left = var2;}var2.right = var1;var1.parent = var2;}return var0;}static <K, V> HashMap.TreeNode<K, V> balanceInsertion(HashMap.TreeNode<K, V> var0, HashMap.TreeNode<K, V> var1) {var1.red = true;HashMap.TreeNode var2;while((var2 = var1.parent) != null) {HashMap.TreeNode var3;if (!var2.red || (var3 = var2.parent) == null) {return var0;}HashMap.TreeNode var4;if (var2 == (var4 = var3.left)) {HashMap.TreeNode var5;if ((var5 = var3.right) != null && var5.red) {var5.red = false;var2.red = false;var3.red = true;var1 = var3;} else {if (var1 == var2.right) {var1 = var2;var0 = rotateLeft(var0, var2);var3 = (var2 = var2.parent) == null ? null : var2.parent;}if (var2 != null) {var2.red = false;if (var3 != null) {var3.red = true;var0 = rotateRight(var0, var3);}}}} else if (var4 != null && var4.red) {var4.red = false;var2.red = false;var3.red = true;var1 = var3;} else {if (var1 == var2.left) {var1 = var2;var0 = rotateRight(var0, var2);var3 = (var2 = var2.parent) == null ? 
null : var2.parent;}if (var2 != null) {var2.red = false;if (var3 != null) {var3.red = true;var0 = rotateLeft(var0, var3);}}}}var1.red = false;return var1;}static <K, V> HashMap.TreeNode<K, V> balanceDeletion(HashMap.TreeNode<K, V> var0, HashMap.TreeNode<K, V> var1) {while(var1 != null && var1 != var0) {HashMap.TreeNode var2;if ((var2 = var1.parent) == null) {var1.red = false;return var1;}if (var1.red) {var1.red = false;return var0;}HashMap.TreeNode var3;HashMap.TreeNode var5;HashMap.TreeNode var6;if ((var3 = var2.left) == var1) {HashMap.TreeNode var4;if ((var4 = var2.right) != null && var4.red) {var4.red = false;var2.red = true;var0 = rotateLeft(var0, var2);var4 = (var2 = var1.parent) == null ? null : var2.right;}if (var4 == null) {var1 = var2;} else {var5 = var4.left;var6 = var4.right;if (var6 != null && var6.red || var5 != null && var5.red) {if (var6 == null || !var6.red) {if (var5 != null) {var5.red = false;}var4.red = true;var0 = rotateRight(var0, var4);var4 = (var2 = var1.parent) == null ? null : var2.right;}if (var4 != null) {var4.red = var2 == null ? false : var2.red;if ((var6 = var4.right) != null) {var6.red = false;}}if (var2 != null) {var2.red = false;var0 = rotateLeft(var0, var2);}var1 = var0;} else {var4.red = true;var1 = var2;}}} else {if (var3 != null && var3.red) {var3.red = false;var2.red = true;var0 = rotateRight(var0, var2);var3 = (var2 = var1.parent) == null ? null : var2.left;}if (var3 == null) {var1 = var2;} else {var5 = var3.left;var6 = var3.right;if ((var5 == null || !var5.red) && (var6 == null || !var6.red)) {var3.red = true;var1 = var2;} else {if (var5 == null || !var5.red) {if (var6 != null) {var6.red = false;}var3.red = true;var0 = rotateLeft(var0, var3);var3 = (var2 = var1.parent) == null ? null : var2.left;}if (var3 != null) {var3.red = var2 == null ? false : var2.red;if ((var5 = var3.left) != null) {var5.red = false;}}if (var2 != null) {var2.red = false;var0 = rotateRight(var0, var2);}var1 = var0;}}}}return var0;}static <K, V> boolean checkInvariants(HashMap.TreeNode<K, V> var0) {HashMap.TreeNode var1 = var0.parent;HashMap.TreeNode var2 = var0.left;HashMap.TreeNode var3 = var0.right;HashMap.TreeNode var4 = var0.prev;HashMap.TreeNode var5 = (HashMap.TreeNode)var0.next;if (var4 != null && var4.next != var0) {return false;} else if (var5 != null && var5.prev != var0) {return false;} else if (var1 != null && var0 != var1.left && var0 != var1.right) {return false;} else if (var2 != null && (var2.parent != var0 || var2.hash > var0.hash)) {return false;} else if (var3 == null || var3.parent == var0 && var3.hash >= var0.hash) {if (var0.red && var2 != null && var2.red && var3 != null && var3.red) {return false;} else if (var2 != null && !checkInvariants(var2)) {return false;} else {return var3 == null || checkInvariants(var3);}} else {return false;}}}final class ValueIterator extends HashMap<K, V>.HashIterator implements Iterator<V> {ValueIterator() {super();}public final V next() {return this.nextNode().value;}}static final class ValueSpliterator<K, V> extends HashMap.HashMapSpliterator<K, V> implements Spliterator<V> {ValueSpliterator(HashMap<K, V> var1, int var2, int var3, int var4, int var5) {super(var1, var2, var3, var4, var5);}public HashMap.ValueSpliterator<K, V> trySplit() {int var1 = this.getFence();int var2 = this.index;int var3 = var2 + var1 >>> 1;return var2 < var3 && this.current == null ? new HashMap.ValueSpliterator(this.map, var2, this.index = var3, this.est >>>= 1, this.expectedModCount) : null;}public void forEachRemaining(Consumer<? 
super V> var1) {if (var1 == null) {throw new NullPointerException();} else {HashMap var5 = this.map;HashMap.Node[] var6 = var5.table;int var3;int var4;if ((var3 = this.fence) < 0) {var4 = this.expectedModCount = var5.modCount;var3 = this.fence = var6 == null ? 0 : var6.length;} else {var4 = this.expectedModCount;}int var2;if (var6 != null && var6.length >= var3 && (var2 = this.index) >= 0 && (var2 < (this.index = var3) || this.current != null)) {HashMap.Node var7 = this.current;this.current = null;do {do {if (var7 == null) {var7 = var6[var2++];} else {var1.accept(var7.value);var7 = var7.next;}} while(var7 != null);} while(var2 < var3);if (var5.modCount != var4) {throw new ConcurrentModificationException();}}}}public boolean tryAdvance(Consumer<? super V> var1) {if (var1 == null) {throw new NullPointerException();} else {HashMap.Node[] var3 = this.map.table;int var2;if (var3 != null && var3.length >= (var2 = this.getFence()) && this.index >= 0) {while(this.current != null || this.index < var2) {if (this.current != null) {Object var4 = this.current.value;this.current = this.current.next;var1.accept(var4);if (this.map.modCount != this.expectedModCount) {throw new ConcurrentModificationException();}return true;}this.current = var3[this.index++];}}return false;}}public int characteristics() {return this.fence >= 0 && this.est != this.map.size ? 0 : 64;}}final class Values extends AbstractCollection<V> {Values() {}public final int size() {return HashMap.this.size;}public final void clear() {HashMap.this.clear();}public final Iterator<V> iterator() {return HashMap.this.new ValueIterator();}public final boolean contains(Object var1) {return HashMap.this.containsValue(var1);}public final Spliterator<V> spliterator() {return new HashMap.ValueSpliterator(HashMap.this, 0, -1, 0, 0);}public final void forEach(Consumer<? super V> var1) {if (var1 == null) {throw new NullPointerException();} else {HashMap.Node[] var2;if (HashMap.this.size > 0 && (var2 = HashMap.this.table) != null) {int var3 = HashMap.this.modCount;for(int var4 = 0; var4 < var2.length; ++var4) {for(HashMap.Node var5 = var2[var4]; var5 != null; var5 = var5.next) {var1.accept(var5.value);}}if (HashMap.this.modCount != var3) {throw new ConcurrentModificationException();}}}}}
}

Next, let's focus on putVal line by line. putVal is the core method HashMap uses to insert a key-value pair; the annotated source below walks through it:

final V putVal(int hash, K key, V value, boolean onlyIfAbsent, boolean evict) {
    Node<K,V>[] tab;   // the bucket array
    Node<K,V> p;       // the node currently being examined
    int n, i;          // n = table length, i = bucket index
    // Lazily allocate the table via resize() if it is null or empty
    if ((tab = table) == null || (n = tab.length) == 0)
        n = (tab = resize()).length;
    // Compute the bucket index; if the bucket is empty, place a new node there
    if ((p = tab[i = (n - 1) & hash]) == null)
        tab[i] = newNode(hash, key, value, null);
    else {  // the bucket already has at least one node: a hash collision
        Node<K,V> e; K k;
        // Case 1: the first node in the bucket has the same hash and an equal key
        if (p.hash == hash && ((k = p.key) == key || (key != null && key.equals(k))))
            e = p;
        // Case 2: the bucket has already been converted to a red-black tree
        else if (p instanceof TreeNode)
            e = ((TreeNode<K,V>)p).putTreeVal(this, tab, hash, key, value);
        // Case 3: walk the linked list
        else {
            for (int binCount = 0; ; ++binCount) {
                if ((e = p.next) == null) {
                    // Reached the tail: append a new node
                    p.next = newNode(hash, key, value, null);
                    // Treeify the bin once the chain reaches TREEIFY_THRESHOLD (8) nodes
                    if (binCount >= TREEIFY_THRESHOLD - 1) // -1 because the first node was counted separately
                        treeifyBin(tab, hash);
                    break;
                }
                // Found a node with the same key: stop searching
                if (e.hash == hash && ((k = e.key) == key || (key != null && key.equals(k))))
                    break;
                p = e;
            }
        }
        // An existing mapping was found: possibly overwrite it and return the old value
        if (e != null) {
            V oldValue = e.value;
            if (!onlyIfAbsent || oldValue == null)
                e.value = value;
            afterNodeAccess(e);
            return oldValue;
        }
    }
    ++modCount;
    // Grow the table if the new size exceeds the threshold
    if (++size > threshold)
        resize();
    afterNodeInsertion(evict);
    return null;   // null means there was no previous mapping
}
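From the caller's perspective, the onlyIfAbsent flag is what separates put from putIfAbsent, and the return value is the previous mapping (or null). A small illustrative sketch (names are arbitrary):

import java.util.HashMap;

public class PutSemanticsDemo {
    public static void main(String[] args) {
        HashMap<String, String> map = new HashMap<>();

        // put -> putVal(..., onlyIfAbsent = false, ...): always overwrites, returns the previous value
        System.out.println(map.put("k", "v1"));          // null (no previous mapping)
        System.out.println(map.put("k", "v2"));          // v1

        // putIfAbsent -> putVal(..., onlyIfAbsent = true, ...): keeps an existing non-null value
        System.out.println(map.putIfAbsent("k", "v3"));  // v2, value unchanged
        System.out.println(map.get("k"));                // v2
    }
}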

The resize method

resize is the method HashMap uses to grow the bucket array. Here is a line-by-line walkthrough:

final Node<K,V>[] resize() {
    Node<K,V>[] oldTab = table;                           // old bucket array
    int oldCap = (oldTab == null) ? 0 : oldTab.length;    // old capacity
    int oldThr = threshold;                                // old threshold
    int newCap, newThr = 0;                                // new capacity and new threshold
    if (oldCap > 0) {                                      // the table already exists
        // Already at maximum capacity: stop growing, just raise the threshold
        if (oldCap >= MAXIMUM_CAPACITY) {
            threshold = Integer.MAX_VALUE;
            return oldTab;
        }
        // Double the capacity; double the threshold too if the old capacity
        // was at least the default initial capacity
        else if ((newCap = oldCap << 1) < MAXIMUM_CAPACITY &&
                 oldCap >= DEFAULT_INITIAL_CAPACITY)
            newThr = oldThr << 1;
    }
    else if (oldThr > 0)
        newCap = oldThr;        // table not allocated yet: the threshold holds the initial capacity
    else {
        newCap = DEFAULT_INITIAL_CAPACITY;                 // use the defaults
        newThr = (int)(DEFAULT_LOAD_FACTOR * DEFAULT_INITIAL_CAPACITY);
    }
    if (newThr == 0) {
        float ft = (float)newCap * loadFactor;             // recompute the threshold
        newThr = (newCap < MAXIMUM_CAPACITY && ft < (float)MAXIMUM_CAPACITY ?
                  (int)ft : Integer.MAX_VALUE);
    }
    threshold = newThr;                                    // update the threshold
    Node<K,V>[] newTab = (Node<K,V>[])new Node[newCap];    // allocate the new bucket array
    table = newTab;                                        // switch the table reference
    if (oldTab != null) {                                  // re-map every node into the new array
        for (int j = 0; j < oldCap; ++j) {
            Node<K,V> e;
            if ((e = oldTab[j]) != null) {
                oldTab[j] = null;                          // help GC by clearing the old slot
                if (e.next == null)
                    newTab[e.hash & (newCap - 1)] = e;     // single node: place it directly
                else if (e instanceof TreeNode)
                    ((TreeNode<K,V>)e).split(this, newTab, j, oldCap); // tree bin: split the tree
                else { // preserve order: split the chain into a "low" list and a "high" list
                    Node<K,V> loHead = null, loTail = null;
                    Node<K,V> hiHead = null, hiTail = null;
                    Node<K,V> next;
                    do {
                        next = e.next;
                        if ((e.hash & oldCap) == 0) {      // tested bit is 0: node stays at index j
                            if (loTail == null)
                                loHead = e;
                            else
                                loTail.next = e;
                            loTail = e;
                        }
                        else {                             // tested bit is 1: node moves to j + oldCap
                            if (hiTail == null)
                                hiHead = e;
                            else
                                hiTail.next = e;
                            hiTail = e;
                        }
                    } while ((e = next) != null);
                    if (loTail != null) {                  // low list keeps the original index
                        loTail.next = null;
                        newTab[j] = loHead;
                    }
                    if (hiTail != null) {                  // high list moves by oldCap, since capacity doubled
                        hiTail.next = null;
                        newTab[j + oldCap] = hiHead;
                    }
                }
            }
        }
    }
    return newTab;
}

In resize, we first check whether the maximum capacity has been reached; if not, the bucket array is doubled and the threshold recomputed. A new bucket array is then allocated and every node from the old array is re-mapped into it. For a bucket holding a linked list, the list is split into two lists so that its nodes end up distributed between two positions in the new array (index j and index j + oldCap). For a red-black tree bucket, the split method performs the corresponding tree split. Finally, the table reference is pointed at the new array, which is returned.

These steps let HashMap grow dynamically while keeping lookups fast.
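The key trick during re-mapping is that, because the capacity is always a power of two, the single bit tested by (hash & oldCap) decides whether an entry stays at its old index j or moves to j + oldCap. A small sketch with made-up hash values shows the arithmetic:

public class ResizeIndexDemo {
    public static void main(String[] args) {
        int oldCap = 16;
        int newCap = oldCap << 1;                        // 32
        int[] hashes = {5, 21, 37, 100};                 // sample (already spread) hash values

        for (int h : hashes) {
            int oldIndex = h & (oldCap - 1);
            int newIndex = h & (newCap - 1);
            // (h & oldCap) tests the one new bit that the doubled mask exposes
            String where = (h & oldCap) == 0 ? "stays at " + oldIndex
                                             : "moves to " + (oldIndex + oldCap);
            System.out.println("hash=" + h + " old=" + oldIndex + " new=" + newIndex + " -> " + where);
        }
    }
}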

In a HashMap, a hash collision means that different keys map to the same bucket index after the hash function is applied. HashMap resolves collisions with the following mechanisms:

1. Linked lists (separate chaining)

When a new key-value pair is inserted, HashMap computes an index from the key's hash code; that index determines which bucket the entry is stored in. If the bucket is empty, the entry is stored there directly. If the bucket already holds entries with the same hash, HashMap checks whether any of their keys equals the new key: if so, the new value replaces the old one; if not, the new entry is appended to the linked list in that bucket.

Chaining is the first line of defense against collisions. It works well while a bucket holds only a few entries, because it is simple and efficient.
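To make the collision handling visible, the hypothetical key class below returns a constant hashCode, forcing every entry into the same bucket; lookups still succeed because equals() distinguishes the keys:

import java.util.HashMap;
import java.util.Map;

public class CollisionDemo {
    // Hypothetical key type: every instance hashes to the same bucket,
    // so all entries collide and end up in one chain (or a tree, once long enough).
    static final class BadKey {
        final String name;
        BadKey(String name) { this.name = name; }
        @Override public int hashCode() { return 42; }   // deliberately terrible
        @Override public boolean equals(Object o) {
            return o instanceof BadKey && ((BadKey) o).name.equals(name);
        }
    }

    public static void main(String[] args) {
        Map<BadKey, Integer> map = new HashMap<>();
        for (int i = 0; i < 20; i++) {
            map.put(new BadKey("key-" + i), i);          // 20 colliding keys in one bucket
        }
        // Lookups still work correctly; only the lookup cost degrades
        // (a list scan, or O(log n) after treeification).
        System.out.println(map.get(new BadKey("key-7"))); // 7
        System.out.println(map.size());                   // 20
    }
}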

2. Red-black trees

When a chain grows beyond the threshold (8 by default, and provided the table already has at least 64 buckets), it is converted into a red-black tree. A red-black tree is a self-balancing binary search tree that guarantees O(log n) worst-case lookup, insertion, and deletion, so it handles heavy collisions far more effectively.

Each tree node still stores a key-value pair, and the tree keeps itself balanced so that its height stays bounded. During a lookup, HashMap uses the key's hash to navigate quickly to the relevant part of the tree and then checks key equality.

3. Converting between lists and trees

Extra work happens when a bucket switches between a linked list and a red-black tree. For example, during resize all nodes are re-mapped into the new bucket array; if a tree bucket is split and one half still exceeds the threshold it remains a tree, while a half whose node count has dropped low enough is converted back into a linked list.

This conversion mechanism lets HashMap keep good performance regardless of how many collisions a bucket sees.

Common questions:

1. Why is HashMap's load factor 0.75?

The choice of 0.75 as the default load factor rests on the following considerations:

  1. Performance: a lower load factor reduces the probability of hash collisions because each bucket holds fewer entries, which keeps operations fast. The downside is that more space is wasted on unused buckets.

  2. Memory utilization: a higher load factor improves memory utilization because more entries fit into the same table, but it also raises the collision probability and can hurt performance.

  3. A balance point: 0.75 is a compromise between performance and memory utilization. It avoids overly frequent resizes while keeping the collision rate reasonably low.

Note that 0.75 is only the default; developers can tune the load factor to the needs of the application. If memory is plentiful and performance requirements are modest, a higher load factor can be used; if performance is critical, a lower one may be preferable.

The load factor also shapes where HashMap's bottlenecks appear: too low, and frequent resizes add time overhead; too high, and collisions increase and lookups slow down. Choosing an appropriate load factor is therefore an important part of tuning HashMap, as the sketch below illustrates.
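As a rough illustration of that tuning, the sketch below (sizes and values are arbitrary) pre-sizes one map generously and packs another densely; both behave identically from the API's point of view, and only their resize and collision behavior differs:

import java.util.HashMap;
import java.util.Map;

public class TuningDemo {
    public static void main(String[] args) {
        // If roughly 1000 entries are expected, sizing the map up front avoids several
        // intermediate resizes: the capacity is rounded up to a power of two and the
        // resize threshold is roughly capacity * loadFactor.
        Map<String, String> spacious = new HashMap<>(2048, 0.5f);  // fewer collisions, more memory

        // A higher load factor packs entries more densely at the cost of more collisions.
        Map<String, String> dense = new HashMap<>(16, 0.9f);

        for (int i = 0; i < 1000; i++) {
            spacious.put("k" + i, "v" + i);
            dense.put("k" + i, "v" + i);
        }
        System.out.println(spacious.size() + " " + dense.size());  // 1000 1000
    }
}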

2. Why did JDK 1.8 add the list-to-red-black-tree conversion? Once a bucket becomes a tree, can it turn back into a list, and under which conditions?

In JDK 1.8, HashMap introduced red-black trees to improve performance when hash collisions are frequent. The key points and conditions of the conversion are:

  1. List to red-black tree

    • When the chain in a bucket grows beyond the threshold (8 by default), it is converted into a red-black tree. A red-black tree is a self-balancing binary search tree whose lookup, insertion, and deletion cost O(log n) instead of the list's O(n), so the conversion significantly improves performance for that bucket.
  2. Red-black tree back to list

    • When the number of nodes in a tree bucket drops to 6 or fewer, the tree is converted back into a linked list, because for small buckets list operations are cheaper than tree operations.
    • This untreeify typically happens after removals, or during resize when the entries of a tree bucket are split and one half falls below the threshold.
  3. Preconditions

    • Besides the node count, the table capacity (the size of the bucket array) must be large enough; the default is 64. If a chain exceeds 8 entries while the table still has fewer than 64 buckets, HashMap resizes the table instead of treeifying the bucket.
  4. Why 8 and 6 as the thresholds

    • 8 was chosen as the treeify threshold because, under a reasonably random distribution of hash codes, the probability of a chain exceeding 8 is extremely small, and once a chain does get that long the red-black tree's advantage becomes worthwhile.
    • 6 was chosen as the untreeify threshold to leave a gap between the two values; otherwise a bucket hovering around the boundary would flip back and forth between list and tree, causing needless overhead.
  5. Performance

    • Introducing red-black trees keeps performance acceptable even in pathological cases, such as a poor hash function that produces many collisions.
    • The trees optimize the high-collision case, while the list/tree conversion balances time and space across different workloads.

In short, this JDK 1.8 improvement lets HashMap stay fast under heavy collisions by falling back on red-black trees, while preserving the space efficiency of linked lists when collisions are rare. This dynamic switching of data structures lets HashMap perform well across very different usage patterns.

3. Strengths and weaknesses of HashMap

Strengths

  1. Fast lookup and insertion: HashMap's core strength is fast lookup and insertion. Because it is backed by a hash table, adding, removing, and looking up elements take O(1) time on average.

  2. Flexible memory management: HashMap grows its internal capacity automatically as entries are added, which makes it efficient for large data sets and lets it absorb more key-value pairs on demand (note that the bucket array itself only grows; it is not shrunk back when entries are removed).

  3. Key-value storage: HashMap stores key-value pairs, which makes it a natural fit whenever values must be looked up quickly by key.

  4. Null keys and values: HashMap permits a single null key and any number of null values, which is convenient in certain scenarios.

  5. Unordered: HashMap makes no guarantee about element order, which is an advantage when fast access matters and ordering does not.

  6. Widely used: HashMap is used throughout the Java collections framework and fits most scenarios that need fast lookup and insertion.

Weaknesses

  1. Not thread-safe: HashMap is not thread-safe; without external synchronization, concurrent modification can leave it in an inconsistent state. ConcurrentHashMap is the thread-safe alternative.

  2. Performance bottlenecks: under severe hash collisions, performance degrades. In the worst case, if every key maps to the same bucket, lookup and insertion can approach O(n) (or O(log n) once the bucket has been converted to a tree).

  3. Memory overhead: compared with some other data structures, HashMap carries extra memory overhead for the node links (list or tree pointers) and for maintaining the hash table itself.

  4. Unordered: while the lack of ordering can be an advantage, it becomes a drawback when elements must be traversed in order; TreeMap or LinkedHashMap are better fits in that case.

  5. Choice of initial capacity and load factor: HashMap's performance depends on these two parameters. Poorly chosen values can cause frequent resizes and degrade performance.

  6. Hash collisions: HashMap mitigates collisions with linked lists and red-black trees, but in extreme cases heavy collisions can still slow it down.

  7. Cost of resizing: when HashMap must grow, every entry has to be re-indexed and re-mapped into the new bucket array, which is expensive when the map holds a very large number of entries.

Overall, HashMap is a highly efficient data structure suited to most scenarios that require fast lookup and insertion. Developers do need to be aware of its thread-safety issues and the performance pitfalls described above. Choosing a sensible initial capacity and load factor, and using keys with a well-distributed hashCode, can further improve HashMap's performance.
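The thread-safety weakness above also shows up as the fail-fast iterator mentioned earlier. The sketch below (illustrative only) triggers a ConcurrentModificationException, then shows iterator-based removal and ConcurrentHashMap as the usual remedies:

import java.util.ConcurrentModificationException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class FailFastDemo {
    public static void main(String[] args) {
        Map<String, Integer> map = new HashMap<>();
        map.put("a", 1);
        map.put("b", 2);

        // Structurally modifying the map while iterating trips the fail-fast check
        // (modCount != expectedModCount) and throws ConcurrentModificationException.
        try {
            for (String key : map.keySet()) {
                map.remove("b");                 // structural modification during iteration
            }
        } catch (ConcurrentModificationException e) {
            System.out.println("fail-fast triggered: " + e);
        }

        // Safe removal while iterating: go through the iterator itself.
        map.put("b", 2);
        Iterator<Map.Entry<String, Integer>> it = map.entrySet().iterator();
        while (it.hasNext()) {
            if (it.next().getKey().equals("b")) {
                it.remove();                     // keeps the iterator's expectedModCount in sync
            }
        }

        // For genuinely concurrent access, use ConcurrentHashMap instead of HashMap.
        Map<String, Integer> concurrent = new ConcurrentHashMap<>(map);
        concurrent.put("c", 3);
        System.out.println(map + " / " + concurrent);
    }
}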

That concludes this analysis of HashMap. If you have other questions, feel free to leave a comment under this article or message me directly.

