diff --git a/common/src/main/java/io/netty/util/DefaultAttributeMap.java b/common/src/main/java/io/netty/util/DefaultAttributeMap.java
index d19b9474cd..691b312b58 100644
--- a/common/src/main/java/io/netty/util/DefaultAttributeMap.java
+++ b/common/src/main/java/io/netty/util/DefaultAttributeMap.java
@@ -17,73 +17,104 @@ package io.netty.util;
import static java.util.Objects.requireNonNull;
+import java.util.Arrays;
import java.util.concurrent.atomic.AtomicReference;
-import java.util.concurrent.atomic.AtomicReferenceArray;
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
/**
- * Default {@link AttributeMap} implementation which use simple synchronization per bucket to keep the memory overhead
- * as low as possible.
+ * Default {@link AttributeMap} implementation which does not exhibit any blocking behaviour on attribute lookup
+ * while using a copy-on-write approach on the modify path.
+ * Attributes lookup and remove exhibit {@code O(log n)} time worst-case
+ * complexity, hence {@code attribute::set(null)} is to be preferred to {@code remove}.
*/
public class DefaultAttributeMap implements AttributeMap {
- @SuppressWarnings("rawtypes")
- private static final AtomicReferenceFieldUpdater updater =
- AtomicReferenceFieldUpdater.newUpdater(DefaultAttributeMap.class, AtomicReferenceArray.class, "attributes");
+ private static final AtomicReferenceFieldUpdater ATTRIBUTES_UPDATER =
+ AtomicReferenceFieldUpdater.newUpdater(DefaultAttributeMap.class, DefaultAttribute[].class, "attributes");
+ private static final DefaultAttribute[] EMPTY_ATTRIBUTES = new DefaultAttribute[0];
- private static final int BUCKET_SIZE = 4;
- private static final int MASK = BUCKET_SIZE - 1;
+ /**
+ * Similarly to {@code Arrays::binarySearch} it performs a binary search optimized for this use case, in order to
+ * save polymorphic calls (on comparator side) and unnecessary class checks.
+ */
+ private static int searchAttributeByKey(DefaultAttribute[] sortedAttributes, AttributeKey> key) {
+ int low = 0;
+ int high = sortedAttributes.length - 1;
- // Initialize lazily to reduce memory consumption; updated by AtomicReferenceFieldUpdater above.
- @SuppressWarnings("UnusedDeclaration")
- private volatile AtomicReferenceArray> attributes;
+ while (low <= high) {
+ int mid = low + high >>> 1;
+ DefaultAttribute midVal = sortedAttributes[mid];
+ AttributeKey midValKey = midVal.key;
+ if (midValKey == key) {
+ return mid;
+ }
+ int midValKeyId = midValKey.id();
+ int keyId = key.id();
+ assert midValKeyId != keyId;
+ boolean searchRight = midValKeyId < keyId;
+ if (searchRight) {
+ low = mid + 1;
+ } else {
+ high = mid - 1;
+ }
+ }
+
+ return -(low + 1);
+ }
+
+ private static void orderedCopyOnInsert(DefaultAttribute[] sortedSrc, int srcLength, DefaultAttribute[] copy,
+ DefaultAttribute toInsert) {
+ // let's walk backward, because as a rule of thumb, toInsert.key.id() tends to be higher for new keys
+ final int id = toInsert.key.id();
+ int i;
+ for (i = srcLength - 1; i >= 0; i--) {
+ DefaultAttribute attribute = sortedSrc[i];
+ assert attribute.key.id() != id;
+ if (attribute.key.id() < id) {
+ break;
+ }
+ copy[i + 1] = sortedSrc[i];
+ }
+ copy[i + 1] = toInsert;
+ final int toCopy = i + 1;
+ if (toCopy > 0) {
+ System.arraycopy(sortedSrc, 0, copy, 0, toCopy);
+ }
+ }
+
+ private volatile DefaultAttribute[] attributes = EMPTY_ATTRIBUTES;
@SuppressWarnings("unchecked")
@Override
public Attribute attr(AttributeKey key) {
requireNonNull(key, "key");
- AtomicReferenceArray> attributes = this.attributes;
- if (attributes == null) {
- // Not using ConcurrentHashMap due to high memory consumption.
- attributes = new AtomicReferenceArray<>(BUCKET_SIZE);
-
- if (!updater.compareAndSet(this, null, attributes)) {
- attributes = this.attributes;
- }
- }
-
- int i = index(key);
- DefaultAttribute> head = attributes.get(i);
- if (head == null) {
- // No head exists yet which means we may be able to add the attribute without synchronization and just
- // use compare and set. At worst we need to fallback to synchronization and waste two allocations.
- head = new DefaultAttribute();
- DefaultAttribute attr = new DefaultAttribute<>(head, key);
- head.next = attr;
- attr.prev = head;
- if (attributes.compareAndSet(i, null, head)) {
- // we were able to add it so return the attr right away
- return attr;
+ DefaultAttribute newAttribute = null;
+ for (;;) {
+ final DefaultAttribute[] attributes = this.attributes;
+ final int index = searchAttributeByKey(attributes, key);
+ final DefaultAttribute[] newAttributes;
+ if (index >= 0) {
+ final DefaultAttribute attribute = attributes[index];
+ assert attribute.key() == key;
+ if (!attribute.isRemoved()) {
+ return attribute;
+ }
+ // let's try to replace the removed attribute with a new one
+ if (newAttribute == null) {
+ newAttribute = new DefaultAttribute(this, key);
+ }
+ final int count = attributes.length;
+ newAttributes = Arrays.copyOf(attributes, count);
+ newAttributes[index] = newAttribute;
} else {
- head = attributes.get(i);
+ if (newAttribute == null) {
+ newAttribute = new DefaultAttribute(this, key);
+ }
+ final int count = attributes.length;
+ newAttributes = new DefaultAttribute[count + 1];
+ orderedCopyOnInsert(attributes, count, newAttributes, newAttribute);
}
- }
-
- synchronized (head) {
- DefaultAttribute> curr = head;
- for (;;) {
- DefaultAttribute> next = curr.next;
- if (next == null) {
- DefaultAttribute attr = new DefaultAttribute<>(head, key);
- curr.next = attr;
- attr.prev = curr;
- return attr;
- }
-
- if (next.key == key && !next.removed) {
- return (Attribute) next;
- }
- curr = next;
+ if (ATTRIBUTES_UPDATER.compareAndSet(this, attributes, newAttributes)) {
+ return newAttribute;
}
}
}
@@ -91,69 +122,62 @@ public class DefaultAttributeMap implements AttributeMap {
@Override
public boolean hasAttr(AttributeKey key) {
requireNonNull(key, "key");
- AtomicReferenceArray> attributes = this.attributes;
- if (attributes == null) {
- // no attribute exists
- return false;
- }
-
- int i = index(key);
- DefaultAttribute> head = attributes.get(i);
- if (head == null) {
- // No attribute exists which point to the bucket in which the head should be located
- return false;
- }
-
- // We need to synchronize on the head.
- synchronized (head) {
- // Start with head.next as the head itself does not store an attribute.
- DefaultAttribute> curr = head.next;
- while (curr != null) {
- if (curr.key == key && !curr.removed) {
- return true;
- }
- curr = curr.next;
- }
- return false;
- }
+ return searchAttributeByKey(attributes, key) >= 0;
}
- private static int index(AttributeKey> key) {
- return key.id() & MASK;
+ private void removeAttributeIfMatch(AttributeKey key, DefaultAttribute value) {
+ for (;;) {
+ final DefaultAttribute[] attributes = this.attributes;
+ final int index = searchAttributeByKey(attributes, key);
+ if (index < 0) {
+ return;
+ }
+ final DefaultAttribute attribute = attributes[index];
+ assert attribute.key() == key;
+ if (attribute != value) {
+ return;
+ }
+ final int count = attributes.length;
+ final int newCount = count - 1;
+ final DefaultAttribute[] newAttributes =
+ newCount == 0? EMPTY_ATTRIBUTES : new DefaultAttribute[newCount];
+ // perform 2 bulk copies
+ System.arraycopy(attributes, 0, newAttributes, 0, index);
+ final int remaining = count - index - 1;
+ if (remaining > 0) {
+ System.arraycopy(attributes, index + 1, newAttributes, index, remaining);
+ }
+ if (ATTRIBUTES_UPDATER.compareAndSet(this, attributes, newAttributes)) {
+ return;
+ }
+ }
}
@SuppressWarnings("serial")
private static final class DefaultAttribute extends AtomicReference implements Attribute {
+ private static final AtomicReferenceFieldUpdater MAP_UPDATER =
+ AtomicReferenceFieldUpdater.newUpdater(DefaultAttribute.class,
+ DefaultAttributeMap.class, "attributeMap");
private static final long serialVersionUID = -2661411462200283011L;
- // The head of the linked-list this attribute belongs to
- private final DefaultAttribute> head;
+ private volatile DefaultAttributeMap attributeMap;
private final AttributeKey key;
- // Double-linked list to prev and next node to allow fast removal
- private DefaultAttribute> prev;
- private DefaultAttribute> next;
-
- // Will be set to true one the attribute is removed via getAndRemove() or remove()
- private volatile boolean removed;
-
- DefaultAttribute(DefaultAttribute> head, AttributeKey key) {
- this.head = head;
+ DefaultAttribute(DefaultAttributeMap attributeMap, AttributeKey key) {
+ this.attributeMap = attributeMap;
this.key = key;
}
- // Special constructor for the head of the linked-list.
- DefaultAttribute() {
- head = this;
- key = null;
- }
-
@Override
public AttributeKey key() {
return key;
}
+ private boolean isRemoved() {
+ return attributeMap == null;
+ }
+
@Override
public T setIfAbsent(T value) {
while (!compareAndSet(null, value)) {
@@ -167,36 +191,22 @@ public class DefaultAttributeMap implements AttributeMap {
@Override
public T getAndRemove() {
- removed = true;
+ final DefaultAttributeMap attributeMap = this.attributeMap;
+ final boolean removed = attributeMap != null && MAP_UPDATER.compareAndSet(this, attributeMap, null);
T oldValue = getAndSet(null);
- remove0();
+ if (removed) {
+ attributeMap.removeAttributeIfMatch(key, this);
+ }
return oldValue;
}
@Override
public void remove() {
- removed = true;
+ final DefaultAttributeMap attributeMap = this.attributeMap;
+ final boolean removed = attributeMap != null && MAP_UPDATER.compareAndSet(this, attributeMap, null);
set(null);
- remove0();
- }
-
- private void remove0() {
- synchronized (head) {
- if (prev == null) {
- // Removed before.
- return;
- }
-
- prev.next = next;
-
- if (next != null) {
- next.prev = prev;
- }
-
- // Null out prev and next - this will guard against multiple remove0() calls which may corrupt
- // the linked list for the bucket.
- prev = null;
- next = null;
+ if (removed) {
+ attributeMap.removeAttributeIfMatch(key, this);
}
}
}
diff --git a/common/src/test/java/io/netty/util/DefaultAttributeMapTest.java b/common/src/test/java/io/netty/util/DefaultAttributeMapTest.java
index eaa4006a5d..38df2fb5ab 100644
--- a/common/src/test/java/io/netty/util/DefaultAttributeMapTest.java
+++ b/common/src/test/java/io/netty/util/DefaultAttributeMapTest.java
@@ -83,6 +83,32 @@ public class DefaultAttributeMapTest {
assertNotSame(attr, attr2);
}
+ @Test
+ public void testHasAttrRemoved() {
+ AttributeKey[] keys = new AttributeKey[20];
+ for (int i = 0; i < 20; i++) {
+ keys[i] = AttributeKey.valueOf(Integer.toString(i));
+ }
+ for (int i = 10; i < 20; i++) {
+ map.attr(keys[i]);
+ }
+ for (int i = 0; i < 10; i++) {
+ map.attr(keys[i]);
+ }
+ for (int i = 10; i < 20; i++) {
+ AttributeKey key = AttributeKey.valueOf(Integer.toString(i));
+ assertTrue(map.hasAttr(key));
+ map.attr(key).remove();
+ assertFalse(map.hasAttr(key));
+ }
+ for (int i = 0; i < 10; i++) {
+ AttributeKey key = AttributeKey.valueOf(Integer.toString(i));
+ assertTrue(map.hasAttr(key));
+ map.attr(key).remove();
+ assertFalse(map.hasAttr(key));
+ }
+ }
+
@Test
public void testGetAndSetWithNull() {
AttributeKey key = AttributeKey.valueOf("key");
diff --git a/microbench/src/main/java/io/netty/util/DefaultAttributeMapBenchmark.java b/microbench/src/main/java/io/netty/util/DefaultAttributeMapBenchmark.java
new file mode 100644
index 0000000000..f9a47d5b08
--- /dev/null
+++ b/microbench/src/main/java/io/netty/util/DefaultAttributeMapBenchmark.java
@@ -0,0 +1,135 @@
+/*
+ * Copyright 2020 The Netty Project
+ *
+ * The Netty Project licenses this file to you under the Apache License,
+ * version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations
+ * under the License.
+ */
+package io.netty.util;
+
+import io.netty.microbench.util.AbstractMicrobenchmark;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.Level;
+import org.openjdk.jmh.annotations.Measurement;
+import org.openjdk.jmh.annotations.Param;
+import org.openjdk.jmh.annotations.Scope;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.Threads;
+import org.openjdk.jmh.annotations.Warmup;
+import org.openjdk.jmh.infra.Blackhole;
+
+import java.util.IdentityHashMap;
+
+@Warmup(iterations = 5, time = 1)
+@Measurement(iterations = 5, time = 1)
+@State(Scope.Benchmark)
+public class DefaultAttributeMapBenchmark extends AbstractMicrobenchmark {
+
+ @Param({ "8", "32", "128" })
+ private int keyCount;
+ private AttributeKey[] keys;
+ private IdentityHashMap, Attribute> identityHashMap;
+ private DefaultAttributeMap attributes;
+
+ @State(Scope.Thread)
+ public static class KeySequence {
+
+ long nextKey;
+
+ @Setup(Level.Iteration)
+ public void reset() {
+ nextKey = 0;
+ }
+
+ public long next() {
+ return nextKey++;
+ }
+ }
+
+ @Setup
+ public void init() {
+ if (Integer.bitCount(keyCount) != 1) {
+ throw new AssertionError("keyCount should be a power of 2");
+ }
+ attributes = new DefaultAttributeMap();
+ keys = new AttributeKey[keyCount];
+ identityHashMap = new IdentityHashMap, Attribute>(keyCount);
+ for (int i = 0; i < keyCount; i++) {
+ final AttributeKey key = AttributeKey.valueOf(Integer.toString(i));
+ keys[i] = key;
+ final Attribute attribute = attributes.attr(key);
+ identityHashMap.put(key, attribute);
+ }
+ }
+
+ @Benchmark
+ @Threads(3)
+ public Attribute nextAttributeIdentityHashMap(KeySequence sequence) {
+ long next = sequence.next();
+ AttributeKey[] keys = this.keys;
+ AttributeKey key = keys[(int) (next & keys.length - 1)];
+ return identityHashMap.get(key);
+ }
+
+ @Benchmark
+ @Threads(3)
+ public boolean hasAttributeIdentityHashMap(KeySequence sequence) {
+ long next = sequence.next();
+ AttributeKey[] keys = this.keys;
+ AttributeKey key = keys[(int) (next & keys.length - 1)];
+ return identityHashMap.containsKey(key);
+ }
+
+ @Benchmark
+ @Threads(3)
+ public void mixedAttributeIdentityHashMap(KeySequence sequence, Blackhole hole) {
+ long next = sequence.next();
+ AttributeKey[] keys = this.keys;
+ AttributeKey key = keys[(int) (next & keys.length - 1)];
+ if (next % 2 == 0) {
+ hole.consume(identityHashMap.get(key));
+ } else {
+ hole.consume(identityHashMap.containsKey(key));
+ }
+ }
+
+ @Benchmark
+ @Threads(3)
+ public Attribute nextAttributeAttributeMap(KeySequence sequence) {
+ long next = sequence.next();
+ AttributeKey[] keys = this.keys;
+ AttributeKey key = keys[(int) (next & keys.length - 1)];
+ return attributes.attr(key);
+ }
+
+ @Benchmark
+ @Threads(3)
+ public boolean nextHasAttributeAttributeMap(KeySequence sequence) {
+ long next = sequence.next();
+ AttributeKey[] keys = this.keys;
+ AttributeKey key = keys[(int) (next & keys.length - 1)];
+ return attributes.hasAttr(key);
+ }
+
+ @Benchmark
+ @Threads(3)
+ public void mixedAttributeAttributeMap(KeySequence sequence, Blackhole hole) {
+ long next = sequence.next();
+ AttributeKey[] keys = this.keys;
+ AttributeKey key = keys[(int) (next & keys.length - 1)];
+ if (next % 2 == 0) {
+ hole.consume(attributes.attr(key));
+ } else {
+ hole.consume(attributes.hasAttr(key));
+ }
+ }
+}
diff --git a/microbench/src/main/java/io/netty/util/package-info.java b/microbench/src/main/java/io/netty/util/package-info.java
new file mode 100644
index 0000000000..79ce32f095
--- /dev/null
+++ b/microbench/src/main/java/io/netty/util/package-info.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright 2020 The Netty Project
+ *
+ * The Netty Project licenses this file to you under the Apache License,
+ * version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations
+ * under the License.
+ */
+/**
+ * Benchmarks for {@link io.netty.util}.
+ */
+package io.netty.util;