Reduce DefaultAttributeMap lookup cost (#10530)
Motivation:

DefaultAttributeMap::attr exhibits blocking behaviour on lookup of an existing attribute: it can be made non-blocking.

Modification:

Replace the existing fixed bucket table, which uses a locked intrusive linked list per bucket, with a hand-rolled copy-on-write ordered single array.

Result:

Non-blocking behaviour on the lookup happy path.
parent 86c8f24d9a
commit 4624b6309d
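Before the diff itself, here is a minimal, self-contained sketch of the pattern the new implementation adopts. It is an illustration only, not Netty code: int keys stand in for AttributeKey ids and the class name CowSortedSet is invented for this example. Readers pay one volatile read plus a binary search and never block; writers copy the array and publish it with a compare-and-set, retrying if they lose a race.

import java.util.Arrays;
import java.util.concurrent.atomic.AtomicReference;

// Illustration of a copy-on-write ordered array: lookups are lock-free,
// mutations copy the backing array and publish it atomically.
final class CowSortedSet {
    private final AtomicReference<int[]> sorted = new AtomicReference<>(new int[0]);

    boolean contains(int key) {
        // Lookup happy path: a volatile read plus an O(log n) binary search, no locks.
        return Arrays.binarySearch(sorted.get(), key) >= 0;
    }

    boolean add(int key) {
        for (;;) {
            int[] current = sorted.get();
            int index = Arrays.binarySearch(current, key);
            if (index >= 0) {
                return false; // already present
            }
            int insertAt = -(index + 1); // decode the insertion point
            int[] next = new int[current.length + 1];
            System.arraycopy(current, 0, next, 0, insertAt);
            next[insertAt] = key;
            System.arraycopy(current, insertAt, next, insertAt + 1, current.length - insertAt);
            if (sorted.compareAndSet(current, next)) {
                return true; // readers see either the old or the new array, never a torn one
            }
            // Another writer won the race: retry against the fresh array.
        }
    }
}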
common/src/main/java/io/netty/util/DefaultAttributeMap.java
@@ -17,73 +17,104 @@ package io.netty.util;
 import static java.util.Objects.requireNonNull;
 
+import java.util.Arrays;
 import java.util.concurrent.atomic.AtomicReference;
-import java.util.concurrent.atomic.AtomicReferenceArray;
 import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
 
 /**
- * Default {@link AttributeMap} implementation which use simple synchronization per bucket to keep the memory overhead
- * as low as possible.
+ * Default {@link AttributeMap} implementation which does not exhibit any blocking behaviour on attribute lookup while
+ * using a copy-on-write approach on the modify path.<br> Attribute lookup and removal exhibit {@code O(log n)}
+ * worst-case time complexity, hence {@code attribute::set(null)} is to be preferred to {@code remove}.
  */
 public class DefaultAttributeMap implements AttributeMap {
 
-    @SuppressWarnings("rawtypes")
-    private static final AtomicReferenceFieldUpdater<DefaultAttributeMap, AtomicReferenceArray> updater =
-            AtomicReferenceFieldUpdater.newUpdater(DefaultAttributeMap.class, AtomicReferenceArray.class, "attributes");
+    private static final AtomicReferenceFieldUpdater<DefaultAttributeMap, DefaultAttribute[]> ATTRIBUTES_UPDATER =
+            AtomicReferenceFieldUpdater.newUpdater(DefaultAttributeMap.class, DefaultAttribute[].class, "attributes");
+    private static final DefaultAttribute[] EMPTY_ATTRIBUTES = new DefaultAttribute[0];
 
-    private static final int BUCKET_SIZE = 4;
-    private static final int MASK = BUCKET_SIZE - 1;
-
-    // Initialize lazily to reduce memory consumption; updated by AtomicReferenceFieldUpdater above.
-    @SuppressWarnings("UnusedDeclaration")
-    private volatile AtomicReferenceArray<DefaultAttribute<?>> attributes;
+    /**
+     * Similarly to {@code Arrays::binarySearch} it performs a binary search optimized for this use case, in order to
+     * save polymorphic calls (on the comparator side) and unnecessary class checks.
+     */
+    private static int searchAttributeByKey(DefaultAttribute[] sortedAttributes, AttributeKey<?> key) {
+        int low = 0;
+        int high = sortedAttributes.length - 1;
+        while (low <= high) {
+            int mid = low + high >>> 1;
+            DefaultAttribute midVal = sortedAttributes[mid];
+            AttributeKey midValKey = midVal.key;
+            if (midValKey == key) {
+                return mid;
+            }
+            int midValKeyId = midValKey.id();
+            int keyId = key.id();
+            assert midValKeyId != keyId;
+            boolean searchRight = midValKeyId < keyId;
+            if (searchRight) {
+                low = mid + 1;
+            } else {
+                high = mid - 1;
+            }
+        }
+        return -(low + 1);
+    }
+
+    private static void orderedCopyOnInsert(DefaultAttribute[] sortedSrc, int srcLength, DefaultAttribute[] copy,
+                                            DefaultAttribute toInsert) {
+        // let's walk backward, because as a rule of thumb, toInsert.key.id() tends to be higher for new keys
+        final int id = toInsert.key.id();
+        int i;
+        for (i = srcLength - 1; i >= 0; i--) {
+            DefaultAttribute attribute = sortedSrc[i];
+            assert attribute.key.id() != id;
+            if (attribute.key.id() < id) {
+                break;
+            }
+            copy[i + 1] = sortedSrc[i];
+        }
+        copy[i + 1] = toInsert;
+        final int toCopy = i + 1;
+        if (toCopy > 0) {
+            System.arraycopy(sortedSrc, 0, copy, 0, toCopy);
+        }
+    }
+
+    private volatile DefaultAttribute[] attributes = EMPTY_ATTRIBUTES;
 
     @SuppressWarnings("unchecked")
     @Override
     public <T> Attribute<T> attr(AttributeKey<T> key) {
         requireNonNull(key, "key");
-        AtomicReferenceArray<DefaultAttribute<?>> attributes = this.attributes;
-        if (attributes == null) {
-            // Not using ConcurrentHashMap due to high memory consumption.
-            attributes = new AtomicReferenceArray<>(BUCKET_SIZE);
-
-            if (!updater.compareAndSet(this, null, attributes)) {
-                attributes = this.attributes;
-            }
-        }
-
-        int i = index(key);
-        DefaultAttribute<?> head = attributes.get(i);
-        if (head == null) {
-            // No head exists yet which means we may be able to add the attribute without synchronization and just
-            // use compare and set. At worst we need to fallback to synchronization and waste two allocations.
-            head = new DefaultAttribute();
-            DefaultAttribute<T> attr = new DefaultAttribute<>(head, key);
-            head.next = attr;
-            attr.prev = head;
-            if (attributes.compareAndSet(i, null, head)) {
-                // we were able to add it so return the attr right away
-                return attr;
-            } else {
-                head = attributes.get(i);
-            }
-        }
-
-        synchronized (head) {
-            DefaultAttribute<?> curr = head;
-            for (;;) {
-                DefaultAttribute<?> next = curr.next;
-                if (next == null) {
-                    DefaultAttribute<T> attr = new DefaultAttribute<>(head, key);
-                    curr.next = attr;
-                    attr.prev = curr;
-                    return attr;
-                }
-
-                if (next.key == key && !next.removed) {
-                    return (Attribute<T>) next;
-                }
-                curr = next;
-            }
-        }
+        DefaultAttribute newAttribute = null;
+        for (;;) {
+            final DefaultAttribute[] attributes = this.attributes;
+            final int index = searchAttributeByKey(attributes, key);
+            final DefaultAttribute[] newAttributes;
+            if (index >= 0) {
+                final DefaultAttribute attribute = attributes[index];
+                assert attribute.key() == key;
+                if (!attribute.isRemoved()) {
+                    return attribute;
+                }
+                // let's try to replace the removed attribute with a new one
+                if (newAttribute == null) {
+                    newAttribute = new DefaultAttribute<T>(this, key);
+                }
+                final int count = attributes.length;
+                newAttributes = Arrays.copyOf(attributes, count);
+                newAttributes[index] = newAttribute;
+            } else {
+                if (newAttribute == null) {
+                    newAttribute = new DefaultAttribute<T>(this, key);
+                }
+                final int count = attributes.length;
+                newAttributes = new DefaultAttribute[count + 1];
+                orderedCopyOnInsert(attributes, count, newAttributes, newAttribute);
+            }
+            if (ATTRIBUTES_UPDATER.compareAndSet(this, attributes, newAttributes)) {
+                return newAttribute;
+            }
+        }
     }
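Worth noting in the hunk above: searchAttributeByKey follows the same return convention as Arrays.binarySearch, so attr() can use the sign of the result to choose between the two copy paths (replace a removed slot in place vs. grow via orderedCopyOnInsert). A tiny standalone demonstration of the -(insertionPoint + 1) encoding, as an illustration only:

import java.util.Arrays;

public final class InsertionPointDemo {
    public static void main(String[] args) {
        int[] ids = { 3, 7, 12 }; // sorted key ids, by analogy with AttributeKey.id()
        System.out.println(Arrays.binarySearch(ids, 7)); // 1: found at index 1
        int miss = Arrays.binarySearch(ids, 9);
        System.out.println(miss);                        // -3: negative means absent
        System.out.println(-(miss + 1));                 // 2: where 9 would be inserted
    }
}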
@@ -91,69 +122,62 @@ public class DefaultAttributeMap implements AttributeMap {
     @Override
     public <T> boolean hasAttr(AttributeKey<T> key) {
         requireNonNull(key, "key");
-        AtomicReferenceArray<DefaultAttribute<?>> attributes = this.attributes;
-        if (attributes == null) {
-            // no attribute exists
-            return false;
-        }
-
-        int i = index(key);
-        DefaultAttribute<?> head = attributes.get(i);
-        if (head == null) {
-            // No attribute exists which point to the bucket in which the head should be located
-            return false;
-        }
-
-        // We need to synchronize on the head.
-        synchronized (head) {
-            // Start with head.next as the head itself does not store an attribute.
-            DefaultAttribute<?> curr = head.next;
-            while (curr != null) {
-                if (curr.key == key && !curr.removed) {
-                    return true;
-                }
-                curr = curr.next;
-            }
-            return false;
-        }
-    }
-
-    private static int index(AttributeKey<?> key) {
-        return key.id() & MASK;
+        return searchAttributeByKey(attributes, key) >= 0;
+    }
+
+    private <T> void removeAttributeIfMatch(AttributeKey<T> key, DefaultAttribute<T> value) {
+        for (;;) {
+            final DefaultAttribute[] attributes = this.attributes;
+            final int index = searchAttributeByKey(attributes, key);
+            if (index < 0) {
+                return;
+            }
+            final DefaultAttribute attribute = attributes[index];
+            assert attribute.key() == key;
+            if (attribute != value) {
+                return;
+            }
+            final int count = attributes.length;
+            final int newCount = count - 1;
+            final DefaultAttribute[] newAttributes =
+                    newCount == 0? EMPTY_ATTRIBUTES : new DefaultAttribute[newCount];
+            // perform 2 bulk copies
+            System.arraycopy(attributes, 0, newAttributes, 0, index);
+            final int remaining = count - index - 1;
+            if (remaining > 0) {
+                System.arraycopy(attributes, index + 1, newAttributes, index, remaining);
+            }
+            if (ATTRIBUTES_UPDATER.compareAndSet(this, attributes, newAttributes)) {
+                return;
+            }
+        }
     }
 
     @SuppressWarnings("serial")
     private static final class DefaultAttribute<T> extends AtomicReference<T> implements Attribute<T> {
 
+        private static final AtomicReferenceFieldUpdater<DefaultAttribute, DefaultAttributeMap> MAP_UPDATER =
+                AtomicReferenceFieldUpdater.newUpdater(DefaultAttribute.class,
+                                                       DefaultAttributeMap.class, "attributeMap");
         private static final long serialVersionUID = -2661411462200283011L;
 
-        // The head of the linked-list this attribute belongs to
-        private final DefaultAttribute<?> head;
+        private volatile DefaultAttributeMap attributeMap;
         private final AttributeKey<T> key;
 
-        // Double-linked list to prev and next node to allow fast removal
-        private DefaultAttribute<?> prev;
-        private DefaultAttribute<?> next;
-
-        // Will be set to true one the attribute is removed via getAndRemove() or remove()
-        private volatile boolean removed;
-
-        DefaultAttribute(DefaultAttribute<?> head, AttributeKey<T> key) {
-            this.head = head;
+        DefaultAttribute(DefaultAttributeMap attributeMap, AttributeKey<T> key) {
+            this.attributeMap = attributeMap;
             this.key = key;
         }
 
-        // Special constructor for the head of the linked-list.
-        DefaultAttribute() {
-            head = this;
-            key = null;
-        }
-
         @Override
         public AttributeKey<T> key() {
             return key;
         }
 
+        private boolean isRemoved() {
+            return attributeMap == null;
+        }
+
        @Override
        public T setIfAbsent(T value) {
            while (!compareAndSet(null, value)) {
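The "perform 2 bulk copies" comment in removeAttributeIfMatch above is the standard way to delete one slot from a packed array: copy the prefix before the removed index, then copy the suffix shifted left by one. A simplified sketch with int elements, as an illustration rather than the Netty code:

// Remove the element at 'index' from 'src', producing a new, shorter array.
static int[] removeAt(int[] src, int index) {
    int newCount = src.length - 1;
    int[] dst = new int[newCount];
    // 1st bulk copy: elements [0, index) keep their positions.
    System.arraycopy(src, 0, dst, 0, index);
    // 2nd bulk copy: elements (index, end) shift left by one.
    int remaining = src.length - index - 1;
    if (remaining > 0) {
        System.arraycopy(src, index + 1, dst, index, remaining);
    }
    return dst;
}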
@@ -167,36 +191,22 @@ public class DefaultAttributeMap implements AttributeMap {
 
         @Override
         public T getAndRemove() {
-            removed = true;
+            final DefaultAttributeMap attributeMap = this.attributeMap;
+            final boolean removed = attributeMap != null && MAP_UPDATER.compareAndSet(this, attributeMap, null);
             T oldValue = getAndSet(null);
-            remove0();
+            if (removed) {
+                attributeMap.removeAttributeIfMatch(key, this);
+            }
             return oldValue;
         }
 
         @Override
         public void remove() {
-            removed = true;
+            final DefaultAttributeMap attributeMap = this.attributeMap;
+            final boolean removed = attributeMap != null && MAP_UPDATER.compareAndSet(this, attributeMap, null);
             set(null);
-            remove0();
-        }
-
-        private void remove0() {
-            synchronized (head) {
-                if (prev == null) {
-                    // Removed before.
-                    return;
-                }
-
-                prev.next = next;
-
-                if (next != null) {
-                    next.prev = prev;
-                }
-
-                // Null out prev and next - this will guard against multiple remove0() calls which may corrupt
-                // the linked list for the bucket.
-                prev = null;
-                next = null;
-            }
+            if (removed) {
+                attributeMap.removeAttributeIfMatch(key, this);
+            }
         }
     }
 }
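This hunk is why the new class javadoc recommends attribute.set(null) over remove(): remove() and getAndRemove() now CAS the attributeMap reference and then rewrite the whole backing array via removeAttributeIfMatch, whereas clearing the value touches only the AtomicReference. A short usage sketch follows; the key name is invented for the example:

AttributeMap map = new DefaultAttributeMap();
AttributeKey<Integer> key = AttributeKey.valueOf("demo.counter"); // hypothetical key
Attribute<Integer> attr = map.attr(key);
attr.set(42);
attr.set(null);   // preferred: clears the value, backing array untouched
// attr.remove(); // also clears, but additionally shrinks the attribute array (O(n) copy)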
common/src/test/java/io/netty/util/DefaultAttributeMapTest.java
@@ -83,6 +83,32 @@ public class DefaultAttributeMapTest {
         assertNotSame(attr, attr2);
     }
 
+    @Test
+    public void testHasAttrRemoved() {
+        AttributeKey<Integer>[] keys = new AttributeKey[20];
+        for (int i = 0; i < 20; i++) {
+            keys[i] = AttributeKey.valueOf(Integer.toString(i));
+        }
+        for (int i = 10; i < 20; i++) {
+            map.attr(keys[i]);
+        }
+        for (int i = 0; i < 10; i++) {
+            map.attr(keys[i]);
+        }
+        for (int i = 10; i < 20; i++) {
+            AttributeKey<Integer> key = AttributeKey.valueOf(Integer.toString(i));
+            assertTrue(map.hasAttr(key));
+            map.attr(key).remove();
+            assertFalse(map.hasAttr(key));
+        }
+        for (int i = 0; i < 10; i++) {
+            AttributeKey<Integer> key = AttributeKey.valueOf(Integer.toString(i));
+            assertTrue(map.hasAttr(key));
+            map.attr(key).remove();
+            assertFalse(map.hasAttr(key));
+        }
+    }
+
     @Test
     public void testGetAndSetWithNull() {
         AttributeKey<Integer> key = AttributeKey.valueOf("key");
microbench/src/main/java/io/netty/util/DefaultAttributeMapBenchmark.java
New file
@@ -0,0 +1,135 @@
+/*
+ * Copyright 2020 The Netty Project
+ *
+ * The Netty Project licenses this file to you under the Apache License,
+ * version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations
+ * under the License.
+ */
+package io.netty.util;
+
+import io.netty.microbench.util.AbstractMicrobenchmark;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.Level;
+import org.openjdk.jmh.annotations.Measurement;
+import org.openjdk.jmh.annotations.Param;
+import org.openjdk.jmh.annotations.Scope;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.Threads;
+import org.openjdk.jmh.annotations.Warmup;
+import org.openjdk.jmh.infra.Blackhole;
+
+import java.util.IdentityHashMap;
+
+@Warmup(iterations = 5, time = 1)
+@Measurement(iterations = 5, time = 1)
+@State(Scope.Benchmark)
+public class DefaultAttributeMapBenchmark extends AbstractMicrobenchmark {
+
+    @Param({ "8", "32", "128" })
+    private int keyCount;
+    private AttributeKey<Integer>[] keys;
+    private IdentityHashMap<AttributeKey<Integer>, Attribute<Integer>> identityHashMap;
+    private DefaultAttributeMap attributes;
+
+    @State(Scope.Thread)
+    public static class KeySequence {
+
+        long nextKey;
+
+        @Setup(Level.Iteration)
+        public void reset() {
+            nextKey = 0;
+        }
+
+        public long next() {
+            return nextKey++;
+        }
+    }
+
+    @Setup
+    public void init() {
+        if (Integer.bitCount(keyCount) != 1) {
+            throw new AssertionError("keyCount should be a power of 2");
+        }
+        attributes = new DefaultAttributeMap();
+        keys = new AttributeKey[keyCount];
+        identityHashMap = new IdentityHashMap<AttributeKey<Integer>, Attribute<Integer>>(keyCount);
+        for (int i = 0; i < keyCount; i++) {
+            final AttributeKey<Integer> key = AttributeKey.valueOf(Integer.toString(i));
+            keys[i] = key;
+            final Attribute<Integer> attribute = attributes.attr(key);
+            identityHashMap.put(key, attribute);
+        }
+    }
+
+    @Benchmark
+    @Threads(3)
+    public Attribute<Integer> nextAttributeIdentityHashMap(KeySequence sequence) {
+        long next = sequence.next();
+        AttributeKey<Integer>[] keys = this.keys;
+        AttributeKey<Integer> key = keys[(int) (next & keys.length - 1)];
+        return identityHashMap.get(key);
+    }
+
+    @Benchmark
+    @Threads(3)
+    public boolean hasAttributeIdentityHashMap(KeySequence sequence) {
+        long next = sequence.next();
+        AttributeKey<Integer>[] keys = this.keys;
+        AttributeKey<Integer> key = keys[(int) (next & keys.length - 1)];
+        return identityHashMap.containsKey(key);
+    }
+
+    @Benchmark
+    @Threads(3)
+    public void mixedAttributeIdentityHashMap(KeySequence sequence, Blackhole hole) {
+        long next = sequence.next();
+        AttributeKey<Integer>[] keys = this.keys;
+        AttributeKey<Integer> key = keys[(int) (next & keys.length - 1)];
+        if (next % 2 == 0) {
+            hole.consume(identityHashMap.get(key));
+        } else {
+            hole.consume(identityHashMap.containsKey(key));
+        }
+    }
+
+    @Benchmark
+    @Threads(3)
+    public Attribute<Integer> nextAttributeAttributeMap(KeySequence sequence) {
+        long next = sequence.next();
+        AttributeKey<Integer>[] keys = this.keys;
+        AttributeKey<Integer> key = keys[(int) (next & keys.length - 1)];
+        return attributes.attr(key);
+    }
+
+    @Benchmark
+    @Threads(3)
+    public boolean nextHasAttributeAttributeMap(KeySequence sequence) {
+        long next = sequence.next();
+        AttributeKey<Integer>[] keys = this.keys;
+        AttributeKey<Integer> key = keys[(int) (next & keys.length - 1)];
+        return attributes.hasAttr(key);
+    }
+
+    @Benchmark
+    @Threads(3)
+    public void mixedAttributeAttributeMap(KeySequence sequence, Blackhole hole) {
+        long next = sequence.next();
+        AttributeKey<Integer>[] keys = this.keys;
+        AttributeKey<Integer> key = keys[(int) (next & keys.length - 1)];
+        if (next % 2 == 0) {
+            hole.consume(attributes.attr(key));
+        } else {
+            hole.consume(attributes.hasAttr(key));
+        }
+    }
+}
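A side note on the benchmark above: init() insists that keyCount is a power of two so the hot loops can pick the next key with a bitwise AND, next & keys.length - 1, instead of a modulo. The equivalence only holds for power-of-two lengths, as this small standalone illustration shows:

public final class MaskDemo {
    public static void main(String[] args) {
        long next = 37;
        int length = 32; // power of two, like keyCount in the benchmark
        System.out.println(next & (length - 1)); // 5
        System.out.println(next % length);       // 5: same result, but needs a division
    }
}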
microbench/src/main/java/io/netty/util/package-info.java
New file
@@ -0,0 +1,19 @@
+/*
+ * Copyright 2020 The Netty Project
+ *
+ * The Netty Project licenses this file to you under the Apache License,
+ * version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations
+ * under the License.
+ */
+/**
+ * Benchmarks for {@link io.netty.util}.
+ */
+package io.netty.util;