HPACK Encoder headerFields improvements
Motivation:
HPACK Encoder has a data structure which is similar to a previous version of DefaultHeaders. Some of the same improvements can be made.

Modifications:
- Enforce the restriction that the Encoder's headerFields length must be a power of two so we can use masking instead of modulo
- Use AsciiString.hashCode, which already has optimizations, instead of having yet another hash code algorithm in Encoder

Result:
Fixes https://github.com/netty/netty/issues/5357
commit 6af56ffe76
parent a7f7d9c8e0
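The first bullet of the commit message is the key trick: when the table length is a power of two, a bitwise AND with length - 1 selects the same bucket as a non-negative modulo, without a division and without special handling for negative hash codes. A minimal standalone sketch (not Netty code; the class and helper names are illustrative only):

    public final class MaskVsModuloSketch {
        // Round up to the next power of two, e.g. 17 -> 32 (illustrative helper).
        private static int nextPowerOfTwo(int value) {
            return 1 << (32 - Integer.numberOfLeadingZeros(value - 1));
        }

        public static void main(String[] args) {
            int length = nextPowerOfTwo(17);   // 32 buckets
            int mask = length - 1;             // 0x1F
            int hash = "content-type".hashCode();

            // For a power-of-two length these are always equal, even for negative hashes,
            // so the bucket lookup can use the cheaper AND instead of a modulo.
            int byMask = hash & mask;
            int byModulo = Math.floorMod(hash, length);
            System.out.println(byMask + " == " + byModulo);
        }
    }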
@@ -41,21 +41,22 @@ import static io.netty.handler.codec.http2.internal.hpack.HpackUtil.IndexType.IN
 import static io.netty.handler.codec.http2.internal.hpack.HpackUtil.IndexType.NEVER;
 import static io.netty.handler.codec.http2.internal.hpack.HpackUtil.IndexType.NONE;
 import static io.netty.handler.codec.http2.internal.hpack.HpackUtil.equalsConstantTime;
+import static io.netty.util.internal.MathUtil.findNextPositivePowerOfTwo;
+import static java.lang.Math.max;
+import static java.lang.Math.min;
 
 public final class Encoder {
 
-    private static final int BUCKET_SIZE = 17;
-
     // for testing
     private final boolean useIndexing;
     private final boolean forceHuffmanOn;
     private final boolean forceHuffmanOff;
+    private final HuffmanEncoder huffmanEncoder = new HuffmanEncoder();
 
     // a linked hash map of header fields
-    private final HeaderEntry[] headerFields = new HeaderEntry[BUCKET_SIZE];
+    private final HeaderEntry[] headerFields;
     private final HeaderEntry head = new HeaderEntry(-1, AsciiString.EMPTY_STRING,
             AsciiString.EMPTY_STRING, Integer.MAX_VALUE, null);
-    private final HuffmanEncoder huffmanEncoder = new HuffmanEncoder();
+    private final byte hashMask;
     private int size;
     private int capacity;
 
@@ -63,7 +64,14 @@ public final class Encoder {
      * Creates a new encoder.
      */
     public Encoder(int maxHeaderTableSize) {
-        this(maxHeaderTableSize, true, false, false);
+        this(maxHeaderTableSize, 16);
+    }
+
+    /**
+     * Creates a new encoder.
+     */
+    public Encoder(int maxHeaderTableSize, int arraySizeHint) {
+        this(maxHeaderTableSize, true, false, false, arraySizeHint);
     }
 
     /**
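A hypothetical usage sketch of the new overload added above (Encoder lives in an internal package, so this is purely illustrative): the one-argument constructor now delegates with a default arraySizeHint of 16, and callers expecting many distinct header names can pass a larger hint.

    // Illustrative only; assumes the internal Encoder class shown in the hunk above.
    Encoder defaultHint = new Encoder(4096);     // delegates to this(maxHeaderTableSize, 16)
    Encoder largerHint = new Encoder(4096, 64);  // hint is clamped and rounded up internally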
@@ -73,7 +81,8 @@ public final class Encoder {
             int maxHeaderTableSize,
             boolean useIndexing,
             boolean forceHuffmanOn,
-            boolean forceHuffmanOff
+            boolean forceHuffmanOff,
+            int arraySizeHint
     ) {
         if (maxHeaderTableSize < 0) {
             throw new IllegalArgumentException("Illegal Capacity: " + maxHeaderTableSize);
@@ -82,6 +91,10 @@ public final class Encoder {
         this.forceHuffmanOn = forceHuffmanOn;
         this.forceHuffmanOff = forceHuffmanOff;
         capacity = maxHeaderTableSize;
+        // Enforce a bound of [2, 128] because hashMask is a byte. The max possible value of hashMask is one less
+        // than the length of this array, and we want the mask to be > 0.
+        headerFields = new HeaderEntry[findNextPositivePowerOfTwo(max(2, min(arraySizeHint, 128)))];
+        hashMask = (byte) (headerFields.length - 1);
        head.before = head.after = head;
     }
 
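A small sketch of how a few arraySizeHint values map to a table length and mask under the clamp-then-round-up expression above. It assumes findNextPositivePowerOfTwo rounds up to the next power of two, as its name suggests; the stand-in below is illustrative, not Netty's MathUtil.

    public final class ArraySizeHintSketch {
        // Illustrative stand-in for MathUtil.findNextPositivePowerOfTwo.
        private static int findNextPositivePowerOfTwo(int value) {
            return 1 << (32 - Integer.numberOfLeadingZeros(value - 1));
        }

        public static void main(String[] args) {
            for (int hint : new int[] {0, 1, 16, 17, 100, 1000}) {
                int length = findNextPositivePowerOfTwo(Math.max(2, Math.min(hint, 128)));
                byte hashMask = (byte) (length - 1);
                // hint 0 or 1 -> length 2, hint 17 -> 32, anything >= 128 -> 128 (mask 127).
                System.out.println("hint=" + hint + " length=" + length + " mask=" + hashMask);
            }
        }
    }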
@@ -302,7 +315,7 @@ public final class Encoder {
         if (length() == 0 || name == null || value == null) {
             return null;
         }
-        int h = hash(name);
+        int h = AsciiString.hashCode(name);
         int i = index(h);
         for (HeaderEntry e = headerFields[i]; e != null; e = e.next) {
             // To avoid short circuit behavior a bitwise operator is used instead of a boolean operator.
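The short-circuit comment in the context above refers to a timing-hardening technique: combining checks with a bitwise operator forces both operands to be evaluated, while a boolean && may skip the second comparison and leak information through timing. A minimal sketch of that language-level difference (the methods are illustrative, not the Encoder's actual comparison):

    public final class ShortCircuitSketch {
        private static boolean left(boolean value) {
            System.out.println("left evaluated");
            return value;
        }

        private static boolean right() {
            System.out.println("right evaluated");  // models the constant-time comparison
            return true;
        }

        public static void main(String[] args) {
            System.out.println("&& with a false left operand:");
            boolean a = left(false) && right();      // right() is skipped: data-dependent timing

            System.out.println("& with a false left operand:");
            boolean b = left(false) & right();       // right() always runs: uniform work
            System.out.println(a + " " + b);
        }
    }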
@@ -321,26 +334,21 @@ public final class Encoder {
         if (length() == 0 || name == null) {
             return -1;
         }
-        int h = hash(name);
+        int h = AsciiString.hashCode(name);
         int i = index(h);
-        int index = -1;
         for (HeaderEntry e = headerFields[i]; e != null; e = e.next) {
             if (e.hash == h && equalsConstantTime(name, e.name) != 0) {
-                index = e.index;
-                break;
+                return getIndex(e.index);
             }
         }
-        return getIndex(index);
+        return -1;
     }
 
     /**
      * Compute the index into the dynamic table given the index in the header entry.
      */
     private int getIndex(int index) {
-        if (index == -1) {
-            return -1;
-        }
-        return index - head.before.index + 1;
+        return index == -1 ? -1 : index - head.before.index + 1;
     }
 
     /**
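A worked example of the getIndex arithmetic above, under the assumption (consistent with the add() code later in this diff) that each new entry receives head.before.index - 1, so the most recently added entry always has the smallest internal index. The numbers below are illustrative only:

    public final class DynamicTableIndexSketch {
        public static void main(String[] args) {
            // Suppose the decreasing internal counter stood at 100 and three entries were
            // added with indices 99, 98 and 97 (97 being the newest), so head.before.index == 97.
            int oldest = 99, middle = 98, newest = 97, headBeforeIndex = 97;

            // index - head.before.index + 1 maps these onto the HPACK dynamic-table
            // convention where index 1 refers to the most recently inserted entry.
            System.out.println(newest - headBeforeIndex + 1);  // 1
            System.out.println(middle - headBeforeIndex + 1);  // 2
            System.out.println(oldest - headBeforeIndex + 1);  // 3
        }
    }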
@@ -362,7 +370,7 @@ public final class Encoder {
             remove();
         }
 
-        int h = hash(name);
+        int h = AsciiString.hashCode(name);
         int i = index(h);
         HeaderEntry old = headerFields[i];
         HeaderEntry e = new HeaderEntry(h, name, value, head.before.index - 1, old);
@@ -410,28 +418,11 @@ public final class Encoder {
         size = 0;
     }
 
-    /**
-     * Returns the hash code for the given header field name.
-     */
-    private static int hash(CharSequence name) {
-        int h = 0;
-        for (int i = 0; i < name.length(); i++) {
-            h = 31 * h + name.charAt(i);
-        }
-        if (h > 0) {
-            return h;
-        } else if (h == Integer.MIN_VALUE) {
-            return Integer.MAX_VALUE;
-        } else {
-            return -h;
-        }
-    }
-
     /**
      * Returns the index into the hash table for the hash code h.
      */
-    private static int index(int h) {
-        return h % BUCKET_SIZE;
+    private int index(int h) {
+        return h & hashMask;
     }
 
    /**
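One reason the removed hash() folded negative values to positive is that Java's % operator can return a negative remainder, which would be an invalid array index, whereas h & hashMask is always non-negative. A minimal sketch of that difference (illustrative values only):

    public final class NegativeHashSketch {
        public static void main(String[] args) {
            int h = -7;             // hash codes can be negative
            int bucketSize = 17;    // the old prime-sized table
            int hashMask = 16 - 1;  // a power-of-two table of length 16

            System.out.println(h % bucketSize);  // -7: unusable as an array index
            System.out.println(h & hashMask);    // 9: always in [0, 15]
        }
    }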
@@ -161,7 +161,7 @@ final class TestCase {
             maxHeaderTableSize = Integer.MAX_VALUE;
         }
 
-        return new Encoder(maxHeaderTableSize, useIndexing, forceHuffmanOn, forceHuffmanOff);
+        return new Encoder(maxHeaderTableSize, useIndexing, forceHuffmanOn, forceHuffmanOff, 16);
     }
 
     private Decoder createDecoder() {
@@ -49,12 +49,6 @@ import static java.lang.Math.max;
  * @param <T> the type to use for return values when the intention is to return {@code this} object.
  */
 public class DefaultHeaders<K, V, T extends Headers<K, V, T>> implements Headers<K, V, T> {
-    /**
-     * Enforce an upper bound of 128 because {@link #hashMask} is a byte.
-     * The max possible value of {@link #hashMask} is one less than this value.
-     */
-    private static final int ARRAY_SIZE_HINT_MAX = min(128,
-            max(1, SystemPropertyUtil.getInt("io.netty.DefaultHeaders.arraySizeHintMax", 16)));
     /**
      * Constant used to seed the hash code generation. Could be anything but this was borrowed from murmur3.
      */
@@ -120,7 +114,9 @@ public class DefaultHeaders<K, V, T extends Headers<K, V, T>> implements Headers
         this.valueConverter = checkNotNull(valueConverter, "valueConverter");
         this.nameValidator = checkNotNull(nameValidator, "nameValidator");
         this.hashingStrategy = checkNotNull(nameHashingStrategy, "nameHashingStrategy");
-        entries = new DefaultHeaders.HeaderEntry[findNextPositivePowerOfTwo(min(arraySizeHint, ARRAY_SIZE_HINT_MAX))];
+        // Enforce a bound of [2, 128] because hashMask is a byte. The max possible value of hashMask is one less
+        // than the length of this array, and we want the mask to be > 0.
+        entries = new DefaultHeaders.HeaderEntry[findNextPositivePowerOfTwo(max(2, min(arraySizeHint, 128)))];
         hashMask = (byte) (entries.length - 1);
         head = new HeaderEntry<K, V>();
     }
@@ -1394,7 +1394,7 @@ public final class AsciiString implements CharSequence, Comparable<CharSequence>
             return 0;
         }
         if (value.getClass() == AsciiString.class) {
-            return ((AsciiString) value).hashCode();
+            return value.hashCode();
         }
 
         return PlatformDependent.hashCodeAscii(value);
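The Encoder changes earlier in this diff rely on the static AsciiString.hashCode(CharSequence) helper producing the same value for the same characters regardless of the concrete CharSequence type. A small usage sketch of that assumption:

    import io.netty.util.AsciiString;

    public final class AsciiStringHashSketch {
        public static void main(String[] args) {
            // Assumption being illustrated: both calls return the same hash for the same
            // characters, so lookups keyed by AsciiString.hashCode(name) behave the same
            // whether the header name arrives as an AsciiString or a plain String.
            int fromAsciiString = AsciiString.hashCode(new AsciiString("content-type"));
            int fromString = AsciiString.hashCode("content-type");
            System.out.println(fromAsciiString == fromString);
        }
    }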
@@ -36,16 +36,24 @@ import io.netty.handler.codec.http2.internal.hpack.Encoder;
 import io.netty.microbench.util.AbstractMicrobenchmark;
 import org.openjdk.jmh.annotations.Benchmark;
 import org.openjdk.jmh.annotations.BenchmarkMode;
+import org.openjdk.jmh.annotations.Fork;
 import org.openjdk.jmh.annotations.Level;
+import org.openjdk.jmh.annotations.Measurement;
 import org.openjdk.jmh.annotations.Mode;
 import org.openjdk.jmh.annotations.Param;
 import org.openjdk.jmh.annotations.Setup;
 import org.openjdk.jmh.annotations.TearDown;
+import org.openjdk.jmh.annotations.Threads;
+import org.openjdk.jmh.annotations.Warmup;
 import org.openjdk.jmh.infra.Blackhole;
 
 import java.io.IOException;
 import java.util.List;
 
+@Fork(1)
+@Threads(1)
+@Warmup(iterations = 5)
+@Measurement(iterations = 5)
 public class EncoderBenchmark extends AbstractMicrobenchmark {
 
     @Param
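For context on the annotations added above, here is a self-contained JMH sketch (not the Netty benchmark itself; class and method names are illustrative) showing what a single fork, a single thread, and five warmup and measurement iterations configure:

    import org.openjdk.jmh.annotations.Benchmark;
    import org.openjdk.jmh.annotations.Fork;
    import org.openjdk.jmh.annotations.Measurement;
    import org.openjdk.jmh.annotations.Scope;
    import org.openjdk.jmh.annotations.State;
    import org.openjdk.jmh.annotations.Threads;
    import org.openjdk.jmh.annotations.Warmup;

    @Fork(1)                      // run the benchmark in one forked JVM
    @Threads(1)                   // use a single benchmark thread
    @Warmup(iterations = 5)       // 5 warmup iterations before measuring
    @Measurement(iterations = 5)  // 5 measured iterations
    @State(Scope.Benchmark)
    public class AnnotationSketchBenchmark {
        @Benchmark
        public int hashSomething() {
            // Trivial workload; JMH consumes the return value to prevent dead-code elimination.
            return "content-type".hashCode();
        }
    }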