Use ByteProcessor in HpackHuffmanDecoder to reduce bound-checks and r… (#9317)
Motivation:

ff0045e3e10684425a26f5b6cb02223fb0444141 changed HpackHuffmanDecoder to use a lookup table, which greatly improved performance. We can squeeze out another ~3% win by using a ByteProcessor, which reduces the number of bounds checks and reference-count checks needed while processing the ByteBuf byte by byte.

Modifications:

Implement the decoding logic with a ByteProcessor.

Result:

Another ~3% performance improvement, which shows up when using h2load to simulate load:

`h2load -c 100 -m 100 --duration 60 --warm-up-time 10 http://127.0.0.1:8080`

Before:

```
finished in 70.02s, 620051.67 req/s, 20.70MB/s
requests: 37203100 total, 37203100 started, 37203100 done, 37203100 succeeded, 0 failed, 0 errored, 0 timeout
status codes: 37203100 2xx, 0 3xx, 0 4xx, 0 5xx
traffic: 1.21GB (1302108500) total, 41.84MB (43872600) headers (space savings 90.00%), 460.24MB (482598600) data
                     min         max         mean         sd        +/- sd
time for request:      404us     24.52ms     15.93ms      1.45ms    87.90%
time for connect:        0us         0us         0us         0us     0.00%
time to 1st byte:        0us         0us         0us         0us     0.00%
req/s           :    6186.64     6211.60     6199.00        5.18    65.00%
```

With this change:

```
finished in 70.02s, 642103.33 req/s, 21.43MB/s
requests: 38526200 total, 38526200 started, 38526200 done, 38526200 succeeded, 0 failed, 0 errored, 0 timeout
status codes: 38526200 2xx, 0 3xx, 0 4xx, 0 5xx
traffic: 1.26GB (1348417000) total, 42.39MB (44444900) headers (space savings 90.00%), 466.25MB (488893900) data
                     min         max         mean         sd        +/- sd
time for request:      370us     24.89ms     15.52ms      1.35ms    88.02%
time for connect:        0us         0us         0us         0us     0.00%
time to 1st byte:        0us         0us         0us         0us     0.00%
req/s           :    6407.06     6435.19     6419.74        5.62    67.00%
```
parent d1e88610ff
commit e177b74874
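For readers unfamiliar with the pattern, here is a standalone sketch (not part of this change; class and variable names are made up) of the idea behind the modification: pushing the per-byte work into `ByteProcessor.process(..)` and driving it with a single `ByteBuf.forEachByte(..)` call lets the buffer validate the index range and reference count once for the whole run, instead of on every `readByte()` call.

```java
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.util.ByteProcessor;

import java.nio.charset.StandardCharsets;

/**
 * Hypothetical example of the ByteProcessor pattern used by this change:
 * instead of draining the buffer with readByte() in a loop (bounds + refCnt
 * check per call), the per-byte work lives in process(..) and is driven by a
 * single forEachByte(..) call.
 */
public final class ByteProcessorPatternSketch implements ByteProcessor {

    private int spaces; // per-run state, analogous to dest/k/state/flags in HpackHuffmanDecoder

    @Override
    public boolean process(byte value) {
        if (value == ' ') {
            spaces++;
        }
        return true; // returning false would stop the iteration early
    }

    public static void main(String[] args) {
        ByteBuf buf = Unpooled.copiedBuffer("one two three", StandardCharsets.US_ASCII);
        try {
            ByteProcessorPatternSketch counter = new ByteProcessorPatternSketch();
            int readerIndex = buf.readerIndex();
            buf.forEachByte(readerIndex, buf.readableBytes(), counter);
            // forEachByte(index, length, processor) does not move the reader index,
            // so advance it manually, the same way decode(..) does after a successful run.
            buf.readerIndex(readerIndex + buf.readableBytes());
            System.out.println("spaces=" + counter.spaces);
        } finally {
            buf.release();
        }
    }
}
```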
HpackDecoder.java

@@ -89,6 +89,7 @@ final class HpackDecoder {
     private static final byte READ_LITERAL_HEADER_VALUE_LENGTH = 8;
     private static final byte READ_LITERAL_HEADER_VALUE = 9;
 
+    private final HpackHuffmanDecoder huffmanDecoder = new HpackHuffmanDecoder();
     private final HpackDynamicTable hpackDynamicTable;
     private long maxHeaderListSize;
     private long maxDynamicTableSize;
@@ -445,7 +446,7 @@ final class HpackDecoder {
 
     private CharSequence readStringLiteral(ByteBuf in, int length, boolean huffmanEncoded) throws Http2Exception {
         if (huffmanEncoded) {
-            return HpackHuffmanDecoder.decode(in, length);
+            return huffmanDecoder.decode(in, length);
         }
         byte[] buf = new byte[length];
         in.readBytes(buf);
HpackHuffmanDecoder.java

@@ -33,11 +33,12 @@ package io.netty.handler.codec.http2;
 
 import io.netty.buffer.ByteBuf;
 import io.netty.util.AsciiString;
+import io.netty.util.ByteProcessor;
 import io.netty.util.internal.ThrowableUtil;
 
 import static io.netty.handler.codec.http2.Http2Error.COMPRESSION_ERROR;
 
-final class HpackHuffmanDecoder {
+final class HpackHuffmanDecoder implements ByteProcessor {
 
     /* Scroll to the bottom! */
 
@@ -4668,6 +4669,13 @@ final class HpackHuffmanDecoder {
             Http2Exception.newStatic(COMPRESSION_ERROR, "HPACK - Bad Encoding",
                     Http2Exception.ShutdownHint.HARD_SHUTDOWN), HpackHuffmanDecoder.class, "decode(..)");
 
+    private byte[] dest;
+    private int k;
+    private int state;
+    private int flags;
+
+    HpackHuffmanDecoder() { }
+
     /**
      * Decompresses the given Huffman coded string literal.
      *
@@ -4675,43 +4683,63 @@
      * @return the output stream for the compressed data
      * @throws Http2Exception EOS Decoded
      */
-    public static AsciiString decode(ByteBuf buf, int length) throws Http2Exception {
+    public AsciiString decode(ByteBuf buf, int length) throws Http2Exception {
         if (length == 0) {
             return AsciiString.EMPTY_STRING;
         }
-        byte[] dest = new byte[length * 8 / 5];
-        int k = 0;
-        int state = 0;
-        int flags = 0;
-        for (int i = 0; i < length; i++) {
-            byte input = buf.readByte();
-            int index = (state << 4) | ((input & 0xFF) >>> 4);
-            int row = HUFFS[index];
-            flags = row & 0x00FF00;
-            if ((flags & HUFFMAN_FAIL_SHIFT) != 0) {
-                throw BAD_ENCODING;
-            }
-            if ((flags & HUFFMAN_EMIT_SYMBOL_SHIFT) != 0) {
-                dest[k++] = (byte) (row & 0xFF);
-            }
-            state = row >> 16;
-
-            index = (state << 4) | (input & 0x0F);
-            row = HUFFS[index];
-            flags = row & 0x00FF00;
-            if ((flags & HUFFMAN_FAIL_SHIFT) != 0) {
-                throw BAD_ENCODING;
-            }
-            if ((flags & HUFFMAN_EMIT_SYMBOL_SHIFT) != 0) {
-                dest[k++] = (byte) (row & 0xFF);
-            }
-            state = row >> 16;
-        }
-        if ((flags & HUFFMAN_COMPLETE_SHIFT) != HUFFMAN_COMPLETE_SHIFT) {
-            throw BAD_ENCODING;
-        }
-        return new AsciiString(dest, 0, k, false);
-    }
-
-    private HpackHuffmanDecoder() { }
+        dest = new byte[length * 8 / 5];
+        try {
+            int readerIndex = buf.readerIndex();
+            // Using ByteProcessor to reduce bounds-checking and reference-count checking during byte-by-byte
+            // processing of the ByteBuf.
+            int endIndex = buf.forEachByte(readerIndex, length, this);
+            if (endIndex == -1) {
+                // We did consume the requested length
+                buf.readerIndex(readerIndex + length);
+                if ((flags & HUFFMAN_COMPLETE_SHIFT) != HUFFMAN_COMPLETE_SHIFT) {
+                    throw BAD_ENCODING;
+                }
+                return new AsciiString(dest, 0, k, false);
+            }
+
+            // The process(...) method returned before the requested length was requested. This means there
+            // was a bad encoding detected.
+            buf.readerIndex(endIndex);
+            throw BAD_ENCODING;
+        } finally {
+            dest = null;
+            k = 0;
+            state = 0;
+            flags = 0;
+        }
+    }
+
+    /**
+     * <strong>This should never be called from anything but this class itself!</strong>
+     */
+    @Override
+    public boolean process(byte input) {
+        int index = (state << 4) | ((input & 0xFF) >>> 4);
+        int row = HUFFS[index];
+        flags = row & 0x00FF00;
+        if ((flags & HUFFMAN_FAIL_SHIFT) != 0) {
+            return false;
+        }
+        if ((flags & HUFFMAN_EMIT_SYMBOL_SHIFT) != 0) {
+            dest[k++] = (byte) (row & 0xFF);
+        }
+        state = row >> 16;
+
+        index = (state << 4) | (input & 0x0F);
+        row = HUFFS[index];
+        flags = row & 0x00FF00;
+        if ((flags & HUFFMAN_FAIL_SHIFT) != 0) {
+            return false;
+        }
+        if ((flags & HUFFMAN_EMIT_SYMBOL_SHIFT) != 0) {
+            dest[k++] = (byte) (row & 0xFF);
+        }
+        state = row >> 16;
+        return true;
+    }
 }
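A minimal sketch (hypothetical names, not from this commit) of how the return value of `forEachByte(..)` is interpreted, mirroring the `endIndex` handling in `decode(..)` above: `-1` means `process(..)` returned true for the entire requested range, while any other value is the index of the byte at which `process(..)` returned false, which `decode(..)` maps to `BAD_ENCODING`.

```java
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.util.ByteProcessor;

public final class ForEachByteReturnValueSketch {

    public static void main(String[] args) {
        // Stop as soon as a NUL byte is seen; any other byte keeps the iteration going.
        ByteProcessor stopOnNul = value -> value != 0;

        ByteBuf ok = Unpooled.wrappedBuffer(new byte[] { 1, 2, 3 });
        ByteBuf bad = Unpooled.wrappedBuffer(new byte[] { 1, 0, 3 });
        try {
            // -1: process(..) returned true for every byte in the range.
            System.out.println(ok.forEachByte(0, ok.readableBytes(), stopOnNul));   // prints -1
            // Otherwise: the index of the byte where process(..) returned false,
            // which decode(..) above treats as a bad Huffman encoding.
            System.out.println(bad.forEachByte(0, bad.readableBytes(), stopOnNul)); // prints 1
        } finally {
            ok.release();
            bad.release();
        }
    }
}
```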
HpackHuffmanTest.java

@@ -153,7 +153,7 @@ public class HpackHuffmanTest {
     private static byte[] decode(byte[] bytes) throws Http2Exception {
         ByteBuf buffer = Unpooled.wrappedBuffer(bytes);
         try {
-            AsciiString decoded = HpackHuffmanDecoder.decode(buffer, buffer.readableBytes());
+            AsciiString decoded = new HpackHuffmanDecoder().decode(buffer, buffer.readableBytes());
             Assert.assertFalse(buffer.isReadable());
             return decoded.toByteArray();
         } finally {