Use ByteProcessor in HpackHuffmanDecoder to reduce bound-checks and r… (#9317)
Motivation:
Commit ff0045e3e1 changed HpackHuffmanDecoder to use a lookup table, which greatly improved performance. We can squeeze out another ~3% win by using a ByteProcessor, which reduces the number of bounds checks and reference-count checks needed by processing the buffer byte by byte (see the sketch below).
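For context, here is a minimal standalone sketch (toy class and variable names, not the actual HPACK code) of the pattern this change adopts: the per-byte work moves into a `ByteProcessor` that `ByteBuf.forEachByte(...)` drives over the requested range, so readability and reference-count validation happen once up front instead of on every `readByte()` call.

```java
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.util.ByteProcessor;

public final class ByteProcessorSketch {
    /** Toy processor that just counts bytes; the real change decodes Huffman symbols here. */
    static final class Counter implements ByteProcessor {
        int count;

        @Override
        public boolean process(byte value) {
            count++;      // per-byte work without a per-byte bounds/ref-count check
            return true;  // keep iterating; returning false stops forEachByte early
        }
    }

    public static void main(String[] args) {
        ByteBuf buf = Unpooled.wrappedBuffer(new byte[] { 1, 2, 3, 4, 5 });

        // Per-byte API: each readByte() re-validates the buffer before returning a byte.
        int n = buf.readableBytes();
        for (int i = 0; i < n; i++) {
            buf.readByte();
        }
        buf.readerIndex(0);

        // Bulk API: the range is validated once, then the processor sees every byte.
        Counter counter = new Counter();
        buf.forEachByte(buf.readerIndex(), buf.readableBytes(), counter);
        System.out.println(counter.count); // 5

        buf.release();
    }
}
```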
Modifications:
Implement the decode logic with a ByteProcessor.
Result:
Another ~3% performance improvement, which shows up when using h2load to simulate load:
`h2load -c 100 -m 100 --duration 60 --warm-up-time 10 http://127.0.0.1:8080`
Before:
```
finished in 70.02s, 620051.67 req/s, 20.70MB/s
requests: 37203100 total, 37203100 started, 37203100 done, 37203100 succeeded, 0 failed, 0 errored, 0 timeout
status codes: 37203100 2xx, 0 3xx, 0 4xx, 0 5xx
traffic: 1.21GB (1302108500) total, 41.84MB (43872600) headers (space savings 90.00%), 460.24MB (482598600) data
                         min         max        mean         sd   +/- sd
time for request:      404us     24.52ms     15.93ms     1.45ms   87.90%
time for connect:        0us         0us         0us        0us    0.00%
time to 1st byte:        0us         0us         0us        0us    0.00%
req/s           :    6186.64     6211.60     6199.00       5.18   65.00%
```
With this change:
```
finished in 70.02s, 642103.33 req/s, 21.43MB/s
requests: 38526200 total, 38526200 started, 38526200 done, 38526200 succeeded, 0 failed, 0 errored, 0 timeout
status codes: 38526200 2xx, 0 3xx, 0 4xx, 0 5xx
traffic: 1.26GB (1348417000) total, 42.39MB (44444900) headers (space savings 90.00%), 466.25MB (488893900) data
                         min         max        mean         sd   +/- sd
time for request:      370us     24.89ms     15.52ms     1.35ms   88.02%
time for connect:        0us         0us         0us        0us    0.00%
time to 1st byte:        0us         0us         0us        0us    0.00%
req/s           :    6407.06     6435.19     6419.74       5.62   67.00%
```
Parent: 16b98d370f
Commit: 707c95e80d
HpackDecoder.java:

@@ -89,6 +89,7 @@ final class HpackDecoder {
     private static final byte READ_LITERAL_HEADER_VALUE_LENGTH = 8;
     private static final byte READ_LITERAL_HEADER_VALUE = 9;
 
+    private final HpackHuffmanDecoder huffmanDecoder = new HpackHuffmanDecoder();
     private final HpackDynamicTable hpackDynamicTable;
     private long maxHeaderListSize;
     private long maxDynamicTableSize;
@@ -445,7 +446,7 @@ final class HpackDecoder {
 
     private CharSequence readStringLiteral(ByteBuf in, int length, boolean huffmanEncoded) throws Http2Exception {
         if (huffmanEncoded) {
-            return HpackHuffmanDecoder.decode(in, length);
+            return huffmanDecoder.decode(in, length);
         }
         byte[] buf = new byte[length];
         in.readBytes(buf);
HpackHuffmanDecoder.java:

@@ -33,11 +33,12 @@ package io.netty.handler.codec.http2;
 
 import io.netty.buffer.ByteBuf;
 import io.netty.util.AsciiString;
+import io.netty.util.ByteProcessor;
 import io.netty.util.internal.ThrowableUtil;
 
 import static io.netty.handler.codec.http2.Http2Error.COMPRESSION_ERROR;
 
-final class HpackHuffmanDecoder {
+final class HpackHuffmanDecoder implements ByteProcessor {
 
     /* Scroll to the bottom! */
 
@@ -4668,6 +4669,13 @@ final class HpackHuffmanDecoder {
             Http2Exception.newStatic(COMPRESSION_ERROR, "HPACK - Bad Encoding",
                     Http2Exception.ShutdownHint.HARD_SHUTDOWN), HpackHuffmanDecoder.class, "decode(..)");
 
+    private byte[] dest;
+    private int k;
+    private int state;
+    private int flags;
+
+    HpackHuffmanDecoder() { }
+
     /**
      * Decompresses the given Huffman coded string literal.
      *
@@ -4675,43 +4683,63 @@ final class HpackHuffmanDecoder {
      * @return the output stream for the compressed data
      * @throws Http2Exception EOS Decoded
      */
-    public static AsciiString decode(ByteBuf buf, int length) throws Http2Exception {
+    public AsciiString decode(ByteBuf buf, int length) throws Http2Exception {
         if (length == 0) {
             return AsciiString.EMPTY_STRING;
         }
-        byte[] dest = new byte[length * 8 / 5];
-        int k = 0;
-        int state = 0;
-        int flags = 0;
-        for (int i = 0; i < length; i++) {
-            byte input = buf.readByte();
-            int index = (state << 4) | ((input & 0xFF) >>> 4);
-            int row = HUFFS[index];
-            flags = row & 0x00FF00;
-            if ((flags & HUFFMAN_FAIL_SHIFT) != 0) {
-                throw BAD_ENCODING;
-            }
-            if ((flags & HUFFMAN_EMIT_SYMBOL_SHIFT) != 0) {
-                dest[k++] = (byte) (row & 0xFF);
-            }
-            state = row >> 16;
-
-            index = (state << 4) | (input & 0x0F);
-            row = HUFFS[index];
-            flags = row & 0x00FF00;
-            if ((flags & HUFFMAN_FAIL_SHIFT) != 0) {
-                throw BAD_ENCODING;
-            }
-            if ((flags & HUFFMAN_EMIT_SYMBOL_SHIFT) != 0) {
-                dest[k++] = (byte) (row & 0xFF);
-            }
-            state = row >> 16;
-        }
-        if ((flags & HUFFMAN_COMPLETE_SHIFT) != HUFFMAN_COMPLETE_SHIFT) {
-            throw BAD_ENCODING;
-        }
-        return new AsciiString(dest, 0, k, false);
+        dest = new byte[length * 8 / 5];
+        try {
+            int readerIndex = buf.readerIndex();
+            // Using ByteProcessor to reduce bounds-checking and reference-count checking during byte-by-byte
+            // processing of the ByteBuf.
+            int endIndex = buf.forEachByte(readerIndex, length, this);
+            if (endIndex == -1) {
+                // We did consume the requested length
+                buf.readerIndex(readerIndex + length);
+                if ((flags & HUFFMAN_COMPLETE_SHIFT) != HUFFMAN_COMPLETE_SHIFT) {
+                    throw BAD_ENCODING;
+                }
+                return new AsciiString(dest, 0, k, false);
+            }
+
+            // The process(...) method returned before the requested length was requested. This means there
+            // was a bad encoding detected.
+            buf.readerIndex(endIndex);
+            throw BAD_ENCODING;
+        } finally {
+            dest = null;
+            k = 0;
+            state = 0;
+            flags = 0;
+        }
     }
 
-    private HpackHuffmanDecoder() { }
+    /**
+     * <strong>This should never be called from anything but this class itself!</strong>
+     */
+    @Override
+    public boolean process(byte input) {
+        int index = (state << 4) | ((input & 0xFF) >>> 4);
+        int row = HUFFS[index];
+        flags = row & 0x00FF00;
+        if ((flags & HUFFMAN_FAIL_SHIFT) != 0) {
+            return false;
+        }
+        if ((flags & HUFFMAN_EMIT_SYMBOL_SHIFT) != 0) {
+            dest[k++] = (byte) (row & 0xFF);
+        }
+        state = row >> 16;
+
+        index = (state << 4) | (input & 0x0F);
+        row = HUFFS[index];
+        flags = row & 0x00FF00;
+        if ((flags & HUFFMAN_FAIL_SHIFT) != 0) {
+            return false;
+        }
+        if ((flags & HUFFMAN_EMIT_SYMBOL_SHIFT) != 0) {
+            dest[k++] = (byte) (row & 0xFF);
+        }
+        state = row >> 16;
+        return true;
+    }
 }
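A note on the `forEachByte(...)` contract the new `decode(...)` relies on, illustrated by a small standalone example (not part of the commit): the call returns -1 when the processor accepted every byte in the requested range, otherwise the absolute index at which `process(...)` returned false, and it never moves the `readerIndex`, which is why `decode(...)` advances or rewinds the index explicitly before returning or throwing.

```java
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;

public final class ForEachByteSemantics {
    public static void main(String[] args) {
        ByteBuf buf = Unpooled.wrappedBuffer(new byte[] { 10, 20, -1, 30 });

        // The processor rejects the first negative byte; forEachByte returns its absolute index.
        int stoppedAt = buf.forEachByte(buf.readerIndex(), buf.readableBytes(), value -> value >= 0);
        System.out.println(stoppedAt);         // 2

        // If the processor accepts every byte in the range, forEachByte returns -1 ...
        int all = buf.forEachByte(buf.readerIndex(), 2, value -> true);
        System.out.println(all);               // -1

        // ... and the readerIndex is untouched either way, so the caller moves it itself,
        // just as decode(..) does with buf.readerIndex(readerIndex + length) on success
        // and buf.readerIndex(endIndex) on a bad encoding.
        System.out.println(buf.readerIndex()); // 0

        buf.release();
    }
}
```

Resetting `dest`, `k`, `state`, and `flags` in the `finally` block keeps the decoder instance reusable for the next header, which is why HpackDecoder can hold a single long-lived HpackHuffmanDecoder.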
HpackHuffmanTest.java:

@@ -153,7 +153,7 @@ public class HpackHuffmanTest {
     private static byte[] decode(byte[] bytes) throws Http2Exception {
         ByteBuf buffer = Unpooled.wrappedBuffer(bytes);
         try {
-            AsciiString decoded = HpackHuffmanDecoder.decode(buffer, buffer.readableBytes());
+            AsciiString decoded = new HpackHuffmanDecoder().decode(buffer, buffer.readableBytes());
             Assert.assertFalse(buffer.isReadable());
             return decoded.toByteArray();
         } finally {