Motivation: Table based decoding is fast. Modification: Use table based decoding in HPACK decoder, inspired by https://github.com/python-hyper/hpack/blob/master/hpack/huffman_table.py This modifies the table to be based on integers, rather than 3-tuples of bytes. This is for two reasons: 1. It's faster 2. Using bytes makes the static initializer too big, and doesn't compile. Result: Faster Huffman decoding. This only seems to help the ascii case, the other decoding is about the same. Benchmarks: ``` Before: Benchmark (limitToAscii) (sensitive) (size) Mode Cnt Score Error Units HpackDecoderBenchmark.decode true true SMALL thrpt 20 426293.636 ± 1444.843 ops/s HpackDecoderBenchmark.decode true true MEDIUM thrpt 20 57843.738 ± 725.704 ops/s HpackDecoderBenchmark.decode true true LARGE thrpt 20 3002.412 ± 16.998 ops/s HpackDecoderBenchmark.decode true false SMALL thrpt 20 412339.400 ± 1128.394 ops/s HpackDecoderBenchmark.decode true false MEDIUM thrpt 20 58226.870 ± 199.591 ops/s HpackDecoderBenchmark.decode true false LARGE thrpt 20 3044.256 ± 10.675 ops/s HpackDecoderBenchmark.decode false true SMALL thrpt 20 2082615.030 ± 5929.726 ops/s HpackDecoderBenchmark.decode false true MEDIUM thrpt 10 571640.454 ± 26499.229 ops/s HpackDecoderBenchmark.decode false true LARGE thrpt 20 92714.555 ± 2292.222 ops/s HpackDecoderBenchmark.decode false false SMALL thrpt 20 1745872.421 ± 6788.840 ops/s HpackDecoderBenchmark.decode false false MEDIUM thrpt 20 490420.323 ± 2455.431 ops/s HpackDecoderBenchmark.decode false false LARGE thrpt 20 84536.200 ± 398.714 ops/s After(bytes): Benchmark (limitToAscii) (sensitive) (size) Mode Cnt Score Error Units HpackDecoderBenchmark.decode true true SMALL thrpt 20 472649.148 ± 7122.461 ops/s HpackDecoderBenchmark.decode true true MEDIUM thrpt 20 66739.638 ± 341.607 ops/s HpackDecoderBenchmark.decode true true LARGE thrpt 20 3139.773 ± 24.491 ops/s HpackDecoderBenchmark.decode true false SMALL thrpt 20 466933.833 ± 4514.971 ops/s 
HpackDecoderBenchmark.decode true false MEDIUM thrpt 20 66111.778 ± 568.326 ops/s HpackDecoderBenchmark.decode true false LARGE thrpt 20 3143.619 ± 3.332 ops/s HpackDecoderBenchmark.decode false true SMALL thrpt 20 2109995.177 ± 6203.143 ops/s HpackDecoderBenchmark.decode false true MEDIUM thrpt 20 586026.055 ± 1578.550 ops/s HpackDecoderBenchmark.decode false false SMALL thrpt 20 1775723.270 ± 4932.057 ops/s HpackDecoderBenchmark.decode false false MEDIUM thrpt 20 493316.467 ± 1453.037 ops/s HpackDecoderBenchmark.decode false false LARGE thrpt 10 85726.219 ± 402.573 ops/s After(ints): Benchmark (limitToAscii) (sensitive) (size) Mode Cnt Score Error Units HpackDecoderBenchmark.decode true true SMALL thrpt 20 615549.006 ± 5282.283 ops/s HpackDecoderBenchmark.decode true true MEDIUM thrpt 20 86714.630 ± 654.489 ops/s HpackDecoderBenchmark.decode true true LARGE thrpt 20 3984.439 ± 61.612 ops/s HpackDecoderBenchmark.decode true false SMALL thrpt 20 602489.337 ± 5397.024 ops/s HpackDecoderBenchmark.decode true false MEDIUM thrpt 20 88399.109 ± 241.115 ops/s HpackDecoderBenchmark.decode true false LARGE thrpt 20 3875.729 ± 103.057 ops/s HpackDecoderBenchmark.decode false true SMALL thrpt 20 2092165.454 ± 11918.859 ops/s HpackDecoderBenchmark.decode false true MEDIUM thrpt 20 583465.437 ± 5452.115 ops/s HpackDecoderBenchmark.decode false true LARGE thrpt 20 93290.061 ± 665.904 ops/s HpackDecoderBenchmark.decode false false SMALL thrpt 20 1758402.495 ± 14677.438 ops/s HpackDecoderBenchmark.decode false false MEDIUM thrpt 10 491598.099 ± 5029.698 ops/s HpackDecoderBenchmark.decode false false LARGE thrpt 20 85834.290 ± 554.915 ops/s ```
102 lines
3.7 KiB
Java
102 lines
3.7 KiB
Java
/*
|
|
* Copyright 2017 The Netty Project
|
|
*
|
|
* The Netty Project licenses this file to you under the Apache License,
|
|
* version 2.0 (the "License"); you may not use this file except in compliance
|
|
* with the License. You may obtain a copy of the License at:
|
|
*
|
|
* http://www.apache.org/licenses/LICENSE-2.0
|
|
*
|
|
* Unless required by applicable law or agreed to in writing, software
|
|
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
|
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
|
* License for the specific language governing permissions and limitations
|
|
* under the License.
|
|
*/
|
|
package io.netty.handler.codec.http2;
|
|
|
|
import io.netty.buffer.ByteBuf;
|
|
import io.netty.buffer.Unpooled;
|
|
import org.junit.Before;
|
|
import org.junit.Test;
|
|
|
|
import static io.netty.handler.codec.http2.Http2CodecUtil.DEFAULT_HEADER_LIST_SIZE;
|
|
import static io.netty.handler.codec.http2.Http2CodecUtil.MAX_HEADER_TABLE_SIZE;
|
|
import static org.junit.Assert.assertEquals;
|
|
import static org.mockito.Mockito.mock;
|
|
|
|
public class HpackEncoderTest {
|
|
private HpackDecoder hpackDecoder;
|
|
private HpackEncoder hpackEncoder;
|
|
private Http2Headers mockHeaders;
|
|
|
|
@Before
|
|
public void setUp() {
|
|
hpackEncoder = new HpackEncoder();
|
|
hpackDecoder = new HpackDecoder(DEFAULT_HEADER_LIST_SIZE);
|
|
mockHeaders = mock(Http2Headers.class);
|
|
}
|
|
|
|
@Test
|
|
public void testSetMaxHeaderTableSizeToMaxValue() throws Http2Exception {
|
|
ByteBuf buf = Unpooled.buffer();
|
|
hpackEncoder.setMaxHeaderTableSize(buf, MAX_HEADER_TABLE_SIZE);
|
|
hpackDecoder.setMaxHeaderTableSize(MAX_HEADER_TABLE_SIZE);
|
|
hpackDecoder.decode(0, buf, mockHeaders, true);
|
|
assertEquals(MAX_HEADER_TABLE_SIZE, hpackDecoder.getMaxHeaderTableSize());
|
|
buf.release();
|
|
}
|
|
|
|
@Test(expected = Http2Exception.class)
|
|
public void testSetMaxHeaderTableSizeOverflow() throws Http2Exception {
|
|
ByteBuf buf = Unpooled.buffer();
|
|
try {
|
|
hpackEncoder.setMaxHeaderTableSize(buf, MAX_HEADER_TABLE_SIZE + 1);
|
|
} finally {
|
|
buf.release();
|
|
}
|
|
}
|
|
|
|
/**
|
|
* The encoder should not impose an arbitrary limit on the header size if
|
|
* the server has not specified any limit.
|
|
* @throws Http2Exception
|
|
*/
|
|
@Test
|
|
public void testWillEncode16MBHeaderByDefault() throws Http2Exception {
|
|
ByteBuf buf = Unpooled.buffer();
|
|
String bigHeaderName = "x-big-header";
|
|
int bigHeaderSize = 1024 * 1024 * 16;
|
|
String bigHeaderVal = new String(new char[bigHeaderSize]).replace('\0', 'X');
|
|
Http2Headers headersIn = new DefaultHttp2Headers().add(
|
|
"x-big-header", bigHeaderVal);
|
|
Http2Headers headersOut = new DefaultHttp2Headers();
|
|
|
|
try {
|
|
hpackEncoder.encodeHeaders(0, buf, headersIn, Http2HeadersEncoder.NEVER_SENSITIVE);
|
|
hpackDecoder.setMaxHeaderListSize(bigHeaderSize + 1024);
|
|
hpackDecoder.decode(0, buf, headersOut, false);
|
|
} finally {
|
|
buf.release();
|
|
}
|
|
assertEquals(headersOut.get(bigHeaderName).toString(), bigHeaderVal);
|
|
}
|
|
|
|
@Test(expected = Http2Exception.class)
|
|
public void testSetMaxHeaderListSizeEnforcedAfterSet() throws Http2Exception {
|
|
ByteBuf buf = Unpooled.buffer();
|
|
Http2Headers headers = new DefaultHttp2Headers().add(
|
|
"x-big-header",
|
|
new String(new char[1024 * 16]).replace('\0', 'X')
|
|
);
|
|
|
|
hpackEncoder.setMaxHeaderListSize(1000);
|
|
|
|
try {
|
|
hpackEncoder.encodeHeaders(0, buf, headers, Http2HeadersEncoder.NEVER_SENSITIVE);
|
|
} finally {
|
|
buf.release();
|
|
}
|
|
}
|
|
}
|