Add 32 bytes of overhead per header entry when calculating the headers length in HpackDecoder

Motivation:

According to the HTTP/2 Spec:

SETTINGS_MAX_HEADER_LIST_SIZE (0x6): This advisory setting informs a
peer of the maximum size of header list that the sender is
prepared to accept, in octets. The value is based on the
uncompressed size of header fields, including the length of the
name and value in octets plus an overhead of 32 octets for each
header field.

We were already accounting for the 32 bytes when encoding in HpackEncoder,
but not when decoding in HpackDecoder.
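
In other words, each header field counts as its name length plus its value length plus 32 octets. A minimal sketch of that computation is below; Netty's HpackHeaderField.sizeOf(name, value), used by the fix in this commit, computes the same quantity, and the constant name here is only illustrative:

    // Per-entry size as quoted above: octet length of name and value plus a
    // fixed 32-octet overhead for the entry's bookkeeping structures.
    // Assumes ASCII names/values, so CharSequence.length() equals the octet length.
    static final int HEADER_ENTRY_OVERHEAD = 32;

    static long sizeOf(CharSequence name, CharSequence value) {
        return name.length() + value.length() + HEADER_ENTRY_OVERHEAD;
    }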

Modifications:

- Add 32 bytes to the header list length for each entry when decoding
with HpackDecoder.

Result:

- We account for the 32-byte overhead per header entry in HpackDecoder.
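
As a concrete illustration, this is the arithmetic behind the test added in this commit (assuming the size rule sketched above):

    // Name "12345" and value "56789" have a raw size of 5 + 5 = 10 octets, but the
    // decoder now counts an extra 32 octets of overhead for the entry:
    long counted = "12345".length() + "56789".length() + 32; // 10 + 32 = 42
    // With hpackDecoder.setMaxHeaderListSize(10, 100), decoding therefore fails with
    // a HeaderListSizeException even though the raw name/value size alone would fit.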
Authored by Julien Hoarau on 2017-12-22 12:50:16 +11:00, committed by Scott Mitchell
parent c9668ce40f
commit 6b033c51a5
2 changed files with 38 additions and 3 deletions

HpackDecoder.java

@@ -429,7 +429,7 @@ final class HpackDecoder {
 
     private long addHeader(int streamId, Http2Headers headers, CharSequence name, CharSequence value,
                            long headersLength) throws Http2Exception {
-        headersLength += name.length() + value.length();
+        headersLength += HpackHeaderField.sizeOf(name, value);
         if (headersLength > maxHeaderListSizeGoAway) {
             headerListSizeExceeded(maxHeaderListSizeGoAway);
         }

HpackDecoderTest.java

@@ -35,14 +35,19 @@ import io.netty.buffer.ByteBuf;
 import io.netty.buffer.Unpooled;
 import io.netty.util.internal.StringUtil;
 import org.junit.Before;
+import org.junit.Rule;
 import org.junit.Test;
+import org.junit.rules.ExpectedException;
 
 import static io.netty.handler.codec.http2.HpackDecoder.decodeULE128;
 import static io.netty.handler.codec.http2.Http2HeadersEncoder.NEVER_SENSITIVE;
 import static io.netty.util.AsciiString.EMPTY_STRING;
 import static io.netty.util.AsciiString.of;
 import static java.lang.Integer.MAX_VALUE;
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertThat;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 import static org.mockito.Mockito.mock;
@@ -52,6 +57,9 @@ import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.verifyNoMoreInteractions;
 
 public class HpackDecoderTest {
 
+    @Rule
+    public ExpectedException expectedException = ExpectedException.none();
+
     private HpackDecoder hpackDecoder;
     private Http2Headers mockHeaders;
@@ -425,9 +433,9 @@ public class HpackDecoderTest {
     @Test
     public void testDecodeLargerThanMaxHeaderListSizeButSmallerThanMaxHeaderListSizeUpdatesDynamicTable()
             throws Http2Exception {
-        ByteBuf in = Unpooled.buffer(200);
+        ByteBuf in = Unpooled.buffer(300);
         try {
-            hpackDecoder.setMaxHeaderListSize(100, 200);
+            hpackDecoder.setMaxHeaderListSize(200, 300);
             HpackEncoder hpackEncoder = new HpackEncoder(true);
 
             // encode headers that are slightly larger than maxHeaderListSize
@@ -477,4 +485,31 @@ public class HpackDecoderTest {
             in.release();
         }
     }
+
+    @Test
+    public void testAccountForHeaderOverhead() throws Exception {
+        ByteBuf in = Unpooled.buffer(100);
+        try {
+            String headerName = "12345";
+            String headerValue = "56789";
+            long headerSize = headerName.length() + headerValue.length();
+            hpackDecoder.setMaxHeaderListSize(headerSize, 100);
+            HpackEncoder hpackEncoder = new HpackEncoder(true);
+
+            Http2Headers toEncode = new DefaultHttp2Headers();
+            toEncode.add(headerName, headerValue);
+            hpackEncoder.encodeHeaders(1, in, toEncode, NEVER_SENSITIVE);
+
+            Http2Headers decoded = new DefaultHttp2Headers();
+
+            // SETTINGS_MAX_HEADER_LIST_SIZE is big enough for the header to fit...
+            assertThat(hpackDecoder.getMaxHeaderListSize(), is(greaterThanOrEqualTo(headerSize)));
+
+            // ... but decode should fail because we add some overhead for each header entry
+            expectedException.expect(Http2Exception.HeaderListSizeException.class);
+            hpackDecoder.decode(1, in, decoded);
+        } finally {
+            in.release();
+        }
+    }
 }