// HpackEncoderTest.java
/*
* Copyright 2017 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import org.junit.Before;
import org.junit.Test;
/*
 * Historical note (2017-01-14): these tests were reworked alongside the fix for
 * https://github.com/netty/netty/issues/6209 (HTTP/2 SETTINGS_MAX_HEADER_LIST_SIZE
 * handling). Key points of that change:
 *  - The HPACK decoder gained two limit levels: a soft limit (keep decoding, then
 *    RST_STREAM) and a hard limit (GO_AWAY / close the connection), so HPACK state
 *    stays in sync with the peer.
 *  - The HPACK encoder now checks SETTINGS_MAX_HEADER_LIST_SIZE before mutating
 *    its dynamic-table state.
 *  - Per https://tools.ietf.org/html/rfc7540#section-6.5.2 the initial value of
 *    SETTINGS_MAX_HEADER_LIST_SIZE is unlimited, which these tests rely on.
 */
import static io.netty.handler.codec.http2.Http2CodecUtil.DEFAULT_HEADER_LIST_SIZE;
import static io.netty.handler.codec.http2.Http2CodecUtil.MAX_HEADER_TABLE_SIZE;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
/**
 * Tests for {@link HpackEncoder}, exercised round-trip through a real
 * {@link HpackDecoder} to verify that encoded output is decodable and that
 * the encoder enforces (or correctly refrains from enforcing) the
 * SETTINGS_MAX_HEADER_LIST_SIZE and max-header-table-size limits.
 */
public class HpackEncoderTest {
    private HpackDecoder hpackDecoder;
    private HpackEncoder hpackEncoder;
    private Http2Headers mockHeaders;

    @Before
    public void setUp() {
        hpackEncoder = new HpackEncoder();
        // Small (32 byte) initial table size; individual tests raise it as needed.
        hpackDecoder = new HpackDecoder(DEFAULT_HEADER_LIST_SIZE, 32);
        mockHeaders = mock(Http2Headers.class);
    }

    /**
     * The encoder must accept the maximum legal header table size and emit a
     * dynamic-table-size update that the decoder can apply.
     */
    @Test
    public void testSetMaxHeaderTableSizeToMaxValue() throws Http2Exception {
        ByteBuf buf = Unpooled.buffer();
        hpackEncoder.setMaxHeaderTableSize(buf, MAX_HEADER_TABLE_SIZE);
        hpackDecoder.setMaxHeaderTableSize(MAX_HEADER_TABLE_SIZE);
        hpackDecoder.decode(0, buf, mockHeaders, true);
        assertEquals(MAX_HEADER_TABLE_SIZE, hpackDecoder.getMaxHeaderTableSize());
        buf.release();
    }

    /**
     * Values beyond MAX_HEADER_TABLE_SIZE must be rejected rather than
     * silently truncated or overflowed.
     */
    @Test(expected = Http2Exception.class)
    public void testSetMaxHeaderTableSizeOverflow() throws Http2Exception {
        ByteBuf buf = Unpooled.buffer();
        try {
            hpackEncoder.setMaxHeaderTableSize(buf, MAX_HEADER_TABLE_SIZE + 1);
        } finally {
            buf.release();
        }
    }

    /**
     * The encoder should not impose an arbitrary limit on the header size if
     * the server has not specified any limit. Per RFC 7540 section 6.5.2 the
     * initial value of SETTINGS_MAX_HEADER_LIST_SIZE is unlimited, so a 16 MiB
     * header must encode successfully by default.
     */
    @Test
    public void testWillEncode16MBHeaderByDefault() throws Http2Exception {
        ByteBuf buf = Unpooled.buffer();
        final String bigHeaderName = "x-big-header";
        final int bigHeaderSize = 1024 * 1024 * 16;
        final String bigHeaderVal = new String(new char[bigHeaderSize]).replace('\0', 'X');
        // Reuse bigHeaderName so the encode and decode sides cannot drift apart.
        Http2Headers headersIn = new DefaultHttp2Headers().add(bigHeaderName, bigHeaderVal);
        Http2Headers headersOut = new DefaultHttp2Headers();
        try {
            hpackEncoder.encodeHeaders(0, buf, headersIn, Http2HeadersEncoder.NEVER_SENSITIVE);
            // Raise the decoder's list-size limit so the round trip can complete.
            hpackDecoder.setMaxHeaderListSize(bigHeaderSize + 1024);
            hpackDecoder.decode(0, buf, headersOut, false);
        } finally {
            buf.release();
        }
        // JUnit convention: expected value first, actual value second.
        assertEquals(bigHeaderVal, headersOut.get(bigHeaderName).toString());
    }

    /**
     * Once a max header list size is configured on the encoder, encoding a
     * header block that exceeds it must fail with {@link Http2Exception}.
     */
    @Test(expected = Http2Exception.class)
    public void testSetMaxHeaderListSizeEnforcedAfterSet() throws Http2Exception {
        ByteBuf buf = Unpooled.buffer();
        Http2Headers headers = new DefaultHttp2Headers().add(
                "x-big-header",
                new String(new char[1024 * 16]).replace('\0', 'X')
        );
        hpackEncoder.setMaxHeaderListSize(1000);
        try {
            hpackEncoder.encodeHeaders(0, buf, headers, Http2HeadersEncoder.NEVER_SENSITIVE);
        } finally {
            buf.release();
        }
    }
}