http2: Make MAX_HEADER_LIST_SIZE exceeded a stream error when encoding.

Motivation:

Exceeding the SETTINGS_MAX_HEADER_LIST_SIZE limit, as enforced by the HPACK Encoder, should produce a stream error rather than an error that takes down the whole connection.

Modifications:

Made the HPACK Encoder throw a StreamException when the limit is exceeded, and threaded the stream identifier through the header-encoding API so the error can be attributed to the right stream.

Result:

A HEADERS frame exceeding the limit only affects its own stream; the connection and its other streams stay usable.
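
For context, Netty models the two severities with static factories on Http2Exception; the sketch below contrasts them (streamError and connectionError are real Netty 4.1 APIs, the wrapper class is illustrative only):

    import io.netty.handler.codec.http2.Http2Exception;

    import static io.netty.handler.codec.http2.Http2Error.PROTOCOL_ERROR;
    import static io.netty.handler.codec.http2.Http2Exception.connectionError;
    import static io.netty.handler.codec.http2.Http2Exception.streamError;

    final class ErrorScopeSketch {
        // Before this commit: the violation was fatal for the whole
        // connection (the peer ends up seeing a GOAWAY).
        static Http2Exception oldBehaviour(long size, long max) {
            return connectionError(PROTOCOL_ERROR,
                    "Header list size octets (%d) exceeds maxHeaderListSize (%d)", size, max);
        }

        // After this commit: the violation is scoped to one stream
        // (the peer ends up seeing an RST_STREAM for that stream only).
        static Http2Exception newBehaviour(int streamId, long max) {
            return streamError(streamId, PROTOCOL_ERROR,
                    "Header size exceeded max allowed size (%d)", max);
        }
    }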
buchgr 2016-10-17 14:33:27 +02:00 committed by Scott Mitchell
parent 5f533b7358
commit c9918de37b
10 changed files with 23 additions and 19 deletions

View File

@@ -303,7 +303,7 @@ public class DefaultHttp2FrameWriter implements Http2FrameWriter, Http2FrameSize
// Encode the entire header block into an intermediate buffer.
headerBlock = ctx.alloc().buffer();
- headersEncoder.encodeHeaders(headers, headerBlock);
+ headersEncoder.encodeHeaders(streamId, headers, headerBlock);
// Read the first fragment (possibly everything).
Http2Flags flags = new Http2Flags().paddingPresent(padding > 0);
@@ -427,7 +427,7 @@ public class DefaultHttp2FrameWriter implements Http2FrameWriter, Http2FrameSize
// Encode the entire header block.
headerBlock = ctx.alloc().buffer();
- headersEncoder.encodeHeaders(headers, headerBlock);
+ headersEncoder.encodeHeaders(streamId, headers, headerBlock);
Http2Flags flags =
new Http2Flags().endOfStream(endStream).priorityPresent(hasPriority).paddingPresent(padding > 0);

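Because DefaultHttp2FrameWriter surfaces encode failures through the write promise, a caller can now react per stream instead of per connection. A minimal sketch, assuming the failure arrives as the returned future's cause (the wrapper class and the recovery policy are hypothetical):

    import io.netty.channel.ChannelFuture;
    import io.netty.channel.ChannelHandlerContext;
    import io.netty.handler.codec.http2.Http2Exception.StreamException;
    import io.netty.handler.codec.http2.Http2FrameWriter;
    import io.netty.handler.codec.http2.Http2Headers;

    final class PerStreamFailureSketch {
        void write(Http2FrameWriter writer, ChannelHandlerContext ctx,
                   int streamId, Http2Headers headers) {
            ChannelFuture f = writer.writeHeaders(ctx, streamId, headers,
                    0 /* padding */, true /* endStream */, ctx.newPromise());
            f.addListener(future -> {
                if (future.cause() instanceof StreamException) {
                    // Only this stream failed; the connection and its other
                    // streams remain usable.
                    int failed = ((StreamException) future.cause()).streamId();
                    // e.g. surface the error to whatever request owned `failed`.
                }
            });
        }
    }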
View File

@@ -59,7 +59,7 @@ public class DefaultHttp2HeadersEncoder implements Http2HeadersEncoder, Http2Hea
}
@Override
- public void encodeHeaders(Http2Headers headers, ByteBuf buffer) throws Http2Exception {
+ public void encodeHeaders(int streamId, Http2Headers headers, ByteBuf buffer) throws Http2Exception {
try {
// If there was a change in the table size, serialize the output from the encoder
// resulting from that change.
@@ -68,7 +68,7 @@ public class DefaultHttp2HeadersEncoder implements Http2HeadersEncoder, Http2Hea
tableSizeChangeOutput.clear();
}
- encoder.encodeHeaders(buffer, headers, sensitivityDetector);
+ encoder.encodeHeaders(streamId, buffer, headers, sensitivityDetector);
} catch (Http2Exception e) {
throw e;
} catch (Throwable t) {

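The hunk above is cut off inside the second catch block. Presumably the remainder wraps unexpected failures as a fatal connection error, along these lines (a sketch of the pattern, not the verbatim file; connectionError and COMPRESSION_ERROR are real Netty APIs):

    } catch (Http2Exception e) {
        // Stream and connection errors from the HPACK Encoder pass through
        // unchanged, so a StreamException keeps its per-stream scope.
        throw e;
    } catch (Throwable t) {
        // Anything unexpected is treated as fatal for the connection.
        throw connectionError(COMPRESSION_ERROR, t,
                "Failed encoding headers block: %s", t.getMessage());
    }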
View File

@@ -54,10 +54,11 @@ public interface Http2HeadersEncoder {
/**
* Encodes the given headers and writes the output headers block to the given output buffer.
*
+ * @param streamId the identifier of the stream for which the headers are encoded.
* @param headers the headers to be encoded.
* @param buffer the buffer to receive the encoded headers.
*/
- void encodeHeaders(Http2Headers headers, ByteBuf buffer) throws Http2Exception;
+ void encodeHeaders(int streamId, Http2Headers headers, ByteBuf buffer) throws Http2Exception;
/**
* Get the {@link Configuration} for this {@link Http2HeadersEncoder}

View File

@@ -32,6 +32,7 @@
package io.netty.handler.codec.http2.internal.hpack;
import io.netty.buffer.ByteBuf;
import io.netty.handler.codec.http2.Http2CodecUtil;
import io.netty.handler.codec.http2.Http2Exception;
import io.netty.handler.codec.http2.Http2Headers;
import io.netty.handler.codec.http2.Http2HeadersEncoder.SensitivityDetector;
@@ -47,6 +48,7 @@ import static io.netty.handler.codec.http2.Http2CodecUtil.MAX_HEADER_LIST_SIZE;
import static io.netty.handler.codec.http2.Http2CodecUtil.MAX_HEADER_TABLE_SIZE;
import static io.netty.handler.codec.http2.Http2CodecUtil.MIN_HEADER_LIST_SIZE;
import static io.netty.handler.codec.http2.Http2CodecUtil.MIN_HEADER_TABLE_SIZE;
+ import static io.netty.handler.codec.http2.Http2CodecUtil.headerListSizeExceeded;
import static io.netty.handler.codec.http2.Http2Error.PROTOCOL_ERROR;
import static io.netty.handler.codec.http2.Http2Exception.connectionError;
import static io.netty.handler.codec.http2.internal.hpack.HpackUtil.IndexType.INCREMENTAL;
@@ -102,16 +104,16 @@ public final class Encoder {
*
* <strong>The given {@link CharSequence}s must be immutable!</strong>
*/
- public void encodeHeaders(ByteBuf out, Http2Headers headers, SensitivityDetector sensitivityDetector)
+ public void encodeHeaders(int streamId, ByteBuf out, Http2Headers headers, SensitivityDetector sensitivityDetector)
throws Http2Exception {
if (ignoreMaxHeaderListSize) {
encodeHeadersIgnoreMaxHeaderListSize(out, headers, sensitivityDetector);
} else {
- encodeHeadersEnforceMaxHeaderListSize(out, headers, sensitivityDetector);
+ encodeHeadersEnforceMaxHeaderListSize(streamId, out, headers, sensitivityDetector);
}
}
- private void encodeHeadersEnforceMaxHeaderListSize(ByteBuf out, Http2Headers headers,
+ private void encodeHeadersEnforceMaxHeaderListSize(int streamId, ByteBuf out, Http2Headers headers,
SensitivityDetector sensitivityDetector)
throws Http2Exception {
long headerSize = 0;
@@ -123,8 +125,7 @@ public final class Encoder {
// overflow.
headerSize += currHeaderSize;
if (headerSize > maxHeaderListSize) {
- throw connectionError(PROTOCOL_ERROR, "Header list size octets (%d) exceeds maxHeaderListSize (%d)",
- headerSize, maxHeaderListSize);
+ headerListSizeExceeded(streamId, maxHeaderListSize);
}
encodeHeader(out, name, value, sensitivityDetector.isSensitive(name, value), currHeaderSize);
}

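The new headerListSizeExceeded helper comes from Http2CodecUtil (see the static import added above); its body is not part of this diff, but given the Result above it presumably reduces to a one-line stream error, roughly (a sketch, an assumption about the helper's implementation; it would live in Http2CodecUtil with static imports of streamError and PROTOCOL_ERROR):

    public static void headerListSizeExceeded(int streamId, long maxHeaderListSize)
            throws Http2Exception {
        // Scope the violation to the offending stream instead of the connection.
        throw streamError(streamId, PROTOCOL_ERROR,
                "Header size exceeded max allowed size (%d)", maxHeaderListSize);
    }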
View File

@@ -77,7 +77,7 @@ public class DefaultHttp2HeadersDecoderTest {
for (int ix = 0; ix < entries.length;) {
http2Headers.add(new AsciiString(entries[ix++], false), new AsciiString(entries[ix++], false));
}
- encoder.encodeHeaders(out, http2Headers, NEVER_SENSITIVE);
+ encoder.encodeHeaders(3 /* randomly chosen */, out, http2Headers, NEVER_SENSITIVE);
return out;
}
}

View File

@@ -17,6 +17,7 @@ package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
+ import io.netty.handler.codec.http2.Http2Exception.StreamException;
import io.netty.util.AsciiString;
import org.junit.Before;
import org.junit.Test;
@@ -41,18 +42,18 @@ public class DefaultHttp2HeadersEncoderTest {
Http2Headers headers = headers();
ByteBuf buf = Unpooled.buffer();
try {
- encoder.encodeHeaders(headers, buf);
+ encoder.encodeHeaders(3 /* randomly chosen */, headers, buf);
assertTrue(buf.writerIndex() > 0);
} finally {
buf.release();
}
}
- @Test(expected = Http2Exception.class)
+ @Test(expected = StreamException.class)
public void headersExceedMaxSetSizeShouldFail() throws Http2Exception {
Http2Headers headers = headers();
encoder.headerTable().maxHeaderListSize(2);
- encoder.encodeHeaders(headers, Unpooled.buffer());
+ encoder.encodeHeaders(3 /* randomly chosen */, headers, Unpooled.buffer());
}
private static Http2Headers headers() {

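A natural follow-on check, not part of this commit, would pin the error to the stream identifier that was passed in (hypothetical test; assumes org.junit.Assert.fail/assertEquals and the streamId() accessor on Http2Exception.StreamException):

    @Test
    public void headersExceedMaxSetSizeShouldCarryStreamId() throws Http2Exception {
        Http2Headers headers = headers();
        encoder.headerTable().maxHeaderListSize(2);
        try {
            encoder.encodeHeaders(3 /* randomly chosen */, headers, Unpooled.buffer());
            fail("Expected a StreamException");
        } catch (StreamException e) {
            // The exception should be attributed to the stream we encoded for.
            assertEquals(3, e.streamId());
        }
    }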
View File

@@ -81,7 +81,7 @@ public class Http2HeaderBlockIOTest {
}
private void assertRoundtripSuccessful(Http2Headers in) throws Http2Exception {
- encoder.encodeHeaders(in, buffer);
+ encoder.encodeHeaders(3 /* randomly chosen */, in, buffer);
Http2Headers out = decoder.decodeHeaders(0, buffer);
assertEquals(in, out);

View File

@@ -195,7 +195,7 @@ final class TestCase {
encoder.setMaxHeaderTableSize(buffer, maxHeaderTableSize);
}
- encoder.encodeHeaders(buffer, http2Headers, sensitivityDetector);
+ encoder.encodeHeaders(3 /* randomly chosen */, buffer, http2Headers, sensitivityDetector);
byte[] bytes = new byte[buffer.readableBytes()];
buffer.readBytes(bytes);
return bytes;

View File

@@ -96,8 +96,9 @@ public class DecoderBenchmark extends AbstractMicrobenchmark {
Encoder encoder = newTestEncoder();
ByteBuf out = size.newOutBuffer();
try {
- encoder.encodeHeaders(out, headers, sensitive ? Http2HeadersEncoder.ALWAYS_SENSITIVE
- : Http2HeadersEncoder.NEVER_SENSITIVE);
+ encoder.encodeHeaders(3 /* randomly chosen */, out, headers,
+ sensitive ? Http2HeadersEncoder.ALWAYS_SENSITIVE
+ : Http2HeadersEncoder.NEVER_SENSITIVE);
byte[] bytes = new byte[out.readableBytes()];
out.readBytes(bytes);
return bytes;

View File

@@ -110,7 +110,7 @@ public class EncoderBenchmark extends AbstractMicrobenchmark {
public void encode(Blackhole bh) throws Exception {
Encoder encoder = newTestEncoder();
output.clear();
- encoder.encodeHeaders(output, http2Headers, sensitivityDetector);
+ encoder.encodeHeaders(3 /*randomly chosen*/, output, http2Headers, sensitivityDetector);
bh.consume(output);
}
}