http2: Make MAX_HEADER_LIST_SIZE exceeded a stream error when encoding.
Motivation: The SETTINGS_MAX_HEADER_LIST_SIZE limit, as enforced by the HPACK Encoder, should be a stream error and not apply to the whole connection. Modifications: Made the necessary changes for the exception to be of type StreamException. Result: A HEADERS frame exceeding the limit only affects the specific stream, not the whole connection.
This commit is contained in:
parent
5f533b7358
commit
c9918de37b
@ -303,7 +303,7 @@ public class DefaultHttp2FrameWriter implements Http2FrameWriter, Http2FrameSize
|
|||||||
|
|
||||||
// Encode the entire header block into an intermediate buffer.
|
// Encode the entire header block into an intermediate buffer.
|
||||||
headerBlock = ctx.alloc().buffer();
|
headerBlock = ctx.alloc().buffer();
|
||||||
headersEncoder.encodeHeaders(headers, headerBlock);
|
headersEncoder.encodeHeaders(streamId, headers, headerBlock);
|
||||||
|
|
||||||
// Read the first fragment (possibly everything).
|
// Read the first fragment (possibly everything).
|
||||||
Http2Flags flags = new Http2Flags().paddingPresent(padding > 0);
|
Http2Flags flags = new Http2Flags().paddingPresent(padding > 0);
|
||||||
@ -427,7 +427,7 @@ public class DefaultHttp2FrameWriter implements Http2FrameWriter, Http2FrameSize
|
|||||||
|
|
||||||
// Encode the entire header block.
|
// Encode the entire header block.
|
||||||
headerBlock = ctx.alloc().buffer();
|
headerBlock = ctx.alloc().buffer();
|
||||||
headersEncoder.encodeHeaders(headers, headerBlock);
|
headersEncoder.encodeHeaders(streamId, headers, headerBlock);
|
||||||
|
|
||||||
Http2Flags flags =
|
Http2Flags flags =
|
||||||
new Http2Flags().endOfStream(endStream).priorityPresent(hasPriority).paddingPresent(padding > 0);
|
new Http2Flags().endOfStream(endStream).priorityPresent(hasPriority).paddingPresent(padding > 0);
|
||||||
|
@ -59,7 +59,7 @@ public class DefaultHttp2HeadersEncoder implements Http2HeadersEncoder, Http2Hea
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void encodeHeaders(Http2Headers headers, ByteBuf buffer) throws Http2Exception {
|
public void encodeHeaders(int streamId, Http2Headers headers, ByteBuf buffer) throws Http2Exception {
|
||||||
try {
|
try {
|
||||||
// If there was a change in the table size, serialize the output from the encoder
|
// If there was a change in the table size, serialize the output from the encoder
|
||||||
// resulting from that change.
|
// resulting from that change.
|
||||||
@ -68,7 +68,7 @@ public class DefaultHttp2HeadersEncoder implements Http2HeadersEncoder, Http2Hea
|
|||||||
tableSizeChangeOutput.clear();
|
tableSizeChangeOutput.clear();
|
||||||
}
|
}
|
||||||
|
|
||||||
encoder.encodeHeaders(buffer, headers, sensitivityDetector);
|
encoder.encodeHeaders(streamId, buffer, headers, sensitivityDetector);
|
||||||
} catch (Http2Exception e) {
|
} catch (Http2Exception e) {
|
||||||
throw e;
|
throw e;
|
||||||
} catch (Throwable t) {
|
} catch (Throwable t) {
|
||||||
|
@ -54,10 +54,11 @@ public interface Http2HeadersEncoder {
|
|||||||
/**
|
/**
|
||||||
* Encodes the given headers and writes the output headers block to the given output buffer.
|
* Encodes the given headers and writes the output headers block to the given output buffer.
|
||||||
*
|
*
|
||||||
|
* @param streamId the identifier of the stream for which the headers are encoded.
|
||||||
* @param headers the headers to be encoded.
|
* @param headers the headers to be encoded.
|
||||||
* @param buffer the buffer to receive the encoded headers.
|
* @param buffer the buffer to receive the encoded headers.
|
||||||
*/
|
*/
|
||||||
void encodeHeaders(Http2Headers headers, ByteBuf buffer) throws Http2Exception;
|
void encodeHeaders(int streamId, Http2Headers headers, ByteBuf buffer) throws Http2Exception;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get the {@link Configuration} for this {@link Http2HeadersEncoder}
|
* Get the {@link Configuration} for this {@link Http2HeadersEncoder}
|
||||||
|
@ -32,6 +32,7 @@
|
|||||||
package io.netty.handler.codec.http2.internal.hpack;
|
package io.netty.handler.codec.http2.internal.hpack;
|
||||||
|
|
||||||
import io.netty.buffer.ByteBuf;
|
import io.netty.buffer.ByteBuf;
|
||||||
|
import io.netty.handler.codec.http2.Http2CodecUtil;
|
||||||
import io.netty.handler.codec.http2.Http2Exception;
|
import io.netty.handler.codec.http2.Http2Exception;
|
||||||
import io.netty.handler.codec.http2.Http2Headers;
|
import io.netty.handler.codec.http2.Http2Headers;
|
||||||
import io.netty.handler.codec.http2.Http2HeadersEncoder.SensitivityDetector;
|
import io.netty.handler.codec.http2.Http2HeadersEncoder.SensitivityDetector;
|
||||||
@ -47,6 +48,7 @@ import static io.netty.handler.codec.http2.Http2CodecUtil.MAX_HEADER_LIST_SIZE;
|
|||||||
import static io.netty.handler.codec.http2.Http2CodecUtil.MAX_HEADER_TABLE_SIZE;
|
import static io.netty.handler.codec.http2.Http2CodecUtil.MAX_HEADER_TABLE_SIZE;
|
||||||
import static io.netty.handler.codec.http2.Http2CodecUtil.MIN_HEADER_LIST_SIZE;
|
import static io.netty.handler.codec.http2.Http2CodecUtil.MIN_HEADER_LIST_SIZE;
|
||||||
import static io.netty.handler.codec.http2.Http2CodecUtil.MIN_HEADER_TABLE_SIZE;
|
import static io.netty.handler.codec.http2.Http2CodecUtil.MIN_HEADER_TABLE_SIZE;
|
||||||
|
import static io.netty.handler.codec.http2.Http2CodecUtil.headerListSizeExceeded;
|
||||||
import static io.netty.handler.codec.http2.Http2Error.PROTOCOL_ERROR;
|
import static io.netty.handler.codec.http2.Http2Error.PROTOCOL_ERROR;
|
||||||
import static io.netty.handler.codec.http2.Http2Exception.connectionError;
|
import static io.netty.handler.codec.http2.Http2Exception.connectionError;
|
||||||
import static io.netty.handler.codec.http2.internal.hpack.HpackUtil.IndexType.INCREMENTAL;
|
import static io.netty.handler.codec.http2.internal.hpack.HpackUtil.IndexType.INCREMENTAL;
|
||||||
@ -102,16 +104,16 @@ public final class Encoder {
|
|||||||
*
|
*
|
||||||
* <strong>The given {@link CharSequence}s must be immutable!</strong>
|
* <strong>The given {@link CharSequence}s must be immutable!</strong>
|
||||||
*/
|
*/
|
||||||
public void encodeHeaders(ByteBuf out, Http2Headers headers, SensitivityDetector sensitivityDetector)
|
public void encodeHeaders(int streamId, ByteBuf out, Http2Headers headers, SensitivityDetector sensitivityDetector)
|
||||||
throws Http2Exception {
|
throws Http2Exception {
|
||||||
if (ignoreMaxHeaderListSize) {
|
if (ignoreMaxHeaderListSize) {
|
||||||
encodeHeadersIgnoreMaxHeaderListSize(out, headers, sensitivityDetector);
|
encodeHeadersIgnoreMaxHeaderListSize(out, headers, sensitivityDetector);
|
||||||
} else {
|
} else {
|
||||||
encodeHeadersEnforceMaxHeaderListSize(out, headers, sensitivityDetector);
|
encodeHeadersEnforceMaxHeaderListSize(streamId, out, headers, sensitivityDetector);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private void encodeHeadersEnforceMaxHeaderListSize(ByteBuf out, Http2Headers headers,
|
private void encodeHeadersEnforceMaxHeaderListSize(int streamId, ByteBuf out, Http2Headers headers,
|
||||||
SensitivityDetector sensitivityDetector)
|
SensitivityDetector sensitivityDetector)
|
||||||
throws Http2Exception {
|
throws Http2Exception {
|
||||||
long headerSize = 0;
|
long headerSize = 0;
|
||||||
@ -123,8 +125,7 @@ public final class Encoder {
|
|||||||
// overflow.
|
// overflow.
|
||||||
headerSize += currHeaderSize;
|
headerSize += currHeaderSize;
|
||||||
if (headerSize > maxHeaderListSize) {
|
if (headerSize > maxHeaderListSize) {
|
||||||
throw connectionError(PROTOCOL_ERROR, "Header list size octets (%d) exceeds maxHeaderListSize (%d)",
|
headerListSizeExceeded(streamId, maxHeaderListSize);
|
||||||
headerSize, maxHeaderListSize);
|
|
||||||
}
|
}
|
||||||
encodeHeader(out, name, value, sensitivityDetector.isSensitive(name, value), currHeaderSize);
|
encodeHeader(out, name, value, sensitivityDetector.isSensitive(name, value), currHeaderSize);
|
||||||
}
|
}
|
||||||
|
@ -77,7 +77,7 @@ public class DefaultHttp2HeadersDecoderTest {
|
|||||||
for (int ix = 0; ix < entries.length;) {
|
for (int ix = 0; ix < entries.length;) {
|
||||||
http2Headers.add(new AsciiString(entries[ix++], false), new AsciiString(entries[ix++], false));
|
http2Headers.add(new AsciiString(entries[ix++], false), new AsciiString(entries[ix++], false));
|
||||||
}
|
}
|
||||||
encoder.encodeHeaders(out, http2Headers, NEVER_SENSITIVE);
|
encoder.encodeHeaders(3 /* randomly chosen */, out, http2Headers, NEVER_SENSITIVE);
|
||||||
return out;
|
return out;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -17,6 +17,7 @@ package io.netty.handler.codec.http2;
|
|||||||
|
|
||||||
import io.netty.buffer.ByteBuf;
|
import io.netty.buffer.ByteBuf;
|
||||||
import io.netty.buffer.Unpooled;
|
import io.netty.buffer.Unpooled;
|
||||||
|
import io.netty.handler.codec.http2.Http2Exception.StreamException;
|
||||||
import io.netty.util.AsciiString;
|
import io.netty.util.AsciiString;
|
||||||
import org.junit.Before;
|
import org.junit.Before;
|
||||||
import org.junit.Test;
|
import org.junit.Test;
|
||||||
@ -41,18 +42,18 @@ public class DefaultHttp2HeadersEncoderTest {
|
|||||||
Http2Headers headers = headers();
|
Http2Headers headers = headers();
|
||||||
ByteBuf buf = Unpooled.buffer();
|
ByteBuf buf = Unpooled.buffer();
|
||||||
try {
|
try {
|
||||||
encoder.encodeHeaders(headers, buf);
|
encoder.encodeHeaders(3 /* randomly chosen */, headers, buf);
|
||||||
assertTrue(buf.writerIndex() > 0);
|
assertTrue(buf.writerIndex() > 0);
|
||||||
} finally {
|
} finally {
|
||||||
buf.release();
|
buf.release();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test(expected = Http2Exception.class)
|
@Test(expected = StreamException.class)
|
||||||
public void headersExceedMaxSetSizeShouldFail() throws Http2Exception {
|
public void headersExceedMaxSetSizeShouldFail() throws Http2Exception {
|
||||||
Http2Headers headers = headers();
|
Http2Headers headers = headers();
|
||||||
encoder.headerTable().maxHeaderListSize(2);
|
encoder.headerTable().maxHeaderListSize(2);
|
||||||
encoder.encodeHeaders(headers, Unpooled.buffer());
|
encoder.encodeHeaders(3 /* randomly chosen */, headers, Unpooled.buffer());
|
||||||
}
|
}
|
||||||
|
|
||||||
private static Http2Headers headers() {
|
private static Http2Headers headers() {
|
||||||
|
@ -81,7 +81,7 @@ public class Http2HeaderBlockIOTest {
|
|||||||
}
|
}
|
||||||
|
|
||||||
private void assertRoundtripSuccessful(Http2Headers in) throws Http2Exception {
|
private void assertRoundtripSuccessful(Http2Headers in) throws Http2Exception {
|
||||||
encoder.encodeHeaders(in, buffer);
|
encoder.encodeHeaders(3 /* randomly chosen */, in, buffer);
|
||||||
|
|
||||||
Http2Headers out = decoder.decodeHeaders(0, buffer);
|
Http2Headers out = decoder.decodeHeaders(0, buffer);
|
||||||
assertEquals(in, out);
|
assertEquals(in, out);
|
||||||
|
@ -195,7 +195,7 @@ final class TestCase {
|
|||||||
encoder.setMaxHeaderTableSize(buffer, maxHeaderTableSize);
|
encoder.setMaxHeaderTableSize(buffer, maxHeaderTableSize);
|
||||||
}
|
}
|
||||||
|
|
||||||
encoder.encodeHeaders(buffer, http2Headers, sensitivityDetector);
|
encoder.encodeHeaders(3 /* randomly chosen */, buffer, http2Headers, sensitivityDetector);
|
||||||
byte[] bytes = new byte[buffer.readableBytes()];
|
byte[] bytes = new byte[buffer.readableBytes()];
|
||||||
buffer.readBytes(bytes);
|
buffer.readBytes(bytes);
|
||||||
return bytes;
|
return bytes;
|
||||||
|
@ -96,8 +96,9 @@ public class DecoderBenchmark extends AbstractMicrobenchmark {
|
|||||||
Encoder encoder = newTestEncoder();
|
Encoder encoder = newTestEncoder();
|
||||||
ByteBuf out = size.newOutBuffer();
|
ByteBuf out = size.newOutBuffer();
|
||||||
try {
|
try {
|
||||||
encoder.encodeHeaders(out, headers, sensitive ? Http2HeadersEncoder.ALWAYS_SENSITIVE
|
encoder.encodeHeaders(3 /* randomly chosen */, out, headers,
|
||||||
: Http2HeadersEncoder.NEVER_SENSITIVE);
|
sensitive ? Http2HeadersEncoder.ALWAYS_SENSITIVE
|
||||||
|
: Http2HeadersEncoder.NEVER_SENSITIVE);
|
||||||
byte[] bytes = new byte[out.readableBytes()];
|
byte[] bytes = new byte[out.readableBytes()];
|
||||||
out.readBytes(bytes);
|
out.readBytes(bytes);
|
||||||
return bytes;
|
return bytes;
|
||||||
|
@ -110,7 +110,7 @@ public class EncoderBenchmark extends AbstractMicrobenchmark {
|
|||||||
public void encode(Blackhole bh) throws Exception {
|
public void encode(Blackhole bh) throws Exception {
|
||||||
Encoder encoder = newTestEncoder();
|
Encoder encoder = newTestEncoder();
|
||||||
output.clear();
|
output.clear();
|
||||||
encoder.encodeHeaders(output, http2Headers, sensitivityDetector);
|
encoder.encodeHeaders(3 /*randomly chosen*/, output, http2Headers, sensitivityDetector);
|
||||||
bh.consume(output);
|
bh.consume(output);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
Loading…
x
Reference in New Issue
Block a user