HTTP/2 move internal HPACK classes to the http2 package

Motivation:
The internal.hpack classes are no longer exposed in our public APIs and can be made package-private in the http2 package.

Modifications:
- Make the hpack classes package-private in the http2 package

Result:
Fewer APIs exposed as public.
This commit is contained in:
Scott Mitchell 2017-03-01 21:30:55 -08:00
parent 7e7e10fb1e
commit f9001b9fc0
43 changed files with 368 additions and 478 deletions

View File

@ -16,7 +16,6 @@
package io.netty.handler.codec.http2; package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBuf;
import io.netty.handler.codec.http2.internal.hpack.Decoder;
import io.netty.util.internal.ObjectUtil; import io.netty.util.internal.ObjectUtil;
import io.netty.util.internal.UnstableApi; import io.netty.util.internal.UnstableApi;
@ -29,7 +28,7 @@ public class DefaultHttp2HeadersDecoder implements Http2HeadersDecoder, Http2Hea
private static final float HEADERS_COUNT_WEIGHT_NEW = 1 / 5f; private static final float HEADERS_COUNT_WEIGHT_NEW = 1 / 5f;
private static final float HEADERS_COUNT_WEIGHT_HISTORICAL = 1 - HEADERS_COUNT_WEIGHT_NEW; private static final float HEADERS_COUNT_WEIGHT_HISTORICAL = 1 - HEADERS_COUNT_WEIGHT_NEW;
private final Decoder decoder; private final HpackDecoder hpackDecoder;
private final boolean validateHeaders; private final boolean validateHeaders;
/** /**
* Used to calculate an exponential moving average of header sizes to get an estimate of how large the data * Used to calculate an exponential moving average of header sizes to get an estimate of how large the data
@ -68,41 +67,41 @@ public class DefaultHttp2HeadersDecoder implements Http2HeadersDecoder, Http2Hea
*/ */
public DefaultHttp2HeadersDecoder(boolean validateHeaders, long maxHeaderListSize, public DefaultHttp2HeadersDecoder(boolean validateHeaders, long maxHeaderListSize,
int initialHuffmanDecodeCapacity) { int initialHuffmanDecodeCapacity) {
this(validateHeaders, new Decoder(maxHeaderListSize, initialHuffmanDecodeCapacity)); this(validateHeaders, new HpackDecoder(maxHeaderListSize, initialHuffmanDecodeCapacity));
} }
/** /**
* Exposed Used for testing only! Default values used in the initial settings frame are overridden intentionally * Exposed Used for testing only! Default values used in the initial settings frame are overridden intentionally
* for testing but violate the RFC if used outside the scope of testing. * for testing but violate the RFC if used outside the scope of testing.
*/ */
DefaultHttp2HeadersDecoder(boolean validateHeaders, Decoder decoder) { DefaultHttp2HeadersDecoder(boolean validateHeaders, HpackDecoder hpackDecoder) {
this.decoder = ObjectUtil.checkNotNull(decoder, "decoder"); this.hpackDecoder = ObjectUtil.checkNotNull(hpackDecoder, "hpackDecoder");
this.validateHeaders = validateHeaders; this.validateHeaders = validateHeaders;
} }
@Override @Override
public void maxHeaderTableSize(long max) throws Http2Exception { public void maxHeaderTableSize(long max) throws Http2Exception {
decoder.setMaxHeaderTableSize(max); hpackDecoder.setMaxHeaderTableSize(max);
} }
@Override @Override
public long maxHeaderTableSize() { public long maxHeaderTableSize() {
return decoder.getMaxHeaderTableSize(); return hpackDecoder.getMaxHeaderTableSize();
} }
@Override @Override
public void maxHeaderListSize(long max, long goAwayMax) throws Http2Exception { public void maxHeaderListSize(long max, long goAwayMax) throws Http2Exception {
decoder.setMaxHeaderListSize(max, goAwayMax); hpackDecoder.setMaxHeaderListSize(max, goAwayMax);
} }
@Override @Override
public long maxHeaderListSize() { public long maxHeaderListSize() {
return decoder.getMaxHeaderListSize(); return hpackDecoder.getMaxHeaderListSize();
} }
@Override @Override
public long maxHeaderListSizeGoAway() { public long maxHeaderListSizeGoAway() {
return decoder.getMaxHeaderListSizeGoAway(); return hpackDecoder.getMaxHeaderListSizeGoAway();
} }
@Override @Override
@ -114,7 +113,7 @@ public class DefaultHttp2HeadersDecoder implements Http2HeadersDecoder, Http2Hea
public Http2Headers decodeHeaders(int streamId, ByteBuf headerBlock) throws Http2Exception { public Http2Headers decodeHeaders(int streamId, ByteBuf headerBlock) throws Http2Exception {
try { try {
final Http2Headers headers = new DefaultHttp2Headers(validateHeaders, (int) headerArraySizeAccumulator); final Http2Headers headers = new DefaultHttp2Headers(validateHeaders, (int) headerArraySizeAccumulator);
decoder.decode(streamId, headerBlock, headers); hpackDecoder.decode(streamId, headerBlock, headers);
headerArraySizeAccumulator = HEADERS_COUNT_WEIGHT_NEW * headers.size() + headerArraySizeAccumulator = HEADERS_COUNT_WEIGHT_NEW * headers.size() +
HEADERS_COUNT_WEIGHT_HISTORICAL * headerArraySizeAccumulator; HEADERS_COUNT_WEIGHT_HISTORICAL * headerArraySizeAccumulator;
return headers; return headers;

View File

@ -17,7 +17,6 @@ package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled; import io.netty.buffer.Unpooled;
import io.netty.handler.codec.http2.internal.hpack.Encoder;
import io.netty.util.internal.UnstableApi; import io.netty.util.internal.UnstableApi;
import static io.netty.handler.codec.http2.Http2Error.COMPRESSION_ERROR; import static io.netty.handler.codec.http2.Http2Error.COMPRESSION_ERROR;
@ -26,7 +25,7 @@ import static io.netty.util.internal.ObjectUtil.checkNotNull;
@UnstableApi @UnstableApi
public class DefaultHttp2HeadersEncoder implements Http2HeadersEncoder, Http2HeadersEncoder.Configuration { public class DefaultHttp2HeadersEncoder implements Http2HeadersEncoder, Http2HeadersEncoder.Configuration {
private final Encoder encoder; private final HpackEncoder hpackEncoder;
private final SensitivityDetector sensitivityDetector; private final SensitivityDetector sensitivityDetector;
private final ByteBuf tableSizeChangeOutput = Unpooled.buffer(); private final ByteBuf tableSizeChangeOutput = Unpooled.buffer();
@ -35,38 +34,38 @@ public class DefaultHttp2HeadersEncoder implements Http2HeadersEncoder, Http2Hea
} }
public DefaultHttp2HeadersEncoder(SensitivityDetector sensitivityDetector) { public DefaultHttp2HeadersEncoder(SensitivityDetector sensitivityDetector) {
this(sensitivityDetector, new Encoder()); this(sensitivityDetector, new HpackEncoder());
} }
public DefaultHttp2HeadersEncoder(SensitivityDetector sensitivityDetector, boolean ignoreMaxHeaderListSize) { public DefaultHttp2HeadersEncoder(SensitivityDetector sensitivityDetector, boolean ignoreMaxHeaderListSize) {
this(sensitivityDetector, new Encoder(ignoreMaxHeaderListSize)); this(sensitivityDetector, new HpackEncoder(ignoreMaxHeaderListSize));
} }
public DefaultHttp2HeadersEncoder(SensitivityDetector sensitivityDetector, boolean ignoreMaxHeaderListSize, public DefaultHttp2HeadersEncoder(SensitivityDetector sensitivityDetector, boolean ignoreMaxHeaderListSize,
int dynamicTableArraySizeHint) { int dynamicTableArraySizeHint) {
this(sensitivityDetector, new Encoder(ignoreMaxHeaderListSize, dynamicTableArraySizeHint)); this(sensitivityDetector, new HpackEncoder(ignoreMaxHeaderListSize, dynamicTableArraySizeHint));
} }
/** /**
* Exposed Used for testing only! Default values used in the initial settings frame are overridden intentionally * Exposed Used for testing only! Default values used in the initial settings frame are overridden intentionally
* for testing but violate the RFC if used outside the scope of testing. * for testing but violate the RFC if used outside the scope of testing.
*/ */
DefaultHttp2HeadersEncoder(SensitivityDetector sensitivityDetector, Encoder encoder) { DefaultHttp2HeadersEncoder(SensitivityDetector sensitivityDetector, HpackEncoder hpackEncoder) {
this.sensitivityDetector = checkNotNull(sensitivityDetector, "sensitiveDetector"); this.sensitivityDetector = checkNotNull(sensitivityDetector, "sensitiveDetector");
this.encoder = checkNotNull(encoder, "encoder"); this.hpackEncoder = checkNotNull(hpackEncoder, "hpackEncoder");
} }
@Override @Override
public void encodeHeaders(int streamId, Http2Headers headers, ByteBuf buffer) throws Http2Exception { public void encodeHeaders(int streamId, Http2Headers headers, ByteBuf buffer) throws Http2Exception {
try { try {
// If there was a change in the table size, serialize the output from the encoder // If there was a change in the table size, serialize the output from the hpackEncoder
// resulting from that change. // resulting from that change.
if (tableSizeChangeOutput.isReadable()) { if (tableSizeChangeOutput.isReadable()) {
buffer.writeBytes(tableSizeChangeOutput); buffer.writeBytes(tableSizeChangeOutput);
tableSizeChangeOutput.clear(); tableSizeChangeOutput.clear();
} }
encoder.encodeHeaders(streamId, buffer, headers, sensitivityDetector); hpackEncoder.encodeHeaders(streamId, buffer, headers, sensitivityDetector);
} catch (Http2Exception e) { } catch (Http2Exception e) {
throw e; throw e;
} catch (Throwable t) { } catch (Throwable t) {
@ -76,22 +75,22 @@ public class DefaultHttp2HeadersEncoder implements Http2HeadersEncoder, Http2Hea
@Override @Override
public void maxHeaderTableSize(long max) throws Http2Exception { public void maxHeaderTableSize(long max) throws Http2Exception {
encoder.setMaxHeaderTableSize(tableSizeChangeOutput, max); hpackEncoder.setMaxHeaderTableSize(tableSizeChangeOutput, max);
} }
@Override @Override
public long maxHeaderTableSize() { public long maxHeaderTableSize() {
return encoder.getMaxHeaderTableSize(); return hpackEncoder.getMaxHeaderTableSize();
} }
@Override @Override
public void maxHeaderListSize(long max) throws Http2Exception { public void maxHeaderListSize(long max) throws Http2Exception {
encoder.setMaxHeaderListSize(max); hpackEncoder.setMaxHeaderListSize(max);
} }
@Override @Override
public long maxHeaderListSize() { public long maxHeaderListSize() {
return encoder.getMaxHeaderListSize(); return hpackEncoder.getMaxHeaderListSize();
} }
@Override @Override

View File

@ -29,13 +29,10 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package io.netty.handler.codec.http2.internal.hpack; package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBuf;
import io.netty.handler.codec.http2.Http2CodecUtil; import io.netty.handler.codec.http2.HpackUtil.IndexType;
import io.netty.handler.codec.http2.Http2Exception;
import io.netty.handler.codec.http2.Http2Headers;
import io.netty.handler.codec.http2.internal.hpack.HpackUtil.IndexType;
import io.netty.util.AsciiString; import io.netty.util.AsciiString;
import static io.netty.handler.codec.http2.Http2CodecUtil.DEFAULT_HEADER_TABLE_SIZE; import static io.netty.handler.codec.http2.Http2CodecUtil.DEFAULT_HEADER_TABLE_SIZE;
@ -52,25 +49,26 @@ import static io.netty.util.AsciiString.EMPTY_STRING;
import static io.netty.util.internal.ObjectUtil.checkPositive; import static io.netty.util.internal.ObjectUtil.checkPositive;
import static io.netty.util.internal.ThrowableUtil.unknownStackTrace; import static io.netty.util.internal.ThrowableUtil.unknownStackTrace;
public final class Decoder { final class HpackDecoder {
private static final Http2Exception DECODE_ULE_128_DECOMPRESSION_EXCEPTION = unknownStackTrace( private static final Http2Exception DECODE_ULE_128_DECOMPRESSION_EXCEPTION = unknownStackTrace(
connectionError(COMPRESSION_ERROR, "HPACK - decompression failure"), Decoder.class, "decodeULE128(...)"); connectionError(COMPRESSION_ERROR, "HPACK - decompression failure"), HpackDecoder.class,
"decodeULE128(..)");
private static final Http2Exception DECODE_ULE_128_TO_LONG_DECOMPRESSION_EXCEPTION = unknownStackTrace( private static final Http2Exception DECODE_ULE_128_TO_LONG_DECOMPRESSION_EXCEPTION = unknownStackTrace(
connectionError(COMPRESSION_ERROR, "HPACK - long overflow"), Decoder.class, "decodeULE128(...)"); connectionError(COMPRESSION_ERROR, "HPACK - long overflow"), HpackDecoder.class, "decodeULE128(..)");
private static final Http2Exception DECODE_ULE_128_TO_INT_DECOMPRESSION_EXCEPTION = unknownStackTrace( private static final Http2Exception DECODE_ULE_128_TO_INT_DECOMPRESSION_EXCEPTION = unknownStackTrace(
connectionError(COMPRESSION_ERROR, "HPACK - int overflow"), Decoder.class, "decodeULE128ToInt(...)"); connectionError(COMPRESSION_ERROR, "HPACK - int overflow"), HpackDecoder.class, "decodeULE128ToInt(..)");
private static final Http2Exception DECODE_ILLEGAL_INDEX_VALUE = unknownStackTrace( private static final Http2Exception DECODE_ILLEGAL_INDEX_VALUE = unknownStackTrace(
connectionError(COMPRESSION_ERROR, "HPACK - illegal index value"), Decoder.class, "decode(...)"); connectionError(COMPRESSION_ERROR, "HPACK - illegal index value"), HpackDecoder.class, "decode(..)");
private static final Http2Exception INDEX_HEADER_ILLEGAL_INDEX_VALUE = unknownStackTrace( private static final Http2Exception INDEX_HEADER_ILLEGAL_INDEX_VALUE = unknownStackTrace(
connectionError(COMPRESSION_ERROR, "HPACK - illegal index value"), Decoder.class, "indexHeader(...)"); connectionError(COMPRESSION_ERROR, "HPACK - illegal index value"), HpackDecoder.class, "indexHeader(..)");
private static final Http2Exception READ_NAME_ILLEGAL_INDEX_VALUE = unknownStackTrace( private static final Http2Exception READ_NAME_ILLEGAL_INDEX_VALUE = unknownStackTrace(
connectionError(COMPRESSION_ERROR, "HPACK - illegal index value"), Decoder.class, "readName(...)"); connectionError(COMPRESSION_ERROR, "HPACK - illegal index value"), HpackDecoder.class, "readName(..)");
private static final Http2Exception INVALID_MAX_DYNAMIC_TABLE_SIZE = unknownStackTrace( private static final Http2Exception INVALID_MAX_DYNAMIC_TABLE_SIZE = unknownStackTrace(
connectionError(COMPRESSION_ERROR, "HPACK - invalid max dynamic table size"), Decoder.class, connectionError(COMPRESSION_ERROR, "HPACK - invalid max dynamic table size"), HpackDecoder.class,
"setDynamicTableSize(...)"); "setDynamicTableSize(..)");
private static final Http2Exception MAX_DYNAMIC_TABLE_SIZE_CHANGE_REQUIRED = unknownStackTrace( private static final Http2Exception MAX_DYNAMIC_TABLE_SIZE_CHANGE_REQUIRED = unknownStackTrace(
connectionError(COMPRESSION_ERROR, "HPACK - max dynamic table size change required"), Decoder.class, connectionError(COMPRESSION_ERROR, "HPACK - max dynamic table size change required"), HpackDecoder.class,
"decode(...)"); "decode(..)");
private static final byte READ_HEADER_REPRESENTATION = 0; private static final byte READ_HEADER_REPRESENTATION = 0;
private static final byte READ_MAX_DYNAMIC_TABLE_SIZE = 1; private static final byte READ_MAX_DYNAMIC_TABLE_SIZE = 1;
private static final byte READ_INDEXED_HEADER = 2; private static final byte READ_INDEXED_HEADER = 2;
@ -82,8 +80,8 @@ public final class Decoder {
private static final byte READ_LITERAL_HEADER_VALUE_LENGTH = 8; private static final byte READ_LITERAL_HEADER_VALUE_LENGTH = 8;
private static final byte READ_LITERAL_HEADER_VALUE = 9; private static final byte READ_LITERAL_HEADER_VALUE = 9;
private final DynamicTable dynamicTable; private final HpackDynamicTable hpackDynamicTable;
private final HuffmanDecoder huffmanDecoder; private final HpackHuffmanDecoder hpackHuffmanDecoder;
private long maxHeaderListSizeGoAway; private long maxHeaderListSizeGoAway;
private long maxHeaderListSize; private long maxHeaderListSize;
private long maxDynamicTableSize; private long maxDynamicTableSize;
@ -98,7 +96,7 @@ public final class Decoder {
* (which is dangerous). * (which is dangerous).
* @param initialHuffmanDecodeCapacity Size of an intermediate buffer used during huffman decode. * @param initialHuffmanDecodeCapacity Size of an intermediate buffer used during huffman decode.
*/ */
public Decoder(long maxHeaderListSize, int initialHuffmanDecodeCapacity) { HpackDecoder(long maxHeaderListSize, int initialHuffmanDecodeCapacity) {
this(maxHeaderListSize, initialHuffmanDecodeCapacity, DEFAULT_HEADER_TABLE_SIZE); this(maxHeaderListSize, initialHuffmanDecodeCapacity, DEFAULT_HEADER_TABLE_SIZE);
} }
@ -106,14 +104,14 @@ public final class Decoder {
* Exposed Used for testing only! Default values used in the initial settings frame are overridden intentionally * Exposed Used for testing only! Default values used in the initial settings frame are overridden intentionally
* for testing but violate the RFC if used outside the scope of testing. * for testing but violate the RFC if used outside the scope of testing.
*/ */
Decoder(long maxHeaderListSize, int initialHuffmanDecodeCapacity, int maxHeaderTableSize) { HpackDecoder(long maxHeaderListSize, int initialHuffmanDecodeCapacity, int maxHeaderTableSize) {
this.maxHeaderListSize = checkPositive(maxHeaderListSize, "maxHeaderListSize"); this.maxHeaderListSize = checkPositive(maxHeaderListSize, "maxHeaderListSize");
this.maxHeaderListSizeGoAway = Http2CodecUtil.calculateMaxHeaderListSizeGoAway(maxHeaderListSize); this.maxHeaderListSizeGoAway = Http2CodecUtil.calculateMaxHeaderListSizeGoAway(maxHeaderListSize);
maxDynamicTableSize = encoderMaxDynamicTableSize = maxHeaderTableSize; maxDynamicTableSize = encoderMaxDynamicTableSize = maxHeaderTableSize;
maxDynamicTableSizeChangeRequired = false; maxDynamicTableSizeChangeRequired = false;
dynamicTable = new DynamicTable(maxHeaderTableSize); hpackDynamicTable = new HpackDynamicTable(maxHeaderTableSize);
huffmanDecoder = new HuffmanDecoder(initialHuffmanDecodeCapacity); hpackHuffmanDecoder = new HpackHuffmanDecoder(initialHuffmanDecodeCapacity);
} }
/** /**
@ -135,7 +133,7 @@ public final class Decoder {
case READ_HEADER_REPRESENTATION: case READ_HEADER_REPRESENTATION:
byte b = in.readByte(); byte b = in.readByte();
if (maxDynamicTableSizeChangeRequired && (b & 0xE0) != 0x20) { if (maxDynamicTableSizeChangeRequired && (b & 0xE0) != 0x20) {
// Encoder MUST signal maximum dynamic table size change // HpackEncoder MUST signal maximum dynamic table size change
throw MAX_DYNAMIC_TABLE_SIZE_CHANGE_REQUIRED; throw MAX_DYNAMIC_TABLE_SIZE_CHANGE_REQUIRED;
} }
if (b < 0) { if (b < 0) {
@ -319,7 +317,7 @@ public final class Decoder {
// decoder requires less space than encoder // decoder requires less space than encoder
// encoder MUST signal this change // encoder MUST signal this change
maxDynamicTableSizeChangeRequired = true; maxDynamicTableSizeChangeRequired = true;
dynamicTable.setCapacity(maxDynamicTableSize); hpackDynamicTable.setCapacity(maxDynamicTableSize);
} }
} }
@ -349,28 +347,28 @@ public final class Decoder {
* decoder. * decoder.
*/ */
public long getMaxHeaderTableSize() { public long getMaxHeaderTableSize() {
return dynamicTable.capacity(); return hpackDynamicTable.capacity();
} }
/** /**
* Return the number of header fields in the dynamic table. Exposed for testing. * Return the number of header fields in the dynamic table. Exposed for testing.
*/ */
int length() { int length() {
return dynamicTable.length(); return hpackDynamicTable.length();
} }
/** /**
* Return the size of the dynamic table. Exposed for testing. * Return the size of the dynamic table. Exposed for testing.
*/ */
long size() { long size() {
return dynamicTable.size(); return hpackDynamicTable.size();
} }
/** /**
* Return the header field at the given index. Exposed for testing. * Return the header field at the given index. Exposed for testing.
*/ */
HeaderField getHeaderField(int index) { HpackHeaderField getHeaderField(int index) {
return dynamicTable.getEntry(index + 1); return hpackDynamicTable.getEntry(index + 1);
} }
private void setDynamicTableSize(long dynamicTableSize) throws Http2Exception { private void setDynamicTableSize(long dynamicTableSize) throws Http2Exception {
@ -379,29 +377,29 @@ public final class Decoder {
} }
encoderMaxDynamicTableSize = dynamicTableSize; encoderMaxDynamicTableSize = dynamicTableSize;
maxDynamicTableSizeChangeRequired = false; maxDynamicTableSizeChangeRequired = false;
dynamicTable.setCapacity(dynamicTableSize); hpackDynamicTable.setCapacity(dynamicTableSize);
} }
private CharSequence readName(int index) throws Http2Exception { private CharSequence readName(int index) throws Http2Exception {
if (index <= StaticTable.length) { if (index <= HpackStaticTable.length) {
HeaderField headerField = StaticTable.getEntry(index); HpackHeaderField hpackHeaderField = HpackStaticTable.getEntry(index);
return headerField.name; return hpackHeaderField.name;
} }
if (index - StaticTable.length <= dynamicTable.length()) { if (index - HpackStaticTable.length <= hpackDynamicTable.length()) {
HeaderField headerField = dynamicTable.getEntry(index - StaticTable.length); HpackHeaderField hpackHeaderField = hpackDynamicTable.getEntry(index - HpackStaticTable.length);
return headerField.name; return hpackHeaderField.name;
} }
throw READ_NAME_ILLEGAL_INDEX_VALUE; throw READ_NAME_ILLEGAL_INDEX_VALUE;
} }
private long indexHeader(int streamId, int index, Http2Headers headers, long headersLength) throws Http2Exception { private long indexHeader(int streamId, int index, Http2Headers headers, long headersLength) throws Http2Exception {
if (index <= StaticTable.length) { if (index <= HpackStaticTable.length) {
HeaderField headerField = StaticTable.getEntry(index); HpackHeaderField hpackHeaderField = HpackStaticTable.getEntry(index);
return addHeader(streamId, headers, headerField.name, headerField.value, headersLength); return addHeader(streamId, headers, hpackHeaderField.name, hpackHeaderField.value, headersLength);
} }
if (index - StaticTable.length <= dynamicTable.length()) { if (index - HpackStaticTable.length <= hpackDynamicTable.length()) {
HeaderField headerField = dynamicTable.getEntry(index - StaticTable.length); HpackHeaderField hpackHeaderField = hpackDynamicTable.getEntry(index - HpackStaticTable.length);
return addHeader(streamId, headers, headerField.name, headerField.value, headersLength); return addHeader(streamId, headers, hpackHeaderField.name, hpackHeaderField.value, headersLength);
} }
throw INDEX_HEADER_ILLEGAL_INDEX_VALUE; throw INDEX_HEADER_ILLEGAL_INDEX_VALUE;
} }
@ -416,7 +414,7 @@ public final class Decoder {
break; break;
case INCREMENTAL: case INCREMENTAL:
dynamicTable.add(new HeaderField(name, value)); hpackDynamicTable.add(new HpackHeaderField(name, value));
break; break;
default: default:
@ -438,7 +436,7 @@ public final class Decoder {
private CharSequence readStringLiteral(ByteBuf in, int length, boolean huffmanEncoded) throws Http2Exception { private CharSequence readStringLiteral(ByteBuf in, int length, boolean huffmanEncoded) throws Http2Exception {
if (huffmanEncoded) { if (huffmanEncoded) {
return huffmanDecoder.decode(in, length); return hpackHuffmanDecoder.decode(in, length);
} }
byte[] buf = new byte[length]; byte[] buf = new byte[length];
in.readBytes(buf); in.readBytes(buf);

View File

@ -29,16 +29,15 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package io.netty.handler.codec.http2.internal.hpack; package io.netty.handler.codec.http2;
import static io.netty.handler.codec.http2.Http2CodecUtil.MAX_HEADER_TABLE_SIZE; import static io.netty.handler.codec.http2.Http2CodecUtil.MAX_HEADER_TABLE_SIZE;
import static io.netty.handler.codec.http2.Http2CodecUtil.MIN_HEADER_TABLE_SIZE; import static io.netty.handler.codec.http2.Http2CodecUtil.MIN_HEADER_TABLE_SIZE;
import static io.netty.handler.codec.http2.internal.hpack.HeaderField.HEADER_ENTRY_OVERHEAD;
final class DynamicTable { final class HpackDynamicTable {
// a circular queue of header fields // a circular queue of header fields
HeaderField[] headerFields; HpackHeaderField[] hpackHeaderFields;
int head; int head;
int tail; int tail;
private long size; private long size;
@ -47,7 +46,7 @@ final class DynamicTable {
/** /**
* Creates a new dynamic table with the specified initial capacity. * Creates a new dynamic table with the specified initial capacity.
*/ */
DynamicTable(long initialCapacity) { HpackDynamicTable(long initialCapacity) {
setCapacity(initialCapacity); setCapacity(initialCapacity);
} }
@ -57,7 +56,7 @@ final class DynamicTable {
public int length() { public int length() {
int length; int length;
if (head < tail) { if (head < tail) {
length = headerFields.length - tail + head; length = hpackHeaderFields.length - tail + head;
} else { } else {
length = head - tail; length = head - tail;
} }
@ -82,15 +81,15 @@ final class DynamicTable {
* Return the header field at the given index. The first and newest entry is always at index 1, * Return the header field at the given index. The first and newest entry is always at index 1,
* and the oldest entry is at the index length(). * and the oldest entry is at the index length().
*/ */
public HeaderField getEntry(int index) { public HpackHeaderField getEntry(int index) {
if (index <= 0 || index > length()) { if (index <= 0 || index > length()) {
throw new IndexOutOfBoundsException(); throw new IndexOutOfBoundsException();
} }
int i = head - index; int i = head - index;
if (i < 0) { if (i < 0) {
return headerFields[i + headerFields.length]; return hpackHeaderFields[i + hpackHeaderFields.length];
} else { } else {
return headerFields[i]; return hpackHeaderFields[i];
} }
} }
@ -100,7 +99,7 @@ final class DynamicTable {
* If the size of the new entry is larger than the table's capacity, the dynamic table will be * If the size of the new entry is larger than the table's capacity, the dynamic table will be
* cleared. * cleared.
*/ */
public void add(HeaderField header) { public void add(HpackHeaderField header) {
int headerSize = header.size(); int headerSize = header.size();
if (headerSize > capacity) { if (headerSize > capacity) {
clear(); clear();
@ -109,9 +108,9 @@ final class DynamicTable {
while (capacity - size < headerSize) { while (capacity - size < headerSize) {
remove(); remove();
} }
headerFields[head++] = header; hpackHeaderFields[head++] = header;
size += header.size(); size += header.size();
if (head == headerFields.length) { if (head == hpackHeaderFields.length) {
head = 0; head = 0;
} }
} }
@ -119,14 +118,14 @@ final class DynamicTable {
/** /**
* Remove and return the oldest header field from the dynamic table. * Remove and return the oldest header field from the dynamic table.
*/ */
public HeaderField remove() { public HpackHeaderField remove() {
HeaderField removed = headerFields[tail]; HpackHeaderField removed = hpackHeaderFields[tail];
if (removed == null) { if (removed == null) {
return null; return null;
} }
size -= removed.size(); size -= removed.size();
headerFields[tail++] = null; hpackHeaderFields[tail++] = null;
if (tail == headerFields.length) { if (tail == hpackHeaderFields.length) {
tail = 0; tail = 0;
} }
return removed; return removed;
@ -137,8 +136,8 @@ final class DynamicTable {
*/ */
public void clear() { public void clear() {
while (tail != head) { while (tail != head) {
headerFields[tail++] = null; hpackHeaderFields[tail++] = null;
if (tail == headerFields.length) { if (tail == hpackHeaderFields.length) {
tail = 0; tail = 0;
} }
} }
@ -170,31 +169,31 @@ final class DynamicTable {
} }
} }
int maxEntries = (int) (capacity / HEADER_ENTRY_OVERHEAD); int maxEntries = (int) (capacity / HpackHeaderField.HEADER_ENTRY_OVERHEAD);
if (capacity % HEADER_ENTRY_OVERHEAD != 0) { if (capacity % HpackHeaderField.HEADER_ENTRY_OVERHEAD != 0) {
maxEntries++; maxEntries++;
} }
// check if capacity change requires us to reallocate the array // check if capacity change requires us to reallocate the array
if (headerFields != null && headerFields.length == maxEntries) { if (hpackHeaderFields != null && hpackHeaderFields.length == maxEntries) {
return; return;
} }
HeaderField[] tmp = new HeaderField[maxEntries]; HpackHeaderField[] tmp = new HpackHeaderField[maxEntries];
// initially length will be 0 so there will be no copy // initially length will be 0 so there will be no copy
int len = length(); int len = length();
int cursor = tail; int cursor = tail;
for (int i = 0; i < len; i++) { for (int i = 0; i < len; i++) {
HeaderField entry = headerFields[cursor++]; HpackHeaderField entry = hpackHeaderFields[cursor++];
tmp[i] = entry; tmp[i] = entry;
if (cursor == headerFields.length) { if (cursor == hpackHeaderFields.length) {
cursor = 0; cursor = 0;
} }
} }
tail = 0; tail = 0;
head = tail + len; head = tail + len;
headerFields = tmp; hpackHeaderFields = tmp;
} }
} }

View File

@ -29,11 +29,10 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package io.netty.handler.codec.http2.internal.hpack; package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBuf;
import io.netty.handler.codec.http2.Http2Exception; import io.netty.handler.codec.http2.HpackUtil.IndexType;
import io.netty.handler.codec.http2.Http2Headers;
import io.netty.handler.codec.http2.Http2HeadersEncoder.SensitivityDetector; import io.netty.handler.codec.http2.Http2HeadersEncoder.SensitivityDetector;
import io.netty.util.AsciiString; import io.netty.util.AsciiString;
import io.netty.util.CharsetUtil; import io.netty.util.CharsetUtil;
@ -41,6 +40,7 @@ import io.netty.util.CharsetUtil;
import java.util.Arrays; import java.util.Arrays;
import java.util.Map; import java.util.Map;
import static io.netty.handler.codec.http2.HpackUtil.equalsConstantTime;
import static io.netty.handler.codec.http2.Http2CodecUtil.DEFAULT_HEADER_LIST_SIZE; import static io.netty.handler.codec.http2.Http2CodecUtil.DEFAULT_HEADER_LIST_SIZE;
import static io.netty.handler.codec.http2.Http2CodecUtil.DEFAULT_HEADER_TABLE_SIZE; import static io.netty.handler.codec.http2.Http2CodecUtil.DEFAULT_HEADER_TABLE_SIZE;
import static io.netty.handler.codec.http2.Http2CodecUtil.MAX_HEADER_LIST_SIZE; import static io.netty.handler.codec.http2.Http2CodecUtil.MAX_HEADER_LIST_SIZE;
@ -50,21 +50,16 @@ import static io.netty.handler.codec.http2.Http2CodecUtil.MIN_HEADER_TABLE_SIZE;
import static io.netty.handler.codec.http2.Http2CodecUtil.headerListSizeExceeded; import static io.netty.handler.codec.http2.Http2CodecUtil.headerListSizeExceeded;
import static io.netty.handler.codec.http2.Http2Error.PROTOCOL_ERROR; import static io.netty.handler.codec.http2.Http2Error.PROTOCOL_ERROR;
import static io.netty.handler.codec.http2.Http2Exception.connectionError; import static io.netty.handler.codec.http2.Http2Exception.connectionError;
import static io.netty.handler.codec.http2.internal.hpack.HeaderField.sizeOf;
import static io.netty.handler.codec.http2.internal.hpack.HpackUtil.IndexType.INCREMENTAL;
import static io.netty.handler.codec.http2.internal.hpack.HpackUtil.IndexType.NEVER;
import static io.netty.handler.codec.http2.internal.hpack.HpackUtil.IndexType.NONE;
import static io.netty.handler.codec.http2.internal.hpack.HpackUtil.equalsConstantTime;
import static io.netty.util.internal.MathUtil.findNextPositivePowerOfTwo; import static io.netty.util.internal.MathUtil.findNextPositivePowerOfTwo;
import static java.lang.Math.max; import static java.lang.Math.max;
import static java.lang.Math.min; import static java.lang.Math.min;
public final class Encoder { final class HpackEncoder {
// a linked hash map of header fields // a linked hash map of header fields
private final HeaderEntry[] headerFields; private final HeaderEntry[] headerFields;
private final HeaderEntry head = new HeaderEntry(-1, AsciiString.EMPTY_STRING, private final HeaderEntry head = new HeaderEntry(-1, AsciiString.EMPTY_STRING,
AsciiString.EMPTY_STRING, Integer.MAX_VALUE, null); AsciiString.EMPTY_STRING, Integer.MAX_VALUE, null);
private final HuffmanEncoder huffmanEncoder = new HuffmanEncoder(); private final HpackHuffmanEncoder hpackHuffmanEncoder = new HpackHuffmanEncoder();
private final byte hashMask; private final byte hashMask;
private final boolean ignoreMaxHeaderListSize; private final boolean ignoreMaxHeaderListSize;
private long size; private long size;
@ -74,21 +69,21 @@ public final class Encoder {
/** /**
* Creates a new encoder. * Creates a new encoder.
*/ */
public Encoder() { HpackEncoder() {
this(false); this(false);
} }
/** /**
* Creates a new encoder. * Creates a new encoder.
*/ */
public Encoder(boolean ignoreMaxHeaderListSize) { public HpackEncoder(boolean ignoreMaxHeaderListSize) {
this(ignoreMaxHeaderListSize, 16); this(ignoreMaxHeaderListSize, 16);
} }
/** /**
* Creates a new encoder. * Creates a new encoder.
*/ */
public Encoder(boolean ignoreMaxHeaderListSize, int arraySizeHint) { public HpackEncoder(boolean ignoreMaxHeaderListSize, int arraySizeHint) {
this.ignoreMaxHeaderListSize = ignoreMaxHeaderListSize; this.ignoreMaxHeaderListSize = ignoreMaxHeaderListSize;
maxHeaderTableSize = DEFAULT_HEADER_TABLE_SIZE; maxHeaderTableSize = DEFAULT_HEADER_TABLE_SIZE;
maxHeaderListSize = DEFAULT_HEADER_LIST_SIZE; maxHeaderListSize = DEFAULT_HEADER_LIST_SIZE;
@ -123,7 +118,7 @@ public final class Encoder {
CharSequence value = header.getValue(); CharSequence value = header.getValue();
// OK to increment now and check for bounds after because this value is limited to unsigned int and will not // OK to increment now and check for bounds after because this value is limited to unsigned int and will not
// overflow. // overflow.
headerSize += sizeOf(name, value); headerSize += HpackHeaderField.sizeOf(name, value);
if (headerSize > maxHeaderListSize) { if (headerSize > maxHeaderListSize) {
headerListSizeExceeded(streamId, maxHeaderListSize, false); headerListSizeExceeded(streamId, maxHeaderListSize, false);
} }
@ -136,7 +131,8 @@ public final class Encoder {
for (Map.Entry<CharSequence, CharSequence> header : headers) { for (Map.Entry<CharSequence, CharSequence> header : headers) {
CharSequence name = header.getKey(); CharSequence name = header.getKey();
CharSequence value = header.getValue(); CharSequence value = header.getValue();
encodeHeader(out, name, value, sensitivityDetector.isSensitive(name, value), sizeOf(name, value)); encodeHeader(out, name, value, sensitivityDetector.isSensitive(name, value),
HpackHeaderField.sizeOf(name, value));
} }
} }
@ -149,16 +145,16 @@ public final class Encoder {
// If the header value is sensitive then it must never be indexed // If the header value is sensitive then it must never be indexed
if (sensitive) { if (sensitive) {
int nameIndex = getNameIndex(name); int nameIndex = getNameIndex(name);
encodeLiteral(out, name, value, NEVER, nameIndex); encodeLiteral(out, name, value, IndexType.NEVER, nameIndex);
return; return;
} }
// If the peer will only use the static table // If the peer will only use the static table
if (maxHeaderTableSize == 0) { if (maxHeaderTableSize == 0) {
int staticTableIndex = StaticTable.getIndex(name, value); int staticTableIndex = HpackStaticTable.getIndex(name, value);
if (staticTableIndex == -1) { if (staticTableIndex == -1) {
int nameIndex = StaticTable.getIndex(name); int nameIndex = HpackStaticTable.getIndex(name);
encodeLiteral(out, name, value, NONE, nameIndex); encodeLiteral(out, name, value, IndexType.NONE, nameIndex);
} else { } else {
encodeInteger(out, 0x80, 7, staticTableIndex); encodeInteger(out, 0x80, 7, staticTableIndex);
} }
@ -168,23 +164,23 @@ public final class Encoder {
// If the headerSize is greater than the max table size then it must be encoded literally // If the headerSize is greater than the max table size then it must be encoded literally
if (headerSize > maxHeaderTableSize) { if (headerSize > maxHeaderTableSize) {
int nameIndex = getNameIndex(name); int nameIndex = getNameIndex(name);
encodeLiteral(out, name, value, NONE, nameIndex); encodeLiteral(out, name, value, IndexType.NONE, nameIndex);
return; return;
} }
HeaderEntry headerField = getEntry(name, value); HeaderEntry headerField = getEntry(name, value);
if (headerField != null) { if (headerField != null) {
int index = getIndex(headerField.index) + StaticTable.length; int index = getIndex(headerField.index) + HpackStaticTable.length;
// Section 6.1. Indexed Header Field Representation // Section 6.1. Indexed Header Field Representation
encodeInteger(out, 0x80, 7, index); encodeInteger(out, 0x80, 7, index);
} else { } else {
int staticTableIndex = StaticTable.getIndex(name, value); int staticTableIndex = HpackStaticTable.getIndex(name, value);
if (staticTableIndex != -1) { if (staticTableIndex != -1) {
// Section 6.1. Indexed Header Field Representation // Section 6.1. Indexed Header Field Representation
encodeInteger(out, 0x80, 7, staticTableIndex); encodeInteger(out, 0x80, 7, staticTableIndex);
} else { } else {
ensureCapacity(headerSize); ensureCapacity(headerSize);
encodeLiteral(out, name, value, INCREMENTAL, getNameIndex(name)); encodeLiteral(out, name, value, IndexType.INCREMENTAL, getNameIndex(name));
add(name, value, headerSize); add(name, value, headerSize);
} }
} }
@ -255,10 +251,10 @@ public final class Encoder {
* Encode string literal according to Section 5.2. * Encode string literal according to Section 5.2.
*/ */
private void encodeStringLiteral(ByteBuf out, CharSequence string) { private void encodeStringLiteral(ByteBuf out, CharSequence string) {
int huffmanLength = huffmanEncoder.getEncodedLength(string); int huffmanLength = hpackHuffmanEncoder.getEncodedLength(string);
if (huffmanLength < string.length()) { if (huffmanLength < string.length()) {
encodeInteger(out, 0x80, 7, huffmanLength); encodeInteger(out, 0x80, 7, huffmanLength);
huffmanEncoder.encode(out, string); hpackHuffmanEncoder.encode(out, string);
} else { } else {
encodeInteger(out, 0x00, 7, string.length()); encodeInteger(out, 0x00, 7, string.length());
if (string instanceof AsciiString) { if (string instanceof AsciiString) {
@ -276,7 +272,7 @@ public final class Encoder {
/** /**
* Encode literal header field according to Section 6.2. * Encode literal header field according to Section 6.2.
*/ */
private void encodeLiteral(ByteBuf out, CharSequence name, CharSequence value, HpackUtil.IndexType indexType, private void encodeLiteral(ByteBuf out, CharSequence name, CharSequence value, IndexType indexType,
int nameIndex) { int nameIndex) {
boolean nameIndexValid = nameIndex != -1; boolean nameIndexValid = nameIndex != -1;
switch (indexType) { switch (indexType) {
@ -299,11 +295,11 @@ public final class Encoder {
} }
private int getNameIndex(CharSequence name) { private int getNameIndex(CharSequence name) {
int index = StaticTable.getIndex(name); int index = HpackStaticTable.getIndex(name);
if (index == -1) { if (index == -1) {
index = getIndex(name); index = getIndex(name);
if (index >= 0) { if (index >= 0) {
index += StaticTable.length; index += HpackStaticTable.length;
} }
} }
return index; return index;
@ -340,7 +336,7 @@ public final class Encoder {
/** /**
* Return the header field at the given index. Exposed for testing. * Return the header field at the given index. Exposed for testing.
*/ */
HeaderField getHeaderField(int index) { HpackHeaderField getHeaderField(int index) {
HeaderEntry entry = head; HeaderEntry entry = head;
while (index-- >= 0) { while (index-- >= 0) {
entry = entry.before; entry = entry.before;
@ -421,7 +417,7 @@ public final class Encoder {
/** /**
* Remove and return the oldest header field from the dynamic table. * Remove and return the oldest header field from the dynamic table.
*/ */
private HeaderField remove() { private HpackHeaderField remove() {
if (size == 0) { if (size == 0) {
return null; return null;
} }
@ -465,9 +461,9 @@ public final class Encoder {
} }
/** /**
* A linked hash map HeaderField entry. * A linked hash map HpackHeaderField entry.
*/ */
private static class HeaderEntry extends HeaderField { private static final class HeaderEntry extends HpackHeaderField {
// These fields comprise the doubly linked list used for iteration. // These fields comprise the doubly linked list used for iteration.
HeaderEntry before, after; HeaderEntry before, after;

View File

@ -29,12 +29,11 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package io.netty.handler.codec.http2.internal.hpack; package io.netty.handler.codec.http2;
import static io.netty.handler.codec.http2.internal.hpack.HpackUtil.equalsConstantTime;
import static io.netty.util.internal.ObjectUtil.checkNotNull; import static io.netty.util.internal.ObjectUtil.checkNotNull;
class HeaderField { class HpackHeaderField {
// Section 4.1. Calculating Table Size // Section 4.1. Calculating Table Size
// The additional 32 octets account for an estimated // The additional 32 octets account for an estimated
@ -49,32 +48,32 @@ class HeaderField {
final CharSequence value; final CharSequence value;
// This constructor can only be used if name and value are ISO-8859-1 encoded. // This constructor can only be used if name and value are ISO-8859-1 encoded.
HeaderField(CharSequence name, CharSequence value) { HpackHeaderField(CharSequence name, CharSequence value) {
this.name = checkNotNull(name, "name"); this.name = checkNotNull(name, "name");
this.value = checkNotNull(value, "value"); this.value = checkNotNull(value, "value");
} }
int size() { final int size() {
return name.length() + value.length() + HEADER_ENTRY_OVERHEAD; return name.length() + value.length() + HEADER_ENTRY_OVERHEAD;
} }
@Override @Override
public int hashCode() { public final int hashCode() {
// TODO(nmittler): Netty's build rules require this. Probably need a better implementation. // TODO(nmittler): Netty's build rules require this. Probably need a better implementation.
return super.hashCode(); return super.hashCode();
} }
@Override @Override
public boolean equals(Object obj) { public final boolean equals(Object obj) {
if (obj == this) { if (obj == this) {
return true; return true;
} }
if (!(obj instanceof HeaderField)) { if (!(obj instanceof HpackHeaderField)) {
return false; return false;
} }
HeaderField other = (HeaderField) obj; HpackHeaderField other = (HpackHeaderField) obj;
// To avoid short circuit behavior a bitwise operator is used instead of a boolean operator. // To avoid short circuit behavior a bitwise operator is used instead of a boolean operator.
return (equalsConstantTime(name, other.name) & equalsConstantTime(value, other.value)) != 0; return (HpackUtil.equalsConstantTime(name, other.name) & HpackUtil.equalsConstantTime(value, other.value)) != 0;
} }
@Override @Override

View File

@ -29,10 +29,9 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package io.netty.handler.codec.http2.internal.hpack; package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBuf;
import io.netty.handler.codec.http2.Http2Exception;
import io.netty.util.AsciiString; import io.netty.util.AsciiString;
import io.netty.util.ByteProcessor; import io.netty.util.ByteProcessor;
import io.netty.util.internal.ObjectUtil; import io.netty.util.internal.ObjectUtil;
@ -40,21 +39,19 @@ import io.netty.util.internal.ThrowableUtil;
import static io.netty.handler.codec.http2.Http2Error.COMPRESSION_ERROR; import static io.netty.handler.codec.http2.Http2Error.COMPRESSION_ERROR;
import static io.netty.handler.codec.http2.Http2Exception.connectionError; import static io.netty.handler.codec.http2.Http2Exception.connectionError;
import static io.netty.handler.codec.http2.internal.hpack.HpackUtil.HUFFMAN_CODES;
import static io.netty.handler.codec.http2.internal.hpack.HpackUtil.HUFFMAN_CODE_LENGTHS;
final class HuffmanDecoder { final class HpackHuffmanDecoder {
private static final Http2Exception EOS_DECODED = ThrowableUtil.unknownStackTrace( private static final Http2Exception EOS_DECODED = ThrowableUtil.unknownStackTrace(
connectionError(COMPRESSION_ERROR, "HPACK - EOS Decoded"), HuffmanDecoder.class, "decode(...)"); connectionError(COMPRESSION_ERROR, "HPACK - EOS Decoded"), HpackHuffmanDecoder.class, "decode(..)");
private static final Http2Exception INVALID_PADDING = ThrowableUtil.unknownStackTrace( private static final Http2Exception INVALID_PADDING = ThrowableUtil.unknownStackTrace(
connectionError(COMPRESSION_ERROR, "HPACK - Invalid Padding"), HuffmanDecoder.class, "decode(...)"); connectionError(COMPRESSION_ERROR, "HPACK - Invalid Padding"), HpackHuffmanDecoder.class, "decode(..)");
private static final Node ROOT = buildTree(HUFFMAN_CODES, HUFFMAN_CODE_LENGTHS); private static final Node ROOT = buildTree(HpackUtil.HUFFMAN_CODES, HpackUtil.HUFFMAN_CODE_LENGTHS);
private final DecoderProcessor processor; private final DecoderProcessor processor;
HuffmanDecoder(int initialCapacity) { HpackHuffmanDecoder(int initialCapacity) {
processor = new DecoderProcessor(initialCapacity); processor = new DecoderProcessor(initialCapacity);
} }

View File

@ -29,7 +29,7 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package io.netty.handler.codec.http2.internal.hpack; package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBuf;
import io.netty.util.AsciiString; import io.netty.util.AsciiString;
@ -37,19 +37,15 @@ import io.netty.util.ByteProcessor;
import io.netty.util.internal.ObjectUtil; import io.netty.util.internal.ObjectUtil;
import io.netty.util.internal.PlatformDependent; import io.netty.util.internal.PlatformDependent;
final class HpackHuffmanEncoder {
import static io.netty.handler.codec.http2.internal.hpack.HpackUtil.HUFFMAN_CODES;
import static io.netty.handler.codec.http2.internal.hpack.HpackUtil.HUFFMAN_CODE_LENGTHS;
final class HuffmanEncoder {
private final int[] codes; private final int[] codes;
private final byte[] lengths; private final byte[] lengths;
private final EncodedLengthProcessor encodedLengthProcessor = new EncodedLengthProcessor(); private final EncodedLengthProcessor encodedLengthProcessor = new EncodedLengthProcessor();
private final EncodeProcessor encodeProcessor = new EncodeProcessor(); private final EncodeProcessor encodeProcessor = new EncodeProcessor();
HuffmanEncoder() { HpackHuffmanEncoder() {
this(HUFFMAN_CODES, HUFFMAN_CODE_LENGTHS); this(HpackUtil.HUFFMAN_CODES, HpackUtil.HUFFMAN_CODE_LENGTHS);
} }
/** /**
@ -58,7 +54,7 @@ final class HuffmanEncoder {
* @param codes the Huffman codes indexed by symbol * @param codes the Huffman codes indexed by symbol
* @param lengths the length of each Huffman code * @param lengths the length of each Huffman code
*/ */
private HuffmanEncoder(int[] codes, byte[] lengths) { private HpackHuffmanEncoder(int[] codes, byte[] lengths) {
this.codes = codes; this.codes = codes;
this.lengths = lengths; this.lengths = lengths;
} }

View File

@ -29,22 +29,21 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package io.netty.handler.codec.http2.internal.hpack; package io.netty.handler.codec.http2;
import io.netty.handler.codec.UnsupportedValueConverter; import io.netty.handler.codec.UnsupportedValueConverter;
import io.netty.handler.codec.http2.CharSequenceMap;
import io.netty.util.AsciiString; import io.netty.util.AsciiString;
import java.util.Arrays; import java.util.Arrays;
import java.util.List; import java.util.List;
import static io.netty.handler.codec.http2.internal.hpack.HpackUtil.equalsConstantTime; import static io.netty.handler.codec.http2.HpackUtil.equalsConstantTime;
final class StaticTable { final class HpackStaticTable {
// Appendix A: Static Table // Appendix A: Static Table
// http://tools.ietf.org/html/rfc7541#appendix-A // http://tools.ietf.org/html/rfc7541#appendix-A
private static final List<HeaderField> STATIC_TABLE = Arrays.asList( private static final List<HpackHeaderField> STATIC_TABLE = Arrays.asList(
/* 1 */ newEmptyHeaderField(":authority"), /* 1 */ newEmptyHeaderField(":authority"),
/* 2 */ newHeaderField(":method", "GET"), /* 2 */ newHeaderField(":method", "GET"),
/* 3 */ newHeaderField(":method", "POST"), /* 3 */ newHeaderField(":method", "POST"),
@ -108,12 +107,12 @@ final class StaticTable {
/* 61 */ newEmptyHeaderField("www-authenticate") /* 61 */ newEmptyHeaderField("www-authenticate")
); );
private static HeaderField newEmptyHeaderField(CharSequence name) { private static HpackHeaderField newEmptyHeaderField(CharSequence name) {
return newHeaderField(name, AsciiString.EMPTY_STRING); return newHeaderField(name, AsciiString.EMPTY_STRING);
} }
private static HeaderField newHeaderField(CharSequence name, CharSequence value) { private static HpackHeaderField newHeaderField(CharSequence name, CharSequence value) {
return new HeaderField(AsciiString.of(name), AsciiString.of(value)); return new HpackHeaderField(AsciiString.of(name), AsciiString.of(value));
} }
private static final CharSequenceMap<Integer> STATIC_INDEX_BY_NAME = createMap(); private static final CharSequenceMap<Integer> STATIC_INDEX_BY_NAME = createMap();
@ -126,7 +125,7 @@ final class StaticTable {
/** /**
* Return the header field at the given index value. * Return the header field at the given index value.
*/ */
static HeaderField getEntry(int index) { static HpackHeaderField getEntry(int index) {
return STATIC_TABLE.get(index - 1); return STATIC_TABLE.get(index - 1);
} }
@ -154,7 +153,7 @@ final class StaticTable {
// Note this assumes all entries for a given header field are sequential. // Note this assumes all entries for a given header field are sequential.
while (index <= length) { while (index <= length) {
HeaderField entry = getEntry(index); HpackHeaderField entry = getEntry(index);
if (equalsConstantTime(name, entry.name) == 0) { if (equalsConstantTime(name, entry.name) == 0) {
break; break;
} }
@ -176,7 +175,7 @@ final class StaticTable {
// Iterate through the static table in reverse order to // Iterate through the static table in reverse order to
// save the smallest index for a given name in the map. // save the smallest index for a given name in the map.
for (int index = length; index > 0; index--) { for (int index = length; index > 0; index--) {
HeaderField entry = getEntry(index); HpackHeaderField entry = getEntry(index);
CharSequence name = entry.name; CharSequence name = entry.name;
ret.set(name, index); ret.set(name, index);
} }
@ -184,6 +183,6 @@ final class StaticTable {
} }
// singleton // singleton
private StaticTable() { private HpackStaticTable() {
} }
} }

View File

@ -29,7 +29,7 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package io.netty.handler.codec.http2.internal.hpack; package io.netty.handler.codec.http2;
import io.netty.util.AsciiString; import io.netty.util.AsciiString;
import io.netty.util.internal.ConstantTimeUtils; import io.netty.util.internal.ConstantTimeUtils;

View File

@ -1,38 +0,0 @@
/*
* Copyright 2015 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
/*
* Copyright 2014 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* <a href="http://tools.ietf.org/html/rfc7541">HPACK: Header Compression for HTTP/2</a>.
* Please note this implementation is only compliant when used with HTTP/2 and so not meant to be used outside of
* this scope.
*/
package io.netty.handler.codec.http2.internal.hpack;

View File

@ -18,7 +18,6 @@ import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled; import io.netty.buffer.Unpooled;
import io.netty.buffer.UnpooledByteBufAllocator; import io.netty.buffer.UnpooledByteBufAllocator;
import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.http2.internal.hpack.Encoder;
import org.junit.After; import org.junit.After;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
@ -43,7 +42,7 @@ public class DefaultHttp2FrameReaderTest {
private DefaultHttp2FrameReader frameReader; private DefaultHttp2FrameReader frameReader;
// Used to generate frame // Used to generate frame
private Encoder encoder; private HpackEncoder hpackEncoder;
@Before @Before
public void setUp() throws Exception { public void setUp() throws Exception {
@ -52,7 +51,7 @@ public class DefaultHttp2FrameReaderTest {
when(ctx.alloc()).thenReturn(UnpooledByteBufAllocator.DEFAULT); when(ctx.alloc()).thenReturn(UnpooledByteBufAllocator.DEFAULT);
frameReader = new DefaultHttp2FrameReader(); frameReader = new DefaultHttp2FrameReader();
encoder = new Encoder(); hpackEncoder = new HpackEncoder();
} }
@After @After
@ -338,7 +337,7 @@ public class DefaultHttp2FrameReaderTest {
Http2Flags flags) throws Http2Exception { Http2Flags flags) throws Http2Exception {
ByteBuf headerBlock = Unpooled.buffer(); ByteBuf headerBlock = Unpooled.buffer();
try { try {
encoder.encodeHeaders(streamId, headerBlock, headers, Http2HeadersEncoder.NEVER_SENSITIVE); hpackEncoder.encodeHeaders(streamId, headerBlock, headers, Http2HeadersEncoder.NEVER_SENSITIVE);
writeFrameHeader(output, headerBlock.readableBytes(), HEADERS, flags, streamId); writeFrameHeader(output, headerBlock.readableBytes(), HEADERS, flags, streamId);
output.writeBytes(headerBlock, headerBlock.readableBytes()); output.writeBytes(headerBlock, headerBlock.readableBytes());
} finally { } finally {
@ -351,7 +350,7 @@ public class DefaultHttp2FrameReaderTest {
ByteBuf dataPayload) throws Http2Exception { ByteBuf dataPayload) throws Http2Exception {
ByteBuf headerBlock = Unpooled.buffer(); ByteBuf headerBlock = Unpooled.buffer();
try { try {
encoder.encodeHeaders(streamId, headerBlock, headers, Http2HeadersEncoder.NEVER_SENSITIVE); hpackEncoder.encodeHeaders(streamId, headerBlock, headers, Http2HeadersEncoder.NEVER_SENSITIVE);
writeFrameHeader(output, headerBlock.readableBytes(), HEADERS, writeFrameHeader(output, headerBlock.readableBytes(), HEADERS,
new Http2Flags().endOfHeaders(true), streamId); new Http2Flags().endOfHeaders(true), streamId);
output.writeBytes(headerBlock, headerBlock.readableBytes()); output.writeBytes(headerBlock, headerBlock.readableBytes());
@ -370,7 +369,7 @@ public class DefaultHttp2FrameReaderTest {
try { try {
writeUnsignedInt(streamDependency, headerBlock); writeUnsignedInt(streamDependency, headerBlock);
headerBlock.writeByte(weight - 1); headerBlock.writeByte(weight - 1);
encoder.encodeHeaders(streamId, headerBlock, headers, Http2HeadersEncoder.NEVER_SENSITIVE); hpackEncoder.encodeHeaders(streamId, headerBlock, headers, Http2HeadersEncoder.NEVER_SENSITIVE);
writeFrameHeader(output, headerBlock.readableBytes(), HEADERS, flags, streamId); writeFrameHeader(output, headerBlock.readableBytes(), HEADERS, flags, streamId);
output.writeBytes(headerBlock, headerBlock.readableBytes()); output.writeBytes(headerBlock, headerBlock.readableBytes());
} finally { } finally {
@ -383,7 +382,7 @@ public class DefaultHttp2FrameReaderTest {
Http2Flags flags) throws Http2Exception { Http2Flags flags) throws Http2Exception {
ByteBuf headerBlock = Unpooled.buffer(); ByteBuf headerBlock = Unpooled.buffer();
try { try {
encoder.encodeHeaders(streamId, headerBlock, headers, Http2HeadersEncoder.NEVER_SENSITIVE); hpackEncoder.encodeHeaders(streamId, headerBlock, headers, Http2HeadersEncoder.NEVER_SENSITIVE);
writeFrameHeader(output, headerBlock.readableBytes(), CONTINUATION, flags, streamId); writeFrameHeader(output, headerBlock.readableBytes(), CONTINUATION, flags, streamId);
output.writeBytes(headerBlock, headerBlock.readableBytes()); output.writeBytes(headerBlock, headerBlock.readableBytes());
} finally { } finally {

View File

@ -17,7 +17,6 @@ package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled; import io.netty.buffer.Unpooled;
import io.netty.handler.codec.http2.internal.hpack.Encoder;
import io.netty.util.AsciiString; import io.netty.util.AsciiString;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
@ -119,13 +118,13 @@ public class DefaultHttp2HeadersDecoderTest {
} }
private static ByteBuf encode(byte[]... entries) throws Exception { private static ByteBuf encode(byte[]... entries) throws Exception {
Encoder encoder = newTestEncoder(); HpackEncoder hpackEncoder = newTestEncoder();
ByteBuf out = Unpooled.buffer(); ByteBuf out = Unpooled.buffer();
Http2Headers http2Headers = new DefaultHttp2Headers(false); Http2Headers http2Headers = new DefaultHttp2Headers(false);
for (int ix = 0; ix < entries.length;) { for (int ix = 0; ix < entries.length;) {
http2Headers.add(new AsciiString(entries[ix++], false), new AsciiString(entries[ix++], false)); http2Headers.add(new AsciiString(entries[ix++], false), new AsciiString(entries[ix++], false));
} }
encoder.encodeHeaders(3 /* randomly chosen */, out, http2Headers, NEVER_SENSITIVE); hpackEncoder.encodeHeaders(3 /* randomly chosen */, out, http2Headers, NEVER_SENSITIVE);
return out; return out;
} }
} }

View File

@ -29,18 +29,15 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package io.netty.handler.codec.http2.internal.hpack; package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled; import io.netty.buffer.Unpooled;
import io.netty.handler.codec.http2.DefaultHttp2Headers;
import io.netty.handler.codec.http2.Http2Exception;
import io.netty.handler.codec.http2.Http2Headers;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
import static io.netty.handler.codec.http2.HpackDecoder.decodeULE128;
import static io.netty.handler.codec.http2.Http2HeadersEncoder.NEVER_SENSITIVE; import static io.netty.handler.codec.http2.Http2HeadersEncoder.NEVER_SENSITIVE;
import static io.netty.handler.codec.http2.internal.hpack.Decoder.decodeULE128;
import static io.netty.util.AsciiString.EMPTY_STRING; import static io.netty.util.AsciiString.EMPTY_STRING;
import static io.netty.util.AsciiString.of; import static io.netty.util.AsciiString.of;
import static java.lang.Integer.MAX_VALUE; import static java.lang.Integer.MAX_VALUE;
@ -53,19 +50,19 @@ import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.verifyNoMoreInteractions;
public class DecoderTest { public class HpackDecoderTest {
private Decoder decoder; private HpackDecoder hpackDecoder;
private Http2Headers mockHeaders; private Http2Headers mockHeaders;
private static String hex(String s) { private static String hex(String s) {
return Hex.encodeHexString(s.getBytes()); return HpackHex.encodeHexString(s.getBytes());
} }
private void decode(String encoded) throws Http2Exception { private void decode(String encoded) throws Http2Exception {
byte[] b = Hex.decodeHex(encoded.toCharArray()); byte[] b = HpackHex.decodeHex(encoded.toCharArray());
ByteBuf in = Unpooled.wrappedBuffer(b); ByteBuf in = Unpooled.wrappedBuffer(b);
try { try {
decoder.decode(0, in, mockHeaders); hpackDecoder.decode(0, in, mockHeaders);
} finally { } finally {
in.release(); in.release();
} }
@ -73,7 +70,7 @@ public class DecoderTest {
@Before @Before
public void setUp() throws Http2Exception { public void setUp() throws Http2Exception {
decoder = new Decoder(8192, 32); hpackDecoder = new HpackDecoder(8192, 32);
mockHeaders = mock(Http2Headers.class); mockHeaders = mock(Http2Headers.class);
} }
@ -160,9 +157,9 @@ public class DecoderTest {
ByteBuf in = Unpooled.wrappedBuffer(input); ByteBuf in = Unpooled.wrappedBuffer(input);
try { try {
final long expectedHeaderSize = 4026531870L; // based on the input above final long expectedHeaderSize = 4026531870L; // based on the input above
decoder.setMaxHeaderTableSize(expectedHeaderSize); hpackDecoder.setMaxHeaderTableSize(expectedHeaderSize);
decoder.decode(0, in, mockHeaders); hpackDecoder.decode(0, in, mockHeaders);
assertEquals(expectedHeaderSize, decoder.getMaxHeaderTableSize()); assertEquals(expectedHeaderSize, hpackDecoder.getMaxHeaderTableSize());
} finally { } finally {
in.release(); in.release();
} }
@ -173,8 +170,8 @@ public class DecoderTest {
byte[] input = {(byte) 0x3F, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0x0E}; byte[] input = {(byte) 0x3F, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0x0E};
ByteBuf in = Unpooled.wrappedBuffer(input); ByteBuf in = Unpooled.wrappedBuffer(input);
try { try {
decoder.setMaxHeaderTableSize(4026531870L - 1); // based on the input above ... 1 less than is above. hpackDecoder.setMaxHeaderTableSize(4026531870L - 1); // based on the input above ... 1 less than is above.
decoder.decode(0, in, mockHeaders); hpackDecoder.decode(0, in, mockHeaders);
} finally { } finally {
in.release(); in.release();
} }
@ -185,7 +182,7 @@ public class DecoderTest {
byte[] input = {0, (byte) 0x80, 0}; byte[] input = {0, (byte) 0x80, 0};
ByteBuf in = Unpooled.wrappedBuffer(input); ByteBuf in = Unpooled.wrappedBuffer(input);
try { try {
decoder.decode(0, in, mockHeaders); hpackDecoder.decode(0, in, mockHeaders);
verify(mockHeaders, times(1)).add(EMPTY_STRING, EMPTY_STRING); verify(mockHeaders, times(1)).add(EMPTY_STRING, EMPTY_STRING);
} finally { } finally {
in.release(); in.release();
@ -197,7 +194,7 @@ public class DecoderTest {
byte[] input = {0, (byte) 0x81, -1}; byte[] input = {0, (byte) 0x81, -1};
ByteBuf in = Unpooled.wrappedBuffer(input); ByteBuf in = Unpooled.wrappedBuffer(input);
try { try {
decoder.decode(0, in, mockHeaders); hpackDecoder.decode(0, in, mockHeaders);
} finally { } finally {
in.release(); in.release();
} }
@ -208,7 +205,7 @@ public class DecoderTest {
byte[] input = {0, (byte) 0x84, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF}; byte[] input = {0, (byte) 0x84, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF};
ByteBuf in = Unpooled.wrappedBuffer(input); ByteBuf in = Unpooled.wrappedBuffer(input);
try { try {
decoder.decode(0, in, mockHeaders); hpackDecoder.decode(0, in, mockHeaders);
} finally { } finally {
in.release(); in.release();
} }
@ -219,7 +216,7 @@ public class DecoderTest {
byte[] input = {0, (byte) 0x81, 0}; byte[] input = {0, (byte) 0x81, 0};
ByteBuf in = Unpooled.wrappedBuffer(input); ByteBuf in = Unpooled.wrappedBuffer(input);
try { try {
decoder.decode(0, in, mockHeaders); hpackDecoder.decode(0, in, mockHeaders);
} finally { } finally {
in.release(); in.release();
} }
@ -227,12 +224,12 @@ public class DecoderTest {
@Test(expected = Http2Exception.class) @Test(expected = Http2Exception.class)
public void testIncompleteIndex() throws Http2Exception { public void testIncompleteIndex() throws Http2Exception {
byte[] compressed = Hex.decodeHex("FFF0".toCharArray()); byte[] compressed = HpackHex.decodeHex("FFF0".toCharArray());
ByteBuf in = Unpooled.wrappedBuffer(compressed); ByteBuf in = Unpooled.wrappedBuffer(compressed);
try { try {
decoder.decode(0, in, mockHeaders); hpackDecoder.decode(0, in, mockHeaders);
assertEquals(1, in.readableBytes()); assertEquals(1, in.readableBytes());
decoder.decode(0, in, mockHeaders); hpackDecoder.decode(0, in, mockHeaders);
} finally { } finally {
in.release(); in.release();
} }
@ -259,16 +256,16 @@ public class DecoderTest {
@Test @Test
public void testDynamicTableSizeUpdate() throws Http2Exception { public void testDynamicTableSizeUpdate() throws Http2Exception {
decode("20"); decode("20");
assertEquals(0, decoder.getMaxHeaderTableSize()); assertEquals(0, hpackDecoder.getMaxHeaderTableSize());
decode("3FE11F"); decode("3FE11F");
assertEquals(4096, decoder.getMaxHeaderTableSize()); assertEquals(4096, hpackDecoder.getMaxHeaderTableSize());
} }
@Test @Test
public void testDynamicTableSizeUpdateRequired() throws Http2Exception { public void testDynamicTableSizeUpdateRequired() throws Http2Exception {
decoder.setMaxHeaderTableSize(32); hpackDecoder.setMaxHeaderTableSize(32);
decode("3F00"); decode("3F00");
assertEquals(31, decoder.getMaxHeaderTableSize()); assertEquals(31, hpackDecoder.getMaxHeaderTableSize());
} }
@Test(expected = Http2Exception.class) @Test(expected = Http2Exception.class)
@ -279,14 +276,14 @@ public class DecoderTest {
@Test(expected = Http2Exception.class) @Test(expected = Http2Exception.class)
public void testInsidiousMaxDynamicTableSize() throws Http2Exception { public void testInsidiousMaxDynamicTableSize() throws Http2Exception {
decoder.setMaxHeaderTableSize(MAX_VALUE); hpackDecoder.setMaxHeaderTableSize(MAX_VALUE);
// max header table size sign overflow // max header table size sign overflow
decode("3FE1FFFFFF07"); decode("3FE1FFFFFF07");
} }
@Test @Test
public void testMaxValidDynamicTableSize() throws Http2Exception { public void testMaxValidDynamicTableSize() throws Http2Exception {
decoder.setMaxHeaderTableSize(MAX_VALUE); hpackDecoder.setMaxHeaderTableSize(MAX_VALUE);
String baseValue = "3FE1FFFFFF0"; String baseValue = "3FE1FFFFFF0";
for (int i = 0; i < 7; ++i) { for (int i = 0; i < 7; ++i) {
decode(baseValue + i); decode(baseValue + i);
@ -295,22 +292,22 @@ public class DecoderTest {
@Test @Test
public void testReduceMaxDynamicTableSize() throws Http2Exception { public void testReduceMaxDynamicTableSize() throws Http2Exception {
decoder.setMaxHeaderTableSize(0); hpackDecoder.setMaxHeaderTableSize(0);
assertEquals(0, decoder.getMaxHeaderTableSize()); assertEquals(0, hpackDecoder.getMaxHeaderTableSize());
decode("2081"); decode("2081");
} }
@Test(expected = Http2Exception.class) @Test(expected = Http2Exception.class)
public void testTooLargeDynamicTableSizeUpdate() throws Http2Exception { public void testTooLargeDynamicTableSizeUpdate() throws Http2Exception {
decoder.setMaxHeaderTableSize(0); hpackDecoder.setMaxHeaderTableSize(0);
assertEquals(0, decoder.getMaxHeaderTableSize()); assertEquals(0, hpackDecoder.getMaxHeaderTableSize());
decode("21"); // encoder max header table size not small enough decode("21"); // encoder max header table size not small enough
} }
@Test(expected = Http2Exception.class) @Test(expected = Http2Exception.class)
public void testMissingDynamicTableSizeUpdate() throws Http2Exception { public void testMissingDynamicTableSizeUpdate() throws Http2Exception {
decoder.setMaxHeaderTableSize(0); hpackDecoder.setMaxHeaderTableSize(0);
assertEquals(0, decoder.getMaxHeaderTableSize()); assertEquals(0, hpackDecoder.getMaxHeaderTableSize());
decode("81"); decode("81");
} }
@ -429,8 +426,8 @@ public class DecoderTest {
throws Http2Exception { throws Http2Exception {
ByteBuf in = Unpooled.buffer(200); ByteBuf in = Unpooled.buffer(200);
try { try {
decoder.setMaxHeaderListSize(100, 200); hpackDecoder.setMaxHeaderListSize(100, 200);
Encoder encoder = new Encoder(true); HpackEncoder hpackEncoder = new HpackEncoder(true);
// encode headers that are slightly larger than maxHeaderListSize // encode headers that are slightly larger than maxHeaderListSize
// but smaller than maxHeaderListSizeGoAway // but smaller than maxHeaderListSizeGoAway
@ -439,13 +436,13 @@ public class DecoderTest {
toEncode.add("test_2", "2"); toEncode.add("test_2", "2");
toEncode.add("long", String.format("%0100d", 0).replace('0', 'A')); toEncode.add("long", String.format("%0100d", 0).replace('0', 'A'));
toEncode.add("test_3", "3"); toEncode.add("test_3", "3");
encoder.encodeHeaders(1, in, toEncode, NEVER_SENSITIVE); hpackEncoder.encodeHeaders(1, in, toEncode, NEVER_SENSITIVE);
// decode the headers, we should get an exception, but // decode the headers, we should get an exception, but
// the decoded headers object should contain all of the headers // the decoded headers object should contain all of the headers
Http2Headers decoded = new DefaultHttp2Headers(); Http2Headers decoded = new DefaultHttp2Headers();
try { try {
decoder.decode(1, in, decoded); hpackDecoder.decode(1, in, decoded);
fail(); fail();
} catch (Http2Exception e) { } catch (Http2Exception e) {
assertTrue(e instanceof Http2Exception.HeaderListSizeException); assertTrue(e instanceof Http2Exception.HeaderListSizeException);

View File

@ -13,12 +13,10 @@
* License for the specific language governing permissions and limitations * License for the specific language governing permissions and limitations
* under the License. * under the License.
*/ */
package io.netty.handler.codec.http2.internal.hpack; package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled; import io.netty.buffer.Unpooled;
import io.netty.handler.codec.http2.Http2Exception;
import io.netty.handler.codec.http2.Http2Headers;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
@ -27,25 +25,25 @@ import static io.netty.handler.codec.http2.Http2CodecUtil.MAX_HEADER_TABLE_SIZE;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock; import static org.mockito.Mockito.mock;
public class EncoderTest { public class HpackEncoderTest {
private Decoder decoder; private HpackDecoder hpackDecoder;
private Encoder encoder; private HpackEncoder hpackEncoder;
private Http2Headers mockHeaders; private Http2Headers mockHeaders;
@Before @Before
public void setUp() throws Http2Exception { public void setUp() throws Http2Exception {
encoder = new Encoder(); hpackEncoder = new HpackEncoder();
decoder = new Decoder(DEFAULT_HEADER_LIST_SIZE, 32); hpackDecoder = new HpackDecoder(DEFAULT_HEADER_LIST_SIZE, 32);
mockHeaders = mock(Http2Headers.class); mockHeaders = mock(Http2Headers.class);
} }
@Test @Test
public void testSetMaxHeaderTableSizeToMaxValue() throws Http2Exception { public void testSetMaxHeaderTableSizeToMaxValue() throws Http2Exception {
ByteBuf buf = Unpooled.buffer(); ByteBuf buf = Unpooled.buffer();
encoder.setMaxHeaderTableSize(buf, MAX_HEADER_TABLE_SIZE); hpackEncoder.setMaxHeaderTableSize(buf, MAX_HEADER_TABLE_SIZE);
decoder.setMaxHeaderTableSize(MAX_HEADER_TABLE_SIZE); hpackDecoder.setMaxHeaderTableSize(MAX_HEADER_TABLE_SIZE);
decoder.decode(0, buf, mockHeaders); hpackDecoder.decode(0, buf, mockHeaders);
assertEquals(MAX_HEADER_TABLE_SIZE, decoder.getMaxHeaderTableSize()); assertEquals(MAX_HEADER_TABLE_SIZE, hpackDecoder.getMaxHeaderTableSize());
buf.release(); buf.release();
} }
@ -53,7 +51,7 @@ public class EncoderTest {
public void testSetMaxHeaderTableSizeOverflow() throws Http2Exception { public void testSetMaxHeaderTableSizeOverflow() throws Http2Exception {
ByteBuf buf = Unpooled.buffer(); ByteBuf buf = Unpooled.buffer();
try { try {
encoder.setMaxHeaderTableSize(buf, MAX_HEADER_TABLE_SIZE + 1); hpackEncoder.setMaxHeaderTableSize(buf, MAX_HEADER_TABLE_SIZE + 1);
} finally { } finally {
buf.release(); buf.release();
} }

View File

@ -31,23 +31,23 @@
* limitations under the License. * limitations under the License.
*/ */
package io.netty.handler.codec.http2.internal.hpack; package io.netty.handler.codec.http2;
/** /**
* Extracted from org/apache/commons/codec/binary/Hex.java Copyright Apache Software Foundation * Extracted from org/apache/commons/codec/binary/Hex.java Copyright Apache Software Foundation
*/ */
final class Hex { final class HpackHex {
private Hex() { private HpackHex() {
} }
/** /**
* Used to build output as Hex * Used to build output as HpackHex
*/ */
private static final char[] DIGITS_LOWER = private static final char[] DIGITS_LOWER =
{'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'}; {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'};
/** /**
* Used to build output as Hex * Used to build output as HpackHex
*/ */
private static final char[] DIGITS_UPPER = private static final char[] DIGITS_UPPER =
{'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F'}; {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F'};
@ -89,7 +89,7 @@ final class Hex {
* each byte in order. The returned array will be double the length of the passed array, as it * each byte in order. The returned array will be double the length of the passed array, as it
* takes two characters to represent any given byte. * takes two characters to represent any given byte.
* *
* @param data a byte[] to convert to Hex characters * @param data a byte[] to convert to HpackHex characters
* @return A char[] containing hexadecimal characters * @return A char[] containing hexadecimal characters
*/ */
public static char[] encodeHex(byte[] data) { public static char[] encodeHex(byte[] data) {
@ -101,7 +101,7 @@ final class Hex {
* each byte in order. The returned array will be double the length of the passed array, as it * each byte in order. The returned array will be double the length of the passed array, as it
* takes two characters to represent any given byte. * takes two characters to represent any given byte.
* *
* @param data a byte[] to convert to Hex characters * @param data a byte[] to convert to HpackHex characters
* @param toLowerCase <code>true</code> converts to lowercase, <code>false</code> to uppercase * @param toLowerCase <code>true</code> converts to lowercase, <code>false</code> to uppercase
* @return A char[] containing hexadecimal characters * @return A char[] containing hexadecimal characters
* @since 1.4 * @since 1.4
@ -115,7 +115,7 @@ final class Hex {
* each byte in order. The returned array will be double the length of the passed array, as it * each byte in order. The returned array will be double the length of the passed array, as it
* takes two characters to represent any given byte. * takes two characters to represent any given byte.
* *
* @param data a byte[] to convert to Hex characters * @param data a byte[] to convert to HpackHex characters
* @param toDigits the output alphabet * @param toDigits the output alphabet
* @return A char[] containing hexadecimal characters * @return A char[] containing hexadecimal characters
* @since 1.4 * @since 1.4
@ -136,7 +136,7 @@ final class Hex {
* order. The returned String will be double the length of the passed array, as it takes two * order. The returned String will be double the length of the passed array, as it takes two
* characters to represent any given byte. * characters to represent any given byte.
* *
* @param data a byte[] to convert to Hex characters * @param data a byte[] to convert to HpackHex characters
* @return A String containing hexadecimal characters * @return A String containing hexadecimal characters
* @since 1.4 * @since 1.4
*/ */

View File

@ -29,11 +29,10 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package io.netty.handler.codec.http2.internal.hpack; package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled; import io.netty.buffer.Unpooled;
import io.netty.handler.codec.http2.Http2Exception;
import io.netty.util.AsciiString; import io.netty.util.AsciiString;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test; import org.junit.Test;
@ -41,7 +40,7 @@ import org.junit.Test;
import java.util.Arrays; import java.util.Arrays;
import java.util.Random; import java.util.Random;
public class HuffmanTest { public class HpackHuffmanTest {
@Test @Test
public void testHuffman() throws Http2Exception { public void testHuffman() throws Http2Exception {
@ -123,19 +122,19 @@ public class HuffmanTest {
} }
private static void roundTrip(String s) throws Http2Exception { private static void roundTrip(String s) throws Http2Exception {
roundTrip(new HuffmanEncoder(), newHuffmanDecoder(), s); roundTrip(new HpackHuffmanEncoder(), newHuffmanDecoder(), s);
} }
private static void roundTrip(HuffmanEncoder encoder, HuffmanDecoder decoder, String s) private static void roundTrip(HpackHuffmanEncoder encoder, HpackHuffmanDecoder decoder, String s)
throws Http2Exception { throws Http2Exception {
roundTrip(encoder, decoder, s.getBytes()); roundTrip(encoder, decoder, s.getBytes());
} }
private static void roundTrip(byte[] buf) throws Http2Exception { private static void roundTrip(byte[] buf) throws Http2Exception {
roundTrip(new HuffmanEncoder(), newHuffmanDecoder(), buf); roundTrip(new HpackHuffmanEncoder(), newHuffmanDecoder(), buf);
} }
private static void roundTrip(HuffmanEncoder encoder, HuffmanDecoder decoder, byte[] buf) private static void roundTrip(HpackHuffmanEncoder encoder, HpackHuffmanDecoder decoder, byte[] buf)
throws Http2Exception { throws Http2Exception {
ByteBuf buffer = Unpooled.buffer(); ByteBuf buffer = Unpooled.buffer();
try { try {
@ -151,7 +150,7 @@ public class HuffmanTest {
} }
} }
private static byte[] decode(HuffmanDecoder decoder, byte[] bytes) throws Http2Exception { private static byte[] decode(HpackHuffmanDecoder decoder, byte[] bytes) throws Http2Exception {
ByteBuf buffer = Unpooled.wrappedBuffer(bytes); ByteBuf buffer = Unpooled.wrappedBuffer(bytes);
try { try {
AsciiString decoded = decoder.decode(buffer, buffer.readableBytes()); AsciiString decoded = decoder.decode(buffer, buffer.readableBytes());
@ -162,7 +161,7 @@ public class HuffmanTest {
} }
} }
private static HuffmanDecoder newHuffmanDecoder() { private static HpackHuffmanDecoder newHuffmanDecoder() {
return new HuffmanDecoder(32); return new HpackHuffmanDecoder(32);
} }
} }

View File

@ -29,7 +29,7 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package io.netty.handler.codec.http2.internal.hpack; package io.netty.handler.codec.http2;
import org.junit.Test; import org.junit.Test;
import org.junit.runner.RunWith; import org.junit.runner.RunWith;
@ -72,8 +72,8 @@ public class HpackTest {
@Test @Test
public void test() throws Exception { public void test() throws Exception {
InputStream is = HpackTest.class.getResourceAsStream(TEST_DIR + fileName); InputStream is = HpackTest.class.getResourceAsStream(TEST_DIR + fileName);
TestCase testCase = TestCase.load(is); HpackTestCase hpackTestCase = HpackTestCase.load(is);
testCase.testCompress(); hpackTestCase.testCompress();
testCase.testDecompress(); hpackTestCase.testDecompress();
} }
} }

View File

@ -29,7 +29,7 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package io.netty.handler.codec.http2.internal.hpack; package io.netty.handler.codec.http2;
import com.google.gson.FieldNamingPolicy; import com.google.gson.FieldNamingPolicy;
import com.google.gson.Gson; import com.google.gson.Gson;
@ -41,10 +41,6 @@ import com.google.gson.JsonObject;
import com.google.gson.JsonParseException; import com.google.gson.JsonParseException;
import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled; import io.netty.buffer.Unpooled;
import io.netty.handler.codec.http2.DefaultHttp2Headers;
import io.netty.handler.codec.http2.Http2Exception;
import io.netty.handler.codec.http2.Http2Headers;
import io.netty.handler.codec.http2.Http2HeadersEncoder;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
@ -60,11 +56,11 @@ import static io.netty.handler.codec.http2.Http2CodecUtil.DEFAULT_HEADER_LIST_SI
import static io.netty.handler.codec.http2.Http2CodecUtil.MAX_HEADER_LIST_SIZE; import static io.netty.handler.codec.http2.Http2CodecUtil.MAX_HEADER_LIST_SIZE;
import static io.netty.handler.codec.http2.Http2TestUtil.newTestEncoder; import static io.netty.handler.codec.http2.Http2TestUtil.newTestEncoder;
final class TestCase { final class HpackTestCase {
private static final Gson GSON = new GsonBuilder() private static final Gson GSON = new GsonBuilder()
.setFieldNamingPolicy(FieldNamingPolicy.LOWER_CASE_WITH_UNDERSCORES) .setFieldNamingPolicy(FieldNamingPolicy.LOWER_CASE_WITH_UNDERSCORES)
.registerTypeAdapter(HeaderField.class, new HeaderFieldDeserializer()) .registerTypeAdapter(HpackHeaderField.class, new HeaderFieldDeserializer())
.create(); .create();
int maxHeaderTableSize = -1; int maxHeaderTableSize = -1;
@ -72,39 +68,39 @@ final class TestCase {
List<HeaderBlock> headerBlocks; List<HeaderBlock> headerBlocks;
private TestCase() { private HpackTestCase() {
} }
static TestCase load(InputStream is) throws IOException { static HpackTestCase load(InputStream is) throws IOException {
InputStreamReader r = new InputStreamReader(is); InputStreamReader r = new InputStreamReader(is);
TestCase testCase = GSON.fromJson(r, TestCase.class); HpackTestCase hpackTestCase = GSON.fromJson(r, HpackTestCase.class);
for (HeaderBlock headerBlock : testCase.headerBlocks) { for (HeaderBlock headerBlock : hpackTestCase.headerBlocks) {
headerBlock.encodedBytes = Hex.decodeHex(headerBlock.getEncodedStr().toCharArray()); headerBlock.encodedBytes = HpackHex.decodeHex(headerBlock.getEncodedStr().toCharArray());
} }
return testCase; return hpackTestCase;
} }
void testCompress() throws Exception { void testCompress() throws Exception {
Encoder encoder = createEncoder(); HpackEncoder hpackEncoder = createEncoder();
for (HeaderBlock headerBlock : headerBlocks) { for (HeaderBlock headerBlock : headerBlocks) {
byte[] actual = byte[] actual =
encode(encoder, headerBlock.getHeaders(), headerBlock.getMaxHeaderTableSize(), encode(hpackEncoder, headerBlock.getHeaders(), headerBlock.getMaxHeaderTableSize(),
sensitiveHeaders); sensitiveHeaders);
if (!Arrays.equals(actual, headerBlock.encodedBytes)) { if (!Arrays.equals(actual, headerBlock.encodedBytes)) {
throw new AssertionError( throw new AssertionError(
"\nEXPECTED:\n" + headerBlock.getEncodedStr() + "\nEXPECTED:\n" + headerBlock.getEncodedStr() +
"\nACTUAL:\n" + Hex.encodeHexString(actual)); "\nACTUAL:\n" + HpackHex.encodeHexString(actual));
} }
List<HeaderField> actualDynamicTable = new ArrayList<HeaderField>(); List<HpackHeaderField> actualDynamicTable = new ArrayList<HpackHeaderField>();
for (int index = 0; index < encoder.length(); index++) { for (int index = 0; index < hpackEncoder.length(); index++) {
actualDynamicTable.add(encoder.getHeaderField(index)); actualDynamicTable.add(hpackEncoder.getHeaderField(index));
} }
List<HeaderField> expectedDynamicTable = headerBlock.getDynamicTable(); List<HpackHeaderField> expectedDynamicTable = headerBlock.getDynamicTable();
if (!expectedDynamicTable.equals(actualDynamicTable)) { if (!expectedDynamicTable.equals(actualDynamicTable)) {
throw new AssertionError( throw new AssertionError(
@ -112,24 +108,24 @@ final class TestCase {
"\nACTUAL DYNAMIC TABLE:\n" + actualDynamicTable); "\nACTUAL DYNAMIC TABLE:\n" + actualDynamicTable);
} }
if (headerBlock.getTableSize() != encoder.size()) { if (headerBlock.getTableSize() != hpackEncoder.size()) {
throw new AssertionError( throw new AssertionError(
"\nEXPECTED TABLE SIZE: " + headerBlock.getTableSize() + "\nEXPECTED TABLE SIZE: " + headerBlock.getTableSize() +
"\n ACTUAL TABLE SIZE : " + encoder.size()); "\n ACTUAL TABLE SIZE : " + hpackEncoder.size());
} }
} }
} }
void testDecompress() throws Exception { void testDecompress() throws Exception {
Decoder decoder = createDecoder(); HpackDecoder hpackDecoder = createDecoder();
for (HeaderBlock headerBlock : headerBlocks) { for (HeaderBlock headerBlock : headerBlocks) {
List<HeaderField> actualHeaders = decode(decoder, headerBlock.encodedBytes); List<HpackHeaderField> actualHeaders = decode(hpackDecoder, headerBlock.encodedBytes);
List<HeaderField> expectedHeaders = new ArrayList<HeaderField>(); List<HpackHeaderField> expectedHeaders = new ArrayList<HpackHeaderField>();
for (HeaderField h : headerBlock.getHeaders()) { for (HpackHeaderField h : headerBlock.getHeaders()) {
expectedHeaders.add(new HeaderField(h.name, h.value)); expectedHeaders.add(new HpackHeaderField(h.name, h.value));
} }
if (!expectedHeaders.equals(actualHeaders)) { if (!expectedHeaders.equals(actualHeaders)) {
@ -138,12 +134,12 @@ final class TestCase {
"\nACTUAL:\n" + actualHeaders); "\nACTUAL:\n" + actualHeaders);
} }
List<HeaderField> actualDynamicTable = new ArrayList<HeaderField>(); List<HpackHeaderField> actualDynamicTable = new ArrayList<HpackHeaderField>();
for (int index = 0; index < decoder.length(); index++) { for (int index = 0; index < hpackDecoder.length(); index++) {
actualDynamicTable.add(decoder.getHeaderField(index)); actualDynamicTable.add(hpackDecoder.getHeaderField(index));
} }
List<HeaderField> expectedDynamicTable = headerBlock.getDynamicTable(); List<HpackHeaderField> expectedDynamicTable = headerBlock.getDynamicTable();
if (!expectedDynamicTable.equals(actualDynamicTable)) { if (!expectedDynamicTable.equals(actualDynamicTable)) {
throw new AssertionError( throw new AssertionError(
@ -151,15 +147,15 @@ final class TestCase {
"\nACTUAL DYNAMIC TABLE:\n" + actualDynamicTable); "\nACTUAL DYNAMIC TABLE:\n" + actualDynamicTable);
} }
if (headerBlock.getTableSize() != decoder.size()) { if (headerBlock.getTableSize() != hpackDecoder.size()) {
throw new AssertionError( throw new AssertionError(
"\nEXPECTED TABLE SIZE: " + headerBlock.getTableSize() + "\nEXPECTED TABLE SIZE: " + headerBlock.getTableSize() +
"\n ACTUAL TABLE SIZE : " + decoder.size()); "\n ACTUAL TABLE SIZE : " + hpackDecoder.size());
} }
} }
} }
private Encoder createEncoder() { private HpackEncoder createEncoder() {
int maxHeaderTableSize = this.maxHeaderTableSize; int maxHeaderTableSize = this.maxHeaderTableSize;
if (maxHeaderTableSize == -1) { if (maxHeaderTableSize == -1) {
maxHeaderTableSize = Integer.MAX_VALUE; maxHeaderTableSize = Integer.MAX_VALUE;
@ -172,16 +168,16 @@ final class TestCase {
} }
} }
private Decoder createDecoder() { private HpackDecoder createDecoder() {
int maxHeaderTableSize = this.maxHeaderTableSize; int maxHeaderTableSize = this.maxHeaderTableSize;
if (maxHeaderTableSize == -1) { if (maxHeaderTableSize == -1) {
maxHeaderTableSize = Integer.MAX_VALUE; maxHeaderTableSize = Integer.MAX_VALUE;
} }
return new Decoder(DEFAULT_HEADER_LIST_SIZE, 32, maxHeaderTableSize); return new HpackDecoder(DEFAULT_HEADER_LIST_SIZE, 32, maxHeaderTableSize);
} }
private static byte[] encode(Encoder encoder, List<HeaderField> headers, int maxHeaderTableSize, private static byte[] encode(HpackEncoder hpackEncoder, List<HpackHeaderField> headers, int maxHeaderTableSize,
final boolean sensitive) throws Http2Exception { final boolean sensitive) throws Http2Exception {
Http2Headers http2Headers = toHttp2Headers(headers); Http2Headers http2Headers = toHttp2Headers(headers);
Http2HeadersEncoder.SensitivityDetector sensitivityDetector = new Http2HeadersEncoder.SensitivityDetector() { Http2HeadersEncoder.SensitivityDetector sensitivityDetector = new Http2HeadersEncoder.SensitivityDetector() {
@ -193,10 +189,10 @@ final class TestCase {
ByteBuf buffer = Unpooled.buffer(); ByteBuf buffer = Unpooled.buffer();
try { try {
if (maxHeaderTableSize != -1) { if (maxHeaderTableSize != -1) {
encoder.setMaxHeaderTableSize(buffer, maxHeaderTableSize); hpackEncoder.setMaxHeaderTableSize(buffer, maxHeaderTableSize);
} }
encoder.encodeHeaders(3 /* randomly chosen */, buffer, http2Headers, sensitivityDetector); hpackEncoder.encodeHeaders(3 /* randomly chosen */, buffer, http2Headers, sensitivityDetector);
byte[] bytes = new byte[buffer.readableBytes()]; byte[] bytes = new byte[buffer.readableBytes()];
buffer.readBytes(bytes); buffer.readBytes(bytes);
return bytes; return bytes;
@ -205,20 +201,20 @@ final class TestCase {
} }
} }
private static Http2Headers toHttp2Headers(List<HeaderField> inHeaders) { private static Http2Headers toHttp2Headers(List<HpackHeaderField> inHeaders) {
Http2Headers headers = new DefaultHttp2Headers(false); Http2Headers headers = new DefaultHttp2Headers(false);
for (HeaderField e : inHeaders) { for (HpackHeaderField e : inHeaders) {
headers.add(e.name, e.value); headers.add(e.name, e.value);
} }
return headers; return headers;
} }
private static List<HeaderField> decode(Decoder decoder, byte[] expected) throws Exception { private static List<HpackHeaderField> decode(HpackDecoder hpackDecoder, byte[] expected) throws Exception {
ByteBuf in = Unpooled.wrappedBuffer(expected); ByteBuf in = Unpooled.wrappedBuffer(expected);
try { try {
List<HeaderField> headers = new ArrayList<HeaderField>(); List<HpackHeaderField> headers = new ArrayList<HpackHeaderField>();
TestHeaderListener listener = new TestHeaderListener(headers); TestHeaderListener listener = new TestHeaderListener(headers);
decoder.decode(0, in, listener); hpackDecoder.decode(0, in, listener);
return headers; return headers;
} finally { } finally {
in.release(); in.release();
@ -237,8 +233,8 @@ final class TestCase {
private int maxHeaderTableSize = -1; private int maxHeaderTableSize = -1;
private byte[] encodedBytes; private byte[] encodedBytes;
private List<String> encoded; private List<String> encoded;
private List<HeaderField> headers; private List<HpackHeaderField> headers;
private List<HeaderField> dynamicTable; private List<HpackHeaderField> dynamicTable;
private int tableSize; private int tableSize;
private int getMaxHeaderTableSize() { private int getMaxHeaderTableSize() {
@ -249,11 +245,11 @@ final class TestCase {
return concat(encoded).replaceAll(" ", ""); return concat(encoded).replaceAll(" ", "");
} }
public List<HeaderField> getHeaders() { public List<HpackHeaderField> getHeaders() {
return headers; return headers;
} }
public List<HeaderField> getDynamicTable() { public List<HpackHeaderField> getDynamicTable() {
return dynamicTable; return dynamicTable;
} }
@ -262,11 +258,11 @@ final class TestCase {
} }
} }
static class HeaderFieldDeserializer implements JsonDeserializer<HeaderField> { static class HeaderFieldDeserializer implements JsonDeserializer<HpackHeaderField> {
@Override @Override
public HeaderField deserialize(JsonElement json, Type typeOfT, public HpackHeaderField deserialize(JsonElement json, Type typeOfT,
JsonDeserializationContext context) { JsonDeserializationContext context) {
JsonObject jsonObject = json.getAsJsonObject(); JsonObject jsonObject = json.getAsJsonObject();
Set<Map.Entry<String, JsonElement>> entrySet = jsonObject.entrySet(); Set<Map.Entry<String, JsonElement>> entrySet = jsonObject.entrySet();
if (entrySet.size() != 1) { if (entrySet.size() != 1) {
@ -275,7 +271,7 @@ final class TestCase {
Map.Entry<String, JsonElement> entry = entrySet.iterator().next(); Map.Entry<String, JsonElement> entry = entrySet.iterator().next();
String name = entry.getKey(); String name = entry.getKey();
String value = entry.getValue().getAsString(); String value = entry.getValue().getAsString();
return new HeaderField(name, value); return new HpackHeaderField(name, value);
} }
} }
} }

View File

@ -23,8 +23,6 @@ import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelPromise; import io.netty.channel.ChannelPromise;
import io.netty.channel.DefaultChannelPromise; import io.netty.channel.DefaultChannelPromise;
import io.netty.handler.codec.ByteToMessageDecoder; import io.netty.handler.codec.ByteToMessageDecoder;
import io.netty.handler.codec.http2.internal.hpack.Decoder;
import io.netty.handler.codec.http2.internal.hpack.Encoder;
import io.netty.util.AsciiString; import io.netty.util.AsciiString;
import io.netty.util.concurrent.Future; import io.netty.util.concurrent.Future;
import io.netty.util.concurrent.GenericFutureListener; import io.netty.util.concurrent.GenericFutureListener;
@ -92,7 +90,7 @@ public final class Http2TestUtil {
return s; return s;
} }
public static Encoder newTestEncoder() { public static HpackEncoder newTestEncoder() {
try { try {
return newTestEncoder(true, MAX_HEADER_LIST_SIZE, MAX_HEADER_TABLE_SIZE); return newTestEncoder(true, MAX_HEADER_LIST_SIZE, MAX_HEADER_TABLE_SIZE);
} catch (Http2Exception e) { } catch (Http2Exception e) {
@ -100,20 +98,20 @@ public final class Http2TestUtil {
} }
} }
public static Encoder newTestEncoder(boolean ignoreMaxHeaderListSize, public static HpackEncoder newTestEncoder(boolean ignoreMaxHeaderListSize,
long maxHeaderListSize, long maxHeaderTableSize) throws Http2Exception { long maxHeaderListSize, long maxHeaderTableSize) throws Http2Exception {
Encoder encoder = new Encoder(); HpackEncoder hpackEncoder = new HpackEncoder();
ByteBuf buf = Unpooled.buffer(); ByteBuf buf = Unpooled.buffer();
try { try {
encoder.setMaxHeaderTableSize(buf, maxHeaderTableSize); hpackEncoder.setMaxHeaderTableSize(buf, maxHeaderTableSize);
encoder.setMaxHeaderListSize(maxHeaderListSize); hpackEncoder.setMaxHeaderListSize(maxHeaderListSize);
} finally { } finally {
buf.release(); buf.release();
} }
return encoder; return hpackEncoder;
} }
public static Decoder newTestDecoder() { public static HpackDecoder newTestDecoder() {
try { try {
return newTestDecoder(MAX_HEADER_LIST_SIZE, MAX_HEADER_TABLE_SIZE); return newTestDecoder(MAX_HEADER_LIST_SIZE, MAX_HEADER_TABLE_SIZE);
} catch (Http2Exception e) { } catch (Http2Exception e) {
@ -121,10 +119,10 @@ public final class Http2TestUtil {
} }
} }
public static Decoder newTestDecoder(long maxHeaderListSize, long maxHeaderTableSize) throws Http2Exception { public static HpackDecoder newTestDecoder(long maxHeaderListSize, long maxHeaderTableSize) throws Http2Exception {
Decoder decoder = new Decoder(maxHeaderListSize, 32); HpackDecoder hpackDecoder = new HpackDecoder(maxHeaderListSize, 32);
decoder.setMaxHeaderTableSize(maxHeaderTableSize); hpackDecoder.setMaxHeaderTableSize(maxHeaderTableSize);
return decoder; return hpackDecoder;
} }
private Http2TestUtil() { private Http2TestUtil() {

View File

@ -29,23 +29,21 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package io.netty.handler.codec.http2.internal.hpack; package io.netty.handler.codec.http2;
import io.netty.handler.codec.http2.DefaultHttp2Headers;
import java.util.List; import java.util.List;
final class TestHeaderListener extends DefaultHttp2Headers { final class TestHeaderListener extends DefaultHttp2Headers {
private final List<HeaderField> headers; private final List<HpackHeaderField> headers;
TestHeaderListener(List<HeaderField> headers) { TestHeaderListener(List<HpackHeaderField> headers) {
this.headers = headers; this.headers = headers;
} }
@Override @Override
public TestHeaderListener add(CharSequence name, CharSequence value) { public TestHeaderListener add(CharSequence name, CharSequence value) {
headers.add(new HeaderField(name, value)); headers.add(new HpackHeaderField(name, value));
return this; return this;
} }
} }

View File

@ -29,16 +29,10 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package io.netty.microbench.http2.internal.hpack; package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled; import io.netty.buffer.Unpooled;
import io.netty.handler.codec.http2.DefaultHttp2Headers;
import io.netty.handler.codec.http2.Http2Exception;
import io.netty.handler.codec.http2.Http2Headers;
import io.netty.handler.codec.http2.Http2HeadersEncoder;
import io.netty.handler.codec.http2.internal.hpack.Decoder;
import io.netty.handler.codec.http2.internal.hpack.Encoder;
import io.netty.microbench.util.AbstractMicrobenchmark; import io.netty.microbench.util.AbstractMicrobenchmark;
import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode; import org.openjdk.jmh.annotations.BenchmarkMode;
@ -50,14 +44,11 @@ import org.openjdk.jmh.annotations.TearDown;
import org.openjdk.jmh.infra.Blackhole; import org.openjdk.jmh.infra.Blackhole;
import static io.netty.handler.codec.http2.Http2CodecUtil.DEFAULT_HEADER_LIST_SIZE; import static io.netty.handler.codec.http2.Http2CodecUtil.DEFAULT_HEADER_LIST_SIZE;
import static io.netty.handler.codec.http2.Http2CodecUtil.MAX_HEADER_LIST_SIZE;
import static io.netty.handler.codec.http2.Http2CodecUtil.MAX_HEADER_TABLE_SIZE;
import static io.netty.microbench.http2.internal.hpack.HpackUtilBenchmark.newTestEncoder;
public class DecoderBenchmark extends AbstractMicrobenchmark { public class HpackDecoderBenchmark extends AbstractMicrobenchmark {
@Param @Param
public HeadersSize size; public HpackHeadersSize size;
@Param({ "true", "false" }) @Param({ "true", "false" })
public boolean sensitive; public boolean sensitive;
@ -69,7 +60,7 @@ public class DecoderBenchmark extends AbstractMicrobenchmark {
@Setup(Level.Trial) @Setup(Level.Trial)
public void setup() throws Http2Exception { public void setup() throws Http2Exception {
input = Unpooled.wrappedBuffer(getSerializedHeaders(Util.http2Headers(size, limitToAscii), sensitive)); input = Unpooled.wrappedBuffer(getSerializedHeaders(HpackUtil.http2Headers(size, limitToAscii), sensitive));
} }
@TearDown(Level.Trial) @TearDown(Level.Trial)
@ -80,7 +71,7 @@ public class DecoderBenchmark extends AbstractMicrobenchmark {
@Benchmark @Benchmark
@BenchmarkMode(Mode.Throughput) @BenchmarkMode(Mode.Throughput)
public void decode(final Blackhole bh) throws Http2Exception { public void decode(final Blackhole bh) throws Http2Exception {
Decoder decoder = new Decoder(DEFAULT_HEADER_LIST_SIZE, 32); HpackDecoder hpackDecoder = new HpackDecoder(DEFAULT_HEADER_LIST_SIZE, 32);
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
Http2Headers headers = Http2Headers headers =
new DefaultHttp2Headers() { new DefaultHttp2Headers() {
@ -90,14 +81,14 @@ public class DecoderBenchmark extends AbstractMicrobenchmark {
return this; return this;
} }
}; };
decoder.decode(0, input.duplicate(), headers); hpackDecoder.decode(0, input.duplicate(), headers);
} }
private byte[] getSerializedHeaders(Http2Headers headers, boolean sensitive) throws Http2Exception { private byte[] getSerializedHeaders(Http2Headers headers, boolean sensitive) throws Http2Exception {
Encoder encoder = newTestEncoder(); HpackEncoder hpackEncoder = HpackUtilBenchmark.newTestEncoder();
ByteBuf out = size.newOutBuffer(); ByteBuf out = size.newOutBuffer();
try { try {
encoder.encodeHeaders(3 /* randomly chosen */, out, headers, hpackEncoder.encodeHeaders(3 /* randomly chosen */, out, headers,
sensitive ? Http2HeadersEncoder.ALWAYS_SENSITIVE sensitive ? Http2HeadersEncoder.ALWAYS_SENSITIVE
: Http2HeadersEncoder.NEVER_SENSITIVE); : Http2HeadersEncoder.NEVER_SENSITIVE);
byte[] bytes = new byte[out.readableBytes()]; byte[] bytes = new byte[out.readableBytes()];

View File

@ -13,12 +13,10 @@
* License for the specific language governing permissions and limitations * License for the specific language governing permissions and limitations
* under the License. * under the License.
*/ */
package io.netty.microbench.http2.internal.hpack; package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled; import io.netty.buffer.Unpooled;
import io.netty.handler.codec.http2.Http2Error;
import io.netty.handler.codec.http2.Http2Exception;
import io.netty.microbench.util.AbstractMicrobenchmark; import io.netty.microbench.util.AbstractMicrobenchmark;
import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode; import org.openjdk.jmh.annotations.BenchmarkMode;
@ -40,7 +38,7 @@ import java.util.concurrent.TimeUnit;
@Warmup(iterations = 5) @Warmup(iterations = 5)
@Measurement(iterations = 10) @Measurement(iterations = 10)
@OutputTimeUnit(TimeUnit.NANOSECONDS) @OutputTimeUnit(TimeUnit.NANOSECONDS)
public class DecoderULE128Benchmark extends AbstractMicrobenchmark { public class HpackDecoderULE128Benchmark extends AbstractMicrobenchmark {
private static final Http2Exception DECODE_ULE_128_TO_LONG_DECOMPRESSION_EXCEPTION = private static final Http2Exception DECODE_ULE_128_TO_LONG_DECOMPRESSION_EXCEPTION =
new Http2Exception(Http2Error.COMPRESSION_ERROR); new Http2Exception(Http2Error.COMPRESSION_ERROR);
private static final Http2Exception DECODE_ULE_128_TO_INT_DECOMPRESSION_EXCEPTION = private static final Http2Exception DECODE_ULE_128_TO_INT_DECOMPRESSION_EXCEPTION =

View File

@ -29,12 +29,9 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package io.netty.microbench.http2.internal.hpack; package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBuf;
import io.netty.handler.codec.http2.Http2Headers;
import io.netty.handler.codec.http2.Http2HeadersEncoder;
import io.netty.handler.codec.http2.internal.hpack.Encoder;
import io.netty.microbench.util.AbstractMicrobenchmark; import io.netty.microbench.util.AbstractMicrobenchmark;
import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode; import org.openjdk.jmh.annotations.BenchmarkMode;
@ -56,18 +53,16 @@ import java.util.Iterator;
import java.util.Map; import java.util.Map;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import static io.netty.microbench.http2.internal.hpack.HpackUtilBenchmark.newTestEncoder;
@Fork(1) @Fork(1)
@Threads(1) @Threads(1)
@State(Scope.Benchmark) @State(Scope.Benchmark)
@Warmup(iterations = 5) @Warmup(iterations = 5)
@Measurement(iterations = 5) @Measurement(iterations = 5)
@OutputTimeUnit(TimeUnit.NANOSECONDS) @OutputTimeUnit(TimeUnit.NANOSECONDS)
public class EncoderBenchmark extends AbstractMicrobenchmark { public class HpackEncoderBenchmark extends AbstractMicrobenchmark {
@Param @Param
public HeadersSize size; public HpackHeadersSize size;
@Param({ "true", "false" }) @Param({ "true", "false" })
public boolean sensitive; public boolean sensitive;
@ -84,7 +79,7 @@ public class EncoderBenchmark extends AbstractMicrobenchmark {
@Setup(Level.Trial) @Setup(Level.Trial)
public void setup() { public void setup() {
http2Headers = Util.http2Headers(size, limitToAscii); http2Headers = HpackUtil.http2Headers(size, limitToAscii);
if (duplicates) { if (duplicates) {
int size = http2Headers.size(); int size = http2Headers.size();
if (size > 0) { if (size > 0) {
@ -108,9 +103,9 @@ public class EncoderBenchmark extends AbstractMicrobenchmark {
@Benchmark @Benchmark
@BenchmarkMode(Mode.AverageTime) @BenchmarkMode(Mode.AverageTime)
public void encode(Blackhole bh) throws Exception { public void encode(Blackhole bh) throws Exception {
Encoder encoder = newTestEncoder(); HpackEncoder hpackEncoder = HpackUtilBenchmark.newTestEncoder();
output.clear(); output.clear();
encoder.encodeHeaders(3 /*randomly chosen*/, output, http2Headers, sensitivityDetector); hpackEncoder.encodeHeaders(3 /*randomly chosen*/, output, http2Headers, sensitivityDetector);
bh.consume(output); bh.consume(output);
} }
} }

View File

@ -29,7 +29,7 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package io.netty.microbench.http2.internal.hpack; package io.netty.handler.codec.http2;
import io.netty.util.AsciiString; import io.netty.util.AsciiString;
@ -40,14 +40,14 @@ import java.util.Random;
/** /**
* Helper class representing a single header entry. Used by the benchmarks. * Helper class representing a single header entry. Used by the benchmarks.
*/ */
class Header { class HpackHeader {
private static final String ALPHABET = private static final String ALPHABET =
"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_"; "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_";
final CharSequence name; final CharSequence name;
final CharSequence value; final CharSequence value;
Header(byte[] name, byte[] value) { HpackHeader(byte[] name, byte[] value) {
this.name = new AsciiString(name, false); this.name = new AsciiString(name, false);
this.value = new AsciiString(value, false); this.value = new AsciiString(value, false);
} }
@ -55,15 +55,15 @@ class Header {
/** /**
* Creates a number of random headers with the given name/value lengths. * Creates a number of random headers with the given name/value lengths.
*/ */
static List<Header> createHeaders(int numHeaders, int nameLength, int valueLength, static List<HpackHeader> createHeaders(int numHeaders, int nameLength, int valueLength,
boolean limitToAscii) { boolean limitToAscii) {
List<Header> headers = new ArrayList<Header>(numHeaders); List<HpackHeader> hpackHeaders = new ArrayList<HpackHeader>(numHeaders);
for (int i = 0; i < numHeaders; ++i) { for (int i = 0; i < numHeaders; ++i) {
byte[] name = randomBytes(new byte[nameLength], limitToAscii); byte[] name = randomBytes(new byte[nameLength], limitToAscii);
byte[] value = randomBytes(new byte[valueLength], limitToAscii); byte[] value = randomBytes(new byte[valueLength], limitToAscii);
headers.add(new Header(name, value)); hpackHeaders.add(new HpackHeader(name, value));
} }
return headers; return hpackHeaders;
} }
private static byte[] randomBytes(byte[] bytes, boolean limitToAscii) { private static byte[] randomBytes(byte[] bytes, boolean limitToAscii) {

View File

@ -29,7 +29,7 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package io.netty.microbench.http2.internal.hpack; package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled; import io.netty.buffer.Unpooled;
@ -39,7 +39,7 @@ import java.util.List;
/** /**
* Enum that indicates the size of the headers to be used for the benchmark. * Enum that indicates the size of the headers to be used for the benchmark.
*/ */
public enum HeadersSize { public enum HpackHeadersSize {
SMALL(5, 20, 40), SMALL(5, 20, 40),
MEDIUM(20, 40, 80), MEDIUM(20, 40, 80),
LARGE(100, 100, 300); LARGE(100, 100, 300);
@ -48,14 +48,14 @@ public enum HeadersSize {
private final int nameLength; private final int nameLength;
private final int valueLength; private final int valueLength;
HeadersSize(int numHeaders, int nameLength, int valueLength) { HpackHeadersSize(int numHeaders, int nameLength, int valueLength) {
this.numHeaders = numHeaders; this.numHeaders = numHeaders;
this.nameLength = nameLength; this.nameLength = nameLength;
this.valueLength = valueLength; this.valueLength = valueLength;
} }
public List<Header> newHeaders(boolean limitAscii) { public List<HpackHeader> newHeaders(boolean limitAscii) {
return Header.createHeaders(numHeaders, nameLength, valueLength, limitAscii); return HpackHeader.createHeaders(numHeaders, nameLength, valueLength, limitAscii);
} }
public ByteBuf newOutBuffer() { public ByteBuf newOutBuffer() {

View File

@ -29,10 +29,7 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package io.netty.microbench.http2.internal.hpack; package io.netty.handler.codec.http2;
import io.netty.handler.codec.http2.DefaultHttp2Headers;
import io.netty.handler.codec.http2.Http2Headers;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
@ -41,23 +38,23 @@ import java.util.Map;
/** /**
* Utility methods for hpack tests. * Utility methods for hpack tests.
*/ */
public final class Util { public final class HpackUtil {
private Util() { private HpackUtil() {
} }
/** /**
* Internal key used to index a particular set of headers in the map. * Internal key used to index a particular set of headers in the map.
*/ */
private static class HeadersKey { private static class HeadersKey {
final HeadersSize size; final HpackHeadersSize size;
final boolean limitToAscii; final boolean limitToAscii;
public HeadersKey(HeadersSize size, boolean limitToAscii) { public HeadersKey(HpackHeadersSize size, boolean limitToAscii) {
this.size = size; this.size = size;
this.limitToAscii = limitToAscii; this.limitToAscii = limitToAscii;
} }
List<Header> newHeaders() { List<HpackHeader> newHeaders() {
return size.newHeaders(limitToAscii); return size.newHeaders(limitToAscii);
} }
@ -86,12 +83,12 @@ public final class Util {
} }
} }
private static final Map<HeadersKey, List<Header>> headersMap; private static final Map<HeadersKey, List<HpackHeader>> headersMap;
static { static {
HeadersSize[] sizes = HeadersSize.values(); HpackHeadersSize[] sizes = HpackHeadersSize.values();
headersMap = new HashMap<HeadersKey, List<Header>>(sizes.length * 2); headersMap = new HashMap<HeadersKey, List<HpackHeader>>(sizes.length * 2);
for (HeadersSize size : sizes) { for (HpackHeadersSize size : sizes) {
HeadersKey key = new HeadersKey(size, true); HeadersKey key = new HeadersKey(size, true);
headersMap.put(key, key.newHeaders()); headersMap.put(key, key.newHeaders());
@ -103,16 +100,16 @@ public final class Util {
/** /**
* Gets headers for the given size and whether the key/values should be limited to ASCII. * Gets headers for the given size and whether the key/values should be limited to ASCII.
*/ */
static List<Header> headers(HeadersSize size, boolean limitToAscii) { static List<HpackHeader> headers(HpackHeadersSize size, boolean limitToAscii) {
return headersMap.get(new HeadersKey(size, limitToAscii)); return headersMap.get(new HeadersKey(size, limitToAscii));
} }
static Http2Headers http2Headers(HeadersSize size, boolean limitToAscii) { static Http2Headers http2Headers(HpackHeadersSize size, boolean limitToAscii) {
List<Header> headers = headersMap.get(new HeadersKey(size, limitToAscii)); List<HpackHeader> hpackHeaders = headersMap.get(new HeadersKey(size, limitToAscii));
Http2Headers http2Headers = new DefaultHttp2Headers(false); Http2Headers http2Headers = new DefaultHttp2Headers(false);
for (int i = 0; i < headers.size(); ++i) { for (int i = 0; i < hpackHeaders.size(); ++i) {
Header header = headers.get(i); HpackHeader hpackHeader = hpackHeaders.get(i);
http2Headers.add(header.name, header.value); http2Headers.add(hpackHeader.name, hpackHeader.value);
} }
return http2Headers; return http2Headers;
} }

View File

@ -13,12 +13,10 @@
* License for the specific language governing permissions and limitations * License for the specific language governing permissions and limitations
* under the License. * under the License.
*/ */
package io.netty.microbench.http2.internal.hpack; package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled; import io.netty.buffer.Unpooled;
import io.netty.handler.codec.http2.Http2Exception;
import io.netty.handler.codec.http2.internal.hpack.Encoder;
import io.netty.microbench.util.AbstractMicrobenchmark; import io.netty.microbench.util.AbstractMicrobenchmark;
import io.netty.util.AsciiString; import io.netty.util.AsciiString;
import io.netty.util.internal.ConstantTimeUtils; import io.netty.util.internal.ConstantTimeUtils;
@ -41,21 +39,21 @@ import static io.netty.handler.codec.http2.Http2CodecUtil.MAX_HEADER_TABLE_SIZE;
@Measurement(iterations = 5) @Measurement(iterations = 5)
public class HpackUtilBenchmark extends AbstractMicrobenchmark { public class HpackUtilBenchmark extends AbstractMicrobenchmark {
@Param @Param
public HeadersSize size; public HpackHeadersSize size;
private List<Header> headers; private List<HpackHeader> hpackHeaders;
@Setup(Level.Trial) @Setup(Level.Trial)
public void setup() { public void setup() {
headers = Util.headers(size, false); hpackHeaders = HpackUtil.headers(size, false);
} }
@Benchmark @Benchmark
public int oldEquals() { public int oldEquals() {
int count = 0; int count = 0;
for (int i = 0; i < headers.size(); ++i) { for (int i = 0; i < hpackHeaders.size(); ++i) {
Header header = headers.get(i); HpackHeader hpackHeader = hpackHeaders.get(i);
if (oldEquals(header.name, header.name)) { if (oldEquals(hpackHeader.name, hpackHeader.name)) {
++count; ++count;
} }
} }
@ -65,9 +63,9 @@ public class HpackUtilBenchmark extends AbstractMicrobenchmark {
@Benchmark @Benchmark
public int newEquals() { public int newEquals() {
int count = 0; int count = 0;
for (int i = 0; i < headers.size(); ++i) { for (int i = 0; i < hpackHeaders.size(); ++i) {
Header header = headers.get(i); HpackHeader hpackHeader = hpackHeaders.get(i);
if (newEquals(header.name, header.name)) { if (newEquals(hpackHeader.name, hpackHeader.name)) {
++count; ++count;
} }
} }
@ -99,17 +97,17 @@ public class HpackUtilBenchmark extends AbstractMicrobenchmark {
return ConstantTimeUtils.equalsConstantTime(s1, s2) != 0; return ConstantTimeUtils.equalsConstantTime(s1, s2) != 0;
} }
static Encoder newTestEncoder() { static HpackEncoder newTestEncoder() {
Encoder encoder = new Encoder(); HpackEncoder hpackEncoder = new HpackEncoder();
ByteBuf buf = Unpooled.buffer(); ByteBuf buf = Unpooled.buffer();
try { try {
encoder.setMaxHeaderTableSize(buf, MAX_HEADER_TABLE_SIZE); hpackEncoder.setMaxHeaderTableSize(buf, MAX_HEADER_TABLE_SIZE);
encoder.setMaxHeaderListSize(MAX_HEADER_LIST_SIZE); hpackEncoder.setMaxHeaderListSize(MAX_HEADER_LIST_SIZE);
} catch (Http2Exception e) { } catch (Http2Exception e) {
throw new Error("max size not allowed?", e); throw new Error("max size not allowed?", e);
} finally { } finally {
buf.release(); buf.release();
} }
return encoder; return hpackEncoder;
} }
} }

View File

@ -0,0 +1,19 @@
/*
* Copyright 2017 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
/**
* Benchmarks for {@link io.netty.handler.codec.http2}.
*/
package io.netty.handler.codec.http2;

View File

@ -1,36 +0,0 @@
/*
* Copyright 2015 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
/*
* Copyright 2015 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Benchmarks for {@link io.netty.handler.codec.http2.internal.hpack}.
*/
package io.netty.microbench.http2.internal.hpack;