HTTP/2 move internal HPACK classes to the http2 package

Motivation:
The internal.hpack classes are no longer exposed in our public APIs and can be made package private in the http2 package.

Modifications:
- Make the hpack classes package private in the http2 package

Result:
Fewer APIs exposed as public.
This commit is contained in:
Scott Mitchell 2017-03-01 21:30:55 -08:00
parent 7e7e10fb1e
commit f9001b9fc0
43 changed files with 368 additions and 478 deletions

View File

@ -16,7 +16,6 @@
package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBuf;
import io.netty.handler.codec.http2.internal.hpack.Decoder;
import io.netty.util.internal.ObjectUtil;
import io.netty.util.internal.UnstableApi;
@ -29,7 +28,7 @@ public class DefaultHttp2HeadersDecoder implements Http2HeadersDecoder, Http2Hea
private static final float HEADERS_COUNT_WEIGHT_NEW = 1 / 5f;
private static final float HEADERS_COUNT_WEIGHT_HISTORICAL = 1 - HEADERS_COUNT_WEIGHT_NEW;
private final Decoder decoder;
private final HpackDecoder hpackDecoder;
private final boolean validateHeaders;
/**
* Used to calculate an exponential moving average of header sizes to get an estimate of how large the data
@ -68,41 +67,41 @@ public class DefaultHttp2HeadersDecoder implements Http2HeadersDecoder, Http2Hea
*/
public DefaultHttp2HeadersDecoder(boolean validateHeaders, long maxHeaderListSize,
int initialHuffmanDecodeCapacity) {
this(validateHeaders, new Decoder(maxHeaderListSize, initialHuffmanDecodeCapacity));
this(validateHeaders, new HpackDecoder(maxHeaderListSize, initialHuffmanDecodeCapacity));
}
/**
* Exposed for testing only! Default values used in the initial settings frame are overridden intentionally
* for testing but violate the RFC if used outside the scope of testing.
*/
DefaultHttp2HeadersDecoder(boolean validateHeaders, Decoder decoder) {
this.decoder = ObjectUtil.checkNotNull(decoder, "decoder");
DefaultHttp2HeadersDecoder(boolean validateHeaders, HpackDecoder hpackDecoder) {
this.hpackDecoder = ObjectUtil.checkNotNull(hpackDecoder, "hpackDecoder");
this.validateHeaders = validateHeaders;
}
@Override
public void maxHeaderTableSize(long max) throws Http2Exception {
decoder.setMaxHeaderTableSize(max);
hpackDecoder.setMaxHeaderTableSize(max);
}
@Override
public long maxHeaderTableSize() {
return decoder.getMaxHeaderTableSize();
return hpackDecoder.getMaxHeaderTableSize();
}
@Override
public void maxHeaderListSize(long max, long goAwayMax) throws Http2Exception {
decoder.setMaxHeaderListSize(max, goAwayMax);
hpackDecoder.setMaxHeaderListSize(max, goAwayMax);
}
@Override
public long maxHeaderListSize() {
return decoder.getMaxHeaderListSize();
return hpackDecoder.getMaxHeaderListSize();
}
@Override
public long maxHeaderListSizeGoAway() {
return decoder.getMaxHeaderListSizeGoAway();
return hpackDecoder.getMaxHeaderListSizeGoAway();
}
@Override
@ -114,7 +113,7 @@ public class DefaultHttp2HeadersDecoder implements Http2HeadersDecoder, Http2Hea
public Http2Headers decodeHeaders(int streamId, ByteBuf headerBlock) throws Http2Exception {
try {
final Http2Headers headers = new DefaultHttp2Headers(validateHeaders, (int) headerArraySizeAccumulator);
decoder.decode(streamId, headerBlock, headers);
hpackDecoder.decode(streamId, headerBlock, headers);
headerArraySizeAccumulator = HEADERS_COUNT_WEIGHT_NEW * headers.size() +
HEADERS_COUNT_WEIGHT_HISTORICAL * headerArraySizeAccumulator;
return headers;

View File

@ -17,7 +17,6 @@ package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.handler.codec.http2.internal.hpack.Encoder;
import io.netty.util.internal.UnstableApi;
import static io.netty.handler.codec.http2.Http2Error.COMPRESSION_ERROR;
@ -26,7 +25,7 @@ import static io.netty.util.internal.ObjectUtil.checkNotNull;
@UnstableApi
public class DefaultHttp2HeadersEncoder implements Http2HeadersEncoder, Http2HeadersEncoder.Configuration {
private final Encoder encoder;
private final HpackEncoder hpackEncoder;
private final SensitivityDetector sensitivityDetector;
private final ByteBuf tableSizeChangeOutput = Unpooled.buffer();
@ -35,38 +34,38 @@ public class DefaultHttp2HeadersEncoder implements Http2HeadersEncoder, Http2Hea
}
public DefaultHttp2HeadersEncoder(SensitivityDetector sensitivityDetector) {
this(sensitivityDetector, new Encoder());
this(sensitivityDetector, new HpackEncoder());
}
public DefaultHttp2HeadersEncoder(SensitivityDetector sensitivityDetector, boolean ignoreMaxHeaderListSize) {
this(sensitivityDetector, new Encoder(ignoreMaxHeaderListSize));
this(sensitivityDetector, new HpackEncoder(ignoreMaxHeaderListSize));
}
public DefaultHttp2HeadersEncoder(SensitivityDetector sensitivityDetector, boolean ignoreMaxHeaderListSize,
int dynamicTableArraySizeHint) {
this(sensitivityDetector, new Encoder(ignoreMaxHeaderListSize, dynamicTableArraySizeHint));
this(sensitivityDetector, new HpackEncoder(ignoreMaxHeaderListSize, dynamicTableArraySizeHint));
}
/**
* Exposed for testing only! Default values used in the initial settings frame are overridden intentionally
* for testing but violate the RFC if used outside the scope of testing.
*/
DefaultHttp2HeadersEncoder(SensitivityDetector sensitivityDetector, Encoder encoder) {
DefaultHttp2HeadersEncoder(SensitivityDetector sensitivityDetector, HpackEncoder hpackEncoder) {
this.sensitivityDetector = checkNotNull(sensitivityDetector, "sensitiveDetector");
this.encoder = checkNotNull(encoder, "encoder");
this.hpackEncoder = checkNotNull(hpackEncoder, "hpackEncoder");
}
@Override
public void encodeHeaders(int streamId, Http2Headers headers, ByteBuf buffer) throws Http2Exception {
try {
// If there was a change in the table size, serialize the output from the encoder
// If there was a change in the table size, serialize the output from the hpackEncoder
// resulting from that change.
if (tableSizeChangeOutput.isReadable()) {
buffer.writeBytes(tableSizeChangeOutput);
tableSizeChangeOutput.clear();
}
encoder.encodeHeaders(streamId, buffer, headers, sensitivityDetector);
hpackEncoder.encodeHeaders(streamId, buffer, headers, sensitivityDetector);
} catch (Http2Exception e) {
throw e;
} catch (Throwable t) {
@ -76,22 +75,22 @@ public class DefaultHttp2HeadersEncoder implements Http2HeadersEncoder, Http2Hea
@Override
public void maxHeaderTableSize(long max) throws Http2Exception {
encoder.setMaxHeaderTableSize(tableSizeChangeOutput, max);
hpackEncoder.setMaxHeaderTableSize(tableSizeChangeOutput, max);
}
@Override
public long maxHeaderTableSize() {
return encoder.getMaxHeaderTableSize();
return hpackEncoder.getMaxHeaderTableSize();
}
@Override
public void maxHeaderListSize(long max) throws Http2Exception {
encoder.setMaxHeaderListSize(max);
hpackEncoder.setMaxHeaderListSize(max);
}
@Override
public long maxHeaderListSize() {
return encoder.getMaxHeaderListSize();
return hpackEncoder.getMaxHeaderListSize();
}
@Override

View File

@ -29,13 +29,10 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.netty.handler.codec.http2.internal.hpack;
package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBuf;
import io.netty.handler.codec.http2.Http2CodecUtil;
import io.netty.handler.codec.http2.Http2Exception;
import io.netty.handler.codec.http2.Http2Headers;
import io.netty.handler.codec.http2.internal.hpack.HpackUtil.IndexType;
import io.netty.handler.codec.http2.HpackUtil.IndexType;
import io.netty.util.AsciiString;
import static io.netty.handler.codec.http2.Http2CodecUtil.DEFAULT_HEADER_TABLE_SIZE;
@ -52,25 +49,26 @@ import static io.netty.util.AsciiString.EMPTY_STRING;
import static io.netty.util.internal.ObjectUtil.checkPositive;
import static io.netty.util.internal.ThrowableUtil.unknownStackTrace;
public final class Decoder {
final class HpackDecoder {
private static final Http2Exception DECODE_ULE_128_DECOMPRESSION_EXCEPTION = unknownStackTrace(
connectionError(COMPRESSION_ERROR, "HPACK - decompression failure"), Decoder.class, "decodeULE128(...)");
connectionError(COMPRESSION_ERROR, "HPACK - decompression failure"), HpackDecoder.class,
"decodeULE128(..)");
private static final Http2Exception DECODE_ULE_128_TO_LONG_DECOMPRESSION_EXCEPTION = unknownStackTrace(
connectionError(COMPRESSION_ERROR, "HPACK - long overflow"), Decoder.class, "decodeULE128(...)");
connectionError(COMPRESSION_ERROR, "HPACK - long overflow"), HpackDecoder.class, "decodeULE128(..)");
private static final Http2Exception DECODE_ULE_128_TO_INT_DECOMPRESSION_EXCEPTION = unknownStackTrace(
connectionError(COMPRESSION_ERROR, "HPACK - int overflow"), Decoder.class, "decodeULE128ToInt(...)");
connectionError(COMPRESSION_ERROR, "HPACK - int overflow"), HpackDecoder.class, "decodeULE128ToInt(..)");
private static final Http2Exception DECODE_ILLEGAL_INDEX_VALUE = unknownStackTrace(
connectionError(COMPRESSION_ERROR, "HPACK - illegal index value"), Decoder.class, "decode(...)");
connectionError(COMPRESSION_ERROR, "HPACK - illegal index value"), HpackDecoder.class, "decode(..)");
private static final Http2Exception INDEX_HEADER_ILLEGAL_INDEX_VALUE = unknownStackTrace(
connectionError(COMPRESSION_ERROR, "HPACK - illegal index value"), Decoder.class, "indexHeader(...)");
connectionError(COMPRESSION_ERROR, "HPACK - illegal index value"), HpackDecoder.class, "indexHeader(..)");
private static final Http2Exception READ_NAME_ILLEGAL_INDEX_VALUE = unknownStackTrace(
connectionError(COMPRESSION_ERROR, "HPACK - illegal index value"), Decoder.class, "readName(...)");
connectionError(COMPRESSION_ERROR, "HPACK - illegal index value"), HpackDecoder.class, "readName(..)");
private static final Http2Exception INVALID_MAX_DYNAMIC_TABLE_SIZE = unknownStackTrace(
connectionError(COMPRESSION_ERROR, "HPACK - invalid max dynamic table size"), Decoder.class,
"setDynamicTableSize(...)");
connectionError(COMPRESSION_ERROR, "HPACK - invalid max dynamic table size"), HpackDecoder.class,
"setDynamicTableSize(..)");
private static final Http2Exception MAX_DYNAMIC_TABLE_SIZE_CHANGE_REQUIRED = unknownStackTrace(
connectionError(COMPRESSION_ERROR, "HPACK - max dynamic table size change required"), Decoder.class,
"decode(...)");
connectionError(COMPRESSION_ERROR, "HPACK - max dynamic table size change required"), HpackDecoder.class,
"decode(..)");
private static final byte READ_HEADER_REPRESENTATION = 0;
private static final byte READ_MAX_DYNAMIC_TABLE_SIZE = 1;
private static final byte READ_INDEXED_HEADER = 2;
@ -82,8 +80,8 @@ public final class Decoder {
private static final byte READ_LITERAL_HEADER_VALUE_LENGTH = 8;
private static final byte READ_LITERAL_HEADER_VALUE = 9;
private final DynamicTable dynamicTable;
private final HuffmanDecoder huffmanDecoder;
private final HpackDynamicTable hpackDynamicTable;
private final HpackHuffmanDecoder hpackHuffmanDecoder;
private long maxHeaderListSizeGoAway;
private long maxHeaderListSize;
private long maxDynamicTableSize;
@ -98,7 +96,7 @@ public final class Decoder {
* (which is dangerous).
* @param initialHuffmanDecodeCapacity Size of an intermediate buffer used during huffman decode.
*/
public Decoder(long maxHeaderListSize, int initialHuffmanDecodeCapacity) {
HpackDecoder(long maxHeaderListSize, int initialHuffmanDecodeCapacity) {
this(maxHeaderListSize, initialHuffmanDecodeCapacity, DEFAULT_HEADER_TABLE_SIZE);
}
@ -106,14 +104,14 @@ public final class Decoder {
* Exposed for testing only! Default values used in the initial settings frame are overridden intentionally
* for testing but violate the RFC if used outside the scope of testing.
*/
Decoder(long maxHeaderListSize, int initialHuffmanDecodeCapacity, int maxHeaderTableSize) {
HpackDecoder(long maxHeaderListSize, int initialHuffmanDecodeCapacity, int maxHeaderTableSize) {
this.maxHeaderListSize = checkPositive(maxHeaderListSize, "maxHeaderListSize");
this.maxHeaderListSizeGoAway = Http2CodecUtil.calculateMaxHeaderListSizeGoAway(maxHeaderListSize);
maxDynamicTableSize = encoderMaxDynamicTableSize = maxHeaderTableSize;
maxDynamicTableSizeChangeRequired = false;
dynamicTable = new DynamicTable(maxHeaderTableSize);
huffmanDecoder = new HuffmanDecoder(initialHuffmanDecodeCapacity);
hpackDynamicTable = new HpackDynamicTable(maxHeaderTableSize);
hpackHuffmanDecoder = new HpackHuffmanDecoder(initialHuffmanDecodeCapacity);
}
/**
@ -135,7 +133,7 @@ public final class Decoder {
case READ_HEADER_REPRESENTATION:
byte b = in.readByte();
if (maxDynamicTableSizeChangeRequired && (b & 0xE0) != 0x20) {
// Encoder MUST signal maximum dynamic table size change
// HpackEncoder MUST signal maximum dynamic table size change
throw MAX_DYNAMIC_TABLE_SIZE_CHANGE_REQUIRED;
}
if (b < 0) {
@ -319,7 +317,7 @@ public final class Decoder {
// decoder requires less space than encoder
// encoder MUST signal this change
maxDynamicTableSizeChangeRequired = true;
dynamicTable.setCapacity(maxDynamicTableSize);
hpackDynamicTable.setCapacity(maxDynamicTableSize);
}
}
@ -349,28 +347,28 @@ public final class Decoder {
* decoder.
*/
public long getMaxHeaderTableSize() {
return dynamicTable.capacity();
return hpackDynamicTable.capacity();
}
/**
* Return the number of header fields in the dynamic table. Exposed for testing.
*/
int length() {
return dynamicTable.length();
return hpackDynamicTable.length();
}
/**
* Return the size of the dynamic table. Exposed for testing.
*/
long size() {
return dynamicTable.size();
return hpackDynamicTable.size();
}
/**
* Return the header field at the given index. Exposed for testing.
*/
HeaderField getHeaderField(int index) {
return dynamicTable.getEntry(index + 1);
HpackHeaderField getHeaderField(int index) {
return hpackDynamicTable.getEntry(index + 1);
}
private void setDynamicTableSize(long dynamicTableSize) throws Http2Exception {
@ -379,29 +377,29 @@ public final class Decoder {
}
encoderMaxDynamicTableSize = dynamicTableSize;
maxDynamicTableSizeChangeRequired = false;
dynamicTable.setCapacity(dynamicTableSize);
hpackDynamicTable.setCapacity(dynamicTableSize);
}
private CharSequence readName(int index) throws Http2Exception {
if (index <= StaticTable.length) {
HeaderField headerField = StaticTable.getEntry(index);
return headerField.name;
if (index <= HpackStaticTable.length) {
HpackHeaderField hpackHeaderField = HpackStaticTable.getEntry(index);
return hpackHeaderField.name;
}
if (index - StaticTable.length <= dynamicTable.length()) {
HeaderField headerField = dynamicTable.getEntry(index - StaticTable.length);
return headerField.name;
if (index - HpackStaticTable.length <= hpackDynamicTable.length()) {
HpackHeaderField hpackHeaderField = hpackDynamicTable.getEntry(index - HpackStaticTable.length);
return hpackHeaderField.name;
}
throw READ_NAME_ILLEGAL_INDEX_VALUE;
}
private long indexHeader(int streamId, int index, Http2Headers headers, long headersLength) throws Http2Exception {
if (index <= StaticTable.length) {
HeaderField headerField = StaticTable.getEntry(index);
return addHeader(streamId, headers, headerField.name, headerField.value, headersLength);
if (index <= HpackStaticTable.length) {
HpackHeaderField hpackHeaderField = HpackStaticTable.getEntry(index);
return addHeader(streamId, headers, hpackHeaderField.name, hpackHeaderField.value, headersLength);
}
if (index - StaticTable.length <= dynamicTable.length()) {
HeaderField headerField = dynamicTable.getEntry(index - StaticTable.length);
return addHeader(streamId, headers, headerField.name, headerField.value, headersLength);
if (index - HpackStaticTable.length <= hpackDynamicTable.length()) {
HpackHeaderField hpackHeaderField = hpackDynamicTable.getEntry(index - HpackStaticTable.length);
return addHeader(streamId, headers, hpackHeaderField.name, hpackHeaderField.value, headersLength);
}
throw INDEX_HEADER_ILLEGAL_INDEX_VALUE;
}
@ -416,7 +414,7 @@ public final class Decoder {
break;
case INCREMENTAL:
dynamicTable.add(new HeaderField(name, value));
hpackDynamicTable.add(new HpackHeaderField(name, value));
break;
default:
@ -438,7 +436,7 @@ public final class Decoder {
private CharSequence readStringLiteral(ByteBuf in, int length, boolean huffmanEncoded) throws Http2Exception {
if (huffmanEncoded) {
return huffmanDecoder.decode(in, length);
return hpackHuffmanDecoder.decode(in, length);
}
byte[] buf = new byte[length];
in.readBytes(buf);

View File

@ -29,16 +29,15 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.netty.handler.codec.http2.internal.hpack;
package io.netty.handler.codec.http2;
import static io.netty.handler.codec.http2.Http2CodecUtil.MAX_HEADER_TABLE_SIZE;
import static io.netty.handler.codec.http2.Http2CodecUtil.MIN_HEADER_TABLE_SIZE;
import static io.netty.handler.codec.http2.internal.hpack.HeaderField.HEADER_ENTRY_OVERHEAD;
final class DynamicTable {
final class HpackDynamicTable {
// a circular queue of header fields
HeaderField[] headerFields;
HpackHeaderField[] hpackHeaderFields;
int head;
int tail;
private long size;
@ -47,7 +46,7 @@ final class DynamicTable {
/**
* Creates a new dynamic table with the specified initial capacity.
*/
DynamicTable(long initialCapacity) {
HpackDynamicTable(long initialCapacity) {
setCapacity(initialCapacity);
}
@ -57,7 +56,7 @@ final class DynamicTable {
public int length() {
int length;
if (head < tail) {
length = headerFields.length - tail + head;
length = hpackHeaderFields.length - tail + head;
} else {
length = head - tail;
}
@ -82,15 +81,15 @@ final class DynamicTable {
* Return the header field at the given index. The first and newest entry is always at index 1,
* and the oldest entry is at the index length().
*/
public HeaderField getEntry(int index) {
public HpackHeaderField getEntry(int index) {
if (index <= 0 || index > length()) {
throw new IndexOutOfBoundsException();
}
int i = head - index;
if (i < 0) {
return headerFields[i + headerFields.length];
return hpackHeaderFields[i + hpackHeaderFields.length];
} else {
return headerFields[i];
return hpackHeaderFields[i];
}
}
@ -100,7 +99,7 @@ final class DynamicTable {
* If the size of the new entry is larger than the table's capacity, the dynamic table will be
* cleared.
*/
public void add(HeaderField header) {
public void add(HpackHeaderField header) {
int headerSize = header.size();
if (headerSize > capacity) {
clear();
@ -109,9 +108,9 @@ final class DynamicTable {
while (capacity - size < headerSize) {
remove();
}
headerFields[head++] = header;
hpackHeaderFields[head++] = header;
size += header.size();
if (head == headerFields.length) {
if (head == hpackHeaderFields.length) {
head = 0;
}
}
@ -119,14 +118,14 @@ final class DynamicTable {
/**
* Remove and return the oldest header field from the dynamic table.
*/
public HeaderField remove() {
HeaderField removed = headerFields[tail];
public HpackHeaderField remove() {
HpackHeaderField removed = hpackHeaderFields[tail];
if (removed == null) {
return null;
}
size -= removed.size();
headerFields[tail++] = null;
if (tail == headerFields.length) {
hpackHeaderFields[tail++] = null;
if (tail == hpackHeaderFields.length) {
tail = 0;
}
return removed;
@ -137,8 +136,8 @@ final class DynamicTable {
*/
public void clear() {
while (tail != head) {
headerFields[tail++] = null;
if (tail == headerFields.length) {
hpackHeaderFields[tail++] = null;
if (tail == hpackHeaderFields.length) {
tail = 0;
}
}
@ -170,31 +169,31 @@ final class DynamicTable {
}
}
int maxEntries = (int) (capacity / HEADER_ENTRY_OVERHEAD);
if (capacity % HEADER_ENTRY_OVERHEAD != 0) {
int maxEntries = (int) (capacity / HpackHeaderField.HEADER_ENTRY_OVERHEAD);
if (capacity % HpackHeaderField.HEADER_ENTRY_OVERHEAD != 0) {
maxEntries++;
}
// check if capacity change requires us to reallocate the array
if (headerFields != null && headerFields.length == maxEntries) {
if (hpackHeaderFields != null && hpackHeaderFields.length == maxEntries) {
return;
}
HeaderField[] tmp = new HeaderField[maxEntries];
HpackHeaderField[] tmp = new HpackHeaderField[maxEntries];
// initially length will be 0 so there will be no copy
int len = length();
int cursor = tail;
for (int i = 0; i < len; i++) {
HeaderField entry = headerFields[cursor++];
HpackHeaderField entry = hpackHeaderFields[cursor++];
tmp[i] = entry;
if (cursor == headerFields.length) {
if (cursor == hpackHeaderFields.length) {
cursor = 0;
}
}
tail = 0;
head = tail + len;
headerFields = tmp;
hpackHeaderFields = tmp;
}
}

View File

@ -29,11 +29,10 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.netty.handler.codec.http2.internal.hpack;
package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBuf;
import io.netty.handler.codec.http2.Http2Exception;
import io.netty.handler.codec.http2.Http2Headers;
import io.netty.handler.codec.http2.HpackUtil.IndexType;
import io.netty.handler.codec.http2.Http2HeadersEncoder.SensitivityDetector;
import io.netty.util.AsciiString;
import io.netty.util.CharsetUtil;
@ -41,6 +40,7 @@ import io.netty.util.CharsetUtil;
import java.util.Arrays;
import java.util.Map;
import static io.netty.handler.codec.http2.HpackUtil.equalsConstantTime;
import static io.netty.handler.codec.http2.Http2CodecUtil.DEFAULT_HEADER_LIST_SIZE;
import static io.netty.handler.codec.http2.Http2CodecUtil.DEFAULT_HEADER_TABLE_SIZE;
import static io.netty.handler.codec.http2.Http2CodecUtil.MAX_HEADER_LIST_SIZE;
@ -50,21 +50,16 @@ import static io.netty.handler.codec.http2.Http2CodecUtil.MIN_HEADER_TABLE_SIZE;
import static io.netty.handler.codec.http2.Http2CodecUtil.headerListSizeExceeded;
import static io.netty.handler.codec.http2.Http2Error.PROTOCOL_ERROR;
import static io.netty.handler.codec.http2.Http2Exception.connectionError;
import static io.netty.handler.codec.http2.internal.hpack.HeaderField.sizeOf;
import static io.netty.handler.codec.http2.internal.hpack.HpackUtil.IndexType.INCREMENTAL;
import static io.netty.handler.codec.http2.internal.hpack.HpackUtil.IndexType.NEVER;
import static io.netty.handler.codec.http2.internal.hpack.HpackUtil.IndexType.NONE;
import static io.netty.handler.codec.http2.internal.hpack.HpackUtil.equalsConstantTime;
import static io.netty.util.internal.MathUtil.findNextPositivePowerOfTwo;
import static java.lang.Math.max;
import static java.lang.Math.min;
public final class Encoder {
final class HpackEncoder {
// a linked hash map of header fields
private final HeaderEntry[] headerFields;
private final HeaderEntry head = new HeaderEntry(-1, AsciiString.EMPTY_STRING,
AsciiString.EMPTY_STRING, Integer.MAX_VALUE, null);
private final HuffmanEncoder huffmanEncoder = new HuffmanEncoder();
private final HpackHuffmanEncoder hpackHuffmanEncoder = new HpackHuffmanEncoder();
private final byte hashMask;
private final boolean ignoreMaxHeaderListSize;
private long size;
@ -74,21 +69,21 @@ public final class Encoder {
/**
* Creates a new encoder.
*/
public Encoder() {
HpackEncoder() {
this(false);
}
/**
* Creates a new encoder.
*/
public Encoder(boolean ignoreMaxHeaderListSize) {
public HpackEncoder(boolean ignoreMaxHeaderListSize) {
this(ignoreMaxHeaderListSize, 16);
}
/**
* Creates a new encoder.
*/
public Encoder(boolean ignoreMaxHeaderListSize, int arraySizeHint) {
public HpackEncoder(boolean ignoreMaxHeaderListSize, int arraySizeHint) {
this.ignoreMaxHeaderListSize = ignoreMaxHeaderListSize;
maxHeaderTableSize = DEFAULT_HEADER_TABLE_SIZE;
maxHeaderListSize = DEFAULT_HEADER_LIST_SIZE;
@ -123,7 +118,7 @@ public final class Encoder {
CharSequence value = header.getValue();
// OK to increment now and check for bounds after because this value is limited to unsigned int and will not
// overflow.
headerSize += sizeOf(name, value);
headerSize += HpackHeaderField.sizeOf(name, value);
if (headerSize > maxHeaderListSize) {
headerListSizeExceeded(streamId, maxHeaderListSize, false);
}
@ -136,7 +131,8 @@ public final class Encoder {
for (Map.Entry<CharSequence, CharSequence> header : headers) {
CharSequence name = header.getKey();
CharSequence value = header.getValue();
encodeHeader(out, name, value, sensitivityDetector.isSensitive(name, value), sizeOf(name, value));
encodeHeader(out, name, value, sensitivityDetector.isSensitive(name, value),
HpackHeaderField.sizeOf(name, value));
}
}
@ -149,16 +145,16 @@ public final class Encoder {
// If the header value is sensitive then it must never be indexed
if (sensitive) {
int nameIndex = getNameIndex(name);
encodeLiteral(out, name, value, NEVER, nameIndex);
encodeLiteral(out, name, value, IndexType.NEVER, nameIndex);
return;
}
// If the peer will only use the static table
if (maxHeaderTableSize == 0) {
int staticTableIndex = StaticTable.getIndex(name, value);
int staticTableIndex = HpackStaticTable.getIndex(name, value);
if (staticTableIndex == -1) {
int nameIndex = StaticTable.getIndex(name);
encodeLiteral(out, name, value, NONE, nameIndex);
int nameIndex = HpackStaticTable.getIndex(name);
encodeLiteral(out, name, value, IndexType.NONE, nameIndex);
} else {
encodeInteger(out, 0x80, 7, staticTableIndex);
}
@ -168,23 +164,23 @@ public final class Encoder {
// If the headerSize is greater than the max table size then it must be encoded literally
if (headerSize > maxHeaderTableSize) {
int nameIndex = getNameIndex(name);
encodeLiteral(out, name, value, NONE, nameIndex);
encodeLiteral(out, name, value, IndexType.NONE, nameIndex);
return;
}
HeaderEntry headerField = getEntry(name, value);
if (headerField != null) {
int index = getIndex(headerField.index) + StaticTable.length;
int index = getIndex(headerField.index) + HpackStaticTable.length;
// Section 6.1. Indexed Header Field Representation
encodeInteger(out, 0x80, 7, index);
} else {
int staticTableIndex = StaticTable.getIndex(name, value);
int staticTableIndex = HpackStaticTable.getIndex(name, value);
if (staticTableIndex != -1) {
// Section 6.1. Indexed Header Field Representation
encodeInteger(out, 0x80, 7, staticTableIndex);
} else {
ensureCapacity(headerSize);
encodeLiteral(out, name, value, INCREMENTAL, getNameIndex(name));
encodeLiteral(out, name, value, IndexType.INCREMENTAL, getNameIndex(name));
add(name, value, headerSize);
}
}
@ -255,10 +251,10 @@ public final class Encoder {
* Encode string literal according to Section 5.2.
*/
private void encodeStringLiteral(ByteBuf out, CharSequence string) {
int huffmanLength = huffmanEncoder.getEncodedLength(string);
int huffmanLength = hpackHuffmanEncoder.getEncodedLength(string);
if (huffmanLength < string.length()) {
encodeInteger(out, 0x80, 7, huffmanLength);
huffmanEncoder.encode(out, string);
hpackHuffmanEncoder.encode(out, string);
} else {
encodeInteger(out, 0x00, 7, string.length());
if (string instanceof AsciiString) {
@ -276,7 +272,7 @@ public final class Encoder {
/**
* Encode literal header field according to Section 6.2.
*/
private void encodeLiteral(ByteBuf out, CharSequence name, CharSequence value, HpackUtil.IndexType indexType,
private void encodeLiteral(ByteBuf out, CharSequence name, CharSequence value, IndexType indexType,
int nameIndex) {
boolean nameIndexValid = nameIndex != -1;
switch (indexType) {
@ -299,11 +295,11 @@ public final class Encoder {
}
private int getNameIndex(CharSequence name) {
int index = StaticTable.getIndex(name);
int index = HpackStaticTable.getIndex(name);
if (index == -1) {
index = getIndex(name);
if (index >= 0) {
index += StaticTable.length;
index += HpackStaticTable.length;
}
}
return index;
@ -340,7 +336,7 @@ public final class Encoder {
/**
* Return the header field at the given index. Exposed for testing.
*/
HeaderField getHeaderField(int index) {
HpackHeaderField getHeaderField(int index) {
HeaderEntry entry = head;
while (index-- >= 0) {
entry = entry.before;
@ -421,7 +417,7 @@ public final class Encoder {
/**
* Remove and return the oldest header field from the dynamic table.
*/
private HeaderField remove() {
private HpackHeaderField remove() {
if (size == 0) {
return null;
}
@ -465,9 +461,9 @@ public final class Encoder {
}
/**
* A linked hash map HeaderField entry.
* A linked hash map HpackHeaderField entry.
*/
private static class HeaderEntry extends HeaderField {
private static final class HeaderEntry extends HpackHeaderField {
// These fields comprise the doubly linked list used for iteration.
HeaderEntry before, after;

View File

@ -29,12 +29,11 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.netty.handler.codec.http2.internal.hpack;
package io.netty.handler.codec.http2;
import static io.netty.handler.codec.http2.internal.hpack.HpackUtil.equalsConstantTime;
import static io.netty.util.internal.ObjectUtil.checkNotNull;
class HeaderField {
class HpackHeaderField {
// Section 4.1. Calculating Table Size
// The additional 32 octets account for an estimated
@ -49,32 +48,32 @@ class HeaderField {
final CharSequence value;
// This constructor can only be used if name and value are ISO-8859-1 encoded.
HeaderField(CharSequence name, CharSequence value) {
HpackHeaderField(CharSequence name, CharSequence value) {
this.name = checkNotNull(name, "name");
this.value = checkNotNull(value, "value");
}
int size() {
final int size() {
return name.length() + value.length() + HEADER_ENTRY_OVERHEAD;
}
@Override
public int hashCode() {
public final int hashCode() {
// TODO(nmittler): Netty's build rules require this. Probably need a better implementation.
return super.hashCode();
}
@Override
public boolean equals(Object obj) {
public final boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof HeaderField)) {
if (!(obj instanceof HpackHeaderField)) {
return false;
}
HeaderField other = (HeaderField) obj;
HpackHeaderField other = (HpackHeaderField) obj;
// To avoid short circuit behavior a bitwise operator is used instead of a boolean operator.
return (equalsConstantTime(name, other.name) & equalsConstantTime(value, other.value)) != 0;
return (HpackUtil.equalsConstantTime(name, other.name) & HpackUtil.equalsConstantTime(value, other.value)) != 0;
}
@Override

View File

@ -29,10 +29,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.netty.handler.codec.http2.internal.hpack;
package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBuf;
import io.netty.handler.codec.http2.Http2Exception;
import io.netty.util.AsciiString;
import io.netty.util.ByteProcessor;
import io.netty.util.internal.ObjectUtil;
@ -40,21 +39,19 @@ import io.netty.util.internal.ThrowableUtil;
import static io.netty.handler.codec.http2.Http2Error.COMPRESSION_ERROR;
import static io.netty.handler.codec.http2.Http2Exception.connectionError;
import static io.netty.handler.codec.http2.internal.hpack.HpackUtil.HUFFMAN_CODES;
import static io.netty.handler.codec.http2.internal.hpack.HpackUtil.HUFFMAN_CODE_LENGTHS;
final class HuffmanDecoder {
final class HpackHuffmanDecoder {
private static final Http2Exception EOS_DECODED = ThrowableUtil.unknownStackTrace(
connectionError(COMPRESSION_ERROR, "HPACK - EOS Decoded"), HuffmanDecoder.class, "decode(...)");
connectionError(COMPRESSION_ERROR, "HPACK - EOS Decoded"), HpackHuffmanDecoder.class, "decode(..)");
private static final Http2Exception INVALID_PADDING = ThrowableUtil.unknownStackTrace(
connectionError(COMPRESSION_ERROR, "HPACK - Invalid Padding"), HuffmanDecoder.class, "decode(...)");
connectionError(COMPRESSION_ERROR, "HPACK - Invalid Padding"), HpackHuffmanDecoder.class, "decode(..)");
private static final Node ROOT = buildTree(HUFFMAN_CODES, HUFFMAN_CODE_LENGTHS);
private static final Node ROOT = buildTree(HpackUtil.HUFFMAN_CODES, HpackUtil.HUFFMAN_CODE_LENGTHS);
private final DecoderProcessor processor;
HuffmanDecoder(int initialCapacity) {
HpackHuffmanDecoder(int initialCapacity) {
processor = new DecoderProcessor(initialCapacity);
}

View File

@ -29,7 +29,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.netty.handler.codec.http2.internal.hpack;
package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBuf;
import io.netty.util.AsciiString;
@ -37,19 +37,15 @@ import io.netty.util.ByteProcessor;
import io.netty.util.internal.ObjectUtil;
import io.netty.util.internal.PlatformDependent;
import static io.netty.handler.codec.http2.internal.hpack.HpackUtil.HUFFMAN_CODES;
import static io.netty.handler.codec.http2.internal.hpack.HpackUtil.HUFFMAN_CODE_LENGTHS;
final class HuffmanEncoder {
final class HpackHuffmanEncoder {
private final int[] codes;
private final byte[] lengths;
private final EncodedLengthProcessor encodedLengthProcessor = new EncodedLengthProcessor();
private final EncodeProcessor encodeProcessor = new EncodeProcessor();
HuffmanEncoder() {
this(HUFFMAN_CODES, HUFFMAN_CODE_LENGTHS);
HpackHuffmanEncoder() {
this(HpackUtil.HUFFMAN_CODES, HpackUtil.HUFFMAN_CODE_LENGTHS);
}
/**
@ -58,7 +54,7 @@ final class HuffmanEncoder {
* @param codes the Huffman codes indexed by symbol
* @param lengths the length of each Huffman code
*/
private HuffmanEncoder(int[] codes, byte[] lengths) {
private HpackHuffmanEncoder(int[] codes, byte[] lengths) {
this.codes = codes;
this.lengths = lengths;
}

View File

@ -29,22 +29,21 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.netty.handler.codec.http2.internal.hpack;
package io.netty.handler.codec.http2;
import io.netty.handler.codec.UnsupportedValueConverter;
import io.netty.handler.codec.http2.CharSequenceMap;
import io.netty.util.AsciiString;
import java.util.Arrays;
import java.util.List;
import static io.netty.handler.codec.http2.internal.hpack.HpackUtil.equalsConstantTime;
import static io.netty.handler.codec.http2.HpackUtil.equalsConstantTime;
final class StaticTable {
final class HpackStaticTable {
// Appendix A: Static Table
// http://tools.ietf.org/html/rfc7541#appendix-A
private static final List<HeaderField> STATIC_TABLE = Arrays.asList(
private static final List<HpackHeaderField> STATIC_TABLE = Arrays.asList(
/* 1 */ newEmptyHeaderField(":authority"),
/* 2 */ newHeaderField(":method", "GET"),
/* 3 */ newHeaderField(":method", "POST"),
@ -108,12 +107,12 @@ final class StaticTable {
/* 61 */ newEmptyHeaderField("www-authenticate")
);
private static HeaderField newEmptyHeaderField(CharSequence name) {
private static HpackHeaderField newEmptyHeaderField(CharSequence name) {
return newHeaderField(name, AsciiString.EMPTY_STRING);
}
private static HeaderField newHeaderField(CharSequence name, CharSequence value) {
return new HeaderField(AsciiString.of(name), AsciiString.of(value));
private static HpackHeaderField newHeaderField(CharSequence name, CharSequence value) {
return new HpackHeaderField(AsciiString.of(name), AsciiString.of(value));
}
private static final CharSequenceMap<Integer> STATIC_INDEX_BY_NAME = createMap();
@ -126,7 +125,7 @@ final class StaticTable {
/**
* Return the header field at the given index value.
*/
static HeaderField getEntry(int index) {
static HpackHeaderField getEntry(int index) {
return STATIC_TABLE.get(index - 1);
}
@ -154,7 +153,7 @@ final class StaticTable {
// Note this assumes all entries for a given header field are sequential.
while (index <= length) {
HeaderField entry = getEntry(index);
HpackHeaderField entry = getEntry(index);
if (equalsConstantTime(name, entry.name) == 0) {
break;
}
@ -176,7 +175,7 @@ final class StaticTable {
// Iterate through the static table in reverse order to
// save the smallest index for a given name in the map.
for (int index = length; index > 0; index--) {
HeaderField entry = getEntry(index);
HpackHeaderField entry = getEntry(index);
CharSequence name = entry.name;
ret.set(name, index);
}
@ -184,6 +183,6 @@ final class StaticTable {
}
// singleton
private StaticTable() {
private HpackStaticTable() {
}
}

View File

@ -29,7 +29,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.netty.handler.codec.http2.internal.hpack;
package io.netty.handler.codec.http2;
import io.netty.util.AsciiString;
import io.netty.util.internal.ConstantTimeUtils;

View File

@ -1,38 +0,0 @@
/*
* Copyright 2015 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
/*
* Copyright 2014 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* <a href="http://tools.ietf.org/html/rfc7541">HPACK: Header Compression for HTTP/2</a>.
* Please note this implementation is only compliant when used with HTTP/2 and so not meant to be used outside of
* this scope.
*/
package io.netty.handler.codec.http2.internal.hpack;

View File

@ -18,7 +18,6 @@ import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.buffer.UnpooledByteBufAllocator;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.http2.internal.hpack.Encoder;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@ -43,7 +42,7 @@ public class DefaultHttp2FrameReaderTest {
private DefaultHttp2FrameReader frameReader;
// Used to generate frame
private Encoder encoder;
private HpackEncoder hpackEncoder;
@Before
public void setUp() throws Exception {
@ -52,7 +51,7 @@ public class DefaultHttp2FrameReaderTest {
when(ctx.alloc()).thenReturn(UnpooledByteBufAllocator.DEFAULT);
frameReader = new DefaultHttp2FrameReader();
encoder = new Encoder();
hpackEncoder = new HpackEncoder();
}
@After
@ -338,7 +337,7 @@ public class DefaultHttp2FrameReaderTest {
Http2Flags flags) throws Http2Exception {
ByteBuf headerBlock = Unpooled.buffer();
try {
encoder.encodeHeaders(streamId, headerBlock, headers, Http2HeadersEncoder.NEVER_SENSITIVE);
hpackEncoder.encodeHeaders(streamId, headerBlock, headers, Http2HeadersEncoder.NEVER_SENSITIVE);
writeFrameHeader(output, headerBlock.readableBytes(), HEADERS, flags, streamId);
output.writeBytes(headerBlock, headerBlock.readableBytes());
} finally {
@ -351,7 +350,7 @@ public class DefaultHttp2FrameReaderTest {
ByteBuf dataPayload) throws Http2Exception {
ByteBuf headerBlock = Unpooled.buffer();
try {
encoder.encodeHeaders(streamId, headerBlock, headers, Http2HeadersEncoder.NEVER_SENSITIVE);
hpackEncoder.encodeHeaders(streamId, headerBlock, headers, Http2HeadersEncoder.NEVER_SENSITIVE);
writeFrameHeader(output, headerBlock.readableBytes(), HEADERS,
new Http2Flags().endOfHeaders(true), streamId);
output.writeBytes(headerBlock, headerBlock.readableBytes());
@ -370,7 +369,7 @@ public class DefaultHttp2FrameReaderTest {
try {
writeUnsignedInt(streamDependency, headerBlock);
headerBlock.writeByte(weight - 1);
encoder.encodeHeaders(streamId, headerBlock, headers, Http2HeadersEncoder.NEVER_SENSITIVE);
hpackEncoder.encodeHeaders(streamId, headerBlock, headers, Http2HeadersEncoder.NEVER_SENSITIVE);
writeFrameHeader(output, headerBlock.readableBytes(), HEADERS, flags, streamId);
output.writeBytes(headerBlock, headerBlock.readableBytes());
} finally {
@ -383,7 +382,7 @@ public class DefaultHttp2FrameReaderTest {
Http2Flags flags) throws Http2Exception {
ByteBuf headerBlock = Unpooled.buffer();
try {
encoder.encodeHeaders(streamId, headerBlock, headers, Http2HeadersEncoder.NEVER_SENSITIVE);
hpackEncoder.encodeHeaders(streamId, headerBlock, headers, Http2HeadersEncoder.NEVER_SENSITIVE);
writeFrameHeader(output, headerBlock.readableBytes(), CONTINUATION, flags, streamId);
output.writeBytes(headerBlock, headerBlock.readableBytes());
} finally {

View File

@ -17,7 +17,6 @@ package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.handler.codec.http2.internal.hpack.Encoder;
import io.netty.util.AsciiString;
import org.junit.Before;
import org.junit.Test;
@ -119,13 +118,13 @@ public class DefaultHttp2HeadersDecoderTest {
}
private static ByteBuf encode(byte[]... entries) throws Exception {
Encoder encoder = newTestEncoder();
HpackEncoder hpackEncoder = newTestEncoder();
ByteBuf out = Unpooled.buffer();
Http2Headers http2Headers = new DefaultHttp2Headers(false);
for (int ix = 0; ix < entries.length;) {
http2Headers.add(new AsciiString(entries[ix++], false), new AsciiString(entries[ix++], false));
}
encoder.encodeHeaders(3 /* randomly chosen */, out, http2Headers, NEVER_SENSITIVE);
hpackEncoder.encodeHeaders(3 /* randomly chosen */, out, http2Headers, NEVER_SENSITIVE);
return out;
}
}

View File

@ -29,18 +29,15 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.netty.handler.codec.http2.internal.hpack;
package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.handler.codec.http2.DefaultHttp2Headers;
import io.netty.handler.codec.http2.Http2Exception;
import io.netty.handler.codec.http2.Http2Headers;
import org.junit.Before;
import org.junit.Test;
import static io.netty.handler.codec.http2.HpackDecoder.decodeULE128;
import static io.netty.handler.codec.http2.Http2HeadersEncoder.NEVER_SENSITIVE;
import static io.netty.handler.codec.http2.internal.hpack.Decoder.decodeULE128;
import static io.netty.util.AsciiString.EMPTY_STRING;
import static io.netty.util.AsciiString.of;
import static java.lang.Integer.MAX_VALUE;
@ -53,19 +50,19 @@ import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
public class DecoderTest {
private Decoder decoder;
public class HpackDecoderTest {
private HpackDecoder hpackDecoder;
private Http2Headers mockHeaders;
private static String hex(String s) {
return Hex.encodeHexString(s.getBytes());
return HpackHex.encodeHexString(s.getBytes());
}
private void decode(String encoded) throws Http2Exception {
byte[] b = Hex.decodeHex(encoded.toCharArray());
byte[] b = HpackHex.decodeHex(encoded.toCharArray());
ByteBuf in = Unpooled.wrappedBuffer(b);
try {
decoder.decode(0, in, mockHeaders);
hpackDecoder.decode(0, in, mockHeaders);
} finally {
in.release();
}
@ -73,7 +70,7 @@ public class DecoderTest {
@Before
public void setUp() throws Http2Exception {
decoder = new Decoder(8192, 32);
hpackDecoder = new HpackDecoder(8192, 32);
mockHeaders = mock(Http2Headers.class);
}
@ -160,9 +157,9 @@ public class DecoderTest {
ByteBuf in = Unpooled.wrappedBuffer(input);
try {
final long expectedHeaderSize = 4026531870L; // based on the input above
decoder.setMaxHeaderTableSize(expectedHeaderSize);
decoder.decode(0, in, mockHeaders);
assertEquals(expectedHeaderSize, decoder.getMaxHeaderTableSize());
hpackDecoder.setMaxHeaderTableSize(expectedHeaderSize);
hpackDecoder.decode(0, in, mockHeaders);
assertEquals(expectedHeaderSize, hpackDecoder.getMaxHeaderTableSize());
} finally {
in.release();
}
@ -173,8 +170,8 @@ public class DecoderTest {
byte[] input = {(byte) 0x3F, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0x0E};
ByteBuf in = Unpooled.wrappedBuffer(input);
try {
decoder.setMaxHeaderTableSize(4026531870L - 1); // based on the input above ... 1 less than is above.
decoder.decode(0, in, mockHeaders);
hpackDecoder.setMaxHeaderTableSize(4026531870L - 1); // based on the input above ... 1 less than is above.
hpackDecoder.decode(0, in, mockHeaders);
} finally {
in.release();
}
@ -185,7 +182,7 @@ public class DecoderTest {
byte[] input = {0, (byte) 0x80, 0};
ByteBuf in = Unpooled.wrappedBuffer(input);
try {
decoder.decode(0, in, mockHeaders);
hpackDecoder.decode(0, in, mockHeaders);
verify(mockHeaders, times(1)).add(EMPTY_STRING, EMPTY_STRING);
} finally {
in.release();
@ -197,7 +194,7 @@ public class DecoderTest {
byte[] input = {0, (byte) 0x81, -1};
ByteBuf in = Unpooled.wrappedBuffer(input);
try {
decoder.decode(0, in, mockHeaders);
hpackDecoder.decode(0, in, mockHeaders);
} finally {
in.release();
}
@ -208,7 +205,7 @@ public class DecoderTest {
byte[] input = {0, (byte) 0x84, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF};
ByteBuf in = Unpooled.wrappedBuffer(input);
try {
decoder.decode(0, in, mockHeaders);
hpackDecoder.decode(0, in, mockHeaders);
} finally {
in.release();
}
@ -219,7 +216,7 @@ public class DecoderTest {
byte[] input = {0, (byte) 0x81, 0};
ByteBuf in = Unpooled.wrappedBuffer(input);
try {
decoder.decode(0, in, mockHeaders);
hpackDecoder.decode(0, in, mockHeaders);
} finally {
in.release();
}
@ -227,12 +224,12 @@ public class DecoderTest {
@Test(expected = Http2Exception.class)
public void testIncompleteIndex() throws Http2Exception {
byte[] compressed = Hex.decodeHex("FFF0".toCharArray());
byte[] compressed = HpackHex.decodeHex("FFF0".toCharArray());
ByteBuf in = Unpooled.wrappedBuffer(compressed);
try {
decoder.decode(0, in, mockHeaders);
hpackDecoder.decode(0, in, mockHeaders);
assertEquals(1, in.readableBytes());
decoder.decode(0, in, mockHeaders);
hpackDecoder.decode(0, in, mockHeaders);
} finally {
in.release();
}
@ -259,16 +256,16 @@ public class DecoderTest {
@Test
public void testDynamicTableSizeUpdate() throws Http2Exception {
decode("20");
assertEquals(0, decoder.getMaxHeaderTableSize());
assertEquals(0, hpackDecoder.getMaxHeaderTableSize());
decode("3FE11F");
assertEquals(4096, decoder.getMaxHeaderTableSize());
assertEquals(4096, hpackDecoder.getMaxHeaderTableSize());
}
@Test
public void testDynamicTableSizeUpdateRequired() throws Http2Exception {
decoder.setMaxHeaderTableSize(32);
hpackDecoder.setMaxHeaderTableSize(32);
decode("3F00");
assertEquals(31, decoder.getMaxHeaderTableSize());
assertEquals(31, hpackDecoder.getMaxHeaderTableSize());
}
@Test(expected = Http2Exception.class)
@ -279,14 +276,14 @@ public class DecoderTest {
@Test(expected = Http2Exception.class)
public void testInsidiousMaxDynamicTableSize() throws Http2Exception {
decoder.setMaxHeaderTableSize(MAX_VALUE);
hpackDecoder.setMaxHeaderTableSize(MAX_VALUE);
// max header table size sign overflow
decode("3FE1FFFFFF07");
}
@Test
public void testMaxValidDynamicTableSize() throws Http2Exception {
decoder.setMaxHeaderTableSize(MAX_VALUE);
hpackDecoder.setMaxHeaderTableSize(MAX_VALUE);
String baseValue = "3FE1FFFFFF0";
for (int i = 0; i < 7; ++i) {
decode(baseValue + i);
@ -295,22 +292,22 @@ public class DecoderTest {
@Test
public void testReduceMaxDynamicTableSize() throws Http2Exception {
decoder.setMaxHeaderTableSize(0);
assertEquals(0, decoder.getMaxHeaderTableSize());
hpackDecoder.setMaxHeaderTableSize(0);
assertEquals(0, hpackDecoder.getMaxHeaderTableSize());
decode("2081");
}
@Test(expected = Http2Exception.class)
public void testTooLargeDynamicTableSizeUpdate() throws Http2Exception {
decoder.setMaxHeaderTableSize(0);
assertEquals(0, decoder.getMaxHeaderTableSize());
hpackDecoder.setMaxHeaderTableSize(0);
assertEquals(0, hpackDecoder.getMaxHeaderTableSize());
decode("21"); // encoder max header table size not small enough
}
@Test(expected = Http2Exception.class)
public void testMissingDynamicTableSizeUpdate() throws Http2Exception {
decoder.setMaxHeaderTableSize(0);
assertEquals(0, decoder.getMaxHeaderTableSize());
hpackDecoder.setMaxHeaderTableSize(0);
assertEquals(0, hpackDecoder.getMaxHeaderTableSize());
decode("81");
}
@ -429,8 +426,8 @@ public class DecoderTest {
throws Http2Exception {
ByteBuf in = Unpooled.buffer(200);
try {
decoder.setMaxHeaderListSize(100, 200);
Encoder encoder = new Encoder(true);
hpackDecoder.setMaxHeaderListSize(100, 200);
HpackEncoder hpackEncoder = new HpackEncoder(true);
// encode headers that are slightly larger than maxHeaderListSize
// but smaller than maxHeaderListSizeGoAway
@ -439,13 +436,13 @@ public class DecoderTest {
toEncode.add("test_2", "2");
toEncode.add("long", String.format("%0100d", 0).replace('0', 'A'));
toEncode.add("test_3", "3");
encoder.encodeHeaders(1, in, toEncode, NEVER_SENSITIVE);
hpackEncoder.encodeHeaders(1, in, toEncode, NEVER_SENSITIVE);
// decode the headers, we should get an exception, but
// the decoded headers object should contain all of the headers
Http2Headers decoded = new DefaultHttp2Headers();
try {
decoder.decode(1, in, decoded);
hpackDecoder.decode(1, in, decoded);
fail();
} catch (Http2Exception e) {
assertTrue(e instanceof Http2Exception.HeaderListSizeException);

View File

@ -13,12 +13,10 @@
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.handler.codec.http2.internal.hpack;
package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.handler.codec.http2.Http2Exception;
import io.netty.handler.codec.http2.Http2Headers;
import org.junit.Before;
import org.junit.Test;
@ -27,25 +25,25 @@ import static io.netty.handler.codec.http2.Http2CodecUtil.MAX_HEADER_TABLE_SIZE;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
public class EncoderTest {
private Decoder decoder;
private Encoder encoder;
public class HpackEncoderTest {
private HpackDecoder hpackDecoder;
private HpackEncoder hpackEncoder;
private Http2Headers mockHeaders;
@Before
public void setUp() throws Http2Exception {
encoder = new Encoder();
decoder = new Decoder(DEFAULT_HEADER_LIST_SIZE, 32);
hpackEncoder = new HpackEncoder();
hpackDecoder = new HpackDecoder(DEFAULT_HEADER_LIST_SIZE, 32);
mockHeaders = mock(Http2Headers.class);
}
@Test
public void testSetMaxHeaderTableSizeToMaxValue() throws Http2Exception {
ByteBuf buf = Unpooled.buffer();
encoder.setMaxHeaderTableSize(buf, MAX_HEADER_TABLE_SIZE);
decoder.setMaxHeaderTableSize(MAX_HEADER_TABLE_SIZE);
decoder.decode(0, buf, mockHeaders);
assertEquals(MAX_HEADER_TABLE_SIZE, decoder.getMaxHeaderTableSize());
hpackEncoder.setMaxHeaderTableSize(buf, MAX_HEADER_TABLE_SIZE);
hpackDecoder.setMaxHeaderTableSize(MAX_HEADER_TABLE_SIZE);
hpackDecoder.decode(0, buf, mockHeaders);
assertEquals(MAX_HEADER_TABLE_SIZE, hpackDecoder.getMaxHeaderTableSize());
buf.release();
}
@ -53,7 +51,7 @@ public class EncoderTest {
public void testSetMaxHeaderTableSizeOverflow() throws Http2Exception {
ByteBuf buf = Unpooled.buffer();
try {
encoder.setMaxHeaderTableSize(buf, MAX_HEADER_TABLE_SIZE + 1);
hpackEncoder.setMaxHeaderTableSize(buf, MAX_HEADER_TABLE_SIZE + 1);
} finally {
buf.release();
}

View File

@ -31,23 +31,23 @@
* limitations under the License.
*/
package io.netty.handler.codec.http2.internal.hpack;
package io.netty.handler.codec.http2;
/**
* Extracted from org/apache/commons/codec/binary/Hex.java Copyright Apache Software Foundation
*/
final class Hex {
private Hex() {
final class HpackHex {
private HpackHex() {
}
/**
* Used to build output as Hex
* Used to build output as HpackHex
*/
private static final char[] DIGITS_LOWER =
{'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'};
/**
* Used to build output as Hex
* Used to build output as HpackHex
*/
private static final char[] DIGITS_UPPER =
{'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F'};
@ -89,7 +89,7 @@ final class Hex {
* each byte in order. The returned array will be double the length of the passed array, as it
* takes two characters to represent any given byte.
*
* @param data a byte[] to convert to Hex characters
* @param data a byte[] to convert to HpackHex characters
* @return A char[] containing hexadecimal characters
*/
public static char[] encodeHex(byte[] data) {
@ -101,7 +101,7 @@ final class Hex {
* each byte in order. The returned array will be double the length of the passed array, as it
* takes two characters to represent any given byte.
*
* @param data a byte[] to convert to Hex characters
* @param data a byte[] to convert to HpackHex characters
* @param toLowerCase <code>true</code> converts to lowercase, <code>false</code> to uppercase
* @return A char[] containing hexadecimal characters
* @since 1.4
@ -115,7 +115,7 @@ final class Hex {
* each byte in order. The returned array will be double the length of the passed array, as it
* takes two characters to represent any given byte.
*
* @param data a byte[] to convert to Hex characters
* @param data a byte[] to convert to HpackHex characters
* @param toDigits the output alphabet
* @return A char[] containing hexadecimal characters
* @since 1.4
@ -136,7 +136,7 @@ final class Hex {
* order. The returned String will be double the length of the passed array, as it takes two
* characters to represent any given byte.
*
* @param data a byte[] to convert to Hex characters
* @param data a byte[] to convert to HpackHex characters
* @return A String containing hexadecimal characters
* @since 1.4
*/

View File

@ -29,11 +29,10 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.netty.handler.codec.http2.internal.hpack;
package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.handler.codec.http2.Http2Exception;
import io.netty.util.AsciiString;
import org.junit.Assert;
import org.junit.Test;
@ -41,7 +40,7 @@ import org.junit.Test;
import java.util.Arrays;
import java.util.Random;
public class HuffmanTest {
public class HpackHuffmanTest {
@Test
public void testHuffman() throws Http2Exception {
@ -123,19 +122,19 @@ public class HuffmanTest {
}
private static void roundTrip(String s) throws Http2Exception {
roundTrip(new HuffmanEncoder(), newHuffmanDecoder(), s);
roundTrip(new HpackHuffmanEncoder(), newHuffmanDecoder(), s);
}
private static void roundTrip(HuffmanEncoder encoder, HuffmanDecoder decoder, String s)
private static void roundTrip(HpackHuffmanEncoder encoder, HpackHuffmanDecoder decoder, String s)
throws Http2Exception {
roundTrip(encoder, decoder, s.getBytes());
}
private static void roundTrip(byte[] buf) throws Http2Exception {
roundTrip(new HuffmanEncoder(), newHuffmanDecoder(), buf);
roundTrip(new HpackHuffmanEncoder(), newHuffmanDecoder(), buf);
}
private static void roundTrip(HuffmanEncoder encoder, HuffmanDecoder decoder, byte[] buf)
private static void roundTrip(HpackHuffmanEncoder encoder, HpackHuffmanDecoder decoder, byte[] buf)
throws Http2Exception {
ByteBuf buffer = Unpooled.buffer();
try {
@ -151,7 +150,7 @@ public class HuffmanTest {
}
}
private static byte[] decode(HuffmanDecoder decoder, byte[] bytes) throws Http2Exception {
private static byte[] decode(HpackHuffmanDecoder decoder, byte[] bytes) throws Http2Exception {
ByteBuf buffer = Unpooled.wrappedBuffer(bytes);
try {
AsciiString decoded = decoder.decode(buffer, buffer.readableBytes());
@ -162,7 +161,7 @@ public class HuffmanTest {
}
}
private static HuffmanDecoder newHuffmanDecoder() {
return new HuffmanDecoder(32);
private static HpackHuffmanDecoder newHuffmanDecoder() {
return new HpackHuffmanDecoder(32);
}
}

View File

@ -29,7 +29,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.netty.handler.codec.http2.internal.hpack;
package io.netty.handler.codec.http2;
import org.junit.Test;
import org.junit.runner.RunWith;
@ -72,8 +72,8 @@ public class HpackTest {
@Test
public void test() throws Exception {
InputStream is = HpackTest.class.getResourceAsStream(TEST_DIR + fileName);
TestCase testCase = TestCase.load(is);
testCase.testCompress();
testCase.testDecompress();
HpackTestCase hpackTestCase = HpackTestCase.load(is);
hpackTestCase.testCompress();
hpackTestCase.testDecompress();
}
}

View File

@ -29,7 +29,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.netty.handler.codec.http2.internal.hpack;
package io.netty.handler.codec.http2;
import com.google.gson.FieldNamingPolicy;
import com.google.gson.Gson;
@ -41,10 +41,6 @@ import com.google.gson.JsonObject;
import com.google.gson.JsonParseException;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.handler.codec.http2.DefaultHttp2Headers;
import io.netty.handler.codec.http2.Http2Exception;
import io.netty.handler.codec.http2.Http2Headers;
import io.netty.handler.codec.http2.Http2HeadersEncoder;
import java.io.IOException;
import java.io.InputStream;
@ -60,11 +56,11 @@ import static io.netty.handler.codec.http2.Http2CodecUtil.DEFAULT_HEADER_LIST_SI
import static io.netty.handler.codec.http2.Http2CodecUtil.MAX_HEADER_LIST_SIZE;
import static io.netty.handler.codec.http2.Http2TestUtil.newTestEncoder;
final class TestCase {
final class HpackTestCase {
private static final Gson GSON = new GsonBuilder()
.setFieldNamingPolicy(FieldNamingPolicy.LOWER_CASE_WITH_UNDERSCORES)
.registerTypeAdapter(HeaderField.class, new HeaderFieldDeserializer())
.registerTypeAdapter(HpackHeaderField.class, new HeaderFieldDeserializer())
.create();
int maxHeaderTableSize = -1;
@ -72,39 +68,39 @@ final class TestCase {
List<HeaderBlock> headerBlocks;
private TestCase() {
private HpackTestCase() {
}
static TestCase load(InputStream is) throws IOException {
static HpackTestCase load(InputStream is) throws IOException {
InputStreamReader r = new InputStreamReader(is);
TestCase testCase = GSON.fromJson(r, TestCase.class);
for (HeaderBlock headerBlock : testCase.headerBlocks) {
headerBlock.encodedBytes = Hex.decodeHex(headerBlock.getEncodedStr().toCharArray());
HpackTestCase hpackTestCase = GSON.fromJson(r, HpackTestCase.class);
for (HeaderBlock headerBlock : hpackTestCase.headerBlocks) {
headerBlock.encodedBytes = HpackHex.decodeHex(headerBlock.getEncodedStr().toCharArray());
}
return testCase;
return hpackTestCase;
}
void testCompress() throws Exception {
Encoder encoder = createEncoder();
HpackEncoder hpackEncoder = createEncoder();
for (HeaderBlock headerBlock : headerBlocks) {
byte[] actual =
encode(encoder, headerBlock.getHeaders(), headerBlock.getMaxHeaderTableSize(),
encode(hpackEncoder, headerBlock.getHeaders(), headerBlock.getMaxHeaderTableSize(),
sensitiveHeaders);
if (!Arrays.equals(actual, headerBlock.encodedBytes)) {
throw new AssertionError(
"\nEXPECTED:\n" + headerBlock.getEncodedStr() +
"\nACTUAL:\n" + Hex.encodeHexString(actual));
"\nACTUAL:\n" + HpackHex.encodeHexString(actual));
}
List<HeaderField> actualDynamicTable = new ArrayList<HeaderField>();
for (int index = 0; index < encoder.length(); index++) {
actualDynamicTable.add(encoder.getHeaderField(index));
List<HpackHeaderField> actualDynamicTable = new ArrayList<HpackHeaderField>();
for (int index = 0; index < hpackEncoder.length(); index++) {
actualDynamicTable.add(hpackEncoder.getHeaderField(index));
}
List<HeaderField> expectedDynamicTable = headerBlock.getDynamicTable();
List<HpackHeaderField> expectedDynamicTable = headerBlock.getDynamicTable();
if (!expectedDynamicTable.equals(actualDynamicTable)) {
throw new AssertionError(
@ -112,24 +108,24 @@ final class TestCase {
"\nACTUAL DYNAMIC TABLE:\n" + actualDynamicTable);
}
if (headerBlock.getTableSize() != encoder.size()) {
if (headerBlock.getTableSize() != hpackEncoder.size()) {
throw new AssertionError(
"\nEXPECTED TABLE SIZE: " + headerBlock.getTableSize() +
"\n ACTUAL TABLE SIZE : " + encoder.size());
"\n ACTUAL TABLE SIZE : " + hpackEncoder.size());
}
}
}
void testDecompress() throws Exception {
Decoder decoder = createDecoder();
HpackDecoder hpackDecoder = createDecoder();
for (HeaderBlock headerBlock : headerBlocks) {
List<HeaderField> actualHeaders = decode(decoder, headerBlock.encodedBytes);
List<HpackHeaderField> actualHeaders = decode(hpackDecoder, headerBlock.encodedBytes);
List<HeaderField> expectedHeaders = new ArrayList<HeaderField>();
for (HeaderField h : headerBlock.getHeaders()) {
expectedHeaders.add(new HeaderField(h.name, h.value));
List<HpackHeaderField> expectedHeaders = new ArrayList<HpackHeaderField>();
for (HpackHeaderField h : headerBlock.getHeaders()) {
expectedHeaders.add(new HpackHeaderField(h.name, h.value));
}
if (!expectedHeaders.equals(actualHeaders)) {
@ -138,12 +134,12 @@ final class TestCase {
"\nACTUAL:\n" + actualHeaders);
}
List<HeaderField> actualDynamicTable = new ArrayList<HeaderField>();
for (int index = 0; index < decoder.length(); index++) {
actualDynamicTable.add(decoder.getHeaderField(index));
List<HpackHeaderField> actualDynamicTable = new ArrayList<HpackHeaderField>();
for (int index = 0; index < hpackDecoder.length(); index++) {
actualDynamicTable.add(hpackDecoder.getHeaderField(index));
}
List<HeaderField> expectedDynamicTable = headerBlock.getDynamicTable();
List<HpackHeaderField> expectedDynamicTable = headerBlock.getDynamicTable();
if (!expectedDynamicTable.equals(actualDynamicTable)) {
throw new AssertionError(
@ -151,15 +147,15 @@ final class TestCase {
"\nACTUAL DYNAMIC TABLE:\n" + actualDynamicTable);
}
if (headerBlock.getTableSize() != decoder.size()) {
if (headerBlock.getTableSize() != hpackDecoder.size()) {
throw new AssertionError(
"\nEXPECTED TABLE SIZE: " + headerBlock.getTableSize() +
"\n ACTUAL TABLE SIZE : " + decoder.size());
"\n ACTUAL TABLE SIZE : " + hpackDecoder.size());
}
}
}
private Encoder createEncoder() {
private HpackEncoder createEncoder() {
int maxHeaderTableSize = this.maxHeaderTableSize;
if (maxHeaderTableSize == -1) {
maxHeaderTableSize = Integer.MAX_VALUE;
@ -172,16 +168,16 @@ final class TestCase {
}
}
private Decoder createDecoder() {
private HpackDecoder createDecoder() {
int maxHeaderTableSize = this.maxHeaderTableSize;
if (maxHeaderTableSize == -1) {
maxHeaderTableSize = Integer.MAX_VALUE;
}
return new Decoder(DEFAULT_HEADER_LIST_SIZE, 32, maxHeaderTableSize);
return new HpackDecoder(DEFAULT_HEADER_LIST_SIZE, 32, maxHeaderTableSize);
}
private static byte[] encode(Encoder encoder, List<HeaderField> headers, int maxHeaderTableSize,
private static byte[] encode(HpackEncoder hpackEncoder, List<HpackHeaderField> headers, int maxHeaderTableSize,
final boolean sensitive) throws Http2Exception {
Http2Headers http2Headers = toHttp2Headers(headers);
Http2HeadersEncoder.SensitivityDetector sensitivityDetector = new Http2HeadersEncoder.SensitivityDetector() {
@ -193,10 +189,10 @@ final class TestCase {
ByteBuf buffer = Unpooled.buffer();
try {
if (maxHeaderTableSize != -1) {
encoder.setMaxHeaderTableSize(buffer, maxHeaderTableSize);
hpackEncoder.setMaxHeaderTableSize(buffer, maxHeaderTableSize);
}
encoder.encodeHeaders(3 /* randomly chosen */, buffer, http2Headers, sensitivityDetector);
hpackEncoder.encodeHeaders(3 /* randomly chosen */, buffer, http2Headers, sensitivityDetector);
byte[] bytes = new byte[buffer.readableBytes()];
buffer.readBytes(bytes);
return bytes;
@ -205,20 +201,20 @@ final class TestCase {
}
}
private static Http2Headers toHttp2Headers(List<HeaderField> inHeaders) {
private static Http2Headers toHttp2Headers(List<HpackHeaderField> inHeaders) {
Http2Headers headers = new DefaultHttp2Headers(false);
for (HeaderField e : inHeaders) {
for (HpackHeaderField e : inHeaders) {
headers.add(e.name, e.value);
}
return headers;
}
private static List<HeaderField> decode(Decoder decoder, byte[] expected) throws Exception {
private static List<HpackHeaderField> decode(HpackDecoder hpackDecoder, byte[] expected) throws Exception {
ByteBuf in = Unpooled.wrappedBuffer(expected);
try {
List<HeaderField> headers = new ArrayList<HeaderField>();
List<HpackHeaderField> headers = new ArrayList<HpackHeaderField>();
TestHeaderListener listener = new TestHeaderListener(headers);
decoder.decode(0, in, listener);
hpackDecoder.decode(0, in, listener);
return headers;
} finally {
in.release();
@ -237,8 +233,8 @@ final class TestCase {
private int maxHeaderTableSize = -1;
private byte[] encodedBytes;
private List<String> encoded;
private List<HeaderField> headers;
private List<HeaderField> dynamicTable;
private List<HpackHeaderField> headers;
private List<HpackHeaderField> dynamicTable;
private int tableSize;
private int getMaxHeaderTableSize() {
@ -249,11 +245,11 @@ final class TestCase {
return concat(encoded).replaceAll(" ", "");
}
public List<HeaderField> getHeaders() {
public List<HpackHeaderField> getHeaders() {
return headers;
}
public List<HeaderField> getDynamicTable() {
public List<HpackHeaderField> getDynamicTable() {
return dynamicTable;
}
@ -262,11 +258,11 @@ final class TestCase {
}
}
static class HeaderFieldDeserializer implements JsonDeserializer<HeaderField> {
static class HeaderFieldDeserializer implements JsonDeserializer<HpackHeaderField> {
@Override
public HeaderField deserialize(JsonElement json, Type typeOfT,
JsonDeserializationContext context) {
public HpackHeaderField deserialize(JsonElement json, Type typeOfT,
JsonDeserializationContext context) {
JsonObject jsonObject = json.getAsJsonObject();
Set<Map.Entry<String, JsonElement>> entrySet = jsonObject.entrySet();
if (entrySet.size() != 1) {
@ -275,7 +271,7 @@ final class TestCase {
Map.Entry<String, JsonElement> entry = entrySet.iterator().next();
String name = entry.getKey();
String value = entry.getValue().getAsString();
return new HeaderField(name, value);
return new HpackHeaderField(name, value);
}
}
}

View File

@ -23,8 +23,6 @@ import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelPromise;
import io.netty.channel.DefaultChannelPromise;
import io.netty.handler.codec.ByteToMessageDecoder;
import io.netty.handler.codec.http2.internal.hpack.Decoder;
import io.netty.handler.codec.http2.internal.hpack.Encoder;
import io.netty.util.AsciiString;
import io.netty.util.concurrent.Future;
import io.netty.util.concurrent.GenericFutureListener;
@ -92,7 +90,7 @@ public final class Http2TestUtil {
return s;
}
public static Encoder newTestEncoder() {
public static HpackEncoder newTestEncoder() {
try {
return newTestEncoder(true, MAX_HEADER_LIST_SIZE, MAX_HEADER_TABLE_SIZE);
} catch (Http2Exception e) {
@ -100,20 +98,20 @@ public final class Http2TestUtil {
}
}
public static Encoder newTestEncoder(boolean ignoreMaxHeaderListSize,
long maxHeaderListSize, long maxHeaderTableSize) throws Http2Exception {
Encoder encoder = new Encoder();
public static HpackEncoder newTestEncoder(boolean ignoreMaxHeaderListSize,
long maxHeaderListSize, long maxHeaderTableSize) throws Http2Exception {
HpackEncoder hpackEncoder = new HpackEncoder();
ByteBuf buf = Unpooled.buffer();
try {
encoder.setMaxHeaderTableSize(buf, maxHeaderTableSize);
encoder.setMaxHeaderListSize(maxHeaderListSize);
hpackEncoder.setMaxHeaderTableSize(buf, maxHeaderTableSize);
hpackEncoder.setMaxHeaderListSize(maxHeaderListSize);
} finally {
buf.release();
}
return encoder;
return hpackEncoder;
}
public static Decoder newTestDecoder() {
public static HpackDecoder newTestDecoder() {
try {
return newTestDecoder(MAX_HEADER_LIST_SIZE, MAX_HEADER_TABLE_SIZE);
} catch (Http2Exception e) {
@ -121,10 +119,10 @@ public final class Http2TestUtil {
}
}
public static Decoder newTestDecoder(long maxHeaderListSize, long maxHeaderTableSize) throws Http2Exception {
Decoder decoder = new Decoder(maxHeaderListSize, 32);
decoder.setMaxHeaderTableSize(maxHeaderTableSize);
return decoder;
public static HpackDecoder newTestDecoder(long maxHeaderListSize, long maxHeaderTableSize) throws Http2Exception {
HpackDecoder hpackDecoder = new HpackDecoder(maxHeaderListSize, 32);
hpackDecoder.setMaxHeaderTableSize(maxHeaderTableSize);
return hpackDecoder;
}
private Http2TestUtil() {

View File

@ -29,23 +29,21 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.netty.handler.codec.http2.internal.hpack;
import io.netty.handler.codec.http2.DefaultHttp2Headers;
package io.netty.handler.codec.http2;
import java.util.List;
final class TestHeaderListener extends DefaultHttp2Headers {
private final List<HeaderField> headers;
private final List<HpackHeaderField> headers;
TestHeaderListener(List<HeaderField> headers) {
TestHeaderListener(List<HpackHeaderField> headers) {
this.headers = headers;
}
@Override
public TestHeaderListener add(CharSequence name, CharSequence value) {
headers.add(new HeaderField(name, value));
headers.add(new HpackHeaderField(name, value));
return this;
}
}

View File

@ -29,16 +29,10 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.netty.microbench.http2.internal.hpack;
package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.handler.codec.http2.DefaultHttp2Headers;
import io.netty.handler.codec.http2.Http2Exception;
import io.netty.handler.codec.http2.Http2Headers;
import io.netty.handler.codec.http2.Http2HeadersEncoder;
import io.netty.handler.codec.http2.internal.hpack.Decoder;
import io.netty.handler.codec.http2.internal.hpack.Encoder;
import io.netty.microbench.util.AbstractMicrobenchmark;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
@ -50,14 +44,11 @@ import org.openjdk.jmh.annotations.TearDown;
import org.openjdk.jmh.infra.Blackhole;
import static io.netty.handler.codec.http2.Http2CodecUtil.DEFAULT_HEADER_LIST_SIZE;
import static io.netty.handler.codec.http2.Http2CodecUtil.MAX_HEADER_LIST_SIZE;
import static io.netty.handler.codec.http2.Http2CodecUtil.MAX_HEADER_TABLE_SIZE;
import static io.netty.microbench.http2.internal.hpack.HpackUtilBenchmark.newTestEncoder;
public class DecoderBenchmark extends AbstractMicrobenchmark {
public class HpackDecoderBenchmark extends AbstractMicrobenchmark {
@Param
public HeadersSize size;
public HpackHeadersSize size;
@Param({ "true", "false" })
public boolean sensitive;
@ -69,7 +60,7 @@ public class DecoderBenchmark extends AbstractMicrobenchmark {
@Setup(Level.Trial)
public void setup() throws Http2Exception {
input = Unpooled.wrappedBuffer(getSerializedHeaders(Util.http2Headers(size, limitToAscii), sensitive));
input = Unpooled.wrappedBuffer(getSerializedHeaders(HpackUtil.http2Headers(size, limitToAscii), sensitive));
}
@TearDown(Level.Trial)
@ -80,7 +71,7 @@ public class DecoderBenchmark extends AbstractMicrobenchmark {
@Benchmark
@BenchmarkMode(Mode.Throughput)
public void decode(final Blackhole bh) throws Http2Exception {
Decoder decoder = new Decoder(DEFAULT_HEADER_LIST_SIZE, 32);
HpackDecoder hpackDecoder = new HpackDecoder(DEFAULT_HEADER_LIST_SIZE, 32);
@SuppressWarnings("unchecked")
Http2Headers headers =
new DefaultHttp2Headers() {
@ -90,14 +81,14 @@ public class DecoderBenchmark extends AbstractMicrobenchmark {
return this;
}
};
decoder.decode(0, input.duplicate(), headers);
hpackDecoder.decode(0, input.duplicate(), headers);
}
private byte[] getSerializedHeaders(Http2Headers headers, boolean sensitive) throws Http2Exception {
Encoder encoder = newTestEncoder();
HpackEncoder hpackEncoder = HpackUtilBenchmark.newTestEncoder();
ByteBuf out = size.newOutBuffer();
try {
encoder.encodeHeaders(3 /* randomly chosen */, out, headers,
hpackEncoder.encodeHeaders(3 /* randomly chosen */, out, headers,
sensitive ? Http2HeadersEncoder.ALWAYS_SENSITIVE
: Http2HeadersEncoder.NEVER_SENSITIVE);
byte[] bytes = new byte[out.readableBytes()];

View File

@ -13,12 +13,10 @@
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.microbench.http2.internal.hpack;
package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.handler.codec.http2.Http2Error;
import io.netty.handler.codec.http2.Http2Exception;
import io.netty.microbench.util.AbstractMicrobenchmark;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
@ -40,7 +38,7 @@ import java.util.concurrent.TimeUnit;
@Warmup(iterations = 5)
@Measurement(iterations = 10)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
public class DecoderULE128Benchmark extends AbstractMicrobenchmark {
public class HpackDecoderULE128Benchmark extends AbstractMicrobenchmark {
private static final Http2Exception DECODE_ULE_128_TO_LONG_DECOMPRESSION_EXCEPTION =
new Http2Exception(Http2Error.COMPRESSION_ERROR);
private static final Http2Exception DECODE_ULE_128_TO_INT_DECOMPRESSION_EXCEPTION =

View File

@ -29,12 +29,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.netty.microbench.http2.internal.hpack;
package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBuf;
import io.netty.handler.codec.http2.Http2Headers;
import io.netty.handler.codec.http2.Http2HeadersEncoder;
import io.netty.handler.codec.http2.internal.hpack.Encoder;
import io.netty.microbench.util.AbstractMicrobenchmark;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
@ -56,18 +53,16 @@ import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import static io.netty.microbench.http2.internal.hpack.HpackUtilBenchmark.newTestEncoder;
@Fork(1)
@Threads(1)
@State(Scope.Benchmark)
@Warmup(iterations = 5)
@Measurement(iterations = 5)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
public class EncoderBenchmark extends AbstractMicrobenchmark {
public class HpackEncoderBenchmark extends AbstractMicrobenchmark {
@Param
public HeadersSize size;
public HpackHeadersSize size;
@Param({ "true", "false" })
public boolean sensitive;
@ -84,7 +79,7 @@ public class EncoderBenchmark extends AbstractMicrobenchmark {
@Setup(Level.Trial)
public void setup() {
http2Headers = Util.http2Headers(size, limitToAscii);
http2Headers = HpackUtil.http2Headers(size, limitToAscii);
if (duplicates) {
int size = http2Headers.size();
if (size > 0) {
@ -108,9 +103,9 @@ public class EncoderBenchmark extends AbstractMicrobenchmark {
@Benchmark
@BenchmarkMode(Mode.AverageTime)
public void encode(Blackhole bh) throws Exception {
Encoder encoder = newTestEncoder();
HpackEncoder hpackEncoder = HpackUtilBenchmark.newTestEncoder();
output.clear();
encoder.encodeHeaders(3 /*randomly chosen*/, output, http2Headers, sensitivityDetector);
hpackEncoder.encodeHeaders(3 /*randomly chosen*/, output, http2Headers, sensitivityDetector);
bh.consume(output);
}
}

View File

@ -29,7 +29,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.netty.microbench.http2.internal.hpack;
package io.netty.handler.codec.http2;
import io.netty.util.AsciiString;
@ -40,14 +40,14 @@ import java.util.Random;
/**
* Helper class representing a single header entry. Used by the benchmarks.
*/
class Header {
class HpackHeader {
private static final String ALPHABET =
"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_";
final CharSequence name;
final CharSequence value;
Header(byte[] name, byte[] value) {
HpackHeader(byte[] name, byte[] value) {
this.name = new AsciiString(name, false);
this.value = new AsciiString(value, false);
}
@ -55,15 +55,15 @@ class Header {
/**
* Creates a number of random headers with the given name/value lengths.
*/
static List<Header> createHeaders(int numHeaders, int nameLength, int valueLength,
boolean limitToAscii) {
List<Header> headers = new ArrayList<Header>(numHeaders);
static List<HpackHeader> createHeaders(int numHeaders, int nameLength, int valueLength,
boolean limitToAscii) {
List<HpackHeader> hpackHeaders = new ArrayList<HpackHeader>(numHeaders);
for (int i = 0; i < numHeaders; ++i) {
byte[] name = randomBytes(new byte[nameLength], limitToAscii);
byte[] value = randomBytes(new byte[valueLength], limitToAscii);
headers.add(new Header(name, value));
hpackHeaders.add(new HpackHeader(name, value));
}
return headers;
return hpackHeaders;
}
private static byte[] randomBytes(byte[] bytes, boolean limitToAscii) {

View File

@ -29,7 +29,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.netty.microbench.http2.internal.hpack;
package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
@ -39,7 +39,7 @@ import java.util.List;
/**
* Enum that indicates the size of the headers to be used for the benchmark.
*/
public enum HeadersSize {
public enum HpackHeadersSize {
SMALL(5, 20, 40),
MEDIUM(20, 40, 80),
LARGE(100, 100, 300);
@ -48,14 +48,14 @@ public enum HeadersSize {
private final int nameLength;
private final int valueLength;
HeadersSize(int numHeaders, int nameLength, int valueLength) {
HpackHeadersSize(int numHeaders, int nameLength, int valueLength) {
this.numHeaders = numHeaders;
this.nameLength = nameLength;
this.valueLength = valueLength;
}
public List<Header> newHeaders(boolean limitAscii) {
return Header.createHeaders(numHeaders, nameLength, valueLength, limitAscii);
public List<HpackHeader> newHeaders(boolean limitAscii) {
return HpackHeader.createHeaders(numHeaders, nameLength, valueLength, limitAscii);
}
public ByteBuf newOutBuffer() {

View File

@ -29,10 +29,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.netty.microbench.http2.internal.hpack;
import io.netty.handler.codec.http2.DefaultHttp2Headers;
import io.netty.handler.codec.http2.Http2Headers;
package io.netty.handler.codec.http2;
import java.util.HashMap;
import java.util.List;
@ -41,23 +38,23 @@ import java.util.Map;
/**
* Utility methods for hpack tests.
*/
public final class Util {
private Util() {
public final class HpackUtil {
private HpackUtil() {
}
/**
* Internal key used to index a particular set of headers in the map.
*/
private static class HeadersKey {
final HeadersSize size;
final HpackHeadersSize size;
final boolean limitToAscii;
public HeadersKey(HeadersSize size, boolean limitToAscii) {
public HeadersKey(HpackHeadersSize size, boolean limitToAscii) {
this.size = size;
this.limitToAscii = limitToAscii;
}
List<Header> newHeaders() {
List<HpackHeader> newHeaders() {
return size.newHeaders(limitToAscii);
}
@ -86,12 +83,12 @@ public final class Util {
}
}
private static final Map<HeadersKey, List<Header>> headersMap;
private static final Map<HeadersKey, List<HpackHeader>> headersMap;
static {
HeadersSize[] sizes = HeadersSize.values();
headersMap = new HashMap<HeadersKey, List<Header>>(sizes.length * 2);
for (HeadersSize size : sizes) {
HpackHeadersSize[] sizes = HpackHeadersSize.values();
headersMap = new HashMap<HeadersKey, List<HpackHeader>>(sizes.length * 2);
for (HpackHeadersSize size : sizes) {
HeadersKey key = new HeadersKey(size, true);
headersMap.put(key, key.newHeaders());
@ -103,16 +100,16 @@ public final class Util {
/**
* Gets headers for the given size and whether the key/values should be limited to ASCII.
*/
static List<Header> headers(HeadersSize size, boolean limitToAscii) {
static List<HpackHeader> headers(HpackHeadersSize size, boolean limitToAscii) {
return headersMap.get(new HeadersKey(size, limitToAscii));
}
static Http2Headers http2Headers(HeadersSize size, boolean limitToAscii) {
List<Header> headers = headersMap.get(new HeadersKey(size, limitToAscii));
static Http2Headers http2Headers(HpackHeadersSize size, boolean limitToAscii) {
List<HpackHeader> hpackHeaders = headersMap.get(new HeadersKey(size, limitToAscii));
Http2Headers http2Headers = new DefaultHttp2Headers(false);
for (int i = 0; i < headers.size(); ++i) {
Header header = headers.get(i);
http2Headers.add(header.name, header.value);
for (int i = 0; i < hpackHeaders.size(); ++i) {
HpackHeader hpackHeader = hpackHeaders.get(i);
http2Headers.add(hpackHeader.name, hpackHeader.value);
}
return http2Headers;
}

View File

@ -13,12 +13,10 @@
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.microbench.http2.internal.hpack;
package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.handler.codec.http2.Http2Exception;
import io.netty.handler.codec.http2.internal.hpack.Encoder;
import io.netty.microbench.util.AbstractMicrobenchmark;
import io.netty.util.AsciiString;
import io.netty.util.internal.ConstantTimeUtils;
@ -41,21 +39,21 @@ import static io.netty.handler.codec.http2.Http2CodecUtil.MAX_HEADER_TABLE_SIZE;
@Measurement(iterations = 5)
public class HpackUtilBenchmark extends AbstractMicrobenchmark {
@Param
public HeadersSize size;
public HpackHeadersSize size;
private List<Header> headers;
private List<HpackHeader> hpackHeaders;
@Setup(Level.Trial)
public void setup() {
headers = Util.headers(size, false);
hpackHeaders = HpackUtil.headers(size, false);
}
@Benchmark
public int oldEquals() {
int count = 0;
for (int i = 0; i < headers.size(); ++i) {
Header header = headers.get(i);
if (oldEquals(header.name, header.name)) {
for (int i = 0; i < hpackHeaders.size(); ++i) {
HpackHeader hpackHeader = hpackHeaders.get(i);
if (oldEquals(hpackHeader.name, hpackHeader.name)) {
++count;
}
}
@ -65,9 +63,9 @@ public class HpackUtilBenchmark extends AbstractMicrobenchmark {
@Benchmark
public int newEquals() {
int count = 0;
for (int i = 0; i < headers.size(); ++i) {
Header header = headers.get(i);
if (newEquals(header.name, header.name)) {
for (int i = 0; i < hpackHeaders.size(); ++i) {
HpackHeader hpackHeader = hpackHeaders.get(i);
if (newEquals(hpackHeader.name, hpackHeader.name)) {
++count;
}
}
@ -99,17 +97,17 @@ public class HpackUtilBenchmark extends AbstractMicrobenchmark {
return ConstantTimeUtils.equalsConstantTime(s1, s2) != 0;
}
static Encoder newTestEncoder() {
Encoder encoder = new Encoder();
static HpackEncoder newTestEncoder() {
HpackEncoder hpackEncoder = new HpackEncoder();
ByteBuf buf = Unpooled.buffer();
try {
encoder.setMaxHeaderTableSize(buf, MAX_HEADER_TABLE_SIZE);
encoder.setMaxHeaderListSize(MAX_HEADER_LIST_SIZE);
hpackEncoder.setMaxHeaderTableSize(buf, MAX_HEADER_TABLE_SIZE);
hpackEncoder.setMaxHeaderListSize(MAX_HEADER_LIST_SIZE);
} catch (Http2Exception e) {
throw new Error("max size not allowed?", e);
} finally {
buf.release();
}
return encoder;
return hpackEncoder;
}
}

View File

@ -0,0 +1,19 @@
/*
* Copyright 2017 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
/**
* Benchmarks for {@link io.netty.handler.codec.http2}.
*/
package io.netty.handler.codec.http2;

View File

@ -1,36 +0,0 @@
/*
* Copyright 2015 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
/*
* Copyright 2015 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Benchmarks for {@link io.netty.handler.codec.http2.internal.hpack}.
*/
package io.netty.microbench.http2.internal.hpack;