Optimize HPACK usage to align more with Netty types and remove heavy object creations. Related to [#3597]

Motivations:

The HPACK code was not really optimized or written with Netty types in mind. Because of this, a lot of garbage was created due to heavy object creation.

This was first reported in [#3597] and https://github.com/grpc/grpc-java/issues/1872 .

Modifications:

- Directly use ByteBuf as input and output
- Make use of ByteProcessor where possible
- Use AsciiString as this is the only thing we need for our http2 usage

Result:

Less garbage and better usage of Netty apis.
This commit is contained in:
Norman Maurer 2016-05-30 13:40:08 +02:00
parent a725b97092
commit b4d4c0034d
22 changed files with 643 additions and 623 deletions

View File

@ -18,11 +18,16 @@ package io.netty.handler.codec.http2;
import io.netty.handler.codec.DefaultHeaders;
import io.netty.handler.codec.UnsupportedValueConverter;
import io.netty.handler.codec.ValueConverter;
import io.netty.util.internal.UnstableApi;
import static io.netty.util.AsciiString.CASE_INSENSITIVE_HASHER;
import static io.netty.util.AsciiString.CASE_SENSITIVE_HASHER;
final class CharSequenceMap<V> extends DefaultHeaders<CharSequence, V, CharSequenceMap<V>> {
/**
* Internal use only!
*/
@UnstableApi
public final class CharSequenceMap<V> extends DefaultHeaders<CharSequence, V, CharSequenceMap<V>> {
public CharSequenceMap() {
this(true);
}
@ -34,4 +39,10 @@ final class CharSequenceMap<V> extends DefaultHeaders<CharSequence, V, CharSeque
public CharSequenceMap(boolean caseSensitive, ValueConverter<V> valueConverter) {
super(caseSensitive ? CASE_SENSITIVE_HASHER : CASE_INSENSITIVE_HASHER, valueConverter);
}
@SuppressWarnings("unchecked")
public CharSequenceMap(boolean caseSensitive, ValueConverter<V> valueConverter, int arraySizeHint) {
super(caseSensitive ? CASE_SENSITIVE_HASHER : CASE_INSENSITIVE_HASHER, valueConverter,
NameValidator.NOT_NULL, arraySizeHint);
}
}

View File

@ -16,20 +16,15 @@
package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufInputStream;
import io.netty.handler.codec.http2.internal.hpack.Decoder;
import io.netty.handler.codec.http2.internal.hpack.HeaderListener;
import io.netty.util.AsciiString;
import io.netty.util.internal.UnstableApi;
import java.io.IOException;
import java.io.InputStream;
import static io.netty.handler.codec.http2.Http2CodecUtil.DEFAULT_HEADER_TABLE_SIZE;
import static io.netty.handler.codec.http2.Http2CodecUtil.DEFAULT_MAX_HEADER_SIZE;
import static io.netty.handler.codec.http2.Http2Error.COMPRESSION_ERROR;
import static io.netty.handler.codec.http2.Http2Error.ENHANCE_YOUR_CALM;
import static io.netty.handler.codec.http2.Http2Error.INTERNAL_ERROR;
import static io.netty.handler.codec.http2.Http2Error.PROTOCOL_ERROR;
import static io.netty.handler.codec.http2.Http2Exception.connectionError;
@ -53,14 +48,15 @@ public class DefaultHttp2HeadersDecoder implements Http2HeadersDecoder, Http2Hea
}
public DefaultHttp2HeadersDecoder(boolean validateHeaders) {
this(DEFAULT_MAX_HEADER_SIZE, DEFAULT_HEADER_TABLE_SIZE, validateHeaders);
this(DEFAULT_MAX_HEADER_SIZE, DEFAULT_HEADER_TABLE_SIZE, validateHeaders, 32);
}
public DefaultHttp2HeadersDecoder(int maxHeaderSize, int maxHeaderTableSize, boolean validateHeaders) {
public DefaultHttp2HeadersDecoder(int maxHeaderSize, int maxHeaderTableSize, boolean validateHeaders,
int initialHuffmanDecodeCapacity) {
if (maxHeaderSize <= 0) {
throw new IllegalArgumentException("maxHeaderSize must be positive: " + maxHeaderSize);
}
decoder = new Decoder(maxHeaderSize, maxHeaderTableSize);
decoder = new Decoder(maxHeaderSize, maxHeaderTableSize, initialHuffmanDecodeCapacity);
headerTable = new Http2HeaderTableDecoder();
this.maxHeaderSize = maxHeaderSize;
this.validateHeaders = validateHeaders;
@ -91,17 +87,9 @@ public class DefaultHttp2HeadersDecoder implements Http2HeadersDecoder, Http2Hea
@Override
public Http2Headers decodeHeaders(ByteBuf headerBlock) throws Http2Exception {
InputStream in = new ByteBufInputStream(headerBlock);
try {
final Http2Headers headers = new DefaultHttp2Headers(validateHeaders, (int) headerArraySizeAccumulator);
HeaderListener listener = new HeaderListener() {
@Override
public void addHeader(byte[] key, byte[] value, boolean sensitive) {
headers.add(new AsciiString(key, false), new AsciiString(value, false));
}
};
decoder.decode(in, listener);
decoder.decode(headerBlock, headers);
if (decoder.endHeaderBlock()) {
maxHeaderSizeExceeded();
}
@ -123,12 +111,6 @@ public class DefaultHttp2HeadersDecoder implements Http2HeadersDecoder, Http2Hea
// the Header builder throws IllegalArgumentException if the key or value was invalid
// for any reason (e.g. the key was an invalid pseudo-header).
throw connectionError(COMPRESSION_ERROR, e, e.getMessage());
} finally {
try {
in.close();
} catch (IOException e) {
throw connectionError(INTERNAL_ERROR, e, e.getMessage());
}
}
}

View File

@ -16,19 +16,14 @@
package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufOutputStream;
import io.netty.buffer.Unpooled;
import io.netty.handler.codec.http2.internal.hpack.Encoder;
import io.netty.util.AsciiString;
import io.netty.util.internal.UnstableApi;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Map.Entry;
import static io.netty.handler.codec.http2.Http2CodecUtil.DEFAULT_HEADER_TABLE_SIZE;
import static io.netty.handler.codec.http2.Http2Error.COMPRESSION_ERROR;
import static io.netty.handler.codec.http2.Http2Error.INTERNAL_ERROR;
import static io.netty.handler.codec.http2.Http2Error.PROTOCOL_ERROR;
import static io.netty.handler.codec.http2.Http2Exception.connectionError;
import static io.netty.util.internal.ObjectUtil.checkNotNull;
@ -36,9 +31,9 @@ import static io.netty.util.internal.ObjectUtil.checkNotNull;
@UnstableApi
public class DefaultHttp2HeadersEncoder implements Http2HeadersEncoder, Http2HeadersEncoder.Configuration {
private final Encoder encoder;
private final ByteArrayOutputStream tableSizeChangeOutput = new ByteArrayOutputStream();
private final SensitivityDetector sensitivityDetector;
private final Http2HeaderTable headerTable;
private final ByteBuf tableSizeChangeOutput = Unpooled.buffer();
public DefaultHttp2HeadersEncoder() {
this(DEFAULT_HEADER_TABLE_SIZE, NEVER_SENSITIVE);
@ -52,7 +47,6 @@ public class DefaultHttp2HeadersEncoder implements Http2HeadersEncoder, Http2Hea
@Override
public void encodeHeaders(Http2Headers headers, ByteBuf buffer) throws Http2Exception {
final OutputStream stream = new ByteBufOutputStream(buffer);
try {
if (headers.size() > headerTable.maxHeaderListSize()) {
throw connectionError(PROTOCOL_ERROR, "Number of headers (%d) exceeds maxHeaderListSize (%d)",
@ -61,24 +55,18 @@ public class DefaultHttp2HeadersEncoder implements Http2HeadersEncoder, Http2Hea
// If there was a change in the table size, serialize the output from the encoder
// resulting from that change.
if (tableSizeChangeOutput.size() > 0) {
buffer.writeBytes(tableSizeChangeOutput.toByteArray());
tableSizeChangeOutput.reset();
if (tableSizeChangeOutput.isReadable()) {
buffer.writeBytes(tableSizeChangeOutput);
tableSizeChangeOutput.clear();
}
for (Entry<CharSequence, CharSequence> header : headers) {
encodeHeader(header.getKey(), header.getValue(), stream);
encodeHeader(buffer, header.getKey(), header.getValue());
}
} catch (Http2Exception e) {
throw e;
} catch (Throwable t) {
throw connectionError(COMPRESSION_ERROR, t, "Failed encoding headers block: %s", t.getMessage());
} finally {
try {
stream.close();
} catch (IOException e) {
throw connectionError(INTERNAL_ERROR, e, e.getMessage());
}
}
}
@ -92,13 +80,8 @@ public class DefaultHttp2HeadersEncoder implements Http2HeadersEncoder, Http2Hea
return this;
}
private byte[] toBytes(CharSequence chars) {
AsciiString aString = AsciiString.of(chars);
return aString.isEntireArrayUsed() ? aString.array() : aString.toByteArray();
}
private void encodeHeader(CharSequence key, CharSequence value, OutputStream stream) throws IOException {
encoder.encodeHeader(stream, toBytes(key), toBytes(value), sensitivityDetector.isSensitive(key, value));
private void encodeHeader(ByteBuf out, CharSequence key, CharSequence value) {
encoder.encodeHeader(out, key, value, sensitivityDetector.isSensitive(key, value));
}
/**
@ -113,8 +96,6 @@ public class DefaultHttp2HeadersEncoder implements Http2HeadersEncoder, Http2Hea
try {
// No headers should be emitted. If they are, we throw.
encoder.setMaxHeaderTableSize(tableSizeChangeOutput, max);
} catch (IOException e) {
throw new Http2Exception(COMPRESSION_ERROR, e.getMessage(), e);
} catch (Throwable t) {
throw new Http2Exception(PROTOCOL_ERROR, t.getMessage(), t);
}

View File

@ -31,13 +31,17 @@
*/
package io.netty.handler.codec.http2.internal.hpack;
import io.netty.buffer.ByteBuf;
import io.netty.handler.codec.http2.Http2Headers;
import io.netty.handler.codec.http2.internal.hpack.HpackUtil.IndexType;
import io.netty.util.internal.ThrowableUtil;
import io.netty.util.AsciiString;
import java.io.IOException;
import java.io.InputStream;
import static io.netty.util.internal.EmptyArrays.EMPTY_BYTES;
import static io.netty.util.AsciiString.EMPTY_STRING;
import static java.lang.Math.min;
public final class Decoder {
@ -45,8 +49,6 @@ public final class Decoder {
new IOException("HPACK - decompression failure"), Decoder.class, "decode(...)");
private static final IOException DECODE_ULE_128_DECOMPRESSION_EXCEPTION = ThrowableUtil.unknownStackTrace(
new IOException("HPACK - decompression failure"), Decoder.class, "decodeULE128(...)");
private static final IOException READ_STRING_LITERAL_DECOMPRESSION_EXCEPTION = ThrowableUtil.unknownStackTrace(
new IOException("HPACK - decompression failure"), Decoder.class, "readStringLiteral(...)");
private static final IOException DECODE_ILLEGAL_INDEX_VALUE = ThrowableUtil.unknownStackTrace(
new IOException("HPACK - illegal index value"), Decoder.class, "decode(...)");
private static final IOException INDEX_HEADER_ILLEGAL_INDEX_VALUE = ThrowableUtil.unknownStackTrace(
@ -59,8 +61,8 @@ public final class Decoder {
new IOException("HPACK - max dynamic table size change required"), Decoder.class, "decode(...)");
private final DynamicTable dynamicTable;
private int maxHeaderSize;
private final HuffmanDecoder huffmanDecoder;
private final int maxHeaderSize;
private int maxDynamicTableSize;
private int encoderMaxDynamicTableSize;
private boolean maxDynamicTableSizeChangeRequired;
@ -73,7 +75,7 @@ public final class Decoder {
private int skipLength;
private int nameLength;
private int valueLength;
private byte[] name;
private CharSequence name;
private enum State {
READ_HEADER_REPRESENTATION,
@ -93,12 +95,13 @@ public final class Decoder {
/**
* Creates a new decoder.
*/
public Decoder(int maxHeaderSize, int maxHeaderTableSize) {
public Decoder(int maxHeaderSize, int maxHeaderTableSize, int initialHuffmanDecodeCapacity) {
dynamicTable = new DynamicTable(maxHeaderTableSize);
this.maxHeaderSize = maxHeaderSize;
maxDynamicTableSize = maxHeaderTableSize;
encoderMaxDynamicTableSize = maxHeaderTableSize;
maxDynamicTableSizeChangeRequired = false;
huffmanDecoder = new HuffmanDecoder(initialHuffmanDecodeCapacity);
reset();
}
@ -111,11 +114,11 @@ public final class Decoder {
/**
* Decode the header block into header fields.
*/
public void decode(InputStream in, HeaderListener headerListener) throws IOException {
while (in.available() > 0) {
public void decode(ByteBuf in, Http2Headers headers) throws IOException {
while (in.isReadable()) {
switch (state) {
case READ_HEADER_REPRESENTATION:
byte b = (byte) in.read();
byte b = in.readByte();
if (maxDynamicTableSizeChangeRequired && (b & 0xE0) != 0x20) {
// Encoder MUST signal maximum dynamic table size change
throw MAX_DYNAMIC_TABLE_SIZE_CHANGE_REQUIRED;
@ -128,7 +131,7 @@ public final class Decoder {
} else if (index == 0x7F) {
state = State.READ_INDEXED_HEADER;
} else {
indexHeader(index, headerListener);
indexHeader(index, headers);
}
} else if ((b & 0x40) == 0x40) {
// Literal Header Field with Incremental Indexing
@ -194,7 +197,7 @@ public final class Decoder {
throw DECODE_DECOMPRESSION_EXCEPTION;
}
indexHeader(index + headerIndex, headerListener);
indexHeader(index + headerIndex, headers);
state = State.READ_HEADER_REPRESENTATION;
break;
@ -215,7 +218,7 @@ public final class Decoder {
break;
case READ_LITERAL_HEADER_NAME_LENGTH_PREFIX:
b = (byte) in.read();
b = in.readByte();
huffmanEncoded = (b & 0x80) == 0x80;
index = b & 0x7F;
if (index == 0x7f) {
@ -228,7 +231,7 @@ public final class Decoder {
if (indexType == IndexType.NONE) {
// Name is unused so skip bytes
name = EMPTY_BYTES;
name = EMPTY_STRING;
skipLength = nameLength;
state = State.SKIP_LITERAL_HEADER_NAME;
break;
@ -237,7 +240,7 @@ public final class Decoder {
// Check name length against max dynamic table size
if (nameLength + HeaderField.HEADER_ENTRY_OVERHEAD > dynamicTable.capacity()) {
dynamicTable.clear();
name = EMPTY_BYTES;
name = EMPTY_STRING;
skipLength = nameLength;
state = State.SKIP_LITERAL_HEADER_NAME;
break;
@ -264,7 +267,7 @@ public final class Decoder {
if (exceedsMaxHeaderSize(nameLength)) {
if (indexType == IndexType.NONE) {
// Name is unused so skip bytes
name = EMPTY_BYTES;
name = EMPTY_STRING;
skipLength = nameLength;
state = State.SKIP_LITERAL_HEADER_NAME;
break;
@ -273,7 +276,7 @@ public final class Decoder {
// Check name length against max dynamic table size
if (nameLength + HeaderField.HEADER_ENTRY_OVERHEAD > dynamicTable.capacity()) {
dynamicTable.clear();
name = EMPTY_BYTES;
name = EMPTY_STRING;
skipLength = nameLength;
state = State.SKIP_LITERAL_HEADER_NAME;
break;
@ -284,7 +287,7 @@ public final class Decoder {
case READ_LITERAL_HEADER_NAME:
// Wait until entire name is readable
if (in.available() < nameLength) {
if (in.readableBytes() < nameLength) {
return;
}
@ -294,7 +297,9 @@ public final class Decoder {
break;
case SKIP_LITERAL_HEADER_NAME:
skipLength -= in.skip(skipLength);
int skip = min(in.readableBytes(), skipLength);
in.skipBytes(skip);
skipLength -= skip;
if (skipLength == 0) {
state = State.READ_LITERAL_HEADER_VALUE_LENGTH_PREFIX;
@ -302,7 +307,7 @@ public final class Decoder {
break;
case READ_LITERAL_HEADER_VALUE_LENGTH_PREFIX:
b = (byte) in.read();
b = in.readByte();
huffmanEncoded = (b & 0x80) == 0x80;
index = b & 0x7F;
if (index == 0x7f) {
@ -331,7 +336,7 @@ public final class Decoder {
}
if (valueLength == 0) {
insertHeader(headerListener, name, EMPTY_BYTES, indexType);
insertHeader(headers, name, EMPTY_STRING, indexType);
state = State.READ_HEADER_REPRESENTATION;
} else {
state = State.READ_LITERAL_HEADER_VALUE;
@ -377,18 +382,19 @@ public final class Decoder {
case READ_LITERAL_HEADER_VALUE:
// Wait until entire value is readable
if (in.available() < valueLength) {
if (in.readableBytes() < valueLength) {
return;
}
byte[] value = readStringLiteral(in, valueLength);
insertHeader(headerListener, name, value, indexType);
CharSequence value = readStringLiteral(in, valueLength);
insertHeader(headers, name, value, indexType);
state = State.READ_HEADER_REPRESENTATION;
break;
case SKIP_LITERAL_HEADER_VALUE:
valueLength -= in.skip(valueLength);
int skipBytes = min(in.readableBytes(), valueLength);
in.skipBytes(skipBytes);
valueLength -= skipBytes;
if (valueLength == 0) {
state = State.READ_HEADER_REPRESENTATION;
}
@ -474,21 +480,21 @@ public final class Decoder {
}
}
private void indexHeader(int index, HeaderListener headerListener) throws IOException {
private void indexHeader(int index, Http2Headers headers) throws IOException {
if (index <= StaticTable.length) {
HeaderField headerField = StaticTable.getEntry(index);
addHeader(headerListener, headerField.name, headerField.value, false);
addHeader(headers, headerField.name, headerField.value);
} else if (index - StaticTable.length <= dynamicTable.length()) {
HeaderField headerField = dynamicTable.getEntry(index - StaticTable.length);
addHeader(headerListener, headerField.name, headerField.value, false);
addHeader(headers, headerField.name, headerField.value);
} else {
throw INDEX_HEADER_ILLEGAL_INDEX_VALUE;
}
}
private void insertHeader(HeaderListener headerListener, byte[] name, byte[] value,
private void insertHeader(Http2Headers headers, CharSequence name, CharSequence value,
IndexType indexType) {
addHeader(headerListener, name, value, indexType == IndexType.NEVER);
addHeader(headers, name, value);
switch (indexType) {
case NONE:
@ -504,11 +510,10 @@ public final class Decoder {
}
}
private void addHeader(HeaderListener headerListener, byte[] name, byte[] value,
boolean sensitive) {
long newSize = headerSize + name.length + value.length;
private void addHeader(Http2Headers headers, CharSequence name, CharSequence value) {
long newSize = headerSize + name.length() + value.length();
if (newSize <= maxHeaderSize) {
headerListener.addHeader(name, value, sensitive);
headers.add(name, value);
headerSize = (int) newSize;
} else {
// truncation will be reported during endHeaderBlock
@ -527,32 +532,29 @@ public final class Decoder {
return true;
}
private byte[] readStringLiteral(InputStream in, int length) throws IOException {
byte[] buf = new byte[length];
if (in.read(buf) != length) {
throw READ_STRING_LITERAL_DECOMPRESSION_EXCEPTION;
}
private CharSequence readStringLiteral(ByteBuf in, int length) throws IOException {
if (huffmanEncoded) {
return Huffman.DECODER.decode(buf);
return huffmanDecoder.decode(in, length);
} else {
return buf;
byte[] buf = new byte[length];
in.readBytes(buf);
return new AsciiString(buf, false);
}
}
// Unsigned Little Endian Base 128 Variable-Length Integer Encoding
private static int decodeULE128(InputStream in) throws IOException {
in.mark(5);
private static int decodeULE128(ByteBuf in) throws IOException {
in.markReaderIndex();
int result = 0;
int shift = 0;
while (shift < 32) {
if (in.available() == 0) {
if (!in.isReadable()) {
// Buffer does not contain entire integer,
// reset reader index and return -1.
in.reset();
in.resetReaderIndex();
return -1;
}
byte b = (byte) in.read();
byte b = in.readByte();
if (shift == 28 && (b & 0xF8) != 0) {
break;
}
@ -563,7 +565,7 @@ public final class Decoder {
shift += 7;
}
// Value exceeds Integer.MAX_VALUE
in.reset();
in.resetReaderIndex();
throw DECODE_ULE_128_DECOMPRESSION_EXCEPTION;
}
}

View File

@ -31,8 +31,10 @@
*/
package io.netty.handler.codec.http2.internal.hpack;
import java.io.IOException;
import java.io.OutputStream;
import io.netty.buffer.ByteBuf;
import io.netty.util.AsciiString;
import io.netty.util.CharsetUtil;
import java.util.Arrays;
import static io.netty.handler.codec.http2.internal.hpack.HpackUtil.IndexType.INCREMENTAL;
@ -42,16 +44,17 @@ import static io.netty.handler.codec.http2.internal.hpack.HpackUtil.IndexType.NO
public final class Encoder {
private static final int BUCKET_SIZE = 17;
private static final byte[] EMPTY = {};
// for testing
private final boolean useIndexing;
private final boolean forceHuffmanOn;
private final boolean forceHuffmanOff;
private final HuffmanEncoder huffmanEncoder = new HuffmanEncoder();
// a linked hash map of header fields
private final HeaderEntry[] headerFields = new HeaderEntry[BUCKET_SIZE];
private final HeaderEntry head = new HeaderEntry(-1, EMPTY, EMPTY, Integer.MAX_VALUE, null);
private final HeaderEntry head = new HeaderEntry(-1, AsciiString.EMPTY_STRING,
AsciiString.EMPTY_STRING, Integer.MAX_VALUE, null);
private int size;
private int capacity;
@ -77,15 +80,16 @@ public final class Encoder {
this.useIndexing = useIndexing;
this.forceHuffmanOn = forceHuffmanOn;
this.forceHuffmanOff = forceHuffmanOff;
this.capacity = maxHeaderTableSize;
capacity = maxHeaderTableSize;
head.before = head.after = head;
}
/**
* Encode the header field into the header block.
*
* <strong>The given {@link CharSequence}s must be immutable!</strong>
*/
public void encodeHeader(OutputStream out, byte[] name, byte[] value, boolean sensitive)
throws IOException {
public void encodeHeader(ByteBuf out, CharSequence name, CharSequence value, boolean sensitive) {
// If the header value is sensitive then it must never be indexed
if (sensitive) {
@ -143,7 +147,7 @@ public final class Encoder {
/**
* Set the maximum table size.
*/
public void setMaxHeaderTableSize(OutputStream out, int maxHeaderTableSize) throws IOException {
public void setMaxHeaderTableSize(ByteBuf out, int maxHeaderTableSize) {
if (maxHeaderTableSize < 0) {
throw new IllegalArgumentException("Illegal Capacity: " + maxHeaderTableSize);
}
@ -165,22 +169,22 @@ public final class Encoder {
/**
* Encode integer according to Section 5.1.
*/
private static void encodeInteger(OutputStream out, int mask, int n, int i) throws IOException {
private static void encodeInteger(ByteBuf out, int mask, int n, int i) {
if (n < 0 || n > 8) {
throw new IllegalArgumentException("N: " + n);
}
int nbits = 0xFF >>> (8 - n);
if (i < nbits) {
out.write(mask | i);
out.writeByte(mask | i);
} else {
out.write(mask | nbits);
out.writeByte(mask | nbits);
int length = i - nbits;
while (true) {
for (;;) {
if ((length & ~0x7F) == 0) {
out.write(length);
out.writeByte(length);
return;
} else {
out.write((length & 0x7F) | 0x80);
out.writeByte((length & 0x7F) | 0x80);
length >>>= 7;
}
}
@ -190,23 +194,30 @@ public final class Encoder {
/**
* Encode string literal according to Section 5.2.
*/
private void encodeStringLiteral(OutputStream out, byte[] string) throws IOException {
int huffmanLength = Huffman.ENCODER.getEncodedLength(string);
if ((huffmanLength < string.length && !forceHuffmanOff) || forceHuffmanOn) {
private void encodeStringLiteral(ByteBuf out, CharSequence string) {
int huffmanLength = huffmanEncoder.getEncodedLength(string);
if ((huffmanLength < string.length() && !forceHuffmanOff) || forceHuffmanOn) {
encodeInteger(out, 0x80, 7, huffmanLength);
Huffman.ENCODER.encode(out, string);
huffmanEncoder.encode(out, string);
} else {
encodeInteger(out, 0x00, 7, string.length);
out.write(string, 0, string.length);
encodeInteger(out, 0x00, 7, string.length());
if (string instanceof AsciiString) {
// Fast-path
AsciiString asciiString = (AsciiString) string;
out.writeBytes(asciiString.array(), asciiString.arrayOffset(), asciiString.length());
} else {
// Only ASCII is allowed in HTTP/2 headers, so it's fine to use this.
// https://tools.ietf.org/html/rfc7540#section-8.1.2
out.writeCharSequence(string, CharsetUtil.ISO_8859_1);
}
}
}
/**
* Encode literal header field according to Section 6.2.
*/
private void encodeLiteral(OutputStream out, byte[] name, byte[] value, HpackUtil.IndexType indexType,
int nameIndex)
throws IOException {
private void encodeLiteral(ByteBuf out, CharSequence name, CharSequence value, HpackUtil.IndexType indexType,
int nameIndex) {
int mask;
int prefixBits;
switch (indexType) {
@ -232,7 +243,7 @@ public final class Encoder {
encodeStringLiteral(out, value);
}
private int getNameIndex(byte[] name) {
private int getNameIndex(CharSequence name) {
int index = StaticTable.getIndex(name);
if (index == -1) {
index = getIndex(name);
@ -247,7 +258,7 @@ public final class Encoder {
* Ensure that the dynamic table has enough room to hold 'headerSize' more bytes. Removes the
* oldest entry from the dynamic table until sufficient space is available.
*/
private void ensureCapacity(int headerSize) throws IOException {
private void ensureCapacity(int headerSize) {
while (size + headerSize > capacity) {
int index = length();
if (index == 0) {
@ -286,7 +297,7 @@ public final class Encoder {
* Returns the header entry with the lowest index value for the header field. Returns null if
* header field is not in the dynamic table.
*/
private HeaderEntry getEntry(byte[] name, byte[] value) {
private HeaderEntry getEntry(CharSequence name, CharSequence value) {
if (length() == 0 || name == null || value == null) {
return null;
}
@ -306,7 +317,7 @@ public final class Encoder {
* Returns the lowest index value for the header field name in the dynamic table. Returns -1 if
* the header field name is not in the dynamic table.
*/
private int getIndex(byte[] name) {
private int getIndex(CharSequence name) {
if (length() == 0 || name == null) {
return -1;
}
@ -327,7 +338,7 @@ public final class Encoder {
*/
private int getIndex(int index) {
if (index == -1) {
return index;
return -1;
}
return index - head.before.index + 1;
}
@ -337,7 +348,7 @@ public final class Encoder {
* the size of the table and the new header field is less than the table's capacity. If the size
* of the new entry is larger than the table's capacity, the dynamic table will be cleared.
*/
private void add(byte[] name, byte[] value) {
private void add(CharSequence name, CharSequence value) {
int headerSize = HeaderField.sizeOf(name, value);
// Clear the table if the header field size is larger than the capacity.
@ -351,10 +362,6 @@ public final class Encoder {
remove();
}
// Copy name and value that modifications of original do not affect the dynamic table.
name = Arrays.copyOf(name, name.length);
value = Arrays.copyOf(value, value.length);
int h = hash(name);
int i = index(h);
HeaderEntry old = headerFields[i];
@ -400,16 +407,16 @@ public final class Encoder {
private void clear() {
Arrays.fill(headerFields, null);
head.before = head.after = head;
this.size = 0;
size = 0;
}
/**
* Returns the hash code for the given header field name.
*/
private static int hash(byte[] name) {
private static int hash(CharSequence name) {
int h = 0;
for (int i = 0; i < name.length; i++) {
h = 31 * h + name[i];
for (int i = 0; i < name.length(); i++) {
h = 31 * h + name.charAt(i);
}
if (h > 0) {
return h;
@ -444,7 +451,7 @@ public final class Encoder {
/**
* Creates new entry.
*/
HeaderEntry(int hash, byte[] name, byte[] value, int index, HeaderEntry next) {
HeaderEntry(int hash, CharSequence name, CharSequence value, int index, HeaderEntry next) {
super(name, value);
this.index = index;
this.hash = hash;

View File

@ -31,35 +31,30 @@
*/
package io.netty.handler.codec.http2.internal.hpack;
import static io.netty.util.CharsetUtil.ISO_8859_1;
import static io.netty.util.internal.ObjectUtil.checkNotNull;
class HeaderField implements Comparable<HeaderField> {
class HeaderField {
// Section 4.1. Calculating Table Size
// The additional 32 octets account for an estimated
// overhead associated with the structure.
static final int HEADER_ENTRY_OVERHEAD = 32;
static int sizeOf(byte[] name, byte[] value) {
return name.length + value.length + HEADER_ENTRY_OVERHEAD;
static int sizeOf(CharSequence name, CharSequence value) {
return name.length() + value.length() + HEADER_ENTRY_OVERHEAD;
}
final byte[] name;
final byte[] value;
final CharSequence name;
final CharSequence value;
// This constructor can only be used if name and value are ISO-8859-1 encoded.
HeaderField(String name, String value) {
this(name.getBytes(ISO_8859_1), value.getBytes(ISO_8859_1));
}
HeaderField(byte[] name, byte[] value) {
HeaderField(CharSequence name, CharSequence value) {
this.name = checkNotNull(name, "name");
this.value = checkNotNull(value, "value");
}
int size() {
return name.length + value.length + HEADER_ENTRY_OVERHEAD;
return name.length() + value.length() + HEADER_ENTRY_OVERHEAD;
}
@Override
@ -68,32 +63,6 @@ class HeaderField implements Comparable<HeaderField> {
return super.hashCode();
}
@Override
public int compareTo(HeaderField anotherHeaderField) {
int ret = compareTo(name, anotherHeaderField.name);
if (ret == 0) {
ret = compareTo(value, anotherHeaderField.value);
}
return ret;
}
private int compareTo(byte[] s1, byte[] s2) {
int len1 = s1.length;
int len2 = s2.length;
int lim = Math.min(len1, len2);
int k = 0;
while (k < lim) {
byte b1 = s1[k];
byte b2 = s2[k];
if (b1 != b2) {
return b1 - b2;
}
k++;
}
return len1 - len2;
}
@Override
public boolean equals(Object obj) {
if (obj == this) {
@ -110,8 +79,6 @@ class HeaderField implements Comparable<HeaderField> {
@Override
public String toString() {
String nameString = new String(name);
String valueString = new String(value);
return nameString + ": " + valueString;
return name + ": " + value;
}
}

View File

@ -1,41 +0,0 @@
/*
* Copyright 2015 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
/*
* Copyright 2014 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.netty.handler.codec.http2.internal.hpack;
public interface HeaderListener {
/**
 * addHeader is called by the decoder during header field emission.
 * The name and value byte arrays must not be modified.
 */
void addHeader(byte[] name, byte[] value, boolean sensitive);
}

View File

@ -31,19 +31,17 @@
*/
package io.netty.handler.codec.http2.internal.hpack;
import java.nio.charset.Charset;
final class HpackUtil {
/**
* A string compare that doesn't leak timing information.
*/
static boolean equals(byte[] s1, byte[] s2) {
if (s1.length != s2.length) {
static boolean equals(CharSequence s1, CharSequence s2) {
if (s1.length() != s2.length()) {
return false;
}
char c = 0;
for (int i = 0; i < s1.length; i++) {
c |= s1[i] ^ s2[i];
for (int i = 0; i < s1.length(); i++) {
c |= s1.charAt(i) ^ s2.charAt(i);
}
return c == 0;
}

View File

@ -1,54 +0,0 @@
/*
* Copyright 2015 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
/*
* Copyright 2014 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.netty.handler.codec.http2.internal.hpack;
import static io.netty.handler.codec.http2.internal.hpack.HpackUtil.HUFFMAN_CODES;
import static io.netty.handler.codec.http2.internal.hpack.HpackUtil.HUFFMAN_CODE_LENGTHS;
public final class Huffman {
/**
* Huffman Decoder
*/
public static final HuffmanDecoder DECODER =
new HuffmanDecoder(HUFFMAN_CODES, HUFFMAN_CODE_LENGTHS);
/**
* Huffman Encoder
*/
public static final HuffmanEncoder ENCODER =
new HuffmanEncoder(HUFFMAN_CODES, HUFFMAN_CODE_LENGTHS);
private Huffman() {
// utility class
}
}

View File

@ -32,10 +32,16 @@
package io.netty.handler.codec.http2.internal.hpack;
import io.netty.util.internal.ThrowableUtil;
import io.netty.buffer.ByteBuf;
import io.netty.util.AsciiString;
import io.netty.util.ByteProcessor;
import io.netty.util.internal.ObjectUtil;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import static io.netty.handler.codec.http2.internal.hpack.HpackUtil.HUFFMAN_CODES;
import static io.netty.handler.codec.http2.internal.hpack.HpackUtil.HUFFMAN_CODE_LENGTHS;
final class HuffmanDecoder {
private static final IOException EOS_DECODED = ThrowableUtil.unknownStackTrace(
@ -43,19 +49,12 @@ final class HuffmanDecoder {
private static final IOException INVALID_PADDING = ThrowableUtil.unknownStackTrace(
new IOException("HPACK - Invalid Padding"), HuffmanDecoder.class, "decode(...)");
private final Node root;
private static final Node ROOT = buildTree(HUFFMAN_CODES, HUFFMAN_CODE_LENGTHS);
/**
* Creates a new Huffman decoder with the specified Huffman coding.
*
* @param codes the Huffman codes indexed by symbol
* @param lengths the length of each Huffman code
*/
HuffmanDecoder(int[] codes, byte[] lengths) {
if (codes.length != 257 || codes.length != lengths.length) {
throw new IllegalArgumentException("invalid Huffman coding");
}
root = buildTree(codes, lengths);
private final DecoderProcessor processor;
HuffmanDecoder(int initialCapacity) {
processor = new DecoderProcessor(initialCapacity);
}
/**
@ -66,84 +65,11 @@ final class HuffmanDecoder {
* @throws IOException if an I/O error occurs. In particular, an <code>IOException</code> may be
* thrown if the output stream has been closed.
*/
public byte[] decode(byte[] buf) throws IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
/*
* The idea here is to consume whole bytes at a time rather than individual bits. node
* represents the Huffman tree, with all bit patterns denormalized as 256 children. Each
* child represents the last 8 bits of the huffman code. The parents of each child each
* represent the successive 8 bit chunks that lead up to the last most part. 8 bit bytes
 * from buf are used to traverse this tree until a terminal node is found.
*
* current is a bit buffer. The low order bits represent how much of the huffman code has
* not been used to traverse the tree. Thus, the high order bits are just garbage.
* currentBits represents how many of the low order bits of current are actually valid.
* currentBits will vary between 0 and 15.
*
 * symbolBits is the number of bits of the symbol being decoded, *including* all those
* of the parent nodes. symbolBits tells how far down the tree we are. For example, when
* decoding the invalid sequence {0xff, 0xff}, currentBits will be 0, but symbolBits will be
* 16. This is used to know if buf ended early (before consuming a whole symbol) or if
* there is too much padding.
*/
Node node = root;
int current = 0;
int currentBits = 0;
int symbolBits = 0;
for (int i = 0; i < buf.length; i++) {
int b = buf[i] & 0xFF;
current = (current << 8) | b;
currentBits += 8;
symbolBits += 8;
// While there are unconsumed bits in current, keep consuming symbols.
while (currentBits >= 8) {
int c = (current >>> (currentBits - 8)) & 0xFF;
node = node.children[c];
currentBits -= node.bits;
if (node.isTerminal()) {
if (node.symbol == HpackUtil.HUFFMAN_EOS) {
throw EOS_DECODED;
}
baos.write(node.symbol);
node = root;
// Upon consuming a whole symbol, reset the symbol bits to the number of bits
// left over in the byte.
symbolBits = currentBits;
}
}
}
/*
* We have consumed all the bytes in buf, but haven't consumed all the symbols. We may be on
* a partial symbol, so consume until there is nothing left. This will loop at most 2 times.
*/
while (currentBits > 0) {
int c = (current << (8 - currentBits)) & 0xFF;
node = node.children[c];
if (node.isTerminal() && node.bits <= currentBits) {
if (node.symbol == HpackUtil.HUFFMAN_EOS) {
throw EOS_DECODED;
}
currentBits -= node.bits;
baos.write(node.symbol);
node = root;
symbolBits = currentBits;
} else {
break;
}
}
// Section 5.2. String Literal Representation
// A padding strictly longer than 7 bits MUST be treated as a decoding error.
// Padding not corresponding to the most significant bits of the code
// for the EOS symbol (0xFF) MUST be treated as a decoding error.
int mask = (1 << symbolBits) - 1;
if (symbolBits > 7 || (current & mask) != mask) {
throw INVALID_PADDING;
}
return baos.toByteArray();
public AsciiString decode(ByteBuf buf, int length) throws IOException {
processor.reset();
buf.forEachByte(buf.readerIndex(), length, processor);
buf.skipBytes(length);
return processor.end();
}
private static final class Node {
@ -155,7 +81,7 @@ final class HuffmanDecoder {
/**
* Construct an internal node
*/
private Node() {
Node() {
symbol = 0;
bits = 8;
children = new Node[256];
@ -167,7 +93,7 @@ final class HuffmanDecoder {
* @param symbol the symbol the node represents
* @param bits the number of bits matched by this node
*/
private Node(int symbol, int bits) {
Node(int symbol, int bits) {
assert bits > 0 && bits <= 8;
this.symbol = symbol;
this.bits = bits;
@ -210,4 +136,113 @@ final class HuffmanDecoder {
current.children[i] = terminal;
}
}
private static final class DecoderProcessor implements ByteProcessor {
private final int initialCapacity;
private byte[] bytes;
private int index;
private Node node;
private int current;
private int currentBits;
private int symbolBits;
DecoderProcessor(int initialCapacity) {
this.initialCapacity = ObjectUtil.checkPositive(initialCapacity, "initialCapacity");
}
void reset() {
node = ROOT;
current = 0;
currentBits = 0;
symbolBits = 0;
bytes = new byte[initialCapacity];
index = 0;
}
/*
* The idea here is to consume whole bytes at a time rather than individual bits. node
* represents the Huffman tree, with all bit patterns denormalized as 256 children. Each
* child represents the last 8 bits of the huffman code. The parents of each child each
* represent the successive 8 bit chunks that lead up to the last most part. 8 bit bytes
 * from buf are used to traverse this tree until a terminal node is found.
*
* current is a bit buffer. The low order bits represent how much of the huffman code has
* not been used to traverse the tree. Thus, the high order bits are just garbage.
* currentBits represents how many of the low order bits of current are actually valid.
* currentBits will vary between 0 and 15.
*
 * symbolBits is the number of bits of the symbol being decoded, *including* all those
* of the parent nodes. symbolBits tells how far down the tree we are. For example, when
* decoding the invalid sequence {0xff, 0xff}, currentBits will be 0, but symbolBits will be
* 16. This is used to know if buf ended early (before consuming a whole symbol) or if
* there is too much padding.
*/
@Override
public boolean process(byte value) throws IOException {
current = (current << 8) | (value & 0xFF);
currentBits += 8;
symbolBits += 8;
// While there are unconsumed bits in current, keep consuming symbols.
do {
node = node.children[(current >>> (currentBits - 8)) & 0xFF];
currentBits -= node.bits;
if (node.isTerminal()) {
if (node.symbol == HpackUtil.HUFFMAN_EOS) {
throw EOS_DECODED;
}
append(node.symbol);
node = ROOT;
// Upon consuming a whole symbol, reset the symbol bits to the number of bits
// left over in the byte.
symbolBits = currentBits;
}
} while (currentBits >= 8);
return true;
}
AsciiString end() throws IOException {
/*
* We have consumed all the bytes in buf, but haven't consumed all the symbols. We may be on
* a partial symbol, so consume until there is nothing left. This will loop at most 2 times.
*/
while (currentBits > 0) {
node = node.children[(current << (8 - currentBits)) & 0xFF];
if (node.isTerminal() && node.bits <= currentBits) {
if (node.symbol == HpackUtil.HUFFMAN_EOS) {
throw EOS_DECODED;
}
currentBits -= node.bits;
append(node.symbol);
node = ROOT;
symbolBits = currentBits;
} else {
break;
}
}
// Section 5.2. String Literal Representation
// A padding strictly longer than 7 bits MUST be treated as a decoding error.
// Padding not corresponding to the most significant bits of the code
// for the EOS symbol (0xFF) MUST be treated as a decoding error.
int mask = (1 << symbolBits) - 1;
if (symbolBits > 7 || (current & mask) != mask) {
throw INVALID_PADDING;
}
return new AsciiString(bytes, 0, index, false);
}
private void append(int i) {
try {
bytes[index] = (byte) i;
} catch (IndexOutOfBoundsException ignore) {
// Always just expand by INITIAL_SIZE
byte[] newBytes = new byte[bytes.length + initialCapacity];
System.arraycopy(bytes, 0, newBytes, 0, bytes.length);
bytes = newBytes;
bytes[index] = (byte) i;
}
index++;
}
}
}

View File

@ -31,13 +31,26 @@
*/
package io.netty.handler.codec.http2.internal.hpack;
import java.io.IOException;
import java.io.OutputStream;
import io.netty.buffer.ByteBuf;
import io.netty.util.AsciiString;
import io.netty.util.ByteProcessor;
import io.netty.util.internal.ObjectUtil;
import io.netty.util.internal.PlatformDependent;
import static io.netty.handler.codec.http2.internal.hpack.HpackUtil.HUFFMAN_CODES;
import static io.netty.handler.codec.http2.internal.hpack.HpackUtil.HUFFMAN_CODE_LENGTHS;
final class HuffmanEncoder {
private final int[] codes;
private final byte[] lengths;
private final EncodedLengthProcessor encodedLengthProcessor = new EncodedLengthProcessor();
private final EncodeProcessor encodeProcessor = new EncodeProcessor();
HuffmanEncoder() {
this(HUFFMAN_CODES, HUFFMAN_CODE_LENGTHS);
}
/**
* Creates a new Huffman encoder with the specified Huffman coding.
@ -45,7 +58,7 @@ final class HuffmanEncoder {
* @param codes the Huffman codes indexed by symbol
* @param lengths the length of each Huffman code
*/
HuffmanEncoder(int[] codes, byte[] lengths) {
private HuffmanEncoder(int[] codes, byte[] lengths) {
this.codes = codes;
this.lengths = lengths;
}
@ -55,40 +68,30 @@ final class HuffmanEncoder {
*
* @param out the output stream for the compressed data
* @param data the string literal to be Huffman encoded
* @throws IOException if an I/O error occurs.
* @see HuffmanEncoder#encode(OutputStream, byte[], int, int)
*/
public void encode(OutputStream out, byte[] data) throws IOException {
encode(out, data, 0, data.length);
public void encode(ByteBuf out, CharSequence data) {
ObjectUtil.checkNotNull(out, "out");
if (data instanceof AsciiString) {
AsciiString string = (AsciiString) data;
try {
encodeProcessor.out = out;
string.forEachByte(encodeProcessor);
} catch (Exception e) {
PlatformDependent.throwException(e);
} finally {
encodeProcessor.end();
}
} else {
encodeSlowPath(out, data);
}
}
/**
* Compresses the input string literal using the Huffman coding.
*
* @param out the output stream for the compressed data
* @param data the string literal to be Huffman encoded
* @param off the start offset in the data
* @param len the number of bytes to encode
* @throws IOException if an I/O error occurs. In particular, an <code>IOException</code> may be
* thrown if the output stream has been closed.
*/
public void encode(OutputStream out, byte[] data, int off, int len) throws IOException {
if (out == null) {
throw new NullPointerException("out");
} else if (data == null) {
throw new NullPointerException("data");
} else if (off < 0 || len < 0 || (off + len) < 0 || off > data.length ||
(off + len) > data.length) {
throw new IndexOutOfBoundsException();
} else if (len == 0) {
return;
}
private void encodeSlowPath(ByteBuf out, CharSequence data) {
long current = 0;
int n = 0;
for (int i = 0; i < len; i++) {
int b = data[off + i] & 0xFF;
for (int i = 0; i < data.length(); i++) {
int b = data.charAt(i) & 0xFF;
int code = codes[b];
int nbits = lengths[b];
@ -98,14 +101,14 @@ final class HuffmanEncoder {
while (n >= 8) {
n -= 8;
out.write((int) (current >> n));
out.writeByte((int) (current >> n));
}
}
if (n > 0) {
current <<= 8 - n;
current |= 0xFF >>> n; // this should be EOS symbol
out.write((int) current);
out.writeByte((int) current);
}
}
@ -115,14 +118,81 @@ final class HuffmanEncoder {
* @param data the string literal to be Huffman encoded
* @return the number of bytes required to Huffman encode <code>data</code>
*/
public int getEncodedLength(byte[] data) {
if (data == null) {
throw new NullPointerException("data");
public int getEncodedLength(CharSequence data) {
if (data instanceof AsciiString) {
AsciiString string = (AsciiString) data;
try {
encodedLengthProcessor.reset();
string.forEachByte(encodedLengthProcessor);
return encodedLengthProcessor.length();
} catch (Exception e) {
PlatformDependent.throwException(e);
return -1;
}
} else {
return getEncodedLengthSlowPath(data);
}
}
private int getEncodedLengthSlowPath(CharSequence data) {
long len = 0;
for (byte b : data) {
len += lengths[b & 0xFF];
for (int i = 0; i < data.length(); i++) {
len += lengths[data.charAt(i) & 0xFF];
}
return (int) ((len + 7) >> 3);
}
private final class EncodeProcessor implements ByteProcessor {
ByteBuf out;
private long current;
private int n;
@Override
public boolean process(byte value) {
int b = value & 0xFF;
int nbits = lengths[b];
current <<= nbits;
current |= codes[b];
n += nbits;
while (n >= 8) {
n -= 8;
out.writeByte((int) (current >> n));
}
return true;
}
void end() {
try {
if (n > 0) {
current <<= 8 - n;
current |= 0xFF >>> n; // this should be EOS symbol
out.writeByte((int) current);
}
} finally {
out = null;
current = 0;
n = 0;
}
}
}
private final class EncodedLengthProcessor implements ByteProcessor {
private long len;
@Override
public boolean process(byte value) {
len += lengths[value & 0xFF];
return true;
}
void reset() {
len = 0;
}
int length() {
return (int) ((len + 7) >> 3);
}
}
}

View File

@ -31,84 +31,90 @@
*/
package io.netty.handler.codec.http2.internal.hpack;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import io.netty.handler.codec.UnsupportedValueConverter;
import io.netty.handler.codec.http2.CharSequenceMap;
import io.netty.util.AsciiString;
import static io.netty.util.CharsetUtil.ISO_8859_1;
import java.util.Arrays;
import java.util.List;
final class StaticTable {
private static final String EMPTY = "";
// Appendix A: Static Table
// http://tools.ietf.org/html/rfc7541#appendix-A
private static final List<HeaderField> STATIC_TABLE = Arrays.asList(
/* 1 */ new HeaderField(":authority", EMPTY),
/* 2 */ new HeaderField(":method", "GET"),
/* 3 */ new HeaderField(":method", "POST"),
/* 4 */ new HeaderField(":path", "/"),
/* 5 */ new HeaderField(":path", "/index.html"),
/* 6 */ new HeaderField(":scheme", "http"),
/* 7 */ new HeaderField(":scheme", "https"),
/* 8 */ new HeaderField(":status", "200"),
/* 9 */ new HeaderField(":status", "204"),
/* 10 */ new HeaderField(":status", "206"),
/* 11 */ new HeaderField(":status", "304"),
/* 12 */ new HeaderField(":status", "400"),
/* 13 */ new HeaderField(":status", "404"),
/* 14 */ new HeaderField(":status", "500"),
/* 15 */ new HeaderField("accept-charset", EMPTY),
/* 16 */ new HeaderField("accept-encoding", "gzip, deflate"),
/* 17 */ new HeaderField("accept-language", EMPTY),
/* 18 */ new HeaderField("accept-ranges", EMPTY),
/* 19 */ new HeaderField("accept", EMPTY),
/* 20 */ new HeaderField("access-control-allow-origin", EMPTY),
/* 21 */ new HeaderField("age", EMPTY),
/* 22 */ new HeaderField("allow", EMPTY),
/* 23 */ new HeaderField("authorization", EMPTY),
/* 24 */ new HeaderField("cache-control", EMPTY),
/* 25 */ new HeaderField("content-disposition", EMPTY),
/* 26 */ new HeaderField("content-encoding", EMPTY),
/* 27 */ new HeaderField("content-language", EMPTY),
/* 28 */ new HeaderField("content-length", EMPTY),
/* 29 */ new HeaderField("content-location", EMPTY),
/* 30 */ new HeaderField("content-range", EMPTY),
/* 31 */ new HeaderField("content-type", EMPTY),
/* 32 */ new HeaderField("cookie", EMPTY),
/* 33 */ new HeaderField("date", EMPTY),
/* 34 */ new HeaderField("etag", EMPTY),
/* 35 */ new HeaderField("expect", EMPTY),
/* 36 */ new HeaderField("expires", EMPTY),
/* 37 */ new HeaderField("from", EMPTY),
/* 38 */ new HeaderField("host", EMPTY),
/* 39 */ new HeaderField("if-match", EMPTY),
/* 40 */ new HeaderField("if-modified-since", EMPTY),
/* 41 */ new HeaderField("if-none-match", EMPTY),
/* 42 */ new HeaderField("if-range", EMPTY),
/* 43 */ new HeaderField("if-unmodified-since", EMPTY),
/* 44 */ new HeaderField("last-modified", EMPTY),
/* 45 */ new HeaderField("link", EMPTY),
/* 46 */ new HeaderField("location", EMPTY),
/* 47 */ new HeaderField("max-forwards", EMPTY),
/* 48 */ new HeaderField("proxy-authenticate", EMPTY),
/* 49 */ new HeaderField("proxy-authorization", EMPTY),
/* 50 */ new HeaderField("range", EMPTY),
/* 51 */ new HeaderField("referer", EMPTY),
/* 52 */ new HeaderField("refresh", EMPTY),
/* 53 */ new HeaderField("retry-after", EMPTY),
/* 54 */ new HeaderField("server", EMPTY),
/* 55 */ new HeaderField("set-cookie", EMPTY),
/* 56 */ new HeaderField("strict-transport-security", EMPTY),
/* 57 */ new HeaderField("transfer-encoding", EMPTY),
/* 58 */ new HeaderField("user-agent", EMPTY),
/* 59 */ new HeaderField("vary", EMPTY),
/* 60 */ new HeaderField("via", EMPTY),
/* 61 */ new HeaderField("www-authenticate", EMPTY)
/* 1 */ newEmptyHeaderField(":authority"),
/* 2 */ newHeaderField(":method", "GET"),
/* 3 */ newHeaderField(":method", "POST"),
/* 4 */ newHeaderField(":path", "/"),
/* 5 */ newHeaderField(":path", "/index.html"),
/* 6 */ newHeaderField(":scheme", "http"),
/* 7 */ newHeaderField(":scheme", "https"),
/* 8 */ newHeaderField(":status", "200"),
/* 9 */ newHeaderField(":status", "204"),
/* 10 */ newHeaderField(":status", "206"),
/* 11 */ newHeaderField(":status", "304"),
/* 12 */ newHeaderField(":status", "400"),
/* 13 */ newHeaderField(":status", "404"),
/* 14 */ newHeaderField(":status", "500"),
/* 15 */ newEmptyHeaderField("accept-charset"),
/* 16 */ newHeaderField("accept-encoding", "gzip, deflate"),
/* 17 */ newEmptyHeaderField("accept-language"),
/* 18 */ newEmptyHeaderField("accept-ranges"),
/* 19 */ newEmptyHeaderField("accept"),
/* 20 */ newEmptyHeaderField("access-control-allow-origin"),
/* 21 */ newEmptyHeaderField("age"),
/* 22 */ newEmptyHeaderField("allow"),
/* 23 */ newEmptyHeaderField("authorization"),
/* 24 */ newEmptyHeaderField("cache-control"),
/* 25 */ newEmptyHeaderField("content-disposition"),
/* 26 */ newEmptyHeaderField("content-encoding"),
/* 27 */ newEmptyHeaderField("content-language"),
/* 28 */ newEmptyHeaderField("content-length"),
/* 29 */ newEmptyHeaderField("content-location"),
/* 30 */ newEmptyHeaderField("content-range"),
/* 31 */ newEmptyHeaderField("content-type"),
/* 32 */ newEmptyHeaderField("cookie"),
/* 33 */ newEmptyHeaderField("date"),
/* 34 */ newEmptyHeaderField("etag"),
/* 35 */ newEmptyHeaderField("expect"),
/* 36 */ newEmptyHeaderField("expires"),
/* 37 */ newEmptyHeaderField("from"),
/* 38 */ newEmptyHeaderField("host"),
/* 39 */ newEmptyHeaderField("if-match"),
/* 40 */ newEmptyHeaderField("if-modified-since"),
/* 41 */ newEmptyHeaderField("if-none-match"),
/* 42 */ newEmptyHeaderField("if-range"),
/* 43 */ newEmptyHeaderField("if-unmodified-since"),
/* 44 */ newEmptyHeaderField("last-modified"),
/* 45 */ newEmptyHeaderField("link"),
/* 46 */ newEmptyHeaderField("location"),
/* 47 */ newEmptyHeaderField("max-forwards"),
/* 48 */ newEmptyHeaderField("proxy-authenticate"),
/* 49 */ newEmptyHeaderField("proxy-authorization"),
/* 50 */ newEmptyHeaderField("range"),
/* 51 */ newEmptyHeaderField("referer"),
/* 52 */ newEmptyHeaderField("refresh"),
/* 53 */ newEmptyHeaderField("retry-after"),
/* 54 */ newEmptyHeaderField("server"),
/* 55 */ newEmptyHeaderField("set-cookie"),
/* 56 */ newEmptyHeaderField("strict-transport-security"),
/* 57 */ newEmptyHeaderField("transfer-encoding"),
/* 58 */ newEmptyHeaderField("user-agent"),
/* 59 */ newEmptyHeaderField("vary"),
/* 60 */ newEmptyHeaderField("via"),
/* 61 */ newEmptyHeaderField("www-authenticate")
);
private static final Map<String, Integer> STATIC_INDEX_BY_NAME = createMap();
private static HeaderField newEmptyHeaderField(CharSequence name) {
return newHeaderField(name, AsciiString.EMPTY_STRING);
}
private static HeaderField newHeaderField(CharSequence name, CharSequence value) {
return new HeaderField(AsciiString.of(name), AsciiString.of(value));
}
private static final CharSequenceMap<Integer> STATIC_INDEX_BY_NAME = createMap();
/**
* The number of header fields in the static table.
@ -126,9 +132,8 @@ final class StaticTable {
* Returns the lowest index value for the given header field name in the static table. Returns
* -1 if the header field name is not in the static table.
*/
static int getIndex(byte[] name) {
String nameString = new String(name, 0, name.length, ISO_8859_1);
Integer index = STATIC_INDEX_BY_NAME.get(nameString);
static int getIndex(CharSequence name) {
Integer index = STATIC_INDEX_BY_NAME.get(name);
if (index == null) {
return -1;
}
@ -139,7 +144,7 @@ final class StaticTable {
* Returns the index value for the given header field in the static table. Returns -1 if the
* header field is not in the static table.
*/
static int getIndex(byte[] name, byte[] value) {
static int getIndex(CharSequence name, CharSequence value) {
int index = getIndex(name);
if (index == -1) {
return -1;
@ -160,16 +165,18 @@ final class StaticTable {
return -1;
}
// create a map of header name to index value to allow quick lookup
private static Map<String, Integer> createMap() {
// create a CharSequenceMap of header name to index value to allow quick lookup
private static CharSequenceMap<Integer> createMap() {
int length = STATIC_TABLE.size();
HashMap<String, Integer> ret = new HashMap<String, Integer>(length);
@SuppressWarnings("unchecked")
CharSequenceMap<Integer> ret = new CharSequenceMap<Integer>(true,
UnsupportedValueConverter.<Integer>instance(), length);
// Iterate through the static table in reverse order to
// save the smallest index for a given name in the map.
for (int index = length; index > 0; index--) {
HeaderField entry = getEntry(index);
String name = new String(entry.name, 0, entry.name.length, ISO_8859_1);
ret.put(name, index);
CharSequence name = entry.name;
ret.set(name, index);
}
return ret;
}

View File

@ -31,6 +31,8 @@
*/
/**
* <a href="http://tools.ietf.org/html/rfc7541">HPACK: Header Compression for HTTP/2</a>
* <a href="http://tools.ietf.org/html/rfc7541">HPACK: Header Compression for HTTP/2</a>.
* Please note this implementation is only compliant when used with HTTP/2 and so not meant to be used outside of
* this scope.
*/
package io.netty.handler.codec.http2.internal.hpack;

View File

@ -22,8 +22,6 @@ import io.netty.util.AsciiString;
import org.junit.Before;
import org.junit.Test;
import java.io.ByteArrayOutputStream;
import static io.netty.handler.codec.http2.Http2CodecUtil.DEFAULT_MAX_HEADER_SIZE;
import static io.netty.handler.codec.http2.Http2CodecUtil.MAX_HEADER_TABLE_SIZE;
import static io.netty.handler.codec.http2.Http2TestUtil.randomBytes;
@ -73,12 +71,12 @@ public class DefaultHttp2HeadersDecoderTest {
private static ByteBuf encode(byte[]... entries) throws Exception {
Encoder encoder = new Encoder(MAX_HEADER_TABLE_SIZE);
ByteArrayOutputStream stream = new ByteArrayOutputStream();
ByteBuf out = Unpooled.buffer();
for (int ix = 0; ix < entries.length;) {
byte[] key = entries[ix++];
byte[] value = entries[ix++];
encoder.encodeHeader(stream, key, value, false);
encoder.encodeHeader(out, new AsciiString(key, false), new AsciiString(value, false), false);
}
return Unpooled.wrappedBuffer(stream.toByteArray());
return out;
}
}

View File

@ -31,14 +31,16 @@
*/
package io.netty.handler.codec.http2.internal.hpack;
import io.netty.util.CharsetUtil;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.handler.codec.http2.Http2Headers;
import org.junit.Before;
import org.junit.Test;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import static io.netty.util.internal.EmptyArrays.EMPTY_BYTES;
import static io.netty.util.AsciiString.EMPTY_STRING;
import static io.netty.util.AsciiString.of;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
@ -54,74 +56,70 @@ public class DecoderTest {
private static final int MAX_HEADER_TABLE_SIZE = 4096;
private Decoder decoder;
private HeaderListener mockListener;
private Http2Headers mockHeaders;
private static String hex(String s) {
return Hex.encodeHexString(s.getBytes());
}
private static byte[] getBytes(String s) {
return s.getBytes(CharsetUtil.ISO_8859_1);
}
private void decode(String encoded) throws IOException {
byte[] b = Hex.decodeHex(encoded.toCharArray());
ByteArrayInputStream in = new ByteArrayInputStream(b);
ByteBuf in = Unpooled.wrappedBuffer(b);
try {
decoder.decode(in, mockListener);
decoder.decode(in, mockHeaders);
} finally {
in.close();
in.release();
}
}
@Before
public void setUp() {
decoder = new Decoder(MAX_HEADER_SIZE, MAX_HEADER_TABLE_SIZE);
mockListener = mock(HeaderListener.class);
decoder = new Decoder(MAX_HEADER_SIZE, MAX_HEADER_TABLE_SIZE, 32);
mockHeaders = mock(Http2Headers.class);
}
@Test
public void testLiteralHuffmanEncodedWithEmptyNameAndValue() throws IOException {
byte[] input = {0, (byte) 0x80, 0};
ByteArrayInputStream in = new ByteArrayInputStream(input);
ByteBuf in = Unpooled.wrappedBuffer(input);
try {
decoder.decode(in, mockListener);
verify(mockListener, times(1)).addHeader(EMPTY_BYTES, EMPTY_BYTES, false);
decoder.decode(in, mockHeaders);
verify(mockHeaders, times(1)).add(EMPTY_STRING, EMPTY_STRING);
} finally {
in.close();
in.release();
}
}
@Test(expected = IOException.class)
public void testLiteralHuffmanEncodedWithPaddingGreaterThan7Throws() throws IOException {
byte[] input = {0, (byte) 0x81, -1};
ByteArrayInputStream in = new ByteArrayInputStream(input);
ByteBuf in = Unpooled.wrappedBuffer(input);
try {
decoder.decode(in, mockListener);
decoder.decode(in, mockHeaders);
} finally {
in.close();
in.release();
}
}
@Test(expected = IOException.class)
public void testLiteralHuffmanEncodedWithDecodingEOSThrows() throws IOException {
byte[] input = {0, (byte) 0x84, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF};
ByteArrayInputStream in = new ByteArrayInputStream(input);
ByteBuf in = Unpooled.wrappedBuffer(input);
try {
decoder.decode(in, mockListener);
decoder.decode(in, mockHeaders);
} finally {
in.close();
in.release();
}
}
@Test(expected = IOException.class)
public void testLiteralHuffmanEncodedWithPaddingNotCorrespondingToMSBThrows() throws IOException {
byte[] input = {0, (byte) 0x81, 0};
ByteArrayInputStream in = new ByteArrayInputStream(input);
ByteBuf in = Unpooled.wrappedBuffer(input);
try {
decoder.decode(in, mockListener);
decoder.decode(in, mockHeaders);
} finally {
in.close();
in.release();
}
}
@ -129,14 +127,14 @@ public class DecoderTest {
public void testIncompleteIndex() throws IOException {
// Verify incomplete indices are unread
byte[] compressed = Hex.decodeHex("FFF0".toCharArray());
ByteArrayInputStream in = new ByteArrayInputStream(compressed);
ByteBuf in = Unpooled.wrappedBuffer(compressed);
try {
decoder.decode(in, mockListener);
assertEquals(1, in.available());
decoder.decode(in, mockListener);
assertEquals(1, in.available());
decoder.decode(in, mockHeaders);
assertEquals(1, in.readableBytes());
decoder.decode(in, mockHeaders);
assertEquals(1, in.readableBytes());
} finally {
in.close();
in.release();
}
}
@ -209,21 +207,21 @@ public class DecoderTest {
@Test
public void testLiteralWithIncrementalIndexingWithEmptyName() throws Exception {
decode("400005" + hex("value"));
verify(mockListener, times(1)).addHeader(EMPTY_BYTES, getBytes("value"), false);
verify(mockHeaders, times(1)).add(EMPTY_STRING, of("value"));
}
@Test
public void testLiteralWithIncrementalIndexingCompleteEviction() throws Exception {
// Verify indexed host header
decode("4004" + hex("name") + "05" + hex("value"));
verify(mockListener).addHeader(getBytes("name"), getBytes("value"), false);
verifyNoMoreInteractions(mockListener);
verify(mockHeaders).add(of("name"), of("value"));
verifyNoMoreInteractions(mockHeaders);
assertFalse(decoder.endHeaderBlock());
reset(mockListener);
reset(mockHeaders);
StringBuilder sb = new StringBuilder();
for (int i = 0; i < 4096; i++) {
sb.append("a");
sb.append('a');
}
String value = sb.toString();
sb = new StringBuilder();
@ -232,14 +230,14 @@ public class DecoderTest {
sb.append("61"); // 'a'
}
decode(sb.toString());
verify(mockListener).addHeader(getBytes(":authority"), getBytes(value), false);
verifyNoMoreInteractions(mockListener);
verify(mockHeaders).add(of(":authority"), of(value));
verifyNoMoreInteractions(mockHeaders);
assertFalse(decoder.endHeaderBlock());
// Verify next header is inserted at index 62
decode("4004" + hex("name") + "05" + hex("value") + "BE");
verify(mockListener, times(2)).addHeader(getBytes("name"), getBytes("value"), false);
verifyNoMoreInteractions(mockListener);
verify(mockHeaders, times(2)).add(of("name"), of("value"));
verifyNoMoreInteractions(mockHeaders);
}
@Test
@ -252,15 +250,15 @@ public class DecoderTest {
}
sb.append("00");
decode(sb.toString());
verifyNoMoreInteractions(mockListener);
verifyNoMoreInteractions(mockHeaders);
// Verify header block is reported as truncated
assertTrue(decoder.endHeaderBlock());
// Verify next header is inserted at index 62
decode("4004" + hex("name") + "05" + hex("value") + "BE");
verify(mockListener, times(2)).addHeader(getBytes("name"), getBytes("value"), false);
verifyNoMoreInteractions(mockListener);
verify(mockHeaders, times(2)).add(of("name"), of("value"));
verifyNoMoreInteractions(mockHeaders);
}
@Test
@ -274,21 +272,21 @@ public class DecoderTest {
sb.append("61"); // 'a'
}
decode(sb.toString());
verifyNoMoreInteractions(mockListener);
verifyNoMoreInteractions(mockHeaders);
// Verify header block is reported as truncated
assertTrue(decoder.endHeaderBlock());
// Verify next header is inserted at index 62
decode("4004" + hex("name") + "05" + hex("value") + "BE");
verify(mockListener, times(2)).addHeader(getBytes("name"), getBytes("value"), false);
verifyNoMoreInteractions(mockListener);
verify(mockHeaders, times(2)).add(of("name"), of("value"));
verifyNoMoreInteractions(mockHeaders);
}
@Test
public void testLiteralWithoutIndexingWithEmptyName() throws Exception {
decode("000005" + hex("value"));
verify(mockListener, times(1)).addHeader(EMPTY_BYTES, getBytes("value"), false);
verify(mockHeaders, times(1)).add(EMPTY_STRING, of("value"));
}
@Test(expected = IOException.class)
@ -301,7 +299,7 @@ public class DecoderTest {
}
sb.append("00");
decode(sb.toString());
verifyNoMoreInteractions(mockListener);
verifyNoMoreInteractions(mockHeaders);
// Verify header block is reported as truncated
assertTrue(decoder.endHeaderBlock());
@ -321,7 +319,7 @@ public class DecoderTest {
sb.append("61"); // 'a'
}
decode(sb.toString());
verifyNoMoreInteractions(mockListener);
verifyNoMoreInteractions(mockHeaders);
// Verify header block is reported as truncated
assertTrue(decoder.endHeaderBlock());
@ -333,7 +331,7 @@ public class DecoderTest {
@Test
public void testLiteralNeverIndexedWithEmptyName() throws Exception {
decode("100005" + hex("value"));
verify(mockListener, times(1)).addHeader(EMPTY_BYTES, getBytes("value"), true);
verify(mockHeaders, times(1)).add(EMPTY_STRING, of("value"));
}
@Test(expected = IOException.class)
@ -346,7 +344,7 @@ public class DecoderTest {
}
sb.append("00");
decode(sb.toString());
verifyNoMoreInteractions(mockListener);
verifyNoMoreInteractions(mockHeaders);
// Verify header block is reported as truncated
assertTrue(decoder.endHeaderBlock());
@ -366,7 +364,7 @@ public class DecoderTest {
sb.append("61"); // 'a'
}
decode(sb.toString());
verifyNoMoreInteractions(mockListener);
verifyNoMoreInteractions(mockHeaders);
// Verify header block is reported as truncated
assertTrue(decoder.endHeaderBlock());

View File

@ -31,11 +31,12 @@
*/
package io.netty.handler.codec.http2.internal.hpack;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.util.AsciiString;
import org.junit.Assert;
import org.junit.Test;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.Random;
@ -44,7 +45,6 @@ public class HuffmanTest {
@Test
public void testHuffman() throws IOException {
String s = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
for (int i = 0; i < s.length(); i++) {
roundTrip(s.substring(0, i));
@ -62,59 +62,59 @@ public class HuffmanTest {
for (int i = 0; i < 4; i++) {
buf[i] = (byte) 0xFF;
}
Huffman.DECODER.decode(buf);
decode(newHuffmanDecoder(), buf);
}
@Test(expected = IOException.class)
public void testDecodeIllegalPadding() throws IOException {
byte[] buf = new byte[1];
buf[0] = 0x00; // '0', invalid padding
Huffman.DECODER.decode(buf);
decode(newHuffmanDecoder(), buf);
}
@Test(expected = IOException.class)
public void testDecodeExtraPadding() throws IOException {
byte[] buf = makeBuf(0x0f, 0xFF); // '1', 'EOS'
Huffman.DECODER.decode(buf);
decode(newHuffmanDecoder(), buf);
}
@Test(expected = IOException.class)
public void testDecodeExtraPadding1byte() throws IOException {
byte[] buf = makeBuf(0xFF);
Huffman.DECODER.decode(buf);
decode(newHuffmanDecoder(), buf);
}
@Test(expected = IOException.class)
public void testDecodeExtraPadding2byte() throws IOException {
byte[] buf = makeBuf(0x1F, 0xFF); // 'a'
Huffman.DECODER.decode(buf);
decode(newHuffmanDecoder(), buf);
}
@Test(expected = IOException.class)
public void testDecodeExtraPadding3byte() throws IOException {
byte[] buf = makeBuf(0x1F, 0xFF, 0xFF); // 'a'
Huffman.DECODER.decode(buf);
decode(newHuffmanDecoder(), buf);
}
@Test(expected = IOException.class)
public void testDecodeExtraPadding4byte() throws IOException {
byte[] buf = makeBuf(0x1F, 0xFF, 0xFF, 0xFF); // 'a'
Huffman.DECODER.decode(buf);
decode(newHuffmanDecoder(), buf);
}
@Test(expected = IOException.class)
public void testDecodeExtraPadding29bit() throws IOException {
byte[] buf = makeBuf(0xFF, 0x9F, 0xFF, 0xFF, 0xFF); // '|'
Huffman.DECODER.decode(buf);
decode(newHuffmanDecoder(), buf);
}
@Test(expected = IOException.class)
public void testDecodePartialSymbol() throws IOException {
byte[] buf = makeBuf(0x52, 0xBC, 0x30, 0xFF, 0xFF, 0xFF, 0xFF); // " pFA\x00", 31 bits of padding, a.k.a. EOS
Huffman.DECODER.decode(buf);
decode(newHuffmanDecoder(), buf);
}
private byte[] makeBuf(int ... bytes) {
private static byte[] makeBuf(int ... bytes) {
byte[] buf = new byte[bytes.length];
for (int i = 0; i < buf.length; i++) {
buf[i] = (byte) bytes[i];
@ -122,8 +122,8 @@ public class HuffmanTest {
return buf;
}
private void roundTrip(String s) throws IOException {
roundTrip(Huffman.ENCODER, Huffman.DECODER, s);
private static void roundTrip(String s) throws IOException {
roundTrip(new HuffmanEncoder(), newHuffmanDecoder(), s);
}
private static void roundTrip(HuffmanEncoder encoder, HuffmanDecoder decoder, String s)
@ -131,19 +131,38 @@ public class HuffmanTest {
roundTrip(encoder, decoder, s.getBytes());
}
private void roundTrip(byte[] buf) throws IOException {
roundTrip(Huffman.ENCODER, Huffman.DECODER, buf);
private static void roundTrip(byte[] buf) throws IOException {
roundTrip(new HuffmanEncoder(), newHuffmanDecoder(), buf);
}
private static void roundTrip(HuffmanEncoder encoder, HuffmanDecoder decoder, byte[] buf)
throws IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(baos);
ByteBuf buffer = Unpooled.buffer();
try {
encoder.encode(buffer, new AsciiString(buf, false));
byte[] bytes = new byte[buffer.readableBytes()];
buffer.readBytes(bytes);
encoder.encode(dos, buf);
byte[] actualBytes = decode(decoder, bytes);
byte[] actualBytes = decoder.decode(baos.toByteArray());
Assert.assertTrue(Arrays.equals(buf, actualBytes));
} finally {
buffer.release();
}
}
Assert.assertTrue(Arrays.equals(buf, actualBytes));
private static byte[] decode(HuffmanDecoder decoder, byte[] bytes) throws IOException {
ByteBuf buffer = Unpooled.wrappedBuffer(bytes);
try {
AsciiString decoded = decoder.decode(buffer, buffer.readableBytes());
Assert.assertFalse(buffer.isReadable());
return decoded.toByteArray();
} finally {
buffer.release();
}
}
private static HuffmanDecoder newHuffmanDecoder() {
return new HuffmanDecoder(32);
}
}

View File

@ -39,9 +39,10 @@ import com.google.gson.JsonDeserializer;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParseException;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.util.AsciiString;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
@ -169,31 +170,39 @@ final class TestCase {
maxHeaderTableSize = Integer.MAX_VALUE;
}
return new Decoder(8192, maxHeaderTableSize);
return new Decoder(8192, maxHeaderTableSize, 32);
}
private static byte[] encode(Encoder encoder, List<HeaderField> headers, int maxHeaderTableSize,
boolean sensitive)
throws IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
boolean sensitive) {
ByteBuf buffer = Unpooled.buffer();
try {
if (maxHeaderTableSize != -1) {
encoder.setMaxHeaderTableSize(buffer, maxHeaderTableSize);
}
if (maxHeaderTableSize != -1) {
encoder.setMaxHeaderTableSize(baos, maxHeaderTableSize);
for (HeaderField e : headers) {
encoder.encodeHeader(buffer, AsciiString.of(e.name), AsciiString.of(e.value), sensitive);
}
byte[] bytes = new byte[buffer.readableBytes()];
buffer.readBytes(bytes);
return bytes;
} finally {
buffer.release();
}
for (HeaderField e : headers) {
encoder.encodeHeader(baos, e.name, e.value, sensitive);
}
return baos.toByteArray();
}
private static List<HeaderField> decode(Decoder decoder, byte[] expected) throws IOException {
List<HeaderField> headers = new ArrayList<HeaderField>();
TestHeaderListener listener = new TestHeaderListener(headers);
decoder.decode(new ByteArrayInputStream(expected), listener);
decoder.endHeaderBlock();
return headers;
ByteBuf in = Unpooled.wrappedBuffer(expected);
try {
List<HeaderField> headers = new ArrayList<HeaderField>();
TestHeaderListener listener = new TestHeaderListener(headers);
decoder.decode(in, listener);
decoder.endHeaderBlock();
return headers;
} finally {
in.release();
}
}
private static String concat(List<String> l) {
@ -237,8 +246,7 @@ final class TestCase {
@Override
public HeaderField deserialize(JsonElement json, Type typeOfT,
JsonDeserializationContext context)
throws JsonParseException {
JsonDeserializationContext context) {
JsonObject jsonObject = json.getAsJsonObject();
Set<Map.Entry<String, JsonElement>> entrySet = jsonObject.entrySet();
if (entrySet.size() != 1) {

View File

@ -31,9 +31,11 @@
*/
package io.netty.handler.codec.http2.internal.hpack;
import io.netty.handler.codec.http2.DefaultHttp2Headers;
import java.util.List;
final class TestHeaderListener implements HeaderListener {
final class TestHeaderListener extends DefaultHttp2Headers {
private final List<HeaderField> headers;
@ -42,7 +44,8 @@ final class TestHeaderListener implements HeaderListener {
}
@Override
public void addHeader(byte[] name, byte[] value, boolean sensitive) {
public TestHeaderListener add(CharSequence name, CharSequence value) {
headers.add(new HeaderField(name, value));
return this;
}
}

View File

@ -31,9 +31,12 @@
*/
package io.netty.microbench.http2.internal.hpack;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.handler.codec.http2.DefaultHttp2Headers;
import io.netty.handler.codec.http2.Http2Headers;
import io.netty.handler.codec.http2.internal.hpack.Decoder;
import io.netty.handler.codec.http2.internal.hpack.Encoder;
import io.netty.handler.codec.http2.internal.hpack.HeaderListener;
import io.netty.microbench.util.AbstractMicrobenchmark;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
@ -41,10 +44,9 @@ import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.TearDown;
import org.openjdk.jmh.infra.Blackhole;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.List;
@ -65,35 +67,49 @@ public class DecoderBenchmark extends AbstractMicrobenchmark {
@Param({ "true", "false" })
public boolean limitToAscii;
private byte[] input;
private ByteBuf input;
@Setup(Level.Trial)
public void setup() throws IOException {
input = getSerializedHeaders(Util.headers(size, limitToAscii), sensitive);
input = Unpooled.wrappedBuffer(getSerializedHeaders(Util.headers(size, limitToAscii), sensitive));
}
@TearDown(Level.Trial)
public void teardown() throws IOException {
input.release();
}
@Benchmark
@BenchmarkMode(Mode.Throughput)
public void decode(final Blackhole bh) throws IOException {
Decoder decoder = new Decoder(maxHeaderSize, maxTableSize);
decoder.decode(new ByteArrayInputStream(input), new HeaderListener() {
Decoder decoder = new Decoder(maxHeaderSize, maxTableSize, 32);
@SuppressWarnings("unchecked")
Http2Headers headers =
new DefaultHttp2Headers() {
@Override
public void addHeader(byte[] name, byte[] value, boolean sensitive) {
public Http2Headers add(CharSequence name, CharSequence value) {
bh.consume(sensitive);
return this;
}
});
};
decoder.decode(input.duplicate(), headers);
decoder.endHeaderBlock();
}
private byte[] getSerializedHeaders(List<Header> headers, boolean sensitive)
throws IOException {
private byte[] getSerializedHeaders(List<Header> headers, boolean sensitive) {
Encoder encoder = new Encoder(4096);
ByteArrayOutputStream outputStream = size.newOutputStream();
for (int i = 0; i < headers.size(); ++i) {
Header header = headers.get(i);
encoder.encodeHeader(outputStream, header.name, header.value, sensitive);
ByteBuf out = size.newOutBuffer();
try {
for (int i = 0; i < headers.size(); ++i) {
Header header = headers.get(i);
encoder.encodeHeader(out, header.name, header.value, sensitive);
}
byte[] bytes = new byte[out.readableBytes()];
out.readBytes(bytes);
return bytes;
} finally {
out.release();
}
return outputStream.toByteArray();
}
}

View File

@ -31,6 +31,7 @@
*/
package io.netty.microbench.http2.internal.hpack;
import io.netty.buffer.ByteBuf;
import io.netty.handler.codec.http2.internal.hpack.Encoder;
import io.netty.microbench.util.AbstractMicrobenchmark;
import org.openjdk.jmh.annotations.Benchmark;
@ -39,9 +40,9 @@ import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.TearDown;
import org.openjdk.jmh.infra.Blackhole;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.List;
@ -63,31 +64,36 @@ public class EncoderBenchmark extends AbstractMicrobenchmark {
public boolean limitToAscii;
private List<Header> headers;
private ByteArrayOutputStream outputStream;
private ByteBuf output;
@Setup(Level.Trial)
public void setup() {
headers = Util.headers(size, limitToAscii);
outputStream = size.newOutputStream();
output = size.newOutBuffer();
}
@TearDown(Level.Trial)
public void tearDown() {
output.release();
}
@Benchmark
@BenchmarkMode(Mode.Throughput)
public void encode(Blackhole bh) throws IOException {
Encoder encoder = new Encoder(maxTableSize);
outputStream.reset();
output.clear();
if (duplicates) {
// If duplicates is set, re-add the same header each time.
Header header = headers.get(0);
for (int i = 0; i < headers.size(); ++i) {
encoder.encodeHeader(outputStream, header.name, header.value, sensitive);
encoder.encodeHeader(output, header.name, header.value, sensitive);
}
} else {
for (int i = 0; i < headers.size(); ++i) {
Header header = headers.get(i);
encoder.encodeHeader(outputStream, header.name, header.value, sensitive);
encoder.encodeHeader(output, header.name, header.value, sensitive);
}
}
bh.consume(outputStream);
bh.consume(output);
}
}

View File

@ -31,6 +31,8 @@
*/
package io.netty.microbench.http2.internal.hpack;
import io.netty.util.AsciiString;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
@ -42,12 +44,12 @@ class Header {
private static final String ALPHABET =
"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_";
final byte[] name;
final byte[] value;
final CharSequence name;
final CharSequence value;
Header(byte[] name, byte[] value) {
this.name = name;
this.value = value;
this.name = new AsciiString(name, false);
this.value = new AsciiString(value, false);
}
/**

View File

@ -31,6 +31,9 @@
*/
package io.netty.microbench.http2.internal.hpack;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import java.io.ByteArrayOutputStream;
import java.util.List;
@ -56,7 +59,7 @@ public enum HeadersSize {
return Header.createHeaders(numHeaders, nameLength, valueLength, limitAscii);
}
public ByteArrayOutputStream newOutputStream() {
return new ByteArrayOutputStream(numHeaders * (nameLength + valueLength));
public ByteBuf newOutBuffer() {
return Unpooled.buffer(numHeaders * (nameLength + valueLength));
}
}