codec-http2: Improve h1 to h2 header conversion

Motivation:

Netty could handle "connection" and "te" headers more gracefully when
converting from http/1.1 to http/2 headers.  Http/2 doesn't support
single-hop (connection-specific) headers, so when we convert from http/1.1
to http/2 we should drop all single-hop headers.  This includes headers like
"transfer-encoding" and "connection", but also the headers that "connection"
nominates, since "connection" can be used to designate other headers as
single-hop.  For the "te" header, we can convert more permissively by simply
dropping non-conforming values (i.e. anything other than "trailers"), which
is what we already do for all other headers when we convert.
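
For illustration, a minimal sketch of the intended behaviour (modeled on the
unit tests added below; "foo", "hello" and friends are just example names):

    HttpHeaders inHeaders = new DefaultHttpHeaders();
    inHeaders.add(HttpHeaderNames.CONNECTION, "foo");              // nominates "foo" as single-hop
    inHeaders.add("foo", "bar");                                   // dropped: nominated by "connection"
    inHeaders.add(HttpHeaderNames.TE, HttpHeaderValues.GZIP);      // dropped: not "trailers"
    inHeaders.add(HttpHeaderNames.TE, HttpHeaderValues.TRAILERS);  // kept
    inHeaders.add("hello", "world");                               // ordinary header, kept

    Http2Headers out = new DefaultHttp2Headers();
    HttpConversionUtil.toHttp2Headers(inHeaders, out);
    // out now contains only "te: trailers" and "hello: world"; "connection",
    // "foo" and the non-"trailers" te value have been dropped.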

Modifications:

Add a new blacklist to the http/1.1 to http/2 conversion, which is
constructed from the values of the "connection" header, and stop
throwing an exception when a "te" header is passed with a non-"trailers"
value.  Instead, drop all values except for "trailers".  Add unit tests
for "connection" and "te" headers when converting from http/1.1 to http/2.

Result:

This will improve handling of the h2c upgrade request, and also conversions
from http/1.1 to http/2 in general.  It will simplify implementing
spec-compliant http/2 servers that want to share code between their http/1.1
and http/2 implementations.

[Fixes #7355]
Moses Nakamura, 2017-11-10 16:56:55 -05:00 (committed by Norman Maurer)
parent c921742a42
commit d976dc108d
4 changed files with 98 additions and 9 deletions

io/netty/handler/codec/http2/HttpConversionUtil.java

@@ -15,6 +15,7 @@
package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBufAllocator;
import io.netty.handler.codec.UnsupportedValueConverter;
import io.netty.handler.codec.http.DefaultFullHttpRequest;
import io.netty.handler.codec.http.DefaultFullHttpResponse;
import io.netty.handler.codec.http.DefaultHttpRequest;
@@ -37,7 +38,9 @@ import io.netty.util.internal.UnstableApi;
import java.net.URI;
import java.util.Iterator;
import java.util.List;
import java.util.Map.Entry;
import java.util.Set;
import static io.netty.handler.codec.http.HttpScheme.HTTP;
import static io.netty.handler.codec.http.HttpScheme.HTTPS;
@@ -47,6 +50,7 @@ import static io.netty.handler.codec.http2.Http2Error.PROTOCOL_ERROR;
import static io.netty.handler.codec.http2.Http2Exception.connectionError;
import static io.netty.handler.codec.http2.Http2Exception.streamError;
import static io.netty.util.AsciiString.EMPTY_STRING;
import static io.netty.util.ByteProcessor.FIND_COMMA;
import static io.netty.util.ByteProcessor.FIND_SEMI_COLON;
import static io.netty.util.internal.ObjectUtil.checkNotNull;
import static io.netty.util.internal.StringUtil.isNullOrEmpty;
@@ -410,19 +414,52 @@ public final class HttpConversionUtil {
return out;
}
private static CharSequenceMap<AsciiString> toLowercaseMap(List<String> values) {
UnsupportedValueConverter<AsciiString> valueConverter = UnsupportedValueConverter.<AsciiString>instance();
CharSequenceMap<AsciiString> result =
new CharSequenceMap<AsciiString>(true, valueConverter, values.size());
// we iterate because the underlying list is probably a linked list
for (CharSequence value : values) {
AsciiString lowerCased = AsciiString.of(value).toLowerCase();
try {
int index = lowerCased.forEachByte(FIND_COMMA);
if (index != -1) {
int start = 0;
do {
result.add(lowerCased.subSequence(start, index, false).trim(), EMPTY_STRING);
start = index + 1;
} while (start < lowerCased.length() &&
(index = lowerCased.forEachByte(start, value.length() - start, FIND_COMMA)) != -1);
result.add(lowerCased.subSequence(start, lowerCased.length(), false).trim(), EMPTY_STRING);
} else {
result.add(lowerCased.trim(), EMPTY_STRING);
}
} catch (Exception e) {
// This is not expected to happen because FIND_COMMA never throws but must be caught
// because of the ByteProcessor interface.
throw new IllegalStateException(e);
}
}
return result;
}
public static void toHttp2Headers(HttpHeaders inHeaders, Http2Headers out) {
Iterator<Entry<CharSequence, CharSequence>> iter = inHeaders.iteratorCharSequence();
new CharSequenceMap<AsciiString>();
CharSequenceMap<AsciiString> connectionBlacklist =
toLowercaseMap(inHeaders.getAll(HttpHeaderNames.CONNECTION));
while (iter.hasNext()) {
Entry<CharSequence, CharSequence> entry = iter.next();
final AsciiString aName = AsciiString.of(entry.getKey()).toLowerCase();
if (!HTTP_TO_HTTP2_HEADER_BLACKLIST.contains(aName)) {
if (!HTTP_TO_HTTP2_HEADER_BLACKLIST.contains(aName) &&
!connectionBlacklist.contains(aName)) {
// https://tools.ietf.org/html/rfc7540#section-8.1.2.2 makes a special exception for TE
if (aName.contentEqualsIgnoreCase(HttpHeaderNames.TE) &&
!AsciiString.contentEqualsIgnoreCase(entry.getValue(), HttpHeaderValues.TRAILERS)) {
throw new IllegalArgumentException("Invalid value for " + HttpHeaderNames.TE + ": " +
entry.getValue());
}
if (aName.contentEqualsIgnoreCase(HttpHeaderNames.COOKIE)) {
if (aName.contentEqualsIgnoreCase(HttpHeaderNames.TE)) {
if (AsciiString.containsIgnoreCase(entry.getValue(), HttpHeaderValues.TRAILERS)) {
out.add(HttpHeaderNames.TE, HttpHeaderValues.TRAILERS);
}
} else if (aName.contentEqualsIgnoreCase(HttpHeaderNames.COOKIE)) {
AsciiString value = AsciiString.of(entry.getValue());
// split up cookies to allow for better compression
// https://tools.ietf.org/html/rfc7540#section-8.1.2.5

io/netty/handler/codec/http2/HttpConversionUtilTest.java

@@ -15,6 +15,10 @@
*/
package io.netty.handler.codec.http2;
import io.netty.handler.codec.http.DefaultHttpHeaders;
import io.netty.handler.codec.http.HttpHeaders;
import io.netty.handler.codec.http.HttpHeaderNames;
import io.netty.handler.codec.http.HttpHeaderValues;
import io.netty.util.AsciiString;
import org.junit.Test;
@@ -55,4 +59,46 @@ public class HttpConversionUtilTest {
public void setHttp2AuthorityWithEmptyAuthority() {
HttpConversionUtil.setHttp2Authority("info@", new DefaultHttp2Headers());
}
@Test
public void stripTEHeaders() {
HttpHeaders inHeaders = new DefaultHttpHeaders();
inHeaders.add(HttpHeaderNames.TE, HttpHeaderValues.GZIP);
Http2Headers out = new DefaultHttp2Headers();
HttpConversionUtil.toHttp2Headers(inHeaders, out);
assertTrue(out.isEmpty());
}
@Test
public void stripTEHeadersExcludingTrailers() {
HttpHeaders inHeaders = new DefaultHttpHeaders();
inHeaders.add(HttpHeaderNames.TE, HttpHeaderValues.GZIP);
inHeaders.add(HttpHeaderNames.TE, HttpHeaderValues.TRAILERS);
Http2Headers out = new DefaultHttp2Headers();
HttpConversionUtil.toHttp2Headers(inHeaders, out);
assertSame(HttpHeaderValues.TRAILERS, out.get(HttpHeaderNames.TE));
}
@Test
public void stripConnectionHeadersAndNominees() {
HttpHeaders inHeaders = new DefaultHttpHeaders();
inHeaders.add(HttpHeaderNames.CONNECTION, "foo");
inHeaders.add("foo", "bar");
Http2Headers out = new DefaultHttp2Headers();
HttpConversionUtil.toHttp2Headers(inHeaders, out);
assertTrue(out.isEmpty());
}
@Test
public void stripConnectionNomineesWithCsv() {
HttpHeaders inHeaders = new DefaultHttpHeaders();
inHeaders.add(HttpHeaderNames.CONNECTION, "foo, bar");
inHeaders.add("foo", "baz");
inHeaders.add("bar", "qux");
inHeaders.add("hello", "world");
Http2Headers out = new DefaultHttp2Headers();
HttpConversionUtil.toHttp2Headers(inHeaders, out);
assertEquals(1, out.size());
assertSame("world", out.get("hello"));
}
}

io/netty/util/AsciiString.java

@@ -1006,7 +1006,8 @@ public final class AsciiString implements CharSequence, Comparable<CharSequence>
}
/**
* Copies this string removing white space characters from the beginning and end of the string.
* Duplicates this string removing white space characters from the beginning and end of the
* string, without copying.
*
* @return a new string with characters {@code <= \\u0020} removed from the beginning and the end.
*/

io/netty/util/ByteProcessor.java

@@ -81,10 +81,15 @@ public interface ByteProcessor {
ByteProcessor FIND_NON_LF = new IndexNotOfProcessor((byte) '\n');
/**
* Aborts on a {@code CR (';')}.
* Aborts on a semicolon {@code (';')}.
*/
ByteProcessor FIND_SEMI_COLON = new IndexOfProcessor((byte) ';');
/**
* Aborts on a comma {@code (',')}.
*/
ByteProcessor FIND_COMMA = new IndexOfProcessor((byte) ',');
/**
* Aborts on a {@code CR ('\r')} or a {@code LF ('\n')}.
*/