Fix a bug where HttpContentEncoder does not update 'Content-Length' and 'Transfer-Encoding' correctly

- Add 'Content-Length' and remove 'Transfer-Encoding' in case of a FullHttpMessage
- Remove 'Content-Length' and add 'Transfer-Encoding: chunked' in case of a non-full HttpMessage
- Fixes #1275
Parent: 7ee571968c
Commit: 4b1df148a6
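To make the intent of the fix concrete, below is a minimal, self-contained sketch of the header-adjustment rule described above. It is not Netty code: the class ContentEncodingHeadersSketch, the method adjustHeaders, and the plain Map header store are hypothetical stand-ins. HttpContentEncoder itself applies the same rule through HttpHeaders, summing the readable bytes of the encoded parts to compute the length (see the diff below).

import java.util.HashMap;
import java.util.Map;

// Illustrative sketch only: the names here are hypothetical and the header store is a
// plain Map rather than Netty's HttpHeaders, but the branch mirrors the rule in the
// commit message.
public class ContentEncodingHeadersSketch {

    // If the total encoded length is known (the message is "full"), advertise an exact
    // 'Content-Length' and drop any 'Transfer-Encoding'; otherwise stream the body and
    // advertise 'Transfer-Encoding: chunked' instead of a stale 'Content-Length'.
    static void adjustHeaders(Map<String, String> headers, boolean lengthKnown, long encodedLength) {
        if (lengthKnown) {
            headers.put("Content-Length", Long.toString(encodedLength));
            headers.remove("Transfer-Encoding");
        } else {
            headers.remove("Content-Length");
            headers.put("Transfer-Encoding", "chunked");
        }
    }

    public static void main(String[] args) {
        Map<String, String> full = new HashMap<String, String>();
        full.put("Content-Length", "42");   // length of the original body, stale after encoding
        adjustHeaders(full, true, 2);       // encoded body turned out to be 2 bytes
        System.out.println(full);           // prints {Content-Length=2}

        Map<String, String> streamed = new HashMap<String, String>();
        streamed.put("Content-Length", "42");
        adjustHeaders(streamed, false, -1); // length unknown while streaming
        System.out.println(streamed);       // prints {Transfer-Encoding=chunked}
    }
}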
HttpContentEncoder.java

@@ -23,8 +23,11 @@ import io.netty.buffer.Unpooled;
 import io.netty.channel.ChannelHandlerContext;
 import io.netty.channel.embedded.EmbeddedByteChannel;
 import io.netty.handler.codec.MessageToMessageCodec;
+import io.netty.handler.codec.http.HttpHeaders.Names;
+import io.netty.handler.codec.http.HttpHeaders.Values;
 
 import java.util.ArrayDeque;
+import java.util.Collections;
 import java.util.Queue;
 
 /**
@@ -168,27 +171,25 @@ public abstract class HttpContentEncoder extends MessageToMessageCodec<HttpMessa
 
             HttpObject[] encoded = encodeContent(message, c);
 
-            if (!HttpHeaders.isTransferEncodingChunked(message) && encoded.length == 3) {
-                if (headers.contains(HttpHeaders.Names.CONTENT_LENGTH)) {
-                    long length = ((ByteBufHolder) encoded[1]).data().readableBytes() +
-                            ((ByteBufHolder) encoded[2]).data().readableBytes();
-                    headers.set(
-                            HttpHeaders.Names.CONTENT_LENGTH,
-                            Long.toString(length));
-                }
-            }
-            for (HttpObject obj: encoded) {
-                out.add(obj);
-            }
+            if (encoded[0] instanceof HttpMessage && encoded[encoded.length - 1] instanceof LastHttpContent) {
+                // Set 'Content-Length' if the length of the content is known.
+                long contentLength = 0;
+                for (int i = 1; i < encoded.length; i ++) {
+                    contentLength += ((ByteBufHolder) encoded[i]).data().readableBytes();
+                }
+                headers.set(Names.CONTENT_LENGTH, contentLength);
+                headers.remove(Names.TRANSFER_ENCODING);
+            } else {
+                headers.remove(Names.CONTENT_LENGTH);
+                headers.set(Names.TRANSFER_ENCODING, Values.CHUNKED);
+            }
+            Collections.addAll(out, encoded);
             return;
         }
 
         if (encoder != null) {
-            HttpObject[] encoded = encodeContent(null, c);
-            for (HttpObject obj: encoded) {
-                out.add(obj);
-            }
+            Collections.addAll(out, encodeContent(null, c));
             return;
         }
 
@@ -200,7 +201,7 @@ public abstract class HttpContentEncoder extends MessageToMessageCodec<HttpMessa
         }
     }
 
-    private HttpObject[] encodeContent(HttpMessage header, HttpContent c) {
+    private HttpObject[] encodeContent(HttpMessage msg, HttpContent c) {
         ByteBuf newContent = Unpooled.buffer();
         ByteBuf content = c.data();
         encode(content, newContent);
@@ -212,25 +213,25 @@ public abstract class HttpContentEncoder extends MessageToMessageCodec<HttpMessa
             // Generate an additional chunk if the decoder produced
             // the last product on closure,
             if (lastProduct.isReadable()) {
-                if (header == null) {
+                if (msg == null) {
                     return new HttpObject[] { new DefaultHttpContent(newContent),
                             new DefaultLastHttpContent(lastProduct)};
                 } else {
-                    return new HttpObject[] { header, new DefaultHttpContent(newContent),
+                    return new HttpObject[] { msg, new DefaultHttpContent(newContent),
                             new DefaultLastHttpContent(lastProduct)};
                 }
            } else {
-                if (header == null) {
+                if (msg == null) {
                     return new HttpObject[] { new DefaultLastHttpContent(newContent) };
                 } else {
-                    return new HttpObject[] { header, new DefaultLastHttpContent(newContent) };
+                    return new HttpObject[] { msg, new DefaultLastHttpContent(newContent) };
                 }
             }
         }
-        if (header == null) {
+        if (msg == null) {
             return new HttpObject[] { new DefaultHttpContent(newContent) };
         } else {
-            return new HttpObject[] { header, new DefaultHttpContent(newContent) };
+            return new HttpObject[] { msg, new DefaultHttpContent(newContent) };
         }
     }
 
HttpContentEncoderTest.java (new file)

@@ -0,0 +1,105 @@
+/*
+ * Copyright 2013 The Netty Project
+ *
+ * The Netty Project licenses this file to you under the Apache License,
+ * version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at:
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations
+ * under the License.
+ */
+
+package io.netty.handler.codec.http;
+
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.Unpooled;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.channel.embedded.EmbeddedByteChannel;
+import io.netty.channel.embedded.EmbeddedMessageChannel;
+import io.netty.handler.codec.ByteToByteEncoder;
+import io.netty.handler.codec.http.HttpHeaders.Names;
+import io.netty.util.CharsetUtil;
+import org.junit.Test;
+
+import static org.hamcrest.CoreMatchers.*;
+import static org.junit.Assert.*;
+
+public class HttpContentEncoderTest {
+
+    @Test
+    public void testSplitContent() throws Exception {
+        EmbeddedMessageChannel ch = new EmbeddedMessageChannel(
+                new HttpContentEncoder() {
+                    @Override
+                    protected Result beginEncode(HttpMessage header, HttpContent msg, String acceptEncoding) {
+                        return new Result("test", new EmbeddedByteChannel(new ByteToByteEncoder() {
+                            @Override
+                            protected void encode(ChannelHandlerContext ctx, ByteBuf in, ByteBuf out) {
+                                out.writeBytes(String.valueOf(in.readableBytes()).getBytes(CharsetUtil.US_ASCII));
+                                in.skipBytes(in.readableBytes());
+                            }
+                        }));
+                    }
+                });
+
+        ch.writeInbound(new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/"));
+
+        ch.writeOutbound(new DefaultHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK));
+        ch.writeOutbound(new DefaultHttpContent(Unpooled.wrappedBuffer(new byte[3])));
+        ch.writeOutbound(new DefaultHttpContent(Unpooled.wrappedBuffer(new byte[2])));
+        ch.writeOutbound(new DefaultLastHttpContent(Unpooled.wrappedBuffer(new byte[1])));
+
+        HttpResponse res = (HttpResponse) ch.readOutbound();
+        assertThat(res.headers().get(Names.TRANSFER_ENCODING), is("chunked"));
+        assertThat(res.headers().get(Names.CONTENT_LENGTH), is(nullValue()));
+
+        HttpContent chunk;
+        chunk = (HttpContent) ch.readOutbound();
+        assertThat(chunk.data().toString(CharsetUtil.US_ASCII), is("3"));
+        chunk = (HttpContent) ch.readOutbound();
+        assertThat(chunk.data().toString(CharsetUtil.US_ASCII), is("2"));
+        chunk = (HttpContent) ch.readOutbound();
+        assertThat(chunk.data().toString(CharsetUtil.US_ASCII), is("1"));
+
+        assertThat(chunk, is(instanceOf(LastHttpContent.class)));
+        assertThat(ch.readOutbound(), is(nullValue()));
+    }
+
+    @Test
+    public void testFullContent() throws Exception {
+        EmbeddedMessageChannel ch = new EmbeddedMessageChannel(
+                new HttpContentEncoder() {
+                    @Override
+                    protected Result beginEncode(HttpMessage header, HttpContent msg, String acceptEncoding) {
+                        return new Result("test", new EmbeddedByteChannel(new ByteToByteEncoder() {
+                            @Override
+                            protected void encode(ChannelHandlerContext ctx, ByteBuf in, ByteBuf out) {
+                                out.writeBytes(String.valueOf(in.readableBytes()).getBytes(CharsetUtil.US_ASCII));
+                                in.skipBytes(in.readableBytes());
+                            }
+                        }));
+                    }
+                });
+
+        ch.writeInbound(new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/"));
+
+        FullHttpResponse res = new DefaultFullHttpResponse(
+                HttpVersion.HTTP_1_1, HttpResponseStatus.OK, Unpooled.wrappedBuffer(new byte[42]));
+        res.headers().set(Names.CONTENT_LENGTH, 42);
+        ch.writeOutbound(res);
+
+        HttpResponse encodedRes = (HttpResponse) ch.readOutbound();
+        assertThat(encodedRes.headers().get(Names.TRANSFER_ENCODING), is(nullValue()));
+        assertThat(encodedRes.headers().get(Names.CONTENT_LENGTH), is("2"));
+
+        LastHttpContent c = (LastHttpContent) ch.readOutbound();
+        assertThat(c.data().readableBytes(), is(2));
+
+        assertThat(ch.readOutbound(), is(nullValue()));
+    }
+}