Fix NPEs in HttpPostRequestEncoder#nextChunk
Motivation:

HttpPostRequestEncoder maintains an internal buffer that holds the current encoded data. There are use cases in which this internal buffer becomes null, and the next-chunk processing must take that into account.

Modifications:

- When preparing the last chunk, if currentBuffer is null, mark isLastChunkSent as true and send LastHttpContent.EMPTY_LAST_CONTENT.
- When calculating the remaining size, take into account that currentBuffer might be null.
- Tests are based on those provided in the issue by @nebhale and @bfiorini.

Result:

Fixes #5478
parent deb5c45204
commit 08748344d8
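For context, a minimal sketch (not part of the commit) of how the encoder is driven as a ChunkedInput and where the reported NullPointerException used to surface. The attribute name and the 7943-character payload are taken from the new test below; everything else (the class name and the standalone main method) is illustrative only.

import io.netty.buffer.ByteBufAllocator;
import io.netty.handler.codec.http.DefaultFullHttpRequest;
import io.netty.handler.codec.http.HttpContent;
import io.netty.handler.codec.http.HttpMethod;
import io.netty.handler.codec.http.HttpVersion;
import io.netty.handler.codec.http.LastHttpContent;
import io.netty.handler.codec.http.multipart.HttpPostRequestEncoder;

import java.util.Arrays;

public final class NextChunkNpeRepro {
    public static void main(String[] args) throws Exception {
        DefaultFullHttpRequest request = new DefaultFullHttpRequest(
                HttpVersion.HTTP_1_1, HttpMethod.POST, "http://localhost");
        HttpPostRequestEncoder encoder = new HttpPostRequestEncoder(request, true);

        // 7943 'a' characters: per the test below, the encoded multipart body then
        // fills the encoder's chunks exactly, so the internal buffer is fully
        // drained (null) when the last chunk is requested.
        char[] value = new char[7943];
        Arrays.fill(value, 'a');
        encoder.addBodyAttribute("foo", new String(value));
        encoder.finalizeRequest();

        // Consume the encoded body the way a channel would: chunk by chunk.
        while (!encoder.isEndOfInput()) {
            HttpContent chunk = encoder.readChunk((ByteBufAllocator) null);
            // Before this fix, the readChunk() call above could throw a
            // NullPointerException because nextChunk() dereferenced the null currentBuffer.
            boolean last = chunk instanceof LastHttpContent;
            chunk.release();
            if (last) {
                break;
            }
        }
    }
}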
HttpPostRequestEncoder.java
@@ -1062,40 +1062,30 @@ public class HttpPostRequestEncoder implements ChunkedInput<HttpContent> {
             isLastChunkSent = true;
             return LastHttpContent.EMPTY_LAST_CONTENT;
         }
-        ByteBuf buffer;
-        int size = HttpPostBodyUtil.chunkSize;
-        // first test if previous buffer is not empty
-        if (currentBuffer != null) {
-            size -= currentBuffer.readableBytes();
-        }
+        int size = calculateRemainingSize();
         if (size <= 0) {
             // NextChunk from buffer
-            buffer = fillByteBuf();
+            ByteBuf buffer = fillByteBuf();
             return new DefaultHttpContent(buffer);
         }
         // size > 0
         if (currentData != null) {
             // continue to read data
+            HttpContent chunk;
             if (isMultipart) {
-                HttpContent chunk = encodeNextChunkMultipart(size);
-                if (chunk != null) {
-                    return chunk;
-                }
+                chunk = encodeNextChunkMultipart(size);
             } else {
-                HttpContent chunk = encodeNextChunkUrlEncoded(size);
-                if (chunk != null) {
-                    // NextChunk Url from currentData
-                    return chunk;
-                }
+                chunk = encodeNextChunkUrlEncoded(size);
             }
-            size = HttpPostBodyUtil.chunkSize - currentBuffer.readableBytes();
+            if (chunk != null) {
+                // NextChunk from data
+                return chunk;
+            }
+            size = calculateRemainingSize();
         }
         if (!iterator.hasNext()) {
-            isLastChunk = true;
-            // NextChunk as last non empty from buffer
-            buffer = currentBuffer;
-            currentBuffer = null;
-            return new DefaultHttpContent(buffer);
+            return lastChunk();
         }
         while (size > 0 && iterator.hasNext()) {
             currentData = iterator.next();
@@ -1107,21 +1097,33 @@ public class HttpPostRequestEncoder implements ChunkedInput<HttpContent> {
             }
             if (chunk == null) {
                 // not enough
-                size = HttpPostBodyUtil.chunkSize - currentBuffer.readableBytes();
+                size = calculateRemainingSize();
                 continue;
             }
             // NextChunk from data
             return chunk;
         }
         // end since no more data
+        return lastChunk();
+    }
+
+    private int calculateRemainingSize() {
+        int size = HttpPostBodyUtil.chunkSize;
+        if (currentBuffer != null) {
+            size -= currentBuffer.readableBytes();
+        }
+        return size;
+    }
+
+    private HttpContent lastChunk() {
         isLastChunk = true;
-        // Previous LastChunk with no more data
-        buffer = currentBuffer;
+        if (currentBuffer == null) {
+            isLastChunkSent = true;
+            // LastChunk with no more data
+            return LastHttpContent.EMPTY_LAST_CONTENT;
+        }
+        // NextChunk as last non empty from buffer
+        ByteBuf buffer = currentBuffer;
         currentBuffer = null;
         return new DefaultHttpContent(buffer);
     }
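The design choice here is to centralize the null handling in two small helpers rather than scatter null checks through nextChunk(). The sketch below is illustrative only, not the committed code (the class name and the CHUNK_SIZE constant are assumptions); it models the contract those helpers establish: a drained buffer still leaves a full chunk budget, and the final chunk degrades to the shared empty trailer instead of wrapping a null ByteBuf.

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.handler.codec.http.DefaultHttpContent;
import io.netty.handler.codec.http.HttpContent;
import io.netty.handler.codec.http.LastHttpContent;

final class LastChunkModel {
    static final int CHUNK_SIZE = 8096;   // mirrors HttpPostBodyUtil.chunkSize
    ByteBuf currentBuffer;                // null once all encoded bytes have been handed out

    int calculateRemainingSize() {
        // With nothing buffered, a full chunk is still available; no null dereference.
        return currentBuffer == null ? CHUNK_SIZE : CHUNK_SIZE - currentBuffer.readableBytes();
    }

    HttpContent lastChunk() {
        if (currentBuffer == null) {
            // Nothing left to flush: terminate the stream with the shared empty trailer.
            return LastHttpContent.EMPTY_LAST_CONTENT;
        }
        // Otherwise hand off the remaining bytes as the final non-empty chunk.
        ByteBuf buffer = currentBuffer;
        currentBuffer = null;
        return new DefaultHttpContent(buffer);
    }

    public static void main(String[] args) {
        LastChunkModel model = new LastChunkModel();

        // Case 1: buffer already drained (the #5478 scenario) -> full chunk budget, empty last content.
        assert model.calculateRemainingSize() == CHUNK_SIZE;
        assert model.lastChunk() == LastHttpContent.EMPTY_LAST_CONTENT;

        // Case 2: some bytes remain -> they are emitted as the final non-empty chunk.
        model.currentBuffer = Unpooled.wrappedBuffer(new byte[] {1, 2, 3});
        HttpContent last = model.lastChunk();
        assert last.content().readableBytes() == 3;
        last.release();
    }
}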
HttpPostRequestEncoderTest.java
@@ -19,16 +19,20 @@ import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufAllocator;
 import io.netty.buffer.Unpooled;
 import io.netty.handler.codec.http.DefaultFullHttpRequest;
 import io.netty.handler.codec.http.HttpConstants;
 import io.netty.handler.codec.http.HttpContent;
 import io.netty.handler.codec.http.HttpMethod;
 import io.netty.handler.codec.http.HttpVersion;
+import io.netty.handler.codec.http.LastHttpContent;
 import io.netty.handler.codec.http.multipart.HttpPostRequestEncoder.EncoderMode;
 import io.netty.handler.codec.http.multipart.HttpPostRequestEncoder.ErrorDataEncoderException;
 import io.netty.util.CharsetUtil;
 import io.netty.util.internal.StringUtil;
 import org.junit.Test;
 
+import java.io.ByteArrayInputStream;
 import java.io.File;
+import java.nio.charset.Charset;
+import java.util.Arrays;
 import java.util.List;
 
@@ -37,6 +41,7 @@ import static io.netty.handler.codec.http.HttpHeaderNames.CONTENT_LENGTH;
 import static io.netty.handler.codec.http.HttpHeaderNames.CONTENT_TRANSFER_ENCODING;
 import static io.netty.handler.codec.http.HttpHeaderNames.CONTENT_TYPE;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
@@ -358,4 +363,64 @@ public class HttpPostRequestEncoderTest {
         content.release();
         return contentStr;
     }
+
+    @Test
+    public void testDataIsMultipleOfChunkSize1() throws Exception {
+        DefaultHttpDataFactory factory = new DefaultHttpDataFactory(DefaultHttpDataFactory.MINSIZE);
+        DefaultFullHttpRequest request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1,
+                HttpMethod.POST, "http://localhost");
+        HttpPostRequestEncoder encoder = new HttpPostRequestEncoder(factory, request, true,
+                HttpConstants.DEFAULT_CHARSET, HttpPostRequestEncoder.EncoderMode.RFC1738);
+
+        MemoryFileUpload first = new MemoryFileUpload("resources", "", "application/json", null,
+                Charset.forName("UTF-8"), -1);
+        first.setMaxSize(-1);
+        first.setContent(new ByteArrayInputStream(new byte[7955]));
+        encoder.addBodyHttpData(first);
+
+        MemoryFileUpload second = new MemoryFileUpload("resources2", "", "application/json", null,
+                Charset.forName("UTF-8"), -1);
+        second.setMaxSize(-1);
+        second.setContent(new ByteArrayInputStream(new byte[7928]));
+        encoder.addBodyHttpData(second);
+
+        assertNotNull(encoder.finalizeRequest());
+
+        checkNextChunkSize(encoder, 8096);
+        checkNextChunkSize(encoder, 8096);
+
+        HttpContent httpContent = encoder.readChunk((ByteBufAllocator) null);
+        assertTrue("Expected LastHttpContent is not received", httpContent instanceof LastHttpContent);
+        httpContent.release();
+
+        assertTrue("Expected end of input is not received", encoder.isEndOfInput());
+    }
+
+    @Test
+    public void testDataIsMultipleOfChunkSize2() throws Exception {
+        DefaultFullHttpRequest request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1,
+                HttpMethod.POST, "http://localhost");
+        HttpPostRequestEncoder encoder = new HttpPostRequestEncoder(request, true);
+        int length = 7943;
+        char[] array = new char[length];
+        Arrays.fill(array, 'a');
+        String longText = new String(array);
+        encoder.addBodyAttribute("foo", longText);
+
+        assertNotNull(encoder.finalizeRequest());
+
+        checkNextChunkSize(encoder, 8096);
+
+        HttpContent httpContent = encoder.readChunk((ByteBufAllocator) null);
+        assertTrue("Expected LastHttpContent is not received", httpContent instanceof LastHttpContent);
+        httpContent.release();
+
+        assertTrue("Expected end of input is not received", encoder.isEndOfInput());
+    }
+
+    private static void checkNextChunkSize(HttpPostRequestEncoder encoder, int expectedSize) throws Exception {
+        HttpContent httpContent = encoder.readChunk((ByteBufAllocator) null);
+        assertEquals("Chunk is not " + expectedSize + " bytes", expectedSize, httpContent.content().readableBytes());
+        httpContent.release();
+    }
 }