Fix for performance regression on HttpPost RequestDecoder (#10623)

Fix issue #10508 where PARANOID mode slowed down about 1000 times compared to ADVANCED.
Also fix a rare issue where, when the internal buffer grew over a limit, it was partially discarded
using `discardReadBytes()`, which caused bad changes within previously discovered HttpData.

Reasons were:

Too many `readByte()` method calls while other ways exist (such as keeping in memory the last scan position when trying to find a delimiter, or using `bytesBefore(firstByte)` instead of looping externally).

Changes done:
- major change to the way buffers are parsed: instead of reading byte per byte until the delimiter is found, try to find the delimiter using `bytesBefore()` and keep the last unfound position to skip already parsed parts (the algorithms are the same but the scan implementations are different)
- Change the condition to discard read bytes only when refCnt is at most 1.

Observations using Async-Profiler:
==================================

1) Without optimizations, most of the time (more than 95%) is spent in the `readByte()` method within the `loadDataMultipartStandard` method.
2) When using `bytesBefore(byte)` instead of `readByte()` to find the various delimiters, the `loadDataMultipartStandard` method goes down to 19–33% depending on the test used. The `readByte()` method, or the equivalent `getByte(pos)` method, goes down to 15% (from 95%).

Times are confirming those profiling:
- With optimizations: in SIMPLE mode about 82% better, in ADVANCED mode about 79% better, and in PARANOID mode about 99% better (most of the duplicate read accesses are removed or made internally through the `bytesBefore(byte)` method)

A benchmark is added to show the behavior of the various cases (one big item, such as a file upload, and many items) and the various levels of detection (Disabled, Simple, Advanced, Paranoid). This benchmark is intended to raise an alert if new implementations introduce too large differences between levels (such as the previous version, where PARANOID was about 1000 times slower than the other levels, while it is now at most about 10 times slower).

Extract of Benchmark run:
=========================

Run complete. Total time: 00:13:27

Benchmark                                                                           Mode  Cnt  Score   Error   Units
HttpPostMultipartRequestDecoderBenchmark.multipartRequestDecoderBigAdvancedLevel   thrpt    6  2,248 ± 0,198 ops/ms
HttpPostMultipartRequestDecoderBenchmark.multipartRequestDecoderBigDisabledLevel   thrpt    6  2,067 ± 1,219 ops/ms
HttpPostMultipartRequestDecoderBenchmark.multipartRequestDecoderBigParanoidLevel   thrpt    6  1,109 ± 0,038 ops/ms
HttpPostMultipartRequestDecoderBenchmark.multipartRequestDecoderBigSimpleLevel     thrpt    6  2,326 ± 0,314 ops/ms
HttpPostMultipartRequestDecoderBenchmark.multipartRequestDecoderHighAdvancedLevel  thrpt    6  1,444 ± 0,226 ops/ms
HttpPostMultipartRequestDecoderBenchmark.multipartRequestDecoderHighDisabledLevel  thrpt    6  1,462 ± 0,642 ops/ms
HttpPostMultipartRequestDecoderBenchmark.multipartRequestDecoderHighParanoidLevel  thrpt    6  0,159 ± 0,003 ops/ms
HttpPostMultipartRequestDecoderBenchmark.multipartRequestDecoderHighSimpleLevel    thrpt    6  1,522 ± 0,049 ops/ms
This commit is contained in:
Frédéric Brégier 2020-11-19 08:00:35 +01:00 committed by GitHub
parent 8b2ed77042
commit 1c230405fd
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 391 additions and 345 deletions

View File

@ -123,6 +123,11 @@ public class HttpPostMultipartRequestDecoder implements InterfaceHttpPostRequest
*/ */
private Attribute currentAttribute; private Attribute currentAttribute;
/**
* The current Data position before finding delimiter
*/
private int lastDataPosition;
private boolean destroyed; private boolean destroyed;
private int discardThreshold = HttpPostRequestDecoder.DEFAULT_DISCARD_THRESHOLD; private int discardThreshold = HttpPostRequestDecoder.DEFAULT_DISCARD_THRESHOLD;
@ -337,12 +342,15 @@ public class HttpPostMultipartRequestDecoder implements InterfaceHttpPostRequest
// which is not really usable for us as we may exceed it once we add more bytes. // which is not really usable for us as we may exceed it once we add more bytes.
buf.alloc().buffer(buf.readableBytes()).writeBytes(buf); buf.alloc().buffer(buf.readableBytes()).writeBytes(buf);
} else { } else {
int readPos = undecodedChunk.readerIndex();
int writable = undecodedChunk.writableBytes();
int toWrite = buf.readableBytes();
if (undecodedChunk.refCnt() == 1 && writable < toWrite && readPos + writable >= toWrite) {
undecodedChunk.discardReadBytes();
}
undecodedChunk.writeBytes(buf); undecodedChunk.writeBytes(buf);
} }
parseBody(); parseBody();
if (undecodedChunk != null && undecodedChunk.writerIndex() > discardThreshold) {
undecodedChunk.discardReadBytes();
}
return this; return this;
} }
@ -980,47 +988,6 @@ public class HttpPostMultipartRequestDecoder implements InterfaceHttpPostRequest
currentFieldAttributes.remove(HttpHeaderValues.FILENAME); currentFieldAttributes.remove(HttpHeaderValues.FILENAME);
} }
/**
* Read one line up to the CRLF or LF
*
* @return the String from one line
* @throws NotEnoughDataDecoderException
* Need more chunks and reset the {@code readerIndex} to the previous
* value
*/
private static String readLineStandard(ByteBuf undecodedChunk, Charset charset) {
int readerIndex = undecodedChunk.readerIndex();
ByteBuf line = undecodedChunk.alloc().heapBuffer(64);
try {
while (undecodedChunk.isReadable()) {
byte nextByte = undecodedChunk.readByte();
if (nextByte == HttpConstants.CR) {
// check but do not changed readerIndex
nextByte = undecodedChunk.getByte(undecodedChunk.readerIndex());
if (nextByte == HttpConstants.LF) {
// force read
undecodedChunk.readByte();
return line.toString(charset);
} else {
// Write CR (not followed by LF)
line.writeByte(HttpConstants.CR);
}
} else if (nextByte == HttpConstants.LF) {
return line.toString(charset);
} else {
line.writeByte(nextByte);
}
}
} catch (IndexOutOfBoundsException e) {
undecodedChunk.readerIndex(readerIndex);
throw new NotEnoughDataDecoderException(e);
} finally {
line.release();
}
undecodedChunk.readerIndex(readerIndex);
throw new NotEnoughDataDecoderException();
}
/** /**
* Read one line up to the CRLF or LF * Read one line up to the CRLF or LF
* *
@ -1030,44 +997,23 @@ public class HttpPostMultipartRequestDecoder implements InterfaceHttpPostRequest
* value * value
*/ */
private static String readLine(ByteBuf undecodedChunk, Charset charset) { private static String readLine(ByteBuf undecodedChunk, Charset charset) {
if (!undecodedChunk.hasArray()) { final int readerIndex = undecodedChunk.readerIndex();
return readLineStandard(undecodedChunk, charset); int posLf = undecodedChunk.bytesBefore(HttpConstants.LF);
if (posLf == -1) {
throw new NotEnoughDataDecoderException();
} }
SeekAheadOptimize sao = new SeekAheadOptimize(undecodedChunk); boolean crFound =
int readerIndex = undecodedChunk.readerIndex(); undecodedChunk.getByte(readerIndex + posLf - 1) == HttpConstants.CR;
ByteBuf line = undecodedChunk.alloc().heapBuffer(64); if (crFound) {
try { posLf--;
while (sao.pos < sao.limit) {
byte nextByte = sao.bytes[sao.pos++];
if (nextByte == HttpConstants.CR) {
if (sao.pos < sao.limit) {
nextByte = sao.bytes[sao.pos++];
if (nextByte == HttpConstants.LF) {
sao.setReadPosition(0);
return line.toString(charset);
} else {
// Write CR (not followed by LF)
sao.pos--;
line.writeByte(HttpConstants.CR);
}
} else {
line.writeByte(nextByte);
}
} else if (nextByte == HttpConstants.LF) {
sao.setReadPosition(0);
return line.toString(charset);
} else {
line.writeByte(nextByte);
}
}
} catch (IndexOutOfBoundsException e) {
undecodedChunk.readerIndex(readerIndex);
throw new NotEnoughDataDecoderException(e);
} finally {
line.release();
} }
undecodedChunk.readerIndex(readerIndex); CharSequence line = undecodedChunk.readCharSequence(posLf, charset);
throw new NotEnoughDataDecoderException(); if (crFound) {
undecodedChunk.skipBytes(2);
} else {
undecodedChunk.skipBytes(1);
}
return line.toString();
} }
/** /**
@ -1085,201 +1031,73 @@ public class HttpPostMultipartRequestDecoder implements InterfaceHttpPostRequest
* Need more chunks and reset the {@code readerIndex} to the previous * Need more chunks and reset the {@code readerIndex} to the previous
* value * value
*/ */
private static String readDelimiterStandard(ByteBuf undecodedChunk, String delimiter) { private String readDelimiter(ByteBuf undecodedChunk, String delimiter) {
int readerIndex = undecodedChunk.readerIndex(); final int readerIndex = undecodedChunk.readerIndex();
try { try {
StringBuilder sb = new StringBuilder(64); final int len = delimiter.length();
int delimiterPos = 0; if (len + 2 > undecodedChunk.readableBytes()) {
int len = delimiter.length(); // Not able to check if "--" is present
while (undecodedChunk.isReadable() && delimiterPos < len) { throw new NotEnoughDataDecoderException();
byte nextByte = undecodedChunk.readByte(); }
if (nextByte == delimiter.charAt(delimiterPos)) { int newPositionDelimiter = findDelimiter(undecodedChunk, delimiter, 0);
delimiterPos++; if (newPositionDelimiter != 0) {
sb.append((char) nextByte); // Delimiter not fully found
throw new NotEnoughDataDecoderException();
}
byte nextByte = undecodedChunk.getByte(readerIndex + len);
// first check for opening delimiter
if (nextByte == HttpConstants.CR) {
nextByte = undecodedChunk.getByte(readerIndex + len + 1);
if (nextByte == HttpConstants.LF) {
CharSequence line = undecodedChunk.readCharSequence(len, charset);
undecodedChunk.skipBytes(2);
return line.toString();
} else { } else {
// error since CR must be followed by LF
// delimiter not found so break here ! // delimiter not found so break here !
undecodedChunk.readerIndex(readerIndex); undecodedChunk.readerIndex(readerIndex);
throw new NotEnoughDataDecoderException(); throw new NotEnoughDataDecoderException();
} }
} } else if (nextByte == HttpConstants.LF) {
// Now check if either opening delimiter or closing delimiter CharSequence line = undecodedChunk.readCharSequence(len, charset);
if (undecodedChunk.isReadable()) { undecodedChunk.skipBytes(1);
byte nextByte = undecodedChunk.readByte(); return line.toString();
// first check for opening delimiter } else if (nextByte == '-') {
if (nextByte == HttpConstants.CR) { // second check for closing delimiter
nextByte = undecodedChunk.readByte(); nextByte = undecodedChunk.getByte(readerIndex + len + 1);
if (nextByte == HttpConstants.LF) { if (nextByte == '-') {
return sb.toString(); CharSequence line = undecodedChunk.readCharSequence(len + 2, charset);
} else { // now try to find if CRLF or LF there
// error since CR must be followed by LF if (undecodedChunk.isReadable()) {
// delimiter not found so break here ! nextByte = undecodedChunk.readByte();
undecodedChunk.readerIndex(readerIndex); if (nextByte == HttpConstants.CR) {
throw new NotEnoughDataDecoderException();
}
} else if (nextByte == HttpConstants.LF) {
return sb.toString();
} else if (nextByte == '-') {
sb.append('-');
// second check for closing delimiter
nextByte = undecodedChunk.readByte();
if (nextByte == '-') {
sb.append('-');
// now try to find if CRLF or LF there
if (undecodedChunk.isReadable()) {
nextByte = undecodedChunk.readByte(); nextByte = undecodedChunk.readByte();
if (nextByte == HttpConstants.CR) { if (nextByte == HttpConstants.LF) {
nextByte = undecodedChunk.readByte(); return line.toString();
if (nextByte == HttpConstants.LF) {
return sb.toString();
} else {
// error CR without LF
// delimiter not found so break here !
undecodedChunk.readerIndex(readerIndex);
throw new NotEnoughDataDecoderException();
}
} else if (nextByte == HttpConstants.LF) {
return sb.toString();
} else { } else {
// No CRLF but ok however (Adobe Flash uploader) // error CR without LF
// minus 1 since we read one char ahead but // delimiter not found so break here !
// should not undecodedChunk.readerIndex(readerIndex);
undecodedChunk.readerIndex(undecodedChunk.readerIndex() - 1); throw new NotEnoughDataDecoderException();
return sb.toString();
} }
} } else if (nextByte == HttpConstants.LF) {
// FIXME what do we do here? return line.toString();
// either considering it is fine, either waiting for
// more data to come?
// lets try considering it is fine...
return sb.toString();
}
// only one '-' => not enough
// whatever now => error since incomplete
}
}
} catch (IndexOutOfBoundsException e) {
undecodedChunk.readerIndex(readerIndex);
throw new NotEnoughDataDecoderException(e);
}
undecodedChunk.readerIndex(readerIndex);
throw new NotEnoughDataDecoderException();
}
/**
* Read one line up to --delimiter or --delimiter-- and if existing the CRLF
* or LF. Note that CRLF or LF are mandatory for opening delimiter
* (--delimiter) but not for closing delimiter (--delimiter--) since some
* clients does not include CRLF in this case.
*
* @param delimiter
* of the form --string, such that '--' is already included
* @return the String from one line as the delimiter searched (opening or
* closing)
* @throws NotEnoughDataDecoderException
* Need more chunks and reset the readerInder to the previous
* value
*/
private static String readDelimiter(ByteBuf undecodedChunk, String delimiter) {
if (!undecodedChunk.hasArray()) {
return readDelimiterStandard(undecodedChunk, delimiter);
}
SeekAheadOptimize sao = new SeekAheadOptimize(undecodedChunk);
int readerIndex = undecodedChunk.readerIndex();
int delimiterPos = 0;
int len = delimiter.length();
try {
StringBuilder sb = new StringBuilder(64);
// check conformity with delimiter
while (sao.pos < sao.limit && delimiterPos < len) {
byte nextByte = sao.bytes[sao.pos++];
if (nextByte == delimiter.charAt(delimiterPos)) {
delimiterPos++;
sb.append((char) nextByte);
} else {
// delimiter not found so break here !
undecodedChunk.readerIndex(readerIndex);
throw new NotEnoughDataDecoderException();
}
}
// Now check if either opening delimiter or closing delimiter
if (sao.pos < sao.limit) {
byte nextByte = sao.bytes[sao.pos++];
if (nextByte == HttpConstants.CR) {
// first check for opening delimiter
if (sao.pos < sao.limit) {
nextByte = sao.bytes[sao.pos++];
if (nextByte == HttpConstants.LF) {
sao.setReadPosition(0);
return sb.toString();
} else { } else {
// error CR without LF // No CRLF but ok however (Adobe Flash uploader)
// delimiter not found so break here ! // minus 1 since we read one char ahead but
undecodedChunk.readerIndex(readerIndex); // should not
throw new NotEnoughDataDecoderException(); undecodedChunk.readerIndex(undecodedChunk.readerIndex() - 1);
return line.toString();
} }
} else {
// error since CR must be followed by LF
// delimiter not found so break here !
undecodedChunk.readerIndex(readerIndex);
throw new NotEnoughDataDecoderException();
}
} else if (nextByte == HttpConstants.LF) {
// same first check for opening delimiter where LF used with
// no CR
sao.setReadPosition(0);
return sb.toString();
} else if (nextByte == '-') {
sb.append('-');
// second check for closing delimiter
if (sao.pos < sao.limit) {
nextByte = sao.bytes[sao.pos++];
if (nextByte == '-') {
sb.append('-');
// now try to find if CRLF or LF there
if (sao.pos < sao.limit) {
nextByte = sao.bytes[sao.pos++];
if (nextByte == HttpConstants.CR) {
if (sao.pos < sao.limit) {
nextByte = sao.bytes[sao.pos++];
if (nextByte == HttpConstants.LF) {
sao.setReadPosition(0);
return sb.toString();
} else {
// error CR without LF
// delimiter not found so break here !
undecodedChunk.readerIndex(readerIndex);
throw new NotEnoughDataDecoderException();
}
} else {
// error CR without LF
// delimiter not found so break here !
undecodedChunk.readerIndex(readerIndex);
throw new NotEnoughDataDecoderException();
}
} else if (nextByte == HttpConstants.LF) {
sao.setReadPosition(0);
return sb.toString();
} else {
// No CRLF but ok however (Adobe Flash
// uploader)
// minus 1 since we read one char ahead but
// should not
sao.setReadPosition(1);
return sb.toString();
}
}
// FIXME what do we do here?
// either considering it is fine, either waiting for
// more data to come?
// lets try considering it is fine...
sao.setReadPosition(0);
return sb.toString();
}
// whatever now => error since incomplete
// only one '-' => not enough or whatever not enough
// element
} }
// FIXME what do we do here?
// either considering it is fine, either waiting for
// more data to come?
// lets try considering it is fine...
return line.toString();
} }
// only one '-' => not enough
// whatever now => error since incomplete
} }
} catch (IndexOutOfBoundsException e) { } catch (IndexOutOfBoundsException e) {
undecodedChunk.readerIndex(readerIndex); undecodedChunk.readerIndex(readerIndex);
@ -1290,96 +1108,93 @@ public class HttpPostMultipartRequestDecoder implements InterfaceHttpPostRequest
} }
/** /**
* Load the field value or file data from a Multipart request * @param undecodedChunk the source where the delimiter is to be found
* @param delimiter the string to find out
* @param offset the offset from readerIndex within the undecodedChunk to
* start from to find out the delimiter
* *
* @return {@code true} if the last chunk is loaded (boundary delimiter found), {@code false} if need more chunks * @return a number >= 0 if found, else new offset with negative value
* @throws ErrorDataDecoderException * (to inverse), both from readerIndex
* @throws NotEnoughDataDecoderException
* Need more chunks while relative position with readerIndex is 0
*/ */
private static boolean loadDataMultipartStandard(ByteBuf undecodedChunk, String delimiter, HttpData httpData) { private static int findDelimiter(ByteBuf undecodedChunk, String delimiter, int offset) {
final int startReaderIndex = undecodedChunk.readerIndex(); final int startReaderIndex = undecodedChunk.readerIndex();
final int delimeterLength = delimiter.length(); final int delimeterLength = delimiter.length();
int index = 0; final int toRead = undecodedChunk.readableBytes();
int lastPosition = startReaderIndex; int newOffset = offset;
byte prevByte = HttpConstants.LF; boolean delimiterNotFound = true;
boolean delimiterFound = false; while (delimiterNotFound && newOffset + delimeterLength <= toRead) {
while (undecodedChunk.isReadable()) { int posFirstChar = undecodedChunk
final byte nextByte = undecodedChunk.readByte(); .bytesBefore(startReaderIndex + newOffset, toRead - newOffset,
// Check the delimiter (byte) delimiter.codePointAt(0));
if (prevByte == HttpConstants.LF && nextByte == delimiter.codePointAt(index)) { if (posFirstChar == -1) {
index++; newOffset = toRead;
if (delimeterLength == index) { return -newOffset;
delimiterFound = true; }
newOffset = posFirstChar + offset;
if (newOffset + delimeterLength > toRead) {
return -newOffset;
}
// assume will found it
delimiterNotFound = false;
for (int index = 1; index < delimeterLength; index++) {
if (undecodedChunk.getByte(startReaderIndex + newOffset + index) != delimiter.codePointAt(index)) {
// ignore first found offset and redo search from next char
newOffset++;
delimiterNotFound = true;
break; break;
} }
continue;
} }
lastPosition = undecodedChunk.readerIndex(); }
if (nextByte == HttpConstants.LF) { if (delimiterNotFound || newOffset + delimeterLength > toRead) {
index = 0; if (newOffset == 0) {
lastPosition -= (prevByte == HttpConstants.CR)? 2 : 1; throw new NotEnoughDataDecoderException();
} }
prevByte = nextByte; return -newOffset;
} }
if (prevByte == HttpConstants.CR) { return newOffset;
lastPosition--;
}
ByteBuf content = undecodedChunk.retainedSlice(startReaderIndex, lastPosition - startReaderIndex);
try {
httpData.addContent(content, delimiterFound);
} catch (IOException e) {
throw new ErrorDataDecoderException(e);
}
undecodedChunk.readerIndex(lastPosition);
return delimiterFound;
} }
/** /**
* Load the field value from a Multipart request * Load the field value from a Multipart request
* *
* @return {@code true} if the last chunk is loaded (boundary delimiter found), {@code false} if need more chunks * @return {@code true} if the last chunk is loaded (boundary delimiter found), {@code false} if need more chunks
*
* @throws ErrorDataDecoderException * @throws ErrorDataDecoderException
*/ */
private static boolean loadDataMultipart(ByteBuf undecodedChunk, String delimiter, HttpData httpData) { private boolean loadDataMultipart(ByteBuf undecodedChunk, String delimiter,
if (!undecodedChunk.hasArray()) { HttpData httpData) {
return loadDataMultipartStandard(undecodedChunk, delimiter, httpData);
}
final SeekAheadOptimize sao = new SeekAheadOptimize(undecodedChunk);
final int startReaderIndex = undecodedChunk.readerIndex(); final int startReaderIndex = undecodedChunk.readerIndex();
final int delimeterLength = delimiter.length(); int newOffset;
int index = 0;
int lastRealPos = sao.pos;
byte prevByte = HttpConstants.LF;
boolean delimiterFound = false;
while (sao.pos < sao.limit) {
final byte nextByte = sao.bytes[sao.pos++];
// Check the delimiter
if (prevByte == HttpConstants.LF && nextByte == delimiter.codePointAt(index)) {
index++;
if (delimeterLength == index) {
delimiterFound = true;
break;
}
continue;
}
lastRealPos = sao.pos;
if (nextByte == HttpConstants.LF) {
index = 0;
lastRealPos -= (prevByte == HttpConstants.CR)? 2 : 1;
}
prevByte = nextByte;
}
if (prevByte == HttpConstants.CR) {
lastRealPos--;
}
final int lastPosition = sao.getReadPosition(lastRealPos);
final ByteBuf content = undecodedChunk.retainedSlice(startReaderIndex, lastPosition - startReaderIndex);
try { try {
httpData.addContent(content, delimiterFound); newOffset = findDelimiter(undecodedChunk, delimiter, lastDataPosition);
if (newOffset < 0) {
// delimiter not found
lastDataPosition = -newOffset;
return false;
}
} catch (NotEnoughDataDecoderException e) {
// Not enough data and no change to lastDataPosition
return false;
}
// found delimiter but still need to check if CRLF before
int startDelimiter = newOffset;
if (undecodedChunk.getByte(startReaderIndex + startDelimiter - 1) == HttpConstants.LF) {
startDelimiter--;
if (undecodedChunk.getByte(startReaderIndex + startDelimiter - 1) == HttpConstants.CR) {
startDelimiter--;
}
}
ByteBuf content = undecodedChunk.retainedSlice(startReaderIndex, startDelimiter);
try {
httpData.addContent(content, true);
} catch (IOException e) { } catch (IOException e) {
throw new ErrorDataDecoderException(e); throw new ErrorDataDecoderException(e);
} }
undecodedChunk.readerIndex(lastPosition); lastDataPosition = 0;
return delimiterFound; undecodedChunk.readerIndex(startReaderIndex + startDelimiter);
return true;
} }
/** /**

View File

@ -299,12 +299,15 @@ public class HttpPostStandardRequestDecoder implements InterfaceHttpPostRequestD
// which is not really usable for us as we may exceed it once we add more bytes. // which is not really usable for us as we may exceed it once we add more bytes.
buf.alloc().buffer(buf.readableBytes()).writeBytes(buf); buf.alloc().buffer(buf.readableBytes()).writeBytes(buf);
} else { } else {
int readPos = undecodedChunk.readerIndex();
int writable = undecodedChunk.writableBytes();
int toWrite = buf.readableBytes();
if (undecodedChunk.refCnt() == 1 && writable < toWrite && readPos + writable >= toWrite) {
undecodedChunk.discardReadBytes();
}
undecodedChunk.writeBytes(buf); undecodedChunk.writeBytes(buf);
} }
parseBody(); parseBody();
if (undecodedChunk != null && undecodedChunk.writerIndex() > discardThreshold) {
undecodedChunk.discardReadBytes();
}
return this; return this;
} }

View File

@ -132,6 +132,7 @@ public class HttpPostRequestDecoderTest {
// Create decoder instance to test. // Create decoder instance to test.
final HttpPostRequestDecoder decoder = new HttpPostRequestDecoder(inMemoryFactory, req); final HttpPostRequestDecoder decoder = new HttpPostRequestDecoder(inMemoryFactory, req);
assertFalse(decoder.getBodyHttpDatas().isEmpty()); assertFalse(decoder.getBodyHttpDatas().isEmpty());
req.release();
decoder.destroy(); decoder.destroy();
} }
@ -178,6 +179,7 @@ public class HttpPostRequestDecoderTest {
assertNotNull(datar); assertNotNull(datar);
assertEquals(datas[i].getBytes(CharsetUtil.UTF_8).length, datar.length); assertEquals(datas[i].getBytes(CharsetUtil.UTF_8).length, datar.length);
req.release();
decoder.destroy(); decoder.destroy();
} }
} }
@ -211,6 +213,7 @@ public class HttpPostRequestDecoderTest {
// Create decoder instance to test. // Create decoder instance to test.
final HttpPostRequestDecoder decoder = new HttpPostRequestDecoder(inMemoryFactory, req); final HttpPostRequestDecoder decoder = new HttpPostRequestDecoder(inMemoryFactory, req);
assertFalse(decoder.getBodyHttpDatas().isEmpty()); assertFalse(decoder.getBodyHttpDatas().isEmpty());
req.release();
decoder.destroy(); decoder.destroy();
} }
@ -368,6 +371,7 @@ public class HttpPostRequestDecoderTest {
// Create decoder instance to test. // Create decoder instance to test.
final HttpPostRequestDecoder decoder = new HttpPostRequestDecoder(inMemoryFactory, req); final HttpPostRequestDecoder decoder = new HttpPostRequestDecoder(inMemoryFactory, req);
assertFalse(decoder.getBodyHttpDatas().isEmpty()); assertFalse(decoder.getBodyHttpDatas().isEmpty());
req.release();
decoder.destroy(); decoder.destroy();
} }
@ -397,6 +401,7 @@ public class HttpPostRequestDecoderTest {
assertTrue(part1 instanceof FileUpload); assertTrue(part1 instanceof FileUpload);
FileUpload fileUpload = (FileUpload) part1; FileUpload fileUpload = (FileUpload) part1;
assertEquals("tmp 0.txt", fileUpload.getFilename()); assertEquals("tmp 0.txt", fileUpload.getFilename());
req.release();
decoder.destroy(); decoder.destroy();
} }
@ -427,6 +432,7 @@ public class HttpPostRequestDecoderTest {
// Create decoder instance to test without any exception. // Create decoder instance to test without any exception.
final HttpPostRequestDecoder decoder = new HttpPostRequestDecoder(inMemoryFactory, req); final HttpPostRequestDecoder decoder = new HttpPostRequestDecoder(inMemoryFactory, req);
assertFalse(decoder.getBodyHttpDatas().isEmpty()); assertFalse(decoder.getBodyHttpDatas().isEmpty());
req.release();
decoder.destroy(); decoder.destroy();
} }
@ -459,8 +465,8 @@ public class HttpPostRequestDecoderTest {
FileUpload fileUpload = (FileUpload) part1; FileUpload fileUpload = (FileUpload) part1;
byte[] fileBytes = fileUpload.get(); byte[] fileBytes = fileUpload.get();
assertTrue("the filecontent should not be decoded", filecontent.equals(new String(fileBytes))); assertTrue("the filecontent should not be decoded", filecontent.equals(new String(fileBytes)));
decoder.destroy();
req.release(); req.release();
decoder.destroy();
} }
@Test @Test
@ -538,8 +544,8 @@ public class HttpPostRequestDecoderTest {
} catch (HttpPostRequestDecoder.ErrorDataDecoderException e) { } catch (HttpPostRequestDecoder.ErrorDataDecoderException e) {
assertTrue(e.getCause() instanceof IllegalArgumentException); assertTrue(e.getCause() instanceof IllegalArgumentException);
} finally { } finally {
decoder.destroy();
content.release(); content.release();
decoder.destroy();
} }
} }
@ -573,8 +579,8 @@ public class HttpPostRequestDecoderTest {
assertTrue("the item should be a FileUpload", part1 instanceof FileUpload); assertTrue("the item should be a FileUpload", part1 instanceof FileUpload);
FileUpload fileUpload = (FileUpload) part1; FileUpload fileUpload = (FileUpload) part1;
assertEquals("the filename should be decoded", filename, fileUpload.getFilename()); assertEquals("the filename should be decoded", filename, fileUpload.getFilename());
decoder.destroy();
req.release(); req.release();
decoder.destroy();
} }
// https://github.com/netty/netty/pull/7265 // https://github.com/netty/netty/pull/7265
@ -609,8 +615,8 @@ public class HttpPostRequestDecoderTest {
assertTrue("the item should be a FileUpload", part1 instanceof FileUpload); assertTrue("the item should be a FileUpload", part1 instanceof FileUpload);
FileUpload fileUpload = (FileUpload) part1; FileUpload fileUpload = (FileUpload) part1;
assertEquals("the filename should be decoded", filename, fileUpload.getFilename()); assertEquals("the filename should be decoded", filename, fileUpload.getFilename());
decoder.destroy();
req.release(); req.release();
decoder.destroy();
} }
// https://github.com/netty/netty/pull/7265 // https://github.com/netty/netty/pull/7265
@ -704,6 +710,7 @@ public class HttpPostRequestDecoderTest {
assertTrue(part1 instanceof FileUpload); assertTrue(part1 instanceof FileUpload);
FileUpload fileUpload = (FileUpload) part1; FileUpload fileUpload = (FileUpload) part1;
assertEquals("tmp-0.txt", fileUpload.getFilename()); assertEquals("tmp-0.txt", fileUpload.getFilename());
req.release();
decoder.destroy(); decoder.destroy();
} }
@ -752,7 +759,7 @@ public class HttpPostRequestDecoderTest {
FullHttpRequest request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, "/", FullHttpRequest request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, "/",
Unpooled.copiedBuffer("a=1&&b=2", CharsetUtil.US_ASCII)); Unpooled.copiedBuffer("a=1&&b=2", CharsetUtil.US_ASCII));
try { try {
new HttpPostStandardRequestDecoder(request); new HttpPostStandardRequestDecoder(request).destroy();
} finally { } finally {
assertTrue(request.release()); assertTrue(request.release());
} }
@ -772,7 +779,7 @@ public class HttpPostRequestDecoderTest {
buf.writeCharSequence("a=b&foo=%22bar%22&==", CharsetUtil.US_ASCII); buf.writeCharSequence("a=b&foo=%22bar%22&==", CharsetUtil.US_ASCII);
FullHttpRequest request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, "/", buf); FullHttpRequest request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, "/", buf);
try { try {
new HttpPostStandardRequestDecoder(request); new HttpPostStandardRequestDecoder(request).destroy();
} finally { } finally {
assertTrue(request.release()); assertTrue(request.release());
} }
@ -823,8 +830,8 @@ public class HttpPostRequestDecoderTest {
FileUpload fileUpload = (FileUpload) part1; FileUpload fileUpload = (FileUpload) part1;
assertEquals("the filename should be decoded", filename, fileUpload.getFilename()); assertEquals("the filename should be decoded", filename, fileUpload.getFilename());
decoder.destroy();
req.release(); req.release();
decoder.destroy();
} }
@Test @Test
@ -860,8 +867,8 @@ public class HttpPostRequestDecoderTest {
assertTrue(attr.getByteBuf().isDirect()); assertTrue(attr.getByteBuf().isDirect());
assertEquals("los angeles", attr.getValue()); assertEquals("los angeles", attr.getValue());
decoder.destroy();
req.release(); req.release();
decoder.destroy();
} }
@Test @Test
@ -877,7 +884,7 @@ public class HttpPostRequestDecoderTest {
} catch (HttpPostRequestDecoder.ErrorDataDecoderException e) { } catch (HttpPostRequestDecoder.ErrorDataDecoderException e) {
assertEquals("Invalid hex byte at index '0' in string: '%'", e.getMessage()); assertEquals("Invalid hex byte at index '0' in string: '%'", e.getMessage());
} finally { } finally {
req.release(); assertTrue(req.release());
} }
} }
@ -894,7 +901,7 @@ public class HttpPostRequestDecoderTest {
} catch (HttpPostRequestDecoder.ErrorDataDecoderException e) { } catch (HttpPostRequestDecoder.ErrorDataDecoderException e) {
assertEquals("Invalid hex byte at index '0' in string: '%2'", e.getMessage()); assertEquals("Invalid hex byte at index '0' in string: '%2'", e.getMessage());
} finally { } finally {
req.release(); assertTrue(req.release());
} }
} }
@ -911,7 +918,7 @@ public class HttpPostRequestDecoderTest {
} catch (HttpPostRequestDecoder.ErrorDataDecoderException e) { } catch (HttpPostRequestDecoder.ErrorDataDecoderException e) {
assertEquals("Invalid hex byte at index '0' in string: '%Zc'", e.getMessage()); assertEquals("Invalid hex byte at index '0' in string: '%Zc'", e.getMessage());
} finally { } finally {
req.release(); assertTrue(req.release());
} }
} }
@ -928,7 +935,7 @@ public class HttpPostRequestDecoderTest {
} catch (HttpPostRequestDecoder.ErrorDataDecoderException e) { } catch (HttpPostRequestDecoder.ErrorDataDecoderException e) {
assertEquals("Invalid hex byte at index '0' in string: '%2g'", e.getMessage()); assertEquals("Invalid hex byte at index '0' in string: '%2g'", e.getMessage());
} finally { } finally {
req.release(); assertTrue(req.release());
} }
} }
} }

View File

@ -0,0 +1,202 @@
/*
* Copyright 2020 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.handler.codec.http.multipart;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.handler.codec.http.DefaultHttpContent;
import io.netty.handler.codec.http.DefaultHttpRequest;
import io.netty.handler.codec.http.DefaultLastHttpContent;
import io.netty.handler.codec.http.HttpHeaderNames;
import io.netty.handler.codec.http.HttpMethod;
import io.netty.handler.codec.http.HttpVersion;
import io.netty.microbench.util.AbstractMicrobenchmark;
import io.netty.util.ResourceLeakDetector;
import io.netty.util.ResourceLeakDetector.Level;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Threads;
import org.openjdk.jmh.annotations.Warmup;
import java.util.concurrent.TimeUnit;
@Threads(1)
@Warmup(iterations = 2)
@Measurement(iterations = 3)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
public class HttpPostMultipartRequestDecoderBenchmark
        extends AbstractMicrobenchmark {

    /**
     * Builds a synthetic {@code multipart/form-data} request, feeds it to a
     * {@link HttpPostRequestDecoder} chunk by chunk and returns the elapsed
     * decoding time in milliseconds.
     *
     * @param big    if {@code true}, every intermediate chunk is raw body data belonging to one
     *               big item (e.g. a large file upload); if {@code false}, every intermediate
     *               chunk introduces a new multipart item (many small items)
     * @param noDisk if {@code true}, use a memory threshold (1 MiB) large enough that no data
     *               is spilled to disk; otherwise use the 16 KiB threshold so big items go to disk
     * @return elapsed time of the offer/decode loop, in milliseconds
     */
    public double testHighNumberChunks(boolean big, boolean noDisk) {
        String BOUNDARY = "01f136d9282f";
        int size = 8 * 1024;
        int chunkNumber = 64;
        // A buffer of 'size' NUL characters used as the item payload.
        StringBuilder stringBuilder = new StringBuilder(size);
        stringBuilder.setLength(size);
        String data = stringBuilder.toString();

        // First chunk: one complete small attribute, then the header of the first file item.
        byte[] bodyStartBytes = ("--" + BOUNDARY + "\n" +
                "Content-Disposition: form-data; name=\"msg_id\"\n\n15200\n--" +
                BOUNDARY +
                "\nContent-Disposition: form-data; name=\"msg1\"; filename=\"file1.txt\"\n\n" +
                data).getBytes();
        // Intermediate chunk for the "big item" scenario: raw payload only.
        byte[] bodyPartBigBytes = data.getBytes();
        // Intermediate chunk for the "many items" scenario: closes the previous item and opens a new one.
        byte[] intermediaryBytes = ("\n--" + BOUNDARY +
                "\nContent-Disposition: form-data; name=\"msg2\"; filename=\"file2.txt\"\n\n" +
                data).getBytes();
        // Last chunk: closing boundary.
        byte[] finalBigBytes = ("\n" + "--" + BOUNDARY + "--\n").getBytes();

        ByteBuf firstBuf = Unpooled.wrappedBuffer(bodyStartBytes);
        ByteBuf finalBuf = Unpooled.wrappedBuffer(finalBigBytes);
        ByteBuf nextBuf;
        if (big) {
            nextBuf = Unpooled.wrappedBuffer(bodyPartBigBytes);
        } else {
            nextBuf = Unpooled.wrappedBuffer(intermediaryBytes);
        }
        DefaultHttpRequest req =
                new DefaultHttpRequest(HttpVersion.HTTP_1_0, HttpMethod.POST, "/up");
        req.headers().add(HttpHeaderNames.CONTENT_TYPE,
                          "multipart/form-data; boundary=" + BOUNDARY);

        long start = System.nanoTime();

        DefaultHttpDataFactory defaultHttpDataFactory =
                new DefaultHttpDataFactory(noDisk? 1024 * 1024 : 16 * 1024);
        HttpPostRequestDecoder decoder =
                new HttpPostRequestDecoder(defaultHttpDataFactory, req);
        firstBuf.retain();
        decoder.offer(new DefaultHttpContent(firstBuf));
        firstBuf.release();
        for (int i = 1; i < chunkNumber; i++) {
            nextBuf.retain();
            decoder.offer(new DefaultHttpContent(nextBuf));
            nextBuf.release();
            // The same buffer is re-offered for every chunk; rewind it for the next pass.
            nextBuf.readerIndex(0);
        }
        finalBuf.retain();
        decoder.offer(new DefaultLastHttpContent(finalBuf));
        finalBuf.release();
        // Drain all decoded parts; their content is not inspected by the benchmark.
        while (decoder.hasNext()) {
            decoder.next();
        }
        // Fully release every buffer (the decoder may have retained them).
        // NOTE: the original code drained finalBuf twice and never drained firstBuf,
        // leaking it on every invocation.
        while (finalBuf.refCnt() > 0) {
            finalBuf.release();
        }
        while (nextBuf.refCnt() > 0) {
            nextBuf.release();
        }
        while (firstBuf.refCnt() > 0) {
            firstBuf.release();
        }

        long stop = System.nanoTime();
        double time = (stop - start) / 1000000.0;
        defaultHttpDataFactory.cleanAllHttpData();
        defaultHttpDataFactory.cleanRequestHttpData(req);
        decoder.destroy();
        return time;
    }

    /**
     * Runs {@link #testHighNumberChunks(boolean, boolean)} under the given leak-detection
     * level, restoring the previous level afterwards.
     *
     * @param level the {@link ResourceLeakDetector.Level} to benchmark under
     * @param big   passed through to {@link #testHighNumberChunks(boolean, boolean)}
     * @return elapsed decoding time in milliseconds
     */
    private double runWithLevel(Level level, boolean big) {
        final Level previousLevel = ResourceLeakDetector.getLevel();
        try {
            ResourceLeakDetector.setLevel(level);
            return testHighNumberChunks(big, true);
        } finally {
            ResourceLeakDetector.setLevel(previousLevel);
        }
    }

    @Benchmark
    public double multipartRequestDecoderHighDisabledLevel() {
        return runWithLevel(Level.DISABLED, false);
    }

    @Benchmark
    public double multipartRequestDecoderBigDisabledLevel() {
        return runWithLevel(Level.DISABLED, true);
    }

    @Benchmark
    public double multipartRequestDecoderHighSimpleLevel() {
        return runWithLevel(Level.SIMPLE, false);
    }

    @Benchmark
    public double multipartRequestDecoderBigSimpleLevel() {
        return runWithLevel(Level.SIMPLE, true);
    }

    @Benchmark
    public double multipartRequestDecoderHighAdvancedLevel() {
        return runWithLevel(Level.ADVANCED, false);
    }

    @Benchmark
    public double multipartRequestDecoderBigAdvancedLevel() {
        return runWithLevel(Level.ADVANCED, true);
    }

    @Benchmark
    public double multipartRequestDecoderHighParanoidLevel() {
        return runWithLevel(Level.PARANOID, false);
    }

    @Benchmark
    public double multipartRequestDecoderBigParanoidLevel() {
        return runWithLevel(Level.PARANOID, true);
    }
}

View File

@ -0,0 +1,19 @@
/*
* Copyright 2020 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
/**
 * JMH microbenchmarks for {@link io.netty.handler.codec.http.multipart} — in particular
 * the multipart POST request decoder under the various resource-leak-detection levels.
 */
package io.netty.handler.codec.http.multipart;