Deduplicate and simplify code in HttpPostMultipartRequestDecoder
Motivation:

- `HttpPostMultipartRequestDecoder` contains two pairs of nearly identical methods: `readFileUploadByteMultipartStandard` + `readFileUploadByteMultipart` and `loadFieldMultipartStandard` + `loadFieldMultipart`.
- These methods use `NotEnoughDataDecoderException` to signal that the current chunk does not yet contain the last piece of data (exception handling is very expensive).
- These methods can be greatly simplified.
- The methods `loadFieldMultipart` and `loadFieldMultipartStandard` also catch `IndexOutOfBoundsException` unnecessarily.

Modifications:

- Remove the duplicate methods.
- Replace the `NotEnoughDataDecoderException` handling with a boolean return value (a small sketch of the difference follows below).
- Simplify the code.

Result:

The code is cleaner and easier to maintain, with less exception-handling logic.
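For illustration only — this is not part of the patch, and the class and method names are made up — the following self-contained sketch shows the control-flow difference between reporting "need more data" by throwing an exception and reporting it with an ordinary return value, which is the core of the change:

```java
// Hypothetical, self-contained illustration of the change (not Netty code).
final class NeedMoreDataDemo {
    static final class NotEnoughDataException extends RuntimeException { }

    // Before: "need more data" is signalled by throwing, which every caller must catch.
    static String decodeOld(StringBuilder buffered, String delimiter) {
        try {
            int end = findDelimiterOrThrow(buffered, delimiter);
            return buffered.substring(0, end);
        } catch (NotEnoughDataException ignored) {
            return null; // wait for the next chunk
        }
    }

    // After: the same condition is an ordinary "not found" result; no exception is built or caught.
    static String decodeNew(StringBuilder buffered, String delimiter) {
        int end = buffered.indexOf(delimiter);
        if (end < 0) {
            return null; // wait for the next chunk
        }
        return buffered.substring(0, end);
    }

    private static int findDelimiterOrThrow(StringBuilder buffered, String delimiter) {
        int end = buffered.indexOf(delimiter);
        if (end < 0) {
            throw new NotEnoughDataException();
        }
        return end;
    }
}
```

In the decoder itself the same pattern appears as `if (!loadDataMultipart(...)) { return null; }` in the hunks below, replacing a try/catch around the load call.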
parent 64a3e6c69c
commit 3e9f617504
@@ -511,7 +511,7 @@ public class HttpPostMultipartRequestDecoder implements InterfaceHttpPostRequest
                         .getValue()) : 0L;
             } catch (IOException e) {
                 throw new ErrorDataDecoderException(e);
-            } catch (NumberFormatException e) {
+            } catch (NumberFormatException ignored) {
                 size = 0;
             }
             try {
@@ -534,9 +534,8 @@ public class HttpPostMultipartRequestDecoder implements InterfaceHttpPostRequest
                 }
             }
             // load data
-            try {
-                loadFieldMultipart(undecodedChunk, multipartDataBoundary, currentAttribute);
-            } catch (NotEnoughDataDecoderException ignored) {
+            if (!loadDataMultipart(undecodedChunk, multipartDataBoundary, currentAttribute)) {
+                // Delimiter is not found. Need more chunks.
                 return null;
             }
             Attribute finalAttribute = currentAttribute;
@@ -883,12 +882,8 @@ public class HttpPostMultipartRequestDecoder implements InterfaceHttpPostRequest
                 }
             }
             // load data as much as possible
-            try {
-                readFileUploadByteMultipart(undecodedChunk, delimiter, currentFileUpload);
-            } catch (NotEnoughDataDecoderException e) {
-                // do not change the buffer position
-                // since some can be already saved into FileUpload
-                // So do not change the currentStatus
+            if (!loadDataMultipart(undecodedChunk, delimiter, currentFileUpload)) {
+                // Delimiter is not found. Need more chunks.
                 return null;
             }
             if (currentFileUpload.isCompleted()) {
@@ -1271,442 +1266,96 @@ public class HttpPostMultipartRequestDecoder implements InterfaceHttpPostRequest
     }
 
-    /**
-     * Read a FileUpload data as Byte (Binary) and add the bytes directly to the
-     * FileUpload. If the delimiter is found, the FileUpload is completed.
-     *
-     * @throws NotEnoughDataDecoderException
-     *             Need more chunks but do not reset the readerInder since some
-     *             values will be already added to the FileOutput
-     * @throws ErrorDataDecoderException
-     *             write IO error occurs with the FileUpload
-     */
-    private static void readFileUploadByteMultipartStandard(ByteBuf undecodedChunk, String delimiter,
-            FileUpload currentFileUpload) {
-        int readerIndex = undecodedChunk.readerIndex();
-        // found the decoder limit
-        boolean newLine = true;
-        int index = 0;
-        int lastPosition = undecodedChunk.readerIndex();
-        boolean found = false;
-        while (undecodedChunk.isReadable()) {
-            byte nextByte = undecodedChunk.readByte();
-            if (newLine) {
-                // Check the delimiter
-                if (nextByte == delimiter.codePointAt(index)) {
-                    index++;
-                    if (delimiter.length() == index) {
-                        found = true;
-                        break;
-                    }
-                } else {
-                    newLine = false;
-                    index = 0;
-                    // continue until end of line
-                    if (nextByte == HttpConstants.CR) {
-                        if (undecodedChunk.isReadable()) {
-                            nextByte = undecodedChunk.readByte();
-                            if (nextByte == HttpConstants.LF) {
-                                newLine = true;
-                                index = 0;
-                                lastPosition = undecodedChunk.readerIndex() - 2;
-                            } else {
-                                // save last valid position
-                                lastPosition = undecodedChunk.readerIndex() - 1;
-
-                                // Unread next byte.
-                                undecodedChunk.readerIndex(lastPosition);
-                            }
-                        }
-                    } else if (nextByte == HttpConstants.LF) {
-                        newLine = true;
-                        index = 0;
-                        lastPosition = undecodedChunk.readerIndex() - 1;
-                    } else {
-                        // save last valid position
-                        lastPosition = undecodedChunk.readerIndex();
-                    }
-                }
-            } else {
-                // continue until end of line
-                if (nextByte == HttpConstants.CR) {
-                    if (undecodedChunk.isReadable()) {
-                        nextByte = undecodedChunk.readByte();
-                        if (nextByte == HttpConstants.LF) {
-                            newLine = true;
-                            index = 0;
-                            lastPosition = undecodedChunk.readerIndex() - 2;
-                        } else {
-                            // save last valid position
-                            lastPosition = undecodedChunk.readerIndex() - 1;
-
-                            // Unread next byte.
-                            undecodedChunk.readerIndex(lastPosition);
-                        }
-                    }
-                } else if (nextByte == HttpConstants.LF) {
-                    newLine = true;
-                    index = 0;
-                    lastPosition = undecodedChunk.readerIndex() - 1;
-                } else {
-                    // save last valid position
-                    lastPosition = undecodedChunk.readerIndex();
-                }
-            }
-        }
-        ByteBuf buffer = undecodedChunk.copy(readerIndex, lastPosition - readerIndex);
-        if (found) {
-            // found so lastPosition is correct and final
-            try {
-                currentFileUpload.addContent(buffer, true);
-                // just before the CRLF and delimiter
-                undecodedChunk.readerIndex(lastPosition);
-            } catch (IOException e) {
-                throw new ErrorDataDecoderException(e);
-            }
-        } else {
-            // possibly the delimiter is partially found but still the last
-            // position is OK
-            try {
-                currentFileUpload.addContent(buffer, false);
-                // last valid char (not CR, not LF, not beginning of delimiter)
-                undecodedChunk.readerIndex(lastPosition);
-                throw new NotEnoughDataDecoderException();
-            } catch (IOException e) {
-                throw new ErrorDataDecoderException(e);
-            }
-        }
-    }
-
-    /**
-     * Read a FileUpload data as Byte (Binary) and add the bytes directly to the
-     * FileUpload. If the delimiter is found, the FileUpload is completed.
-     *
-     * @throws NotEnoughDataDecoderException
-     *             Need more chunks but do not reset the {@code readerIndex} since some
-     *             values will be already added to the FileOutput
-     * @throws ErrorDataDecoderException
-     *             write IO error occurs with the FileUpload
-     */
-    private static void readFileUploadByteMultipart(ByteBuf undecodedChunk, String delimiter,
-            FileUpload currentFileUpload) {
-        if (!undecodedChunk.hasArray()) {
-            readFileUploadByteMultipartStandard(undecodedChunk, delimiter, currentFileUpload);
-            return;
-        }
-        SeekAheadOptimize sao = new SeekAheadOptimize(undecodedChunk);
-        int readerIndex = undecodedChunk.readerIndex();
-        // found the decoder limit
-        boolean newLine = true;
-        int index = 0;
-        int lastrealpos = sao.pos;
-        int lastPosition;
-        boolean found = false;
-
-        while (sao.pos < sao.limit) {
-            byte nextByte = sao.bytes[sao.pos++];
-            if (newLine) {
-                // Check the delimiter
-                if (nextByte == delimiter.codePointAt(index)) {
-                    index++;
-                    if (delimiter.length() == index) {
-                        found = true;
-                        break;
-                    }
-                } else {
-                    newLine = false;
-                    index = 0;
-                    // continue until end of line
-                    if (nextByte == HttpConstants.CR) {
-                        if (sao.pos < sao.limit) {
-                            nextByte = sao.bytes[sao.pos++];
-                            if (nextByte == HttpConstants.LF) {
-                                newLine = true;
-                                index = 0;
-                                lastrealpos = sao.pos - 2;
-                            } else {
-                                // unread next byte
-                                sao.pos--;
-
-                                // save last valid position
-                                lastrealpos = sao.pos;
-                            }
-                        }
-                    } else if (nextByte == HttpConstants.LF) {
-                        newLine = true;
-                        index = 0;
-                        lastrealpos = sao.pos - 1;
-                    } else {
-                        // save last valid position
-                        lastrealpos = sao.pos;
-                    }
-                }
-            } else {
-                // continue until end of line
-                if (nextByte == HttpConstants.CR) {
-                    if (sao.pos < sao.limit) {
-                        nextByte = sao.bytes[sao.pos++];
-                        if (nextByte == HttpConstants.LF) {
-                            newLine = true;
-                            index = 0;
-                            lastrealpos = sao.pos - 2;
-                        } else {
-                            // unread next byte
-                            sao.pos--;
-
-                            // save last valid position
-                            lastrealpos = sao.pos;
-                        }
-                    }
-                } else if (nextByte == HttpConstants.LF) {
-                    newLine = true;
-                    index = 0;
-                    lastrealpos = sao.pos - 1;
-                } else {
-                    // save last valid position
-                    lastrealpos = sao.pos;
-                }
-            }
-        }
-        lastPosition = sao.getReadPosition(lastrealpos);
-        ByteBuf buffer = undecodedChunk.copy(readerIndex, lastPosition - readerIndex);
-        if (found) {
-            // found so lastPosition is correct and final
-            try {
-                currentFileUpload.addContent(buffer, true);
-                // just before the CRLF and delimiter
-                undecodedChunk.readerIndex(lastPosition);
-            } catch (IOException e) {
-                throw new ErrorDataDecoderException(e);
-            }
-        } else {
-            // possibly the delimiter is partially found but still the last
-            // position is OK
-            try {
-                currentFileUpload.addContent(buffer, false);
-                // last valid char (not CR, not LF, not beginning of delimiter)
-                undecodedChunk.readerIndex(lastPosition);
-                throw new NotEnoughDataDecoderException();
-            } catch (IOException e) {
-                throw new ErrorDataDecoderException(e);
-            }
-        }
-    }
+    /**
+     * Load the field value or file data from a Multipart request
+     *
+     * @return {@code true} if the last chunk is loaded (boundary delimiter found), {@code false} if need more chunks
+     * @throws ErrorDataDecoderException
+     */
+    private static boolean loadDataMultipartStandard(ByteBuf undecodedChunk, String delimiter, HttpData httpData) {
+        final int startReaderIndex = undecodedChunk.readerIndex();
+        final int delimeterLength = delimiter.length();
+        int index = 0;
+        int lastPosition = startReaderIndex;
+        byte prevByte = HttpConstants.LF;
+        boolean delimiterFound = false;
+        while (undecodedChunk.isReadable()) {
+            final byte nextByte = undecodedChunk.readByte();
+            // Check the delimiter
+            if (prevByte == HttpConstants.LF && nextByte == delimiter.codePointAt(index)) {
+                index++;
+                if (delimeterLength == index) {
+                    delimiterFound = true;
+                    break;
+                }
+                continue;
+            }
+            lastPosition = undecodedChunk.readerIndex();
+            if (nextByte == HttpConstants.LF) {
+                index = 0;
+                lastPosition -= (prevByte == HttpConstants.CR)? 2 : 1;
+            }
+            prevByte = nextByte;
+        }
+        if (prevByte == HttpConstants.CR) {
+            lastPosition--;
+        }
+        ByteBuf content = undecodedChunk.copy(startReaderIndex, lastPosition - startReaderIndex);
+        try {
+            httpData.addContent(content, delimiterFound);
+        } catch (IOException e) {
+            throw new ErrorDataDecoderException(e);
+        }
+        undecodedChunk.readerIndex(lastPosition);
+        return delimiterFound;
+    }
 
-    /**
-     * Load the field value from a Multipart request
-     *
-     * @throws NotEnoughDataDecoderException
-     *             Need more chunks
-     * @throws ErrorDataDecoderException
-     */
-    private static void loadFieldMultipartStandard(ByteBuf undecodedChunk, String delimiter,
-            Attribute currentAttribute) {
-        int readerIndex = undecodedChunk.readerIndex();
-        try {
-            // found the decoder limit
-            boolean newLine = true;
-            int index = 0;
-            int lastPosition = undecodedChunk.readerIndex();
-            boolean found = false;
-            while (undecodedChunk.isReadable()) {
-                byte nextByte = undecodedChunk.readByte();
-                if (newLine) {
-                    // Check the delimiter
-                    if (nextByte == delimiter.codePointAt(index)) {
-                        index++;
-                        if (delimiter.length() == index) {
-                            found = true;
-                            break;
-                        }
-                    } else {
-                        newLine = false;
-                        index = 0;
-                        // continue until end of line
-                        if (nextByte == HttpConstants.CR) {
-                            if (undecodedChunk.isReadable()) {
-                                nextByte = undecodedChunk.readByte();
-                                if (nextByte == HttpConstants.LF) {
-                                    newLine = true;
-                                    index = 0;
-                                    lastPosition = undecodedChunk.readerIndex() - 2;
-                                } else {
-                                    // Unread second nextByte
-                                    lastPosition = undecodedChunk.readerIndex() - 1;
-                                    undecodedChunk.readerIndex(lastPosition);
-                                }
-                            } else {
-                                lastPosition = undecodedChunk.readerIndex() - 1;
-                            }
-                        } else if (nextByte == HttpConstants.LF) {
-                            newLine = true;
-                            index = 0;
-                            lastPosition = undecodedChunk.readerIndex() - 1;
-                        } else {
-                            lastPosition = undecodedChunk.readerIndex();
-                        }
-                    }
-                } else {
-                    // continue until end of line
-                    if (nextByte == HttpConstants.CR) {
-                        if (undecodedChunk.isReadable()) {
-                            nextByte = undecodedChunk.readByte();
-                            if (nextByte == HttpConstants.LF) {
-                                newLine = true;
-                                index = 0;
-                                lastPosition = undecodedChunk.readerIndex() - 2;
-                            } else {
-                                // Unread second nextByte
-                                lastPosition = undecodedChunk.readerIndex() - 1;
-                                undecodedChunk.readerIndex(lastPosition);
-                            }
-                        } else {
-                            lastPosition = undecodedChunk.readerIndex() - 1;
-                        }
-                    } else if (nextByte == HttpConstants.LF) {
-                        newLine = true;
-                        index = 0;
-                        lastPosition = undecodedChunk.readerIndex() - 1;
-                    } else {
-                        lastPosition = undecodedChunk.readerIndex();
-                    }
-                }
-            }
-            if (found) {
-                // found so lastPosition is correct
-                // but position is just after the delimiter (either close
-                // delimiter or simple one)
-                // so go back of delimiter size
-                try {
-                    currentAttribute.addContent(
-                            undecodedChunk.copy(readerIndex, lastPosition - readerIndex), true);
-                } catch (IOException e) {
-                    throw new ErrorDataDecoderException(e);
-                }
-                undecodedChunk.readerIndex(lastPosition);
-            } else {
-                try {
-                    currentAttribute.addContent(
-                            undecodedChunk.copy(readerIndex, lastPosition - readerIndex), false);
-                } catch (IOException e) {
-                    throw new ErrorDataDecoderException(e);
-                }
-                undecodedChunk.readerIndex(lastPosition);
-                throw new NotEnoughDataDecoderException();
-            }
-        } catch (IndexOutOfBoundsException e) {
-            undecodedChunk.readerIndex(readerIndex);
-            throw new NotEnoughDataDecoderException(e);
-        }
-    }
-
-    /**
-     * Load the field value from a Multipart request
-     *
-     * @throws NotEnoughDataDecoderException
-     *             Need more chunks
-     * @throws ErrorDataDecoderException
-     */
-    private static void loadFieldMultipart(ByteBuf undecodedChunk, String delimiter, Attribute currentAttribute) {
-        if (!undecodedChunk.hasArray()) {
-            loadFieldMultipartStandard(undecodedChunk, delimiter, currentAttribute);
-            return;
-        }
-        SeekAheadOptimize sao = new SeekAheadOptimize(undecodedChunk);
-        int readerIndex = undecodedChunk.readerIndex();
-        try {
-            // found the decoder limit
-            boolean newLine = true;
-            int index = 0;
-            int lastPosition;
-            int lastrealpos = sao.pos;
-            boolean found = false;
-
-            while (sao.pos < sao.limit) {
-                byte nextByte = sao.bytes[sao.pos++];
-                if (newLine) {
-                    // Check the delimiter
-                    if (nextByte == delimiter.codePointAt(index)) {
-                        index++;
-                        if (delimiter.length() == index) {
-                            found = true;
-                            break;
-                        }
-                    } else {
-                        newLine = false;
-                        index = 0;
-                        // continue until end of line
-                        if (nextByte == HttpConstants.CR) {
-                            if (sao.pos < sao.limit) {
-                                nextByte = sao.bytes[sao.pos++];
-                                if (nextByte == HttpConstants.LF) {
-                                    newLine = true;
-                                    index = 0;
-                                    lastrealpos = sao.pos - 2;
-                                } else {
-                                    // Unread last nextByte
-                                    sao.pos--;
-                                    lastrealpos = sao.pos;
-                                }
-                            }
-                        } else if (nextByte == HttpConstants.LF) {
-                            newLine = true;
-                            index = 0;
-                            lastrealpos = sao.pos - 1;
-                        } else {
-                            lastrealpos = sao.pos;
-                        }
-                    }
-                } else {
-                    // continue until end of line
-                    if (nextByte == HttpConstants.CR) {
-                        if (sao.pos < sao.limit) {
-                            nextByte = sao.bytes[sao.pos++];
-                            if (nextByte == HttpConstants.LF) {
-                                newLine = true;
-                                index = 0;
-                                lastrealpos = sao.pos - 2;
-                            } else {
-                                // Unread last nextByte
-                                sao.pos--;
-                                lastrealpos = sao.pos;
-                            }
-                        }
-                    } else if (nextByte == HttpConstants.LF) {
-                        newLine = true;
-                        index = 0;
-                        lastrealpos = sao.pos - 1;
-                    } else {
-                        lastrealpos = sao.pos;
-                    }
-                }
-            }
-            lastPosition = sao.getReadPosition(lastrealpos);
-            if (found) {
-                // found so lastPosition is correct
-                // but position is just after the delimiter (either close
-                // delimiter or simple one)
-                // so go back of delimiter size
-                try {
-                    currentAttribute.addContent(
-                            undecodedChunk.copy(readerIndex, lastPosition - readerIndex), true);
-                } catch (IOException e) {
-                    throw new ErrorDataDecoderException(e);
-                }
-                undecodedChunk.readerIndex(lastPosition);
-            } else {
-                try {
-                    currentAttribute.addContent(
-                            undecodedChunk.copy(readerIndex, lastPosition - readerIndex), false);
-                } catch (IOException e) {
-                    throw new ErrorDataDecoderException(e);
-                }
-                undecodedChunk.readerIndex(lastPosition);
-                throw new NotEnoughDataDecoderException();
-            }
-        } catch (IndexOutOfBoundsException e) {
-            undecodedChunk.readerIndex(readerIndex);
-            throw new NotEnoughDataDecoderException(e);
-        }
-    }
+    /**
+     * Load the field value from a Multipart request
+     *
+     * @return {@code true} if the last chunk is loaded (boundary delimiter found), {@code false} if need more chunks
+     * @throws ErrorDataDecoderException
+     */
+    private static boolean loadDataMultipart(ByteBuf undecodedChunk, String delimiter, HttpData httpData) {
+        if (!undecodedChunk.hasArray()) {
+            return loadDataMultipartStandard(undecodedChunk, delimiter, httpData);
+        }
+        final SeekAheadOptimize sao = new SeekAheadOptimize(undecodedChunk);
+        final int startReaderIndex = undecodedChunk.readerIndex();
+        final int delimeterLength = delimiter.length();
+        int index = 0;
+        int lastRealPos = sao.pos;
+        byte prevByte = HttpConstants.LF;
+        boolean delimiterFound = false;
+
+        while (sao.pos < sao.limit) {
+            final byte nextByte = sao.bytes[sao.pos++];
+            // Check the delimiter
+            if (prevByte == HttpConstants.LF && nextByte == delimiter.codePointAt(index)) {
+                index++;
+                if (delimeterLength == index) {
+                    delimiterFound = true;
+                    break;
+                }
+                continue;
+            }
+            lastRealPos = sao.pos;
+            if (nextByte == HttpConstants.LF) {
+                index = 0;
+                lastRealPos -= (prevByte == HttpConstants.CR)? 2 : 1;
+            }
+            prevByte = nextByte;
+        }
+        if (prevByte == HttpConstants.CR) {
+            lastRealPos--;
+        }
+        final int lastPosition = sao.getReadPosition(lastRealPos);
+        final ByteBuf content = undecodedChunk.copy(startReaderIndex, lastPosition - startReaderIndex);
+        try {
+            httpData.addContent(content, delimiterFound);
+        } catch (IOException e) {
+            throw new ErrorDataDecoderException(e);
+        }
+        undecodedChunk.readerIndex(lastPosition);
+        return delimiterFound;
+    }
 
     /**
@@ -1714,25 +1363,25 @@ public class HttpPostMultipartRequestDecoder implements InterfaceHttpPostRequest
      *
      * @return the cleaned String
      */
-    @SuppressWarnings("StatementWithEmptyBody")
     private static String cleanString(String field) {
-        StringBuilder sb = new StringBuilder(field.length());
-        for (int i = 0; i < field.length(); i++) {
+        int size = field.length();
+        StringBuilder sb = new StringBuilder(size);
+        for (int i = 0; i < size; i++) {
             char nextChar = field.charAt(i);
-            if (nextChar == HttpConstants.COLON) {
-                sb.append(HttpConstants.SP_CHAR);
-            } else if (nextChar == HttpConstants.COMMA) {
-                sb.append(HttpConstants.SP_CHAR);
-            } else if (nextChar == HttpConstants.EQUALS) {
-                sb.append(HttpConstants.SP_CHAR);
-            } else if (nextChar == HttpConstants.SEMICOLON) {
-                sb.append(HttpConstants.SP_CHAR);
-            } else if (nextChar == HttpConstants.HT) {
-                sb.append(HttpConstants.SP_CHAR);
-            } else if (nextChar == HttpConstants.DOUBLE_QUOTE) {
-                // nothing added, just removes it
-            } else {
-                sb.append(nextChar);
+            switch (nextChar) {
+            case HttpConstants.COLON:
+            case HttpConstants.COMMA:
+            case HttpConstants.EQUALS:
+            case HttpConstants.SEMICOLON:
+            case HttpConstants.HT:
+                sb.append(HttpConstants.SP_CHAR);
+                break;
+            case HttpConstants.DOUBLE_QUOTE:
+                // nothing added, just removes it
+                break;
+            default:
+                sb.append(nextChar);
+                break;
             }
         }
         return sb.toString().trim();
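As a closing note, the other idea the rewritten `loadDataMultipart*` loops rely on is that a multipart boundary is only meaningful at the start of a line, so the scanner just has to remember the previous byte to know where lines start. The sketch below (illustrative only, not Netty code; all names are made up) shows that idea in its simplest form, with "boundary not found yet" reported as a plain return value:

```java
// Hypothetical, self-contained sketch of matching a boundary only at line starts (not the patch).
final class BoundaryScanDemo {
    /** Index of the first byte of the boundary line, or -1 if the boundary is not in the buffer yet. */
    static int findBoundaryLine(byte[] buf, int len, byte[] boundary) {
        for (int lineStart = 0; lineStart + boundary.length <= len; lineStart++) {
            // A boundary can only begin at offset 0 or right after an LF.
            boolean atLineStart = lineStart == 0 || buf[lineStart - 1] == '\n';
            if (!atLineStart) {
                continue;
            }
            boolean match = true;
            for (int i = 0; i < boundary.length; i++) {
                if (buf[lineStart + i] != boundary[i]) {
                    match = false;
                    break;
                }
            }
            if (match) {
                return lineStart;
            }
        }
        return -1; // need more chunks; the caller checks the return value, nothing is thrown
    }
}
```

The real methods in the hunk above do this in a single pass and are also careful to keep a trailing CR or a partially matched boundary out of the data they hand to `HttpData#addContent`, but the calling convention is the same: a "not found" result instead of a `NotEnoughDataDecoderException`.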