Merge pull request #299 from fredericBregier/3

Branch 3: apply the same fixes and improvements as #290, #291 and #292
This commit is contained in:
Frédéric Brégier 2012-05-02 06:26:15 -07:00
commit 9ab14fba6d
6 changed files with 507 additions and 23 deletions
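
The first two files below share one fix: FileChannel.force(false) was previously called on every iteration of the write loops, forcing a sync to disk per write; the patch calls it once, after the loop completes, just before close(), and also closes the channels opened for the transferTo copy. A minimal sketch of the resulting pattern, assuming a plain FileChannel (names here are illustrative, not the exact fields of the changed classes):

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;

// Sketch only: flush to disk once after the write loop instead of once per write.
static void writeFully(FileChannel channel, ByteBuffer byteBuffer, int size) throws IOException {
    int written = 0;
    while (written < size) {
        written += channel.write(byteBuffer); // no per-iteration force() any more
    }
    channel.force(false); // single sync of the file contents
    channel.close();
}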

View File

@ -115,9 +115,9 @@ public abstract class AbstractDiskHttpData extends AbstractHttpData {
int written = 0;
while (written < size) {
written += localfileChannel.write(byteBuffer);
localfileChannel.force(false);
}
buffer.readerIndex(buffer.readerIndex() + written);
localfileChannel.force(false);
localfileChannel.close();
completed = true;
}
@ -141,7 +141,6 @@ public abstract class AbstractDiskHttpData extends AbstractHttpData {
}
while (written < localsize) {
written += fileChannel.write(byteBuffer);
fileChannel.force(false);
}
size += localsize;
buffer.readerIndex(buffer.readerIndex() + written);
@ -154,6 +153,7 @@ public abstract class AbstractDiskHttpData extends AbstractHttpData {
FileOutputStream outputStream = new FileOutputStream(file);
fileChannel = outputStream.getChannel();
}
fileChannel.force(false);
fileChannel.close();
fileChannel = null;
completed = true;
@ -191,9 +191,10 @@ public abstract class AbstractDiskHttpData extends AbstractHttpData {
while (read > 0) {
byteBuffer.position(read).flip();
written += localfileChannel.write(byteBuffer);
localfileChannel.force(false);
read = inputStream.read(bytes);
}
localfileChannel.force(false);
localfileChannel.close();
size = written;
if (definedSize > 0 && definedSize < size) {
file.delete();
@ -288,6 +289,8 @@ public abstract class AbstractDiskHttpData extends AbstractHttpData {
FileChannel in = inputStream.getChannel();
FileChannel out = outputStream.getChannel();
long destsize = in.transferTo(0, size, out);
in.close();
out.close();
if (destsize == size) {
file.delete();
file = dest;

View File

@ -200,8 +200,8 @@ public abstract class AbstractMemoryHttpData extends AbstractHttpData {
int written = 0;
while (written < length) {
written += fileChannel.write(byteBuffer);
fileChannel.force(false);
}
fileChannel.force(false);
fileChannel.close();
isRenamed = true;
return written == length;

View File

@ -116,18 +116,59 @@ final class HttpPostBodyUtil {
private HttpPostBodyUtil() {
}
// Some common methods shared between HttpPostRequestDecoder and HttpMessageDecoder
/**
* Skip control Characters
* @param buffer
*/
static void skipControlCharacters(ChannelBuffer buffer) {
for (;;) {
char c = (char) buffer.readUnsignedByte();
if (!Character.isISOControl(c) && !Character.isWhitespace(c)) {
buffer.readerIndex(buffer.readerIndex() - 1);
break;
}
}
}
/**
* Exception thrown when no backing array is found
*/
static class SeekAheadNoBackArrayException extends Exception {
private static final long serialVersionUID = -630418804938699495L;
}
/**
* This class is intended to reduce CPU usage when seeking ahead over bytes in
* HttpPostRequestDecoder
*/
static class SeekAheadOptimize {
byte[] bytes;
int readerIndex;
int pos;
int limit;
ChannelBuffer buffer;
/**
* @param buffer
*/
SeekAheadOptimize(ChannelBuffer buffer) throws SeekAheadNoBackArrayException {
if (!buffer.hasArray()) {
throw new SeekAheadNoBackArrayException();
}
this.buffer = buffer;
this.bytes = buffer.array();
this.pos = this.readerIndex = buffer.readerIndex();
this.limit = buffer.writerIndex();
}
/**
*
* @param minus this value will be used as (currentPos - minus) to set
* the current readerIndex in the buffer.
*/
void setReadPosition(int minus) {
pos -= minus;
readerIndex = pos;
buffer.readerIndex(readerIndex);
}
void clear() {
this.buffer = null;
this.bytes = null;
this.limit = 0;
this.pos = 0;
this.readerIndex = 0;
}
}
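
SeekAheadOptimize exposes the backing array of an array-backed ChannelBuffer so the decoder can scan bytes with plain array indexing instead of one readUnsignedByte() call per byte. The optimized methods added to HttpPostRequestDecoder in the next file all follow the same idiom, roughly sketched below (scanStandard and isDelimiter are illustrative placeholders, not methods from this patch):

void scan(ChannelBuffer undecodedChunk) {
    SeekAheadOptimize sao;
    try {
        sao = new SeekAheadOptimize(undecodedChunk); // only possible for array-backed buffers
    } catch (SeekAheadNoBackArrayException e) {
        scanStandard(undecodedChunk); // direct buffers keep the original byte-by-byte path
        return;
    }
    while (sao.pos < sao.limit) {
        byte b = sao.bytes[sao.pos++]; // read straight from the backing array
        if (isDelimiter(b)) {
            sao.setReadPosition(1); // step back one byte so the delimiter stays unread
            return;
        }
    }
    sao.setReadPosition(0); // everything consumed: publish the new readerIndex
}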

View File

@ -26,6 +26,8 @@ import java.util.TreeMap;
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.buffer.ChannelBuffers;
import org.jboss.netty.handler.codec.http2.HttpPostBodyUtil.SeekAheadNoBackArrayException;
import org.jboss.netty.handler.codec.http2.HttpPostBodyUtil.SeekAheadOptimize;
import org.jboss.netty.handler.codec.http.HttpPostBodyUtil.TransferEncodingMechanism;
/**
@ -427,7 +429,7 @@ public class HttpPostRequestDecoder {
* @throws ErrorDataDecoderException if there is a problem with the charset decoding or
* other errors
*/
private void parseBodyAttributes() throws ErrorDataDecoderException {
private void parseBodyAttributesStandard() throws ErrorDataDecoderException {
int firstpos = undecodedChunk.readerIndex();
int currentpos = firstpos;
int equalpos = firstpos;
@ -538,6 +540,141 @@ public class HttpPostRequestDecoder {
}
}
/**
* This method fills the map and list with as many Attributes as possible from the body in
* non-multipart mode.
*
* @throws ErrorDataDecoderException if there is a problem with the charset decoding or
* other errors
*/
private void parseBodyAttributes() throws ErrorDataDecoderException {
SeekAheadOptimize sao = null;
try {
sao = new SeekAheadOptimize(undecodedChunk);
} catch (SeekAheadNoBackArrayException e1) {
parseBodyAttributesStandard();
return;
}
int firstpos = undecodedChunk.readerIndex();
int currentpos = firstpos;
int equalpos = firstpos;
int ampersandpos = firstpos;
if (currentStatus == MultiPartStatus.NOTSTARTED) {
currentStatus = MultiPartStatus.DISPOSITION;
}
boolean contRead = true;
try {
loop:
while (sao.pos < sao.limit) {
char read = (char) (sao.bytes[sao.pos ++] & 0xFF);
currentpos ++;
switch (currentStatus) {
case DISPOSITION:// search '='
if (read == '=') {
currentStatus = MultiPartStatus.FIELD;
equalpos = currentpos - 1;
String key = decodeAttribute(
undecodedChunk.toString(firstpos, equalpos - firstpos, charset),
charset);
currentAttribute = factory.createAttribute(request, key);
firstpos = currentpos;
} else if (read == '&') { // special empty FIELD
currentStatus = MultiPartStatus.DISPOSITION;
ampersandpos = currentpos - 1;
String key = decodeAttribute(undecodedChunk.toString(firstpos, ampersandpos - firstpos, charset), charset);
currentAttribute = factory.createAttribute(request, key);
currentAttribute.setValue(""); // empty
addHttpData(currentAttribute);
currentAttribute = null;
firstpos = currentpos;
contRead = true;
}
break;
case FIELD:// search '&' or end of line
if (read == '&') {
currentStatus = MultiPartStatus.DISPOSITION;
ampersandpos = currentpos - 1;
setFinalBuffer(undecodedChunk.slice(firstpos, ampersandpos - firstpos));
firstpos = currentpos;
contRead = true;
} else if (read == HttpCodecUtil.CR) {
if (sao.pos < sao.limit) {
read = (char) (sao.bytes[sao.pos ++] & 0xFF);
currentpos++;
if (read == HttpCodecUtil.LF) {
currentStatus = MultiPartStatus.PREEPILOGUE;
ampersandpos = currentpos - 2;
sao.setReadPosition(0);
setFinalBuffer(
undecodedChunk.slice(firstpos, ampersandpos - firstpos));
firstpos = currentpos;
contRead = false;
break loop;
} else {
// Error
sao.setReadPosition(0);
contRead = false;
throw new ErrorDataDecoderException("Bad end of line");
}
} else {
if (sao.limit > 0) {
currentpos --;
}
}
} else if (read == HttpCodecUtil.LF) {
currentStatus = MultiPartStatus.PREEPILOGUE;
ampersandpos = currentpos - 1;
sao.setReadPosition(0);
setFinalBuffer(
undecodedChunk.slice(firstpos, ampersandpos - firstpos));
firstpos = currentpos;
contRead = false;
break loop;
}
break;
default:
// just stop
sao.setReadPosition(0);
contRead = false;
break loop;
}
}
if (isLastChunk && currentAttribute != null) {
// special case
ampersandpos = currentpos;
if (ampersandpos > firstpos) {
setFinalBuffer(
undecodedChunk.slice(firstpos, ampersandpos - firstpos));
} else if (! currentAttribute.isCompleted()) {
setFinalBuffer(ChannelBuffers.EMPTY_BUFFER);
}
firstpos = currentpos;
currentStatus = MultiPartStatus.EPILOGUE;
return;
}
if (contRead && currentAttribute != null) {
// reset index except if to continue in case of FIELD status
if (currentStatus == MultiPartStatus.FIELD) {
currentAttribute.addContent(
undecodedChunk.slice(firstpos, currentpos - firstpos),
false);
firstpos = currentpos;
}
undecodedChunk.readerIndex(firstpos);
} else {
// end of line so keep index
}
} catch (ErrorDataDecoderException e) {
// error while decoding
undecodedChunk.readerIndex(firstpos);
throw e;
} catch (IOException e) {
// error while decoding
undecodedChunk.readerIndex(firstpos);
throw new ErrorDataDecoderException(e);
}
}
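For context, parseBodyAttributes() above handles non-multipart (application/x-www-form-urlencoded) bodies. A hedged usage sketch of the decoder on such a body; the factory, request and chunk setup are assumptions for illustration, not part of this diff:

// Decoding a url-encoded body such as "a=1&b=&c=3", offered chunk by chunk.
HttpPostRequestDecoder decoder =
        new HttpPostRequestDecoder(new DefaultHttpDataFactory(false), request);
decoder.offer(chunk); // each offered chunk is parsed by parseBodyAttributes()
for (InterfaceHttpData data : decoder.getBodyHttpDatas()) {
    Attribute attr = (Attribute) data; // "b" yields the special empty-value attribute
    System.out.println(attr.getName() + " = " + attr.getValue());
}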
private void setFinalBuffer(ChannelBuffer buffer) throws ErrorDataDecoderException, IOException {
currentAttribute.addContent(buffer, true);
String value = decodeAttribute(
@ -700,6 +837,37 @@ public class HttpPostRequestDecoder {
}
}
/**
* Skip control Characters
*/
void skipControlCharacters() {
SeekAheadOptimize sao = null;
try {
sao = new SeekAheadOptimize(undecodedChunk);
} catch (SeekAheadNoBackArrayException e) {
skipControlCharactersStandard(undecodedChunk);
return;
}
while (sao.pos < sao.limit) {
char c = (char) (sao.bytes[sao.pos ++] & 0xFF);
if (!Character.isISOControl(c) && !Character.isWhitespace(c)) {
sao.setReadPosition(1);
return;
}
}
sao.setReadPosition(0);
}
static void skipControlCharactersStandard(ChannelBuffer buffer) {
for (;;) {
char c = (char) buffer.readUnsignedByte();
if (!Character.isISOControl(c) && !Character.isWhitespace(c)) {
buffer.readerIndex(buffer.readerIndex() - 1);
break;
}
}
}
/**
* Find the next Multipart Delimiter
* @param delimiter delimiter to find
@ -714,7 +882,7 @@ public class HttpPostRequestDecoder {
throws ErrorDataDecoderException {
// --AaB03x or --AaB03x--
int readerIndex = undecodedChunk.readerIndex();
HttpPostBodyUtil.skipControlCharacters(undecodedChunk);
skipControlCharacters();
skipOneLine();
String newline;
try {
@ -755,7 +923,7 @@ public class HttpPostRequestDecoder {
}
// read many lines until empty line with newline found! Store all data
while (!skipOneLine()) {
HttpPostBodyUtil.skipControlCharacters(undecodedChunk);
skipControlCharacters();
String newline;
try {
newline = readLine();
@ -1038,7 +1206,7 @@ public class HttpPostRequestDecoder {
* @throws NotEnoughDataDecoderException Need more chunks and
* reset the readerIndex to the previous value
*/
private String readLine() throws NotEnoughDataDecoderException {
private String readLineStandard() throws NotEnoughDataDecoderException {
int readerIndex = undecodedChunk.readerIndex();
try {
StringBuilder sb = new StringBuilder(64);
@ -1062,6 +1230,48 @@ public class HttpPostRequestDecoder {
undecodedChunk.readerIndex(readerIndex);
throw new NotEnoughDataDecoderException();
}
/**
* Read one line up to the CRLF or LF
* @return the String from one line
* @throws NotEnoughDataDecoderException Need more chunks and
* reset the readerIndex to the previous value
*/
private String readLine() throws NotEnoughDataDecoderException {
SeekAheadOptimize sao = null;
try {
sao = new SeekAheadOptimize(undecodedChunk);
} catch (SeekAheadNoBackArrayException e1) {
return readLineStandard();
}
int readerIndex = undecodedChunk.readerIndex();
try {
StringBuilder sb = new StringBuilder(64);
while (sao.pos < sao.limit) {
byte nextByte = sao.bytes[sao.pos ++];
if (nextByte == HttpCodecUtil.CR) {
if (sao.pos < sao.limit) {
nextByte = sao.bytes[sao.pos ++];
if (nextByte == HttpCodecUtil.LF) {
sao.setReadPosition(0);
return sb.toString();
}
} else {
sb.append((char) nextByte);
}
} else if (nextByte == HttpCodecUtil.LF) {
sao.setReadPosition(0);
return sb.toString();
} else {
sb.append((char) nextByte);
}
}
} catch (IndexOutOfBoundsException e) {
undecodedChunk.readerIndex(readerIndex);
throw new NotEnoughDataDecoderException(e);
}
undecodedChunk.readerIndex(readerIndex);
throw new NotEnoughDataDecoderException();
}
/**
* Read a FileUpload data as Byte (Binary) and add the bytes directly to the
@ -1071,7 +1281,7 @@ public class HttpPostRequestDecoder {
* do not reset the readerIndex since some values will already have been added to the FileOutput
* @throws ErrorDataDecoderException write IO error occurs with the FileUpload
*/
private void readFileUploadByteMultipart(String delimiter)
private void readFileUploadByteMultipartStandard(String delimiter)
throws NotEnoughDataDecoderException, ErrorDataDecoderException {
int readerIndex = undecodedChunk.readerIndex();
// found the decoder limit
@ -1157,12 +1367,128 @@ public class HttpPostRequestDecoder {
}
}
/**
* Read a FileUpload data as Byte (Binary) and add the bytes directly to the
* FileUpload. If the delimiter is found, the FileUpload is completed.
* @param delimiter
* @throws NotEnoughDataDecoderException Need more chunks but
* do not reset the readerIndex since some values will already have been added to the FileOutput
* @throws ErrorDataDecoderException write IO error occurs with the FileUpload
*/
private void readFileUploadByteMultipart(String delimiter)
throws NotEnoughDataDecoderException, ErrorDataDecoderException {
SeekAheadOptimize sao = null;
try {
sao = new SeekAheadOptimize(undecodedChunk);
} catch (SeekAheadNoBackArrayException e1) {
readFileUploadByteMultipartStandard(delimiter);
return;
}
int readerIndex = undecodedChunk.readerIndex();
// found the decoder limit
boolean newLine = true;
int index = 0;
int lastPosition = undecodedChunk.readerIndex();
boolean found = false;
while (sao.pos < sao.limit) {
byte nextByte = sao.bytes[sao.pos ++];
if (newLine) {
// Check the delimiter
if (nextByte == delimiter.codePointAt(index)) {
index ++;
if (delimiter.length() == index) {
found = true;
sao.setReadPosition(0);
break;
}
continue;
} else {
newLine = false;
index = 0;
// continue until end of line
if (nextByte == HttpCodecUtil.CR) {
if (sao.pos < sao.limit) {
nextByte = sao.bytes[sao.pos ++];
if (nextByte == HttpCodecUtil.LF) {
newLine = true;
index = 0;
sao.setReadPosition(0);
lastPosition = undecodedChunk.readerIndex() - 2;
}
} else {
// save last valid position
sao.setReadPosition(0);
lastPosition = undecodedChunk.readerIndex();
}
} else if (nextByte == HttpCodecUtil.LF) {
newLine = true;
index = 0;
sao.setReadPosition(0);
lastPosition = undecodedChunk.readerIndex() - 1;
} else {
// save last valid position
sao.setReadPosition(0);
lastPosition = undecodedChunk.readerIndex();
}
}
} else {
// continue until end of line
if (nextByte == HttpCodecUtil.CR) {
if (sao.pos < sao.limit) {
nextByte = sao.bytes[sao.pos ++];
if (nextByte == HttpCodecUtil.LF) {
newLine = true;
index = 0;
sao.setReadPosition(0);
lastPosition = undecodedChunk.readerIndex() - 2;
}
} else {
// save last valid position
sao.setReadPosition(0);
lastPosition = undecodedChunk.readerIndex();
}
} else if (nextByte == HttpCodecUtil.LF) {
newLine = true;
index = 0;
sao.setReadPosition(0);
lastPosition = undecodedChunk.readerIndex() - 1;
} else {
// save last valid position
sao.setReadPosition(0);
lastPosition = undecodedChunk.readerIndex();
}
}
}
ChannelBuffer buffer = undecodedChunk.slice(readerIndex, lastPosition - readerIndex);
if (found) {
// found so lastPosition is correct and final
try {
currentFileUpload.addContent(buffer, true);
// just before the CRLF and delimiter
undecodedChunk.readerIndex(lastPosition);
} catch (IOException e) {
throw new ErrorDataDecoderException(e);
}
} else {
// possibly the delimiter is partially found but still the last position is OK
try {
currentFileUpload.addContent(buffer, false);
// last valid char (not CR, not LF, not beginning of delimiter)
undecodedChunk.readerIndex(lastPosition);
throw new NotEnoughDataDecoderException();
} catch (IOException e) {
throw new ErrorDataDecoderException(e);
}
}
}
/**
* Load the field value from a Multipart request
* @throws NotEnoughDataDecoderException Need more chunks
* @throws ErrorDataDecoderException
*/
private void loadFieldMultipart(String delimiter)
private void loadFieldMultipartStandard(String delimiter)
throws NotEnoughDataDecoderException, ErrorDataDecoderException {
int readerIndex = undecodedChunk.readerIndex();
try {
@ -1252,6 +1578,120 @@ public class HttpPostRequestDecoder {
}
}
/**
* Load the field value from a Multipart request
* @throws NotEnoughDataDecoderException Need more chunks
* @throws ErrorDataDecoderException
*/
private void loadFieldMultipart(String delimiter)
throws NotEnoughDataDecoderException, ErrorDataDecoderException {
SeekAheadOptimize sao = null;
try {
sao = new SeekAheadOptimize(undecodedChunk);
} catch (SeekAheadNoBackArrayException e1) {
loadFieldMultipartStandard(delimiter);
return;
}
int readerIndex = undecodedChunk.readerIndex();
try {
// found the decoder limit
boolean newLine = true;
int index = 0;
int lastPosition = undecodedChunk.readerIndex();
boolean found = false;
while (sao.pos < sao.limit) {
byte nextByte = sao.bytes[sao.pos ++];
if (newLine) {
// Check the delimiter
if (nextByte == delimiter.codePointAt(index)) {
index ++;
if (delimiter.length() == index) {
found = true;
sao.setReadPosition(0);
break;
}
continue;
} else {
newLine = false;
index = 0;
// continue until end of line
if (nextByte == HttpCodecUtil.CR) {
if (sao.pos < sao.limit) {
nextByte = sao.bytes[sao.pos ++];
if (nextByte == HttpCodecUtil.LF) {
newLine = true;
index = 0;
sao.setReadPosition(0);
lastPosition = undecodedChunk.readerIndex() - 2;
}
} else {
sao.setReadPosition(0);
lastPosition = undecodedChunk.readerIndex();
}
} else if (nextByte == HttpCodecUtil.LF) {
newLine = true;
index = 0;
sao.setReadPosition(0);
lastPosition = undecodedChunk.readerIndex() - 1;
} else {
sao.setReadPosition(0);
lastPosition = undecodedChunk.readerIndex();
}
}
} else {
// continue until end of line
if (nextByte == HttpCodecUtil.CR) {
if (sao.pos < sao.limit) {
nextByte = sao.bytes[sao.pos ++];
if (nextByte == HttpCodecUtil.LF) {
newLine = true;
index = 0;
sao.setReadPosition(0);
lastPosition = undecodedChunk.readerIndex() - 2;
}
} else {
sao.setReadPosition(0);
lastPosition = undecodedChunk.readerIndex();
}
} else if (nextByte == HttpCodecUtil.LF) {
newLine = true;
index = 0;
sao.setReadPosition(0);
lastPosition = undecodedChunk.readerIndex() - 1;
} else {
sao.setReadPosition(0);
lastPosition = undecodedChunk.readerIndex();
}
}
}
if (found) {
// found so lastPosition is correct
// but position is just after the delimiter (either close delimiter or simple one)
// so go back of delimiter size
try {
currentAttribute.addContent(
undecodedChunk.slice(readerIndex, lastPosition - readerIndex), true);
} catch (IOException e) {
throw new ErrorDataDecoderException(e);
}
undecodedChunk.readerIndex(lastPosition);
} else {
try {
currentAttribute.addContent(
undecodedChunk.slice(readerIndex, lastPosition - readerIndex), false);
} catch (IOException e) {
throw new ErrorDataDecoderException(e);
}
undecodedChunk.readerIndex(lastPosition);
throw new NotEnoughDataDecoderException();
}
} catch (IndexOutOfBoundsException e) {
undecodedChunk.readerIndex(readerIndex);
throw new NotEnoughDataDecoderException(e);
}
}
/**
* Clean the String of any disallowed characters
* @return the cleaned String

View File

@ -64,7 +64,7 @@ public class MixedAttribute implements Attribute {
.getName());
if (((MemoryAttribute) attribute).getChannelBuffer() != null) {
diskAttribute.addContent(((MemoryAttribute) attribute)
.getChannelBuffer(), last);
.getChannelBuffer(), true);
}
attribute = diskAttribute;
}

View File

@ -57,7 +57,7 @@ public class MixedFileUpload implements FileUpload {
definedSize);
if (((MemoryFileUpload) fileUpload).getChannelBuffer() != null) {
diskFileUpload.addContent(((MemoryFileUpload) fileUpload)
.getChannelBuffer(), last);
.getChannelBuffer(), true);
}
fileUpload = diskFileUpload;
}