2008-11-19 08:22:15 +01:00
|
|
|
/*
|
|
|
|
* JBoss, Home of Professional Open Source
|
|
|
|
* Copyright 2005-2008, Red Hat Middleware LLC, and individual contributors
|
|
|
|
* by the @authors tag. See the copyright.txt in the distribution for a
|
|
|
|
* full listing of individual contributors.
|
|
|
|
*
|
|
|
|
* This is free software; you can redistribute it and/or modify it
|
|
|
|
* under the terms of the GNU Lesser General Public License as
|
|
|
|
* published by the Free Software Foundation; either version 2.1 of
|
|
|
|
* the License, or (at your option) any later version.
|
|
|
|
*
|
|
|
|
* This software is distributed in the hope that it will be useful,
|
|
|
|
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
|
|
|
* Lesser General Public License for more details.
|
|
|
|
*
|
|
|
|
* You should have received a copy of the GNU Lesser General Public
|
|
|
|
* License along with this software; if not, write to the Free
|
|
|
|
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
|
|
|
|
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
|
|
|
|
*/
|
|
|
|
package org.jboss.netty.handler.codec.http;
|
|
|
|
|
|
|
|
import java.util.List;
|
2008-11-30 17:22:03 +01:00
|
|
|
import java.util.regex.Matcher;
|
|
|
|
import java.util.regex.Pattern;
|
2008-11-19 08:22:15 +01:00
|
|
|
|
|
|
|
import org.jboss.netty.buffer.ChannelBuffer;
|
|
|
|
import org.jboss.netty.buffer.ChannelBuffers;
|
|
|
|
import org.jboss.netty.channel.Channel;
|
|
|
|
import org.jboss.netty.channel.ChannelHandlerContext;
|
2009-03-10 08:20:27 +01:00
|
|
|
import org.jboss.netty.handler.codec.frame.TooLongFrameException;
|
2008-11-19 08:22:15 +01:00
|
|
|
import org.jboss.netty.handler.codec.replay.ReplayingDecoder;
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Decodes an HTTP message.
|
|
|
|
*
|
|
|
|
* @author The Netty Project (netty-dev@lists.jboss.org)
|
|
|
|
* @author Andy Taylor (andy.taylor@jboss.org)
|
|
|
|
* @author Trustin Lee (tlee@redhat.com)
|
2008-12-03 10:00:29 +01:00
|
|
|
* @version $Rev$, $Date$
|
2008-11-19 08:22:15 +01:00
|
|
|
*/
|
2008-12-03 10:00:29 +01:00
|
|
|
public abstract class HttpMessageDecoder extends ReplayingDecoder<HttpMessageDecoder.State> {
|
2008-11-19 08:22:15 +01:00
|
|
|
|
2008-11-30 17:22:03 +01:00
|
|
|
// Splits the initial line into three whitespace-separated parts
// (request: method / URI / version; response: version / code / phrase).
private static final Pattern INITIAL_PATTERN = Pattern.compile(
        "^\\s*(\\S+)\\s+(\\S+)\\s+(.*)\\s*$");

// Splits a "Name: value" header line into name and value.
private static final Pattern HEADER_PATTERN = Pattern.compile(
        "^\\s*(\\S+)\\s*:\\s*(.*)\\s*$");

// Maximum length of the initial line (also applied to chunk-size lines in
// readLine()); exceeding it aborts decoding with TooLongFrameException.
private final int maxInitialLineLength;
// Maximum accumulated size of the whole header section (see readHeader()).
private final int maxHeaderSize;
// Content larger than this is emitted as a series of HttpChunks instead of
// one buffer.  0 means "chunked encoding disabled" (see READ_CHUNK_FOOTER).
private final int maxChunkSize;

// The message currently being decoded; reset to null between messages.
protected volatile HttpMessage message;
// Accumulated body of the current message, if any; null until body bytes arrive.
private volatile ChannelBuffer content;
// Running byte count of the current header section, updated by readHeader().
private volatile int headerSize;
// Remaining byte count of the current chunk / fixed-length body.
private volatile int chunkSize;
|
|
|
|
|
2008-12-03 10:00:29 +01:00
|
|
|
/**
 * The decoding state, used as the {@link ReplayingDecoder} checkpoint.
 *
 * @author The Netty Project (netty-dev@lists.jboss.org)
 * @author Trustin Lee (tlee@redhat.com)
 * @version $Rev$, $Date$
 *
 * @apiviz.exclude
 */
protected enum State {
    // Skip leading control / whitespace characters before the initial line.
    SKIP_CONTROL_CHARS,
    // Read the request line or status line.
    READ_INITIAL,
    // Read the header section.
    READ_HEADER,
    // Read body bytes until the connection is closed (no Content-Length).
    READ_VARIABLE_LENGTH_CONTENT,
    // Same, but emitted as a sequence of HttpChunks (content > maxChunkSize).
    READ_VARIABLE_LENGTH_CONTENT_AS_CHUNKS,
    // Read exactly Content-Length body bytes.
    READ_FIXED_LENGTH_CONTENT,
    // Same, but emitted as a sequence of HttpChunks (content > maxChunkSize).
    READ_FIXED_LENGTH_CONTENT_AS_CHUNKS,
    // Read a chunk-size line of a chunked transfer encoding.
    READ_CHUNK_SIZE,
    // Read one whole chunk's data.
    READ_CHUNKED_CONTENT,
    // Read an over-large chunk's data, split into several HttpChunks.
    READ_CHUNKED_CONTENT_AS_CHUNKS,
    // Consume the CRLF that terminates a chunk's data.
    READ_CHUNK_DELIMITER,
    // Skip the optional trailer after the last (zero-sized) chunk.
    READ_CHUNK_FOOTER;
}
|
|
|
|
|
2009-03-10 09:57:02 +01:00
|
|
|
/**
 * Creates a new instance with the default limits:
 * {@code maxInitialLineLength = 4096}, {@code maxHeaderSize = 8192} and
 * {@code maxChunkSize = 8192}.
 */
protected HttpMessageDecoder() {
    this(4096, 8192, 8192);
}
|
|
|
|
|
|
|
|
/**
 * Creates a new instance with the specified limits.
 *
 * @param maxInitialLineLength maximum length of the initial line
 *        (and of a chunk-size line); must be positive
 * @param maxHeaderSize maximum accumulated size of the header section;
 *        must be positive
 * @param maxChunkSize content larger than this is emitted as HttpChunks;
 *        must not be negative — 0 disables chunked handling
 *        (see READ_CHUNK_FOOTER in decode())
 * @throws IllegalArgumentException if any limit is out of range
 */
protected HttpMessageDecoder(
        int maxInitialLineLength, int maxHeaderSize, int maxChunkSize) {

    // Checkpoint-based replay decoding, starting before the initial line.
    super(State.SKIP_CONTROL_CHARS, true);

    if (maxInitialLineLength <= 0) {
        throw new IllegalArgumentException(
                "maxInitialLineLength must be a positive integer: " +
                maxInitialLineLength);
    }
    if (maxHeaderSize <= 0) {
        // FIX: the message previously printed maxChunkSize instead of the
        // offending maxHeaderSize value.
        throw new IllegalArgumentException(
                "maxHeaderSize must be a positive integer: " +
                maxHeaderSize);
    }
    if (maxChunkSize < 0) {
        // 0 is a valid value (chunk splitting disabled), so only negative
        // values are rejected; message wording fixed to match the check.
        throw new IllegalArgumentException(
                "maxChunkSize must not be negative: " + maxChunkSize);
    }
    this.maxInitialLineLength = maxInitialLineLength;
    this.maxHeaderSize = maxHeaderSize;
    this.maxChunkSize = maxChunkSize;
}
|
|
|
|
|
2008-11-19 08:22:15 +01:00
|
|
|
/**
 * ReplayingDecoder callback: advances the HTTP decoding state machine.
 * Several {@code case} labels deliberately fall through to the next state
 * after calling {@code checkpoint(...)}; a premature read simply replays
 * from the last checkpoint.  Returns an {@link HttpMessage}, an
 * {@link HttpChunk} (or an array of both), or {@code null} when more data
 * is needed.
 */
@Override
protected Object decode(ChannelHandlerContext ctx, Channel channel, ChannelBuffer buffer, State state) throws Exception {
    switch (state) {
    case SKIP_CONTROL_CHARS: {
        try {
            skipControlCharacters(buffer);
            checkpoint(State.READ_INITIAL);
        } finally {
            // Always commit the reader index so skipped bytes are not replayed.
            checkpoint();
        }
        // Intentional fall through to READ_INITIAL.
    }
    case READ_INITIAL: {
        String[] initialLine = splitInitialLine(readLine(buffer, maxInitialLineLength));
        if (initialLine.length < 3) {
            // Invalid initial line - ignore.
            checkpoint(State.SKIP_CONTROL_CHARS);
            return null;
        }

        message = createMessage(initialLine);
        checkpoint(State.READ_HEADER);
        // Start counting header bytes for the maxHeaderSize limit.
        headerSize = 0;
        // Intentional fall through to READ_HEADER.
    }
    case READ_HEADER: {
        State nextState = readHeaders(buffer);
        checkpoint(nextState);
        if (nextState == State.READ_CHUNK_SIZE) {
            // Chunked encoding.
            // Generate HttpMessage first.  HttpChunks will follow.
            return message;
        } else if (nextState == State.SKIP_CONTROL_CHARS) {
            // No content is expected.
            return message;
        } else {
            int contentLength = message.getContentLength(-1);
            if (contentLength == 0 || contentLength == -1 && isDecodingRequest()) {
                // Zero-length body, or a request with no Content-Length:
                // the message is complete as-is.
                content = ChannelBuffers.EMPTY_BUFFER;
                return reset();
            }

            switch (nextState) {
            case READ_FIXED_LENGTH_CONTENT:
                if (contentLength > maxChunkSize) {
                    // Content too large for a single buffer.
                    // Generate HttpMessage first.  HttpChunks will follow.
                    checkpoint(State.READ_FIXED_LENGTH_CONTENT_AS_CHUNKS);
                    message.addHeader(HttpHeaders.Names.TRANSFER_ENCODING, HttpHeaders.Values.CHUNKED);
                    // chunkSize will be decreased as the READ_FIXED_LENGTH_CONTENT_AS_CHUNKS
                    // state reads data chunk by chunk.
                    chunkSize = message.getContentLength(-1);
                    return message;
                }
                break;
            case READ_VARIABLE_LENGTH_CONTENT:
                if (buffer.readableBytes() > maxChunkSize) {
                    // Generate HttpMessage first.  HttpChunks will follow.
                    checkpoint(State.READ_VARIABLE_LENGTH_CONTENT_AS_CHUNKS);
                    message.addHeader(HttpHeaders.Names.TRANSFER_ENCODING, HttpHeaders.Values.CHUNKED);
                    return message;
                }
                break;
            }
        }
        // We return null here, this forces decode to be called again where we will decode the content
        return null;
    }
    case READ_VARIABLE_LENGTH_CONTENT: {
        if (content == null) {
            content = ChannelBuffers.dynamicBuffer(channel.getConfig().getBufferFactory());
        }
        //this will cause a replay error until the channel is closed where this will read what's left in the buffer
        content.writeBytes(buffer.readBytes(buffer.readableBytes()));
        return reset();
    }
    case READ_VARIABLE_LENGTH_CONTENT_AS_CHUNKS: {
        // Keep reading data as a chunk until the end of connection is reached.
        int chunkSize = Math.min(maxChunkSize, buffer.readableBytes());
        HttpChunk chunk = new DefaultHttpChunk(buffer.readBytes(chunkSize));

        if (!buffer.readable()) {
            // Reached to the end of the connection.
            reset();
            if (!chunk.isLast()) {
                // Append the last chunk.
                return new HttpChunk[] { chunk, HttpChunk.LAST_CHUNK };
            }
        }
        return chunk;
    }
    case READ_FIXED_LENGTH_CONTENT: {
        //we have a content-length so we just read the correct number of bytes
        readFixedLengthContent(buffer);
        return reset();
    }
    case READ_FIXED_LENGTH_CONTENT_AS_CHUNKS: {
        // Emit up to maxChunkSize bytes at a time until chunkSize is drained.
        int chunkSize = this.chunkSize;
        HttpChunk chunk;
        if (chunkSize > maxChunkSize) {
            chunk = new DefaultHttpChunk(buffer.readBytes(maxChunkSize));
            chunkSize -= maxChunkSize;
        } else {
            chunk = new DefaultHttpChunk(buffer.readBytes(chunkSize));
            chunkSize = 0;
        }
        this.chunkSize = chunkSize;

        if (chunkSize == 0) {
            // Read all content.
            reset();
            if (!chunk.isLast()) {
                // Append the last chunk.
                return new HttpChunk[] { chunk, HttpChunk.LAST_CHUNK };
            }
        }
        return chunk;
    }
    /**
     * everything else after this point takes care of reading chunked content. basically, read chunk size,
     * read chunk, read and ignore the CRLF and repeat until 0
     */
    case READ_CHUNK_SIZE: {
        String line = readLine(buffer, maxInitialLineLength);
        int chunkSize = getChunkSize(line);
        this.chunkSize = chunkSize;
        if (chunkSize == 0) {
            // The final zero-sized chunk; the trailer (footer) follows.
            checkpoint(State.READ_CHUNK_FOOTER);
            return null;
        } else if (chunkSize > maxChunkSize) {
            // A chunk is too large. Split them into multiple chunks again.
            checkpoint(State.READ_CHUNKED_CONTENT_AS_CHUNKS);
        } else {
            checkpoint(State.READ_CHUNKED_CONTENT);
        }
        // Intentional fall through; if the AS_CHUNKS checkpoint was set, the
        // oversized readBytes below replays and decode re-enters in the
        // checkpointed state.
    }
    case READ_CHUNKED_CONTENT: {
        HttpChunk chunk = new DefaultHttpChunk(buffer.readBytes(chunkSize));
        checkpoint(State.READ_CHUNK_DELIMITER);
        return chunk;
    }
    case READ_CHUNKED_CONTENT_AS_CHUNKS: {
        int chunkSize = this.chunkSize;
        HttpChunk chunk;
        if (chunkSize > maxChunkSize) {
            chunk = new DefaultHttpChunk(buffer.readBytes(maxChunkSize));
            chunkSize -= maxChunkSize;
        } else {
            chunk = new DefaultHttpChunk(buffer.readBytes(chunkSize));
            chunkSize = 0;
        }
        this.chunkSize = chunkSize;

        if (chunkSize == 0) {
            // Read all content.
            checkpoint(State.READ_CHUNK_DELIMITER);
        }

        if (!chunk.isLast()) {
            return chunk;
        }
        // Intentional fall through to READ_CHUNK_DELIMITER when the chunk is empty.
    }
    case READ_CHUNK_DELIMITER: {
        // Scan forward until the CRLF (or bare LF) after the chunk data.
        for (;;) {
            byte next = buffer.readByte();
            if (next == HttpCodecUtil.CR) {
                if (buffer.readByte() == HttpCodecUtil.LF) {
                    checkpoint(State.READ_CHUNK_SIZE);
                    return null;
                }
            } else if (next == HttpCodecUtil.LF) {
                checkpoint(State.READ_CHUNK_SIZE);
                return null;
            }
        }
    }
    case READ_CHUNK_FOOTER: {
        // Skip the footer; does anyone use it?
        try {
            if (!skipLine(buffer)) {
                // An empty line: end of the trailer.
                if (maxChunkSize == 0) {
                    // Chunked encoding disabled.
                    return reset();
                } else {
                    reset();
                    // The last chunk, which is empty
                    return HttpChunk.LAST_CHUNK;
                }
            }
        } finally {
            // Commit the skipped bytes so they are not replayed.
            checkpoint();
        }
        return null;
    }
    default: {
        throw new Error("Shouldn't reach here.");
    }
    }
}
|
|
|
|
|
2009-03-30 04:14:11 +02:00
|
|
|
|
|
|
|
protected boolean isContentAlwaysEmpty(HttpMessage msg) {
|
|
|
|
if (msg instanceof HttpResponse) {
|
|
|
|
HttpResponse res = (HttpResponse) msg;
|
|
|
|
int code = res.getStatus().getCode();
|
|
|
|
if (code < 200) {
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
switch (code) {
|
|
|
|
case 204: case 205: case 304:
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
2008-11-19 08:22:15 +01:00
|
|
|
private Object reset() {
|
2009-02-13 09:55:06 +01:00
|
|
|
HttpMessage message = this.message;
|
|
|
|
ChannelBuffer content = this.content;
|
|
|
|
|
2009-03-10 09:37:45 +01:00
|
|
|
if (content != null) {
|
|
|
|
message.setContent(content);
|
|
|
|
this.content = null;
|
|
|
|
}
|
2009-02-13 09:55:06 +01:00
|
|
|
this.message = null;
|
2009-03-10 09:37:45 +01:00
|
|
|
|
2009-02-12 06:02:22 +01:00
|
|
|
checkpoint(State.SKIP_CONTROL_CHARS);
|
2008-11-19 08:22:15 +01:00
|
|
|
return message;
|
|
|
|
}
|
|
|
|
|
2009-02-12 06:02:22 +01:00
|
|
|
private void skipControlCharacters(ChannelBuffer buffer) {
|
|
|
|
for (;;) {
|
|
|
|
char c = (char) buffer.readUnsignedByte();
|
|
|
|
if (!Character.isISOControl(c) &&
|
|
|
|
!Character.isWhitespace(c)) {
|
|
|
|
buffer.readerIndex(buffer.readerIndex() - 1);
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2008-11-19 08:22:15 +01:00
|
|
|
private void readFixedLengthContent(ChannelBuffer buffer) {
|
2009-02-12 08:17:29 +01:00
|
|
|
int length = message.getContentLength(-1);
|
2008-11-19 08:22:15 +01:00
|
|
|
if (content == null) {
|
2008-11-30 17:22:03 +01:00
|
|
|
content = buffer.readBytes(length);
|
|
|
|
} else {
|
|
|
|
content.writeBytes(buffer.readBytes(length));
|
2008-11-19 08:22:15 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2009-03-10 08:20:27 +01:00
|
|
|
/**
 * Reads the whole header section into {@link #message} and returns the
 * state in which the body should be read.
 *
 * @throws TooLongFrameException if the header section exceeds maxHeaderSize
 */
private State readHeaders(ChannelBuffer buffer) throws TooLongFrameException {
    message.clearHeaders();
    String line = readHeader(buffer);
    String lastHeader = null;
    // An empty line terminates the header section.
    while (line.length() != 0) {
        if (line.startsWith(" ") || line.startsWith("\t")) {
            // Continuation line (folded header): append to the value of the
            // most recently added header.
            // NOTE(review): this assumes getHeaders() returns a live,
            // modifiable list backing the message — verify in HttpMessage.
            List<String> current = message.getHeaders(lastHeader);
            int lastPos = current.size() - 1;
            String newString = current.get(lastPos) + line.trim();
            current.remove(lastPos);
            current.add(newString);
        }
        else {
            String[] header = splitHeader(line);
            message.addHeader(header[0], header[1]);
            lastHeader = header[0];
        }
        line = readHeader(buffer);
    }

    // Decide how the body will be read, in priority order:
    // no body at all > chunked > fixed length > until connection close.
    State nextState;

    if (isContentAlwaysEmpty(message)) {
        nextState = State.SKIP_CONTROL_CHARS;
    } else if (message.isChunked()) {
        nextState = State.READ_CHUNK_SIZE;
    } else if (message.getContentLength(-1) >= 0) {
        nextState = State.READ_FIXED_LENGTH_CONTENT;
    } else {
        nextState = State.READ_VARIABLE_LENGTH_CONTENT;
    }
    return nextState;
}
|
|
|
|
|
2009-03-10 08:20:27 +01:00
|
|
|
/**
 * Reads one header line, terminated by CRLF or a bare LF, and adds the
 * consumed byte count to the running {@link #headerSize} total.
 *
 * Note: a CR that is not followed by LF causes both bytes to be dropped
 * without being appended to the line — preserved original behavior.
 *
 * @throws TooLongFrameException if the accumulated header section exceeds
 *         maxHeaderSize
 */
private String readHeader(ChannelBuffer buffer) throws TooLongFrameException {
    StringBuilder sb = new StringBuilder(64);
    // Work on a local copy; write back only on successful line termination.
    int headerSize = this.headerSize;
    while (true) {
        byte nextByte = buffer.readByte();
        if (nextByte == HttpCodecUtil.CR) {
            nextByte = buffer.readByte();
            if (nextByte == HttpCodecUtil.LF) {
                // CRLF counts for 2 bytes of the header budget.
                this.headerSize = headerSize + 2;
                return sb.toString();
            }
        }
        else if (nextByte == HttpCodecUtil.LF) {
            // Bare LF terminator counts for 1 byte.
            this.headerSize = headerSize + 1;
            return sb.toString();
        }
        else {
            // Abort decoding if the header part is too large.
            if (headerSize >= maxHeaderSize) {
                throw new TooLongFrameException(
                        "HTTP header is larger than " +
                        maxHeaderSize + " bytes.");

            }
            headerSize ++;
            sb.append((char) nextByte);
        }
    }
}
|
|
|
|
|
2009-02-12 08:32:53 +01:00
|
|
|
/**
 * Returns {@code true} if this decoder decodes HTTP requests (as opposed to
 * responses).  Used in decode(): a request without a Content-Length header
 * is treated as having no body.
 */
protected abstract boolean isDecodingRequest();

/**
 * Creates a new message from the three parts of the decoded initial line
 * (as produced by splitInitialLine()).
 */
protected abstract HttpMessage createMessage(String[] initialLine) throws Exception;
|
2008-11-19 08:22:15 +01:00
|
|
|
|
|
|
|
private int getChunkSize(String hex) {
|
2009-02-12 06:23:39 +01:00
|
|
|
hex = hex.trim();
|
|
|
|
for (int i = 0; i < hex.length(); i ++) {
|
|
|
|
char c = hex.charAt(i);
|
|
|
|
if (c == ';' || Character.isWhitespace(c) || Character.isISOControl(c)) {
|
|
|
|
hex = hex.substring(0, i);
|
|
|
|
break;
|
|
|
|
}
|
2009-02-12 06:10:25 +01:00
|
|
|
}
|
2009-02-12 06:23:39 +01:00
|
|
|
|
2009-02-12 06:10:25 +01:00
|
|
|
return Integer.parseInt(hex, 16);
|
2008-11-19 08:22:15 +01:00
|
|
|
}
|
|
|
|
|
2009-03-10 08:20:27 +01:00
|
|
|
private String readLine(ChannelBuffer buffer, int maxLineLength) throws TooLongFrameException {
|
2009-02-12 06:49:19 +01:00
|
|
|
StringBuilder sb = new StringBuilder(64);
|
2009-03-10 08:20:27 +01:00
|
|
|
int lineLength = 0;
|
2008-11-19 08:22:15 +01:00
|
|
|
while (true) {
|
2009-02-12 06:48:25 +01:00
|
|
|
byte nextByte = buffer.readByte();
|
2008-11-19 08:22:15 +01:00
|
|
|
if (nextByte == HttpCodecUtil.CR) {
|
2009-02-12 06:48:25 +01:00
|
|
|
nextByte = buffer.readByte();
|
2008-11-30 17:22:03 +01:00
|
|
|
if (nextByte == HttpCodecUtil.LF) {
|
|
|
|
return sb.toString();
|
|
|
|
}
|
2008-11-19 08:22:15 +01:00
|
|
|
}
|
|
|
|
else if (nextByte == HttpCodecUtil.LF) {
|
|
|
|
return sb.toString();
|
|
|
|
}
|
|
|
|
else {
|
2009-03-10 08:20:27 +01:00
|
|
|
if (lineLength >= maxLineLength) {
|
|
|
|
throw new TooLongFrameException(
|
|
|
|
"An HTTP line is larger than " + maxLineLength +
|
|
|
|
" bytes.");
|
|
|
|
}
|
|
|
|
lineLength ++;
|
2008-11-19 08:22:15 +01:00
|
|
|
sb.append((char) nextByte);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2009-03-10 08:20:27 +01:00
|
|
|
/**
|
|
|
|
* Returns {@code true} if only if the skipped line was not empty.
|
|
|
|
* Please note that an empty line is also skipped, while {@code} false is
|
|
|
|
* returned.
|
|
|
|
*/
|
|
|
|
private boolean skipLine(ChannelBuffer buffer) {
|
|
|
|
int lineLength = 0;
|
|
|
|
while (true) {
|
|
|
|
byte nextByte = buffer.readByte();
|
|
|
|
if (nextByte == HttpCodecUtil.CR) {
|
|
|
|
nextByte = buffer.readByte();
|
|
|
|
if (nextByte == HttpCodecUtil.LF) {
|
|
|
|
return lineLength != 0;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
else if (nextByte == HttpCodecUtil.LF) {
|
|
|
|
return lineLength != 0;
|
|
|
|
}
|
|
|
|
else if (!Character.isWhitespace((char) nextByte)) {
|
|
|
|
lineLength ++;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
private String[] splitInitialLine(String sb) {
|
2008-11-30 17:22:03 +01:00
|
|
|
Matcher m = INITIAL_PATTERN.matcher(sb);
|
|
|
|
if (m.matches()) {
|
|
|
|
return new String[] { m.group(1), m.group(2), m.group(3) };
|
|
|
|
} else {
|
|
|
|
throw new IllegalArgumentException("Invalid initial line: " + sb);
|
2008-11-19 08:22:15 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
private String[] splitHeader(String sb) {
|
2008-11-30 17:22:03 +01:00
|
|
|
Matcher m = HEADER_PATTERN.matcher(sb);
|
|
|
|
if (m.matches()) {
|
|
|
|
return new String[] { m.group(1), m.group(2) };
|
|
|
|
} else {
|
|
|
|
throw new IllegalArgumentException("Invalid header syntax: " + sb);
|
2008-11-19 08:22:15 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|