2008-11-19 08:22:15 +01:00
|
|
|
/*
|
2009-08-28 09:15:49 +02:00
|
|
|
* Copyright 2009 Red Hat, Inc.
|
2009-06-19 19:48:17 +02:00
|
|
|
*
|
2009-08-28 09:15:49 +02:00
|
|
|
* Red Hat licenses this file to you under the Apache License, version 2.0
|
|
|
|
* (the "License"); you may not use this file except in compliance with the
|
|
|
|
* License. You may obtain a copy of the License at:
|
2008-11-19 08:22:15 +01:00
|
|
|
*
|
2009-08-28 09:15:49 +02:00
|
|
|
* http://www.apache.org/licenses/LICENSE-2.0
|
2008-11-19 08:22:15 +01:00
|
|
|
*
|
2009-08-28 09:15:49 +02:00
|
|
|
* Unless required by applicable law or agreed to in writing, software
|
|
|
|
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
|
|
|
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
|
|
|
* License for the specific language governing permissions and limitations
|
|
|
|
* under the License.
|
2008-11-19 08:22:15 +01:00
|
|
|
*/
|
2011-12-09 04:38:59 +01:00
|
|
|
package io.netty.handler.codec.http;
|
2008-11-19 08:22:15 +01:00
|
|
|
|
|
|
|
import java.util.List;
|
|
|
|
|
2011-12-09 04:38:59 +01:00
|
|
|
import io.netty.buffer.ChannelBuffer;
|
|
|
|
import io.netty.buffer.ChannelBuffers;
|
|
|
|
import io.netty.channel.Channel;
|
|
|
|
import io.netty.channel.ChannelHandlerContext;
|
|
|
|
import io.netty.channel.ChannelPipeline;
|
|
|
|
import io.netty.handler.codec.frame.TooLongFrameException;
|
|
|
|
import io.netty.handler.codec.replay.ReplayingDecoder;
|
2008-11-19 08:22:15 +01:00
|
|
|
|
|
|
|
/**
|
2009-06-19 17:35:19 +02:00
|
|
|
* Decodes {@link ChannelBuffer}s into {@link HttpMessage}s and
|
|
|
|
* {@link HttpChunk}s.
|
|
|
|
*
|
|
|
|
 * <h3>Parameters that prevent excessive memory consumption</h3>
|
2009-06-19 18:45:30 +02:00
|
|
|
* <table border="1">
|
2009-06-19 17:35:19 +02:00
|
|
|
* <tr>
|
|
|
|
* <th>Name</th><th>Meaning</th>
|
|
|
|
* </tr>
|
|
|
|
* <tr>
|
|
|
|
* <td>{@code maxInitialLineLength}</td>
|
|
|
|
* <td>The maximum length of the initial line
|
|
|
|
* (e.g. {@code "GET / HTTP/1.0"} or {@code "HTTP/1.0 200 OK"})
|
|
|
|
* If the length of the initial line exceeds this value, a
|
|
|
|
* {@link TooLongFrameException} will be raised.</td>
|
|
|
|
* </tr>
|
|
|
|
* <tr>
|
|
|
|
* <td>{@code maxHeaderSize}</td>
|
|
|
|
* <td>The maximum length of all headers. If the sum of the length of each
|
|
|
|
* header exceeds this value, a {@link TooLongFrameException} will be raised.</td>
|
|
|
|
* </tr>
|
|
|
|
* <tr>
|
|
|
|
* <td>{@code maxChunkSize}</td>
|
|
|
|
* <td>The maximum length of the content or each chunk. If the content length
|
2009-11-17 07:02:42 +01:00
|
|
|
* (or the length of each chunk) exceeds this value, the content or chunk
|
|
|
|
* will be split into multiple {@link HttpChunk}s whose length is
|
|
|
|
* {@code maxChunkSize} at maximum.</td>
|
2009-06-19 17:35:19 +02:00
|
|
|
* </tr>
|
|
|
|
* </table>
|
|
|
|
*
|
2009-11-17 07:02:42 +01:00
|
|
|
* <h3>Chunked Content</h3>
|
|
|
|
*
|
|
|
|
* If the content of an HTTP message is greater than {@code maxChunkSize} or
|
|
|
|
* the transfer encoding of the HTTP message is 'chunked', this decoder
|
|
|
|
* generates one {@link HttpMessage} instance and its following
|
|
|
|
* {@link HttpChunk}s per single HTTP message to avoid excessive memory
|
|
|
|
* consumption. For example, the following HTTP message:
|
|
|
|
* <pre>
|
|
|
|
* GET / HTTP/1.1
|
|
|
|
* Transfer-Encoding: chunked
|
|
|
|
*
|
|
|
|
* 1a
|
|
|
|
* abcdefghijklmnopqrstuvwxyz
|
|
|
|
* 10
|
|
|
|
* 1234567890abcdef
|
|
|
|
* 0
|
2009-11-17 08:19:28 +01:00
|
|
|
* Content-MD5: ...
|
|
|
|
* <i>[blank line]</i>
|
2010-01-26 10:43:20 +01:00
|
|
|
* </pre>
|
2009-11-17 07:02:42 +01:00
|
|
|
* triggers {@link HttpRequestDecoder} to generate 4 objects:
|
|
|
|
* <ol>
|
|
|
|
* <li>An {@link HttpRequest} whose {@link HttpMessage#isChunked() chunked}
|
2010-01-26 10:43:20 +01:00
|
|
|
* property is {@code true},</li>
|
2009-11-17 07:02:42 +01:00
|
|
|
* <li>The first {@link HttpChunk} whose content is {@code 'abcdefghijklmnopqrstuvwxyz'},</li>
|
|
|
|
* <li>The second {@link HttpChunk} whose content is {@code '1234567890abcdef'}, and</li>
|
|
|
|
* <li>An {@link HttpChunkTrailer} which marks the end of the content.</li>
|
|
|
|
* </ol>
|
|
|
|
*
|
|
|
|
* If you prefer not to handle {@link HttpChunk}s by yourself for your
|
|
|
|
* convenience, insert {@link HttpChunkAggregator} after this decoder in the
|
|
|
|
* {@link ChannelPipeline}. However, please note that your server might not
|
|
|
|
* be as memory efficient as without the aggregator.
|
|
|
|
*
|
2009-06-19 17:35:19 +02:00
|
|
|
* <h3>Extensibility</h3>
|
|
|
|
*
|
|
|
|
* Please note that this decoder is designed to be extended to implement
|
|
|
|
* a protocol derived from HTTP, such as
|
|
|
|
* <a href="http://en.wikipedia.org/wiki/Real_Time_Streaming_Protocol">RTSP</a> and
|
|
|
|
* <a href="http://en.wikipedia.org/wiki/Internet_Content_Adaptation_Protocol">ICAP</a>.
|
|
|
|
* To implement the decoder of such a derived protocol, extend this class and
|
|
|
|
* implement all abstract methods properly.
|
2009-07-20 05:37:35 +02:00
|
|
|
* @apiviz.landmark
|
2008-11-19 08:22:15 +01:00
|
|
|
*/
|
2008-12-03 10:00:29 +01:00
|
|
|
public abstract class HttpMessageDecoder extends ReplayingDecoder<HttpMessageDecoder.State> {
|
2008-11-19 08:22:15 +01:00
|
|
|
|
2009-03-10 08:20:27 +01:00
|
|
|
    // Configured limits that guard against excessive memory consumption
    // (see the class javadoc for their exact meaning).
    private final int maxInitialLineLength;
    private final int maxHeaderSize;
    private final int maxChunkSize;

    // Per-message decoder state, carried across decode() invocations.
    // message: the message whose initial line/headers have been decoded so far.
    private HttpMessage message;
    // content: accumulated body for non-chunked content; null until body bytes arrive.
    private ChannelBuffer content;
    // chunkSize: bytes still to be read for the current chunk / fixed-length body.
    private long chunkSize;
    // headerSize: running total of header bytes read, checked against maxHeaderSize.
    private int headerSize;
|
2009-02-12 07:09:29 +01:00
|
|
|
|
2008-12-03 10:00:29 +01:00
|
|
|
    /**
     * The internal state of {@link HttpMessageDecoder}.
     * <em>Internal use only</em>.
     * @apiviz.exclude
     */
    protected enum State {
        SKIP_CONTROL_CHARS,                      // skip leading control/whitespace bytes before the initial line
        READ_INITIAL,                            // read the request line or status line
        READ_HEADER,                             // read the header section
        READ_VARIABLE_LENGTH_CONTENT,            // body with no Content-Length: read until connection close
        READ_VARIABLE_LENGTH_CONTENT_AS_CHUNKS,  // same, but emitted as a series of HttpChunks
        READ_FIXED_LENGTH_CONTENT,               // body with a known Content-Length
        READ_FIXED_LENGTH_CONTENT_AS_CHUNKS,     // same, but emitted as a series of HttpChunks
        READ_CHUNK_SIZE,                         // chunk-size line of the chunked transfer encoding
        READ_CHUNKED_CONTENT,                    // data of a single chunk
        READ_CHUNKED_CONTENT_AS_CHUNKS,          // oversized chunk split into multiple HttpChunks
        READ_CHUNK_DELIMITER,                    // CRLF that terminates a chunk's data
        READ_CHUNK_FOOTER                        // trailing headers after the last (zero-size) chunk
    }
|
|
|
|
|
2009-06-19 17:35:19 +02:00
|
|
|
/**
|
|
|
|
* Creates a new instance with the default
|
2009-09-07 05:27:26 +02:00
|
|
|
* {@code maxInitialLineLength (4096}}, {@code maxHeaderSize (8192)}, and
|
|
|
|
* {@code maxChunkSize (8192)}.
|
2009-06-19 17:35:19 +02:00
|
|
|
*/
|
2009-03-10 09:57:02 +01:00
|
|
|
protected HttpMessageDecoder() {
|
|
|
|
this(4096, 8192, 8192);
|
|
|
|
}
|
|
|
|
|
2009-06-19 17:35:19 +02:00
|
|
|
/**
|
|
|
|
* Creates a new instance with the specified parameters.
|
|
|
|
*/
|
2009-03-10 09:57:02 +01:00
|
|
|
protected HttpMessageDecoder(
|
|
|
|
int maxInitialLineLength, int maxHeaderSize, int maxChunkSize) {
|
|
|
|
|
2009-03-09 09:50:24 +01:00
|
|
|
super(State.SKIP_CONTROL_CHARS, true);
|
2009-03-10 09:57:02 +01:00
|
|
|
|
2009-03-10 08:20:27 +01:00
|
|
|
if (maxInitialLineLength <= 0) {
|
|
|
|
throw new IllegalArgumentException(
|
|
|
|
"maxInitialLineLength must be a positive integer: " +
|
|
|
|
maxInitialLineLength);
|
|
|
|
}
|
|
|
|
if (maxHeaderSize <= 0) {
|
|
|
|
throw new IllegalArgumentException(
|
|
|
|
"maxHeaderSize must be a positive integer: " +
|
2010-08-26 06:18:53 +02:00
|
|
|
maxHeaderSize);
|
2009-03-10 08:20:27 +01:00
|
|
|
}
|
2009-03-09 09:50:24 +01:00
|
|
|
if (maxChunkSize < 0) {
|
|
|
|
throw new IllegalArgumentException(
|
2009-03-10 08:53:15 +01:00
|
|
|
"maxChunkSize must be a positive integer: " +
|
2009-03-09 09:50:24 +01:00
|
|
|
maxChunkSize);
|
|
|
|
}
|
2009-03-10 08:20:27 +01:00
|
|
|
this.maxInitialLineLength = maxInitialLineLength;
|
|
|
|
this.maxHeaderSize = maxHeaderSize;
|
2009-03-09 09:50:24 +01:00
|
|
|
this.maxChunkSize = maxChunkSize;
|
|
|
|
}
|
|
|
|
|
2008-11-19 08:22:15 +01:00
|
|
|
    @Override
    protected Object decode(ChannelHandlerContext ctx, Channel channel, ChannelBuffer buffer, State state) throws Exception {
        // NOTE: several cases below deliberately fall through to the next case
        // (no 'break' / 'return'), so that a single decode() call can advance
        // through multiple states when enough data is available.
        switch (state) {
        case SKIP_CONTROL_CHARS: {
            try {
                skipControlCharacters(buffer);
                checkpoint(State.READ_INITIAL);
            } finally {
                // Always record the reader index so skipped bytes are not replayed.
                checkpoint();
            }
            // fall through to READ_INITIAL
        }
        case READ_INITIAL: {
            String[] initialLine = splitInitialLine(readLine(buffer, maxInitialLineLength));
            if (initialLine.length < 3) {
                // Invalid initial line - ignore.
                checkpoint(State.SKIP_CONTROL_CHARS);
                return null;
            }

            message = createMessage(initialLine);
            checkpoint(State.READ_HEADER);
            // fall through to READ_HEADER
        }
        case READ_HEADER: {
            State nextState = readHeaders(buffer);
            checkpoint(nextState);
            if (nextState == State.READ_CHUNK_SIZE) {
                // Chunked encoding
                message.setChunked(true);
                // Generate HttpMessage first.  HttpChunks will follow.
                return message;
            } else if (nextState == State.SKIP_CONTROL_CHARS) {
                // No content is expected.
                // Remove the headers which are not supposed to be present not
                // to confuse subsequent handlers.
                message.removeHeader(HttpHeaders.Names.TRANSFER_ENCODING);
                return message;
            } else {
                long contentLength = HttpHeaders.getContentLength(message, -1);
                if (contentLength == 0 || contentLength == -1 && isDecodingRequest()) {
                    // No body at all - finish the message immediately.
                    content = ChannelBuffers.EMPTY_BUFFER;
                    return reset();
                }

                switch (nextState) {
                case READ_FIXED_LENGTH_CONTENT:
                    if (contentLength > maxChunkSize || HttpHeaders.is100ContinueExpected(message)) {
                        // Generate HttpMessage first.  HttpChunks will follow.
                        checkpoint(State.READ_FIXED_LENGTH_CONTENT_AS_CHUNKS);
                        message.setChunked(true);
                        // chunkSize will be decreased as the READ_FIXED_LENGTH_CONTENT_AS_CHUNKS
                        // state reads data chunk by chunk.
                        chunkSize = HttpHeaders.getContentLength(message, -1);
                        return message;
                    }
                    break;
                case READ_VARIABLE_LENGTH_CONTENT:
                    if (buffer.readableBytes() > maxChunkSize || HttpHeaders.is100ContinueExpected(message)) {
                        // Generate HttpMessage first.  HttpChunks will follow.
                        checkpoint(State.READ_VARIABLE_LENGTH_CONTENT_AS_CHUNKS);
                        message.setChunked(true);
                        return message;
                    }
                    break;
                default:
                    throw new IllegalStateException("Unexpected state: " + nextState);
                }
            }
            // We return null here, this forces decode to be called again where we will decode the content
            return null;
        }
        case READ_VARIABLE_LENGTH_CONTENT: {
            if (content == null) {
                content = ChannelBuffers.dynamicBuffer(channel.getConfig().getBufferFactory());
            }
            //this will cause a replay error until the channel is closed where this will read what's left in the buffer
            content.writeBytes(buffer.readBytes(buffer.readableBytes()));
            return reset();
        }
        case READ_VARIABLE_LENGTH_CONTENT_AS_CHUNKS: {
            // Keep reading data as a chunk until the end of connection is reached.
            int chunkSize = Math.min(maxChunkSize, buffer.readableBytes());
            HttpChunk chunk = new DefaultHttpChunk(buffer.readBytes(chunkSize));

            if (!buffer.readable()) {
                // Reached to the end of the connection.
                reset();
                if (!chunk.isLast()) {
                    // Append the last chunk.
                    return new Object[] { chunk, HttpChunk.LAST_CHUNK };
                }
            }
            return chunk;
        }
        case READ_FIXED_LENGTH_CONTENT: {
            //we have a content-length so we just read the correct number of bytes
            readFixedLengthContent(buffer);
            return reset();
        }
        case READ_FIXED_LENGTH_CONTENT_AS_CHUNKS: {
            long chunkSize = this.chunkSize;
            HttpChunk chunk;
            if (chunkSize > maxChunkSize) {
                // Emit at most maxChunkSize bytes per HttpChunk.
                chunk = new DefaultHttpChunk(buffer.readBytes(maxChunkSize));
                chunkSize -= maxChunkSize;
            } else {
                assert chunkSize <= Integer.MAX_VALUE;
                chunk = new DefaultHttpChunk(buffer.readBytes((int) chunkSize));
                chunkSize = 0;
            }
            this.chunkSize = chunkSize;

            if (chunkSize == 0) {
                // Read all content.
                reset();
                if (!chunk.isLast()) {
                    // Append the last chunk.
                    return new Object[] { chunk, HttpChunk.LAST_CHUNK };
                }
            }
            return chunk;
        }
        /**
         * everything else after this point takes care of reading chunked content. basically, read chunk size,
         * read chunk, read and ignore the CRLF and repeat until 0
         */
        case READ_CHUNK_SIZE: {
            String line = readLine(buffer, maxInitialLineLength);
            int chunkSize = getChunkSize(line);
            this.chunkSize = chunkSize;
            if (chunkSize == 0) {
                // Zero-size chunk marks the end; trailing headers may follow.
                checkpoint(State.READ_CHUNK_FOOTER);
                return null;
            } else if (chunkSize > maxChunkSize) {
                // A chunk is too large. Split them into multiple chunks again.
                checkpoint(State.READ_CHUNKED_CONTENT_AS_CHUNKS);
            } else {
                checkpoint(State.READ_CHUNKED_CONTENT);
            }
            // fall through to read the chunk data
        }
        case READ_CHUNKED_CONTENT: {
            assert chunkSize <= Integer.MAX_VALUE;
            HttpChunk chunk = new DefaultHttpChunk(buffer.readBytes((int) chunkSize));
            checkpoint(State.READ_CHUNK_DELIMITER);
            return chunk;
        }
        case READ_CHUNKED_CONTENT_AS_CHUNKS: {
            long chunkSize = this.chunkSize;
            HttpChunk chunk;
            if (chunkSize > maxChunkSize) {
                chunk = new DefaultHttpChunk(buffer.readBytes(maxChunkSize));
                chunkSize -= maxChunkSize;
            } else {
                assert chunkSize <= Integer.MAX_VALUE;
                chunk = new DefaultHttpChunk(buffer.readBytes((int) chunkSize));
                chunkSize = 0;
            }
            this.chunkSize = chunkSize;

            if (chunkSize == 0) {
                // Read all content.
                checkpoint(State.READ_CHUNK_DELIMITER);
            }

            if (!chunk.isLast()) {
                return chunk;
            }
            // fall through (empty chunk): consume the trailing CRLF below
        }
        case READ_CHUNK_DELIMITER: {
            // Skip bytes until a LF (optionally preceded by CR) is found.
            for (;;) {
                byte next = buffer.readByte();
                if (next == HttpCodecUtil.CR) {
                    if (buffer.readByte() == HttpCodecUtil.LF) {
                        checkpoint(State.READ_CHUNK_SIZE);
                        return null;
                    }
                } else if (next == HttpCodecUtil.LF) {
                    checkpoint(State.READ_CHUNK_SIZE);
                    return null;
                }
            }
        }
        case READ_CHUNK_FOOTER: {
            HttpChunkTrailer trailer = readTrailingHeaders(buffer);
            if (maxChunkSize == 0) {
                // Chunked encoding disabled.
                return reset();
            } else {
                reset();
                // The last chunk, which is empty
                return trailer;
            }
        }
        default: {
            throw new Error("Shouldn't reach here.");
        }

        }
    }
|
|
|
|
|
2009-03-30 04:14:11 +02:00
|
|
|
protected boolean isContentAlwaysEmpty(HttpMessage msg) {
|
|
|
|
if (msg instanceof HttpResponse) {
|
|
|
|
HttpResponse res = (HttpResponse) msg;
|
|
|
|
int code = res.getStatus().getCode();
|
|
|
|
if (code < 200) {
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
switch (code) {
|
|
|
|
case 204: case 205: case 304:
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
2008-11-19 08:22:15 +01:00
|
|
|
private Object reset() {
|
2009-02-13 09:55:06 +01:00
|
|
|
HttpMessage message = this.message;
|
|
|
|
ChannelBuffer content = this.content;
|
|
|
|
|
2009-03-10 09:37:45 +01:00
|
|
|
if (content != null) {
|
|
|
|
message.setContent(content);
|
|
|
|
this.content = null;
|
|
|
|
}
|
2009-02-13 09:55:06 +01:00
|
|
|
this.message = null;
|
2009-03-10 09:37:45 +01:00
|
|
|
|
2009-02-12 06:02:22 +01:00
|
|
|
checkpoint(State.SKIP_CONTROL_CHARS);
|
2008-11-19 08:22:15 +01:00
|
|
|
return message;
|
|
|
|
}
|
|
|
|
|
2009-02-12 06:02:22 +01:00
|
|
|
private void skipControlCharacters(ChannelBuffer buffer) {
|
|
|
|
for (;;) {
|
|
|
|
char c = (char) buffer.readUnsignedByte();
|
|
|
|
if (!Character.isISOControl(c) &&
|
|
|
|
!Character.isWhitespace(c)) {
|
|
|
|
buffer.readerIndex(buffer.readerIndex() - 1);
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2008-11-19 08:22:15 +01:00
|
|
|
private void readFixedLengthContent(ChannelBuffer buffer) {
|
2010-01-26 05:31:54 +01:00
|
|
|
long length = HttpHeaders.getContentLength(message, -1);
|
2009-05-08 14:45:10 +02:00
|
|
|
assert length <= Integer.MAX_VALUE;
|
|
|
|
|
2008-11-19 08:22:15 +01:00
|
|
|
if (content == null) {
|
2009-05-08 14:45:10 +02:00
|
|
|
content = buffer.readBytes((int) length);
|
2008-11-30 17:22:03 +01:00
|
|
|
} else {
|
2009-05-08 14:45:10 +02:00
|
|
|
content.writeBytes(buffer.readBytes((int) length));
|
2008-11-19 08:22:15 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2009-03-10 08:20:27 +01:00
|
|
|
    /**
     * Reads the header section of the current message into {@code message}
     * and decides which state the decoder should enter next to read the body.
     * Continuation lines (starting with SP or HT) are folded into the
     * previous header's value.
     *
     * @throws TooLongFrameException if the accumulated header size exceeds
     *         {@code maxHeaderSize} (raised by {@link #readHeader})
     */
    private State readHeaders(ChannelBuffer buffer) throws TooLongFrameException {
        headerSize = 0;
        final HttpMessage message = this.message;
        String line = readHeader(buffer);
        String name = null;
        String value = null;
        if (line.length() != 0) {
            // Headers from a previous (replayed) pass must not accumulate.
            message.clearHeaders();
            do {
                char firstChar = line.charAt(0);
                if (name != null && (firstChar == ' ' || firstChar == '\t')) {
                    // Folded continuation line - append to the previous value.
                    value = value + ' ' + line.trim();
                } else {
                    // A new header starts; commit the previous one first.
                    if (name != null) {
                        message.addHeader(name, value);
                    }
                    String[] header = splitHeader(line);
                    name = header[0];
                    value = header[1];
                }

                line = readHeader(buffer);
            } while (line.length() != 0);

            // Add the last header.
            if (name != null) {
                message.addHeader(name, value);
            }
        }

        State nextState;

        if (isContentAlwaysEmpty(message)) {
            nextState = State.SKIP_CONTROL_CHARS;
        } else if (message.isChunked()) {
            // HttpMessage.isChunked() returns true when either:
            // 1) HttpMessage.setChunked(true) was called or
            // 2) 'Transfer-Encoding' is 'chunked'.
            // Because this decoder did not call HttpMessage.setChunked(true)
            // yet, HttpMessage.isChunked() should return true only when
            // 'Transfer-Encoding' is 'chunked'.
            nextState = State.READ_CHUNK_SIZE;
        } else if (HttpHeaders.getContentLength(message, -1) >= 0) {
            nextState = State.READ_FIXED_LENGTH_CONTENT;
        } else {
            nextState = State.READ_VARIABLE_LENGTH_CONTENT;
        }
        return nextState;
    }
|
|
|
|
|
2009-11-17 06:39:01 +01:00
|
|
|
    /**
     * Reads the trailing headers that may follow the last chunk of a chunked
     * message.  Content-Length, Transfer-Encoding and Trailer are not allowed
     * as trailing headers and are silently dropped.  Returns
     * {@link HttpChunk#LAST_CHUNK} when no trailing headers are present.
     *
     * @throws TooLongFrameException if the accumulated header size exceeds
     *         {@code maxHeaderSize} (raised by {@link #readHeader})
     */
    private HttpChunkTrailer readTrailingHeaders(ChannelBuffer buffer) throws TooLongFrameException {
        headerSize = 0;
        String line = readHeader(buffer);
        String lastHeader = null;
        if (line.length() != 0) {
            HttpChunkTrailer trailer = new DefaultHttpChunkTrailer();
            do {
                char firstChar = line.charAt(0);
                if (lastHeader != null && (firstChar == ' ' || firstChar == '\t')) {
                    // Folded continuation line - append to the last value.
                    // NOTE(review): unlike readHeaders(), no ' ' separator is
                    // inserted before the continuation - confirm intentional.
                    List<String> current = trailer.getHeaders(lastHeader);
                    if (current.size() != 0) {
                        int lastPos = current.size() - 1;
                        String newString = current.get(lastPos) + line.trim();
                        current.set(lastPos, newString);
                    } else {
                        // Content-Length, Transfer-Encoding, or Trailer
                        // (the header was filtered out below, so there is
                        // nothing to append the continuation to)
                    }
                } else {
                    String[] header = splitHeader(line);
                    String name = header[0];
                    if (!name.equalsIgnoreCase(HttpHeaders.Names.CONTENT_LENGTH) &&
                        !name.equalsIgnoreCase(HttpHeaders.Names.TRANSFER_ENCODING) &&
                        !name.equalsIgnoreCase(HttpHeaders.Names.TRAILER)) {
                        trailer.addHeader(name, header[1]);
                    }
                    lastHeader = name;
                }

                line = readHeader(buffer);
            } while (line.length() != 0);

            return trailer;
        }

        return HttpChunk.LAST_CHUNK;
    }
|
|
|
|
|
2009-03-10 08:20:27 +01:00
|
|
|
    /**
     * Reads a single header line terminated by LF or CRLF, without the
     * terminator, while accumulating the total header size into
     * {@code headerSize}.
     *
     * Note: when a CR is NOT followed by LF, the CR itself is dropped and the
     * byte after it is kept - the switch falls out and appends {@code nextByte},
     * which at that point holds the byte read after the CR.
     *
     * @throws TooLongFrameException if the accumulated size reaches
     *         {@code maxHeaderSize}
     */
    private String readHeader(ChannelBuffer buffer) throws TooLongFrameException {
        StringBuilder sb = new StringBuilder(64);
        int headerSize = this.headerSize;

        loop:
        for (;;) {
            char nextByte = (char) buffer.readByte();
            headerSize ++;

            switch (nextByte) {
            case HttpCodecUtil.CR:
                nextByte = (char) buffer.readByte();
                headerSize ++;
                if (nextByte == HttpCodecUtil.LF) {
                    break loop;
                }
                break;
            case HttpCodecUtil.LF:
                break loop;
            }

            // Abort decoding if the header part is too large.
            if (headerSize >= maxHeaderSize) {
                // TODO: Respond with Bad Request and discard the traffic
                //    or close the connection.
                //       No need to notify the upstream handlers - just log.
                //       If decoding a response, just throw an exception.
                throw new TooLongFrameException(
                        "HTTP header is larger than " +
                        maxHeaderSize + " bytes.");

            }

            sb.append(nextByte);
        }

        this.headerSize = headerSize;
        return sb.toString();
    }
|
|
|
|
|
2009-02-12 08:32:53 +01:00
|
|
|
    /**
     * Returns {@code true} if and only if this decoder decodes requests
     * (as opposed to responses).
     */
    protected abstract boolean isDecodingRequest();

    /**
     * Creates a new message from the three parts of the decoded initial line
     * (e.g. method / URI / protocol version for a request decoder).
     */
    protected abstract HttpMessage createMessage(String[] initialLine) throws Exception;
|
2008-11-19 08:22:15 +01:00
|
|
|
|
|
|
|
private int getChunkSize(String hex) {
|
2009-02-12 06:23:39 +01:00
|
|
|
hex = hex.trim();
|
|
|
|
for (int i = 0; i < hex.length(); i ++) {
|
|
|
|
char c = hex.charAt(i);
|
|
|
|
if (c == ';' || Character.isWhitespace(c) || Character.isISOControl(c)) {
|
|
|
|
hex = hex.substring(0, i);
|
|
|
|
break;
|
|
|
|
}
|
2009-02-12 06:10:25 +01:00
|
|
|
}
|
2009-02-12 06:23:39 +01:00
|
|
|
|
2009-02-12 06:10:25 +01:00
|
|
|
return Integer.parseInt(hex, 16);
|
2008-11-19 08:22:15 +01:00
|
|
|
}
|
|
|
|
|
2009-03-10 08:20:27 +01:00
|
|
|
private String readLine(ChannelBuffer buffer, int maxLineLength) throws TooLongFrameException {
|
2009-02-12 06:49:19 +01:00
|
|
|
StringBuilder sb = new StringBuilder(64);
|
2009-03-10 08:20:27 +01:00
|
|
|
int lineLength = 0;
|
2008-11-19 08:22:15 +01:00
|
|
|
while (true) {
|
2009-02-12 06:48:25 +01:00
|
|
|
byte nextByte = buffer.readByte();
|
2008-11-19 08:22:15 +01:00
|
|
|
if (nextByte == HttpCodecUtil.CR) {
|
2009-02-12 06:48:25 +01:00
|
|
|
nextByte = buffer.readByte();
|
2008-11-30 17:22:03 +01:00
|
|
|
if (nextByte == HttpCodecUtil.LF) {
|
|
|
|
return sb.toString();
|
|
|
|
}
|
2008-11-19 08:22:15 +01:00
|
|
|
}
|
|
|
|
else if (nextByte == HttpCodecUtil.LF) {
|
|
|
|
return sb.toString();
|
|
|
|
}
|
|
|
|
else {
|
2009-03-10 08:20:27 +01:00
|
|
|
if (lineLength >= maxLineLength) {
|
2010-10-19 07:40:44 +02:00
|
|
|
// TODO: Respond with Bad Request and discard the traffic
|
|
|
|
// or close the connection.
|
|
|
|
// No need to notify the upstream handlers - just log.
|
|
|
|
// If decoding a response, just throw an exception.
|
2009-03-10 08:20:27 +01:00
|
|
|
throw new TooLongFrameException(
|
|
|
|
"An HTTP line is larger than " + maxLineLength +
|
|
|
|
" bytes.");
|
|
|
|
}
|
|
|
|
lineLength ++;
|
2008-11-19 08:22:15 +01:00
|
|
|
sb.append((char) nextByte);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2009-03-10 08:20:27 +01:00
|
|
|
private String[] splitInitialLine(String sb) {
|
2009-06-15 09:30:07 +02:00
|
|
|
int aStart;
|
|
|
|
int aEnd;
|
|
|
|
int bStart;
|
|
|
|
int bEnd;
|
|
|
|
int cStart;
|
|
|
|
int cEnd;
|
|
|
|
|
|
|
|
aStart = findNonWhitespace(sb, 0);
|
|
|
|
aEnd = findWhitespace(sb, aStart);
|
|
|
|
|
|
|
|
bStart = findNonWhitespace(sb, aEnd);
|
|
|
|
bEnd = findWhitespace(sb, bStart);
|
|
|
|
|
|
|
|
cStart = findNonWhitespace(sb, bEnd);
|
|
|
|
cEnd = findEndOfString(sb);
|
|
|
|
|
|
|
|
return new String[] {
|
|
|
|
sb.substring(aStart, aEnd),
|
|
|
|
sb.substring(bStart, bEnd),
|
2010-05-27 14:38:54 +02:00
|
|
|
cStart < cEnd? sb.substring(cStart, cEnd) : "" };
|
2008-11-19 08:22:15 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
private String[] splitHeader(String sb) {
|
2009-09-10 12:34:49 +02:00
|
|
|
final int length = sb.length();
|
2009-06-15 09:30:07 +02:00
|
|
|
int nameStart;
|
|
|
|
int nameEnd;
|
|
|
|
int colonEnd;
|
|
|
|
int valueStart;
|
|
|
|
int valueEnd;
|
|
|
|
|
|
|
|
nameStart = findNonWhitespace(sb, 0);
|
2009-09-10 12:34:49 +02:00
|
|
|
for (nameEnd = nameStart; nameEnd < length; nameEnd ++) {
|
2009-06-15 09:30:07 +02:00
|
|
|
char ch = sb.charAt(nameEnd);
|
|
|
|
if (ch == ':' || Character.isWhitespace(ch)) {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2009-09-10 12:34:49 +02:00
|
|
|
for (colonEnd = nameEnd; colonEnd < length; colonEnd ++) {
|
2009-06-15 09:30:07 +02:00
|
|
|
if (sb.charAt(colonEnd) == ':') {
|
|
|
|
colonEnd ++;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
valueStart = findNonWhitespace(sb, colonEnd);
|
2009-09-10 12:34:49 +02:00
|
|
|
if (valueStart == length) {
|
|
|
|
return new String[] {
|
|
|
|
sb.substring(nameStart, nameEnd),
|
|
|
|
""
|
|
|
|
};
|
|
|
|
}
|
2009-06-15 09:30:07 +02:00
|
|
|
|
2009-09-10 12:34:49 +02:00
|
|
|
valueEnd = findEndOfString(sb);
|
2009-06-15 09:30:07 +02:00
|
|
|
return new String[] {
|
|
|
|
sb.substring(nameStart, nameEnd),
|
2009-09-10 12:34:49 +02:00
|
|
|
sb.substring(valueStart, valueEnd)
|
|
|
|
};
|
2009-06-15 09:30:07 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
private int findNonWhitespace(String sb, int offset) {
|
|
|
|
int result;
|
|
|
|
for (result = offset; result < sb.length(); result ++) {
|
|
|
|
if (!Character.isWhitespace(sb.charAt(result))) {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return result;
|
|
|
|
}
|
|
|
|
|
|
|
|
private int findWhitespace(String sb, int offset) {
|
|
|
|
int result;
|
|
|
|
for (result = offset; result < sb.length(); result ++) {
|
|
|
|
if (Character.isWhitespace(sb.charAt(result))) {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return result;
|
|
|
|
}
|
|
|
|
|
|
|
|
private int findEndOfString(String sb) {
|
|
|
|
int result;
|
|
|
|
for (result = sb.length(); result > 0; result --) {
|
|
|
|
if (!Character.isWhitespace(sb.charAt(result - 1))) {
|
|
|
|
break;
|
|
|
|
}
|
2008-11-19 08:22:15 +01:00
|
|
|
}
|
2009-06-15 09:30:07 +02:00
|
|
|
return result;
|
2008-11-19 08:22:15 +01:00
|
|
|
}
|
|
|
|
}
|