From 165a035a15ed1ed1183e8b8c63d69b0022f3d0b1 Mon Sep 17 00:00:00 2001 From: Norman Maurer Date: Mon, 26 Jul 2021 17:11:18 +0200 Subject: [PATCH] Add some size checks to make code more robust and more clear (#11512) Motivation: While it's technically impossible that a chunk is larger than 64kb it still makes things easier to read and more robust to add some size checks to LzfDecoder. Modifications: Check the maximum length Result: More robust and easier to reason about code --- .../handler/codec/compression/LzfDecoder.java | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/codec/src/main/java/io/netty/handler/codec/compression/LzfDecoder.java b/codec/src/main/java/io/netty/handler/codec/compression/LzfDecoder.java index b867b67b96..05a35b14b9 100644 --- a/codec/src/main/java/io/netty/handler/codec/compression/LzfDecoder.java +++ b/codec/src/main/java/io/netty/handler/codec/compression/LzfDecoder.java @@ -17,6 +17,7 @@ package io.netty.handler.codec.compression; import com.ning.compress.BufferRecycler; import com.ning.compress.lzf.ChunkDecoder; +import com.ning.compress.lzf.LZFChunk; import com.ning.compress.lzf.util.ChunkDecoderFactory; import io.netty.buffer.ByteBuf; import io.netty.channel.ChannelHandlerContext; @@ -137,6 +138,15 @@ public class LzfDecoder extends ByteToMessageDecoder { } chunkLength = in.readUnsignedShort(); + // chunkLength can never exceed MAX_CHUNK_LEN as MAX_CHUNK_LEN is 64kb and readUnsignedShort can + // never return anything bigger either. Let's add the check anyway to make things easier in terms + // of debugging if we ever hit this because of a bug. 
+ if (chunkLength > LZFChunk.MAX_CHUNK_LEN) { + throw new DecompressionException(String.format( + "chunk length exceeds maximum: %d (expected: <= %d)", + chunkLength, LZFChunk.MAX_CHUNK_LEN)); + } + if (type != BLOCK_TYPE_COMPRESSED) { break; } @@ -147,6 +157,15 @@ public class LzfDecoder extends ByteToMessageDecoder { } originalLength = in.readUnsignedShort(); + // originalLength can never exceed MAX_CHUNK_LEN as MAX_CHUNK_LEN is 64kb and readUnsignedShort can + // never return anything bigger either. Let's add the check anyway to make things easier in terms + // of debugging if we ever hit this because of a bug. + if (originalLength > LZFChunk.MAX_CHUNK_LEN) { + throw new DecompressionException(String.format( + "original length exceeds maximum: %d (expected: <= %d)", + originalLength, LZFChunk.MAX_CHUNK_LEN)); + } + currentState = State.DECOMPRESS_DATA; // fall through case DECOMPRESS_DATA: