From f2cc94c7d4aeb77c1d4ada151dfab00608ab283a Mon Sep 17 00:00:00 2001 From: Norman Maurer Date: Thu, 9 Sep 2021 14:53:58 +0200 Subject: [PATCH] Merge pull request from GHSA-grg4-wf29-r9vv Motivation: We should do the Bzip2 decoding in a streaming fashion and so ensure we propagate the buffer as soon as possible through the pipeline. This allows the users to release these buffers as fast as possible. Modification: - Change the Bzip2Decoder to do the decompression of data in a streaming fashion. - Add a safety check to ensure the block length never exceeds the maximum (as defined in the spec) Result: No more risk of an OOME by decompressing some large data via bzip2. Thanks to Ori Hollander of JFrog Security for reporting the issue. (we got acquired during the process and now Vdoo is part of JFrog company) --- .../codec/compression/Bzip2BlockDecompressor.java | 5 +++++ .../handler/codec/compression/Bzip2Constants.java | 2 ++ .../handler/codec/compression/Bzip2Decoder.java | 15 ++++++++------- 3 files changed, 15 insertions(+), 7 deletions(-) diff --git a/codec/src/main/java/io/netty/handler/codec/compression/Bzip2BlockDecompressor.java b/codec/src/main/java/io/netty/handler/codec/compression/Bzip2BlockDecompressor.java index 9b8ff3f04c..801900c487 100644 --- a/codec/src/main/java/io/netty/handler/codec/compression/Bzip2BlockDecompressor.java +++ b/codec/src/main/java/io/netty/handler/codec/compression/Bzip2BlockDecompressor.java @@ -228,6 +228,11 @@ final class Bzip2BlockDecompressor { bwtBlock[bwtBlockLength++] = nextByte; } } + if (bwtBlockLength > MAX_BLOCK_LENGTH) { + throw new DecompressionException("block length exceeds max block length: " + + bwtBlockLength + " > " + MAX_BLOCK_LENGTH); + } + this.bwtBlockLength = bwtBlockLength; initialiseInverseBWT(); return true; diff --git a/codec/src/main/java/io/netty/handler/codec/compression/Bzip2Constants.java b/codec/src/main/java/io/netty/handler/codec/compression/Bzip2Constants.java index ba8fee54d3..087f45faa0 
100644 --- a/codec/src/main/java/io/netty/handler/codec/compression/Bzip2Constants.java +++ b/codec/src/main/java/io/netty/handler/codec/compression/Bzip2Constants.java @@ -49,6 +49,8 @@ final class Bzip2Constants { static final int MIN_BLOCK_SIZE = 1; static final int MAX_BLOCK_SIZE = 9; + static final int MAX_BLOCK_LENGTH = MAX_BLOCK_SIZE * BASE_BLOCK_SIZE; + /** * Maximum possible Huffman alphabet size. */ diff --git a/codec/src/main/java/io/netty/handler/codec/compression/Bzip2Decoder.java b/codec/src/main/java/io/netty/handler/codec/compression/Bzip2Decoder.java index f3b54fd6ab..f28bad932b 100644 --- a/codec/src/main/java/io/netty/handler/codec/compression/Bzip2Decoder.java +++ b/codec/src/main/java/io/netty/handler/codec/compression/Bzip2Decoder.java @@ -289,26 +289,27 @@ public class Bzip2Decoder extends ByteToMessageDecoder { } final int blockLength = blockDecompressor.blockLength(); - final ByteBuf uncompressed = ctx.alloc().buffer(blockLength); - boolean success = false; + ByteBuf uncompressed = ctx.alloc().buffer(blockLength); try { int uncByte; while ((uncByte = blockDecompressor.read()) >= 0) { uncompressed.writeByte(uncByte); } - + // We did read all the data, lets reset the state and do the CRC check. + currentState = State.INIT_BLOCK; int currentBlockCRC = blockDecompressor.checkCRC(); streamCRC = (streamCRC << 1 | streamCRC >>> 31) ^ currentBlockCRC; ctx.fireChannelRead(uncompressed); - success = true; + uncompressed = null; } finally { - if (!success) { + if (uncompressed != null) { uncompressed.release(); } } - currentState = State.INIT_BLOCK; - break; + // Return here so the ByteBuf that was put in the List will be forwarded to the user and so can be + // released as soon as possible. + return; case EOF: in.skipBytes(in.readableBytes()); return;