Cleanup buffer tests.

Motivation:

There is some cleanup that can be done.

Modifications:

- Use initializer list expression where possible
- Remove unused imports.

Result:

Cleaner code.
This commit is contained in:
Norman Maurer 2018-02-01 11:54:46 +01:00
parent 011841e454
commit fbbaf2bd7e
4 changed files with 33 additions and 39 deletions

View File

@ -19,7 +19,6 @@ import io.netty.util.AsciiString;
import io.netty.util.CharsetUtil;
import org.junit.Test;
import java.nio.Buffer;
import java.nio.ByteOrder;
import java.nio.charset.Charset;
import java.util.ArrayList;
@ -463,13 +462,13 @@ public class ByteBufUtilTest {
@Test
public void testIsTextWithUtf8() {
byte[][] validUtf8Bytes = new byte[][]{
byte[][] validUtf8Bytes = {
"netty".getBytes(CharsetUtil.UTF_8),
new byte[]{(byte) 0x24},
new byte[]{(byte) 0xC2, (byte) 0xA2},
new byte[]{(byte) 0xE2, (byte) 0x82, (byte) 0xAC},
new byte[]{(byte) 0xF0, (byte) 0x90, (byte) 0x8D, (byte) 0x88},
new byte[]{(byte) 0x24,
{(byte) 0x24},
{(byte) 0xC2, (byte) 0xA2},
{(byte) 0xE2, (byte) 0x82, (byte) 0xAC},
{(byte) 0xF0, (byte) 0x90, (byte) 0x8D, (byte) 0x88},
{(byte) 0x24,
(byte) 0xC2, (byte) 0xA2,
(byte) 0xE2, (byte) 0x82, (byte) 0xAC,
(byte) 0xF0, (byte) 0x90, (byte) 0x8D, (byte) 0x88} // multiple characters
@ -477,18 +476,18 @@ public class ByteBufUtilTest {
for (byte[] bytes : validUtf8Bytes) {
assertIsText(bytes, true, CharsetUtil.UTF_8);
}
byte[][] invalidUtf8Bytes = new byte[][]{
new byte[]{(byte) 0x80},
new byte[]{(byte) 0xF0, (byte) 0x82, (byte) 0x82, (byte) 0xAC}, // Overlong encodings
new byte[]{(byte) 0xC2}, // not enough bytes
new byte[]{(byte) 0xE2, (byte) 0x82}, // not enough bytes
new byte[]{(byte) 0xF0, (byte) 0x90, (byte) 0x8D}, // not enough bytes
new byte[]{(byte) 0xC2, (byte) 0xC0}, // not correct bytes
new byte[]{(byte) 0xE2, (byte) 0x82, (byte) 0xC0}, // not correct bytes
new byte[]{(byte) 0xF0, (byte) 0x90, (byte) 0x8D, (byte) 0xC0}, // not correct bytes
new byte[]{(byte) 0xC1, (byte) 0x80}, // out of lower bound
new byte[]{(byte) 0xE0, (byte) 0x80, (byte) 0x80}, // out of lower bound
new byte[]{(byte) 0xED, (byte) 0xAF, (byte) 0x80} // out of upper bound
byte[][] invalidUtf8Bytes = {
{(byte) 0x80},
{(byte) 0xF0, (byte) 0x82, (byte) 0x82, (byte) 0xAC}, // Overlong encodings
{(byte) 0xC2}, // not enough bytes
{(byte) 0xE2, (byte) 0x82}, // not enough bytes
{(byte) 0xF0, (byte) 0x90, (byte) 0x8D}, // not enough bytes
{(byte) 0xC2, (byte) 0xC0}, // not correct bytes
{(byte) 0xE2, (byte) 0x82, (byte) 0xC0}, // not correct bytes
{(byte) 0xF0, (byte) 0x90, (byte) 0x8D, (byte) 0xC0}, // not correct bytes
{(byte) 0xC1, (byte) 0x80}, // out of lower bound
{(byte) 0xE0, (byte) 0x80, (byte) 0x80}, // out of lower bound
{(byte) 0xED, (byte) 0xAF, (byte) 0x80} // out of upper bound
};
for (byte[] bytes : invalidUtf8Bytes) {
assertIsText(bytes, false, CharsetUtil.UTF_8);
@ -497,8 +496,8 @@ public class ByteBufUtilTest {
@Test
public void testIsTextWithoutOptimization() {
byte[] validBytes = new byte[]{(byte) 0x01, (byte) 0xD8, (byte) 0x37, (byte) 0xDC};
byte[] invalidBytes = new byte[]{(byte) 0x01, (byte) 0xD8};
byte[] validBytes = {(byte) 0x01, (byte) 0xD8, (byte) 0x37, (byte) 0xDC};
byte[] invalidBytes = {(byte) 0x01, (byte) 0xD8};
assertIsText(validBytes, true, CharsetUtil.UTF_16LE);
assertIsText(invalidBytes, false, CharsetUtil.UTF_16LE);
@ -506,8 +505,8 @@ public class ByteBufUtilTest {
@Test
public void testIsTextWithAscii() {
byte[] validBytes = new byte[]{(byte) 0x00, (byte) 0x01, (byte) 0x37, (byte) 0x7F};
byte[] invalidBytes = new byte[]{(byte) 0x80, (byte) 0xFF};
byte[] validBytes = {(byte) 0x00, (byte) 0x01, (byte) 0x37, (byte) 0x7F};
byte[] invalidBytes = {(byte) 0x80, (byte) 0xFF};
assertIsText(validBytes, true, CharsetUtil.US_ASCII);
assertIsText(invalidBytes, false, CharsetUtil.US_ASCII);
@ -518,21 +517,21 @@ public class ByteBufUtilTest {
ByteBuf buffer = Unpooled.buffer();
try {
buffer.writeBytes(new byte[4]);
int[][] validIndexLengthPairs = new int[][] {
new int[]{4, 0},
new int[]{0, 4},
new int[]{1, 3},
int[][] validIndexLengthPairs = {
{4, 0},
{0, 4},
{1, 3},
};
for (int[] pair : validIndexLengthPairs) {
assertTrue(ByteBufUtil.isText(buffer, pair[0], pair[1], CharsetUtil.US_ASCII));
}
int[][] invalidIndexLengthPairs = new int[][]{
new int[]{4, 1},
new int[]{-1, 2},
new int[]{3, -1},
new int[]{3, -2},
new int[]{5, 0},
new int[]{1, 5},
int[][] invalidIndexLengthPairs = {
{4, 1},
{-1, 2},
{3, -1},
{3, -2},
{5, 0},
{1, 5},
};
for (int[] pair : invalidIndexLengthPairs) {
try {

View File

@ -27,7 +27,6 @@ import java.nio.channels.ScatteringByteChannel;
import java.nio.charset.Charset;
import static io.netty.buffer.Unpooled.*;
import static io.netty.util.ReferenceCountUtil.*;
import static org.junit.Assert.*;
public class FixedCompositeByteBufTest {

View File

@ -25,7 +25,6 @@ import java.util.Queue;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeTrue;
public class SimpleLeakAwareByteBufTest extends BigEndianHeapByteBufTest {
private final Class<? extends ByteBuf> clazz = leakClass();

View File

@ -15,14 +15,11 @@
*/
package io.netty.buffer;
import io.netty.util.CharsetUtil;
import io.netty.util.internal.PlatformDependent;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
public class WrappedUnpooledUnsafeByteBufTest extends BigEndianUnsafeDirectByteBufTest {
@Before