Fix most inspector warnings

Motivation:

It's good to minimize potentially broken windows.

Modifications:

Fix most inspector warnings from our profile

Result:

Cleaner code
Trustin Lee 2014-07-02 19:04:11 +09:00
parent cea3b6b2ab
commit 330404da07
155 changed files with 439 additions and 618 deletions

View File

@ -1,65 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
~ Copyright 2012 The Netty Project
~
~ The Netty Project licenses this file to you under the Apache License,
~ version 2.0 (the "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at:
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
~ WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
~ License for the specific language governing permissions and limitations
~ under the License.
-->
<FindBugsFilter>
<!-- Tests -->
<Match>
<Class name="~.*Test(\$[^\$]+)*"/>
</Match>
<!-- Generated code -->
<Match>
<Class name="~.*\.LocalTimeProtocol(\$[^\$]+)*"/>
</Match>
<!-- Noise -->
<Match>
<Bug code="Co,SF"
category="I18N"
pattern="REC_CATCH_EXCEPTION,UWF_FIELD_NOT_INITIALIZED_IN_CONSTRUCTOR,DB_DUPLICATE_SWITCH_CLAUSES,VO_VOLATILE_REFERENCE_TO_ARRAY" />
</Match>
<!-- Known false positives -->
<Match>
<Class name="~.*Channel(Group)?Future"/>
<Method name="~await.*"/>
<Bug pattern="PS_PUBLIC_SEMAPHORES"/>
</Match>
<Match>
<Class name="~.*SelectorLoop"/>
<Method name="run"/>
<Bug code="ESync"/>
</Match>
<Match>
<Class name="~.*Channel"/>
<Or>
<Method name="setClosed"/>
<Method name="setInterestOpsNow"/>
</Or>
<Bug pattern="USM_USELESS_SUBCLASS_METHOD"/>
</Match>
<Match>
<Class name="~.*HttpTunnelingChannelHandler"/>
<Method name="~await.*"/>
<Bug pattern="RV_RETURN_VALUE_IGNORED_BAD_PRACTICE,RV_RETURN_VALUE_IGNORED2"/>
</Match>
<!-- Known issues that don't matter -->
<Match>
<Or>
<Class name="~.*\.util\.internal\.Concurrent[A-Za-z]*HashMap(\$[^\$]+)*"/>
<Class name="~.*\.util\.internal\..*TransferQueue(\$[^\$]+)*"/>
<Class name="~.*\.util\.internal\.MapBackedSet"/>
</Or>
<Bug pattern="SE_TRANSIENT_FIELD_NOT_RESTORED,SE_BAD_FIELD"/>
</Match>
</FindBugsFilter>
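The file deleted above was the project's FindBugs exclusion filter. Elsewhere in this commit the remaining inspector noise is handled the other way around, with @SuppressWarnings annotations placed directly on the flagged declarations; whether that fully replaces the deleted XML filter is not shown here. A purely illustrative sketch of the in-code style (class and method are made up; the inspection id is one this commit uses elsewhere):

public final class SuppressionStyle {

    // Silences a named inspection at the smallest scope that needs it,
    // rather than excluding whole classes in an external filter file.
    @SuppressWarnings("FloatingPointEquality")
    static boolean isDefaultQuality(float q) {
        // Exact float comparison is intended here, so only this method
        // carries the suppression.
        return q == -1.0f;
    }
}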

View File

@ -37,7 +37,6 @@ public abstract class AbstractReferenceCountedByteBuf extends AbstractByteBuf {
refCntUpdater = updater;
}
@SuppressWarnings("FieldMayBeFinal")
private volatile int refCnt = 1;
protected AbstractReferenceCountedByteBuf(int maxCapacity) {
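The FieldMayBeFinal suppression above exists because refCnt is only written through the AtomicIntegerFieldUpdater assigned a few lines earlier, a reflective write the inspector cannot see, so the field has to stay a plain non-final volatile int. A self-contained sketch of that pattern, with an illustrative class name:

import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;

public class RefCounted {
    private static final AtomicIntegerFieldUpdater<RefCounted> UPDATER =
            AtomicIntegerFieldUpdater.newUpdater(RefCounted.class, "refCnt");

    // Must be volatile and non-final so the updater can CAS it; the IDE
    // cannot see the reflective write, hence the suppressed inspection.
    @SuppressWarnings("FieldMayBeFinal")
    private volatile int refCnt = 1;

    public int retain() {
        return UPDATER.incrementAndGet(this);
    }

    public boolean release() {
        return UPDATER.decrementAndGet(this) == 0;
    }
}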

View File

@ -189,7 +189,7 @@ public class ByteBufInputStream extends InputStream implements DataInput {
loop: while (true) {
if (!buffer.isReadable()) {
return (lineBuf.length() > 0) ? lineBuf.toString() : null;
return lineBuf.length() > 0 ? lineBuf.toString() : null;
}
int c = buffer.readUnsignedByte();
@ -198,7 +198,7 @@ public class ByteBufInputStream extends InputStream implements DataInput {
break loop;
case '\r':
if (buffer.isReadable() && buffer.getUnsignedByte(buffer.readerIndex()) == '\n') {
if (buffer.isReadable() && (char) buffer.getUnsignedByte(buffer.readerIndex()) == '\n') {
buffer.skipBytes(1);
}
break loop;

View File

@ -289,7 +289,6 @@ public class CompositeByteBuf extends AbstractReferenceCountedByteBuf {
ByteBuf consolidated = allocBuffer(capacity);
// We're not using foreach to avoid creating an iterator.
// noinspection ForLoopReplaceableByForEach
for (int i = 0; i < numComponents; i ++) {
Component c = components.get(i);
ByteBuf b = c.buf;
@ -1098,7 +1097,6 @@ public class CompositeByteBuf extends AbstractReferenceCountedByteBuf {
} else {
int count = 0;
int componentsCount = components.size();
//noinspection ForLoopReplaceableByForEach
for (int i = 0; i < componentsCount; i++) {
Component c = components.get(i);
count += c.buf.nioBufferCount();
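The removed //noinspection markers guarded exactly the pattern the surviving comment describes: walking the component list by index so no Iterator is allocated on a hot path. A generic sketch of the trade-off, using plain java.util types rather than Netty's:

import java.util.ArrayList;
import java.util.List;

public final class IndexedLoop {
    // Enhanced for would allocate an Iterator per call; an indexed loop over
    // a RandomAccess list avoids that allocation entirely.
    static int totalLength(List<String> components) {
        int total = 0;
        for (int i = 0; i < components.size(); i++) {
            total += components.get(i).length();
        }
        return total;
    }

    public static void main(String[] args) {
        List<String> parts = new ArrayList<>();
        parts.add("header");
        parts.add("body");
        System.out.println(totalLength(parts)); // 10
    }
}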

View File

@ -41,7 +41,7 @@ final class FixedCompositeByteBuf extends AbstractReferenceCountedByteBuf {
private final Object[] buffers;
private final boolean direct;
public FixedCompositeByteBuf(ByteBufAllocator allocator, ByteBuf... buffers) {
FixedCompositeByteBuf(ByteBufAllocator allocator, ByteBuf... buffers) {
super(Integer.MAX_VALUE);
if (buffers.length == 0) {
this.buffers = EMPTY;
@ -204,7 +204,6 @@ final class FixedCompositeByteBuf extends AbstractReferenceCountedByteBuf {
private Component findComponent(int index) {
int readable = 0;
//noinspection ForLoopReplaceableByForEach
for (int i = 0 ; i < buffers.length; i++) {
Component comp = null;
ByteBuf b;
@ -545,7 +544,6 @@ final class FixedCompositeByteBuf extends AbstractReferenceCountedByteBuf {
@Override
protected void deallocate() {
//noinspection ForLoopReplaceableByForEach
for (int i = 0; i < buffers.length; i++) {
buffer(i).release();
}

View File

@ -147,7 +147,6 @@ abstract class PooledByteBuf<T> extends AbstractReferenceCountedByteBuf {
}
}
@SuppressWarnings("unchecked")
private void recycle() {
recyclerHandle.recycle(this);
}

View File

@ -308,7 +308,7 @@ final class PooledDirectByteBuf extends PooledByteBuf<ByteBuffer> {
tmpBuf.clear().position(index).limit(index + length);
try {
return in.read(tmpBuf);
} catch (ClosedChannelException e) {
} catch (ClosedChannelException ignored) {
return -1;
}
}
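Renaming the unused caught exception from e to ignored, here and in the many similar catch blocks below, is purely a signal to the inspector (and the reader) that dropping the exception is intentional. A minimal sketch of the convention; the helper and its use of a channel are illustrative:

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.ReadableByteChannel;

final class ReadHelper {
    // Returns -1 when the channel is already closed, mirroring the style above.
    static int readOrMinusOne(ReadableByteChannel in, ByteBuffer dst) throws IOException {
        try {
            return in.read(dst);
        } catch (ClosedChannelException ignored) {
            // The exception carries no information we need, and the name
            // documents that it is dropped on purpose.
            return -1;
        }
    }
}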

View File

@ -232,7 +232,7 @@ final class PooledHeapByteBuf extends PooledByteBuf<byte[]> {
index = idx(index);
try {
return in.read((ByteBuffer) internalNioBuffer().clear().position(index).limit(index + length));
} catch (ClosedChannelException e) {
} catch (ClosedChannelException ignored) {
return -1;
}
}

View File

@ -309,7 +309,7 @@ final class PooledUnsafeDirectByteBuf extends PooledByteBuf<ByteBuffer> {
tmpBuf.clear().position(index).limit(index + length);
try {
return in.read(tmpBuf);
} catch (ClosedChannelException e) {
} catch (ClosedChannelException ignored) {
return -1;
}
}

View File

@ -36,7 +36,7 @@ class ReadOnlyByteBufferBuf extends AbstractReferenceCountedByteBuf {
private final ByteBufAllocator allocator;
private ByteBuffer tmpNioBuf;
public ReadOnlyByteBufferBuf(ByteBufAllocator allocator, ByteBuffer buffer) {
ReadOnlyByteBufferBuf(ByteBufAllocator allocator, ByteBuffer buffer) {
super(buffer.remaining());
if (!buffer.isReadOnly()) {
throw new IllegalArgumentException("must be a readonly buffer: " + StringUtil.simpleClassName(buffer));
@ -276,7 +276,7 @@ class ReadOnlyByteBufferBuf extends AbstractReferenceCountedByteBuf {
ByteBuffer src;
try {
src = (ByteBuffer) internalNioBuffer().clear().position(index).limit(index + length);
} catch (IllegalArgumentException e) {
} catch (IllegalArgumentException ignored) {
throw new IndexOutOfBoundsException("Too many bytes to read - Need " + (index + length));
}

View File

@ -30,7 +30,7 @@ final class ReadOnlyUnsafeDirectByteBuf extends ReadOnlyByteBufferBuf {
private static final boolean NATIVE_ORDER = ByteOrder.nativeOrder() == ByteOrder.BIG_ENDIAN;
private final long memoryAddress;
public ReadOnlyUnsafeDirectByteBuf(ByteBufAllocator allocator, ByteBuffer buffer) {
ReadOnlyUnsafeDirectByteBuf(ByteBufAllocator allocator, ByteBuffer buffer) {
super(allocator, buffer);
memoryAddress = PlatformDependent.directBufferAddress(buffer);
}

View File

@ -39,7 +39,7 @@ public class SlicedByteBuf extends AbstractDerivedByteBuf {
public SlicedByteBuf(ByteBuf buffer, int index, int length) {
super(length);
if (index < 0 || index > buffer.capacity() - length) {
throw new IndexOutOfBoundsException(buffer.toString() + ".slice(" + index + ", " + length + ')');
throw new IndexOutOfBoundsException(buffer + ".slice(" + index + ", " + length + ')');
}
if (buffer instanceof SlicedByteBuf) {

View File

@ -535,7 +535,7 @@ public class UnpooledDirectByteBuf extends AbstractReferenceCountedByteBuf {
tmpBuf.clear().position(index).limit(index + length);
try {
return in.read(tmpNioBuf);
} catch (ClosedChannelException e) {
} catch (ClosedChannelException ignored) {
return -1;
}
}
@ -556,7 +556,7 @@ public class UnpooledDirectByteBuf extends AbstractReferenceCountedByteBuf {
ByteBuffer src;
try {
src = (ByteBuffer) buffer.duplicate().clear().position(index).limit(index + length);
} catch (IllegalArgumentException e) {
} catch (IllegalArgumentException ignored) {
throw new IndexOutOfBoundsException("Too many bytes to read - Need " + (index + length));
}

View File

@ -254,7 +254,7 @@ public class UnpooledHeapByteBuf extends AbstractReferenceCountedByteBuf {
ensureAccessible();
try {
return in.read((ByteBuffer) internalNioBuffer().clear().position(index).limit(index + length));
} catch (ClosedChannelException e) {
} catch (ClosedChannelException ignored) {
return -1;
}
}

View File

@ -444,7 +444,7 @@ public class UnpooledUnsafeDirectByteBuf extends AbstractReferenceCountedByteBuf
tmpBuf.clear().position(index).limit(index + length);
try {
return in.read(tmpBuf);
} catch (ClosedChannelException e) {
} catch (ClosedChannelException ignored) {
return -1;
}
}

View File

@ -773,6 +773,7 @@ public class WrappedByteBuf extends ByteBuf {
}
@Override
@SuppressWarnings("EqualsWhichDoesntCheckParameterClass")
public boolean equals(Object obj) {
return buf.equals(obj);
}
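The suppression added above covers a deliberate pattern: a thin wrapper forwards equals straight to the wrapped buffer instead of checking the parameter's class itself, which the EqualsWhichDoesntCheckParameterClass inspection would otherwise flag. A generic sketch with an illustrative wrapper around a List:

import java.util.List;

final class Wrapper {
    private final List<String> delegate;

    Wrapper(List<String> delegate) {
        this.delegate = delegate;
    }

    @Override
    @SuppressWarnings("EqualsWhichDoesntCheckParameterClass")
    public boolean equals(Object obj) {
        // Deliberately no instanceof check: equality is defined entirely by
        // the wrapped object, matching the delegation above.
        return delegate.equals(obj);
    }

    @Override
    public int hashCode() {
        return delegate.hashCode();
    }
}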

View File

@ -18,10 +18,8 @@ package io.netty.buffer;
import io.netty.util.CharsetUtil;
import io.netty.util.IllegalReferenceCountException;
import org.junit.After;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import java.io.ByteArrayInputStream;
@ -1477,6 +1475,7 @@ public abstract class AbstractByteBufTest {
}
@Test
@SuppressWarnings("ObjectEqualsNull")
public void testEquals() {
assertFalse(buffer.equals(null));
assertFalse(buffer.equals(new Object()));
@ -1728,7 +1727,6 @@ public abstract class AbstractByteBufTest {
assertThat(lastIndex.get(), is(CAPACITY / 4));
}
@Ignore
@Test
public void testInternalNioBuffer() {
testInternalNioBuffer(128);
@ -1796,7 +1794,7 @@ public abstract class AbstractByteBufTest {
return;
}
}
Assert.assertArrayEquals(bytes, channel.writtenBytes());
assertArrayEquals(bytes, channel.writtenBytes());
latch.countDown();
}
try {
@ -1850,7 +1848,7 @@ public abstract class AbstractByteBufTest {
return;
}
}
Assert.assertArrayEquals(bytes, out.toByteArray());
assertArrayEquals(bytes, out.toByteArray());
latch.countDown();
}
try {
@ -1899,11 +1897,11 @@ public abstract class AbstractByteBufTest {
byte[] array = new byte[8];
buf.readBytes(array);
Assert.assertArrayEquals(bytes, array);
assertArrayEquals(bytes, array);
Arrays.fill(array, (byte) 0);
buf.getBytes(0, array);
Assert.assertArrayEquals(bytes, array);
assertArrayEquals(bytes, array);
latch.countDown();
}
@ -1929,6 +1927,7 @@ public abstract class AbstractByteBufTest {
}
@Test
@SuppressWarnings("ForLoopThatDoesntUseLoopVariable")
public void testNioBufferExposeOnlyRegion() {
final ByteBuf buffer = releaseLater(newBuffer(8));
byte[] data = new byte[8];

View File

@ -71,8 +71,7 @@ public abstract class AbstractCompositeByteBufTest extends AbstractByteBufTest {
buffers.add(EMPTY_BUFFER);
}
buffer = Unpooled.wrappedBuffer(
Integer.MAX_VALUE, buffers.toArray(new ByteBuf[buffers.size()])).order(order);
buffer = wrappedBuffer(Integer.MAX_VALUE, buffers.toArray(new ByteBuf[buffers.size()])).order(order);
// Truncate to the requested capacity.
buffer.capacity(length);
@ -802,7 +801,7 @@ public abstract class AbstractCompositeByteBufTest extends AbstractByteBufTest {
CompositeByteBuf cbuf = releaseLater(compositeBuffer());
int len = 8 * 4;
for (int i = 0; i < len; i += 4) {
ByteBuf buf = Unpooled.buffer().writeInt(i);
ByteBuf buf = buffer().writeInt(i);
cbuf.capacity(cbuf.writerIndex()).addComponent(buf).writerIndex(i + 4);
}
cbuf.writeByte(1);
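This and several later test diffs (ReadOnlyByteBufTest, UnpooledTest, the HTTP and HTTP/2 tests) drop the Unpooled. and Assert. qualifiers; the shortened calls rely on static imports like the wildcard imports visible in those files. A small made-up JUnit 4 test showing the style:

import static java.util.Arrays.asList;
import static org.junit.Assert.assertEquals;

import org.junit.Test;

public class StaticImportStyleTest {
    @Test
    public void shortensRepeatedFactoryAndAssertCalls() {
        // With the static imports above, asList and assertEquals read like
        // local helpers, which is the effect the diff gets for Unpooled.*
        // and Assert.* in the real tests.
        assertEquals(2, asList("a", "b").size());
    }
}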

View File

@ -15,7 +15,6 @@
*/
package io.netty.buffer;
import org.junit.Ignore;
import org.junit.Test;
import static org.junit.Assert.*;
@ -44,14 +43,6 @@ public class DuplicateByteBufTest extends AbstractByteBufTest {
new DuplicatedByteBuf(null);
}
@Ignore
@Test
// Test which shows bug
// https://github.com/netty/netty/issues/1802
public void testInternalNioBuffer() {
super.testInternalNioBuffer();
}
// See https://github.com/netty/netty/issues/1800
@Test
public void testIncreaseCapacityWrapped() {

View File

@ -46,15 +46,15 @@ public class ReadOnlyByteBufTest {
@Test
public void testUnwrap() {
ByteBuf buf = buffer(1);
assertSame(buf, Unpooled.unmodifiableBuffer(buf).unwrap());
assertSame(buf, unmodifiableBuffer(buf).unwrap());
}
@Test
public void shouldHaveSameByteOrder() {
ByteBuf buf = buffer(1);
assertSame(BIG_ENDIAN, Unpooled.unmodifiableBuffer(buf).order());
assertSame(BIG_ENDIAN, unmodifiableBuffer(buf).order());
buf = buf.order(LITTLE_ENDIAN);
assertSame(LITTLE_ENDIAN, Unpooled.unmodifiableBuffer(buf).order());
assertSame(LITTLE_ENDIAN, unmodifiableBuffer(buf).order());
}
@Test

View File

@ -445,8 +445,8 @@ public class UnpooledTest {
assertEquals(4, buffer.readInt());
assertFalse(buffer.isReadable());
assertEquals(0, Unpooled.copyInt(null).capacity());
assertEquals(0, Unpooled.copyInt(EMPTY_INTS).capacity());
assertEquals(0, copyInt(null).capacity());
assertEquals(0, copyInt(EMPTY_INTS).capacity());
}
@Test
@ -465,8 +465,8 @@ public class UnpooledTest {
assertEquals(4, buffer.readShort());
assertFalse(buffer.isReadable());
assertEquals(0, Unpooled.copyShort((short[]) null).capacity());
assertEquals(0, Unpooled.copyShort(EMPTY_SHORTS).capacity());
assertEquals(0, copyShort((short[]) null).capacity());
assertEquals(0, copyShort(EMPTY_SHORTS).capacity());
}
@Test
@ -477,8 +477,8 @@ public class UnpooledTest {
assertEquals(4, buffer.readShort());
assertFalse(buffer.isReadable());
assertEquals(0, Unpooled.copyShort((int[]) null).capacity());
assertEquals(0, Unpooled.copyShort(EMPTY_INTS).capacity());
assertEquals(0, copyShort((int[]) null).capacity());
assertEquals(0, copyShort(EMPTY_INTS).capacity());
}
@Test
@ -497,8 +497,8 @@ public class UnpooledTest {
assertEquals(4, buffer.readMedium());
assertFalse(buffer.isReadable());
assertEquals(0, Unpooled.copyMedium(null).capacity());
assertEquals(0, Unpooled.copyMedium(EMPTY_INTS).capacity());
assertEquals(0, copyMedium(null).capacity());
assertEquals(0, copyMedium(EMPTY_INTS).capacity());
}
@Test
@ -517,8 +517,8 @@ public class UnpooledTest {
assertEquals(4, buffer.readLong());
assertFalse(buffer.isReadable());
assertEquals(0, Unpooled.copyLong(null).capacity());
assertEquals(0, Unpooled.copyLong(EMPTY_LONGS).capacity());
assertEquals(0, copyLong(null).capacity());
assertEquals(0, copyLong(EMPTY_LONGS).capacity());
}
@Test
@ -537,8 +537,8 @@ public class UnpooledTest {
assertEquals(4, buffer.readFloat(), 0.01);
assertFalse(buffer.isReadable());
assertEquals(0, Unpooled.copyFloat(null).capacity());
assertEquals(0, Unpooled.copyFloat(EMPTY_FLOATS).capacity());
assertEquals(0, copyFloat(null).capacity());
assertEquals(0, copyFloat(EMPTY_FLOATS).capacity());
}
@Test
@ -557,8 +557,8 @@ public class UnpooledTest {
assertEquals(4, buffer.readDouble(), 0.01);
assertFalse(buffer.isReadable());
assertEquals(0, Unpooled.copyDouble(null).capacity());
assertEquals(0, Unpooled.copyDouble(EMPTY_DOUBLES).capacity());
assertEquals(0, copyDouble(null).capacity());
assertEquals(0, copyDouble(EMPTY_DOUBLES).capacity());
}
@Test
@ -569,8 +569,8 @@ public class UnpooledTest {
assertFalse(buffer.readBoolean());
assertFalse(buffer.isReadable());
assertEquals(0, Unpooled.copyBoolean(null).capacity());
assertEquals(0, Unpooled.copyBoolean(EMPTY_BOOLEANS).capacity());
assertEquals(0, copyBoolean(null).capacity());
assertEquals(0, copyBoolean(EMPTY_BOOLEANS).capacity());
}
@Test

View File

@ -20,6 +20,8 @@ import io.netty.handler.codec.DecoderException;
public final class DnsResponseException extends DecoderException {
private static final long serialVersionUID = -8519053051363525286L;
private final DnsResponseCode code;
public DnsResponseException(DnsResponseCode code) {

View File

@ -14,10 +14,6 @@
*/
package io.netty.handler.codec.http;
import static io.netty.handler.codec.http.HttpHeaders.Names.CONNECTION;
import static io.netty.handler.codec.http.HttpHeaders.Names.UPGRADE;
import static io.netty.handler.codec.http.HttpResponseStatus.SWITCHING_PROTOCOLS;
import static io.netty.util.ReferenceCountUtil.release;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelPromise;
@ -26,6 +22,10 @@ import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import static io.netty.handler.codec.http.HttpHeaders.Names.*;
import static io.netty.handler.codec.http.HttpResponseStatus.*;
import static io.netty.util.ReferenceCountUtil.*;
/**
* Client-side handler for handling an HTTP upgrade handshake to another protocol. When the first
* HTTP request is sent, this handler will add all appropriate headers to perform an upgrade to the
@ -71,7 +71,7 @@ public class HttpClientUpgradeHandler extends HttpObjectAggregator {
*/
public interface UpgradeCodec {
/**
* Returns the name of the protocol supported by this codec, as indicated by the {@link UPGRADE} header.
* Returns the name of the protocol supported by this codec, as indicated by the {@code 'UPGRADE'} header.
*/
String protocol();
@ -207,7 +207,7 @@ public class HttpClientUpgradeHandler extends HttpObjectAggregator {
}
}
private void removeThisHandler(ChannelHandlerContext ctx) {
private static void removeThisHandler(ChannelHandlerContext ctx) {
ctx.pipeline().remove(ctx.name());
}
@ -226,7 +226,7 @@ public class HttpClientUpgradeHandler extends HttpObjectAggregator {
StringBuilder builder = new StringBuilder();
for (String part : connectionParts) {
builder.append(part);
builder.append(",");
builder.append(',');
}
builder.append(UPGRADE);
request.headers().set(CONNECTION, builder.toString());

View File

@ -124,6 +124,7 @@ public class HttpContentCompressor extends HttpContentEncoder {
wrapper, compressionLevel, windowBits, memLevel)));
}
@SuppressWarnings("FloatingPointEquality")
protected ZlibWrapper determineWrapper(CharSequence acceptEncoding) {
float starQ = -1.0f;
float gzipQ = -1.0f;

View File

@ -24,6 +24,7 @@ import io.netty.handler.codec.AsciiString;
import io.netty.handler.codec.DecoderResult;
import io.netty.handler.codec.ReplayingDecoder;
import io.netty.handler.codec.TooLongFrameException;
import io.netty.handler.codec.http.HttpObjectDecoder.State;
import io.netty.util.internal.AppendableCharSequence;
import java.util.List;
@ -101,7 +102,7 @@ import static io.netty.buffer.ByteBufUtil.*;
* To implement the decoder of such a derived protocol, extend this class and
* implement all abstract methods properly.
*/
public abstract class HttpObjectDecoder extends ReplayingDecoder<HttpObjectDecoder.State> {
public abstract class HttpObjectDecoder extends ReplayingDecoder<State> {
private final int maxInitialLineLength;
private final int maxHeaderSize;

View File

@ -267,7 +267,7 @@ public final class CorsConfig {
* @return {@link Builder} to support method chaining.
*/
public static Builder withOrigin(final String origin) {
if (origin.equals("*")) {
if ("*".equals(origin)) {
return new Builder();
}
return new Builder(origin);
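Putting the literal first, "*".equals(origin), is the usual null-safe form of the comparison: the literal can never be null, so the call cannot throw a NullPointerException even when origin is null, whereas origin.equals("*") would. A tiny sketch under that assumption:

final class OriginCheck {
    // Literal-first equals: safe even when origin is null.
    static boolean isAnyOrigin(String origin) {
        return "*".equals(origin);
    }

    public static void main(String[] args) {
        System.out.println(isAnyOrigin("*"));   // true
        System.out.println(isAnyOrigin(null));  // false, no NPE
    }
}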

View File

@ -22,8 +22,10 @@ import io.netty.util.internal.PlatformDependent;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
/**
* Default factory giving Attribute and FileUpload according to constructor
@ -145,7 +147,7 @@ public class DefaultHttpDataFactory implements HttpDataFactory {
private static void checkHttpDataSize(HttpData data) {
try {
data.checkSize(data.length());
} catch (IOException e) {
} catch (IOException ignored) {
throw new IllegalArgumentException("Attribute bigger than maxSize allowed");
}
}
@ -235,15 +237,18 @@ public class DefaultHttpDataFactory implements HttpDataFactory {
@Override
public void cleanAllHttpData() {
for (HttpRequest request : requestFileDeleteMap.keySet()) {
List<HttpData> fileToDelete = requestFileDeleteMap.get(request);
Iterator<Entry<HttpRequest, List<HttpData>>> i = requestFileDeleteMap.entrySet().iterator();
while (i.hasNext()) {
Entry<HttpRequest, List<HttpData>> e = i.next();
i.remove();
List<HttpData> fileToDelete = e.getValue();
if (fileToDelete != null) {
for (HttpData data: fileToDelete) {
for (HttpData data : fileToDelete) {
data.delete();
}
fileToDelete.clear();
}
requestFileDeleteMap.remove(request);
}
}
}
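The rewritten loop removes entries through the entry-set iterator instead of calling requestFileDeleteMap.remove(request) while iterating keySet(), which, unless the map is a concurrent implementation, risks a ConcurrentModificationException and in any case needed an extra lookup per key. A generic sketch of the iterator-removal pattern:

import java.util.Iterator;
import java.util.List;
import java.util.Map;

final class CleanupSketch {
    // Iterator.remove() is the only safe way to delete entries from a plain
    // HashMap while iterating over it.
    static void drain(Map<String, List<String>> pending) {
        Iterator<Map.Entry<String, List<String>>> i = pending.entrySet().iterator();
        while (i.hasNext()) {
            Map.Entry<String, List<String>> e = i.next();
            i.remove();
            List<String> values = e.getValue();
            if (values != null) {
                values.clear();
            }
        }
    }
}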

View File

@ -106,7 +106,7 @@ public class HttpPostMultipartRequestDecoder implements InterfaceHttpPostRequest
/**
* Used in Multipart
*/
private Map<String, Attribute> currentFieldAttributes;
private Map<CharSequence, Attribute> currentFieldAttributes;
/**
* The current FileUpload that is currently in decode process
@ -516,7 +516,7 @@ public class HttpPostMultipartRequestDecoder implements InterfaceHttpPostRequest
// load data
try {
loadFieldMultipart(multipartDataBoundary);
} catch (NotEnoughDataDecoderException e) {
} catch (NotEnoughDataDecoderException ignored) {
return null;
}
Attribute finalAttribute = currentAttribute;
@ -561,7 +561,7 @@ public class HttpPostMultipartRequestDecoder implements InterfaceHttpPostRequest
SeekAheadOptimize sao;
try {
sao = new SeekAheadOptimize(undecodedChunk);
} catch (SeekAheadNoBackArrayException e) {
} catch (SeekAheadNoBackArrayException ignored) {
try {
skipControlCharactersStandard();
} catch (IndexOutOfBoundsException e1) {
@ -608,7 +608,7 @@ public class HttpPostMultipartRequestDecoder implements InterfaceHttpPostRequest
int readerIndex = undecodedChunk.readerIndex();
try {
skipControlCharacters();
} catch (NotEnoughDataDecoderException e1) {
} catch (NotEnoughDataDecoderException ignored) {
undecodedChunk.readerIndex(readerIndex);
return null;
}
@ -616,7 +616,7 @@ public class HttpPostMultipartRequestDecoder implements InterfaceHttpPostRequest
String newline;
try {
newline = readDelimiter(delimiter);
} catch (NotEnoughDataDecoderException e) {
} catch (NotEnoughDataDecoderException ignored) {
undecodedChunk.readerIndex(readerIndex);
return null;
}
@ -648,7 +648,7 @@ public class HttpPostMultipartRequestDecoder implements InterfaceHttpPostRequest
private InterfaceHttpData findMultipartDisposition() {
int readerIndex = undecodedChunk.readerIndex();
if (currentStatus == MultiPartStatus.DISPOSITION) {
currentFieldAttributes = new TreeMap<String, Attribute>(CaseIgnoringComparator.INSTANCE);
currentFieldAttributes = new TreeMap<CharSequence, Attribute>(CaseIgnoringComparator.INSTANCE);
}
// read many lines until empty line with newline found! Store all data
while (!skipOneLine()) {
@ -656,7 +656,7 @@ public class HttpPostMultipartRequestDecoder implements InterfaceHttpPostRequest
try {
skipControlCharacters();
newline = readLine();
} catch (NotEnoughDataDecoderException e) {
} catch (NotEnoughDataDecoderException ignored) {
undecodedChunk.readerIndex(readerIndex);
return null;
}
@ -842,7 +842,7 @@ public class HttpPostMultipartRequestDecoder implements InterfaceHttpPostRequest
size = lengthAttribute != null ? Long.parseLong(lengthAttribute.getValue()) : 0L;
} catch (IOException e) {
throw new ErrorDataDecoderException(e);
} catch (NumberFormatException e) {
} catch (NumberFormatException ignored) {
size = 0;
}
try {
@ -991,7 +991,7 @@ public class HttpPostMultipartRequestDecoder implements InterfaceHttpPostRequest
SeekAheadOptimize sao;
try {
sao = new SeekAheadOptimize(undecodedChunk);
} catch (SeekAheadNoBackArrayException e1) {
} catch (SeekAheadNoBackArrayException ignored) {
return readLineStandard();
}
int readerIndex = undecodedChunk.readerIndex();
@ -1142,7 +1142,7 @@ public class HttpPostMultipartRequestDecoder implements InterfaceHttpPostRequest
SeekAheadOptimize sao;
try {
sao = new SeekAheadOptimize(undecodedChunk);
} catch (SeekAheadNoBackArrayException e1) {
} catch (SeekAheadNoBackArrayException ignored) {
return readDelimiterStandard(delimiter);
}
int readerIndex = undecodedChunk.readerIndex();
@ -1371,7 +1371,7 @@ public class HttpPostMultipartRequestDecoder implements InterfaceHttpPostRequest
SeekAheadOptimize sao;
try {
sao = new SeekAheadOptimize(undecodedChunk);
} catch (SeekAheadNoBackArrayException e1) {
} catch (SeekAheadNoBackArrayException ignored) {
readFileUploadByteMultipartStandard(delimiter);
return;
}
@ -1592,7 +1592,7 @@ public class HttpPostMultipartRequestDecoder implements InterfaceHttpPostRequest
SeekAheadOptimize sao;
try {
sao = new SeekAheadOptimize(undecodedChunk);
} catch (SeekAheadNoBackArrayException e1) {
} catch (SeekAheadNoBackArrayException ignored) {
loadFieldMultipartStandard(delimiter);
return;
}
@ -1699,6 +1699,7 @@ public class HttpPostMultipartRequestDecoder implements InterfaceHttpPostRequest
*
* @return the cleaned String
*/
@SuppressWarnings("IfStatementWithIdenticalBranches")
private static String cleanString(String field) {
StringBuilder sb = new StringBuilder(field.length());
for (int i = 0; i < field.length(); i++) {

View File

@ -156,7 +156,8 @@ public class HttpPostRequestDecoder implements InterfaceHttpPostRequestDecoder {
String[] headerContentType = splitHeaderContentType(contentType);
if (headerContentType[0].toLowerCase().startsWith(
HttpHeaders.Values.MULTIPART_FORM_DATA.toString())) {
int mrank = 1, crank = 2;
int mrank;
int crank;
if (headerContentType[1].toLowerCase().startsWith(
HttpHeaders.Values.BOUNDARY.toString())) {
mrank = 1;

View File

@ -511,7 +511,7 @@ public class HttpPostStandardRequestDecoder implements InterfaceHttpPostRequestD
SeekAheadOptimize sao;
try {
sao = new SeekAheadOptimize(undecodedChunk);
} catch (SeekAheadNoBackArrayException e1) {
} catch (SeekAheadNoBackArrayException ignored) {
parseBodyAttributesStandard();
return;
}
@ -659,11 +659,11 @@ public class HttpPostStandardRequestDecoder implements InterfaceHttpPostRequestD
SeekAheadOptimize sao;
try {
sao = new SeekAheadOptimize(undecodedChunk);
} catch (SeekAheadNoBackArrayException e) {
} catch (SeekAheadNoBackArrayException ignored) {
try {
skipControlCharactersStandard();
} catch (IndexOutOfBoundsException e1) {
throw new NotEnoughDataDecoderException(e1);
} catch (IndexOutOfBoundsException e) {
throw new NotEnoughDataDecoderException(e);
}
return;
}

View File

@ -55,7 +55,7 @@ public interface InterfaceHttpPostRequestDecoder {
* @throws HttpPostRequestDecoder.NotEnoughDataDecoderException
* Need more chunks
*/
List<InterfaceHttpData> getBodyHttpDatas() throws HttpPostRequestDecoder.NotEnoughDataDecoderException;
List<InterfaceHttpData> getBodyHttpDatas();
/**
* This getMethod returns a List of all HttpDatas with the given name from
@ -68,7 +68,7 @@ public interface InterfaceHttpPostRequestDecoder {
* @throws HttpPostRequestDecoder.NotEnoughDataDecoderException
* need more chunks
*/
List<InterfaceHttpData> getBodyHttpDatas(String name) throws HttpPostRequestDecoder.NotEnoughDataDecoderException;
List<InterfaceHttpData> getBodyHttpDatas(String name);
/**
* This getMethod returns the first InterfaceHttpData with the given name from
@ -82,7 +82,7 @@ public interface InterfaceHttpPostRequestDecoder {
* @throws HttpPostRequestDecoder.NotEnoughDataDecoderException
* need more chunks
*/
InterfaceHttpData getBodyHttpData(String name) throws HttpPostRequestDecoder.NotEnoughDataDecoderException;
InterfaceHttpData getBodyHttpData(String name);
/**
* Initialized the internals from a new chunk
@ -93,8 +93,7 @@ public interface InterfaceHttpPostRequestDecoder {
* if there is a problem with the charset decoding or other
* errors
*/
InterfaceHttpPostRequestDecoder offer(HttpContent content)
throws HttpPostRequestDecoder.ErrorDataDecoderException;
InterfaceHttpPostRequestDecoder offer(HttpContent content);
/**
* True if at current getStatus, there is an available decoded
@ -106,7 +105,7 @@ public interface InterfaceHttpPostRequestDecoder {
* @throws HttpPostRequestDecoder.EndOfDataDecoderException
* No more data will be available
*/
boolean hasNext() throws HttpPostRequestDecoder.EndOfDataDecoderException;
boolean hasNext();
/**
* Returns the next available InterfaceHttpData or null if, at the time it
@ -120,7 +119,7 @@ public interface InterfaceHttpPostRequestDecoder {
* @throws HttpPostRequestDecoder.EndOfDataDecoderException
* No more data will be available
*/
InterfaceHttpData next() throws HttpPostRequestDecoder.EndOfDataDecoderException;
InterfaceHttpData next();
/**
* Destroy the {@link InterfaceHttpPostRequestDecoder} and release all its resources. After this method
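The throws clauses removed above all name exceptions that are unchecked in this API (they are RuntimeException subclasses, which is why the interface still compiles without the declarations), so the compiler never required them; the @throws Javadoc kept on each method preserves the documentation. A small sketch with a made-up unchecked exception:

final class UncheckedThrowsSketch {
    // A made-up unchecked exception: extends RuntimeException, so callers are
    // never forced to catch it and interfaces need not declare it.
    static final class NotEnoughDataException extends RuntimeException {
        private static final long serialVersionUID = 1L;
    }

    interface Decoder {
        /**
         * @throws NotEnoughDataException if more input is needed
         *         (documented, but deliberately not declared with throws)
         */
        String next();
    }
}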

View File

@ -17,7 +17,6 @@ package io.netty.handler.codec.rtsp;
import io.netty.buffer.ByteBuf;
import io.netty.handler.codec.http.FullHttpResponse;
import io.netty.handler.codec.http.HttpHeaders;
import io.netty.handler.codec.http.HttpResponse;
import io.netty.util.CharsetUtil;
@ -26,7 +25,6 @@ import static io.netty.handler.codec.http.HttpConstants.*;
/**
* Encodes an RTSP response represented in {@link FullHttpResponse} into
* a {@link ByteBuf}.
*/
public class RtspResponseEncoder extends RtspObjectEncoder<HttpResponse> {
private static final byte[] CRLF = { CR, LF };

View File

@ -33,11 +33,11 @@ public class SpdyHeaderBlockRawEncoder extends SpdyHeaderBlockEncoder {
this.version = version.getVersion();
}
private void setLengthField(ByteBuf buffer, int writerIndex, int length) {
private static void setLengthField(ByteBuf buffer, int writerIndex, int length) {
buffer.setInt(writerIndex, length);
}
private void writeLengthField(ByteBuf buffer, int length) {
private static void writeLengthField(ByteBuf buffer, int length) {
buffer.writeInt(length);
}
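Turning setLengthField and writeLengthField into static methods (like removeThisHandler earlier in the commit) records that they read no instance state, which is exactly what the corresponding inspection checks for on private methods. A generic sketch, using java.nio.ByteBuffer in place of Netty's ByteBuf:

import java.nio.ByteBuffer;

final class LengthFields {
    // No field access, so the helper can be static.
    private static void setLengthField(ByteBuffer buffer, int index, int length) {
        buffer.putInt(index, length);
    }

    void encode(ByteBuffer buffer) {
        setLengthField(buffer, 0, buffer.remaining());
    }
}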

View File

@ -18,6 +18,7 @@ package io.netty.handler.codec.spdy;
import io.netty.channel.ChannelPromise;
import io.netty.util.internal.PlatformDependent;
import java.io.Serializable;
import java.util.Comparator;
import java.util.Map;
import java.util.Queue;
@ -320,7 +321,10 @@ final class SpdySession {
}
}
private final class PriorityComparator implements Comparator<Integer> {
private final class PriorityComparator implements Comparator<Integer>, Serializable {
private static final long serialVersionUID = 1161471649740544848L;
@Override
public int compare(Integer id1, Integer id2) {
StreamState state1 = activeStreams.get(id1);
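Making the priority comparator Serializable (and giving it a serialVersionUID) answers the standard warning that a Comparator installed in a serializable collection such as a TreeMap should itself be serializable, since serializing that collection otherwise fails. A minimal, generic sketch:

import java.io.Serializable;
import java.util.Comparator;
import java.util.TreeMap;

final class SerializableComparatorSketch {
    // Serializable so a TreeMap ordered by it can itself be serialized.
    static final class ByLength implements Comparator<String>, Serializable {
        private static final long serialVersionUID = 1L;

        @Override
        public int compare(String a, String b) {
            return Integer.compare(a.length(), b.length());
        }
    }

    public static void main(String[] args) {
        TreeMap<String, Integer> map = new TreeMap<>(new ByLength());
        map.put("spdy", 1);
        map.put("http2", 2);
        System.out.println(map.firstKey()); // "spdy": shorter key sorts first
    }
}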

View File

@ -15,8 +15,6 @@
*/
package io.netty.handler.codec.http;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import io.netty.buffer.ByteBuf;
import io.netty.channel.embedded.EmbeddedChannel;
import io.netty.handler.stream.ChunkedFile;
@ -25,6 +23,7 @@ import io.netty.handler.stream.ChunkedNioFile;
import io.netty.handler.stream.ChunkedNioStream;
import io.netty.handler.stream.ChunkedStream;
import io.netty.handler.stream.ChunkedWriteHandler;
import org.junit.Test;
import java.io.ByteArrayInputStream;
import java.io.File;
@ -32,7 +31,7 @@ import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.channels.Channels;
import org.junit.Test;
import static org.junit.Assert.*;
public class HttpChunkedInputTest {
private static final byte[] BYTES = new byte[1024 * 64];
@ -119,6 +118,6 @@ public class HttpChunkedInputTest {
}
assertEquals(BYTES.length * inputs.length, read);
assertTrue("Last chunk must be DefaultLastHttpContent", lastHttpContent == LastHttpContent.EMPTY_LAST_CONTENT);
assertSame("Last chunk must be DefaultLastHttpContent", LastHttpContent.EMPTY_LAST_CONTENT, lastHttpContent);
}
}

View File

@ -15,11 +15,12 @@
*/
package io.netty.handler.codec.http;
import org.junit.Test;
import java.text.ParseException;
import java.util.Date;
import org.junit.Assert;
import org.junit.Test;
import static org.junit.Assert.*;
public class HttpHeaderDateFormatTest {
/**
@ -35,28 +36,24 @@ public class HttpHeaderDateFormatTest {
HttpHeaderDateFormat format = HttpHeaderDateFormat.get();
final Date parsedDateWithSingleDigitDay = format.parse("Sun, 6 Nov 1994 08:49:37 GMT");
Assert.assertNotNull(parsedDateWithSingleDigitDay);
Assert.assertEquals(DATE, parsedDateWithSingleDigitDay);
assertNotNull(parsedDateWithSingleDigitDay);
assertEquals(DATE, parsedDateWithSingleDigitDay);
final Date parsedDateWithDoubleDigitDay = format.parse("Sun, 06 Nov 1994 08:49:37 GMT");
Assert.assertNotNull(parsedDateWithDoubleDigitDay);
Assert.assertEquals(DATE, parsedDateWithDoubleDigitDay);
assertNotNull(parsedDateWithDoubleDigitDay);
assertEquals(DATE, parsedDateWithDoubleDigitDay);
final Date parsedDateWithDashSeparatorSingleDigitDay = format.parse("Sunday, 06-Nov-94 08:49:37 GMT");
Assert.assertNotNull(parsedDateWithDashSeparatorSingleDigitDay);
Assert.assertEquals(DATE, parsedDateWithDashSeparatorSingleDigitDay);
assertNotNull(parsedDateWithDashSeparatorSingleDigitDay);
assertEquals(DATE, parsedDateWithDashSeparatorSingleDigitDay);
final Date parsedDateWithSingleDoubleDigitDay = format.parse("Sunday, 6-Nov-94 08:49:37 GMT");
Assert.assertNotNull(parsedDateWithSingleDoubleDigitDay);
Assert.assertEquals(DATE, parsedDateWithSingleDoubleDigitDay);
assertNotNull(parsedDateWithSingleDoubleDigitDay);
assertEquals(DATE, parsedDateWithSingleDoubleDigitDay);
final Date parsedDateWithoutGMT = format.parse("Sun Nov 6 08:49:37 1994");
Assert.assertNotNull(parsedDateWithoutGMT);
Assert.assertEquals(DATE, parsedDateWithoutGMT);
}
private Date parseDate(HttpHeaderDateFormat dateFormat, String dateStr) throws ParseException {
return dateFormat.parse(dateStr);
assertNotNull(parsedDateWithoutGMT);
assertEquals(DATE, parsedDateWithoutGMT);
}
@Test
@ -64,7 +61,7 @@ public class HttpHeaderDateFormatTest {
HttpHeaderDateFormat format = HttpHeaderDateFormat.get();
final String formatted = format.format(DATE);
Assert.assertNotNull(formatted);
Assert.assertEquals("Sun, 06 Nov 1994 08:49:37 GMT", formatted);
assertNotNull(formatted);
assertEquals("Sun, 06 Nov 1994 08:49:37 GMT", formatted);
}
}

View File

@ -20,6 +20,7 @@ import io.netty.buffer.CompositeByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.embedded.EmbeddedChannel;
import io.netty.handler.codec.DecoderResultProvider;
import io.netty.handler.codec.TooLongFrameException;
import io.netty.handler.codec.http.HttpHeaders.Names;
import io.netty.util.CharsetUtil;
@ -308,7 +309,7 @@ public class HttpObjectAggregatorTest {
ch.writeInbound(Unpooled.copiedBuffer("GET / HTTP/1.0 with extra\r\n", CharsetUtil.UTF_8));
Object inbound = ch.readInbound();
assertThat(inbound, is(instanceOf(FullHttpRequest.class)));
assertTrue(((FullHttpRequest) inbound).decoderResult().isFailure());
assertTrue(((DecoderResultProvider) inbound).decoderResult().isFailure());
assertNull(ch.readInbound());
ch.finish();
}
@ -319,7 +320,7 @@ public class HttpObjectAggregatorTest {
ch.writeInbound(Unpooled.copiedBuffer("HTTP/1.0 BAD_CODE Bad Server\r\n", CharsetUtil.UTF_8));
Object inbound = ch.readInbound();
assertThat(inbound, is(instanceOf(FullHttpResponse.class)));
assertTrue(((FullHttpResponse) inbound).decoderResult().isFailure());
assertTrue(((DecoderResultProvider) inbound).decoderResult().isFailure());
assertNull(ch.readInbound());
ch.finish();
}

View File

@ -136,7 +136,6 @@ public class HttpRequestDecoderTest {
}
// if header is done it should produce a HttpRequest
boolean headerDone = a + amount == headerLength;
channel.writeInbound(Unpooled.wrappedBuffer(content, a, amount));
a += amount;
}

View File

@ -47,7 +47,7 @@ public class HttpResponseEncoderTest {
buffer.release();
FileRegion region = channel.readOutbound();
assertSame(region, FILE_REGION);
assertSame(FILE_REGION, region);
region.release();
buffer = channel.readOutbound();
assertEquals("\r\n", buffer.toString(CharsetUtil.US_ASCII));

View File

@ -78,7 +78,7 @@ public class CorsHandlerTest {
@Test
public void preflightDeleteRequestWithCustomHeaders() {
final CorsConfig config = CorsConfig.withOrigin("http://localhost:8888")
.allowedRequestMethods(HttpMethod.GET, HttpMethod.DELETE)
.allowedRequestMethods(GET, DELETE)
.build();
final HttpResponse response = preflightRequest(config, "http://localhost:8888", "content-type, xheader1");
assertThat(response.headers().get(ACCESS_CONTROL_ALLOW_ORIGIN), is("http://localhost:8888"));
@ -89,7 +89,7 @@ public class CorsHandlerTest {
@Test
public void preflightGetRequestWithCustomHeaders() {
final CorsConfig config = CorsConfig.withOrigin("http://localhost:8888")
.allowedRequestMethods(HttpMethod.OPTIONS, HttpMethod.GET, HttpMethod.DELETE)
.allowedRequestMethods(OPTIONS, GET, DELETE)
.allowedRequestHeaders("content-type", "xheader1")
.build();
final HttpResponse response = preflightRequest(config, "http://localhost:8888", "content-type, xheader1");

View File

@ -172,7 +172,7 @@ public class SpdySessionHandlerTest {
// a RST_STREAM frame for that Stream-ID
sessionHandler.writeInbound(new DefaultSpdyRstStreamFrame(remoteStreamId, 3));
assertNull(sessionHandler.readOutbound());
remoteStreamId += 2;
//remoteStreamId += 2;
// Check if session handler honors UNIDIRECTIONAL streams
spdySynStreamFrame.setLast(false);

View File

@ -116,8 +116,7 @@ public abstract class AbstractHttp2ConnectionHandler extends ByteToMessageDecode
}
// Create a local stream used for the HTTP cleartext upgrade.
createLocalStream(HTTP_UPGRADE_STREAM_ID, true
);
createLocalStream(HTTP_UPGRADE_STREAM_ID, true);
}
/**
@ -138,8 +137,7 @@ public abstract class AbstractHttp2ConnectionHandler extends ByteToMessageDecode
applyRemoteSettings(settings);
// Create a stream in the half-closed state.
createRemoteStream(HTTP_UPGRADE_STREAM_ID, true
);
createRemoteStream(HTTP_UPGRADE_STREAM_ID, true);
}
@Override

View File

@ -15,24 +15,14 @@
package io.netty.handler.codec.http2;
import static io.netty.handler.codec.http2.Http2CodecUtil.FRAME_HEADER_LENGTH;
import static io.netty.handler.codec.http2.Http2CodecUtil.FRAME_LENGTH_MASK;
import static io.netty.handler.codec.http2.Http2CodecUtil.INT_FIELD_LENGTH;
import static io.netty.handler.codec.http2.Http2CodecUtil.MAX_FRAME_PAYLOAD_LENGTH;
import static io.netty.handler.codec.http2.Http2CodecUtil.PRIORITY_ENTRY_LENGTH;
import static io.netty.handler.codec.http2.Http2CodecUtil.SETTINGS_COMPRESS_DATA;
import static io.netty.handler.codec.http2.Http2CodecUtil.SETTINGS_ENABLE_PUSH;
import static io.netty.handler.codec.http2.Http2CodecUtil.SETTINGS_HEADER_TABLE_SIZE;
import static io.netty.handler.codec.http2.Http2CodecUtil.SETTINGS_INITIAL_WINDOW_SIZE;
import static io.netty.handler.codec.http2.Http2CodecUtil.SETTINGS_MAX_CONCURRENT_STREAMS;
import static io.netty.handler.codec.http2.Http2CodecUtil.SETTING_ENTRY_LENGTH;
import static io.netty.handler.codec.http2.Http2CodecUtil.readUnsignedInt;
import static io.netty.handler.codec.http2.Http2Exception.protocolError;
import static io.netty.util.CharsetUtil.UTF_8;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
import io.netty.channel.ChannelHandlerContext;
import static io.netty.handler.codec.http2.Http2CodecUtil.*;
import static io.netty.handler.codec.http2.Http2Exception.*;
import static io.netty.util.CharsetUtil.*;
/**
* A {@link Http2FrameReader} that supports all frame types defined by the HTTP/2 specification.
*/

View File

@ -15,27 +15,15 @@
package io.netty.handler.codec.http2;
import static io.netty.handler.codec.http2.Http2CodecUtil.FRAME_HEADER_LENGTH;
import static io.netty.handler.codec.http2.Http2CodecUtil.INT_FIELD_LENGTH;
import static io.netty.handler.codec.http2.Http2CodecUtil.MAX_FRAME_PAYLOAD_LENGTH;
import static io.netty.handler.codec.http2.Http2CodecUtil.MAX_UNSIGNED_BYTE;
import static io.netty.handler.codec.http2.Http2CodecUtil.MAX_UNSIGNED_INT;
import static io.netty.handler.codec.http2.Http2CodecUtil.MAX_UNSIGNED_SHORT;
import static io.netty.handler.codec.http2.Http2CodecUtil.MAX_WEIGHT;
import static io.netty.handler.codec.http2.Http2CodecUtil.MIN_WEIGHT;
import static io.netty.handler.codec.http2.Http2CodecUtil.PRIORITY_ENTRY_LENGTH;
import static io.netty.handler.codec.http2.Http2CodecUtil.calcSettingsPayloadLength;
import static io.netty.handler.codec.http2.Http2CodecUtil.writeFrameHeader;
import static io.netty.handler.codec.http2.Http2CodecUtil.writeSettingsPayload;
import static io.netty.handler.codec.http2.Http2CodecUtil.writeUnsignedInt;
import static io.netty.handler.codec.http2.Http2CodecUtil.writeUnsignedShort;
import static io.netty.util.CharsetUtil.UTF_8;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.CompositeByteBuf;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelPromise;
import static io.netty.handler.codec.http2.Http2CodecUtil.*;
import static io.netty.util.CharsetUtil.*;
/**
* A {@link Http2FrameWriter} that supports all frame types defined by the HTTP/2 specification.
*/
@ -125,7 +113,7 @@ public class DefaultHttp2FrameWriter implements Http2FrameWriter {
ByteBuf frame = ctx.alloc().buffer(FRAME_HEADER_LENGTH + PRIORITY_ENTRY_LENGTH);
writeFrameHeader(frame, PRIORITY_ENTRY_LENGTH, Http2FrameType.PRIORITY,
Http2Flags.EMPTY, streamId);
long word1 = exclusive ? (0x80000000L | streamDependency) : streamDependency;
long word1 = exclusive ? 0x80000000L | streamDependency : streamDependency;
writeUnsignedInt(word1, frame);
// Adjust the weight so that it fits into a single byte on the wire.
@ -381,7 +369,7 @@ public class DefaultHttp2FrameWriter implements Http2FrameWriter {
// Write the priority.
if (hasPriority) {
long word1 = exclusive ? (0x80000000L | streamDependency) : streamDependency;
long word1 = exclusive ? 0x80000000L | streamDependency : streamDependency;
writeUnsignedInt(word1, firstFrame);
// Adjust the weight so that it fits into a single byte on the wire.

View File

@ -14,13 +14,6 @@
*/
package io.netty.handler.codec.http2;
import static io.netty.handler.codec.base64.Base64Dialect.URL_SAFE;
import static io.netty.handler.codec.http2.Http2CodecUtil.HTTP_UPGRADE_PROTOCOL_NAME;
import static io.netty.handler.codec.http2.Http2CodecUtil.HTTP_UPGRADE_SETTINGS_HEADER;
import static io.netty.handler.codec.http2.Http2CodecUtil.calcSettingsPayloadLength;
import static io.netty.handler.codec.http2.Http2CodecUtil.writeSettingsPayload;
import static io.netty.util.CharsetUtil.UTF_8;
import static io.netty.util.ReferenceCountUtil.release;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.base64.Base64;
@ -28,18 +21,22 @@ import io.netty.handler.codec.http.FullHttpResponse;
import io.netty.handler.codec.http.HttpClientUpgradeHandler;
import io.netty.handler.codec.http.HttpRequest;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import static io.netty.handler.codec.base64.Base64Dialect.*;
import static io.netty.handler.codec.http2.Http2CodecUtil.*;
import static io.netty.util.CharsetUtil.*;
import static io.netty.util.ReferenceCountUtil.*;
/**
* Client-side cleartext upgrade codec from HTTP to HTTP/2.
*/
public class Http2ClientUpgradeCodec implements HttpClientUpgradeHandler.UpgradeCodec {
private static final List<String> UPGRADE_HEADERS = Collections.unmodifiableList(Arrays
.asList(HTTP_UPGRADE_SETTINGS_HEADER));
private static final List<String> UPGRADE_HEADERS = Collections.singletonList(HTTP_UPGRADE_SETTINGS_HEADER);
private final String handlerName;
private final AbstractHttp2ConnectionHandler connectionHandler;
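Collections.singletonList(x) already yields an unmodifiable one-element list, so the previous Collections.unmodifiableList(Arrays.asList(x)) built the same thing with two extra objects. A short sketch of the equivalence (the header string is just an example value):

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

final class SingletonListSketch {
    // Both are unmodifiable single-element lists; the second form also
    // allocates an array, an Arrays$ArrayList and an unmodifiable wrapper.
    static final List<String> CONCISE =
            Collections.singletonList("HTTP2-Settings");
    static final List<String> VERBOSE =
            Collections.unmodifiableList(Arrays.asList("HTTP2-Settings"));

    public static void main(String[] args) {
        System.out.println(CONCISE.equals(VERBOSE)); // true: same contents
    }
}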

View File

@ -15,19 +15,21 @@
package io.netty.handler.codec.http2;
import static io.netty.handler.codec.http2.Http2Error.INTERNAL_ERROR;
import static io.netty.handler.codec.http2.Http2Exception.format;
import static io.netty.util.CharsetUtil.UTF_8;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelPromise;
import io.netty.handler.codec.http2.Http2StreamRemovalPolicy.Action;
import static io.netty.handler.codec.http2.Http2Error.*;
import static io.netty.handler.codec.http2.Http2Exception.*;
import static io.netty.util.CharsetUtil.*;
/**
* Constants and utility method used for encoding/decoding HTTP2 frames.
*/
public final class Http2CodecUtil {
private static final byte[] CONNECTION_PREFACE = "PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n".getBytes(UTF_8);
private static final byte[] EMPTY_PING = new byte[8];
@ -46,8 +48,8 @@ public final class Http2CodecUtil {
public static final int SETTING_ENTRY_LENGTH = 5;
public static final int PRIORITY_ENTRY_LENGTH = 5;
public static final int INT_FIELD_LENGTH = 4;
public static final short MAX_WEIGHT = (short) 256;
public static final short MIN_WEIGHT = (short) 1;
public static final short MAX_WEIGHT = 256;
public static final short MIN_WEIGHT = 1;
public static final short SETTINGS_HEADER_TABLE_SIZE = 1;
public static final short SETTINGS_ENABLE_PUSH = 2;
@ -144,18 +146,18 @@ public final class Http2CodecUtil {
* Writes a big-endian (32-bit) unsigned integer to the buffer.
*/
public static void writeUnsignedInt(long value, ByteBuf out) {
out.writeByte((int) ((value >> 24) & 0xFF));
out.writeByte((int) ((value >> 16) & 0xFF));
out.writeByte((int) ((value >> 8) & 0xFF));
out.writeByte((int) ((value & 0xFF)));
out.writeByte((int) (value >> 24 & 0xFF));
out.writeByte((int) (value >> 16 & 0xFF));
out.writeByte((int) (value >> 8 & 0xFF));
out.writeByte((int) (value & 0xFF));
}
/**
* Writes a big-endian (16-bit) unsigned integer to the buffer.
*/
public static void writeUnsignedShort(int value, ByteBuf out) {
out.writeByte((int) ((value >> 8) & 0xFF));
out.writeByte((int) ((value & 0xFF)));
out.writeByte(value >> 8 & 0xFF);
out.writeByte(value & 0xFF);
}
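The rewritten writeUnsignedInt and writeUnsignedShort bodies lean on Java operator precedence: shifts bind tighter than bitwise AND, so value >> 24 & 0xFF already means (value >> 24) & 0xFF, and the cast dropped in writeUnsignedShort was applied to an expression that is already an int. A quick check of that precedence:

public final class PrecedenceCheck {
    public static void main(String[] args) {
        long value = 0xCAFEBABEL;

        // '>>' binds tighter than '&', so both expressions are identical.
        int explicit = (int) ((value >> 24) & 0xFF);
        int implicit = (int) (value >> 24 & 0xFF);

        System.out.println(explicit == implicit);           // true
        System.out.println(Integer.toHexString(implicit));  // "ca"
    }
}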
/**

View File

@ -163,11 +163,8 @@ public class Http2Flags {
if (getClass() != obj.getClass()) {
return false;
}
Http2Flags other = (Http2Flags) obj;
if (value != other.value) {
return false;
}
return true;
return value == ((Http2Flags) obj).value;
}
@Override
@ -195,7 +192,7 @@ public class Http2Flags {
if (padLowPresent()) {
builder.append("PAD_LOW,");
}
builder.append(")");
builder.append(')');
return builder.toString();
}
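Here, and again in Http2Settings below, a trailing if (value != other.value) { return false; } return true; collapses into a single boolean return, which is what the 'simplifiable if statement' warning points at. A sketch of the collapsed form on a made-up class:

final class Flags {
    private final short value;

    Flags(short value) {
        this.value = value;
    }

    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof Flags)) {
            return false;
        }
        // Collapsed form: the comparison already is the boolean we want.
        return value == ((Flags) obj).value;
    }

    @Override
    public int hashCode() {
        return value;
    }
}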

View File

@ -232,11 +232,11 @@ public abstract class Http2Headers implements Iterable<Entry<String, String>> {
StringBuilder builder = new StringBuilder("Http2Headers[");
for (Map.Entry<String, String> header : this) {
builder.append(header.getKey());
builder.append(":");
builder.append(':');
builder.append(header.getValue());
builder.append(",");
builder.append(',');
}
builder.append("]");
builder.append(']');
return builder.toString();
}
}

View File

@ -14,13 +14,6 @@
*/
package io.netty.handler.codec.http2;
import static io.netty.handler.codec.base64.Base64Dialect.URL_SAFE;
import static io.netty.handler.codec.http.HttpResponseStatus.BAD_REQUEST;
import static io.netty.handler.codec.http2.Http2CodecUtil.HTTP_UPGRADE_PROTOCOL_NAME;
import static io.netty.handler.codec.http2.Http2CodecUtil.HTTP_UPGRADE_SETTINGS_HEADER;
import static io.netty.handler.codec.http2.Http2CodecUtil.writeFrameHeader;
import static io.netty.handler.codec.http2.Http2Flags.EMPTY;
import static io.netty.handler.codec.http2.Http2FrameType.SETTINGS;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelHandlerContext;
@ -30,18 +23,24 @@ import io.netty.handler.codec.http.FullHttpResponse;
import io.netty.handler.codec.http.HttpServerUpgradeHandler;
import io.netty.util.CharsetUtil;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import static io.netty.handler.codec.base64.Base64Dialect.*;
import static io.netty.handler.codec.http.HttpResponseStatus.*;
import static io.netty.handler.codec.http2.Http2CodecUtil.*;
import static io.netty.handler.codec.http2.Http2Flags.*;
import static io.netty.handler.codec.http2.Http2FrameType.*;
/**
* Server-side codec for performing a cleartext upgrade from HTTP/1.x to HTTP/2.
*/
public class Http2ServerUpgradeCodec implements HttpServerUpgradeHandler.UpgradeCodec {
private static final List<String> REQUIRED_UPGRADE_HEADERS = Collections
.unmodifiableList(Arrays.asList(HTTP_UPGRADE_SETTINGS_HEADER));
private static final List<String> REQUIRED_UPGRADE_HEADERS =
Collections.singletonList(HTTP_UPGRADE_SETTINGS_HEADER);
private final String handlerName;
private final AbstractHttp2ConnectionHandler connectionHandler;
private final Http2FrameReader frameReader;
@ -73,7 +72,7 @@ public class Http2ServerUpgradeCodec implements HttpServerUpgradeHandler.Upgrade
}
this.handlerName = handlerName;
this.connectionHandler = connectionHandler;
this.frameReader = new DefaultHttp2FrameReader();
frameReader = new DefaultHttp2FrameReader();
}
@Override
@ -139,8 +138,7 @@ public class Http2ServerUpgradeCodec implements HttpServerUpgradeHandler.Upgrade
final Http2Settings decodedSettings = new Http2Settings();
frameReader.readFrame(ctx, frame, new Http2FrameAdapter() {
@Override
public void onSettingsRead(ChannelHandlerContext ctx, Http2Settings settings)
throws Http2Exception {
public void onSettingsRead(ChannelHandlerContext ctx, Http2Settings settings) {
decodedSettings.copy(settings);
}
});
@ -154,8 +152,7 @@ public class Http2ServerUpgradeCodec implements HttpServerUpgradeHandler.Upgrade
* Creates an HTTP2-Settings header with the given payload. The payload buffer is released.
*/
private static ByteBuf createSettingsFrame(ChannelHandlerContext ctx, ByteBuf payload) {
ByteBuf frame =
ctx.alloc().buffer(Http2CodecUtil.FRAME_HEADER_LENGTH + payload.readableBytes());
ByteBuf frame = ctx.alloc().buffer(FRAME_HEADER_LENGTH + payload.readableBytes());
writeFrameHeader(frame, payload.readableBytes(), SETTINGS, EMPTY, 0);
frame.writeBytes(payload);
payload.release();

View File

@ -187,12 +187,12 @@ public class Http2Settings {
* @return this object.
*/
public Http2Settings copy(Http2Settings source) {
this.enabled = source.enabled;
this.allowCompressedData = source.allowCompressedData;
this.initialWindowSize = source.initialWindowSize;
this.maxConcurrentStreams = source.maxConcurrentStreams;
this.maxHeaderTableSize = source.maxHeaderTableSize;
this.pushEnabled = source.pushEnabled;
enabled = source.enabled;
allowCompressedData = source.allowCompressedData;
initialWindowSize = source.initialWindowSize;
maxConcurrentStreams = source.maxConcurrentStreams;
maxHeaderTableSize = source.maxHeaderTableSize;
pushEnabled = source.pushEnabled;
return this;
}
@ -236,31 +236,29 @@ public class Http2Settings {
if (maxConcurrentStreams != other.maxConcurrentStreams) {
return false;
}
if (pushEnabled != other.pushEnabled) {
return false;
}
return true;
return pushEnabled == other.pushEnabled;
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder("Http2Settings [");
if (hasMaxHeaderTableSize()) {
builder.append("maxHeaderTableSize=").append(maxHeaderTableSize).append(",");
builder.append("maxHeaderTableSize=").append(maxHeaderTableSize).append(',');
}
if (hasPushEnabled()) {
builder.append("pushEnabled=").append(pushEnabled).append(",");
builder.append("pushEnabled=").append(pushEnabled).append(',');
}
if (hasMaxConcurrentStreams()) {
builder.append("maxConcurrentStreams=").append(maxConcurrentStreams).append(",");
builder.append("maxConcurrentStreams=").append(maxConcurrentStreams).append(',');
}
if (hasInitialWindowSize()) {
builder.append("initialWindowSize=").append(initialWindowSize).append(",");
builder.append("initialWindowSize=").append(initialWindowSize).append(',');
}
if (hasAllowCompressedData()) {
builder.append("allowCompressedData=").append(allowCompressedData).append(",");
builder.append("allowCompressedData=").append(allowCompressedData).append(',');
}
builder.append("]");
builder.append(']');
return builder.toString();
}

View File

@ -15,8 +15,7 @@
package io.netty.handler.codec.http2;
/**
* A policy for determining when it is appropriate to remove streams from a
* {@link Http2StreamRegistry}.
* A policy for determining when it is appropriate to remove streams from an HTTP/2 stream registry.
*/
public interface Http2StreamRemovalPolicy {

View File

@ -30,8 +30,6 @@ import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import static io.netty.handler.codec.http2.Http2CodecUtil.*;
import static org.mockito.Matchers.eq;
import static org.mockito.Matchers.isNull;
import static org.mockito.Mockito.*;
/**

View File

@ -15,9 +15,7 @@
package io.netty.handler.codec.http2;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
import java.util.HashSet;
import java.util.Iterator;
@ -26,7 +24,7 @@ import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import org.junit.Test;
import static org.junit.Assert.*;
/**
@ -73,7 +71,7 @@ public class DefaultHttp2HeadersTest {
// Now iterate through the headers, removing them from the original set.
for (Map.Entry<String, String> entry : builder.build()) {
assertTrue(headers.remove(entry.getKey() + ":" + entry.getValue()));
assertTrue(headers.remove(entry.getKey() + ':' + entry.getValue()));
}
// Make sure we removed them all.

View File

@ -15,22 +15,17 @@
package io.netty.handler.codec.http2;
import static io.netty.handler.codec.http2.Http2CodecUtil.CONNECTION_STREAM_ID;
import static io.netty.handler.codec.http2.Http2CodecUtil.DEFAULT_FLOW_CONTROL_WINDOW_SIZE;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.handler.codec.http2.Http2InboundFlowController.FrameWriter;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
import static io.netty.handler.codec.http2.Http2CodecUtil.*;
import static org.mockito.Mockito.*;
/**
* Tests for {@link DefaultHttp2InboundFlowController}.
*/
@ -70,7 +65,7 @@ public class DefaultHttp2InboundFlowControllerTest {
@Test
public void halfWindowRemainingShouldUpdateConnectionWindow() throws Http2Exception {
int dataSize = (DEFAULT_FLOW_CONTROL_WINDOW_SIZE / 2) + 1;
int dataSize = DEFAULT_FLOW_CONTROL_WINDOW_SIZE / 2 + 1;
int newWindow = DEFAULT_FLOW_CONTROL_WINDOW_SIZE - dataSize;
int windowDelta = DEFAULT_FLOW_CONTROL_WINDOW_SIZE - newWindow;
@ -81,7 +76,7 @@ public class DefaultHttp2InboundFlowControllerTest {
@Test
public void halfWindowRemainingShouldUpdateAllWindows() throws Http2Exception {
int dataSize = (DEFAULT_FLOW_CONTROL_WINDOW_SIZE / 2) + 1;
int dataSize = DEFAULT_FLOW_CONTROL_WINDOW_SIZE / 2 + 1;
int initialWindowSize = DEFAULT_FLOW_CONTROL_WINDOW_SIZE;
int windowDelta = getWindowDelta(initialWindowSize, initialWindowSize, dataSize);
@ -102,7 +97,7 @@ public class DefaultHttp2InboundFlowControllerTest {
controller.initialInboundWindowSize(newInitialWindowSize);
// Clear any previous calls to the writer.
Mockito.reset(frameWriter);
reset(frameWriter);
// Send the next frame and verify that the expected window updates were sent.
applyFlowControl(initialWindowSize, false);

View File

@ -15,27 +15,19 @@
package io.netty.handler.codec.http2;
import static io.netty.handler.codec.http2.Http2CodecUtil.CONNECTION_STREAM_ID;
import static io.netty.handler.codec.http2.Http2CodecUtil.DEFAULT_FLOW_CONTROL_WINDOW_SIZE;
import static io.netty.handler.codec.http2.Http2CodecUtil.DEFAULT_PRIORITY_WEIGHT;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.handler.codec.http2.Http2OutboundFlowController.FrameWriter;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import static io.netty.handler.codec.http2.Http2CodecUtil.*;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
/**
* Tests for {@link DefaultHttp2OutboundFlowController}.
*/
@ -536,18 +528,12 @@ public class DefaultHttp2OutboundFlowControllerTest {
captureWrite(STREAM_A, captor, false);
int aWritten = captor.getValue().readableBytes();
int min = aWritten;
int max = aWritten;
captureWrite(STREAM_B, captor, false);
int bWritten = captor.getValue().readableBytes();
min = Math.min(min, bWritten);
max = Math.max(max, bWritten);
captureWrite(STREAM_D, captor, false);
int dWritten = captor.getValue().readableBytes();
min = Math.min(min, dWritten);
max = Math.max(max, dWritten);
assertEquals(999, aWritten + bWritten + dWritten);
assertEquals(333, aWritten);
@ -579,7 +565,7 @@ public class DefaultHttp2OutboundFlowControllerTest {
connection.stream(stream).setPriority(parent, (short) weight, exclusive);
}
private ByteBuf dummyData(int size) {
private static ByteBuf dummyData(int size) {
ByteBuf buffer = Unpooled.buffer(size);
buffer.writerIndex(size);
return buffer;
View File
@ -15,52 +15,32 @@
package io.netty.handler.codec.http2;
import static io.netty.buffer.Unpooled.EMPTY_BUFFER;
import static io.netty.handler.codec.http2.Http2CodecUtil.DEFAULT_PRIORITY_WEIGHT;
import static io.netty.handler.codec.http2.Http2CodecUtil.connectionPrefaceBuf;
import static io.netty.handler.codec.http2.Http2CodecUtil.emptyPingBuf;
import static io.netty.handler.codec.http2.Http2Error.NO_ERROR;
import static io.netty.handler.codec.http2.Http2Error.PROTOCOL_ERROR;
import static io.netty.handler.codec.http2.Http2Exception.protocolError;
import static io.netty.handler.codec.http2.Http2Headers.EMPTY_HEADERS;
import static io.netty.handler.codec.http2.Http2Stream.State.HALF_CLOSED_LOCAL;
import static io.netty.handler.codec.http2.Http2Stream.State.OPEN;
import static io.netty.handler.codec.http2.Http2Stream.State.RESERVED_LOCAL;
import static io.netty.handler.codec.http2.Http2Stream.State.RESERVED_REMOTE;
import static io.netty.util.CharsetUtil.UTF_8;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.anyShort;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.buffer.UnpooledByteBufAllocator;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelPromise;
import java.util.Arrays;
import java.util.Collections;
import io.netty.channel.DefaultChannelPromise;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
import java.util.Collections;
import static io.netty.buffer.Unpooled.*;
import static io.netty.handler.codec.http2.Http2CodecUtil.*;
import static io.netty.handler.codec.http2.Http2Error.*;
import static io.netty.handler.codec.http2.Http2Exception.*;
import static io.netty.handler.codec.http2.Http2Headers.*;
import static io.netty.handler.codec.http2.Http2Stream.State.*;
import static io.netty.util.CharsetUtil.*;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
/**
* Tests for {@link DelegatingHttp2ConnectionHandler} and its base class
* {@link AbstractHttp2ConnectionHandler}.
@ -122,7 +102,7 @@ public class DelegatingHttp2ConnectionHandlerTest {
when(stream.id()).thenReturn(STREAM_ID);
when(stream.state()).thenReturn(OPEN);
when(pushStream.id()).thenReturn(PUSH_STREAM_ID);
when(connection.activeStreams()).thenReturn(Arrays.asList(stream));
when(connection.activeStreams()).thenReturn(Collections.singletonList(stream));
when(connection.stream(STREAM_ID)).thenReturn(stream);
when(connection.requireStream(STREAM_ID)).thenReturn(stream);
when(connection.local()).thenReturn(local);
@ -197,13 +177,13 @@ public class DelegatingHttp2ConnectionHandlerTest {
when(connection.isServer()).thenReturn(true);
handler = new DelegatingHttp2ConnectionHandler(connection, reader, writer, inboundFlow,
outboundFlow, observer);
handler.channelRead(ctx, Unpooled.copiedBuffer("BAD_PREFACE", UTF_8));
handler.channelRead(ctx, copiedBuffer("BAD_PREFACE", UTF_8));
verify(ctx).close();
}
@Test
public void serverReceivingValidClientPrefaceStringShouldContinueReadingFrames() throws Exception {
Mockito.reset(observer);
reset(observer);
when(connection.isServer()).thenReturn(true);
handler = new DelegatingHttp2ConnectionHandler(connection, reader, writer, inboundFlow,
outboundFlow, observer);
@ -464,7 +444,7 @@ public class DelegatingHttp2ConnectionHandlerTest {
@Test(expected = Http2Exception.class)
public void serverAltSvcReadShouldThrow() throws Exception {
when(connection.isServer()).thenReturn(true);
decode().onAltSvcRead(ctx, STREAM_ID, 1, 2, Unpooled.EMPTY_BUFFER, "www.example.com", null);
decode().onAltSvcRead(ctx, STREAM_ID, 1, 2, EMPTY_BUFFER, "www.example.com", null);
}
@Test
@ -645,7 +625,7 @@ public class DelegatingHttp2ConnectionHandlerTest {
@Test
public void clientWriteAltSvcShouldThrow() throws Exception {
when(connection.isServer()).thenReturn(false);
ChannelFuture future = handler.writeAltSvc(ctx, promise, STREAM_ID, 1, 2, Unpooled.EMPTY_BUFFER,
ChannelFuture future = handler.writeAltSvc(ctx, promise, STREAM_ID, 1, 2, EMPTY_BUFFER,
"www.example.com", null);
assertTrue(future.awaitUninterruptibly().cause() instanceof Http2Exception);
}
@ -662,11 +642,11 @@ public class DelegatingHttp2ConnectionHandlerTest {
private static ByteBuf dummyData() {
// The buffer is purposely 8 bytes so it will even work for a ping frame.
return Unpooled.wrappedBuffer("abcdefgh".getBytes(UTF_8));
return wrappedBuffer("abcdefgh".getBytes(UTF_8));
}
private void mockContext() {
Mockito.reset(ctx);
reset(ctx);
when(ctx.alloc()).thenReturn(UnpooledByteBufAllocator.DEFAULT);
when(ctx.channel()).thenReturn(channel);
when(ctx.newSucceededFuture()).thenReturn(future);
View File
@ -15,14 +15,6 @@
package io.netty.handler.codec.http2;
import static io.netty.handler.codec.http2.Http2TestUtil.runInChannel;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import io.netty.bootstrap.Bootstrap;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.buffer.ByteBuf;
@ -37,16 +29,20 @@ import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import io.netty.channel.socket.nio.NioSocketChannel;
import io.netty.util.NetUtil;
import java.net.InetSocketAddress;
import java.util.concurrent.CountDownLatch;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import java.net.InetSocketAddress;
import java.util.concurrent.CountDownLatch;
import static io.netty.handler.codec.http2.Http2TestUtil.*;
import static java.util.concurrent.TimeUnit.*;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
/**
* Tests the full HTTP/2 framing stack including the connection and preface handlers.
*/
@ -117,14 +113,14 @@ public class Http2ConnectionRoundtripTest {
new DefaultHttp2Headers.Builder().method("GET").scheme("https")
.authority("example.org").path("/some/path/resource2").build();
final String text = "hello world";
runInChannel(clientChannel, new Http2TestUtil.Http2Runnable() {
runInChannel(clientChannel, new Http2Runnable() {
@Override
public void run() throws Http2Exception {
public void run() {
for (int i = 0, nextStream = 3; i < NUM_STREAMS; ++i, nextStream += 2) {
final int streamId = nextStream;
http2Client.writeHeaders(ctx(), newPromise(), streamId, headers, 0, (short) 16,
false, 0, false, false);
http2Client.writeData(ctx(), newPromise(), streamId,
http2Client.writeHeaders(
ctx(), newPromise(), nextStream, headers, 0, (short) 16, false, 0, false, false);
http2Client.writeData(
ctx(), newPromise(), nextStream,
Unpooled.copiedBuffer(text.getBytes()), 0, true, true, false);
}
}
View File
@ -16,8 +16,7 @@
package io.netty.handler.codec.memcache.binary;
/**
* Contains all possible status values a
* {@link BinaryMemcacheResponseHeader} can return.
* Contains all possible status values a {@link BinaryMemcacheResponse} can return.
*/
public final class BinaryMemcacheResponseStatus {
View File
@ -18,6 +18,7 @@ package io.netty.handler.codec.socks;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.ReplayingDecoder;
import io.netty.handler.codec.socks.SocksAuthRequestDecoder.State;
import io.netty.util.CharsetUtil;
import java.util.List;
@ -26,7 +27,7 @@ import java.util.List;
* Decodes {@link ByteBuf}s into {@link SocksAuthRequest}.
* Before returning a SocksRequest, the decoder removes itself from the pipeline.
*/
public class SocksAuthRequestDecoder extends ReplayingDecoder<SocksAuthRequestDecoder.State> {
public class SocksAuthRequestDecoder extends ReplayingDecoder<State> {
private SocksSubnegotiationVersion version;
private int fieldLength;
View File
@ -18,6 +18,7 @@ package io.netty.handler.codec.socks;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.ReplayingDecoder;
import io.netty.handler.codec.socks.SocksAuthResponseDecoder.State;
import java.util.List;
@ -25,7 +26,7 @@ import java.util.List;
* Decodes {@link ByteBuf}s into {@link SocksAuthResponse}.
* Before returning a SocksResponse, the decoder removes itself from the pipeline.
*/
public class SocksAuthResponseDecoder extends ReplayingDecoder<SocksAuthResponseDecoder.State> {
public class SocksAuthResponseDecoder extends ReplayingDecoder<State> {
private SocksSubnegotiationVersion version;
private SocksAuthStatus authStatus;
View File
@ -18,6 +18,7 @@ package io.netty.handler.codec.socks;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.ReplayingDecoder;
import io.netty.handler.codec.socks.SocksCmdRequestDecoder.State;
import io.netty.util.CharsetUtil;
import java.util.List;
@ -26,12 +27,13 @@ import java.util.List;
* Decodes {@link ByteBuf}s into {@link SocksCmdRequest}.
* Before returning a SocksRequest, the decoder removes itself from the pipeline.
*/
public class SocksCmdRequestDecoder extends ReplayingDecoder<SocksCmdRequestDecoder.State> {
public class SocksCmdRequestDecoder extends ReplayingDecoder<State> {
private SocksProtocolVersion version;
private int fieldLength;
private SocksCmdType cmdType;
private SocksAddressType addressType;
@SuppressWarnings("UnusedDeclaration")
private byte reserved;
private String host;
private int port;
View File
@ -18,6 +18,7 @@ package io.netty.handler.codec.socks;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.ReplayingDecoder;
import io.netty.handler.codec.socks.SocksCmdResponseDecoder.State;
import io.netty.util.CharsetUtil;
import java.util.List;
@ -26,7 +27,7 @@ import java.util.List;
* Decodes {@link ByteBuf}s into {@link SocksCmdResponse}.
* Before returning a SocksResponse, the decoder removes itself from the pipeline.
*/
public class SocksCmdResponseDecoder extends ReplayingDecoder<SocksCmdResponseDecoder.State> {
public class SocksCmdResponseDecoder extends ReplayingDecoder<State> {
private SocksProtocolVersion version;
private int fieldLength;
View File
@ -18,6 +18,7 @@ package io.netty.handler.codec.socks;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.ReplayingDecoder;
import io.netty.handler.codec.socks.SocksInitRequestDecoder.State;
import java.util.ArrayList;
import java.util.List;
@ -26,7 +27,7 @@ import java.util.List;
* Decodes {@link ByteBuf}s into {@link SocksInitRequest}.
* Before returning a SocksRequest, the decoder removes itself from the pipeline.
*/
public class SocksInitRequestDecoder extends ReplayingDecoder<SocksInitRequestDecoder.State> {
public class SocksInitRequestDecoder extends ReplayingDecoder<State> {
private final List<SocksAuthScheme> authSchemes = new ArrayList<SocksAuthScheme>();
private SocksProtocolVersion version;
View File
@ -18,6 +18,7 @@ package io.netty.handler.codec.socks;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.ReplayingDecoder;
import io.netty.handler.codec.socks.SocksInitResponseDecoder.State;
import java.util.List;
@ -25,7 +26,7 @@ import java.util.List;
* Decodes {@link ByteBuf}s into {@link SocksInitResponse}.
* Before returning a SocksResponse, the decoder removes itself from the pipeline.
*/
public class SocksInitResponseDecoder extends ReplayingDecoder<SocksInitResponseDecoder.State> {
public class SocksInitResponseDecoder extends ReplayingDecoder<State> {
private SocksProtocolVersion version;
private SocksAuthScheme authScheme;
View File
@ -48,7 +48,7 @@ public class StompSubframeDecoderTest {
assertNotNull(frame);
assertEquals(StompCommand.CONNECT, frame.command());
StompContentSubframe content = channel.readInbound();
assertSame(content, LastStompContentSubframe.EMPTY_LAST_CONTENT);
assertSame(LastStompContentSubframe.EMPTY_LAST_CONTENT, content);
Object o = channel.readInbound();
assertNull(o);
}
@ -123,13 +123,13 @@ public class StompSubframeDecoderTest {
assertNotNull(frame);
assertEquals(StompCommand.CONNECT, frame.command());
StompContentSubframe content = channel.readInbound();
assertSame(content, LastStompContentSubframe.EMPTY_LAST_CONTENT);
assertSame(LastStompContentSubframe.EMPTY_LAST_CONTENT, content);
StompHeadersSubframe frame2 = channel.readInbound();
assertNotNull(frame2);
assertEquals(StompCommand.CONNECTED, frame2.command());
StompContentSubframe content2 = channel.readInbound();
assertSame(content2, LastStompContentSubframe.EMPTY_LAST_CONTENT);
assertSame(LastStompContentSubframe.EMPTY_LAST_CONTENT, content2);
assertNull(channel.readInbound());
}
}
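The swapped assertSame arguments above follow JUnit's (expected, actual) convention: the framework reports the first argument as the expected value, so putting the known constant first keeps failure messages readable. A trivial, self-contained illustration (names are made up):
import static org.junit.Assert.assertSame;
import org.junit.Test;

public class AssertOrderExample {
    @Test
    public void constantGoesFirst() {
        String expected = "CONNECT";
        String actual = expected;   // stand-in for a value read back from the channel
        // JUnit treats the first argument as the expected value when it builds the failure message.
        assertSame(expected, actual);
    }
}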
View File
@ -45,7 +45,6 @@ public class DefaultTextHeaders implements TextHeaders {
return Math.abs(hash % BUCKET_SIZE);
}
@SuppressWarnings("unchecked")
private final HeaderEntry[] entries = new HeaderEntry[BUCKET_SIZE];
private final HeaderEntry head = new HeaderEntry(this);
private final boolean ignoreCase;
@ -71,7 +70,6 @@ public class DefaultTextHeaders implements TextHeaders {
return name;
}
@SuppressWarnings("unchecked")
protected CharSequence convertValue(Object value) {
if (value == null) {
throw new NullPointerException("value");
@ -173,7 +171,6 @@ public class DefaultTextHeaders implements TextHeaders {
}
if (headers instanceof DefaultTextHeaders) {
@SuppressWarnings("unchecked")
DefaultTextHeaders m = (DefaultTextHeaders) headers;
HeaderEntry e = m.head.after;
while (e != m.head) {
View File
@ -286,7 +286,7 @@ public final class Base64 {
(DECODABET[src[srcOffset + 1]] & 0xFF) << 12 |
(DECODABET[src[srcOffset + 2]] & 0xFF) << 6 |
DECODABET[src[srcOffset + 3]] & 0xFF;
} catch (IndexOutOfBoundsException e) {
} catch (IndexOutOfBoundsException ignored) {
throw new IllegalArgumentException("not encoded in Base64");
}
View File
@ -157,7 +157,7 @@ public class Bzip2Decoder extends ByteToMessageDecoder {
int huffmanSymbolCount = 0;
if (bitNumber > 0) {
for (int i = 0; i < 16; i++) {
if ((inUse16 & ((1 << 15) >>> i)) != 0) {
if ((inUse16 & 1 << 15 >>> i) != 0) {
for (int j = 0, k = i << 4; j < 16; j++, k++) {
if (readBoolean(in)) {
huffmanSymbolMap[huffmanSymbolCount++] = (byte) k;
@ -330,7 +330,7 @@ public class Bzip2Decoder extends ByteToMessageDecoder {
}
this.bitCount = bitCount -= n;
return (bitBuffer >>> bitCount) & ((1 << n) - 1);
return bitBuffer >>> bitCount & (1 << n) - 1;
}
private boolean readBoolean(ByteBuf in) {
View File
@ -93,7 +93,7 @@ class Crc32c implements Checksum {
0xBE2DA0A5, 0x4C4623A6, 0x5F16D052, 0xAD7D5351,
};
private static final int LONG_MASK = 0xFFFFFFFF;
private static final long LONG_MASK = 0xFFFFFFFFL;
private static final int BYTE_MASK = 0xFF;
private int crc = ~0;
@ -121,6 +121,6 @@ class Crc32c implements Checksum {
}
private static int crc32c(int crc, int b) {
return (crc >>> 8) ^ CRC_TABLE[(crc ^ (b & BYTE_MASK)) & BYTE_MASK];
return crc >>> 8 ^ CRC_TABLE[(crc ^ b & BYTE_MASK) & BYTE_MASK];
}
}
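Two things are going on in this hunk and the similar Base64 and Bzip2Decoder ones. First, LONG_MASK becomes a long literal: as an int, 0xFFFFFFFF is just -1, so masking with it never zero-extends the CRC the way an unsigned 32-bit mask should (presumably this matters where the checksum is exposed as a long). Second, the dropped parentheses rely on Java operator precedence: shifts bind tighter than &, which binds tighter than ^. A throwaway check of that equivalence, not part of the commit:
public final class PrecedenceCheck {
    public static void main(String[] args) {
        int[] table = new int[256];              // stands in for CRC_TABLE; contents do not matter here
        java.util.Random rnd = new java.util.Random(42);
        for (int i = 0; i < table.length; i++) {
            table[i] = rnd.nextInt();
        }
        for (int i = 0; i < 1_000_000; i++) {
            int crc = rnd.nextInt();
            int b = rnd.nextInt();
            int fullyParenthesized = (crc >>> 8) ^ table[(crc ^ (b & 0xFF)) & 0xFF];
            int asRewritten = crc >>> 8 ^ table[(crc ^ b & 0xFF) & 0xFF];
            if (fullyParenthesized != asRewritten) {
                throw new AssertionError("precedence assumption broken");
            }
        }
        System.out.println("equivalent");
    }
}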
View File
@ -57,7 +57,7 @@ public class Snappy {
written = 0;
}
public void encode(ByteBuf in, ByteBuf out, int length) {
public void encode(final ByteBuf in, final ByteBuf out, final int length) {
// Write the preamble length to the output buffer
for (int i = 0;; i ++) {
int b = length >>> i * 7;
@ -70,15 +70,14 @@ public class Snappy {
}
int inIndex = in.readerIndex();
final int baseIndex = in.readerIndex();
final int maxIndex = length;
final int baseIndex = inIndex;
final short[] table = getHashTable(maxIndex);
final short[] table = getHashTable(length);
final int shift = 32 - (int) Math.floor(Math.log(table.length) / Math.log(2));
int nextEmit = inIndex;
if (maxIndex - inIndex >= MIN_COMPRESSIBLE_BYTES) {
if (length - inIndex >= MIN_COMPRESSIBLE_BYTES) {
int nextHash = hash(in, ++inIndex, shift);
outer: while (true) {
int skip = 32;
@ -92,7 +91,7 @@ public class Snappy {
nextIndex = inIndex + bytesBetweenHashLookups;
// We need at least 4 remaining bytes to read the hash
if (nextIndex > maxIndex - 4) {
if (nextIndex > length - 4) {
break outer;
}
@ -109,14 +108,14 @@ public class Snappy {
int insertTail;
do {
int base = inIndex;
int matched = 4 + findMatchingLength(in, candidate + 4, inIndex + 4, maxIndex);
int matched = 4 + findMatchingLength(in, candidate + 4, inIndex + 4, length);
inIndex += matched;
int offset = base - candidate;
encodeCopy(out, offset, matched);
in.readerIndex(in.readerIndex() + matched);
insertTail = inIndex - 1;
nextEmit = inIndex;
if (inIndex >= maxIndex - 4) {
if (inIndex >= length - 4) {
break outer;
}
@ -134,8 +133,8 @@ public class Snappy {
}
// If there are any remaining characters, write them out as a literal
if (nextEmit < maxIndex) {
encodeLiteral(in, out, maxIndex - nextEmit);
if (nextEmit < length) {
encodeLiteral(in, out, length - nextEmit);
}
}
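The loop at the top of encode() (only partially visible in this hunk) writes the uncompressed length as Snappy's varint preamble: little-endian, 7 bits per byte, with the high bit set while more bytes follow. A self-contained sketch of that framing, using a plain ByteArrayOutputStream instead of a ByteBuf; the helper name is made up:
import java.io.ByteArrayOutputStream;
import java.util.Arrays;

public final class PreambleSketch {
    static void writePreamble(ByteArrayOutputStream out, int length) {
        for (int i = 0; ; i++) {
            int b = length >>> i * 7;            // next 7-bit group
            if ((b & 0xFFFFFF80) != 0) {
                out.write(b & 0x7F | 0x80);      // more groups follow
            } else {
                out.write(b);                    // final group
                break;
            }
        }
    }

    public static void main(String[] args) {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        writePreamble(out, 300);                 // 300 encodes as 0xAC 0x02
        System.out.println(Arrays.toString(out.toByteArray()));
    }
}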
View File
@ -27,7 +27,7 @@ class ChannelBufferByteInput implements ByteInput {
private final ByteBuf buffer;
public ChannelBufferByteInput(ByteBuf buffer) {
ChannelBufferByteInput(ByteBuf buffer) {
this.buffer = buffer;
}
View File
@ -32,7 +32,7 @@ class ChannelBufferByteOutput implements ByteOutput {
/**
* Create a new instance which uses the given {@link ByteBuf}
*/
public ChannelBufferByteOutput(ByteBuf buffer) {
ChannelBufferByteOutput(ByteBuf buffer) {
this.buffer = buffer;
}
View File
@ -20,13 +20,12 @@ import io.netty.channel.Channel;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.ReplayingDecoder;
import io.netty.handler.codec.TooLongFrameException;
import org.jboss.marshalling.ByteInput;
import org.jboss.marshalling.Unmarshaller;
import java.io.ObjectStreamConstants;
import java.util.List;
import org.jboss.marshalling.ByteInput;
import org.jboss.marshalling.Unmarshaller;
/**
* {@link ReplayingDecoder} which uses an {@link Unmarshaller} to read the Object out of the {@link ByteBuf}.
*
@ -73,7 +72,7 @@ public class CompatibleMarshallingDecoder extends ReplayingDecoder<Void> {
Object obj = unmarshaller.readObject();
unmarshaller.finish();
out.add(obj);
} catch (LimitingByteInput.TooBigObjectException e) {
} catch (LimitingByteInput.TooBigObjectException ignored) {
discardingTooLongFrame = true;
throw new TooLongFrameException();
} finally {
View File
@ -32,7 +32,7 @@ class LimitingByteInput implements ByteInput {
private final long limit;
private long read;
public LimitingByteInput(ByteInput input, long limit) {
LimitingByteInput(ByteInput input, long limit) {
if (limit <= 0) {
throw new IllegalArgumentException("The limit MUST be > 0");
}
View File
@ -27,7 +27,7 @@ class ClassLoaderClassResolver implements ClassResolver {
public Class<?> resolve(String className) throws ClassNotFoundException {
try {
return classLoader.loadClass(className);
} catch (ClassNotFoundException e) {
} catch (ClassNotFoundException ignored) {
return Class.forName(className, false, classLoader);
}
}
View File
@ -65,7 +65,7 @@ class CompactObjectInputStream extends ObjectInputStream {
Class<?> clazz;
try {
clazz = classResolver.resolve(desc.getName());
} catch (ClassNotFoundException ex) {
} catch (ClassNotFoundException ignored) {
clazz = super.resolveClass(desc);
}
View File
@ -21,7 +21,7 @@ import java.util.Map;
final class SoftReferenceMap<K, V> extends ReferenceMap<K, V> {
public SoftReferenceMap(Map<K, Reference<V>> delegate) {
SoftReferenceMap(Map<K, Reference<V>> delegate) {
super(delegate);
}
View File
@ -21,7 +21,7 @@ import java.util.Map;
final class WeakReferenceMap<K, V> extends ReferenceMap<K, V> {
public WeakReferenceMap(Map<K, Reference<V>> delegate) {
WeakReferenceMap(Map<K, Reference<V>> delegate) {
super(delegate);
}
View File
@ -26,9 +26,6 @@ import static io.netty.buffer.Unpooled.*;
import static org.hamcrest.core.Is.*;
import static org.junit.Assert.*;
/**
*/
@SuppressWarnings("ZeroLengthArrayAllocation")
public class ByteArrayDecoderTest {
private EmbeddedChannel ch;
View File
@ -31,6 +31,7 @@ public class JdkZlibTest extends ZlibTest {
}
@Test(expected = DecompressionException.class)
@Override
public void testZLIB_OR_NONE3() throws Exception {
super.testZLIB_OR_NONE3();
}
View File
@ -30,6 +30,7 @@ public class ZlibCrossTest2 extends ZlibTest {
}
@Test(expected = DecompressionException.class)
@Override
public void testZLIB_OR_NONE3() throws Exception {
super.testZLIB_OR_NONE3();
}
View File
@ -35,7 +35,6 @@ public abstract class AbstractReferenceCounted implements ReferenceCounted {
refCntUpdater = updater;
}
@SuppressWarnings("FieldMayBeFinal")
private volatile int refCnt = 1;
@Override
View File
@ -48,7 +48,7 @@ public class DefaultAttributeMap implements AttributeMap {
@SuppressWarnings("UnusedDeclaration")
private volatile AtomicReferenceArray<DefaultAttribute<?>> attributes;
@SuppressWarnings({ "unchecked", "rawtypes" })
@SuppressWarnings("unchecked")
@Override
public <T> Attribute<T> attr(AttributeKey<T> key) {
if (key == null) {
View File
@ -222,7 +222,6 @@ public class HashedWheelTimer implements Timer {
leak = leakDetector.open(this);
}
@SuppressWarnings("unchecked")
private static HashedWheelBucket[] createWheel(int ticksPerWheel) {
if (ticksPerWheel <= 0) {
throw new IllegalArgumentException(
@ -306,7 +305,7 @@ public class HashedWheelTimer implements Timer {
workerThread.interrupt();
try {
workerThread.join(100);
} catch (InterruptedException e) {
} catch (InterruptedException ignored) {
interrupted = true;
}
}
@ -397,8 +396,7 @@ public class HashedWheelTimer implements Timer {
continue;
}
long calculated = timeout.deadline / tickDuration;
long remainingRounds = (calculated - tick) / wheel.length;
timeout.remainingRounds = remainingRounds;
timeout.remainingRounds = (calculated - tick) / wheel.length;
final long ticks = Math.max(calculated, tick); // Ensure we don't schedule for past.
int stopIndex = (int) (ticks & mask);
@ -439,7 +437,7 @@ public class HashedWheelTimer implements Timer {
try {
Thread.sleep(sleepTimeMs);
} catch (InterruptedException e) {
} catch (InterruptedException ignored) {
if (WORKER_STATE_UPDATER.get(HashedWheelTimer.this) == WORKER_STATE_SHUTDOWN) {
return Long.MIN_VALUE;
}
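The bucket-placement arithmetic a few hunks up (calculated, remainingRounds, stopIndex) is easiest to see with concrete numbers. A worked example under assumed parameters, a 100 ms tick and a 512-slot wheel:
public final class WheelPlacementExample {
    public static void main(String[] args) {
        long tickDuration = 100;       // assumed: 100 ms per tick
        int wheelLength = 512;         // power of two, so the index mask is wheelLength - 1
        long mask = wheelLength - 1;
        long tick = 10;                // the worker is currently on tick 10
        long deadline = 130_000;       // the timeout is due 130 seconds after the timer started

        long calculated = deadline / tickDuration;                 // 1300: tick at which it expires
        long remainingRounds = (calculated - tick) / wheelLength;  // (1300 - 10) / 512 = 2 full revolutions
        long ticks = Math.max(calculated, tick);                   // never schedule into the past
        int stopIndex = (int) (ticks & mask);                      // 1300 & 511 = 276 -> bucket 276

        System.out.println("rounds=" + remainingRounds + ", bucket=" + stopIndex);
    }
}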
View File
@ -164,7 +164,7 @@ public abstract class Recycler<T> {
private final WeakReference<Thread> owner;
private final int id = ID_GENERATOR.getAndIncrement();
public WeakOrderQueue(Stack<?> stack, Thread thread) {
WeakOrderQueue(Stack<?> stack, Thread thread) {
head = tail = new Link();
owner = new WeakReference<Thread>(thread);
synchronized (stack) {
@ -194,7 +194,7 @@ public abstract class Recycler<T> {
}
// transfer as many items as we can from this queue to the stack, returning true if any were transferred
@SuppressWarnings({ "unchecked", "rawtypes" })
@SuppressWarnings("rawtypes")
boolean transfer(Stack<?> to) {
Link head = this.head;
View File
@ -132,7 +132,7 @@ public final class Version {
private static long parseIso8601(String value) {
try {
return new SimpleDateFormat("yyyy-MM-dd HH:mm:ss Z").parse(value).getTime();
} catch (ParseException e) {
} catch (ParseException ignored) {
return 0;
}
}
View File
@ -47,7 +47,7 @@ public class IntObjectHashMap<V> implements IntObjectMap<V>, Iterable<IntObjectM
/** The maximum number of elements allowed without allocating more space. */
private int maxSize;
/** The load factor for the map. Used to calculate {@link maxSize}. */
/** The load factor for the map. Used to calculate {@link #maxSize}. */
private final float loadFactor;
private byte[] states;
@ -80,7 +80,7 @@ public class IntObjectHashMap<V> implements IntObjectMap<V>, Iterable<IntObjectM
// Allocate the arrays.
states = new byte[initialCapacity];
keys = new int[initialCapacity];
@SuppressWarnings("unchecked")
@SuppressWarnings({ "unchecked", "SuspiciousArrayCast" })
V[] temp = (V[]) new Object[initialCapacity];
values = temp;
@ -102,7 +102,7 @@ public class IntObjectHashMap<V> implements IntObjectMap<V>, Iterable<IntObjectM
int hash = hash(key);
int capacity = capacity();
int index = hash % capacity;
int increment = 1 + (hash % (capacity - 2));
int increment = 1 + hash % (capacity - 2);
final int startIndex = index;
int firstRemovedIndex = -1;
do {
@ -215,7 +215,7 @@ public class IntObjectHashMap<V> implements IntObjectMap<V>, Iterable<IntObjectM
int i = -1;
while ((i = nextEntryIndex(i + 1)) >= 0) {
V next = values[i];
if (value == next || (value != null && value.equals(next))) {
if (value == next || value != null && value.equals(next)) {
return true;
}
}
@ -268,7 +268,7 @@ public class IntObjectHashMap<V> implements IntObjectMap<V>, Iterable<IntObjectM
private int indexOf(int key) {
int hash = hash(key);
int capacity = capacity();
int increment = 1 + (hash % (capacity - 2));
int increment = 1 + hash % (capacity - 2);
int index = hash % capacity;
int startIndex = index;
do {
@ -308,7 +308,7 @@ public class IntObjectHashMap<V> implements IntObjectMap<V>, Iterable<IntObjectM
/**
* Creates a hash value for the given key.
*/
private int hash(int key) {
private static int hash(int key) {
// Just make sure the integer is positive.
return key & Integer.MAX_VALUE;
}
@ -351,12 +351,12 @@ public class IntObjectHashMap<V> implements IntObjectMap<V>, Iterable<IntObjectM
* Adjusts the given capacity value to ensure that it's odd. Even capacities can break probing.
* TODO: would be better to ensure it's prime as well.
*/
private int adjustCapacity(int capacity) {
return capacity |= 1;
private static int adjustCapacity(int capacity) {
return capacity | 1;
}
/**
* Marks the entry at the given index position as {@link REMOVED} and sets the value to
* Marks the entry at the given index position as {@link #REMOVED} and sets the value to
* {@code null}.
* <p>
* TODO: consider performing re-compaction.
@ -395,7 +395,7 @@ public class IntObjectHashMap<V> implements IntObjectMap<V>, Iterable<IntObjectM
// New states array is automatically initialized to AVAILABLE (i.e. 0 == AVAILABLE).
states = new byte[newCapacity];
keys = new int[newCapacity];
@SuppressWarnings("unchecked")
@SuppressWarnings({ "unchecked", "SuspiciousArrayCast" })
V[] temp = (V[]) new Object[newCapacity];
values = temp;
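The indexOf/put changes above all describe the same double-hashing probe: the start slot is hash % capacity and each miss advances by 1 + hash % (capacity - 2), which is why adjustCapacity forces the capacity to be odd — an even capacity can share a factor with the step and make the probe sequence skip slots (the TODO rightly notes a prime capacity would be safer still). A simplified, standalone sketch of that probe sequence, not the real map:
public final class ProbeSketch {
    static int hash(int key) {
        return key & Integer.MAX_VALUE;            // same trick as the patch: force a non-negative value
    }

    public static void main(String[] args) {
        int capacity = 16 | 1;                     // adjustCapacity equivalent: 16 becomes 17
        int key = 123_456;
        int index = hash(key) % capacity;
        int increment = 1 + hash(key) % (capacity - 2);
        for (int i = 0; i < 5; i++) {              // print the first few slots a lookup would visit
            System.out.println("probe " + i + " -> slot " + index);
            index += increment;
            if (index >= capacity) {
                index -= capacity;                 // wrap around the table
            }
        }
    }
}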
View File
@ -24,7 +24,7 @@ final class DefaultFutureListeners {
private int progressiveSize; // the number of progressive listeners
@SuppressWarnings("unchecked")
public DefaultFutureListeners(
DefaultFutureListeners(
GenericFutureListener<? extends Future<?>> first, GenericFutureListener<? extends Future<?>> second) {
listeners = new GenericFutureListener[2];
listeners[0] = first;
View File
@ -147,7 +147,6 @@ public class DefaultPromise<V> extends AbstractFuture<V> implements Promise<V> {
if (listeners instanceof DefaultFutureListeners) {
((DefaultFutureListeners) listeners).add(listener);
} else {
@SuppressWarnings("unchecked")
final GenericFutureListener<? extends Future<V>> firstListener =
(GenericFutureListener<? extends Future<V>>) listeners;
listeners = new DefaultFutureListeners(firstListener, listener);
@ -559,7 +558,6 @@ public class DefaultPromise<V> extends AbstractFuture<V> implements Promise<V> {
if (listeners instanceof DefaultFutureListeners) {
notifyListeners0(this, (DefaultFutureListeners) listeners);
} else {
@SuppressWarnings("unchecked")
final GenericFutureListener<? extends Future<V>> l =
(GenericFutureListener<? extends Future<V>>) listeners;
notifyListener0(this, l);
@ -582,7 +580,6 @@ public class DefaultPromise<V> extends AbstractFuture<V> implements Promise<V> {
}
});
} else {
@SuppressWarnings("unchecked")
final GenericFutureListener<? extends Future<V>> l =
(GenericFutureListener<? extends Future<V>>) listeners;
execute(executor, new Runnable() {
@ -729,7 +726,7 @@ public class DefaultPromise<V> extends AbstractFuture<V> implements Promise<V> {
}
}
@SuppressWarnings({ "unchecked", "rawtypes" })
@SuppressWarnings("unchecked")
void notifyProgressiveListeners(final long progress, final long total) {
final Object listeners = progressiveListeners();
if (listeners == null) {
@ -794,7 +791,7 @@ public class DefaultPromise<V> extends AbstractFuture<V> implements Promise<V> {
private static final class CauseHolder {
final Throwable cause;
private CauseHolder(Throwable cause) {
CauseHolder(Throwable cause) {
this.cause = cause;
}
}
View File
@ -18,5 +18,6 @@ package io.netty.util.concurrent;
/**
* The result of a scheduled asynchronous operation.
*/
@SuppressWarnings("ClassNameSameAsAncestorName")
public interface ScheduledFuture<V> extends Future<V>, java.util.concurrent.ScheduledFuture<V> {
}
View File
@ -30,6 +30,7 @@ public final class EmptyArrays {
public static final short[] EMPTY_SHORTS = new short[0];
public static final long[] EMPTY_LONGS = new long[0];
public static final Object[] EMPTY_OBJECTS = new Object[0];
public static final Class<?>[] EMPTY_CLASSES = new Class[0];
public static final String[] EMPTY_STRINGS = new String[0];
public static final StackTraceElement[] EMPTY_STACK_TRACE = new StackTraceElement[0];
public static final ByteBuffer[] EMPTY_BYTE_BUFFERS = new ByteBuffer[0];
View File
@ -87,8 +87,7 @@ final class PlatformDependent0 {
// http://www.mail-archive.com/jdk6-dev@openjdk.java.net/msg00698.html
try {
unsafe.getClass().getDeclaredMethod(
"copyMemory",
new Class[] { Object.class, long.class, Object.class, long.class, long.class });
"copyMemory", Object.class, long.class, Object.class, long.class, long.class);
logger.debug("sun.misc.Unsafe.copyMemory: available");
} catch (NoSuchMethodError t) {
View File
@ -29,12 +29,12 @@ import java.util.regex.Pattern;
*/
public final class SystemPropertyUtil {
@SuppressWarnings("all")
private static boolean initializedLogger;
private static final InternalLogger logger;
private static boolean loggedException;
static {
initializedLogger = false;
logger = InternalLoggerFactory.getInstance(SystemPropertyUtil.class);
initializedLogger = true;
}
View File
@ -35,6 +35,14 @@ public abstract class InternalLoggerFactory {
static {
final String name = InternalLoggerFactory.class.getName();
InternalLoggerFactory f;
f = newDefaultFactory(name);
defaultFactory = f;
}
@SuppressWarnings("UnusedCatchParameter")
private static InternalLoggerFactory newDefaultFactory(String name) {
InternalLoggerFactory f;
try {
f = new Slf4JLoggerFactory(true);
@ -49,8 +57,7 @@ public abstract class InternalLoggerFactory {
f.newInstance(name).debug("Using java.util.logging as the default logging framework");
}
}
defaultFactory = f;
return f;
}
/**
View File
@ -72,7 +72,7 @@ class Log4JLogger extends AbstractInternalLogger {
try {
logger.isTraceEnabled();
return true;
} catch (NoSuchMethodError e) {
} catch (NoSuchMethodError ignored) {
return false;
}
}
View File
@ -52,7 +52,7 @@ class Slf4JLogger extends AbstractInternalLogger {
}
@Override
public void trace(String format, Object[] argArray) {
public void trace(String format, Object... argArray) {
logger.trace(format, argArray);
}
@ -82,7 +82,7 @@ class Slf4JLogger extends AbstractInternalLogger {
}
@Override
public void debug(String format, Object[] argArray) {
public void debug(String format, Object... argArray) {
logger.debug(format, argArray);
}
@ -112,7 +112,7 @@ class Slf4JLogger extends AbstractInternalLogger {
}
@Override
public void info(String format, Object[] argArray) {
public void info(String format, Object... argArray) {
logger.info(format, argArray);
}
@ -137,7 +137,7 @@ class Slf4JLogger extends AbstractInternalLogger {
}
@Override
public void warn(String format, Object[] argArray) {
public void warn(String format, Object... argArray) {
logger.warn(format, argArray);
}
@ -172,7 +172,7 @@ class Slf4JLogger extends AbstractInternalLogger {
}
@Override
public void error(String format, Object[] argArray) {
public void error(String format, Object... argArray) {
logger.error(format, argArray);
}
View File
@ -54,7 +54,7 @@ public class Slf4JLoggerFactory extends InternalLoggerFactory {
if (LoggerFactory.getILoggerFactory() instanceof NOPLoggerFactory) {
throw new NoClassDefFoundError(buf.toString());
} else {
err.print(buf.toString());
err.print(buf);
err.flush();
}
} finally {
View File
@ -42,10 +42,10 @@ public class DefaultAttributeMapTest {
assertSame(one, map.attr(key));
one.setIfAbsent("Whoohoo");
assertSame(one.get(), "Whoohoo");
assertSame("Whoohoo", one.get());
one.setIfAbsent("What");
assertNotSame(one.get(), "What");
assertNotSame("What", one.get());
one.remove();
assertNull(one.get());
@ -62,7 +62,7 @@ public class DefaultAttributeMapTest {
assertEquals(one.get(), Integer.valueOf(3653));
one.setIfAbsent(1);
assertNotSame(one.get(), 1);
assertNotSame(1, one.get());
one.remove();
assertNull(one.get());
View File
@ -40,14 +40,14 @@ public class RecyclerTest {
private static final Recycler<RecyclableObject> RECYCLER = new Recycler<RecyclableObject>() {
@Override
protected RecyclableObject newObject(Handle handle) {
protected RecyclableObject newObject(Handle<RecyclableObject> handle) {
return new RecyclableObject(handle);
}
};
private final Recycler.Handle handle;
private final Recycler.Handle<RecyclableObject> handle;
private RecyclableObject(Recycler.Handle handle) {
private RecyclableObject(Recycler.Handle<RecyclableObject> handle) {
this.handle = handle;
}
View File
@ -14,17 +14,13 @@
*/
package io.netty.util.collection;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import org.junit.Before;
import org.junit.Test;
import java.util.HashSet;
import java.util.Set;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* Tests for {@link IntObjectHashMap}.
@ -34,7 +30,7 @@ public class IntObjectHashMapTest {
private static class Value {
private final String name;
public Value(String name) {
Value(String name) {
this.name = name;
}
@ -42,7 +38,7 @@ public class IntObjectHashMapTest {
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((name == null) ? 0 : name.hashCode());
result = prime * result + (name == null ? 0 : name.hashCode());
return result;
}
Some files were not shown because too many files have changed in this diff.