Remove redundant 'final' modifier from methods in 'final' classes.
commit f78a60df7d
parent 1213dc5ace
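Rationale: a 'final' class in Java cannot be subclassed, so none of its instance methods can ever be overridden; marking them 'final' adds no protection and only clutters the declarations. A minimal sketch of the idea, using a hypothetical class that is not part of this change set:

    // Hypothetical illustration; the name Counter is not from the Netty sources.
    final class Counter {              // no subclass of Counter can ever exist
        private int value;

        // Declaring this method 'final' would be redundant: it can never be
        // overridden, because the enclosing class is already final.
        int increment() {
            return ++value;
        }
    }

The hunks below remove exactly that kind of modifier from methods declared inside final classes; behaviour is unchanged.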
@@ -104,7 +104,7 @@ final class DefaultLocalChannel extends AbstractChannel implements LocalChannel
         return state.get() == ST_CONNECTED;
     }

-    final void setBound() throws ClosedChannelException {
+    void setBound() throws ClosedChannelException {
         if (!state.compareAndSet(ST_OPEN, ST_BOUND)) {
             switch (state.get()) {
             case ST_CLOSED:
@@ -115,7 +115,7 @@ final class DefaultLocalChannel extends AbstractChannel implements LocalChannel
         }
     }

-    final void setConnected() {
+    void setConnected() {
         if (state.get() != ST_CLOSED) {
             state.set(ST_CONNECTED);
         }
@@ -30,7 +30,7 @@ final class SocketReceiveBufferPool
     @SuppressWarnings("unchecked")
     private final SoftReference<ByteBuffer>[] pool = new SoftReference[POOL_SIZE];

-    final ByteBuffer acquire(int size) {
+    ByteBuffer acquire(int size) {
         final SoftReference<ByteBuffer>[] pool = this.pool;
         for (int i = 0; i < POOL_SIZE; i ++) {
             SoftReference<ByteBuffer> ref = pool[i];
@@ -59,7 +59,7 @@ final class SocketReceiveBufferPool
         return buf;
     }

-    final void release(ByteBuffer buffer) {
+    void release(ByteBuffer buffer) {
         final SoftReference<ByteBuffer>[] pool = this.pool;
         for (int i = 0; i < POOL_SIZE; i ++) {
             SoftReference<ByteBuffer> ref = pool[i];
@@ -45,7 +45,7 @@ final class SocketSendBufferPool
         super();
     }

-    final SendBuffer acquire(Object message) {
+    SendBuffer acquire(Object message) {
         if (message instanceof ChannelBuffer) {
             return acquire((ChannelBuffer) message);
         } else if (message instanceof FileRegion) {
@@ -320,27 +320,27 @@ final class SocketSendBufferPool
         }

         @Override
-        public final boolean finished() {
+        public boolean finished() {
             return true;
         }

         @Override
-        public final long writtenBytes() {
+        public long writtenBytes() {
             return 0;
         }

         @Override
-        public final long totalBytes() {
+        public long totalBytes() {
             return 0;
         }

         @Override
-        public final long transferTo(WritableByteChannel ch) throws IOException {
+        public long transferTo(WritableByteChannel ch) throws IOException {
             return 0;
         }

         @Override
-        public final long transferTo(DatagramChannel ch, SocketAddress raddr) throws IOException {
+        public long transferTo(DatagramChannel ch, SocketAddress raddr) throws IOException {
             return 0;
         }

@@ -197,7 +197,7 @@ public final class LocalTimeProtocol
       return defaultInstance;
     }

-    public static final com.google.protobuf.Descriptors.Descriptor
+    public static com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
       return org.jboss.netty.example.localtime.LocalTimeProtocol.internal_static_org_jboss_netty_example_localtime_Location_descriptor;
     }
@@ -226,7 +226,7 @@ public final class LocalTimeProtocol
       continent_ = org.jboss.netty.example.localtime.LocalTimeProtocol.Continent.AFRICA;
     }
     @Override
-    public final boolean isInitialized() {
+    public boolean isInitialized() {
       if (!hasContinent) {
         return false;
       }
@@ -560,7 +560,7 @@ public final class LocalTimeProtocol
       return defaultInstance;
     }

-    public static final com.google.protobuf.Descriptors.Descriptor
+    public static com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
       return org.jboss.netty.example.localtime.LocalTimeProtocol.internal_static_org_jboss_netty_example_localtime_Locations_descriptor;
     }
@@ -586,7 +586,7 @@ public final class LocalTimeProtocol
     private void initFields() {
     }
     @Override
-    public final boolean isInitialized() {
+    public boolean isInitialized() {
       for (org.jboss.netty.example.localtime.LocalTimeProtocol.Location element : getLocationList()) {
         if (!element.isInitialized()) {
           return false;
@@ -917,7 +917,7 @@ public final class LocalTimeProtocol
       return defaultInstance;
     }

-    public static final com.google.protobuf.Descriptors.Descriptor
+    public static com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
       return org.jboss.netty.example.localtime.LocalTimeProtocol.internal_static_org_jboss_netty_example_localtime_LocalTime_descriptor;
     }
@@ -981,7 +981,7 @@ public final class LocalTimeProtocol
       dayOfWeek_ = org.jboss.netty.example.localtime.LocalTimeProtocol.DayOfWeek.SUNDAY;
     }
     @Override
-    public final boolean isInitialized() {
+    public boolean isInitialized() {
       if (!hasYear) {
         return false;
       }
@@ -1487,7 +1487,7 @@ public final class LocalTimeProtocol
       return defaultInstance;
     }

-    public static final com.google.protobuf.Descriptors.Descriptor
+    public static com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
       return org.jboss.netty.example.localtime.LocalTimeProtocol.internal_static_org_jboss_netty_example_localtime_LocalTimes_descriptor;
     }
@@ -1513,7 +1513,7 @@ public final class LocalTimeProtocol
     private void initFields() {
     }
     @Override
-    public final boolean isInitialized() {
+    public boolean isInitialized() {
       for (org.jboss.netty.example.localtime.LocalTimeProtocol.LocalTime element : getLocalTimeList()) {
         if (!element.isInitialized()) {
           return false;

@@ -138,7 +138,7 @@ public final class ConcurrentHashMap<K, V> extends AbstractMap<K, V>
      * @param hash the hash code for the key
      * @return the segment
      */
-    final Segment<K, V> segmentFor(int hash) {
+    Segment<K, V> segmentFor(int hash) {
         return segments[hash >>> segmentShift & segmentMask];
     }

@@ -173,21 +173,21 @@ public final class ConcurrentHashMap<K, V> extends AbstractMap<K, V>
         }

         @SuppressWarnings("unchecked")
-        final K key() {
+        K key() {
             return (K) key;
         }

         @SuppressWarnings("unchecked")
-        final V value() {
+        V value() {
             return (V) value;
         }

-        final void setValue(V value) {
+        void setValue(V value) {
             this.value = value;
         }

         @SuppressWarnings("unchecked")
-        static final <K, V> HashEntry<K, V>[] newArray(int i) {
+        static <K, V> HashEntry<K, V>[] newArray(int i) {
             return new HashEntry[i];
         }
     }
@@ -273,7 +273,7 @@ public final class ConcurrentHashMap<K, V> extends AbstractMap<K, V>
         }

         @SuppressWarnings("unchecked")
-        static final <K, V> Segment<K, V>[] newArray(int i) {
+        static <K, V> Segment<K, V>[] newArray(int i) {
             return new Segment[i];
         }

@@ -138,7 +138,7 @@ public final class ConcurrentIdentityHashMap<K, V> extends AbstractMap<K, V>
      * @param hash the hash code for the key
      * @return the segment
      */
-    final Segment<K, V> segmentFor(int hash) {
+    Segment<K, V> segmentFor(int hash) {
         return segments[hash >>> segmentShift & segmentMask];
     }

@@ -173,21 +173,21 @@ public final class ConcurrentIdentityHashMap<K, V> extends AbstractMap<K, V>
         }

         @SuppressWarnings("unchecked")
-        final K key() {
+        K key() {
             return (K) key;
         }

         @SuppressWarnings("unchecked")
-        final V value() {
+        V value() {
             return (V) value;
         }

-        final void setValue(V value) {
+        void setValue(V value) {
             this.value = value;
         }

         @SuppressWarnings("unchecked")
-        static final <K, V> HashEntry<K, V>[] newArray(int i) {
+        static <K, V> HashEntry<K, V>[] newArray(int i) {
             return new HashEntry[i];
         }
     }
@@ -273,7 +273,7 @@ public final class ConcurrentIdentityHashMap<K, V> extends AbstractMap<K, V>
         }

         @SuppressWarnings("unchecked")
-        static final <K, V> Segment<K, V>[] newArray(int i) {
+        static <K, V> Segment<K, V>[] newArray(int i) {
             return new Segment[i];
         }

@@ -145,7 +145,7 @@ public final class ConcurrentIdentityWeakKeyHashMap<K, V> extends AbstractMap<K,
      * @param hash the hash code for the key
      * @return the segment
      */
-    final Segment<K, V> segmentFor(int hash) {
+    Segment<K, V> segmentFor(int hash) {
         return segments[hash >>> segmentShift & segmentMask];
     }

@@ -167,11 +167,11 @@ public final class ConcurrentIdentityWeakKeyHashMap<K, V> extends AbstractMap<K,
             this.hash = hash;
         }

-        public final int keyHash() {
+        public int keyHash() {
             return hash;
         }

-        public final Object keyRef() {
+        public Object keyRef() {
             return this;
         }
     }
@@ -204,16 +204,16 @@ public final class ConcurrentIdentityWeakKeyHashMap<K, V> extends AbstractMap<K,
         }

         @SuppressWarnings("unchecked")
-        final K key() {
+        K key() {
             return ((WeakReference<K>) keyRef).get();
         }

-        final V value() {
+        V value() {
             return dereferenceValue(valueRef);
         }

         @SuppressWarnings("unchecked")
-        final V dereferenceValue(Object value) {
+        V dereferenceValue(Object value) {
             if (value instanceof WeakKeyReference) {
                 return ((Reference<V>) value).get();
             }
@@ -221,12 +221,12 @@ public final class ConcurrentIdentityWeakKeyHashMap<K, V> extends AbstractMap<K,
             return (V) value;
         }

-        final void setValue(V value) {
+        void setValue(V value) {
             this.valueRef = value;
         }

         @SuppressWarnings("unchecked")
-        static final <K, V> HashEntry<K, V>[] newArray(int i) {
+        static <K, V> HashEntry<K, V>[] newArray(int i) {
             return new HashEntry[i];
         }
     }
@@ -318,7 +318,7 @@ public final class ConcurrentIdentityWeakKeyHashMap<K, V> extends AbstractMap<K,
         }

         @SuppressWarnings("unchecked")
-        static final <K, V> Segment<K, V>[] newArray(int i) {
+        static <K, V> Segment<K, V>[] newArray(int i) {
             return new Segment[i];
         }

@@ -618,7 +618,7 @@ public final class ConcurrentIdentityWeakKeyHashMap<K, V> extends AbstractMap<K,
         }

         @SuppressWarnings("rawtypes")
-        final void removeStale() {
+        void removeStale() {
             WeakKeyReference ref;
             while ((ref = (WeakKeyReference) refQueue.poll()) != null) {
                 remove(ref.keyRef(), ref.keyHash(), null, true);

@@ -145,7 +145,7 @@ public final class ConcurrentWeakKeyHashMap<K, V> extends AbstractMap<K, V> impl
      * @param hash the hash code for the key
      * @return the segment
      */
-    final Segment<K, V> segmentFor(int hash) {
+    Segment<K, V> segmentFor(int hash) {
         return segments[hash >>> segmentShift & segmentMask];
     }

@@ -167,11 +167,11 @@ public final class ConcurrentWeakKeyHashMap<K, V> extends AbstractMap<K, V> impl
             this.hash = hash;
         }

-        public final int keyHash() {
+        public int keyHash() {
             return hash;
         }

-        public final Object keyRef() {
+        public Object keyRef() {
             return this;
         }
     }
@@ -204,16 +204,16 @@ public final class ConcurrentWeakKeyHashMap<K, V> extends AbstractMap<K, V> impl
         }

         @SuppressWarnings("unchecked")
-        final K key() {
+        K key() {
             return ((WeakReference<K>) keyRef).get();
         }

-        final V value() {
+        V value() {
             return dereferenceValue(valueRef);
         }

         @SuppressWarnings("unchecked")
-        final V dereferenceValue(Object value) {
+        V dereferenceValue(Object value) {
             if (value instanceof WeakKeyReference) {
                 return ((Reference<V>) value).get();
             }
@@ -221,12 +221,12 @@ public final class ConcurrentWeakKeyHashMap<K, V> extends AbstractMap<K, V> impl
             return (V) value;
         }

-        final void setValue(V value) {
+        void setValue(V value) {
             this.valueRef = value;
         }

         @SuppressWarnings("unchecked")
-        static final <K, V> HashEntry<K, V>[] newArray(int i) {
+        static <K, V> HashEntry<K, V>[] newArray(int i) {
             return new HashEntry[i];
         }
     }
@@ -318,7 +318,7 @@ public final class ConcurrentWeakKeyHashMap<K, V> extends AbstractMap<K, V> impl
         }

         @SuppressWarnings("unchecked")
-        static final <K, V> Segment<K, V>[] newArray(int i) {
+        static <K, V> Segment<K, V>[] newArray(int i) {
             return new Segment[i];
         }

@@ -618,7 +618,7 @@ public final class ConcurrentWeakKeyHashMap<K, V> extends AbstractMap<K, V> impl
         }

         @SuppressWarnings("rawtypes")
-        final void removeStale() {
+        void removeStale() {
             WeakKeyReference ref;
             while ((ref = (WeakKeyReference) refQueue.poll()) != null) {
                 remove(ref.keyRef(), ref.keyHash(), null, true);

@@ -437,7 +437,7 @@ public class LinkedTransferQueue<E> extends AbstractQueue<E>
         volatile Thread waiter; // null until waiting

         // CAS methods for fields
-        final boolean casNext(Node cmp, Node val) {
+        boolean casNext(Node cmp, Node val) {
             if (AtomicFieldUpdaterUtil.isAvailable()) {
                 return nextUpdater.compareAndSet(this, cmp, val);
             } else {
@@ -452,7 +452,7 @@ public class LinkedTransferQueue<E> extends AbstractQueue<E>
             }
         }

-        final boolean casItem(Object cmp, Object val) {
+        boolean casItem(Object cmp, Object val) {
             // assert cmp == null || cmp.getClass() != Node.class;
             if (AtomicFieldUpdaterUtil.isAvailable()) {
                 return itemUpdater.compareAndSet(this, cmp, val);
@@ -481,7 +481,7 @@ public class LinkedTransferQueue<E> extends AbstractQueue<E>
         * Links node to itself to avoid garbage retention. Called
         * only after CASing head field, so uses relaxed write.
         */
-        final void forgetNext() {
+        void forgetNext() {
             this.next = this;
         }

@@ -494,7 +494,7 @@ public class LinkedTransferQueue<E> extends AbstractQueue<E>
         * follows either CAS or return from park (if ever parked;
         * else we don't care).
         */
-        final void forgetContents() {
+        void forgetContents() {
             this.item = this;
             this.waiter = null;
         }
@@ -503,7 +503,7 @@ public class LinkedTransferQueue<E> extends AbstractQueue<E>
         * Returns true if this node has been matched, including the
         * case of artificial matches due to cancellation.
         */
-        final boolean isMatched() {
+        boolean isMatched() {
             Object x = item;
             return x == this || x == null == isData;
         }
@@ -511,7 +511,7 @@ public class LinkedTransferQueue<E> extends AbstractQueue<E>
        /**
         * Returns true if this is an unmatched request node.
         */
-        final boolean isUnmatchedRequest() {
+        boolean isUnmatchedRequest() {
             return !isData && item == null;
         }

@@ -520,7 +520,7 @@ public class LinkedTransferQueue<E> extends AbstractQueue<E>
         * appended to this node because this node is unmatched and
         * has opposite data mode.
         */
-        final boolean cannotPrecede(boolean haveData) {
+        boolean cannotPrecede(boolean haveData) {
             boolean d = isData;
             Object x;
             return d != haveData && (x = item) != this && x != null == d;
@@ -529,7 +529,7 @@ public class LinkedTransferQueue<E> extends AbstractQueue<E>
        /**
         * Tries to artificially match a data node -- used by remove.
         */
-        final boolean tryMatchData() {
+        boolean tryMatchData() {
             // assert isData;
             Object x = item;
             if (x != null && x != this && casItem(x, null)) {
@@ -895,12 +895,12 @@ public class LinkedTransferQueue<E> extends AbstractQueue<E>
         }

         @Override
-        public final boolean hasNext() {
+        public boolean hasNext() {
             return nextNode != null;
         }

         @Override
-        public final E next() {
+        public E next() {
             Node p = nextNode;
             if (p == null) {
                 throw new NoSuchElementException();
@@ -911,7 +911,7 @@ public class LinkedTransferQueue<E> extends AbstractQueue<E>
         }

         @Override
-        public final void remove() {
+        public void remove() {
             Node p = lastRet;
             if (p == null) {
                 throw new IllegalStateException();

@@ -68,7 +68,7 @@ public final class NonReentrantLock extends AbstractQueuedSynchronizer
     }

     @Override
-    protected final boolean tryAcquire(int acquires) {
+    protected boolean tryAcquire(int acquires) {
         if (compareAndSetState(0, 1)) {
             owner = Thread.currentThread();
             return true;
@@ -77,7 +77,7 @@ public final class NonReentrantLock extends AbstractQueuedSynchronizer
     }

     @Override
-    protected final boolean tryRelease(int releases) {
+    protected boolean tryRelease(int releases) {
         if (Thread.currentThread() != owner) {
             throw new IllegalMonitorStateException();
         }
@@ -87,7 +87,7 @@ public final class NonReentrantLock extends AbstractQueuedSynchronizer
     }

     @Override
-    protected final boolean isHeldExclusively() {
+    protected boolean isHeldExclusively() {
         return getState() != 0 && owner == Thread.currentThread();
     }
 }