LocalChannel write event sequencing issue
Motivation:
https://github.com/netty/netty/pull/4143 addressed a few ordering issues, but an ordering issue still remained: if the Promise of a write completes and a listener of that promise writes to a peer channel, the ordering of events depended on how two potentially different executors happened to schedule their tasks. It should instead be coordinated so that the first write is read first.

Modifications:
- Keep track of the finishPeerRead task run on the executor when necessary, and ensure it completes before the current channel's read occurs.

Result:
Ordering of events for echo-type situations is preserved.
parent 544ee95e58
commit 3d4b53fce5
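To make the coordination described above concrete, here is a minimal, Netty-free sketch of the pattern the diff below applies. All names in it (PeerReadCoordinator, finishPeerRead, readAfterPendingPeerRead) are hypothetical stand-ins; in the actual change the same roles are played by LocalChannel's writeInProgress flag, the finishReadFuture field, and the runFinishPeerReadTask/finishPeerRead0 methods.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicReference;

// Hypothetical sketch only: models the ordering fix outside of Netty.
final class PeerReadCoordinator {
    // Stands in for the peer's event loop.
    private final ExecutorService peerEventLoop = Executors.newSingleThreadExecutor();
    // Mirrors LocalChannel.finishReadFuture: the read task scheduled while the
    // peer still had a write in progress.
    private final AtomicReference<Future<?>> finishReadFuture = new AtomicReference<Future<?>>();
    // Mirrors LocalChannel.writeInProgress.
    volatile boolean writeInProgress;

    // A write to the peer has been flushed; the peer should now read it.
    void finishPeerRead(Runnable readTask) {
        if (writeInProgress) {
            // The peer is still writing: remember the scheduled read so a
            // later read can coordinate with it instead of overtaking it.
            finishReadFuture.set(peerEventLoop.submit(readTask));
        } else {
            peerEventLoop.execute(readTask);
        }
    }

    // Called before this side performs its own read.
    void readAfterPendingPeerRead(Runnable readTask) {
        Future<?> pending = finishReadFuture.get();
        if (pending != null && !pending.isDone()) {
            // The earlier read has not run yet: re-schedule behind it so the
            // first write is read first.
            finishPeerRead(readTask);
            return;
        }
        if (pending != null) {
            // Lazily clear the tracked future once it has completed.
            finishReadFuture.compareAndSet(pending, null);
        }
        readTask.run();
    }
}

The key point, mirrored in finishPeerRead0 in the diff below, is that a read never overtakes a previously scheduled read that was queued behind an in-progress write; it re-schedules itself behind that task instead.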
@@ -27,6 +27,7 @@ import io.netty.channel.DefaultChannelConfig;
import io.netty.channel.EventLoop;
import io.netty.channel.SingleThreadEventLoop;
import io.netty.util.ReferenceCountUtil;
import io.netty.util.concurrent.Future;
import io.netty.util.internal.InternalThreadLocalMap;
import io.netty.util.internal.OneTimeTask;
import io.netty.util.internal.PlatformDependent;
@@ -37,6 +38,7 @@ import java.nio.channels.ClosedChannelException;
import java.nio.channels.ConnectionPendingException;
import java.nio.channels.NotYetConnectedException;
import java.util.Queue;
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;

/**
 * A {@link Channel} for the local transport.
@@ -45,8 +47,9 @@ public class LocalChannel extends AbstractChannel {

    private enum State { OPEN, BOUND, CONNECTED, CLOSED }

    @SuppressWarnings({ "rawtypes" })
    private static final AtomicReferenceFieldUpdater<LocalChannel, Future> FINISH_READ_FUTURE_UPDATER;
    private static final ChannelMetadata METADATA = new ChannelMetadata(false);

    private static final int MAX_READER_STACK_DEPTH = 8;

    private final ChannelConfig config = new DefaultChannelConfig(this);
@@ -81,6 +84,18 @@ public class LocalChannel extends AbstractChannel {
    private volatile boolean readInProgress;
    private volatile boolean registerInProgress;
    private volatile boolean writeInProgress;
    private volatile Future<?> finishReadFuture;

    static {
        @SuppressWarnings({ "rawtypes" })
        AtomicReferenceFieldUpdater<LocalChannel, Future> finishReadFutureUpdater =
                PlatformDependent.newAtomicReferenceFieldUpdater(LocalChannel.class, "finishReadFuture");
        if (finishReadFutureUpdater == null) {
            finishReadFutureUpdater =
                    AtomicReferenceFieldUpdater.newUpdater(LocalChannel.class, Future.class, "finishReadFuture");
        }
        FINISH_READ_FUTURE_UPDATER = finishReadFutureUpdater;
    }

    public LocalChannel() {
        super(null);
@@ -328,16 +343,37 @@
        if (peer.eventLoop() == eventLoop() && !peer.writeInProgress) {
            finishPeerRead0(peer);
        } else {
            peer.eventLoop().execute(new OneTimeTask() {
                @Override
                public void run() {
                    finishPeerRead0(peer);
                }
            });
            runFinishPeerReadTask(peer);
        }
    }

    private static void finishPeerRead0(LocalChannel peer) {
    private void runFinishPeerReadTask(final LocalChannel peer) {
        // If the peer is writing, we must wait until after reads are completed for that peer before we can read. So
        // we keep track of the task, and coordinate later that our read can't happen until the peer is done.
        final Runnable finishPeerReadTask = new OneTimeTask() {
            @Override
            public void run() {
                finishPeerRead0(peer);
            }
        };
        if (peer.writeInProgress) {
            peer.finishReadFuture = peer.eventLoop().submit(finishPeerReadTask);
        } else {
            peer.eventLoop().execute(finishPeerReadTask);
        }
    }

    private void finishPeerRead0(LocalChannel peer) {
        Future<?> peerFinishReadFuture = peer.finishReadFuture;
        if (peerFinishReadFuture != null) {
            if (!peerFinishReadFuture.isDone()) {
                runFinishPeerReadTask(peer);
                return;
            } else {
                // Lazy unset to make sure we don't prematurely unset it while scheduling a new task.
                FINISH_READ_FUTURE_UPDATER.compareAndSet(peer, peerFinishReadFuture, null);
            }
        }
        ChannelPipeline peerPipeline = peer.pipeline();
        if (peer.readInProgress) {
            peer.readInProgress = false;
@@ -59,7 +59,7 @@ public class LocalChannelTest {

    private static final InternalLogger logger = InternalLoggerFactory.getInstance(LocalChannelTest.class);

    private static final String LOCAL_ADDR_ID = "test.id";
    private static final LocalAddress TEST_ADDRESS = new LocalAddress("test.id");

    private static EventLoopGroup group1;
    private static EventLoopGroup group2;
@@ -85,7 +85,6 @@ public class LocalChannelTest {
    @Test
    public void testLocalAddressReuse() throws Exception {
        for (int i = 0; i < 2; i ++) {
            LocalAddress addr = new LocalAddress(LOCAL_ADDR_ID);
            Bootstrap cb = new Bootstrap();
            ServerBootstrap sb = new ServerBootstrap();

@@ -106,11 +105,11 @@
            Channel cc = null;
            try {
                // Start server
                sc = sb.bind(addr).sync().channel();
                sc = sb.bind(TEST_ADDRESS).sync().channel();

                final CountDownLatch latch = new CountDownLatch(1);
                // Connect to the server
                cc = cb.connect(addr).sync().channel();
                cc = cb.connect(sc.localAddress()).sync().channel();
                final Channel ccCpy = cc;
                cc.eventLoop().execute(new Runnable() {
                    @Override
@@ -129,7 +128,7 @@

                assertNull(String.format(
                        "Expected null, got channel '%s' for local address '%s'",
                        LocalChannelRegistry.get(addr), addr), LocalChannelRegistry.get(addr));
                        LocalChannelRegistry.get(TEST_ADDRESS), TEST_ADDRESS), LocalChannelRegistry.get(TEST_ADDRESS));
            } finally {
                closeChannel(cc);
                closeChannel(sc);
@@ -139,7 +138,6 @@ public class LocalChannelTest {

    @Test
    public void testWriteFailsFastOnClosedChannel() throws Exception {
        LocalAddress addr = new LocalAddress(LOCAL_ADDR_ID);
        Bootstrap cb = new Bootstrap();
        ServerBootstrap sb = new ServerBootstrap();

@@ -160,10 +158,10 @@
        Channel cc = null;
        try {
            // Start server
            sc = sb.bind(addr).sync().channel();
            sc = sb.bind(TEST_ADDRESS).sync().channel();

            // Connect to the server
            cc = cb.connect(addr).sync().channel();
            cc = cb.connect(sc.localAddress()).sync().channel();

            // Close the channel and write something.
            cc.close().sync();
@@ -189,7 +187,6 @@ public class LocalChannelTest {

    @Test
    public void testServerCloseChannelSameEventLoop() throws Exception {
        LocalAddress addr = new LocalAddress(LOCAL_ADDR_ID);
        final CountDownLatch latch = new CountDownLatch(1);
        ServerBootstrap sb = new ServerBootstrap()
                .group(group2)
@@ -204,7 +201,7 @@
        Channel sc = null;
        Channel cc = null;
        try {
            sc = sb.bind(addr).sync().channel();
            sc = sb.bind(TEST_ADDRESS).sync().channel();

            Bootstrap b = new Bootstrap()
                    .group(group2)
@@ -215,7 +212,7 @@
                        // discard
                    }
                });
            cc = b.connect(addr).sync().channel();
            cc = b.connect(sc.localAddress()).sync().channel();
            cc.writeAndFlush(new Object());
            assertTrue(latch.await(5, SECONDS));
        } finally {
@@ -226,7 +223,6 @@ public class LocalChannelTest {

    @Test
    public void localChannelRaceCondition() throws Exception {
        final LocalAddress address = new LocalAddress(LOCAL_ADDR_ID);
        final CountDownLatch closeLatch = new CountDownLatch(1);
        final EventLoopGroup clientGroup = new DefaultEventLoopGroup(1) {
            @Override
@@ -271,7 +267,7 @@
                        closeLatch.countDown();
                    }
                }).
                bind(address).
                bind(TEST_ADDRESS).
                sync().channel();
            Bootstrap bootstrap = new Bootstrap();
            bootstrap.group(clientGroup).
@@ -282,7 +278,7 @@
                    /* Do nothing */
                }
            });
            ChannelFuture future = bootstrap.connect(address);
            ChannelFuture future = bootstrap.connect(sc.localAddress());
            assertTrue("Connection should finish, not time out", future.await(200));
            cc = future.channel();
        } finally {
@@ -294,7 +290,6 @@ public class LocalChannelTest {

    @Test
    public void testReRegister() {
        LocalAddress addr = new LocalAddress(LOCAL_ADDR_ID);
        Bootstrap cb = new Bootstrap();
        ServerBootstrap sb = new ServerBootstrap();

@@ -315,10 +310,10 @@
        Channel cc = null;
        try {
            // Start server
            sc = sb.bind(addr).syncUninterruptibly().channel();
            sc = sb.bind(TEST_ADDRESS).syncUninterruptibly().channel();

            // Connect to the server
            cc = cb.connect(addr).syncUninterruptibly().channel();
            cc = cb.connect(sc.localAddress()).syncUninterruptibly().channel();

            cc.deregister().syncUninterruptibly();
        } finally {
@@ -329,7 +324,6 @@ public class LocalChannelTest {

    @Test
    public void testCloseInWritePromiseCompletePreservesOrder() throws InterruptedException {
        LocalAddress addr = new LocalAddress(LOCAL_ADDR_ID);
        Bootstrap cb = new Bootstrap();
        ServerBootstrap sb = new ServerBootstrap();
        final CountDownLatch messageLatch = new CountDownLatch(2);
@@ -363,10 +357,10 @@
        Channel cc = null;
        try {
            // Start server
            sc = sb.bind(addr).syncUninterruptibly().channel();
            sc = sb.bind(TEST_ADDRESS).syncUninterruptibly().channel();

            // Connect to the server
            cc = cb.connect(addr).syncUninterruptibly().channel();
            cc = cb.connect(sc.localAddress()).syncUninterruptibly().channel();

            final Channel ccCpy = cc;
            // Make sure a write operation is executed in the eventloop
@@ -397,7 +391,6 @@ public class LocalChannelTest {

    @Test
    public void testWriteInWritePromiseCompletePreservesOrder() throws InterruptedException {
        LocalAddress addr = new LocalAddress(LOCAL_ADDR_ID);
        Bootstrap cb = new Bootstrap();
        ServerBootstrap sb = new ServerBootstrap();
        final CountDownLatch messageLatch = new CountDownLatch(2);
@@ -428,10 +421,10 @@
        Channel cc = null;
        try {
            // Start server
            sc = sb.bind(addr).syncUninterruptibly().channel();
            sc = sb.bind(TEST_ADDRESS).syncUninterruptibly().channel();

            // Connect to the server
            cc = cb.connect(addr).syncUninterruptibly().channel();
            cc = cb.connect(sc.localAddress()).syncUninterruptibly().channel();

            final Channel ccCpy = cc;
            // Make sure a write operation is executed in the eventloop
@@ -462,7 +455,6 @@ public class LocalChannelTest {

    @Test
    public void testPeerWriteInWritePromiseCompleteDifferentEventLoopPreservesOrder() throws InterruptedException {
        LocalAddress addr = new LocalAddress(LOCAL_ADDR_ID);
        Bootstrap cb = new Bootstrap();
        ServerBootstrap sb = new ServerBootstrap();
        final CountDownLatch messageLatch = new CountDownLatch(2);
@@ -510,10 +502,10 @@
        Channel cc = null;
        try {
            // Start server
            sc = sb.bind(addr).syncUninterruptibly().channel();
            sc = sb.bind(TEST_ADDRESS).syncUninterruptibly().channel();

            // Connect to the server
            cc = cb.connect(addr).syncUninterruptibly().channel();
            cc = cb.connect(sc.localAddress()).syncUninterruptibly().channel();
            assertTrue(serverChannelLatch.await(5, SECONDS));

            final Channel ccCpy = cc;
@@ -544,7 +536,6 @@ public class LocalChannelTest {

    @Test
    public void testPeerWriteInWritePromiseCompleteSameEventLoopPreservesOrder() throws InterruptedException {
        LocalAddress addr = new LocalAddress(LOCAL_ADDR_ID);
        Bootstrap cb = new Bootstrap();
        ServerBootstrap sb = new ServerBootstrap();
        final CountDownLatch messageLatch = new CountDownLatch(2);
@@ -593,10 +584,10 @@
        Channel cc = null;
        try {
            // Start server
            sc = sb.bind(addr).syncUninterruptibly().channel();
            sc = sb.bind(TEST_ADDRESS).syncUninterruptibly().channel();

            // Connect to the server
            cc = cb.connect(addr).syncUninterruptibly().channel();
            cc = cb.connect(sc.localAddress()).syncUninterruptibly().channel();
            assertTrue(serverChannelLatch.await(5, SECONDS));

            final Channel ccCpy = cc;
@@ -630,7 +621,6 @@ public class LocalChannelTest {

    @Test
    public void testClosePeerInWritePromiseCompleteSameEventLoopPreservesOrder() throws InterruptedException {
        LocalAddress addr = new LocalAddress(LOCAL_ADDR_ID);
        Bootstrap cb = new Bootstrap();
        ServerBootstrap sb = new ServerBootstrap();
        final CountDownLatch messageLatch = new CountDownLatch(2);
@@ -673,10 +663,10 @@
        Channel cc = null;
        try {
            // Start server
            sc = sb.bind(addr).syncUninterruptibly().channel();
            sc = sb.bind(TEST_ADDRESS).syncUninterruptibly().channel();

            // Connect to the server
            cc = cb.connect(addr).syncUninterruptibly().channel();
            cc = cb.connect(sc.localAddress()).syncUninterruptibly().channel();

            assertTrue(serverChannelLatch.await(5, SECONDS));