IOCP: pass tests under windows
GitOrigin-RevId: bbb13f722c911609e7cf120e7a06e02eb13c616f
parent 9d1a1a1155
commit 180de003a6
@@ -56,7 +56,7 @@ void ConcurrentScheduler::init(int32 threads_n) {
   }
 
 #if TD_PORT_WINDOWS
-  iocp_ = std::make_unique<IOCP>();
+  iocp_ = std::make_unique<detail::IOCP>();
   iocp_->init();
 #endif
 
@@ -90,7 +90,10 @@ void ConcurrentScheduler::start() {
   }
 #endif
 #if TD_PORT_WINDOWS
-  iocp_thread_ = td::thread([&iocp_] { iocp_->loop(); });
+  iocp_thread_ = td::thread([this] {
+    auto guard = this->get_send_guard();
+    this->iocp_->loop();
+  });
 #endif
 
   state_ = State::Run;
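Note: the new iocp_thread_ lambda captures this and holds a send guard for the entire IOCP loop, so the completion thread stays tied to the scheduler's lifetime instead of referencing a member through a dangling capture. A minimal standalone sketch of that pattern, using generic names rather than TDLib's real Scheduler/IOCP API:

    #include <atomic>
    #include <chrono>
    #include <iostream>
    #include <thread>

    // Stand-in for ConcurrentScheduler: the worker thread captures `this` and
    // keeps an RAII guard alive for the whole duration of its loop.
    class CompletionPump {
     public:
      void start() {
        thread_ = std::thread([this] {
          auto guard = this->get_send_guard();  // held until the loop returns
          this->loop();
        });
      }

      void stop() {
        stop_.store(true);
        if (thread_.joinable()) {
          thread_.join();
        }
      }

      int active_guards() const {
        return guards_.load();
      }

     private:
      struct Guard {
        explicit Guard(std::atomic<int> *counter) : counter_(counter) {
          counter_->fetch_add(1);
        }
        Guard(Guard &&other) noexcept : counter_(other.counter_) {
          other.counter_ = nullptr;
        }
        Guard(const Guard &) = delete;
        Guard &operator=(const Guard &) = delete;
        ~Guard() {
          if (counter_ != nullptr) {
            counter_->fetch_sub(1);
          }
        }
        std::atomic<int> *counter_;
      };

      Guard get_send_guard() {
        return Guard(&guards_);
      }

      void loop() {
        while (!stop_.load()) {
          // a real implementation would block on completion packets here
          std::this_thread::sleep_for(std::chrono::milliseconds(1));
        }
      }

      std::thread thread_;
      std::atomic<bool> stop_{false};
      std::atomic<int> guards_{0};
    };

    int main() {
      CompletionPump pump;
      pump.start();
      pump.stop();
      std::cout << "guards still held: " << pump.active_guards() << "\n";  // prints 0
    }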
@@ -41,7 +41,7 @@ class ConcurrentScheduler : private Scheduler::Callback {
   }
 
   SchedulerGuard get_send_guard() {
-    return schedulers_[0]->get_const_guard();
+    return schedulers_.back()->get_const_guard();
   }
 
   void test_one_thread_run();
@@ -87,7 +87,7 @@ class ConcurrentScheduler : private Scheduler::Callback {
   std::vector<thread> threads_;
 #endif
 #if TD_PORT_WINDOWS
-  std::unique_ptr<IOCP> iocp_;
+  std::unique_ptr<detail::IOCP> iocp_;
   td::thread iocp_thread_;
 #endif
 
@@ -56,9 +56,11 @@ void Scheduler::ServiceActor::start_up() {
   if (!inbound_) {
     return;
   }
+#if !TD_PORT_WINDOWS
   auto &fd = inbound_->reader_get_event_fd();
   ::td::subscribe(fd.get_poll_info().extract_pollable_fd(this), PollFlags::Read());
   subscribed_ = true;
+#endif
   yield();
 #endif
 }
@@ -440,6 +442,7 @@ void Scheduler::run_poll(double timeout) {
 #if TD_PORT_WINDOWS
   CHECK(inbound_queue_);
   inbound_queue_->reader_get_event_fd().wait(timeout_ms);
+  service_actor_.notify();
 #elif TD_PORT_POSIX
   poll_.run(timeout_ms);
 #endif
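Note: on the Windows branch, run_poll now blocks on the inbound queue's event and then calls service_actor_.notify() so queued events actually get drained, since nothing is subscribed to a poll there. A rough standalone sketch of that wait-then-drain shape, with a condition variable standing in for the event fd (illustrative names only, not TDLib's API):

    #include <chrono>
    #include <condition_variable>
    #include <deque>
    #include <iostream>
    #include <mutex>

    struct Event {
      int id;
    };

    std::mutex queue_mutex;
    std::condition_variable queue_event;  // stand-in for reader_get_event_fd()
    std::deque<Event> inbound_queue;

    // Block until work arrives (or the timeout expires), then drain everything
    // that is queued, mirroring the wait(timeout_ms) plus notify() sequence above.
    void run_poll(std::chrono::milliseconds timeout) {
      std::unique_lock<std::mutex> lock(queue_mutex);
      queue_event.wait_for(lock, timeout, [] { return !inbound_queue.empty(); });
      while (!inbound_queue.empty()) {
        std::cout << "handle event " << inbound_queue.front().id << "\n";
        inbound_queue.pop_front();
      }
    }

    int main() {
      {
        std::lock_guard<std::mutex> lock(queue_mutex);
        inbound_queue.push_back({1});
      }
      queue_event.notify_one();
      run_poll(std::chrono::milliseconds(10));
    }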
@@ -36,7 +36,7 @@ TEST(Actors, SendLater) {
   SET_VERBOSITY_LEVEL(VERBOSITY_NAME(ERROR));
   sb.clear();
   Scheduler scheduler;
-  scheduler.init();
+  scheduler.init(0, {std::make_shared<MpscPollableQueue<EventFull>>()}, nullptr);
 
   auto guard = scheduler.get_guard();
   class Worker : public Actor {
@@ -93,7 +93,7 @@ class XReceiver final : public Actor {
 TEST(Actors, simple_pass_event_arguments) {
   SET_VERBOSITY_LEVEL(VERBOSITY_NAME(ERROR));
   Scheduler scheduler;
-  scheduler.init();
+  scheduler.init(0, {std::make_shared<MpscPollableQueue<EventFull>>()}, nullptr);
 
   auto guard = scheduler.get_guard();
   auto id = create_actor<XReceiver>("XR").release();
@@ -200,7 +200,7 @@ class PrintChar final : public Actor {
 TEST(Actors, simple_hand_yield) {
   SET_VERBOSITY_LEVEL(VERBOSITY_NAME(ERROR));
   Scheduler scheduler;
-  scheduler.init();
+  scheduler.init(0, {std::make_shared<MpscPollableQueue<EventFull>>()}, nullptr);
   sb.clear();
   int cnt = 1000;
   {
@@ -354,7 +354,7 @@ class MasterActor : public MsgActor {
 TEST(Actors, call_after_destruct) {
   SET_VERBOSITY_LEVEL(VERBOSITY_NAME(ERROR));
   Scheduler scheduler;
-  scheduler.init();
+  scheduler.init(0, {std::make_shared<MpscPollableQueue<EventFull>>()}, nullptr);
   {
     auto guard = scheduler.get_guard();
     create_actor<MasterActor>("Master").release();
@@ -313,6 +313,8 @@ Status Binlog::destroy(Slice path) {
 }
 
 void Binlog::do_event(BinlogEvent &&event) {
+  auto event_size = event.raw_event_.size();
+
   if (state_ == State::Run || state_ == State::Reindex) {
     VLOG(binlog) << "Write binlog event: " << format::cond(state_ == State::Reindex, "[reindex] ");
     auto validate_status = event.validate();
@@ -389,7 +391,7 @@ void Binlog::do_event(BinlogEvent &&event) {
   }
 
   fd_events_++;
-  fd_size_ += event.raw_event_.size();
+  fd_size_ += event_size;
 }
 
 void Binlog::sync() {
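Note: do_event now captures the event size into event_size up front, and the fd_size_ accounting uses that cached value instead of re-reading event.raw_event_ after the event may have been consumed. A minimal sketch of the same defensive pattern, with a plain std::string standing in for the raw event buffer:

    #include <cstddef>
    #include <iostream>
    #include <string>
    #include <utility>
    #include <vector>

    std::vector<std::string> written_events;
    std::size_t total_size = 0;

    // Capture the size up front; after std::move the buffer must not be read again.
    void do_event(std::string &&event) {
      auto event_size = event.size();
      written_events.push_back(std::move(event));  // the buffer is consumed here
      total_size += event_size;                    // safe: uses the cached size
    }

    int main() {
      do_event("binlog event payload");
      std::cout << "events: " << written_events.size() << ", bytes: " << total_size << "\n";
    }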
@ -121,7 +121,13 @@ TsCerr::~TsCerr() {
|
|||||||
}
|
}
|
||||||
namespace {
|
namespace {
|
||||||
FileFd &Stderr() {
|
FileFd &Stderr() {
|
||||||
static FileFd res = FileFd::from_native_fd(NativeFd(2, true)).move_as_ok();
|
static FileFd res = FileFd::from_native_fd(NativeFd(
|
||||||
|
#if TD_PORT_POSIX
|
||||||
|
2
|
||||||
|
#elif TD_PORT_WINDOWS
|
||||||
|
GetStdHandle(STD_ERROR_HANDLE)
|
||||||
|
#endif
|
||||||
|
, true)).move_as_ok();
|
||||||
return res;
|
return res;
|
||||||
}
|
}
|
||||||
} // namespace
|
} // namespace
|
||||||
|
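Note: Stderr() now selects the native handle per platform: raw descriptor 2 on POSIX and GetStdHandle(STD_ERROR_HANDLE) on Windows. A standalone sketch of the same switch using the standard predefined macros instead of TDLib's TD_PORT_* ones:

    #include <cstdio>
    #ifdef _WIN32
    #include <windows.h>
    #else
    #include <unistd.h>
    #endif

    // Write directly to the native stderr handle on either platform.
    int main() {
      const char message[] = "written to native stderr\n";
    #ifdef _WIN32
      HANDLE err = GetStdHandle(STD_ERROR_HANDLE);
      DWORD written = 0;
      WriteFile(err, message, static_cast<DWORD>(sizeof(message) - 1), &written, nullptr);
    #else
      // On POSIX the native stderr handle is simply file descriptor 2.
      write(2, message, sizeof(message) - 1);
    #endif
      return 0;
    }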
@@ -166,7 +166,7 @@ class Mtproto_ping : public td::Test {
 };
 Mtproto_ping mtproto_ping("Mtproto_ping");
 
-class Context : public AuthKeyHandshakeContext {
+class HandshakeContext : public AuthKeyHandshakeContext {
  public:
  DhCallback *get_dh_callback() override {
    return nullptr;
@@ -226,7 +226,7 @@ class HandshakeTestActor : public Actor {
 
     wait_for_result_ = true;
     create_actor<HandshakeActor>(
-        "HandshakeActor", std::move(handshake_), std::move(raw_connection_), std::make_unique<Context>(), 10.0,
+        "HandshakeActor", std::move(handshake_), std::move(raw_connection_), std::make_unique<HandshakeContext>(), 10.0,
         PromiseCreator::lambda([self = actor_id(this)](Result<std::unique_ptr<RawConnection>> raw_connection) {
           send_closure(self, &HandshakeTestActor::got_connection, std::move(raw_connection), 1);
         }),