Improve log message spelling.

levlam 2023-02-26 13:41:49 +03:00
parent 47724bd4d8
commit ffcc1719ef
8 changed files with 28 additions and 33 deletions

@@ -421,13 +421,12 @@ class IdDuplicateCheckerOld {
     if (saved_message_ids_.size() == MAX_SAVED_MESSAGE_IDS) {
       auto oldest_message_id = *saved_message_ids_.begin();
       if (message_id < oldest_message_id) {
-        return td::Status::Error(2, PSLICE() << "Ignore very old message_id "
-                                             << td::tag("oldest message_id", oldest_message_id)
-                                             << td::tag("got message_id", message_id));
+        return td::Status::Error(2, PSLICE() << "Ignore very old message " << message_id
+                                             << " older than the oldest known message " << oldest_message_id);
       }
     }
     if (saved_message_ids_.count(message_id) != 0) {
-      return td::Status::Error(1, PSLICE() << "Ignore duplicated message_id " << td::tag("message_id", message_id));
+      return td::Status::Error(1, PSLICE() << "Ignore already processed message " << message_id);
     }
     saved_message_ids_.insert(message_id);
@@ -451,16 +450,15 @@ class IdDuplicateCheckerNew {
   td::Status check(td::int64 message_id) {
     auto insert_result = saved_message_ids_.insert(message_id);
     if (!insert_result.second) {
-      return td::Status::Error(1, PSLICE() << "Ignore duplicated message_id " << td::tag("message_id", message_id));
+      return td::Status::Error(1, PSLICE() << "Ignore already processed message " << message_id);
     }
     if (saved_message_ids_.size() == MAX_SAVED_MESSAGE_IDS + 1) {
       auto begin_it = saved_message_ids_.begin();
       bool is_very_old = begin_it == insert_result.first;
       saved_message_ids_.erase(begin_it);
       if (is_very_old) {
-        return td::Status::Error(2, PSLICE() << "Ignore very old message_id "
-                                             << td::tag("oldest message_id", *saved_message_ids_.begin())
-                                             << td::tag("got message_id", message_id));
+        return td::Status::Error(2, PSLICE() << "Ignore very old message " << message_id
+                                             << " older than the oldest known message " << *saved_message_ids_.begin());
       }
     }
     return td::Status::OK();
@@ -477,16 +475,15 @@ class IdDuplicateCheckerNewOther {
   }
   td::Status check(td::int64 message_id) {
     if (!saved_message_ids_.insert(message_id).second) {
-      return td::Status::Error(1, PSLICE() << "Ignore duplicated message_id " << td::tag("message_id", message_id));
+      return td::Status::Error(1, PSLICE() << "Ignore already processed message " << message_id);
     }
     if (saved_message_ids_.size() == MAX_SAVED_MESSAGE_IDS + 1) {
       auto begin_it = saved_message_ids_.begin();
       bool is_very_old = *begin_it == message_id;
       saved_message_ids_.erase(begin_it);
       if (is_very_old) {
-        return td::Status::Error(2, PSLICE() << "Ignore very old message_id "
-                                             << td::tag("oldest message_id", *saved_message_ids_.begin())
-                                             << td::tag("got message_id", message_id));
+        return td::Status::Error(2, PSLICE() << "Ignore very old message " << message_id
+                                             << " older than the oldest known message " << *saved_message_ids_.begin());
       }
     }
     return td::Status::OK();
@@ -505,14 +502,14 @@ class IdDuplicateCheckerNewSimple {
   td::Status check(td::int64 message_id) {
     auto insert_result = saved_message_ids_.insert(message_id);
     if (!insert_result.second) {
-      return td::Status::Error(1, "Ignore duplicated message_id");
+      return td::Status::Error(1, "Ignore already processed message");
     }
     if (saved_message_ids_.size() == MAX_SAVED_MESSAGE_IDS + 1) {
       auto begin_it = saved_message_ids_.begin();
       bool is_very_old = begin_it == insert_result.first;
       saved_message_ids_.erase(begin_it);
       if (is_very_old) {
-        return td::Status::Error(2, "Ignore very old message_id");
+        return td::Status::Error(2, "Ignore very old message");
       }
     }
     return td::Status::OK();
@@ -540,13 +537,12 @@ class IdDuplicateCheckerArray {
       return td::Status::OK();
     }
     if (end_pos_ >= max_size && message_id < saved_message_ids_[0]) {
-      return td::Status::Error(2, PSLICE() << "Ignore very old message_id "
-                                           << td::tag("oldest message_id", saved_message_ids_[0])
-                                           << td::tag("got message_id", message_id));
+      return td::Status::Error(2, PSLICE() << "Ignore very old message " << message_id
+                                           << " older than the oldest known message " << saved_message_ids_[0]);
     }
     auto it = std::lower_bound(&saved_message_ids_[0], &saved_message_ids_[end_pos_], message_id);
     if (*it == message_id) {
-      return td::Status::Error(1, PSLICE() << "Ignore duplicated message_id " << td::tag("message_id", message_id));
+      return td::Status::Error(1, PSLICE() << "Ignore already processed message " << message_id);
     }
     std::copy_backward(it, &saved_message_ids_[end_pos_], &saved_message_ids_[end_pos_ + 1]);
     *it = message_id;
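
The classes above are benchmark variants of the same bounded duplicate check whose messages are being reworded: remember the most recent message identifiers (in an ordered set or a sorted array), reject an identifier that was already seen, and reject one older than everything remembered. The following self-contained sketch restates the set-based variant; the class name, the plain std::string result, and the window size are illustrative assumptions, not code from this commit.

#include <cstddef>
#include <cstdint>
#include <set>
#include <string>

// Sketch of the set-based bounded duplicate checker benchmarked above.
// Returns an empty string on success, or a diagnostic mirroring the updated log messages.
class BoundedIdDuplicateChecker {
 public:
  std::string check(std::int64_t message_id) {
    auto insert_result = saved_message_ids_.insert(message_id);
    if (!insert_result.second) {
      return "Ignore already processed message " + std::to_string(message_id);
    }
    if (saved_message_ids_.size() == MAX_SAVED_MESSAGE_IDS + 1) {
      auto begin_it = saved_message_ids_.begin();
      bool is_very_old = begin_it == insert_result.first;  // the new identifier is the smallest one known
      saved_message_ids_.erase(begin_it);                  // drop the oldest identifier to bound the window
      if (is_very_old) {
        return "Ignore very old message " + std::to_string(message_id) + " older than the oldest known message " +
               std::to_string(*saved_message_ids_.begin());
      }
    }
    return std::string();
  }

 private:
  static constexpr std::size_t MAX_SAVED_MESSAGE_IDS = 1000;  // assumed window size, for illustration only
  std::set<std::int64_t> saved_message_ids_;
};

Checking the same identifier twice exercises the "already processed" branch; checking an identifier smaller than everything in a full window exercises the "very old" branch.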

@@ -32,12 +32,12 @@ Status check_message_id_duplicates(int64 *saved_message_ids, size_t max_size, si
     return Status::OK();
   }
   if (end_pos >= max_size && message_id < saved_message_ids[0]) {
-    return Status::Error(2, PSLICE() << "Ignore very old message_id " << tag("oldest message_id", saved_message_ids[0])
-                                     << tag("got message_id", message_id));
+    return Status::Error(2, PSLICE() << "Ignore very old message " << message_id
+                                     << " older than the oldest known message " << saved_message_ids[0]);
   }
   auto it = std::lower_bound(&saved_message_ids[0], &saved_message_ids[end_pos], message_id);
   if (*it == message_id) {
-    return Status::Error(1, PSLICE() << "Ignore duplicated message_id " << tag("message_id", message_id));
+    return Status::Error(1, PSLICE() << "Ignore already processed message " << message_id);
   }
   std::copy_backward(it, &saved_message_ids[end_pos], &saved_message_ids[end_pos + 1]);
   *it = message_id;
@@ -133,14 +133,14 @@ Status AuthData::check_packet(int64 session_id, int64 message_id, double now, bo
   // Client is to check that the session_id field in the decrypted message indeed equals to that of an active session
   // created by the client.
   if (get_session_id() != static_cast<uint64>(session_id)) {
-    return Status::Error(PSLICE() << "Got packet from different session " << tag("current session_id", get_session_id())
-                                  << tag("got session_id", session_id));
+    return Status::Error(PSLICE() << "Receive packet from different session " << session_id << " in session "
+                                  << get_session_id());
   }
   // Client must check that msg_id has even parity for messages from client to server, and odd parity for messages
   // from server to client.
   if ((message_id & 1) == 0) {
-    return Status::Error(PSLICE() << "Got invalid message_id " << tag("message_id", message_id));
+    return Status::Error(PSLICE() << "Receive invalid message identifier " << message_id);
   }
   TRY_STATUS(duplicate_checker_.check(message_id));
@@ -152,8 +152,7 @@ Status AuthData::check_packet(int64 session_id, int64 message_id, double now, bo
   // The client would also find this useful (to protect from a replay attack), but only if it is certain of its time
   // (for example, if its time has been synchronized with that of the server).
   if (server_time_difference_was_updated_ && !is_valid_inbound_msg_id(message_id, now)) {
-    return Status::Error(PSLICE() << "Ignore message with too old or too new message_id "
-                                  << tag("message_id", message_id));
+    return Status::Error(PSLICE() << "Ignore too old or too new message " << message_id);
   }
   return Status::OK();
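
The comments in check_packet above spell out the client-side validation order for an inbound MTProto packet: the session identifier must match the active session, the message identifier must have server-to-client (odd) parity, it must not repeat a recently seen identifier, and, when the local clock is trusted, it must fall within the allowed time window. The following schematic sketch restates that order outside TDLib; the struct and every helper parameter are placeholders, not TDLib APIs.

#include <cstdint>
#include <string>

// Placeholder result type; TDLib returns td::Status instead.
struct CheckResult {
  bool ok = true;
  std::string reason;
};

// The order of checks mirrors the commented code above; the boolean inputs stand in for
// AuthData's duplicate-window and time-window logic.
CheckResult check_inbound_packet(std::uint64_t active_session_id, std::uint64_t packet_session_id,
                                 std::int64_t message_id, bool already_seen, bool within_time_window) {
  if (packet_session_id != active_session_id) {
    return {false, "Receive packet from different session"};
  }
  if ((message_id & 1) == 0) {
    return {false, "Receive invalid message identifier"};  // client-to-server identifiers are even, server-to-client odd
  }
  if (already_seen) {
    return {false, "Ignore already processed message"};
  }
  if (!within_time_window) {
    return {false, "Ignore too old or too new message"};
  }
  return {};
}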

@@ -89,7 +89,7 @@ namespace mtproto {
  *
  * 6. New session creation
  * A notification about new session.
- * It is reasonable to store unique_id with current session, in order to process duplicated notifications once.
+ * It is reasonable to store unique_id with current session in order to process duplicated notifications only once.
  *
  * Causes all messages older than first_msg_id to be re-sent and notifies about a gap in updates
  * output:
@@ -849,7 +849,7 @@ void SessionConnection::send_ack(uint64 message_id) {
     send_before(Time::now_cached() + ACK_DELAY);
   }
   auto ack = static_cast<int64>(message_id);
-  // an easiest way to eliminate duplicated acks for gzipped packets
+  // an easiest way to eliminate duplicated acknowledgements for gzipped packets
   if (to_ack_.empty() || to_ack_.back() != ack) {
     to_ack_.push_back(ack);
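
The check above compares only against the last queued value, so it suppresses the consecutive duplicates that appear to be produced for a gzipped packet, while duplicates separated by other acknowledgements would still pass. A minimal illustration of that check outside TDLib (the function name is a placeholder):

#include <cstdint>
#include <vector>

// Queue an acknowledgement only if it differs from the most recently queued one.
// Consecutive duplicates are dropped; non-consecutive duplicates are still allowed,
// which is acceptable for acknowledgements.
void queue_ack(std::vector<std::int64_t> &to_ack, std::int64_t ack) {
  if (to_ack.empty() || to_ack.back() != ack) {
    to_ack.push_back(ack);
  }
}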

@@ -941,7 +941,7 @@ void AuthManager::on_delete_account_result(NetQueryPtr &result) {
 void AuthManager::on_get_authorization(tl_object_ptr<telegram_api::auth_Authorization> auth_ptr) {
   if (state_ == State::Ok) {
-    LOG(WARNING) << "Ignore duplicated auth.Authorization";
+    LOG(WARNING) << "Ignore duplicate auth.Authorization";
     if (query_id_ != 0) {
       on_query_ok();
     }

@@ -16442,7 +16442,7 @@ void MessagesManager::add_notification_id_to_message_id_correspondence(Dialog *d
                         << d->dialog_id;
     d->notification_id_to_message_id.emplace(notification_id, message_id);
   } else if (it->second != message_id) {
-    LOG(ERROR) << "Have duplicated " << notification_id << " in " << d->dialog_id << " in " << message_id << " and "
+    LOG(ERROR) << "Have the same " << notification_id << " in " << d->dialog_id << " for " << message_id << " and "
                << it->second;
     if (it->second < message_id) {
       it->second = message_id;

@@ -1057,7 +1057,7 @@ void NotificationManager::flush_pending_updates(int32 group_id, const char *sour
       if (!removed_notification_ids.insert(notification_id).second) {
         // sometimes there can be deletion of notification without previous addition, because the notification
         // has already been deleted at the time of addition and get_notification_object_type was nullptr
-        VLOG(notifications) << "Remove duplicated deletion of " << notification_id;
+        VLOG(notifications) << "Remove duplicate deletion of " << notification_id;
         notification_id = 0;
       }
     }

@@ -38,7 +38,7 @@ void OptionParser::add_option(Option::Type type, char short_key, Slice long_key,
                               std::function<Status(Slice)> callback) {
   for (auto &option : options_) {
     if ((short_key != '\0' && option.short_key == short_key) || (!long_key.empty() && long_key == option.long_key)) {
-      LOG(ERROR) << "Ignore duplicated option '" << (short_key == '\0' ? '-' : short_key) << "' '" << long_key << "'";
+      LOG(ERROR) << "Ignore duplicate option '" << (short_key == '\0' ? '-' : short_key) << "' '" << long_key << "'";
     }
   }
   options_.push_back(Option{type, short_key, long_key.str(), description.str(), std::move(callback)});

@@ -36,7 +36,7 @@ class FdSet {
     }
     std::unique_lock<std::mutex> guard(mutex_);
     if (fds_.count(fd) >= 1) {
-      LOG(FATAL) << "Create duplicated fd: " << fd;
+      LOG(FATAL) << "Create duplicate fd: " << fd;
     }
     fds_.insert(fd);
   }