tdlib-session-container/src/main/java/it/tdlight/reactiveapi/KafkaConsumer.java

package it.tdlight.reactiveapi;

import it.tdlight.reactiveapi.Event.ClientBoundEvent;
import java.time.Duration;
import java.util.HashMap;
import java.util.Map;
import java.util.logging.Level;
import java.util.regex.Pattern;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.errors.RebalanceInProgressException;
import org.apache.kafka.common.record.TimestampType;
import org.apache.kafka.common.serialization.IntegerDeserializer;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import reactor.core.publisher.Flux;
import reactor.core.publisher.SignalType;
import reactor.kafka.receiver.KafkaReceiver;
import reactor.kafka.receiver.ReceiverOptions;
import reactor.util.retry.Retry;

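/**
 * Creates reactive Kafka receivers for TDLib client-bound events and exposes them as
 * {@link Flux} streams of {@link TimestampedClientBoundEvent}.
 *
 * <p>A minimal usage sketch; how the {@code KafkaParameters} instance is obtained is assumed
 * here and not prescribed by this class:
 *
 * <pre>{@code
 * KafkaParameters params = ...; // bootstrap servers, client id, group id, obtained elsewhere
 * KafkaConsumer consumer = new KafkaConsumer(params);
 * consumer.consumeMessages("my-sub-group", 123456789L)
 *     .subscribe(event -> System.out.println(event.event()));
 * }</pre>
 */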
public class KafkaConsumer {

	private static final Logger LOG = LogManager.getLogger(KafkaConsumer.class);

	private final KafkaParameters kafkaParameters;

	public KafkaConsumer(KafkaParameters kafkaParameters) {
		this.kafkaParameters = kafkaParameters;
	}

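	/**
	 * Builds a {@link KafkaReceiver} for client-bound events.
	 *
	 * @param groupId consumer group id to join
	 * @param userId  if non-null, subscribe only to that user's {@code tdlib.event.<userId>} topic;
	 *                if null, subscribe to every {@code tdlib.event.*} topic
	 */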
	public KafkaReceiver<Integer, ClientBoundEvent> createReceiver(@NotNull String groupId, @Nullable Long userId) {
		Map<String, Object> props = new HashMap<>();
		props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaParameters.bootstrapServers());
		props.put(ConsumerConfig.CLIENT_ID_CONFIG, kafkaParameters.clientId() + (userId != null ? ("_" + userId) : ""));
		props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
		props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class);
		props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ClientBoundEventDeserializer.class);
		props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
		ReceiverOptions<Integer, ClientBoundEvent> receiverOptions = ReceiverOptions
				.<Integer, ClientBoundEvent>create(props)
				.commitInterval(Duration.ofSeconds(10))
				.commitBatchSize(64)
				.maxCommitAttempts(100)
				.maxDeferredCommits(100);
		Pattern pattern;
		if (userId == null) {
			pattern = Pattern.compile("tdlib\\.event\\.[0-9]+");
		} else {
			pattern = Pattern.compile("tdlib\\.event\\." + userId);
		}
		ReceiverOptions<Integer, ClientBoundEvent> options = receiverOptions
				.subscription(pattern)
				.addAssignListener(partitions -> LOG.debug("onPartitionsAssigned {}", partitions))
				.addRevokeListener(partitions -> LOG.debug("onPartitionsRevoked {}", partitions));
		return KafkaReceiver.create(options);
	}

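	/**
	 * Retries the given flux with exponential backoff (100 ms up to 5 s) while the consumer
	 * fails with {@link RebalanceInProgressException}, i.e. while a group rebalance is ongoing.
	 */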
	private Flux<TimestampedClientBoundEvent> retryIfCleanup(Flux<TimestampedClientBoundEvent> clientBoundEventFlux) {
		return clientBoundEventFlux.retryWhen(Retry
				.backoff(Long.MAX_VALUE, Duration.ofMillis(100))
				.maxBackoff(Duration.ofSeconds(5))
				.filter(ex -> ex instanceof RebalanceInProgressException)
				.doBeforeRetry(s -> LOG.warn("Rebalancing in progress")));
	}

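	/**
	 * Consumes the events of the given user, keeping only those that belong to the given live session id.
	 */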
	public Flux<TimestampedClientBoundEvent> consumeMessages(@NotNull String subGroupId, long userId, long liveId) {
		return consumeMessagesInternal(subGroupId, userId).filter(e -> e.event().liveId() == liveId);
	}

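	/**
	 * Consumes all the events of the given user, regardless of live session id.
	 */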
	public Flux<TimestampedClientBoundEvent> consumeMessages(@NotNull String subGroupId, long userId) {
		return consumeMessagesInternal(subGroupId, userId);
	}

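	/**
	 * Consumes the events of every user.
	 */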
	public Flux<TimestampedClientBoundEvent> consumeMessages(@NotNull String subGroupId) {
		return consumeMessagesInternal(subGroupId, null);
	}

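	/**
	 * Creates the receiver for the given sub-group, acknowledges records as they arrive and maps each
	 * record to a {@link TimestampedClientBoundEvent}, retrying while the consumer group is rebalancing.
	 */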
	private Flux<TimestampedClientBoundEvent> consumeMessagesInternal(@NotNull String subGroupId, @Nullable Long userId) {
		return createReceiver(kafkaParameters.groupId() + "-" + subGroupId, userId)
				.receive()
				.log("consume-messages", Level.FINEST, SignalType.REQUEST)
				// Acknowledge immediately; offsets are committed in batches according to the receiver options
				.doOnNext(result -> result.receiverOffset().acknowledge())
				.map(record -> {
					if (record.timestampType() == TimestampType.CREATE_TIME) {
						return new TimestampedClientBoundEvent(record.timestamp(), record.value());
					} else {
						// No creation timestamp available, use a placeholder value
						return new TimestampedClientBoundEvent(1, record.value());
					}
				})
				.transform(this::retryIfCleanup);
	}
}