Improve kafka grouping

Andrea Cavalli 2022-01-13 16:19:10 +01:00
parent a140e7a2b1
commit 3dd6241e2c
11 changed files with 84 additions and 41 deletions

View File

@@ -152,6 +152,11 @@
     <artifactId>log4j-core</artifactId>
     <version>2.17.1</version>
 </dependency>
+<dependency>
+    <groupId>com.lmax</groupId>
+    <artifactId>disruptor</artifactId>
+    <version>3.4.4</version>
+</dependency>
 <dependency>
     <groupId>com.fasterxml.jackson.dataformat</groupId>
     <artifactId>jackson-dataformat-yaml</artifactId>
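Note: the LMAX Disruptor is the inter-thread queue that Log4j 2 requires on the classpath for its asynchronous loggers, which is presumably why the dependency is added next to log4j-core. A minimal sketch of enabling async logging under that assumption (the class name below is illustrative and not part of this commit):

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

public final class AsyncLoggingBootstrap {

    public static void main(String[] args) {
        // Standard Log4j 2 switch for all-async loggers; needs com.lmax:disruptor at runtime.
        // Equivalent to passing -DLog4jContextSelector=... on the JVM command line,
        // and must be set before Log4j is first initialized.
        System.setProperty("Log4jContextSelector",
                "org.apache.logging.log4j.core.async.AsyncLoggerContextSelector");
        Logger log = LogManager.getLogger(AsyncLoggingBootstrap.class);
        log.info("Async logging enabled");
    }
}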

View File

@@ -478,18 +478,18 @@ public class AtomixReactiveApi implements ReactiveApi {
     }
 
     @Override
-    public ReactiveApiClient dynamicClient(long userId) {
-        return new DynamicAtomixReactiveApiClient(this, kafkaConsumer, userId);
+    public ReactiveApiClient dynamicClient(String subGroupId, long userId) {
+        return new DynamicAtomixReactiveApiClient(this, kafkaConsumer, userId, subGroupId);
     }
 
     @Override
-    public ReactiveApiClient liveClient(long liveId, long userId) {
-        return new LiveAtomixReactiveApiClient(atomix, kafkaConsumer, liveId, userId);
+    public ReactiveApiClient liveClient(String subGroupId, long liveId, long userId) {
+        return new LiveAtomixReactiveApiClient(atomix, kafkaConsumer, liveId, userId, subGroupId);
     }
 
     @Override
-    public ReactiveApiMultiClient multiClient() {
-        return new AtomixReactiveApiMultiClient(this, kafkaConsumer);
+    public ReactiveApiMultiClient multiClient(String subGroupId) {
+        return new AtomixReactiveApiMultiClient(this, kafkaConsumer, subGroupId);
     }
 
     @Override

View File

@@ -19,14 +19,15 @@ import reactor.core.scheduler.Schedulers;
 public class AtomixReactiveApiMultiClient implements ReactiveApiMultiClient, AutoCloseable {
 
     private final ClusterEventService eventService;
     private final KafkaConsumer kafkaConsumer;
+    private final String subGroupId;
 
     private volatile boolean closed = false;
 
-    AtomixReactiveApiMultiClient(AtomixReactiveApi api, KafkaConsumer kafkaConsumer) {
+    AtomixReactiveApiMultiClient(AtomixReactiveApi api, KafkaConsumer kafkaConsumer, String subGroupId) {
         this.eventService = api.getAtomix().getEventService();
         this.kafkaConsumer = kafkaConsumer;
+        this.subGroupId = subGroupId;
     }
 
     @Override
@@ -34,7 +35,7 @@ public class AtomixReactiveApiMultiClient implements ReactiveApiMultiClient, AutoCloseable {
         if (closed) {
             return Flux.empty();
         }
-        return kafkaConsumer.consumeMessages(kafkaConsumer.newRandomGroupId(), ack).takeUntil(s -> closed);
+        return kafkaConsumer.consumeMessages(subGroupId, ack).takeUntil(s -> closed);
     }
 
     @Override

View File

@@ -33,12 +33,12 @@ public class DynamicAtomixReactiveApiClient implements ReactiveApiClient, AutoCloseable {
     private final Flux<Long> liveIdChange;
     private final Mono<Long> liveIdResolution;
 
-    DynamicAtomixReactiveApiClient(AtomixReactiveApi api, KafkaConsumer kafkaConsumer, long userId) {
+    DynamicAtomixReactiveApiClient(AtomixReactiveApi api, KafkaConsumer kafkaConsumer, long userId, String subGroupId) {
         this.api = api;
         this.eventService = api.getAtomix().getEventService();
         this.userId = userId;
 
-        clientBoundEvents = kafkaConsumer.consumeMessages(kafkaConsumer.newRandomGroupId(), true, userId)
+        clientBoundEvents = kafkaConsumer.consumeMessages(subGroupId, true, userId)
                 .doOnNext(e -> liveId.set(e.liveId()))
                 .share();
@@ -107,6 +107,11 @@ public class DynamicAtomixReactiveApiClient implements ReactiveApiClient, AutoCloseable {
         return userId;
     }
 
+    @Override
+    public boolean isPullMode() {
+        return true;
+    }
+
     public Flux<Long> liveIdChange() {
         return liveIdChange;
     }

View File

@@ -19,6 +19,7 @@ import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
+import java.util.concurrent.ThreadLocalRandom;
 import org.jetbrains.annotations.Nullable;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -84,8 +85,9 @@ public class Entrypoint {
     if (instanceSettings.clientAddress == null) {
         throw new IllegalArgumentException("A client instance must have an address (host:port)");
     }
+    var randomizedClientId = instanceSettings.id + "-" + ThreadLocalRandom.current().nextLong(0, Long.MAX_VALUE);
     var address = Address.fromString(instanceSettings.clientAddress);
-    atomixBuilder.withMemberId(instanceSettings.id).withHost(address.host()).withPort(address.port());
+    atomixBuilder.withMemberId(randomizedClientId).withHost(address.host()).withPort(address.port());
     nodeId = null;
     resultingEventTransformerSet = Set.of();
 } else {

View File

@@ -4,18 +4,22 @@ import it.tdlight.reactiveapi.Event.ClientBoundEvent;
 import java.time.Duration;
 import java.util.HashMap;
 import java.util.Map;
-import java.util.UUID;
+import java.util.logging.Level;
 import java.util.regex.Pattern;
 import org.apache.kafka.clients.consumer.ConsumerConfig;
 import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.common.errors.RebalanceInProgressException;
 import org.apache.kafka.common.serialization.IntegerDeserializer;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
+import org.reactivestreams.Publisher;
 import reactor.core.publisher.Flux;
+import reactor.core.publisher.SignalType;
 import reactor.kafka.receiver.KafkaReceiver;
 import reactor.kafka.receiver.ReceiverOptions;
+import reactor.util.retry.Retry;
 
 public class KafkaConsumer {
@@ -36,10 +40,16 @@ public class KafkaConsumer {
     props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
     props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class);
     props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ClientBoundEventDeserializer.class);
+    props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, (int) Duration.ofMinutes(5).toMillis());
+    props.put(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG, (int) Duration.ofMinutes(5).toMillis());
     props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
     ReceiverOptions<Integer, ClientBoundEvent> receiverOptions = ReceiverOptions
             .<Integer, ClientBoundEvent>create(props)
-            .commitInterval(Duration.ofSeconds(10));
+            .commitInterval(Duration.ofSeconds(10))
+            .commitBatchSize(64)
+            .pollTimeout(Duration.ofMinutes(2))
+            .maxCommitAttempts(100)
+            .maxDeferredCommits(100);
     Pattern pattern;
     if (liveId == null && userId == null) {
         pattern = Pattern.compile("tdlib\\.event\\.[0-9]+\\.[0-9]+");
@@ -57,37 +67,50 @@ public class KafkaConsumer {
     return KafkaReceiver.create(options);
 }
 
-public Flux<ClientBoundEvent> consumeMessages(@NotNull String groupId, boolean ack, long userId, long liveId) {
+private Flux<ClientBoundEvent> retryIfCleanup(Flux<ClientBoundEvent> clientBoundEventFlux) {
+    return clientBoundEventFlux.retryWhen(Retry
+            .backoff(Long.MAX_VALUE, Duration.ofMillis(100))
+            .maxBackoff(Duration.ofSeconds(5))
+            .filter(ex -> ex instanceof RebalanceInProgressException)
+            .doBeforeRetry(s -> LOG.warn("Rebalancing in progress")));
+}
+
+public Flux<ClientBoundEvent> consumeMessages(@NotNull String subGroupId, boolean ack, long userId, long liveId) {
     if (ack) {
-        return createReceiver(groupId, liveId, userId)
-                .receiveAutoAck()
-                .flatMapSequential(a -> a)
-                .map(ConsumerRecord::value);
+        return createReceiver(kafkaParameters.groupId() + "-" + subGroupId, liveId, userId)
+                .receive()
+                .log("consume-messages", Level.FINEST, SignalType.REQUEST)
+                .doOnNext(result -> result.receiverOffset().acknowledge())
+                .map(ConsumerRecord::value)
+                .transform(this::retryIfCleanup);
     } else {
-        return createReceiver(groupId, liveId, userId).receive().map(ConsumerRecord::value);
+        return createReceiver(kafkaParameters.groupId() + "-" + subGroupId, liveId, userId).receive().map(ConsumerRecord::value);
     }
 }
 
-public Flux<ClientBoundEvent> consumeMessages(@NotNull String groupId, boolean ack, long userId) {
+public Flux<ClientBoundEvent> consumeMessages(@NotNull String subGroupId, boolean ack, long userId) {
     if (ack) {
-        return createReceiver(groupId, null, userId)
-                .receiveAutoAck()
-                .flatMapSequential(a -> a)
-                .map(ConsumerRecord::value);
+        return createReceiver(kafkaParameters.groupId() + "-" + subGroupId, null, userId)
+                .receive()
+                .log("consume-messages", Level.FINEST, SignalType.REQUEST)
+                .doOnNext(result -> result.receiverOffset().acknowledge())
+                .map(ConsumerRecord::value)
+                .transform(this::retryIfCleanup);
     } else {
-        return createReceiver(groupId, null, userId).receive().map(ConsumerRecord::value);
+        return createReceiver(kafkaParameters.groupId() + "-" + subGroupId, null, userId).receive().map(ConsumerRecord::value);
     }
 }
 
-public Flux<ClientBoundEvent> consumeMessages(@NotNull String groupId, boolean ack) {
+public Flux<ClientBoundEvent> consumeMessages(@NotNull String subGroupId, boolean ack) {
     if (ack) {
-        return createReceiver(groupId, null, null).receiveAutoAck().flatMapSequential(a -> a).map(ConsumerRecord::value);
+        return createReceiver(kafkaParameters.groupId() + "-" + subGroupId, null, null)
+                .receive()
+                .log("consume-messages", Level.FINEST, SignalType.REQUEST)
+                .doOnNext(result -> result.receiverOffset().acknowledge())
+                .map(ConsumerRecord::value)
+                .transform(this::retryIfCleanup);
     } else {
-        return createReceiver(groupId, null, null).receive().map(ConsumerRecord::value);
+        return createReceiver(kafkaParameters.groupId() + "-" + subGroupId, null, null).receive().map(ConsumerRecord::value);
     }
 }
-
-public String newRandomGroupId() {
-    return UUID.randomUUID().toString();
-}
 }
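For context, a minimal sketch of how the reworked consumer is meant to be used (the sub-group id "session-handler", the class name, and the println are illustrative, not taken from the commit): the effective Kafka consumer group is now kafkaParameters.groupId() + "-" + subGroupId, so every client that passes the same sub-group id joins the same consumer group and shares partitions, instead of each client getting a random group and its own full copy of the topic.

package it.tdlight.reactiveapi; // assumed: same package as KafkaConsumer

import it.tdlight.reactiveapi.Event.ClientBoundEvent;
import reactor.core.Disposable;
import reactor.core.publisher.Flux;

public final class ConsumerGroupingExample {

    // Hypothetical helper: stream acknowledged events for one user under a stable sub-group id,
    // so parallel instances that all pass "session-handler" split the work instead of duplicating it.
    static Disposable subscribe(KafkaConsumer kafkaConsumer, long userId) {
        Flux<ClientBoundEvent> events = kafkaConsumer.consumeMessages("session-handler", true, userId);
        return events.subscribe(event -> System.out.println("event from live session " + event.liveId()));
    }
}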

View File

@@ -2,9 +2,9 @@ package it.tdlight.reactiveapi;
 
 import java.util.stream.Collectors;
 
-public record KafkaParameters(String clientId, String bootstrapServers) {
+public record KafkaParameters(String groupId, String clientId, String bootstrapServers) {
 
     public KafkaParameters(ClusterSettings clusterSettings, String clientId) {
-        this(clientId, String.join(",", clusterSettings.kafkaBootstrapServers));
+        this(clientId, clientId, String.join(",", clusterSettings.kafkaBootstrapServers));
     }
 }

View File

@@ -33,11 +33,11 @@ public class LiveAtomixReactiveApiClient implements ReactiveApiClient {
     private final Flux<ClientBoundEvent> clientBoundEvents;
 
-    LiveAtomixReactiveApiClient(Atomix atomix, KafkaConsumer kafkaConsumer, long liveId, long userId) {
+    LiveAtomixReactiveApiClient(Atomix atomix, KafkaConsumer kafkaConsumer, long liveId, long userId, String subGroupId) {
         this.eventService = atomix.getEventService();
         this.liveId = liveId;
         this.userId = userId;
-        this.clientBoundEvents = kafkaConsumer.consumeMessages(kafkaConsumer.newRandomGroupId(), true, userId, liveId).share();
+        this.clientBoundEvents = kafkaConsumer.consumeMessages(subGroupId, true, userId, liveId).share();
     }
 
     @Override
@@ -70,6 +70,11 @@ public class LiveAtomixReactiveApiClient implements ReactiveApiClient {
         return userId;
     }
 
+    @Override
+    public boolean isPullMode() {
+        return true;
+    }
+
     static TdApi.Object deserializeResponse(byte[] bytes) {
         try {
             return TdApi.Deserializer.deserialize(new DataInputStream(new ByteArrayInputStream(bytes)));

View File

@@ -46,7 +46,7 @@ public class PeriodicRestarter {
     this.api = api;
     this.interval = interval;
-    this.multiClient = api.multiClient();
+    this.multiClient = api.multiClient("periodic-restarter");
 }

View File

@@ -27,11 +27,11 @@ public interface ReactiveApi {
     */
    Mono<Long> resolveUserLiveId(long userId);
 
-   ReactiveApiMultiClient multiClient();
+   ReactiveApiMultiClient multiClient(String subGroupId);
 
-   ReactiveApiClient dynamicClient(long userId);
+   ReactiveApiClient dynamicClient(String subGroupId, long userId);
 
-   ReactiveApiClient liveClient(long liveId, long userId);
+   ReactiveApiClient liveClient(String subGroupId, long liveId, long userId);
 
    Mono<Void> close();
 }
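A short sketch of how callers are expected to use the changed interface (the sub-group ids and the class name are illustrative): each logical consumer role picks a fixed sub-group id, in the same way PeriodicRestarter now calls api.multiClient("periodic-restarter").

package it.tdlight.reactiveapi; // assumed: same package as ReactiveApi

public final class SubGroupWiringExample {

    // Hypothetical wiring: clients obtained from the same ReactiveApi get Kafka consumer groups
    // namespaced by the caller-chosen sub-group id, so each role consumes events independently.
    static void wire(ReactiveApi api, long userId) {
        ReactiveApiMultiClient auditClient = api.multiClient("audit-log");
        ReactiveApiClient sessionClient = api.dynamicClient("session-handler", userId);
        // the clients are then used exactly as they were before this commit
    }
}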

View File

@@ -13,4 +13,6 @@ public interface ReactiveApiClient {
    <T extends TdApi.Object> Mono<T> request(TdApi.Function<T> request, Instant timeout);
 
    long getUserId();
+
+   boolean isPullMode();
 }