Single consumers and producers

Andrea Cavalli 2022-09-10 20:25:54 +02:00
parent e9cbfaaa39
commit bd463a74d2
15 changed files with 245 additions and 114 deletions

AtomixReactiveApi.java

@@ -29,9 +29,9 @@ public class AtomixReactiveApi implements ReactiveApi {
 private final boolean clientOnly;
-private final KafkaTdlibClient kafkaTDLibClient;
+private final KafkaSharedTdlibClients kafkaSharedTdlibClients;
 @Nullable
-private final KafkaTdlibServer kafkaTDLibServer;
+private final KafkaSharedTdlibServers kafkaSharedTdlibServers;
 private final Set<ResultingEventTransformer> resultingEventTransformerSet;
 /**
@@ -53,20 +53,22 @@ public class AtomixReactiveApi implements ReactiveApi {
 var kafkaTDLibRequestProducer = new KafkaTdlibRequestProducer(kafkaParameters);
 var kafkaTDLibResponseConsumer = new KafkaTdlibResponseConsumer(kafkaParameters);
 var kafkaClientBoundConsumer = new KafkaClientBoundConsumer(kafkaParameters);
-this.kafkaTDLibClient = new KafkaTdlibClient(kafkaTDLibRequestProducer,
+var kafkaTdlibClientsChannels = new KafkaTdlibClientsChannels(kafkaTDLibRequestProducer,
 kafkaTDLibResponseConsumer,
 kafkaClientBoundConsumer
 );
+this.kafkaSharedTdlibClients = new KafkaSharedTdlibClients(kafkaTdlibClientsChannels);
 if (clientOnly) {
-this.kafkaTDLibServer = null;
+this.kafkaSharedTdlibServers = null;
 } else {
 var kafkaTDLibRequestConsumer = new KafkaTdlibRequestConsumer(kafkaParameters);
 var kafkaTDLibResponseProducer = new KafkaTdlibResponseProducer(kafkaParameters);
 var kafkaClientBoundProducer = new KafkaClientBoundProducer(kafkaParameters);
-this.kafkaTDLibServer = new KafkaTdlibServer(kafkaTDLibRequestConsumer,
+var kafkaTDLibServer = new KafkaTdlibServersChannels(kafkaTDLibRequestConsumer,
 kafkaTDLibResponseProducer,
 kafkaClientBoundProducer
 );
+this.kafkaSharedTdlibServers = new KafkaSharedTdlibServers(kafkaTDLibServer);
 }
 this.resultingEventTransformerSet = resultingEventTransformerSet;
@@ -130,7 +132,7 @@ public class AtomixReactiveApi implements ReactiveApi {
 userId = createBotSessionRequest.userId();
 botToken = createBotSessionRequest.token();
 phoneNumber = null;
-reactiveApiPublisher = ReactiveApiPublisher.fromToken(kafkaTDLibServer, resultingEventTransformerSet,
+reactiveApiPublisher = ReactiveApiPublisher.fromToken(kafkaSharedTdlibServers, resultingEventTransformerSet,
 userId,
 botToken
 );
@@ -139,7 +141,7 @@ public class AtomixReactiveApi implements ReactiveApi {
 userId = createUserSessionRequest.userId();
 botToken = null;
 phoneNumber = createUserSessionRequest.phoneNumber();
-reactiveApiPublisher = ReactiveApiPublisher.fromPhoneNumber(kafkaTDLibServer, resultingEventTransformerSet,
+reactiveApiPublisher = ReactiveApiPublisher.fromPhoneNumber(kafkaSharedTdlibServers, resultingEventTransformerSet,
 userId,
 phoneNumber
 );
@@ -149,13 +151,13 @@ public class AtomixReactiveApi implements ReactiveApi {
 botToken = loadSessionFromDiskRequest.token();
 phoneNumber = loadSessionFromDiskRequest.phoneNumber();
 if (loadSessionFromDiskRequest.phoneNumber() != null) {
-reactiveApiPublisher = ReactiveApiPublisher.fromPhoneNumber(kafkaTDLibServer,
+reactiveApiPublisher = ReactiveApiPublisher.fromPhoneNumber(kafkaSharedTdlibServers,
 resultingEventTransformerSet,
 userId,
 phoneNumber
 );
 } else {
-reactiveApiPublisher = ReactiveApiPublisher.fromToken(kafkaTDLibServer,
+reactiveApiPublisher = ReactiveApiPublisher.fromToken(kafkaSharedTdlibServers,
 resultingEventTransformerSet,
 userId,
 botToken
@@ -223,21 +225,21 @@ public class AtomixReactiveApi implements ReactiveApi {
 }
 @Override
-public ReactiveApiClient client(String subGroupId, long userId) {
-return new LiveAtomixReactiveApiClient(kafkaTDLibClient, userId, subGroupId);
+public ReactiveApiClient client(long userId) {
+return new LiveAtomixReactiveApiClient(kafkaSharedTdlibClients, userId);
 }
 @Override
 public Mono<Void> close() {
 closeRequested = true;
 Mono<?> kafkaServerProducersStopper;
-if (kafkaTDLibServer != null) {
-kafkaServerProducersStopper = Mono.fromRunnable(kafkaTDLibServer::close).subscribeOn(Schedulers.boundedElastic());
+if (kafkaSharedTdlibServers != null) {
+kafkaServerProducersStopper = Mono.fromRunnable(kafkaSharedTdlibServers::close).subscribeOn(Schedulers.boundedElastic());
 } else {
 kafkaServerProducersStopper = Mono.empty();
 }
 Mono<?> kafkaClientProducersStopper = Mono
-.fromRunnable(kafkaTDLibClient::close)
+.fromRunnable(kafkaSharedTdlibClients::close)
 .subscribeOn(Schedulers.boundedElastic());
 return Mono.when(kafkaServerProducersStopper, kafkaClientProducersStopper);
 }

BaseAtomixReactiveApiClient.java

@@ -2,10 +2,8 @@ package it.tdlight.reactiveapi;
 import static it.tdlight.reactiveapi.Event.SERIAL_VERSION;
-import it.tdlight.common.utils.LibraryVersion;
 import it.tdlight.jni.TdApi;
 import it.tdlight.jni.TdApi.Error;
-import it.tdlight.jni.TdApi.Object;
 import it.tdlight.reactiveapi.Event.ClientBoundEvent;
 import it.tdlight.reactiveapi.Event.Ignored;
 import it.tdlight.reactiveapi.Event.OnBotLoginCodeRequested;
@@ -14,40 +12,25 @@ import it.tdlight.reactiveapi.Event.OnPasswordRequested;
 import it.tdlight.reactiveapi.Event.OnRequest;
 import it.tdlight.reactiveapi.Event.OnRequest.Request;
 import it.tdlight.reactiveapi.Event.OnResponse;
-import it.tdlight.reactiveapi.Event.OnResponse.InvalidResponse;
 import it.tdlight.reactiveapi.Event.OnResponse.Response;
 import it.tdlight.reactiveapi.Event.OnUpdateData;
 import it.tdlight.reactiveapi.Event.OnUpdateError;
 import it.tdlight.reactiveapi.Event.OnUserLoginCodeRequested;
 import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
-import java.io.DataOutputStream;
 import java.io.IOException;
-import java.net.ConnectException;
-import java.nio.charset.StandardCharsets;
 import java.time.Duration;
 import java.time.Instant;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
 import java.util.Map;
 import java.util.concurrent.CompletableFuture;
-import java.util.concurrent.CompletionException;
 import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.Future;
-import java.util.concurrent.TimeoutException;
 import java.util.concurrent.atomic.AtomicLong;
 import org.apache.kafka.common.errors.SerializationException;
 import org.jetbrains.annotations.NotNull;
-import org.jetbrains.annotations.Nullable;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import reactor.core.Disposable;
-import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
-import reactor.core.publisher.MonoSink;
-import reactor.core.publisher.SignalType;
 import reactor.core.publisher.Sinks;
 import reactor.core.publisher.Sinks.EmitFailureHandler;
 import reactor.core.publisher.Sinks.Many;
@@ -63,23 +46,18 @@ abstract class BaseAtomixReactiveApiClient implements ReactiveApiClient, AutoClo
 protected final long userId;
 // Temporary id used to make requests
 private final long clientId;
-private final Many<OnRequest<?>> requests
-= Sinks.many().unicast().onBackpressureBuffer(Queues.<Event.OnRequest<?>>small().get());
+private final Many<OnRequest<?>> requests;
 private final Map<Long, CompletableFuture<Timestamped<OnResponse<TdApi.Object>>>> responses
 = new ConcurrentHashMap<>();
 private final AtomicLong requestId = new AtomicLong(0);
 private final Disposable subscription;
-public BaseAtomixReactiveApiClient(KafkaTdlibClient kafkaTdlibClient, long userId) {
+public BaseAtomixReactiveApiClient(KafkaSharedTdlibClients kafkaSharedTdlibClients, long userId) {
 this.userId = userId;
 this.clientId = System.nanoTime();
-var subscription1 = kafkaTdlibClient.request().sendMessages(userId, requests.asFlux())
-.subscribeOn(Schedulers.boundedElastic())
-.subscribe(v -> {}, ex -> LOG.error("Failed to send requests", ex));
-var subscription2 = kafkaTdlibClient.response()
-.consumeMessages("td-responses", userId)
-.filter(response -> response.data().clientId() == clientId)
+this.requests = kafkaSharedTdlibClients.requests();
+var disposable2 = kafkaSharedTdlibClients.responses(clientId)
 .doOnNext(response -> {
 var responseSink = responses.get(response.data().requestId());
 if (responseSink == null) {
@@ -92,8 +70,7 @@ abstract class BaseAtomixReactiveApiClient implements ReactiveApiClient, AutoClo
 .subscribeOn(Schedulers.parallel())
 .subscribe();
 this.subscription = () -> {
-subscription1.dispose();
-subscription2.dispose();
+disposable2.dispose();
 };
 }
@@ -131,7 +108,7 @@ abstract class BaseAtomixReactiveApiClient implements ReactiveApiClient, AutoClo
 }
 })
 .doFinally(s -> this.responses.remove(requestId));
-requests.emitNext(new Request<>(clientId, requestId, request, timeout), EmitFailureHandler.busyLooping(TEN_MS));
+requests.emitNext(new Request<>(userId, clientId, requestId, request, timeout), EmitFailureHandler.busyLooping(TEN_MS));
 return response;
 });
 }

Event.java

@@ -54,10 +54,12 @@ public sealed interface Event {
 sealed interface OnRequest<T extends TdApi.Object> extends ServerBoundEvent {
-record Request<T extends TdApi.Object>(long clientId, long requestId, TdApi.Function<T> request,
+record Request<T extends TdApi.Object>(long userId, long clientId, long requestId, TdApi.Function<T> request,
 Instant timeout) implements OnRequest<T> {}
-record InvalidRequest<T extends TdApi.Object>(long clientId, long requestId) implements OnRequest<T> {}
+record InvalidRequest<T extends TdApi.Object>(long userId, long clientId, long requestId) implements OnRequest<T> {}
+long userId();
 long clientId();
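
Not part of the commit — a minimal sketch of what the widened record means for callers, assuming TdApi.GetMe as a stand-in request: the target user id is now the first component of Request and InvalidRequest, and userId() is declared on OnRequest itself, so the server side can group incoming requests per user.

package it.tdlight.reactiveapi;

import it.tdlight.jni.TdApi;
import it.tdlight.reactiveapi.Event.OnRequest;
import it.tdlight.reactiveapi.Event.OnRequest.Request;
import java.time.Duration;
import java.time.Instant;

class OnRequestSketch {

	// Build a request event with the new component order: userId, clientId, requestId, request, timeout.
	static OnRequest<TdApi.User> example(long userId, long clientId, long requestId) {
		return new Request<>(userId, clientId, requestId, new TdApi.GetMe(), Instant.now().plus(Duration.ofSeconds(15)));
	}

	// userId() is part of the OnRequest interface, so it works for Request and InvalidRequest alike.
	static long routingKey(OnRequest<?> request) {
		return request.userId();
	}
}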

KafkaConsumer.java

@@ -51,6 +51,7 @@ public abstract class KafkaConsumer<K> {
 props.put(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG, toIntExact(Duration.ofMinutes(5).toMillis()));
 props.put(ConsumerConfig.FETCH_MIN_BYTES_CONFIG, "1048576");
 props.put(ConsumerConfig.FETCH_MAX_WAIT_MS_CONFIG, "100");
+props.put(ConsumerConfig.HEARTBEAT_INTERVAL_MS_CONFIG, "10000");
 ReceiverOptions<Integer, K> receiverOptions = ReceiverOptions
 .<Integer, K>create(props)
 .commitInterval(Duration.ofSeconds(10))

KafkaSharedTdlibClients.java (new file)

@@ -0,0 +1,92 @@
package it.tdlight.reactiveapi;
import static java.util.Objects.requireNonNullElse;
import static reactor.core.publisher.Sinks.EmitFailureHandler.busyLooping;
import it.tdlight.jni.TdApi.Object;
import it.tdlight.reactiveapi.Event.ClientBoundEvent;
import it.tdlight.reactiveapi.Event.OnRequest;
import it.tdlight.reactiveapi.Event.OnResponse;
import java.io.Closeable;
import java.time.Duration;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Function;
import java.util.logging.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import reactor.core.Disposable;
import reactor.core.publisher.Flux;
import reactor.core.publisher.GroupedFlux;
import reactor.core.publisher.SignalType;
import reactor.core.publisher.Sinks;
import reactor.core.publisher.Sinks.Many;
import reactor.core.scheduler.Schedulers;
import reactor.util.concurrent.Queues;
public class KafkaSharedTdlibClients implements Closeable {
private static final Logger LOG = LogManager.getLogger(KafkaSharedTdlibClients.class);
private final KafkaTdlibClientsChannels kafkaTdlibClientsChannels;
private final AtomicReference<Disposable> responsesSub = new AtomicReference<>();
private final Disposable requestsSub;
private final AtomicReference<Disposable> eventsSub = new AtomicReference<>();
private final Flux<GroupedFlux<Long, Timestamped<OnResponse<Object>>>> responses;
private final Flux<GroupedFlux<Long, Timestamped<ClientBoundEvent>>> events;
private final Many<OnRequest<?>> requests = Sinks.many().unicast()
.onBackpressureBuffer(Queues.<OnRequest<?>>get(65535).get());
public KafkaSharedTdlibClients(KafkaTdlibClientsChannels kafkaTdlibClientsChannels) {
this.kafkaTdlibClientsChannels = kafkaTdlibClientsChannels;
this.responses = kafkaTdlibClientsChannels.response().consumeMessages("td-responses")
.onBackpressureBuffer()
.groupBy(k1 -> k1.data().clientId(), 1)
.replay()
.autoConnect(1, this.responsesSub::set);
this.events = kafkaTdlibClientsChannels.events().consumeMessages("td-handler")
.onBackpressureBuffer()
.groupBy(k -> k.data().userId(), 1)
.doOnNext(g -> LOG.info("Receiving updates of client: {}", g.key()))
.replay()
.autoConnect(1, this.eventsSub::set);
this.requestsSub = kafkaTdlibClientsChannels.request()
.sendMessages(0L, requests.asFlux())
.subscribeOn(Schedulers.parallel())
.subscribe();
}
public Flux<Timestamped<OnResponse<Object>>> responses(long clientId) {
return responses.filter(group -> group.key() == clientId)
.take(1, true)
.singleOrEmpty()
.flatMapMany(Function.identity())
.log("req-" + clientId, Level.FINE, SignalType.REQUEST);
}
public Flux<Timestamped<ClientBoundEvent>> events(long userId) {
return events.filter(group -> group.key() == userId)
.take(1, true)
.singleOrEmpty()
.flatMapMany(Function.identity())
.doOnSubscribe(s -> LOG.info("Reading updates of client: {}", userId));
//.log("event-" + userId, Level.FINE, SignalType.REQUEST);
}
public Many<OnRequest<?>> requests() {
return requests;
}
@Override
public void close() {
requestsSub.dispose();
var responsesSub = this.responsesSub.get();
if (responsesSub != null) {
responsesSub.dispose();
}
var eventsSub = this.eventsSub.get();
if (eventsSub != null) {
eventsSub.dispose();
}
kafkaTdlibClientsChannels.close();
}
}
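
Not part of the commit — a hedged usage sketch of the new shared client channels, mirroring how BaseAtomixReactiveApiClient uses them above. One Kafka consumer/producer pair now serves every client in the process: each client filters the shared response stream by its clientId, reads its user's events, and pushes requests into the single shared sink. The class name, ids and the TdApi.GetMe request below are illustrative.

package it.tdlight.reactiveapi;

import it.tdlight.reactiveapi.Event.OnRequest.Request;
import java.time.Duration;
import java.time.Instant;
import reactor.core.Disposable;
import reactor.core.publisher.Sinks.EmitFailureHandler;

class SharedClientsUsageSketch {

	void run(KafkaSharedTdlibClients shared, long userId) {
		long clientId = System.nanoTime(); // temporary id, as in BaseAtomixReactiveApiClient

		// Only this client's responses arrive here; the underlying Kafka consumer is shared.
		Disposable responsesSub = shared.responses(clientId)
				.subscribe(response -> System.out.println("response: " + response.data()));

		// Client-bound events for one user, again backed by the single shared consumer.
		Disposable eventsSub = shared.events(userId)
				.subscribe(event -> System.out.println("event: " + event.data()));

		// Requests from every client funnel into one sink and one Kafka producer.
		shared.requests().emitNext(
				new Request<>(userId, clientId, 1L, new it.tdlight.jni.TdApi.GetMe(),
						Instant.now().plus(Duration.ofSeconds(15))),
				EmitFailureHandler.busyLooping(Duration.ofMillis(10)));

		responsesSub.dispose();
		eventsSub.dispose();
	}
}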

KafkaSharedTdlibServers.java (new file)

@@ -0,0 +1,76 @@
package it.tdlight.reactiveapi;
import static java.util.Objects.requireNonNullElse;
import static reactor.core.publisher.Sinks.EmitFailureHandler.busyLooping;
import it.tdlight.jni.TdApi;
import it.tdlight.jni.TdApi.Object;
import it.tdlight.reactiveapi.Event.ClientBoundEvent;
import it.tdlight.reactiveapi.Event.OnRequest;
import it.tdlight.reactiveapi.Event.OnResponse;
import java.io.Closeable;
import java.time.Duration;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Function;
import java.util.logging.Level;
import reactor.core.Disposable;
import reactor.core.publisher.Flux;
import reactor.core.publisher.GroupedFlux;
import reactor.core.publisher.SignalType;
import reactor.core.publisher.Sinks;
import reactor.core.publisher.Sinks.Many;
import reactor.core.scheduler.Schedulers;
import reactor.util.concurrent.Queues;
public class KafkaSharedTdlibServers implements Closeable {
private final KafkaTdlibServersChannels kafkaTdlibServersChannels;
private final Disposable responsesSub;
private final AtomicReference<Disposable> requestsSub = new AtomicReference<>();
private final Many<OnResponse<TdApi.Object>> responses = Sinks.many().unicast().onBackpressureBuffer(
Queues.<OnResponse<TdApi.Object>>get(65535).get());
private final Flux<GroupedFlux<Long, Timestamped<OnRequest<Object>>>> requests;
public KafkaSharedTdlibServers(KafkaTdlibServersChannels kafkaTdlibServersChannels) {
this.kafkaTdlibServersChannels = kafkaTdlibServersChannels;
this.responsesSub = kafkaTdlibServersChannels.response()
.sendMessages(0L, responses.asFlux())
.subscribeOn(Schedulers.parallel())
.subscribe();
this.requests = kafkaTdlibServersChannels.request()
.consumeMessages("td-requests")
.onBackpressureBuffer()
.groupBy(k -> k.data().userId(), 1)
.replay()
.autoConnect(1, this.requestsSub::set);
}
public Flux<Timestamped<OnRequest<Object>>> requests(long userId) {
return requests.filter(group -> group.key() == userId)
.take(1, true)
.singleOrEmpty()
.flatMapMany(Function.identity())
.log("req-" + userId, Level.FINE, SignalType.REQUEST, SignalType.ON_NEXT);
}
public Disposable events(Flux<ClientBoundEvent> eventFlux) {
return kafkaTdlibServersChannels.events()
.sendMessages(0L, eventFlux)
.subscribeOn(Schedulers.parallel())
.subscribe();
}
public Many<OnResponse<TdApi.Object>> responses() {
return responses;
}
@Override
public void close() {
responsesSub.dispose();
var requestsSub = this.requestsSub.get();
if (requestsSub != null) {
requestsSub.dispose();
}
kafkaTdlibServersChannels.close();
}
}
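
Not part of the commit — the server-side counterpart as a hedged sketch, following how ReactiveApiPublisher uses it below: each publisher reads its user's slice of the single shared request consumer, emits responses into the one shared response sink, and hands its outgoing event flux to the shared client-bound producer. The class and the handleRequest placeholder are illustrative.

package it.tdlight.reactiveapi;

import it.tdlight.jni.TdApi;
import it.tdlight.reactiveapi.Event.ClientBoundEvent;
import it.tdlight.reactiveapi.Event.OnRequest;
import it.tdlight.reactiveapi.Event.OnResponse;
import java.time.Duration;
import reactor.core.Disposable;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Sinks.EmitFailureHandler;

class SharedServersUsageSketch {

	Disposable run(KafkaSharedTdlibServers shared, long userId, Flux<ClientBoundEvent> outgoingEvents) {
		// Per-user view of the single shared request consumer.
		Disposable requestsSub = shared.requests(userId)
				.subscribe(timestamped -> {
					OnResponse<TdApi.Object> response = handleRequest(timestamped.data());
					// All responses go out through the one shared response producer.
					shared.responses().emitNext(response, EmitFailureHandler.busyLooping(Duration.ofMillis(10)));
				});

		// All client-bound events go out through the one shared event producer.
		Disposable eventsSub = shared.events(outgoingEvents);

		return () -> {
			requestsSub.dispose();
			eventsSub.dispose();
		};
	}

	// Placeholder for the publisher's real request handling.
	private OnResponse<TdApi.Object> handleRequest(OnRequest<TdApi.Object> request) {
		throw new UnsupportedOperationException("illustrative placeholder");
	}
}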

KafkaTdlibClient.java (deleted)

@@ -1,15 +0,0 @@
package it.tdlight.reactiveapi;
import it.tdlight.jni.TdApi;
import java.io.Closeable;
import java.io.IOException;
public record KafkaTdlibClient(KafkaTdlibRequestProducer request,
KafkaTdlibResponseConsumer response,
KafkaClientBoundConsumer events) implements Closeable {
@Override
public void close() {
request.close();
}
}

KafkaTdlibClientsChannels.java (new file)

@@ -0,0 +1,13 @@
package it.tdlight.reactiveapi;
import java.io.Closeable;
public record KafkaTdlibClientsChannels(KafkaTdlibRequestProducer request,
KafkaTdlibResponseConsumer response,
KafkaClientBoundConsumer events) implements Closeable {
@Override
public void close() {
request.close();
}
}

KafkaTdlibServer.java (deleted)

@@ -1,16 +0,0 @@
package it.tdlight.reactiveapi;
import it.tdlight.jni.TdApi;
import java.io.Closeable;
import java.io.IOException;
public record KafkaTdlibServer(KafkaTdlibRequestConsumer request,
KafkaTdlibResponseProducer response,
KafkaClientBoundProducer events) implements Closeable {
@Override
public void close() {
response.close();
events.close();
}
}

KafkaTdlibServersChannels.java (new file)

@@ -0,0 +1,14 @@
package it.tdlight.reactiveapi;
import java.io.Closeable;
public record KafkaTdlibServersChannels(KafkaTdlibRequestConsumer request,
KafkaTdlibResponseProducer response,
KafkaClientBoundProducer events) implements Closeable {
@Override
public void close() {
response.close();
events.close();
}
}

LiveAtomixReactiveApiClient.java

@@ -1,20 +1,15 @@
 package it.tdlight.reactiveapi;
 import it.tdlight.reactiveapi.Event.ClientBoundEvent;
-import java.time.Duration;
 import reactor.core.publisher.Flux;
-import reactor.core.publisher.Mono;
-import reactor.util.retry.Retry;
 public class LiveAtomixReactiveApiClient extends BaseAtomixReactiveApiClient {
 private final Flux<ClientBoundEvent> clientBoundEvents;
-LiveAtomixReactiveApiClient(KafkaTdlibClient kafkaTdlibClient, long userId, String subGroupId) {
-super(kafkaTdlibClient, userId);
-this.clientBoundEvents = kafkaTdlibClient.events()
-.consumeMessages(subGroupId, userId)
-.map(Timestamped::data);
+LiveAtomixReactiveApiClient(KafkaSharedTdlibClients kafkaSharedTdlibClients, long userId) {
+super(kafkaSharedTdlibClients, userId);
+this.clientBoundEvents = kafkaSharedTdlibClients.events(userId).map(Timestamped::data);
 }
 @Override

ReactiveApi.java

@@ -11,7 +11,7 @@ public interface ReactiveApi {
 Mono<CreateSessionResponse> createSession(CreateSessionRequest req);
-ReactiveApiClient client(String subGroupId, long userId);
+ReactiveApiClient client(long userId);
 Mono<Void> close();
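
Not part of the commit — a brief hedged note on the narrowed signature: because one shared consumer now serves all clients, callers no longer supply a per-client Kafka sub-group id, and the user id alone identifies the client. The user id below is illustrative.

package it.tdlight.reactiveapi;

class ClientLookupSketch {

	ReactiveApiClient lookup(ReactiveApi api) {
		return api.client(123456789L); // was: api.client(subGroupId, 123456789L)
	}
}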

ReactiveApiPublisher.java

@@ -4,9 +4,7 @@ import static it.tdlight.reactiveapi.AuthPhase.LOGGED_IN;
 import static it.tdlight.reactiveapi.AuthPhase.LOGGED_OUT;
 import static it.tdlight.reactiveapi.Event.SERIAL_VERSION;
 import static java.util.Objects.requireNonNull;
-import static java.util.concurrent.CompletableFuture.completedFuture;
-import com.google.common.primitives.Longs;
 import it.tdlight.common.Init;
 import it.tdlight.common.ReactiveTelegramClient;
 import it.tdlight.common.Response;
@@ -39,9 +37,7 @@ import it.tdlight.reactiveapi.ResultingEvent.ClusterBoundResultingEvent;
 import it.tdlight.reactiveapi.ResultingEvent.ResultingEventPublisherClosed;
 import it.tdlight.reactiveapi.ResultingEvent.TDLibBoundResultingEvent;
 import it.tdlight.tdlight.ClientManager;
-import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
-import java.io.DataInputStream;
 import java.io.DataOutputStream;
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
@@ -52,7 +48,6 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Set;
 import java.util.StringJoiner;
-import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.atomic.AtomicReference;
 import org.apache.kafka.common.errors.SerializationException;
 import org.jetbrains.annotations.NotNull;
@@ -74,8 +69,7 @@ public abstract class ReactiveApiPublisher {
 private static final Duration SPECIAL_RAW_TIMEOUT_DURATION = Duration.ofMinutes(5);
 private static final Duration TEN_MS = Duration.ofMillis(10);
-private final KafkaTdlibServer kafkaTdlibServer;
+private final KafkaSharedTdlibServers kafkaSharedTdlibServers;
 private final Set<ResultingEventTransformer> resultingEventTransformerSet;
 private final ReactiveTelegramClient rawTelegramClient;
 private final Flux<Signal> telegramClient;
@@ -83,18 +77,18 @@
 private final AtomicReference<State> state = new AtomicReference<>(new State(LOGGED_OUT));
 protected final long userId;
-private final Many<OnResponse<TdApi.Object>> responses
-= Sinks.many().unicast().onBackpressureBuffer(Queues.<OnResponse<TdApi.Object>>small().get());
+private final Many<OnResponse<TdApi.Object>> responses;
 private final AtomicReference<Disposable> disposable = new AtomicReference<>();
 private final AtomicReference<Path> path = new AtomicReference<>();
-private ReactiveApiPublisher(KafkaTdlibServer kafkaTdlibServer,
+private ReactiveApiPublisher(KafkaSharedTdlibServers kafkaSharedTdlibServers,
 Set<ResultingEventTransformer> resultingEventTransformerSet,
 long userId) {
-this.kafkaTdlibServer = kafkaTdlibServer;
+this.kafkaSharedTdlibServers = kafkaSharedTdlibServers;
 this.resultingEventTransformerSet = resultingEventTransformerSet;
 this.userId = userId;
+this.responses = this.kafkaSharedTdlibServers.responses();
 this.rawTelegramClient = ClientManager.createReactive();
 try {
 Init.start();
@@ -118,18 +112,18 @@
 });
 }
-public static ReactiveApiPublisher fromToken(KafkaTdlibServer kafkaTdlibServer,
+public static ReactiveApiPublisher fromToken(KafkaSharedTdlibServers kafkaSharedTdlibServers,
 Set<ResultingEventTransformer> resultingEventTransformerSet,
 long userId,
 String token) {
-return new ReactiveApiPublisherToken(kafkaTdlibServer, resultingEventTransformerSet, userId, token);
+return new ReactiveApiPublisherToken(kafkaSharedTdlibServers, resultingEventTransformerSet, userId, token);
 }
-public static ReactiveApiPublisher fromPhoneNumber(KafkaTdlibServer kafkaTdlibServer,
+public static ReactiveApiPublisher fromPhoneNumber(KafkaSharedTdlibServers kafkaSharedTdlibServers,
 Set<ResultingEventTransformer> resultingEventTransformerSet,
 long userId,
 long phoneNumber) {
-return new ReactiveApiPublisherPhoneNumber(kafkaTdlibServer,
+return new ReactiveApiPublisherPhoneNumber(kafkaSharedTdlibServers,
 resultingEventTransformerSet,
 userId,
 phoneNumber
@@ -210,7 +204,7 @@
 // Buffer requests to avoid halting the event loop
 .onBackpressureBuffer();
-kafkaTdlibServer.events().sendMessages(userId, messagesToSend).subscribeOn(Schedulers.parallel()).subscribe();
+kafkaSharedTdlibServers.events(messagesToSend);
 publishedResultingEvents
 // Obtain only cluster-bound events
@@ -463,7 +457,7 @@
 @SuppressWarnings("unchecked")
 private Disposable registerTopics() {
-var subscription1 = kafkaTdlibServer.request().consumeMessages("td-requests-handler", userId)
+var subscription1 = kafkaSharedTdlibServers.requests(userId)
 .flatMapSequential(req -> this
 .handleRequest(req.data())
 .doOnNext(response -> this.responses.emitNext(response, EmitFailureHandler.busyLooping(TEN_MS)))
@@ -471,14 +465,8 @@
 )
 .subscribeOn(Schedulers.parallel())
 .subscribe();
-var subscription2 = this.kafkaTdlibServer
-.response()
-.sendMessages(userId, responses.asFlux())
-.subscribeOn(Schedulers.parallel())
-.subscribe();
 return () -> {
 subscription1.dispose();
-subscription2.dispose();
 };
 }
@@ -543,11 +531,11 @@
 private final String botToken;
-public ReactiveApiPublisherToken(KafkaTdlibServer kafkaTdlibServer,
+public ReactiveApiPublisherToken(KafkaSharedTdlibServers kafkaSharedTdlibServers,
 Set<ResultingEventTransformer> resultingEventTransformerSet,
 long userId,
 String botToken) {
-super(kafkaTdlibServer, resultingEventTransformerSet, userId);
+super(kafkaSharedTdlibServers, resultingEventTransformerSet, userId);
 this.botToken = botToken;
 }
@@ -574,11 +562,11 @@
 private final long phoneNumber;
-public ReactiveApiPublisherPhoneNumber(KafkaTdlibServer kafkaTdlibServer,
+public ReactiveApiPublisherPhoneNumber(KafkaSharedTdlibServers kafkaSharedTdlibServers,
 Set<ResultingEventTransformer> resultingEventTransformerSet,
 long userId,
 long phoneNumber) {
-super(kafkaTdlibServer, resultingEventTransformerSet, userId);
+super(kafkaSharedTdlibServers, resultingEventTransformerSet, userId);
 this.phoneNumber = phoneNumber;
 }

TdlibRequestDeserializer.java

@@ -27,11 +27,12 @@ public class TdlibRequestDeserializer<T extends TdApi.Object> implements Deseria
 try {
 var bais = new ByteArrayInputStream(data);
 var dais = new DataInputStream(bais);
+var userId = dais.readLong();
 var clientId = dais.readLong();
 var requestId = dais.readLong();
 if (dais.readInt() != SERIAL_VERSION) {
 // Deprecated request
-return new InvalidRequest<>(clientId, requestId);
+return new InvalidRequest<>(userId, clientId, requestId);
 } else {
 long millis = dais.readLong();
 Instant timeout;
@@ -42,7 +43,7 @@ public class TdlibRequestDeserializer<T extends TdApi.Object> implements Deseria
 }
 @SuppressWarnings("unchecked")
 TdApi.Function<T> request = (TdApi.Function<T>) TdApi.Deserializer.deserialize(dais);
-return new Request<>(clientId, requestId, request, timeout);
+return new Request<>(userId, clientId, requestId, request, timeout);
 }
 } catch (UnsupportedOperationException | IOException e) {
 throw new SerializationException(e);

TdlibRequestSerializer.java

@@ -25,6 +25,7 @@ public class TdlibRequestSerializer<T extends TdApi.Object> implements Serialize
 } else {
 try(var baos = new ByteArrayOutputStream()) {
 try (var daos = new DataOutputStream(baos)) {
+daos.writeLong(data.userId());
 daos.writeLong(data.clientId());
 daos.writeLong(data.requestId());
 daos.writeInt(SERIAL_VERSION);
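
Not part of the commit — a hedged sketch of the request frame prefix after this change: the target user id is written first, ahead of the client id, request id and serial version; the rest of the frame (timeout and the serialized TdApi function, not shown in this hunk) is unchanged. Class and method names are illustrative.

package it.tdlight.reactiveapi;

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

class RequestFramePrefixSketch {

	static byte[] prefix(long userId, long clientId, long requestId, int serialVersion) throws IOException {
		try (var baos = new ByteArrayOutputStream(); var daos = new DataOutputStream(baos)) {
			daos.writeLong(userId);    // new in this commit
			daos.writeLong(clientId);
			daos.writeLong(requestId);
			daos.writeInt(serialVersion);
			daos.flush();
			return baos.toByteArray();
		}
	}
}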