Code cleanup

parent 116e082d56
commit a93d6d4e24

pom.xml (33 changed lines)
@@ -60,7 +60,14 @@
             <dependency>
                 <groupId>io.projectreactor</groupId>
                 <artifactId>reactor-bom</artifactId>
-                <version>2020.0.22</version>
+                <version>2020.0.23</version>
+                <type>pom</type>
+                <scope>import</scope>
+            </dependency>
+            <dependency>
+                <groupId>io.rsocket</groupId>
+                <artifactId>rsocket-bom</artifactId>
+                <version>1.1.3</version>
                 <type>pom</type>
                 <scope>import</scope>
             </dependency>
@@ -72,7 +79,7 @@
                 <artifactId>reactor-tools</artifactId>
                 <classifier>original</classifier>
                 <scope>runtime</scope>
-                <version>3.4.22</version>
+                <version>3.4.23</version>
             </dependency>
             <dependency>
                 <groupId>org.jetbrains</groupId>
@@ -127,6 +134,22 @@
                 <groupId>io.projectreactor.kafka</groupId>
                 <artifactId>reactor-kafka</artifactId>
             </dependency>
+            <dependency>
+                <groupId>io.rsocket</groupId>
+                <artifactId>rsocket-core</artifactId>
+            </dependency>
+            <dependency>
+                <groupId>io.rsocket</groupId>
+                <artifactId>rsocket-load-balancer</artifactId>
+            </dependency>
+            <dependency>
+                <groupId>io.rsocket</groupId>
+                <artifactId>rsocket-transport-local</artifactId>
+            </dependency>
+            <dependency>
+                <groupId>io.rsocket</groupId>
+                <artifactId>rsocket-transport-netty</artifactId>
+            </dependency>
             <dependency>
                 <groupId>org.apache.logging.log4j</groupId>
                 <artifactId>log4j-api</artifactId>
@@ -151,6 +174,12 @@
                 <groupId>net.minecrell</groupId>
                 <artifactId>terminalconsoleappender</artifactId>
                 <version>1.3.0</version>
+                <exclusions>
+                    <exclusion>
+                        <groupId>org.apache.logging.log4j</groupId>
+                        <artifactId>log4j-core</artifactId>
+                    </exclusion>
+                </exclusions>
             </dependency>
 
             <dependency>

src/main/java/it/tdlight/reactiveapi/AtomixReactiveApi.java
@@ -2,9 +2,14 @@ package it.tdlight.reactiveapi;
 
 import static java.util.Objects.requireNonNull;
 
+import it.tdlight.jni.TdApi.Object;
 import it.tdlight.reactiveapi.CreateSessionRequest.CreateBotSessionRequest;
 import it.tdlight.reactiveapi.CreateSessionRequest.CreateUserSessionRequest;
 import it.tdlight.reactiveapi.CreateSessionRequest.LoadSessionFromDiskRequest;
+import it.tdlight.reactiveapi.ChannelFactory.KafkaChannelFactory;
+import it.tdlight.reactiveapi.Event.ClientBoundEvent;
+import it.tdlight.reactiveapi.Event.OnRequest;
+import it.tdlight.reactiveapi.Event.OnResponse;
 import java.io.IOException;
 import java.nio.file.Path;
 import java.nio.file.Paths;
@@ -31,9 +36,9 @@ public class AtomixReactiveApi implements ReactiveApi {
 
     private final AtomixReactiveApiMode mode;
 
-    private final KafkaSharedTdlibClients kafkaSharedTdlibClients;
+    private final ClientsSharedTdlib kafkaSharedTdlibClients;
     @Nullable
-    private final KafkaSharedTdlibServers kafkaSharedTdlibServers;
+    private final TdlibChannelsSharedServer kafkaSharedTdlibServers;
     private final ReactiveApiMultiClient client;
 
     private final Set<ResultingEventTransformer> resultingEventTransformerSet;
@@ -60,35 +65,36 @@
             @Nullable DiskSessionsManager diskSessions,
             @NotNull Set<ResultingEventTransformer> resultingEventTransformerSet) {
         this.mode = mode;
+        ChannelFactory channelFactory = new KafkaChannelFactory();
         if (mode != AtomixReactiveApiMode.SERVER) {
-            var kafkaTDLibRequestProducer = new KafkaTdlibRequestProducer(kafkaParameters);
-            var kafkaTDLibResponseConsumer = new KafkaTdlibResponseConsumer(kafkaParameters);
-            var kafkaClientBoundConsumers = new HashMap<String, KafkaClientBoundConsumer>();
+            EventProducer<OnRequest<?>> kafkaTDLibRequestProducer = ChannelProducerTdlibRequest.create(channelFactory, kafkaParameters);
+            EventConsumer<OnResponse<Object>> kafkaTDLibResponseConsumer = ChannelConsumerTdlibResponse.create(channelFactory, kafkaParameters);
+            HashMap<String, EventConsumer<ClientBoundEvent>> kafkaClientBoundConsumers = new HashMap<>();
             for (String lane : kafkaParameters.getAllLanes()) {
-                kafkaClientBoundConsumers.put(lane, new KafkaClientBoundConsumer(kafkaParameters, lane));
+                kafkaClientBoundConsumers.put(lane, ChannelConsumerClientBoundEvent.create(channelFactory, kafkaParameters, lane));
             }
-            var kafkaTdlibClientsChannels = new KafkaTdlibClientsChannels(kafkaTDLibRequestProducer,
+            var kafkaTdlibClientsChannels = new TdlibChannelsClients(kafkaTDLibRequestProducer,
                     kafkaTDLibResponseConsumer,
                     kafkaClientBoundConsumers
             );
-            this.kafkaSharedTdlibClients = new KafkaSharedTdlibClients(kafkaTdlibClientsChannels);
+            this.kafkaSharedTdlibClients = new ClientsSharedTdlib(kafkaTdlibClientsChannels);
             this.client = new LiveAtomixReactiveApiClient(kafkaSharedTdlibClients);
         } else {
             this.kafkaSharedTdlibClients = null;
             this.client = null;
         }
         if (mode != AtomixReactiveApiMode.CLIENT) {
-            var kafkaTDLibRequestConsumer = new KafkaTdlibRequestConsumer(kafkaParameters);
-            var kafkaTDLibResponseProducer = new KafkaTdlibResponseProducer(kafkaParameters);
-            var kafkaClientBoundProducers = new HashMap<String, KafkaClientBoundProducer>();
+            EventConsumer<OnRequest<Object>> kafkaTDLibRequestConsumer = ChannelConsumerTdlibRequest.create(channelFactory, kafkaParameters);
+            EventProducer<OnResponse<Object>> kafkaTDLibResponseProducer = ChannelProducerTdlibResponse.create(channelFactory, kafkaParameters);
+            var kafkaClientBoundProducers = new HashMap<String, EventProducer<ClientBoundEvent>>();
             for (String lane : kafkaParameters.getAllLanes()) {
-                kafkaClientBoundProducers.put(lane, new KafkaClientBoundProducer(kafkaParameters, lane));
+                kafkaClientBoundProducers.put(lane, ChannelProducerClientBoundEvent.create(channelFactory, kafkaParameters, lane));
             }
-            var kafkaTDLibServer = new KafkaTdlibServersChannels(kafkaTDLibRequestConsumer,
+            var kafkaTDLibServer = new TdlibChannelsServers(kafkaTDLibRequestConsumer,
                     kafkaTDLibResponseProducer,
                     kafkaClientBoundProducers
             );
-            this.kafkaSharedTdlibServers = new KafkaSharedTdlibServers(kafkaTDLibServer);
+            this.kafkaSharedTdlibServers = new TdlibChannelsSharedServer(kafkaTDLibServer);
         } else {
             this.kafkaSharedTdlibServers = null;
         }

src/main/java/it/tdlight/reactiveapi/BaseAtomixReactiveApiClient.java
@@ -52,7 +52,7 @@ abstract class BaseAtomixReactiveApiClient implements ReactiveApiMultiClient {
     private final AtomicLong requestId = new AtomicLong(0);
     private final Disposable subscription;
 
-    public BaseAtomixReactiveApiClient(KafkaSharedTdlibClients kafkaSharedTdlibClients) {
+    public BaseAtomixReactiveApiClient(ClientsSharedTdlib kafkaSharedTdlibClients) {
         this.clientId = System.nanoTime();
         this.requests = kafkaSharedTdlibClients.requests();
 

src/main/java/it/tdlight/reactiveapi/ChannelCodec.java (renamed from KafkaChannelCodec.java)
@@ -1,6 +1,6 @@
 package it.tdlight.reactiveapi;
 
-public enum KafkaChannelCodec {
+public enum ChannelCodec {
     CLIENT_BOUND_EVENT("event", ClientBoundEventSerializer.class, ClientBoundEventDeserializer.class),
     TDLIB_REQUEST("request", TdlibRequestSerializer.class, TdlibRequestDeserializer.class),
     TDLIB_RESPONSE("response", TdlibResponseSerializer.class, TdlibResponseDeserializer.class);
@@ -9,7 +9,7 @@ public enum KafkaChannelCodec {
     private final Class<?> serializerClass;
     private final Class<?> deserializerClass;
 
-    KafkaChannelCodec(String kafkaName,
+    ChannelCodec(String kafkaName,
             Class<?> serializerClass,
             Class<?> deserializerClass) {
         this.name = kafkaName;

src/main/java/it/tdlight/reactiveapi/ChannelConsumerClientBoundEvent.java (new file)
@@ -0,0 +1,22 @@
+package it.tdlight.reactiveapi;
+
+import it.tdlight.reactiveapi.Event.ClientBoundEvent;
+
+public class ChannelConsumerClientBoundEvent {
+
+    private ChannelConsumerClientBoundEvent() {
+    }
+
+    public static EventConsumer<ClientBoundEvent> create(ChannelFactory channelFactory, KafkaParameters kafkaParameters,
+            String lane) {
+        var codec = ChannelCodec.CLIENT_BOUND_EVENT;
+        String name;
+        if (lane.isBlank()) {
+            name = codec.getKafkaName();
+        } else {
+            name = codec.getKafkaName() + "-" + lane;
+        }
+        return channelFactory.newConsumer(kafkaParameters, false, codec, name);
+    }
+
+}

src/main/java/it/tdlight/reactiveapi/ChannelConsumerTdlibRequest.java (new file)
@@ -0,0 +1,19 @@
+package it.tdlight.reactiveapi;
+
+import it.tdlight.jni.TdApi.Object;
+import it.tdlight.reactiveapi.Event.OnRequest;
+
+public class ChannelConsumerTdlibRequest {
+
+    private ChannelConsumerTdlibRequest() {
+    }
+
+    public static EventConsumer<OnRequest<Object>> create(ChannelFactory channelFactory, KafkaParameters kafkaParameters) {
+        return channelFactory.newConsumer(kafkaParameters,
+                true,
+                ChannelCodec.TDLIB_REQUEST,
+                ChannelCodec.TDLIB_REQUEST.getKafkaName()
+        );
+    }
+
+}

src/main/java/it/tdlight/reactiveapi/ChannelConsumerTdlibResponse.java (new file)
@@ -0,0 +1,18 @@
+package it.tdlight.reactiveapi;
+
+import it.tdlight.jni.TdApi.Object;
+import it.tdlight.reactiveapi.Event.OnResponse;
+
+public class ChannelConsumerTdlibResponse {
+
+    private ChannelConsumerTdlibResponse() {
+    }
+
+    public static EventConsumer<OnResponse<Object>> create(ChannelFactory channelFactory, KafkaParameters kafkaParameters) {
+        return channelFactory.newConsumer(kafkaParameters,
+                true,
+                ChannelCodec.TDLIB_RESPONSE,
+                ChannelCodec.TDLIB_RESPONSE.getKafkaName()
+        );
+    }
+}

src/main/java/it/tdlight/reactiveapi/ChannelFactory.java (new file, 34 lines)
@@ -0,0 +1,34 @@
+package it.tdlight.reactiveapi;
+
+import it.tdlight.reactiveapi.kafka.KafkaConsumer;
+import it.tdlight.reactiveapi.kafka.KafkaProducer;
+
+public interface ChannelFactory {
+
+    <T> EventConsumer<T> newConsumer(KafkaParameters kafkaParameters,
+            boolean quickResponse,
+            ChannelCodec channelCodec,
+            String channelName);
+
+    <T> EventProducer<T> newProducer(KafkaParameters kafkaParameters,
+            ChannelCodec channelCodec,
+            String channelName);
+
+    class KafkaChannelFactory implements ChannelFactory {
+
+        @Override
+        public <T> EventConsumer<T> newConsumer(KafkaParameters kafkaParameters,
+                boolean quickResponse,
+                ChannelCodec channelCodec,
+                String channelName) {
+            return new KafkaConsumer<>(kafkaParameters, quickResponse, channelCodec, channelName);
+        }
+
+        @Override
+        public <T> EventProducer<T> newProducer(KafkaParameters kafkaParameters,
+                ChannelCodec channelCodec,
+                String channelName) {
+            return new KafkaProducer<>(kafkaParameters, channelCodec, channelName);
+        }
+    }
+}
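
Note: ChannelFactory is the seam that keeps callers transport-agnostic; AtomixReactiveApi above now asks the factory, through the Channel* helper classes, for EventConsumer/EventProducer instances instead of constructing Kafka classes directly. A minimal usage sketch, illustrative only, assuming an already-configured KafkaParameters instance named kafkaParameters:

    // Illustrative sketch; mirrors what AtomixReactiveApi does in this commit.
    ChannelFactory factory = new ChannelFactory.KafkaChannelFactory();
    // Producer for TDLib requests on the "request" channel.
    EventProducer<OnRequest<?>> requestProducer = ChannelProducerTdlibRequest.create(factory, kafkaParameters);
    // Consumer for client-bound events; a blank lane maps to the plain "event" channel name.
    EventConsumer<ClientBoundEvent> eventConsumer = ChannelConsumerClientBoundEvent.create(factory, kafkaParameters, "");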

src/main/java/it/tdlight/reactiveapi/ChannelProducerClientBoundEvent.java (new file)
@@ -0,0 +1,24 @@
+package it.tdlight.reactiveapi;
+
+import it.tdlight.reactiveapi.Event.ClientBoundEvent;
+
+public class ChannelProducerClientBoundEvent {
+
+    private static final ChannelCodec CODEC = ChannelCodec.CLIENT_BOUND_EVENT;
+
+    private ChannelProducerClientBoundEvent() {
+    }
+
+    public static EventProducer<ClientBoundEvent> create(ChannelFactory channelFactory, KafkaParameters kafkaParameters, String lane) {
+        String name;
+        if (lane.isBlank()) {
+            name = CODEC.getKafkaName();
+        } else {
+            name = CODEC.getKafkaName() + "-" + lane;
+        }
+        return channelFactory.newProducer(kafkaParameters,
+                CODEC,
+                name
+        );
+    }
+}

src/main/java/it/tdlight/reactiveapi/ChannelProducerTdlibRequest.java (new file)
@@ -0,0 +1,17 @@
+package it.tdlight.reactiveapi;
+
+import it.tdlight.reactiveapi.Event.OnRequest;
+
+public class ChannelProducerTdlibRequest {
+
+    private ChannelProducerTdlibRequest() {
+    }
+
+    public static EventProducer<OnRequest<?>> create(ChannelFactory channelFactory, KafkaParameters kafkaParameters) {
+        return channelFactory.newProducer(kafkaParameters,
+                ChannelCodec.TDLIB_REQUEST,
+                ChannelCodec.TDLIB_REQUEST.getKafkaName()
+        );
+    }
+
+}

src/main/java/it/tdlight/reactiveapi/ChannelProducerTdlibResponse.java (new file)
@@ -0,0 +1,18 @@
+package it.tdlight.reactiveapi;
+
+import it.tdlight.jni.TdApi.Object;
+import it.tdlight.reactiveapi.Event.OnResponse;
+
+public class ChannelProducerTdlibResponse {
+
+    private ChannelProducerTdlibResponse() {
+    }
+
+    public static EventProducer<OnResponse<Object>> create(ChannelFactory channelFactory, KafkaParameters kafkaParameters) {
+        return channelFactory.newProducer(kafkaParameters,
+                ChannelCodec.TDLIB_RESPONSE,
+                ChannelCodec.TDLIB_RESPONSE.getKafkaName()
+        );
+    }
+
+}

src/main/java/it/tdlight/reactiveapi/ClientsSharedTdlib.java (renamed from KafkaSharedTdlibClients.java)
@@ -1,36 +1,28 @@
 package it.tdlight.reactiveapi;
 
-import static java.util.Objects.requireNonNullElse;
-
 import it.tdlight.jni.TdApi.Object;
 import it.tdlight.reactiveapi.Event.ClientBoundEvent;
 import it.tdlight.reactiveapi.Event.OnRequest;
 import it.tdlight.reactiveapi.Event.OnResponse;
 import java.io.Closeable;
-import java.time.Duration;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.concurrent.atomic.AtomicReference;
-import java.util.function.Function;
-import java.util.logging.Level;
 import java.util.stream.Collectors;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import reactor.core.Disposable;
-import reactor.core.publisher.BufferOverflowStrategy;
 import reactor.core.publisher.Flux;
-import reactor.core.publisher.GroupedFlux;
-import reactor.core.publisher.SignalType;
 import reactor.core.publisher.Sinks;
 import reactor.core.publisher.Sinks.Many;
 import reactor.core.scheduler.Schedulers;
 import reactor.util.concurrent.Queues;
 
-public class KafkaSharedTdlibClients implements Closeable {
+public class ClientsSharedTdlib implements Closeable {
 
-    private static final Logger LOG = LogManager.getLogger(KafkaSharedTdlibClients.class);
+    private static final Logger LOG = LogManager.getLogger(ClientsSharedTdlib.class);
 
-    private final KafkaTdlibClientsChannels kafkaTdlibClientsChannels;
+    private final TdlibChannelsClients kafkaTdlibClientsChannels;
     private final AtomicReference<Disposable> responsesSub = new AtomicReference<>();
     private final Disposable requestsSub;
     private final AtomicReference<Disposable> eventsSub = new AtomicReference<>();
@@ -39,7 +31,7 @@ public class KafkaSharedTdlibClients implements Closeable {
     private final Many<OnRequest<?>> requests = Sinks.many().unicast()
             .onBackpressureBuffer(Queues.<OnRequest<?>>get(65535).get());
 
-    public KafkaSharedTdlibClients(KafkaTdlibClientsChannels kafkaTdlibClientsChannels) {
+    public ClientsSharedTdlib(TdlibChannelsClients kafkaTdlibClientsChannels) {
         this.kafkaTdlibClientsChannels = kafkaTdlibClientsChannels;
         this.responses = kafkaTdlibClientsChannels.response().consumeMessages("td-responses");
         this.events = kafkaTdlibClientsChannels.events().entrySet().stream()

src/main/java/it/tdlight/reactiveapi/EventConsumer.java (new file, 15 lines)
@@ -0,0 +1,15 @@
+package it.tdlight.reactiveapi;
+
+import org.jetbrains.annotations.NotNull;
+import reactor.core.publisher.Flux;
+
+public interface EventConsumer<K> {
+
+    boolean isQuickResponse();
+
+    ChannelCodec getChannelCodec();
+
+    String getChannelName();
+
+    Flux<Timestamped<K>> consumeMessages(@NotNull String subGroupId);
+}

src/main/java/it/tdlight/reactiveapi/EventProducer.java (new file, 15 lines)
@@ -0,0 +1,15 @@
+package it.tdlight.reactiveapi;
+
+import reactor.core.publisher.Flux;
+import reactor.core.publisher.Mono;
+
+public interface EventProducer<K> {
+
+    ChannelCodec getChannelCodec();
+
+    String getChannelName();
+
+    Mono<Void> sendMessages(Flux<K> eventsFlux);
+
+    void close();
+}
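
Note: EventConsumer and EventProducer are the new transport-neutral contracts that KafkaConsumer and KafkaProducer implement further down. A rough wiring sketch based only on these signatures; factory, kafkaParameters, requestsFlux and handle(...) are assumed helper variables, not part of this commit:

    // Producer side: publish a reactive stream of requests; sendMessages completes when the flux does.
    EventProducer<OnRequest<?>> producer = ChannelProducerTdlibRequest.create(factory, kafkaParameters);
    Disposable sendSubscription = producer.sendMessages(requestsFlux).subscribe();

    // Consumer side: every element arrives wrapped in a Timestamped<K>.
    EventConsumer<OnRequest<Object>> consumer = ChannelConsumerTdlibRequest.create(factory, kafkaParameters);
    Disposable receiveSubscription = consumer.consumeMessages("example-subgroup")
            .doOnNext(timestamped -> handle(timestamped))
            .subscribe();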

src/main/java/it/tdlight/reactiveapi/KafkaClientBoundConsumer.java (deleted)
@@ -1,36 +0,0 @@
-package it.tdlight.reactiveapi;
-
-import it.tdlight.reactiveapi.Event.ClientBoundEvent;
-
-public class KafkaClientBoundConsumer extends KafkaConsumer<ClientBoundEvent> {
-
-    private static final KafkaChannelCodec CODEC = KafkaChannelCodec.CLIENT_BOUND_EVENT;
-    private final String lane;
-    private final String name;
-
-    public KafkaClientBoundConsumer(KafkaParameters kafkaParameters, String lane) {
-        super(kafkaParameters);
-        this.lane = lane;
-        if (lane.isBlank()) {
-            this.name = CODEC.getKafkaName();
-        } else {
-            this.name = CODEC.getKafkaName() + "-" + lane;
-        }
-    }
-
-    @Override
-    public KafkaChannelCodec getChannelCodec() {
-        return CODEC;
-    }
-
-    @Override
-    public String getChannelName() {
-        return name;
-    }
-
-    @Override
-    public boolean isQuickResponse() {
-        return false;
-    }
-
-}

src/main/java/it/tdlight/reactiveapi/KafkaClientBoundProducer.java (deleted)
@@ -1,29 +0,0 @@
-package it.tdlight.reactiveapi;
-
-import it.tdlight.reactiveapi.Event.ClientBoundEvent;
-
-public class KafkaClientBoundProducer extends KafkaProducer<ClientBoundEvent> {
-
-    private static final KafkaChannelCodec CODEC = KafkaChannelCodec.CLIENT_BOUND_EVENT;
-
-    private final String name;
-
-    public KafkaClientBoundProducer(KafkaParameters kafkaParameters, String lane) {
-        super(kafkaParameters);
-        if (lane.isBlank()) {
-            this.name = CODEC.getKafkaName();
-        } else {
-            this.name = CODEC.getKafkaName() + "-" + lane;
-        }
-    }
-
-    @Override
-    public KafkaChannelCodec getChannelCodec() {
-        return CODEC;
-    }
-
-    @Override
-    public String getChannelName() {
-        return name;
-    }
-}

src/main/java/it/tdlight/reactiveapi/KafkaTdlibClientsChannels.java (deleted)
@@ -1,14 +0,0 @@
-package it.tdlight.reactiveapi;
-
-import java.io.Closeable;
-import java.util.Map;
-
-public record KafkaTdlibClientsChannels(KafkaTdlibRequestProducer request,
-        KafkaTdlibResponseConsumer response,
-        Map<String, KafkaClientBoundConsumer> events) implements Closeable {
-
-    @Override
-    public void close() {
-        request.close();
-    }
-}

src/main/java/it/tdlight/reactiveapi/KafkaTdlibRequestConsumer.java (deleted)
@@ -1,27 +0,0 @@
-package it.tdlight.reactiveapi;
-
-import it.tdlight.jni.TdApi;
-import it.tdlight.reactiveapi.Event.OnRequest;
-
-public class KafkaTdlibRequestConsumer extends KafkaConsumer<OnRequest<TdApi.Object>> {
-
-    public KafkaTdlibRequestConsumer(KafkaParameters kafkaParameters) {
-        super(kafkaParameters);
-    }
-
-    @Override
-    public KafkaChannelCodec getChannelCodec() {
-        return KafkaChannelCodec.TDLIB_REQUEST;
-    }
-
-    @Override
-    public String getChannelName() {
-        return getChannelCodec().getKafkaName();
-    }
-
-    @Override
-    public boolean isQuickResponse() {
-        return true;
-    }
-
-}

src/main/java/it/tdlight/reactiveapi/KafkaTdlibRequestProducer.java (deleted)
@@ -1,20 +0,0 @@
-package it.tdlight.reactiveapi;
-
-import it.tdlight.reactiveapi.Event.OnRequest;
-
-public class KafkaTdlibRequestProducer extends KafkaProducer<OnRequest<?>> {
-
-    public KafkaTdlibRequestProducer(KafkaParameters kafkaParameters) {
-        super(kafkaParameters);
-    }
-
-    @Override
-    public KafkaChannelCodec getChannelCodec() {
-        return KafkaChannelCodec.TDLIB_REQUEST;
-    }
-
-    @Override
-    public String getChannelName() {
-        return getChannelCodec().getKafkaName();
-    }
-}

src/main/java/it/tdlight/reactiveapi/KafkaTdlibResponseConsumer.java (deleted)
@@ -1,27 +0,0 @@
-package it.tdlight.reactiveapi;
-
-import it.tdlight.jni.TdApi;
-import it.tdlight.reactiveapi.Event.OnResponse;
-
-public class KafkaTdlibResponseConsumer extends KafkaConsumer<OnResponse<TdApi.Object>> {
-
-    public KafkaTdlibResponseConsumer(KafkaParameters kafkaParameters) {
-        super(kafkaParameters);
-    }
-
-    @Override
-    public KafkaChannelCodec getChannelCodec() {
-        return KafkaChannelCodec.TDLIB_RESPONSE;
-    }
-
-    @Override
-    public String getChannelName() {
-        return getChannelCodec().getKafkaName();
-    }
-
-    @Override
-    public boolean isQuickResponse() {
-        return true;
-    }
-
-}

src/main/java/it/tdlight/reactiveapi/KafkaTdlibResponseProducer.java (deleted)
@@ -1,21 +0,0 @@
-package it.tdlight.reactiveapi;
-
-import it.tdlight.jni.TdApi;
-import it.tdlight.reactiveapi.Event.OnResponse;
-
-public class KafkaTdlibResponseProducer extends KafkaProducer<OnResponse<TdApi.Object>> {
-
-    public KafkaTdlibResponseProducer(KafkaParameters kafkaParameters) {
-        super(kafkaParameters);
-    }
-
-    @Override
-    public KafkaChannelCodec getChannelCodec() {
-        return KafkaChannelCodec.TDLIB_RESPONSE;
-    }
-
-    @Override
-    public String getChannelName() {
-        return getChannelCodec().getKafkaName();
-    }
-}

src/main/java/it/tdlight/reactiveapi/KafkaTdlibServersChannels.java (deleted)
@@ -1,23 +0,0 @@
-package it.tdlight.reactiveapi;
-
-import java.io.Closeable;
-import java.util.Map;
-
-public record KafkaTdlibServersChannels(KafkaTdlibRequestConsumer request,
-        KafkaTdlibResponseProducer response,
-        Map<String, KafkaClientBoundProducer> events) implements Closeable {
-
-    public KafkaClientBoundProducer events(String lane) {
-        var p = events.get(lane);
-        if (p == null) {
-            throw new IllegalArgumentException("No lane " + lane);
-        }
-        return p;
-    }
-
-    @Override
-    public void close() {
-        response.close();
-        events.values().forEach(KafkaProducer::close);
-    }
-}

src/main/java/it/tdlight/reactiveapi/LiveAtomixReactiveApiClient.java
@@ -8,9 +8,9 @@ import reactor.core.publisher.Flux;
 
 public class LiveAtomixReactiveApiClient extends BaseAtomixReactiveApiClient {
 
-    private final KafkaSharedTdlibClients kafkaSharedTdlibClients;
+    private final ClientsSharedTdlib kafkaSharedTdlibClients;
 
-    LiveAtomixReactiveApiClient(KafkaSharedTdlibClients kafkaSharedTdlibClients) {
+    LiveAtomixReactiveApiClient(ClientsSharedTdlib kafkaSharedTdlibClients) {
         super(kafkaSharedTdlibClients);
         this.kafkaSharedTdlibClients = kafkaSharedTdlibClients;
     }

src/main/java/it/tdlight/reactiveapi/ReactiveApiPublisher.java
@@ -71,7 +71,7 @@ public abstract class ReactiveApiPublisher {
     private static final Duration SPECIAL_RAW_TIMEOUT_DURATION = Duration.ofMinutes(5);
 
     private static final Duration HUNDRED_MS = Duration.ofMillis(100);
-    private final KafkaSharedTdlibServers kafkaSharedTdlibServers;
+    private final TdlibChannelsSharedServer kafkaSharedTdlibServers;
     private final Set<ResultingEventTransformer> resultingEventTransformerSet;
     private final ReactiveTelegramClient rawTelegramClient;
     private final Flux<Signal> telegramClient;
@@ -85,7 +85,7 @@
     private final AtomicReference<Disposable> disposable = new AtomicReference<>();
     private final AtomicReference<Path> path = new AtomicReference<>();
 
-    private ReactiveApiPublisher(KafkaSharedTdlibServers kafkaSharedTdlibServers,
+    private ReactiveApiPublisher(TdlibChannelsSharedServer kafkaSharedTdlibServers,
             Set<ResultingEventTransformer> resultingEventTransformerSet,
             long userId, String lane) {
         this.kafkaSharedTdlibServers = kafkaSharedTdlibServers;
@@ -114,7 +114,7 @@
         });
     }
 
-    public static ReactiveApiPublisher fromToken(KafkaSharedTdlibServers kafkaSharedTdlibServers,
+    public static ReactiveApiPublisher fromToken(TdlibChannelsSharedServer kafkaSharedTdlibServers,
             Set<ResultingEventTransformer> resultingEventTransformerSet,
             long userId,
             String token,
@@ -122,7 +122,7 @@
         return new ReactiveApiPublisherToken(kafkaSharedTdlibServers, resultingEventTransformerSet, userId, token, lane);
     }
 
-    public static ReactiveApiPublisher fromPhoneNumber(KafkaSharedTdlibServers kafkaSharedTdlibServers,
+    public static ReactiveApiPublisher fromPhoneNumber(TdlibChannelsSharedServer kafkaSharedTdlibServers,
             Set<ResultingEventTransformer> resultingEventTransformerSet,
             long userId,
             long phoneNumber,
@@ -551,7 +551,7 @@
 
         private final String botToken;
 
-        public ReactiveApiPublisherToken(KafkaSharedTdlibServers kafkaSharedTdlibServers,
+        public ReactiveApiPublisherToken(TdlibChannelsSharedServer kafkaSharedTdlibServers,
                 Set<ResultingEventTransformer> resultingEventTransformerSet,
                 long userId,
                 String botToken,
@@ -583,7 +583,7 @@
 
         private final long phoneNumber;
 
-        public ReactiveApiPublisherPhoneNumber(KafkaSharedTdlibServers kafkaSharedTdlibServers,
+        public ReactiveApiPublisherPhoneNumber(TdlibChannelsSharedServer kafkaSharedTdlibServers,
                 Set<ResultingEventTransformer> resultingEventTransformerSet,
                 long userId,
                 long phoneNumber,

src/main/java/it/tdlight/reactiveapi/TdlibChannelsClients.java (new file)
@@ -0,0 +1,18 @@
+package it.tdlight.reactiveapi;
+
+import it.tdlight.jni.TdApi.Object;
+import it.tdlight.reactiveapi.Event.ClientBoundEvent;
+import it.tdlight.reactiveapi.Event.OnRequest;
+import it.tdlight.reactiveapi.Event.OnResponse;
+import java.io.Closeable;
+import java.util.Map;
+
+public record TdlibChannelsClients(EventProducer<OnRequest<?>> request,
+        EventConsumer<OnResponse<Object>> response,
+        Map<String, EventConsumer<ClientBoundEvent>> events) implements Closeable {
+
+    @Override
+    public void close() {
+        request.close();
+    }
+}

src/main/java/it/tdlight/reactiveapi/TdlibChannelsServers.java (new file)
@@ -0,0 +1,24 @@
+package it.tdlight.reactiveapi;
+
+import it.tdlight.reactiveapi.Event.ClientBoundEvent;
+import java.io.Closeable;
+import java.util.Map;
+
+public record TdlibChannelsServers(EventConsumer<Event.OnRequest<it.tdlight.jni.TdApi.Object>> request,
+        EventProducer<Event.OnResponse<it.tdlight.jni.TdApi.Object>> response,
+        Map<String, EventProducer<ClientBoundEvent>> events) implements Closeable {
+
+    public EventProducer<ClientBoundEvent> events(String lane) {
+        var p = events.get(lane);
+        if (p == null) {
+            throw new IllegalArgumentException("No lane " + lane);
+        }
+        return p;
+    }
+
+    @Override
+    public void close() {
+        response.close();
+        events.values().forEach(EventProducer::close);
+    }
+}
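
Note: TdlibChannelsServers and TdlibChannelsClients replace the old KafkaTdlibServersChannels/KafkaTdlibClientsChannels records one-to-one; only the component types changed to the new interfaces, and events(lane) still fails fast on unknown lanes. A small assembly sketch, illustrative only; requestConsumer, responseProducer and laneProducer are assumed to come from the Channel* helpers above:

    var serverChannels = new TdlibChannelsServers(requestConsumer,
            responseProducer,
            Map.of("", laneProducer));
    EventProducer<ClientBoundEvent> defaultLane = serverChannels.events("");  // an unknown lane throws IllegalArgumentException
    var sharedServer = new TdlibChannelsSharedServer(serverChannels);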

src/main/java/it/tdlight/reactiveapi/TdlibChannelsSharedServer.java (renamed from KafkaSharedTdlibServers.java)
@@ -1,37 +1,31 @@
 package it.tdlight.reactiveapi;
 
-import static java.util.Objects.requireNonNullElse;
-
 import it.tdlight.jni.TdApi;
 import it.tdlight.jni.TdApi.Object;
 import it.tdlight.reactiveapi.Event.ClientBoundEvent;
 import it.tdlight.reactiveapi.Event.OnRequest;
 import it.tdlight.reactiveapi.Event.OnResponse;
 import java.io.Closeable;
-import java.time.Duration;
 import java.util.concurrent.atomic.AtomicReference;
-import java.util.function.Function;
 import java.util.logging.Level;
 import reactor.core.Disposable;
-import reactor.core.publisher.BufferOverflowStrategy;
 import reactor.core.publisher.Flux;
-import reactor.core.publisher.GroupedFlux;
 import reactor.core.publisher.SignalType;
 import reactor.core.publisher.Sinks;
 import reactor.core.publisher.Sinks.Many;
 import reactor.core.scheduler.Schedulers;
 import reactor.util.concurrent.Queues;
 
-public class KafkaSharedTdlibServers implements Closeable {
+public class TdlibChannelsSharedServer implements Closeable {
 
-    private final KafkaTdlibServersChannels kafkaTdlibServersChannels;
+    private final TdlibChannelsServers kafkaTdlibServersChannels;
     private final Disposable responsesSub;
     private final AtomicReference<Disposable> requestsSub = new AtomicReference<>();
     private final Many<OnResponse<TdApi.Object>> responses = Sinks.many().unicast().onBackpressureBuffer(
             Queues.<OnResponse<TdApi.Object>>get(65535).get());
     private final Flux<Timestamped<OnRequest<Object>>> requests;
 
-    public KafkaSharedTdlibServers(KafkaTdlibServersChannels kafkaTdlibServersChannels) {
+    public TdlibChannelsSharedServer(TdlibChannelsServers kafkaTdlibServersChannels) {
         this.kafkaTdlibServersChannels = kafkaTdlibServersChannels;
         this.responsesSub = kafkaTdlibServersChannels.response()
                 .sendMessages(responses.asFlux().log("responses", Level.FINEST, SignalType.ON_NEXT))

src/main/java/it/tdlight/reactiveapi/kafka/KafkaConsumer.java (moved from src/main/java/it/tdlight/reactiveapi/)
@@ -1,16 +1,20 @@
-package it.tdlight.reactiveapi;
+package it.tdlight.reactiveapi.kafka;
 
 import static java.lang.Math.toIntExact;
 
 import it.tdlight.common.Init;
 import it.tdlight.common.utils.CantLoadLibrary;
+import it.tdlight.reactiveapi.EventConsumer;
+import it.tdlight.reactiveapi.ChannelCodec;
+import it.tdlight.reactiveapi.KafkaParameters;
+import it.tdlight.reactiveapi.ReactorUtils;
+import it.tdlight.reactiveapi.Timestamped;
 import java.time.Duration;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.function.Function;
 import java.util.logging.Level;
-import java.util.regex.Pattern;
 import org.apache.kafka.clients.consumer.CommitFailedException;
 import org.apache.kafka.clients.consumer.ConsumerConfig;
 import org.apache.kafka.common.errors.RebalanceInProgressException;
@@ -19,21 +23,29 @@ import org.apache.kafka.common.serialization.IntegerDeserializer;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.jetbrains.annotations.NotNull;
-import org.jetbrains.annotations.Nullable;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.SignalType;
 import reactor.kafka.receiver.KafkaReceiver;
 import reactor.kafka.receiver.ReceiverOptions;
 import reactor.util.retry.Retry;
 
-public abstract class KafkaConsumer<K> {
+public final class KafkaConsumer<K> implements EventConsumer<K> {
 
     private static final Logger LOG = LogManager.getLogger(KafkaConsumer.class);
 
     private final KafkaParameters kafkaParameters;
+    private final boolean quickResponse;
+    private final ChannelCodec channelCodec;
+    private final String channelName;
 
-    public KafkaConsumer(KafkaParameters kafkaParameters) {
+    public KafkaConsumer(KafkaParameters kafkaParameters,
+            boolean quickResponse,
+            ChannelCodec channelCodec,
+            String channelName) {
         this.kafkaParameters = kafkaParameters;
+        this.quickResponse = quickResponse;
+        this.channelCodec = channelCodec;
+        this.channelName = channelName;
     }
 
     public KafkaReceiver<Integer, K> createReceiver(@NotNull String kafkaGroupId) {
@@ -69,13 +81,22 @@
         return KafkaReceiver.create(options);
     }
 
-    public abstract KafkaChannelCodec getChannelCodec();
+    @Override
+    public boolean isQuickResponse() {
+        return quickResponse;
+    }
 
-    public abstract String getChannelName();
+    @Override
+    public ChannelCodec getChannelCodec() {
+        return channelCodec;
+    }
 
-    public abstract boolean isQuickResponse();
+    @Override
+    public String getChannelName() {
+        return channelName;
+    }
 
-    protected Flux<Timestamped<K>> retryIfCleanup(Flux<Timestamped<K>> eventFlux) {
+    public Flux<Timestamped<K>> retryIfCleanup(Flux<Timestamped<K>> eventFlux) {
         return eventFlux.retryWhen(Retry
                 .backoff(Long.MAX_VALUE, Duration.ofMillis(100))
                 .maxBackoff(Duration.ofSeconds(5))
@@ -84,7 +105,7 @@
                 .doBeforeRetry(s -> LOG.warn("Rebalancing in progress")));
     }
 
-    protected Flux<Timestamped<K>> retryIfCommitFailed(Flux<Timestamped<K>> eventFlux) {
+    public Flux<Timestamped<K>> retryIfCommitFailed(Flux<Timestamped<K>> eventFlux) {
         return eventFlux.retryWhen(Retry
                 .backoff(10, Duration.ofSeconds(1))
                 .maxBackoff(Duration.ofSeconds(5))
@@ -98,6 +119,7 @@
                         + " with max.poll.records.")));
     }
 
+    @Override
     public Flux<Timestamped<K>> consumeMessages(@NotNull String subGroupId) {
         return consumeMessagesInternal(subGroupId);
     }

src/main/java/it/tdlight/reactiveapi/kafka/KafkaProducer.java (moved from src/main/java/it/tdlight/reactiveapi/)
@@ -1,5 +1,8 @@
-package it.tdlight.reactiveapi;
+package it.tdlight.reactiveapi.kafka;
 
+import it.tdlight.reactiveapi.ChannelCodec;
+import it.tdlight.reactiveapi.EventProducer;
+import it.tdlight.reactiveapi.KafkaParameters;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.logging.Level;
@@ -15,13 +18,17 @@ import reactor.kafka.sender.KafkaSender;
 import reactor.kafka.sender.SenderOptions;
 import reactor.kafka.sender.SenderRecord;
 
-public abstract class KafkaProducer<K> {
+public final class KafkaProducer<K> implements EventProducer<K> {
 
     private static final Logger LOG = LogManager.getLogger(KafkaProducer.class);
 
     private final KafkaSender<Integer, K> sender;
+    private final ChannelCodec channelCodec;
+    private final String channelName;
 
-    public KafkaProducer(KafkaParameters kafkaParameters) {
+    public KafkaProducer(KafkaParameters kafkaParameters, ChannelCodec channelCodec, String channelName) {
+        this.channelCodec = channelCodec;
+        this.channelName = channelName;
         Map<String, Object> props = new HashMap<>();
         props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaParameters.bootstrapServers());
         props.put(ProducerConfig.CLIENT_ID_CONFIG, kafkaParameters.clientId());
@@ -36,10 +43,17 @@
         sender = KafkaSender.create(senderOptions.maxInFlight(1024));
     }
 
-    public abstract KafkaChannelCodec getChannelCodec();
+    @Override
+    public ChannelCodec getChannelCodec() {
+        return channelCodec;
+    }
 
-    public abstract String getChannelName();
+    @Override
+    public String getChannelName() {
+        return channelName;
+    }
 
+    @Override
     public Mono<Void> sendMessages(Flux<K> eventsFlux) {
         var channelName = getChannelName();
         return eventsFlux
@@ -57,6 +71,7 @@
                 .then();
     }
 
+    @Override
     public void close() {
        sender.close();
     }