Remove required Kafka dependency, add zero-copy deserialization

Andrea Cavalli 2022-10-07 16:03:51 +02:00
parent 705e5ca65e
commit 3bed3052d0
29 changed files with 362 additions and 130 deletions
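
In short: the org.apache.kafka Serializer/Deserializer SPI is replaced with project-local it.tdlight.reactiveapi.Serializer and Deserializer interfaces that work directly on DataOutput/DataInput, with KafkaSerializer and KafkaDeserializer adapters keeping the Kafka transport working; a local SerializationException replaces Kafka's; the RSocket codec path now reads and writes Netty ByteBufs directly instead of going through intermediate byte arrays; kafka.clients becomes a requires static (compile-time-only) module dependency; jackson-dataformat-yaml is bumped to 2.14.0-rc1; and subscribeOnce call sites move to the new subscribeOnceUntilUnsubscribe.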

View File

@@ -180,7 +180,7 @@
<dependency>
<groupId>com.fasterxml.jackson.dataformat</groupId>
<artifactId>jackson-dataformat-yaml</artifactId>
<version>2.13.4</version>
<version>2.14.0-rc1</version>
</dependency>
<dependency>
<groupId>jakarta.xml.bind</groupId>

View File

@@ -147,7 +147,7 @@ public class AtomixReactiveApi implements ReactiveApi {
.subscribeOn(Schedulers.parallel())
.subscribe(n -> {}, ex -> LOG.error("Requests channel broke unexpectedly", ex));
}
})).transform(ReactorUtils::subscribeOnce);
})).transform(ReactorUtils::subscribeOnceUntilUnsubscribe);
}
@Override

View File

@@ -19,6 +19,7 @@ import it.tdlight.reactiveapi.Event.OnUpdateData;
import it.tdlight.reactiveapi.Event.OnUpdateError;
import it.tdlight.reactiveapi.Event.OnUserLoginCodeRequested;
import java.io.ByteArrayInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.IOException;
import java.time.Duration;
@@ -28,7 +29,6 @@ import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Consumer;
import org.apache.kafka.common.errors.SerializationException;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -117,7 +117,7 @@ abstract class BaseAtomixReactiveApiClient implements ReactiveApiMultiClient {
}
}
static @NotNull ClientBoundEvent deserializeEvent(DataInputStream is) throws IOException {
static @NotNull ClientBoundEvent deserializeEvent(DataInput is) throws IOException {
var userId = is.readLong();
var dataVersion = is.readInt();
if (dataVersion != SERIAL_VERSION) {

View File

@@ -1,8 +1,6 @@
package it.tdlight.reactiveapi;
import java.lang.reflect.InvocationTargetException;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.Serializer;
public class ChannelCodec {
public static final ChannelCodec CLIENT_BOUND_EVENT = new ChannelCodec(ClientBoundEventSerializer.class, ClientBoundEventDeserializer.class);

View File

@@ -1,15 +1,24 @@
package it.tdlight.reactiveapi;
import it.tdlight.reactiveapi.Event.ClientBoundEvent;
import org.apache.kafka.common.serialization.Deserializer;
import java.io.DataInput;
import java.io.IOException;
public class ClientBoundEventDeserializer implements Deserializer<ClientBoundEvent> {
@Override
public ClientBoundEvent deserialize(String topic, byte[] data) {
public ClientBoundEvent deserialize(byte[] data) {
if (data == null || data.length == 0) {
return null;
}
return LiveAtomixReactiveApiClient.deserializeEvent(data);
}
@Override
public ClientBoundEvent deserialize(int length, DataInput dataInput) throws IOException {
if (dataInput == null || length == 0) {
return null;
}
return LiveAtomixReactiveApiClient.deserializeEvent(dataInput);
}
}

View File

@@ -1,15 +1,24 @@
package it.tdlight.reactiveapi;
import it.tdlight.reactiveapi.Event.ClientBoundEvent;
import org.apache.kafka.common.serialization.Serializer;
import java.io.DataOutput;
import java.io.IOException;
public class ClientBoundEventSerializer implements Serializer<ClientBoundEvent> {
@Override
public byte[] serialize(String topic, ClientBoundEvent data) {
public byte[] serialize(ClientBoundEvent data) {
if (data == null) {
return null;
}
return ReactiveApiPublisher.serializeEvent(data);
}
@Override
public void serialize(ClientBoundEvent data, DataOutput output) throws IOException {
if (data == null) {
return;
}
ReactiveApiPublisher.writeClientBoundEvent(data, output);
}
}

View File

@@ -0,0 +1,22 @@
package it.tdlight.reactiveapi;
import java.io.ByteArrayInputStream;
import java.io.Closeable;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.IOException;
import java.util.Map;
public interface Deserializer<T> {
default T deserialize(byte[] data) throws IOException {
var bais = new ByteArrayInputStream(data);
return deserialize(data.length, new DataInputStream(bais));
}
default T deserialize(int length, DataInput dataInput) throws IOException {
byte[] data = new byte[length];
dataInput.readFully(data);
return deserialize(data);
}
}
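
Note that the two default methods above are defined in terms of each other, so a concrete Deserializer must override at least one of them or calls will recurse until the stack overflows. A minimal sketch of the streaming-first style this commit moves toward (the class name is illustrative, not part of the commit, and is assumed to live in the same package as Deserializer):

import java.io.DataInput;
import java.io.IOException;

public class LongDeserializer implements Deserializer<Long> {
    @Override
    public Long deserialize(int length, DataInput dataInput) throws IOException {
        // Read straight from the transport's buffer; no intermediate byte[] is allocated.
        return dataInput.readLong();
    }
}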

View File

@@ -38,6 +38,7 @@ import it.tdlight.reactiveapi.ResultingEvent.ResultingEventPublisherClosed;
import it.tdlight.reactiveapi.ResultingEvent.TDLibBoundResultingEvent;
import it.tdlight.tdlight.ClientManager;
import java.io.ByteArrayOutputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
@@ -51,7 +52,6 @@ import java.util.Set;
import java.util.StringJoiner;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
import org.apache.kafka.common.errors.SerializationException;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.reactivestreams.Subscriber;
@@ -431,7 +431,7 @@ public abstract class ReactiveApiPublisher {
}
}
private static void writeClientBoundEvent(ClientBoundEvent clientBoundEvent, DataOutputStream dataOutputStream)
public static void writeClientBoundEvent(ClientBoundEvent clientBoundEvent, DataOutput dataOutputStream)
throws IOException {
dataOutputStream.writeLong(clientBoundEvent.userId());
dataOutputStream.writeInt(SERIAL_VERSION);
@@ -511,9 +511,13 @@
@Override
public void onNext(Object responseObj) {
r.accept(new Event.OnResponse.Response<>(onRequestObj.clientId(),
onRequestObj.requestId(),
userId, responseObj));
try {
r.accept(new Event.OnResponse.Response<>(onRequestObj.clientId(),
onRequestObj.requestId(),
userId, responseObj));
} catch (Throwable ex) {
onError(ex);
}
}
@Override

View File

@@ -1,14 +1,9 @@
package it.tdlight.reactiveapi;
import java.io.IOException;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ConcurrentLinkedDeque;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.LongConsumer;
import org.jetbrains.annotations.NotNull;
@@ -36,6 +31,15 @@ public class ReactorUtils {
});
}
public static <V> Flux<V> subscribeOnceUntilUnsubscribe(Flux<V> f) {
AtomicBoolean subscribed = new AtomicBoolean();
return f.doOnSubscribe(s -> {
if (!subscribed.compareAndSet(false, true)) {
throw new UnsupportedOperationException("Can't subscribe more than once!");
}
}).doFinally(s -> subscribed.set(false));
}
public static <V> Mono<V> subscribeOnce(Mono<V> f) {
AtomicBoolean subscribed = new AtomicBoolean();
return f.doOnSubscribe(s -> {
@@ -45,6 +49,15 @@
});
}
public static <V> Mono<V> subscribeOnceUntilUnsubscribe(Mono<V> f) {
AtomicBoolean subscribed = new AtomicBoolean();
return f.doOnSubscribe(s -> {
if (!subscribed.compareAndSet(false, true)) {
throw new UnsupportedOperationException("Can't subscribe more than once!");
}
}).doFinally(s -> subscribed.set(false));
}
public static <K> Flux<K> createLastestSubscriptionFlux(Flux<K> upstream, int maxBufferSize) {
return upstream.transform(parent -> {
AtomicReference<Subscription> subscriptionAtomicReference = new AtomicReference<>();
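
A usage sketch of the new guard (illustrative, not from the commit): unlike subscribeOnce, which rejects every subscriber after the first one forever, subscribeOnceUntilUnsubscribe clears its flag in doFinally, so only concurrent subscriptions are rejected and a new one may start once the previous completes, errors, or is cancelled.

Flux<Integer> guarded = ReactorUtils.subscribeOnceUntilUnsubscribe(Flux.just(1, 2, 3));
guarded.blockLast(); // first subscription: allowed
guarded.blockLast(); // allowed again: the first subscription already terminated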

View File

@@ -0,0 +1,32 @@
package it.tdlight.reactiveapi;
/**
* Any exception during serialization in the producer
*/
public class SerializationException extends RuntimeException {
private static final long serialVersionUID = 1L;
public SerializationException(String message, Throwable cause) {
super(message, cause);
}
public SerializationException(String message) {
super(message);
}
public SerializationException(Throwable cause) {
super(cause);
}
public SerializationException() {
super();
}
/* avoid the expensive and useless stack trace for serialization exceptions */
@Override
public Throwable fillInStackTrace() {
return this;
}
}
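
Because fillInStackTrace() is overridden to return this, constructing and throwing this exception skips the stack-walk that dominates exception cost, which suits hot serialization paths where the failure is caught and handled immediately. The class stands in for org.apache.kafka.common.errors.SerializationException, whose imports are removed throughout this commit, so kafka-clients is no longer needed just for the exception type.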

View File

@@ -0,0 +1,27 @@
package it.tdlight.reactiveapi;
import it.unimi.dsi.fastutil.io.FastByteArrayOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.Closeable;
import java.util.Map;
public interface Serializer<T> {
default byte[] serialize(T data) throws IOException {
try (var baos = new FastByteArrayOutputStream()) {
try (var daos = new DataOutputStream(baos)) {
serialize(data, daos);
baos.trim();
return baos.array;
}
}
}
default void serialize(T data, DataOutput output) throws IOException {
output.write(serialize(data));
}
}
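
As with Deserializer, each default method is defined in terms of the other, so implementations must override at least one. The byte[] path uses fastutil's FastByteArrayOutputStream and trim() so the backing array can be returned without a final copy; overriding the DataOutput variant instead keeps data out of intermediate arrays entirely. A minimal sketch (illustrative name, not part of the commit, assumed to live in the same package as Serializer):

import java.io.DataOutput;
import java.io.IOException;

public class LongSerializer implements Serializer<Long> {
    @Override
    public void serialize(Long data, DataOutput output) throws IOException {
        // Write directly into the caller-supplied sink (e.g. a Netty ByteBufOutputStream).
        output.writeLong(data);
    }
}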

View File

@@ -51,7 +51,7 @@ public class TdlibChannelsSharedHost implements Closeable {
public TdlibChannelsSharedHost(Set<String> allLanes, TdlibChannelsServers tdServersChannels) {
this.tdServersChannels = tdServersChannels;
this.responsesSub = Mono.defer(() -> tdServersChannels.response()
.sendMessages(responses.asFlux().log("responses", Level.FINE)))
.sendMessages(responses.asFlux()/*.log("responses", Level.FINE)*/))
.repeatWhen(REPEAT_STRATEGY)
.retryWhen(RETRY_STRATEGY)
.subscribeOn(Schedulers.parallel())

View File

@@ -43,7 +43,7 @@ public class TdlibChannelsSharedReceive implements Closeable {
this.tdClientsChannels = tdClientsChannels;
this.responses = Flux
.defer(() -> tdClientsChannels.response().consumeMessages())
.log("responses", Level.FINE)
//.log("responses", Level.FINE)
.repeatWhen(REPEAT_STRATEGY)
.retryWhen(RETRY_STRATEGY)
.publish()

View File

@@ -5,25 +5,29 @@ import static it.tdlight.reactiveapi.Event.SERIAL_VERSION;
import it.tdlight.jni.TdApi;
import it.tdlight.jni.TdApi.Object;
import java.io.ByteArrayInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.IOException;
import org.apache.kafka.common.errors.SerializationException;
import org.apache.kafka.common.serialization.Deserializer;
public class TdlibDeserializer implements Deserializer<Object> {
@Override
public Object deserialize(String topic, byte[] data) {
public Object deserialize(byte[] data) {
if (data.length == 0) {
return null;
}
var bais = new ByteArrayInputStream(data);
var dais = new DataInputStream(bais);
return deserialize(-1, dais);
}
@Override
public Object deserialize(int length, DataInput dataInput) {
try {
if (dais.readInt() != SERIAL_VERSION) {
if (dataInput.readInt() != SERIAL_VERSION) {
return new TdApi.Error(400, "Conflicting protocol version");
}
return TdApi.Deserializer.deserialize(dais);
return TdApi.Deserializer.deserialize(dataInput);
} catch (IOException e) {
throw new SerializationException("Failed to deserialize TDLib object", e);
}

View File

@@ -8,45 +8,48 @@ import it.tdlight.reactiveapi.Event.OnRequest.InvalidRequest;
import it.tdlight.reactiveapi.Event.OnRequest.Request;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import org.apache.kafka.common.errors.SerializationException;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.Serializer;
public class TdlibRequestDeserializer<T extends TdApi.Object> implements Deserializer<OnRequest<T>> {
@Override
public OnRequest<T> deserialize(String topic, byte[] data) {
public OnRequest<T> deserialize(byte[] data) {
if (data.length == 0) {
return null;
}
try {
var bais = new ByteArrayInputStream(data);
var dais = new DataInputStream(bais);
var userId = dais.readLong();
var clientId = dais.readLong();
var requestId = dais.readLong();
if (dais.readInt() != SERIAL_VERSION) {
// Deprecated request
return new InvalidRequest<>(userId, clientId, requestId);
} else {
long millis = dais.readLong();
Instant timeout;
if (millis == -1) {
timeout = Instant.ofEpochMilli(Long.MAX_VALUE);
} else {
timeout = Instant.ofEpochMilli(millis);
}
@SuppressWarnings("unchecked")
TdApi.Function<T> request = (TdApi.Function<T>) TdApi.Deserializer.deserialize(dais);
return new Request<>(userId, clientId, requestId, request, timeout);
}
return deserialize(-1, dais);
} catch (UnsupportedOperationException | IOException e) {
throw new SerializationException(e);
}
}
@Override
public OnRequest<T> deserialize(int length, DataInput dataInput) throws IOException {
var userId = dataInput.readLong();
var clientId = dataInput.readLong();
var requestId = dataInput.readLong();
if (dataInput.readInt() != SERIAL_VERSION) {
// Deprecated request
return new InvalidRequest<>(userId, clientId, requestId);
} else {
long millis = dataInput.readLong();
Instant timeout;
if (millis == -1) {
timeout = Instant.ofEpochMilli(Long.MAX_VALUE);
} else {
timeout = Instant.ofEpochMilli(millis);
}
@SuppressWarnings("unchecked")
TdApi.Function<T> request = (TdApi.Function<T>) TdApi.Deserializer.deserialize(dataInput);
return new Request<>(userId, clientId, requestId, request, timeout);
}
}
}

View File

@@ -6,41 +6,25 @@ import it.tdlight.jni.TdApi;
import it.tdlight.reactiveapi.Event.OnRequest;
import it.tdlight.reactiveapi.Event.OnRequest.Request;
import java.io.ByteArrayOutputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import org.apache.kafka.common.errors.SerializationException;
import org.apache.kafka.common.serialization.Serializer;
public class TdlibRequestSerializer<T extends TdApi.Object> implements Serializer<OnRequest<T>> {
private static final Instant INFINITE_TIMEOUT = Instant.now().plus(100_000, ChronoUnit.DAYS);
@Override
public byte[] serialize(String topic, OnRequest<T> data) {
public byte[] serialize(OnRequest<T> data) {
try {
if (data == null) {
return new byte[0];
} else {
try(var baos = new ByteArrayOutputStream()) {
try (var daos = new DataOutputStream(baos)) {
daos.writeLong(data.userId());
daos.writeLong(data.clientId());
daos.writeLong(data.requestId());
daos.writeInt(SERIAL_VERSION);
if (data instanceof OnRequest.Request<?> request) {
if (request.timeout() == Instant.MAX || request.timeout().compareTo(INFINITE_TIMEOUT) >= 0) {
daos.writeLong(-1);
} else {
daos.writeLong(request.timeout().toEpochMilli());
}
request.request().serialize(daos);
} else if (data instanceof OnRequest.InvalidRequest<?>) {
daos.writeLong(-2);
} else {
throw new SerializationException("Unknown request type: " + daos.getClass());
}
serialize(data, daos);
daos.flush();
return baos.toByteArray();
}
@@ -50,4 +34,27 @@ public class TdlibRequestSerializer<T extends TdApi.Object> implements Serialize
throw new SerializationException("Failed to serialize TDLib object", e);
}
}
@Override
public void serialize(OnRequest<T> data, DataOutput dataOutput) throws IOException {
if (data == null) {
return;
}
dataOutput.writeLong(data.userId());
dataOutput.writeLong(data.clientId());
dataOutput.writeLong(data.requestId());
dataOutput.writeInt(SERIAL_VERSION);
if (data instanceof OnRequest.Request<?> request) {
if (request.timeout() == Instant.MAX || request.timeout().compareTo(INFINITE_TIMEOUT) >= 0) {
dataOutput.writeLong(-1);
} else {
dataOutput.writeLong(request.timeout().toEpochMilli());
}
request.request().serialize(dataOutput);
} else if (data instanceof OnRequest.InvalidRequest<?>) {
dataOutput.writeLong(-2);
} else {
throw new SerializationException("Unknown request type: " + dataOutput.getClass());
}
}
}
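
For reference, the request wire format written here (by both the old inline code and the new DataOutput overload) is: userId (long), clientId (long), requestId (long), SERIAL_VERSION (int), then either the timeout as epoch millis (long, -1 meaning no timeout) followed by the serialized TdApi.Function, or a single long -2 for an InvalidRequest.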

View File

@@ -7,35 +7,39 @@ import it.tdlight.reactiveapi.Event.OnResponse;
import it.tdlight.reactiveapi.Event.OnResponse.InvalidResponse;
import it.tdlight.reactiveapi.Event.OnResponse.Response;
import java.io.ByteArrayInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.IOException;
import java.time.Instant;
import org.apache.kafka.common.errors.SerializationException;
import org.apache.kafka.common.serialization.Deserializer;
public class TdlibResponseDeserializer<T extends TdApi.Object> implements Deserializer<OnResponse<T>> {
@Override
public OnResponse<T> deserialize(String topic, byte[] data) {
public OnResponse<T> deserialize(byte[] data) {
if (data.length == 0) {
return null;
}
try {
var bais = new ByteArrayInputStream(data);
var dais = new DataInputStream(bais);
var clientId = dais.readLong();
var requestId = dais.readLong();
var userId = dais.readLong();
if (dais.readInt() != SERIAL_VERSION) {
// Deprecated response
return new InvalidResponse<>(clientId, requestId, userId);
} else {
@SuppressWarnings("unchecked")
T response = (T) TdApi.Deserializer.deserialize(dais);
return new Response<>(clientId, requestId, userId, response);
}
return deserialize(-1, dais);
} catch (UnsupportedOperationException | IOException e) {
throw new SerializationException(e);
}
}
@Override
public OnResponse<T> deserialize(int length, DataInput dataInput) throws IOException {
var clientId = dataInput.readLong();
var requestId = dataInput.readLong();
var userId = dataInput.readLong();
if (dataInput.readInt() != SERIAL_VERSION) {
// Deprecated response
return new InvalidResponse<>(clientId, requestId, userId);
} else {
@SuppressWarnings("unchecked")
T response = (T) TdApi.Deserializer.deserialize(dataInput);
return new Response<>(clientId, requestId, userId, response);
}
}
}

View File

@@ -6,34 +6,23 @@ import it.tdlight.jni.TdApi;
import it.tdlight.reactiveapi.Event.OnResponse;
import it.tdlight.reactiveapi.Event.OnResponse.Response;
import java.io.ByteArrayOutputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import org.apache.kafka.common.errors.SerializationException;
import org.apache.kafka.common.serialization.Serializer;
public class TdlibResponseSerializer<T extends TdApi.Object> implements Serializer<OnResponse<T>> {
@Override
public byte[] serialize(String topic, OnResponse<T> data) {
public byte[] serialize(OnResponse<T> data) {
try {
if (data == null) {
return new byte[0];
} else {
try(var baos = new ByteArrayOutputStream()) {
try (var daos = new DataOutputStream(baos)) {
daos.writeLong(data.clientId());
daos.writeLong(data.requestId());
daos.writeLong(data.userId());
daos.writeInt(SERIAL_VERSION);
if (data instanceof Response<?> response) {
response.response().serialize(daos);
} else if (data instanceof OnResponse.InvalidResponse<T>) {
daos.writeLong(-2);
} else {
throw new SerializationException("Unknown response type: " + daos.getClass());
}
serialize(data, daos);
daos.flush();
return baos.toByteArray();
}
@@ -43,4 +32,19 @@ public class TdlibResponseSerializer<T extends TdApi.Object> implements Serializ
throw new SerializationException("Failed to serialize TDLib object", e);
}
}
@Override
public void serialize(OnResponse<T> data, DataOutput dataOutput) throws IOException {
dataOutput.writeLong(data.clientId());
dataOutput.writeLong(data.requestId());
dataOutput.writeLong(data.userId());
dataOutput.writeInt(SERIAL_VERSION);
if (data instanceof Response<?> response) {
response.response().serialize(dataOutput);
} else if (data instanceof OnResponse.InvalidResponse<T>) {
dataOutput.writeLong(-2);
} else {
throw new SerializationException("Unknown response type: " + dataOutput.getClass());
}
}
}
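
Note the field order differs from requests: a response is written as clientId (long), requestId (long), userId (long), SERIAL_VERSION (int), then either the serialized TdApi object or a long -2 for an InvalidResponse, matching the read order in TdlibResponseDeserializer above.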

View File

@@ -3,27 +3,25 @@ package it.tdlight.reactiveapi;
import static it.tdlight.reactiveapi.Event.SERIAL_VERSION;
import it.tdlight.jni.TdApi;
import it.tdlight.jni.TdApi.Object;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.kafka.common.errors.SerializationException;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.Serializer;
public class TdlibSerializer implements Serializer<TdApi.Object> {
@Override
public byte[] serialize(String topic, TdApi.Object data) {
public byte[] serialize(TdApi.Object data) {
try {
if (data == null) {
return new byte[0];
} else {
try(var baos = new ByteArrayOutputStream()) {
try (var daos = new DataOutputStream(baos)) {
daos.writeInt(SERIAL_VERSION);
data.serialize(daos);
serialize(data, daos);
daos.flush();
return baos.toByteArray();
}
@@ -34,4 +32,12 @@ public class TdlibSerializer implements Serializer<TdApi.Object> {
}
}
@Override
public void serialize(Object data, DataOutput output) throws IOException {
if (data == null) {
return;
}
output.writeInt(SERIAL_VERSION);
data.serialize(output);
}
}

View File

@@ -1,28 +1,20 @@
package it.tdlight.reactiveapi;
import java.io.DataInput;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import org.apache.kafka.common.header.Headers;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.Serializer;
public class UtfCodec implements Serializer<String>, Deserializer<String> {
@Override
public void configure(Map<String, ?> configs, boolean isKey) {
}
@Override
public String deserialize(String topic, byte[] data) {
public String deserialize(byte[] data) {
return new String(data, StandardCharsets.UTF_8);
}
@Override
public byte[] serialize(String topic, String data) {
public byte[] serialize(String data) {
return data.getBytes(StandardCharsets.UTF_8);
}
@Override
public void close() {
}
}

View File

@@ -60,7 +60,8 @@ public final class KafkaConsumer<K> implements EventConsumer<K> {
props.put(ConsumerConfig.CLIENT_ID_CONFIG, kafkaParameters.clientId());
props.put(ConsumerConfig.GROUP_ID_CONFIG, kafkaGroupId);
props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class);
props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, getChannelCodec().getDeserializerClass());
props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, KafkaDeserializer.class);
props.put("custom.deserializer.class", getChannelCodec().getDeserializerClass());
props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
props.put(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG, toIntExact(Duration.ofMinutes(5).toMillis()));
if (!isQuickResponse()) {
@@ -141,6 +142,6 @@ public final class KafkaConsumer<K> implements EventConsumer<K> {
})
.transform(this::retryIfCleanup)
.transform(this::retryIfCommitFailed)
.transform(ReactorUtils::subscribeOnce);
.transform(ReactorUtils::subscribeOnceUntilUnsubscribe);
}
}

View File

@@ -0,0 +1,39 @@
package it.tdlight.reactiveapi.kafka;
import it.tdlight.reactiveapi.Deserializer;
import it.tdlight.reactiveapi.SerializationException;
import it.tdlight.reactiveapi.Serializer;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.lang.reflect.InvocationTargetException;
import java.util.Map;
public class KafkaDeserializer<T> implements Deserializer<T>, org.apache.kafka.common.serialization.Deserializer<T> {
private Deserializer<T> deserializer;
@SuppressWarnings("unchecked")
@Override
public void configure(Map<String, ?> configs, boolean isKey) {
var clazz = (Class<?>) configs.get("custom.deserializer.class");
try {
this.deserializer = (Deserializer<T>) clazz.getConstructor().newInstance();
} catch (InstantiationException | IllegalAccessException | InvocationTargetException | NoSuchMethodException e) {
throw new RuntimeException(e);
}
}
@Override
public T deserialize(String topic, byte[] data) {
try {
return deserializer.deserialize(data);
} catch (IOException e) {
throw new SerializationException(e);
}
}
@Override
public T deserialize(byte[] data) throws IOException {
return deserializer.deserialize(data);
}
}

View File

@@ -37,7 +37,8 @@ public final class KafkaProducer<K> implements EventProducer<K> {
props.put(ProducerConfig.LINGER_MS_CONFIG, "20");
props.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, "snappy");
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class);
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, getChannelCodec().getSerializerClass());
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaSerializer.class);
props.put("custom.serializer.class", getChannelCodec().getSerializerClass());
SenderOptions<Integer, K> senderOptions = SenderOptions.create(props);
sender = KafkaSender.create(senderOptions.maxInFlight(1024));

View File

@@ -0,0 +1,38 @@
package it.tdlight.reactiveapi.kafka;
import it.tdlight.reactiveapi.Serializer;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.lang.reflect.InvocationTargetException;
import java.util.Map;
import org.apache.kafka.common.errors.SerializationException;
public class KafkaSerializer<T> implements Serializer<T>, org.apache.kafka.common.serialization.Serializer<T> {
private Serializer<T> serializer;
@SuppressWarnings("unchecked")
@Override
public void configure(Map<String, ?> configs, boolean isKey) {
var clazz = (Class<?>) configs.get("custom.serializer.class");
try {
this.serializer = (Serializer<T>) clazz.getConstructor().newInstance();
} catch (InstantiationException | IllegalAccessException | InvocationTargetException | NoSuchMethodException e) {
throw new RuntimeException(e);
}
}
@Override
public byte[] serialize(String topic, T data) {
try {
return serializer.serialize(data);
} catch (IOException e) {
throw new SerializationException(e);
}
}
@Override
public byte[] serialize(T data) throws IOException {
return serializer.serialize(data);
}
}
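
KafkaSerializer and KafkaDeserializer bridge the new project-local codec interfaces back onto kafka-clients' own SPI: Kafka instantiates the adapter reflectively, then configure() reads the concrete codec class from the custom property set by KafkaProducer and KafkaConsumer above and instantiates it reflectively too. A sketch of the resulting consumer-side wiring (property names and values as used in this commit):

Map<String, Object> props = new HashMap<>();
props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, KafkaDeserializer.class);
props.put("custom.deserializer.class", ClientBoundEventDeserializer.class);
// Kafka calls KafkaDeserializer.configure(props, false), which creates the
// ClientBoundEventDeserializer and delegates all byte[]/DataInput calls to it.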

View File

@@ -1,10 +1,10 @@
package it.tdlight.reactiveapi.rsocket;
import io.rsocket.Payload;
import it.tdlight.reactiveapi.Deserializer;
import it.tdlight.reactiveapi.Timestamped;
import java.time.Duration;
import java.util.Optional;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import reactor.core.publisher.Flux;

View File

@@ -10,16 +10,16 @@ import io.rsocket.frame.decoder.PayloadDecoder;
import io.rsocket.transport.netty.client.TcpClientTransport;
import io.rsocket.util.DefaultPayload;
import it.tdlight.reactiveapi.ChannelCodec;
import it.tdlight.reactiveapi.Deserializer;
import it.tdlight.reactiveapi.EventConsumer;
import it.tdlight.reactiveapi.EventProducer;
import it.tdlight.reactiveapi.Serializer;
import it.tdlight.reactiveapi.SimpleEventProducer;
import it.tdlight.reactiveapi.Timestamped;
import java.time.Duration;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.Serializer;
import org.jetbrains.annotations.NotNull;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Hooks;

View File

@@ -11,14 +11,14 @@ import io.rsocket.transport.netty.server.CloseableChannel;
import io.rsocket.transport.netty.server.TcpServerTransport;
import io.rsocket.util.DefaultPayload;
import it.tdlight.reactiveapi.ChannelCodec;
import it.tdlight.reactiveapi.Deserializer;
import it.tdlight.reactiveapi.EventConsumer;
import it.tdlight.reactiveapi.EventProducer;
import it.tdlight.reactiveapi.Serializer;
import it.tdlight.reactiveapi.Timestamped;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.Serializer;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.jetbrains.annotations.NotNull;

View File

@@ -1,23 +1,42 @@
package it.tdlight.reactiveapi.rsocket;
import io.netty.buffer.ByteBufAllocator;
import io.netty.buffer.ByteBufInputStream;
import io.netty.buffer.ByteBufOutputStream;
import io.rsocket.Payload;
import io.rsocket.util.DefaultPayload;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.Serializer;
import it.tdlight.reactiveapi.Deserializer;
import it.tdlight.reactiveapi.Serializer;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.ByteBuffer;
import reactor.core.publisher.Flux;
public class RSocketUtils {
public static <T> Flux<T> deserialize(Flux<Payload> payloadFlux, Deserializer<T> deserializer) {
return payloadFlux.map(payload -> {
var slice = payload.sliceData();
byte[] bytes = new byte[slice.readableBytes()];
slice.readBytes(bytes, 0, bytes.length);
return deserializer.deserialize(null, bytes);
try {
try (var bis = new ByteBufInputStream(payload.sliceData())) {
return deserializer.deserialize(payload.data().readableBytes(), bis);
}
} catch (IOException e) {
throw new UncheckedIOException(e);
}
});
}
public static <T> Flux<Payload> serialize(Flux<T> flux, Serializer<T> serializer) {
return flux.map(element -> DefaultPayload.create(serializer.serialize(null, element)));
return flux.map(element -> {
var buf = ByteBufAllocator.DEFAULT.ioBuffer();
try (var baos = new ByteBufOutputStream(buf)) {
serializer.serialize(element, baos);
return DefaultPayload.create(baos.buffer().retain());
} catch (IOException e) {
throw new UncheckedIOException(e);
} finally {
buf.release();
}
});
}
}
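
This file is the zero-copy half of the commit: deserialization now reads through a ByteBufInputStream over the payload's sliced buffer instead of copying readableBytes() into a fresh byte[], and serialization writes through a ByteBufOutputStream into a pooled buffer that backs the payload. A round-trip sketch using the UtfCodec shown earlier (illustrative, not from the commit):

var codec = new UtfCodec();
Flux<Payload> payloads = RSocketUtils.serialize(Flux.just("hello"), codec);
RSocketUtils.deserialize(payloads, codec).subscribe(System.out::println); // prints "hello"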

View File

@@ -11,7 +11,7 @@ module tdlib.reactive.api {
requires tdlight.api;
requires com.google.common;
requires java.logging;
requires kafka.clients;
requires static kafka.clients;
requires org.apache.logging.log4j;
requires reactor.kafka;
requires com.fasterxml.jackson.databind;
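
requires static declares a compile-time-only dependency: kafka.clients must be on the module path to compile this module but is optional at run time, which is the module-system half of the commit title's "remove required kafka dependency".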