(unfinished) Netty 5 refactoring

This commit is contained in:
Andrea Cavalli 2021-08-31 09:14:46 +02:00
parent 3b55e8bd24
commit 0faef5316e
29 changed files with 603 additions and 499 deletions

View File

@ -1,32 +1,31 @@
package it.cavallium.dbengine.client; package it.cavallium.dbengine.client;
import io.netty.buffer.api.Buffer; import io.netty.buffer.api.Buffer;
import io.netty.buffer.api.Send;
import it.cavallium.dbengine.database.serialization.SerializationException; import it.cavallium.dbengine.database.serialization.SerializationException;
import it.cavallium.dbengine.database.serialization.Serializer; import it.cavallium.dbengine.database.serialization.Serializer;
import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.NotNull;
public class MappedSerializer<A, B> implements Serializer<B, Buffer> { public class MappedSerializer<A, B> implements Serializer<B, Send<Buffer>> {
private final Serializer<A, Buffer> serializer; private final Serializer<A, Send<Buffer>> serializer;
private final Mapper<A, B> keyMapper; private final Mapper<A, B> keyMapper;
public MappedSerializer(Serializer<A, Buffer> serializer, public MappedSerializer(Serializer<A, Send<Buffer>> serializer,
Mapper<A, B> keyMapper) { Mapper<A, B> keyMapper) {
this.serializer = serializer; this.serializer = serializer;
this.keyMapper = keyMapper; this.keyMapper = keyMapper;
} }
@Override @Override
public @NotNull B deserialize(@NotNull Buffer serialized) throws SerializationException { public @NotNull B deserialize(@NotNull Send<Buffer> serialized) throws SerializationException {
try { try (serialized) {
return keyMapper.map(serializer.deserialize(serialized.retain())); return keyMapper.map(serializer.deserialize(serialized));
} finally {
serialized.release();
} }
} }
@Override @Override
public @NotNull Buffer serialize(@NotNull B deserialized) throws SerializationException { public @NotNull Send<Buffer> serialize(@NotNull B deserialized) throws SerializationException {
return serializer.serialize(keyMapper.unmap(deserialized)); return serializer.serialize(keyMapper.unmap(deserialized));
} }
} }

View File

@ -1,32 +1,31 @@
package it.cavallium.dbengine.client; package it.cavallium.dbengine.client;
import io.netty.buffer.api.Buffer; import io.netty.buffer.api.Buffer;
import io.netty.buffer.api.Send;
import it.cavallium.dbengine.database.serialization.SerializationException; import it.cavallium.dbengine.database.serialization.SerializationException;
import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength; import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.NotNull;
public class MappedSerializerFixedLength<A, B> implements SerializerFixedBinaryLength<B, Buffer> { public class MappedSerializerFixedLength<A, B> implements SerializerFixedBinaryLength<B, Send<Buffer>> {
private final SerializerFixedBinaryLength<A, Buffer> fixedLengthSerializer; private final SerializerFixedBinaryLength<A, Send<Buffer>> fixedLengthSerializer;
private final Mapper<A, B> keyMapper; private final Mapper<A, B> keyMapper;
public MappedSerializerFixedLength(SerializerFixedBinaryLength<A, Buffer> fixedLengthSerializer, public MappedSerializerFixedLength(SerializerFixedBinaryLength<A, Send<Buffer>> fixedLengthSerializer,
Mapper<A, B> keyMapper) { Mapper<A, B> keyMapper) {
this.fixedLengthSerializer = fixedLengthSerializer; this.fixedLengthSerializer = fixedLengthSerializer;
this.keyMapper = keyMapper; this.keyMapper = keyMapper;
} }
@Override @Override
public @NotNull B deserialize(@NotNull Buffer serialized) throws SerializationException { public @NotNull B deserialize(@NotNull Send<Buffer> serialized) throws SerializationException {
try { try (serialized) {
return keyMapper.map(fixedLengthSerializer.deserialize(serialized.retain())); return keyMapper.map(fixedLengthSerializer.deserialize(serialized));
} finally {
serialized.release();
} }
} }
@Override @Override
public @NotNull Buffer serialize(@NotNull B deserialized) throws SerializationException { public @NotNull Send<Buffer> serialize(@NotNull B deserialized) throws SerializationException {
return fixedLengthSerializer.serialize(keyMapper.unmap(deserialized)); return fixedLengthSerializer.serialize(keyMapper.unmap(deserialized));
} }

View File

@ -2,6 +2,7 @@ package it.cavallium.dbengine.database;
import com.google.common.primitives.Ints; import com.google.common.primitives.Ints;
import com.google.common.primitives.Longs; import com.google.common.primitives.Longs;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.api.Buffer; import io.netty.buffer.api.Buffer;
import io.netty.buffer.api.BufferAllocator; import io.netty.buffer.api.BufferAllocator;
import io.netty.buffer.api.CompositeBuffer; import io.netty.buffer.api.CompositeBuffer;
@ -70,8 +71,8 @@ public class LLUtils {
return response[0] == 1; return response[0] == 1;
} }
public static boolean responseToBoolean(Buffer response) { public static boolean responseToBoolean(Send<Buffer> responseToReceive) {
try (response) { try (var response = responseToReceive.receive()) {
assert response.readableBytes() == 1; assert response.readableBytes() == 1;
return response.getByte(response.readerOffset()) == 1; return response.getByte(response.readerOffset()) == 1;
} }
@ -228,6 +229,29 @@ public class LLUtils {
} }
} }
/**
* Returns {@code true} if and only if the two specified buffers are
* identical to each other for {@code length} bytes starting at {@code aStartIndex}
* index for the {@code a} buffer and {@code bStartIndex} index for the {@code b} buffer.
* A more compact way to express this is:
* <p>
* {@code a[aStartIndex : aStartIndex + length] == b[bStartIndex : bStartIndex + length]}
*/
public static boolean equals(Buffer a, int aStartIndex, Buffer b, int bStartIndex, int length) {
var aCur = a.openCursor(aStartIndex, length);
var bCur = b.openCursor(bStartIndex, length);
if (aCur.bytesLeft() != bCur.bytesLeft()) {
return false;
}
while (aCur.readByte() && bCur.readByte()) {
if (aCur.getByte() != bCur.getByte()) {
return false;
}
}
return true;
}
public static byte[] toArray(Buffer key) { public static byte[] toArray(Buffer key) {
byte[] array = new byte[key.readableBytes()]; byte[] array = new byte[key.readableBytes()];
key.copyInto(key.readerOffset(), array, 0, key.readableBytes()); key.copyInto(key.readerOffset(), array, 0, key.readableBytes());
@ -732,8 +756,4 @@ public class LLUtils {
public static int utf8MaxBytes(String deserialized) { public static int utf8MaxBytes(String deserialized) {
return deserialized.length() * 3; return deserialized.length() * 3;
} }
public static void writeString(Buffer buf, String deserialized, Charset charset) {
buf.writeBytes(deserialized.getBytes(charset));
}
} }

View File

@ -1,44 +1,39 @@
package it.cavallium.dbengine.database.collections; package it.cavallium.dbengine.database.collections;
import io.netty.buffer.api.Buffer; import io.netty.buffer.api.Buffer;
import io.netty.buffer.api.BufferAllocator;
import io.netty.buffer.api.Send;
import it.cavallium.dbengine.database.LLDictionary; import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.serialization.Serializer; import it.cavallium.dbengine.database.serialization.Serializer;
import java.util.function.Function; import java.util.function.Function;
import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.NotNull;
import static io.netty.buffer.Unpooled.*;
public class DatabaseEmpty { public class DatabaseEmpty {
@SuppressWarnings({"unused", "InstantiationOfUtilityClass"}) @SuppressWarnings({"unused", "InstantiationOfUtilityClass"})
public static final Nothing NOTHING = new Nothing(); public static final Nothing NOTHING = new Nothing();
public static final Serializer<Nothing, Buffer> NOTHING_SERIALIZER = new Serializer<>() {
@Override
public @NotNull Nothing deserialize(@NotNull Buffer serialized) {
try {
return NOTHING;
} finally {
serialized.release();
}
}
@Override public static Serializer<Nothing, Send<Buffer>> nothingSerializer(BufferAllocator bufferAllocator) {
public @NotNull Buffer serialize(@NotNull Nothing deserialized) { return new Serializer<>() {
return EMPTY_BUFFER; @Override
} public @NotNull Nothing deserialize(@NotNull Send<Buffer> serialized) {
}; try (serialized) {
public static final Function<Nothing, Nothing> NOTHING_HASH_FUNCTION = nothing -> nothing; return NOTHING;
private static final SubStageGetter<Nothing, DatabaseStageEntry<Nothing>> NOTHING_SUB_STAGE_GETTER }
= new SubStageGetterSingle<>(NOTHING_SERIALIZER); }
@Override
public @NotNull Send<Buffer> serialize(@NotNull Nothing deserialized) {
return bufferAllocator.allocate(0).send();
}
};
}
private DatabaseEmpty() { private DatabaseEmpty() {
} }
public static DatabaseStageEntry<Nothing> create(LLDictionary dictionary, Buffer key) { public static DatabaseStageEntry<Nothing> create(LLDictionary dictionary, Send<Buffer> key) {
return new DatabaseSingle<>(dictionary, key, NOTHING_SERIALIZER); return new DatabaseSingle<>(dictionary, key, nothingSerializer(dictionary.getAllocator()));
}
public static SubStageGetter<Nothing, DatabaseStageEntry<Nothing>> createSubStageGetter() {
return NOTHING_SUB_STAGE_GETTER;
} }
public static final class Nothing { public static final class Nothing {

View File

@ -2,6 +2,7 @@ package it.cavallium.dbengine.database.collections;
import io.netty.buffer.api.Buffer; import io.netty.buffer.api.Buffer;
import io.netty.buffer.api.Send; import io.netty.buffer.api.Send;
import io.netty.buffer.api.internal.ResourceSupport;
import io.netty.util.ReferenceCounted; import io.netty.util.ReferenceCounted;
import it.cavallium.dbengine.client.CompositeSnapshot; import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.Delta; import it.cavallium.dbengine.database.Delta;
@ -82,9 +83,9 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
public Mono<Map<T, U>> get(@Nullable CompositeSnapshot snapshot, boolean existsAlmostCertainly) { public Mono<Map<T, U>> get(@Nullable CompositeSnapshot snapshot, boolean existsAlmostCertainly) {
return dictionary return dictionary
.getRange(resolveSnapshot(snapshot), rangeMono, existsAlmostCertainly) .getRange(resolveSnapshot(snapshot), rangeMono, existsAlmostCertainly)
.<Entry<T, U>>handle((entry, sink) -> { .<Entry<T, U>>handle((entrySend, sink) -> {
try { try (var entry = entrySend.receive()) {
var key = deserializeSuffix(stripPrefix(entry.getKey(), false)); var key = deserializeSuffix(stripPrefix(entry.getKey()));
var value = valueSerializer.deserialize(entry.getValue()); var value = valueSerializer.deserialize(entry.getValue());
sink.next(Map.entry(key, value)); sink.next(Map.entry(key, value));
} catch (SerializationException ex) { } catch (SerializationException ex) {
@ -103,8 +104,8 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
.fromIterable(Collections.unmodifiableMap(value).entrySet()) .fromIterable(Collections.unmodifiableMap(value).entrySet())
.handle((entry, sink) -> { .handle((entry, sink) -> {
try { try {
sink.next(new LLEntry(this.toKey(serializeSuffix(entry.getKey())), sink.next(LLEntry.of(this.toKey(serializeSuffix(entry.getKey())),
valueSerializer.serialize(entry.getValue()))); valueSerializer.serialize(entry.getValue())).send());
} catch (SerializationException e) { } catch (SerializationException e) {
sink.error(e); sink.error(e);
} }
@ -141,30 +142,19 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
@Override @Override
public Mono<U> getValue(@Nullable CompositeSnapshot snapshot, T keySuffix, boolean existsAlmostCertainly) { public Mono<U> getValue(@Nullable CompositeSnapshot snapshot, T keySuffix, boolean existsAlmostCertainly) {
return Mono return dictionary
.using( .get(resolveSnapshot(snapshot), Mono.fromCallable(() -> toKey(serializeSuffix(keySuffix))), existsAlmostCertainly)
() -> toKey(serializeSuffix(keySuffix)), .handle(this::deserializeValue);
keyBuf -> dictionary
.get(resolveSnapshot(snapshot), LLUtils.lazyRetain(keyBuf), existsAlmostCertainly)
.handle(this::deserializeValue),
ReferenceCounted::release
);
} }
@Override @Override
public Mono<Void> putValue(T keySuffix, U value) { public Mono<Void> putValue(T keySuffix, U value) {
return Mono.using(() -> serializeSuffix(keySuffix), var keyMono = Mono.fromCallable(() -> toKey(serializeSuffix(keySuffix)));
keySuffixBuf -> Mono.using(() -> toKey(keySuffixBuf.retain()), var valueMono = Mono.fromCallable(() -> valueSerializer.serialize(value));
keyBuf -> Mono.using(() -> valueSerializer.serialize(value), return dictionary
valueBuf -> dictionary .put(keyMono, valueMono, LLDictionaryResultType.VOID)
.put(LLUtils.lazyRetain(keyBuf), LLUtils.lazyRetain(valueBuf), LLDictionaryResultType.VOID) .doOnNext(Send::close)
.doOnNext(ReferenceCounted::release), .then();
ReferenceCounted::release
),
ReferenceCounted::release
),
ReferenceCounted::release
).then();
} }
@Override @Override
@ -177,212 +167,149 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
UpdateReturnMode updateReturnMode, UpdateReturnMode updateReturnMode,
boolean existsAlmostCertainly, boolean existsAlmostCertainly,
SerializationFunction<@Nullable U, @Nullable U> updater) { SerializationFunction<@Nullable U, @Nullable U> updater) {
return Mono var keyMono = Mono.fromCallable(() -> toKey(serializeSuffix(keySuffix)));
.using( return dictionary
() -> toKey(serializeSuffix(keySuffix)), .update(keyMono, getSerializedUpdater(updater), updateReturnMode, existsAlmostCertainly)
keyBuf -> dictionary .handle(this::deserializeValue);
.update(LLUtils.lazyRetain(keyBuf), getSerializedUpdater(updater), updateReturnMode, existsAlmostCertainly)
.handle(this::deserializeValue),
ReferenceCounted::release
);
} }
@Override @Override
public Mono<Delta<U>> updateValueAndGetDelta(T keySuffix, public Mono<Delta<U>> updateValueAndGetDelta(T keySuffix,
boolean existsAlmostCertainly, boolean existsAlmostCertainly,
SerializationFunction<@Nullable U, @Nullable U> updater) { SerializationFunction<@Nullable U, @Nullable U> updater) {
return Mono var keyMono = Mono.fromCallable(() -> toKey(serializeSuffix(keySuffix)));
.using( return dictionary
() -> toKey(serializeSuffix(keySuffix)), .updateAndGetDelta(keyMono, getSerializedUpdater(updater), existsAlmostCertainly)
keyBuf -> dictionary .transform(mono -> LLUtils.mapLLDelta(mono, valueSerializer::deserialize));
.updateAndGetDelta(LLUtils.lazyRetain(keyBuf), getSerializedUpdater(updater), existsAlmostCertainly)
.transform(mono -> LLUtils.mapDelta(mono, valueSerializer::deserialize)),
ReferenceCounted::release
);
} }
public SerializationFunction<@Nullable Buffer, @Nullable Buffer> getSerializedUpdater(SerializationFunction<@Nullable U, @Nullable U> updater) { public SerializationFunction<@Nullable Send<Buffer>, @Nullable Send<Buffer>> getSerializedUpdater(
SerializationFunction<@Nullable U, @Nullable U> updater) {
return oldSerialized -> { return oldSerialized -> {
try { try (oldSerialized) {
U result; U result;
if (oldSerialized == null) { if (oldSerialized == null) {
result = updater.apply(null); result = updater.apply(null);
} else { } else {
result = updater.apply(valueSerializer.deserialize(oldSerialized.retain())); result = updater.apply(valueSerializer.deserialize(oldSerialized));
} }
if (result == null) { if (result == null) {
return null; return null;
} else { } else {
return valueSerializer.serialize(result); return valueSerializer.serialize(result);
} }
} finally {
if (oldSerialized != null) {
oldSerialized.release();
}
} }
}; };
} }
public <X> BiSerializationFunction<@Nullable Buffer, X, @Nullable Buffer> getSerializedUpdater( public <X> BiSerializationFunction<@Nullable Send<Buffer>, X, @Nullable Send<Buffer>> getSerializedUpdater(
BiSerializationFunction<@Nullable U, X, @Nullable U> updater) { BiSerializationFunction<@Nullable U, X, @Nullable U> updater) {
return (oldSerialized, extra) -> { return (oldSerialized, extra) -> {
try { try (oldSerialized) {
U result; U result;
if (oldSerialized == null) { if (oldSerialized == null) {
result = updater.apply(null, extra); result = updater.apply(null, extra);
} else { } else {
result = updater.apply(valueSerializer.deserialize(oldSerialized.retain()), extra); result = updater.apply(valueSerializer.deserialize(oldSerialized), extra);
} }
if (result == null) { if (result == null) {
return null; return null;
} else { } else {
return valueSerializer.serialize(result); return valueSerializer.serialize(result);
} }
} finally {
if (oldSerialized != null) {
oldSerialized.release();
}
} }
}; };
} }
@Override @Override
public Mono<U> putValueAndGetPrevious(T keySuffix, U value) { public Mono<U> putValueAndGetPrevious(T keySuffix, U value) {
return Mono var keyMono = Mono.fromCallable(() -> toKey(serializeSuffix(keySuffix)));
.using( var valueMono = Mono.fromCallable(() -> valueSerializer.serialize(value));
() -> serializeSuffix(keySuffix), return dictionary
keySuffixBuf -> Mono .put(keyMono,
.using( valueMono,
() -> toKey(keySuffixBuf.retain()), LLDictionaryResultType.PREVIOUS_VALUE)
keyBuf -> Mono .handle(this::deserializeValue);
.using(() -> valueSerializer.serialize(value),
valueBuf -> dictionary
.put(LLUtils.lazyRetain(keyBuf),
LLUtils.lazyRetain(valueBuf),
LLDictionaryResultType.PREVIOUS_VALUE)
.handle(this::deserializeValue),
ReferenceCounted::release
),
ReferenceCounted::release
),
ReferenceCounted::release
);
} }
@Override @Override
public Mono<Boolean> putValueAndGetChanged(T keySuffix, U value) { public Mono<Boolean> putValueAndGetChanged(T keySuffix, U value) {
return Mono var keyMono = Mono.fromCallable(() -> toKey(serializeSuffix(keySuffix)));
.using( var valueMono = Mono.fromCallable(() -> valueSerializer.serialize(value));
() -> serializeSuffix(keySuffix), return dictionary
keySuffixBuf -> Mono .put(keyMono, valueMono, LLDictionaryResultType.PREVIOUS_VALUE)
.using( .handle(this::deserializeValue)
() -> toKey(keySuffixBuf.retain()), .map(oldValue -> !Objects.equals(oldValue, value))
keyBuf -> Mono .defaultIfEmpty(value != null);
.using(() -> valueSerializer.serialize(value),
valueBuf -> dictionary
.put(LLUtils.lazyRetain(keyBuf),
LLUtils.lazyRetain(valueBuf),
LLDictionaryResultType.PREVIOUS_VALUE
)
.handle(this::deserializeValue)
.map(oldValue -> !Objects.equals(oldValue, value))
.defaultIfEmpty(value != null),
ReferenceCounted::release
),
ReferenceCounted::release
),
ReferenceCounted::release
);
} }
@Override @Override
public Mono<Void> remove(T keySuffix) { public Mono<Void> remove(T keySuffix) {
return Mono var keyMono = Mono.fromCallable(() -> toKey(serializeSuffix(keySuffix)));
.using( return dictionary
() -> toKey(serializeSuffix(keySuffix)), .remove(keyMono, LLDictionaryResultType.VOID)
keyBuf -> dictionary .doOnNext(Send::close)
.remove(LLUtils.lazyRetain(keyBuf), LLDictionaryResultType.VOID) .then();
.doOnNext(ReferenceCounted::release)
.then(),
ReferenceCounted::release
);
} }
@Override @Override
public Mono<U> removeAndGetPrevious(T keySuffix) { public Mono<U> removeAndGetPrevious(T keySuffix) {
return Mono var keyMono = Mono.fromCallable(() -> toKey(serializeSuffix(keySuffix)));
.using( return dictionary
() -> toKey(serializeSuffix(keySuffix)), .remove(keyMono, LLDictionaryResultType.PREVIOUS_VALUE)
keyBuf -> dictionary .handle(this::deserializeValue);
.remove(LLUtils.lazyRetain(keyBuf), LLDictionaryResultType.PREVIOUS_VALUE)
.handle(this::deserializeValue),
ReferenceCounted::release
);
} }
@Override @Override
public Mono<Boolean> removeAndGetStatus(T keySuffix) { public Mono<Boolean> removeAndGetStatus(T keySuffix) {
return Mono var keyMono = Mono.fromCallable(() -> toKey(serializeSuffix(keySuffix)));
.using( return dictionary
() -> toKey(serializeSuffix(keySuffix)), .remove(keyMono, LLDictionaryResultType.PREVIOUS_VALUE_EXISTENCE)
keyBuf -> dictionary .map(LLUtils::responseToBoolean);
.remove(LLUtils.lazyRetain(keyBuf), LLDictionaryResultType.PREVIOUS_VALUE_EXISTENCE)
.map(LLUtils::responseToBoolean),
ReferenceCounted::release
);
} }
@Override @Override
public Flux<Entry<T, Optional<U>>> getMulti(@Nullable CompositeSnapshot snapshot, Flux<T> keys, boolean existsAlmostCertainly) { public Flux<Entry<T, Optional<U>>> getMulti(@Nullable CompositeSnapshot snapshot, Flux<T> keys, boolean existsAlmostCertainly) {
return dictionary.getMulti(resolveSnapshot(snapshot), keys.flatMap(keySuffix -> Mono.fromCallable(() -> { var mappedKeys = keys
Buffer keySuffixBuf = serializeSuffix(keySuffix); .<Tuple2<T, Send<Buffer>>>handle((keySuffix, sink) -> {
try {
var key = toKey(keySuffixBuf.retain());
try {
return Tuples.of(keySuffix, key.retain());
} finally {
key.release();
}
} finally {
keySuffixBuf.release();
}
})), existsAlmostCertainly).flatMapSequential(entry -> {
entry.getT2().release();
return Mono.fromCallable(() -> {
Optional<U> valueOpt;
if (entry.getT3().isPresent()) {
var buf = entry.getT3().get();
try { try {
valueOpt = Optional.of(valueSerializer.deserialize(buf.retain())); sink.next(Tuples.of(keySuffix, toKey(serializeSuffix(keySuffix))));
} finally { } catch (SerializationException ex) {
buf.release(); sink.error(ex);
} }
} else { });
valueOpt = Optional.empty(); return dictionary
} .getMulti(resolveSnapshot(snapshot), mappedKeys, existsAlmostCertainly)
return Map.entry(entry.getT1(), valueOpt); .<Entry<T, Optional<U>>>handle((entry, sink) -> {
}); try {
}).transform(LLUtils::handleDiscard); Optional<U> valueOpt;
if (entry.getT3().isPresent()) {
try (var buf = entry.getT3().get()) {
valueOpt = Optional.of(valueSerializer.deserialize(buf));
}
} else {
valueOpt = Optional.empty();
}
sink.next(Map.entry(entry.getT1(), valueOpt));
} catch (SerializationException ex) {
sink.error(ex);
}
})
.transform(LLUtils::handleDiscard);
} }
private LLEntry serializeEntry(T key, U value) throws SerializationException { private Send<LLEntry> serializeEntry(T key, U value) throws SerializationException {
Buffer serializedKey = toKey(serializeSuffix(key)); try (var serializedKey = toKey(serializeSuffix(key)).receive()) {
try { try (var serializedValue = valueSerializer.serialize(value).receive()) {
Buffer serializedValue = valueSerializer.serialize(value); return LLEntry.of(serializedKey.send(), serializedValue.send()).send();
try {
return new LLEntry(serializedKey.retain(), serializedValue.retain());
} finally {
serializedValue.release();
} }
} finally {
serializedKey.release();
} }
} }
@Override @Override
public Mono<Void> putMulti(Flux<Entry<T, U>> entries) { public Mono<Void> putMulti(Flux<Entry<T, U>> entries) {
var serializedEntries = entries var serializedEntries = entries
.<LLEntry>handle((entry, sink) -> { .<Send<LLEntry>>handle((entry, sink) -> {
try { try {
sink.next(serializeEntry(entry.getKey(), entry.getValue())); sink.next(serializeEntry(entry.getKey(), entry.getValue()));
} catch (SerializationException e) { } catch (SerializationException e) {
@ -392,26 +319,26 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
return dictionary return dictionary
.putMulti(serializedEntries, false) .putMulti(serializedEntries, false)
.then() .then()
.doOnDiscard(LLEntry.class, entry -> { .doOnDiscard(LLEntry.class, ResourceSupport::close);
if (!entry.isReleased()) {
entry.release();
}
});
} }
@Override @Override
public <X> Flux<ExtraKeyOperationResult<T, X>> updateMulti(Flux<Tuple2<T, X>> entries, public <X> Flux<ExtraKeyOperationResult<T, X>> updateMulti(Flux<Tuple2<T, X>> entries,
BiSerializationFunction<@Nullable U, X, @Nullable U> updater) { BiSerializationFunction<@Nullable U, X, @Nullable U> updater) {
Flux<Tuple2<Buffer, X>> serializedEntries = entries var serializedEntries = entries
.flatMap(entry -> Mono .<Tuple2<Send<Buffer>, X>>handle((entry, sink) -> {
.fromCallable(() -> Tuples.of(serializeSuffix(entry.getT1()), entry.getT2())) try {
) sink.next(Tuples.of(serializeSuffix(entry.getT1()), entry.getT2()));
} catch (SerializationException ex) {
sink.error(ex);
}
})
.doOnDiscard(Tuple2.class, uncastedEntry -> { .doOnDiscard(Tuple2.class, uncastedEntry -> {
if (uncastedEntry.getT1() instanceof Buffer byteBuf) { if (uncastedEntry.getT1() instanceof Buffer byteBuf) {
byteBuf.release(); byteBuf.close();
} }
if (uncastedEntry.getT2() instanceof Buffer byteBuf) { if (uncastedEntry.getT2() instanceof Buffer byteBuf) {
byteBuf.release(); byteBuf.close();
} }
}); });
var serializedUpdater = getSerializedUpdater(updater); var serializedUpdater = getSerializedUpdater(updater);
@ -433,22 +360,17 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
return dictionary return dictionary
.getRangeKeys(resolveSnapshot(snapshot), rangeMono) .getRangeKeys(resolveSnapshot(snapshot), rangeMono)
.handle((key, sink) -> { .handle((key, sink) -> {
try { try (key) {
Buffer keySuffixWithExt = stripPrefix(key.retain(), false); try (var keySuffixWithExt = stripPrefix(key).receive()) {
try { sink.next(Map.entry(deserializeSuffix(keySuffixWithExt.copy().send()),
sink.next(Map.entry(deserializeSuffix(keySuffixWithExt.retainedSlice()),
new DatabaseSingleMapped<>(new DatabaseSingle<>(dictionary, new DatabaseSingleMapped<>(new DatabaseSingle<>(dictionary,
toKey(keySuffixWithExt.retainedSlice()), toKey(keySuffixWithExt.send()),
Serializer.noop() Serializer.noop()
), valueSerializer) ), valueSerializer)
)); ));
} catch (SerializationException ex) {
sink.error(ex);
} finally {
keySuffixWithExt.release();
} }
} finally { } catch (SerializationException ex) {
key.release(); sink.error(ex);
} }
}); });
} }
@ -457,34 +379,20 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
public Flux<Entry<T, U>> getAllValues(@Nullable CompositeSnapshot snapshot) { public Flux<Entry<T, U>> getAllValues(@Nullable CompositeSnapshot snapshot) {
return dictionary return dictionary
.getRange(resolveSnapshot(snapshot), rangeMono) .getRange(resolveSnapshot(snapshot), rangeMono)
.<Entry<T, U>>handle((serializedEntry, sink) -> { .<Entry<T, U>>handle((serializedEntryToReceive, sink) -> {
Buffer key = serializedEntry.getKey(); try (var serializedEntry = serializedEntryToReceive.receive()) {
Buffer value = serializedEntry.getValue(); sink.next(Map.entry(deserializeSuffix(stripPrefix(serializedEntry.getKey())),
try { valueSerializer.deserialize(serializedEntry.getValue())));
Buffer keySuffix = stripPrefix(key.retain(), false);
try {
sink.next(Map.entry(deserializeSuffix(keySuffix.retain()),
valueSerializer.deserialize(value.retain())));
} finally {
keySuffix.release();
}
} catch (SerializationException e) { } catch (SerializationException e) {
sink.error(e); sink.error(e);
} finally {
key.release();
value.release();
} }
}) })
.doOnDiscard(Entry.class, uncastedEntry -> { .doOnDiscard(Entry.class, uncastedEntry -> {
if (uncastedEntry.getKey() instanceof Buffer byteBuf) { if (uncastedEntry.getKey() instanceof Buffer byteBuf) {
if (byteBuf.refCnt() > 0) { byteBuf.close();
byteBuf.release();
}
} }
if (uncastedEntry.getValue() instanceof Buffer byteBuf) { if (uncastedEntry.getValue() instanceof Buffer byteBuf) {
if (byteBuf.refCnt() > 0) { byteBuf.close();
byteBuf.release();
}
} }
}); });
} }
@ -495,17 +403,8 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
this.getAllValues(null), this.getAllValues(null),
dictionary.setRange(rangeMono, entries.handle((entry, sink) -> { dictionary.setRange(rangeMono, entries.handle((entry, sink) -> {
try { try {
Buffer serializedKey = toKey(serializeSuffix(entry.getKey())); sink.next(LLEntry.of(toKey(serializeSuffix(entry.getKey())),
try { valueSerializer.serialize(entry.getValue())).send());
Buffer serializedValue = valueSerializer.serialize(entry.getValue());
try {
sink.next(new LLEntry(serializedKey.retain(), serializedValue.retain()));
} finally {
serializedValue.release();
}
} finally {
serializedKey.release();
}
} catch (SerializationException e) { } catch (SerializationException e) {
sink.error(e); sink.error(e);
} }
@ -515,18 +414,16 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
@Override @Override
public Mono<Void> clear() { public Mono<Void> clear() {
return Mono.defer(() -> { if (range.isAll()) {
if (range.isAll()) { return dictionary.clear();
return dictionary.clear(); } else if (range.isSingle()) {
} else if (range.isSingle()) { return dictionary
return dictionary .remove(Mono.fromCallable(range::getSingle), LLDictionaryResultType.VOID)
.remove(LLUtils.lazyRetain(range.getSingle()), LLDictionaryResultType.VOID) .doOnNext(Send::close)
.doOnNext(ReferenceCounted::release) .then();
.then(); } else {
} else { return dictionary.setRange(rangeMono, Flux.empty());
return dictionary.setRange(rangeMono, Flux.empty()); }
}
});
} }
} }

View File

@ -43,7 +43,7 @@ public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T
Serializer<T, Send<Buffer>> keySuffixSerializer, Serializer<T, Send<Buffer>> keySuffixSerializer,
Serializer<U, Send<Buffer>> valueSerializer, Serializer<U, Send<Buffer>> valueSerializer,
Function<T, TH> keySuffixHashFunction, Function<T, TH> keySuffixHashFunction,
SerializerFixedBinaryLength<TH, Buffer> keySuffixHashSerializer) { SerializerFixedBinaryLength<TH, Send<Buffer>> keySuffixHashSerializer) {
if (dictionary.getUpdateMode().block() != UpdateMode.ALLOW) { if (dictionary.getUpdateMode().block() != UpdateMode.ALLOW) {
throw new IllegalArgumentException("Hashed maps only works when UpdateMode is ALLOW"); throw new IllegalArgumentException("Hashed maps only works when UpdateMode is ALLOW");
} }
@ -61,13 +61,13 @@ public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T
} }
public static <T, U, UH> DatabaseMapDictionaryHashed<T, U, UH> simple(LLDictionary dictionary, public static <T, U, UH> DatabaseMapDictionaryHashed<T, U, UH> simple(LLDictionary dictionary,
Serializer<T, Buffer> keySerializer, Serializer<T, Send<Buffer>> keySerializer,
Serializer<U, Buffer> valueSerializer, Serializer<U, Send<Buffer>> valueSerializer,
Function<T, UH> keyHashFunction, Function<T, UH> keyHashFunction,
SerializerFixedBinaryLength<UH, Buffer> keyHashSerializer) { SerializerFixedBinaryLength<UH, Send<Buffer>> keyHashSerializer) {
return new DatabaseMapDictionaryHashed<>( return new DatabaseMapDictionaryHashed<>(
dictionary, dictionary,
dictionary.getAllocator().buffer(0), dictionary.getAllocator().allocate(0).send(),
keySerializer, keySerializer,
valueSerializer, valueSerializer,
keyHashFunction, keyHashFunction,
@ -76,11 +76,11 @@ public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T
} }
public static <T, U, UH> DatabaseMapDictionaryHashed<T, U, UH> tail(LLDictionary dictionary, public static <T, U, UH> DatabaseMapDictionaryHashed<T, U, UH> tail(LLDictionary dictionary,
Buffer prefixKey, Send<Buffer> prefixKey,
Serializer<T, Buffer> keySuffixSerializer, Serializer<T, Send<Buffer>> keySuffixSerializer,
Serializer<U, Buffer> valueSerializer, Serializer<U, Send<Buffer>> valueSerializer,
Function<T, UH> keySuffixHashFunction, Function<T, UH> keySuffixHashFunction,
SerializerFixedBinaryLength<UH, Buffer> keySuffixHashSerializer) { SerializerFixedBinaryLength<UH, Send<Buffer>> keySuffixHashSerializer) {
return new DatabaseMapDictionaryHashed<>(dictionary, return new DatabaseMapDictionaryHashed<>(dictionary,
prefixKey, prefixKey,
keySuffixSerializer, keySuffixSerializer,

View File

@ -1,6 +1,7 @@
package it.cavallium.dbengine.database.collections; package it.cavallium.dbengine.database.collections;
import io.netty.buffer.api.Buffer; import io.netty.buffer.api.Buffer;
import io.netty.buffer.api.Send;
import it.cavallium.dbengine.client.CompositeSnapshot; import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.LLDictionary; import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.collections.DatabaseEmpty.Nothing; import it.cavallium.dbengine.database.collections.DatabaseEmpty.Nothing;
@ -15,24 +16,21 @@ import reactor.core.publisher.Mono;
public class DatabaseSetDictionary<T> extends DatabaseMapDictionary<T, Nothing> { public class DatabaseSetDictionary<T> extends DatabaseMapDictionary<T, Nothing> {
protected DatabaseSetDictionary(LLDictionary dictionary, protected DatabaseSetDictionary(LLDictionary dictionary,
Buffer prefixKey, Send<Buffer> prefixKey,
SerializerFixedBinaryLength<T, Buffer> keySuffixSerializer) { SerializerFixedBinaryLength<T, Send<Buffer>> keySuffixSerializer) {
super(dictionary, prefixKey, keySuffixSerializer, DatabaseEmpty.NOTHING_SERIALIZER); super(dictionary, prefixKey, keySuffixSerializer, DatabaseEmpty.nothingSerializer(dictionary.getAllocator()));
} }
public static <T> DatabaseSetDictionary<T> simple(LLDictionary dictionary, public static <T> DatabaseSetDictionary<T> simple(LLDictionary dictionary,
SerializerFixedBinaryLength<T, Buffer> keySerializer) { SerializerFixedBinaryLength<T, Send<Buffer>> keySerializer) {
var buf = dictionary.getAllocator().buffer(0); try (var buf = dictionary.getAllocator().allocate(0)) {
try { return new DatabaseSetDictionary<>(dictionary, buf.send(), keySerializer);
return new DatabaseSetDictionary<>(dictionary, buf, keySerializer);
} finally {
buf.release();
} }
} }
public static <T> DatabaseSetDictionary<T> tail(LLDictionary dictionary, public static <T> DatabaseSetDictionary<T> tail(LLDictionary dictionary,
Buffer prefixKey, Send<Buffer> prefixKey,
SerializerFixedBinaryLength<T, Buffer> keySuffixSerializer) { SerializerFixedBinaryLength<T, Send<Buffer>> keySuffixSerializer) {
return new DatabaseSetDictionary<>(dictionary, prefixKey, keySuffixSerializer); return new DatabaseSetDictionary<>(dictionary, prefixKey, keySuffixSerializer);
} }

View File

@ -1,6 +1,7 @@
package it.cavallium.dbengine.database.collections; package it.cavallium.dbengine.database.collections;
import io.netty.buffer.api.Buffer; import io.netty.buffer.api.Buffer;
import io.netty.buffer.api.Send;
import it.cavallium.dbengine.client.CompositeSnapshot; import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.LLDictionary; import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.collections.DatabaseEmpty.Nothing; import it.cavallium.dbengine.database.collections.DatabaseEmpty.Nothing;
@ -17,25 +18,25 @@ import reactor.core.publisher.Mono;
public class DatabaseSetDictionaryHashed<T, TH> extends DatabaseMapDictionaryHashed<T, Nothing, TH> { public class DatabaseSetDictionaryHashed<T, TH> extends DatabaseMapDictionaryHashed<T, Nothing, TH> {
protected DatabaseSetDictionaryHashed(LLDictionary dictionary, protected DatabaseSetDictionaryHashed(LLDictionary dictionary,
Buffer prefixKey, Send<Buffer> prefixKey,
Serializer<T, Buffer> keySuffixSerializer, Serializer<T, Send<Buffer>> keySuffixSerializer,
Function<T, TH> keySuffixHashFunction, Function<T, TH> keySuffixHashFunction,
SerializerFixedBinaryLength<TH, Buffer> keySuffixHashSerializer) { SerializerFixedBinaryLength<TH, Send<Buffer>> keySuffixHashSerializer) {
super(dictionary, super(dictionary,
prefixKey, prefixKey,
keySuffixSerializer, keySuffixSerializer,
DatabaseEmpty.NOTHING_SERIALIZER, DatabaseEmpty.nothingSerializer(dictionary.getAllocator()),
keySuffixHashFunction, keySuffixHashFunction,
keySuffixHashSerializer keySuffixHashSerializer
); );
} }
public static <T, TH> DatabaseSetDictionaryHashed<T, TH> simple(LLDictionary dictionary, public static <T, TH> DatabaseSetDictionaryHashed<T, TH> simple(LLDictionary dictionary,
Serializer<T, Buffer> keySerializer, Serializer<T, Send<Buffer>> keySerializer,
Function<T, TH> keyHashFunction, Function<T, TH> keyHashFunction,
SerializerFixedBinaryLength<TH, Buffer> keyHashSerializer) { SerializerFixedBinaryLength<TH, Send<Buffer>> keyHashSerializer) {
return new DatabaseSetDictionaryHashed<>(dictionary, return new DatabaseSetDictionaryHashed<>(dictionary,
dictionary.getAllocator().buffer(0), dictionary.getAllocator().allocate(0).send(),
keySerializer, keySerializer,
keyHashFunction, keyHashFunction,
keyHashSerializer keyHashSerializer
@ -43,10 +44,10 @@ public class DatabaseSetDictionaryHashed<T, TH> extends DatabaseMapDictionaryHas
} }
public static <T, TH> DatabaseSetDictionaryHashed<T, TH> tail(LLDictionary dictionary, public static <T, TH> DatabaseSetDictionaryHashed<T, TH> tail(LLDictionary dictionary,
Buffer prefixKey, Send<Buffer> prefixKey,
Serializer<T, Buffer> keySuffixSerializer, Serializer<T, Send<Buffer>> keySuffixSerializer,
Function<T, TH> keyHashFunction, Function<T, TH> keyHashFunction,
SerializerFixedBinaryLength<TH, Buffer> keyHashSerializer) { SerializerFixedBinaryLength<TH, Send<Buffer>> keyHashSerializer) {
return new DatabaseSetDictionaryHashed<>(dictionary, return new DatabaseSetDictionaryHashed<>(dictionary,
prefixKey, prefixKey,
keySuffixSerializer, keySuffixSerializer,

View File

@ -1,6 +1,7 @@
package it.cavallium.dbengine.database.collections; package it.cavallium.dbengine.database.collections;
import io.netty.buffer.api.Buffer; import io.netty.buffer.api.Buffer;
import io.netty.buffer.api.Send;
import it.cavallium.dbengine.client.CompositeSnapshot; import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.LLDictionary; import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.serialization.Serializer; import it.cavallium.dbengine.database.serialization.Serializer;
@ -16,15 +17,15 @@ import reactor.core.publisher.Mono;
public class SubStageGetterHashMap<T, U, TH> implements public class SubStageGetterHashMap<T, U, TH> implements
SubStageGetter<Map<T, U>, DatabaseMapDictionaryHashed<T, U, TH>> { SubStageGetter<Map<T, U>, DatabaseMapDictionaryHashed<T, U, TH>> {
private final Serializer<T, Buffer> keySerializer; private final Serializer<T, Send<Buffer>> keySerializer;
private final Serializer<U, Buffer> valueSerializer; private final Serializer<U, Send<Buffer>> valueSerializer;
private final Function<T, TH> keyHashFunction; private final Function<T, TH> keyHashFunction;
private final SerializerFixedBinaryLength<TH, Buffer> keyHashSerializer; private final SerializerFixedBinaryLength<TH, Send<Buffer>> keyHashSerializer;
public SubStageGetterHashMap(Serializer<T, Buffer> keySerializer, public SubStageGetterHashMap(Serializer<T, Send<Buffer>> keySerializer,
Serializer<U, Buffer> valueSerializer, Serializer<U, Send<Buffer>> valueSerializer,
Function<T, TH> keyHashFunction, Function<T, TH> keyHashFunction,
SerializerFixedBinaryLength<TH, Buffer> keyHashSerializer) { SerializerFixedBinaryLength<TH, Send<Buffer>> keyHashSerializer) {
this.keySerializer = keySerializer; this.keySerializer = keySerializer;
this.valueSerializer = valueSerializer; this.valueSerializer = valueSerializer;
this.keyHashFunction = keyHashFunction; this.keyHashFunction = keyHashFunction;
@ -34,20 +35,20 @@ public class SubStageGetterHashMap<T, U, TH> implements
@Override @Override
public Mono<DatabaseMapDictionaryHashed<T, U, TH>> subStage(LLDictionary dictionary, public Mono<DatabaseMapDictionaryHashed<T, U, TH>> subStage(LLDictionary dictionary,
@Nullable CompositeSnapshot snapshot, @Nullable CompositeSnapshot snapshot,
Mono<Buffer> prefixKeyMono) { Mono<Send<Buffer>> prefixKeyMono) {
return Mono.usingWhen( return Mono.usingWhen(
prefixKeyMono, prefixKeyMono,
prefixKey -> Mono prefixKey -> Mono
.fromSupplier(() -> DatabaseMapDictionaryHashed .fromSupplier(() -> DatabaseMapDictionaryHashed
.tail(dictionary, .tail(dictionary,
prefixKey.retain(), prefixKey,
keySerializer, keySerializer,
valueSerializer, valueSerializer,
keyHashFunction, keyHashFunction,
keyHashSerializer keyHashSerializer
) )
), ),
prefixKey -> Mono.fromRunnable(prefixKey::release) prefixKey -> Mono.fromRunnable(prefixKey::close)
); );
} }

View File

@ -1,6 +1,7 @@
package it.cavallium.dbengine.database.collections; package it.cavallium.dbengine.database.collections;
import io.netty.buffer.api.Buffer; import io.netty.buffer.api.Buffer;
import io.netty.buffer.api.Send;
import it.cavallium.dbengine.client.CompositeSnapshot; import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.LLDictionary; import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.collections.DatabaseEmpty.Nothing; import it.cavallium.dbengine.database.collections.DatabaseEmpty.Nothing;
@ -16,13 +17,13 @@ import reactor.core.publisher.Mono;
public class SubStageGetterHashSet<T, TH> implements public class SubStageGetterHashSet<T, TH> implements
SubStageGetter<Map<T, Nothing>, DatabaseSetDictionaryHashed<T, TH>> { SubStageGetter<Map<T, Nothing>, DatabaseSetDictionaryHashed<T, TH>> {
private final Serializer<T, Buffer> keySerializer; private final Serializer<T, Send<Buffer>> keySerializer;
private final Function<T, TH> keyHashFunction; private final Function<T, TH> keyHashFunction;
private final SerializerFixedBinaryLength<TH, Buffer> keyHashSerializer; private final SerializerFixedBinaryLength<TH, Send<Buffer>> keyHashSerializer;
public SubStageGetterHashSet(Serializer<T, Buffer> keySerializer, public SubStageGetterHashSet(Serializer<T, Send<Buffer>> keySerializer,
Function<T, TH> keyHashFunction, Function<T, TH> keyHashFunction,
SerializerFixedBinaryLength<TH, Buffer> keyHashSerializer) { SerializerFixedBinaryLength<TH, Send<Buffer>> keyHashSerializer) {
this.keySerializer = keySerializer; this.keySerializer = keySerializer;
this.keyHashFunction = keyHashFunction; this.keyHashFunction = keyHashFunction;
this.keyHashSerializer = keyHashSerializer; this.keyHashSerializer = keyHashSerializer;
@ -31,18 +32,18 @@ public class SubStageGetterHashSet<T, TH> implements
@Override @Override
public Mono<DatabaseSetDictionaryHashed<T, TH>> subStage(LLDictionary dictionary, public Mono<DatabaseSetDictionaryHashed<T, TH>> subStage(LLDictionary dictionary,
@Nullable CompositeSnapshot snapshot, @Nullable CompositeSnapshot snapshot,
Mono<Buffer> prefixKeyMono) { Mono<Send<Buffer>> prefixKeyMono) {
return Mono.usingWhen(prefixKeyMono, return Mono.usingWhen(prefixKeyMono,
prefixKey -> Mono prefixKey -> Mono
.fromSupplier(() -> DatabaseSetDictionaryHashed .fromSupplier(() -> DatabaseSetDictionaryHashed
.tail(dictionary, .tail(dictionary,
prefixKey.retain(), prefixKey,
keySerializer, keySerializer,
keyHashFunction, keyHashFunction,
keyHashSerializer keyHashSerializer
) )
), ),
prefixKey -> Mono.fromRunnable(prefixKey::release) prefixKey -> Mono.fromRunnable(prefixKey::close)
); );
} }

View File

@ -1,6 +1,7 @@
package it.cavallium.dbengine.database.collections; package it.cavallium.dbengine.database.collections;
import io.netty.buffer.api.Buffer; import io.netty.buffer.api.Buffer;
import io.netty.buffer.api.Send;
import io.netty.util.ReferenceCounted; import io.netty.util.ReferenceCounted;
import it.cavallium.dbengine.client.CompositeSnapshot; import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.LLDictionary; import it.cavallium.dbengine.database.LLDictionary;
@ -14,11 +15,11 @@ import reactor.core.publisher.Mono;
public class SubStageGetterMap<T, U> implements SubStageGetter<Map<T, U>, DatabaseMapDictionary<T, U>> { public class SubStageGetterMap<T, U> implements SubStageGetter<Map<T, U>, DatabaseMapDictionary<T, U>> {
private final SerializerFixedBinaryLength<T, Buffer> keySerializer; private final SerializerFixedBinaryLength<T, Send<Buffer>> keySerializer;
private final Serializer<U, Buffer> valueSerializer; private final Serializer<U, Send<Buffer>> valueSerializer;
public SubStageGetterMap(SerializerFixedBinaryLength<T, Buffer> keySerializer, public SubStageGetterMap(SerializerFixedBinaryLength<T, Send<Buffer>> keySerializer,
Serializer<U, Buffer> valueSerializer) { Serializer<U, Send<Buffer>> valueSerializer) {
this.keySerializer = keySerializer; this.keySerializer = keySerializer;
this.valueSerializer = valueSerializer; this.valueSerializer = valueSerializer;
} }
@ -26,17 +27,17 @@ public class SubStageGetterMap<T, U> implements SubStageGetter<Map<T, U>, Databa
@Override @Override
public Mono<DatabaseMapDictionary<T, U>> subStage(LLDictionary dictionary, public Mono<DatabaseMapDictionary<T, U>> subStage(LLDictionary dictionary,
@Nullable CompositeSnapshot snapshot, @Nullable CompositeSnapshot snapshot,
Mono<Buffer> prefixKeyMono) { Mono<Send<Buffer>> prefixKeyMono) {
return Mono.usingWhen(prefixKeyMono, return Mono.usingWhen(prefixKeyMono,
prefixKey -> Mono prefixKey -> Mono
.fromSupplier(() -> DatabaseMapDictionary .fromSupplier(() -> DatabaseMapDictionary
.tail(dictionary, .tail(dictionary,
prefixKey.retain(), prefixKey,
keySerializer, keySerializer,
valueSerializer valueSerializer
) )
), ),
prefixKey -> Mono.fromRunnable(prefixKey::release) prefixKey -> Mono.fromRunnable(prefixKey::close)
); );
} }

View File

@ -1,6 +1,7 @@
package it.cavallium.dbengine.database.collections; package it.cavallium.dbengine.database.collections;
import io.netty.buffer.api.Buffer; import io.netty.buffer.api.Buffer;
import io.netty.buffer.api.Send;
import io.netty.util.ReferenceCounted; import io.netty.util.ReferenceCounted;
import it.cavallium.dbengine.client.CompositeSnapshot; import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.LLDictionary; import it.cavallium.dbengine.database.LLDictionary;
@ -15,11 +16,11 @@ public class SubStageGetterMapDeep<T, U, US extends DatabaseStage<U>> implements
SubStageGetter<Map<T, U>, DatabaseMapDictionaryDeep<T, U, US>> { SubStageGetter<Map<T, U>, DatabaseMapDictionaryDeep<T, U, US>> {
private final SubStageGetter<U, US> subStageGetter; private final SubStageGetter<U, US> subStageGetter;
private final SerializerFixedBinaryLength<T, Buffer> keySerializer; private final SerializerFixedBinaryLength<T, Send<Buffer>> keySerializer;
private final int keyExtLength; private final int keyExtLength;
public SubStageGetterMapDeep(SubStageGetter<U, US> subStageGetter, public SubStageGetterMapDeep(SubStageGetter<U, US> subStageGetter,
SerializerFixedBinaryLength<T, Buffer> keySerializer, SerializerFixedBinaryLength<T, Send<Buffer>> keySerializer,
int keyExtLength) { int keyExtLength) {
this.subStageGetter = subStageGetter; this.subStageGetter = subStageGetter;
this.keySerializer = keySerializer; this.keySerializer = keySerializer;
@ -41,18 +42,18 @@ public class SubStageGetterMapDeep<T, U, US extends DatabaseStage<U>> implements
@Override @Override
public Mono<DatabaseMapDictionaryDeep<T, U, US>> subStage(LLDictionary dictionary, public Mono<DatabaseMapDictionaryDeep<T, U, US>> subStage(LLDictionary dictionary,
@Nullable CompositeSnapshot snapshot, @Nullable CompositeSnapshot snapshot,
Mono<Buffer> prefixKeyMono) { Mono<Send<Buffer>> prefixKeyMono) {
return Mono.usingWhen(prefixKeyMono, return Mono.usingWhen(prefixKeyMono,
prefixKey -> Mono prefixKey -> Mono
.fromSupplier(() -> DatabaseMapDictionaryDeep .fromSupplier(() -> DatabaseMapDictionaryDeep
.deepIntermediate(dictionary, .deepIntermediate(dictionary,
prefixKey.retain(), prefixKey,
keySerializer, keySerializer,
subStageGetter, subStageGetter,
keyExtLength keyExtLength
) )
), ),
prefixKey -> Mono.fromRunnable(prefixKey::release) prefixKey -> Mono.fromRunnable(prefixKey::close)
); );
} }
@ -61,23 +62,6 @@ public class SubStageGetterMapDeep<T, U, US extends DatabaseStage<U>> implements
return true; return true;
} }
private Mono<Void> checkKeyFluxConsistency(Buffer prefixKey, List<Buffer> keys) {
return Mono
.fromCallable(() -> {
try {
for (Buffer key : keys) {
assert key.readableBytes() == prefixKey.readableBytes() + getKeyBinaryLength();
}
} finally {
prefixKey.release();
for (Buffer key : keys) {
key.release();
}
}
return null;
});
}
public int getKeyBinaryLength() { public int getKeyBinaryLength() {
return keySerializer.getSerializedBinaryLength() + keyExtLength; return keySerializer.getSerializedBinaryLength() + keyExtLength;
} }

View File

@ -1,6 +1,7 @@
package it.cavallium.dbengine.database.collections; package it.cavallium.dbengine.database.collections;
import io.netty.buffer.api.Buffer; import io.netty.buffer.api.Buffer;
import io.netty.buffer.api.Send;
import io.netty.util.ReferenceCounted; import io.netty.util.ReferenceCounted;
import it.cavallium.dbengine.client.CompositeSnapshot; import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.LLDictionary; import it.cavallium.dbengine.database.LLDictionary;
@ -14,20 +15,20 @@ import reactor.core.publisher.Mono;
public class SubStageGetterSet<T> implements SubStageGetter<Map<T, Nothing>, DatabaseSetDictionary<T>> { public class SubStageGetterSet<T> implements SubStageGetter<Map<T, Nothing>, DatabaseSetDictionary<T>> {
private final SerializerFixedBinaryLength<T, Buffer> keySerializer; private final SerializerFixedBinaryLength<T, Send<Buffer>> keySerializer;
public SubStageGetterSet(SerializerFixedBinaryLength<T, Buffer> keySerializer) { public SubStageGetterSet(SerializerFixedBinaryLength<T, Send<Buffer>> keySerializer) {
this.keySerializer = keySerializer; this.keySerializer = keySerializer;
} }
@Override @Override
public Mono<DatabaseSetDictionary<T>> subStage(LLDictionary dictionary, public Mono<DatabaseSetDictionary<T>> subStage(LLDictionary dictionary,
@Nullable CompositeSnapshot snapshot, @Nullable CompositeSnapshot snapshot,
Mono<Buffer> prefixKeyMono) { Mono<Send<Buffer>> prefixKeyMono) {
return Mono.usingWhen(prefixKeyMono, return Mono.usingWhen(prefixKeyMono,
prefixKey -> Mono prefixKey -> Mono
.fromSupplier(() -> DatabaseSetDictionary.tail(dictionary, prefixKey.retain(), keySerializer)), .fromSupplier(() -> DatabaseSetDictionary.tail(dictionary, prefixKey, keySerializer)),
prefixKey -> Mono.fromRunnable(prefixKey::release) prefixKey -> Mono.fromRunnable(prefixKey::close)
); );
} }

View File

@ -1,9 +1,10 @@
package it.cavallium.dbengine.database.collections; package it.cavallium.dbengine.database.collections;
import io.netty.buffer.api.Buffer; import io.netty.buffer.api.Buffer;
import io.netty.buffer.api.Send;
import it.cavallium.dbengine.database.serialization.Serializer; import it.cavallium.dbengine.database.serialization.Serializer;
public class SubStageGetterSingleBytes extends SubStageGetterSingle<Buffer> { public class SubStageGetterSingleBytes extends SubStageGetterSingle<Send<Buffer>> {
public SubStageGetterSingleBytes() { public SubStageGetterSingleBytes() {
super(Serializer.noop()); super(Serializer.noop());

View File

@ -2,7 +2,6 @@ package it.cavallium.dbengine.database.disk;
import io.netty.buffer.api.Buffer; import io.netty.buffer.api.Buffer;
import io.netty.buffer.api.BufferAllocator; import io.netty.buffer.api.BufferAllocator;
import io.netty.buffer.api.BufferUtil;
import io.netty.buffer.api.Send; import io.netty.buffer.api.Send;
import it.cavallium.dbengine.database.LLRange; import it.cavallium.dbengine.database.LLRange;
import it.cavallium.dbengine.database.LLUtils; import it.cavallium.dbengine.database.LLUtils;
@ -37,15 +36,17 @@ public abstract class LLLocalGroupedReactiveRocksIterator<T> {
ReadOptions readOptions, ReadOptions readOptions,
boolean canFillCache, boolean canFillCache,
boolean readValues) { boolean readValues) {
this.db = db; try (range) {
this.alloc = alloc; this.db = db;
this.cfh = cfh; this.alloc = alloc;
this.prefixLength = prefixLength; this.cfh = cfh;
this.range = range; this.prefixLength = prefixLength;
this.allowNettyDirect = allowNettyDirect; this.range = range.receive();
this.readOptions = readOptions; this.allowNettyDirect = allowNettyDirect;
this.canFillCache = canFillCache; this.readOptions = readOptions;
this.readValues = readValues; this.canFillCache = canFillCache;
this.readValues = readValues;
}
} }
@ -54,9 +55,8 @@ public abstract class LLLocalGroupedReactiveRocksIterator<T> {
.generate(() -> { .generate(() -> {
var readOptions = new ReadOptions(this.readOptions); var readOptions = new ReadOptions(this.readOptions);
readOptions.setFillCache(canFillCache && range.hasMin() && range.hasMax()); readOptions.setFillCache(canFillCache && range.hasMin() && range.hasMax());
return LLLocalDictionary.getRocksIterator(allowNettyDirect, readOptions, range.retain(), db, cfh); return LLLocalDictionary.getRocksIterator(allowNettyDirect, readOptions, range.copy().send(), db, cfh);
}, (tuple, sink) -> { }, (tuple, sink) -> {
range.retain();
try { try {
var rocksIterator = tuple.getT1(); var rocksIterator = tuple.getT1();
ObjectArrayList<T> values = new ObjectArrayList<>(); ObjectArrayList<T> values = new ObjectArrayList<>();
@ -64,34 +64,32 @@ public abstract class LLLocalGroupedReactiveRocksIterator<T> {
try { try {
rocksIterator.status(); rocksIterator.status();
while (rocksIterator.isValid()) { while (rocksIterator.isValid()) {
Buffer key = LLUtils.readDirectNioBuffer(alloc, rocksIterator::key); try (Buffer key = LLUtils.readDirectNioBuffer(alloc, rocksIterator::key)) {
try {
if (firstGroupKey == null) { if (firstGroupKey == null) {
firstGroupKey = key.retain(); firstGroupKey = key.copy();
} else if (!ByteBufUtil.equals(firstGroupKey, firstGroupKey.readerIndex(), key, key.readerIndex(), prefixLength)) { } else if (!LLUtils.equals(firstGroupKey, firstGroupKey.readerOffset(),
key, key.readerOffset(), prefixLength)) {
break; break;
} }
Buffer value; Buffer value;
if (readValues) { if (readValues) {
value = LLUtils.readDirectNioBuffer(alloc, rocksIterator::value); value = LLUtils.readDirectNioBuffer(alloc, rocksIterator::value);
} else { } else {
value = alloc.buffer(0); value = alloc.allocate(0);
} }
try { try {
rocksIterator.next(); rocksIterator.next();
rocksIterator.status(); rocksIterator.status();
T entry = getEntry(key.retain(), value.retain()); T entry = getEntry(key.send(), value.send());
values.add(entry); values.add(entry);
} finally { } finally {
value.release(); value.close();
} }
} finally {
key.release();
} }
} }
} finally { } finally {
if (firstGroupKey != null) { if (firstGroupKey != null) {
firstGroupKey.release(); firstGroupKey.close();
} }
} }
if (!values.isEmpty()) { if (!values.isEmpty()) {
@ -101,21 +99,19 @@ public abstract class LLLocalGroupedReactiveRocksIterator<T> {
} }
} catch (RocksDBException ex) { } catch (RocksDBException ex) {
sink.error(ex); sink.error(ex);
} finally {
range.release();
} }
return tuple; return tuple;
}, tuple -> { }, tuple -> {
var rocksIterator = tuple.getT1(); var rocksIterator = tuple.getT1();
rocksIterator.close(); rocksIterator.close();
tuple.getT2().release(); tuple.getT2().close();
tuple.getT3().release(); tuple.getT3().close();
}); });
} }
public abstract T getEntry(Send<Buffer> key, Send<Buffer> value); public abstract T getEntry(Send<Buffer> key, Send<Buffer> value);
public void release() { public void release() {
range.release(); range.close();
} }
} }

View File

@ -2,7 +2,6 @@ package it.cavallium.dbengine.database.disk;
import io.netty.buffer.api.Buffer; import io.netty.buffer.api.Buffer;
import io.netty.buffer.api.BufferAllocator; import io.netty.buffer.api.BufferAllocator;
import io.netty.buffer.api.BufferUtil;
import io.netty.buffer.api.Send; import io.netty.buffer.api.Send;
import it.cavallium.dbengine.database.LLRange; import it.cavallium.dbengine.database.LLRange;
import it.cavallium.dbengine.database.LLUtils; import it.cavallium.dbengine.database.LLUtils;
@ -34,15 +33,17 @@ public class LLLocalKeyPrefixReactiveRocksIterator {
ReadOptions readOptions, ReadOptions readOptions,
boolean canFillCache, boolean canFillCache,
String debugName) { String debugName) {
this.db = db; try (range) {
this.alloc = alloc; this.db = db;
this.cfh = cfh; this.alloc = alloc;
this.prefixLength = prefixLength; this.cfh = cfh;
this.range = range; this.prefixLength = prefixLength;
this.allowNettyDirect = allowNettyDirect; this.range = range.receive();
this.readOptions = readOptions; this.allowNettyDirect = allowNettyDirect;
this.canFillCache = canFillCache; this.readOptions = readOptions;
this.debugName = debugName; this.canFillCache = canFillCache;
this.debugName = debugName;
}
} }
@ -54,54 +55,48 @@ public class LLLocalKeyPrefixReactiveRocksIterator {
readOptions.setReadaheadSize(32 * 1024); // 32KiB readOptions.setReadaheadSize(32 * 1024); // 32KiB
readOptions.setFillCache(canFillCache); readOptions.setFillCache(canFillCache);
} }
return LLLocalDictionary.getRocksIterator(allowNettyDirect, readOptions, range.retain(), db, cfh); return LLLocalDictionary.getRocksIterator(allowNettyDirect, readOptions, range.copy().send(), db, cfh);
}, (tuple, sink) -> { }, (tuple, sink) -> {
range.retain();
try { try {
var rocksIterator = tuple.getT1(); var rocksIterator = tuple.getT1();
rocksIterator.status(); rocksIterator.status();
Buffer firstGroupKey = null; Buffer firstGroupKey = null;
try { try {
while (rocksIterator.isValid()) { while (rocksIterator.isValid()) {
Buffer key = LLUtils.readDirectNioBuffer(alloc, rocksIterator::key); try (Buffer key = LLUtils.readDirectNioBuffer(alloc, rocksIterator::key)) {
try {
if (firstGroupKey == null) { if (firstGroupKey == null) {
firstGroupKey = key.retain(); firstGroupKey = key.copy();
} else if (!ByteBufUtil.equals(firstGroupKey, firstGroupKey.readerIndex(), key, key.readerIndex(), prefixLength)) { } else if (!LLUtils.equals(firstGroupKey, firstGroupKey.readerOffset(), key, key.readerOffset(), prefixLength)) {
break; break;
} }
rocksIterator.next(); rocksIterator.next();
rocksIterator.status(); rocksIterator.status();
} finally {
key.release();
} }
} }
if (firstGroupKey != null) { if (firstGroupKey != null) {
var groupKeyPrefix = firstGroupKey.slice(0, prefixLength); var groupKeyPrefix = firstGroupKey.copy(firstGroupKey.readerOffset(), prefixLength);
sink.next(groupKeyPrefix.retain()); sink.next(groupKeyPrefix.send());
} else { } else {
sink.complete(); sink.complete();
} }
} finally { } finally {
if (firstGroupKey != null) { if (firstGroupKey != null) {
firstGroupKey.release(); firstGroupKey.close();
} }
} }
} catch (RocksDBException ex) { } catch (RocksDBException ex) {
sink.error(ex); sink.error(ex);
} finally {
range.release();
} }
return tuple; return tuple;
}, tuple -> { }, tuple -> {
var rocksIterator = tuple.getT1(); var rocksIterator = tuple.getT1();
rocksIterator.close(); rocksIterator.close();
tuple.getT2().release(); tuple.getT2().close();
tuple.getT3().release(); tuple.getT3().close();
}); });
} }
public void release() { public void release() {
range.release(); range.close();
} }
} }

View File

@ -26,14 +26,14 @@ public class LLMemoryDatabaseConnection implements LLDatabaseConnection {
JMXNettyMonitoringManager.initialize(); JMXNettyMonitoringManager.initialize();
} }
private final ByteBufAllocator allocator; private final BufferAllocator allocator;
public LLMemoryDatabaseConnection(ByteBufAllocator allocator) { public LLMemoryDatabaseConnection(BufferAllocator allocator) {
this.allocator = allocator; this.allocator = allocator;
} }
@Override @Override
public ByteBufAllocator getAllocator() { public BufferAllocator getAllocator() {
return allocator; return allocator;
} }

View File

@ -38,7 +38,7 @@ public class LLMemoryDictionary implements LLDictionary {
private final String databaseName; private final String databaseName;
private final String columnName; private final String columnName;
private final ByteBufAllocator allocator; private final BufferAllocator allocator;
private final UpdateMode updateMode; private final UpdateMode updateMode;
private final Getter<Long, ConcurrentSkipListMap<ByteList, ByteList>> snapshots; private final Getter<Long, ConcurrentSkipListMap<ByteList, ByteList>> snapshots;
private final ConcurrentSkipListMap<ByteList, ByteList> mainDb; private final ConcurrentSkipListMap<ByteList, ByteList> mainDb;
@ -47,7 +47,7 @@ public class LLMemoryDictionary implements LLDictionary {
U get(T argument); U get(T argument);
} }
public LLMemoryDictionary(ByteBufAllocator allocator, public LLMemoryDictionary(BufferAllocator allocator,
String databaseName, String databaseName,
String columnName, String columnName,
UpdateMode updateMode, UpdateMode updateMode,
@ -67,7 +67,7 @@ public class LLMemoryDictionary implements LLDictionary {
} }
@Override @Override
public ByteBufAllocator getAllocator() { public BufferAllocator getAllocator() {
return allocator; return allocator;
} }

View File

@ -28,7 +28,7 @@ import reactor.core.publisher.Mono;
public class LLMemoryKeyValueDatabase implements LLKeyValueDatabase { public class LLMemoryKeyValueDatabase implements LLKeyValueDatabase {
private final ByteBufAllocator allocator; private final BufferAllocator allocator;
private final String name; private final String name;
private final AtomicLong nextSnapshotNumber = new AtomicLong(1); private final AtomicLong nextSnapshotNumber = new AtomicLong(1);
@ -36,7 +36,7 @@ public class LLMemoryKeyValueDatabase implements LLKeyValueDatabase {
private final ConcurrentHashMap<String, ConcurrentSkipListMap<ByteList, ByteList>> mainDb; private final ConcurrentHashMap<String, ConcurrentSkipListMap<ByteList, ByteList>> mainDb;
private final ConcurrentHashMap<String, LLMemoryDictionary> singletons = new ConcurrentHashMap<>(); private final ConcurrentHashMap<String, LLMemoryDictionary> singletons = new ConcurrentHashMap<>();
public LLMemoryKeyValueDatabase(ByteBufAllocator allocator, String name, List<Column> columns) { public LLMemoryKeyValueDatabase(BufferAllocator allocator, String name, List<Column> columns) {
this.allocator = allocator; this.allocator = allocator;
this.name = name; this.name = name;
this.mainDb = new ConcurrentHashMap<>(); this.mainDb = new ConcurrentHashMap<>();
@ -87,7 +87,7 @@ public class LLMemoryKeyValueDatabase implements LLKeyValueDatabase {
} }
@Override @Override
public ByteBufAllocator getAllocator() { public BufferAllocator getAllocator() {
return allocator; return allocator;
} }

View File

@ -2,6 +2,7 @@ package it.cavallium.dbengine.database.memory;
import io.netty.buffer.api.Buffer; import io.netty.buffer.api.Buffer;
import io.netty.buffer.Unpooled; import io.netty.buffer.Unpooled;
import io.netty.buffer.api.Send;
import it.cavallium.dbengine.database.LLDictionaryResultType; import it.cavallium.dbengine.database.LLDictionaryResultType;
import it.cavallium.dbengine.database.LLSingleton; import it.cavallium.dbengine.database.LLSingleton;
import it.cavallium.dbengine.database.LLSnapshot; import it.cavallium.dbengine.database.LLSnapshot;
@ -18,8 +19,10 @@ public class LLMemorySingleton implements LLSingleton {
public LLMemorySingleton(LLMemoryDictionary dict, byte[] singletonName) { public LLMemorySingleton(LLMemoryDictionary dict, byte[] singletonName) {
this.dict = dict; this.dict = dict;
this.singletonName = singletonName; this.singletonName = singletonName;
Buffer singletonNameBuf = Unpooled.wrappedBuffer(singletonName); this.singletonNameBufMono = Mono.fromCallable(() -> dict
this.singletonNameBufMono = Mono.just(singletonNameBuf).map(Buffer::retain); .getAllocator()
.allocate(singletonName.length)
.writeBytes(singletonName));
} }
@Override @Override
@ -32,18 +35,16 @@ public class LLMemorySingleton implements LLSingleton {
return dict return dict
.get(snapshot, singletonNameBufMono, false) .get(snapshot, singletonNameBufMono, false)
.map(b -> { .map(b -> {
try { try (b) {
return LLUtils.toArray(b); return LLUtils.toArray(b);
} finally {
b.release();
} }
}); });
} }
@Override @Override
public Mono<Void> set(byte[] value) { public Mono<Void> set(byte[] value) {
var bbKey = Mono.just(Unpooled.wrappedBuffer(singletonName)).map(Buffer::retain); var bbKey = singletonNameBufMono;
var bbVal = Mono.just(Unpooled.wrappedBuffer(value)).map(Buffer::retain); var bbVal = Mono.fromCallable(() -> dict.getAllocator().allocate(value.length).writeBytes(value));
return dict return dict
.put(bbKey, bbVal, LLDictionaryResultType.VOID) .put(bbKey, bbVal, LLDictionaryResultType.VOID)
.then(); .then();

View File

@ -0,0 +1,107 @@
package it.cavallium.dbengine.database.serialization;
import io.netty.buffer.api.Buffer;
import io.netty.buffer.api.Send;
import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.database.SafeCloseable;
import java.io.DataInput;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import org.jetbrains.annotations.NotNull;
/**
 * {@link DataInput} implementation backed by a Netty 5 {@link Buffer}.
 * <p>
 * The constructor takes ownership of the buffer carried by the {@link Send}
 * (it calls {@code receive()}), so callers must not use the {@code Send}
 * afterwards. Call {@link #close()} to release the buffer.
 * <p>
 * NOTE(review): unlike the {@link DataInput} contract, the read methods do not
 * throw {@link java.io.EOFException} on underflow — the underlying buffer's own
 * bounds exception propagates instead. Confirm callers do not rely on EOF
 * semantics.
 */
public class BufferDataInput implements DataInput, SafeCloseable {

	/** Buffer read by every method; made read-only because this class never writes. */
	private final Buffer buf;

	public BufferDataInput(Send<Buffer> bufferSend) {
		this.buf = bufferSend.receive().makeReadOnly();
	}

	@Override
	public void readFully(byte @NotNull [] b) {
		this.readFully(b, 0, b.length);
	}

	@Override
	public void readFully(byte @NotNull [] b, int off, int len) {
		// copyInto does not move the reader offset, so advance it manually.
		buf.copyInto(buf.readerOffset(), b, off, len);
		buf.readerOffset(buf.readerOffset() + len);
	}

	@Override
	public int skipBytes(int n) {
		// BUGFIX: remaining readable bytes are writerOffset - readerOffset.
		// The original had the subtraction reversed (readerOffset - writerOffset),
		// which is never positive, so skipBytes could move the reader backwards
		// or fail instead of skipping forward.
		n = Math.min(n, buf.writerOffset() - buf.readerOffset());
		buf.readerOffset(buf.readerOffset() + n);
		return n;
	}

	@Override
	public boolean readBoolean() {
		// Any non-zero byte is true, matching DataInput semantics.
		return buf.readUnsignedByte() != 0;
	}

	@Override
	public byte readByte() {
		return buf.readByte();
	}

	@Override
	public int readUnsignedByte() {
		return buf.readUnsignedByte();
	}

	@Override
	public short readShort() {
		return buf.readShort();
	}

	@Override
	public int readUnsignedShort() {
		return buf.readUnsignedShort();
	}

	@Override
	public char readChar() {
		return buf.readChar();
	}

	@Override
	public int readInt() {
		return buf.readInt();
	}

	@Override
	public long readLong() {
		return buf.readLong();
	}

	@Override
	public float readFloat() {
		return buf.readFloat();
	}

	@Override
	public double readDouble() {
		return buf.readDouble();
	}

	/** Not supported: line-oriented reads are meaningless on a binary buffer. */
	@Override
	public String readLine() {
		throw new UnsupportedOperationException();
	}

	/**
	 * Reads an unsigned-short length prefix followed by that many bytes of
	 * standard UTF-8 text.
	 * <p>
	 * NOTE(review): this is plain UTF-8, not the "modified UTF-8" mandated by
	 * the {@link DataInput#readUTF()} contract. It matches the encoding written
	 * by {@code BufferDataOutput.writeUTF}, so the pair is self-consistent —
	 * but data written by {@link java.io.DataOutputStream} is not compatible.
	 */
	@NotNull
	@Override
	public String readUTF() {
		var len = buf.readUnsignedShort();
		// Copy the payload so the deserializer receives an independent buffer;
		// try-with-resources releases the copy once the string is built.
		try (var copiedBuf = buf.copy(buf.readerOffset(), len)) {
			var off = copiedBuf.readerOffset();
			return LLUtils.deserializeString(copiedBuf.send(), off, len, StandardCharsets.UTF_8);
		}
	}

	@Override
	public void close() {
		buf.close();
	}
}

View File

@ -0,0 +1,91 @@
package it.cavallium.dbengine.database.serialization;
import io.netty.buffer.api.Buffer;
import io.netty.buffer.api.Send;
import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.database.SafeCloseable;
import java.io.DataOutput;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import org.jetbrains.annotations.NotNull;
/**
 * {@link DataOutput} implementation that appends to a Netty 5 {@link Buffer}.
 * <p>
 * The buffer is NOT owned by this class: the caller retains ownership and is
 * responsible for closing it (note this class does not implement a close/release
 * method). The buffer must have enough implicit capacity growth for all writes.
 */
public class BufferDataOutput implements DataOutput {

	/** Destination buffer; every write advances its writer offset. */
	private final Buffer buf;

	public BufferDataOutput(Buffer bufferSend) {
		this.buf = bufferSend;
	}

	@Override
	public void write(int b) {
		// DataOutput.write(int) writes the low 8 bits of the argument.
		buf.writeUnsignedByte(b);
	}

	@Override
	public void write(byte @NotNull [] b) {
		buf.writeBytes(b);
	}

	@Override
	public void write(byte @NotNull [] b, int off, int len) {
		buf.writeBytes(b, off, len);
	}

	@Override
	public void writeBoolean(boolean v) {
		buf.writeUnsignedByte(v ? 1 : 0);
	}

	@Override
	public void writeByte(int v) {
		buf.writeByte((byte) v);
	}

	@Override
	public void writeShort(int v) {
		buf.writeShort((short) v);
	}

	@Override
	public void writeChar(int v) {
		buf.writeChar((char) v);
	}

	@Override
	public void writeInt(int v) {
		buf.writeInt(v);
	}

	@Override
	public void writeLong(long v) {
		buf.writeLong(v);
	}

	@Override
	public void writeFloat(float v) {
		buf.writeFloat(v);
	}

	@Override
	public void writeDouble(double v) {
		buf.writeDouble(v);
	}

	@Override
	public void writeBytes(@NotNull String s) {
		// BUGFIX: the DataOutput contract requires writing the low-order byte of
		// each char. The original used s.getBytes(), which applies the platform
		// default charset and can emit a different byte count for non-ASCII text.
		for (int i = 0, len = s.length(); i < len; i++) {
			buf.writeByte((byte) s.charAt(i));
		}
	}

	@Override
	public void writeChars(@NotNull String s) {
		// Two bytes per char, matching DataOutput.writeChars.
		s.chars().forEach(c -> buf.writeChar((char) c));
	}

	/**
	 * Writes an unsigned-short length prefix followed by standard UTF-8 bytes.
	 * <p>
	 * NOTE(review): this is plain UTF-8, not the "modified UTF-8" mandated by
	 * the {@link DataOutput#writeUTF} contract. Kept as-is because
	 * {@code BufferDataInput.readUTF} decodes the same encoding; data is not
	 * interchangeable with {@link java.io.DataInputStream#readUTF}.
	 * Strings longer than 65535 encoded bytes will have a truncated length
	 * prefix — TODO confirm callers never exceed that.
	 */
	@Override
	public void writeUTF(@NotNull String s) {
		var bytes = s.getBytes(StandardCharsets.UTF_8);
		buf.writeUnsignedShort(bytes.length);
		buf.writeBytes(bytes);
	}
}

View File

@ -1,13 +1,13 @@
package it.cavallium.dbengine.database.serialization; package it.cavallium.dbengine.database.serialization;
import io.netty.buffer.api.BufferInputStream; import java.io.DataInput;
import io.netty.buffer.api.BufferOutputStream; import java.io.DataOutput;
import java.io.IOException; import java.io.IOException;
import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.NotNull;
public interface Codec<A> { public interface Codec<A> {
@NotNull A deserialize(@NotNull ByteBufInputStream serialized) throws IOException; @NotNull A deserialize(@NotNull BufferDataInput serialized) throws IOException;
void serialize(@NotNull ByteBufOutputStream outputStream, @NotNull A deserialized) throws IOException; void serialize(@NotNull BufferDataOutput outputStream, @NotNull A deserialized) throws IOException;
} }

View File

@ -2,18 +2,15 @@ package it.cavallium.dbengine.database.serialization;
import io.netty.buffer.api.Buffer; import io.netty.buffer.api.Buffer;
import io.netty.buffer.api.BufferAllocator; import io.netty.buffer.api.BufferAllocator;
import io.netty.buffer.api.BufferInputStream; import io.netty.buffer.api.Send;
import io.netty.buffer.api.BufferOutputStream;
import io.netty.buffer.PooledByteBufAllocator;
import io.netty.buffer.Unpooled;
import java.io.IOError; import java.io.IOError;
import java.io.IOException; import java.io.IOException;
import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.NotNull;
import org.warp.commonutils.error.IndexOutOfBoundsException; import org.warp.commonutils.error.IndexOutOfBoundsException;
public class CodecSerializer<A> implements Serializer<A, Buffer> { public class CodecSerializer<A> implements Serializer<A, Send<Buffer>> {
private final ByteBufAllocator allocator; private final BufferAllocator allocator;
private final Codecs<A> deserializationCodecs; private final Codecs<A> deserializationCodecs;
private final Codec<A> serializationCodec; private final Codec<A> serializationCodec;
private final int serializationCodecId; private final int serializationCodecId;
@ -24,7 +21,7 @@ public class CodecSerializer<A> implements Serializer<A, Buffer> {
* @param microCodecs if true, allow only codecs with a value from 0 to 255 to save disk space * @param microCodecs if true, allow only codecs with a value from 0 to 255 to save disk space
*/ */
public CodecSerializer( public CodecSerializer(
ByteBufAllocator allocator, BufferAllocator allocator,
Codecs<A> deserializationCodecs, Codecs<A> deserializationCodecs,
Codec<A> serializationCodec, Codec<A> serializationCodec,
int serializationCodecId, int serializationCodecId,
@ -40,8 +37,8 @@ public class CodecSerializer<A> implements Serializer<A, Buffer> {
} }
@Override @Override
public @NotNull A deserialize(@NotNull Buffer serialized) { public @NotNull A deserialize(@NotNull Send<Buffer> serialized) {
try (var is = new ByteBufInputStream(serialized)) { try (var is = new BufferDataInput(serialized)) {
int codecId; int codecId;
if (microCodecs) { if (microCodecs) {
codecId = is.readUnsignedByte(); codecId = is.readUnsignedByte();
@ -53,26 +50,24 @@ public class CodecSerializer<A> implements Serializer<A, Buffer> {
} catch (IOException ex) { } catch (IOException ex) {
// This shouldn't happen // This shouldn't happen
throw new IOError(ex); throw new IOError(ex);
} finally {
serialized.release();
} }
} }
@Override @Override
public @NotNull Buffer serialize(@NotNull A deserialized) { public @NotNull Send<Buffer> serialize(@NotNull A deserialized) {
Buffer buf = allocator.buffer(); try (Buffer buf = allocator.allocate(64)) {
try (var os = new ByteBufOutputStream(buf)) { var os = new BufferDataOutput(buf);
if (microCodecs) { if (microCodecs) {
os.writeByte(serializationCodecId); os.writeByte(serializationCodecId);
} else { } else {
os.writeInt(serializationCodecId); os.writeInt(serializationCodecId);
} }
serializationCodec.serialize(os, deserialized); serializationCodec.serialize(os, deserialized);
return buf.send();
} catch (IOException ex) { } catch (IOException ex) {
// This shouldn't happen // This shouldn't happen
throw new IOError(ex); throw new IOError(ex);
} }
return buf;
} }
@SuppressWarnings("unused") @SuppressWarnings("unused")

View File

@ -41,11 +41,10 @@ public interface Serializer<A, B> {
@Override @Override
public @NotNull Send<Buffer> serialize(@NotNull String deserialized) { public @NotNull Send<Buffer> serialize(@NotNull String deserialized) {
// UTF-8 uses max. 3 bytes per char, so calculate the worst case. var bytes = deserialized.getBytes(StandardCharsets.UTF_8);
int length = LLUtils.utf8MaxBytes(deserialized); try (Buffer buf = allocator.allocate(Integer.BYTES + bytes.length)) {
try (Buffer buf = allocator.allocate(Integer.BYTES + length)) { buf.writeInt(bytes.length);
buf.writeInt(length); buf.writeBytes(bytes);
LLUtils.writeString(buf, deserialized, StandardCharsets.UTF_8);
return buf.send(); return buf.send();
} }
} }

View File

@ -64,7 +64,7 @@ public interface SerializerFixedBinaryLength<A, B> extends Serializer<A, B> {
public @NotNull Send<Buffer> serialize(@NotNull String deserialized) throws SerializationException { public @NotNull Send<Buffer> serialize(@NotNull String deserialized) throws SerializationException {
// UTF-8 uses max. 3 bytes per char, so calculate the worst case. // UTF-8 uses max. 3 bytes per char, so calculate the worst case.
try (Buffer buf = allocator.allocate(LLUtils.utf8MaxBytes(deserialized))) { try (Buffer buf = allocator.allocate(LLUtils.utf8MaxBytes(deserialized))) {
LLUtils.writeString(buf, deserialized, StandardCharsets.UTF_8); buf.writeBytes(deserialized.getBytes(StandardCharsets.UTF_8));
if (buf.readableBytes() != getSerializedBinaryLength()) { if (buf.readableBytes() != getSerializedBinaryLength()) {
throw new SerializationException("Fixed serializer with " + getSerializedBinaryLength() throw new SerializationException("Fixed serializer with " + getSerializedBinaryLength()
+ " bytes has tried to serialize an element with " + " bytes has tried to serialize an element with "

View File

@ -1,14 +1,14 @@
package it.cavallium.dbengine.netty; package it.cavallium.dbengine.netty;
import io.netty.buffer.api.BufferAllocatorMetric;
public class JMXNettyMonitoring implements JMXNettyMonitoringMBean { public class JMXNettyMonitoring implements JMXNettyMonitoringMBean {
private final String name; private final String name;
private final ByteBufAllocatorMetric metric; protected final boolean direct;
private final io.netty.buffer.api.pool.BufferAllocatorMetric metric;
public JMXNettyMonitoring(String name, io.netty.buffer.api.BufferAllocatorMetric metric) { public JMXNettyMonitoring(String name, boolean direct, io.netty.buffer.api.pool.BufferAllocatorMetric metric) {
this.name = name; this.name = name;
this.direct = direct;
this.metric = metric; this.metric = metric;
} }
@ -19,12 +19,12 @@ public class JMXNettyMonitoring implements JMXNettyMonitoringMBean {
@Override @Override
public Long getHeapUsed() { public Long getHeapUsed() {
return metric.usedHeapMemory(); return direct ? 0 : metric.usedMemory();
} }
@Override @Override
public Long getDirectUsed() { public Long getDirectUsed() {
return metric.usedDirectMemory(); return direct ? metric.usedMemory() : 0;
} }
@Override @Override

View File

@ -1,10 +1,11 @@
package it.cavallium.dbengine.netty; package it.cavallium.dbengine.netty;
import io.netty.buffer.api.BufferAllocatorMetric; import io.netty.buffer.api.BufferAllocator;
import io.netty.buffer.PoolArenaMetric; import io.netty.buffer.PoolArenaMetric;
import io.netty.buffer.PooledByteBufAllocator; import io.netty.buffer.PooledByteBufAllocator;
import io.netty.buffer.PooledByteBufAllocatorMetric; import io.netty.buffer.PooledByteBufAllocatorMetric;
import io.netty.buffer.UnpooledByteBufAllocator; import io.netty.buffer.UnpooledByteBufAllocator;
import io.netty.buffer.api.pool.PooledBufferAllocator;
import java.lang.management.ManagementFactory; import java.lang.management.ManagementFactory;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
@ -37,50 +38,23 @@ public class JMXNettyMonitoringManager {
public synchronized static JMXNettyMonitoringManager getInstance() { public synchronized static JMXNettyMonitoringManager getInstance() {
if (instance == null) { if (instance == null) {
instance = new JMXNettyMonitoringManager(); instance = new JMXNettyMonitoringManager();
instance.initializeInternal();
} }
return instance; return instance;
} }
private void initializeInternal() { public void register(String name, BufferAllocator metric) {
Map<String, ByteBufAllocatorMetric> allocators = new HashMap<>();
allocators.put("unpooled", UnpooledByteBufAllocator.DEFAULT.metric());
allocators.put("pooled", PooledByteBufAllocator.DEFAULT.metric());
for (var entry : allocators.entrySet()) {
register(entry.getKey(), entry.getValue());
}
}
public void register(String name, ByteBufAllocatorMetric metric) {
try { try {
name = name.replaceAll("[^\\p{IsAlphabetic}\\p{IsDigit}_]", "_"); name = name.replaceAll("[^\\p{IsAlphabetic}\\p{IsDigit}_]", "_");
String type; String type;
StandardMBean mbean; StandardMBean mbean;
if (metric instanceof PooledByteBufAllocatorMetric) { if (metric instanceof PooledBufferAllocator pooledMetric) {
var pooledMetric = (PooledByteBufAllocatorMetric) metric;
for (var arenaEntry : (Iterable<Entry<String, PoolArenaMetric>>) Stream.concat(
pooledMetric.directArenas().stream().map(arena -> Map.entry("direct", arena)),
pooledMetric.heapArenas().stream().map(arena -> Map.entry("heap", arena))
)::iterator) {
var arenaType = arenaEntry.getKey();
var arenaMetric = arenaEntry.getValue();
var jmx = new JMXPoolArenaNettyMonitoring(arenaMetric);
mbean = new StandardMBean(jmx, JMXPoolArenaNettyMonitoringMBean.class);
ObjectName botObjectName = new ObjectName("io.netty.stats:name=PoolArena,type=" + arenaType + ",arenaId=" + nextArenaId.getAndIncrement());
platformMBeanServer.registerMBean(mbean, botObjectName);
}
var jmx = new JMXPooledNettyMonitoring(name, pooledMetric); var jmx = new JMXPooledNettyMonitoring(name, pooledMetric);
type = "pooled"; type = "pooled";
mbean = new StandardMBean(jmx, JMXNettyMonitoringMBean.class); mbean = new StandardMBean(jmx, JMXNettyMonitoringMBean.class);
} else {
var jmx = new JMXNettyMonitoring(name, metric);
type = "unpooled";
mbean = new StandardMBean(jmx, JMXNettyMonitoringMBean.class);
}
ObjectName botObjectName = new ObjectName("io.netty.stats:name=ByteBufAllocator,allocatorName=" + name + ",type=" + type); ObjectName botObjectName = new ObjectName("io.netty.stats:name=ByteBufAllocator,allocatorName=" + name + ",type=" + type);
platformMBeanServer.registerMBean(mbean, botObjectName); platformMBeanServer.registerMBean(mbean, botObjectName);
}
} catch (MalformedObjectNameException | NotCompliantMBeanException | InstanceAlreadyExistsException | MBeanRegistrationException e) { } catch (MalformedObjectNameException | NotCompliantMBeanException | InstanceAlreadyExistsException | MBeanRegistrationException e) {
throw new RuntimeException(e); throw new RuntimeException(e);
} }

View File

@ -1,50 +1,98 @@
package it.cavallium.dbengine.netty; package it.cavallium.dbengine.netty;
import io.netty.buffer.api.BufferAllocatorMetric; import io.netty.buffer.api.pool.BufferAllocatorMetric;
import io.netty.buffer.PooledByteBufAllocatorMetric; import io.netty.buffer.api.pool.BufferAllocatorMetric;
import io.netty.buffer.api.pool.PooledBufferAllocator;
import java.lang.reflect.Field;
public class JMXPooledNettyMonitoring extends JMXNettyMonitoring implements JMXNettyMonitoringMBean { public class JMXPooledNettyMonitoring extends JMXNettyMonitoring implements JMXNettyMonitoringMBean {
private final PooledByteBufAllocatorMetric metric; private final PooledBufferAllocator alloc;
private final BufferAllocatorMetric metric;
private Field smallCacheSize;
private Field numThreadLocalCaches;
private Field normalCacheSize;
private Field chunkSize;
public JMXPooledNettyMonitoring(String name, PooledByteBufAllocatorMetric metric) { public JMXPooledNettyMonitoring(String name, PooledBufferAllocator alloc) {
super(name, metric); super(name, alloc.isDirectBufferPooled(), alloc.metric());
this.metric = metric; this.alloc = alloc;
this.metric = alloc.metric();
try {
//noinspection JavaReflectionMemberAccess
numThreadLocalCaches = metric.getClass().getDeclaredField("numThreadLocalCaches");
} catch (NoSuchFieldException e) {
e.printStackTrace();
}
try {
//noinspection JavaReflectionMemberAccess
smallCacheSize = metric.getClass().getDeclaredField("smallCacheSize");
} catch (NoSuchFieldException e) {
e.printStackTrace();
}
try {
//noinspection JavaReflectionMemberAccess
normalCacheSize = metric.getClass().getDeclaredField("normalCacheSize");
} catch (NoSuchFieldException e) {
e.printStackTrace();
}
try {
//noinspection JavaReflectionMemberAccess
chunkSize = metric.getClass().getDeclaredField("chunkSize");
} catch (NoSuchFieldException e) {
e.printStackTrace();
}
} }
@Override @Override
public Integer getNumHeapArenas() { public Integer getNumHeapArenas() {
return metric.numHeapArenas(); return direct ? 0 : alloc.numArenas();
} }
@Override @Override
public Integer getNumDirectArenas() { public Integer getNumDirectArenas() {
return metric.numDirectArenas(); return direct ? alloc.numArenas() : 0;
} }
@Override @Override
public Integer getNumThreadLocalCachesArenas() { public Integer getNumThreadLocalCachesArenas() {
return metric.numThreadLocalCaches(); try {
return numThreadLocalCaches.getInt(metric);
} catch (IllegalAccessException e) {
return 0;
}
} }
@Deprecated @Deprecated
@Override @Override
public Integer getTinyCacheSize() { public Integer getTinyCacheSize() {
return metric.tinyCacheSize(); return 0;
} }
@Override @Override
public Integer getSmallCacheSize() { public Integer getSmallCacheSize() {
return metric.smallCacheSize(); try {
return smallCacheSize.getInt(metric);
} catch (IllegalAccessException e) {
return 0;
}
} }
@Override @Override
public Integer getNormalCacheSize() { public Integer getNormalCacheSize() {
return metric.normalCacheSize(); try {
return normalCacheSize.getInt(metric);
} catch (IllegalAccessException e) {
return 0;
}
} }
@Override @Override
public Integer getChunkSize() { public Integer getChunkSize() {
return metric.chunkSize(); try {
return chunkSize.getInt(metric);
} catch (IllegalAccessException e) {
return 0;
}
} }
} }