diff --git a/pom.xml b/pom.xml index c080b41..b9dd848 100644 --- a/pom.xml +++ b/pom.xml @@ -377,17 +377,17 @@ io.projectreactor reactor-core - 3.4.8 + 3.4.9 io.projectreactor reactor-tools - 3.4.8 + 3.4.9 io.projectreactor reactor-test - 3.4.8 + 3.4.9 org.novasearch diff --git a/src/main/java/it/cavallium/dbengine/client/DatabaseOptions.java b/src/main/java/it/cavallium/dbengine/client/DatabaseOptions.java index 3a8b6a9..119e718 100644 --- a/src/main/java/it/cavallium/dbengine/client/DatabaseOptions.java +++ b/src/main/java/it/cavallium/dbengine/client/DatabaseOptions.java @@ -14,5 +14,4 @@ public record DatabaseOptions(Map extraFlags, boolean allowMemoryMapping, boolean allowNettyDirect, boolean useNettyDirect, - boolean enableDbAssertionsWhenUsingAssertions, int maxOpenFiles) {} diff --git a/src/main/java/it/cavallium/dbengine/database/LLDictionary.java b/src/main/java/it/cavallium/dbengine/database/LLDictionary.java index 1caf698..5e2c135 100644 --- a/src/main/java/it/cavallium/dbengine/database/LLDictionary.java +++ b/src/main/java/it/cavallium/dbengine/database/LLDictionary.java @@ -71,23 +71,23 @@ public interface LLDictionary extends LLKeyValueDatabaseStructure { return getMulti(snapshot, keys, false); } - Flux> putMulti(Flux> entries, boolean getOldValues); + Flux putMulti(Flux entries, boolean getOldValues); Flux> updateMulti(Flux> entries, BiSerializationFunction updateFunction); - Flux> getRange(@Nullable LLSnapshot snapshot, Mono range, boolean existsAlmostCertainly); + Flux getRange(@Nullable LLSnapshot snapshot, Mono range, boolean existsAlmostCertainly); - default Flux> getRange(@Nullable LLSnapshot snapshot, Mono range) { + default Flux getRange(@Nullable LLSnapshot snapshot, Mono range) { return getRange(snapshot, range, false); } - Flux>> getRangeGrouped(@Nullable LLSnapshot snapshot, + Flux> getRangeGrouped(@Nullable LLSnapshot snapshot, Mono range, int prefixLength, boolean existsAlmostCertainly); - default Flux>> getRangeGrouped(@Nullable LLSnapshot snapshot, + default Flux> getRangeGrouped(@Nullable LLSnapshot snapshot, Mono range, int prefixLength) { return getRangeGrouped(snapshot, range, prefixLength, false); @@ -101,11 +101,11 @@ public interface LLDictionary extends LLKeyValueDatabaseStructure { Flux badBlocks(Mono range); - Mono setRange(Mono range, Flux> entries); + Mono setRange(Mono range, Flux entries); default Mono replaceRange(Mono range, boolean canKeysChange, - Function, Mono>> entriesReplacer, + Function> entriesReplacer, boolean existsAlmostCertainly) { return Mono.defer(() -> { if (canKeysChange) { @@ -126,7 +126,7 @@ public interface LLDictionary extends LLKeyValueDatabaseStructure { default Mono replaceRange(Mono range, boolean canKeysChange, - Function, Mono>> entriesReplacer) { + Function> entriesReplacer) { return replaceRange(range, canKeysChange, entriesReplacer, false); } @@ -134,9 +134,9 @@ public interface LLDictionary extends LLKeyValueDatabaseStructure { Mono sizeRange(@Nullable LLSnapshot snapshot, Mono range, boolean fast); - Mono> getOne(@Nullable LLSnapshot snapshot, Mono range); + Mono getOne(@Nullable LLSnapshot snapshot, Mono range); Mono getOneKey(@Nullable LLSnapshot snapshot, Mono range); - Mono> removeOne(Mono range); + Mono removeOne(Mono range); } diff --git a/src/main/java/it/cavallium/dbengine/database/LLEntry.java b/src/main/java/it/cavallium/dbengine/database/LLEntry.java new file mode 100644 index 0000000..3a7a46a --- /dev/null +++ b/src/main/java/it/cavallium/dbengine/database/LLEntry.java @@ -0,0 +1,74 @@ +package 
it.cavallium.dbengine.database;
+
+import io.netty.buffer.ByteBuf;
+import io.netty.util.IllegalReferenceCountException;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicInteger;
+import org.warp.commonutils.log.Logger;
+import org.warp.commonutils.log.LoggerFactory;
+
+public class LLEntry {
+
+	private static final Logger logger = LoggerFactory.getLogger(LLEntry.class);
+
+	private final AtomicInteger refCnt = new AtomicInteger(1);
+
+	private final ByteBuf key;
+	private final ByteBuf value;
+
+	public LLEntry(ByteBuf key, ByteBuf value) {
+		try {
+			this.key = key.retain();
+			this.value = value.retain();
+		} finally {
+			key.release();
+			value.release();
+		}
+	}
+
+	public ByteBuf getKey() {
+		if (refCnt.get() <= 0) {
+			throw new IllegalReferenceCountException(refCnt.get());
+		}
+		return key;
+	}
+
+	public ByteBuf getValue() {
+		if (refCnt.get() <= 0) {
+			throw new IllegalReferenceCountException(refCnt.get());
+		}
+		return value;
+	}
+
+	public void retain() {
+		if (refCnt.getAndIncrement() <= 0) {
+			throw new IllegalReferenceCountException(refCnt.get(), 1);
+		}
+		key.retain();
+		value.retain();
+	}
+
+	public void release() {
+		if (refCnt.decrementAndGet() < 0) {
+			throw new IllegalReferenceCountException(refCnt.get(), -1);
+		}
+		if (key.refCnt() > 0) {
+			key.release();
+		}
+		if (value.refCnt() > 0) {
+			value.release();
+		}
+	}
+
+	public boolean isReleased() {
+		return refCnt.get() <= 0;
+	}
+
+	@Override
+	protected void finalize() throws Throwable {
+		if (refCnt.get() > 0) {
+			logger.warn(this.getClass().getName() + "::release has not been called!");
+		}
+		super.finalize();
+	}
+}
diff --git a/src/main/java/it/cavallium/dbengine/database/LLUtils.java b/src/main/java/it/cavallium/dbengine/database/LLUtils.java
index 2aad71a..9c16c7e 100644
--- a/src/main/java/it/cavallium/dbengine/database/LLUtils.java
+++ b/src/main/java/it/cavallium/dbengine/database/LLUtils.java
@@ -22,6 +22,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Objects;
+import java.util.Optional;
 import java.util.concurrent.Callable;
 import java.util.function.Function;
 import java.util.function.ToIntFunction;
@@ -42,13 +43,19 @@ import org.apache.lucene.search.SortedNumericSortField;
 import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
 import org.rocksdb.RocksDB;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.warp.commonutils.functional.IOFunction;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
+import reactor.util.function.Tuple2;
+import reactor.util.function.Tuple3;
 
 @SuppressWarnings("unused")
 public class LLUtils {
 
+	private static final Logger logger = LoggerFactory.getLogger(LLUtils.class);
+
 	private static final byte[] RESPONSE_TRUE = new byte[]{1};
 	private static final byte[] RESPONSE_FALSE = new byte[]{0};
 	private static final byte[] RESPONSE_TRUE_BUF = new byte[]{1};
@@ -514,35 +521,154 @@ public class LLUtils {
 	}
 
 	public static <T> Mono<T> handleDiscard(Mono<T> mono) {
-		return mono.doOnDiscard(Map.Entry.class, e -> {
-			if (e.getKey() instanceof ByteBuf bb) {
-				if (bb.refCnt() > 0) {
-					bb.release();
-				}
-			}
-			if (e.getValue() instanceof ByteBuf bb) {
-				if (bb.refCnt() > 0) {
-					bb.release();
-				}
-			}
-		});
+		return mono
+				.doOnDiscard(Object.class, obj -> {
+					if (obj instanceof ReferenceCounted o) {
+						discardRefCounted(o);
+					} else if (obj instanceof Entry o) {
+						discardEntry(o);
+					} else if (obj instanceof Collection o) {
+						discardCollection(o);
+					} else if (obj instanceof Tuple3 o) {
+						discardTuple3(o);
+					} else if (obj instanceof Tuple2 o) {
+						discardTuple2(o);
+					} else if (obj instanceof LLEntry o) {
+						discardLLEntry(o);
+					} else if (obj instanceof LLRange o) {
+						discardLLRange(o);
+					} else if (obj instanceof Delta o) {
+						discardDelta(o);
+					} else if (obj instanceof Map o) {
+						discardMap(o);
+					}
+				});
+		// todo: check if the single object discard hook is more performant
+		/*
+				.doOnDiscard(ReferenceCounted.class, LLUtils::discardRefCounted)
+				.doOnDiscard(Map.Entry.class, LLUtils::discardEntry)
+				.doOnDiscard(Collection.class, LLUtils::discardCollection)
+				.doOnDiscard(Tuple2.class, LLUtils::discardTuple2)
+				.doOnDiscard(Tuple3.class, LLUtils::discardTuple3)
+				.doOnDiscard(LLEntry.class, LLUtils::discardLLEntry)
+				.doOnDiscard(LLRange.class, LLUtils::discardLLRange)
+				.doOnDiscard(Delta.class, LLUtils::discardDelta)
+				.doOnDiscard(Map.class, LLUtils::discardMap);
+
+		 */
 	}
 
 	public static <T> Flux<T> handleDiscard(Flux<T> mono) {
 		return mono
+				.doOnDiscard(Object.class, obj -> {
+					if (obj instanceof ReferenceCounted o) {
+						discardRefCounted(o);
+					} else if (obj instanceof Entry o) {
+						discardEntry(o);
+					} else if (obj instanceof Collection o) {
+						discardCollection(o);
+					} else if (obj instanceof Tuple3 o) {
+						discardTuple3(o);
+					} else if (obj instanceof Tuple2 o) {
+						discardTuple2(o);
+					} else if (obj instanceof LLEntry o) {
+						discardLLEntry(o);
+					} else if (obj instanceof LLRange o) {
+						discardLLRange(o);
+					} else if (obj instanceof Delta o) {
+						discardDelta(o);
+					} else if (obj instanceof Map o) {
+						discardMap(o);
+					} else {
+						System.err.println(obj.getClass().getName());
+					}
+				});
+		// todo: check if the single object discard hook is more performant
+		/*
 				.doOnDiscard(ReferenceCounted.class, LLUtils::discardRefCounted)
 				.doOnDiscard(Map.Entry.class, LLUtils::discardEntry)
-				.doOnDiscard(Collection.class, LLUtils::discardCollection);
+				.doOnDiscard(Collection.class, LLUtils::discardCollection)
+				.doOnDiscard(Tuple2.class, LLUtils::discardTuple2)
+				.doOnDiscard(Tuple3.class, LLUtils::discardTuple3)
+				.doOnDiscard(LLEntry.class, LLUtils::discardLLEntry)
+				.doOnDiscard(LLRange.class, LLUtils::discardLLRange)
+				.doOnDiscard(Delta.class, LLUtils::discardDelta)
+				.doOnDiscard(Map.class, LLUtils::discardMap);
+
+		 */
+	}
+
+	private static void discardLLEntry(LLEntry entry) {
+		logger.trace("Releasing discarded ByteBuf");
+		entry.release();
+	}
+
+	private static void discardLLRange(LLRange range) {
+		logger.trace("Releasing discarded ByteBuf");
+		range.release();
 	}
 
 	private static void discardEntry(Map.Entry e) {
 		if (e.getKey() instanceof ByteBuf bb) {
 			if (bb.refCnt() > 0) {
+				logger.trace("Releasing discarded ByteBuf");
 				bb.release();
 			}
 		}
 		if (e.getValue() instanceof ByteBuf bb) {
 			if (bb.refCnt() > 0) {
+				logger.trace("Releasing discarded ByteBuf");
+				bb.release();
+			}
+		}
+	}
+
+	private static void discardTuple2(Tuple2 e) {
+		if (e.getT1() instanceof ByteBuf bb) {
+			if (bb.refCnt() > 0) {
+				logger.trace("Releasing discarded ByteBuf");
+				bb.release();
+			}
+		}
+		if (e.getT2() instanceof ByteBuf bb) {
+			if (bb.refCnt() > 0) {
+				logger.trace("Releasing discarded ByteBuf");
+				bb.release();
+			}
+		}
+	}
+
+	private static void discardTuple3(Tuple3 e) {
+		if (e.getT1() instanceof ByteBuf bb) {
+			if (bb.refCnt() > 0) {
+				logger.trace("Releasing discarded ByteBuf");
+				bb.release();
+			}
+		} else if (e.getT1() instanceof Optional opt) {
+			if (opt.isPresent() && opt.get() instanceof ByteBuf bb) {
+				logger.trace("Releasing discarded ByteBuf");
+				bb.release();
+			}
+		}
+		if (e.getT2() instanceof ByteBuf bb) {
+			if (bb.refCnt() > 0) {
+				logger.trace("Releasing discarded ByteBuf");
+				bb.release();
+			}
+		} else if (e.getT2() instanceof Optional opt) {
+			if (opt.isPresent() && opt.get() instanceof ByteBuf bb) {
+				logger.trace("Releasing discarded ByteBuf");
+				bb.release();
+			}
+		}
+		if (e.getT3() instanceof ByteBuf bb) {
+			if (bb.refCnt() > 0) {
+				logger.trace("Releasing discarded ByteBuf");
+				bb.release();
+			}
+		} else if (e.getT3() instanceof Optional opt) {
+			if (opt.isPresent() && opt.get() instanceof ByteBuf bb) {
+				logger.trace("Releasing discarded ByteBuf");
 				bb.release();
 			}
 		}
@@ -550,6 +676,7 @@ public class LLUtils {
 
 	private static void discardRefCounted(ReferenceCounted referenceCounted) {
 		if (referenceCounted.refCnt() > 0) {
+			logger.trace("Releasing discarded ByteBuf");
 			referenceCounted.release();
 		}
 	}
@@ -558,16 +685,19 @@ public class LLUtils {
 		for (Object o : collection) {
 			if (o instanceof ReferenceCounted referenceCounted) {
 				if (referenceCounted.refCnt() > 0) {
+					logger.trace("Releasing discarded ByteBuf");
 					referenceCounted.release();
 				}
 			} else if (o instanceof Map.Entry entry) {
 				if (entry.getKey() instanceof ReferenceCounted bb) {
 					if (bb.refCnt() > 0) {
+						logger.trace("Releasing discarded ByteBuf");
 						bb.release();
 					}
 				}
 				if (entry.getValue() instanceof ReferenceCounted bb) {
 					if (bb.refCnt() > 0) {
+						logger.trace("Releasing discarded ByteBuf");
 						bb.release();
 					}
 				}
@@ -576,4 +706,42 @@ public class LLUtils {
 			}
 		}
 	}
+
+	private static void discardDelta(Delta delta) {
+		if (delta.previous() instanceof ByteBuf bb) {
+			if (bb.refCnt() > 0) {
+				logger.trace("Releasing discarded ByteBuf");
+				bb.release();
+			}
+		}
+		if (delta.current() instanceof ByteBuf bb) {
+			if (bb.refCnt() > 0) {
+				logger.trace("Releasing discarded ByteBuf");
+				bb.release();
+			}
+		}
+	}
+
+	private static void discardMap(Map map) {
+		for (Entry entry : map.entrySet()) {
+			boolean hasByteBuf = false;
+			if (entry.getKey() instanceof ByteBuf bb) {
+				if (bb.refCnt() > 0) {
+					logger.trace("Releasing discarded ByteBuf");
+					bb.release();
+				}
+				hasByteBuf = true;
+			}
+			if (entry.getValue() instanceof ByteBuf bb) {
+				if (bb.refCnt() > 0) {
+					logger.trace("Releasing discarded ByteBuf");
+					bb.release();
+				}
+				hasByteBuf = true;
+			}
+			if (!hasByteBuf) {
+				break;
+			}
+		}
+	}
 }
diff --git a/src/main/java/it/cavallium/dbengine/database/collections/DatabaseEmpty.java b/src/main/java/it/cavallium/dbengine/database/collections/DatabaseEmpty.java
index 41c439e..f9006b1 100644
--- a/src/main/java/it/cavallium/dbengine/database/collections/DatabaseEmpty.java
+++ b/src/main/java/it/cavallium/dbengine/database/collections/DatabaseEmpty.java
@@ -14,7 +14,11 @@ public class DatabaseEmpty {
 	public static final Serializer<Nothing, ByteBuf> NOTHING_SERIALIZER = new Serializer<>() {
 		@Override
 		public @NotNull Nothing deserialize(@NotNull ByteBuf serialized) {
-			return NOTHING;
+			try {
+				return NOTHING;
+			} finally {
+				serialized.release();
+			}
 		}
 
 		@Override
diff --git a/src/main/java/it/cavallium/dbengine/database/collections/DatabaseMapDictionary.java b/src/main/java/it/cavallium/dbengine/database/collections/DatabaseMapDictionary.java
index 8281ab7..cc04e56 100644
--- a/src/main/java/it/cavallium/dbengine/database/collections/DatabaseMapDictionary.java
+++ b/src/main/java/it/cavallium/dbengine/database/collections/DatabaseMapDictionary.java
@@ -7,6 +7,7 @@ import it.cavallium.dbengine.database.Delta;
 import it.cavallium.dbengine.database.ExtraKeyOperationResult;
 import it.cavallium.dbengine.database.LLDictionary;
 import
it.cavallium.dbengine.database.LLDictionaryResultType; +import it.cavallium.dbengine.database.LLEntry; import it.cavallium.dbengine.database.LLUtils; import it.cavallium.dbengine.database.UpdateMode; import it.cavallium.dbengine.database.UpdateReturnMode; @@ -15,6 +16,7 @@ import it.cavallium.dbengine.database.serialization.SerializationException; import it.cavallium.dbengine.database.serialization.SerializationFunction; import it.cavallium.dbengine.database.serialization.Serializer; import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength; +import java.nio.ByteBuffer; import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -28,6 +30,7 @@ import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import reactor.core.publisher.SynchronousSink; import reactor.util.function.Tuple2; +import reactor.util.function.Tuple3; import reactor.util.function.Tuples; /** @@ -87,6 +90,8 @@ public class DatabaseMapDictionary extends DatabaseMapDictionaryDeep extends DatabaseMapDictionaryDeep { try { - sink.next(Map.entry(this.toKey(serializeSuffix(entry.getKey())), + sink.next(new LLEntry(this.toKey(serializeSuffix(entry.getKey())), valueSerializer.serialize(entry.getValue()))); } catch (SerializationException e) { sink.error(e); @@ -151,26 +156,18 @@ public class DatabaseMapDictionary extends DatabaseMapDictionaryDeep putValue(T keySuffix, U value) { - return Mono - .using( - () -> serializeSuffix(keySuffix), - keySuffixBuf -> Mono - .using( - () -> toKey(keySuffixBuf.retain()), - keyBuf -> Mono - .using(() -> valueSerializer.serialize(value), - valueBuf -> dictionary - .put(LLUtils.lazyRetain(keyBuf), - LLUtils.lazyRetain(valueBuf), - LLDictionaryResultType.VOID) - .doOnNext(ReferenceCounted::release), - ReferenceCounted::release - ), - ReferenceCounted::release - ), + return Mono.using(() -> serializeSuffix(keySuffix), + keySuffixBuf -> Mono.using(() -> toKey(keySuffixBuf.retain()), + keyBuf -> Mono.using(() -> valueSerializer.serialize(value), + valueBuf -> dictionary + .put(LLUtils.lazyRetain(keyBuf), LLUtils.lazyRetain(valueBuf), LLDictionaryResultType.VOID) + .doOnNext(ReferenceCounted::release), + ReferenceCounted::release + ), ReferenceCounted::release - ) - .then(); + ), + ReferenceCounted::release + ).then(); } @Override @@ -340,35 +337,43 @@ public class DatabaseMapDictionary extends DatabaseMapDictionaryDeep>> getMulti(@Nullable CompositeSnapshot snapshot, Flux keys, boolean existsAlmostCertainly) { - return dictionary - .getMulti(resolveSnapshot(snapshot), keys.flatMap(keySuffix -> Mono.fromCallable(() -> { - ByteBuf keySuffixBuf = serializeSuffix(keySuffix); + return dictionary.getMulti(resolveSnapshot(snapshot), keys.flatMap(keySuffix -> Mono.fromCallable(() -> { + ByteBuf keySuffixBuf = serializeSuffix(keySuffix); + try { + var key = toKey(keySuffixBuf.retain()); try { - return Tuples.of(keySuffix, toKey(keySuffixBuf.retain())); + return Tuples.of(keySuffix, key.retain()); } finally { - keySuffixBuf.release(); + key.release(); } - })), existsAlmostCertainly) - .flatMapSequential(entry -> { - entry.getT2().release(); - return Mono.fromCallable(() -> { - Optional valueOpt; - if (entry.getT3().isPresent()) { - valueOpt = Optional.of(valueSerializer.deserialize(entry.getT3().get())); - } else { - valueOpt = Optional.empty(); + } finally { + keySuffixBuf.release(); + } + })), existsAlmostCertainly).flatMapSequential(entry -> { + entry.getT2().release(); + return Mono.fromCallable(() -> { + Optional valueOpt; + if 
(entry.getT3().isPresent()) { + var buf = entry.getT3().get(); + try { + valueOpt = Optional.of(valueSerializer.deserialize(buf.retain())); + } finally { + buf.release(); } - return Map.entry(entry.getT1(), valueOpt); - }); + } else { + valueOpt = Optional.empty(); + } + return Map.entry(entry.getT1(), valueOpt); }); + }).transform(LLUtils::handleDiscard); } - private Entry serializeEntry(T key, U value) throws SerializationException { + private LLEntry serializeEntry(T key, U value) throws SerializationException { ByteBuf serializedKey = toKey(serializeSuffix(key)); try { ByteBuf serializedValue = valueSerializer.serialize(value); try { - return Map.entry(serializedKey.retain(), serializedValue.retain()); + return new LLEntry(serializedKey.retain(), serializedValue.retain()); } finally { serializedValue.release(); } @@ -380,20 +385,21 @@ public class DatabaseMapDictionary extends DatabaseMapDictionaryDeep putMulti(Flux> entries) { var serializedEntries = entries - .flatMap(entry -> Mono - .fromCallable(() -> serializeEntry(entry.getKey(), entry.getValue())) - .doOnDiscard(Entry.class, uncastedEntry -> { - if (uncastedEntry.getKey() instanceof ByteBuf byteBuf) { - byteBuf.release(); - } - if (uncastedEntry.getValue() instanceof ByteBuf byteBuf) { - byteBuf.release(); - } - }) - ); + .handle((entry, sink) -> { + try { + sink.next(serializeEntry(entry.getKey(), entry.getValue())); + } catch (SerializationException e) { + sink.error(e); + } + }); return dictionary .putMulti(serializedEntries, false) - .then(); + .then() + .doOnDiscard(LLEntry.class, entry -> { + if (!entry.isReleased()) { + entry.release(); + } + }); } @Override @@ -455,21 +461,33 @@ public class DatabaseMapDictionary extends DatabaseMapDictionaryDeep>handle((serializedEntry, sink) -> { + ByteBuf key = serializedEntry.getKey(); + ByteBuf value = serializedEntry.getValue(); try { - sink.next(Map.entry( - deserializeSuffix(stripPrefix(serializedEntry.getKey(), false)), - valueSerializer.deserialize(serializedEntry.getValue()) - )); + ByteBuf keySuffix = stripPrefix(key.retain(), false); + try { + sink.next(Map.entry(deserializeSuffix(keySuffix.retain()), + valueSerializer.deserialize(value.retain()))); + } finally { + keySuffix.release(); + } } catch (SerializationException e) { sink.error(e); + } finally { + key.release(); + value.release(); } }) .doOnDiscard(Entry.class, uncastedEntry -> { if (uncastedEntry.getKey() instanceof ByteBuf byteBuf) { - byteBuf.release(); + if (byteBuf.refCnt() > 0) { + byteBuf.release(); + } } if (uncastedEntry.getValue() instanceof ByteBuf byteBuf) { - byteBuf.release(); + if (byteBuf.refCnt() > 0) { + byteBuf.release(); + } } }); } @@ -481,8 +499,22 @@ public class DatabaseMapDictionary extends DatabaseMapDictionaryDeep getAllValues(null), b -> dictionary.setRange(rangeMono, entries.handle((entry, sink) -> { try { - ByteBuf serializedValue = valueSerializer.serialize(entry.getValue()); - sink.next(Map.entry(toKey(serializeSuffix(entry.getKey())), serializedValue)); + ByteBuf serializedKeySuffix = serializeSuffix(entry.getKey()); + try { + ByteBuf serializedKey = toKey(serializedKeySuffix); + try { + ByteBuf serializedValue = valueSerializer.serialize(entry.getValue()); + try { + sink.next(new LLEntry(serializedKey.retain(), serializedValue.retain())); + } finally { + serializedValue.release(); + } + } finally { + serializedKey.release(); + } + } finally { + serializedKeySuffix.release(); + } } catch (SerializationException e) { sink.error(e); } diff --git 
a/src/main/java/it/cavallium/dbengine/database/collections/DatabaseMapDictionaryDeep.java b/src/main/java/it/cavallium/dbengine/database/collections/DatabaseMapDictionaryDeep.java index 2873454..add1919 100644 --- a/src/main/java/it/cavallium/dbengine/database/collections/DatabaseMapDictionaryDeep.java +++ b/src/main/java/it/cavallium/dbengine/database/collections/DatabaseMapDictionaryDeep.java @@ -22,6 +22,7 @@ import java.util.Map.Entry; import org.jetbrains.annotations.Nullable; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; +import reactor.util.function.Tuples; // todo: implement optimized methods (which?) public class DatabaseMapDictionaryDeep> implements DatabaseStageMap { @@ -393,25 +394,10 @@ public class DatabaseMapDictionaryDeep> implem return Mono.using( () -> serializeSuffix(keySuffix), keySuffixData -> { - Flux debuggingKeysFlux = Mono.>defer(() -> { - if (LLLocalDictionary.DEBUG_PREFIXES_WHEN_ASSERTIONS_ARE_ENABLED - && this.subStageGetter.needsDebuggingKeyFlux()) { - return Flux - .using( - () -> toExtRange(keySuffixData.retain()), - extRangeBuf -> this.dictionary - .getRangeKeys(resolveSnapshot(snapshot), LLUtils.lazyRetainRange(extRangeBuf)), - LLRange::release - ) - .collectList(); - } else { - return Mono.just(List.of()); - } - }).flatMapIterable(it -> it); return Mono.using( () -> toKeyWithoutExt(keySuffixData.retain()), keyWithoutExt -> this.subStageGetter - .subStage(dictionary, snapshot, LLUtils.lazyRetain(keyWithoutExt), debuggingKeysFlux), + .subStage(dictionary, snapshot, LLUtils.lazyRetain(keyWithoutExt)), ReferenceCounted::release ); }, @@ -433,87 +419,43 @@ public class DatabaseMapDictionaryDeep> implem @Override public Flux> getAllStages(@Nullable CompositeSnapshot snapshot) { + return Flux - .defer(() -> { - if (LLLocalDictionary.DEBUG_PREFIXES_WHEN_ASSERTIONS_ARE_ENABLED && this.subStageGetter.needsDebuggingKeyFlux()) { - return dictionary - .getRangeKeysGrouped(resolveSnapshot(snapshot), rangeMono, keyPrefixLength + keySuffixLength) - .concatMap(rangeKeys -> Flux - .using( - () -> { - assert this.subStageGetter.isMultiKey() || rangeKeys.size() == 1; - ByteBuf groupKeyWithExt = rangeKeys.get(0).retainedSlice(); - ByteBuf groupKeyWithoutExt = removeExtFromFullKey(groupKeyWithExt.retain(), true); - ByteBuf groupSuffix = this.stripPrefix(groupKeyWithoutExt.retain(), true); - return new GroupBuffers(groupKeyWithExt, groupKeyWithoutExt, groupSuffix); - }, - buffers -> Mono - .fromCallable(() -> { - assert subStageKeysConsistency(buffers.groupKeyWithExt.readableBytes()); - return null; - }) - .then(this.subStageGetter - .subStage(dictionary, - snapshot, - LLUtils.lazyRetain(buffers.groupKeyWithoutExt), - Flux.fromIterable(rangeKeys).map(ByteBuf::retain) - ) - .>handle((us, sink) -> { - try { - var deserializedSuffix = this.deserializeSuffix(buffers.groupSuffix.retain()); - sink.next(Map.entry(deserializedSuffix, us)); - } catch (SerializationException ex) { - sink.error(ex); - } - }) - ), - buffers -> { - buffers.groupSuffix.release(); - buffers.groupKeyWithoutExt.release(); - buffers.groupKeyWithExt.release(); - } - ) - .doAfterTerminate(() -> { - for (ByteBuf rangeKey : rangeKeys) { - rangeKey.release(); - } - }) - ) - .doOnDiscard(Collection.class, discardedCollection -> { - for (Object o : discardedCollection) { - if (o instanceof ByteBuf byteBuf) { - byteBuf.release(); + .defer(() -> dictionary.getRangeKeyPrefixes(resolveSnapshot(snapshot), rangeMono, keyPrefixLength + keySuffixLength)) + .flatMapSequential(groupKeyWithoutExt -> Mono 
+ .using( + () -> { + try { + var groupSuffix = this.stripPrefix(groupKeyWithoutExt.retain(), true); + try { + assert subStageKeysConsistency(groupKeyWithoutExt.readableBytes() + keyExtLength); + return Tuples.of(groupKeyWithoutExt.retain(), groupSuffix.retain()); + } finally { + groupSuffix.release(); } + } finally { + groupKeyWithoutExt.release(); } - }); - } else { - return Flux - .defer(() -> dictionary.getRangeKeyPrefixes(resolveSnapshot(snapshot), rangeMono, keyPrefixLength + keySuffixLength)) - .flatMapSequential(groupKeyWithoutExt -> Mono - .using( - () -> { - var groupSuffix = this.stripPrefix(groupKeyWithoutExt.retain(), true); - assert subStageKeysConsistency(groupKeyWithoutExt.readableBytes() + keyExtLength); - return groupSuffix; - }, - groupSuffix -> this.subStageGetter - .subStage(dictionary, - snapshot, - LLUtils.lazyRetain(groupKeyWithoutExt), - Flux.empty() - ) - .>handle((us, sink) -> { - try { - sink.next(Map.entry(this.deserializeSuffix(groupSuffix.retain()), us)); - } catch (SerializationException ex) { - sink.error(ex); - } - }), - ReferenceCounted::release + }, + groupKeyWithoutExtAndGroupSuffix -> this.subStageGetter + .subStage(dictionary, + snapshot, + LLUtils.lazyRetain(groupKeyWithoutExtAndGroupSuffix.getT1()) ) - ); - } - }); + .>handle((us, sink) -> { + try { + sink.next(Map.entry(this.deserializeSuffix(groupKeyWithoutExtAndGroupSuffix.getT2().retain()), us)); + } catch (SerializationException ex) { + sink.error(ex); + } + }), + entry -> { + entry.getT1().release(); + entry.getT2().release(); + } + ) + ) + .transform(LLUtils::handleDiscard); } private boolean subStageKeysConsistency(int totalKeyLength) { diff --git a/src/main/java/it/cavallium/dbengine/database/collections/SubStageGetter.java b/src/main/java/it/cavallium/dbengine/database/collections/SubStageGetter.java index b954d6d..b4609ad 100644 --- a/src/main/java/it/cavallium/dbengine/database/collections/SubStageGetter.java +++ b/src/main/java/it/cavallium/dbengine/database/collections/SubStageGetter.java @@ -13,10 +13,7 @@ public interface SubStageGetter> { Mono subStage(LLDictionary dictionary, @Nullable CompositeSnapshot snapshot, - Mono prefixKey, - @Nullable Flux debuggingKeyFlux); + Mono prefixKey); boolean isMultiKey(); - - boolean needsDebuggingKeyFlux(); } diff --git a/src/main/java/it/cavallium/dbengine/database/collections/SubStageGetterHashMap.java b/src/main/java/it/cavallium/dbengine/database/collections/SubStageGetterHashMap.java index 25e3451..025d4e2 100644 --- a/src/main/java/it/cavallium/dbengine/database/collections/SubStageGetterHashMap.java +++ b/src/main/java/it/cavallium/dbengine/database/collections/SubStageGetterHashMap.java @@ -16,38 +16,25 @@ import reactor.core.publisher.Mono; public class SubStageGetterHashMap implements SubStageGetter, DatabaseMapDictionaryHashed> { - private static final boolean assertsEnabled; - static { - boolean assertsEnabledTmp = false; - //noinspection AssertWithSideEffects - assert assertsEnabledTmp = true; - //noinspection ConstantConditions - assertsEnabled = assertsEnabledTmp; - } - private final Serializer keySerializer; private final Serializer valueSerializer; private final Function keyHashFunction; private final SerializerFixedBinaryLength keyHashSerializer; - private final boolean enableAssertionsWhenUsingAssertions; public SubStageGetterHashMap(Serializer keySerializer, Serializer valueSerializer, Function keyHashFunction, - SerializerFixedBinaryLength keyHashSerializer, - boolean enableAssertionsWhenUsingAssertions) { + 
SerializerFixedBinaryLength keyHashSerializer) { this.keySerializer = keySerializer; this.valueSerializer = valueSerializer; this.keyHashFunction = keyHashFunction; this.keyHashSerializer = keyHashSerializer; - this.enableAssertionsWhenUsingAssertions = enableAssertionsWhenUsingAssertions; } @Override public Mono> subStage(LLDictionary dictionary, @Nullable CompositeSnapshot snapshot, - Mono prefixKeyMono, - @Nullable Flux debuggingKeysFlux) { + Mono prefixKeyMono) { return Mono.usingWhen( prefixKeyMono, prefixKey -> Mono @@ -59,24 +46,7 @@ public class SubStageGetterHashMap implements keyHashFunction, keyHashSerializer ) - ) - .transform(mono -> { - if (debuggingKeysFlux != null) { - return debuggingKeysFlux.handle((key, sink) -> { - try { - if (key.readableBytes() != prefixKey.readableBytes() + getKeyHashBinaryLength()) { - sink.error(new IndexOutOfBoundsException()); - } else { - sink.complete(); - } - } finally { - key.release(); - } - }).then(mono); - } else { - return mono; - } - }), + ), prefixKey -> Mono.fromRunnable(prefixKey::release) ); } @@ -86,11 +56,6 @@ public class SubStageGetterHashMap implements return true; } - @Override - public boolean needsDebuggingKeyFlux() { - return assertsEnabled && enableAssertionsWhenUsingAssertions; - } - public int getKeyHashBinaryLength() { return keyHashSerializer.getSerializedBinaryLength(); } diff --git a/src/main/java/it/cavallium/dbengine/database/collections/SubStageGetterHashSet.java b/src/main/java/it/cavallium/dbengine/database/collections/SubStageGetterHashSet.java index fdbd32f..a228cc5 100644 --- a/src/main/java/it/cavallium/dbengine/database/collections/SubStageGetterHashSet.java +++ b/src/main/java/it/cavallium/dbengine/database/collections/SubStageGetterHashSet.java @@ -16,35 +16,22 @@ import reactor.core.publisher.Mono; public class SubStageGetterHashSet implements SubStageGetter, DatabaseSetDictionaryHashed> { - private static final boolean assertsEnabled; - static { - boolean assertsEnabledTmp = false; - //noinspection AssertWithSideEffects - assert assertsEnabledTmp = true; - //noinspection ConstantConditions - assertsEnabled = assertsEnabledTmp; - } - private final Serializer keySerializer; private final Function keyHashFunction; private final SerializerFixedBinaryLength keyHashSerializer; - private final boolean enableAssertionsWhenUsingAssertions; public SubStageGetterHashSet(Serializer keySerializer, Function keyHashFunction, - SerializerFixedBinaryLength keyHashSerializer, - boolean enableAssertionsWhenUsingAssertions) { + SerializerFixedBinaryLength keyHashSerializer) { this.keySerializer = keySerializer; this.keyHashFunction = keyHashFunction; this.keyHashSerializer = keyHashSerializer; - this.enableAssertionsWhenUsingAssertions = enableAssertionsWhenUsingAssertions; } @Override public Mono> subStage(LLDictionary dictionary, @Nullable CompositeSnapshot snapshot, - Mono prefixKeyMono, - @Nullable Flux debuggingKeysFlux) { + Mono prefixKeyMono) { return Mono.usingWhen(prefixKeyMono, prefixKey -> Mono .fromSupplier(() -> DatabaseSetDictionaryHashed @@ -54,24 +41,7 @@ public class SubStageGetterHashSet implements keyHashFunction, keyHashSerializer ) - ) - .transform(mono -> { - if (debuggingKeysFlux != null) { - return debuggingKeysFlux.handle((key, sink) -> { - try { - if (key.readableBytes() != prefixKey.readableBytes() + getKeyHashBinaryLength()) { - sink.error(new IndexOutOfBoundsException()); - } else { - sink.complete(); - } - } finally { - key.release(); - } - }).then(mono); - } else { - return mono; - } - }), + 
), prefixKey -> Mono.fromRunnable(prefixKey::release) ); } @@ -81,11 +51,6 @@ public class SubStageGetterHashSet implements return true; } - @Override - public boolean needsDebuggingKeyFlux() { - return assertsEnabled && enableAssertionsWhenUsingAssertions; - } - public int getKeyHashBinaryLength() { return keyHashSerializer.getSerializedBinaryLength(); } diff --git a/src/main/java/it/cavallium/dbengine/database/collections/SubStageGetterMap.java b/src/main/java/it/cavallium/dbengine/database/collections/SubStageGetterMap.java index 3b5ed4f..f137329 100644 --- a/src/main/java/it/cavallium/dbengine/database/collections/SubStageGetterMap.java +++ b/src/main/java/it/cavallium/dbengine/database/collections/SubStageGetterMap.java @@ -14,31 +14,19 @@ import reactor.core.publisher.Mono; public class SubStageGetterMap implements SubStageGetter, DatabaseMapDictionary> { - private static final boolean assertsEnabled; - static { - boolean assertsEnabledTmp = false; - //noinspection AssertWithSideEffects - assert assertsEnabledTmp = true; - //noinspection ConstantConditions - assertsEnabled = assertsEnabledTmp; - } - private final SerializerFixedBinaryLength keySerializer; private final Serializer valueSerializer; - private final boolean enableAssertionsWhenUsingAssertions; public SubStageGetterMap(SerializerFixedBinaryLength keySerializer, - Serializer valueSerializer, boolean enableAssertionsWhenUsingAssertions) { + Serializer valueSerializer) { this.keySerializer = keySerializer; this.valueSerializer = valueSerializer; - this.enableAssertionsWhenUsingAssertions = enableAssertionsWhenUsingAssertions; } @Override public Mono> subStage(LLDictionary dictionary, @Nullable CompositeSnapshot snapshot, - Mono prefixKeyMono, - @Nullable Flux debuggingKeysFlux) { + Mono prefixKeyMono) { return Mono.usingWhen(prefixKeyMono, prefixKey -> Mono .fromSupplier(() -> DatabaseMapDictionary @@ -47,24 +35,7 @@ public class SubStageGetterMap implements SubStageGetter, Databa keySerializer, valueSerializer ) - ) - .transform(mono -> { - if (debuggingKeysFlux != null) { - return debuggingKeysFlux.handle((key, sink) -> { - try { - if (key.readableBytes() != prefixKey.readableBytes() + getKeyBinaryLength()) { - sink.error(new IndexOutOfBoundsException()); - } else { - sink.complete(); - } - } finally { - key.release(); - } - }).then(mono); - } else { - return mono; - } - }), + ), prefixKey -> Mono.fromRunnable(prefixKey::release) ); } @@ -74,11 +45,6 @@ public class SubStageGetterMap implements SubStageGetter, Databa return true; } - @Override - public boolean needsDebuggingKeyFlux() { - return assertsEnabled && enableAssertionsWhenUsingAssertions; - } - public int getKeyBinaryLength() { return keySerializer.getSerializedBinaryLength(); } diff --git a/src/main/java/it/cavallium/dbengine/database/collections/SubStageGetterMapDeep.java b/src/main/java/it/cavallium/dbengine/database/collections/SubStageGetterMapDeep.java index ab8679e..913ce3b 100644 --- a/src/main/java/it/cavallium/dbengine/database/collections/SubStageGetterMapDeep.java +++ b/src/main/java/it/cavallium/dbengine/database/collections/SubStageGetterMapDeep.java @@ -14,28 +14,17 @@ import reactor.core.publisher.Mono; public class SubStageGetterMapDeep> implements SubStageGetter, DatabaseMapDictionaryDeep> { - private static final boolean assertsEnabled; - static { - boolean assertsEnabledTmp = false; - //noinspection AssertWithSideEffects - assert assertsEnabledTmp = true; - //noinspection ConstantConditions - assertsEnabled = assertsEnabledTmp; - } - private 
final SubStageGetter subStageGetter; private final SerializerFixedBinaryLength keySerializer; private final int keyExtLength; - private final boolean enableAssertionsWhenUsingAssertions; public SubStageGetterMapDeep(SubStageGetter subStageGetter, SerializerFixedBinaryLength keySerializer, - int keyExtLength, boolean enableAssertionsWhenUsingAssertions) { + int keyExtLength) { this.subStageGetter = subStageGetter; this.keySerializer = keySerializer; this.keyExtLength = keyExtLength; assert keyExtConsistency(); - this.enableAssertionsWhenUsingAssertions = enableAssertionsWhenUsingAssertions; } @@ -52,8 +41,7 @@ public class SubStageGetterMapDeep> implements @Override public Mono> subStage(LLDictionary dictionary, @Nullable CompositeSnapshot snapshot, - Mono prefixKeyMono, - @Nullable Flux debuggingKeysFlux) { + Mono prefixKeyMono) { return Mono.usingWhen(prefixKeyMono, prefixKey -> Mono .fromSupplier(() -> DatabaseMapDictionaryDeep @@ -63,24 +51,7 @@ public class SubStageGetterMapDeep> implements subStageGetter, keyExtLength ) - ) - .transform(mono -> { - if (debuggingKeysFlux != null) { - return debuggingKeysFlux.handle((key, sink) -> { - try { - if (key.readableBytes() != prefixKey.readableBytes() + getKeyBinaryLength()) { - sink.error(new IndexOutOfBoundsException()); - } else { - sink.complete(); - } - } finally { - key.release(); - } - }).then(mono); - } else { - return mono; - } - }), + ), prefixKey -> Mono.fromRunnable(prefixKey::release) ); } @@ -90,11 +61,6 @@ public class SubStageGetterMapDeep> implements return true; } - @Override - public boolean needsDebuggingKeyFlux() { - return assertsEnabled && enableAssertionsWhenUsingAssertions; - } - private Mono checkKeyFluxConsistency(ByteBuf prefixKey, List keys) { return Mono .fromCallable(() -> { diff --git a/src/main/java/it/cavallium/dbengine/database/collections/SubStageGetterSet.java b/src/main/java/it/cavallium/dbengine/database/collections/SubStageGetterSet.java index f915639..9f92697 100644 --- a/src/main/java/it/cavallium/dbengine/database/collections/SubStageGetterSet.java +++ b/src/main/java/it/cavallium/dbengine/database/collections/SubStageGetterSet.java @@ -14,49 +14,19 @@ import reactor.core.publisher.Mono; public class SubStageGetterSet implements SubStageGetter, DatabaseSetDictionary> { - private static final boolean assertsEnabled; - static { - boolean assertsEnabledTmp = false; - //noinspection AssertWithSideEffects - assert assertsEnabledTmp = true; - //noinspection ConstantConditions - assertsEnabled = assertsEnabledTmp; - } - private final SerializerFixedBinaryLength keySerializer; - private final boolean enableAssertionsWhenUsingAssertions; - public SubStageGetterSet(SerializerFixedBinaryLength keySerializer, - boolean enableAssertionsWhenUsingAssertions) { + public SubStageGetterSet(SerializerFixedBinaryLength keySerializer) { this.keySerializer = keySerializer; - this.enableAssertionsWhenUsingAssertions = enableAssertionsWhenUsingAssertions; } @Override public Mono> subStage(LLDictionary dictionary, @Nullable CompositeSnapshot snapshot, - Mono prefixKeyMono, - @Nullable Flux debuggingKeysFlux) { + Mono prefixKeyMono) { return Mono.usingWhen(prefixKeyMono, prefixKey -> Mono - .fromSupplier(() -> DatabaseSetDictionary.tail(dictionary, prefixKey.retain(), keySerializer)) - .transform(mono -> { - if (debuggingKeysFlux != null) { - return debuggingKeysFlux.handle((key, sink) -> { - try { - if (key.readableBytes() != prefixKey.readableBytes() + getKeyBinaryLength()) { - sink.error(new IndexOutOfBoundsException()); - 
} else { - sink.complete(); - } - } finally { - key.release(); - } - }).then(mono); - } else { - return mono; - } - }), + .fromSupplier(() -> DatabaseSetDictionary.tail(dictionary, prefixKey.retain(), keySerializer)), prefixKey -> Mono.fromRunnable(prefixKey::release) ); } @@ -66,11 +36,6 @@ public class SubStageGetterSet implements SubStageGetter, Dat return true; } - @Override - public boolean needsDebuggingKeyFlux() { - return assertsEnabled && enableAssertionsWhenUsingAssertions; - } - public int getKeyBinaryLength() { return keySerializer.getSerializedBinaryLength(); } diff --git a/src/main/java/it/cavallium/dbengine/database/collections/SubStageGetterSingle.java b/src/main/java/it/cavallium/dbengine/database/collections/SubStageGetterSingle.java index 3a44c79..ea11a28 100644 --- a/src/main/java/it/cavallium/dbengine/database/collections/SubStageGetterSingle.java +++ b/src/main/java/it/cavallium/dbengine/database/collections/SubStageGetterSingle.java @@ -13,15 +13,6 @@ import reactor.core.publisher.Mono; public class SubStageGetterSingle implements SubStageGetter> { - private static final boolean assertsEnabled; - static { - boolean assertsEnabledTmp = false; - //noinspection AssertWithSideEffects - assert assertsEnabledTmp = true; - //noinspection ConstantConditions - assertsEnabled = assertsEnabledTmp; - } - private final Serializer serializer; public SubStageGetterSingle(Serializer serializer) { @@ -31,29 +22,11 @@ public class SubStageGetterSingle implements SubStageGetter> subStage(LLDictionary dictionary, @Nullable CompositeSnapshot snapshot, - Mono keyPrefixMono, - @Nullable Flux debuggingKeysFlux) { + Mono keyPrefixMono) { return Mono.usingWhen( keyPrefixMono, keyPrefix -> Mono - .>fromSupplier(() -> new DatabaseSingle<>(dictionary, keyPrefix.retain(), serializer)) - .transform(mono -> { - if (debuggingKeysFlux != null) { - return debuggingKeysFlux.handle((key, sink) -> { - try { - if (needsDebuggingKeyFlux() && !LLUtils.equals(keyPrefix, key)) { - sink.error(new IndexOutOfBoundsException("Found more than one element!")); - } else { - sink.complete(); - } - } finally { - key.release(); - } - }).then(mono); - } else { - return mono; - } - }), + .>fromSupplier(() -> new DatabaseSingle<>(dictionary, keyPrefix.retain(), serializer)), keyPrefix -> Mono.fromRunnable(keyPrefix::release) ); } @@ -63,9 +36,4 @@ public class SubStageGetterSingle implements SubStageGetter> keys) { + private IntArrayList getLockIndicesEntries(List keys) { var list = new IntArrayList(keys.size()); - for (Entry key : keys) { + for (LLEntry key : keys) { list.add(getLockIndex(key.getKey())); } return list; @@ -290,7 +290,7 @@ public class LLLocalDictionary implements LLDictionary { throw new RocksDBException("Key buffer must be direct"); } ByteBuffer keyNioBuffer = LLUtils.toDirect(key); - assert !databaseOptions.enableDbAssertionsWhenUsingAssertions() || keyNioBuffer.isDirect(); + assert keyNioBuffer.isDirect(); // Create a direct result buffer because RocksDB works only with direct buffers ByteBuf resultBuf = alloc.directBuffer(INITIAL_DIRECT_READ_BYTE_BUF_SIZE_BYTES); try { @@ -300,17 +300,15 @@ public class LLLocalDictionary implements LLDictionary { do { // Create the result nio buffer to pass to RocksDB resultNioBuf = resultBuf.nioBuffer(0, resultBuf.capacity()); - if (databaseOptions.enableDbAssertionsWhenUsingAssertions()) { - assert keyNioBuffer.isDirect(); - assert resultNioBuf.isDirect(); - } + assert keyNioBuffer.isDirect(); + assert resultNioBuf.isDirect(); valueSize = db.get(cfh, 
Objects.requireNonNullElse(readOptions, EMPTY_READ_OPTIONS), keyNioBuffer.position(0), resultNioBuf ); if (valueSize != RocksDB.NOT_FOUND) { - if (databaseOptions.enableDbAssertionsWhenUsingAssertions()) { + if (ASSERTIONS_ENABLED) { // todo: check if position is equal to data that have been read // todo: check if limit is equal to value size or data that have been read assert valueSize <= 0 || resultNioBuf.limit() > 0; @@ -408,11 +406,11 @@ public class LLLocalDictionary implements LLDictionary { throw new RocksDBException("Value buffer must be direct"); } var keyNioBuffer = LLUtils.toDirect(key); - assert !databaseOptions.enableDbAssertionsWhenUsingAssertions() || keyNioBuffer.isDirect(); + assert keyNioBuffer.isDirect(); var valueNioBuffer = LLUtils.toDirect(value); - assert !databaseOptions.enableDbAssertionsWhenUsingAssertions() || valueNioBuffer.isDirect(); + assert valueNioBuffer.isDirect(); db.put(cfh, validWriteOptions, keyNioBuffer, valueNioBuffer); } else { db.put(cfh, validWriteOptions, LLUtils.toArray(key), LLUtils.toArray(value)); @@ -750,8 +748,7 @@ public class LLLocalDictionary implements LLDictionary { newData = updater.apply(prevDataToSendToUpdater == null ? null : prevDataToSendToUpdater.retain()); - assert !databaseOptions.enableDbAssertionsWhenUsingAssertions() - || prevDataToSendToUpdater == null + assert prevDataToSendToUpdater == null || prevDataToSendToUpdater.readerIndex() == 0 || !prevDataToSendToUpdater.isReadable(); } finally { @@ -886,7 +883,7 @@ public class LLLocalDictionary implements LLDictionary { .single() .map(LLUtils::booleanToResponseByteBuffer) .doAfterTerminate(() -> { - assert !databaseOptions.enableDbAssertionsWhenUsingAssertions() || key.refCnt() > 0; + assert key.refCnt() > 0; }); case PREVIOUS_VALUE -> Mono .fromCallable(() -> { @@ -912,7 +909,7 @@ public class LLLocalDictionary implements LLDictionary { try { return dbGet(cfh, null, key.retain(), true); } finally { - assert !databaseOptions.enableDbAssertionsWhenUsingAssertions() || key.refCnt() > 0; + assert key.refCnt() > 0; } } } else { @@ -1005,8 +1002,7 @@ public class LLLocalDictionary implements LLDictionary { .doAfterTerminate(() -> keyBufsWindow.forEach(ReferenceCounted::release)); }, 2) // Max concurrency is 2 to read data while preparing the next segment .doOnDiscard(Entry.class, discardedEntry -> { - //noinspection unchecked - var entry = (Entry) discardedEntry; + var entry = (LLEntry) discardedEntry; entry.getKey().release(); entry.getValue().release(); }) @@ -1019,14 +1015,14 @@ public class LLLocalDictionary implements LLDictionary { } @Override - public Flux> putMulti(Flux> entries, boolean getOldValues) { + public Flux putMulti(Flux entries, boolean getOldValues) { return entries .buffer(Math.min(MULTI_GET_WINDOW, CAPPED_WRITE_BATCH_CAP)) .flatMapSequential(ew -> Mono .using( () -> ew, entriesWindow -> Mono - .>fromCallable(() -> { + .fromCallable(() -> { Iterable locks; ArrayList stamps; if (updateMode == UpdateMode.ALLOW) { @@ -1047,13 +1043,15 @@ public class LLLocalDictionary implements LLDictionary { MAX_WRITE_BATCH_SIZE, BATCH_WRITE_OPTIONS ); - for (Entry entry : entriesWindow) { - batch.put(cfh, entry.getKey().retain(), entry.getValue().retain()); + for (LLEntry entry : entriesWindow) { + var k = entry.getKey().retain(); + var v = entry.getValue().retain(); + batch.put(cfh, k, v); } batch.writeToDbAndClose(); batch.close(); } else { - for (Entry entry : entriesWindow) { + for (LLEntry entry : entriesWindow) { db.put(cfh, EMPTY_WRITE_OPTIONS, 
entry.getKey().nioBuffer(), entry.getValue().nioBuffer()); } } @@ -1077,8 +1075,7 @@ public class LLLocalDictionary implements LLDictionary { return this .getMulti(null, Flux .fromIterable(entriesWindow) - .map(Entry::getKey) - .map(ByteBuf::retain) + .map(entry -> entry.getKey().retain()) .map(buf -> Tuples.of(obj, buf)), false) .publishOn(dbScheduler) .then(transformer); @@ -1087,9 +1084,8 @@ public class LLLocalDictionary implements LLDictionary { } }), entriesWindow -> { - for (Entry entry : entriesWindow) { - entry.getKey().release(); - entry.getValue().release(); + for (LLEntry entry : entriesWindow) { + entry.release(); } } ), 2) // Max concurrency is 2 to read data while preparing the next segment @@ -1244,7 +1240,7 @@ public class LLLocalDictionary implements LLDictionary { } @Override - public Flux> getRange(@Nullable LLSnapshot snapshot, + public Flux getRange(@Nullable LLSnapshot snapshot, Mono rangeMono, boolean existsAlmostCertainly) { return Flux.usingWhen(rangeMono, @@ -1260,7 +1256,7 @@ public class LLLocalDictionary implements LLDictionary { } @Override - public Flux>> getRangeGrouped(@Nullable LLSnapshot snapshot, + public Flux> getRangeGrouped(@Nullable LLSnapshot snapshot, Mono rangeMono, int prefixLength, boolean existsAlmostCertainly) { return Flux.usingWhen(rangeMono, @@ -1276,18 +1272,18 @@ public class LLLocalDictionary implements LLDictionary { ); } - private Flux> getRangeSingle(LLSnapshot snapshot, + private Flux getRangeSingle(LLSnapshot snapshot, Mono keyMono, boolean existsAlmostCertainly) { return Flux.usingWhen(keyMono, key -> this .get(snapshot, Mono.just(key).map(ByteBuf::retain), existsAlmostCertainly) - .map(value -> Map.entry(key.retain(), value)), + .map(value -> new LLEntry(key.retain(), value)), key -> Mono.fromRunnable(key::release) - ); + ).transform(LLUtils::handleDiscard); } - private Flux> getRangeMulti(LLSnapshot snapshot, Mono rangeMono) { + private Flux getRangeMulti(LLSnapshot snapshot, Mono rangeMono) { return Flux.usingWhen(rangeMono, range -> Flux.using( () -> new LLLocalEntryReactiveRocksIterator(db, alloc, cfh, range.retain(), @@ -1299,7 +1295,7 @@ public class LLLocalDictionary implements LLDictionary { ); } - private Flux>> getRangeMultiGrouped(LLSnapshot snapshot, Mono rangeMono, int prefixLength) { + private Flux> getRangeMultiGrouped(LLSnapshot snapshot, Mono rangeMono, int prefixLength) { return Flux.usingWhen(rangeMono, range -> Flux.using( () -> new LLLocalGroupedEntryReactiveRocksIterator(db, alloc, cfh, prefixLength, range.retain(), @@ -1436,7 +1432,7 @@ public class LLLocalDictionary implements LLDictionary { } @Override - public Mono setRange(Mono rangeMono, Flux> entries) { + public Mono setRange(Mono rangeMono, Flux entries) { return Mono.usingWhen(rangeMono, range -> { if (USE_WINDOW_IN_SET_RANGE) { @@ -1520,17 +1516,14 @@ public class LLLocalDictionary implements LLDictionary { ) .flatMap(keysWindowFlux -> keysWindowFlux .collectList() - .doOnDiscard(Entry.class, discardedEntry -> { - //noinspection unchecked - var entry = (Entry) discardedEntry; - entry.getKey().release(); - entry.getValue().release(); - }) .flatMap(entriesList -> Mono .fromCallable(() -> { try { if (!USE_WRITE_BATCHES_IN_SET_RANGE) { - for (Entry entry : entriesList) { + for (LLEntry entry : entriesList) { + assert !entry.isReleased(); + assert entry.getKey().refCnt() > 0; + assert entry.getValue().refCnt() > 0; db.put(cfh, EMPTY_WRITE_OPTIONS, entry.getKey().nioBuffer(), entry.getValue().nioBuffer()); } } else if 
(USE_CAPPED_WRITE_BATCH_IN_SET_RANGE) { @@ -1540,14 +1533,20 @@ public class LLLocalDictionary implements LLDictionary { MAX_WRITE_BATCH_SIZE, BATCH_WRITE_OPTIONS )) { - for (Entry entry : entriesList) { + for (LLEntry entry : entriesList) { + assert !entry.isReleased(); + assert entry.getKey().refCnt() > 0; + assert entry.getValue().refCnt() > 0; batch.put(cfh, entry.getKey().retain(), entry.getValue().retain()); } batch.writeToDbAndClose(); } } else { try (var batch = new WriteBatch(RESERVED_WRITE_BATCH_SIZE)) { - for (Entry entry : entriesList) { + for (LLEntry entry : entriesList) { + assert !entry.isReleased(); + assert entry.getKey().refCnt() > 0; + assert entry.getValue().refCnt() > 0; batch.put(cfh, LLUtils.toArray(entry.getKey()), LLUtils.toArray(entry.getValue())); } db.write(EMPTY_WRITE_OPTIONS, batch); @@ -1556,9 +1555,9 @@ public class LLLocalDictionary implements LLDictionary { } return null; } finally { - for (Entry entry : entriesList) { - entry.getKey().release(); - entry.getValue().release(); + for (LLEntry entry : entriesList) { + assert !entry.isReleased(); + entry.release(); } } }) @@ -1903,7 +1902,7 @@ public class LLLocalDictionary implements LLDictionary { } @Override - public Mono> getOne(@Nullable LLSnapshot snapshot, Mono rangeMono) { + public Mono getOne(@Nullable LLSnapshot snapshot, Mono rangeMono) { return Mono.usingWhen(rangeMono, range -> runOnDb(() -> { try (var readOpts = new ReadOptions(resolveSnapshot(snapshot))) { @@ -1940,7 +1939,7 @@ public class LLLocalDictionary implements LLDictionary { try { ByteBuf value = LLUtils.readDirectNioBuffer(alloc, rocksIterator::value); try { - return Map.entry(key.retain(), value.retain()); + return new LLEntry(key, value); } finally { value.release(); } @@ -2123,7 +2122,7 @@ public class LLLocalDictionary implements LLDictionary { } @Override - public Mono> removeOne(Mono rangeMono) { + public Mono removeOne(Mono rangeMono) { return Mono.usingWhen(rangeMono, range -> runOnDb(() -> { try (var readOpts = new ReadOptions(getReadOptions(null))) { @@ -2161,7 +2160,7 @@ public class LLLocalDictionary implements LLDictionary { ByteBuf key = LLUtils.readDirectNioBuffer(alloc, rocksIterator::key); ByteBuf value = LLUtils.readDirectNioBuffer(alloc, rocksIterator::value); dbDelete(cfh, null, key); - return Map.entry(key, value); + return new LLEntry(key, value); } finally { maxBound.release(); } diff --git a/src/main/java/it/cavallium/dbengine/database/disk/LLLocalEntryReactiveRocksIterator.java b/src/main/java/it/cavallium/dbengine/database/disk/LLLocalEntryReactiveRocksIterator.java index b8e014a..62eacca 100644 --- a/src/main/java/it/cavallium/dbengine/database/disk/LLLocalEntryReactiveRocksIterator.java +++ b/src/main/java/it/cavallium/dbengine/database/disk/LLLocalEntryReactiveRocksIterator.java @@ -2,6 +2,7 @@ package it.cavallium.dbengine.database.disk; import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBufAllocator; +import it.cavallium.dbengine.database.LLEntry; import it.cavallium.dbengine.database.LLRange; import java.util.Map; import java.util.Map.Entry; @@ -9,7 +10,7 @@ import org.rocksdb.ColumnFamilyHandle; import org.rocksdb.ReadOptions; import org.rocksdb.RocksDB; -public class LLLocalEntryReactiveRocksIterator extends LLLocalReactiveRocksIterator> { +public class LLLocalEntryReactiveRocksIterator extends LLLocalReactiveRocksIterator { public LLLocalEntryReactiveRocksIterator(RocksDB db, ByteBufAllocator alloc, @@ -22,7 +23,7 @@ public class LLLocalEntryReactiveRocksIterator extends 
LLLocalReactiveRocksItera } @Override - public Entry getEntry(ByteBuf key, ByteBuf value) { - return Map.entry(key, value); + public LLEntry getEntry(ByteBuf key, ByteBuf value) { + return new LLEntry(key, value); } } diff --git a/src/main/java/it/cavallium/dbengine/database/disk/LLLocalGroupedEntryReactiveRocksIterator.java b/src/main/java/it/cavallium/dbengine/database/disk/LLLocalGroupedEntryReactiveRocksIterator.java index 259f416..c86a92c 100644 --- a/src/main/java/it/cavallium/dbengine/database/disk/LLLocalGroupedEntryReactiveRocksIterator.java +++ b/src/main/java/it/cavallium/dbengine/database/disk/LLLocalGroupedEntryReactiveRocksIterator.java @@ -2,6 +2,7 @@ package it.cavallium.dbengine.database.disk; import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBufAllocator; +import it.cavallium.dbengine.database.LLEntry; import it.cavallium.dbengine.database.LLRange; import java.util.Map; import java.util.Map.Entry; @@ -10,7 +11,7 @@ import org.rocksdb.ReadOptions; import org.rocksdb.RocksDB; public class LLLocalGroupedEntryReactiveRocksIterator extends - LLLocalGroupedReactiveRocksIterator> { + LLLocalGroupedReactiveRocksIterator { public LLLocalGroupedEntryReactiveRocksIterator(RocksDB db, ByteBufAllocator alloc, ColumnFamilyHandle cfh, int prefixLength, @@ -22,7 +23,7 @@ public class LLLocalGroupedEntryReactiveRocksIterator extends } @Override - public Entry getEntry(ByteBuf key, ByteBuf value) { - return Map.entry(key, value); + public LLEntry getEntry(ByteBuf key, ByteBuf value) { + return new LLEntry(key, value); } } diff --git a/src/main/java/it/cavallium/dbengine/database/disk/LLLocalKeyValueDatabase.java b/src/main/java/it/cavallium/dbengine/database/disk/LLLocalKeyValueDatabase.java index 4301984..c4619b3 100644 --- a/src/main/java/it/cavallium/dbengine/database/disk/LLLocalKeyValueDatabase.java +++ b/src/main/java/it/cavallium/dbengine/database/disk/LLLocalKeyValueDatabase.java @@ -469,12 +469,7 @@ public class LLLocalKeyValueDatabase implements LLKeyValueDatabase { private ColumnFamilyHandle getCfh(byte[] columnName) throws RocksDBException { ColumnFamilyHandle cfh = handles.get(Column.special(Column.toString(columnName))); - if (databaseOptions.enableDbAssertionsWhenUsingAssertions()) { - //noinspection RedundantIfStatement - if (!enableColumnsBug) { - assert Arrays.equals(cfh.getName(), columnName); - } - } + assert enableColumnsBug || Arrays.equals(cfh.getName(), columnName); return cfh; } diff --git a/src/main/java/it/cavallium/dbengine/database/memory/LLMemoryDictionary.java b/src/main/java/it/cavallium/dbengine/database/memory/LLMemoryDictionary.java index e1c2d9c..8614e36 100644 --- a/src/main/java/it/cavallium/dbengine/database/memory/LLMemoryDictionary.java +++ b/src/main/java/it/cavallium/dbengine/database/memory/LLMemoryDictionary.java @@ -7,10 +7,12 @@ import it.cavallium.dbengine.database.Delta; import it.cavallium.dbengine.database.ExtraKeyOperationResult; import it.cavallium.dbengine.database.LLDictionary; import it.cavallium.dbengine.database.LLDictionaryResultType; +import it.cavallium.dbengine.database.LLEntry; import it.cavallium.dbengine.database.LLRange; import it.cavallium.dbengine.database.LLSnapshot; import it.cavallium.dbengine.database.LLUtils; import it.cavallium.dbengine.database.UpdateMode; +import it.cavallium.dbengine.database.disk.ReleasableSlice; import it.cavallium.dbengine.database.serialization.BiSerializationFunction; import it.cavallium.dbengine.database.serialization.SerializationException; import 
it.cavallium.dbengine.database.serialization.SerializationFunction; @@ -245,7 +247,7 @@ public class LLMemoryDictionary implements LLDictionary { } @Override - public Flux> putMulti(Flux> entries, boolean getOldValues) { + public Flux putMulti(Flux entries, boolean getOldValues) { return entries .handle((entry, sink) -> { var key = entry.getKey(); @@ -255,7 +257,7 @@ public class LLMemoryDictionary implements LLDictionary { if (v == null || !getOldValues) { sink.complete(); } else { - sink.next(Map.entry(key.retain(), kk(v))); + sink.next(new LLEntry(key.retain(), kk(v))); } } finally { key.release(); @@ -271,7 +273,7 @@ public class LLMemoryDictionary implements LLDictionary { } @Override - public Flux> getRange(@Nullable LLSnapshot snapshot, + public Flux getRange(@Nullable LLSnapshot snapshot, Mono rangeMono, boolean existsAlmostCertainly) { return Flux.usingWhen(rangeMono, @@ -280,13 +282,13 @@ public class LLMemoryDictionary implements LLDictionary { return Mono.fromCallable(() -> { var element = snapshots.get(resolveSnapshot(snapshot)) .get(k(range.getSingle())); - return Map.entry(range.getSingle().retain(), kk(element)); + return new LLEntry(range.getSingle().retain(), kk(element)); }).flux(); } else { return Mono .fromCallable(() -> mapSlice(snapshot, range)) .flatMapMany(map -> Flux.fromIterable(map.entrySet())) - .map(entry -> Map.entry(kk(entry.getKey()), kk(entry.getValue()))); + .map(entry -> new LLEntry(kk(entry.getKey()), kk(entry.getValue()))); } }, range -> Mono.fromRunnable(range::release) @@ -294,7 +296,7 @@ public class LLMemoryDictionary implements LLDictionary { } @Override - public Flux>> getRangeGrouped(@Nullable LLSnapshot snapshot, + public Flux> getRangeGrouped(@Nullable LLSnapshot snapshot, Mono rangeMono, int prefixLength, boolean existsAlmostCertainly) { @@ -333,8 +335,16 @@ public class LLMemoryDictionary implements LLDictionary { @Override public Flux getRangeKeyPrefixes(@Nullable LLSnapshot snapshot, Mono rangeMono, int prefixLength) { return getRangeKeys(snapshot, rangeMono) - .distinctUntilChanged(k -> k.slice(k.readerIndex(), prefixLength), LLUtils::equals) - .map(k -> k.slice(k.readerIndex(), prefixLength)); + .distinctUntilChanged(k -> k.slice(k.readerIndex(), prefixLength), (a, b) -> { + if (LLUtils.equals(a, b)) { + b.release(); + return true; + } else { + return false; + } + }) + .map(k -> k.slice(k.readerIndex(), prefixLength)) + .transform(LLUtils::handleDiscard); } @Override @@ -343,7 +353,7 @@ public class LLMemoryDictionary implements LLDictionary { } @Override - public Mono setRange(Mono rangeMono, Flux> entries) { + public Mono setRange(Mono rangeMono, Flux entries) { return Mono.error(new UnsupportedOperationException("Not implemented")); } @@ -361,7 +371,7 @@ public class LLMemoryDictionary implements LLDictionary { } @Override - public Mono> getOne(@Nullable LLSnapshot snapshot, Mono rangeMono) { + public Mono getOne(@Nullable LLSnapshot snapshot, Mono rangeMono) { return Mono.error(new UnsupportedOperationException("Not implemented")); } @@ -371,7 +381,7 @@ public class LLMemoryDictionary implements LLDictionary { } @Override - public Mono> removeOne(Mono rangeMono) { + public Mono removeOne(Mono rangeMono) { return Mono.error(new UnsupportedOperationException("Not implemented")); } diff --git a/src/test/java/it/cavallium/dbengine/DbTestUtils.java b/src/test/java/it/cavallium/dbengine/DbTestUtils.java index a2c6228..5934e51 100644 --- a/src/test/java/it/cavallium/dbengine/DbTestUtils.java +++ 
b/src/test/java/it/cavallium/dbengine/DbTestUtils.java @@ -1,9 +1,14 @@ package it.cavallium.dbengine; +import static org.junit.jupiter.api.Assertions.assertEquals; + import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBufAllocator; +import io.netty.buffer.PoolArenaMetric; import io.netty.buffer.PooledByteBufAllocator; +import io.netty.buffer.UnpooledByteBufAllocator; import it.cavallium.dbengine.database.Column; +import it.cavallium.dbengine.database.LLDatabaseConnection; import it.cavallium.dbengine.database.LLDictionary; import it.cavallium.dbengine.database.LLKeyValueDatabase; import it.cavallium.dbengine.database.UpdateMode; @@ -35,47 +40,129 @@ import reactor.core.scheduler.Schedulers; public class DbTestUtils { - public static final ByteBufAllocator ALLOCATOR = new PooledByteBufAllocator(true); + private volatile static ByteBufAllocator POOLED_ALLOCATOR = null; + + public static synchronized ByteBufAllocator getUncachedAllocator() { + try { + ensureNoLeaks(POOLED_ALLOCATOR); + } catch (Throwable ex) { + POOLED_ALLOCATOR = null; + } + if (POOLED_ALLOCATOR == null) { + POOLED_ALLOCATOR = new PooledByteBufAllocator(false, 1, 0, 8192, 11, 0, 0, true); + } + return POOLED_ALLOCATOR; + } + + public static synchronized ByteBufAllocator getUncachedAllocatorUnsafe() { + return POOLED_ALLOCATOR; + } + public static final AtomicInteger dbId = new AtomicInteger(0); + @SuppressWarnings("SameParameterValue") + private static int getActiveBuffers(ByteBufAllocator allocator) { + int directActive = 0, directAlloc = 0, directDealloc = 0; + if (allocator instanceof PooledByteBufAllocator alloc) { + for (PoolArenaMetric arena : alloc.directArenas()) { + directActive += arena.numActiveAllocations(); + directAlloc += arena.numAllocations(); + directDealloc += arena.numDeallocations(); + } + } else if (allocator instanceof UnpooledByteBufAllocator alloc) { + directActive += alloc.metric().usedDirectMemory(); + } else { + throw new UnsupportedOperationException(); + } + System.out.println("directActive " + directActive + " directAlloc " + directAlloc + " directDealloc " + directDealloc); + return directActive; + } + + @SuppressWarnings("SameParameterValue") + private static int getActiveHeapBuffers(ByteBufAllocator allocator) { + int heapActive = 0, heapAlloc = 0, heapDealloc = 0; + if (allocator instanceof PooledByteBufAllocator alloc) { + for (PoolArenaMetric arena : alloc.heapArenas()) { + heapActive += arena.numActiveAllocations(); + heapAlloc += arena.numAllocations(); + heapDealloc += arena.numDeallocations(); + } + } else if (allocator instanceof UnpooledByteBufAllocator alloc) { + heapActive += alloc.metric().usedHeapMemory(); + } else { + throw new UnsupportedOperationException(); + } + System.out.println("heapActive " + heapActive + " heapAlloc " + heapAlloc + " heapDealloc " + heapDealloc); + return heapActive; + } + public static Flux tempDb(Function> action) { - var wrkspcPath = Path.of("/tmp/.cache/tempdb-" + dbId.incrementAndGet() + "/"); - return Flux.usingWhen(Mono - .fromCallable(() -> { - if (Files.exists(wrkspcPath)) { - Files.walk(wrkspcPath).sorted(Comparator.reverseOrder()).forEach(file -> { - try { - Files.delete(file); - } catch (IOException ex) { - throw new CompletionException(ex); - } - }); - } - Files.createDirectories(wrkspcPath); - return null; - }) - .subscribeOn(Schedulers.boundedElastic()) - .then(new LLLocalDatabaseConnection(DbTestUtils.ALLOCATOR, wrkspcPath).connect()) - .flatMap(conn -> conn.getDatabase("testdb", - List.of(Column.dictionary("testmap"), 
Column.special("ints"), Column.special("longs")), - new DatabaseOptions(Map.of(), true, false, true, false, true, true, true, true, -1) - )), - action, - db -> db.close().then(Mono.fromCallable(() -> { - if (Files.exists(wrkspcPath)) { - Files.walk(wrkspcPath).sorted(Comparator.reverseOrder()).forEach(file -> { - try { - Files.delete(file); - } catch (IOException ex) { - throw new CompletionException(ex); - } - }); - } - return null; - }).subscribeOn(Schedulers.boundedElastic())) + return Flux.usingWhen(openTempDb(), + tempDb -> action.apply(tempDb.db()), + DbTestUtils::closeTempDb ); } + public static record TempDb(ByteBufAllocator allocator, LLDatabaseConnection connection, LLKeyValueDatabase db, + Path path) {} + + public static Mono openTempDb() { + return Mono.defer(() -> { + var wrkspcPath = Path.of("/tmp/.cache/tempdb-" + dbId.incrementAndGet() + "/"); + var alloc = getUncachedAllocator(); + return Mono + .fromCallable(() -> { + if (Files.exists(wrkspcPath)) { + Files.walk(wrkspcPath).sorted(Comparator.reverseOrder()).forEach(file -> { + try { + Files.delete(file); + } catch (IOException ex) { + throw new CompletionException(ex); + } + }); + } + Files.createDirectories(wrkspcPath); + return null; + }) + .subscribeOn(Schedulers.boundedElastic()) + .then(new LLLocalDatabaseConnection(alloc, wrkspcPath).connect()) + .flatMap(conn -> conn + .getDatabase("testdb", + List.of(Column.dictionary("testmap"), Column.special("ints"), Column.special("longs")), + new DatabaseOptions(Map.of(), true, false, true, false, true, true, true, -1) + ) + .map(db -> new TempDb(alloc, conn, db, wrkspcPath)) + ); + }); + } + + public static Mono closeTempDb(TempDb tempDb) { + return tempDb.db().close().then(tempDb.connection().disconnect()).then(Mono.fromCallable(() -> { + ensureNoLeaks(tempDb.allocator()); + if (tempDb.allocator() instanceof PooledByteBufAllocator pooledByteBufAllocator) { + pooledByteBufAllocator.trimCurrentThreadCache(); + pooledByteBufAllocator.freeThreadLocalCache(); + } + if (Files.exists(tempDb.path())) { + Files.walk(tempDb.path()).sorted(Comparator.reverseOrder()).forEach(file -> { + try { + Files.delete(file); + } catch (IOException ex) { + throw new CompletionException(ex); + } + }); + } + return null; + }).subscribeOn(Schedulers.boundedElastic())).then(); + } + + public static void ensureNoLeaks(ByteBufAllocator allocator) { + if (allocator != null) { + assertEquals(0, getActiveBuffers(allocator)); + assertEquals(0, getActiveHeapBuffers(allocator)); + } + } + public static Mono tempDictionary(LLKeyValueDatabase database, UpdateMode updateMode) { return tempDictionary(database, "testmap", updateMode); } @@ -98,13 +185,13 @@ public class DbTestUtils { int keyBytes) { if (dbType == DbType.MAP) { return DatabaseMapDictionary.simple(dictionary, - SerializerFixedBinaryLength.utf8(DbTestUtils.ALLOCATOR, keyBytes), - Serializer.utf8(DbTestUtils.ALLOCATOR) + SerializerFixedBinaryLength.utf8(dictionary.getAllocator(), keyBytes), + Serializer.utf8(dictionary.getAllocator()) ); } else { return DatabaseMapDictionaryHashed.simple(dictionary, - Serializer.utf8(DbTestUtils.ALLOCATOR), - Serializer.utf8(DbTestUtils.ALLOCATOR), + Serializer.utf8(dictionary.getAllocator()), + Serializer.utf8(dictionary.getAllocator()), s -> (short) s.hashCode(), new SerializerFixedBinaryLength<>() { @Override @@ -126,7 +213,7 @@ public class DbTestUtils { @Override public @NotNull ByteBuf serialize(@NotNull Short deserialized) { - var out = DbTestUtils.ALLOCATOR.directBuffer(Short.BYTES); + var out = 
dictionary.getAllocator().directBuffer(Short.BYTES); try { out.writeShort(deserialized); out.writerIndex(Short.BYTES); @@ -140,33 +227,31 @@ public class DbTestUtils { } } - public static DatabaseMapDictionaryDeep, + public static DatabaseMapDictionaryDeep, DatabaseMapDictionary> tempDatabaseMapDictionaryDeepMap( LLDictionary dictionary, int key1Bytes, int key2Bytes) { return DatabaseMapDictionaryDeep.deepTail(dictionary, - SerializerFixedBinaryLength.utf8(DbTestUtils.ALLOCATOR, key1Bytes), + SerializerFixedBinaryLength.utf8(dictionary.getAllocator(), key1Bytes), key2Bytes, - new SubStageGetterMap<>(SerializerFixedBinaryLength.utf8(DbTestUtils.ALLOCATOR, key2Bytes), - Serializer.utf8(DbTestUtils.ALLOCATOR), - true + new SubStageGetterMap<>(SerializerFixedBinaryLength.utf8(dictionary.getAllocator(), key2Bytes), + Serializer.utf8(dictionary.getAllocator()) ) ); } - public static DatabaseMapDictionaryDeep, + public static DatabaseMapDictionaryDeep, DatabaseMapDictionaryHashed> tempDatabaseMapDictionaryDeepMapHashMap( LLDictionary dictionary, int key1Bytes) { return DatabaseMapDictionaryDeep.deepTail(dictionary, - SerializerFixedBinaryLength.utf8(DbTestUtils.ALLOCATOR, key1Bytes), + SerializerFixedBinaryLength.utf8(dictionary.getAllocator(), key1Bytes), Integer.BYTES, - new SubStageGetterHashMap<>(Serializer.utf8(DbTestUtils.ALLOCATOR), - Serializer.utf8(DbTestUtils.ALLOCATOR), + new SubStageGetterHashMap<>(Serializer.utf8(dictionary.getAllocator()), + Serializer.utf8(dictionary.getAllocator()), String::hashCode, - SerializerFixedBinaryLength.intSerializer(DbTestUtils.ALLOCATOR), - true + SerializerFixedBinaryLength.intSerializer(dictionary.getAllocator()) ) ); } @@ -174,10 +259,10 @@ public class DbTestUtils { public static DatabaseMapDictionaryHashed tempDatabaseMapDictionaryHashMap( LLDictionary dictionary) { return DatabaseMapDictionaryHashed.simple(dictionary, - Serializer.utf8(DbTestUtils.ALLOCATOR), - Serializer.utf8(DbTestUtils.ALLOCATOR), + Serializer.utf8(dictionary.getAllocator()), + Serializer.utf8(dictionary.getAllocator()), String::hashCode, - SerializerFixedBinaryLength.intSerializer(DbTestUtils.ALLOCATOR) + SerializerFixedBinaryLength.intSerializer(dictionary.getAllocator()) ); } } diff --git a/src/test/java/it/cavallium/dbengine/OldDatabaseTests.java b/src/test/java/it/cavallium/dbengine/OldDatabaseTests.java index 85620a1..f7c6ff4 100644 --- a/src/test/java/it/cavallium/dbengine/OldDatabaseTests.java +++ b/src/test/java/it/cavallium/dbengine/OldDatabaseTests.java @@ -75,7 +75,7 @@ public class OldDatabaseTests { .map(dictionary -> DatabaseMapDictionaryDeep.deepTail(dictionary, new FixedStringSerializer(3), 4, - new SubStageGetterMap<>(new FixedStringSerializer(4), Serializer.noop(), true) + new SubStageGetterMap<>(new FixedStringSerializer(4), Serializer.noop()) )) .flatMap(collection -> Flux .fromIterable(originalSuperKeys) @@ -135,7 +135,7 @@ public class OldDatabaseTests { .then(new LLLocalDatabaseConnection(PooledByteBufAllocator.DEFAULT, wrkspcPath).connect()) .flatMap(conn -> conn.getDatabase("testdb", List.of(Column.dictionary("testmap")), - new DatabaseOptions(Map.of(), true, false, true, false, true, true, true, true, -1) + new DatabaseOptions(Map.of(), true, false, true, false, true, true, true, -1) )); } @@ -159,14 +159,14 @@ public class OldDatabaseTests { .map(dictionary -> DatabaseMapDictionaryDeep.deepTail(dictionary, new FixedStringSerializer(3), 4, - new SubStageGetterMap<>(new FixedStringSerializer(4), Serializer.noop(), true) + new SubStageGetterMap<>(new 
FixedStringSerializer(4), Serializer.noop()) )), db .getDictionary("testmap", UpdateMode.DISALLOW) .map(dictionary -> DatabaseMapDictionaryDeep.deepTail(dictionary, new FixedStringSerializer(6), 7, - new SubStageGetterMap<>(new FixedStringSerializer(7), Serializer.noop(), true) + new SubStageGetterMap<>(new FixedStringSerializer(7), Serializer.noop()) )) ) .single() diff --git a/src/test/java/it/cavallium/dbengine/TestDictionary.java b/src/test/java/it/cavallium/dbengine/TestDictionary.java index 5ad61cd..99f1002 100644 --- a/src/test/java/it/cavallium/dbengine/TestDictionary.java +++ b/src/test/java/it/cavallium/dbengine/TestDictionary.java @@ -1,5 +1,8 @@ package it.cavallium.dbengine; +import static it.cavallium.dbengine.DbTestUtils.ensureNoLeaks; +import static it.cavallium.dbengine.DbTestUtils.getUncachedAllocator; +import static it.cavallium.dbengine.DbTestUtils.getUncachedAllocatorUnsafe; import static it.cavallium.dbengine.DbTestUtils.tempDb; import static it.cavallium.dbengine.DbTestUtils.tempDictionary; @@ -7,6 +10,8 @@ import it.cavallium.dbengine.database.LLDictionary; import it.cavallium.dbengine.database.UpdateMode; import java.util.Arrays; import java.util.stream.Stream; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; @@ -18,6 +23,16 @@ public class TestDictionary { return Arrays.stream(UpdateMode.values()).map(Arguments::of); } + @BeforeEach + public void beforeEach() { + ensureNoLeaks(getUncachedAllocator()); + } + + @AfterEach + public void afterEach() { + ensureNoLeaks(getUncachedAllocatorUnsafe()); + } + @ParameterizedTest @MethodSource("provideArgumentsCreate") public void testCreate(UpdateMode updateMode) { diff --git a/src/test/java/it/cavallium/dbengine/TestDictionaryMap.java b/src/test/java/it/cavallium/dbengine/TestDictionaryMap.java index 84397f2..2b71580 100644 --- a/src/test/java/it/cavallium/dbengine/TestDictionaryMap.java +++ b/src/test/java/it/cavallium/dbengine/TestDictionaryMap.java @@ -2,6 +2,7 @@ package it.cavallium.dbengine; import static it.cavallium.dbengine.DbTestUtils.*; +import it.cavallium.dbengine.database.LLUtils; import it.cavallium.dbengine.database.UpdateMode; import java.util.Arrays; import java.util.List; @@ -13,6 +14,8 @@ import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.stream.Collectors; import java.util.stream.Stream; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; @@ -74,6 +77,16 @@ public class TestDictionaryMap { .map(fullTuple -> Arguments.of(fullTuple.getT1(), fullTuple.getT2(), fullTuple.getT3(), fullTuple.getT4(), fullTuple.getT5())); } + @BeforeEach + public void beforeEach() { + ensureNoLeaks(getUncachedAllocator()); + } + + @AfterEach + public void afterEach() { + ensureNoLeaks(getUncachedAllocatorUnsafe()); + } + @ParameterizedTest @MethodSource("provideArgumentsPut") public void testPut(DbType dbType, UpdateMode updateMode, String key, String value, boolean shouldFail) { @@ -338,6 +351,7 @@ public class TestDictionaryMap { ) .filter(k -> k.getValue().isPresent()) .map(k -> Map.entry(k.getKey(), k.getValue().orElseThrow())) + .transform(LLUtils::handleDiscard) )); if (shouldFail) { stpVer.verifyError(); @@ 
-390,6 +404,7 @@ public class TestDictionaryMap { ) .doAfterTerminate(map::release) ) + .transform(LLUtils::handleDiscard) )); if (shouldFail) { stpVer.verifyError(); @@ -527,6 +542,7 @@ public class TestDictionaryMap { ) .doAfterTerminate(map::release) ) + .transform(LLUtils::handleDiscard) )); if (shouldFail) { stpVer.verifyError(); @@ -555,6 +571,7 @@ public class TestDictionaryMap { ) .doAfterTerminate(map::release) ) + .transform(LLUtils::handleDiscard) )); if (shouldFail) { stpVer.verifyError(); @@ -588,6 +605,7 @@ public class TestDictionaryMap { ) .doAfterTerminate(map::release) ) + .transform(LLUtils::handleDiscard) )); if (shouldFail) { stpVer.verifyError(); @@ -616,6 +634,7 @@ public class TestDictionaryMap { .doAfterTerminate(map::release) ) .flatMap(val -> shouldFail ? Mono.empty() : Mono.just(val)) + .transform(LLUtils::handleDiscard) )); if (shouldFail) { stpVer.verifyError(); @@ -627,7 +646,6 @@ public class TestDictionaryMap { @ParameterizedTest @MethodSource("provideArgumentsPutMulti") public void testPutMultiClear(DbType dbType, UpdateMode updateMode, Map entries, boolean shouldFail) { - var remainingEntries = new ConcurrentHashMap, Boolean>().keySet(true); Step stpVer = StepVerifier .create(tempDb(db -> tempDictionary(db, updateMode) .map(dict -> tempDatabaseMapDictionaryMap(dict, dbType, 5)) @@ -642,6 +660,7 @@ public class TestDictionaryMap { .doAfterTerminate(map::release) ) .flatMap(val -> shouldFail ? Mono.empty() : Mono.just(val)) + .transform(LLUtils::handleDiscard) )); if (shouldFail) { stpVer.verifyError(); diff --git a/src/test/java/it/cavallium/dbengine/TestDictionaryMapDeep.java b/src/test/java/it/cavallium/dbengine/TestDictionaryMapDeep.java index 371e5c6..725c496 100644 --- a/src/test/java/it/cavallium/dbengine/TestDictionaryMapDeep.java +++ b/src/test/java/it/cavallium/dbengine/TestDictionaryMapDeep.java @@ -1,9 +1,13 @@ package it.cavallium.dbengine; +import static it.cavallium.dbengine.DbTestUtils.ensureNoLeaks; +import static it.cavallium.dbengine.DbTestUtils.getUncachedAllocator; +import static it.cavallium.dbengine.DbTestUtils.getUncachedAllocatorUnsafe; import static it.cavallium.dbengine.DbTestUtils.tempDatabaseMapDictionaryDeepMap; import static it.cavallium.dbengine.DbTestUtils.tempDb; import static it.cavallium.dbengine.DbTestUtils.tempDictionary; +import it.cavallium.dbengine.database.LLUtils; import it.cavallium.dbengine.database.UpdateMode; import java.util.Arrays; import java.util.Map; @@ -13,6 +17,8 @@ import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.stream.Collectors; import java.util.stream.Stream; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; @@ -140,6 +146,16 @@ public class TestDictionaryMapDeep { .toStream(); } + @BeforeEach + public void beforeEach() { + ensureNoLeaks(getUncachedAllocator()); + } + + @AfterEach + public void afterEach() { + ensureNoLeaks(getUncachedAllocatorUnsafe()); + } + @ParameterizedTest @MethodSource("provideArgumentsSet") public void testSetValueGetValue(UpdateMode updateMode, String key, Map value, boolean shouldFail) { @@ -520,35 +536,31 @@ public class TestDictionaryMapDeep { if (updateMode != UpdateMode.ALLOW_UNSAFE && !isTestBadKeysEnabled()) { return; } - var stpVer = StepVerifier - .create(tempDb(db -> tempDictionary(db, updateMode) - .map(dict -> 
tempDatabaseMapDictionaryDeepMap(dict, 5, 6)) - .flatMapMany(map -> Flux - .concat( - map.updateValue(key, old -> { - assert old == null; - return Map.of("error?", "error."); - }).then(map.getValue(null, key)), - map.updateValue(key, false, old -> { - assert Objects.equals(old, Map.of("error?", "error.")); - return Map.of("error?", "error."); - }).then(map.getValue(null, key)), - map.updateValue(key, true, old -> { - assert Objects.equals(old, Map.of("error?", "error.")); - return Map.of("error?", "error."); - }).then(map.getValue(null, key)), - map.updateValue(key, true, old -> { - assert Objects.equals(old, Map.of("error?", "error.")); - return value; - }).then(map.getValue(null, key)), - map.updateValue(key, true, old -> { - assert Objects.equals(old, value); - return value; - }).then(map.getValue(null, key)) - ) - .doAfterTerminate(map::release) - ) - )); + var stpVer = StepVerifier.create(tempDb(db -> tempDictionary(db, updateMode) + .map(dict -> tempDatabaseMapDictionaryDeepMap(dict, 5, 6)) + .flatMapMany(map -> Flux.concat( + map.updateValue(key, old -> { + assert old == null; + return Map.of("error?", "error."); + }).then(map.getValue(null, key)), + map.updateValue(key, false, old -> { + assert Objects.equals(old, Map.of("error?", "error.")); + return Map.of("error?", "error."); + }).then(map.getValue(null, key)), + map.updateValue(key, true, old -> { + assert Objects.equals(old, Map.of("error?", "error.")); + return Map.of("error?", "error."); + }).then(map.getValue(null, key)), + map.updateValue(key, true, old -> { + assert Objects.equals(old, Map.of("error?", "error.")); + return value; + }).then(map.getValue(null, key)), + map.updateValue(key, true, old -> { + assert Objects.equals(old, value); + return value; + }).then(map.getValue(null, key)) + ).doAfterTerminate(map::release)) + )); if (updateMode != UpdateMode.ALLOW_UNSAFE || shouldFail) { stpVer.verifyError(); } else { @@ -795,6 +807,7 @@ public class TestDictionaryMapDeep { ) .doAfterTerminate(map::release); }) + .transform(LLUtils::handleDiscard) )); if (shouldFail) { stpVer.verifyError(); diff --git a/src/test/java/it/cavallium/dbengine/TestDictionaryMapDeepHashMap.java b/src/test/java/it/cavallium/dbengine/TestDictionaryMapDeepHashMap.java index 36b38bb..4717705 100644 --- a/src/test/java/it/cavallium/dbengine/TestDictionaryMapDeepHashMap.java +++ b/src/test/java/it/cavallium/dbengine/TestDictionaryMapDeepHashMap.java @@ -1,5 +1,8 @@ package it.cavallium.dbengine; +import static it.cavallium.dbengine.DbTestUtils.ensureNoLeaks; +import static it.cavallium.dbengine.DbTestUtils.getUncachedAllocator; +import static it.cavallium.dbengine.DbTestUtils.getUncachedAllocatorUnsafe; import static it.cavallium.dbengine.DbTestUtils.tempDatabaseMapDictionaryDeepMapHashMap; import static it.cavallium.dbengine.DbTestUtils.tempDb; import static it.cavallium.dbengine.DbTestUtils.tempDictionary; @@ -13,6 +16,8 @@ import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.stream.Collectors; import java.util.stream.Stream; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; @@ -94,6 +99,16 @@ public class TestDictionaryMapDeepHashMap { .toStream(); } + @BeforeEach + public void beforeEach() { + ensureNoLeaks(getUncachedAllocator()); + } + + @AfterEach + public void afterEach() { + ensureNoLeaks(getUncachedAllocatorUnsafe()); + 
} + @ParameterizedTest @MethodSource("provideArgumentsPut") public void testAtPutValueGetAllValues(UpdateMode updateMode, String key1, String key2, String value, boolean shouldFail) { diff --git a/src/test/java/it/cavallium/dbengine/TestLLDictionaryLeaks.java b/src/test/java/it/cavallium/dbengine/TestLLDictionaryLeaks.java new file mode 100644 index 0000000..b1e656d --- /dev/null +++ b/src/test/java/it/cavallium/dbengine/TestLLDictionaryLeaks.java @@ -0,0 +1,217 @@ +package it.cavallium.dbengine; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +import io.netty.buffer.ByteBuf; +import it.cavallium.dbengine.DbTestUtils.TempDb; +import it.cavallium.dbengine.database.LLDictionary; +import it.cavallium.dbengine.database.LLDictionaryResultType; +import it.cavallium.dbengine.database.LLKeyValueDatabase; +import it.cavallium.dbengine.database.LLUtils; +import it.cavallium.dbengine.database.UpdateMode; +import it.cavallium.dbengine.database.UpdateReturnMode; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.Arrays; +import java.util.Objects; +import java.util.concurrent.Flow.Publisher; +import java.util.stream.Stream; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; +import reactor.core.scheduler.Schedulers; + +public class TestLLDictionaryLeaks { + + private TempDb tempDb; + private LLKeyValueDatabase db; + + @BeforeEach + public void beforeEach() { + tempDb = Objects.requireNonNull(DbTestUtils.openTempDb().block(), "TempDB"); + db = tempDb.db(); + } + + public static Stream provideArguments() { + return Arrays.stream(UpdateMode.values()).map(Arguments::of); + } + + public static Stream providePutArguments() { + var updateModes = Arrays.stream(UpdateMode.values()); + return updateModes.flatMap(updateMode -> { + var resultTypes = Arrays.stream(LLDictionaryResultType.values()); + return resultTypes.map(resultType -> Arguments.of(updateMode, resultType)); + }); + } + + public static Stream provideUpdateArguments() { + var updateModes = Arrays.stream(UpdateMode.values()); + return updateModes.flatMap(updateMode -> { + var resultTypes = Arrays.stream(UpdateReturnMode.values()); + return resultTypes.map(resultType -> Arguments.of(updateMode, resultType)); + }); + } + + private LLDictionary getDict(UpdateMode updateMode) { + var dict = DbTestUtils.tempDictionary(db, updateMode).block(); + var key1 = Mono.fromCallable(() -> fromString("test-key-1")); + var key2 = Mono.fromCallable(() -> fromString("test-key-2")); + var key3 = Mono.fromCallable(() -> fromString("test-key-3")); + var key4 = Mono.fromCallable(() -> fromString("test-key-4")); + var value = Mono.fromCallable(() -> fromString("test-value")); + dict.put(key1, value, LLDictionaryResultType.VOID).block(); + dict.put(key2, value, LLDictionaryResultType.VOID).block(); + dict.put(key3, value, LLDictionaryResultType.VOID).block(); + dict.put(key4, value, LLDictionaryResultType.VOID).block(); + return dict; + } + + private ByteBuf fromString(String s) { + var sb = s.getBytes(StandardCharsets.UTF_8); + var b = db.getAllocator().buffer(sb.length); + 
b.writeBytes(sb); + return b; + } + + private void run(Flux publisher) { + publisher.subscribeOn(Schedulers.immediate()).blockLast(); + } + + private void runVoid(Mono publisher) { + publisher.then().subscribeOn(Schedulers.immediate()).block(); + } + + private <T> T run(Mono<T> publisher) { + return publisher.subscribeOn(Schedulers.immediate()).block(); + } + + private <T> T run(boolean shouldFail, Mono<T> publisher) { + return publisher.subscribeOn(Schedulers.immediate()).transform(mono -> { + if (shouldFail) { + return mono.onErrorResume(ex -> Mono.empty()); + } else { + return mono; + } + }).block(); + } + + private void runVoid(boolean shouldFail, Mono publisher) { + publisher.then().subscribeOn(Schedulers.immediate()).transform(mono -> { + if (shouldFail) { + return mono.onErrorResume(ex -> Mono.empty()); + } else { + return mono; + } + }).block(); + } + + @Test + public void testNoOp() { + } + + @ParameterizedTest + @MethodSource("provideArguments") + public void testGetDict(UpdateMode updateMode) { + var dict = getDict(updateMode); + } + + @ParameterizedTest + @MethodSource("provideArguments") + public void testGetColumnName(UpdateMode updateMode) { + var dict = getDict(updateMode); + dict.getColumnName(); + } + + @ParameterizedTest + @MethodSource("provideArguments") + public void testGetAllocator(UpdateMode updateMode) { + var dict = getDict(updateMode); + dict.getAllocator(); + } + + @ParameterizedTest + @MethodSource("provideArguments") + public void testGet(UpdateMode updateMode) { + var dict = getDict(updateMode); + var key = Mono.fromCallable(() -> fromString("test")); + runVoid(dict.get(null, key).then().transform(LLUtils::handleDiscard)); + runVoid(dict.get(null, key, true).then().transform(LLUtils::handleDiscard)); + runVoid(dict.get(null, key, false).then().transform(LLUtils::handleDiscard)); + } + + @ParameterizedTest + @MethodSource("providePutArguments") + public void testPut(UpdateMode updateMode, LLDictionaryResultType resultType) { + var dict = getDict(updateMode); + var key = Mono.fromCallable(() -> fromString("test-key")); + var value = Mono.fromCallable(() -> fromString("test-value")); + runVoid(dict.put(key, value, resultType).then()); + } + + @ParameterizedTest + @MethodSource("provideArguments") + public void testGetUpdateMode(UpdateMode updateMode) { + var dict = getDict(updateMode); + assertEquals(updateMode, run(dict.getUpdateMode())); + } + + @ParameterizedTest + @MethodSource("provideUpdateArguments") + public void testUpdate(UpdateMode updateMode, UpdateReturnMode updateReturnMode) { + var dict = getDict(updateMode); + var key = Mono.fromCallable(() -> fromString("test-key")); + runVoid(updateMode == UpdateMode.DISALLOW, + dict.update(key, old -> old, updateReturnMode, true).then().transform(LLUtils::handleDiscard) + ); + runVoid(updateMode == UpdateMode.DISALLOW, + dict.update(key, old -> old, updateReturnMode, false).then().transform(LLUtils::handleDiscard) + ); + runVoid(updateMode == UpdateMode.DISALLOW, + dict.update(key, old -> old, updateReturnMode).then().transform(LLUtils::handleDiscard) + ); + } + + @ParameterizedTest + @MethodSource("provideArguments") + public void testUpdateAndGetDelta(UpdateMode updateMode) { + var dict = getDict(updateMode); + var key = Mono.fromCallable(() -> fromString("test-key")); + runVoid(updateMode == UpdateMode.DISALLOW, + dict.updateAndGetDelta(key, old -> old, true).then().transform(LLUtils::handleDiscard) + ); + runVoid(updateMode == UpdateMode.DISALLOW, + dict.updateAndGetDelta(key, old -> old, 
false).then().transform(LLUtils::handleDiscard) + ); + runVoid(updateMode == UpdateMode.DISALLOW, + dict.updateAndGetDelta(key, old -> old).then().transform(LLUtils::handleDiscard) + ); + } + + @ParameterizedTest + @MethodSource("provideArguments") + public void testClear(UpdateMode updateMode) { + var dict = getDict(updateMode); + runVoid(dict.clear()); + } + + @ParameterizedTest + @MethodSource("providePutArguments") + public void testRemove(UpdateMode updateMode, LLDictionaryResultType resultType) { + var dict = getDict(updateMode); + var key = Mono.fromCallable(() -> fromString("test-key")); + runVoid(dict.remove(key, resultType).then()); + } + + @AfterEach + public void afterEach() { + DbTestUtils.closeTempDb(tempDb).block(); + } +} diff --git a/src/test/java/it/cavallium/dbengine/TestSingletons.java b/src/test/java/it/cavallium/dbengine/TestSingletons.java index ba4cbc0..fcd1647 100644 --- a/src/test/java/it/cavallium/dbengine/TestSingletons.java +++ b/src/test/java/it/cavallium/dbengine/TestSingletons.java @@ -1,11 +1,16 @@ package it.cavallium.dbengine; +import static it.cavallium.dbengine.DbTestUtils.ensureNoLeaks; +import static it.cavallium.dbengine.DbTestUtils.getUncachedAllocator; +import static it.cavallium.dbengine.DbTestUtils.getUncachedAllocatorUnsafe; import static it.cavallium.dbengine.DbTestUtils.tempDb; import it.cavallium.dbengine.database.LLKeyValueDatabase; import it.cavallium.dbengine.database.collections.DatabaseInt; import it.cavallium.dbengine.database.collections.DatabaseLong; import java.util.stream.Stream; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; @@ -34,6 +39,16 @@ public class TestSingletons { ); } + @BeforeEach + public void beforeEach() { + ensureNoLeaks(getUncachedAllocator()); + } + + @AfterEach + public void afterEach() { + ensureNoLeaks(getUncachedAllocatorUnsafe()); + } + @Test public void testCreateInteger() { StepVerifier