Partial migration to ResourceSupport
This commit is contained in:
parent 247207febe
commit 3a544d4297
@@ -5,13 +5,14 @@ import io.net5.buffer.api.Owned;
import io.net5.buffer.api.internal.ResourceSupport;
import it.cavallium.dbengine.client.query.current.data.TotalHitsCount;
import it.cavallium.dbengine.database.LLSearchResultShard;
import it.cavallium.dbengine.database.LiveResourceSupport;
import java.util.Objects;
import org.warp.commonutils.log.Logger;
import org.warp.commonutils.log.LoggerFactory;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

public final class SearchResult<T, U> extends ResourceSupport<SearchResult<T, U>, SearchResult<T, U>> {
public final class SearchResult<T, U> extends LiveResourceSupport<SearchResult<T, U>, SearchResult<T, U>> {

private Flux<SearchResultItem<T, U>> results;
private TotalHitsCount totalHitsCount;
@@ -5,6 +5,7 @@ import io.net5.buffer.api.Owned;
import io.net5.buffer.api.internal.ResourceSupport;
import it.cavallium.dbengine.client.query.current.data.TotalHitsCount;
import it.cavallium.dbengine.database.LLSearchResultShard;
import it.cavallium.dbengine.database.LiveResourceSupport;
import it.cavallium.dbengine.database.collections.ValueGetter;
import java.util.Objects;
import org.reactivestreams.Publisher;
@@ -14,7 +15,7 @@ import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

@SuppressWarnings("unused")
public final class SearchResultKeys<T> extends ResourceSupport<SearchResultKeys<T>, SearchResultKeys<T>> {
public final class SearchResultKeys<T> extends LiveResourceSupport<SearchResultKeys<T>, SearchResultKeys<T>> {

private static final Logger logger = LoggerFactory.getLogger(SearchResultKeys.class);

@@ -8,7 +8,7 @@ import io.net5.buffer.api.internal.ResourceSupport;
import java.util.StringJoiner;
import org.jetbrains.annotations.Nullable;

public class LLDelta extends ResourceSupport<LLDelta, LLDelta> {
public class LLDelta extends LiveResourceSupport<LLDelta, LLDelta> {
@Nullable
private final Buffer previous;
@Nullable
@@ -9,7 +9,7 @@ import java.util.StringJoiner;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

public class LLEntry extends ResourceSupport<LLEntry, LLEntry> {
public class LLEntry extends LiveResourceSupport<LLEntry, LLEntry> {
@NotNull
private final Buffer key;
@NotNull
@@ -12,7 +12,7 @@ import java.util.StringJoiner;
/**
* Range of data, from min (inclusive), to max (exclusive)
*/
public class LLRange extends ResourceSupport<LLRange, LLRange> {
public class LLRange extends LiveResourceSupport<LLRange, LLRange> {

private static final LLRange RANGE_ALL = new LLRange(null, null, null, d -> {});
private Buffer min;
@@ -212,9 +212,9 @@ public class LLRange extends ResourceSupport<LLRange, LLRange> {

@Override
public void drop(LLRange obj) {
if (obj.min != null) obj.min.close();
if (obj.max != null) obj.max.close();
if (obj.single != null) obj.single.close();
if (obj.min != null && obj.min.isAccessible()) obj.min.close();
if (obj.max != null && obj.max.isAccessible()) obj.max.close();
if (obj.single != null && obj.single.isAccessible()) obj.single.close();
delegate.drop(obj);
}
}
@@ -9,7 +9,7 @@ import org.warp.commonutils.log.Logger;
import org.warp.commonutils.log.LoggerFactory;
import reactor.core.publisher.Flux;

public final class LLSearchResultShard extends ResourceSupport<LLSearchResultShard, LLSearchResultShard> {
public final class LLSearchResultShard extends LiveResourceSupport<LLSearchResultShard, LLSearchResultShard> {

private static final Logger logger = LoggerFactory.getLogger(LLSearchResultShard.class);
@@ -433,6 +433,25 @@ public class LLUtils {
.doOnDiscard(Send.class, send -> send.close());
}

/**
* cleanup resource
* @param cleanupOnSuccess if true the resource will be cleaned up if the function is successful
*/
public static <U, T extends Resource<T>, V extends T> Flux<U> usingEachResource(Flux<V> resourceSupplier,
Function<V, Mono<U>> resourceClosure,
boolean cleanupOnSuccess) {
return resourceSupplier
.concatMap(resource -> Mono.usingWhen(Mono.just(resource), resourceClosure, r -> {
if (cleanupOnSuccess) {
return Mono.fromRunnable(() -> r.close());
} else {
return Mono.empty();
}
}, (r, ex) -> Mono.fromRunnable(() -> r.close()), r -> Mono.fromRunnable(() -> r.close())))
.doOnDiscard(Resource.class, resource -> resource.close())
.doOnDiscard(Send.class, send -> send.close());
}

/**
* cleanup resource
* @param cleanupOnSuccess if true the resource will be cleaned up if the function is successful
@@ -936,7 +955,9 @@ public class LLUtils {
}

private static void discardStage(DatabaseStage<?> stage) {
stage.release();
if (stage != null && stage.isAccessible()) {
stage.close();
}
}

public static boolean isDirect(Buffer key) {
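For context, a sketch of how the new LLUtils.usingEachResource helper above can be called; the fetchDeltas() source and readPreviousSize() mapper are hypothetical placeholders, not part of this commit:

// Map each LLDelta resource to a value; cleanupOnSuccess = true closes the
// resource even when the closure completes normally, matching the javadoc above.
Flux<LLDelta> deltas = fetchDeltas(); // hypothetical source of LLDelta resources
Flux<Integer> sizes = LLUtils.usingEachResource(deltas,
delta -> Mono.fromCallable(() -> readPreviousSize(delta)), // hypothetical closure
true);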
@@ -0,0 +1,33 @@
package it.cavallium.dbengine.database;

import io.net5.buffer.api.Drop;
import io.net5.buffer.api.Owned;
import io.net5.buffer.api.Resource;
import io.net5.buffer.api.internal.LifecycleTracer;
import io.net5.buffer.api.internal.ResourceSupport;
import org.warp.commonutils.log.Logger;
import org.warp.commonutils.log.LoggerFactory;

public abstract class LiveResourceSupport<I extends Resource<I>, T extends LiveResourceSupport<I, T>> extends ResourceSupport<I, T> {

private static final Logger logger = LoggerFactory.getLogger(LiveResourceSupport.class);

protected LiveResourceSupport(Drop<T> drop) {
super(drop);
}

@Override
protected void finalize() throws Throwable {
if (this.isAccessible()) {
try {
this.close();
} catch (Throwable ignored) {
} finally {
var ise = new IllegalStateException("Resource not released");
ise.setStackTrace(new StackTraceElement[0]);
logger.error("Resource not released: {}", this, attachTrace(ise));
}
}
super.finalize();
}
}
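A minimal sketch of a subclass plugging into this new base class, modeled on the classes migrated below; MyResource and its data field are illustrative, not part of this commit:

public final class MyResource extends LiveResourceSupport<MyResource, MyResource> {

private Buffer data; // illustrative owned buffer

public MyResource(Send<Buffer> data, Drop<MyResource> drop) {
super(drop);
this.data = data.receive();
}

@Override
protected RuntimeException createResourceClosedException() {
return new IllegalStateException("Closed");
}

@Override
protected Owned<MyResource> prepareSend() {
var data = this.data.send(); // detach the owned buffer so it can travel with the Send
return drop -> new MyResource(data, drop);
}

@Override
protected void makeInaccessible() {
this.data = null; // field cleared once ownership has been transferred or the resource closed
}
}

If an instance like this becomes unreachable while still accessible, the finalize() above closes it and logs "Resource not released" with the attached lifecycle trace.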
@@ -2,7 +2,7 @@ package it.cavallium.dbengine.database.collections;

import io.net5.buffer.api.Buffer;
import io.net5.buffer.api.BufferAllocator;
import io.net5.buffer.api.CompositeBuffer;
import io.net5.buffer.api.Drop;
import io.net5.buffer.api.Send;
import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.LLUtils;
@@ -36,8 +36,10 @@ public class DatabaseEmpty {
private DatabaseEmpty() {
}

public static DatabaseStageEntry<Nothing> create(LLDictionary dictionary, Send<Buffer> key) {
return new DatabaseSingle<>(dictionary, key, nothingSerializer(dictionary.getAllocator()));
public static DatabaseStageEntry<Nothing> create(LLDictionary dictionary,
Send<Buffer> key,
Drop<DatabaseSingle<Nothing>> drop) {
return new DatabaseSingle<>(dictionary, key, nothingSerializer(dictionary.getAllocator()), drop);
}

public static final class Nothing {
@@ -1,6 +1,7 @@
package it.cavallium.dbengine.database.collections;

import io.net5.buffer.api.Buffer;
import io.net5.buffer.api.Drop;
import io.net5.buffer.api.Send;
import io.net5.buffer.api.internal.ResourceSupport;
import it.cavallium.dbengine.client.CompositeSnapshot;
@@ -42,23 +43,27 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
protected DatabaseMapDictionary(LLDictionary dictionary,
@NotNull Send<Buffer> prefixKey,
SerializerFixedBinaryLength<T> keySuffixSerializer,
Serializer<U> valueSerializer) {
Serializer<U> valueSerializer,
Drop<DatabaseMapDictionaryDeep<T, U, DatabaseStageEntry<U>>> drop) {
// Do not retain or release or use the prefixKey here
super(dictionary, prefixKey, keySuffixSerializer, new SubStageGetterSingle<>(valueSerializer), 0);
super(dictionary, prefixKey, keySuffixSerializer, new SubStageGetterSingle<>(valueSerializer), 0, drop);
this.valueSerializer = valueSerializer;
}

public static <T, U> DatabaseMapDictionary<T, U> simple(LLDictionary dictionary,
SerializerFixedBinaryLength<T> keySerializer,
Serializer<U> valueSerializer) {
return new DatabaseMapDictionary<>(dictionary, LLUtils.empty(dictionary.getAllocator()), keySerializer, valueSerializer);
Serializer<U> valueSerializer,
Drop<DatabaseMapDictionaryDeep<T, U, DatabaseStageEntry<U>>> drop) {
return new DatabaseMapDictionary<>(dictionary, LLUtils.empty(dictionary.getAllocator()), keySerializer,
valueSerializer, drop);
}

public static <T, U> DatabaseMapDictionary<T, U> tail(LLDictionary dictionary,
Send<Buffer> prefixKey,
SerializerFixedBinaryLength<T> keySuffixSerializer,
Serializer<U> valueSerializer) {
return new DatabaseMapDictionary<>(dictionary, prefixKey, keySuffixSerializer, valueSerializer);
Serializer<U> valueSerializer,
Drop<DatabaseMapDictionaryDeep<T, U, DatabaseStageEntry<U>>> drop) {
return new DatabaseMapDictionary<>(dictionary, prefixKey, keySuffixSerializer, valueSerializer, drop);
}

private Send<Buffer> toKey(Send<Buffer> suffixKeyToSend) {
@@ -147,7 +152,7 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
@Override
public Mono<DatabaseStageEntry<U>> at(@Nullable CompositeSnapshot snapshot, T keySuffix) {
return Mono.fromCallable(() ->
new DatabaseSingle<>(dictionary, toKey(serializeSuffix(keySuffix)), valueSerializer));
new DatabaseSingle<>(dictionary, toKey(serializeSuffix(keySuffix)), valueSerializer, d -> {}));
}

@Override
@@ -396,10 +401,7 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
removePrefix(keyBuf);
suffixKeyConsistency(keyBuf.readableBytes());
sink.next(Map.entry(deserializeSuffix(keyBuf.copy().send()),
new DatabaseSingle<>(dictionary,
toKey(keyBuf.send()),
valueSerializer
)
new DatabaseSingle<>(dictionary, toKey(keyBuf.send()), valueSerializer, d -> {})
));
} catch (SerializationException ex) {
sink.error(ex);
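Throughout these hunks, call sites that build short-lived stages pass d -> {} as the Drop. That lambda is a no-op drop delegate: the wrapping CloseOnDrop (or the stage itself) already closes the owned buffers, so the caller opts out of any extra cleanup. A small sketch, with dictionary, keySend and valueSerializer assumed to be in scope:

// Nothing extra to release when the stage is dropped; CloseOnDrop still closes the key buffer.
Drop<DatabaseSingle<String>> noOpDrop = d -> {};
var stage = new DatabaseSingle<>(dictionary, keySend, valueSerializer, noOpDrop);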
@@ -2,8 +2,11 @@ package it.cavallium.dbengine.database.collections;

import io.net5.buffer.api.Buffer;
import io.net5.buffer.api.BufferAllocator;
import io.net5.buffer.api.Drop;
import io.net5.buffer.api.Owned;
import io.net5.buffer.api.Resource;
import io.net5.buffer.api.Send;
import io.net5.buffer.api.internal.ResourceSupport;
import io.net5.util.IllegalReferenceCountException;
import it.cavallium.dbengine.client.BadBlock;
import it.cavallium.dbengine.client.CompositeSnapshot;
@@ -12,6 +15,7 @@ import it.cavallium.dbengine.database.LLDictionaryResultType;
import it.cavallium.dbengine.database.LLRange;
import it.cavallium.dbengine.database.LLSnapshot;
import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.database.LiveResourceSupport;
import it.cavallium.dbengine.database.UpdateMode;
import it.cavallium.dbengine.database.serialization.SerializationException;
import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
@@ -24,20 +28,21 @@ import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

// todo: implement optimized methods (which?)
public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implements DatabaseStageMap<T, U, US> {
public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> extends
LiveResourceSupport<DatabaseStage<Map<T, U>>, DatabaseMapDictionaryDeep<T, U, US>>
implements DatabaseStageMap<T, U, US> {

protected final LLDictionary dictionary;
private final BufferAllocator alloc;
protected final SubStageGetter<U, US> subStageGetter;
protected final SerializerFixedBinaryLength<T> keySuffixSerializer;
@NotNull
protected final Buffer keyPrefix;
protected final int keyPrefixLength;
protected final int keySuffixLength;
protected final int keyExtLength;
protected final LLRange range;
protected final Mono<Send<LLRange>> rangeMono;
private volatile boolean released;

protected LLRange range;
protected Buffer keyPrefix;

private static void incrementPrefix(Buffer prefix, int prefixLength) {
assert prefix.readableBytes() >= prefixLength;
@@ -71,18 +76,12 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
}
}

static Buffer firstRangeKey(BufferAllocator alloc,
Send<Buffer> prefixKey,
int prefixLength,
int suffixLength,
static Buffer firstRangeKey(BufferAllocator alloc, Send<Buffer> prefixKey, int prefixLength, int suffixLength,
int extLength) {
return zeroFillKeySuffixAndExt(alloc, prefixKey, prefixLength, suffixLength, extLength);
}

static Buffer nextRangeKey(BufferAllocator alloc,
Send<Buffer> prefixKey,
int prefixLength,
int suffixLength,
static Buffer nextRangeKey(BufferAllocator alloc, Send<Buffer> prefixKey, int prefixLength, int suffixLength,
int extLength) {
try (prefixKey) {
Buffer nonIncremented = zeroFillKeySuffixAndExt(alloc, prefixKey, prefixLength, suffixLength, extLength);
@@ -91,11 +90,8 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
}
}

protected static Buffer zeroFillKeySuffixAndExt(BufferAllocator alloc,
@NotNull Send<Buffer> prefixKeySend,
int prefixLength,
int suffixLength,
int extLength) {
protected static Buffer zeroFillKeySuffixAndExt(BufferAllocator alloc, @NotNull Send<Buffer> prefixKeySend,
int prefixLength, int suffixLength, int extLength) {
var result = prefixKeySend.receive();
if (result == null) {
assert prefixLength == 0;
@@ -115,41 +111,20 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
}
}

static Buffer firstRangeKey(
BufferAllocator alloc,
Send<Buffer> prefixKey,
Send<Buffer> suffixKey,
int prefixLength,
int suffixLength,
int extLength) {
static Buffer firstRangeKey(BufferAllocator alloc, Send<Buffer> prefixKey, Send<Buffer> suffixKey, int prefixLength,
int suffixLength, int extLength) {
return zeroFillKeyExt(alloc, prefixKey, suffixKey, prefixLength, suffixLength, extLength);
}

static Buffer nextRangeKey(
BufferAllocator alloc,
Send<Buffer> prefixKey,
Send<Buffer> suffixKey,
int prefixLength,
int suffixLength,
int extLength) {
Buffer nonIncremented = zeroFillKeyExt(alloc,
prefixKey,
suffixKey,
prefixLength,
suffixLength,
extLength
);
static Buffer nextRangeKey(BufferAllocator alloc, Send<Buffer> prefixKey, Send<Buffer> suffixKey, int prefixLength,
int suffixLength, int extLength) {
Buffer nonIncremented = zeroFillKeyExt(alloc, prefixKey, suffixKey, prefixLength, suffixLength, extLength);
incrementPrefix(nonIncremented, prefixLength + suffixLength);
return nonIncremented;
}

protected static Buffer zeroFillKeyExt(
BufferAllocator alloc,
Send<Buffer> prefixKeySend,
Send<Buffer> suffixKeySend,
int prefixLength,
int suffixLength,
int extLength) {
protected static Buffer zeroFillKeyExt(BufferAllocator alloc, Send<Buffer> prefixKeySend, Send<Buffer> suffixKeySend,
int prefixLength, int suffixLength, int extLength) {
try (var prefixKey = prefixKeySend.receive()) {
try (var suffixKey = suffixKeySend.receive()) {
assert prefixKey.readableBytes() == prefixLength;
@@ -174,36 +149,30 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
*/
@Deprecated
public static <T, U> DatabaseMapDictionaryDeep<T, U, DatabaseStageEntry<U>> simple(LLDictionary dictionary,
SerializerFixedBinaryLength<T> keySerializer,
SubStageGetterSingle<U> subStageGetter) {
return new DatabaseMapDictionaryDeep<>(dictionary, LLUtils.empty(dictionary.getAllocator()), keySerializer, subStageGetter, 0);
SerializerFixedBinaryLength<T> keySerializer, SubStageGetterSingle<U> subStageGetter,
Drop<DatabaseMapDictionaryDeep<T, U, DatabaseStageEntry<U>>> drop) {
return new DatabaseMapDictionaryDeep<>(dictionary, LLUtils.empty(dictionary.getAllocator()), keySerializer,
subStageGetter, 0, drop);
}

public static <T, U, US extends DatabaseStage<U>> DatabaseMapDictionaryDeep<T, U, US> deepTail(LLDictionary dictionary,
SerializerFixedBinaryLength<T> keySerializer,
int keyExtLength,
SubStageGetter<U, US> subStageGetter) {
return new DatabaseMapDictionaryDeep<>(dictionary,
LLUtils.empty(dictionary.getAllocator()),
keySerializer,
subStageGetter,
keyExtLength
);
public static <T, U, US extends DatabaseStage<U>> DatabaseMapDictionaryDeep<T, U, US> deepTail(
LLDictionary dictionary, SerializerFixedBinaryLength<T> keySerializer, int keyExtLength,
SubStageGetter<U, US> subStageGetter, Drop<DatabaseMapDictionaryDeep<T, U, US>> drop) {
return new DatabaseMapDictionaryDeep<>(dictionary, LLUtils.empty(dictionary.getAllocator()), keySerializer,
subStageGetter, keyExtLength, drop);
}

public static <T, U, US extends DatabaseStage<U>> DatabaseMapDictionaryDeep<T, U, US> deepIntermediate(LLDictionary dictionary,
Send<Buffer> prefixKey,
SerializerFixedBinaryLength<T> keySuffixSerializer,
SubStageGetter<U, US> subStageGetter,
int keyExtLength) {
return new DatabaseMapDictionaryDeep<>(dictionary, prefixKey, keySuffixSerializer, subStageGetter, keyExtLength);
public static <T, U, US extends DatabaseStage<U>> DatabaseMapDictionaryDeep<T, U, US> deepIntermediate(
LLDictionary dictionary, Send<Buffer> prefixKey, SerializerFixedBinaryLength<T> keySuffixSerializer,
SubStageGetter<U, US> subStageGetter, int keyExtLength, Drop<DatabaseMapDictionaryDeep<T, U, US>> drop) {
return new DatabaseMapDictionaryDeep<>(dictionary, prefixKey, keySuffixSerializer, subStageGetter,
keyExtLength, drop);
}

protected DatabaseMapDictionaryDeep(LLDictionary dictionary,
@NotNull Send<Buffer> prefixKeyToReceive,
SerializerFixedBinaryLength<T> keySuffixSerializer,
SubStageGetter<U, US> subStageGetter,
int keyExtLength) {
protected DatabaseMapDictionaryDeep(LLDictionary dictionary, @NotNull Send<Buffer> prefixKeyToReceive,
SerializerFixedBinaryLength<T> keySuffixSerializer, SubStageGetter<U, US> subStageGetter, int keyExtLength,
Drop<DatabaseMapDictionaryDeep<T, U, US>> drop) {
super(new CloseOnDrop<>(drop));
try (var prefixKey = prefixKeyToReceive.receive()) {
this.dictionary = dictionary;
this.alloc = dictionary.getAllocator();
@@ -231,6 +200,31 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
}
}

private DatabaseMapDictionaryDeep(LLDictionary dictionary,
BufferAllocator alloc,
SubStageGetter<U, US> subStageGetter,
SerializerFixedBinaryLength<T> keySuffixSerializer,
int keyPrefixLength,
int keySuffixLength,
int keyExtLength,
Mono<Send<LLRange>> rangeMono,
Send<LLRange> range,
Send<Buffer> keyPrefix,
Drop<DatabaseMapDictionaryDeep<T, U, US>> drop) {
super(new CloseOnDrop<>(drop));
this.dictionary = dictionary;
this.alloc = alloc;
this.subStageGetter = subStageGetter;
this.keySuffixSerializer = keySuffixSerializer;
this.keyPrefixLength = keyPrefixLength;
this.keySuffixLength = keySuffixLength;
this.keyExtLength = keyExtLength;
this.rangeMono = rangeMono;

this.range = range.receive();
this.keyPrefix = keyPrefix.receive();
}

@SuppressWarnings("unused")
protected boolean suffixKeyConsistency(int keySuffixLength) {
return this.keySuffixLength == keySuffixLength;
@@ -301,7 +295,7 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
return this.subStageGetter
.subStage(dictionary, snapshot, suffixKeyWithoutExt)
.transform(LLUtils::handleDiscard)
.doOnDiscard(DatabaseStage.class, DatabaseStage::release);
.doOnDiscard(DatabaseStage.class, DatabaseStage::close);
}

@Override
@@ -415,13 +409,42 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
}

@Override
public void release() {
if (!released) {
released = true;
this.range.close();
this.keyPrefix.close();
} else {
throw new IllegalReferenceCountException(0, -1);
protected RuntimeException createResourceClosedException() {
throw new IllegalStateException("Closed");
}

@Override
protected Owned<DatabaseMapDictionaryDeep<T, U, US>> prepareSend() {
var keyPrefix = this.keyPrefix.send();
var range = this.range.send();
return drop -> new DatabaseMapDictionaryDeep<>(dictionary, alloc, subStageGetter, keySuffixSerializer,
keyPrefixLength, keySuffixLength, keyExtLength, rangeMono, range, keyPrefix, drop);
}

@Override
protected void makeInaccessible() {
this.keyPrefix = null;
this.range = null;
}

private static class CloseOnDrop<T, U, US extends DatabaseStage<U>> implements
Drop<DatabaseMapDictionaryDeep<T, U, US>> {

private final Drop<DatabaseMapDictionaryDeep<T,U,US>> delegate;

public CloseOnDrop(Drop<DatabaseMapDictionaryDeep<T, U, US>> drop) {
this.delegate = drop;
}

@Override
public void drop(DatabaseMapDictionaryDeep<T, U, US> obj) {
if (obj.range != null) {
obj.range.close();
}
if (obj.keyPrefix != null) {
obj.keyPrefix.close();
}
delegate.drop(obj);
}
}
}
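The prepareSend/makeInaccessible pair above is what lets a dictionary be handed across reactive or thread boundaries as a Send. A hedged sketch of the intended flow; the deep variable stands for any already-constructed DatabaseMapDictionaryDeep<String, String, ?>:

Send<DatabaseStage<Map<String, String>>> sent = deep.send(); // prepareSend() detaches range and keyPrefix, makeInaccessible() nulls the fields
// ...possibly on another thread or in another reactive pipeline...
DatabaseStage<Map<String, String>> received = sent.receive(); // rebuilt instance owning the same buffers
received.close(); // CloseOnDrop closes range and keyPrefix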
@@ -2,11 +2,14 @@ package it.cavallium.dbengine.database.collections;

import io.net5.buffer.api.Buffer;
import io.net5.buffer.api.BufferAllocator;
import io.net5.buffer.api.Drop;
import io.net5.buffer.api.Owned;
import io.net5.buffer.api.Send;
import it.cavallium.dbengine.client.BadBlock;
import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.database.LiveResourceSupport;
import it.cavallium.dbengine.database.UpdateMode;
import it.cavallium.dbengine.database.serialization.Serializer;
import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
@@ -24,18 +27,23 @@ import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

@SuppressWarnings("unused")
public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T, U, DatabaseStageEntry<U>> {
public class DatabaseMapDictionaryHashed<T, U, TH> extends
LiveResourceSupport<DatabaseStage<Map<T, U>>, DatabaseMapDictionaryHashed<T, U, TH>>
implements DatabaseStageMap<T, U, DatabaseStageEntry<U>> {

private final BufferAllocator alloc;
private final DatabaseMapDictionary<TH, ObjectArraySet<Entry<T, U>>> subDictionary;
private final Function<T, TH> keySuffixHashFunction;

private DatabaseMapDictionary<TH, ObjectArraySet<Entry<T, U>>> subDictionary;

protected DatabaseMapDictionaryHashed(LLDictionary dictionary,
@NotNull Send<Buffer> prefixKey,
Serializer<T> keySuffixSerializer,
Serializer<U> valueSerializer,
Function<T, TH> keySuffixHashFunction,
SerializerFixedBinaryLength<TH> keySuffixHashSerializer) {
SerializerFixedBinaryLength<TH> keySuffixHashSerializer,
Drop<DatabaseMapDictionaryHashed<T, U, TH>> drop) {
super(new DatabaseMapDictionaryHashed.CloseOnDrop<>(drop));
if (dictionary.getUpdateMode().block() != UpdateMode.ALLOW) {
throw new IllegalArgumentException("Hashed maps only works when UpdateMode is ALLOW");
}
@@ -44,26 +52,36 @@ public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T
= new ValueWithHashSerializer<>(alloc, keySuffixSerializer, valueSerializer);
ValuesSetSerializer<Entry<T, U>> valuesSetSerializer
= new ValuesSetSerializer<>(alloc, valueWithHashSerializer);
this.subDictionary = DatabaseMapDictionary.tail(dictionary,
prefixKey,
keySuffixHashSerializer,
valuesSetSerializer
);
this.subDictionary = DatabaseMapDictionary.tail(dictionary, prefixKey, keySuffixHashSerializer,
valuesSetSerializer, d -> {});
this.keySuffixHashFunction = keySuffixHashFunction;
}

private DatabaseMapDictionaryHashed(BufferAllocator alloc,
Function<T, TH> keySuffixHashFunction,
Send<DatabaseStage<Map<TH, ObjectArraySet<Entry<T, U>>>>> subDictionary,
Drop<DatabaseMapDictionaryHashed<T, U, TH>> drop) {
super(new CloseOnDrop<>(drop));
this.alloc = alloc;
this.keySuffixHashFunction = keySuffixHashFunction;

this.subDictionary = (DatabaseMapDictionary<TH, ObjectArraySet<Entry<T, U>>>) subDictionary.receive();
}

public static <T, U, UH> DatabaseMapDictionaryHashed<T, U, UH> simple(LLDictionary dictionary,
Serializer<T> keySerializer,
Serializer<U> valueSerializer,
Function<T, UH> keyHashFunction,
SerializerFixedBinaryLength<UH> keyHashSerializer) {
SerializerFixedBinaryLength<UH> keyHashSerializer,
Drop<DatabaseMapDictionaryHashed<T, U, UH>> drop) {
return new DatabaseMapDictionaryHashed<>(
dictionary,
LLUtils.empty(dictionary.getAllocator()),
keySerializer,
valueSerializer,
keyHashFunction,
keyHashSerializer
keyHashSerializer,
drop
);
}

@@ -72,13 +90,15 @@ public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T
Serializer<T> keySuffixSerializer,
Serializer<U> valueSerializer,
Function<T, UH> keySuffixHashFunction,
SerializerFixedBinaryLength<UH> keySuffixHashSerializer) {
SerializerFixedBinaryLength<UH> keySuffixHashSerializer,
Drop<DatabaseMapDictionaryHashed<T, U, UH>> drop) {
return new DatabaseMapDictionaryHashed<>(dictionary,
prefixKey,
keySuffixSerializer,
valueSerializer,
keySuffixHashFunction,
keySuffixHashSerializer
keySuffixHashSerializer,
drop
);
}

@@ -125,11 +145,6 @@ public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T
return subDictionary.clearAndGetStatus();
}

@Override
public Mono<Void> close() {
return subDictionary.close();
}

@Override
public Mono<Boolean> isEmpty(@Nullable CompositeSnapshot snapshot) {
return subDictionary.isEmpty(snapshot);
@@ -145,11 +160,6 @@ public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T
return this.subDictionary.badBlocks();
}

@Override
public void release() {
this.subDictionary.release();
}

@Override
public Mono<DatabaseStageEntry<U>> at(@Nullable CompositeSnapshot snapshot, T key) {
return this
@@ -160,7 +170,7 @@ public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T
private Mono<DatabaseSingleBucket<T, U, TH>> atPrivate(@Nullable CompositeSnapshot snapshot, T key, TH hash) {
return subDictionary
.at(snapshot, hash)
.map(entry -> new DatabaseSingleBucket<>(entry, key));
.map(entry -> new DatabaseSingleBucket<>(entry, key, d -> {}));
}

@Override
@@ -193,13 +203,11 @@ public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T
@Override
public Flux<Entry<T, U>> setAllValuesAndGetPrevious(Flux<Entry<T, U>> entries) {
return entries
.flatMap(entry -> Flux.usingWhen(
this.at(null, entry.getKey()),
.flatMap(entry -> LLUtils.usingResource(this.at(null, entry.getKey()),
stage -> stage
.setAndGetPrevious(entry.getValue())
.map(prev -> Map.entry(entry.getKey(), prev)),
stage -> Mono.fromRunnable(stage::release)
));
.map(prev -> Map.entry(entry.getKey(), prev)), true)
);
}

@Override
@@ -297,4 +305,37 @@ public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T
return null;
}
}

@Override
protected RuntimeException createResourceClosedException() {
throw new IllegalStateException("Closed");
}

@Override
protected Owned<DatabaseMapDictionaryHashed<T, U, TH>> prepareSend() {
var subDictionary = this.subDictionary.send();
return drop -> new DatabaseMapDictionaryHashed<>(alloc, keySuffixHashFunction, subDictionary, drop);
}

@Override
protected void makeInaccessible() {
this.subDictionary = null;
}

private static class CloseOnDrop<T, U, TH> implements Drop<DatabaseMapDictionaryHashed<T,U,TH>> {

private final Drop<DatabaseMapDictionaryHashed<T,U,TH>> delegate;

public CloseOnDrop(Drop<DatabaseMapDictionaryHashed<T,U,TH>> drop) {
this.delegate = drop;
}

@Override
public void drop(DatabaseMapDictionaryHashed<T, U, TH> obj) {
if (obj.subDictionary != null) {
obj.subDictionary.close();
}
delegate.drop(obj);
}
}
}
@@ -1,6 +1,7 @@
package it.cavallium.dbengine.database.collections;

import io.net5.buffer.api.Buffer;
import io.net5.buffer.api.Drop;
import io.net5.buffer.api.Send;
import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.LLDictionary;
@@ -18,19 +19,22 @@ public class DatabaseSetDictionary<T> extends DatabaseMapDictionary<T, Nothing>

protected DatabaseSetDictionary(LLDictionary dictionary,
Send<Buffer> prefixKey,
SerializerFixedBinaryLength<T> keySuffixSerializer) {
super(dictionary, prefixKey, keySuffixSerializer, DatabaseEmpty.nothingSerializer(dictionary.getAllocator()));
SerializerFixedBinaryLength<T> keySuffixSerializer,
Drop<DatabaseMapDictionaryDeep<T, Nothing, DatabaseStageEntry<Nothing>>> drop) {
super(dictionary, prefixKey, keySuffixSerializer, DatabaseEmpty.nothingSerializer(dictionary.getAllocator()), drop);
}

public static <T> DatabaseSetDictionary<T> simple(LLDictionary dictionary,
SerializerFixedBinaryLength<T> keySerializer) {
return new DatabaseSetDictionary<>(dictionary, LLUtils.empty(dictionary.getAllocator()), keySerializer);
SerializerFixedBinaryLength<T> keySerializer,
Drop<DatabaseMapDictionaryDeep<T, Nothing, DatabaseStageEntry<Nothing>>> drop) {
return new DatabaseSetDictionary<>(dictionary, LLUtils.empty(dictionary.getAllocator()), keySerializer, drop);
}

public static <T> DatabaseSetDictionary<T> tail(LLDictionary dictionary,
Send<Buffer> prefixKey,
SerializerFixedBinaryLength<T> keySuffixSerializer) {
return new DatabaseSetDictionary<>(dictionary, prefixKey, keySuffixSerializer);
SerializerFixedBinaryLength<T> keySuffixSerializer,
Drop<DatabaseMapDictionaryDeep<T, Nothing, DatabaseStageEntry<Nothing>>> drop) {
return new DatabaseSetDictionary<>(dictionary, prefixKey, keySuffixSerializer, drop);
}

public Mono<Set<T>> getKeySet(@Nullable CompositeSnapshot snapshot) {
@@ -1,6 +1,7 @@
package it.cavallium.dbengine.database.collections;

import io.net5.buffer.api.Buffer;
import io.net5.buffer.api.Drop;
import io.net5.buffer.api.Send;
import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.LLDictionary;
@@ -23,25 +24,29 @@ public class DatabaseSetDictionaryHashed<T, TH> extends DatabaseMapDictionaryHas
@NotNull Send<Buffer> prefixKey,
Serializer<T> keySuffixSerializer,
Function<T, TH> keySuffixHashFunction,
SerializerFixedBinaryLength<TH> keySuffixHashSerializer) {
SerializerFixedBinaryLength<TH> keySuffixHashSerializer,
Drop<DatabaseMapDictionaryHashed<T, Nothing, TH>> drop) {
super(dictionary,
prefixKey,
keySuffixSerializer,
DatabaseEmpty.nothingSerializer(dictionary.getAllocator()),
keySuffixHashFunction,
keySuffixHashSerializer
keySuffixHashSerializer,
drop
);
}

public static <T, TH> DatabaseSetDictionaryHashed<T, TH> simple(LLDictionary dictionary,
Serializer<T> keySerializer,
Function<T, TH> keyHashFunction,
SerializerFixedBinaryLength<TH> keyHashSerializer) {
SerializerFixedBinaryLength<TH> keyHashSerializer,
Drop<DatabaseMapDictionaryHashed<T, Nothing, TH>> drop) {
return new DatabaseSetDictionaryHashed<>(dictionary,
LLUtils.empty(dictionary.getAllocator()),
keySerializer,
keyHashFunction,
keyHashSerializer
keyHashSerializer,
drop
);
}

@@ -49,12 +54,13 @@ public class DatabaseSetDictionaryHashed<T, TH> extends DatabaseMapDictionaryHas
Send<Buffer> prefixKey,
Serializer<T> keySuffixSerializer,
Function<T, TH> keyHashFunction,
SerializerFixedBinaryLength<TH> keyHashSerializer) {
SerializerFixedBinaryLength<TH> keyHashSerializer, Drop<DatabaseMapDictionaryHashed<T, Nothing, TH>> drop) {
return new DatabaseSetDictionaryHashed<>(dictionary,
prefixKey,
keySuffixSerializer,
keyHashFunction,
keyHashSerializer
keyHashSerializer,
drop
);
}
@@ -1,6 +1,8 @@
package it.cavallium.dbengine.database.collections;

import io.net5.buffer.api.Buffer;
import io.net5.buffer.api.Drop;
import io.net5.buffer.api.Owned;
import io.net5.buffer.api.Send;
import io.net5.buffer.api.internal.ResourceSupport;
import it.cavallium.dbengine.client.BadBlock;
@@ -20,14 +22,18 @@ import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.publisher.SynchronousSink;

public class DatabaseSingle<U> implements DatabaseStageEntry<U> {
public class DatabaseSingle<U> extends ResourceSupport<DatabaseStage<U>, DatabaseSingle<U>> implements
DatabaseStageEntry<U> {

private final LLDictionary dictionary;
private final Buffer key;
private final Mono<Send<Buffer>> keyMono;
private final Serializer<U> serializer;

public DatabaseSingle(LLDictionary dictionary, Send<Buffer> key, Serializer<U> serializer) {
private Buffer key;

public DatabaseSingle(LLDictionary dictionary, Send<Buffer> key, Serializer<U> serializer,
Drop<DatabaseSingle<U>> drop) {
super(new CloseOnDrop<>(drop));
try (key) {
this.dictionary = dictionary;
this.key = key.receive();
@@ -124,13 +130,41 @@ public class DatabaseSingle<U> implements DatabaseStageEntry<U> {
.isRangeEmpty(resolveSnapshot(snapshot), keyMono.map(LLRange::single).map(ResourceSupport::send));
}

@Override
public void release() {
key.close();
}

@Override
public Flux<BadBlock> badBlocks() {
return dictionary.badBlocks(keyMono.map(LLRange::single).map(ResourceSupport::send));
}

@Override
protected RuntimeException createResourceClosedException() {
throw new IllegalStateException("Closed");
}

@Override
protected Owned<DatabaseSingle<U>> prepareSend() {
var key = this.key.send();
return drop -> new DatabaseSingle<>(dictionary, key, serializer, drop);
}

@Override
protected void makeInaccessible() {
this.key = null;
}

private static class CloseOnDrop<U> implements Drop<DatabaseSingle<U>> {

private final Drop<DatabaseSingle<U>> delegate;

public CloseOnDrop(Drop<DatabaseSingle<U>> drop) {
this.delegate = drop;
}

@Override
public void drop(DatabaseSingle<U> obj) {
if (obj.key != null) {
obj.key.close();
}
delegate.drop(obj);
}
}
}
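Because DatabaseSingle is now a ResourceSupport-backed Resource, callers can scope it with try-with-resources instead of calling release(). A short sketch, assuming dictionary, keySend and serializer are available in scope:

try (var single = new DatabaseSingle<>(dictionary, keySend, serializer, d -> {})) {
var value = single.get(null).block(); // get(snapshot) comes from DatabaseStage
// the key buffer is closed by CloseOnDrop when the try block ends
}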
@@ -1,10 +1,14 @@
package it.cavallium.dbengine.database.collections;

import io.net5.buffer.api.Drop;
import io.net5.buffer.api.Owned;
import io.net5.buffer.api.Send;
import it.cavallium.dbengine.client.BadBlock;
import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.Column;
import it.cavallium.dbengine.database.Delta;
import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.database.LiveResourceSupport;
import it.cavallium.dbengine.database.UpdateReturnMode;
import it.cavallium.dbengine.database.serialization.SerializationFunction;
import it.unimi.dsi.fastutil.objects.ObjectArraySet;
@@ -23,14 +27,26 @@ import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

@SuppressWarnings("unused")
public class DatabaseSingleBucket<K, V, TH> implements DatabaseStageEntry<V> {
public class DatabaseSingleBucket<K, V, TH>
extends LiveResourceSupport<DatabaseStage<V>, DatabaseSingleBucket<K, V, TH>>
implements DatabaseStageEntry<V> {

private final DatabaseStageEntry<ObjectArraySet<Entry<K, V>>> bucketStage;
private final K key;

public DatabaseSingleBucket(DatabaseStageEntry<ObjectArraySet<Entry<K, V>>> bucketStage, K key) {
this.bucketStage = bucketStage;
private DatabaseStageEntry<ObjectArraySet<Entry<K, V>>> bucketStage;

public DatabaseSingleBucket(DatabaseStageEntry<ObjectArraySet<Entry<K, V>>> bucketStage, K key,
Drop<DatabaseSingleBucket<K, V, TH>> drop) {
super(new CloseOnDrop<>(drop));
this.key = key;
this.bucketStage = bucketStage;
}

private DatabaseSingleBucket(Send<DatabaseStage<ObjectArraySet<Entry<K, V>>>> bucketStage, K key,
Drop<DatabaseSingleBucket<K, V, TH>> drop) {
super(new CloseOnDrop<>(drop));
this.key = key;
this.bucketStage = (DatabaseStageEntry<ObjectArraySet<Entry<K, V>>>) bucketStage.receive();
}

@Override
@@ -77,7 +93,8 @@ public class DatabaseSingleBucket<K, V, TH> implements DatabaseStageEntry<V> {
}

@Override
public Mono<Delta<V>> updateAndGetDelta(SerializationFunction<@Nullable V, @Nullable V> updater, boolean existsAlmostCertainly) {
public Mono<Delta<V>> updateAndGetDelta(SerializationFunction<@Nullable V, @Nullable V> updater,
boolean existsAlmostCertainly) {
return bucketStage
.updateAndGetDelta(oldBucket -> {
V oldValue = extractValue(oldBucket);
@@ -106,11 +123,6 @@ public class DatabaseSingleBucket<K, V, TH> implements DatabaseStageEntry<V> {
return this.updateAndGetDelta(prev -> null).map(LLUtils::isDeltaChanged);
}

@Override
public Mono<Void> close() {
return bucketStage.close();
}

@Override
public Mono<Long> leavesCount(@Nullable CompositeSnapshot snapshot, boolean fast) {
return this.get(snapshot).map(prev -> 1L).defaultIfEmpty(0L);
@@ -131,11 +143,6 @@ public class DatabaseSingleBucket<K, V, TH> implements DatabaseStageEntry<V> {
return bucketStage.badBlocks();
}

@Override
public void release() {
bucketStage.release();
}

private Mono<V> extractValueTransformation(Set<Entry<K, V>> entries) {
return Mono.fromCallable(() -> extractValue(entries));
}
@@ -193,4 +200,38 @@ public class DatabaseSingleBucket<K, V, TH> implements DatabaseStageEntry<V> {
return null;
}
}

@Override
protected RuntimeException createResourceClosedException() {
throw new IllegalStateException("Closed");
}

@Override
protected Owned<DatabaseSingleBucket<K, V, TH>> prepareSend() {
var bucketStage = this.bucketStage.send();
return drop -> new DatabaseSingleBucket<>(bucketStage, key, drop);
}

@Override
protected void makeInaccessible() {
this.bucketStage = null;
}

private static class CloseOnDrop<K, V, TH> implements
Drop<DatabaseSingleBucket<K, V, TH>> {

private final Drop<DatabaseSingleBucket<K, V, TH>> delegate;

public CloseOnDrop(Drop<DatabaseSingleBucket<K, V, TH>> drop) {
this.delegate = drop;
}

@Override
public void drop(DatabaseSingleBucket<K, V, TH> obj) {
if (obj.bucketStage != null) {
obj.bucketStage.close();
}
delegate.drop(obj);
}
}
}
@@ -1,5 +1,9 @@
package it.cavallium.dbengine.database.collections;

import io.net5.buffer.api.Drop;
import io.net5.buffer.api.Owned;
import io.net5.buffer.api.Send;
import io.net5.buffer.api.internal.ResourceSupport;
import it.cavallium.dbengine.client.BadBlock;
import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.client.Mapper;
@@ -14,16 +18,28 @@ import reactor.core.publisher.Mono;
import reactor.core.publisher.SynchronousSink;

@SuppressWarnings("unused")
public class DatabaseSingleMapped<A, B> implements DatabaseStageEntry<A> {
public class DatabaseSingleMapped<A, B> extends ResourceSupport<DatabaseStage<A>, DatabaseSingleMapped<A, B>>
implements DatabaseStageEntry<A> {

private final DatabaseStageEntry<B> serializedSingle;
private final Mapper<A, B> mapper;

public DatabaseSingleMapped(DatabaseStageEntry<B> serializedSingle, Mapper<A, B> mapper) {
private DatabaseStageEntry<B> serializedSingle;

public DatabaseSingleMapped(DatabaseStageEntry<B> serializedSingle, Mapper<A, B> mapper,
Drop<DatabaseSingleMapped<A, B>> drop) {
super(new CloseOnDrop<>(drop));
this.serializedSingle = serializedSingle;
this.mapper = mapper;
}

private DatabaseSingleMapped(Send<DatabaseStage<B>> serializedSingle, Mapper<A, B> mapper,
Drop<DatabaseSingleMapped<A, B>> drop) {
super(new CloseOnDrop<>(drop));
this.mapper = mapper;

this.serializedSingle = (DatabaseStageEntry<B>) serializedSingle.receive();
}

private void deserializeSink(B value, SynchronousSink<A> sink) {
try {
sink.next(this.unMap(value));
@@ -107,11 +123,6 @@ public class DatabaseSingleMapped<A, B> implements DatabaseStageEntry<A> {
return serializedSingle.clearAndGetStatus();
}

@Override
public Mono<Void> close() {
return serializedSingle.close();
}

@Override
public Mono<Long> leavesCount(@Nullable CompositeSnapshot snapshot, boolean fast) {
return serializedSingle.leavesCount(snapshot, fast);
@@ -132,11 +143,6 @@ public class DatabaseSingleMapped<A, B> implements DatabaseStageEntry<A> {
return this.serializedSingle.badBlocks();
}

@Override
public void release() {
serializedSingle.release();
}

//todo: temporary wrapper. convert the whole class to buffers
private A unMap(B bytes) throws SerializationException {
return mapper.unmap(bytes);
@@ -146,4 +152,37 @@ public class DatabaseSingleMapped<A, B> implements DatabaseStageEntry<A> {
private B map(A bytes) throws SerializationException {
return mapper.map(bytes);
}

@Override
protected RuntimeException createResourceClosedException() {
throw new IllegalStateException("Closed");
}

@Override
protected Owned<DatabaseSingleMapped<A, B>> prepareSend() {
var serializedSingle = this.serializedSingle.send();
return drop -> new DatabaseSingleMapped<>(serializedSingle, mapper, drop);
}

@Override
protected void makeInaccessible() {
this.serializedSingle = null;
}

private static class CloseOnDrop<A, B> implements Drop<DatabaseSingleMapped<A, B>> {

private final Drop<DatabaseSingleMapped<A, B>> delegate;

public CloseOnDrop(Drop<DatabaseSingleMapped<A, B>> drop) {
this.delegate = drop;
}

@Override
public void drop(DatabaseSingleMapped<A, B> obj) {
if (obj.serializedSingle != null) {
obj.serializedSingle.close();
}
delegate.drop(obj);
}
}
}
@@ -1,5 +1,6 @@
package it.cavallium.dbengine.database.collections;

import io.net5.buffer.api.Resource;
import it.cavallium.dbengine.client.BadBlock;
import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.Delta;
@@ -12,7 +13,7 @@ import org.jetbrains.annotations.Nullable;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

public interface DatabaseStage<T> extends DatabaseStageWithEntry<T> {
public interface DatabaseStage<T> extends DatabaseStageWithEntry<T>, Resource<DatabaseStage<T>> {

default Mono<T> get(@Nullable CompositeSnapshot snapshot) {
return get(snapshot, false);
@@ -74,12 +75,6 @@ public interface DatabaseStage<T> extends DatabaseStageWithEntry<T> {
return clearAndGetPrevious().map(Objects::nonNull).defaultIfEmpty(false);
}

void release();

default Mono<Void> close() {
return Mono.empty();
}

/**
* Count all the elements.
* If it's a nested collection the count will include all the children recursively
@@ -1,5 +1,6 @@
package it.cavallium.dbengine.database.collections;

import io.net5.buffer.api.Resource;
import it.cavallium.dbengine.client.BadBlock;
import reactor.core.publisher.Flux;
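The DatabaseStageMap hunks below swap Mono.usingWhen(..., stage -> Mono.fromRunnable(stage::release)) for LLUtils.usingResource(..., true). The implementation of usingResource is not visible in this commit; by analogy with usingEachResource earlier in LLUtils, a plausible shape (an assumption, not the author's code) would be:

public static <U, T extends Resource<T>, V extends T> Mono<U> usingResource(Mono<V> resourceSupplier,
Function<V, Mono<U>> resourceClosure,
boolean cleanupOnSuccess) {
// close on error and on cancel; close on completion only when cleanupOnSuccess is true
return Mono.usingWhen(resourceSupplier, resourceClosure,
r -> cleanupOnSuccess ? Mono.fromRunnable(r::close) : Mono.empty(),
(r, ex) -> Mono.fromRunnable(r::close),
r -> Mono.fromRunnable(r::close));
}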
@@ -34,11 +34,8 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
Mono<US> at(@Nullable CompositeSnapshot snapshot, T key);

default Mono<U> getValue(@Nullable CompositeSnapshot snapshot, T key, boolean existsAlmostCertainly) {
return Mono.usingWhen(
this.at(snapshot, key),
stage -> stage.get(snapshot, existsAlmostCertainly),
stage -> Mono.fromRunnable(stage::release)
);
return LLUtils.usingResource(this.at(snapshot, key),
stage -> stage.get(snapshot, existsAlmostCertainly), true);
}

default Mono<U> getValue(@Nullable CompositeSnapshot snapshot, T key) {
@@ -50,11 +47,8 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
}

default Mono<Void> putValue(T key, U value) {
return Mono.usingWhen(
at(null, key).single(),
stage -> stage.set(value),
stage -> Mono.fromRunnable(stage::release)
);
return LLUtils.usingResource(at(null, key).single(),
stage -> stage.set(value), true);
}

Mono<UpdateMode> getUpdateMode();
@@ -63,11 +57,8 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
UpdateReturnMode updateReturnMode,
boolean existsAlmostCertainly,
SerializationFunction<@Nullable U, @Nullable U> updater) {
return Mono.usingWhen(
this.at(null, key).single(),
stage -> stage.update(updater, updateReturnMode, existsAlmostCertainly),
stage -> Mono.fromRunnable(stage::release)
);
return LLUtils.usingResource(this.at(null, key).single(),
stage -> stage.update(updater, updateReturnMode, existsAlmostCertainly), true);
}

default <X> Flux<ExtraKeyOperationResult<T, X>> updateMulti(Flux<Tuple2<T, X>> entries,
@@ -94,11 +85,8 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
default Mono<Delta<U>> updateValueAndGetDelta(T key,
boolean existsAlmostCertainly,
SerializationFunction<@Nullable U, @Nullable U> updater) {
return Mono.usingWhen(
this.at(null, key).single(),
stage -> stage.updateAndGetDelta(updater, existsAlmostCertainly),
stage -> Mono.fromRunnable(stage::release)
);
return LLUtils.usingResource(this.at(null, key).single(),
stage -> stage.updateAndGetDelta(updater, existsAlmostCertainly), true);
}

default Mono<Delta<U>> updateValueAndGetDelta(T key, SerializationFunction<@Nullable U, @Nullable U> updater) {
@@ -106,22 +94,14 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
}

default Mono<U> putValueAndGetPrevious(T key, U value) {
return Mono.usingWhen(
at(null, key).single(),
stage -> stage.setAndGetPrevious(value),
stage -> Mono.fromRunnable(stage::release)
);
return LLUtils.usingResource(at(null, key).single(), stage -> stage.setAndGetPrevious(value), true);
}

/**
* @return true if the key was associated with any value, false if the key didn't exist.
*/
default Mono<Boolean> putValueAndGetChanged(T key, U value) {
return Mono.usingWhen(
at(null, key).single(),
stage -> stage.setAndGetChanged(value),
stage -> Mono.fromRunnable(stage::release)
).single();
return LLUtils.usingResource(at(null, key).single(), stage -> stage.setAndGetChanged(value), true).single();
}

default Mono<Void> remove(T key) {
@@ -129,11 +109,7 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
}

default Mono<U> removeAndGetPrevious(T key) {
return Mono.usingWhen(
at(null, key),
DatabaseStage::clearAndGetPrevious,
stage -> Mono.fromRunnable(stage::release)
);
return LLUtils.usingResource(at(null, key), DatabaseStage::clearAndGetPrevious, true);
}

default Mono<Boolean> removeAndGetStatus(T key) {
@@ -175,11 +151,11 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
default Flux<Entry<T, U>> getAllValues(@Nullable CompositeSnapshot snapshot) {
return this
.getAllStages(snapshot)
.flatMapSequential(entry -> entry
.flatMapSequential(stage -> stage
.getValue()
.get(snapshot, true)
.map(value -> Map.entry(entry.getKey(), value))
.doAfterTerminate(() -> entry.getValue().release())
.map(value -> Map.entry(stage.getKey(), value))
.doFinally(s -> stage.getValue().close())
);
}

@@ -193,7 +169,8 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
return setAllValues(Flux.empty());
}

default Mono<Void> replaceAllValues(boolean canKeysChange, Function<Entry<T, U>, Mono<Entry<T, U>>> entriesReplacer) {
default Mono<Void> replaceAllValues(boolean canKeysChange, Function<Entry<T, U>,
Mono<Entry<T, U>>> entriesReplacer) {
if (canKeysChange) {
return this.setAllValues(this.getAllValues(null).flatMap(entriesReplacer)).then();
} else {
@@ -202,7 +179,11 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
.flatMap(entriesReplacer)
.flatMap(replacedEntry -> this
.at(null, replacedEntry.getKey())
.flatMap(v -> v.set(replacedEntry.getValue()).doAfterTerminate(v::release)))
.flatMap(stage -> stage
.set(replacedEntry.getValue())
.doFinally(s -> stage.close())
)
)
.then();
}
}
@@ -210,9 +191,8 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
default Mono<Void> replaceAll(Function<Entry<T, US>, Mono<Void>> entriesReplacer) {
return this
.getAllStages(null)
.flatMap(stage -> Mono
.defer(() -> entriesReplacer.apply(stage))
.doAfterTerminate(() -> stage.getValue().release())
.flatMap(stage -> entriesReplacer.apply(stage)
.doFinally(s -> stage.getValue().close())
)
.then();
}
@@ -221,14 +201,15 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
default Mono<Map<T, U>> setAndGetPrevious(Map<T, U> value) {
return this
.setAllValuesAndGetPrevious(Flux.fromIterable(Map.copyOf(value).entrySet()))
.collectMap(Entry::getKey, Entry::getValue, HashMap::new);
.collectMap(Entry::getKey, Entry::getValue, HashMap::new)
.filter(map -> !map.isEmpty());
}

@Override
default Mono<Boolean> setAndGetChanged(Map<T, U> value) {
return this
.setAndGetPrevious(value)
.map(oldValue -> !Objects.equals(oldValue, value))
.map(oldValue -> !Objects.equals(oldValue, value.isEmpty() ? null : value))
.switchIfEmpty(Mono.fromSupplier(() -> !value.isEmpty()));
}

@@ -286,18 +267,17 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat

@Override
default Mono<Map<T, U>> get(@Nullable CompositeSnapshot snapshot, boolean existsAlmostCertainly) {
return getAllValues(snapshot)
return this
.getAllValues(snapshot)
.collectMap(Entry::getKey, Entry::getValue, HashMap::new)
.filter(map -> !map.isEmpty());
}

@Override
default Mono<Long> leavesCount(@Nullable CompositeSnapshot snapshot, boolean fast) {
return getAllStages(snapshot)
.flatMap(stage -> Mono
.fromRunnable(() -> stage.getValue().release())
.thenReturn(true)
)
return this
.getAllStages(snapshot)
.doOnNext(stage -> stage.getValue().close())
.count();
}

@@ -1,5 +1,6 @@
package it.cavallium.dbengine.database.collections;

import io.net5.buffer.api.Resource;
import it.cavallium.dbengine.client.BadBlock;
import reactor.core.publisher.Mono;
@ -4,6 +4,7 @@ import io.net5.buffer.api.Buffer;
import io.net5.buffer.api.Send;
import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.database.serialization.Serializer;
import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
import java.util.Map;
@ -34,20 +35,9 @@ public class SubStageGetterHashMap<T, U, TH> implements
public Mono<DatabaseMapDictionaryHashed<T, U, TH>> subStage(LLDictionary dictionary,
@Nullable CompositeSnapshot snapshot,
Mono<Send<Buffer>> prefixKeyMono) {
return Mono.usingWhen(
prefixKeyMono,
prefixKey -> Mono
.fromSupplier(() -> DatabaseMapDictionaryHashed
.tail(dictionary,
prefixKey,
keySerializer,
valueSerializer,
keyHashFunction,
keyHashSerializer
)
),
prefixKey -> Mono.fromRunnable(prefixKey::close)
);
return LLUtils.usingSend(prefixKeyMono, prefixKey -> Mono.just(DatabaseMapDictionaryHashed
.tail(dictionary, prefixKey, keySerializer, valueSerializer, keyHashFunction,
keyHashSerializer, d -> {})), true);
}

public int getKeyHashBinaryLength() {
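
Note: several sub-stage getters now delegate buffer lifecycle to LLUtils.usingSend instead of hand-written Mono.usingWhen blocks. Judging only from the call sites, the helper receives the Mono<Send<Buffer>>, a function that builds the sub-stage, and a boolean flag. The snippet below is a generic approximation of such an acquire/use/close helper, not the actual LLUtils implementation (and it does not model the flag):

    import java.util.function.Function;
    import reactor.core.publisher.Mono;

    class UsingSendSketch {
        // Acquire a closeable resource, build a value from it, and always close it afterwards.
        static <R extends AutoCloseable, T> Mono<T> using(Mono<R> resourceMono, Function<R, Mono<T>> mapper) {
            return Mono.usingWhen(
                    resourceMono,
                    mapper,
                    resource -> Mono.fromRunnable(() -> {
                        try {
                            resource.close();
                        } catch (Exception e) {
                            throw new IllegalStateException(e);
                        }
                    })
            );
        }
    }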
@ -34,13 +34,8 @@ public class SubStageGetterHashSet<T, TH> implements
Mono<Send<Buffer>> prefixKeyMono) {
return Mono.usingWhen(prefixKeyMono,
prefixKey -> Mono
.fromSupplier(() -> DatabaseSetDictionaryHashed
.tail(dictionary,
prefixKey,
keySerializer,
keyHashFunction,
keyHashSerializer
)
.fromSupplier(() -> DatabaseSetDictionaryHashed.tail(dictionary, prefixKey, keySerializer,
keyHashFunction, keyHashSerializer, d -> {})
),
prefixKey -> Mono.fromRunnable(prefixKey::close)
);
@ -4,6 +4,7 @@ import io.net5.buffer.api.Buffer;
import io.net5.buffer.api.Send;
import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.database.serialization.Serializer;
import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
import java.util.Map;
@ -25,11 +26,9 @@ public class SubStageGetterMap<T, U> implements SubStageGetter<Map<T, U>, Databa
public Mono<DatabaseMapDictionary<T, U>> subStage(LLDictionary dictionary,
@Nullable CompositeSnapshot snapshot,
Mono<Send<Buffer>> prefixKeyMono) {
return Mono.usingWhen(prefixKeyMono,
return LLUtils.usingSend(prefixKeyMono,
prefixKey -> Mono.fromSupplier(() -> DatabaseMapDictionary
.tail(dictionary, prefixKey, keySerializer, valueSerializer)),
prefixKey -> Mono.fromRunnable(prefixKey::close)
);
.tail(dictionary, prefixKey, keySerializer, valueSerializer, d -> {})), true);
}

public int getKeyBinaryLength() {
@ -4,6 +4,7 @@ import io.net5.buffer.api.Buffer;
import io.net5.buffer.api.Send;
import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
import java.util.Map;
import org.jetbrains.annotations.Nullable;
@ -40,11 +41,9 @@ public class SubStageGetterMapDeep<T, U, US extends DatabaseStage<U>> implements
public Mono<DatabaseMapDictionaryDeep<T, U, US>> subStage(LLDictionary dictionary,
@Nullable CompositeSnapshot snapshot,
Mono<Send<Buffer>> prefixKeyMono) {
return Mono.usingWhen(prefixKeyMono,
prefixKey -> Mono.fromSupplier(() -> DatabaseMapDictionaryDeep
.deepIntermediate(dictionary, prefixKey, keySerializer, subStageGetter, keyExtLength)),
prefixKey -> Mono.fromRunnable(prefixKey::close)
);
return LLUtils.usingSend(prefixKeyMono, prefixKey -> Mono.just(DatabaseMapDictionaryDeep
.deepIntermediate(dictionary, prefixKey, keySerializer, subStageGetter, keyExtLength,
d -> {})), true);
}

public int getKeyBinaryLength() {
@ -24,7 +24,7 @@ public class SubStageGetterSet<T> implements SubStageGetter<Map<T, Nothing>, Dat
Mono<Send<Buffer>> prefixKeyMono) {
return Mono.usingWhen(prefixKeyMono,
prefixKey -> Mono
.fromSupplier(() -> DatabaseSetDictionary.tail(dictionary, prefixKey, keySerializer)),
.fromSupplier(() -> DatabaseSetDictionary.tail(dictionary, prefixKey, keySerializer, d -> {})),
prefixKey -> Mono.fromRunnable(prefixKey::close)
);
}
@ -20,12 +20,7 @@ public class SubStageGetterSingle<T> implements SubStageGetter<T, DatabaseStageE
public Mono<DatabaseStageEntry<T>> subStage(LLDictionary dictionary,
@Nullable CompositeSnapshot snapshot,
Mono<Send<Buffer>> keyPrefixMono) {
return Mono.usingWhen(
keyPrefixMono,
keyPrefix -> Mono
.<DatabaseStageEntry<T>>fromSupplier(() -> new DatabaseSingle<>(dictionary, keyPrefix, serializer)),
keyPrefix -> Mono.fromRunnable(keyPrefix::close)
);
return keyPrefixMono.map(keyPrefix -> new DatabaseSingle<>(dictionary, keyPrefix, serializer, d -> {}));
}

}
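
Note: here the Mono.usingWhen wrapper is removed outright; the received key prefix is handed straight to DatabaseSingle, so the duty to close the buffer moves into the constructed stage instead of staying at the call site. A small illustrative sketch of that ownership-transfer convention, with hypothetical types that are not part of the project:

    import reactor.core.publisher.Mono;

    // Hypothetical owned key and stage: whoever holds the stage owns the key it wraps.
    record OwnedKey(byte[] bytes) implements AutoCloseable {
        @Override public void close() { /* free the underlying memory here */ }
    }

    record SingleStage(OwnedKey key) implements AutoCloseable {
        @Override public void close() { key.close(); } // closing the stage closes the key it owns
    }

    class OwnershipTransferSketch {
        // No usingWhen: the mapper transfers the key into the stage, which closes it later.
        static Mono<SingleStage> subStage(Mono<OwnedKey> keyMono) {
            return keyMono.map(SingleStage::new);
        }
    }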
@ -3,6 +3,7 @@ package it.cavallium.dbengine.database.disk;
import io.net5.buffer.api.Drop;
import io.net5.buffer.api.Owned;
import io.net5.buffer.api.internal.ResourceSupport;
import it.cavallium.dbengine.database.LiveResourceSupport;
import java.io.IOException;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.IndexSearcher;
@ -11,7 +12,7 @@ import org.jetbrains.annotations.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LLIndexSearcher extends ResourceSupport<LLIndexSearcher, LLIndexSearcher> {
public class LLIndexSearcher extends LiveResourceSupport<LLIndexSearcher, LLIndexSearcher> {

private IndexSearcher indexSearcher;
@ -5,6 +5,7 @@ import io.net5.buffer.api.Owned;
import io.net5.buffer.api.Resource;
import io.net5.buffer.api.Send;
import io.net5.buffer.api.internal.ResourceSupport;
import it.cavallium.dbengine.database.LiveResourceSupport;
import it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap;
import java.io.IOException;
import java.io.UncheckedIOException;
@ -37,7 +38,7 @@ public interface LLIndexSearchers extends Resource<LLIndexSearchers> {

IndexReader allShards();

class UnshardedIndexSearchers extends ResourceSupport<LLIndexSearchers, UnshardedIndexSearchers>
class UnshardedIndexSearchers extends LiveResourceSupport<LLIndexSearchers, UnshardedIndexSearchers>
implements LLIndexSearchers {

private LLIndexSearcher indexSearcher;
@ -103,7 +104,7 @@ public interface LLIndexSearchers extends Resource<LLIndexSearchers> {
}
}

class ShardedIndexSearchers extends ResourceSupport<LLIndexSearchers, ShardedIndexSearchers>
class ShardedIndexSearchers extends LiveResourceSupport<LLIndexSearchers, ShardedIndexSearchers>
implements LLIndexSearchers {

private List<LLIndexSearcher> indexSearchers;
@ -9,6 +9,7 @@ import it.cavallium.dbengine.client.query.current.data.TotalHitsCount;
import it.cavallium.dbengine.database.EnglishItalianStopFilter;
import it.cavallium.dbengine.database.LLKeyScore;
import it.cavallium.dbengine.database.LLSnapshot;
import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.database.collections.DatabaseMapDictionary;
import it.cavallium.dbengine.database.collections.DatabaseMapDictionaryDeep;
import it.cavallium.dbengine.database.collections.ValueGetter;
@ -231,9 +232,8 @@ public class LuceneUtils {
public static <T, U, V> ValueGetter<Entry<T, U>, V> getAsyncDbValueGetterDeep(
CompositeSnapshot snapshot,
DatabaseMapDictionaryDeep<T, Map<U, V>, DatabaseMapDictionary<U, V>> dictionaryDeep) {
return entry -> dictionaryDeep
.at(snapshot, entry.getKey())
.flatMap(sub -> sub.getValue(snapshot, entry.getValue()).doAfterTerminate(sub::release));
return entry -> LLUtils.usingResource(dictionaryDeep
.at(snapshot, entry.getKey()), sub -> sub.getValue(snapshot, entry.getValue()), true);
}

public static PerFieldAnalyzerWrapper toPerFieldAnalyzerWrapper(IndicizerAnalyzers indicizerAnalyzers) {
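
Note: LLUtils.usingResource appears to play the same role for closeable stages that usingSend plays for buffers: take a Mono that produces the stage, apply a function, then close the stage however the inner pipeline ends. An equivalent hand-written form of a call such as usingResource(stageMono, s -> s.get(), true), sketched with a hypothetical stage type and ignoring the trailing flag:

    import reactor.core.publisher.Mono;

    class UsingResourceUsageSketch {
        interface ValueStage extends AutoCloseable {
            Mono<String> get();
            @Override void close();
        }

        // The stage is closed no matter how the inner Mono terminates.
        static Mono<String> getAndClose(Mono<ValueStage> stageMono) {
            return Mono.usingWhen(
                    stageMono,
                    ValueStage::get,
                    stage -> Mono.fromRunnable(stage::close)
            );
        }
    }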
@ -6,6 +6,7 @@ import io.net5.buffer.api.internal.ResourceSupport;
import it.cavallium.dbengine.client.query.current.data.TotalHitsCount;
import it.cavallium.dbengine.database.LLKeyScore;
import it.cavallium.dbengine.database.LLSearchResultShard;
import it.cavallium.dbengine.database.LiveResourceSupport;
import it.cavallium.dbengine.database.disk.LLLocalKeyValueDatabase;
import java.io.IOException;
import java.util.Objects;
@ -14,7 +15,7 @@ import org.warp.commonutils.log.LoggerFactory;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

public final class LuceneSearchResult extends ResourceSupport<LuceneSearchResult, LuceneSearchResult> {
public final class LuceneSearchResult extends LiveResourceSupport<LuceneSearchResult, LuceneSearchResult> {

private static final Logger logger = LoggerFactory.getLogger(LuceneSearchResult.class);
@ -6,9 +6,10 @@ import io.net5.buffer.api.Owned;
import io.net5.buffer.api.Send;
import io.net5.buffer.api.internal.ResourceSupport;
import it.cavallium.dbengine.client.SearchResult;
import it.cavallium.dbengine.database.LiveResourceSupport;
import org.jetbrains.annotations.Nullable;

public class NullableBuffer extends ResourceSupport<NullableBuffer, NullableBuffer> {
public class NullableBuffer extends LiveResourceSupport<NullableBuffer, NullableBuffer> {

@Nullable
private Buffer buffer;
@ -179,7 +179,8 @@ public class DbTestUtils {
if (mapType == MapType.MAP) {
return DatabaseMapDictionary.simple(dictionary,
SerializerFixedBinaryLength.utf8(dictionary.getAllocator(), keyBytes),
Serializer.utf8(dictionary.getAllocator())
Serializer.utf8(dictionary.getAllocator()),
d -> {}
);
} else {
return DatabaseMapDictionaryHashed.simple(dictionary,
@ -209,7 +210,8 @@ public class DbTestUtils {
return out.send();
}
}
}
},
d -> {}
);
}
}
@ -224,7 +226,8 @@ public class DbTestUtils {
key2Bytes,
new SubStageGetterMap<>(SerializerFixedBinaryLength.utf8(dictionary.getAllocator(), key2Bytes),
Serializer.utf8(dictionary.getAllocator())
)
),
d -> {}
);
}

@ -239,7 +242,8 @@ public class DbTestUtils {
Serializer.utf8(dictionary.getAllocator()),
String::hashCode,
SerializerFixedBinaryLength.intSerializer(dictionary.getAllocator())
)
),
d -> {}
);
}

@ -249,7 +253,8 @@ public class DbTestUtils {
Serializer.utf8(dictionary.getAllocator()),
Serializer.utf8(dictionary.getAllocator()),
String::hashCode,
SerializerFixedBinaryLength.intSerializer(dictionary.getAllocator())
SerializerFixedBinaryLength.intSerializer(dictionary.getAllocator()),
d -> {}
);
}
}
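
Note: every constructor touched in this test helper gains a trailing drop callback, passed as the no-op d -> {}. The idea is that the resource invokes the callback when it is dropped, letting callers observe or customize cleanup; the tests simply opt out. The callback type below is a hypothetical stand-in, not the project's or net5's actual parameter type:

    // Hypothetical stand-in for the drop callback these constructors accept.
    @FunctionalInterface
    interface DropCallback<T> {
        void drop(T dropped);
    }

    // Hypothetical resource that notifies its callback when discarded.
    class TrackedThing {
        private final DropCallback<TrackedThing> onDrop;

        TrackedThing(DropCallback<TrackedThing> onDrop) {
            this.onDrop = onDrop;
        }

        void discard() {
            onDrop.drop(this); // e.g. log, return to a pool, or do nothing
        }
    }

    class DropSketch {
        static TrackedThing noOp() {
            return new TrackedThing(d -> {}); // same "ignore the drop" style used in the tests
        }
    }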
@ -115,8 +115,7 @@ public abstract class TestDictionaryMap {
var resultingMap = run(map.get(null));
Assertions.assertEquals(shouldFail ? null : Map.of(key, value), resultingMap);

runVoid(map.close());
map.release();
map.close();

//if (shouldFail) this.checkLeaks = false;

@ -129,10 +128,10 @@ public abstract class TestDictionaryMap {
var stpVer = StepVerifier
.create(tempDb(getTempDbGenerator(), allocator, db -> tempDictionary(db, updateMode)
.map(dict -> tempDatabaseMapDictionaryMap(dict, mapType, 5))
.flatMap(map -> map
.at(null, key).flatMap(v -> v.set(value).doAfterTerminate(v::release))
.then(map.at(null, key).flatMap(v -> v.get(null).doAfterTerminate(v::release)))
.doAfterTerminate(map::release)
.flatMap(map -> LLUtils
.usingResource(map.at(null, key), v -> v.set(value), true)
.then(LLUtils.usingResource(map.at(null, key), v -> v.get(null), true))
.doFinally(s -> map.close())
)
));
if (shouldFail) {
@ -155,7 +154,7 @@ public abstract class TestDictionaryMap {
map.putValueAndGetPrevious(key, value),
map.putValueAndGetPrevious(key, value)
)
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
)
));
if (shouldFail) {
@ -178,7 +177,7 @@ public abstract class TestDictionaryMap {
map.putValue(key, value).then(map.removeAndGetPrevious(key)),
map.removeAndGetPrevious(key)
)
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
)
));
if (shouldFail) {
@ -201,7 +200,7 @@ public abstract class TestDictionaryMap {
map.putValue(key, value).then(map.removeAndGetStatus(key)),
map.removeAndGetStatus(key)
)
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
)
));
if (shouldFail) {
@ -244,7 +243,7 @@ public abstract class TestDictionaryMap {
return value;
})
)
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
)
.transform(LLUtils::handleDiscard)
));
@ -311,7 +310,7 @@ public abstract class TestDictionaryMap {
.doOnSuccess(s -> log.debug("5. Getting value: {}", key))
.then(map.getValue(null, key))
)
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
)
.transform(LLUtils::handleDiscard)
));
@ -336,7 +335,7 @@ public abstract class TestDictionaryMap {
map.remove(key),
map.putValueAndGetChanged(key, "error?").single()
)
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
)
));
if (shouldFail) {
@ -395,7 +394,7 @@ public abstract class TestDictionaryMap {
map.putMulti(Flux.fromIterable(entries.entrySet())).then(Mono.empty()),
map.getMulti(null, Flux.fromIterable(entries.keySet()))
)
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
)
.filter(k -> k.getValue().isPresent())
.map(k -> Map.entry(k.getKey(), k.getValue().orElseThrow()))
@ -423,7 +422,7 @@ public abstract class TestDictionaryMap {
.flatMapMany(map -> map
.setAllValues(Flux.fromIterable(entries.entrySet()))
.thenMany(map.getMulti(null, Flux.fromIterable(entries.keySet())))
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
)
.filter(k -> k.getValue().isPresent())
.map(k -> Map.entry(k.getKey(), k.getValue().orElseThrow()))
@ -453,7 +452,7 @@ public abstract class TestDictionaryMap {
map.setAllValuesAndGetPrevious(Flux.fromIterable(entries.entrySet())),
map.setAllValuesAndGetPrevious(Flux.fromIterable(entries.entrySet()))
)
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
)
.transform(LLUtils::handleDiscard)
));
@ -481,7 +480,7 @@ public abstract class TestDictionaryMap {
map.set(entries).then(Mono.empty()),
map.getMulti(null, Flux.fromIterable(entries.keySet()))
)
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
)
.filter(k -> k.getValue().isPresent())
.map(k -> Map.entry(k.getKey(), k.getValue().orElseThrow()))
@ -522,7 +521,7 @@ public abstract class TestDictionaryMap {
removalMono.then(Mono.empty()),
map.setAndGetChanged(entries).single()
)
.doAfterTerminate(map::release);
.doFinally(s -> map.close());
})
));
if (shouldFail) {
@ -544,7 +543,7 @@ public abstract class TestDictionaryMap {
.concat(map.setAndGetPrevious(entries), map.setAndGetPrevious(entries))
.map(Map::entrySet)
.concatMapIterable(list -> list)
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
)
));
if (shouldFail) {
@ -570,7 +569,7 @@ public abstract class TestDictionaryMap {
.concat(map.set(entries).then(Mono.empty()), map.clearAndGetPrevious(), map.get(null))
.map(Map::entrySet)
.concatMapIterable(list -> list)
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
)
));
if (shouldFail) {
@ -597,7 +596,7 @@ public abstract class TestDictionaryMap {
map.putMulti(Flux.fromIterable(entries.entrySet())).then(Mono.empty()),
map.getAllValues(null)
)
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
)
.transform(LLUtils::handleDiscard)
));
@ -627,7 +626,7 @@ public abstract class TestDictionaryMap {
.map(Map::entrySet)
.flatMapIterable(list -> list)
)
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
)
.transform(LLUtils::handleDiscard)
));
@ -659,10 +658,10 @@ public abstract class TestDictionaryMap {
.getValue()
.get(null)
.map(val -> Map.entry(stage.getKey(), val))
.doAfterTerminate(() -> stage.getValue().release())
.doFinally(s -> stage.getValue().close())
)
)
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
)
.transform(LLUtils::handleDiscard)
));
@ -691,7 +690,7 @@ public abstract class TestDictionaryMap {
map.putMulti(Flux.fromIterable(entries.entrySet())).then(Mono.empty()),
map.isEmpty(null)
)
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
)
.flatMap(val -> shouldFail ? Mono.empty() : Mono.just(val))
.transform(LLUtils::handleDiscard)
@ -718,7 +717,7 @@ public abstract class TestDictionaryMap {
map.clear().then(Mono.empty()),
map.isEmpty(null)
)
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
)
.flatMap(val -> shouldFail ? Mono.empty() : Mono.just(val))
.transform(LLUtils::handleDiscard)
@ -12,6 +12,7 @@ import static it.cavallium.dbengine.DbTestUtils.tempDatabaseMapDictionaryMap;
import static it.cavallium.dbengine.DbTestUtils.tempDb;
import static it.cavallium.dbengine.DbTestUtils.tempDictionary;

import io.net5.buffer.api.internal.ResourceSupport;
import it.cavallium.dbengine.DbTestUtils.TestAllocator;
import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.database.UpdateMode;
@ -196,8 +197,7 @@ public abstract class TestDictionaryMapDeep {
var resultingMap = run(map.get(null));
Assertions.assertEquals(shouldFail ? null : Map.of(key, value), resultingMap);

runVoid(map.close());
map.release();
map.close();

//if (shouldFail) this.checkLeaks = false;

@ -220,8 +220,7 @@ public abstract class TestDictionaryMapDeep {

Assertions.assertEquals(shouldFail ? null : value, returnedValue);

runVoid(map.close());
map.release();
map.close();

//if (shouldFail) this.checkLeaks = false;

@ -240,7 +239,7 @@ public abstract class TestDictionaryMapDeep {
.flatMapMany(map -> map
.putValue(key, value)
.thenMany(map.getAllValues(null))
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
)
));
if (shouldFail) {
@ -265,14 +264,14 @@ public abstract class TestDictionaryMapDeep {
.flatMap(v_ -> Mono.using(
() -> v_,
v -> v.set(value),
DatabaseMapDictionaryDeep::release
ResourceSupport::close
))
.then(map
.at(null, "capra")
.flatMap(v_ -> Mono.using(
() -> v_,
v -> v.set(Map.of("normal", "123", "ormaln", "456")),
DatabaseMapDictionaryDeep::release
ResourceSupport::close
))
)
.thenMany(map
@ -280,10 +279,10 @@ public abstract class TestDictionaryMapDeep {
.flatMap(v -> v.getValue()
.getAllValues(null)
.map(result -> Tuples.of(v.getKey(), result.getKey(), result.getValue()))
.doAfterTerminate(() -> v.getValue().release())
.doFinally(s -> v.getValue().close())
)
),
DatabaseMapDictionaryDeep::release
ResourceSupport::close
))
));
if (shouldFail) {
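
Note: these tests switch the cleanup handle in Mono.using from the class-specific DatabaseMapDictionaryDeep::release to the shared ResourceSupport::close, so any stage in the ResourceSupport hierarchy can be cleaned up with the same method reference. A minimal generic sketch of that Mono.using shape, assuming only an AutoCloseable stage (illustrative, not the project's types):

    import reactor.core.publisher.Mono;

    class MonoUsingSketch {
        interface DeepStage extends AutoCloseable {
            Mono<Void> set(String value);
            @Override void close();
        }

        // Synchronous acquire/use/cleanup: the third argument closes the stage
        // once the inner Mono terminates.
        static Mono<Void> setAndClose(DeepStage stage, String value) {
            return Mono.using(
                    () -> stage,        // resource supplier
                    s -> s.set(value),  // how the resource is used
                    DeepStage::close    // cleanup, analogous to ResourceSupport::close
            );
        }
    }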
@ -308,9 +307,9 @@ public abstract class TestDictionaryMapDeep {
.create(tempDb(getTempDbGenerator(), allocator, db -> tempDictionary(db, updateMode)
.map(dict -> tempDatabaseMapDictionaryDeepMap(dict, 5, 6))
.flatMap(map -> map
.at(null, key1).flatMap(v -> v.putValue(key2, value).doAfterTerminate(v::release))
.then(map.at(null, key1).flatMap(v -> v.getValue(null, key2).doAfterTerminate(v::release)))
.doAfterTerminate(map::release)
.at(null, key1).flatMap(v -> v.putValue(key2, value).doFinally(s -> v.close()))
.then(map.at(null, key1).flatMap(v -> v.getValue(null, key2).doFinally(s -> v.close())))
.doFinally(s -> map.close())
)
));
if (shouldFail) {
@ -335,7 +334,7 @@ public abstract class TestDictionaryMapDeep {
map.putValueAndGetPrevious(key, value),
map.putValueAndGetPrevious(key, value)
)
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
)
));
if (shouldFail) {
@ -358,22 +357,22 @@ public abstract class TestDictionaryMapDeep {
.at(null, key1)
.flatMap(v -> v
.putValueAndGetPrevious(key2, "error?")
.doAfterTerminate(v::release)
.doFinally(s -> v.close())
),
map
.at(null, key1)
.flatMap(v -> v
.putValueAndGetPrevious(key2, value)
.doAfterTerminate(v::release)
.doFinally(s -> v.close())
),
map
.at(null, key1)
.flatMap(v -> v
.putValueAndGetPrevious(key2, value)
.doAfterTerminate(v::release)
.doFinally(s -> v.close())
)
)
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
)
));
if (shouldFail) {
@ -396,7 +395,7 @@ public abstract class TestDictionaryMapDeep {
map.putValue(key, value).then(map.removeAndGetPrevious(key)),
map.removeAndGetPrevious(key)
)
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
)
));
if (shouldFail) {
@ -420,22 +419,22 @@ public abstract class TestDictionaryMapDeep {
.flatMap(v -> v
.putValue(key2, "error?")
.then(v.removeAndGetPrevious(key2))
.doAfterTerminate(v::release)
.doFinally(s -> v.close())
),
map
.at(null, key1)
.flatMap(v -> v
.putValue(key2, value)
.then(v.removeAndGetPrevious(key2))
.doAfterTerminate(v::release)
.doFinally(s -> v.close())
),
map
.at(null, key1)
.flatMap(v -> v.removeAndGetPrevious(key2)
.doAfterTerminate(v::release)
.doFinally(s -> v.close())
)
)
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
)
));
if (shouldFail) {
@ -458,7 +457,7 @@ public abstract class TestDictionaryMapDeep {
map.putValue(key, value).then(map.removeAndGetStatus(key)),
map.removeAndGetStatus(key)
)
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
)
));
if (shouldFail) {
@ -482,22 +481,22 @@ public abstract class TestDictionaryMapDeep {
.flatMap(v -> v
.putValue(key2, "error?")
.then(v.removeAndGetStatus(key2))
.doAfterTerminate(v::release)
.doFinally(s -> v.close())
),
map
.at(null, key1)
.flatMap(v -> v
.putValue(key2, value)
.then(v.removeAndGetStatus(key2))
.doAfterTerminate(v::release)
.doFinally(s -> v.close())
),
map
.at(null, key1)
.flatMap(v -> v.removeAndGetStatus(key2)
.doAfterTerminate(v::release)
.doFinally(s -> v.close())
)
)
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
)
));
if (shouldFail) {
@ -540,7 +539,7 @@ public abstract class TestDictionaryMapDeep {
return value;
})
)
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
)
));
if (updateMode != UpdateMode.ALLOW_UNSAFE || shouldFail) {
@ -565,28 +564,28 @@ public abstract class TestDictionaryMapDeep {
.at(null, key1)
.flatMap(v -> v
.updateValue(key2, prev -> prev)
.doAfterTerminate(v::release)
.doFinally(s -> v.close())
),
map
.at(null, key1)
.flatMap(v -> v
.updateValue(key2, prev -> value)
.doAfterTerminate(v::release)
.doFinally(s -> v.close())
),
map
.at(null, key1)
.flatMap(v -> v
.updateValue(key2, prev -> value)
.doAfterTerminate(v::release)
.doFinally(s -> v.close())
),
map
.at(null, key1)
.flatMap(v -> v
.updateValue(key2, prev -> null)
.doAfterTerminate(v::release)
.doFinally(s -> v.close())
)
)
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
.transform(LLUtils::handleDiscard)
)
));
@ -626,7 +625,7 @@ public abstract class TestDictionaryMapDeep {
assert Objects.equals(old, value);
return value;
}).then(map.getValue(null, key))
).doAfterTerminate(map::release))
).doFinally(s -> map.close()))
));
if (updateMode != UpdateMode.ALLOW_UNSAFE || shouldFail) {
stpVer.verifyError();
@ -652,7 +651,7 @@ public abstract class TestDictionaryMapDeep {
.updateValue(key2, prev -> prev)
.then(v.getValue(null, key2))
.defaultIfEmpty("empty")
.doAfterTerminate(v::release)
.doFinally(s -> v.close())
),
map
.at(null, key1)
@ -660,7 +659,7 @@ public abstract class TestDictionaryMapDeep {
.updateValue(key2, prev -> value)
.then(v.getValue(null, key2))
.defaultIfEmpty("empty")
.doAfterTerminate(v::release)
.doFinally(s -> v.close())
),
map
.at(null, key1)
@ -668,7 +667,7 @@ public abstract class TestDictionaryMapDeep {
.updateValue(key2, prev -> value)
.then(v.getValue(null, key2))
.defaultIfEmpty("empty")
.doAfterTerminate(v::release)
.doFinally(s -> v.close())
),
map
.at(null, key1)
@ -676,10 +675,10 @@ public abstract class TestDictionaryMapDeep {
.updateValue(key2, prev -> null)
.then(v.getValue(null, key2))
.defaultIfEmpty("empty")
.doAfterTerminate(v::release)
.doFinally(s -> v.close())
)
)
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
.transform(LLUtils::handleDiscard)
)
));
@ -704,7 +703,7 @@ public abstract class TestDictionaryMapDeep {
map.remove(key),
map.putValueAndGetChanged(key, Map.of("error?", "error.")).single()
)
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
)
));
if (shouldFail) {
@ -758,7 +757,7 @@ public abstract class TestDictionaryMapDeep {
map.putMulti(Flux.fromIterable(entries.entrySet())).then(Mono.empty()),
map.getMulti(null, Flux.fromIterable(entries.keySet()))
)
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
)
.filter(k -> k.getValue().isPresent())
.map(k -> Map.entry(k.getKey(), k.getValue().orElseThrow()))
@ -786,7 +785,7 @@ public abstract class TestDictionaryMapDeep {
.flatMapMany(map -> map
.setAllValues(Flux.fromIterable(entries.entrySet()))
.thenMany(map.getMulti(null, Flux.fromIterable(entries.keySet())))
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
)
.filter(k -> k.getValue().isPresent())
.map(k -> Map.entry(k.getKey(), k.getValue().orElseThrow()))
@ -815,7 +814,7 @@ public abstract class TestDictionaryMapDeep {
map.setAllValuesAndGetPrevious(Flux.fromIterable(entries.entrySet())),
map.setAllValuesAndGetPrevious(Flux.fromIterable(entries.entrySet()))
)
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
.transform(LLUtils::handleDiscard)
)
));
@ -843,7 +842,7 @@ public abstract class TestDictionaryMapDeep {
map.set(entries).then(Mono.empty()),
map.getMulti(null, Flux.fromIterable(entries.keySet()))
)
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
)
.filter(k -> k.getValue().isPresent())
.map(k -> Map.entry(k.getKey(), k.getValue().orElseThrow()))
@ -881,7 +880,7 @@ public abstract class TestDictionaryMapDeep {
removalMono.then(Mono.empty()),
map.setAndGetChanged(entries).single()
)
.doAfterTerminate(map::release);
.doFinally(s -> map.close());
})
.transform(LLUtils::handleDiscard)
));
@ -907,7 +906,7 @@ public abstract class TestDictionaryMapDeep {
)
.map(Map::entrySet)
.concatMapIterable(list -> list)
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
)
));
if (shouldFail) {
@ -933,7 +932,7 @@ public abstract class TestDictionaryMapDeep {
.concat(map.set(entries).then(Mono.empty()), map.clearAndGetPrevious(), map.get(null))
.map(Map::entrySet)
.concatMapIterable(list -> list)
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
)
.transform(LLUtils::handleDiscard)
));
@ -961,7 +960,7 @@ public abstract class TestDictionaryMapDeep {
map.putMulti(Flux.fromIterable(entries.entrySet())).then(Mono.empty()),
map.getAllValues(null)
)
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
)
));
if (shouldFail) {
@ -990,7 +989,7 @@ public abstract class TestDictionaryMapDeep {
.map(Map::entrySet)
.flatMapIterable(list -> list)
)
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
)
));
if (shouldFail) {
@ -1021,10 +1020,10 @@ public abstract class TestDictionaryMapDeep {
.getValue()
.get(null)
.map(val -> Map.entry(stage.getKey(), val))
.doAfterTerminate(() -> stage.getValue().release())
.doFinally(s -> stage.getValue().close())
)
)
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
)
));
if (shouldFail) {
@ -1051,7 +1050,7 @@ public abstract class TestDictionaryMapDeep {
map.putMulti(Flux.fromIterable(entries.entrySet())).then(Mono.empty()),
map.isEmpty(null)
)
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
)
.transform(LLUtils::handleDiscard)
));
@ -1077,7 +1076,7 @@ public abstract class TestDictionaryMapDeep {
map.clear().then(Mono.empty()),
map.isEmpty(null)
)
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
)
));
if (shouldFail) {
@ -121,14 +121,14 @@ public abstract class TestDictionaryMapDeepHashMap {
.create(tempDb(getTempDbGenerator(), allocator, db -> tempDictionary(db, updateMode)
.map(dict -> tempDatabaseMapDictionaryDeepMapHashMap(dict, 5))
.flatMapMany(map -> map
.at(null, key1).flatMap(v -> v.putValue(key2, value).doAfterTerminate(v::release))
.at(null, key1).flatMap(v -> v.putValue(key2, value).doFinally(s -> v.close()))
.thenMany(map
.getAllValues(null)
.map(Entry::getValue)
.flatMap(maps -> Flux.fromIterable(maps.entrySet()))
.map(Entry::getValue)
)
.doAfterTerminate(map::release)
.doFinally(s -> map.close())
)
));
if (shouldFail) {