Allow multiple resubscriptions to the same flux
parent d29ac00c24
commit 8eedc27bba
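The change has one theme: reactive chains that borrow reference-counted Netty buffers must be safe to subscribe to more than once. Before, many chains retained a ByteBuf once, when the Mono or Flux was assembled, and released it in doFinally when the first subscription ended, so a second subscription touched an already-released buffer. The diff moves acquisition into per-subscription hooks (Mono.defer, Mono.using, doFirst(buf::retain)) and pairs each with a per-subscription doAfterTerminate(buf::release). A condensed before/after sketch, simplified from the removeAndGetPrevious hunk below (dictionary, toKey and serializeSuffix belong to the surrounding class):

    // Before: keyBuf is created once, at assembly time, and released when the
    // first subscription terminates; a second subscribe() would retain a
    // buffer whose refCnt already dropped to zero.
    ByteBuf keyBuf = toKey(serializeSuffix(keySuffix));
    return dictionary
        .remove(keyBuf.retain(), LLDictionaryResultType.PREVIOUS_VALUE)
        .map(this::deserialize)
        .doFinally(s -> keyBuf.release());

    // After: Mono.using gives every subscription its own buffer and releases
    // it when that subscription ends, so the Mono can be resubscribed freely.
    return Mono.using(
        () -> toKey(serializeSuffix(keySuffix)),
        keyBuf -> dictionary
            .remove(keyBuf.retain(), LLDictionaryResultType.PREVIOUS_VALUE)
            .map(this::deserialize),
        ReferenceCounted::release
    );

One caveat worth noting: doFinally also fires on cancellation, while doAfterTerminate fires only after onComplete or onError, so buffers held by cancelled subscriptions appear to be handed off to the doOnDiscard hooks that the diff keeps in place.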
@@ -7,6 +7,7 @@ import io.netty.buffer.ByteBufAllocator;
 import io.netty.buffer.ByteBufUtil;
 import io.netty.buffer.CompositeByteBuf;
 import io.netty.buffer.Unpooled;
+import io.netty.util.IllegalReferenceCountException;
 import it.cavallium.dbengine.lucene.RandomSortField;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
@@ -156,6 +157,18 @@ public class LLUtils {
     return new it.cavallium.dbengine.database.LLKeyScore(hit.getKey(), hit.getScore());
   }
 
+  public static String toStringSafe(ByteBuf key) {
+    try {
+      if (key.refCnt() > 0) {
+        return toString(key);
+      } else {
+        return "(released)";
+      }
+    } catch (IllegalReferenceCountException ex) {
+      return "(released)";
+    }
+  }
+
   public static String toString(ByteBuf key) {
     if (key == null) {
       return "null";
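The new toStringSafe is a defensive variant of toString: it checks refCnt() and catches IllegalReferenceCountException, returning "(released)" when the buffer is already gone. Later hunks in LLLocalDictionary switch log statements and onErrorMap messages over to it, because those run on error paths where the key buffer may already have been released, for example:

    logger.trace("Reading {}", LLUtils.toStringSafe(key));
    ...
    .onErrorMap(cause -> new IOException("Failed to read " + LLUtils.toStringSafe(key), cause))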
@@ -68,8 +68,8 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
         entry -> deserialize(entry.getValue()),
         HashMap::new)
       .filter(map -> !map.isEmpty())
-      .doFirst(() -> range.retain())
-      .doFinally(s -> range.release());
+      .doFirst(range::retain)
+      .doAfterTerminate(range::release);
   }
 
   @Override
@@ -87,8 +87,8 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
           )
         )
       )
-      .doFirst(() -> range.retain())
-      .doFinally(s -> range.release());
+      .doFirst(range::retain)
+      .doAfterTerminate(range::release);
   }
 
   @Override
@@ -99,27 +99,26 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 
   @Override
   public Mono<Long> leavesCount(@Nullable CompositeSnapshot snapshot, boolean fast) {
-    return Mono.defer(() -> dictionary.sizeRange(resolveSnapshot(snapshot), range.retain(), fast))
-      .doFirst(() -> range.retain())
-      .doFinally(s -> range.release());
+    return Mono
+      .defer(() -> dictionary.sizeRange(resolveSnapshot(snapshot), range.retain(), fast))
+      .doFirst(range::retain)
+      .doAfterTerminate(range::release);
   }
 
   @Override
   public Mono<Boolean> isEmpty(@Nullable CompositeSnapshot snapshot) {
-    return Mono.defer(() -> dictionary.isRangeEmpty(resolveSnapshot(snapshot), range.retain()))
-      .doFirst(() -> range.retain())
-      .doFinally(s -> range.release());
+    return Mono
+      .defer(() -> dictionary.isRangeEmpty(resolveSnapshot(snapshot), range.retain()))
+      .doFirst(range::retain)
+      .doAfterTerminate(range::release);
   }
 
   @Override
   public Mono<DatabaseStageEntry<U>> at(@Nullable CompositeSnapshot snapshot, T keySuffix) {
     return Mono
-      .using(
-        () -> toKey(serializeSuffix(keySuffix)),
-        keyBuf -> Mono
-          .fromSupplier(() -> new DatabaseSingle<>(dictionary, keyBuf.retain(), Serializer.noop()))
-          .<DatabaseStageEntry<U>>map(entry -> new DatabaseSingleMapped<>(entry, valueSerializer)),
-        ReferenceCounted::release
+      .fromSupplier(() -> new DatabaseSingleMapped<>(
+        new DatabaseSingle<>(dictionary, toKey(serializeSuffix(keySuffix)), Serializer.noop())
+        , valueSerializer)
       );
   }
@@ -279,41 +278,41 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 
   @Override
   public Mono<U> removeAndGetPrevious(T keySuffix) {
-    ByteBuf keySuffixBuf = serializeSuffix(keySuffix);
-    ByteBuf keyBuf = toKey(keySuffixBuf.retain());
-    return dictionary
-      .remove(keyBuf.retain(), LLDictionaryResultType.PREVIOUS_VALUE)
-      .map(this::deserialize)
-      .doFinally(s -> {
-        keyBuf.release();
-        keySuffixBuf.release();
-      });
+    return Mono
+      .using(
+        () -> toKey(serializeSuffix(keySuffix)),
+        keyBuf -> dictionary
+          .remove(keyBuf.retain(), LLDictionaryResultType.PREVIOUS_VALUE)
+          .map(this::deserialize),
+        ReferenceCounted::release
+      );
   }
 
   @Override
   public Mono<Boolean> removeAndGetStatus(T keySuffix) {
-    ByteBuf keySuffixBuf = serializeSuffix(keySuffix);
-    ByteBuf keyBuf = toKey(keySuffixBuf.retain());
-    return dictionary
-      .remove(keyBuf.retain(), LLDictionaryResultType.PREVIOUS_VALUE_EXISTENCE)
-      .map(LLUtils::responseToBoolean)
-      .doFinally(s -> {
-        keyBuf.release();
-        keySuffixBuf.release();
-      });
+    return Mono
+      .using(
+        () -> toKey(serializeSuffix(keySuffix)),
+        keyBuf -> dictionary
+          .remove(keyBuf.retain(), LLDictionaryResultType.PREVIOUS_VALUE_EXISTENCE)
+          .map(LLUtils::responseToBoolean),
+        ReferenceCounted::release
+      );
   }
 
   @Override
   public Flux<Entry<T, U>> getMulti(@Nullable CompositeSnapshot snapshot, Flux<T> keys, boolean existsAlmostCertainly) {
-    return dictionary
-      .getMulti(resolveSnapshot(snapshot), keys.flatMap(keySuffix -> Mono.fromCallable(() -> {
-        ByteBuf keySuffixBuf = serializeSuffix(keySuffix);
-        try {
-          return toKey(keySuffixBuf.retain());
-        } finally {
-          keySuffixBuf.release();
-        }
-      })), existsAlmostCertainly)
+    return Flux
+      .defer(() -> dictionary
+        .getMulti(resolveSnapshot(snapshot), keys.flatMap(keySuffix -> Mono.fromCallable(() -> {
+          ByteBuf keySuffixBuf = serializeSuffix(keySuffix);
+          try {
+            return toKey(keySuffixBuf.retain());
+          } finally {
+            keySuffixBuf.release();
+          }
+        })), existsAlmostCertainly)
+      )
       .flatMap(entry -> Mono
         .fromCallable(() -> Map.entry(deserializeSuffix(stripPrefix(entry.getKey(), false)), deserialize(entry.getValue())))
       );
@@ -352,28 +351,33 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 
   @Override
   public Flux<Entry<T, DatabaseStageEntry<U>>> getAllStages(@Nullable CompositeSnapshot snapshot) {
-    return Flux.defer(() -> dictionary.getRangeKeys(resolveSnapshot(snapshot), range.retain()))
+    return Flux
+      .defer(() -> dictionary.getRangeKeys(resolveSnapshot(snapshot), range.retain()))
       .<Entry<T, DatabaseStageEntry<U>>>map(key -> {
-        ByteBuf keySuffixWithExt = stripPrefix(key, false);
-        // Don't use "key" under this point ---
+        ByteBuf keySuffixWithExt = stripPrefix(key.retain(), false);
         try {
-          return Map.entry(deserializeSuffix(keySuffixWithExt.retainedSlice()),
-            new DatabaseSingleMapped<>(new DatabaseSingle<>(dictionary,
-              toKey(keySuffixWithExt.retainedSlice()),
-              Serializer.noop()
-            ), valueSerializer)
-          );
+          try {
+            return Map.entry(deserializeSuffix(keySuffixWithExt.retainedSlice()),
+              new DatabaseSingleMapped<>(new DatabaseSingle<>(dictionary,
+                toKey(keySuffixWithExt.retainedSlice()),
+                Serializer.noop()
+              ), valueSerializer)
+            );
+          } finally {
+            keySuffixWithExt.release();
+          }
         } finally {
-          keySuffixWithExt.release();
+          key.release();
         }
       })
-      .doFirst(() -> range.retain())
-      .doFinally(s -> range.release());
+      .doFirst(range::retain)
+      .doAfterTerminate(range::release);
   }
 
   @Override
   public Flux<Entry<T, U>> getAllValues(@Nullable CompositeSnapshot snapshot) {
-    return Flux.defer(() -> dictionary.getRange(resolveSnapshot(snapshot), range.retain()))
+    return Flux
+      .defer(() -> dictionary.getRange(resolveSnapshot(snapshot), range.retain()))
       .map(serializedEntry -> Map.entry(
         deserializeSuffix(stripPrefix(serializedEntry.getKey(), false)),
         valueSerializer.deserialize(serializedEntry.getValue())
@@ -384,8 +388,8 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
         castedEntry.getKey().release();
         castedEntry.getValue().release();
       })
-      .doFirst(() -> range.retain())
-      .doFinally(s -> range.release());
+      .doFirst(range::retain)
+      .doAfterTerminate(range::release);
   }
 
   @Override
@@ -401,28 +405,27 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
           )
         )
       )
-      .doFirst(() -> range.retain())
-      .doFinally(s -> range.release());
+      .doFirst(range::retain)
+      .doAfterTerminate(range::release);
   }
 
   @Override
   public Mono<Void> clear() {
-    if (range.isAll()) {
-      return dictionary
-        .clear();
-    } else if (range.isSingle()) {
-      return Mono
-        .defer(() -> dictionary.remove(range.getSingle().retain(), LLDictionaryResultType.VOID))
-        .doOnNext(ReferenceCounted::release)
-        .then()
-        .doFirst(() -> range.getSingle().retain())
-        .doFinally(s -> range.getSingle().release());
-    } else {
-      return Mono
-        .defer(() -> dictionary.setRange(range.retain(), Flux.empty()))
-        .doFirst(() -> range.retain())
-        .doFinally(s -> range.release());
-    }
+    return Mono
+      .defer(() -> {
+        if (range.isAll()) {
+          return dictionary.clear();
+        } else if (range.isSingle()) {
+          return dictionary
+            .remove(range.getSingle().retain(), LLDictionaryResultType.VOID)
+            .doOnNext(ReferenceCounted::release)
+            .then();
+        } else {
+          return dictionary.setRange(range.retain(), Flux.empty());
+        }
+      })
+      .doFirst(range::retain)
+      .doAfterTerminate(range::release);
   }
 
   /**
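With the branch on range evaluated inside Mono.defer and the retain/release pair attached per subscription, the resulting Mono is reusable. A minimal illustration of what the commit title promises (hypothetical usage; map stands for any DatabaseMapDictionary instance):

    Mono<Void> cleared = map.clear();
    cleared.block(); // first subscription: retains the range, clears, releases
    cleared.block(); // second subscription also works; nothing was
                     // permanently released by the first one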
@@ -11,6 +11,7 @@ import it.cavallium.dbengine.database.LLSnapshot;
 import it.cavallium.dbengine.database.LLUtils;
 import it.cavallium.dbengine.database.UpdateMode;
 import it.cavallium.dbengine.database.disk.LLLocalDictionary;
+import it.cavallium.dbengine.database.disk.LLLocalDictionary.ReleasableSlice;
 import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
 import java.util.Collection;
 import java.util.List;
@@ -367,15 +368,16 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
   public Mono<Long> leavesCount(@Nullable CompositeSnapshot snapshot, boolean fast) {
     return Mono
       .defer(() -> dictionary.sizeRange(resolveSnapshot(snapshot), range.retain(), fast))
-      .doFirst(() -> range.retain())
-      .doFinally(s -> range.release());
+      .doFirst(range::retain)
+      .doAfterTerminate(range::release);
   }
 
   @Override
   public Mono<Boolean> isEmpty(@Nullable CompositeSnapshot snapshot) {
-    return Mono.defer(() -> dictionary.isRangeEmpty(resolveSnapshot(snapshot), range.retain()))
-      .doFirst(() -> range.retain())
-      .doFinally(s -> range.release());
+    return Mono
+      .defer(() -> dictionary.isRangeEmpty(resolveSnapshot(snapshot), range.retain()))
+      .doFirst(range::retain)
+      .doAfterTerminate(range::release);
   }
 
   @Override
@@ -430,72 +432,76 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
 
   @Override
   public Flux<Entry<T, US>> getAllStages(@Nullable CompositeSnapshot snapshot) {
-    if (LLLocalDictionary.DEBUG_PREFIXES_WHEN_ASSERTIONS_ARE_ENABLED && this.subStageGetter.needsDebuggingKeyFlux()) {
-      return Flux
-        .defer(() -> dictionary
-          .getRangeKeysGrouped(resolveSnapshot(snapshot), range.retain(),
-            keyPrefixLength + keySuffixLength)
-        )
-        .flatMapSequential(rangeKeys -> Flux
-          .using(
-            () -> {
-              assert this.subStageGetter.isMultiKey() || rangeKeys.size() == 1;
-              ByteBuf groupKeyWithExt = rangeKeys.get(0).retainedSlice();
-              ByteBuf groupKeyWithoutExt = removeExtFromFullKey(groupKeyWithExt.retain(), true);
-              ByteBuf groupSuffix = this.stripPrefix(groupKeyWithoutExt.retain(), true);
-              return new GroupBuffers(groupKeyWithExt, groupKeyWithoutExt, groupSuffix);
-            },
-            buffers -> Mono
-              .fromCallable(() -> {
-                assert subStageKeysConsistency(buffers.groupKeyWithExt.readableBytes());
-                return null;
-              })
-              .then(this.subStageGetter
-                .subStage(dictionary,
-                  snapshot,
-                  buffers.groupKeyWithoutExt.retain(),
-                  rangeKeys.stream().map(ByteBuf::retain).collect(Collectors.toList())
-                )
-                .map(us -> Map.entry(this.deserializeSuffix(buffers.groupSuffix.retain()), us))
-              ),
-            buffers -> {
-              buffers.groupSuffix.release();
-              buffers.groupKeyWithoutExt.release();
-              buffers.groupKeyWithExt.release();
-            }
-          )
-          .doFinally(s -> {
-            for (ByteBuf rangeKey : rangeKeys) {
-              rangeKey.release();
-            }
-          })
-        )
-        .doOnDiscard(Collection.class, discardedCollection -> {
-          //noinspection unchecked
-          var rangeKeys = (Collection<ByteBuf>) discardedCollection;
-          for (ByteBuf rangeKey : rangeKeys) {
-            rangeKey.release();
-          }
-        });
-    } else {
-      return Flux
-        .defer(() -> dictionary
-          .getRangeKeyPrefixes(resolveSnapshot(snapshot), range.retain(),
-            keyPrefixLength + keySuffixLength)
-        )
-        .flatMapSequential(groupKeyWithoutExt -> {
-          ByteBuf groupSuffix = this.stripPrefix(groupKeyWithoutExt.retain(), true);
-          assert subStageKeysConsistency(groupKeyWithoutExt.readableBytes() + keyExtLength);
-          return this.subStageGetter
-            .subStage(dictionary,
-              snapshot,
-              groupKeyWithoutExt.retain(),
-              List.of()
-            )
-            .map(us -> Map.entry(this.deserializeSuffix(groupSuffix.retain()), us))
-            .doFinally(s -> groupSuffix.release());
-        });
-    }
+    return Flux
+      .defer(() -> {
+        if (LLLocalDictionary.DEBUG_PREFIXES_WHEN_ASSERTIONS_ARE_ENABLED && this.subStageGetter.needsDebuggingKeyFlux()) {
+          return Flux
+            .defer(() -> dictionary.getRangeKeysGrouped(resolveSnapshot(snapshot), range.retain(), keyPrefixLength + keySuffixLength))
+            .flatMapSequential(rangeKeys -> Flux
+              .using(
+                () -> {
+                  assert this.subStageGetter.isMultiKey() || rangeKeys.size() == 1;
+                  ByteBuf groupKeyWithExt = rangeKeys.get(0).retainedSlice();
+                  ByteBuf groupKeyWithoutExt = removeExtFromFullKey(groupKeyWithExt.retain(), true);
+                  ByteBuf groupSuffix = this.stripPrefix(groupKeyWithoutExt.retain(), true);
+                  return new GroupBuffers(groupKeyWithExt, groupKeyWithoutExt, groupSuffix);
+                },
+                buffers -> Mono
+                  .fromCallable(() -> {
+                    assert subStageKeysConsistency(buffers.groupKeyWithExt.readableBytes());
+                    return null;
+                  })
+                  .then(this.subStageGetter
+                    .subStage(dictionary,
+                      snapshot,
+                      buffers.groupKeyWithoutExt.retain(),
+                      rangeKeys.stream().map(ByteBuf::retain).collect(Collectors.toList())
+                    )
+                    .map(us -> Map.entry(this.deserializeSuffix(buffers.groupSuffix.retain()), us))
+                  ),
+                buffers -> {
+                  buffers.groupSuffix.release();
+                  buffers.groupKeyWithoutExt.release();
+                  buffers.groupKeyWithExt.release();
+                }
+              )
+              .doAfterTerminate(() -> {
+                for (ByteBuf rangeKey : rangeKeys) {
+                  rangeKey.release();
+                }
+              })
+            )
+            .doOnDiscard(Collection.class, discardedCollection -> {
+              //noinspection unchecked
+              var rangeKeys = (Collection<ByteBuf>) discardedCollection;
+              for (ByteBuf rangeKey : rangeKeys) {
+                rangeKey.release();
+              }
+            });
+        } else {
+          return Flux
+            .defer(() -> dictionary.getRangeKeyPrefixes(resolveSnapshot(snapshot), range.retain(), keyPrefixLength + keySuffixLength))
+            .flatMapSequential(groupKeyWithoutExt -> Mono
+              .using(
+                () -> {
+                  var groupSuffix = this.stripPrefix(groupKeyWithoutExt.retain(), true);
+                  assert subStageKeysConsistency(groupKeyWithoutExt.readableBytes() + keyExtLength);
+                  return groupSuffix;
+                },
+                groupSuffix -> this.subStageGetter
+                  .subStage(dictionary,
+                    snapshot,
+                    groupKeyWithoutExt.retain(),
+                    List.of()
+                  )
+                  .map(us -> Map.entry(this.deserializeSuffix(groupSuffix.retain()), us)),
+                ReferenceCounted::release
+              )
+            );
+        }
+      })
+      .doFirst(range::retain)
+      .doAfterTerminate(range::release);
   }
 
   private boolean subStageKeysConsistency(int totalKeyLength) {
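Both branches of getAllStages now express buffer ownership through the using operator instead of a hand-rolled doFinally. The generic shape of the pattern (a sketch; acquire and use stand in for the buffer handling of the real code):

    Flux.using(
        () -> acquire(),       // runs once per subscription
        res -> use(res),       // builds the pipeline for that subscription
        res -> res.release()   // cleanup of that subscription's resource
    );

Each subscription gets its own resource, which is exactly what resubscription requires.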
@@ -521,7 +527,7 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
         .at(null, entry.getKey())
         .flatMap(us -> us
           .set(entry.getValue())
-          .doFinally(s -> us.release())
+          .doAfterTerminate(us::release)
         )
       )
       .doOnDiscard(DatabaseStage.class, DatabaseStage::release)
@@ -532,22 +538,19 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
 
   @Override
   public Mono<Void> clear() {
-    if (range.isAll()) {
-      return dictionary
-        .clear();
-    } else if (range.isSingle()) {
-      return Mono
-        .defer(() -> dictionary
-          .remove(range.getSingle().retain(), LLDictionaryResultType.VOID)
-          .doOnNext(ReferenceCounted::release)
-        )
-        .then();
-    } else {
-      return Mono
-        .defer(() -> dictionary
-          .setRange(range.retain(), Flux.empty())
-        );
-    }
+    return Mono
+      .defer(() -> {
+        if (range.isAll()) {
+          return dictionary.clear();
+        } else if (range.isSingle()) {
+          return dictionary
+            .remove(range.getSingle().retain(), LLDictionaryResultType.VOID)
+            .doOnNext(ReferenceCounted::release)
+            .then();
+        } else {
+          return dictionary.setRange(range.retain(), Flux.empty());
+        }
+      });
   }
 
   //todo: temporary wrapper. convert the whole class to buffers
@@ -176,7 +176,7 @@ public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T
       .map(Entry::getKey)
       .flatMap(key -> this
         .at(snapshot, key)
-        .flatMap(stage -> Mono.just(Map.entry(key, stage)).doFinally(s -> stage.release()))
+        .flatMap(stage -> Mono.just(Map.entry(key, stage)).doAfterTerminate(stage::release))
       )
     );
   }
@@ -194,7 +194,7 @@ public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T
       .flatMap(stage -> stage
         .setAndGetPrevious(entry.getValue())
         .map(prev -> Map.entry(entry.getKey(), prev))
-        .doFinally(s -> stage.release()))
+        .doAfterTerminate(stage::release))
     );
   }
 
@@ -46,8 +46,8 @@ public class DatabaseSingle<U> implements DatabaseStageEntry<U> {
     return Mono
       .defer(() -> dictionary.get(resolveSnapshot(snapshot), key.retain(), existsAlmostCertainly))
       .map(this::deserialize)
-      .doFirst(() -> key.retain())
-      .doFinally(s -> key.release());
+      .doFirst(key::retain)
+      .doAfterTerminate(key::release);
   }
 
   @Override
@@ -60,8 +60,8 @@ public class DatabaseSingle<U> implements DatabaseStageEntry<U> {
           .map(this::deserialize),
         ReferenceCounted::release
       )
-      .doFirst(() -> key.retain())
-      .doFinally(s -> key.release());
+      .doFirst(key::retain)
+      .doAfterTerminate(key::release);
   }
 
   @Override
@@ -78,8 +78,8 @@ public class DatabaseSingle<U> implements DatabaseStageEntry<U> {
         }
       }, updateReturnMode, existsAlmostCertainly))
       .map(this::deserialize)
-      .doFirst(() -> key.retain())
-      .doFinally(s -> key.release());
+      .doFirst(key::retain)
+      .doAfterTerminate(key::release);
   }
 
   @Override
@@ -94,8 +94,8 @@ public class DatabaseSingle<U> implements DatabaseStageEntry<U> {
          return this.serialize(result);
        }
      }, existsAlmostCertainly).transform(mono -> LLUtils.mapDelta(mono, this::deserialize)))
-      .doFirst(() -> key.retain())
-      .doFinally(s -> key.release());
+      .doFirst(key::retain)
+      .doAfterTerminate(key::release);
   }
 
   @Override
@@ -105,8 +105,8 @@ public class DatabaseSingle<U> implements DatabaseStageEntry<U> {
         .remove(key.retain(), LLDictionaryResultType.PREVIOUS_VALUE)
       )
       .map(this::deserialize)
-      .doFirst(() -> key.retain())
-      .doFinally(s -> key.release());
+      .doFirst(key::retain)
+      .doAfterTerminate(key::release);
   }
 
   @Override
@@ -116,8 +116,8 @@ public class DatabaseSingle<U> implements DatabaseStageEntry<U> {
         .isRangeEmpty(resolveSnapshot(snapshot), LLRange.single(key.retain()))
       )
      .map(empty -> empty ? 0L : 1L)
-      .doFirst(() -> key.retain())
-      .doFinally(s -> key.release());
+      .doFirst(key::retain)
+      .doAfterTerminate(key::release);
   }
 
   @Override
@@ -126,8 +126,8 @@ public class DatabaseSingle<U> implements DatabaseStageEntry<U> {
       .defer(() -> dictionary
         .isRangeEmpty(resolveSnapshot(snapshot), LLRange.single(key.retain()))
       )
-      .doFirst(() -> key.retain())
-      .doFinally(s -> key.release());
+      .doFirst(key::retain)
+      .doAfterTerminate(key::release);
   }
 
   //todo: temporary wrapper. convert the whole class to buffers
@@ -27,7 +27,7 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
   Mono<US> at(@Nullable CompositeSnapshot snapshot, T key);
 
   default Mono<U> getValue(@Nullable CompositeSnapshot snapshot, T key, boolean existsAlmostCertainly) {
-    return this.at(snapshot, key).flatMap(v -> v.get(snapshot, existsAlmostCertainly).doFinally(s -> v.release()));
+    return this.at(snapshot, key).flatMap(v -> v.get(snapshot, existsAlmostCertainly).doAfterTerminate(v::release));
   }
 
   default Mono<U> getValue(@Nullable CompositeSnapshot snapshot, T key) {
@@ -39,7 +39,7 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
   }
 
   default Mono<Void> putValue(T key, U value) {
-    return at(null, key).single().flatMap(v -> v.set(value).doFinally(s -> v.release()));
+    return at(null, key).single().flatMap(v -> v.set(value).doAfterTerminate(v::release));
   }
 
   Mono<UpdateMode> getUpdateMode();
@@ -50,7 +50,7 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
       .single()
       .flatMap(v -> v
         .update(updater, updateReturnMode, existsAlmostCertainly)
-        .doFinally(s -> v.release())
+        .doAfterTerminate(v::release)
       );
   }
 
@@ -72,7 +72,7 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
       .single()
       .flatMap(v -> v
         .updateAndGetDelta(updater, existsAlmostCertainly)
-        .doFinally(s -> v.release())
+        .doAfterTerminate(v::release)
       );
   }
 
@@ -81,7 +81,7 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
   }
 
   default Mono<U> putValueAndGetPrevious(T key, U value) {
-    return at(null, key).single().flatMap(v -> v.setAndGetPrevious(value).doFinally(s -> v.release()));
+    return at(null, key).single().flatMap(v -> v.setAndGetPrevious(value).doAfterTerminate(v::release));
   }
 
   /**
@@ -91,7 +91,7 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
    * @return true if the key was associated with any value, false if the key didn't exist.
    */
   default Mono<Boolean> putValueAndGetChanged(T key, U value) {
-    return at(null, key).single().flatMap(v -> v.setAndGetChanged(value).doFinally(s -> v.release())).single();
+    return at(null, key).single().flatMap(v -> v.setAndGetChanged(value).doAfterTerminate(v::release)).single();
   }
 
   default Mono<Void> remove(T key) {
@@ -99,7 +99,7 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
   }
 
   default Mono<U> removeAndGetPrevious(T key) {
-    return at(null, key).flatMap(v -> v.clearAndGetPrevious().doFinally(s -> v.release()));
+    return at(null, key).flatMap(v -> v.clearAndGetPrevious().doAfterTerminate(v::release));
   }
 
   default Mono<Boolean> removeAndGetStatus(T key) {
@@ -129,7 +129,7 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
         .getValue()
         .get(snapshot, true)
         .map(value -> Map.entry(entry.getKey(), value))
-        .doFinally(s -> entry.getValue().release())
+        .doAfterTerminate(() -> entry.getValue().release())
       );
   }
 
@@ -152,7 +152,7 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
         .flatMap(entriesReplacer)
         .flatMap(replacedEntry -> this
           .at(null, replacedEntry.getKey())
-          .flatMap(v -> v.set(replacedEntry.getValue()).doFinally(s -> v.release())))
+          .flatMap(v -> v.set(replacedEntry.getValue()).doAfterTerminate(v::release)))
         .then();
     }
   }
@@ -162,7 +162,7 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
       .getAllStages(null)
       .flatMap(stage -> Mono
         .defer(() -> entriesReplacer.apply(stage))
-        .doFinally(s -> stage.getValue().release())
+        .doAfterTerminate(() -> stage.getValue().release())
       )
       .then();
   }
@@ -46,32 +46,37 @@ public class SubStageGetterHashMap<T, U, TH> implements
       @Nullable CompositeSnapshot snapshot,
       ByteBuf prefixKey,
       List<ByteBuf> debuggingKeys) {
-    return Mono
-      .defer(() -> {
-        if (assertsEnabled) {
-          return checkKeyFluxConsistency(prefixKey.retain(), debuggingKeys);
-        } else {
-          return Mono
-            .fromCallable(() -> {
-              for (ByteBuf key : debuggingKeys) {
-                key.release();
-              }
-              return null;
-            });
-        }
-      })
-      .then(Mono
-        .fromSupplier(() -> DatabaseMapDictionaryHashed
-          .tail(dictionary,
-            prefixKey.retain(),
-            keySerializer,
-            valueSerializer,
-            keyHashFunction,
-            keyHashSerializer
-          )
-        )
-      )
-      .doFinally(s -> prefixKey.release());
+    try {
+      return Mono
+        .defer(() -> {
+          if (assertsEnabled) {
+            return checkKeyFluxConsistency(prefixKey.retain(), debuggingKeys);
+          } else {
+            return Mono
+              .fromCallable(() -> {
+                for (ByteBuf key : debuggingKeys) {
+                  key.release();
+                }
+                return null;
+              });
+          }
+        })
+        .then(Mono
+          .fromSupplier(() -> DatabaseMapDictionaryHashed
+            .tail(dictionary,
+              prefixKey.retain(),
+              keySerializer,
+              valueSerializer,
+              keyHashFunction,
+              keyHashSerializer
+            )
+          )
+        )
+        .doFirst(prefixKey::retain)
+        .doAfterTerminate(prefixKey::release);
+    } finally {
+      prefixKey.release();
+    }
   }
 
   @Override
@@ -44,31 +44,36 @@ public class SubStageGetterHashSet<T, TH> implements
       @Nullable CompositeSnapshot snapshot,
       ByteBuf prefixKey,
       List<ByteBuf> debuggingKeys) {
-    return Mono
-      .defer(() -> {
-        if (assertsEnabled) {
-          return checkKeyFluxConsistency(prefixKey.retain(), debuggingKeys);
-        } else {
-          return Mono
-            .fromCallable(() -> {
-              for (ByteBuf key : debuggingKeys) {
-                key.release();
-              }
-              return null;
-            });
-        }
-      })
-      .then(Mono
-        .fromSupplier(() -> DatabaseSetDictionaryHashed
-          .tail(dictionary,
-            prefixKey.retain(),
-            keySerializer,
-            keyHashFunction,
-            keyHashSerializer
-          )
-        )
-      )
-      .doFinally(s -> prefixKey.release());
+    try {
+      return Mono
+        .defer(() -> {
+          if (assertsEnabled) {
+            return checkKeyFluxConsistency(prefixKey.retain(), debuggingKeys);
+          } else {
+            return Mono
+              .fromCallable(() -> {
+                for (ByteBuf key : debuggingKeys) {
+                  key.release();
+                }
+                return null;
+              });
+          }
+        })
+        .then(Mono
+          .fromSupplier(() -> DatabaseSetDictionaryHashed
+            .tail(dictionary,
+              prefixKey.retain(),
+              keySerializer,
+              keyHashFunction,
+              keyHashSerializer
+            )
+          )
+        )
+        .doFirst(prefixKey::retain)
+        .doAfterTerminate(prefixKey::release);
+    } finally {
+      prefixKey.release();
+    }
   }
 
   @Override
@@ -37,30 +37,35 @@ public class SubStageGetterMap<T, U> implements SubStageGetter<Map<T, U>, Databa
       @Nullable CompositeSnapshot snapshot,
       ByteBuf prefixKey,
       List<ByteBuf> debuggingKeys) {
-    return Mono
-      .defer(() -> {
-        if (assertsEnabled) {
-          return checkKeyFluxConsistency(prefixKey.retain(), debuggingKeys);
-        } else {
-          return Mono
-            .fromCallable(() -> {
-              for (ByteBuf key : debuggingKeys) {
-                key.release();
-              }
-              return null;
-            });
-        }
-      })
-      .then(Mono
-        .fromSupplier(() -> DatabaseMapDictionary
-          .tail(dictionary,
-            prefixKey.retain(),
-            keySerializer,
-            valueSerializer
-          )
-        )
-      )
-      .doFinally(s -> prefixKey.release());
+    try {
+      return Mono
+        .defer(() -> {
+          if (assertsEnabled) {
+            return checkKeyFluxConsistency(prefixKey.retain(), debuggingKeys);
+          } else {
+            return Mono
+              .fromCallable(() -> {
+                for (ByteBuf key : debuggingKeys) {
+                  key.release();
+                }
+                return null;
+              });
+          }
+        })
+        .then(Mono
+          .fromSupplier(() -> DatabaseMapDictionary
+            .tail(dictionary,
+              prefixKey.retain(),
+              keySerializer,
+              valueSerializer
+            )
+          )
+        )
+        .doFirst(prefixKey::retain)
+        .doAfterTerminate(prefixKey::release);
+    } finally {
+      prefixKey.release();
+    }
   }
 
   @Override
@@ -51,31 +51,36 @@ public class SubStageGetterMapDeep<T, U, US extends DatabaseStage<U>> implements
       @Nullable CompositeSnapshot snapshot,
       ByteBuf prefixKey,
       List<ByteBuf> debuggingKeys) {
-    return Mono
-      .defer(() -> {
-        if (assertsEnabled) {
-          return checkKeyFluxConsistency(prefixKey.retain(), debuggingKeys);
-        } else {
-          return Mono
-            .fromCallable(() -> {
-              for (ByteBuf key : debuggingKeys) {
-                key.release();
-              }
-              return null;
-            });
-        }
-      })
-      .then(Mono
-        .fromSupplier(() -> DatabaseMapDictionaryDeep
-          .deepIntermediate(dictionary,
-            prefixKey.retain(),
-            keySerializer,
-            subStageGetter,
-            keyExtLength
-          )
-        )
-      )
-      .doFinally(s -> prefixKey.release());
+    try {
+      return Mono
+        .defer(() -> {
+          if (assertsEnabled) {
+            return checkKeyFluxConsistency(prefixKey.retain(), debuggingKeys);
+          } else {
+            return Mono
+              .fromCallable(() -> {
+                for (ByteBuf key : debuggingKeys) {
+                  key.release();
+                }
+                return null;
+              });
+          }
+        })
+        .then(Mono
+          .fromSupplier(() -> DatabaseMapDictionaryDeep
+            .deepIntermediate(dictionary,
+              prefixKey.retain(),
+              keySerializer,
+              subStageGetter,
+              keyExtLength
+            )
+          )
+        )
+        .doFirst(prefixKey::retain)
+        .doAfterTerminate(prefixKey::release);
+    } finally {
+      prefixKey.release();
+    }
   }
 
   @Override
@@ -38,9 +38,7 @@ public class SubStageGetterSet<T> implements SubStageGetter<Map<T, Nothing>, Dat
       return Mono
         .defer(() -> {
           if (assertsEnabled) {
-            return checkKeyFluxConsistency(prefixKey.retain(), debuggingKeys)
-              .doFirst(() -> prefixKey.retain())
-              .doFinally(s -> prefixKey.release());
+            return checkKeyFluxConsistency(prefixKey.retain(), debuggingKeys);
           } else {
             return Mono
               .fromCallable(() -> {
@@ -60,8 +58,8 @@ public class SubStageGetterSet<T> implements SubStageGetter<Map<T, Nothing>, Dat
             )
           )
         )
-        .doFirst(() -> prefixKey.retain())
-        .doFinally(s -> prefixKey.release());
+        .doFirst(prefixKey::retain)
+        .doAfterTerminate(prefixKey::release);
     } finally {
       prefixKey.release();
     }
@@ -78,20 +76,26 @@ public class SubStageGetterSet<T> implements SubStageGetter<Map<T, Nothing>, Dat
   }
 
   private Mono<Void> checkKeyFluxConsistency(ByteBuf prefixKey, List<ByteBuf> keys) {
-    return Mono
-      .fromCallable(() -> {
-        try {
-          for (ByteBuf key : keys) {
-            assert key.readableBytes() == prefixKey.readableBytes() + getKeyBinaryLength();
-          }
-        } finally {
-          prefixKey.release();
-          for (ByteBuf key : keys) {
-            key.release();
-          }
-        }
-        return null;
-      });
+    try {
+      return Mono
+        .<Void>fromCallable(() -> {
+          try {
+            for (ByteBuf key : keys) {
+              assert key.readableBytes() == prefixKey.readableBytes() + getKeyBinaryLength();
+            }
+          } finally {
+            prefixKey.release();
+            for (ByteBuf key : keys) {
+              key.release();
+            }
+          }
+          return null;
+        })
+        .doFirst(prefixKey::retain)
+        .doAfterTerminate(prefixKey::release);
+    } finally {
+      prefixKey.release();
+    }
   }
 
   public int getKeyBinaryLength() {
@@ -52,8 +52,8 @@ public class SubStageGetterSingle<T> implements SubStageGetter<T, DatabaseStageE
         .then(Mono
           .<DatabaseStageEntry<T>>fromSupplier(() -> new DatabaseSingle<>(dictionary, keyPrefix.retain(), serializer))
         )
-        .doFirst(() -> keyPrefix.retain())
-        .doFinally(s -> keyPrefix.release());
+        .doFirst(keyPrefix::retain)
+        .doAfterTerminate(keyPrefix::release);
     } finally {
       keyPrefix.release();
     }
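The SubStageGetter classes above all converge on the same ownership idiom: the method consumes the reference it was handed (released in the outer finally), while the returned Mono takes and drops one reference per subscription. In isolation (buildMono is a hypothetical stand-in for the Mono each getter builds):

    Mono<T> wrap(ByteBuf prefixKey) {
        try {
            return buildMono(prefixKey)
                    .doFirst(prefixKey::retain)
                    .doAfterTerminate(prefixKey::release);
        } finally {
            prefixKey.release();
        }
    }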
@@ -208,7 +208,7 @@ public class LLLocalDictionary implements LLDictionary {
         }
         try {
           if (logger.isTraceEnabled()) {
-            logger.trace("Reading {}", LLUtils.toString(key));
+            logger.trace("Reading {}", LLUtils.toStringSafe(key));
           }
           return dbGet(cfh, resolveSnapshot(snapshot), key.retain(), existsAlmostCertainly);
         } finally {
@@ -217,10 +217,10 @@ public class LLLocalDictionary implements LLDictionary {
           }
         }
       })
-      .onErrorMap(cause -> new IOException("Failed to read " + LLUtils.toString(key), cause))
       .subscribeOn(dbScheduler)
-      .doFirst(() -> key.retain())
-      .doFinally(s -> key.release());
+      .onErrorMap(cause -> new IOException("Failed to read " + LLUtils.toStringSafe(key), cause))
+      .doFirst(key::retain)
+      .doAfterTerminate(key::release);
     } finally {
       key.release();
     }
@@ -368,16 +368,14 @@ public class LLLocalDictionary implements LLDictionary {
       return Mono
         .defer(() -> {
           if (range.isSingle()) {
-            return this
-              .containsKey(snapshot, range.getSingle().retain());
+            return this.containsKey(snapshot, range.getSingle().retain());
           } else {
-            return this
-              .containsRange(snapshot, range.retain());
+            return this.containsRange(snapshot, range.retain());
           }
         })
         .map(isContained -> !isContained)
-        .doFirst(() -> range.retain())
-        .doFinally(s -> range.release());
+        .doFirst(range::retain)
+        .doAfterTerminate(range::release);
     } finally {
       range.release();
     }
@@ -425,8 +423,8 @@ public class LLLocalDictionary implements LLDictionary {
       })
       .onErrorMap(cause -> new IOException("Failed to read range " + range.toString(), cause))
       .subscribeOn(dbScheduler)
-      .doFirst(() -> range.retain())
-      .doFinally(s -> range.release());
+      .doFirst(range::retain)
+      .doAfterTerminate(range::release);
     } finally {
       range.release();
     }
@@ -464,10 +462,10 @@ public class LLLocalDictionary implements LLDictionary {
          }
        }
      })
-      .onErrorMap(cause -> new IOException("Failed to read " + LLUtils.toString(key), cause))
+      .onErrorMap(cause -> new IOException("Failed to read " + LLUtils.toStringSafe(key), cause))
      .subscribeOn(dbScheduler)
-      .doFirst(() -> key.retain())
-      .doFinally(s -> key.release());
+      .doFirst(key::retain)
+      .doAfterTerminate(key::release);
     } finally {
       key.release();
     }
@@ -492,7 +490,7 @@ public class LLLocalDictionary implements LLDictionary {
       }
       try {
         if (logger.isTraceEnabled()) {
-          logger.trace("Writing {}: {}", LLUtils.toString(key), LLUtils.toString(value));
+          logger.trace("Writing {}: {}", LLUtils.toStringSafe(key), LLUtils.toStringSafe(value));
         }
         dbPut(cfh, null, key.retain(), value.retain());
         return null;
@@ -503,14 +501,14 @@ public class LLLocalDictionary implements LLDictionary {
         }
       })
       .subscribeOn(dbScheduler)
-      .onErrorMap(cause -> new IOException("Failed to write " + LLUtils.toString(key), cause))
+      .onErrorMap(cause -> new IOException("Failed to write " + LLUtils.toStringSafe(key), cause))
     )
     .singleOrEmpty()
     .doFirst(() -> {
       key.retain();
       value.retain();
     })
-    .doFinally(s -> {
+    .doAfterTerminate(() -> {
       key.release();
       value.release();
     });
@@ -550,7 +548,7 @@ public class LLLocalDictionary implements LLDictionary {
       }
       try {
         if (logger.isTraceEnabled()) {
-          logger.trace("Reading {}", LLUtils.toString(key));
+          logger.trace("Reading {}", LLUtils.toStringSafe(key));
         }
         while (true) {
           @Nullable ByteBuf prevData;
@@ -597,7 +595,7 @@ public class LLLocalDictionary implements LLDictionary {
             }
           }
           if (logger.isTraceEnabled()) {
-            logger.trace("Deleting {}", LLUtils.toString(key));
+            logger.trace("Deleting {}", LLUtils.toStringSafe(key));
           }
           dbDelete(cfh, null, key.retain());
         } else if (newData != null
@@ -615,7 +613,7 @@ public class LLLocalDictionary implements LLDictionary {
             }
           }
           if (logger.isTraceEnabled()) {
-            logger.trace("Writing {}: {}", LLUtils.toString(key), LLUtils.toString(newData));
+            logger.trace("Writing {}: {}", LLUtils.toStringSafe(key), LLUtils.toStringSafe(newData));
          }
          dbPut(cfh, null, key.retain(), newData.retain());
        }
@@ -646,10 +644,10 @@ public class LLLocalDictionary implements LLDictionary {
          }
        }
      })
-      .onErrorMap(cause -> new IOException("Failed to read or write " + (key.refCnt() > 0 ? LLUtils.toString(key) : "(released)"), cause))
+      .onErrorMap(cause -> new IOException("Failed to read or write " + LLUtils.toStringSafe(key), cause))
      .subscribeOn(dbScheduler)
-      .doFirst(() -> key.retain())
-      .doFinally(s -> key.release());
+      .doFirst(key::retain)
+      .doAfterTerminate(key::release);
     } finally {
       key.release();
     }
@@ -677,7 +675,7 @@ public class LLLocalDictionary implements LLDictionary {
       }
       try {
         if (logger.isTraceEnabled()) {
-          logger.trace("Reading {}", LLUtils.toString(key));
+          logger.trace("Reading {}", LLUtils.toStringSafe(key));
         }
         while (true) {
           @Nullable ByteBuf prevData;
@@ -724,7 +722,7 @@ public class LLLocalDictionary implements LLDictionary {
            }
          }
          if (logger.isTraceEnabled()) {
-            logger.trace("Deleting {}", LLUtils.toString(key));
+            logger.trace("Deleting {}", LLUtils.toStringSafe(key));
          }
          dbDelete(cfh, null, key.retain());
        } else if (newData != null
@@ -742,7 +740,7 @@ public class LLLocalDictionary implements LLDictionary {
            }
          }
          if (logger.isTraceEnabled()) {
-            logger.trace("Writing {}: {}", LLUtils.toString(key), LLUtils.toString(newData));
+            logger.trace("Writing {}: {}", LLUtils.toStringSafe(key), LLUtils.toStringSafe(newData));
          }
          dbPut(cfh, null, key.retain(), newData.retain());
        }
@@ -767,10 +765,10 @@ public class LLLocalDictionary implements LLDictionary {
          }
        }
      })
-      .onErrorMap(cause -> new IOException("Failed to read or write " + (key.refCnt() > 0 ? LLUtils.toString(key) : "(released)"), cause))
+      .onErrorMap(cause -> new IOException("Failed to read or write " + LLUtils.toStringSafe(key), cause))
      .subscribeOn(dbScheduler)
-      .doFirst(() -> key.retain())
-      .doFinally(s -> key.release());
+      .doFirst(key::retain)
+      .doAfterTerminate(key::release);
     } finally {
       key.release();
     }
@@ -812,7 +810,7 @@ public class LLLocalDictionary implements LLDictionary {
       }
       try {
         if (logger.isTraceEnabled()) {
-          logger.trace("Deleting {}", LLUtils.toString(key));
+          logger.trace("Deleting {}", LLUtils.toStringSafe(key));
         }
         dbDelete(cfh, null, key.retain());
         return null;
@@ -822,13 +820,13 @@ public class LLLocalDictionary implements LLDictionary {
           }
         }
       })
-      .onErrorMap(cause -> new IOException("Failed to delete " + LLUtils.toString(key), cause))
+      .onErrorMap(cause -> new IOException("Failed to delete " + LLUtils.toStringSafe(key), cause))
       .subscribeOn(dbScheduler)
       .then(Mono.empty())
     )
     .singleOrEmpty()
-    .doFirst(() -> key.retain())
-    .doFinally(s -> key.release());
+    .doFirst(key::retain)
+    .doAfterTerminate(key::release);
     } finally {
       key.release();
     }
@@ -844,7 +842,7 @@ public class LLLocalDictionary implements LLDictionary {
           .containsKey(null, key.retain())
           .single()
           .map(LLUtils::booleanToResponseByteBuffer)
-          .doFinally(s -> {
+          .doAfterTerminate(() -> {
             assert key.refCnt() > 0;
           });
       case PREVIOUS_VALUE:
@@ -884,7 +882,7 @@ public class LLLocalDictionary implements LLDictionary {
              }
            }
          })
-          .onErrorMap(cause -> new IOException("Failed to read " + LLUtils.toString(key), cause))
+          .onErrorMap(cause -> new IOException("Failed to read " + LLUtils.toStringSafe(key), cause))
          .subscribeOn(dbScheduler);
       case VOID:
         return Mono.empty();
@@ -892,8 +890,8 @@ public class LLLocalDictionary implements LLDictionary {
         return Mono.error(new IllegalStateException("Unexpected value: " + resultType));
       }
     })
-    .doFirst(() -> key.retain())
-    .doFinally(s -> key.release());
+    .doFirst(key::retain)
+    .doAfterTerminate(key::release);
     } finally {
       key.release();
     }
@@ -956,7 +954,7 @@ public class LLLocalDictionary implements LLDictionary {
           .flatMapMany(Flux::fromIterable)
           .onErrorMap(cause -> new IOException("Failed to read keys "
             + Arrays.deepToString(keysWindow.toArray(ByteBuf[]::new)), cause))
-          .doFinally(s -> keysWindow.forEach(ReferenceCounted::release))
+          .doAfterTerminate(() -> keysWindow.forEach(ReferenceCounted::release))
         )
       )
       .doOnDiscard(Entry.class, discardedEntry -> {
@@ -1081,8 +1079,8 @@ public class LLLocalDictionary implements LLDictionary {
           return getRangeMulti(snapshot, range.retain());
         }
       })
-      .doFirst(() -> range.retain())
-      .doFinally(s -> range.release());
+      .doFirst(range::retain)
+      .doAfterTerminate(range::release);
     } finally {
       range.release();
     }
@@ -1101,8 +1099,8 @@ public class LLLocalDictionary implements LLDictionary {
           return getRangeMultiGrouped(snapshot, range.retain(), prefixLength);
         }
       })
-      .doFirst(() -> range.retain())
-      .doFinally(s -> range.release());
+      .doFirst(range::retain)
+      .doAfterTerminate(range::release);
     } finally {
       range.release();
     }
@@ -1114,8 +1112,8 @@ public class LLLocalDictionary implements LLDictionary {
       .defer(() -> this.get(snapshot, key.retain(), existsAlmostCertainly))
       .map(value -> Map.entry(key.retain(), value))
       .flux()
-      .doFirst(() -> key.retain())
-      .doFinally(s -> key.release());
+      .doFirst(key::retain)
+      .doAfterTerminate(key::release);
     } finally {
       key.release();
     }
@@ -1136,8 +1134,8 @@ public class LLLocalDictionary implements LLDictionary {
         castedEntry.getValue().release();
       })
       .subscribeOn(dbScheduler)
-      .doFirst(() -> range.retain())
-      .doFinally(s -> range.release());
+      .doFirst(range::retain)
+      .doAfterTerminate(range::release);
     } finally {
       range.release();
     }
@@ -1159,8 +1157,8 @@ public class LLLocalDictionary implements LLDictionary {
         LLLocalGroupedReactiveRocksIterator::release
       )
       .subscribeOn(dbScheduler)
-      .doFirst(() -> range.retain())
-      .doFinally(s -> range.release());
+      .doFirst(range::retain)
+      .doAfterTerminate(range::release);
     } finally {
       range.release();
     }
@@ -1177,8 +1175,8 @@ public class LLLocalDictionary implements LLDictionary {
          return this.getRangeKeysMulti(snapshot, range.retain());
        }
      })
-      .doFirst(() -> range.retain())
-      .doFinally(s -> range.release());
+      .doFirst(range::retain)
+      .doAfterTerminate(range::release);
     } finally {
       range.release();
     }
@@ -1201,8 +1199,8 @@ public class LLLocalDictionary implements LLDictionary {
        LLLocalGroupedReactiveRocksIterator::release
      )
      .subscribeOn(dbScheduler)
-      .doFirst(() -> range.retain())
-      .doFinally(s -> range.release());
+      .doFirst(range::retain)
+      .doAfterTerminate(range::release);
     } finally {
       range.release();
     }
@@ -1226,8 +1224,8 @@ public class LLLocalDictionary implements LLDictionary {
        LLLocalKeyPrefixReactiveRocksIterator::release
      )
      .subscribeOn(dbScheduler)
-      .doFirst(() -> range.retain())
-      .doFinally(s -> range.release());
+      .doFirst(range::retain)
+      .doAfterTerminate(range::release);
     } finally {
       range.release();
     }
@@ -1246,8 +1244,8 @@ public class LLLocalDictionary implements LLDictionary {
        }
      })
      .doOnDiscard(ByteBuf.class, ReferenceCounted::release)
-      .doFirst(() -> key.retain())
-      .doFinally(s -> key.release());
+      .doFirst(key::retain)
+      .doAfterTerminate(key::release);
     } finally {
       key.release();
     }
@ -1263,8 +1261,8 @@ public class LLLocalDictionary implements LLDictionary {
|
|||||||
)
|
)
|
||||||
.doOnDiscard(ByteBuf.class, ReferenceCounted::release)
|
.doOnDiscard(ByteBuf.class, ReferenceCounted::release)
|
||||||
.subscribeOn(dbScheduler)
|
.subscribeOn(dbScheduler)
|
||||||
.doFirst(() -> range.retain())
|
.doFirst(range::retain)
|
||||||
.doFinally(s -> range.release());
|
.doAfterTerminate(range::release);
|
||||||
} finally {
|
} finally {
|
||||||
range.release();
|
range.release();
|
||||||
}
|
}
|
||||||
@ -1387,8 +1385,8 @@ public class LLLocalDictionary implements LLDictionary {
|
|||||||
)
|
)
|
||||||
.then()
|
.then()
|
||||||
.onErrorMap(cause -> new IOException("Failed to write range", cause))
|
.onErrorMap(cause -> new IOException("Failed to write range", cause))
|
||||||
.doFirst(() -> range.retain())
|
.doFirst(range::retain)
|
||||||
.doFinally(s -> range.release());
|
.doAfterTerminate(range::release);
|
||||||
} else {
|
} else {
|
||||||
if (USE_WRITE_BATCHES_IN_SET_RANGE) {
|
if (USE_WRITE_BATCHES_IN_SET_RANGE) {
|
||||||
return Mono.fromCallable(() -> {
|
return Mono.fromCallable(() -> {
|
||||||
@ -1415,8 +1413,8 @@ public class LLLocalDictionary implements LLDictionary {
|
|||||||
.then(Mono.<Void>empty())
|
.then(Mono.<Void>empty())
|
||||||
)
|
)
|
||||||
.onErrorMap(cause -> new IOException("Failed to write range", cause))
|
.onErrorMap(cause -> new IOException("Failed to write range", cause))
|
||||||
.doFirst(() -> range.retain())
|
.doFirst(range::retain)
|
||||||
.doFinally(s -> range.release());
|
.doAfterTerminate(range::release);
|
||||||
}
|
}
|
||||||
} finally {
|
} finally {
|
||||||
range.release();
|
range.release();
|
||||||
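A caveat worth noting while reading these hunks: `doFinally` and `doAfterTerminate` are not interchangeable in general. `doFinally` also fires on cancellation, while `doAfterTerminate` fires only after `onComplete` or `onError`. The sketch below (plain Reactor, no dbengine types) makes that difference observable; presumably the commit accepts the trade-off because releasing the shared buffers on cancellation is what broke resubscription, but that reading is an inference, not something the diff states:

```java
import java.time.Duration;
import reactor.core.publisher.Flux;

public class TerminateVsFinally {
	public static void main(String[] args) {
		Flux<Long> ticks = Flux.interval(Duration.ofMillis(10))
				.doAfterTerminate(() -> System.out.println("afterTerminate"))   // skipped on cancel
				.doFinally(signal -> System.out.println("finally: " + signal)); // fires on cancel too

		// take(3) completes downstream and cancels upstream, so this prints
		// "finally: cancel" but never "afterTerminate".
		ticks.take(3).blockLast();
	}
}
```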
@@ -1630,61 +1628,64 @@ public class LLLocalDictionary implements LLDictionary {
	@Override
	public Mono<Long> sizeRange(@Nullable LLSnapshot snapshot, LLRange range, boolean fast) {
		try {
-			Mono<Long> result;
-			if (range.isAll()) {
-				result = Mono
-						.fromCallable(() -> fast ? fastSizeAll(snapshot) : exactSizeAll(snapshot))
-						.onErrorMap(IOException::new)
-						.subscribeOn(dbScheduler);
-			} else {
-				result = Mono
-						.fromCallable(() -> {
-							var readOpts = resolveSnapshot(snapshot);
-							readOpts.setFillCache(false);
-							readOpts.setVerifyChecksums(VERIFY_CHECKSUMS_WHEN_NOT_NEEDED);
-							ReleasableSlice minBound;
-							if (range.hasMin()) {
-								minBound = setIterateBound(readOpts, IterateBound.LOWER, range.getMin().retain());
-							} else {
-								minBound = emptyReleasableSlice();
-							}
-							try {
-								ReleasableSlice maxBound;
-								if (range.hasMax()) {
-									maxBound = setIterateBound(readOpts, IterateBound.UPPER, range.getMax().retain());
-								} else {
-									maxBound = emptyReleasableSlice();
-								}
-								try {
-									if (fast) {
-										readOpts.setIgnoreRangeDeletions(true);
-
-									}
-									try (var rocksIterator = db.newIterator(cfh, readOpts)) {
-										if (!LLLocalDictionary.PREFER_SEEK_TO_FIRST && range.hasMin()) {
-											rocksIterSeekTo(rocksIterator, range.getMin().retain());
-										} else {
-											rocksIterator.seekToFirst();
-										}
-										long i = 0;
-										while (rocksIterator.isValid()) {
-											rocksIterator.next();
-											i++;
-										}
-										return i;
-									}
-								} finally {
-									maxBound.release();
-								}
-							} finally {
-								minBound.release();
-							}
-						})
-						.onErrorMap(cause -> new IOException("Failed to get size of range "
-								+ range.toString(), cause))
-						.subscribeOn(dbScheduler);
-			}
-			return result.doFirst(() -> range.retain()).doFinally(s -> range.release());
+			return Mono
+					.defer(() -> {
+						if (range.isAll()) {
+							return Mono
+									.fromCallable(() -> fast ? fastSizeAll(snapshot) : exactSizeAll(snapshot))
+									.onErrorMap(IOException::new)
+									.subscribeOn(dbScheduler);
+						} else {
+							return Mono
+									.fromCallable(() -> {
+										var readOpts = resolveSnapshot(snapshot);
+										readOpts.setFillCache(false);
+										readOpts.setVerifyChecksums(VERIFY_CHECKSUMS_WHEN_NOT_NEEDED);
+										ReleasableSlice minBound;
+										if (range.hasMin()) {
+											minBound = setIterateBound(readOpts, IterateBound.LOWER, range.getMin().retain());
+										} else {
+											minBound = emptyReleasableSlice();
+										}
+										try {
+											ReleasableSlice maxBound;
+											if (range.hasMax()) {
+												maxBound = setIterateBound(readOpts, IterateBound.UPPER, range.getMax().retain());
+											} else {
+												maxBound = emptyReleasableSlice();
+											}
+											try {
+												if (fast) {
+													readOpts.setIgnoreRangeDeletions(true);
+
+												}
+												try (var rocksIterator = db.newIterator(cfh, readOpts)) {
+													if (!LLLocalDictionary.PREFER_SEEK_TO_FIRST && range.hasMin()) {
+														rocksIterSeekTo(rocksIterator, range.getMin().retain());
+													} else {
+														rocksIterator.seekToFirst();
+													}
+													long i = 0;
+													while (rocksIterator.isValid()) {
+														rocksIterator.next();
+														i++;
+													}
+													return i;
+												}
+											} finally {
+												maxBound.release();
+											}
+										} finally {
+											minBound.release();
+										}
+									})
+									.onErrorMap(cause -> new IOException("Failed to get size of range "
+											+ range.toString(), cause))
+									.subscribeOn(dbScheduler);
+						}
+					})
+					.doFirst(range::retain)
+					.doAfterTerminate(range::release);
		} finally {
			range.release();
		}
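The largest hunk rewrites `sizeRange` so that the `range.isAll()` branch is chosen inside `Mono.defer(...)` instead of eagerly when the method is called, and the pre-built `result` local disappears. `defer` re-runs its supplier for every subscriber, which is exactly the property the commit title is after. A contrived, self-contained illustration of that difference (all names here are invented):

```java
import java.util.concurrent.atomic.AtomicBoolean;
import reactor.core.publisher.Mono;

public class DeferBranch {

	static final AtomicBoolean all = new AtomicBoolean(false);

	// Hypothetical stand-in for the isAll()/iterator branch in sizeRange().
	static Mono<String> chooseBranch() {
		return all.get() ? Mono.just("fast path") : Mono.just("iterator path");
	}

	public static void main(String[] args) {
		Mono<String> eager = chooseBranch();                        // branch fixed at assembly time
		Mono<String> lazy = Mono.defer(DeferBranch::chooseBranch);  // branch re-chosen per subscription

		all.set(true);
		System.out.println(eager.block()); // "iterator path" — decided before the flip
		System.out.println(lazy.block());  // "fast path" — decided at subscribe time
	}
}
```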
@@ -1738,8 +1739,8 @@ public class LLLocalDictionary implements LLDictionary {
					}
				})
				.subscribeOn(dbScheduler)
-				.doFirst(() -> range.retain())
-				.doFinally(s -> range.release());
+				.doFirst(range::retain)
+				.doAfterTerminate(range::release);
		} finally {
			range.release();
		}
@@ -1785,8 +1786,8 @@ public class LLLocalDictionary implements LLDictionary {
					}
				})
				.subscribeOn(dbScheduler)
-				.doFirst(() -> range.retain())
-				.doFinally(s -> range.release());
+				.doFirst(range::retain)
+				.doAfterTerminate(range::release);
		} finally {
			range.release();
		}
@@ -1934,8 +1935,8 @@ public class LLLocalDictionary implements LLDictionary {
				})
				.onErrorMap(cause -> new IOException("Failed to delete " + range.toString(), cause))
				.subscribeOn(dbScheduler)
-				.doFirst(() -> range.retain())
-				.doFinally(s -> range.release());
+				.doFirst(range::retain)
+				.doAfterTerminate(range::release);
		} finally {
			range.release();
		}
@@ -8,6 +8,7 @@ import it.cavallium.dbengine.database.LLRange;
import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.database.collections.DatabaseMapDictionaryDeep;
import it.cavallium.dbengine.database.disk.LLLocalDictionary.ReleasableSlice;
+import org.jetbrains.annotations.NotNull;
import org.rocksdb.ColumnFamilyHandle;
import org.rocksdb.ReadOptions;
import org.rocksdb.RocksDB;
@@ -43,7 +44,7 @@ public abstract class LLLocalReactiveRocksIterator<T> {

	public Flux<T> flux() {
		return Flux
-				.generate(() -> {
+				.<T, @NotNull Tuple3<RocksIterator, ReleasableSlice, ReleasableSlice>>generate(() -> {
					var readOptions = new ReadOptions(this.readOptions);
					if (!range.hasMin() || !range.hasMax()) {
						readOptions.setReadaheadSize(2 * 1024 * 1024);
@@ -84,7 +85,9 @@ public abstract class LLLocalReactiveRocksIterator<T> {
					rocksIterator.close();
					tuple.getT2().release();
					tuple.getT3().release();
-				});
+				})
+				.doFirst(range::retain)
+				.doAfterTerminate(range::release);
	}

	public abstract T getEntry(ByteBuf key, ByteBuf value);
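`flux()` now spells out the generator's state type (`Tuple3<RocksIterator, ReleasableSlice, ReleasableSlice>`), which Java's inference tends to need once more operators are chained onto `generate`, and appends the same retain/release pair. The relevant property of `Flux.generate` is that its state supplier runs once per subscriber, so every resubscription gets its own fresh RocksDB iterator and cleanup. A generic sketch of that per-subscription behavior (standalone, not the repo's types):

```java
import java.util.Iterator;
import java.util.List;
import reactor.core.publisher.Flux;

public class GeneratePerSubscription {
	public static void main(String[] args) {
		// The state supplier runs once per subscriber, so each subscription
		// gets its own iterator — the property flux() above relies on to
		// make resubscription safe.
		Flux<Integer> numbers = Flux.<Integer, Iterator<Integer>>generate(
				() -> List.of(1, 2, 3).iterator(),   // fresh state per subscribe
				(it, sink) -> {
					if (it.hasNext()) {
						sink.next(it.next());
					} else {
						sink.complete();
					}
					return it;
				},
				it -> { /* per-subscription cleanup would go here */ });

		System.out.println(numbers.collectList().block()); // [1, 2, 3]
		System.out.println(numbers.collectList().block()); // [1, 2, 3] again
	}
}
```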
@@ -283,6 +283,6 @@ public class LuceneUtils {
			DatabaseMapDictionaryDeep<T, Map<U, V>, DatabaseMapDictionary<U, V>> dictionaryDeep) {
		return entry -> dictionaryDeep
				.at(snapshot, entry.getKey())
-				.flatMap(sub -> sub.getValue(snapshot, entry.getValue()).doFinally(s -> sub.release()));
+				.flatMap(sub -> sub.getValue(snapshot, entry.getValue()).doAfterTerminate(sub::release));
	}
}
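The LuceneUtils change applies the same idea one level down: a refcounted sub-stage obtained inside `flatMap` is released when its inner pipeline terminates, rather than on every signal including cancellation. A rough, self-contained analogue of that shape (the `Stage` record and its methods are made up for illustration):

```java
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.util.CharsetUtil;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

public class ReleaseInnerStage {

	// Hypothetical stand-in for a refcounted sub-stage like the ones
	// LuceneUtils resolves with dictionaryDeep.at(...).
	record Stage(ByteBuf buf) {
		Mono<String> read() {
			return Mono.fromCallable(() -> buf.toString(CharsetUtil.UTF_8));
		}
		void release() {
			buf.release();
		}
	}

	public static void main(String[] args) {
		Flux<String> values = Flux.just("a", "b")
				.flatMap(key -> Mono
						.fromCallable(() -> new Stage(Unpooled.copiedBuffer(key, CharsetUtil.UTF_8)))
						.flatMap(stage -> stage.read()
								// release the stage once its inner read terminates
								.doAfterTerminate(stage::release)));

		System.out.println(values.collectList().block()); // [a, b]
	}
}
```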
@@ -88,7 +88,7 @@ public class TestDictionaryMap {
				.flatMap(map -> map
						.putValue(key, value)
						.then(map.getValue(null, key))
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (shouldFail) {
@@ -105,9 +105,9 @@ public class TestDictionaryMap {
				.create(tempDb(db -> tempDictionary(db, updateMode)
						.map(dict -> tempDatabaseMapDictionaryMap(dict, dbType, 5))
						.flatMap(map -> map
-								.at(null, key).flatMap(v -> v.set(value).doFinally(s -> v.release()))
-								.then(map.at(null, key).flatMap(v -> v.get(null).doFinally(s -> v.release())))
-								.doFinally(s -> map.release())
+								.at(null, key).flatMap(v -> v.set(value).doAfterTerminate(v::release))
+								.then(map.at(null, key).flatMap(v -> v.get(null).doAfterTerminate(v::release)))
+								.doAfterTerminate(map::release)
						)
		));
		if (shouldFail) {
@@ -129,7 +129,7 @@ public class TestDictionaryMap {
								map.putValueAndGetPrevious(key, value),
								map.putValueAndGetPrevious(key, value)
						)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (shouldFail) {
@@ -151,7 +151,7 @@ public class TestDictionaryMap {
								map.putValue(key, value).then(map.removeAndGetPrevious(key)),
								map.removeAndGetPrevious(key)
						)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (shouldFail) {
@@ -173,7 +173,7 @@ public class TestDictionaryMap {
								map.putValue(key, value).then(map.removeAndGetStatus(key)),
								map.removeAndGetStatus(key)
						)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (shouldFail) {
@@ -215,7 +215,7 @@ public class TestDictionaryMap {
									return value;
								})
						)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (updateMode == UpdateMode.DISALLOW || shouldFail) {
@@ -257,7 +257,7 @@ public class TestDictionaryMap {
									return value;
								}).then(map.getValue(null, key))
						)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (updateMode == UpdateMode.DISALLOW || shouldFail) {
@@ -281,7 +281,7 @@ public class TestDictionaryMap {
								map.remove(key),
								map.putValueAndGetChanged(key, "error?").single()
						)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (shouldFail) {
@@ -339,7 +339,7 @@ public class TestDictionaryMap {
								map.putMulti(Flux.fromIterable(entries.entrySet())).then(Mono.empty()),
								map.getMulti(null, Flux.fromIterable(entries.keySet()))
						)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (shouldFail) {
@@ -363,7 +363,7 @@ public class TestDictionaryMap {
				.flatMapMany(map -> map
						.setAllValues(Flux.fromIterable(entries.entrySet()))
						.thenMany(map.getMulti(null, Flux.fromIterable(entries.keySet())))
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (shouldFail) {
@@ -389,7 +389,7 @@ public class TestDictionaryMap {
								map.setAllValuesAndGetPrevious(Flux.fromIterable(entries.entrySet())),
								map.setAllValuesAndGetPrevious(Flux.fromIterable(entries.entrySet()))
						)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (shouldFail) {
@@ -415,7 +415,7 @@ public class TestDictionaryMap {
								map.set(entries).then(Mono.empty()),
								map.getMulti(null, Flux.fromIterable(entries.keySet()))
						)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (shouldFail) {
@@ -450,7 +450,7 @@ public class TestDictionaryMap {
									removalMono.then(Mono.empty()),
									map.setAndGetChanged(entries).single()
							)
-							.doFinally(s -> map.release());
+							.doAfterTerminate(map::release);
				})
		));
		if (shouldFail) {
@@ -471,7 +471,7 @@ public class TestDictionaryMap {
						.concat(map.setAndGetPrevious(entries), map.setAndGetPrevious(entries))
						.map(Map::entrySet)
						.flatMap(Flux::fromIterable)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (shouldFail) {
@@ -496,7 +496,7 @@ public class TestDictionaryMap {
						.concat(map.set(entries).then(Mono.empty()), map.clearAndGetPrevious(), map.get(null))
						.map(Map::entrySet)
						.flatMap(Flux::fromIterable)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (shouldFail) {
@@ -522,7 +522,7 @@ public class TestDictionaryMap {
								map.putMulti(Flux.fromIterable(entries.entrySet())).then(Mono.empty()),
								map.getAllValues(null)
						)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (shouldFail) {
@@ -550,7 +550,7 @@ public class TestDictionaryMap {
										.map(Map::entrySet)
										.flatMapMany(Flux::fromIterable)
						)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (shouldFail) {
@@ -580,10 +580,10 @@ public class TestDictionaryMap {
										.getValue()
										.get(null)
										.map(val -> Map.entry(stage.getKey(), val))
-										.doFinally(s -> stage.getValue().release())
+										.doAfterTerminate(() -> stage.getValue().release())
								)
						)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (shouldFail) {
@@ -610,7 +610,7 @@ public class TestDictionaryMap {
								map.putMulti(Flux.fromIterable(entries.entrySet())).then(Mono.empty()),
								map.isEmpty(null)
						)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
				.flatMap(val -> shouldFail ? Mono.empty() : Mono.just(val))
		));
@@ -636,7 +636,7 @@ public class TestDictionaryMap {
								map.clear().then(Mono.empty()),
								map.isEmpty(null)
						)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
				.flatMap(val -> shouldFail ? Mono.empty() : Mono.just(val))
		));
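The test changes are mechanical (`doFinally(s -> x.release())` becomes `doAfterTerminate(x::release)`), but they are what lets the fixtures subscribe to one pipeline more than once, for instance via `Flux.concat` of the same operation. A rough analogue with reactor-test, using made-up names rather than the repo's fixtures:

```java
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.util.CharsetUtil;
import reactor.core.publisher.Flux;
import reactor.test.StepVerifier;

public class ResubscribeTest {
	public static void main(String[] args) {
		ByteBuf key = Unpooled.copiedBuffer("key", CharsetUtil.UTF_8);

		Flux<String> read = Flux
				.defer(() -> Flux.just(key.toString(CharsetUtil.UTF_8)))
				.doFirst(key::retain)
				.doAfterTerminate(key::release);

		// The same publisher can be verified twice: each subscription
		// retains the buffer on entry and releases it on termination.
		StepVerifier.create(read).expectNext("key").verifyComplete();
		StepVerifier.create(read).expectNext("key").verifyComplete();

		key.release();
	}
}
```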
@@ -155,7 +155,7 @@ public class TestDictionaryMapDeep {
				.flatMap(map -> map
						.putValue(key, value)
						.then(map.getValue(null, key))
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (shouldFail) {
@@ -177,7 +177,7 @@ public class TestDictionaryMapDeep {
				.flatMapMany(map -> map
						.putValue(key, value)
						.thenMany(map.getAllValues(null))
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (shouldFail) {
@@ -198,13 +198,13 @@ public class TestDictionaryMapDeep {
						.at(null, key)
						.flatMap(v -> v
								.set(value)
-								.doFinally(s -> v.release())
+								.doAfterTerminate(v::release)
						)
						.then(map
								.at(null, "capra")
								.flatMap(v -> v
										.set(Map.of("normal", "123", "ormaln", "456"))
-										.doFinally(s -> v.release())
+										.doAfterTerminate(v::release)
								)
						)
						.thenMany(map
@@ -212,10 +212,10 @@ public class TestDictionaryMapDeep {
								.flatMap(v -> v.getValue()
										.getAllValues(null)
										.map(result -> Tuples.of(v.getKey(), result.getKey(), result.getValue()))
-										.doFinally(s -> v.getValue().release())
+										.doAfterTerminate(() -> v.getValue().release())
								)
						)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (shouldFail) {
@@ -239,9 +239,9 @@ public class TestDictionaryMapDeep {
				.create(tempDb(db -> tempDictionary(db, updateMode)
						.map(dict -> tempDatabaseMapDictionaryDeepMap(dict, 5, 6))
						.flatMap(map -> map
-								.at(null, key1).flatMap(v -> v.putValue(key2, value).doFinally(s -> v.release()))
-								.then(map.at(null, key1).flatMap(v -> v.getValue(null, key2).doFinally(s -> v.release())))
-								.doFinally(s -> map.release())
+								.at(null, key1).flatMap(v -> v.putValue(key2, value).doAfterTerminate(v::release))
+								.then(map.at(null, key1).flatMap(v -> v.getValue(null, key2).doAfterTerminate(v::release)))
+								.doAfterTerminate(map::release)
						)
		));
		if (shouldFail) {
@@ -265,7 +265,7 @@ public class TestDictionaryMapDeep {
								map.putValueAndGetPrevious(key, value),
								map.putValueAndGetPrevious(key, value)
						)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (shouldFail) {
@@ -287,22 +287,22 @@ public class TestDictionaryMapDeep {
								.at(null, key1)
								.flatMap(v -> v
										.putValueAndGetPrevious(key2, "error?")
-										.doFinally(s -> v.release())
+										.doAfterTerminate(v::release)
								),
								map
										.at(null, key1)
										.flatMap(v -> v
												.putValueAndGetPrevious(key2, value)
-												.doFinally(s -> v.release())
+												.doAfterTerminate(v::release)
										),
								map
										.at(null, key1)
										.flatMap(v -> v
												.putValueAndGetPrevious(key2, value)
-												.doFinally(s -> v.release())
+												.doAfterTerminate(v::release)
										)
						)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (shouldFail) {
@@ -324,7 +324,7 @@ public class TestDictionaryMapDeep {
								map.putValue(key, value).then(map.removeAndGetPrevious(key)),
								map.removeAndGetPrevious(key)
						)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (shouldFail) {
@@ -347,22 +347,22 @@ public class TestDictionaryMapDeep {
								.flatMap(v -> v
										.putValue(key2, "error?")
										.then(v.removeAndGetPrevious(key2))
-										.doFinally(s -> v.release())
+										.doAfterTerminate(v::release)
								),
								map
										.at(null, key1)
										.flatMap(v -> v
												.putValue(key2, value)
												.then(v.removeAndGetPrevious(key2))
-												.doFinally(s -> v.release())
+												.doAfterTerminate(v::release)
										),
								map
										.at(null, key1)
										.flatMap(v -> v.removeAndGetPrevious(key2)
-												.doFinally(s -> v.release())
+												.doAfterTerminate(v::release)
										)
						)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (shouldFail) {
@@ -384,7 +384,7 @@ public class TestDictionaryMapDeep {
								map.putValue(key, value).then(map.removeAndGetStatus(key)),
								map.removeAndGetStatus(key)
						)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (shouldFail) {
@@ -407,22 +407,22 @@ public class TestDictionaryMapDeep {
								.flatMap(v -> v
										.putValue(key2, "error?")
										.then(v.removeAndGetStatus(key2))
-										.doFinally(s -> v.release())
+										.doAfterTerminate(v::release)
								),
								map
										.at(null, key1)
										.flatMap(v -> v
												.putValue(key2, value)
												.then(v.removeAndGetStatus(key2))
-												.doFinally(s -> v.release())
+												.doAfterTerminate(v::release)
										),
								map
										.at(null, key1)
										.flatMap(v -> v.removeAndGetStatus(key2)
-												.doFinally(s -> v.release())
+												.doAfterTerminate(v::release)
										)
						)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (shouldFail) {
@@ -464,7 +464,7 @@ public class TestDictionaryMapDeep {
									return value;
								})
						)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (updateMode != UpdateMode.ALLOW_UNSAFE || shouldFail) {
@@ -489,28 +489,28 @@ public class TestDictionaryMapDeep {
								.at(null, key1)
								.flatMap(v -> v
										.updateValue(key2, prev -> prev)
-										.doFinally(s -> v.release())
+										.doAfterTerminate(v::release)
								),
								map
										.at(null, key1)
										.flatMap(v -> v
												.updateValue(key2, prev -> value)
-												.doFinally(s -> v.release())
+												.doAfterTerminate(v::release)
										),
								map
										.at(null, key1)
										.flatMap(v -> v
												.updateValue(key2, prev -> value)
-												.doFinally(s -> v.release())
+												.doAfterTerminate(v::release)
										),
								map
										.at(null, key1)
										.flatMap(v -> v
												.updateValue(key2, prev -> null)
-												.doFinally(s -> v.release())
+												.doAfterTerminate(v::release)
										)
						)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (updateMode == UpdateMode.DISALLOW || shouldFail) {
@@ -552,7 +552,7 @@ public class TestDictionaryMapDeep {
									return value;
								}).then(map.getValue(null, key))
						)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (updateMode != UpdateMode.ALLOW_UNSAFE || shouldFail) {
@@ -579,7 +579,7 @@ public class TestDictionaryMapDeep {
										.updateValue(key2, prev -> prev)
										.then(v.getValue(null, key2))
										.defaultIfEmpty("empty")
-										.doFinally(s -> v.release())
+										.doAfterTerminate(v::release)
								),
								map
										.at(null, key1)
@@ -587,7 +587,7 @@ public class TestDictionaryMapDeep {
										.updateValue(key2, prev -> value)
										.then(v.getValue(null, key2))
										.defaultIfEmpty("empty")
-										.doFinally(s -> v.release())
+										.doAfterTerminate(v::release)
								),
								map
										.at(null, key1)
@@ -595,7 +595,7 @@ public class TestDictionaryMapDeep {
										.updateValue(key2, prev -> value)
										.then(v.getValue(null, key2))
										.defaultIfEmpty("empty")
-										.doFinally(s -> v.release())
+										.doAfterTerminate(v::release)
								),
								map
										.at(null, key1)
@@ -603,10 +603,10 @@ public class TestDictionaryMapDeep {
										.updateValue(key2, prev -> null)
										.then(v.getValue(null, key2))
										.defaultIfEmpty("empty")
-										.doFinally(s -> v.release())
+										.doAfterTerminate(v::release)
								)
						)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (updateMode == UpdateMode.DISALLOW || shouldFail) {
@@ -630,7 +630,7 @@ public class TestDictionaryMapDeep {
								map.remove(key),
								map.putValueAndGetChanged(key, Map.of("error?", "error.")).single()
						)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (shouldFail) {
@@ -683,7 +683,7 @@ public class TestDictionaryMapDeep {
								map.putMulti(Flux.fromIterable(entries.entrySet())).then(Mono.empty()),
								map.getMulti(null, Flux.fromIterable(entries.keySet()))
						)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (shouldFail) {
@@ -707,7 +707,7 @@ public class TestDictionaryMapDeep {
				.flatMapMany(map -> map
						.setAllValues(Flux.fromIterable(entries.entrySet()))
						.thenMany(map.getMulti(null, Flux.fromIterable(entries.keySet())))
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (shouldFail) {
@@ -733,7 +733,7 @@ public class TestDictionaryMapDeep {
								map.setAllValuesAndGetPrevious(Flux.fromIterable(entries.entrySet())),
								map.setAllValuesAndGetPrevious(Flux.fromIterable(entries.entrySet()))
						)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (shouldFail) {
@@ -759,7 +759,7 @@ public class TestDictionaryMapDeep {
								map.set(entries).then(Mono.empty()),
								map.getMulti(null, Flux.fromIterable(entries.keySet()))
						)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (shouldFail) {
@@ -793,7 +793,7 @@ public class TestDictionaryMapDeep {
									removalMono.then(Mono.empty()),
									map.setAndGetChanged(entries).single()
							)
-							.doFinally(s -> map.release());
+							.doAfterTerminate(map::release);
				})
		));
		if (shouldFail) {
@@ -817,7 +817,7 @@ public class TestDictionaryMapDeep {
						)
						.map(Map::entrySet)
						.flatMap(Flux::fromIterable)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (shouldFail) {
@@ -842,7 +842,7 @@ public class TestDictionaryMapDeep {
						.concat(map.set(entries).then(Mono.empty()), map.clearAndGetPrevious(), map.get(null))
						.map(Map::entrySet)
						.flatMap(Flux::fromIterable)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (shouldFail) {
@@ -868,7 +868,7 @@ public class TestDictionaryMapDeep {
								map.putMulti(Flux.fromIterable(entries.entrySet())).then(Mono.empty()),
								map.getAllValues(null)
						)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (shouldFail) {
@@ -896,7 +896,7 @@ public class TestDictionaryMapDeep {
										.map(Map::entrySet)
										.flatMapMany(Flux::fromIterable)
						)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (shouldFail) {
@@ -926,10 +926,10 @@ public class TestDictionaryMapDeep {
										.getValue()
										.get(null)
										.map(val -> Map.entry(stage.getKey(), val))
-										.doFinally(s -> stage.getValue().release())
+										.doAfterTerminate(() -> stage.getValue().release())
								)
						)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (shouldFail) {
@@ -955,7 +955,7 @@ public class TestDictionaryMapDeep {
								map.putMulti(Flux.fromIterable(entries.entrySet())).then(Mono.empty()),
								map.isEmpty(null)
						)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (shouldFail) {
@@ -979,7 +979,7 @@ public class TestDictionaryMapDeep {
								map.clear().then(Mono.empty()),
								map.isEmpty(null)
						)
-						.doFinally(s -> map.release())
+						.doAfterTerminate(map::release)
				)
		));
		if (shouldFail) {
@@ -101,14 +101,14 @@ public class TestDictionaryMapDeepHashMap {
				.create(tempDb(db -> tempDictionary(db, updateMode)
						.map(dict -> tempDatabaseMapDictionaryDeepMapHashMap(dict, 5))
						.flatMapMany(map -> map
-								.at(null, key1).flatMap(v -> v.putValue(key2, value).doFinally(s -> v.release()))
+								.at(null, key1).flatMap(v -> v.putValue(key2, value).doAfterTerminate(v::release))
								.thenMany(map
										.getAllValues(null)
										.map(Entry::getValue)
										.flatMap(maps -> Flux.fromIterable(maps.entrySet()))
										.map(Entry::getValue)
								)
-								.doFinally(s -> map.release())
+								.doAfterTerminate(map::release)
						)
		));
		if (shouldFail) {