Add netty statistics

parent bc12b22a4b
commit 1dcf5a1a9c
@@ -64,7 +64,7 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 		return dictionary
 				.getRange(resolveSnapshot(snapshot), range.retain(), existsAlmostCertainly)
 				.collectMap(
-						entry -> deserializeSuffix(stripPrefix(entry.getKey())),
+						entry -> deserializeSuffix(stripPrefix(entry.getKey(), false)),
 						entry -> deserialize(entry.getValue()),
 						HashMap::new)
 				.filter(map -> !map.isEmpty());
@@ -251,7 +251,7 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 					}
 				})), existsAlmostCertainly)
 				.flatMap(entry -> Mono
-						.fromCallable(() -> Map.entry(deserializeSuffix(stripPrefix(entry.getKey())), deserialize(entry.getValue())))
+						.fromCallable(() -> Map.entry(deserializeSuffix(stripPrefix(entry.getKey(), false)), deserialize(entry.getValue())))
 				);
 	}
 
@@ -274,12 +274,13 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 		var serializedEntries = entries
 				.flatMap(entry -> Mono
 						.fromCallable(() -> serializeEntry(entry.getKey(), entry.getValue()))
-				).doOnDiscard(Entry.class, entry -> {
-					//noinspection unchecked
-					var castedEntry = (Entry<ByteBuf, ByteBuf>) entry;
-					castedEntry.getKey().release();
-					castedEntry.getValue().release();
-				});
+						.doOnDiscard(Entry.class, uncastedEntry -> {
+							//noinspection unchecked
+							var castedEntry = (Entry<ByteBuf, ByteBuf>) uncastedEntry;
+							castedEntry.getKey().release();
+							castedEntry.getValue().release();
+						})
+				);
 		return dictionary
 				.putMulti(serializedEntries, false)
 				.then();
@@ -290,15 +291,17 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 		return dictionary
 				.getRangeKeys(resolveSnapshot(snapshot), range.retain())
 				.map(key -> {
+					ByteBuf keySuffixWithExt = stripPrefix(key, false);
+					// Don't use "key" under this point ---
 					try {
-						return Map.entry(deserializeSuffix(stripPrefix(key.retain())),
+						return Map.entry(deserializeSuffix(keySuffixWithExt.retainedSlice()),
 								new DatabaseSingleMapped<>(new DatabaseSingle<>(dictionary,
-										toKey(stripPrefix(key.retain())),
+										toKey(keySuffixWithExt.retainedSlice()),
 										Serializer.noop()
 								), valueSerializer)
 						);
 					} finally {
-						key.release();
+						keySuffixWithExt.release();
 					}
 				});
 	}
@@ -308,7 +311,7 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 		return dictionary
 				.getRange(resolveSnapshot(snapshot), range.retain())
 				.map(serializedEntry -> Map.entry(
-						deserializeSuffix(stripPrefix(serializedEntry.getKey())),
+						deserializeSuffix(stripPrefix(serializedEntry.getKey(), false)),
 						valueSerializer.deserialize(serializedEntry.getValue())
 				))
 				.doOnDiscard(Entry.class, entry -> {
@@ -297,9 +297,13 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
 	/**
 	 * Keep only suffix and ext
 	 */
-	protected ByteBuf stripPrefix(ByteBuf key) {
+	protected ByteBuf stripPrefix(ByteBuf key, boolean slice) {
 		try {
-			return key.retainedSlice(this.keyPrefixLength, key.readableBytes() - this.keyPrefixLength);
+			if (slice) {
+				return key.retainedSlice(this.keyPrefixLength, key.readableBytes() - this.keyPrefixLength);
+			} else {
+				return key.retain().readerIndex(key.readerIndex() + keyPrefixLength);
+			}
 		} finally {
 			key.release();
 		}
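Note: the new `slice` parameter selects between two ways of exposing the same bytes. A retained slice is a new derived buffer with its own indices; the non-slice branch keeps the original buffer object and just advances its reader index past the prefix, saving one object allocation but mutating the caller's view of `key`. A standalone sketch of the two modes, with a hypothetical `prefixLen` standing in for `keyPrefixLength`:

```java
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;

public class StripPrefixDemo {

	// Hypothetical stand-in for the real method; prefixLen mimics keyPrefixLength.
	static ByteBuf stripPrefix(ByteBuf key, int prefixLen, boolean slice) {
		try {
			if (slice) {
				// New sliced view: the caller gets independent reader/writer indices.
				return key.retainedSlice(prefixLen, key.readableBytes() - prefixLen);
			} else {
				// Same buffer object, reader index moved past the prefix.
				return key.retain().readerIndex(key.readerIndex() + prefixLen);
			}
		} finally {
			key.release();
		}
	}

	public static void main(String[] args) {
		ByteBuf a = Unpooled.wrappedBuffer(new byte[]{1, 2, 3, 4, 5});
		ByteBuf b = Unpooled.wrappedBuffer(new byte[]{1, 2, 3, 4, 5});
		ByteBuf sliced = stripPrefix(a, 2, true);
		ByteBuf moved = stripPrefix(b, 2, false);
		System.out.println(sliced.readableBytes()); // 3
		System.out.println(moved.readableBytes());  // 3, but the original indices were mutated
		sliced.release();
		moved.release();
	}
}
```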
@@ -308,9 +312,13 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
 	/**
 	 * Remove ext from full key
 	 */
-	protected ByteBuf removeExtFromFullKey(ByteBuf key) {
+	protected ByteBuf removeExtFromFullKey(ByteBuf key, boolean slice) {
 		try {
-			return key.slice(key.readerIndex(), keyPrefixLength + keySuffixLength).retain();
+			if (slice) {
+				return key.retainedSlice(key.readerIndex(), keyPrefixLength + keySuffixLength);
+			} else {
+				return key.retain().writerIndex(key.writerIndex() - (keyPrefixLength + keySuffixLength));
+			}
 		} finally {
 			key.release();
 		}
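Note: assuming a full key laid out as [prefix][suffix][ext], the two branches above do not keep the same bytes. The slice branch keeps `keyPrefixLength + keySuffixLength` readable bytes (dropping the ext), while `writerIndex(key.writerIndex() - (keyPrefixLength + keySuffixLength))` leaves exactly `keyExtLength` readable bytes. A hedged sketch of the variant that would make the index branch agree with the slice branch, under that layout assumption:

```java
// Hypothetical corrected variant, assuming the full key is [prefix][suffix][ext]:
protected ByteBuf removeExtFromFullKey(ByteBuf key, boolean slice) {
	try {
		if (slice) {
			// keeps keyPrefixLength + keySuffixLength readable bytes (drops the ext)
			return key.retainedSlice(key.readerIndex(), keyPrefixLength + keySuffixLength);
		} else {
			// the committed code subtracts (keyPrefixLength + keySuffixLength), which
			// leaves keyExtLength readable bytes; subtracting keyExtLength instead
			// leaves prefix + suffix readable, matching the slice branch
			return key.retain().writerIndex(key.writerIndex() - keyExtLength);
		}
	} finally {
		key.release();
	}
}
```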
@@ -438,9 +446,9 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
 				.using(
 						() -> {
 							assert this.subStageGetter.isMultiKey() || rangeKeys.size() == 1;
-							ByteBuf groupKeyWithExt = rangeKeys.get(0).retain();
-							ByteBuf groupKeyWithoutExt = removeExtFromFullKey(groupKeyWithExt.retain());
-							ByteBuf groupSuffix = this.stripPrefix(groupKeyWithoutExt.retain());
+							ByteBuf groupKeyWithExt = rangeKeys.get(0).retainedSlice();
+							ByteBuf groupKeyWithoutExt = removeExtFromFullKey(groupKeyWithExt.retain(), true);
+							ByteBuf groupSuffix = this.stripPrefix(groupKeyWithoutExt.retain(), true);
 							return new GroupBuffers(groupKeyWithExt, groupKeyWithoutExt, groupSuffix);
 						},
 						buffers -> Mono
@@ -482,7 +490,7 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
 								keyPrefixLength + keySuffixLength)
 						)
 						.flatMapSequential(groupKeyWithoutExt -> {
-							ByteBuf groupSuffix = this.stripPrefix(groupKeyWithoutExt.retain());
+							ByteBuf groupSuffix = this.stripPrefix(groupKeyWithoutExt.retain(), true);
 							assert subStageKeysConsistency(groupKeyWithoutExt.readableBytes() + keyExtLength);
 							return this.subStageGetter
 									.subStage(dictionary,
@@ -67,9 +67,10 @@ public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T
 			try {
 				int keySuffixLength = serialized.readInt();
 				int initialReaderIndex = serialized.readerIndex();
-				T keySuffix = keySuffixSerializer.deserialize(serialized.retain());
-				assert serialized.readerIndex() <= initialReaderIndex + keySuffixLength;
-				U value = valueSerializer.deserialize(serialized.readerIndex(initialReaderIndex + keySuffixLength).retain());
+				int initialWriterIndex = serialized.writerIndex();
+				T keySuffix = keySuffixSerializer.deserialize(serialized.setIndex(initialReaderIndex, initialReaderIndex + keySuffixLength).retain());
+				assert serialized.readerIndex() == initialReaderIndex + keySuffixLength;
+				U value = valueSerializer.deserialize(serialized.setIndex(initialReaderIndex + keySuffixLength, initialWriterIndex).retain());
 				return Map.entry(keySuffix, value);
 			} finally {
 				serialized.release();
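The rewritten deserializer walks one composite buffer with `setIndex(readerIndex, writerIndex)`, giving each serializer a disjoint readable window instead of sharing indices. A standalone sketch of that windowing, with the [int length][key suffix][value] layout assumed from the hunk above:

```java
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import java.nio.charset.StandardCharsets;

public class SetIndexWindowDemo {
	public static void main(String[] args) {
		// Layout assumed from the diff: [int keySuffixLength][keySuffix][value]
		ByteBuf buf = Unpooled.buffer();
		byte[] key = "key".getBytes(StandardCharsets.UTF_8);
		byte[] value = "value".getBytes(StandardCharsets.UTF_8);
		buf.writeInt(key.length).writeBytes(key).writeBytes(value);

		int keySuffixLength = buf.readInt();
		int initialReaderIndex = buf.readerIndex();
		int initialWriterIndex = buf.writerIndex();

		// Window 1: only the key suffix is readable.
		buf.setIndex(initialReaderIndex, initialReaderIndex + keySuffixLength);
		System.out.println(buf.toString(StandardCharsets.UTF_8)); // "key"

		// Window 2: only the value is readable.
		buf.setIndex(initialReaderIndex + keySuffixLength, initialWriterIndex);
		System.out.println(buf.toString(StandardCharsets.UTF_8)); // "value"
		buf.release();
	}
}
```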
@@ -79,18 +80,21 @@ public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T
 		@Override
 		public @NotNull ByteBuf serialize(@NotNull Entry<T, U> deserialized) {
 			ByteBuf keySuffix = keySuffixSerializer.serialize(deserialized.getKey());
-			ByteBuf value = valueSerializer.serialize(deserialized.getValue());
 			try {
-				ByteBuf keySuffixLen = alloc.directBuffer(Integer.BYTES, Integer.BYTES);
-				keySuffixLen.writeInt(keySuffix.readableBytes());
-				return LLUtils.directCompositeBuffer(alloc, keySuffixLen.retain(), keySuffix.retain(), value.retain());
+				ByteBuf value = valueSerializer.serialize(deserialized.getValue());
+				try {
+					ByteBuf keySuffixLen = alloc.directBuffer(Integer.BYTES, Integer.BYTES);
+					try {
+						keySuffixLen.writeInt(keySuffix.readableBytes());
+						return LLUtils.directCompositeBuffer(alloc, keySuffixLen.retain(), keySuffix.retain(), value.retain());
+					} finally {
+						keySuffixLen.release();
+					}
+				} finally {
+					value.release();
+				}
 			} finally {
-				keySuffixLen.release();
-				value.release();
+				keySuffix.release();
 			}
 		}
 	}
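The reshaped `serialize` acquires each reference-counted buffer inside its own try/finally, so an exception thrown while producing a later buffer still releases everything acquired before it. A standalone sketch of the pattern, using a `CompositeByteBuf` as a stand-in for `LLUtils.directCompositeBuffer`:

```java
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
import io.netty.buffer.CompositeByteBuf;

public class NestedReleaseDemo {

	// Compose two buffers, releasing both inputs whether or not composition succeeds.
	static ByteBuf compose(ByteBufAllocator alloc, ByteBuf first) {
		try {
			ByteBuf second = alloc.buffer(4).writeInt(42); // may throw: "first" is still released below
			try {
				CompositeByteBuf out = alloc.compositeBuffer(2);
				// The composite takes ownership of the retained references.
				out.addComponents(true, first.retain(), second.retain());
				return out;
			} finally {
				second.release();
			}
		} finally {
			first.release();
		}
	}

	public static void main(String[] args) {
		ByteBufAllocator alloc = ByteBufAllocator.DEFAULT;
		ByteBuf result = compose(alloc, alloc.buffer(4).writeInt(7));
		System.out.println(result.readableBytes()); // 8
		result.release();
	}
}
```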
@@ -5,6 +5,7 @@ import it.cavallium.dbengine.database.LLDatabaseConnection;
 import it.cavallium.dbengine.database.LLLuceneIndex;
 import it.cavallium.dbengine.lucene.analyzer.TextFieldsAnalyzer;
 import it.cavallium.dbengine.lucene.analyzer.TextFieldsSimilarity;
+import it.cavallium.dbengine.netty.JMXNettyMonitoringManager;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.time.Duration;
@@ -15,6 +16,10 @@ import reactor.core.scheduler.Schedulers;
 
 public class LLLocalDatabaseConnection implements LLDatabaseConnection {
 
+	static {
+		JMXNettyMonitoringManager.start();
+	}
+
 	private final Path basePath;
 	private final boolean crashIfWalError;
 
@@ -752,17 +752,21 @@ public class LLLocalDictionary implements LLDictionary {
 		return entries
 				.window(Math.min(MULTI_GET_WINDOW, CAPPED_WRITE_BATCH_CAP))
 				.doOnDiscard(Entry.class, entry -> {
-					//noinspection unchecked
-					var castedEntry = (Entry<ByteBuf, ByteBuf>) entry;
-					castedEntry.getKey().release();
-					castedEntry.getValue().release();
+					if (entry.getKey() instanceof ByteBuf && entry.getValue() instanceof ByteBuf) {
+						//noinspection unchecked
+						var castedEntry = (Entry<ByteBuf, ByteBuf>) entry;
+						castedEntry.getKey().release();
+						castedEntry.getValue().release();
+					}
 				})
 				.flatMap(Flux::collectList)
 				.doOnDiscard(Entry.class, entry -> {
-					//noinspection unchecked
-					var castedEntry = (Entry<ByteBuf, ByteBuf>) entry;
-					castedEntry.getKey().release();
-					castedEntry.getValue().release();
+					if (entry.getKey() instanceof ByteBuf && entry.getValue() instanceof ByteBuf) {
+						//noinspection unchecked
+						var castedEntry = (Entry<ByteBuf, ByteBuf>) entry;
+						castedEntry.getKey().release();
+						castedEntry.getValue().release();
+					}
 				})
 				.flatMap(ew -> Mono
 						.using(
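`doOnDiscard` dispatches on the element's class alone, so a hook registered for the raw `Entry.class` also fires for discarded entries whose key or value is not a `ByteBuf`; the added `instanceof` guard is what keeps the blind cast from throwing `ClassCastException` there. A minimal sketch of the guarded hook as a reusable helper (hypothetical name):

```java
import io.netty.buffer.ByteBuf;
import java.util.Map.Entry;
import reactor.core.publisher.Flux;

public class DiscardGuardDemo {

	// The discard hook sees every discarded Map.Entry, whatever its type
	// parameters are, so the guard below skips non-ByteBuf entries
	// (e.g. Entry<String, String>) instead of throwing ClassCastException.
	static <K, V> Flux<Entry<K, V>> withByteBufDiscardHook(Flux<Entry<K, V>> source) {
		return source.doOnDiscard(Entry.class, entry -> {
			if (entry.getKey() instanceof ByteBuf && entry.getValue() instanceof ByteBuf) {
				((ByteBuf) entry.getKey()).release();
				((ByteBuf) entry.getValue()).release();
			}
		});
	}
}
```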
@@ -36,7 +36,9 @@ public interface Serializer<A, B> {
 			@Override
 			public @NotNull String deserialize(@NotNull ByteBuf serialized) {
 				try {
-					return serialized.toString(StandardCharsets.UTF_8);
+					var result = serialized.toString(StandardCharsets.UTF_8);
+					serialized.readerIndex(serialized.writerIndex());
+					return result;
 				} finally {
 					serialized.release();
 				}
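`ByteBuf.toString(Charset)` reads the buffer without moving its reader index, so the added `readerIndex(writerIndex())` call is what marks the input as fully consumed for callers that check indices afterwards (such as the reader-index assertion added in DatabaseMapDictionaryHashed above). A standalone sketch:

```java
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import java.nio.charset.StandardCharsets;

public class Utf8DeserializeDemo {

	// toString(Charset) does not move the reader index, so the deserializer
	// advances it explicitly to signal "everything was consumed".
	static String deserialize(ByteBuf serialized) {
		try {
			var result = serialized.toString(StandardCharsets.UTF_8);
			serialized.readerIndex(serialized.writerIndex());
			return result;
		} finally {
			serialized.release();
		}
	}

	public static void main(String[] args) {
		System.out.println(deserialize(Unpooled.copiedBuffer("ciao", StandardCharsets.UTF_8)));
	}
}
```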
@@ -59,7 +59,9 @@ public interface SerializerFixedBinaryLength<A, B> extends Serializer<A, B> {
 							"Fixed serializer with " + getSerializedBinaryLength() + " bytes has tried to deserialize an element with "
 									+ serialized.readableBytes() + " bytes instead");
 				}
-				return serialized.toString(StandardCharsets.UTF_8);
+				var result = serialized.toString(StandardCharsets.UTF_8);
+				serialized.readerIndex(serialized.writerIndex());
+				return result;
 			} finally {
 				serialized.release();
 			}
@@ -0,0 +1,64 @@
+package it.cavallium.dbengine.netty;
+
+import io.netty.buffer.ByteBufAllocatorMetric;
+
+public class JMXNettyMonitoring implements JMXNettyMonitoringMBean {
+
+	private final String name;
+	private final ByteBufAllocatorMetric metric;
+
+	public JMXNettyMonitoring(String name, io.netty.buffer.ByteBufAllocatorMetric metric) {
+		this.name = name;
+		this.metric = metric;
+	}
+
+	@Override
+	public String getName() {
+		return name;
+	}
+
+	@Override
+	public Long getHeapUsed() {
+		return metric.usedHeapMemory();
+	}
+
+	@Override
+	public Long getDirectUsed() {
+		return metric.usedDirectMemory();
+	}
+
+	@Override
+	public Integer getNumHeapArenas() {
+		return null;
+	}
+
+	@Override
+	public Integer getNumDirectArenas() {
+		return null;
+	}
+
+	@Override
+	public Integer getNumThreadLocalCachesArenas() {
+		return null;
+	}
+
+	@Override
+	public Integer getTinyCacheSize() {
+		return null;
+	}
+
+	@Override
+	public Integer getSmallCacheSize() {
+		return null;
+	}
+
+	@Override
+	public Integer getNormalCacheSize() {
+		return null;
+	}
+
+	@Override
+	public Integer getChunkSize() {
+		return null;
+	}
+}
@@ -0,0 +1,24 @@
+package it.cavallium.dbengine.netty;
+
+public interface JMXNettyMonitoringMBean {
+
+	String getName();
+
+	Long getHeapUsed();
+
+	Long getDirectUsed();
+
+	Integer getNumHeapArenas();
+
+	Integer getNumDirectArenas();
+
+	Integer getNumThreadLocalCachesArenas();
+
+	Integer getTinyCacheSize();
+
+	Integer getSmallCacheSize();
+
+	Integer getNormalCacheSize();
+
+	Integer getChunkSize();
+}
@@ -0,0 +1,79 @@
+package it.cavallium.dbengine.netty;
+
+import io.netty.buffer.ByteBufAllocatorMetric;
+import io.netty.buffer.PoolArenaMetric;
+import io.netty.buffer.PooledByteBufAllocator;
+import io.netty.buffer.PooledByteBufAllocatorMetric;
+import io.netty.buffer.UnpooledByteBufAllocator;
+import java.lang.management.ManagementFactory;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.stream.Stream;
+import javax.management.InstanceAlreadyExistsException;
+import javax.management.MBeanRegistrationException;
+import javax.management.MBeanServer;
+import javax.management.MalformedObjectNameException;
+import javax.management.NotCompliantMBeanException;
+import javax.management.ObjectName;
+import javax.management.StandardMBean;
+
+public class JMXNettyMonitoringManager {
+
+	private static JMXNettyMonitoringManager instance;
+
+	private final MBeanServer platformMBeanServer;
+
+	private JMXNettyMonitoringManager() {
+		this.platformMBeanServer = ManagementFactory.getPlatformMBeanServer();
+	}
+
+	public synchronized static void start() {
+		if (instance == null) {
+			instance = new JMXNettyMonitoringManager();
+			instance.startInternal();
+		}
+	}
+
+	private void startInternal() {
+		try {
+			int arenaId = 0;
+			Map<String, ByteBufAllocatorMetric> allocators = new HashMap<>();
+			allocators.put("unpooled", UnpooledByteBufAllocator.DEFAULT.metric());
+			allocators.put("pooled", PooledByteBufAllocator.DEFAULT.metric());
+
+			for (var entry : allocators.entrySet()) {
+				var name = entry.getKey().replaceAll("[^\\p{IsAlphabetic}\\p{IsDigit}_]", "_");
+				var metric = entry.getValue();
+				String type;
+				StandardMBean mbean;
+				if (metric instanceof PooledByteBufAllocatorMetric) {
+					var pooledMetric = (PooledByteBufAllocatorMetric) metric;
+					for (var arenaEntry : (Iterable<Entry<String, PoolArenaMetric>>) Stream.concat(
+							pooledMetric.directArenas().stream().map(arena -> Map.entry("direct", arena)),
+							pooledMetric.heapArenas().stream().map(arena -> Map.entry("heap", arena))
+					)::iterator) {
+						var arenaType = arenaEntry.getKey();
+						var arenaMetric = arenaEntry.getValue();
+						var jmx = new JMXPoolArenaNettyMonitoring(arenaMetric);
+						mbean = new StandardMBean(jmx, JMXPoolArenaNettyMonitoringMBean.class);
+						ObjectName botObjectName = new ObjectName("io.netty.stats:name=PoolArena,type=" + arenaType + ",arenaId=" + arenaId++);
+						platformMBeanServer.registerMBean(mbean, botObjectName);
+					}
+					var jmx = new JMXPooledNettyMonitoring(name, pooledMetric);
+					type = "pooled";
+					mbean = new StandardMBean(jmx, JMXNettyMonitoringMBean.class);
+				} else {
+					var jmx = new JMXNettyMonitoring(name, metric);
+					type = "unpooled";
+					mbean = new StandardMBean(jmx, JMXNettyMonitoringMBean.class);
+				}
+
+				ObjectName botObjectName = new ObjectName("io.netty.stats:name=ByteBufAllocator,allocatorName=" + name + ",type=" + type);
+				platformMBeanServer.registerMBean(mbean, botObjectName);
+			}
+		} catch (MalformedObjectNameException | NotCompliantMBeanException | InstanceAlreadyExistsException | MBeanRegistrationException e) {
+			throw new RuntimeException(e);
+		}
+	}
+}
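Once `start()` has run, the statistics are visible in JConsole or VisualVM under the `io.netty.stats` domain, or can be read programmatically. A small sketch that dumps every registered attribute (placed in the same `it.cavallium.dbengine.netty` package as an assumption; the `ObjectName` pattern matches the names built above):

```java
package it.cavallium.dbengine.netty;

import java.lang.management.ManagementFactory;
import javax.management.MBeanAttributeInfo;
import javax.management.MBeanServer;
import javax.management.ObjectName;

public class DumpNettyStats {
	public static void main(String[] args) throws Exception {
		JMXNettyMonitoringManager.start(); // registers the beans described above
		MBeanServer server = ManagementFactory.getPlatformMBeanServer();
		for (ObjectName name : server.queryNames(new ObjectName("io.netty.stats:*"), null)) {
			System.out.println(name);
			for (MBeanAttributeInfo attr : server.getMBeanInfo(name).getAttributes()) {
				System.out.println("  " + attr.getName() + " = " + server.getAttribute(name, attr.getName()));
			}
		}
	}
}
```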
@@ -0,0 +1,114 @@
+package it.cavallium.dbengine.netty;
+
+import io.netty.buffer.PoolArenaMetric;
+
+public class JMXPoolArenaNettyMonitoring implements JMXPoolArenaNettyMonitoringMBean {
+
+	private final PoolArenaMetric metric;
+
+	public JMXPoolArenaNettyMonitoring(PoolArenaMetric metric) {
+		this.metric = metric;
+	}
+
+	@Override
+	public Integer getNumThreadCaches() {
+		return metric.numThreadCaches();
+	}
+
+	@Deprecated
+	@Override
+	public Integer getNumTinySubpages() {
+		return metric.numTinySubpages();
+	}
+
+	@Override
+	public Integer getNumSmallSubpages() {
+		return metric.numSmallSubpages();
+	}
+
+	@Override
+	public Integer getNumChunkLists() {
+		return metric.numChunkLists();
+	}
+
+	@Override
+	public Long getNumAllocations() {
+		return metric.numAllocations();
+	}
+
+	@Override
+	public Long getNumTinyAllocations() {
+		return metric.numTinyAllocations();
+	}
+
+	@Override
+	public Long getNumSmallAllocations() {
+		return metric.numSmallAllocations();
+	}
+
+	@Override
+	public Long getNumNormalAllocations() {
+		return metric.numNormalAllocations();
+	}
+
+	@Override
+	public Long getNumHugeAllocations() {
+		return metric.numHugeAllocations();
+	}
+
+	@Override
+	public Long getNumDeallocations() {
+		return metric.numDeallocations();
+	}
+
+	@Override
+	public Long getNumTinyDeallocations() {
+		return metric.numTinyDeallocations();
+	}
+
+	@Override
+	public Long getNumSmallDeallocations() {
+		return metric.numSmallDeallocations();
+	}
+
+	@Override
+	public Long getNumNormalDeallocations() {
+		return metric.numNormalDeallocations();
+	}
+
+	@Override
+	public Long getNumHugeDeallocations() {
+		return metric.numHugeDeallocations();
+	}
+
+	@Override
+	public Long getNumActiveAllocations() {
+		return metric.numActiveAllocations();
+	}
+
+	@Deprecated
+	@Override
+	public Long getNumActiveTinyAllocations() {
+		return metric.numActiveTinyAllocations();
+	}
+
+	@Override
+	public Long getNumActiveSmallAllocations() {
+		return metric.numActiveSmallAllocations();
+	}
+
+	@Override
+	public Long getNumActiveNormalAllocations() {
+		return metric.numActiveNormalAllocations();
+	}
+
+	@Override
+	public Long getNumActiveHugeAllocations() {
+		return metric.numActiveHugeAllocations();
+	}
+
+	@Override
+	public Long getNumActiveBytes() {
+		return metric.numActiveBytes();
+	}
+}
@@ -0,0 +1,44 @@
+package it.cavallium.dbengine.netty;
+
+public interface JMXPoolArenaNettyMonitoringMBean {
+
+	Integer getNumThreadCaches();
+
+	Integer getNumTinySubpages();
+
+	Integer getNumSmallSubpages();
+
+	Integer getNumChunkLists();
+
+	Long getNumAllocations();
+
+	Long getNumTinyAllocations();
+
+	Long getNumSmallAllocations();
+
+	Long getNumNormalAllocations();
+
+	Long getNumHugeAllocations();
+
+	Long getNumDeallocations();
+
+	Long getNumTinyDeallocations();
+
+	Long getNumSmallDeallocations();
+
+	Long getNumNormalDeallocations();
+
+	Long getNumHugeDeallocations();
+
+	Long getNumActiveAllocations();
+
+	Long getNumActiveTinyAllocations();
+
+	Long getNumActiveSmallAllocations();
+
+	Long getNumActiveNormalAllocations();
+
+	Long getNumActiveHugeAllocations();
+
+	Long getNumActiveBytes();
+}
@@ -0,0 +1,50 @@
+package it.cavallium.dbengine.netty;
+
+import io.netty.buffer.ByteBufAllocatorMetric;
+import io.netty.buffer.PooledByteBufAllocatorMetric;
+
+public class JMXPooledNettyMonitoring extends JMXNettyMonitoring implements JMXNettyMonitoringMBean {
+
+	private final PooledByteBufAllocatorMetric metric;
+
+	public JMXPooledNettyMonitoring(String name, PooledByteBufAllocatorMetric metric) {
+		super(name, metric);
+		this.metric = metric;
+	}
+
+	@Override
+	public Integer getNumHeapArenas() {
+		return metric.numHeapArenas();
+	}
+
+	@Override
+	public Integer getNumDirectArenas() {
+		return metric.numDirectArenas();
+	}
+
+	@Override
+	public Integer getNumThreadLocalCachesArenas() {
+		return metric.numThreadLocalCaches();
+	}
+
+	@Deprecated
+	@Override
+	public Integer getTinyCacheSize() {
+		return metric.tinyCacheSize();
+	}
+
+	@Override
+	public Integer getSmallCacheSize() {
+		return metric.smallCacheSize();
+	}
+
+	@Override
+	public Integer getNormalCacheSize() {
+		return metric.normalCacheSize();
+	}
+
+	@Override
+	public Integer getChunkSize() {
+		return metric.chunkSize();
+	}
+}
@@ -1,5 +1,7 @@
 package it.cavallium.dbengine;
 
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.PooledByteBufAllocator;
 import it.cavallium.dbengine.database.Column;
 import it.cavallium.dbengine.database.LLDictionary;
 import it.cavallium.dbengine.database.LLKeyValueDatabase;
@@ -7,6 +9,8 @@ import it.cavallium.dbengine.database.UpdateMode;
 import it.cavallium.dbengine.database.collections.DatabaseMapDictionary;
 import it.cavallium.dbengine.database.collections.DatabaseMapDictionaryDeep;
+import it.cavallium.dbengine.database.collections.DatabaseMapDictionaryHashed;
 import it.cavallium.dbengine.database.collections.DatabaseStageEntry;
 import it.cavallium.dbengine.database.collections.DatabaseStageMap;
+import it.cavallium.dbengine.database.collections.SubStageGetterHashMap;
 import it.cavallium.dbengine.database.collections.SubStageGetterMap;
 import it.cavallium.dbengine.database.disk.LLLocalDatabaseConnection;
@@ -21,6 +25,7 @@ import java.util.Map;
 import java.util.concurrent.CompletionException;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.function.Function;
+import org.jetbrains.annotations.NotNull;
 import org.reactivestreams.Publisher;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
@@ -78,10 +83,54 @@ public class DbTestUtils {
 		return database.getDictionary(name, updateMode);
 	}
 
-	public static DatabaseMapDictionary<String, String> tempDatabaseMapDictionaryMap(
+
+	public enum DbType {
+		MAP,
+		HASH_MAP
+	}
+
+	public static DatabaseStageMap<String, String, DatabaseStageEntry<String>> tempDatabaseMapDictionaryMap(
 			LLDictionary dictionary,
+			DbType dbType,
 			int keyBytes) {
-		return DatabaseMapDictionary.simple(dictionary, SerializerFixedBinaryLength.utf8(keyBytes), Serializer.utf8());
+		if (dbType == DbType.MAP) {
+			return DatabaseMapDictionary.simple(dictionary, SerializerFixedBinaryLength.utf8(keyBytes), Serializer.utf8());
+		} else {
+			return DatabaseMapDictionaryHashed.simple(dictionary,
+					SerializerFixedBinaryLength.utf8(keyBytes),
+					Serializer.utf8(),
+					String::hashCode,
+					new SerializerFixedBinaryLength<>() {
+						@Override
+						public int getSerializedBinaryLength() {
+							return keyBytes;
+						}
+
+						@Override
+						public @NotNull Integer deserialize(@NotNull ByteBuf serialized) {
+							try {
+								var val = serialized.readInt();
+								serialized.readerIndex(serialized.readerIndex() + keyBytes);
+								return val;
+							} finally {
+								serialized.release();
+							}
+						}
+
+						@Override
+						public @NotNull ByteBuf serialize(@NotNull Integer deserialized) {
+							var out = PooledByteBufAllocator.DEFAULT.directBuffer(keyBytes);
+							try {
+								out.writeInt(deserialized);
+								out.writerIndex(keyBytes);
+								return out.retain();
+							} finally {
+								out.release();
+							}
+						}
+					}
+			);
+		}
+	}
 
 	public static <T, U> DatabaseMapDictionaryDeep<String, Map<String, String>, DatabaseMapDictionary<String, String>> tempDatabaseMapDictionaryDeepMap(
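A fixed-length serializer must read and write exactly `getSerializedBinaryLength()` bytes. The hash codec above pads the 4-byte hash up to `keyBytes` on write; on read, note that it advances the reader index by `keyBytes` *after* `readInt()` has already consumed 4 bytes, which appears to overshoot by `Integer.BYTES`. A standalone sketch with symmetric arithmetic, under the same padding assumption:

```java
import io.netty.buffer.ByteBuf;
import io.netty.buffer.PooledByteBufAllocator;

public class FixedLengthHashCodec {

	// A fixed-length codec must read and write exactly keyBytes, whatever the
	// payload. The read side consumes the padding explicitly (keyBytes minus the
	// 4 bytes readInt() already took), keeping reads and writes symmetric.
	static ByteBuf serialize(int hash, int keyBytes) {
		var out = PooledByteBufAllocator.DEFAULT.directBuffer(keyBytes);
		try {
			out.writeInt(hash);
			out.writerIndex(keyBytes); // pad up to the fixed length
			return out.retain();
		} finally {
			out.release();
		}
	}

	static int deserialize(ByteBuf serialized, int keyBytes) {
		try {
			var val = serialized.readInt();
			serialized.readerIndex(serialized.readerIndex() + keyBytes - Integer.BYTES);
			return val;
		} finally {
			serialized.release();
		}
	}

	public static void main(String[] args) {
		ByteBuf buf = serialize(42, 8);
		System.out.println(deserialize(buf, 8)); // 42
	}
}
```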
@@ -65,15 +65,24 @@ public class TestDictionaryMap {
 						entryTuple.getT2(),
 						entryTuple.getT3()
 				)))
-				.map(fullTuple -> Arguments.of(fullTuple.getT1(), fullTuple.getT2(), fullTuple.getT3(), fullTuple.getT4()));
+				.flatMap(entryTuple -> Stream.of(Tuples.of(DbType.MAP, entryTuple.getT1(),
+						entryTuple.getT2(),
+						entryTuple.getT3(),
+						entryTuple.getT4()
+				), Tuples.of(DbType.HASH_MAP, entryTuple.getT1(),
+						entryTuple.getT2(),
+						entryTuple.getT3(),
+						entryTuple.getT4()
+				)))
+				.map(fullTuple -> Arguments.of(fullTuple.getT1(), fullTuple.getT2(), fullTuple.getT3(), fullTuple.getT4(), fullTuple.getT5()));
 	}
 
 	@ParameterizedTest
 	@MethodSource("provideArgumentsPut")
-	public void testPut(UpdateMode updateMode, String key, String value, boolean shouldFail) {
+	public void testPut(DbType dbType, UpdateMode updateMode, String key, String value, boolean shouldFail) {
 		var stpVer = StepVerifier
 				.create(tempDb(db -> tempDictionary(db, updateMode)
-						.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
+						.map(dict -> tempDatabaseMapDictionaryMap(dict, dbType, 5))
 						.flatMap(map -> map
 								.putValue(key, value)
 								.then(map.getValue(null, key))
@@ -89,10 +98,10 @@ public class TestDictionaryMap {
 
 	@ParameterizedTest
 	@MethodSource("provideArgumentsPut")
-	public void testAtSetAtGet(UpdateMode updateMode, String key, String value, boolean shouldFail) {
+	public void testAtSetAtGet(DbType dbType, UpdateMode updateMode, String key, String value, boolean shouldFail) {
 		var stpVer = StepVerifier
 				.create(tempDb(db -> tempDictionary(db, updateMode)
-						.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
+						.map(dict -> tempDatabaseMapDictionaryMap(dict, dbType, 5))
 						.flatMap(map -> map
 								.at(null, key).flatMap(v -> v.set(value).doFinally(s -> v.release()))
 								.then(map.at(null, key).flatMap(v -> v.get(null).doFinally(s -> v.release())))
@@ -108,10 +117,10 @@ public class TestDictionaryMap {
 
 	@ParameterizedTest
 	@MethodSource("provideArgumentsPut")
-	public void testPutAndGetPrevious(UpdateMode updateMode, String key, String value, boolean shouldFail) {
+	public void testPutAndGetPrevious(DbType dbType, UpdateMode updateMode, String key, String value, boolean shouldFail) {
 		var stpVer = StepVerifier
 				.create(tempDb(db -> tempDictionary(db, updateMode)
-						.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
+						.map(dict -> tempDatabaseMapDictionaryMap(dict, dbType, 5))
 						.flatMapMany(map -> Flux
 								.concat(
 										map.putValueAndGetPrevious(key, "error?"),
@@ -130,10 +139,10 @@ public class TestDictionaryMap {
 
 	@ParameterizedTest
 	@MethodSource("provideArgumentsPut")
-	public void testPutValueRemoveAndGetPrevious(UpdateMode updateMode, String key, String value, boolean shouldFail) {
+	public void testPutValueRemoveAndGetPrevious(DbType dbType, UpdateMode updateMode, String key, String value, boolean shouldFail) {
 		var stpVer = StepVerifier
 				.create(tempDb(db -> tempDictionary(db, updateMode)
-						.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
+						.map(dict -> tempDatabaseMapDictionaryMap(dict, dbType, 5))
 						.flatMapMany(map -> Flux
 								.concat(
 										map.removeAndGetPrevious(key),
@@ -152,10 +161,10 @@ public class TestDictionaryMap {
 
 	@ParameterizedTest
 	@MethodSource("provideArgumentsPut")
-	public void testPutValueRemoveAndGetStatus(UpdateMode updateMode, String key, String value, boolean shouldFail) {
+	public void testPutValueRemoveAndGetStatus(DbType dbType, UpdateMode updateMode, String key, String value, boolean shouldFail) {
 		var stpVer = StepVerifier
 				.create(tempDb(db -> tempDictionary(db, updateMode)
-						.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
+						.map(dict -> tempDatabaseMapDictionaryMap(dict, dbType, 5))
 						.flatMapMany(map -> Flux
 								.concat(
 										map.removeAndGetStatus(key),
@@ -174,13 +183,13 @@ public class TestDictionaryMap {
 
 	@ParameterizedTest
 	@MethodSource("provideArgumentsPut")
-	public void testUpdate(UpdateMode updateMode, String key, String value, boolean shouldFail) {
+	public void testUpdate(DbType dbType, UpdateMode updateMode, String key, String value, boolean shouldFail) {
 		if (updateMode == UpdateMode.DISALLOW && !isTestBadKeysEnabled()) {
 			return;
 		}
 		var stpVer = StepVerifier
 				.create(tempDb(db -> tempDictionary(db, updateMode)
-						.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
+						.map(dict -> tempDatabaseMapDictionaryMap(dict, dbType, 5))
 						.flatMapMany(map -> Flux
 								.concat(
 										map.updateValue(key, old -> {
@@ -216,13 +225,13 @@ public class TestDictionaryMap {
 
 	@ParameterizedTest
 	@MethodSource("provideArgumentsPut")
-	public void testUpdateGet(UpdateMode updateMode, String key, String value, boolean shouldFail) {
+	public void testUpdateGet(DbType dbType, UpdateMode updateMode, String key, String value, boolean shouldFail) {
 		if (updateMode == UpdateMode.DISALLOW && !isTestBadKeysEnabled()) {
 			return;
 		}
 		var stpVer = StepVerifier
 				.create(tempDb(db -> tempDictionary(db, updateMode)
-						.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
+						.map(dict -> tempDatabaseMapDictionaryMap(dict, dbType, 5))
 						.flatMapMany(map -> Flux
 								.concat(
 										map.updateValue(key, old -> {
@@ -258,10 +267,10 @@ public class TestDictionaryMap {
 
 	@ParameterizedTest
 	@MethodSource("provideArgumentsPut")
-	public void testPutAndGetChanged(UpdateMode updateMode, String key, String value, boolean shouldFail) {
+	public void testPutAndGetChanged(DbType dbType, UpdateMode updateMode, String key, String value, boolean shouldFail) {
 		var stpVer = StepVerifier
 				.create(tempDb(db -> tempDictionary(db, updateMode)
-						.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
+						.map(dict -> tempDatabaseMapDictionaryMap(dict, dbType, 5))
 						.flatMapMany(map -> Flux
 								.concat(
 										map.putValueAndGetChanged(key, "error?").single(),
@@ -305,16 +314,23 @@ public class TestDictionaryMap {
 						entryTuple.getT1(),
 						entryTuple.getT2()
 				)))
-				.map(fullTuple -> Arguments.of(fullTuple.getT1(), fullTuple.getT2(), fullTuple.getT3()));
+				.flatMap(entryTuple -> Stream.of(Tuples.of(DbType.MAP, entryTuple.getT1(),
+						entryTuple.getT2(),
+						entryTuple.getT3()
+				), Tuples.of(DbType.HASH_MAP, entryTuple.getT1(),
+						entryTuple.getT2(),
+						entryTuple.getT3()
+				)))
+				.map(fullTuple -> Arguments.of(fullTuple.getT1(), fullTuple.getT2(), fullTuple.getT3(), fullTuple.getT4()));
 	}
 
 	@ParameterizedTest
 	@MethodSource("provideArgumentsPutMulti")
-	public void testPutMultiGetMulti(UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
+	public void testPutMultiGetMulti(DbType dbType, UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
 		var remainingEntries = new ConcurrentHashMap<Entry<String, String>, Boolean>().keySet(true);
 		Step<Entry<String, String>> stpVer = StepVerifier
 				.create(tempDb(db -> tempDictionary(db, updateMode)
-						.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
+						.map(dict -> tempDatabaseMapDictionaryMap(dict, dbType, 5))
 						.flatMapMany(map -> Flux
 								.concat(
 										map.putMulti(Flux.fromIterable(entries.entrySet())).then(Mono.empty()),
@@ -336,11 +352,11 @@ public class TestDictionaryMap {
 
 	@ParameterizedTest
 	@MethodSource("provideArgumentsPutMulti")
-	public void testSetAllValuesGetMulti(UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
+	public void testSetAllValuesGetMulti(DbType dbType, UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
 		var remainingEntries = new ConcurrentHashMap<Entry<String, String>, Boolean>().keySet(true);
 		Step<Entry<String, String>> stpVer = StepVerifier
 				.create(tempDb(db -> tempDictionary(db, updateMode)
-						.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
+						.map(dict -> tempDatabaseMapDictionaryMap(dict, dbType, 5))
 						.flatMapMany(map -> map
 								.setAllValues(Flux.fromIterable(entries.entrySet()))
 								.thenMany(map.getMulti(null, Flux.fromIterable(entries.keySet())))
@@ -360,11 +376,11 @@ public class TestDictionaryMap {
 
 	@ParameterizedTest
 	@MethodSource("provideArgumentsPutMulti")
-	public void testSetAllValuesAndGetPrevious(UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
+	public void testSetAllValuesAndGetPrevious(DbType dbType, UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
 		var remainingEntries = new ConcurrentHashMap<Entry<String, String>, Boolean>().keySet(true);
 		Step<Entry<String, String>> stpVer = StepVerifier
 				.create(tempDb(db -> tempDictionary(db, updateMode)
-						.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
+						.map(dict -> tempDatabaseMapDictionaryMap(dict, dbType, 5))
 						.flatMapMany(map -> Flux
 								.concat(
 										map.setAllValuesAndGetPrevious(Flux.fromIterable(entries.entrySet())),
@@ -386,11 +402,11 @@ public class TestDictionaryMap {
 
 	@ParameterizedTest
 	@MethodSource("provideArgumentsPutMulti")
-	public void testSetGetMulti(UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
+	public void testSetGetMulti(DbType dbType, UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
 		var remainingEntries = new ConcurrentHashMap<Entry<String, String>, Boolean>().keySet(true);
 		Step<Entry<String, String>> stpVer = StepVerifier
 				.create(tempDb(db -> tempDictionary(db, updateMode)
-						.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
+						.map(dict -> tempDatabaseMapDictionaryMap(dict, dbType, 5))
 						.flatMapMany(map -> Flux
 								.concat(
 										map.set(entries).then(Mono.empty()),
@@ -412,11 +428,11 @@ public class TestDictionaryMap {
 
 	@ParameterizedTest
 	@MethodSource("provideArgumentsPutMulti")
-	public void testSetAndGetChanged(UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
+	public void testSetAndGetChanged(DbType dbType, UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
 		var remainingEntries = new ConcurrentHashMap<Entry<String, String>, Boolean>().keySet(true);
 		Step<Boolean> stpVer = StepVerifier
 				.create(tempDb(db -> tempDictionary(db, updateMode)
-						.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
+						.map(dict -> tempDatabaseMapDictionaryMap(dict, dbType, 5))
 						.flatMapMany(map -> {
 							Mono<Void> removalMono;
 							if (entries.isEmpty()) {
@@ -443,11 +459,11 @@ public class TestDictionaryMap {
 
 	@ParameterizedTest
 	@MethodSource("provideArgumentsPutMulti")
-	public void testSetAndGetPrevious(UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
+	public void testSetAndGetPrevious(DbType dbType, UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
 		var remainingEntries = new ConcurrentHashMap<Entry<String, String>, Boolean>().keySet(true);
 		Step<Entry<String, String>> stpVer = StepVerifier
 				.create(tempDb(db -> tempDictionary(db, updateMode)
-						.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
+						.map(dict -> tempDatabaseMapDictionaryMap(dict, dbType, 5))
 						.flatMapMany(map -> Flux
 								.concat(map.setAndGetPrevious(entries), map.setAndGetPrevious(entries))
 								.map(Map::entrySet)
@@ -468,11 +484,11 @@ public class TestDictionaryMap {
 
 	@ParameterizedTest
 	@MethodSource("provideArgumentsPutMulti")
-	public void testSetClearAndGetPreviousGet(UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
+	public void testSetClearAndGetPreviousGet(DbType dbType, UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
 		var remainingEntries = new ConcurrentHashMap<Entry<String, String>, Boolean>().keySet(true);
 		Step<Entry<String, String>> stpVer = StepVerifier
 				.create(tempDb(db -> tempDictionary(db, updateMode)
-						.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
+						.map(dict -> tempDatabaseMapDictionaryMap(dict, dbType, 5))
 						.flatMapMany(map -> Flux
 								.concat(map.set(entries).then(Mono.empty()), map.clearAndGetPrevious(), map.get(null))
 								.map(Map::entrySet)
@@ -493,11 +509,11 @@ public class TestDictionaryMap {
 
 	@ParameterizedTest
 	@MethodSource("provideArgumentsPutMulti")
-	public void testPutMultiGetAllValues(UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
+	public void testPutMultiGetAllValues(DbType dbType, UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
 		var remainingEntries = new ConcurrentHashMap<Entry<String, String>, Boolean>().keySet(true);
 		Step<Entry<String, String>> stpVer = StepVerifier
 				.create(tempDb(db -> tempDictionary(db, updateMode)
-						.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
+						.map(dict -> tempDatabaseMapDictionaryMap(dict, dbType, 5))
 						.flatMapMany(map -> Flux
 								.concat(
 										map.putMulti(Flux.fromIterable(entries.entrySet())).then(Mono.empty()),
@@ -519,11 +535,11 @@ public class TestDictionaryMap {
 
 	@ParameterizedTest
 	@MethodSource("provideArgumentsPutMulti")
-	public void testPutMultiGet(UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
+	public void testPutMultiGet(DbType dbType, UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
 		var remainingEntries = new ConcurrentHashMap<Entry<String, String>, Boolean>().keySet(true);
 		Step<Entry<String, String>> stpVer = StepVerifier
 				.create(tempDb(db -> tempDictionary(db, updateMode)
-						.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
+						.map(dict -> tempDatabaseMapDictionaryMap(dict, dbType, 5))
 						.flatMapMany(map -> Flux
 								.concat(
 										map.putMulti(Flux.fromIterable(entries.entrySet())).then(Mono.empty()),
@@ -547,11 +563,11 @@ public class TestDictionaryMap {
 
 	@ParameterizedTest
 	@MethodSource("provideArgumentsPutMulti")
-	public void testPutMultiGetAllStagesGet(UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
+	public void testPutMultiGetAllStagesGet(DbType dbType, UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
 		var remainingEntries = new ConcurrentHashMap<Entry<String, String>, Boolean>().keySet(true);
 		Step<Entry<String, String>> stpVer = StepVerifier
 				.create(tempDb(db -> tempDictionary(db, updateMode)
-						.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
+						.map(dict -> tempDatabaseMapDictionaryMap(dict, dbType, 5))
 						.flatMapMany(map -> Flux
 								.concat(
 										map.putMulti(Flux.fromIterable(entries.entrySet())).then(Mono.empty()),
@@ -580,11 +596,11 @@ public class TestDictionaryMap {
 
 	@ParameterizedTest
 	@MethodSource("provideArgumentsPutMulti")
-	public void testPutMultiIsEmpty(UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
+	public void testPutMultiIsEmpty(DbType dbType, UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
 		var remainingEntries = new ConcurrentHashMap<Entry<String, String>, Boolean>().keySet(true);
 		Step<Boolean> stpVer = StepVerifier
 				.create(tempDb(db -> tempDictionary(db, updateMode)
-						.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
+						.map(dict -> tempDatabaseMapDictionaryMap(dict, dbType, 5))
 						.flatMapMany(map -> Flux
 								.concat(
 										map.isEmpty(null),
@@ -603,11 +619,11 @@ public class TestDictionaryMap {
 
 	@ParameterizedTest
 	@MethodSource("provideArgumentsPutMulti")
-	public void testPutMultiClear(UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
+	public void testPutMultiClear(DbType dbType, UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
 		var remainingEntries = new ConcurrentHashMap<Entry<String, String>, Boolean>().keySet(true);
 		Step<Boolean> stpVer = StepVerifier
 				.create(tempDb(db -> tempDictionary(db, updateMode)
-						.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
+						.map(dict -> tempDatabaseMapDictionaryMap(dict, dbType, 5))
 						.flatMapMany(map -> Flux
 								.concat(
 										map.isEmpty(null),
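Each parameterized test now receives the `DbType` as its first argument, and the providers emit every existing case once for `MAP` and once for `HASH_MAP`. A hedged sketch of the same expansion written as a cross product over the enum, which scales if more `DbType` values are added later (it assumes Reactor's `Tuple4` and the JUnit `Arguments` already used by this class):

```java
import it.cavallium.dbengine.DbTestUtils.DbType;
import it.cavallium.dbengine.database.UpdateMode;
import java.util.Arrays;
import java.util.stream.Stream;
import org.junit.jupiter.params.provider.Arguments;
import reactor.util.function.Tuple4;

public class ArgumentsCross {

	// Cross every existing tuple with every DbType instead of listing MAP and
	// HASH_MAP by hand; equivalent to the flatMap in the diff for two values.
	static Stream<Arguments> crossWithDbType(Stream<Tuple4<UpdateMode, String, String, Boolean>> base) {
		return base.flatMap(t -> Arrays.stream(DbType.values())
				.map(db -> Arguments.of(db, t.getT1(), t.getT2(), t.getT3(), t.getT4())));
	}
}
```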