Bugfixes
parent 2e6aceafe6 → commit 0c26daba57
@@ -24,6 +24,8 @@ public interface LLDictionary extends LLKeyValueDatabaseStructure {

	Mono<ByteBuf> put(ByteBuf key, ByteBuf value, LLDictionaryResultType resultType);

	Mono<UpdateMode> getUpdateMode();

	Mono<Boolean> update(ByteBuf key, Function<@Nullable ByteBuf, @Nullable ByteBuf> updater, boolean existsAlmostCertainly);

	default Mono<Boolean> update(ByteBuf key, Function<@Nullable ByteBuf, @Nullable ByteBuf> updater) {

@@ -65,7 +67,7 @@ public interface LLDictionary extends LLKeyValueDatabaseStructure {

	Flux<ByteBuf> getRangeKeyPrefixes(@Nullable LLSnapshot snapshot, LLRange range, int prefixLength);

	Flux<Entry<ByteBuf, ByteBuf>> setRange(LLRange range, Flux<Entry<ByteBuf, ByteBuf>> entries, boolean getOldValues);
	Mono<Void> setRange(LLRange range, Flux<Entry<ByteBuf, ByteBuf>> entries);

	default Mono<Void> replaceRange(LLRange range,
			boolean canKeysChange,

@@ -76,8 +78,8 @@ public interface LLDictionary extends LLKeyValueDatabaseStructure {
		return this
				.setRange(range, this
						.getRange(null, range, existsAlmostCertainly)
						.flatMap(entriesReplacer), false)
				.then();
						.flatMap(entriesReplacer)
				);
		} else {
			return this
					.putMulti(this
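The interface now exposes getUpdateMode() alongside update(), and setRange() gains a fire-and-forget overload returning Mono<Void>. Below is a hedged sketch (not from this commit; the helper class and method are illustrative) of how a caller might branch on the reported update mode, using the project's own LLDictionary, UpdateMode and LLDictionaryResultType types:

import io.netty.buffer.ByteBuf;
import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.LLDictionaryResultType;
import it.cavallium.dbengine.database.UpdateMode;
import reactor.core.publisher.Mono;

class PutOrUpdateSketch {

	// Takes ownership of key and value, matching the convention in this commit.
	static Mono<Void> putOrUpdate(LLDictionary dictionary, ByteBuf key, ByteBuf value) {
		return dictionary
				.getUpdateMode()
				.flatMap(updateMode -> {
					if (updateMode == UpdateMode.DISALLOW) {
						// Updates are disabled: fall back to a plain put
						return dictionary
								.put(key.retain(), value.retain(), LLDictionaryResultType.VOID)
								.then();
					} else {
						// Atomic update: replace whatever is stored, releasing the old value
						return dictionary
								.update(key.retain(), old -> {
									if (old != null) {
										old.release();
									}
									return value.retain();
								}, false)
								.then();
					}
				})
				.doFinally(s -> {
					key.release();
					value.release();
				});
	}
}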
@@ -32,8 +32,8 @@ import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.rocksdb.RocksDB;

import static io.netty.buffer.Unpooled.EMPTY_BUFFER;
import static io.netty.buffer.Unpooled.wrappedBuffer;
import static it.cavallium.dbengine.database.collections.DatabaseMapDictionaryDeep.EMPTY_BYTES;

@SuppressWarnings("unused")
public class LLUtils {
@@ -217,81 +217,82 @@ public class LLUtils {
	@Nullable
	public static ByteBuf readNullableDirectNioBuffer(ByteBufAllocator alloc, ToIntFunction<ByteBuffer> reader) {
		ByteBuf buffer = alloc.directBuffer();
		try {
		ByteBuf directBuffer = null;
		ByteBuffer nioBuffer;
		int size;
		Boolean mustBeCopied = null;
		do {
			if (mustBeCopied == null || !mustBeCopied) {
				nioBuffer = LLUtils.toDirectFast(buffer.retain());
				if (nioBuffer != null) {
					nioBuffer.limit(nioBuffer.capacity());
			ByteBuf directBuffer = null;
			ByteBuffer nioBuffer;
			int size;
			Boolean mustBeCopied = null;
			do {
				if (mustBeCopied == null || !mustBeCopied) {
					nioBuffer = LLUtils.toDirectFast(buffer);
					if (nioBuffer != null) {
						nioBuffer.limit(nioBuffer.capacity());
					}
				} else {
					nioBuffer = null;
				}
				if ((mustBeCopied != null && mustBeCopied) || nioBuffer == null) {
					directBuffer = LLUtils.toDirectCopy(buffer.retain());
					nioBuffer = directBuffer.nioBuffer(0, directBuffer.capacity());
					mustBeCopied = true;
				} else {
					mustBeCopied = false;
				}
				try {
					assert nioBuffer.isDirect();
					size = reader.applyAsInt(nioBuffer);
					if (size != RocksDB.NOT_FOUND) {
						if (mustBeCopied) {
							buffer.writerIndex(0).writeBytes(nioBuffer);
						}
					} else {
						if (size == nioBuffer.limit()) {
							buffer.setIndex(0, size);
							return buffer;
						} else {
							assert size > nioBuffer.limit();
							assert nioBuffer.limit() > 0;
							buffer.capacity(size);
						}
					}
				} finally {
					if (nioBuffer != null) {
						nioBuffer = null;
					}
			if ((mustBeCopied != null && mustBeCopied) || nioBuffer == null) {
				directBuffer = LLUtils.toDirectCopy(buffer.retain());
				nioBuffer = directBuffer.nioBuffer(0, directBuffer.capacity());
				mustBeCopied = true;
			} else {
				mustBeCopied = false;
					if(directBuffer != null) {
						directBuffer.release();
						directBuffer = null;
					}
			try {
				assert nioBuffer.isDirect();
				size = reader.applyAsInt(nioBuffer);
				if (size != RocksDB.NOT_FOUND) {
					if (mustBeCopied) {
						buffer.writerIndex(0).writeBytes(nioBuffer);
					}
					if (size == nioBuffer.limit()) {
						buffer.setIndex(0, size);
						return buffer;
					} else {
						assert size > nioBuffer.limit();
						assert nioBuffer.limit() > 0;
						buffer.capacity(size);
					}
				}
			} finally {
				if (nioBuffer != null) {
					nioBuffer = null;
				}
				if(directBuffer != null) {
					directBuffer.release();
					directBuffer = null;
				}
			}
		} while (size != RocksDB.NOT_FOUND);
		} catch (Throwable t) {
			buffer.release();
			throw t;
		}
	}
		} while (size != RocksDB.NOT_FOUND);
		return null;
	}
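readNullableDirectNioBuffer drives a reader that fills a direct ByteBuffer and returns the full value size, growing the buffer and retrying until the value fits; RocksDB.NOT_FOUND ends the loop with null. A hedged sketch (not part of the commit) of wiring such a reader to RocksDB's ByteBuffer-based get — the overload and its exact signature are an assumption, and "db", "cfh" and "readOpts" are assumed to exist:

import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
import it.cavallium.dbengine.database.LLUtils;
import java.nio.ByteBuffer;
import org.rocksdb.ColumnFamilyHandle;
import org.rocksdb.ReadOptions;
import org.rocksdb.RocksDB;
import org.rocksdb.RocksDBException;

class ReadNullableSketch {

	// Returns the value for "key", or null if absent.
	static ByteBuf readValue(ByteBufAllocator alloc, RocksDB db, ColumnFamilyHandle cfh,
			ReadOptions readOpts, ByteBuffer key) {
		return LLUtils.readNullableDirectNioBuffer(alloc, valueBuffer -> {
			try {
				// Fills valueBuffer and returns the full value size (which may exceed
				// the buffer's limit, triggering a grow-and-retry), or RocksDB.NOT_FOUND.
				return db.get(cfh, readOpts, key.rewind(), valueBuffer);
			} catch (RocksDBException ex) {
				throw new IllegalStateException(ex);
			}
		});
	}
}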

	@Nullable
	public static ByteBuffer toDirectFast(ByteBuf buffer) {
		try {
			ByteBuffer result = buffer.nioBuffer(0, buffer.capacity());
			if (result.isDirect()) {
				result.limit(buffer.writerIndex());
		ByteBuffer result = buffer.nioBuffer(0, buffer.capacity());
		if (result.isDirect()) {
			result.limit(buffer.writerIndex());

				assert result.isDirect();
				assert result.capacity() == buffer.capacity();
				assert buffer.readerIndex() == result.position();
				assert result.limit() - result.position() == buffer.readableBytes();
			assert result.isDirect();
			assert result.capacity() == buffer.capacity();
			assert buffer.readerIndex() == result.position();
			assert result.limit() - result.position() == buffer.readableBytes();

				return result;
			} else {
				return null;
			}
		} finally {
			buffer.release();
			return result;
		} else {
			return null;
		}
	}

	public static ByteBuffer toDirect(ByteBuf buffer) {
		ByteBuffer result = toDirectFast(buffer);
		if (result == null) {
			throw new IllegalArgumentException("The supplied ByteBuf is not direct "
					+ "(if it's a CompositeByteBuf it must be consolidated before)");
		}
		assert result.isDirect();
		return result;
	}

	public static ByteBuf toDirectCopy(ByteBuf buffer) {
		try {
			ByteBuf directCopyBuf = buffer.alloc().directBuffer(buffer.capacity(), buffer.maxCapacity());
@@ -324,59 +325,99 @@ public class LLUtils {
	}

	public static ByteBuf directCompositeBuffer(ByteBufAllocator alloc, ByteBuf buffer) {
		return wrappedBuffer(buffer);
		try {
			ByteBuf result = alloc.directBuffer(buffer.readableBytes());
			try {
				result.writeBytes(buffer, buffer.readerIndex(), buffer.readableBytes());
				return result.retain();
			} finally {
				result.release();
			}
		} finally {
			buffer.release();
		}
	}

	public static ByteBuf directCompositeBuffer(ByteBufAllocator alloc, ByteBuf buffer1, ByteBuf buffer2) {
		assert buffer1.isDirect();
		assert buffer1.nioBuffer().isDirect();
		assert buffer2.isDirect();
		assert buffer2.nioBuffer().isDirect();
		if (buffer1.readableBytes() == 0) {
			return wrappedBuffer(buffer2);
		} else if (buffer2.readableBytes() == 0) {
			return wrappedBuffer(buffer1);
		try {
			assert buffer1.isDirect();
			assert buffer1.nioBuffer().isDirect();
			assert buffer2.isDirect();
			assert buffer2.nioBuffer().isDirect();
			if (buffer1.readableBytes() == 0) {
				return directCompositeBuffer(alloc, buffer2.retain());
			} else if (buffer2.readableBytes() == 0) {
				return directCompositeBuffer(alloc, buffer1.retain());
			}
			ByteBuf result = alloc.directBuffer(buffer1.readableBytes() + buffer2.readableBytes());
			try {
				result.writeBytes(buffer1, buffer1.readerIndex(), buffer1.readableBytes());
				result.writeBytes(buffer2, buffer2.readerIndex(), buffer2.readableBytes());
				return result.retain();
			} finally {
				result.release();
			}
		} finally {
			buffer1.release();
			buffer2.release();
		}
		CompositeByteBuf compositeBuffer = alloc.compositeDirectBuffer(2);
		compositeBuffer.addComponent(true, buffer1);
		compositeBuffer.addComponent(true, buffer2);
		compositeBuffer.consolidate();
		assert compositeBuffer.isDirect();
		assert compositeBuffer.nioBuffer().isDirect();
		return compositeBuffer;
	}

	public static ByteBuf directCompositeBuffer(ByteBufAllocator alloc, ByteBuf buffer1, ByteBuf buffer2, ByteBuf buffer3) {
		if (buffer1.readableBytes() == 0) {
			return directCompositeBuffer(alloc, buffer2, buffer3);
		} else if (buffer2.readableBytes() == 0) {
			return directCompositeBuffer(alloc, buffer1, buffer3);
		} else if (buffer3.readableBytes() == 0) {
			return directCompositeBuffer(alloc, buffer1, buffer2);
		try {
			if (buffer1.readableBytes() == 0) {
				return directCompositeBuffer(alloc, buffer2.retain(), buffer3.retain());
			} else if (buffer2.readableBytes() == 0) {
				return directCompositeBuffer(alloc, buffer1.retain(), buffer3.retain());
			} else if (buffer3.readableBytes() == 0) {
				return directCompositeBuffer(alloc, buffer1.retain(), buffer2.retain());
			}
			ByteBuf result = alloc.directBuffer(buffer1.readableBytes() + buffer2.readableBytes() + buffer3.readableBytes());
			try {
				result.writeBytes(buffer1, buffer1.readerIndex(), buffer1.readableBytes());
				result.writeBytes(buffer2, buffer2.readerIndex(), buffer2.readableBytes());
				result.writeBytes(buffer3, buffer3.readerIndex(), buffer3.readableBytes());
				return result.retain();
			} finally {
				result.release();
			}
		} finally {
			buffer1.release();
			buffer2.release();
			buffer3.release();
		}
		CompositeByteBuf compositeBuffer = alloc.compositeDirectBuffer(3);
		compositeBuffer.addComponent(true, buffer1);
		compositeBuffer.addComponent(true, buffer2);
		compositeBuffer.addComponent(true, buffer3);
		compositeBuffer.consolidate();
		return compositeBuffer;
	}

	public static ByteBuf directCompositeBuffer(ByteBufAllocator alloc, ByteBuf... buffers) {
		switch (buffers.length) {
			case 0:
				return EMPTY_BUFFER;
			case 1:
				return directCompositeBuffer(alloc, buffers[0]);
			case 2:
				return directCompositeBuffer(alloc, buffers[0], buffers[1]);
			case 3:
				return directCompositeBuffer(alloc, buffers[0], buffers[1], buffers[2]);
			default:
				CompositeByteBuf compositeBuffer = alloc.compositeDirectBuffer(buffers.length);
				compositeBuffer.addComponents(true, buffers);
				compositeBuffer.consolidate();
				return compositeBuffer;
		try {
			switch (buffers.length) {
				case 0:
					return EMPTY_BYTES;
				case 1:
					return directCompositeBuffer(alloc, buffers[0].retain().retain());
				case 2:
					return directCompositeBuffer(alloc, buffers[0].retain(), buffers[1].retain());
				case 3:
					return directCompositeBuffer(alloc, buffers[0].retain(), buffers[1].retain(), buffers[2].retain());
				default:
					int readableTotal = 0;
					for (ByteBuf buffer : buffers) {
						readableTotal += buffer.readableBytes();
					}
					ByteBuf result = alloc.directBuffer(readableTotal);
					try {
						for (ByteBuf buffer : buffers) {
							result.writeBytes(buffer, buffer.readerIndex(), buffer.readableBytes());
						}
						return result.retain();
					} finally {
						result.release();
					}
			}
		} finally {
			for (ByteBuf buffer : buffers) {
				buffer.release();
			}
		}
	}
}
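The rewritten LLUtils helpers all follow one ownership convention: a method that receives ByteBufs takes ownership of them (releasing them in a finally block, even on failure) and returns its result with an extra retain, so a caller passes buf.retain() whenever it wants to keep using an argument afterwards. A minimal self-contained sketch of the same convention, using only plain Netty types (the helper name is made up for illustration):

import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;

class OwnershipSketch {

	// Takes ownership of "input": it is always released here, even on failure.
	// Returns a buffer the caller owns (refCnt 1 from the caller's point of view).
	static ByteBuf copyToDirect(ByteBufAllocator alloc, ByteBuf input) {
		try {
			ByteBuf result = alloc.directBuffer(input.readableBytes());
			try {
				result.writeBytes(input, input.readerIndex(), input.readableBytes());
				// retain() before the finally-release keeps the result alive for the caller
				return result.retain();
			} finally {
				result.release();
			}
		} finally {
			input.release();
		}
	}

	public static void main(String[] args) {
		ByteBufAllocator alloc = ByteBufAllocator.DEFAULT;
		ByteBuf src = alloc.buffer().writeBytes(new byte[] {1, 2, 3});
		// Pass src.retain() to keep using src afterwards; pass src to hand it off.
		ByteBuf copy = copyToDirect(alloc, src.retain());
		System.out.println(src.readableBytes() + " " + copy.readableBytes());
		src.release();
		copy.release();
	}
}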
@@ -1,16 +1,19 @@
package it.cavallium.dbengine.database.collections;

import io.netty.buffer.ByteBuf;
import io.netty.util.ReferenceCounted;
import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.LLDictionaryResultType;
import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.database.UpdateMode;
import it.cavallium.dbengine.database.serialization.Serializer;
import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.function.Function;
import org.jetbrains.annotations.Nullable;
import org.rocksdb.RocksDBException;
@@ -48,8 +51,8 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
	}

	private ByteBuf toKey(ByteBuf suffixKey) {
		assert suffixKeyConsistency(suffixKey.readableBytes());
		try {
			assert suffixKeyConsistency(suffixKey.readableBytes());
			return LLUtils.directCompositeBuffer(dictionary.getAllocator(), keyPrefix.retain(), suffixKey.retain());
		} finally {
			suffixKey.release();

@@ -63,32 +66,31 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
				.collectMap(
						entry -> deserializeSuffix(stripPrefix(entry.getKey())),
						entry -> deserialize(entry.getValue()),
						HashMap::new);
						HashMap::new)
				.filter(map -> !map.isEmpty());
	}

	@Override
	public Mono<Map<T, U>> setAndGetPrevious(Map<T, U> value) {
		return dictionary
				.setRange(range.retain(),
						Flux
								.fromIterable(value.entrySet())
								.map(entry -> Map.entry(serializeSuffix(entry.getKey()), serialize(entry.getValue()))),
						true
				)
				.collectMap(
						entry -> deserializeSuffix(stripPrefix(entry.getKey())),
						entry -> deserialize(entry.getValue()),
						HashMap::new);
		return Mono
				.usingWhen(
						Mono.just(true),
						b -> get(null, false),
						b -> dictionary
								.setRange(range.retain(),
										Flux
												.fromIterable(value.entrySet())
												.map(entry -> Map
														.entry(this.toKey(serializeSuffix(entry.getKey())), serialize(entry.getValue()))
												)
								)
				);
	}

	@Override
	public Mono<Map<T, U>> clearAndGetPrevious() {
		return dictionary
				.setRange(range.retain(), Flux.empty(), true)
				.collectMap(
						entry -> deserializeSuffix(stripPrefix(entry.getKey())),
						entry -> deserialize(entry.getValue()),
						HashMap::new);
		return this
				.setAndGetPrevious(Map.of());
	}

	@Override

@@ -103,28 +105,21 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,

	@Override
	public Mono<DatabaseStageEntry<U>> at(@Nullable CompositeSnapshot snapshot, T keySuffix) {
		ByteBuf keySuffixBuf = serializeSuffix(keySuffix);
		ByteBuf keyBuf = toKey(keySuffixBuf.retain());
		return Mono
				.fromSupplier(() -> new DatabaseSingle<>(dictionary, keyBuf.retain(), Serializer.noop()))
				.<DatabaseStageEntry<U>>map(entry -> new DatabaseSingleMapped<>(entry, valueSerializer))
				.doFinally(s -> {
					keyBuf.release();
					keySuffixBuf.release();
				});
				.fromSupplier(() -> new DatabaseSingle<>(dictionary, toKey(serializeSuffix(keySuffix)), Serializer.noop()))
				.<DatabaseStageEntry<U>>map(entry -> new DatabaseSingleMapped<>(entry, valueSerializer));
	}

	@Override
	public Mono<U> getValue(@Nullable CompositeSnapshot snapshot, T keySuffix, boolean existsAlmostCertainly) {
		ByteBuf keySuffixBuf = serializeSuffix(keySuffix);
		ByteBuf keyBuf = toKey(keySuffixBuf.retain());
		return dictionary
				.get(resolveSnapshot(snapshot), keyBuf.retain(), existsAlmostCertainly)
				.map(this::deserialize)
				.doFinally(s -> {
					keyBuf.release();
					keySuffixBuf.release();
				});
		return Mono
				.using(
						() -> toKey(serializeSuffix(keySuffix)),
						keyBuf -> dictionary
								.get(resolveSnapshot(snapshot), keyBuf.retain(), existsAlmostCertainly)
								.map(this::deserialize),
						ReferenceCounted::release
				);
	}
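The recurring fix in this file replaces manual keyBuf.release() calls inside doFinally with Mono.using, which ties a reference-counted resource to the subscription lifecycle and also releases it on error and cancellation. A minimal standalone sketch of the operator's shape:

import io.netty.buffer.ByteBufAllocator;
import io.netty.util.ReferenceCounted;
import java.nio.charset.StandardCharsets;
import reactor.core.publisher.Mono;

class MonoUsingSketch {
	public static void main(String[] args) {
		Mono<String> read = Mono.using(
				// Acquire: runs once per subscription
				() -> ByteBufAllocator.DEFAULT.buffer().writeBytes("hello".getBytes(StandardCharsets.UTF_8)),
				// Use: the buffer stays valid for the whole inner pipeline
				buf -> Mono.fromSupplier(() -> buf.toString(StandardCharsets.UTF_8)),
				// Cleanup: runs on complete, error and cancel alike
				ReferenceCounted::release
		);
		System.out.println(read.block());
	}
}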

	@Override

@@ -139,29 +134,34 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
		}).then();
	}

	@Override
	public Mono<UpdateMode> getUpdateMode() {
		return dictionary.getUpdateMode();
	}

	@Override
	public Mono<Boolean> updateValue(T keySuffix,
			boolean existsAlmostCertainly,
			Function<@Nullable U, @Nullable U> updater) {
		ByteBuf keySuffixBuf = serializeSuffix(keySuffix);
		ByteBuf keyBuf = toKey(keySuffixBuf.retain());
		return dictionary.update(keyBuf.retain(), oldSerialized -> {
			try {
				var result = updater.apply(oldSerialized == null ? null : this.deserialize(oldSerialized.retain()));
				if (result == null) {
					return null;
				} else {
					return this.serialize(result);
				}
			} finally {
				if (oldSerialized != null) {
					oldSerialized.release();
				}
			}
		}, existsAlmostCertainly).doFinally(s -> {
			keyBuf.release();
			keySuffixBuf.release();
		});
		return Mono
				.using(
						() -> toKey(serializeSuffix(keySuffix)),
						keyBuf -> dictionary.update(keyBuf.retain(), oldSerialized -> {
							try {
								var result = updater.apply(oldSerialized == null ? null : this.deserialize(oldSerialized.retain()));
								if (result == null) {
									return null;
								} else {
									return this.serialize(result);
								}
							} finally {
								if (oldSerialized != null) {
									oldSerialized.release();
								}
							}
						}, existsAlmostCertainly),
						ReferenceCounted::release
				);
	}

	@Override

@@ -180,13 +180,15 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
	}

	@Override
	public Mono<Boolean> putValueAndGetStatus(T keySuffix, U value) {
	public Mono<Boolean> putValueAndGetChanged(T keySuffix, U value) {
		ByteBuf keySuffixBuf = serializeSuffix(keySuffix);
		ByteBuf keyBuf = toKey(keySuffixBuf.retain());
		ByteBuf valueBuf = serialize(value);
		return dictionary
				.put(keyBuf.retain(), valueBuf.retain(), LLDictionaryResultType.PREVIOUS_VALUE_EXISTENCE)
				.map(LLUtils::responseToBoolean)
				.put(keyBuf.retain(), valueBuf.retain(), LLDictionaryResultType.PREVIOUS_VALUE)
				.map(this::deserialize)
				.map(oldValue -> !Objects.equals(oldValue, value))
				.defaultIfEmpty(value != null)
				.doFinally(s -> {
					keyBuf.release();
					keySuffixBuf.release();

@@ -196,12 +198,12 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,

	@Override
	public Mono<Void> remove(T keySuffix) {
		ByteBuf keySuffixBuf = serializeSuffix(keySuffix);
		ByteBuf keyBuf = toKey(keySuffixBuf.retain());
		return dictionary.remove(keyBuf.retain(), LLDictionaryResultType.VOID).doFinally(s -> {
			keyBuf.release();
			keySuffixBuf.release();
		}).then();
		return Mono
				.using(
						() -> toKey(serializeSuffix(keySuffix)),
						keyBuf -> dictionary.remove(keyBuf.retain(), LLDictionaryResultType.VOID).then(),
						ReferenceCounted::release
				);
	}

	@Override

@@ -241,15 +243,38 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
					keySuffixBuf.release();
				}
			})), existsAlmostCertainly)
			.flatMap(entry -> Mono.fromCallable(() -> Map.entry(deserializeSuffix(stripPrefix(entry.getKey())), deserialize(entry.getValue()))));
			.flatMap(entry -> Mono
					.fromCallable(() -> Map.entry(deserializeSuffix(stripPrefix(entry.getKey())), deserialize(entry.getValue())))
			);
	}

	private Entry<ByteBuf, ByteBuf> serializeEntry(T key, U value) {
		ByteBuf serializedKey = toKey(serializeSuffix(key));
		try {
			ByteBuf serializedValue = serialize(value);
			try {
				return Map.entry(serializedKey.retain(), serializedValue.retain());
			} finally {
				serializedValue.release();
			}
		} finally {
			serializedKey.release();
		}
	}

	@Override
	public Mono<Void> putMulti(Flux<Entry<T, U>> entries) {
		var serializedEntries = entries
				.flatMap(entry -> Mono
						.fromCallable(() -> serializeEntry(entry.getKey(), entry.getValue()))
				).doOnDiscard(Entry.class, entry -> {
					//noinspection unchecked
					var castedEntry = (Entry<ByteBuf, ByteBuf>) entry;
					castedEntry.getKey().release();
					castedEntry.getValue().release();
				});
		return dictionary
				.putMulti(entries.flatMap(entry -> Mono.fromCallable(() -> Map.entry(toKey(serializeSuffix(entry.getKey())),
						serialize(entry.getValue())
				))), false)
				.putMulti(serializedEntries, false)
				.then();
	}
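putMulti now routes serialized entries through doOnDiscard, so buffers that the pipeline drops without delivering them downstream (filtered or cancelled items, for instance) still get released. A standalone sketch of the discard hook; note it protects the operators upstream of where it is attached:

import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
import io.netty.util.ReferenceCounted;
import reactor.core.publisher.Flux;

class DoOnDiscardSketch {
	public static void main(String[] args) {
		Flux<Integer> values = Flux
				.range(0, 6)
				.map(i -> ByteBufAllocator.DEFAULT.buffer().writeInt(i))
				// filter drops odd buffers; without a discard hook they would leak
				.filter(buf -> buf.getInt(0) % 2 == 0)
				// releases every ByteBuf discarded by the operators above
				.doOnDiscard(ByteBuf.class, ReferenceCounted::release)
				.map(buf -> {
					try {
						return buf.getInt(0);
					} finally {
						buf.release();
					}
				});
		System.out.println(values.collectList().block());
	}
}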
@@ -257,14 +282,18 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
	public Flux<Entry<T, DatabaseStageEntry<U>>> getAllStages(@Nullable CompositeSnapshot snapshot) {
		return dictionary
				.getRangeKeys(resolveSnapshot(snapshot), range.retain())
				.map(key -> Map.entry(deserializeSuffix(stripPrefix(key)),
						new DatabaseSingleMapped<>(
								new DatabaseSingle<>(dictionary,
										toKey(stripPrefix(key)),
										Serializer.noop()),
								valueSerializer
						)
				));
				.map(key -> {
					try {
						return Map.entry(deserializeSuffix(stripPrefix(key.retain())),
								new DatabaseSingleMapped<>(new DatabaseSingle<>(dictionary,
										toKey(stripPrefix(key.retain())),
										Serializer.noop()
								), valueSerializer)
						);
					} finally {
						key.release();
					}
				});
	}

	@Override

@@ -274,16 +303,28 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
				.map(serializedEntry -> Map.entry(
						deserializeSuffix(stripPrefix(serializedEntry.getKey())),
						valueSerializer.deserialize(serializedEntry.getValue())
				));
				))
				.doOnDiscard(Entry.class, entry -> {
					//noinspection unchecked
					var castedEntry = (Entry<ByteBuf, ByteBuf>) entry;
					castedEntry.getKey().release();
					castedEntry.getValue().release();
				});
	}

	@Override
	public Flux<Entry<T, U>> setAllValuesAndGetPrevious(Flux<Entry<T, U>> entries) {
		return dictionary
				.setRange(range.retain(),
						entries.map(entry ->
								Map.entry(toKey(serializeSuffix(entry.getKey())), serialize(entry.getValue()))), true)
				.map(entry -> Map.entry(deserializeSuffix(stripPrefix(entry.getKey())), deserialize(entry.getValue())));
		return Flux
				.usingWhen(
						Mono.just(true),
						b -> getAllValues(null),
						b -> dictionary
								.setRange(range.retain(),
										entries.map(entry ->
												Map.entry(toKey(serializeSuffix(entry.getKey())), serialize(entry.getValue()))
										)
								)
				);
	}

	@Override

@@ -297,8 +338,7 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
					.then();
		} else {
			return dictionary
					.setRange(range.retain(), Flux.empty(), false)
					.then();
					.setRange(range.retain(), Flux.empty());
		}
	}

@@ -315,9 +355,4 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
	private ByteBuf serialize(U bytes) {
		return valueSerializer.serialize(bytes);
	}

	@Override
	public void release() {
		super.release();
	}
}
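setAndGetPrevious and setAllValuesAndGetPrevious are reshaped around usingWhen: the previous contents are emitted first, and the write of the new range runs as the asynchronous cleanup step once that read terminates. A standalone sketch of that shape, with a plain list standing in for the dictionary:

import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

class UsingWhenSketch {
	public static void main(String[] args) {
		List<String> store = new CopyOnWriteArrayList<>(List.of("a", "b"));
		// Emits the old contents first; the cleanup step then replaces them.
		Flux<String> previous = Flux.usingWhen(
				Mono.just(true),                      // dummy resource, used only for scoping
				b -> Flux.fromIterable(List.copyOf(store)),
				b -> Mono.fromRunnable(() -> {        // async "cleanup": the new write
					store.clear();
					store.add("c");
				})
		);
		System.out.println(previous.collectList().block()); // [a, b]
		System.out.println(store);                          // [c]
	}
}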
@@ -2,23 +2,24 @@ package it.cavallium.dbengine.database.collections;

import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
import io.netty.buffer.ByteBufUtil;
import io.netty.buffer.PooledByteBufAllocator;
import io.netty.util.ReferenceCounted;
import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.LLDictionaryResultType;
import it.cavallium.dbengine.database.LLRange;
import it.cavallium.dbengine.database.LLSnapshot;
import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.database.UpdateMode;
import it.cavallium.dbengine.database.disk.LLLocalDictionary;
import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
import java.util.Arrays;
import java.util.Collection;
import java.util.Map;
import java.util.Map.Entry;
import lombok.Value;
import org.jetbrains.annotations.Nullable;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.util.function.Tuples;

import static io.netty.buffer.Unpooled.*;

// todo: implement optimized methods

@@ -34,6 +35,7 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
	protected final int keySuffixLength;
	protected final int keyExtLength;
	protected final LLRange range;
	private volatile boolean released;

	private static ByteBuf incrementPrefix(ByteBufAllocator alloc, ByteBuf originalKey, int prefixLength) {
		try {
@@ -115,9 +117,13 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
			assert zeroSuffixAndExt.nioBuffer().isDirect();
			zeroSuffixAndExt.writeZero(suffixLength + extLength);
			ByteBuf result = LLUtils.directCompositeBuffer(alloc, prefixKey.retain(), zeroSuffixAndExt.retain());
			assert result.isDirect();
			assert result.nioBuffer().isDirect();
			return result;
			try {
				assert result.isDirect();
				assert result.nioBuffer().isDirect();
				return result.retain();
			} finally {
				result.release();
			}
		} finally {
			zeroSuffixAndExt.release();
		}

@@ -169,13 +175,26 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
			int prefixLength,
			int suffixLength,
			int extLength) {
		assert prefixKey.readableBytes() == prefixLength;
		assert suffixKey.readableBytes() == suffixLength;
		assert suffixLength > 0;
		assert extLength >= 0;
		var result = LLUtils.directCompositeBuffer(alloc, prefixKey, suffixKey, alloc.buffer(extLength, extLength).writeZero(extLength));
		assert result.readableBytes() == prefixLength + suffixLength + extLength;
		return result;
		try {
			assert prefixKey.readableBytes() == prefixLength;
			assert suffixKey.readableBytes() == suffixLength;
			assert suffixLength > 0;
			assert extLength >= 0;
			ByteBuf result = LLUtils.directCompositeBuffer(alloc,
					prefixKey.retain(),
					suffixKey.retain(),
					alloc.directBuffer(extLength, extLength).writeZero(extLength)
			);
			try {
				assert result.readableBytes() == prefixLength + suffixLength + extLength;
				return result.retain();
			} finally {
				result.release();
			}
		} finally {
			prefixKey.release();
			suffixKey.release();
		}
	}

	/**

@@ -213,7 +232,9 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
		this.alloc = dictionary.getAllocator();
		this.subStageGetter = subStageGetter;
		this.keySuffixSerializer = keySuffixSerializer;
		this.keyPrefix = wrappedUnmodifiableBuffer(prefixKey).retain();
		assert prefixKey.refCnt() > 0;
		this.keyPrefix = prefixKey.retain();
		assert keyPrefix.refCnt() > 0;
		this.keyPrefixLength = keyPrefix.readableBytes();
		this.keySuffixLength = keySuffixSerializer.getSerializedBinaryLength();
		this.keyExtLength = keyExtLength;

@@ -221,31 +242,35 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
			throw new IllegalArgumentException("KeyPrefix must be a direct buffer");
		}
		assert keyPrefix.isDirect();
		ByteBuf firstKey = wrappedUnmodifiableBuffer(firstRangeKey(alloc,
		ByteBuf firstKey = firstRangeKey(alloc,
				keyPrefix.retain(),
				keyPrefixLength,
				keySuffixLength,
				keyExtLength
		));
		ByteBuf nextRangeKey = wrappedUnmodifiableBuffer(nextRangeKey(alloc,
				keyPrefix.retain(),
				keyPrefixLength,
				keySuffixLength,
				keyExtLength
		));
		);
		try {
			assert keyPrefixLength == 0 || !LLUtils.equals(firstKey, nextRangeKey);
			assert firstKey.isDirect();
			assert nextRangeKey.isDirect();
			assert firstKey.nioBuffer().isDirect();
			assert nextRangeKey.nioBuffer().isDirect();
			this.range = keyPrefixLength == 0 ? LLRange.all() : LLRange.of(firstKey.retain(), nextRangeKey.retain());
			assert range == null || !range.hasMin() || range.getMin().isDirect();
			assert range == null || !range.hasMax() || range.getMax().isDirect();
			assert subStageKeysConsistency(keyPrefixLength + keySuffixLength + keyExtLength);
			ByteBuf nextRangeKey = nextRangeKey(alloc,
					keyPrefix.retain(),
					keyPrefixLength,
					keySuffixLength,
					keyExtLength
			);
			try {
				assert keyPrefix.refCnt() > 0;
				assert keyPrefixLength == 0 || !LLUtils.equals(firstKey, nextRangeKey);
				assert firstKey.isDirect();
				assert nextRangeKey.isDirect();
				assert firstKey.nioBuffer().isDirect();
				assert nextRangeKey.nioBuffer().isDirect();
				this.range = keyPrefixLength == 0 ? LLRange.all() : LLRange.of(firstKey.retain(), nextRangeKey.retain());
				assert range == null || !range.hasMin() || range.getMin().isDirect();
				assert range == null || !range.hasMax() || range.getMax().isDirect();
				assert subStageKeysConsistency(keyPrefixLength + keySuffixLength + keyExtLength);
			} finally {
				nextRangeKey.release();
			}
		} finally {
			firstKey.release();
			nextRangeKey.release();
		}
	} finally {
		prefixKey.release();

@@ -271,7 +296,11 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
	 * Keep only suffix and ext
	 */
	protected ByteBuf stripPrefix(ByteBuf key) {
		return key.slice(this.keyPrefixLength, key.readableBytes() - this.keyPrefixLength);
		try {
			return key.retainedSlice(this.keyPrefixLength, key.readableBytes() - this.keyPrefixLength);
		} finally {
			key.release();
		}
	}

	/**

@@ -292,8 +321,13 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
		try {
			assert suffixKey.readableBytes() == keySuffixLength;
			ByteBuf result = LLUtils.directCompositeBuffer(alloc, keyPrefix.retain(), suffixKey.retain());
			assert result.readableBytes() == keyPrefixLength + keySuffixLength;
			return result;
			assert keyPrefix.refCnt() > 0;
			try {
				assert result.readableBytes() == keyPrefixLength + keySuffixLength;
				return result.retain();
			} finally {
				result.release();
			}
		} finally {
			suffixKey.release();
		}

@@ -323,6 +357,7 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
				keySuffixLength,
				keyExtLength
		);
		assert keyPrefix.refCnt() > 0;
		return LLRange.of(first, end);
	} finally {
		keySuffix.release();
@@ -331,65 +366,129 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem

	@Override
	public Mono<Long> leavesCount(@Nullable CompositeSnapshot snapshot, boolean fast) {
		return dictionary.sizeRange(resolveSnapshot(snapshot), range.retain(), fast);
		return Mono.defer(() -> dictionary.sizeRange(resolveSnapshot(snapshot), range.retain(), fast));
	}

	@Override
	public Mono<Boolean> isEmpty(@Nullable CompositeSnapshot snapshot) {
		return dictionary.isRangeEmpty(resolveSnapshot(snapshot), range.retain());
		return Mono.defer(() -> dictionary.isRangeEmpty(resolveSnapshot(snapshot), range.retain()));
	}

	@Override
	public Mono<US> at(@Nullable CompositeSnapshot snapshot, T keySuffix) {
		ByteBuf keySuffixData = serializeSuffix(keySuffix);
		Flux<ByteBuf> keyFlux;
		if (LLLocalDictionary.DEBUG_PREFIXES_WHEN_ASSERTIONS_ARE_ENABLED && this.subStageGetter.needsDebuggingKeyFlux()) {
			keyFlux = this.dictionary.getRangeKeys(resolveSnapshot(snapshot), toExtRange(keySuffixData.retain()));
		} else {
			keyFlux = Flux.empty();
		}
		return this.subStageGetter
				.subStage(dictionary, snapshot, toKeyWithoutExt(keySuffixData.retain()), keyFlux)
				.doFinally(s -> keySuffixData.release());
		return Mono
				.using(
						() -> serializeSuffix(keySuffix),
						keySuffixData -> {
							Flux<ByteBuf> keyFlux = Flux
									.defer(() -> {
										if (LLLocalDictionary.DEBUG_PREFIXES_WHEN_ASSERTIONS_ARE_ENABLED
												&& this.subStageGetter.needsDebuggingKeyFlux()) {
											return Flux
													.using(
															() -> toExtRange(keySuffixData.retain()),
															extRangeBuf -> this.dictionary
																	.getRangeKeys(resolveSnapshot(snapshot), extRangeBuf.retain()),
															LLRange::release
													);
										} else {
											return Flux.empty();
										}
									});
							return Mono
									.using(
											() -> toKeyWithoutExt(keySuffixData.retain()),
											keyBuf -> this.subStageGetter
													.subStage(dictionary, snapshot, keyBuf.retain(), keyFlux),
											ReferenceCounted::release
									)
									.doOnDiscard(DatabaseStage.class, DatabaseStage::release);
						},
						ReferenceCounted::release
				)
				.doOnDiscard(DatabaseStage.class, DatabaseStage::release);
	}

	@Override
	public Mono<UpdateMode> getUpdateMode() {
		return dictionary.getUpdateMode();
	}

	@Value
	private static class GroupBuffers {
		ByteBuf groupKeyWithExt;
		ByteBuf groupKeyWithoutExt;
		ByteBuf groupSuffix;
	}

	@Override
	public Flux<Entry<T, US>> getAllStages(@Nullable CompositeSnapshot snapshot) {
		if (LLLocalDictionary.DEBUG_PREFIXES_WHEN_ASSERTIONS_ARE_ENABLED && this.subStageGetter.needsDebuggingKeyFlux()) {
			return dictionary
					.getRangeKeysGrouped(resolveSnapshot(snapshot), range.retain(), keyPrefixLength + keySuffixLength)
					.flatMapSequential(rangeKeys -> {
						assert this.subStageGetter.isMultiKey() || rangeKeys.size() == 1;
						ByteBuf groupKeyWithExt = rangeKeys.get(0).retain();
						ByteBuf groupKeyWithoutExt = removeExtFromFullKey(groupKeyWithExt.retain());
						ByteBuf groupSuffix = this.stripPrefix(groupKeyWithoutExt.retain());
						assert subStageKeysConsistency(groupKeyWithExt.readableBytes());
						return this.subStageGetter
								.subStage(dictionary,
										snapshot,
										groupKeyWithoutExt,
										Flux.fromIterable(rangeKeys)
								)
								.map(us -> Map.entry(this.deserializeSuffix(wrappedUnmodifiableBuffer(groupSuffix.retain())), us))
								.doFinally(s -> {
									groupSuffix.release();
									groupKeyWithoutExt.release();
									groupKeyWithExt.release();
								});
			return Flux
					.defer(() -> dictionary
							.getRangeKeysGrouped(resolveSnapshot(snapshot), range.retain(),
									keyPrefixLength + keySuffixLength)
					)
					.flatMapSequential(rangeKeys -> Flux
							.using(
									() -> {
										assert this.subStageGetter.isMultiKey() || rangeKeys.size() == 1;
										ByteBuf groupKeyWithExt = rangeKeys.get(0).retain();
										ByteBuf groupKeyWithoutExt = removeExtFromFullKey(groupKeyWithExt.retain());
										ByteBuf groupSuffix = this.stripPrefix(groupKeyWithoutExt.retain());
										return new GroupBuffers(groupKeyWithExt, groupKeyWithoutExt, groupSuffix);
									},
									buffers -> Mono
											.fromCallable(() -> {
												assert subStageKeysConsistency(buffers.groupKeyWithExt.readableBytes());
												return null;
											})
											.then(this.subStageGetter
													.subStage(dictionary,
															snapshot,
															buffers.groupKeyWithoutExt.retain(),
															Flux
																	.fromIterable(rangeKeys)
																	.map(ByteBuf::retain)
													)
													.map(us -> Map.entry(this.deserializeSuffix(buffers.groupSuffix.retain()), us))
											),
									buffers -> {
										buffers.groupSuffix.release();
										buffers.groupKeyWithoutExt.release();
										buffers.groupKeyWithExt.release();
									}
							)
							.doFinally(s -> {
								for (ByteBuf rangeKey : rangeKeys) {
									rangeKey.release();
								}
							})
					)
					.doOnDiscard(Collection.class, discardedCollection -> {
						//noinspection unchecked
						var rangeKeys = (Collection<ByteBuf>) discardedCollection;
						for (ByteBuf rangeKey : rangeKeys) {
							rangeKey.release();
						}
					});
		} else {
			return dictionary
					.getRangeKeyPrefixes(resolveSnapshot(snapshot), range, keyPrefixLength + keySuffixLength)
			return Flux
					.defer(() -> dictionary
							.getRangeKeyPrefixes(resolveSnapshot(snapshot), range.retain(),
									keyPrefixLength + keySuffixLength)
					)
					.flatMapSequential(groupKeyWithoutExt -> {
						ByteBuf groupSuffix = this.stripPrefix(groupKeyWithoutExt);
						ByteBuf groupSuffix = this.stripPrefix(groupKeyWithoutExt.retain());
						assert subStageKeysConsistency(groupKeyWithoutExt.readableBytes() + keyExtLength);
						return this.subStageGetter
								.subStage(dictionary,
										snapshot,
										groupKeyWithoutExt,
										groupKeyWithoutExt.retain(),
										Flux.empty()
								)
								.map(us -> Map.entry(this.deserializeSuffix(wrappedUnmodifiableBuffer(groupSuffix)), us));
								.map(us -> Map.entry(this.deserializeSuffix(groupSuffix.retain()), us))
								.doFinally(s -> groupSuffix.release());
					});
		}
	}

@@ -408,12 +507,22 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem

	@Override
	public Flux<Entry<T, U>> setAllValuesAndGetPrevious(Flux<Entry<T, U>> entries) {
		return getAllStages(null)
				.flatMapSequential(stage -> stage.getValue().get(null).map(val -> Map.entry(stage.getKey(), val)))
				.concatWith(clear().then(entries
						.flatMap(entry -> at(null, entry.getKey()).map(us -> Tuples.of(us, entry.getValue())))
						.flatMap(tuple -> tuple.getT1().set(tuple.getT2()))
						.then(Mono.empty())));
		return this
				.getAllValues(null)
				.concatWith(this
						.clear()
						.then(entries
								.flatMap(entry -> this
										.at(null, entry.getKey())
										.flatMap(us -> us
												.set(entry.getValue())
												.doFinally(s -> us.release())
										)
								)
								.doOnDiscard(DatabaseStage.class, DatabaseStage::release)
								.then(Mono.empty())
						)
				);
	}

	@Override

@@ -422,38 +531,47 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
			return dictionary
					.clear();
		} else if (range.isSingle()) {
			return dictionary
					.remove(range.getSingle().retain(), LLDictionaryResultType.VOID)
			return Mono
					.defer(() -> dictionary
							.remove(range.getSingle().retain(), LLDictionaryResultType.VOID)
					)
					.then();
		} else {
			return dictionary
					.setRange(range.retain(), Flux.empty(), false)
					.then();
			return Mono
					.defer(() -> dictionary
							.setRange(range.retain(), Flux.empty())
					);
		}
	}

	//todo: temporary wrapper. convert the whole class to buffers
	protected T deserializeSuffix(ByteBuf keySuffix) {
		assert suffixKeyConsistency(keySuffix.readableBytes());
		return keySuffixSerializer.deserialize(keySuffix);
		try {
			assert suffixKeyConsistency(keySuffix.readableBytes());
			var result = keySuffixSerializer.deserialize(keySuffix.retain());
			assert keyPrefix.refCnt() > 0;
			return result;
		} finally {
			keySuffix.release();
		}
	}

	//todo: temporary wrapper. convert the whole class to buffers
	protected ByteBuf serializeSuffix(T keySuffix) {
		ByteBuf suffixData = keySuffixSerializer.serialize(keySuffix);
		assert suffixKeyConsistency(suffixData.readableBytes());
		assert keyPrefix.refCnt() > 0;
		return suffixData;
	}

	@Override
	protected void finalize() throws Throwable {
		super.finalize();
		range.release();
	}

	@Override
	public void release() {
		this.range.release();
		this.keyPrefix.release();
		if (!released) {
			released = true;
			this.range.release();
			this.keyPrefix.release();
		} else {
			throw new IllegalStateException("Already released");
		}
	}
}
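release() is now guarded by a volatile released flag so that a double release fails fast instead of silently driving reference counts below zero. A minimal sketch of the same guard; note it is a plain check-then-act (like the commit's version), so a stricter variant could use an AtomicBoolean:

import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;

class GuardedReleaseSketch {
	private final ByteBuf resource = ByteBufAllocator.DEFAULT.buffer();
	private volatile boolean released;

	// Releasing twice throws instead of corrupting the reference count.
	public void release() {
		if (!released) {
			released = true;
			resource.release();
		} else {
			throw new IllegalStateException("Already released");
		}
	}

	public static void main(String[] args) {
		GuardedReleaseSketch s = new GuardedReleaseSketch();
		s.release();
		try {
			s.release();
		} catch (IllegalStateException expected) {
			System.out.println(expected.getMessage());
		}
	}
}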
@@ -7,6 +7,7 @@ import io.netty.buffer.ByteBufAllocator;
import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.database.UpdateMode;
import it.cavallium.dbengine.database.collections.Joiner.ValueGetter;
import it.cavallium.dbengine.database.collections.JoinerBlocking.ValueGetterBlocking;
import it.cavallium.dbengine.database.serialization.Serializer;

@@ -34,16 +35,21 @@ public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T
			Serializer<U, ByteBuf> valueSerializer,
			Function<T, TH> keySuffixHashFunction,
			SerializerFixedBinaryLength<TH, ByteBuf> keySuffixHashSerializer) {
		ValueWithHashSerializer<T, U> valueWithHashSerializer = new ValueWithHashSerializer<>(keySuffixSerializer,
				valueSerializer
		);
		this.alloc = dictionary.getAllocator();
		this.valueMapper = ValueMapper::new;
		this.subDictionary = DatabaseMapDictionary.tail(dictionary,
				prefixKey,
				keySuffixHashSerializer, valueWithHashSerializer
		);
		this.keySuffixHashFunction = keySuffixHashFunction;
		try {
			ValueWithHashSerializer<T, U> valueWithHashSerializer = new ValueWithHashSerializer<>(keySuffixSerializer,
					valueSerializer
			);
			this.alloc = dictionary.getAllocator();
			this.valueMapper = ValueMapper::new;
			this.subDictionary = DatabaseMapDictionary.tail(dictionary,
					prefixKey.retain(),
					keySuffixHashSerializer,
					valueWithHashSerializer
			);
			this.keySuffixHashFunction = keySuffixHashFunction;
		} finally {
			prefixKey.release();
		}
	}

	private class ValueWithHashSerializer<T, U> implements Serializer<Entry<T, U>, ByteBuf> {

@@ -72,9 +78,18 @@ public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T
		public @NotNull ByteBuf serialize(@NotNull Entry<T, U> deserialized) {
			ByteBuf keySuffix = keySuffixSerializer.serialize(deserialized.getKey());
			ByteBuf value = valueSerializer.serialize(deserialized.getValue());
			ByteBuf keySuffixLen = alloc.buffer(Integer.BYTES, Integer.BYTES);
			keySuffixLen.writeInt(keySuffix.readableBytes());
			return LLUtils.directCompositeBuffer(alloc, keySuffixLen, keySuffix, value);
			try {
				ByteBuf keySuffixLen = alloc.directBuffer(Integer.BYTES, Integer.BYTES);
				try {
					keySuffixLen.writeInt(keySuffix.readableBytes());
					return LLUtils.directCompositeBuffer(alloc, keySuffixLen.retain(), keySuffix.retain(), value.retain());
				} finally {
					keySuffixLen.release();
				}
			} finally {
				keySuffix.release();
				value.release();
			}
		}
	}
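The hashed dictionary stores each entry as [int key-suffix length][key suffix][value]. The matching read side is not shown in this hunk; the sketch below (hypothetical decoder, Strings standing in for the real serializers) shows how such a buffer can be split back with readSlice:

import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.Map.Entry;

class LengthPrefixedEntrySketch {

	// Encodes [length][key][value], mirroring ValueWithHashSerializer.serialize.
	static ByteBuf encode(ByteBufAllocator alloc, byte[] key, byte[] value) {
		return alloc.buffer()
				.writeInt(key.length)
				.writeBytes(key)
				.writeBytes(value);
	}

	// Hypothetical decoder: splits the buffer back into key and value.
	static Entry<String, String> decode(ByteBuf buf) {
		try {
			int keyLen = buf.readInt();
			String key = buf.readSlice(keyLen).toString(StandardCharsets.UTF_8);
			String value = buf.readSlice(buf.readableBytes()).toString(StandardCharsets.UTF_8);
			return Map.entry(key, value);
		} finally {
			buf.release();
		}
	}

	public static void main(String[] args) {
		ByteBuf encoded = encode(ByteBufAllocator.DEFAULT,
				"k".getBytes(StandardCharsets.UTF_8), "v".getBytes(StandardCharsets.UTF_8));
		System.out.println(decode(encoded));
	}
}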
@@ -226,6 +241,11 @@ public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T
		return subDictionary.putValue(keySuffixHashFunction.apply(key), Map.entry(key, value));
	}

	@Override
	public Mono<UpdateMode> getUpdateMode() {
		return subDictionary.getUpdateMode();
	}

	@Override
	public Mono<Boolean> updateValue(T key, boolean existsAlmostCertainly, Function<@Nullable U, @Nullable U> updater) {
		return subDictionary.updateValue(keySuffixHashFunction.apply(key), existsAlmostCertainly, old -> {

@@ -258,9 +278,9 @@ public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T
	}

	@Override
	public Mono<Boolean> putValueAndGetStatus(T key, U value) {
	public Mono<Boolean> putValueAndGetChanged(T key, U value) {
		return subDictionary
				.putValueAndGetStatus(keySuffixHashFunction.apply(key), Map.entry(key, value));
				.putValueAndGetChanged(keySuffixHashFunction.apply(key), Map.entry(key, value));
	}

	@Override
@@ -20,12 +20,16 @@ public class DatabaseSingle<U> implements DatabaseStageEntry<U> {
	private final Serializer<U, ByteBuf> serializer;

	public DatabaseSingle(LLDictionary dictionary, ByteBuf key, Serializer<U, ByteBuf> serializer) {
		this.dictionary = dictionary;
		if (!key.isDirect()) {
			throw new IllegalArgumentException("Key must be direct");
		try {
			this.dictionary = dictionary;
			if (!key.isDirect()) {
				throw new IllegalArgumentException("Key must be direct");
			}
			this.key = key.retain();
			this.serializer = serializer;
		} finally {
			key.release();
		}
		this.key = key;
		this.serializer = serializer;
	}

	private LLSnapshot resolveSnapshot(@Nullable CompositeSnapshot snapshot) {
@@ -25,13 +25,18 @@ public interface DatabaseStage<T> extends DatabaseStageWithEntry<T> {
	}

	default Mono<Void> set(T value) {
		return setAndGetChanged(value).then();
		return this
				.setAndGetChanged(value)
				.then();
	}

	Mono<T> setAndGetPrevious(T value);

	default Mono<Boolean> setAndGetChanged(T value) {
		return setAndGetPrevious(value).map(oldValue -> !Objects.equals(oldValue, value)).defaultIfEmpty(value != null);
		return this
				.setAndGetPrevious(value)
				.map(oldValue -> !Objects.equals(oldValue, value))
				.switchIfEmpty(Mono.fromSupplier(() -> value != null));
	}

	Mono<Boolean> update(Function<@Nullable T, @Nullable T> updater, boolean existsAlmostCertainly);
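defaultIfEmpty takes an eagerly computed constant, while switchIfEmpty(Mono.fromSupplier(...)) defers the fallback to subscription time. The two behave the same for this boolean, but the deferred form only evaluates the fallback when the source actually completes empty. A small sketch of the difference:

import reactor.core.publisher.Mono;

class LazyFallbackSketch {
	public static void main(String[] args) {
		Mono<String> source = Mono.empty();

		// Eager: the fallback value is computed when the pipeline is built.
		Mono<String> eager = source.defaultIfEmpty(expensiveFallback("eager"));

		// Lazy: the supplier only runs if the source completes empty,
		// and only at subscription time.
		Mono<String> lazy = source.switchIfEmpty(Mono.fromSupplier(() -> expensiveFallback("lazy")));

		System.out.println(eager.block());
		System.out.println(lazy.block());
	}

	static String expensiveFallback(String tag) {
		System.out.println("computing fallback: " + tag);
		return tag;
	}
}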
@@ -1,15 +1,20 @@
package it.cavallium.dbengine.database.collections;

import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.UpdateMode;
import it.cavallium.dbengine.database.collections.Joiner.ValueGetter;
import it.cavallium.dbengine.database.collections.JoinerBlocking.ValueGetterBlocking;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Optional;
import java.util.function.Function;
import org.jetbrains.annotations.Nullable;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.util.function.Tuple2;
import reactor.util.function.Tuples;

@SuppressWarnings("unused")
public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends DatabaseStageEntry<Map<T, U>> {

@@ -32,8 +37,16 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
		return at(null, key).single().flatMap(v -> v.set(value).doFinally(s -> v.release()));
	}

	Mono<UpdateMode> getUpdateMode();

	default Mono<Boolean> updateValue(T key, boolean existsAlmostCertainly, Function<@Nullable U, @Nullable U> updater) {
		return at(null, key).single().flatMap(v -> v.update(updater, existsAlmostCertainly).doFinally(s -> v.release()));
		return this
				.at(null, key)
				.single()
				.flatMap(v -> v
						.update(updater, existsAlmostCertainly)
						.doFinally(s -> v.release())
				);
	}

	default Mono<Boolean> updateValue(T key, Function<@Nullable U, @Nullable U> updater) {

@@ -50,7 +63,7 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
	 * @param value
	 * @return true if the key was associated with any value, false if the key didn't exist.
	 */
	default Mono<Boolean> putValueAndGetStatus(T key, U value) {
	default Mono<Boolean> putValueAndGetChanged(T key, U value) {
		return at(null, key).single().flatMap(v -> v.setAndGetChanged(value).doFinally(s -> v.release())).single();
	}

@@ -89,6 +102,7 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
						.getValue()
						.get(snapshot, true)
						.map(value -> Map.entry(entry.getKey(), value))
						.doFinally(s -> entry.getValue().release())
				);
	}
@@ -111,7 +125,7 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
				.flatMap(entriesReplacer)
				.flatMap(replacedEntry -> this
						.at(null, replacedEntry.getKey())
						.map(v -> v.set(replacedEntry.getValue()).doFinally(s -> v.release())))
						.flatMap(v -> v.set(replacedEntry.getValue()).doFinally(s -> v.release())))
				.then();
	}
}

@@ -119,7 +133,10 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
	default Mono<Void> replaceAll(Function<Entry<T, US>, Mono<Void>> entriesReplacer) {
		return this
				.getAllStages(null)
				.flatMap(entriesReplacer)
				.flatMap(stage -> Mono
						.defer(() -> entriesReplacer.apply(stage))
						.doFinally(s -> stage.getValue().release())
				)
				.then();
	}

@@ -130,27 +147,55 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
				.collectMap(Entry::getKey, Entry::getValue, HashMap::new);
	}

	@Override
	default Mono<Boolean> setAndGetChanged(Map<T, U> value) {
		return this
				.setAndGetPrevious(value)
				.map(oldValue -> !Objects.equals(oldValue, value))
				.switchIfEmpty(Mono.fromSupplier(() -> !value.isEmpty()));
	}

	@Override
	default Mono<Boolean> update(Function<@Nullable Map<T, U>, @Nullable Map<T, U>> updater, boolean existsAlmostCertainly) {
		return this
				.getAllValues(null)
				.collectMap(Entry::getKey, Entry::getValue, HashMap::new)
				.getUpdateMode()
				.single()
				.<Map<T, U>>handle((v, sink) -> {
					if (v == null || v.isEmpty()) {
						sink.complete();
				.flatMap(updateMode -> {
					if (updateMode == UpdateMode.ALLOW_UNSAFE) {
						return this
								.getAllValues(null)
								.collectMap(Entry::getKey, Entry::getValue, HashMap::new)
								.single()
								.<Tuple2<Optional<Map<T, U>>, Boolean>>handle((v, sink) -> {
									if (v.isEmpty()) {
										v = null;
									}
									var result = updater.apply(v);
									if (result != null && result.isEmpty()) {
										result = null;
									}
									boolean changed = !Objects.equals(v, result);
									sink.next(Tuples.of(Optional.ofNullable(result), changed));
								})
								.flatMap(result -> Mono
										.justOrEmpty(result.getT1())
										.flatMap(values -> this.setAllValues(Flux.fromIterable(values.entrySet())))
										.thenReturn(result.getT2())
								);
					} else if (updateMode == UpdateMode.ALLOW) {
						return Mono.fromCallable(() -> {
							throw new UnsupportedOperationException("Maps can't be updated atomically");
						});
					} else if (updateMode == UpdateMode.DISALLOW) {
						return Mono.fromCallable(() -> {
							throw new UnsupportedOperationException("Map can't be updated because updates are disabled");
						});
					} else {
						var result = updater.apply(v);
						if (result == null) {
							sink.complete();
						} else {
							sink.next(result);
						}
						return Mono.fromCallable(() -> {
							throw new UnsupportedOperationException("Unknown update mode: " + updateMode);
						});
					}
				})
				.flatMap(values -> this.setAllValues(Flux.fromIterable(values.entrySet())))
				//todo: can be optimized by calculating the correct return value
				.thenReturn(true);
				});
	}
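The unsupported update modes signal their error by throwing inside Mono.fromCallable rather than at assembly time, so callers only see the exception if they actually subscribe. A small sketch of that deferral:

import reactor.core.publisher.Mono;

class DeferredErrorSketch {
	public static void main(String[] args) {
		// Nothing is thrown while merely building this pipeline...
		Mono<Object> deferred = Mono.fromCallable(() -> {
			throw new UnsupportedOperationException("updates are disabled");
		});
		System.out.println("pipeline built");
		// ...the exception surfaces only on subscription.
		try {
			deferred.block();
		} catch (UnsupportedOperationException expected) {
			System.out.println("caught: " + expected.getMessage());
		}
	}
}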
	@Override

@@ -166,7 +211,12 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat

	@Override
	default Mono<Long> leavesCount(@Nullable CompositeSnapshot snapshot, boolean fast) {
		return getAllStages(snapshot).count();
		return getAllStages(snapshot)
				.flatMap(stage -> Mono
						.fromRunnable(() -> stage.getValue().release())
						.thenReturn(true)
				)
				.count();
	}

	/**
@@ -1,6 +1,7 @@
package it.cavallium.dbengine.database.collections;

import io.netty.buffer.ByteBuf;
import io.netty.util.ReferenceCounted;
import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.serialization.Serializer;

@@ -44,17 +45,22 @@ public class SubStageGetterHashMap<T, U, TH> implements
			@Nullable CompositeSnapshot snapshot,
			ByteBuf prefixKey,
			Flux<ByteBuf> debuggingKeyFlux) {
		Mono<DatabaseMapDictionaryHashed<T, U, TH>> result = Mono.just(DatabaseMapDictionaryHashed.tail(dictionary,
				prefixKey,
		Mono<DatabaseMapDictionaryHashed<T, U, TH>> result = Mono.fromSupplier(() -> DatabaseMapDictionaryHashed.tail(dictionary,
				prefixKey.retain(),
				keySerializer,
				valueSerializer,
				keyHashFunction,
				keyHashSerializer
		));
		if (assertsEnabled) {
			return checkKeyFluxConsistency(prefixKey, debuggingKeyFlux).then(result);
			return checkKeyFluxConsistency(prefixKey.retain(), debuggingKeyFlux)
					.then(result)
					.doFinally(s -> prefixKey.release());
		} else {
			return result;
			return debuggingKeyFlux
					.flatMap(key -> Mono.fromRunnable(key::release))
					.then(result)
					.doFinally(s -> prefixKey.release());
		}
	}

@@ -69,9 +75,14 @@ public class SubStageGetterHashMap<T, U, TH> implements
	}

	private Mono<Void> checkKeyFluxConsistency(ByteBuf prefixKey, Flux<ByteBuf> keyFlux) {
		return keyFlux.doOnNext(key -> {
			assert key.readableBytes() == prefixKey.readableBytes() + getKeyHashBinaryLength();
		}).then();
		return keyFlux
				.doOnNext(key -> {
					assert key.readableBytes() == prefixKey.readableBytes() + getKeyHashBinaryLength();
				})
				.flatMap(key -> Mono.fromRunnable(key::release))
				.doOnDiscard(ByteBuf.class, ReferenceCounted::release)
				.then()
				.doFinally(s -> prefixKey.release());
	}

	public int getKeyHashBinaryLength() {
@@ -1,6 +1,7 @@
package it.cavallium.dbengine.database.collections;

import io.netty.buffer.ByteBuf;
import io.netty.util.ReferenceCounted;
import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.collections.DatabaseEmpty.Nothing;

@@ -42,16 +43,21 @@ public class SubStageGetterHashSet<T, TH> implements
			@Nullable CompositeSnapshot snapshot,
			ByteBuf prefixKey,
			Flux<ByteBuf> debuggingKeyFlux) {
		Mono<DatabaseSetDictionaryHashed<T, TH>> result = Mono.just(DatabaseSetDictionaryHashed.tail(dictionary,
				prefixKey,
		Mono<DatabaseSetDictionaryHashed<T, TH>> result = Mono.fromSupplier(() -> DatabaseSetDictionaryHashed.tail(dictionary,
				prefixKey.retain(),
				keySerializer,
				keyHashFunction,
				keyHashSerializer
		));
		if (assertsEnabled) {
			return checkKeyFluxConsistency(prefixKey, debuggingKeyFlux).then(result);
			return checkKeyFluxConsistency(prefixKey.retain(), debuggingKeyFlux)
					.then(result)
					.doFinally(s -> prefixKey.release());
		} else {
			return result;
			return debuggingKeyFlux
					.flatMap(key -> Mono.fromRunnable(key::release))
					.then(result)
					.doFinally(s -> prefixKey.release());
		}
	}

@@ -66,9 +72,14 @@ public class SubStageGetterHashSet<T, TH> implements
	}

	private Mono<Void> checkKeyFluxConsistency(ByteBuf prefixKey, Flux<ByteBuf> keyFlux) {
		return keyFlux.doOnNext(key -> {
			assert key.readableBytes() == prefixKey.readableBytes() + getKeyHashBinaryLength();
		}).then();
		return keyFlux
				.doOnNext(key -> {
					assert key.readableBytes() == prefixKey.readableBytes() + getKeyHashBinaryLength();
				})
				.flatMap(key -> Mono.fromRunnable(key::release))
				.doOnDiscard(ByteBuf.class, ReferenceCounted::release)
				.then()
				.doFinally(s -> prefixKey.release());
	}

	public int getKeyHashBinaryLength() {
@ -1,6 +1,7 @@
package it.cavallium.dbengine.database.collections;

import io.netty.buffer.ByteBuf;
import io.netty.util.ReferenceCounted;
import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.serialization.Serializer;
@ -35,14 +36,27 @@ public class SubStageGetterMap<T, U> implements SubStageGetter<Map<T, U>, Databa
@Nullable CompositeSnapshot snapshot,
ByteBuf prefixKey,
Flux<ByteBuf> debuggingKeyFlux) {
Mono<DatabaseMapDictionary<T, U>> result = Mono.just(DatabaseMapDictionary.tail(dictionary, prefixKey, keySerializer,
valueSerializer
));
if (assertsEnabled) {
return checkKeyFluxConsistency(prefixKey, debuggingKeyFlux).then(result);
} else {
return result;
}
return Mono
.using(
() -> true,
b -> Mono
.fromSupplier(() -> DatabaseMapDictionary.tail(dictionary, prefixKey.retain(), keySerializer, valueSerializer))
.doOnDiscard(DatabaseMapDictionary.class, DatabaseMapDictionary::release)
.transformDeferred(result -> {
if (assertsEnabled) {
return this
.checkKeyFluxConsistency(prefixKey.retain(), debuggingKeyFlux)
.then(result);
} else {
return debuggingKeyFlux
.flatMap(buf -> Mono.fromRunnable(buf::release))
.doOnDiscard(ByteBuf.class, ReferenceCounted::release)
.then(result);
}
})
.doOnDiscard(DatabaseMapDictionary.class, DatabaseMapDictionary::release),
b -> prefixKey.release()
);
}

@Override
@ -56,9 +70,14 @@ public class SubStageGetterMap<T, U> implements SubStageGetter<Map<T, U>, Databa
}

private Mono<Void> checkKeyFluxConsistency(ByteBuf prefixKey, Flux<ByteBuf> keyFlux) {
return keyFlux.doOnNext(key -> {
assert key.readableBytes() == prefixKey.readableBytes() + getKeyBinaryLength();
}).then();
return keyFlux
.doOnNext(key -> {
assert key.readableBytes() == prefixKey.readableBytes() + getKeyBinaryLength();
})
.flatMap(key -> Mono.fromRunnable(key::release))
.doOnDiscard(ByteBuf.class, ReferenceCounted::release)
.then()
.doFinally(s -> prefixKey.release());
}

public int getKeyBinaryLength() {

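SubStageGetterMap above switches to Mono.using so that prefixKey is released exactly once per subscription, whichever branch runs and however the pipeline terminates. A minimal sketch of that resource-scoping shape (Reactor 3; the helper name is illustrative):

	import io.netty.buffer.ByteBuf;
	import java.util.function.Function;
	import reactor.core.publisher.Mono;

	final class UsingSketch {
		// Acquire our own reference on subscribe, release it on terminate or cancel.
		static <T> Mono<T> withPrefix(ByteBuf prefixKey, Function<ByteBuf, Mono<T>> body) {
			return Mono.using(
					prefixKey::retain, // resource supplier: take an extra reference
					body,              // build the inner pipeline from the resource
					ByteBuf::release   // cleanup: runs once per subscription
			);
		}
	}
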
@ -1,6 +1,7 @@
package it.cavallium.dbengine.database.collections;

import io.netty.buffer.ByteBuf;
import io.netty.util.ReferenceCounted;
import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
@ -49,17 +50,24 @@ public class SubStageGetterMapDeep<T, U, US extends DatabaseStage<U>> implements
@Nullable CompositeSnapshot snapshot,
ByteBuf prefixKey,
Flux<ByteBuf> debuggingKeyFlux) {
Mono<DatabaseMapDictionaryDeep<T, U, US>> result = Mono.just(DatabaseMapDictionaryDeep.deepIntermediate(dictionary,
prefixKey,
keySerializer,
subStageGetter,
keyExtLength
));
if (assertsEnabled) {
return checkKeyFluxConsistency(prefixKey, debuggingKeyFlux).then(result);
} else {
return result;
}
return Flux
.defer(() -> {
if (assertsEnabled) {
return this
.checkKeyFluxConsistency(prefixKey.retain(), debuggingKeyFlux);
} else {
return debuggingKeyFlux.flatMap(buf -> Mono.fromRunnable(buf::release));
}
})
.then(Mono
.fromSupplier(() -> DatabaseMapDictionaryDeep.deepIntermediate(dictionary,
prefixKey.retain(),
keySerializer,
subStageGetter,
keyExtLength
))
)
.doFinally(s -> prefixKey.release());
}

@Override
@ -73,9 +81,14 @@ public class SubStageGetterMapDeep<T, U, US extends DatabaseStage<U>> implements
}

private Mono<Void> checkKeyFluxConsistency(ByteBuf prefixKey, Flux<ByteBuf> keyFlux) {
return keyFlux.doOnNext(key -> {
assert key.readableBytes() == prefixKey.readableBytes() + getKeyBinaryLength();
}).then();
return keyFlux
.doOnNext(key -> {
assert key.readableBytes() == prefixKey.readableBytes() + getKeyBinaryLength();
})
.flatMap(key -> Mono.fromRunnable(key::release))
.doOnDiscard(ByteBuf.class, ReferenceCounted::release)
.then()
.doFinally(s -> prefixKey.release());
}

public int getKeyBinaryLength() {

@ -1,6 +1,7 @@
package it.cavallium.dbengine.database.collections;

import io.netty.buffer.ByteBuf;
import io.netty.util.ReferenceCounted;
import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.collections.DatabaseEmpty.Nothing;
@ -32,16 +33,17 @@ public class SubStageGetterSet<T> implements SubStageGetter<Map<T, Nothing>, Dat
@Nullable CompositeSnapshot snapshot,
ByteBuf prefixKey,
Flux<ByteBuf> debuggingKeyFlux) {
try {
Mono<DatabaseSetDictionary<T>> result = Mono
.fromSupplier(() -> DatabaseSetDictionary.tail(dictionary, prefixKey.retain(), keySerializer));
if (assertsEnabled) {
return checkKeyFluxConsistency(prefixKey.retain(), debuggingKeyFlux).then(result);
} else {
return result;
}
} finally {
prefixKey.release();
Mono<DatabaseSetDictionary<T>> result = Mono
.fromSupplier(() -> DatabaseSetDictionary.tail(dictionary, prefixKey.retain(), keySerializer));
if (assertsEnabled) {
return checkKeyFluxConsistency(prefixKey.retain(), debuggingKeyFlux)
.then(result)
.doFinally(s -> prefixKey.release());
} else {
return debuggingKeyFlux
.flatMap(key -> Mono.fromRunnable(key::release))
.then(result)
.doFinally(s -> prefixKey.release());
}
}

@ -56,9 +58,14 @@ public class SubStageGetterSet<T> implements SubStageGetter<Map<T, Nothing>, Dat
}

private Mono<Void> checkKeyFluxConsistency(ByteBuf prefixKey, Flux<ByteBuf> keyFlux) {
return keyFlux.doOnNext(key -> {
assert key.readableBytes() == prefixKey.readableBytes() + getKeyBinaryLength();
}).doFinally(s -> prefixKey.release()).then();
return keyFlux
.doOnNext(key -> {
assert key.readableBytes() == prefixKey.readableBytes() + getKeyBinaryLength();
})
.flatMap(key -> Mono.fromRunnable(key::release))
.doOnDiscard(ByteBuf.class, ReferenceCounted::release)
.then()
.doFinally(s -> prefixKey.release());
}

public int getKeyBinaryLength() {

@ -36,13 +36,20 @@ public class SubStageGetterSingle<T> implements SubStageGetter<T, DatabaseStageE
.singleOrEmpty()
.flatMap(key -> Mono
.<DatabaseStageEntry<T>>fromCallable(() -> {
if (!LLUtils.equals(keyPrefix, key)) {
throw new IndexOutOfBoundsException("Found more than one element!");
try {
if (!LLUtils.equals(keyPrefix, key)) {
throw new IndexOutOfBoundsException("Found more than one element!");
}
} finally {
key.release();
}
return null;
})
)
.then(Mono.fromSupplier(() -> new DatabaseSingle<>(dictionary, keyPrefix, serializer)));
.then(Mono
.<DatabaseStageEntry<T>>fromSupplier(() -> new DatabaseSingle<>(dictionary, keyPrefix.retain(), serializer))
)
.doFinally(s -> keyPrefix.release());
}

@Override

File diff suppressed because it is too large
@ -21,6 +21,9 @@ public class LLLocalGroupedKeyReactiveRocksIterator extends LLLocalGroupedReacti

@Override
public ByteBuf getEntry(ByteBuf key, ByteBuf value) {
if (value != null) {
value.release();
}
return key;
}
}

@ -7,8 +7,10 @@ import io.netty.buffer.ByteBufAllocator;
import io.netty.buffer.ByteBufUtil;
import it.cavallium.dbengine.database.LLRange;
import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.database.collections.DatabaseMapDictionaryDeep;
import it.unimi.dsi.fastutil.objects.ObjectArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import org.rocksdb.ColumnFamilyHandle;
import org.rocksdb.ReadOptions;
import org.rocksdb.RocksDB;
@ -49,52 +51,65 @@ public abstract class LLLocalGroupedReactiveRocksIterator<T> {
.generate(() -> {
var readOptions = new ReadOptions(this.readOptions);
readOptions.setFillCache(canFillCache && range.hasMin() && range.hasMax());
return getRocksIterator(readOptions, range, db, cfh);
return getRocksIterator(readOptions, range.retain(), db, cfh);
}, (tuple, sink) -> {
var rocksIterator = tuple.getT1();
ObjectArrayList<T> values = new ObjectArrayList<>();
ByteBuf firstGroupKey = null;

range.retain();
try {
while (rocksIterator.isValid()) {
ByteBuf key = LLUtils.readDirectNioBuffer(alloc, rocksIterator::key);
try {
if (firstGroupKey == null) {
firstGroupKey = key.retainedSlice();
} else if (!ByteBufUtil.equals(firstGroupKey, 0, key, 0, prefixLength)) {
break;
}
ByteBuf value = readValues ? LLUtils.readDirectNioBuffer(alloc, rocksIterator::value) : EMPTY_BUFFER;
var rocksIterator = tuple.getT1();
ObjectArrayList<T> values = new ObjectArrayList<>();
ByteBuf firstGroupKey = null;

try {
while (rocksIterator.isValid()) {
ByteBuf key = LLUtils.readDirectNioBuffer(alloc, rocksIterator::key);
try {
rocksIterator.next();
T entry = getEntry(key.retain(), value.retain());
values.add(entry);
if (firstGroupKey == null) {
firstGroupKey = key.retain();
} else if (!ByteBufUtil.equals(firstGroupKey, firstGroupKey.readerIndex(), key, key.readerIndex(), prefixLength)) {
break;
}
ByteBuf value;
if (readValues) {
value = LLUtils.readDirectNioBuffer(alloc, rocksIterator::value);
} else {
value = DatabaseMapDictionaryDeep.EMPTY_BYTES;
}
try {
rocksIterator.next();
T entry = getEntry(key.retain(), value.retain());
values.add(entry);
} finally {
value.release();
}
} finally {
value.release();
key.release();
}
} finally {
key.release();
}
} finally {
if (firstGroupKey != null) {
firstGroupKey.release();
}
}
} finally {
if (firstGroupKey != null) {
firstGroupKey.release();
if (!values.isEmpty()) {
sink.next(values);
} else {
sink.complete();
}
return tuple;
} finally {
range.release();
}
if (!values.isEmpty()) {
sink.next(values);
} else {
sink.complete();
}
return tuple;
}, tuple -> {
var rocksIterator = tuple.getT1();
rocksIterator.close();
tuple.getT2().release();
tuple.getT3().release();
range.release();
});
}

public abstract T getEntry(ByteBuf key, ByteBuf value);

public void release() {
range.release();
}
}

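The iterators above wrap a RocksDB cursor in Flux.generate: state (the iterator plus retained range slices) is created per subscription, each generator step emits at most one item downstream, and the third argument disposes the native handles. A stripped-down sketch without ranges or ByteBuf handling (assuming RocksDB's Java API and Reactor 3):

	import org.rocksdb.ReadOptions;
	import org.rocksdb.RocksDB;
	import org.rocksdb.RocksIterator;
	import reactor.core.publisher.Flux;

	final class IteratorFluxSketch {
		// Emit every key in the database; close the iterator on terminate or cancel.
		static Flux<byte[]> keys(RocksDB db, ReadOptions readOptions) {
			return Flux.generate(
					() -> {
						RocksIterator it = db.newIterator(readOptions);
						it.seekToFirst();
						return it;
					},
					(it, sink) -> {
						if (it.isValid()) {
							sink.next(it.key()); // key() copies into a fresh byte[]
							it.next();
						} else {
							sink.complete();
						}
						return it;
					},
					RocksIterator::close);
		}
	}
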
@ -51,41 +51,48 @@ public class LLLocalKeyPrefixReactiveRocksIterator {
}
return LLLocalDictionary.getRocksIterator(readOptions, range.retain(), db, cfh);
}, (tuple, sink) -> {
var rocksIterator = tuple.getT1();
ByteBuf firstGroupKey = null;
range.retain();
try {
while (rocksIterator.isValid()) {
ByteBuf key = LLUtils.readDirectNioBuffer(alloc, rocksIterator::key);
try {
if (firstGroupKey == null) {
firstGroupKey = key.retain();
} else if (!ByteBufUtil.equals(firstGroupKey, 0, key, 0, prefixLength)) {
break;
var rocksIterator = tuple.getT1();
ByteBuf firstGroupKey = null;
try {
while (rocksIterator.isValid()) {
ByteBuf key = LLUtils.readDirectNioBuffer(alloc, rocksIterator::key);
try {
if (firstGroupKey == null) {
firstGroupKey = key.retain();
} else if (!ByteBufUtil.equals(firstGroupKey, 0, key, 0, prefixLength)) {
break;
}
rocksIterator.next();
} finally {
key.release();
}
rocksIterator.next();
} finally {
key.release();
}
if (firstGroupKey != null) {
var groupKeyPrefix = firstGroupKey.slice(0, prefixLength);
sink.next(groupKeyPrefix.retain());
} else {
sink.complete();
}
} finally {
if (firstGroupKey != null) {
firstGroupKey.release();
}
}
if (firstGroupKey != null) {
var groupKeyPrefix = firstGroupKey.slice(0, prefixLength);
sink.next(groupKeyPrefix.retain());
} else {
sink.complete();
}
return tuple;
} finally {
if (firstGroupKey != null) {
firstGroupKey.release();
}
range.release();
}
return tuple;
}, tuple -> {
var rocksIterator = tuple.getT1();
rocksIterator.close();
tuple.getT2().release();
tuple.getT3().release();
range.release();
});
}

public void release() {
range.release();
}
}

@ -19,6 +19,9 @@ public class LLLocalKeyReactiveRocksIterator extends LLLocalReactiveRocksIterato

@Override
public ByteBuf getEntry(ByteBuf key, ByteBuf value) {
if (value != null) {
value.release();
}
return key;
}
}

@ -6,11 +6,16 @@ import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
import it.cavallium.dbengine.database.LLRange;
import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.database.collections.DatabaseMapDictionaryDeep;
import it.cavallium.dbengine.database.disk.LLLocalDictionary.ReleasableSlice;
import org.rocksdb.ColumnFamilyHandle;
import org.rocksdb.ReadOptions;
import org.rocksdb.RocksDB;
import org.rocksdb.RocksIterator;
import org.rocksdb.RocksMutableObject;
import reactor.core.publisher.Flux;
import reactor.util.function.Tuple3;

import static io.netty.buffer.Unpooled.*;

public abstract class LLLocalReactiveRocksIterator<T> {
@ -46,24 +51,34 @@ public abstract class LLLocalReactiveRocksIterator<T> {
}
return getRocksIterator(readOptions, range.retain(), db, cfh);
}, (tuple, sink) -> {
var rocksIterator = tuple.getT1();
if (rocksIterator.isValid()) {
ByteBuf key = LLUtils.readDirectNioBuffer(alloc, rocksIterator::key);
try {
ByteBuf value = readValues ? LLUtils.readDirectNioBuffer(alloc, rocksIterator::value) : EMPTY_BUFFER;
range.retain();
try {
var rocksIterator = tuple.getT1();
if (rocksIterator.isValid()) {
ByteBuf key = LLUtils.readDirectNioBuffer(alloc, rocksIterator::key);
try {
rocksIterator.next();
sink.next(getEntry(key.retain(), value.retain()));
ByteBuf value;
if (readValues) {
value = LLUtils.readDirectNioBuffer(alloc, rocksIterator::value);
} else {
value = DatabaseMapDictionaryDeep.EMPTY_BYTES;
}
try {
rocksIterator.next();
sink.next(getEntry(key.retain(), value.retain()));
} finally {
value.release();
}
} finally {
value.release();
key.release();
}
} finally {
key.release();
} else {
sink.complete();
}
} else {
sink.complete();
return tuple;
} finally {
range.release();
}
return tuple;
}, tuple -> {
var rocksIterator = tuple.getT1();
rocksIterator.close();
@ -73,4 +88,8 @@ public abstract class LLLocalReactiveRocksIterator<T> {
}

public abstract T getEntry(ByteBuf key, ByteBuf value);

public void release() {
range.release();
}
}

@ -72,7 +72,8 @@ public interface SerializerFixedBinaryLength<A, B> extends Serializer<A, B> {
try {
ByteBufUtil.writeUtf8(buf, deserialized);
if (buf.readableBytes() != getSerializedBinaryLength()) {
throw new SerializationException("Fixed serializer with " + getSerializedBinaryLength() + " bytes has tried to serialize an element with "
throw new SerializationException("Fixed serializer with " + getSerializedBinaryLength()
+ " bytes has tried to serialize an element with "
+ buf.readableBytes() + " bytes instead");
}
return buf.retain();

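The rewrapped exception above enforces the fixed-length contract after encoding, which matters because a UTF-8 byte length depends on the content. The same check in isolation (plain arrays instead of ByteBuf, with IllegalArgumentException standing in for the project's SerializationException):

	import java.nio.charset.StandardCharsets;

	final class FixedLengthCheck {
		// Encode first, then verify the byte length against the declared size.
		static byte[] serializeFixed(String deserialized, int fixedLength) {
			byte[] bytes = deserialized.getBytes(StandardCharsets.UTF_8);
			if (bytes.length != fixedLength) {
				throw new IllegalArgumentException("Fixed serializer with " + fixedLength
						+ " bytes has tried to serialize an element with "
						+ bytes.length + " bytes instead");
			}
			return bytes;
		}
	}
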
@ -287,6 +287,6 @@ public class LuceneUtils {
DatabaseMapDictionaryDeep<T, Map<U, V>, DatabaseMapDictionary<U, V>> dictionaryDeep) {
return entry -> dictionaryDeep
.at(snapshot, entry.getKey())
.flatMap(sub -> sub.getValue(snapshot, entry.getValue()));
.flatMap(sub -> sub.getValue(snapshot, entry.getValue()).doFinally(s -> sub.release()));
}
}

@ -17,6 +17,10 @@ import org.warp.commonutils.concurrency.atomicity.NotAtomic;
@NotAtomic
public class CappedWriteBatch extends WriteBatch {

/**
* Default: false. Set to true to use fast direct buffers; keep it false to debug problems with direct buffers.
*/
private static final boolean USE_FAST_DIRECT_BUFFERS = false;
private final RocksDB db;
private final int cap;
private final WriteOptions writeOptions;
@ -41,17 +45,20 @@ public class CappedWriteBatch extends WriteBatch {

private synchronized void flushIfNeeded(boolean force) throws RocksDBException {
if (this.count() >= (force ? 1 : cap)) {
db.write(writeOptions, this);
db.write(writeOptions, this.getWriteBatch());
this.clear();
releaseAllBuffers();
}
}

private synchronized void releaseAllBuffers() {
for (ByteBuf byteBuffer : buffersToRelease) {
byteBuffer.release();
if (!buffersToRelease.isEmpty()) {
for (ByteBuf byteBuffer : buffersToRelease) {
assert byteBuffer.refCnt() > 0;
byteBuffer.release();
}
buffersToRelease.clear();
}
buffersToRelease.clear();
}

@Override
@ -84,33 +91,24 @@ public class CappedWriteBatch extends WriteBatch {
}

public synchronized void put(ColumnFamilyHandle columnFamilyHandle, ByteBuf key, ByteBuf value) throws RocksDBException {
buffersToRelease.add(key);
buffersToRelease.add(value);
ByteBuf keyDirectBuf = key.retain();
ByteBuffer keyNioBuffer = LLUtils.toDirectFast(keyDirectBuf.retain());
if (keyNioBuffer == null) {
keyDirectBuf.release();
keyDirectBuf = LLUtils.toDirectCopy(key.retain());
keyNioBuffer = keyDirectBuf.nioBuffer();
}
try {
if (USE_FAST_DIRECT_BUFFERS) {
buffersToRelease.add(key);
buffersToRelease.add(value);
ByteBuffer keyNioBuffer = LLUtils.toDirect(key);
assert keyNioBuffer.isDirect();

ByteBuf valueDirectBuf = value.retain();
ByteBuffer valueNioBuffer = LLUtils.toDirectFast(valueDirectBuf.retain());
if (valueNioBuffer == null) {
valueDirectBuf.release();
valueDirectBuf = LLUtils.toDirectCopy(value.retain());
valueNioBuffer = valueDirectBuf.nioBuffer();
}
ByteBuffer valueNioBuffer = LLUtils.toDirect(value);
assert valueNioBuffer.isDirect();
super.put(columnFamilyHandle, keyNioBuffer, valueNioBuffer);
} else {
try {
assert valueNioBuffer.isDirect();
super.put(columnFamilyHandle, keyNioBuffer, valueNioBuffer);
byte[] keyArray = LLUtils.toArray(key);
byte[] valueArray = LLUtils.toArray(value);
super.put(columnFamilyHandle, keyArray, valueArray);
} finally {
buffersToRelease.add(valueDirectBuf);
key.release();
value.release();
}
} finally {
buffersToRelease.add(keyDirectBuf);
}
flushIfNeeded(false);
}
@ -154,15 +152,9 @@ public class CappedWriteBatch extends WriteBatch {
}

public synchronized void delete(ColumnFamilyHandle columnFamilyHandle, ByteBuf key) throws RocksDBException {
buffersToRelease.add(key);
ByteBuf keyDirectBuf = key.retain();
ByteBuffer keyNioBuffer = LLUtils.toDirectFast(keyDirectBuf.retain());
if (keyNioBuffer == null) {
keyDirectBuf.release();
keyDirectBuf = LLUtils.toDirectCopy(key.retain());
keyNioBuffer = keyDirectBuf.nioBuffer();
}
try {
if (USE_FAST_DIRECT_BUFFERS) {
buffersToRelease.add(key);
ByteBuffer keyNioBuffer = LLUtils.toDirect(key);
assert keyNioBuffer.isDirect();
removeDirect(nativeHandle_,
keyNioBuffer,
@ -171,8 +163,12 @@ public class CappedWriteBatch extends WriteBatch {
columnFamilyHandle.nativeHandle_
);
keyNioBuffer.position(keyNioBuffer.limit());
} finally {
buffersToRelease.add(keyDirectBuf);
} else {
try {
super.delete(columnFamilyHandle, LLUtils.toArray(key));
} finally {
key.release();
}
}
flushIfNeeded(false);
}
@ -248,11 +244,24 @@ public class CappedWriteBatch extends WriteBatch {

@Override
public synchronized WriteBatch getWriteBatch() {
return this;
return super.getWriteBatch();
}

public synchronized void writeToDbAndClose() throws RocksDBException {
flushIfNeeded(true);
try {
flushIfNeeded(true);
super.close();
} finally {
releaseAllBuffers();
}
}

public void flush() throws RocksDBException {
try {
flushIfNeeded(true);
} finally {
releaseAllBuffers();
}
}

@Override

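CappedWriteBatch above couples the existing cap-based flushing with buffer bookkeeping, so backing buffers are only released after the batch they back has been written. The core capping policy, stripped of the ByteBuf tracking (RocksDB Java API; the class name is illustrative):

	import org.rocksdb.RocksDB;
	import org.rocksdb.RocksDBException;
	import org.rocksdb.WriteBatch;
	import org.rocksdb.WriteOptions;

	final class CappedBatchSketch implements AutoCloseable {
		private final RocksDB db;
		private final WriteOptions writeOptions = new WriteOptions();
		private final WriteBatch batch = new WriteBatch();
		private final int cap;

		CappedBatchSketch(RocksDB db, int cap) {
			this.db = db;
			this.cap = cap;
		}

		void put(byte[] key, byte[] value) throws RocksDBException {
			batch.put(key, value);
			flushIfNeeded(false); // write through once the cap is reached
		}

		private void flushIfNeeded(boolean force) throws RocksDBException {
			if (batch.count() >= (force ? 1 : cap)) {
				db.write(writeOptions, batch);
				batch.clear();
			}
		}

		@Override
		public void close() throws RocksDBException {
			flushIfNeeded(true); // flush whatever is left before closing
			batch.close();
			writeOptions.close();
		}
	}
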
@ -1,4 +1,4 @@
package it.cavallium.dbengine.client;
package it.cavallium.dbengine;

import it.cavallium.dbengine.database.Column;
import it.cavallium.dbengine.database.LLDictionary;
@ -67,6 +67,7 @@ public class DbTestUtils {
}).subscribeOn(Schedulers.boundedElastic()))
);
}

public static Mono<? extends LLDictionary> tempDictionary(LLKeyValueDatabase database, UpdateMode updateMode) {
return tempDictionary(database, "testmap", updateMode);
}
@ -1,9 +1,11 @@
package it.cavallium.dbengine.client;
package it.cavallium.dbengine;

import static it.cavallium.dbengine.client.CompositeDatabasePartLocation.CompositeDatabasePartType.KV_DATABASE;

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import it.cavallium.dbengine.client.CompositeDatabasePartLocation;
import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.Column;
import it.cavallium.dbengine.database.LLKeyValueDatabase;
import it.cavallium.dbengine.database.UpdateMode;
@ -14,7 +16,6 @@ import it.cavallium.dbengine.database.disk.LLLocalDatabaseConnection;
import it.cavallium.dbengine.database.serialization.Serializer;
import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
@ -24,7 +25,6 @@ import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.CompletionException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import org.jetbrains.annotations.NotNull;
import org.junit.jupiter.api.Test;
src/test/java/it/cavallium/dbengine/TestDictionary.java (new file, 31 lines)
@ -0,0 +1,31 @@
package it.cavallium.dbengine;

import static it.cavallium.dbengine.DbTestUtils.tempDb;
import static it.cavallium.dbengine.DbTestUtils.tempDictionary;

import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.UpdateMode;
import java.util.Arrays;
import java.util.stream.Stream;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import reactor.test.StepVerifier;

public class TestDictionary {

private static Stream<Arguments> provideArgumentsCreate() {
return Arrays.stream(UpdateMode.values()).map(Arguments::of);
}

@ParameterizedTest
@MethodSource("provideArgumentsCreate")
public void testCreate(UpdateMode updateMode) {
StepVerifier
.create(tempDb(db -> tempDictionary(db, updateMode)
.flatMap(LLDictionary::clear)
.then()
))
.verifyComplete();
}
}

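TestDictionary above only verifies that a dictionary can be created and cleared under every UpdateMode. The same verification can be run outside JUnit to reproduce a single mode, assuming this snippet lives in the it.cavallium.dbengine package next to DbTestUtils (whose exact signatures are taken on faith from the usages above):

	import it.cavallium.dbengine.database.LLDictionary;
	import it.cavallium.dbengine.database.UpdateMode;
	import reactor.test.StepVerifier;

	final class RunOneCase {
		public static void main(String[] args) {
			// Reproduce one UpdateMode case in isolation.
			StepVerifier
					.create(DbTestUtils.tempDb(db -> DbTestUtils.tempDictionary(db, UpdateMode.ALLOW_UNSAFE)
							.flatMap(LLDictionary::clear)
							.then()))
					.verifyComplete();
		}
	}
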
@ -1,18 +1,14 @@
package it.cavallium.dbengine.client;
package it.cavallium.dbengine;

import static it.cavallium.dbengine.client.DbTestUtils.*;
import static it.cavallium.dbengine.DbTestUtils.*;

import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.UpdateMode;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentSkipListSet;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.junit.jupiter.params.ParameterizedTest;
@ -21,38 +17,37 @@ import org.junit.jupiter.params.provider.MethodSource;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.test.StepVerifier;
import reactor.test.StepVerifier.FirstStep;
import reactor.test.StepVerifier.Step;
import reactor.util.function.Tuple2;
import reactor.util.function.Tuple3;
import reactor.util.function.Tuple4;
import reactor.util.function.Tuples;

public class TestDictionaryMap {

private static Stream<Arguments> provideArgumentsCreate() {
return Arrays.stream(UpdateMode.values()).map(Arguments::of);
private static boolean isTestBadKeysEnabled() {
return System.getProperty("badkeys", "true").equalsIgnoreCase("true");
}

@ParameterizedTest
@MethodSource("provideArgumentsCreate")
public void testCreate(UpdateMode updateMode) {
StepVerifier
.create(tempDb(db -> tempDictionary(db, updateMode)
.flatMap(LLDictionary::clear)
.then()
))
.verifyComplete();
}
private static final String BIG_STRING
= "01234567890123456789012345678901234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789012345678901234567890123456789";

private static Stream<Arguments> provideArgumentsPut() {
var goodKeys = Set.of("12345", "zebra");
var badKeys = Set.of("", "a", "aaaa", "aaaaaa");
Set<String> badKeys;
if (isTestBadKeysEnabled()) {
badKeys = Set.of("", "a", "aaaa", "aaaaaa");
} else {
badKeys = Set.of();
}
Set<Tuple2<String, Boolean>> keys = Stream.concat(
goodKeys.stream().map(s -> Tuples.of(s, false)),
badKeys.stream().map(s -> Tuples.of(s, true))
).collect(Collectors.toSet());
var values = Set.of("a", "", "\0", "\0\0", "z", "azzszgzczqz", "bzzazazqzeztzgzzhz!");
var values = Set.of("a", "", "\0", "\0\0", "z", "azzszgzczqz", BIG_STRING);

return keys
.stream()
@ -180,6 +175,9 @@ public class TestDictionaryMap {
@ParameterizedTest
@MethodSource("provideArgumentsPut")
public void testUpdate(UpdateMode updateMode, String key, String value, boolean shouldFail) {
if (updateMode == UpdateMode.DISALLOW && !isTestBadKeysEnabled()) {
return;
}
var stpVer = StepVerifier
.create(tempDb(db -> tempDictionary(db, updateMode)
.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
@ -219,6 +217,9 @@ public class TestDictionaryMap {
@ParameterizedTest
@MethodSource("provideArgumentsPut")
public void testUpdateGet(UpdateMode updateMode, String key, String value, boolean shouldFail) {
if (updateMode == UpdateMode.DISALLOW && !isTestBadKeysEnabled()) {
return;
}
var stpVer = StepVerifier
.create(tempDb(db -> tempDictionary(db, updateMode)
.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
@ -257,17 +258,17 @@ public class TestDictionaryMap {

@ParameterizedTest
@MethodSource("provideArgumentsPut")
public void testPutAndGetStatus(UpdateMode updateMode, String key, String value, boolean shouldFail) {
public void testPutAndGetChanged(UpdateMode updateMode, String key, String value, boolean shouldFail) {
var stpVer = StepVerifier
.create(tempDb(db -> tempDictionary(db, updateMode)
.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
.flatMapMany(map -> Flux
.concat(
map.putValueAndGetStatus(key, "error?").single(),
map.putValueAndGetStatus(key, value).single(),
map.putValueAndGetStatus(key, value).single(),
map.putValueAndGetChanged(key, "error?").single(),
map.putValueAndGetChanged(key, value).single(),
map.putValueAndGetChanged(key, value).single(),
map.remove(key),
map.putValueAndGetStatus(key, "error?").single()
map.putValueAndGetChanged(key, "error?").single()
)
.doFinally(s -> map.release())
)
@ -275,18 +276,23 @@ public class TestDictionaryMap {
if (shouldFail) {
stpVer.verifyError();
} else {
stpVer.expectNext(false, true, true, false).verifyComplete();
stpVer.expectNext(true, true, false, true).verifyComplete();
}
}

private static Stream<Arguments> provideArgumentsPutMulti() {
var goodKeys = Set.of(Set.of("12345", "67890"), Set.of("zebra"), Set.<String>of());
var badKeys = Set.of(Set.of("", "12345"), Set.of("12345", "a"), Set.of("45678", "aaaa"), Set.of("aaaaaa", "capra"));
Set<Set<String>> badKeys;
if (isTestBadKeysEnabled()) {
badKeys = Set.of(Set.of("", "12345"), Set.of("12345", "a"), Set.of("45678", "aaaa"), Set.of("aaaaaa", "capra"));
} else {
badKeys = Set.of();
}
Set<Tuple2<Set<String>, Boolean>> keys = Stream.concat(
goodKeys.stream().map(s -> Tuples.of(s, false)),
badKeys.stream().map(s -> Tuples.of(s, true))
).collect(Collectors.toSet());
var values = Set.of("a", "", "\0", "\0\0", "z", "azzszgzczqz", "bzzazazqzeztzgzzhz!");
var values = Set.of("a", "", "\0", "\0\0", "z", "azzszgzczqz", BIG_STRING);

return keys
.stream()
@ -406,7 +412,7 @@ public class TestDictionaryMap {

@ParameterizedTest
@MethodSource("provideArgumentsPutMulti")
public void testSetAndGetStatus(UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
public void testSetAndGetChanged(UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
var remainingEntries = new ConcurrentHashMap<Entry<String, String>, Boolean>().keySet(true);
Step<Boolean> stpVer = StepVerifier
.create(tempDb(db -> tempDictionary(db, updateMode)
src/test/java/it/cavallium/dbengine/TestDictionaryMapDeep.java (new file, 993 lines)
@ -0,0 +1,993 @@
package it.cavallium.dbengine;

import static it.cavallium.dbengine.DbTestUtils.tempDatabaseMapDictionaryDeepMap;
import static it.cavallium.dbengine.DbTestUtils.tempDb;
import static it.cavallium.dbengine.DbTestUtils.tempDictionary;

import it.cavallium.dbengine.database.UpdateMode;
import java.util.Arrays;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.test.StepVerifier;
import reactor.test.StepVerifier.FirstStep;
import reactor.test.StepVerifier.Step;
import reactor.util.function.Tuple2;
import reactor.util.function.Tuple3;
import reactor.util.function.Tuple4;
import reactor.util.function.Tuples;

public class TestDictionaryMapDeep {

private static boolean isTestBadKeysEnabled() {
return System.getProperty("badkeys", "true").equalsIgnoreCase("true");
}

private static final String BIG_STRING
= "01234567890123456789012345678901234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789012345678901234567890123456789";

private static Stream<Arguments> provideArgumentsSet() {
var goodKeys = Set.of("12345", "zebra");
Set<String> badKeys;
if (isTestBadKeysEnabled()) {
badKeys = Set.of("", "a", "aaaa", "aaaaaa");
} else {
badKeys = Set.of();
}
Set<Tuple2<String, Boolean>> keys = Stream.concat(
goodKeys.stream().map(s -> Tuples.of(s, false)),
badKeys.stream().map(s -> Tuples.of(s, true))
).collect(Collectors.toSet());
var values = Set.of(
Map.of("123456", "a", "234567", ""),
Map.of("123456", "", "234567", "bb"),
Map.of("123456", "\0", "234567", "\0\0", "345678", BIG_STRING)
);

return keys
.stream()
.flatMap(keyTuple -> {
Stream<Map<String, String>> strm;
if (keyTuple.getT2()) {
strm = values.stream().limit(1);
} else {
strm = values.stream();
}
return strm.map(val -> Tuples.of(keyTuple.getT1(), val, keyTuple.getT2()));
})
.flatMap(entryTuple -> Arrays.stream(UpdateMode.values()).map(updateMode -> Tuples.of(updateMode,
entryTuple.getT1(),
entryTuple.getT2(),
entryTuple.getT3()
)))
.map(fullTuple -> Arguments.of(fullTuple.getT1(), fullTuple.getT2(), fullTuple.getT3(), fullTuple.getT4()));
}

private static Stream<Arguments> provideArgumentsPut() {
var goodKeys1 = Set.of("12345", "zebra");
Set<String> badKeys1;
if (isTestBadKeysEnabled()) {
badKeys1 = Set.of("", "a", "aaaa", "aaaaaa");
} else {
badKeys1 = Set.of();
}
var goodKeys2 = Set.of("123456", "anatra");
Set<String> badKeys2;
if (isTestBadKeysEnabled()) {
badKeys2 = Set.of("", "a", "aaaaa", "aaaaaaa");
} else {
badKeys2 = Set.of();
}

var values = Set.of("a", "", "\0", "\0\0", "z", "azzszgzczqz", BIG_STRING);

Flux<Tuple4<String, String, String, Boolean>> failOnKeys1 = Flux
.fromIterable(badKeys1)
.map(badKey1 -> Tuples.of(
badKey1,
goodKeys2.stream().findAny().orElseThrow(),
values.stream().findAny().orElseThrow(),
true
));
Flux<Tuple4<String, String, String, Boolean>> failOnKeys2 = Flux
.fromIterable(badKeys2)
.map(badKey2 -> Tuples.of(
goodKeys1.stream().findAny().orElseThrow(),
badKey2,
values.stream().findAny().orElseThrow(),
true
));

Flux<Tuple4<String, String, String, Boolean>> goodKeys1And2 = Flux
.fromIterable(values)
.map(value -> Tuples.of(
goodKeys1.stream().findAny().orElseThrow(),
goodKeys2.stream().findAny().orElseThrow(),
value,
false
));

Flux<Tuple4<String, String, String, Boolean>> keys1And2 = Flux
.concat(
goodKeys1And2,
failOnKeys1,
failOnKeys2
);

return keys1And2
.flatMap(entryTuple -> Flux
.fromArray(UpdateMode.values())
.map(updateMode -> Tuples.of(updateMode,
entryTuple.getT1(),
entryTuple.getT2(),
entryTuple.getT3(),
entryTuple.getT4()
))
)
.map(fullTuple -> Arguments.of(fullTuple.getT1(),
fullTuple.getT2(),
fullTuple.getT3(),
fullTuple.getT4(),
fullTuple.getT5()
))
.toStream();
}

|
||||
@MethodSource("provideArgumentsSet")
|
||||
public void testSetValueGetValue(UpdateMode updateMode, String key, Map<String, String> value, boolean shouldFail) {
|
||||
var stpVer = StepVerifier
|
||||
.create(tempDb(db -> tempDictionary(db, updateMode)
|
||||
.map(dict -> tempDatabaseMapDictionaryDeepMap(dict, 5, 6))
|
||||
.flatMap(map -> map
|
||||
.putValue(key, value)
|
||||
.then(map.getValue(null, key))
|
||||
.doFinally(s -> map.release())
|
||||
)
|
||||
));
|
||||
if (shouldFail) {
|
||||
stpVer.verifyError();
|
||||
} else {
|
||||
stpVer.expectNext(value).verifyComplete();
|
||||
}
|
||||
}
|
||||
|
||||
@ParameterizedTest
|
||||
@MethodSource("provideArgumentsSet")
|
||||
public void testSetValueGetAllValues(UpdateMode updateMode,
|
||||
String key,
|
||||
Map<String, String> value,
|
||||
boolean shouldFail) {
|
||||
var stpVer = StepVerifier
|
||||
.create(tempDb(db -> tempDictionary(db, updateMode)
|
||||
.map(dict -> tempDatabaseMapDictionaryDeepMap(dict, 5, 6))
|
||||
.flatMapMany(map -> map
|
||||
.putValue(key, value)
|
||||
.thenMany(map.getAllValues(null))
|
||||
.doFinally(s -> map.release())
|
||||
)
|
||||
));
|
||||
if (shouldFail) {
|
||||
stpVer.verifyError();
|
||||
} else {
|
||||
stpVer.expectNext(Map.entry(key, value)).verifyComplete();
|
||||
}
|
||||
}
|
||||
|
||||
@ParameterizedTest
|
||||
@MethodSource("provideArgumentsSet")
|
||||
public void testAtSetGetAllStagesGetAllValues(UpdateMode updateMode, String key, Map<String, String> value, boolean shouldFail) {
|
||||
var remainingEntries = new ConcurrentHashMap<Tuple3<String, String, String>, Boolean>().keySet(true);
|
||||
Step<Tuple3<String, String, String>> stpVer = StepVerifier
|
||||
.create(tempDb(db -> tempDictionary(db, updateMode)
|
||||
.map(dict -> tempDatabaseMapDictionaryDeepMap(dict, 5, 6))
|
||||
.flatMapMany(map -> map
|
||||
.at(null, key)
|
||||
.flatMap(v -> v
|
||||
.set(value)
|
||||
.doFinally(s -> v.release())
|
||||
)
|
||||
.then(map
|
||||
.at(null, "capra")
|
||||
.flatMap(v -> v
|
||||
.set(Map.of("normal", "123", "ormaln", "456"))
|
||||
.doFinally(s -> v.release())
|
||||
)
|
||||
)
|
||||
.thenMany(map
|
||||
.getAllStages(null)
|
||||
.flatMap(v -> v.getValue()
|
||||
.getAllValues(null)
|
||||
.map(result -> Tuples.of(v.getKey(), result.getKey(), result.getValue()))
|
||||
.doFinally(s -> v.getValue().release())
|
||||
)
|
||||
)
|
||||
.doFinally(s -> map.release())
|
||||
)
|
||||
));
|
||||
if (shouldFail) {
|
||||
stpVer.verifyError();
|
||||
} else {
|
||||
value.forEach((k, v) -> remainingEntries.add(Tuples.of(key, k, v)));
|
||||
remainingEntries.add(Tuples.of("capra", "normal", "123"));
|
||||
remainingEntries.add(Tuples.of("capra", "ormaln", "456"));
|
||||
for (Tuple3<String, String, String> ignored : remainingEntries) {
|
||||
stpVer = stpVer.expectNextMatches(remainingEntries::remove);
|
||||
}
|
||||
stpVer.verifyComplete();
|
||||
assert remainingEntries.isEmpty();
|
||||
}
|
||||
}
|
||||
|
||||
@ParameterizedTest
|
||||
@MethodSource("provideArgumentsPut")
|
||||
public void testAtPutValueAtGetValue(UpdateMode updateMode, String key1, String key2, String value, boolean shouldFail) {
|
||||
var stpVer = StepVerifier
|
||||
.create(tempDb(db -> tempDictionary(db, updateMode)
|
||||
.map(dict -> tempDatabaseMapDictionaryDeepMap(dict, 5, 6))
|
||||
.flatMap(map -> map
|
||||
.at(null, key1).flatMap(v -> v.putValue(key2, value).doFinally(s -> v.release()))
|
||||
.then(map.at(null, key1).flatMap(v -> v.getValue(null, key2).doFinally(s -> v.release())))
|
||||
.doFinally(s -> map.release())
|
||||
)
|
||||
));
|
||||
if (shouldFail) {
|
||||
stpVer.verifyError();
|
||||
} else {
|
||||
stpVer.expectNext(value).verifyComplete();
|
||||
}
|
||||
}
|
||||
|
||||
@ParameterizedTest
|
||||
@MethodSource("provideArgumentsSet")
|
||||
public void testSetAndGetPrevious(UpdateMode updateMode, String key, Map<String, String> value, boolean shouldFail) {
|
||||
var stpVer = StepVerifier
|
||||
.create(tempDb(db -> tempDictionary(db, updateMode)
|
||||
.map(dict -> tempDatabaseMapDictionaryDeepMap(dict, 5, 6))
|
||||
.flatMapMany(map -> Flux
|
||||
.concat(
|
||||
map
|
||||
.putValueAndGetPrevious(key, Map.of("error?", "error."))
|
||||
.defaultIfEmpty(Map.of("nothing", "nothing")),
|
||||
map.putValueAndGetPrevious(key, value),
|
||||
map.putValueAndGetPrevious(key, value)
|
||||
)
|
||||
.doFinally(s -> map.release())
|
||||
)
|
||||
));
|
||||
if (shouldFail) {
|
||||
stpVer.verifyError();
|
||||
} else {
|
||||
stpVer.expectNext(Map.of("nothing", "nothing"), Map.of("error?", "error.")).expectNext(value).verifyComplete();
|
||||
}
|
||||
}
|
||||
|
||||
@ParameterizedTest
|
||||
@MethodSource("provideArgumentsPut")
|
||||
public void testAtPutValueAndGetPrevious(UpdateMode updateMode, String key1, String key2, String value, boolean shouldFail) {
|
||||
var stpVer = StepVerifier
|
||||
.create(tempDb(db -> tempDictionary(db, updateMode)
|
||||
.map(dict -> tempDatabaseMapDictionaryDeepMap(dict, 5, 6))
|
||||
.flatMapMany(map -> Flux
|
||||
.concat(
|
||||
map
|
||||
.at(null, key1)
|
||||
.flatMap(v -> v
|
||||
.putValueAndGetPrevious(key2, "error?")
|
||||
.doFinally(s -> v.release())
|
||||
),
|
||||
map
|
||||
.at(null, key1)
|
||||
.flatMap(v -> v
|
||||
.putValueAndGetPrevious(key2, value)
|
||||
.doFinally(s -> v.release())
|
||||
),
|
||||
map
|
||||
.at(null, key1)
|
||||
.flatMap(v -> v
|
||||
.putValueAndGetPrevious(key2, value)
|
||||
.doFinally(s -> v.release())
|
||||
)
|
||||
)
|
||||
.doFinally(s -> map.release())
|
||||
)
|
||||
));
|
||||
if (shouldFail) {
|
||||
stpVer.verifyError();
|
||||
} else {
|
||||
stpVer.expectNext("error?", value).verifyComplete();
|
||||
}
|
||||
}
|
||||
|
||||
@ParameterizedTest
|
||||
@MethodSource("provideArgumentsSet")
|
||||
public void testSetValueRemoveAndGetPrevious(UpdateMode updateMode, String key, Map<String, String> value, boolean shouldFail) {
|
||||
var stpVer = StepVerifier
|
||||
.create(tempDb(db -> tempDictionary(db, updateMode)
|
||||
.map(dict -> tempDatabaseMapDictionaryDeepMap(dict, 5, 6))
|
||||
.flatMapMany(map -> Flux
|
||||
.concat(
|
||||
map.removeAndGetPrevious(key),
|
||||
map.putValue(key, value).then(map.removeAndGetPrevious(key)),
|
||||
map.removeAndGetPrevious(key)
|
||||
)
|
||||
.doFinally(s -> map.release())
|
||||
)
|
||||
));
|
||||
if (shouldFail) {
|
||||
stpVer.verifyError();
|
||||
} else {
|
||||
stpVer.expectNext(value).verifyComplete();
|
||||
}
|
||||
}
|
||||
|
||||
@ParameterizedTest
@MethodSource("provideArgumentsPut")
public void testAtPutValueRemoveAndGetPrevious(UpdateMode updateMode, String key1, String key2, String value, boolean shouldFail) {
var stpVer = StepVerifier
.create(tempDb(db -> tempDictionary(db, updateMode)
.map(dict -> tempDatabaseMapDictionaryDeepMap(dict, 5, 6))
.flatMapMany(map -> Flux
.concat(
map
.at(null, key1)
.flatMap(v -> v
.putValue(key2, "error?")
.then(v.removeAndGetPrevious(key2))
.doFinally(s -> v.release())
),
map
.at(null, key1)
.flatMap(v -> v
.putValue(key2, value)
.then(v.removeAndGetPrevious(key2))
.doFinally(s -> v.release())
),
map
.at(null, key1)
.flatMap(v -> v.removeAndGetPrevious(key2)
.doFinally(s -> v.release())
)
)
.doFinally(s -> map.release())
)
));
if (shouldFail) {
stpVer.verifyError();
} else {
stpVer.expectNext("error?", value).verifyComplete();
}
}

@ParameterizedTest
@MethodSource("provideArgumentsSet")
public void testSetValueRemoveAndGetStatus(UpdateMode updateMode, String key, Map<String, String> value, boolean shouldFail) {
var stpVer = StepVerifier
.create(tempDb(db -> tempDictionary(db, updateMode)
.map(dict -> tempDatabaseMapDictionaryDeepMap(dict, 5, 6))
.flatMapMany(map -> Flux
.concat(
map.removeAndGetStatus(key),
map.putValue(key, value).then(map.removeAndGetStatus(key)),
map.removeAndGetStatus(key)
)
.doFinally(s -> map.release())
)
));
if (shouldFail) {
stpVer.verifyError();
} else {
stpVer.expectNext(false, true, false).verifyComplete();
}
}

@ParameterizedTest
@MethodSource("provideArgumentsPut")
public void testAtPutValueRemoveAndGetStatus(UpdateMode updateMode, String key1, String key2, String value, boolean shouldFail) {
var stpVer = StepVerifier
.create(tempDb(db -> tempDictionary(db, updateMode)
.map(dict -> tempDatabaseMapDictionaryDeepMap(dict, 5, 6))
.flatMapMany(map -> Flux
.concat(
map
.at(null, key1)
.flatMap(v -> v
.putValue(key2, "error?")
.then(v.removeAndGetStatus(key2))
.doFinally(s -> v.release())
),
map
.at(null, key1)
.flatMap(v -> v
.putValue(key2, value)
.then(v.removeAndGetStatus(key2))
.doFinally(s -> v.release())
),
map
.at(null, key1)
.flatMap(v -> v.removeAndGetStatus(key2)
.doFinally(s -> v.release())
)
)
.doFinally(s -> map.release())
)
));
if (shouldFail) {
stpVer.verifyError();
} else {
stpVer.expectNext(true, true, false).verifyComplete();
}
}

@ParameterizedTest
@MethodSource("provideArgumentsSet")
public void testUpdate(UpdateMode updateMode, String key, Map<String, String> value, boolean shouldFail) {
if (updateMode != UpdateMode.ALLOW_UNSAFE && !isTestBadKeysEnabled()) {
return;
}
var stpVer = StepVerifier
.create(tempDb(db -> tempDictionary(db, updateMode)
.map(dict -> tempDatabaseMapDictionaryDeepMap(dict, 5, 6))
.flatMapMany(map -> Flux
.concat(
map.updateValue(key, old -> {
assert old == null;
return Map.of("error?", "error.");
}),
map.updateValue(key, false, old -> {
assert Objects.equals(old, Map.of("error?", "error."));
return Map.of("error?", "error.");
}),
map.updateValue(key, true, old -> {
assert Objects.equals(old, Map.of("error?", "error."));
return Map.of("error?", "error.");
}),
map.updateValue(key, true, old -> {
assert Objects.equals(old, Map.of("error?", "error."));
return value;
}),
map.updateValue(key, true, old -> {
assert Objects.equals(old, value);
return value;
})
)
.doFinally(s -> map.release())
)
));
if (updateMode != UpdateMode.ALLOW_UNSAFE || shouldFail) {
stpVer.verifyError();
} else {
stpVer.expectNext(true, false, false, true, false).verifyComplete();
}
}

@ParameterizedTest
@MethodSource("provideArgumentsPut")
public void testAtUpdate(UpdateMode updateMode, String key1, String key2, String value, boolean shouldFail) {
if (updateMode == UpdateMode.DISALLOW && !isTestBadKeysEnabled()) {
return;
}
var stpVer = StepVerifier
.create(tempDb(db -> tempDictionary(db, updateMode)
.map(dict -> tempDatabaseMapDictionaryDeepMap(dict, 5, 6))
.flatMapMany(map -> Flux
.concat(
map
.at(null, key1)
.flatMap(v -> v
.updateValue(key2, prev -> prev)
.doFinally(s -> v.release())
),
map
.at(null, key1)
.flatMap(v -> v
.updateValue(key2, prev -> value)
.doFinally(s -> v.release())
),
map
.at(null, key1)
.flatMap(v -> v
.updateValue(key2, prev -> value)
.doFinally(s -> v.release())
),
map
.at(null, key1)
.flatMap(v -> v
.updateValue(key2, prev -> null)
.doFinally(s -> v.release())
)
)
.doFinally(s -> map.release())
)
));
if (updateMode == UpdateMode.DISALLOW || shouldFail) {
stpVer.verifyError();
} else {
stpVer.expectNext(false, true, false, true).verifyComplete();
}
}

@ParameterizedTest
|
||||
@MethodSource("provideArgumentsSet")
|
||||
public void testUpdateGet(UpdateMode updateMode, String key, Map<String, String> value, boolean shouldFail) {
|
||||
if (updateMode != UpdateMode.ALLOW_UNSAFE && !isTestBadKeysEnabled()) {
|
||||
return;
|
||||
}
|
||||
var stpVer = StepVerifier
|
||||
.create(tempDb(db -> tempDictionary(db, updateMode)
|
||||
.map(dict -> tempDatabaseMapDictionaryDeepMap(dict, 5, 6))
|
||||
.flatMapMany(map -> Flux
|
||||
.concat(
|
||||
map.updateValue(key, old -> {
|
||||
assert old == null;
|
||||
return Map.of("error?", "error.");
|
||||
}).then(map.getValue(null, key)),
|
||||
map.updateValue(key, false, old -> {
|
||||
assert Objects.equals(old, Map.of("error?", "error."));
|
||||
return Map.of("error?", "error.");
|
||||
}).then(map.getValue(null, key)),
|
||||
map.updateValue(key, true, old -> {
|
||||
assert Objects.equals(old, Map.of("error?", "error."));
|
||||
return Map.of("error?", "error.");
|
||||
}).then(map.getValue(null, key)),
|
||||
map.updateValue(key, true, old -> {
|
||||
assert Objects.equals(old, Map.of("error?", "error."));
|
||||
return value;
|
||||
}).then(map.getValue(null, key)),
|
||||
map.updateValue(key, true, old -> {
|
||||
assert Objects.equals(old, value);
|
||||
return value;
|
||||
}).then(map.getValue(null, key))
|
||||
)
|
||||
.doFinally(s -> map.release())
|
||||
)
|
||||
));
|
||||
if (updateMode != UpdateMode.ALLOW_UNSAFE || shouldFail) {
|
||||
stpVer.verifyError();
|
||||
} else {
|
||||
stpVer.expectNext(Map.of("error?", "error."), Map.of("error?", "error."), Map.of("error?", "error."), value, value).verifyComplete();
|
||||
}
|
||||
}
|
||||
|
||||
@ParameterizedTest
|
||||
@MethodSource("provideArgumentsPut")
|
||||
public void testAtUpdateGetValue(UpdateMode updateMode, String key1, String key2, String value, boolean shouldFail) {
|
||||
if (updateMode == UpdateMode.DISALLOW && !isTestBadKeysEnabled()) {
|
||||
return;
|
||||
}
|
||||
var stpVer = StepVerifier
|
||||
.create(tempDb(db -> tempDictionary(db, updateMode)
|
||||
.map(dict -> tempDatabaseMapDictionaryDeepMap(dict, 5, 6))
|
||||
.flatMapMany(map -> Flux
|
||||
.concat(
|
||||
map
|
||||
.at(null, key1)
|
||||
.flatMap(v -> v
|
||||
.updateValue(key2, prev -> prev)
|
||||
.then(v.getValue(null, key2))
|
||||
.defaultIfEmpty("empty")
|
||||
.doFinally(s -> v.release())
|
||||
),
|
||||
map
|
||||
.at(null, key1)
|
||||
.flatMap(v -> v
|
||||
.updateValue(key2, prev -> value)
|
||||
.then(v.getValue(null, key2))
|
||||
.defaultIfEmpty("empty")
|
||||
.doFinally(s -> v.release())
|
||||
),
|
||||
map
|
||||
.at(null, key1)
|
||||
.flatMap(v -> v
|
||||
.updateValue(key2, prev -> value)
|
||||
.then(v.getValue(null, key2))
|
||||
.defaultIfEmpty("empty")
|
||||
.doFinally(s -> v.release())
|
||||
),
|
||||
map
|
||||
.at(null, key1)
|
||||
.flatMap(v -> v
|
||||
.updateValue(key2, prev -> null)
|
||||
.then(v.getValue(null, key2))
|
||||
.defaultIfEmpty("empty")
|
||||
.doFinally(s -> v.release())
|
||||
)
|
||||
)
|
||||
.doFinally(s -> map.release())
|
||||
)
|
||||
));
|
||||
if (updateMode == UpdateMode.DISALLOW || shouldFail) {
|
||||
stpVer.verifyError();
|
||||
} else {
|
||||
stpVer.expectNext("empty", value, value, "empty").verifyComplete();
|
||||
}
|
||||
}
|
||||
|
||||
@ParameterizedTest
|
||||
@MethodSource("provideArgumentsSet")
|
||||
public void testSetAndGetChanged(UpdateMode updateMode, String key, Map<String, String> value, boolean shouldFail) {
|
||||
var stpVer = StepVerifier
|
||||
.create(tempDb(db -> tempDictionary(db, updateMode)
|
||||
.map(dict -> tempDatabaseMapDictionaryDeepMap(dict, 5, 6))
|
||||
.flatMapMany(map -> Flux
|
||||
.concat(
|
||||
map.putValueAndGetChanged(key, Map.of("error?", "error.")).single(),
|
||||
map.putValueAndGetChanged(key, value).single(),
|
||||
map.putValueAndGetChanged(key, value).single(),
|
||||
map.remove(key),
|
||||
map.putValueAndGetChanged(key, Map.of("error?", "error.")).single()
|
||||
)
|
||||
.doFinally(s -> map.release())
|
||||
)
|
||||
));
|
||||
if (shouldFail) {
|
||||
stpVer.verifyError();
|
||||
} else {
|
||||
stpVer.expectNext(true, true, false, true).verifyComplete();
|
||||
}
|
||||
}
|
||||
|
||||
	private static Stream<Arguments> provideArgumentsSetMulti() {
		var goodKeys = Set.of(Set.of("12345", "67890"), Set.of("zebra"), Set.<String>of());
		Set<Set<String>> badKeys;
		if (isTestBadKeysEnabled()) {
			badKeys = Set.of(Set.of("", "12345"), Set.of("12345", "a"), Set.of("45678", "aaaa"), Set.of("aaaaaa", "capra"));
		} else {
			badKeys = Set.of();
		}
		Set<Tuple2<Set<String>, Boolean>> keys = Stream.concat(
				goodKeys.stream().map(s -> Tuples.of(s, false)),
				badKeys.stream().map(s -> Tuples.of(s, true))
		).collect(Collectors.toSet());
		var values = Set.of(
				Map.of("123456", "a", "234567", ""),
				Map.of("123456", "", "234567", "bb"),
				Map.of("123456", "\0", "234567", "\0\0", "345678", BIG_STRING)
		);

		return keys
			.stream()
			.map(keyTuple -> keyTuple.mapT1(ks -> Flux
				.zip(Flux.fromIterable(ks), Flux.fromIterable(values))
				.collectMap(Tuple2::getT1, Tuple2::getT2)
				.block()
			))
			.flatMap(entryTuple -> Arrays.stream(UpdateMode.values()).map(updateMode -> Tuples.of(updateMode,
					entryTuple.getT1(),
					entryTuple.getT2()
			)))
			.map(fullTuple -> Arguments.of(fullTuple.getT1(), fullTuple.getT2(), fullTuple.getT3()));
	}

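One detail of the provider above is worth calling out: Flux.zip pairs the two sources positionally and completes with the shorter one, so the empty key set Set.<String>of() collapses to an empty entries map, and because Set iteration order is unspecified, which value map a given key ends up with is arbitrary (which is why the tests below match entries in any order). The zip-and-collect step in isolation, with illustrative data (a sketch, not part of the test class):

	// Sketch: Flux.zip truncates to the shorter source; two keys consume only two value maps.
	List<String> ks = List.of("12345", "67890");
	List<Map<String, String>> vs = List.of(
			Map.of("123456", "a"),
			Map.of("123456", "", "234567", "bb"),
			Map.of("345678", "ignored") // never paired: ks has only two elements
	);
	Map<String, Map<String, String>> zipped = Flux
			.zip(Flux.fromIterable(ks), Flux.fromIterable(vs))
			.collectMap(Tuple2::getT1, Tuple2::getT2)
			.block();
	// zipped == {"12345"={123456=a}, "67890"={123456=, 234567=bb}}
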
	@ParameterizedTest
	@MethodSource("provideArgumentsSetMulti")
	public void testSetMultiGetMulti(UpdateMode updateMode, Map<String, Map<String, String>> entries, boolean shouldFail) {
		var remainingEntries = new ConcurrentHashMap<Entry<String, Map<String, String>>, Boolean>().keySet(true);
		Step<Entry<String, Map<String, String>>> stpVer = StepVerifier
			.create(tempDb(db -> tempDictionary(db, updateMode)
				.map(dict -> tempDatabaseMapDictionaryDeepMap(dict, 5, 6))
				.flatMapMany(map -> Flux
					.concat(
						map.putMulti(Flux.fromIterable(entries.entrySet())).then(Mono.empty()),
						map.getMulti(null, Flux.fromIterable(entries.keySet()))
					)
					.doFinally(s -> map.release())
				)
			));
		if (shouldFail) {
			stpVer.verifyError();
		} else {
			entries.forEach((k, v) -> remainingEntries.add(Map.entry(k, v)));
			for (Entry<String, Map<String, String>> ignored : remainingEntries) {
				stpVer = stpVer.expectNextMatches(remainingEntries::remove);
			}
			stpVer.verifyComplete();
		}
	}

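Since getMulti emits the entries of a Set-backed map, their order is not guaranteed; the loop above therefore adds one expectNextMatches(remainingEntries::remove) step per expected entry. The predicates only run later, inside verifyComplete(), and remove returns false for a duplicate or unexpected emission, which fails the verification. The same idiom in isolation (a minimal sketch with made-up data):

	// Sketch: assert that a Flux emits exactly the elements of a set, in any order.
	Set<String> expected = ConcurrentHashMap.newKeySet();
	expected.addAll(Set.of("a", "b", "c"));
	StepVerifier.Step<String> step = StepVerifier.create(Flux.just("c", "a", "b"));
	for (int i = 0, n = expected.size(); i < n; i++) {
		step = step.expectNextMatches(expected::remove); // false (failure) on duplicates or strangers
	}
	step.verifyComplete();
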
	@ParameterizedTest
	@MethodSource("provideArgumentsSetMulti")
	public void testSetAllValuesGetMulti(UpdateMode updateMode, Map<String, Map<String, String>> entries, boolean shouldFail) {
		var remainingEntries = new ConcurrentHashMap<Entry<String, Map<String, String>>, Boolean>().keySet(true);
		Step<Entry<String, Map<String, String>>> stpVer = StepVerifier
			.create(tempDb(db -> tempDictionary(db, updateMode)
				.map(dict -> tempDatabaseMapDictionaryDeepMap(dict, 5, 6))
				.flatMapMany(map -> map
					.setAllValues(Flux.fromIterable(entries.entrySet()))
					.thenMany(map.getMulti(null, Flux.fromIterable(entries.keySet())))
					.doFinally(s -> map.release())
				)
			));
		if (shouldFail) {
			stpVer.verifyError();
		} else {
			entries.forEach((k, v) -> remainingEntries.add(Map.entry(k, v)));
			for (Entry<String, Map<String, String>> ignored : remainingEntries) {
				stpVer = stpVer.expectNextMatches(remainingEntries::remove);
			}
			stpVer.verifyComplete();
		}
	}

	@ParameterizedTest
	@MethodSource("provideArgumentsSetMulti")
	public void testSetAllValuesAndGetPrevious(UpdateMode updateMode, Map<String, Map<String, String>> entries, boolean shouldFail) {
		var remainingEntries = new ConcurrentHashMap<Entry<String, Map<String, String>>, Boolean>().keySet(true);
		Step<Entry<String, Map<String, String>>> stpVer = StepVerifier
			.create(tempDb(db -> tempDictionary(db, updateMode)
				.map(dict -> tempDatabaseMapDictionaryDeepMap(dict, 5, 6))
				.flatMapMany(map -> Flux
					.concat(
						map.setAllValuesAndGetPrevious(Flux.fromIterable(entries.entrySet())),
						map.setAllValuesAndGetPrevious(Flux.fromIterable(entries.entrySet()))
					)
					.doFinally(s -> map.release())
				)
			));
		if (shouldFail) {
			stpVer.verifyError();
		} else {
			entries.forEach((k, v) -> remainingEntries.add(Map.entry(k, v)));
			for (Entry<String, Map<String, String>> ignored : remainingEntries) {
				stpVer = stpVer.expectNextMatches(remainingEntries::remove);
			}
			stpVer.verifyComplete();
		}
	}

	@ParameterizedTest
	@MethodSource("provideArgumentsSetMulti")
	public void testSetGetMulti(UpdateMode updateMode, Map<String, Map<String, String>> entries, boolean shouldFail) {
		var remainingEntries = new ConcurrentHashMap<Entry<String, Map<String, String>>, Boolean>().keySet(true);
		Step<Entry<String, Map<String, String>>> stpVer = StepVerifier
			.create(tempDb(db -> tempDictionary(db, updateMode)
				.map(dict -> tempDatabaseMapDictionaryDeepMap(dict, 5, 6))
				.flatMapMany(map -> Flux
					.concat(
						map.set(entries).then(Mono.empty()),
						map.getMulti(null, Flux.fromIterable(entries.keySet()))
					)
					.doFinally(s -> map.release())
				)
			));
		if (shouldFail) {
			stpVer.verifyError();
		} else {
			entries.forEach((k, v) -> remainingEntries.add(Map.entry(k, v)));
			for (Entry<String, Map<String, String>> ignored : remainingEntries) {
				stpVer = stpVer.expectNextMatches(remainingEntries::remove);
			}
			stpVer.verifyComplete();
		}
	}

	@ParameterizedTest
	@MethodSource("provideArgumentsSetMulti")
	public void testSetAndGetStatus(UpdateMode updateMode, Map<String, Map<String, String>> entries, boolean shouldFail) {
		Step<Boolean> stpVer = StepVerifier
			.create(tempDb(db -> tempDictionary(db, updateMode)
				.map(dict -> tempDatabaseMapDictionaryDeepMap(dict, 5, 6))
				.flatMapMany(map -> {
					Mono<Void> removalMono;
					if (entries.isEmpty()) {
						removalMono = Mono.empty();
					} else {
						removalMono = map.remove(entries.keySet().stream().findAny().orElseThrow());
					}
					return Flux
						.concat(
							map.setAndGetChanged(entries).single(),
							map.setAndGetChanged(entries).single(),
							removalMono.then(Mono.empty()),
							map.setAndGetChanged(entries).single()
						)
						.doFinally(s -> map.release());
				})
			));
		if (shouldFail) {
			stpVer.verifyError();
		} else {
			stpVer.expectNext(!entries.isEmpty(), false, !entries.isEmpty()).verifyComplete();
		}
	}

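The expected triple (!entries.isEmpty(), false, !entries.isEmpty()) encodes the intended contract: setAndGetChanged reports whether the stored content actually changed, so re-setting identical content is a no-op and restoring the removed key counts as a change, while an empty entries map makes every set a no-op (false, false, false). A plain-map reading of that contract, assuming change detection by content equality (a hypothetical sketch, not the library implementation):

	// Sketch: setAndGetChanged semantics against an in-memory map, assuming
	// "changed" means the new content differs from the previous content.
	static <K, V> boolean setAndGetChangedSketch(Map<K, V> store, Map<K, V> content) {
		boolean changed = !store.equals(content); // identical re-set reports false
		store.clear();
		store.putAll(content);
		return changed;
	}
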
	@ParameterizedTest
	@MethodSource("provideArgumentsSetMulti")
	public void testSetAndGetPrevious(UpdateMode updateMode, Map<String, Map<String, String>> entries, boolean shouldFail) {
		var remainingEntries = new ConcurrentHashMap<Entry<String, Map<String, String>>, Boolean>().keySet(true);
		Step<Entry<String, Map<String, String>>> stpVer = StepVerifier
			.create(tempDb(db -> tempDictionary(db, updateMode)
				.map(dict -> tempDatabaseMapDictionaryDeepMap(dict, 5, 6))
				.flatMapMany(map -> Flux
					.concat(
						map.setAndGetPrevious(entries),
						map.setAndGetPrevious(entries)
					)
					.map(Map::entrySet)
					.flatMap(Flux::fromIterable)
					.doFinally(s -> map.release())
				)
			));
		if (shouldFail) {
			stpVer.verifyError();
		} else {
			entries.forEach((k, v) -> remainingEntries.add(Map.entry(k, v)));
			for (Entry<String, Map<String, String>> ignored : remainingEntries) {
				stpVer = stpVer.expectNextMatches(remainingEntries::remove);
			}
			stpVer.verifyComplete();
		}
	}

	@ParameterizedTest
	@MethodSource("provideArgumentsSetMulti")
	public void testSetClearAndGetPreviousGet(UpdateMode updateMode, Map<String, Map<String, String>> entries, boolean shouldFail) {
		var remainingEntries = new ConcurrentHashMap<Entry<String, Map<String, String>>, Boolean>().keySet(true);
		Step<Entry<String, Map<String, String>>> stpVer = StepVerifier
			.create(tempDb(db -> tempDictionary(db, updateMode)
				.map(dict -> tempDatabaseMapDictionaryDeepMap(dict, 5, 6))
				.flatMapMany(map -> Flux
					.concat(map.set(entries).then(Mono.empty()), map.clearAndGetPrevious(), map.get(null))
					.map(Map::entrySet)
					.flatMap(Flux::fromIterable)
					.doFinally(s -> map.release())
				)
			));
		if (shouldFail) {
			stpVer.verifyError();
		} else {
			entries.forEach((k, v) -> remainingEntries.add(Map.entry(k, v)));
			for (Entry<String, Map<String, String>> ignored : remainingEntries) {
				stpVer = stpVer.expectNextMatches(remainingEntries::remove);
			}
			stpVer.verifyComplete();
		}
	}

	@ParameterizedTest
	@MethodSource("provideArgumentsSetMulti")
	public void testSetMultiGetAllValues(UpdateMode updateMode, Map<String, Map<String, String>> entries, boolean shouldFail) {
		var remainingEntries = new ConcurrentHashMap<Entry<String, Map<String, String>>, Boolean>().keySet(true);
		Step<Entry<String, Map<String, String>>> stpVer = StepVerifier
			.create(tempDb(db -> tempDictionary(db, updateMode)
				.map(dict -> tempDatabaseMapDictionaryDeepMap(dict, 5, 6))
				.flatMapMany(map -> Flux
					.concat(
						map.putMulti(Flux.fromIterable(entries.entrySet())).then(Mono.empty()),
						map.getAllValues(null)
					)
					.doFinally(s -> map.release())
				)
			));
		if (shouldFail) {
			stpVer.verifyError();
		} else {
			entries.forEach((k, v) -> remainingEntries.add(Map.entry(k, v)));
			for (Entry<String, Map<String, String>> ignored : remainingEntries) {
				stpVer = stpVer.expectNextMatches(remainingEntries::remove);
			}
			stpVer.verifyComplete();
		}
	}

	@ParameterizedTest
	@MethodSource("provideArgumentsSetMulti")
	public void testSetMultiGet(UpdateMode updateMode, Map<String, Map<String, String>> entries, boolean shouldFail) {
		var remainingEntries = new ConcurrentHashMap<Entry<String, Map<String, String>>, Boolean>().keySet(true);
		Step<Entry<String, Map<String, String>>> stpVer = StepVerifier
			.create(tempDb(db -> tempDictionary(db, updateMode)
				.map(dict -> tempDatabaseMapDictionaryDeepMap(dict, 5, 6))
				.flatMapMany(map -> Flux
					.concat(
						map.putMulti(Flux.fromIterable(entries.entrySet())).then(Mono.empty()),
						map.get(null)
							.map(Map::entrySet)
							.flatMapMany(Flux::fromIterable)
					)
					.doFinally(s -> map.release())
				)
			));
		if (shouldFail) {
			stpVer.verifyError();
		} else {
			entries.forEach((k, v) -> remainingEntries.add(Map.entry(k, v)));
			for (Entry<String, Map<String, String>> ignored : remainingEntries) {
				stpVer = stpVer.expectNextMatches(remainingEntries::remove);
			}
			stpVer.verifyComplete();
		}
	}

	@ParameterizedTest
	@MethodSource("provideArgumentsSetMulti")
	public void testSetMultiGetAllStagesGet(UpdateMode updateMode, Map<String, Map<String, String>> entries, boolean shouldFail) {
		var remainingEntries = new ConcurrentHashMap<Entry<String, Map<String, String>>, Boolean>().keySet(true);
		Step<Entry<String, Map<String, String>>> stpVer = StepVerifier
			.create(tempDb(db -> tempDictionary(db, updateMode)
				.map(dict -> tempDatabaseMapDictionaryDeepMap(dict, 5, 6))
				.flatMapMany(map -> Flux
					.concat(
						map.putMulti(Flux.fromIterable(entries.entrySet())).then(Mono.empty()),
						map
							.getAllStages(null)
							.flatMap(stage -> stage
								.getValue()
								.get(null)
								.map(val -> Map.entry(stage.getKey(), val))
								.doFinally(s -> stage.getValue().release())
							)
					)
					.doFinally(s -> map.release())
				)
			));
		if (shouldFail) {
			stpVer.verifyError();
		} else {
			entries.forEach((k, v) -> remainingEntries.add(Map.entry(k, v)));
			for (Entry<String, Map<String, String>> ignored : remainingEntries) {
				stpVer = stpVer.expectNextMatches(remainingEntries::remove);
			}
			stpVer.verifyComplete();
		}
	}

	@ParameterizedTest
	@MethodSource("provideArgumentsSetMulti")
	public void testSetMultiIsEmpty(UpdateMode updateMode, Map<String, Map<String, String>> entries, boolean shouldFail) {
		Step<Boolean> stpVer = StepVerifier
			.create(tempDb(db -> tempDictionary(db, updateMode)
				.map(dict -> tempDatabaseMapDictionaryDeepMap(dict, 5, 6))
				.flatMapMany(map -> Flux
					.concat(
						map.isEmpty(null),
						map.putMulti(Flux.fromIterable(entries.entrySet())).then(Mono.empty()),
						map.isEmpty(null)
					)
					.doFinally(s -> map.release())
				)
			));
		if (shouldFail) {
			stpVer.expectNext(true).verifyError();
		} else {
			stpVer.expectNext(true, entries.isEmpty()).verifyComplete();
		}
	}

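Note that the failing branch still expects one emission before the error: Flux.concat subscribes to its sources one at a time, so the first isEmpty(null) completes with true before the bad key makes putMulti fail. The ordering guarantee in isolation (a minimal sketch):

	// Sketch: concat delivers everything emitted before the failing source, then the error.
	StepVerifier
		.create(Flux.concat(Mono.just(true), Mono.error(new IllegalStateException("bad key"))))
		.expectNext(true)
		.verifyError(IllegalStateException.class);
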
	@ParameterizedTest
	@MethodSource("provideArgumentsSetMulti")
	public void testSetMultiClear(UpdateMode updateMode, Map<String, Map<String, String>> entries, boolean shouldFail) {
		Step<Boolean> stpVer = StepVerifier
			.create(tempDb(db -> tempDictionary(db, updateMode)
				.map(dict -> tempDatabaseMapDictionaryDeepMap(dict, 5, 6))
				.flatMapMany(map -> Flux
					.concat(
						map.isEmpty(null),
						map.putMulti(Flux.fromIterable(entries.entrySet())).then(Mono.empty()),
						map.isEmpty(null),
						map.clear().then(Mono.empty()),
						map.isEmpty(null)
					)
					.doFinally(s -> map.release())
				)
			));
		if (shouldFail) {
			stpVer.expectNext(true).verifyError();
		} else {
			stpVer.expectNext(true, entries.isEmpty(), true).verifyComplete();
		}
	}
}

@ -1,6 +1,6 @@
package it.cavallium.dbengine.client;
package it.cavallium.dbengine;

import static it.cavallium.dbengine.client.DbTestUtils.tempDb;
import static it.cavallium.dbengine.DbTestUtils.tempDb;

import it.cavallium.dbengine.database.LLKeyValueDatabase;
import it.cavallium.dbengine.database.collections.DatabaseInt;
@ -9,8 +9,6 @@ import java.util.stream.Stream;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.ArgumentsProvider;
import org.junit.jupiter.params.provider.ArgumentsSource;
import org.junit.jupiter.params.provider.MethodSource;
import org.junit.jupiter.params.provider.ValueSource;
import reactor.core.publisher.Mono;