Test more numbers

Andrea Cavalli 2021-07-18 19:37:24 +02:00
parent ff9ee54857
commit aa1aa7a6fb
25 changed files with 317 additions and 138 deletions

CountedStream.java

@@ -44,12 +44,4 @@ public class CountedStream<T> {
 	public Mono<List<T>> collectList() {
 		return stream.collectList();
 	}
-
-	public static <T> Mono<CountedStream<T>> counted(Flux<T> flux) {
-		var publishedFlux = flux.cache();
-		return publishedFlux
-				.count()
-				.map(count -> new CountedStream<>(publishedFlux, count))
-				.switchIfEmpty(Mono.fromSupplier(() -> new CountedStream<>(Flux.empty(), 0)));
-	}
 }

DatabaseOptions.java

@@ -13,4 +13,5 @@ public record DatabaseOptions(Map<String, String> extraFlags,
 		boolean useDirectIO,
 		boolean allowMemoryMapping,
 		boolean allowNettyDirect,
-		boolean useNettyDirect) {}
+		boolean useNettyDirect,
+		boolean enableDbAssertionsWhenUsingAssertions) {}

LuceneIndex.java

@@ -87,7 +87,7 @@ public interface LuceneIndex<T, U> extends LLSnapshottable {
 	Mono<Void> flush();
 
-	Mono<Void> refresh();
+	Mono<Void> refresh(boolean force);
 
 	private static <T, U> ValueTransformer<T, U> getValueGetterTransformer(ValueGetter<T, U> valueGetter) {
 		return new ValueTransformer<T, U>() {

LuceneIndexImpl.java

@@ -93,7 +93,7 @@ public class LuceneIndexImpl<T, U> implements LuceneIndex<T, U> {
 	private Mono<SearchResult<T, U>> transformLuceneResultWithValues(LLSearchResultShard llSearchResult,
 			ValueGetter<T, U> valueGetter) {
-		return Mono.just(new SearchResult<>(llSearchResult.results().map(signal -> {
+		return Mono.fromCallable(() -> new SearchResult<>(llSearchResult.results().map(signal -> {
 			var key = signal.key().map(indicizer::getKey);
 			return new SearchResultItem<>(key, key.flatMap(valueGetter::get), signal.score());
 		}), llSearchResult.totalHitsCount(), llSearchResult.release()));
@@ -110,7 +110,10 @@ public class LuceneIndexImpl<T, U> implements LuceneIndex<T, U> {
 				Mono.just(tuple3.getT3()),
 				tuple3.getT1()
 		));
-		return Mono.just(new SearchResult<>(resultItemsFlux, llSearchResult.totalHitsCount(), llSearchResult.release()));
+		return Mono.fromCallable(() -> new SearchResult<>(resultItemsFlux,
+				llSearchResult.totalHitsCount(),
+				llSearchResult.release()
+		));
 	}
 
 	@Override
@@ -214,8 +217,8 @@ public class LuceneIndexImpl<T, U> implements LuceneIndex<T, U> {
 	 * Refresh index searcher
 	 */
 	@Override
-	public Mono<Void> refresh() {
-		return luceneIndex.refresh();
+	public Mono<Void> refresh(boolean force) {
+		return luceneIndex.refresh(force);
 	}
 
 	@Override

LLLuceneIndex.java

@@ -70,5 +70,5 @@ public interface LLLuceneIndex extends LLSnapshottable {
 	/**
 	 * Refresh index searcher
 	 */
-	Mono<Void> refresh();
+	Mono<Void> refresh(boolean force);
 }
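
Note: the new boolean parameter propagates through LuceneIndex, LLLuceneIndex, and the local/multi implementations below, choosing between Lucene's SearcherManager.maybeRefresh() and maybeRefreshBlocking(). A minimal caller-side sketch, not from the commit — the LLLuceneIndex package path is assumed from imports shown elsewhere in this diff, and blocking is for illustration only:

import it.cavallium.dbengine.database.LLLuceneIndex;
import reactor.core.publisher.Mono;

class RefreshExample {
	// `index` stands for any LLLuceneIndex implementation (assumed to exist).
	static Mono<Void> refreshSoft(LLLuceneIndex index) {
		return index.refresh(false); // maybeRefresh(): best-effort, returns immediately if busy
	}

	static Mono<Void> refreshHard(LLLuceneIndex index) {
		return index.refresh(true);  // maybeRefreshBlocking(): waits until the new searcher is visible
	}
}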

DatabaseStageMap.java

@@ -12,7 +12,9 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Objects;
 import java.util.Optional;
+import java.util.Queue;
 import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentLinkedQueue;
 import java.util.function.BiFunction;
 import java.util.function.Function;
 import org.jetbrains.annotations.Nullable;
@@ -115,12 +117,20 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
 		return removeAndGetPrevious(key).map(o -> true).defaultIfEmpty(false);
 	}
 
+	/**
+	 * GetMulti must return the elements in sequence!
+	 */
 	default Flux<Entry<T, U>> getMulti(@Nullable CompositeSnapshot snapshot, Flux<T> keys, boolean existsAlmostCertainly) {
-		return keys.flatMapSequential(key -> this
-				.getValue(snapshot, key, existsAlmostCertainly)
-				.map(value -> Map.entry(key, value)));
+		return keys
+				.flatMapSequential(key -> this
+						.getValue(snapshot, key, existsAlmostCertainly)
+						.map(value -> Map.entry(key, value))
+				);
 	}
 
+	/**
+	 * GetMulti must return the elements in sequence!
+	 */
 	default Flux<Entry<T, U>> getMulti(@Nullable CompositeSnapshot snapshot, Flux<T> keys) {
 		return getMulti(snapshot, keys, false);
 	}
@@ -271,9 +281,14 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
 			@Override
 			public <X> Flux<Tuple3<X, T, U>> transform(Flux<Tuple2<X, T>> keys) {
 				return Flux.defer(() -> {
-					ConcurrentHashMap<T, X> extraValues = new ConcurrentHashMap<>();
-					return getMulti(snapshot, keys.doOnNext(key -> extraValues.put(key.getT2(), key.getT1())).map(Tuple2::getT2))
-							.map(result -> Tuples.of(extraValues.get(result.getKey()), result.getKey(), result.getValue()));
+					ConcurrentLinkedQueue<X> extraValues = new ConcurrentLinkedQueue<>();
+					return getMulti(snapshot, keys.map(key -> {
+						extraValues.add(key.getT1());
+						return key.getT2();
+					})).map(result -> {
+						var extraValue = extraValues.remove();
+						return Tuples.of(extraValue, result.getKey(), result.getValue());
+					});
 				});
 			}
 		};
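
Note: the transform() rewrite fixes a correctness problem — keying the side channel on a ConcurrentHashMap returns the wrong extra value whenever the same key occurs twice in the input, while a FIFO queue pairs each result with its extra value by position, which is sound precisely because getMulti now guarantees sequential order (the new javadoc). A self-contained sketch of the pairing pattern using plain Reactor types, not the project's classes:

import java.util.concurrent.ConcurrentLinkedQueue;
import reactor.core.publisher.Flux;
import reactor.util.function.Tuples;

public class QueuePairingDemo {
	public static void main(String[] args) {
		var extras = new ConcurrentLinkedQueue<String>();
		Flux.just(Tuples.of("x1", 1), Tuples.of("x2", 2), Tuples.of("x3", 2)) // duplicate key 2
				.map(t -> { extras.add(t.getT1()); return t.getT2(); })      // stash extra, forward key
				.flatMapSequential(k -> Flux.just(k * 10))                   // order-preserving stand-in for getMulti
				.map(v -> Tuples.of(extras.remove(), v))                     // re-join by position
				.doOnNext(System.out::println)                               // [x1,10] [x2,20] [x3,20]
				.blockLast();
	}
}

With the old map-based approach, both results for key 2 would have been paired with "x3"; the queue keeps "x2" and "x3" distinct.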

SubStageGetterHashMap.java

@@ -30,15 +30,18 @@ public class SubStageGetterHashMap<T, U, TH> implements
 	private final Serializer<U, ByteBuf> valueSerializer;
 	private final Function<T, TH> keyHashFunction;
 	private final SerializerFixedBinaryLength<TH, ByteBuf> keyHashSerializer;
+	private final boolean enableAssertionsWhenUsingAssertions;
 
 	public SubStageGetterHashMap(Serializer<T, ByteBuf> keySerializer,
 			Serializer<U, ByteBuf> valueSerializer,
 			Function<T, TH> keyHashFunction,
-			SerializerFixedBinaryLength<TH, ByteBuf> keyHashSerializer) {
+			SerializerFixedBinaryLength<TH, ByteBuf> keyHashSerializer,
+			boolean enableAssertionsWhenUsingAssertions) {
 		this.keySerializer = keySerializer;
 		this.valueSerializer = valueSerializer;
 		this.keyHashFunction = keyHashFunction;
 		this.keyHashSerializer = keyHashSerializer;
+		this.enableAssertionsWhenUsingAssertions = enableAssertionsWhenUsingAssertions;
 	}
 
 	@Override
@@ -49,7 +52,7 @@ public class SubStageGetterHashMap<T, U, TH> implements
 		try {
 			return Mono
 					.defer(() -> {
-						if (assertsEnabled) {
+						if (assertsEnabled && enableAssertionsWhenUsingAssertions) {
 							return checkKeyFluxConsistency(prefixKey.retain(), debuggingKeys);
 						} else {
 							return Mono
@@ -86,7 +89,7 @@ public class SubStageGetterHashMap<T, U, TH> implements
 	@Override
 	public boolean needsDebuggingKeyFlux() {
-		return assertsEnabled;
+		return assertsEnabled && enableAssertionsWhenUsingAssertions;
 	}
 
 	private Mono<Void> checkKeyFluxConsistency(ByteBuf prefixKey, List<ByteBuf> keys) {

SubStageGetterHashSet.java

@@ -30,13 +30,16 @@ public class SubStageGetterHashSet<T, TH> implements
 	private final Serializer<T, ByteBuf> keySerializer;
 	private final Function<T, TH> keyHashFunction;
 	private final SerializerFixedBinaryLength<TH, ByteBuf> keyHashSerializer;
+	private final boolean enableAssertionsWhenUsingAssertions;
 
 	public SubStageGetterHashSet(Serializer<T, ByteBuf> keySerializer,
 			Function<T, TH> keyHashFunction,
-			SerializerFixedBinaryLength<TH, ByteBuf> keyHashSerializer) {
+			SerializerFixedBinaryLength<TH, ByteBuf> keyHashSerializer,
+			boolean enableAssertionsWhenUsingAssertions) {
 		this.keySerializer = keySerializer;
 		this.keyHashFunction = keyHashFunction;
 		this.keyHashSerializer = keyHashSerializer;
+		this.enableAssertionsWhenUsingAssertions = enableAssertionsWhenUsingAssertions;
 	}
 
 	@Override
@@ -47,7 +50,7 @@ public class SubStageGetterHashSet<T, TH> implements
 		try {
 			return Mono
 					.defer(() -> {
-						if (assertsEnabled) {
+						if (assertsEnabled && enableAssertionsWhenUsingAssertions) {
 							return checkKeyFluxConsistency(prefixKey.retain(), debuggingKeys);
 						} else {
 							return Mono
@@ -83,7 +86,7 @@ public class SubStageGetterHashSet<T, TH> implements
 	@Override
 	public boolean needsDebuggingKeyFlux() {
-		return assertsEnabled;
+		return assertsEnabled && enableAssertionsWhenUsingAssertions;
 	}
 
 	private Mono<Void> checkKeyFluxConsistency(ByteBuf prefixKey, List<ByteBuf> keys) {

SubStageGetterMap.java

@@ -25,11 +25,13 @@ public class SubStageGetterMap<T, U> implements SubStageGetter<Map<T, U>, Databa
 	private final SerializerFixedBinaryLength<T, ByteBuf> keySerializer;
 	private final Serializer<U, ByteBuf> valueSerializer;
+	private final boolean enableAssertionsWhenUsingAssertions;
 
 	public SubStageGetterMap(SerializerFixedBinaryLength<T, ByteBuf> keySerializer,
-			Serializer<U, ByteBuf> valueSerializer) {
+			Serializer<U, ByteBuf> valueSerializer, boolean enableAssertionsWhenUsingAssertions) {
 		this.keySerializer = keySerializer;
 		this.valueSerializer = valueSerializer;
+		this.enableAssertionsWhenUsingAssertions = enableAssertionsWhenUsingAssertions;
 	}
 
 	@Override
@@ -40,7 +42,7 @@ public class SubStageGetterMap<T, U> implements SubStageGetter<Map<T, U>, Databa
 		try {
 			return Mono
 					.defer(() -> {
-						if (assertsEnabled) {
+						if (assertsEnabled && enableAssertionsWhenUsingAssertions) {
 							return checkKeyFluxConsistency(prefixKey.retain(), debuggingKeys);
 						} else {
 							return Mono
@@ -75,7 +77,7 @@ public class SubStageGetterMap<T, U> implements SubStageGetter<Map<T, U>, Databa
 	@Override
 	public boolean needsDebuggingKeyFlux() {
-		return assertsEnabled;
+		return assertsEnabled && enableAssertionsWhenUsingAssertions;
 	}
 
 	private Mono<Void> checkKeyFluxConsistency(ByteBuf prefixKey, List<ByteBuf> keys) {

SubStageGetterMapDeep.java

@@ -25,14 +25,16 @@ public class SubStageGetterMapDeep<T, U, US extends DatabaseStage<U>> implements
 	private final SubStageGetter<U, US> subStageGetter;
 	private final SerializerFixedBinaryLength<T, ByteBuf> keySerializer;
 	private final int keyExtLength;
+	private final boolean enableAssertionsWhenUsingAssertions;
 
 	public SubStageGetterMapDeep(SubStageGetter<U, US> subStageGetter,
 			SerializerFixedBinaryLength<T, ByteBuf> keySerializer,
-			int keyExtLength) {
+			int keyExtLength, boolean enableAssertionsWhenUsingAssertions) {
 		this.subStageGetter = subStageGetter;
 		this.keySerializer = keySerializer;
 		this.keyExtLength = keyExtLength;
 		assert keyExtConsistency();
+		this.enableAssertionsWhenUsingAssertions = enableAssertionsWhenUsingAssertions;
 	}
 
@@ -54,7 +56,7 @@ public class SubStageGetterMapDeep<T, U, US extends DatabaseStage<U>> implements
 		try {
 			return Mono
 					.defer(() -> {
-						if (assertsEnabled) {
+						if (assertsEnabled && enableAssertionsWhenUsingAssertions) {
 							return checkKeyFluxConsistency(prefixKey.retain(), debuggingKeys);
 						} else {
 							return Mono
@@ -90,7 +92,7 @@ public class SubStageGetterMapDeep<T, U, US extends DatabaseStage<U>> implements
 	@Override
 	public boolean needsDebuggingKeyFlux() {
-		return assertsEnabled;
+		return assertsEnabled && enableAssertionsWhenUsingAssertions;
 	}
 
 	private Mono<Void> checkKeyFluxConsistency(ByteBuf prefixKey, List<ByteBuf> keys) {

SubStageGetterSet.java

@@ -24,9 +24,12 @@ public class SubStageGetterSet<T> implements SubStageGetter<Map<T, Nothing>, Dat
 	}
 
 	private final SerializerFixedBinaryLength<T, ByteBuf> keySerializer;
+	private final boolean enableAssertionsWhenUsingAssertions;
 
-	public SubStageGetterSet(SerializerFixedBinaryLength<T, ByteBuf> keySerializer) {
+	public SubStageGetterSet(SerializerFixedBinaryLength<T, ByteBuf> keySerializer,
+			boolean enableAssertionsWhenUsingAssertions) {
 		this.keySerializer = keySerializer;
+		this.enableAssertionsWhenUsingAssertions = enableAssertionsWhenUsingAssertions;
 	}
 
 	@Override
@@ -37,7 +40,7 @@ public class SubStageGetterSet<T> implements SubStageGetter<Map<T, Nothing>, Dat
 		try {
 			return Mono
 					.defer(() -> {
-						if (assertsEnabled) {
+						if (assertsEnabled && enableAssertionsWhenUsingAssertions) {
 							return checkKeyFluxConsistency(prefixKey.retain(), debuggingKeys);
 						} else {
 							return Mono
@@ -72,7 +75,7 @@ public class SubStageGetterSet<T> implements SubStageGetter<Map<T, Nothing>, Dat
 	@Override
 	public boolean needsDebuggingKeyFlux() {
-		return assertsEnabled;
+		return assertsEnabled && enableAssertionsWhenUsingAssertions;
 	}
 
 	private Mono<Void> checkKeyFluxConsistency(ByteBuf prefixKey, List<ByteBuf> keys) {
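
Note: all five SubStageGetter variants now gate their expensive key-consistency checks behind both the JVM-level switch and the new per-instance flag. The diffs don't show how assertsEnabled is computed; a common idiom (an assumption here, not confirmed by this commit) detects -ea once in a static initializer:

public class AssertGateDemo {
	private static final boolean assertsEnabled;
	static {
		boolean enabled = false;
		//noinspection AssertWithSideEffects
		assert enabled = true; // the assignment runs only when assertions are on (-ea)
		assertsEnabled = enabled;
	}

	private final boolean enableAssertionsWhenUsingAssertions;

	public AssertGateDemo(boolean enableAssertionsWhenUsingAssertions) {
		this.enableAssertionsWhenUsingAssertions = enableAssertionsWhenUsingAssertions;
	}

	public boolean needsDebuggingKeyFlux() {
		// Expensive key-consistency checks run only when both gates are open.
		return assertsEnabled && enableAssertionsWhenUsingAssertions;
	}
}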

LLLocalDictionary.java

@@ -287,7 +287,9 @@ public class LLLocalDictionary implements LLDictionary {
 			throw new RocksDBException("Key buffer must be direct");
 		}
 		ByteBuffer keyNioBuffer = LLUtils.toDirect(key);
-		assert keyNioBuffer.isDirect();
+		if (databaseOptions.enableDbAssertionsWhenUsingAssertions()) {
+			assert keyNioBuffer.isDirect();
+		}
 		// Create a direct result buffer because RocksDB works only with direct buffers
 		ByteBuf resultBuf = alloc.directBuffer(LLLocalDictionary.INITIAL_DIRECT_READ_BYTE_BUF_SIZE_BYTES);
 		try {
@@ -297,35 +299,39 @@ public class LLLocalDictionary implements LLDictionary {
 			do {
 				// Create the result nio buffer to pass to RocksDB
 				resultNioBuf = resultBuf.nioBuffer(0, resultBuf.capacity());
-				assert keyNioBuffer.isDirect();
-				assert resultNioBuf.isDirect();
+				if (databaseOptions.enableDbAssertionsWhenUsingAssertions()) {
+					assert keyNioBuffer.isDirect();
+					assert resultNioBuf.isDirect();
+				}
 				valueSize = db.get(cfh,
 						Objects.requireNonNullElse(readOptions, EMPTY_READ_OPTIONS),
 						keyNioBuffer.position(0),
 						resultNioBuf
 				);
 				if (valueSize != RocksDB.NOT_FOUND) {
-					// todo: check if position is equal to data that have been read
-					// todo: check if limit is equal to value size or data that have been read
-					assert valueSize <= 0 || resultNioBuf.limit() > 0;
-
-					// If the locking is enabled the data is safe, so since we are appending data to the end,
-					// we need to check if it has been appended correctly or if it has been overwritten.
-					// We must not do this check otherwise because if there is no locking the data can be
-					// overwritten with a smaller value the next time.
-					if (updateMode == UpdateMode.ALLOW) {
-						// Check if read data is larger than previously read data.
-						// If it's smaller or equals it means that RocksDB is overwriting the beginning of the result buffer.
-						assert resultNioBuf.limit() > assertionReadData;
-						if (ASSERTIONS_ENABLED) {
-							assertionReadData = resultNioBuf.limit();
+					if (databaseOptions.enableDbAssertionsWhenUsingAssertions()) {
+						// todo: check if position is equal to data that have been read
+						// todo: check if limit is equal to value size or data that have been read
+						assert valueSize <= 0 || resultNioBuf.limit() > 0;
+
+						// If the locking is enabled the data is safe, so since we are appending data to the end,
+						// we need to check if it has been appended correctly or if it has been overwritten.
+						// We must not do this check otherwise because if there is no locking the data can be
+						// overwritten with a smaller value the next time.
+						if (updateMode == UpdateMode.ALLOW) {
+							// Check if read data is larger than previously read data.
+							// If it's smaller or equals it means that RocksDB is overwriting the beginning of the result buffer.
+							assert resultNioBuf.limit() > assertionReadData;
+							if (ASSERTIONS_ENABLED) {
+								assertionReadData = resultNioBuf.limit();
+							}
 						}
+
+						// Check if read data is not bigger than the total value size.
+						// If it's bigger it means that RocksDB is writing the start of the result into the result
+						// buffer more than once.
+						assert resultNioBuf.limit() <= valueSize;
 					}
-
-					// Check if read data is not bigger than the total value size.
-					// If it's bigger it means that RocksDB is writing the start of the result into the result
-					// buffer more than once.
-					assert resultNioBuf.limit() <= valueSize;
 
 					if (valueSize <= resultNioBuf.limit()) {
 						// Return the result ready to be read
@@ -392,13 +398,17 @@ public class LLLocalDictionary implements LLDictionary {
 			if (!value.isDirect()) {
 				throw new RocksDBException("Value buffer must be direct");
 			}
 			var keyNioBuffer = LLUtils.toDirect(key);
-			assert keyNioBuffer.isDirect();
+			if (databaseOptions.enableDbAssertionsWhenUsingAssertions()) {
+				assert keyNioBuffer.isDirect();
+			}
 			var valueNioBuffer = LLUtils.toDirect(value);
-			assert valueNioBuffer.isDirect();
+			if (databaseOptions.enableDbAssertionsWhenUsingAssertions()) {
+				assert valueNioBuffer.isDirect();
+			}
 			db.put(cfh, Objects.requireNonNullElse(writeOptions, EMPTY_WRITE_OPTIONS), keyNioBuffer, valueNioBuffer);
 		} else {
 			db.put(cfh, Objects.requireNonNullElse(writeOptions, EMPTY_WRITE_OPTIONS), LLUtils.toArray(key), LLUtils.toArray(value));
 		}
@@ -750,9 +760,11 @@ public class LLLocalDictionary implements LLDictionary {
 		ByteBuf prevDataToSendToUpdater = prevData == null ? null : prevData.retainedSlice();
 		try {
 			newData = updater.apply(prevDataToSendToUpdater == null ? null : prevDataToSendToUpdater.retain());
-			assert prevDataToSendToUpdater == null
-					|| prevDataToSendToUpdater.readerIndex() == 0
-					|| !prevDataToSendToUpdater.isReadable();
+			if (databaseOptions.enableDbAssertionsWhenUsingAssertions()) {
+				assert prevDataToSendToUpdater == null
+						|| prevDataToSendToUpdater.readerIndex() == 0
+						|| !prevDataToSendToUpdater.isReadable();
+			}
 		} finally {
 			if (prevDataToSendToUpdater != null) {
 				prevDataToSendToUpdater.release();
@@ -892,7 +904,9 @@ public class LLLocalDictionary implements LLDictionary {
 				.single()
 				.map(LLUtils::booleanToResponseByteBuffer)
 				.doAfterTerminate(() -> {
-					assert key.refCnt() > 0;
+					if (databaseOptions.enableDbAssertionsWhenUsingAssertions()) {
+						assert key.refCnt() > 0;
+					}
 				});
 		case PREVIOUS_VALUE -> Mono
 				.fromCallable(() -> {
@@ -918,7 +932,9 @@ public class LLLocalDictionary implements LLDictionary {
 					try {
 						return dbGet(cfh, null, key.retain(), true);
 					} finally {
-						assert key.refCnt() > 0;
+						if (databaseOptions.enableDbAssertionsWhenUsingAssertions()) {
+							assert key.refCnt() > 0;
+						}
 					}
 				}
 			} else {
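
Note: plain assert statements are toggled only per class by -ea/-da, so wrapping them in the new DatabaseOptions flag lets a deployment keep assertions globally enabled (for other code) while skipping these hot-path buffer checks at runtime. A reduced sketch under that assumption — DatabaseOptions is collapsed to the single relevant component:

// Stand-in for the real record, which has many more components (see DatabaseOptions.java above).
record DatabaseOptions(boolean enableDbAssertionsWhenUsingAssertions) {}

class DirectBufferCheck {
	private final DatabaseOptions databaseOptions = new DatabaseOptions(false);

	void check(java.nio.ByteBuffer buf) {
		if (databaseOptions.enableDbAssertionsWhenUsingAssertions()) {
			// Skipped entirely when the flag is off, even under -ea.
			assert buf.isDirect();
		}
	}
}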

LLLocalKeyValueDatabase.java

@@ -470,8 +470,10 @@ public class LLLocalKeyValueDatabase implements LLKeyValueDatabase {
 	private ColumnFamilyHandle getCfh(byte[] columnName) throws RocksDBException {
 		ColumnFamilyHandle cfh = handles.get(Column.special(Column.toString(columnName)));
 		//noinspection RedundantIfStatement
-		if (!enableColumnsBug) {
-			assert Arrays.equals(cfh.getName(), columnName);
+		if (databaseOptions.enableDbAssertionsWhenUsingAssertions()) {
+			if (!enableColumnsBug) {
+				assert Arrays.equals(cfh.getName(), columnName);
+			}
 		}
 		return cfh;
 	}

LLLocalLuceneIndex.java

@@ -91,7 +91,7 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 			Schedulers.DEFAULT_BOUNDED_ELASTIC_QUEUESIZE,
 			"lucene",
 			Integer.MAX_VALUE,
-			true
+			false
 	);
 	// Scheduler used to get callback values of LuceneStreamSearcher without creating deadlocks
 	private final Scheduler luceneSearcherScheduler = Schedulers.newBoundedElastic(
@@ -99,7 +99,15 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 			Schedulers.DEFAULT_BOUNDED_ELASTIC_QUEUESIZE,
 			"lucene-searcher",
 			60,
-			true
+			false
+	);
+	// Scheduler used to write documents to the Lucene index without creating deadlocks
+	private final Scheduler luceneWriterScheduler = Schedulers.newBoundedElastic(
+			4,
+			Schedulers.DEFAULT_BOUNDED_ELASTIC_QUEUESIZE,
+			"lucene-writer",
+			60,
+			false
 	);
 
 	private final String luceneIndexName;
@@ -353,12 +361,13 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 		return Mono.<Void>fromCallable(() -> {
 			scheduledTasksLifecycle.startScheduledTask();
 			try {
+				//noinspection BlockingMethodInNonBlockingContext
 				indexWriter.addDocument(LLUtils.toDocument(doc));
 				return null;
 			} finally {
 				scheduledTasksLifecycle.endScheduledTask();
 			}
-		}).subscribeOn(Schedulers.boundedElastic());
+		}).subscribeOn(luceneWriterScheduler);
 	}
 
 	@Override
@@ -369,13 +378,14 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 				.<Void>fromCallable(() -> {
 					scheduledTasksLifecycle.startScheduledTask();
 					try {
+						//noinspection BlockingMethodInNonBlockingContext
 						indexWriter.addDocuments(LLUtils.toDocumentsFromEntries(documentsList));
 						return null;
 					} finally {
 						scheduledTasksLifecycle.endScheduledTask();
 					}
 				})
-				.subscribeOn(Schedulers.boundedElastic())
+				.subscribeOn(luceneWriterScheduler)
 		);
 	}
 
@@ -385,12 +395,13 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 		return Mono.<Void>fromCallable(() -> {
 			scheduledTasksLifecycle.startScheduledTask();
 			try {
+				//noinspection BlockingMethodInNonBlockingContext
 				indexWriter.deleteDocuments(LLUtils.toTerm(id));
 				return null;
 			} finally {
 				scheduledTasksLifecycle.endScheduledTask();
 			}
-		}).subscribeOn(Schedulers.boundedElastic());
+		}).subscribeOn(luceneWriterScheduler);
 	}
 
 	@Override
@@ -398,12 +409,13 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 		return Mono.<Void>fromCallable(() -> {
 			scheduledTasksLifecycle.startScheduledTask();
 			try {
+				//noinspection BlockingMethodInNonBlockingContext
 				indexWriter.updateDocument(LLUtils.toTerm(id), LLUtils.toDocument(document));
 			} finally {
 				scheduledTasksLifecycle.endScheduledTask();
 			}
 			return null;
-		}).subscribeOn(Schedulers.boundedElastic());
+		}).subscribeOn(luceneWriterScheduler);
 	}
 
 	@Override
@@ -419,6 +431,7 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 				for (Entry<LLTerm, LLDocument> entry : documentsMap.entrySet()) {
 					LLTerm key = entry.getKey();
 					LLDocument value = entry.getValue();
+					//noinspection BlockingMethodInNonBlockingContext
 					indexWriter.updateDocument(LLUtils.toTerm(key), LLUtils.toDocument(value));
 				}
 				return null;
@@ -426,7 +439,7 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 					scheduledTasksLifecycle.endScheduledTask();
 				}
 			})
-			.subscribeOn(Schedulers.boundedElastic());
+			.subscribeOn(luceneWriterScheduler);
 	}
 
 	@Override
@@ -634,14 +647,20 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 	}
 
 	@Override
-	public Mono<Void> refresh() {
+	public Mono<Void> refresh(boolean force) {
 		return Mono
 				.<Void>fromCallable(() -> {
 					scheduledTasksLifecycle.startScheduledTask();
 					try {
 						if (scheduledTasksLifecycle.isCancelled()) return null;
-						//noinspection BlockingMethodInNonBlockingContext
-						searcherManager.maybeRefresh();
+						if (force) {
+							if (scheduledTasksLifecycle.isCancelled()) return null;
+							//noinspection BlockingMethodInNonBlockingContext
+							searcherManager.maybeRefreshBlocking();
+						} else {
+							//noinspection BlockingMethodInNonBlockingContext
+							searcherManager.maybeRefresh();
+						}
 					} finally {
 						scheduledTasksLifecycle.endScheduledTask();
 					}

LLLocalMultiLuceneIndex.java

@@ -269,10 +269,10 @@ public class LLLocalMultiLuceneIndex implements LLLuceneIndex {
 	}
 
 	@Override
-	public Mono<Void> refresh() {
+	public Mono<Void> refresh(boolean force) {
 		return Flux
 				.fromArray(luceneIndices)
-				.flatMap(LLLocalLuceneIndex::refresh)
+				.flatMap(index -> index.refresh(force))
 				.then();
 	}

BinaryLexicographicList.java

@@ -252,6 +252,22 @@ public class BinaryLexicographicList implements ByteList {
 			return true;
 		}
 		if (o == null || getClass() != o.getClass()) {
+			if (o instanceof List) {
+				int i = 0;
+				for (Object o1 : ((List<?>) o)) {
+					if (i >= size()) {
+						return false;
+					}
+					if (!(o1 instanceof Byte)) {
+						return false;
+					}
+					if (this.bytes[i] != (Byte) o1) {
+						return false;
+					}
+					i++;
+				}
+				return (size() == i);
+			}
 			return false;
 		}
 		BinaryLexicographicList bytes1 = (BinaryLexicographicList) o;
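
Note: the new branch lets a BinaryLexicographicList compare equal to any other java.util.List carrying the same boxed bytes, instead of failing the getClass() check outright. A hypothetical usage sketch (the wrapped bytes and the constructor are assumptions, not shown in this diff):

import java.util.List;

class EqualsExample {
	// `bin` is assumed to be a BinaryLexicographicList wrapping {1, 2}.
	static boolean matches(List<Byte> bin) {
		List<Byte> boxed = List.of((byte) 1, (byte) 2);
		return bin.equals(boxed); // true via the new element-by-element branch
	}
}

One caveat worth noting: the List contract also ties equals to hashCode (AbstractList's 31-based formula); whether this class's hashCode agrees with that is not visible in the hunk.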

LLMemoryDictionary.java

@@ -19,6 +19,7 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentSkipListMap;
+import java.util.concurrent.atomic.AtomicReference;
 import java.util.function.BiFunction;
 import java.util.function.Function;
 import org.jetbrains.annotations.Nullable;
@@ -150,7 +151,7 @@ public class LLMemoryDictionary implements LLDictionary {
 	public Mono<ByteBuf> put(ByteBuf key, ByteBuf value, LLDictionaryResultType resultType) {
 		try {
 			return Mono
-					.fromCallable(() -> mainDb.put(k(key),k(value)))
+					.fromCallable(() -> mainDb.put(k(key), k(value)))
 					.transform(result -> this.transformResult(result, resultType))
 					.onErrorMap(cause -> new IOException("Failed to read " + LLUtils.toStringSafe(key), cause))
 					.doFirst(key::retain)
@@ -169,7 +170,23 @@ public class LLMemoryDictionary implements LLDictionary {
 	public Mono<Delta<ByteBuf>> updateAndGetDelta(ByteBuf key,
 			Function<@Nullable ByteBuf, @Nullable ByteBuf> updater,
 			boolean existsAlmostCertainly) {
-		return null;
+		return Mono.fromCallable(() -> {
+			AtomicReference<ByteBuf> oldRef = new AtomicReference<>(null);
+			var newValue = mainDb.compute(k(key), (_unused, old) -> {
+				if (old != null) {
+					oldRef.set(kk(old));
+				}
+				var v = updater.apply(old != null ? kk(old) : null);
+				try {
+					return k(v);
+				} finally {
+					if (v != null) {
+						v.release();
+					}
+				}
+			});
+			return new Delta<>(oldRef.get(), kk(newValue));
+		});
 	}
 
 	@Override
@@ -197,13 +214,13 @@ public class LLMemoryDictionary implements LLDictionary {
 			Flux<Tuple2<K, ByteBuf>> keys,
 			boolean existsAlmostCertainly) {
 		return keys
-				.handle((key, sink) -> {
+				.flatMapSequential(key -> {
 					try {
-						var v = snapshots.get(resolveSnapshot(snapshot)).get(k(key.getT2()));
+						ByteList v = snapshots.get(resolveSnapshot(snapshot)).get(k(key.getT2()));
 						if (v == null) {
-							sink.complete();
+							return Flux.empty();
 						} else {
-							sink.next(Tuples.of(key.getT1(), key.getT2().retain(), kk(v)));
+							return Flux.just(Tuples.of(key.getT1(), key.getT2().retain(), kk(v)));
 						}
 					} finally {
 						key.getT2().release();
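
Note: updateAndGetDelta was previously a stub returning null. The new version leans on ConcurrentMap.compute, which runs the remapping function atomically for the key, and uses an AtomicReference to carry the previous value out of the lambda. The same shape as a standalone sketch with plain Strings instead of ByteBuf/ByteList:

import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.atomic.AtomicReference;

public class ComputeDeltaDemo {
	record Delta<T>(T previous, T current) {}

	public static void main(String[] args) {
		var map = new ConcurrentSkipListMap<String, String>();
		map.put("k", "old");
		var oldRef = new AtomicReference<String>();
		var newValue = map.compute("k", (key, old) -> {
			oldRef.set(old); // capture the previous value atomically
			return old == null ? "init" : old + "+updated";
		});
		// Prints: Delta[previous=old, current=old+updated]
		System.out.println(new Delta<>(oldRef.get(), newValue));
	}
}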

LLMemoryKeyValueDatabase.java

@@ -34,6 +34,7 @@ public class LLMemoryKeyValueDatabase implements LLKeyValueDatabase {
 	private final ConcurrentHashMap<Long, ConcurrentHashMap<String, ConcurrentSkipListMap<ByteList, ByteList>>> snapshots = new ConcurrentHashMap<>();
 	private final ConcurrentHashMap<String, ConcurrentSkipListMap<ByteList, ByteList>> mainDb;
+	private final ConcurrentHashMap<String, LLMemoryDictionary> singletons = new ConcurrentHashMap<>();
 
 	public LLMemoryKeyValueDatabase(ByteBufAllocator allocator, String name, List<Column> columns) {
 		this.allocator = allocator;
@@ -46,8 +47,21 @@ public class LLMemoryKeyValueDatabase implements LLKeyValueDatabase {
 	}
 
 	@Override
-	public Mono<? extends LLSingleton> getSingleton(byte[] singletonListColumnName, byte[] name, byte[] defaultValue) {
-		return Mono.error(new UnsupportedOperationException("Not implemented"));
+	public Mono<? extends LLSingleton> getSingleton(byte[] singletonListColumnName, byte[] singletonName, byte[] defaultValue) {
+		var columnNameString = new String(singletonListColumnName, StandardCharsets.UTF_8);
+		var dict = singletons.computeIfAbsent(columnNameString, _unused -> new LLMemoryDictionary(allocator,
+				name,
+				columnNameString,
+				UpdateMode.ALLOW,
+				snapshots,
+				mainDb
+		));
+		return Mono
+				.fromCallable(() -> new LLMemorySingleton(dict, singletonName)).flatMap(singleton -> singleton
+						.get(null)
+						.switchIfEmpty(singleton.set(defaultValue).then(Mono.empty()))
+						.thenReturn(singleton)
+				);
 	}
 
 	@Override

LLMemorySingleton.java

@@ -0,0 +1,57 @@
+package it.cavallium.dbengine.database.memory;
+
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.Unpooled;
+import it.cavallium.dbengine.database.LLDictionaryResultType;
+import it.cavallium.dbengine.database.LLSingleton;
+import it.cavallium.dbengine.database.LLSnapshot;
+import it.cavallium.dbengine.database.LLUtils;
+import org.jetbrains.annotations.Nullable;
+import reactor.core.publisher.Mono;
+
+public class LLMemorySingleton implements LLSingleton {
+
+	private final LLMemoryDictionary dict;
+	private final byte[] singletonName;
+
+	public LLMemorySingleton(LLMemoryDictionary dict, byte[] singletonName) {
+		this.dict = dict;
+		this.singletonName = singletonName;
+	}
+
+	@Override
+	public String getDatabaseName() {
+		return dict.getDatabaseName();
+	}
+
+	@Override
+	public Mono<byte[]> get(@Nullable LLSnapshot snapshot) {
+		var bb = Unpooled.wrappedBuffer(singletonName);
+		return Mono
+				.defer(() -> dict.get(snapshot, bb.retain(), false))
+				.map(b -> {
+					try {
+						return LLUtils.toArray(b);
+					} finally {
+						b.release();
+					}
+				})
+				.doAfterTerminate(bb::release)
+				.doFirst(bb::retain);
+	}
+
+	@Override
+	public Mono<Void> set(byte[] value) {
+		var bbKey = Unpooled.wrappedBuffer(singletonName);
+		var bbVal = Unpooled.wrappedBuffer(value);
+		return Mono
+				.defer(() -> dict
+						.put(bbKey.retain(), bbVal.retain(), LLDictionaryResultType.VOID)
+				)
+				.doAfterTerminate(bbKey::release)
+				.doAfterTerminate(bbVal::release)
+				.doFirst(bbKey::retain)
+				.doFirst(bbVal::retain)
+				.then();
+	}
+}
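
Note: getSingleton in LLMemoryKeyValueDatabase combines this new class with a get-or-initialize step — read the stored value, write defaultValue only when the read comes back empty, then complete with the singleton handle either way. The Reactor skeleton of that step, reduced to an in-memory string store rather than the project's types:

import java.util.concurrent.atomic.AtomicReference;
import reactor.core.publisher.Mono;

public class GetOrInitDemo {
	static final AtomicReference<String> store = new AtomicReference<>();

	static Mono<String> get() { return Mono.justOrEmpty(store.get()); }
	static Mono<Void> set(String v) { return Mono.fromRunnable(() -> store.set(v)); }

	public static void main(String[] args) {
		String handle = get()
				.switchIfEmpty(set("default").then(Mono.empty())) // initialize only if empty
				.thenReturn("singleton-handle")                   // always yield the handle
				.block();
		System.out.println(handle + " value=" + store.get());     // value=default on first run
	}
}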

LuceneUtils.java

@@ -3,9 +3,11 @@ package it.cavallium.dbengine.lucene;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.client.IndicizerAnalyzers;
 import it.cavallium.dbengine.client.IndicizerSimilarities;
+import it.cavallium.dbengine.client.query.BasicType;
 import it.cavallium.dbengine.client.query.QueryParser;
 import it.cavallium.dbengine.client.query.current.data.QueryParams;
 import it.cavallium.dbengine.database.LLKeyScore;
+import it.cavallium.dbengine.database.LLScoreMode;
 import it.cavallium.dbengine.database.collections.DatabaseMapDictionary;
 import it.cavallium.dbengine.database.collections.DatabaseMapDictionaryDeep;
 import it.cavallium.dbengine.database.collections.ValueGetter;
@@ -432,6 +434,6 @@ public class LuceneUtils {
 	}
 
 	public static int totalHitsThreshold() {
-		return 0;
+		return 1;
 	}
 }

ScoredSimpleLuceneShardSearcher.java

@@ -142,8 +142,8 @@ class ScoredSimpleLuceneShardSearcher implements LuceneShardSearcher {
 					return new LuceneSearchResult(result.totalHits.value,
 							firstPageHits
-									.concatWith(nextHits)
-									.transform(flux -> LuceneUtils.filterTopDoc(flux, queryParams)),
+									.concatWith(nextHits),
+							//.transform(flux -> LuceneUtils.filterTopDoc(flux, queryParams)),
 							release
 					);
 				})

SimpleLuceneLocalSearcher.java

@@ -56,48 +56,58 @@ public class SimpleLuceneLocalSearcher implements LuceneLocalSearcher {
 					.take(queryParams.limit(), true);
 
-			Flux<LLKeyScore> nextHits = Flux.defer(() -> {
-				if (paginationInfo.forceSinglePage() || paginationInfo.totalLimit() - paginationInfo.firstPageLimit() <= 0) {
-					return Flux.empty();
-				}
-				return Flux
-						.<TopDocs, CurrentPageInfo>generate(
-								() -> new CurrentPageInfo(LuceneUtils.getLastScoreDoc(firstPageTopDocs.scoreDocs), paginationInfo.totalLimit() - paginationInfo.firstPageLimit(), 1),
-								(s, sink) -> {
-									if (s.last() != null && s.remainingLimit() > 0) {
-										TopDocs pageTopDocs;
-										try {
-											TopDocsCollector<ScoreDoc> collector = TopDocsSearcher.getTopDocsCollector(queryParams.sort(),
-													s.currentPageLimit(),
-													s.last(),
-													LuceneUtils.totalHitsThreshold()
-											);
-											//noinspection BlockingMethodInNonBlockingContext
-											indexSearcher.search(queryParams.query(), collector);
-											pageTopDocs = collector.topDocs();
-										} catch (IOException e) {
-											sink.error(e);
-											return EMPTY_STATUS;
-										}
-										var pageLastDoc = LuceneUtils.getLastScoreDoc(pageTopDocs.scoreDocs);
-										sink.next(pageTopDocs);
-										return new CurrentPageInfo(pageLastDoc, s.remainingLimit() - s.currentPageLimit(), s.pageIndex() + 1);
-									} else {
-										sink.complete();
-										return EMPTY_STATUS;
-									}
-								},
-								s -> {}
-						)
-						.subscribeOn(scheduler)
-						.concatMap(topFieldDoc -> LuceneUtils
-								.convertHits(topFieldDoc.scoreDocs, IndexSearchers.unsharded(indexSearcher), keyFieldName, scheduler)
-						);
-			});
+			Flux<LLKeyScore> nextHits;
+			if (paginationInfo.forceSinglePage() || paginationInfo.totalLimit() - paginationInfo.firstPageLimit() <= 0) {
+				nextHits = null;
+			} else {
+				nextHits = Flux.defer(() -> {
+					return Flux
+							.<TopDocs, CurrentPageInfo>generate(
+									() -> new CurrentPageInfo(LuceneUtils.getLastScoreDoc(firstPageTopDocs.scoreDocs), paginationInfo.totalLimit() - paginationInfo.firstPageLimit(), 1),
+									(s, sink) -> {
+										if (s.last() != null && s.remainingLimit() > 0) {
+											TopDocs pageTopDocs;
+											try {
+												TopDocsCollector<ScoreDoc> collector = TopDocsSearcher.getTopDocsCollector(queryParams.sort(),
+														s.currentPageLimit(),
+														s.last(),
+														LuceneUtils.totalHitsThreshold()
+												);
+												//noinspection BlockingMethodInNonBlockingContext
+												indexSearcher.search(queryParams.query(), collector);
+												pageTopDocs = collector.topDocs();
+											} catch (IOException e) {
+												sink.error(e);
+												return EMPTY_STATUS;
+											}
+											var pageLastDoc = LuceneUtils.getLastScoreDoc(pageTopDocs.scoreDocs);
+											sink.next(pageTopDocs);
+											return new CurrentPageInfo(pageLastDoc, s.remainingLimit() - s.currentPageLimit(), s.pageIndex() + 1);
+										} else {
+											sink.complete();
+											return EMPTY_STATUS;
+										}
+									},
+									s -> {}
+							)
+							.subscribeOn(scheduler)
+							.concatMap(topFieldDoc -> LuceneUtils
+									.convertHits(topFieldDoc.scoreDocs, IndexSearchers.unsharded(indexSearcher), keyFieldName, scheduler)
+							);
+				});
+			}
 
-			return new LuceneSearchResult(firstPageTopDocs.totalHits.value, firstPageMono
-					.concatWith(nextHits)
-					.transform(flux -> LuceneUtils.filterTopDoc(flux, queryParams)),
+			Flux<LLKeyScore> combinedFlux;
+			if (nextHits != null) {
+				combinedFlux = firstPageMono
+						.concatWith(nextHits);
+			} else {
+				combinedFlux = firstPageMono;
+			}
+
+			return new LuceneSearchResult(firstPageTopDocs.totalHits.value, combinedFlux,
+					//.transform(flux -> LuceneUtils.filterTopDoc(flux, queryParams)),
 					releaseIndexSearcher
 			);
 		})

UnscoredLuceneShardSearcher.java

@@ -131,8 +131,8 @@ class UnscoredLuceneShardSearcher implements LuceneShardSearcher {
 				});
 				return new LuceneSearchResult(result.totalHits.value, firstPageHits
-						.concatWith(nextHits)
-						.transform(flux -> LuceneUtils.filterTopDoc(flux, queryParams)),
+						.concatWith(nextHits),
+						//.transform(flux -> LuceneUtils.filterTopDoc(flux, queryParams)),
 						release
 				);
 			})

DbTestUtils.java

@@ -58,7 +58,7 @@ public class DbTestUtils {
 				.then(new LLLocalDatabaseConnection(DbTestUtils.ALLOCATOR, wrkspcPath).connect())
 				.flatMap(conn -> conn.getDatabase("testdb",
 						List.of(Column.dictionary("testmap"), Column.special("ints"), Column.special("longs")),
-						new DatabaseOptions(Map.of(), true, false, true, false, true, true, true)
+						new DatabaseOptions(Map.of(), true, false, true, false, true, true, true, true)
 				)),
 				action,
 				db -> db.close().then(Mono.fromCallable(() -> {
@@ -149,7 +149,8 @@ public class DbTestUtils {
 				SerializerFixedBinaryLength.utf8(DbTestUtils.ALLOCATOR, key1Bytes),
 				key2Bytes,
 				new SubStageGetterMap<>(SerializerFixedBinaryLength.utf8(DbTestUtils.ALLOCATOR, key2Bytes),
-						Serializer.utf8(DbTestUtils.ALLOCATOR)
+						Serializer.utf8(DbTestUtils.ALLOCATOR),
+						true
 				)
 		);
 	}
@@ -164,7 +165,8 @@ public class DbTestUtils {
 				new SubStageGetterHashMap<>(Serializer.utf8(DbTestUtils.ALLOCATOR),
 						Serializer.utf8(DbTestUtils.ALLOCATOR),
 						String::hashCode,
-						SerializerFixedBinaryLength.intSerializer(DbTestUtils.ALLOCATOR)
+						SerializerFixedBinaryLength.intSerializer(DbTestUtils.ALLOCATOR),
+						true
 				)
 		);
 	}

OldDatabaseTests.java

@@ -75,7 +75,7 @@ public class OldDatabaseTests {
 				.map(dictionary -> DatabaseMapDictionaryDeep.deepTail(dictionary,
 						new FixedStringSerializer(3),
 						4,
-						new SubStageGetterMap<>(new FixedStringSerializer(4), Serializer.noop())
+						new SubStageGetterMap<>(new FixedStringSerializer(4), Serializer.noop(), true)
 				))
 				.flatMap(collection -> Flux
 						.fromIterable(originalSuperKeys)
@@ -135,7 +135,7 @@ public class OldDatabaseTests {
 				.then(new LLLocalDatabaseConnection(PooledByteBufAllocator.DEFAULT, wrkspcPath).connect())
 				.flatMap(conn -> conn.getDatabase("testdb",
 						List.of(Column.dictionary("testmap")),
-						new DatabaseOptions(Map.of(), true, false, true, false, true, true, true)
+						new DatabaseOptions(Map.of(), true, false, true, false, true, true, true, true)
 				));
 	}
@@ -159,14 +159,14 @@ public class OldDatabaseTests {
 				.map(dictionary -> DatabaseMapDictionaryDeep.deepTail(dictionary,
 						new FixedStringSerializer(3),
 						4,
-						new SubStageGetterMap<>(new FixedStringSerializer(4), Serializer.noop())
+						new SubStageGetterMap<>(new FixedStringSerializer(4), Serializer.noop(), true)
 				)),
 				db
 						.getDictionary("testmap", UpdateMode.DISALLOW)
 						.map(dictionary -> DatabaseMapDictionaryDeep.deepTail(dictionary,
 								new FixedStringSerializer(6),
 								7,
-								new SubStageGetterMap<>(new FixedStringSerializer(7), Serializer.noop())
+								new SubStageGetterMap<>(new FixedStringSerializer(7), Serializer.noop(), true)
 						))
 				)
 				.single()