Fix sigsegv

This commit is contained in:
Andrea Cavalli 2023-02-22 22:31:36 +01:00
parent cd15f8d23d
commit 59f9f01268
71 changed files with 1198 additions and 2834 deletions

View File

@ -489,6 +489,9 @@ public class LLUtils {
} }
public static Buf unmodifiableBytes(Buf previous) { public static Buf unmodifiableBytes(Buf previous) {
if (previous == null) {
return null;
}
previous.freeze(); previous.freeze();
return previous; return previous;
} }

View File

@ -176,7 +176,7 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
buf1.skipNBytes(keyPrefixLength); buf1.skipNBytes(keyPrefixLength);
suffixAndExtKeyConsistency(buf1.available()); suffixAndExtKeyConsistency(buf1.available());
key = deserializeSuffix(serializedValue); key = deserializeSuffix(buf1);
U value = valueSerializer.deserialize(serializedValue); U value = valueSerializer.deserialize(serializedValue);
deserializedEntry = Map.entry(key, value); deserializedEntry = Map.entry(key, value);
return deserializedEntry; return deserializedEntry;
@ -295,6 +295,9 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
var keyMono = serializeKeySuffixToKey(keySuffix); var keyMono = serializeKeySuffixToKey(keySuffix);
var valueMono = serializeValue(value); var valueMono = serializeValue(value);
var valueBuf = dictionary.put(keyMono, valueMono, LLDictionaryResultType.PREVIOUS_VALUE); var valueBuf = dictionary.put(keyMono, valueMono, LLDictionaryResultType.PREVIOUS_VALUE);
if (valueBuf == null) {
return null;
}
return deserializeValue(keySuffix, BufDataInput.create(valueBuf)); return deserializeValue(keySuffix, BufDataInput.create(valueBuf));
} }

View File

@ -18,6 +18,7 @@ import it.cavallium.dbengine.database.serialization.Serializer;
import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength; import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
import it.unimi.dsi.fastutil.objects.Object2ObjectSortedMap; import it.unimi.dsi.fastutil.objects.Object2ObjectSortedMap;
import java.util.Map.Entry; import java.util.Map.Entry;
import java.util.Objects;
import java.util.Optional; import java.util.Optional;
import java.util.Set; import java.util.Set;
import java.util.concurrent.CompletionException; import java.util.concurrent.CompletionException;
@ -28,6 +29,7 @@ import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable; import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.VisibleForTesting;
// todo: implement optimized methods (which?) // todo: implement optimized methods (which?)
public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implements DatabaseStageMap<T, U, US> { public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implements DatabaseStageMap<T, U, US> {
@ -74,18 +76,25 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
} }
} }
static Buf firstRangeKey(Buf prefixKey, int prefixLength, Buf suffixAndExtZeroes) { @VisibleForTesting
var modifiablePrefixKey = Buf.create(prefixLength + suffixAndExtZeroes.size()); public static Buf firstRangeKey(Buf prefixKey, int prefixLength, Buf suffixAndExtZeroes) {
modifiablePrefixKey.addAll(prefixKey); return createFullKeyWithEmptySuffixAndExt(prefixKey, prefixLength, suffixAndExtZeroes);
zeroFillKeySuffixAndExt(modifiablePrefixKey, prefixLength, suffixAndExtZeroes); }
@VisibleForTesting
public static Buf nextRangeKey(Buf prefixKey, int prefixLength, Buf suffixAndExtZeroes) {
Buf modifiablePrefixKey = createFullKeyWithEmptySuffixAndExt(prefixKey, prefixLength, suffixAndExtZeroes);
incrementPrefix(modifiablePrefixKey, prefixLength);
return modifiablePrefixKey; return modifiablePrefixKey;
} }
static Buf nextRangeKey(Buf prefixKey, int prefixLength, Buf suffixAndExtZeroes) { private static Buf createFullKeyWithEmptySuffixAndExt(Buf prefixKey, int prefixLength, Buf suffixAndExtZeroes) {
var modifiablePrefixKey = Buf.create(prefixLength + suffixAndExtZeroes.size()); var modifiablePrefixKey = Buf.create(prefixLength + suffixAndExtZeroes.size());
modifiablePrefixKey.addAll(prefixKey); if (prefixKey != null) {
modifiablePrefixKey.addAll(prefixKey);
}
assert prefixKey != null || prefixLength == 0 : "Prefix length is " + prefixLength + " but the prefix key is null";
zeroFillKeySuffixAndExt(modifiablePrefixKey, prefixLength, suffixAndExtZeroes); zeroFillKeySuffixAndExt(modifiablePrefixKey, prefixLength, suffixAndExtZeroes);
incrementPrefix(modifiablePrefixKey, prefixLength);
return modifiablePrefixKey; return modifiablePrefixKey;
} }
@ -98,7 +107,7 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
var suffixLengthAndExtLength = suffixAndExtZeroes.size(); var suffixLengthAndExtLength = suffixAndExtZeroes.size();
assert result.size() == prefixLength; assert result.size() == prefixLength;
assert suffixLengthAndExtLength > 0 : "Suffix length + ext length is < 0: " + suffixLengthAndExtLength; assert suffixLengthAndExtLength > 0 : "Suffix length + ext length is < 0: " + suffixLengthAndExtLength;
result.size(prefixLength + suffixLengthAndExtLength); result.size(prefixLength);
modifiablePrefixKey.addAll(suffixAndExtZeroes); modifiablePrefixKey.addAll(suffixAndExtZeroes);
assert modifiablePrefixKey.size() == prefixLength + suffixAndExtZeroes.size() : "Result buffer size is wrong"; assert modifiablePrefixKey.size() == prefixLength + suffixAndExtZeroes.size() : "Result buffer size is wrong";
} }
@ -172,6 +181,10 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
@SuppressWarnings("unused") @SuppressWarnings("unused")
protected boolean suffixKeyLengthConsistency(int keySuffixLength) { protected boolean suffixKeyLengthConsistency(int keySuffixLength) {
assert
this.keySuffixLength == keySuffixLength :
"Key suffix length is " + keySuffixLength + ", but it should be " + this.keySuffixLength + " bytes long";
//noinspection ConstantValue
return this.keySuffixLength == keySuffixLength; return this.keySuffixLength == keySuffixLength;
} }
@ -313,8 +326,8 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
} }
protected void serializeSuffixTo(T keySuffix, BufDataOutput output) throws SerializationException { protected void serializeSuffixTo(T keySuffix, BufDataOutput output) throws SerializationException {
assert suffixKeyLengthConsistency(output.size());
var beforeWriterOffset = output.size(); var beforeWriterOffset = output.size();
assert beforeWriterOffset == keyPrefixLength;
keySuffixSerializer.serialize(keySuffix, output); keySuffixSerializer.serialize(keySuffix, output);
var afterWriterOffset = output.size(); var afterWriterOffset = output.size();
assert suffixKeyLengthConsistency(afterWriterOffset - beforeWriterOffset) assert suffixKeyLengthConsistency(afterWriterOffset - beforeWriterOffset)

View File

@ -88,10 +88,9 @@ public final class DatabaseMapSingle<U> implements DatabaseStageEntry<U> {
} }
@Override @Override
public U update(SerializationFunction<@Nullable U, @Nullable U> updater, public U update(SerializationFunction<@Nullable U, @Nullable U> updater, UpdateReturnMode updateReturnMode) {
UpdateReturnMode updateReturnMode) {
Buf resultBytes = dictionary.update(key, this.createUpdater(updater), updateReturnMode); Buf resultBytes = dictionary.update(key, this.createUpdater(updater), updateReturnMode);
return deserializeValue(resultBytes); return resultBytes != null ? deserializeValue(resultBytes) : null;
} }
@Override @Override

View File

@ -39,6 +39,9 @@ public class DatabaseSingleton<U> implements DatabaseStageEntry<U> {
} }
private U deserializeValue(Buf value) { private U deserializeValue(Buf value) {
if (value == null) {
return null;
}
try { try {
return serializer.deserialize(BufDataInput.create(value)); return serializer.deserialize(BufDataInput.create(value));
} catch (IndexOutOfBoundsException ex) { } catch (IndexOutOfBoundsException ex) {

View File

@ -203,10 +203,11 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
@Override @Override
default Object2ObjectSortedMap<T, U> get(@Nullable CompositeSnapshot snapshot) { default Object2ObjectSortedMap<T, U> get(@Nullable CompositeSnapshot snapshot) {
Object2ObjectSortedMap<T, U> map = this try (var stream = this.getAllValues(snapshot, true)) {
.getAllValues(snapshot, true) Object2ObjectSortedMap<T, U> map = stream
.collect(Collectors.toMap(Entry::getKey, Entry::getValue, (a, b) -> a, Object2ObjectLinkedOpenHashMap::new)); .collect(Collectors.toMap(Entry::getKey, Entry::getValue, (a, b) -> a, Object2ObjectLinkedOpenHashMap::new));
return map.isEmpty() ? null : map; return map.isEmpty() ? null : map;
}
} }
@Override @Override

View File

@ -5,6 +5,7 @@ import static it.cavallium.dbengine.database.LLUtils.MARKER_ROCKSDB;
import static it.cavallium.dbengine.database.LLUtils.isBoundedRange; import static it.cavallium.dbengine.database.LLUtils.isBoundedRange;
import static it.cavallium.dbengine.database.LLUtils.toStringSafe; import static it.cavallium.dbengine.database.LLUtils.toStringSafe;
import static it.cavallium.dbengine.database.disk.UpdateAtomicResultMode.DELTA; import static it.cavallium.dbengine.database.disk.UpdateAtomicResultMode.DELTA;
import static it.cavallium.dbengine.utils.StreamUtils.streamWhileNonNull;
import static java.util.Objects.requireNonNull; import static java.util.Objects.requireNonNull;
import static it.cavallium.dbengine.utils.StreamUtils.batches; import static it.cavallium.dbengine.utils.StreamUtils.batches;
@ -699,7 +700,7 @@ public class LLLocalDictionary implements LLDictionary {
ro.close(); ro.close();
throw new DBException("Failed to open rocksdb iterator", ex); throw new DBException("Failed to open rocksdb iterator", ex);
} }
return Stream.generate(() -> { return streamWhileNonNull(() -> {
if (!rocksIterator.isValid()) return null; if (!rocksIterator.isValid()) return null;
Buf rawKey = null; Buf rawKey = null;
try { try {
@ -709,7 +710,7 @@ public class LLLocalDictionary implements LLDictionary {
return new BadBlock(databaseName, ColumnUtils.special(columnName), rawKey, ex); return new BadBlock(databaseName, ColumnUtils.special(columnName), rawKey, ex);
} }
return null; return null;
}).takeWhile(x -> rocksIterator.isValid()).filter(Objects::nonNull).onClose(() -> { }).takeWhile(x -> rocksIterator.isValid()).onClose(() -> {
rocksIterator.close(); rocksIterator.close();
ro.close(); ro.close();
}); });
@ -1030,14 +1031,16 @@ public class LLLocalDictionary implements LLDictionary {
if (USE_CURRENT_FASTSIZE_FOR_OLD_SNAPSHOTS || rocksdbSnapshot.snapshot() == null) { if (USE_CURRENT_FASTSIZE_FOR_OLD_SNAPSHOTS || rocksdbSnapshot.snapshot() == null) {
try { try {
if (USE_NUM_ENTRIES_PRECISE_COUNTER) { if (USE_NUM_ENTRIES_PRECISE_COUNTER) {
return exactSizeAll(null); return getRocksDBNumEntries();
} }
return db.getLongProperty("rocksdb.estimate-num-keys"); return db.getLongProperty("rocksdb.estimate-num-keys");
} catch (RocksDBException e) { } catch (RocksDBException e) {
logger.error(MARKER_ROCKSDB, "Failed to get RocksDB estimated keys count property", e); logger.error(MARKER_ROCKSDB, "Failed to get RocksDB estimated keys count property", e);
return 0; return 0;
} }
} else if (USE_NUM_ENTRIES_PRECISE_COUNTER || PARALLEL_EXACT_SIZE) { } else if (USE_NUM_ENTRIES_PRECISE_COUNTER && snapshot == null) {
return getRocksDBNumEntries();
} else if (PARALLEL_EXACT_SIZE) {
return exactSizeAll(snapshot); return exactSizeAll(snapshot);
} else { } else {
rocksdbSnapshot.setFillCache(false); rocksdbSnapshot.setFillCache(false);
@ -1057,17 +1060,18 @@ public class LLLocalDictionary implements LLDictionary {
} }
} }
private long getRocksDBNumEntries() {
try {
return db.getNumEntries();
} catch (RocksDBException ex) {
throw new IllegalStateException("Failed to read exact size", ex);
}
}
private long exactSizeAll(@Nullable LLSnapshot snapshot) { private long exactSizeAll(@Nullable LLSnapshot snapshot) {
if (LLUtils.isInNonBlockingThread()) { if (LLUtils.isInNonBlockingThread()) {
throw new UnsupportedOperationException("Called exactSizeAll in a nonblocking thread"); throw new UnsupportedOperationException("Called exactSizeAll in a nonblocking thread");
} }
if (snapshot == null && USE_NUM_ENTRIES_PRECISE_COUNTER) {
try {
return db.getNumEntries();
} catch (RocksDBException ex) {
throw new IllegalStateException("Failed to read exact size", ex);
}
}
try (var readOpts = LLUtils.generateCustomReadOptions(generateReadOptionsOrNull(snapshot), false, false, false)) { try (var readOpts = LLUtils.generateCustomReadOptions(generateReadOptionsOrNull(snapshot), false, false, false)) {
if (LLUtils.MANUAL_READAHEAD) { if (LLUtils.MANUAL_READAHEAD) {
readOpts.setReadaheadSize(128 * 1024); // 128KiB readOpts.setReadaheadSize(128 * 1024); // 128KiB

View File

@ -9,6 +9,7 @@ import it.cavallium.dbengine.database.LLRange;
import it.cavallium.dbengine.database.LLUtils; import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.database.disk.rocksdb.RocksIteratorObj; import it.cavallium.dbengine.database.disk.rocksdb.RocksIteratorObj;
import it.cavallium.dbengine.utils.DBException; import it.cavallium.dbengine.utils.DBException;
import it.cavallium.dbengine.utils.StreamUtils;
import it.unimi.dsi.fastutil.objects.ObjectArrayList; import it.unimi.dsi.fastutil.objects.ObjectArrayList;
import java.io.IOException; import java.io.IOException;
import java.util.List; import java.util.List;
@ -64,7 +65,7 @@ public abstract class LLLocalGroupedReactiveRocksIterator<T> {
throw new DBException("Failed to iterate the range", e); throw new DBException("Failed to iterate the range", e);
} }
return Stream.<List<T>>generate(() -> { return StreamUtils.<List<T>>streamWhileNonNull(() -> {
try { try {
ObjectArrayList<T> values = new ObjectArrayList<>(); ObjectArrayList<T> values = new ObjectArrayList<>();
Buf firstGroupKey = null; Buf firstGroupKey = null;
@ -111,7 +112,7 @@ public abstract class LLLocalGroupedReactiveRocksIterator<T> {
} }
throw new CompletionException(new DBException("Range failed", ex)); throw new CompletionException(new DBException("Range failed", ex));
} }
}).takeWhile(Objects::nonNull).onClose(() -> { }).onClose(() -> {
rocksIterator.close(); rocksIterator.close();
readOptions.close(); readOptions.close();
}); });

View File

@ -3,12 +3,17 @@ package it.cavallium.dbengine.database.disk;
import static it.cavallium.dbengine.database.LLUtils.MARKER_ROCKSDB; import static it.cavallium.dbengine.database.LLUtils.MARKER_ROCKSDB;
import static it.cavallium.dbengine.database.LLUtils.generateCustomReadOptions; import static it.cavallium.dbengine.database.LLUtils.generateCustomReadOptions;
import static it.cavallium.dbengine.database.LLUtils.isBoundedRange; import static it.cavallium.dbengine.database.LLUtils.isBoundedRange;
import static it.cavallium.dbengine.utils.StreamUtils.streamWhileNonNull;
import com.google.common.collect.Iterators;
import com.google.common.collect.Streams;
import it.cavallium.dbengine.buffers.Buf; import it.cavallium.dbengine.buffers.Buf;
import it.cavallium.dbengine.database.LLRange; import it.cavallium.dbengine.database.LLRange;
import it.cavallium.dbengine.database.LLUtils; import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.utils.DBException; import it.cavallium.dbengine.utils.DBException;
import it.cavallium.dbengine.utils.StreamUtils;
import java.io.IOException; import java.io.IOException;
import java.util.Objects;
import java.util.concurrent.CompletionException; import java.util.concurrent.CompletionException;
import java.util.function.Supplier; import java.util.function.Supplier;
import java.util.stream.Stream; import java.util.stream.Stream;
@ -52,7 +57,7 @@ public class LLLocalKeyPrefixReactiveRocksIterator {
} }
var rocksIterator = db.newRocksIterator(readOptions, range, false); var rocksIterator = db.newRocksIterator(readOptions, range, false);
return Stream.generate(() -> { return streamWhileNonNull(() -> {
try { try {
Buf firstGroupKey = null; Buf firstGroupKey = null;
while (rocksIterator.isValid()) { while (rocksIterator.isValid()) {

View File

@ -1,6 +1,7 @@
package it.cavallium.dbengine.database.disk; package it.cavallium.dbengine.database.disk;
import static it.cavallium.dbengine.database.LLUtils.generateCustomReadOptions; import static it.cavallium.dbengine.database.LLUtils.generateCustomReadOptions;
import static it.cavallium.dbengine.utils.StreamUtils.streamWhileNonNull;
import it.cavallium.dbengine.buffers.Buf; import it.cavallium.dbengine.buffers.Buf;
import it.cavallium.dbengine.database.LLEntry; import it.cavallium.dbengine.database.LLEntry;
@ -39,7 +40,7 @@ public final class LLLocalMigrationReactiveRocksIterator {
} catch (RocksDBException e) { } catch (RocksDBException e) {
throw new DBException("Failed to open iterator", e); throw new DBException("Failed to open iterator", e);
} }
return Stream.generate(() -> { return streamWhileNonNull(() -> {
try { try {
if (rocksIterator.isValid()) { if (rocksIterator.isValid()) {
var key = rocksIterator.keyBuf().copy(); var key = rocksIterator.keyBuf().copy();
@ -52,7 +53,7 @@ public final class LLLocalMigrationReactiveRocksIterator {
} catch (RocksDBException ex) { } catch (RocksDBException ex) {
throw new CompletionException(new DBException("Failed to iterate", ex)); throw new CompletionException(new DBException("Failed to iterate", ex));
} }
}).takeWhile(Objects::nonNull).onClose(() -> { }).onClose(() -> {
rocksIterator.close(); rocksIterator.close();
readOptions.close(); readOptions.close();
}); });

View File

@ -3,6 +3,7 @@ package it.cavallium.dbengine.database.disk;
import static it.cavallium.dbengine.database.LLUtils.MARKER_ROCKSDB; import static it.cavallium.dbengine.database.LLUtils.MARKER_ROCKSDB;
import static it.cavallium.dbengine.database.LLUtils.generateCustomReadOptions; import static it.cavallium.dbengine.database.LLUtils.generateCustomReadOptions;
import static it.cavallium.dbengine.database.LLUtils.isBoundedRange; import static it.cavallium.dbengine.database.LLUtils.isBoundedRange;
import static it.cavallium.dbengine.utils.StreamUtils.streamWhileNonNull;
import it.cavallium.dbengine.buffers.Buf; import it.cavallium.dbengine.buffers.Buf;
import it.cavallium.dbengine.database.LLRange; import it.cavallium.dbengine.database.LLRange;
@ -59,7 +60,7 @@ public abstract class LLLocalReactiveRocksIterator<T> {
throw new DBException("Failed to iterate the range", e); throw new DBException("Failed to iterate the range", e);
} }
return Stream.generate(() -> { return streamWhileNonNull(() -> {
try { try {
if (rocksIterator.isValid()) { if (rocksIterator.isValid()) {
// Note that the underlying array is subject to changes! // Note that the underlying array is subject to changes!
@ -100,7 +101,7 @@ public abstract class LLLocalReactiveRocksIterator<T> {
} }
throw new CompletionException(ex); throw new CompletionException(ex);
} }
}).takeWhile(Objects::nonNull).onClose(() -> { }).onClose(() -> {
rocksIterator.close(); rocksIterator.close();
readOptions.close(); readOptions.close();
}); });

View File

@ -59,7 +59,7 @@ public class LLMemoryKeyValueDatabase implements LLKeyValueDatabase {
)); ));
var singleton = new LLMemorySingleton(dict, columnNameString, singletonName); var singleton = new LLMemorySingleton(dict, columnNameString, singletonName);
Buf returnValue = singleton.get(null); Buf returnValue = singleton.get(null);
if (returnValue == null) { if (returnValue == null && defaultValue != null) {
singleton.set(Buf.wrap(defaultValue)); singleton.set(Buf.wrap(defaultValue));
} }
return singleton; return singleton;

View File

@ -16,7 +16,7 @@ public class CheckIndexInput extends IndexInput {
} }
private static void checkThread() { private static void checkThread() {
assert LuceneUtils.isLuceneThread(); warnLuceneThread();
} }
@Override @Override

View File

@ -15,7 +15,7 @@ public class CheckIndexOutput extends IndexOutput {
} }
private static void checkThread() { private static void checkThread() {
assert LuceneUtils.isLuceneThread(); LuceneUtils.warnLuceneThread();
} }
@Override @Override

View File

@ -49,7 +49,7 @@ public class CheckOutputDirectory extends Directory {
@Override @Override
public IndexOutput createOutput(String name, IOContext context) { public IndexOutput createOutput(String name, IOContext context) {
LuceneUtils.checkLuceneThread(); LuceneUtils.warnLuceneThread();
try { try {
return new CheckIndexOutput(directory.createOutput(name, context)); return new CheckIndexOutput(directory.createOutput(name, context));
} catch (IOException e) { } catch (IOException e) {
@ -59,7 +59,7 @@ public class CheckOutputDirectory extends Directory {
@Override @Override
public IndexOutput createTempOutput(String prefix, String suffix, IOContext context) { public IndexOutput createTempOutput(String prefix, String suffix, IOContext context) {
LuceneUtils.checkLuceneThread(); LuceneUtils.warnLuceneThread();
try { try {
return new CheckIndexOutput(directory.createTempOutput(prefix, suffix, context)); return new CheckIndexOutput(directory.createTempOutput(prefix, suffix, context));
} catch (IOException e) { } catch (IOException e) {
@ -69,7 +69,7 @@ public class CheckOutputDirectory extends Directory {
@Override @Override
public void sync(Collection<String> names) { public void sync(Collection<String> names) {
LuceneUtils.checkLuceneThread(); LuceneUtils.warnLuceneThread();
try { try {
directory.sync(names); directory.sync(names);
} catch (IOException e) { } catch (IOException e) {
@ -79,7 +79,7 @@ public class CheckOutputDirectory extends Directory {
@Override @Override
public void syncMetaData() { public void syncMetaData() {
LuceneUtils.checkLuceneThread(); LuceneUtils.warnLuceneThread();
try { try {
directory.syncMetaData(); directory.syncMetaData();
} catch (IOException e) { } catch (IOException e) {
@ -89,7 +89,7 @@ public class CheckOutputDirectory extends Directory {
@Override @Override
public void rename(String source, String dest) { public void rename(String source, String dest) {
LuceneUtils.checkLuceneThread(); LuceneUtils.warnLuceneThread();
try { try {
directory.rename(source, dest); directory.rename(source, dest);
} catch (IOException e) { } catch (IOException e) {
@ -99,7 +99,7 @@ public class CheckOutputDirectory extends Directory {
@Override @Override
public IndexInput openInput(String name, IOContext context) { public IndexInput openInput(String name, IOContext context) {
LuceneUtils.checkLuceneThread(); LuceneUtils.warnLuceneThread();
try { try {
return new CheckIndexInput(directory.openInput(name, context)); return new CheckIndexInput(directory.openInput(name, context));
} catch (IOException e) { } catch (IOException e) {
@ -109,7 +109,7 @@ public class CheckOutputDirectory extends Directory {
@Override @Override
public Lock obtainLock(String name) { public Lock obtainLock(String name) {
LuceneUtils.checkLuceneThread(); LuceneUtils.warnLuceneThread();
try { try {
return directory.obtainLock(name); return directory.obtainLock(name);
} catch (IOException e) { } catch (IOException e) {

View File

@ -1,5 +1,7 @@
package it.cavallium.dbengine.lucene.searcher; package it.cavallium.dbengine.lucene.searcher;
import static it.cavallium.dbengine.utils.StreamUtils.streamWhileNonNull;
import java.io.IOException; import java.io.IOException;
import it.cavallium.dbengine.utils.DBException; import it.cavallium.dbengine.utils.DBException;
import java.util.Iterator; import java.util.Iterator;
@ -51,7 +53,7 @@ public class LuceneGenerator implements Supplier<ScoreDoc> {
throw new IllegalArgumentException("Sorting is not allowed"); throw new IllegalArgumentException("Sorting is not allowed");
} }
var lg = new LuceneGenerator(shard, localQueryParams, shardIndex); var lg = new LuceneGenerator(shard, localQueryParams, shardIndex);
return Stream.generate(lg).takeWhile(Objects::nonNull); return streamWhileNonNull(lg);
} }
@Override @Override

View File

@ -2,6 +2,7 @@ package it.cavallium.dbengine.lucene.searcher;
import static it.cavallium.dbengine.lucene.searcher.CurrentPageInfo.EMPTY_STATUS; import static it.cavallium.dbengine.lucene.searcher.CurrentPageInfo.EMPTY_STATUS;
import static it.cavallium.dbengine.lucene.searcher.PaginationInfo.MAX_SINGLE_SEARCH_LIMIT; import static it.cavallium.dbengine.lucene.searcher.PaginationInfo.MAX_SINGLE_SEARCH_LIMIT;
import static it.cavallium.dbengine.utils.StreamUtils.streamWhileNonNull;
import it.cavallium.dbengine.client.query.current.data.TotalHitsCount; import it.cavallium.dbengine.client.query.current.data.TotalHitsCount;
import it.cavallium.dbengine.database.LLKeyScore; import it.cavallium.dbengine.database.LLKeyScore;
@ -130,14 +131,14 @@ public class PagedLocalSearcher implements LocalSearcher {
LocalQueryParams queryParams, String keyFieldName, CurrentPageInfo secondPageInfo) { LocalQueryParams queryParams, String keyFieldName, CurrentPageInfo secondPageInfo) {
AtomicReference<CurrentPageInfo> pageInfo = new AtomicReference<>(secondPageInfo); AtomicReference<CurrentPageInfo> pageInfo = new AtomicReference<>(secondPageInfo);
Object lock = new Object(); Object lock = new Object();
Stream<ScoreDoc> topFieldDocFlux = Stream.generate(() -> { Stream<ScoreDoc> topFieldDocFlux = streamWhileNonNull(() -> {
synchronized (lock) { synchronized (lock) {
var currentPageInfo = pageInfo.getPlain(); var currentPageInfo = pageInfo.getPlain();
var result = searchPageSync(queryParams, indexSearchers, true, 0, currentPageInfo); var result = searchPageSync(queryParams, indexSearchers, true, 0, currentPageInfo);
pageInfo.setPlain(result.nextPageToIterate()); pageInfo.setPlain(result.nextPageToIterate());
return result.pageData(); return result.pageData();
} }
}).takeWhile(Objects::nonNull).flatMap(pd -> Stream.of(pd.topDocs().scoreDocs)); }).flatMap(pd -> Stream.of(pd.topDocs().scoreDocs));
return LuceneUtils.convertHits(topFieldDocFlux, indexSearchers, keyFieldName); return LuceneUtils.convertHits(topFieldDocFlux, indexSearchers, keyFieldName);
} }

View File

@ -1,6 +1,7 @@
package it.cavallium.dbengine.lucene.searcher; package it.cavallium.dbengine.lucene.searcher;
import static it.cavallium.dbengine.lucene.searcher.PaginationInfo.MAX_SINGLE_SEARCH_LIMIT; import static it.cavallium.dbengine.lucene.searcher.PaginationInfo.MAX_SINGLE_SEARCH_LIMIT;
import static it.cavallium.dbengine.utils.StreamUtils.streamWhileNonNull;
import com.google.common.collect.Streams; import com.google.common.collect.Streams;
import it.cavallium.dbengine.client.query.current.data.TotalHitsCount; import it.cavallium.dbengine.client.query.current.data.TotalHitsCount;
@ -133,7 +134,7 @@ public class ScoredPagedMultiSearcher implements MultiSearcher {
private Stream<LLKeyScore> searchOtherPages(List<IndexSearcher> indexSearchers, private Stream<LLKeyScore> searchOtherPages(List<IndexSearcher> indexSearchers,
LocalQueryParams queryParams, String keyFieldName, CurrentPageInfo secondPageInfo) { LocalQueryParams queryParams, String keyFieldName, CurrentPageInfo secondPageInfo) {
AtomicReference<CurrentPageInfo> currentPageInfoRef = new AtomicReference<>(secondPageInfo); AtomicReference<CurrentPageInfo> currentPageInfoRef = new AtomicReference<>(secondPageInfo);
Stream<ScoreDoc> topFieldDocStream = Stream.generate(() -> { Stream<ScoreDoc> topFieldDocStream = streamWhileNonNull(() -> {
var currentPageInfo = currentPageInfoRef.getPlain(); var currentPageInfo = currentPageInfoRef.getPlain();
if (currentPageInfo == null) return null; if (currentPageInfo == null) return null;
LOG.trace("Current page info: {}", currentPageInfo); LOG.trace("Current page info: {}", currentPageInfo);
@ -145,7 +146,7 @@ public class ScoredPagedMultiSearcher implements MultiSearcher {
} else { } else {
return Arrays.asList(result.topDocs().scoreDocs); return Arrays.asList(result.topDocs().scoreDocs);
} }
}).takeWhile(Objects::nonNull).flatMap(Collection::stream); }).flatMap(Collection::stream);
return LuceneUtils.convertHits(topFieldDocStream, indexSearchers, keyFieldName); return LuceneUtils.convertHits(topFieldDocStream, indexSearchers, keyFieldName);
} }

View File

@ -8,6 +8,7 @@ import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.Spliterator; import java.util.Spliterator;
import java.util.function.Consumer; import java.util.function.Consumer;
import java.util.function.Supplier;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import java.util.stream.Stream; import java.util.stream.Stream;
import java.util.stream.StreamSupport; import java.util.stream.StreamSupport;
@ -48,6 +49,31 @@ public class StreamUtils {
: StreamSupport.stream(new BatchSpliterator<>(stream.spliterator(), batchSize), stream.isParallel()); : StreamSupport.stream(new BatchSpliterator<>(stream.spliterator(), batchSize), stream.isParallel());
} }
@SuppressWarnings("UnstableApiUsage")
public static <X> Stream<X> streamWhileNonNull(Supplier<X> supplier) {
var it = new Iterator<X>() {
private boolean nextSet = false;
private X next;
@Override
public boolean hasNext() {
if (!nextSet) {
next = supplier.get();
nextSet = true;
}
return next != null;
}
@Override
public X next() {
nextSet = false;
return next;
}
};
return Streams.stream(it);
}
private record BatchSpliterator<E>(Spliterator<E> base, int batchSize) implements Spliterator<List<E>> { private record BatchSpliterator<E>(Spliterator<E> base, int batchSize) implements Spliterator<List<E>> {
@Override @Override

View File

@ -19,6 +19,7 @@ module dbengine {
exports it.cavallium.dbengine.utils; exports it.cavallium.dbengine.utils;
exports it.cavallium.dbengine.database.disk.rocksdb; exports it.cavallium.dbengine.database.disk.rocksdb;
exports it.cavallium.dbengine.buffers; exports it.cavallium.dbengine.buffers;
exports it.cavallium.dbengine.lucene.hugepq.search;
requires org.jetbrains.annotations; requires org.jetbrains.annotations;
requires com.google.common; requires com.google.common;
requires micrometer.core; requires micrometer.core;

View File

@ -1,131 +0,0 @@
package it.cavallium.dbengine;
import static it.cavallium.dbengine.DbTestUtils.MAX_IN_MEMORY_RESULT_ENTRIES;
import static it.cavallium.dbengine.DbTestUtils.ensureNoLeaks;
import static it.cavallium.dbengine.client.DefaultDatabaseOptions.DEFAULT_DATABASE_OPTIONS;
import io.micrometer.core.instrument.simple.SimpleMeterRegistry;
import it.cavallium.data.generator.nativedata.Nullableboolean;
import it.cavallium.data.generator.nativedata.Nullabledouble;
import it.cavallium.data.generator.nativedata.Nullableint;
import it.cavallium.data.generator.nativedata.Nullablelong;
import it.cavallium.dbengine.DbTestUtils.TempDb;
import it.cavallium.dbengine.DbTestUtils.TestAllocator;
import it.cavallium.dbengine.client.DefaultDatabaseOptions;
import it.cavallium.dbengine.client.IndicizerAnalyzers;
import it.cavallium.dbengine.client.IndicizerSimilarities;
import it.cavallium.dbengine.database.ColumnUtils;
import it.cavallium.dbengine.database.LLKeyValueDatabase;
import it.cavallium.dbengine.database.disk.LLLocalDatabaseConnection;
import it.cavallium.dbengine.lucene.LuceneHacks;
import it.cavallium.dbengine.lucene.LuceneUtils;
import it.cavallium.dbengine.lucene.analyzer.TextFieldsAnalyzer;
import it.cavallium.dbengine.lucene.analyzer.TextFieldsSimilarity;
import it.cavallium.dbengine.rpc.current.data.ByteBuffersDirectory;
import it.cavallium.dbengine.rpc.current.data.DatabaseOptions;
import it.cavallium.dbengine.rpc.current.data.DatabaseOptionsBuilder;
import it.cavallium.dbengine.rpc.current.data.LuceneOptions;
import it.cavallium.dbengine.rpc.current.data.nullables.NullableFilter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.Duration;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletionException;
import java.util.concurrent.atomic.AtomicInteger;
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Schedulers;
/**
 * {@link TemporaryDbGenerator} backed by an on-disk RocksDB database plus two
 * in-memory Lucene test indexes. Each open gets a unique workspace directory
 * under {@code /tmp/.cache/} that is wiped on open and deleted on close.
 */
public class LocalTemporaryDbGenerator implements TemporaryDbGenerator {

  /** Monotonically increasing id so concurrent tests get distinct workspace dirs. */
  private static final AtomicInteger dbId = new AtomicInteger(0);

  /** Shared Lucene options for both test indexes (5s timeouts, in-memory directory). */
  private static final LuceneOptions LUCENE_OPTS = new LuceneOptions(Map.of(),
      Duration.ofSeconds(5),
      Duration.ofSeconds(5),
      false,
      new ByteBuffersDirectory(),
      Nullableboolean.empty(),
      Nullabledouble.empty(),
      Nullableint.empty(),
      Nullableboolean.empty(),
      Nullableboolean.empty(),
      true,
      MAX_IN_MEMORY_RESULT_ENTRIES,
      LuceneUtils.getDefaultMergePolicy()
  );

  /**
   * Recursively deletes {@code path} if it exists (children before parents).
   *
   * @param path directory (or file) to remove; a missing path is a no-op
   * @throws IOException if the tree cannot be walked
   * @throws CompletionException wrapping the IOException of a failed delete
   */
  private static void deleteRecursively(Path path) throws IOException {
    if (Files.exists(path)) {
      // Files.walk returns a Stream holding open directory handles; it must be
      // closed or the handles leak. The original code never closed it.
      try (var walk = Files.walk(path)) {
        walk.sorted(Comparator.reverseOrder()).forEach(file -> {
          try {
            Files.delete(file);
          } catch (IOException ex) {
            throw new CompletionException(ex);
          }
        });
      }
    }
  }

  /**
   * Opens a fresh database (and two Lucene indexes) in a clean workspace
   * directory, deleting any stale data from a previous run first.
   *
   * @param allocator test buffer allocator used for the connection
   * @return a Mono emitting the fully connected {@link TempDb}
   */
  @Override
  public Mono<TempDb> openTempDb(TestAllocator allocator) {
    boolean canUseNettyDirect = DbTestUtils.computeCanUseNettyDirect();
    return Mono.defer(() -> {
      var wrkspcPath = Path.of("/tmp/.cache/tempdb-" + dbId.incrementAndGet() + "/");
      return Mono
          .<LLKeyValueDatabase>fromCallable(() -> {
            // Start from a clean slate: remove leftovers of earlier runs
            deleteRecursively(wrkspcPath);
            Files.createDirectories(wrkspcPath);
            return null;
          })
          // Blocking filesystem work must run on the bounded-elastic scheduler
          .subscribeOn(Schedulers.boundedElastic())
          .then(new LLLocalDatabaseConnection(allocator.allocator(),
              new SimpleMeterRegistry(),
              wrkspcPath,
              true,
              null
          ).connect())
          .flatMap(conn -> {
            // One swappable searcher shared by both indexes, exposed in the TempDb
            SwappableLuceneSearcher searcher = new SwappableLuceneSearcher();
            var luceneHacks = new LuceneHacks(() -> searcher, () -> searcher);
            return Mono.zip(
                    conn.getDatabase("testdb",
                        List.of(ColumnUtils.dictionary("testmap"), ColumnUtils.special("ints"), ColumnUtils.special("longs")),
                        DefaultDatabaseOptions.builder().allowNettyDirect(canUseNettyDirect).build()
                    ),
                    conn.getLuceneIndex("testluceneindex1",
                        LuceneUtils.singleStructure(),
                        IndicizerAnalyzers.of(TextFieldsAnalyzer.ICUCollationKey),
                        IndicizerSimilarities.of(TextFieldsSimilarity.Boolean),
                        LUCENE_OPTS,
                        luceneHacks
                    ),
                    conn.getLuceneIndex("testluceneindex16",
                        LuceneUtils.shardsStructure(3),
                        IndicizerAnalyzers.of(TextFieldsAnalyzer.ICUCollationKey),
                        IndicizerSimilarities.of(TextFieldsSimilarity.Boolean),
                        LUCENE_OPTS,
                        luceneHacks
                    ),
                    Mono.just(searcher)
                )
                .map(tuple -> new TempDb(allocator, conn, tuple.getT1(), tuple.getT2(), tuple.getT3(), tuple.getT4(), wrkspcPath));
          });
    });
  }

  /**
   * Closes the database and connection, verifies no buffers leaked, then
   * deletes the on-disk workspace.
   *
   * @param tempDb the database previously returned by {@link #openTempDb}
   * @return a Mono completing once everything is closed and removed
   */
  @Override
  public Mono<Void> closeTempDb(TempDb tempDb) {
    return tempDb.db().close().then(tempDb.connection().disconnect()).then(Mono.fromCallable(() -> {
      ensureNoLeaks(tempDb.allocator().allocator(), false, false);
      deleteRecursively(tempDb.path());
      return null;
    }).subscribeOn(Schedulers.boundedElastic())).then();
  }
}

View File

@ -1,87 +0,0 @@
package it.cavallium.dbengine;
import static it.cavallium.dbengine.DbTestUtils.MAX_IN_MEMORY_RESULT_ENTRIES;
import io.micrometer.core.instrument.simple.SimpleMeterRegistry;
import it.cavallium.data.generator.nativedata.Nullableboolean;
import it.cavallium.data.generator.nativedata.Nullabledouble;
import it.cavallium.data.generator.nativedata.Nullableint;
import it.cavallium.data.generator.nativedata.Nullablelong;
import it.cavallium.dbengine.DbTestUtils.TempDb;
import it.cavallium.dbengine.DbTestUtils.TestAllocator;
import it.cavallium.dbengine.client.DefaultDatabaseOptions;
import it.cavallium.dbengine.client.IndicizerAnalyzers;
import it.cavallium.dbengine.client.IndicizerSimilarities;
import it.cavallium.dbengine.database.ColumnUtils;
import it.cavallium.dbengine.database.memory.LLMemoryDatabaseConnection;
import it.cavallium.dbengine.lucene.LuceneHacks;
import it.cavallium.dbengine.lucene.LuceneUtils;
import it.cavallium.dbengine.lucene.analyzer.TextFieldsAnalyzer;
import it.cavallium.dbengine.lucene.analyzer.TextFieldsSimilarity;
import it.cavallium.dbengine.rpc.current.data.ByteBuffersDirectory;
import it.cavallium.dbengine.rpc.current.data.DatabaseOptions;
import it.cavallium.dbengine.rpc.current.data.LuceneIndexStructure;
import it.cavallium.dbengine.rpc.current.data.LuceneOptions;
import it.cavallium.dbengine.rpc.current.data.nullables.NullableFilter;
import it.unimi.dsi.fastutil.ints.IntList;
import java.time.Duration;
import java.util.List;
import java.util.Map;
import reactor.core.publisher.Mono;
/**
 * {@link TemporaryDbGenerator} that keeps everything in memory: the key-value
 * database as well as both Lucene test indexes (ByteBuffersDirectory), so
 * nothing touches the filesystem and teardown is a single close.
 */
public class MemoryTemporaryDbGenerator implements TemporaryDbGenerator {

  /** Shared Lucene options for both in-memory test indexes. */
  private static final LuceneOptions LUCENE_OPTS = new LuceneOptions(Map.of(),
      Duration.ofSeconds(5),
      Duration.ofSeconds(5),
      false,
      new ByteBuffersDirectory(),
      Nullableboolean.empty(),
      Nullabledouble.empty(),
      Nullableint.empty(),
      Nullableboolean.empty(),
      Nullableboolean.empty(),
      false,
      MAX_IN_MEMORY_RESULT_ENTRIES,
      LuceneUtils.getDefaultMergePolicy()
  );

  /**
   * Opens an in-memory database plus a single-shard and a 3-shard Lucene index.
   *
   * @param allocator test buffer allocator used for the connection
   * @return a Mono emitting the assembled {@link TempDb} (path is null: nothing on disk)
   */
  @Override
  public Mono<TempDb> openTempDb(TestAllocator allocator) {
    boolean canUseNettyDirect = DbTestUtils.computeCanUseNettyDirect();
    return Mono
        .fromCallable(() -> new LLMemoryDatabaseConnection(allocator.allocator(), new SimpleMeterRegistry()))
        .flatMap(connection -> {
          // Both indexes share one swappable searcher, also handed out in the TempDb
          SwappableLuceneSearcher swappableSearcher = new SwappableLuceneSearcher();
          var hacks = new LuceneHacks(() -> swappableSearcher, () -> swappableSearcher);
          var databaseMono = connection.getDatabase("testdb",
              List.of(ColumnUtils.dictionary("testmap"), ColumnUtils.special("ints"), ColumnUtils.special("longs")),
              DefaultDatabaseOptions.builder().allowNettyDirect(canUseNettyDirect).build()
          );
          var singleIndexMono = connection.getLuceneIndex("testluceneindex1",
              LuceneUtils.singleStructure(),
              IndicizerAnalyzers.of(TextFieldsAnalyzer.ICUCollationKey),
              IndicizerSimilarities.of(TextFieldsSimilarity.Boolean),
              LUCENE_OPTS,
              hacks
          );
          var shardedIndexMono = connection.getLuceneIndex("testluceneindex16",
              LuceneUtils.shardsStructure(3),
              IndicizerAnalyzers.of(TextFieldsAnalyzer.ICUCollationKey),
              IndicizerSimilarities.of(TextFieldsSimilarity.Boolean),
              LUCENE_OPTS,
              hacks
          );
          return Mono
              .zip(databaseMono, singleIndexMono, shardedIndexMono, Mono.just(swappableSearcher))
              .map(tuple -> new TempDb(allocator, connection, tuple.getT1(), tuple.getT2(), tuple.getT3(), tuple.getT4(), null));
        });
  }

  /**
   * Closes the in-memory database; there is no on-disk state to clean up.
   */
  @Override
  public Mono<Void> closeTempDb(TempDb db) {
    return db.db().close();
  }
}

View File

@ -1,40 +0,0 @@
package it.cavallium.dbengine;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Schedulers;
/**
 * Helpers to run Reactor publishers synchronously in tests, optionally
 * swallowing expected failures.
 */
public class SyncUtils {

  private SyncUtils() {
    // static utility class: no instances
  }

  /** Returns the mono unchanged, or with errors mapped to empty when failures are expected. */
  private static <T> Mono<T> ignoringErrorsIf(boolean shouldFail, Mono<T> mono) {
    return shouldFail ? mono.onErrorResume(ex -> Mono.empty()) : mono;
  }

  /** Drains the flux on the immediate scheduler, blocking until completion. */
  public static void run(Flux<?> publisher) {
    publisher.subscribeOn(Schedulers.immediate()).blockLast();
  }

  /** Blocks until the void mono completes. */
  public static void runVoid(Mono<Void> publisher) {
    publisher.then().subscribeOn(Schedulers.immediate()).block();
  }

  /** Blocks and returns the mono's value (null if the mono is empty). */
  public static <T> T run(Mono<T> publisher) {
    return publisher.subscribeOn(Schedulers.immediate()).block();
  }

  /**
   * Blocks and returns the mono's value; when {@code shouldFail} is true,
   * errors are suppressed and null is returned instead.
   */
  public static <T> T run(boolean shouldFail, Mono<T> publisher) {
    return ignoringErrorsIf(shouldFail, publisher.subscribeOn(Schedulers.immediate())).block();
  }

  /**
   * Blocks until the void mono completes; when {@code shouldFail} is true,
   * errors are suppressed.
   */
  public static void runVoid(boolean shouldFail, Mono<Void> publisher) {
    ignoringErrorsIf(shouldFail, publisher.then().subscribeOn(Schedulers.immediate())).block();
  }
}

View File

@ -1,13 +0,0 @@
package it.cavallium.dbengine;
import io.netty5.buffer.BufferAllocator;
import it.cavallium.dbengine.DbTestUtils.TempDb;
import it.cavallium.dbengine.DbTestUtils.TestAllocator;
import reactor.core.publisher.Mono;
/**
 * Strategy for creating and tearing down a temporary test database
 * (implementations exist for on-disk and in-memory storage).
 */
public interface TemporaryDbGenerator {

  /**
   * Opens a fresh temporary database using the given test allocator.
   *
   * @param allocator test buffer allocator to open the database with
   * @return a Mono emitting the opened {@link TempDb}
   */
  Mono<TempDb> openTempDb(TestAllocator allocator);

  /**
   * Closes a database previously returned by {@link #openTempDb} and releases
   * all of its resources.
   *
   * @param db the database to close
   * @return a Mono completing when teardown is done
   */
  Mono<Void> closeTempDb(TempDb db);
}

View File

@ -1,62 +0,0 @@
package it.cavallium.dbengine;
import static it.cavallium.dbengine.DbTestUtils.destroyAllocator;
import static it.cavallium.dbengine.DbTestUtils.ensureNoLeaks;
import static it.cavallium.dbengine.DbTestUtils.newAllocator;
import io.netty5.buffer.Buffer;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
/**
 * Self-test of the leak-tracking test allocator: verifies that a released
 * buffer passes the leak check and that an unreleased one is detected.
 */
public class TestAllocator {

  private DbTestUtils.TestAllocator allocator;

  @BeforeEach
  public void beforeEach() {
    this.allocator = newAllocator();
    // A freshly created allocator must start with zero live allocations
    ensureNoLeaks(allocator.allocator(), false, false);
  }

  @AfterEach
  public void afterEach() {
    ensureNoLeaks(allocator.allocator(), true, false);
    destroyAllocator(allocator);
  }

  /** Setup and teardown alone must not report a leak. */
  @Test
  public void testNoOp() {
  }

  /** A buffer that is closed before the check must not count as a leak. */
  @Test
  public void testShouldPass() {
    var buffer = allocator.allocator().allocate(5000);
    buffer.close();
    ensureNoLeaks(allocator.allocator(), true, false);
  }

  /** A buffer left open must make the (throwing) leak check fail. */
  @Test
  public void testShouldFail() {
    Buffer leaked = null;
    try {
      boolean leakDetected;
      try {
        leaked = allocator.allocator().allocate(5000);
        // Third argument true: the check throws when a leak is present
        ensureNoLeaks(allocator.allocator(), true, true);
        leakDetected = false;
      } catch (Exception ex) {
        leakDetected = true;
      }
      if (!leakDetected) {
        Assertions.fail("A leak was not detected!");
      }
    } finally {
      // Release the deliberately leaked buffer so afterEach stays clean
      if (leaked != null) {
        leaked.close();
      }
    }
  }
}

View File

@ -1,149 +0,0 @@
package it.cavallium.dbengine;
import static io.netty5.buffer.internal.InternalBufferUtils.allocatorClosedException;
import static io.netty5.buffer.internal.InternalBufferUtils.assertValidBufferSize;
import static io.netty5.buffer.internal.InternalBufferUtils.standardDrop;
import io.netty5.buffer.AllocationType;
import io.netty5.buffer.AllocatorControl;
import io.netty5.buffer.Buffer;
import io.netty5.buffer.BufferAllocator;
import io.netty5.buffer.Drop;
import io.netty5.buffer.MemoryManager;
import io.netty5.buffer.StandardAllocationTypes;
import io.netty5.buffer.pool.PooledBufferAllocator;
import java.util.concurrent.atomic.LongAdder;
import java.util.function.Function;
import java.util.function.Supplier;
/**
 * Heap-only {@link BufferAllocator} for tests that counts live allocations so
 * leak checks can assert everything allocated was also dropped.
 */
public class TestAllocatorImpl implements BufferAllocator, AllocatorControl {

  private final TestMemoryManager manager;
  // Always heap memory: tests here never need direct/off-heap buffers
  private final AllocationType allocationType = StandardAllocationTypes.ON_HEAP;
  private volatile boolean closed;

  private TestAllocatorImpl(TestMemoryManager testMemoryManager) {
    this.manager = testMemoryManager;
  }

  /** Factory wrapping the default MemoryManager with the counting decorator. */
  public static TestAllocatorImpl create() {
    return new TestAllocatorImpl(new TestMemoryManager(MemoryManager.instance()));
  }

  @Override
  public boolean isPooling() {
    return false;
  }

  @Override
  public AllocationType getAllocationType() {
    return allocationType;
  }

  @Override
  public Buffer allocate(int size) {
    if (closed) {
      throw allocatorClosedException();
    }
    assertValidBufferSize(size);
    return manager.allocateShared(this, size, standardDrop(manager), allocationType);
  }

  @Override
  public Supplier<Buffer> constBufferSupplier(byte[] bytes) {
    if (closed) {
      throw allocatorClosedException();
    }
    Buffer constantBuffer = manager.allocateShared(
        this, bytes.length, standardDrop(manager), allocationType);
    constantBuffer.writeBytes(bytes).makeReadOnly();
    // Each caller receives a read-only child sharing the constant parent's memory
    return () -> manager.allocateConstChild(constantBuffer);
  }

  @Override
  public void close() {
    closed = true;
  }

  /** Number of buffers allocated through this allocator that are still alive. */
  public long getActiveAllocations() {
    return this.manager.getActiveAllocations();
  }

  @Override
  public BufferAllocator getAllocator() {
    return this;
  }

  /**
   * MemoryManager decorator that increments a counter on every allocation and
   * decrements it when the buffer's Drop fires.
   */
  private static class TestMemoryManager implements MemoryManager {

    private final MemoryManager instance;
    private final LongAdder activeAllocations = new LongAdder();

    public TestMemoryManager(MemoryManager instance) {
      this.instance = instance;
    }

    @Override
    public Buffer allocateShared(AllocatorControl allocatorControl,
        long size,
        Function<Drop<Buffer>, Drop<Buffer>> dropDecorator,
        AllocationType allocationType) {
      // NOTE(review): the supplied dropDecorator is ignored and replaced with
      // this::createDrop so the counter is always maintained — confirm callers
      // never rely on their decorator being applied here.
      return instance.allocateShared(allocatorControl, size, this::createDrop, allocationType);
    }

    @Override
    public Buffer allocateConstChild(Buffer readOnlyConstParent) {
      return instance.allocateConstChild(readOnlyConstParent);
    }

    @Override
    public Object unwrapRecoverableMemory(Buffer buf) {
      return instance.unwrapRecoverableMemory(buf);
    }

    @Override
    public Buffer recoverMemory(AllocatorControl allocatorControl, Object recoverableMemory, Drop<Buffer> drop) {
      return instance.recoverMemory(allocatorControl, recoverableMemory, drop);
    }

    @Override
    public Object sliceMemory(Object memory, int offset, int length) {
      return instance.sliceMemory(memory, offset, length);
    }

    @Override
    public void clearMemory(Object o) {
      instance.clearMemory(o);
    }

    @Override
    public String implementationName() {
      return instance.implementationName();
    }

    /** Wraps a Drop so the live-allocation counter tracks drop and fork. */
    private Drop<Buffer> createDrop(Drop<Buffer> drop) {
      activeAllocations.increment();
      return new Drop<>() {
        @Override
        public void drop(Buffer obj) {
          activeAllocations.decrement();
          drop.drop(obj);
        }

        @Override
        public Drop<Buffer> fork() {
          // Each fork counts as its own live allocation
          return createDrop(drop.fork());
        }

        @Override
        public void attach(Buffer obj) {
          drop.attach(obj);
        }
      };
    }

    public long getActiveAllocations() {
      return activeAllocations.longValue();
    }
  }
}

View File

@ -1,56 +0,0 @@
package it.cavallium.dbengine;
import static it.cavallium.dbengine.DbTestUtils.destroyAllocator;
import static it.cavallium.dbengine.DbTestUtils.ensureNoLeaks;
import static it.cavallium.dbengine.DbTestUtils.isCIMode;
import static it.cavallium.dbengine.DbTestUtils.newAllocator;
import static it.cavallium.dbengine.DbTestUtils.tempDb;
import static it.cavallium.dbengine.DbTestUtils.tempDictionary;
import it.cavallium.dbengine.DbTestUtils.TestAllocator;
import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.UpdateMode;
import java.util.Arrays;
import java.util.stream.Stream;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import reactor.test.StepVerifier;
/**
 * Base class for dictionary-level tests; subclasses pick the concrete
 * temporary-database generator (on-disk or in-memory).
 */
public abstract class TestDictionary {

  private TestAllocator allocator;

  protected abstract TemporaryDbGenerator getTempDbGenerator();

  /** One Arguments entry per {@link UpdateMode}, so each mode is exercised. */
  private static Stream<Arguments> provideArgumentsCreate() {
    return Stream.of(UpdateMode.values()).map(Arguments::of);
  }

  @BeforeEach
  public void beforeEach() {
    this.allocator = newAllocator();
    // Fail fast if a previous test left live buffers behind
    ensureNoLeaks(allocator.allocator(), false, false);
  }

  @AfterEach
  public void afterEach() {
    if (!isCIMode()) {
      ensureNoLeaks(allocator.allocator(), true, false);
    }
    destroyAllocator(allocator);
  }

  /** Creating and clearing a dictionary must complete for every update mode. */
  @ParameterizedTest
  @MethodSource("provideArgumentsCreate")
  public void testCreate(UpdateMode updateMode) {
    var scenario = tempDb(getTempDbGenerator(), allocator, db -> tempDictionary(db, updateMode)
        .flatMap(LLDictionary::clear)
        .then());
    StepVerifier.create(scenario).verifyComplete();
  }
}

View File

@ -1,157 +0,0 @@
package it.cavallium.dbengine;
import static org.junit.jupiter.api.Assertions.assertEquals;
import io.netty5.buffer.Buffer;
import it.cavallium.dbengine.database.disk.LLTempHugePqEnv;
import it.cavallium.dbengine.lucene.HugePqCodec;
import it.cavallium.dbengine.lucene.HugePqPriorityQueue;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.concurrent.ThreadLocalRandom;
import java.util.function.Function;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
/**
 * Tests for {@link HugePqPriorityQueue} over Integers with a lexicographic
 * codec: the smallest value is always on top (min-queue behavior).
 */
public class TestHugePq {

  private LLTempHugePqEnv env;
  private HugePqPriorityQueue<Integer> queue;

  @BeforeEach
  public void beforeEach() throws IOException {
    this.env = new LLTempHugePqEnv();
    this.queue = new HugePqPriorityQueue<>(env, new HugePqCodec<Integer>() {
      @Override
      public Buffer serialize(Function<Integer, Buffer> allocator, Integer data) {
        // Lex-encoded int so byte-wise ordering matches numeric ordering
        var buf = allocator.apply(Integer.BYTES);
        HugePqCodec.setLexInt(buf, 0, false, data);
        return buf.writerOffset(Integer.BYTES);
      }

      @Override
      public Integer deserialize(Buffer b) {
        return HugePqCodec.getLexInt(b, 0, false);
      }
    });
  }

  /** Setup and teardown alone must succeed. */
  @Test
  public void testNoOp() {
  }

  @Test
  public void testEmptyTop() {
    // top() on an empty queue is null, not an exception
    Assertions.assertNull(queue.top());
  }

  @Test
  public void testAddSingle() {
    queue.add(2);
    Assertions.assertEquals(2, queue.top());
  }

  @Test
  public void testAddSame() {
    // Duplicates are kept: size counts both copies
    queue.add(2);
    queue.add(2);
    Assertions.assertEquals(2, queue.top());
    Assertions.assertEquals(2, queue.size());
  }

  @Test
  public void testAddMulti() {
    for (int i = 0; i < 1000; i++) {
      queue.add(i);
    }
    // Min-queue: the smallest inserted value surfaces on top
    Assertions.assertEquals(0, queue.top());
  }

  @Test
  public void testAddRandomMulti() {
    var list = new ArrayList<Integer>(1000);
    for (int i = 0; i < 1000; i++) {
      var n = ThreadLocalRandom.current().nextInt(-20, 20);
      queue.add(n);
      list.add(n);
    }
    // Reverse-sort the reference list, then popping from its tail yields ascending order
    list.sort(Comparator.reverseOrder());
    for (int i = 0; i < 1000; i++) {
      Assertions.assertEquals(list.remove(list.size() - 1), queue.pop());
    }
  }

  @Test
  public void testAddMultiClear() {
    for (int i = 0; i < 1000; i++) {
      queue.add(i);
    }
    queue.clear();
    Assertions.assertNull(queue.top());
  }

  @Test
  public void testAddRemove() {
    queue.add(0);
    queue.remove(0);
    Assertions.assertNull(queue.top());
  }

  @Test
  public void testAddRemoveNonexistent() {
    // Removing a value that was never added must leave the queue untouched
    queue.add(0);
    queue.remove(1);
    Assertions.assertEquals(0, queue.top());
  }

  @Test
  public void testAddMultiSameRemove() {
    // Two 0s and one 1: removing 0 once must drop a single instance only
    queue.add(0);
    queue.add(0);
    queue.add(1);
    queue.remove(0);
    Assertions.assertEquals(2, queue.size());
    Assertions.assertEquals(0, queue.top());
  }

  @Test
  public void testAddMultiRemove() {
    for (int i = 0; i < 1000; i++) {
      queue.add(i);
    }
    queue.remove(0);
    Assertions.assertEquals(1, queue.top());
  }

  @Test
  public void testSort() {
    // Inserting a shuffled sequence must pop back out fully sorted
    var sortedNumbers = new ArrayList<Integer>();
    for (int i = 0; i < 1000; i++) {
      sortedNumbers.add(i);
    }
    var shuffledNumbers = new ArrayList<>(sortedNumbers);
    Collections.shuffle(shuffledNumbers);
    for (Integer number : shuffledNumbers) {
      queue.add(number);
    }
    var newSortedNumbers = new ArrayList<>();
    Integer popped;
    while ((popped = queue.pop()) != null) {
      newSortedNumbers.add(popped);
    }
    Assertions.assertEquals(sortedNumbers, newSortedNumbers);
  }

  @AfterEach
  public void afterEach() throws IOException {
    // Close the queue before its backing environment
    queue.close();
    env.close();
  }
}

View File

@ -1,381 +0,0 @@
package it.cavallium.dbengine;
import com.google.common.collect.Lists;
import io.netty5.buffer.Buffer;
import it.cavallium.dbengine.database.SafeCloseable;
import it.cavallium.dbengine.database.disk.LLTempHugePqEnv;
import it.cavallium.dbengine.lucene.LLScoreDoc;
import it.cavallium.dbengine.lucene.HugePqCodec;
import it.cavallium.dbengine.lucene.HugePqPriorityQueue;
import it.cavallium.dbengine.lucene.PriorityQueue;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;
import java.util.function.Function;
import org.apache.lucene.search.HitQueue;
import org.apache.lucene.search.ScoreDoc;
import org.assertj.core.description.Description;
import org.assertj.core.description.TextDescription;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import reactor.core.publisher.Flux;
/**
 * Differential test: every operation runs on both a Lucene HitQueue (the
 * reference implementation) and a HugePqPriorityQueue, and their observable
 * state is asserted to stay identical.
 */
public class TestHugePqHitQueue {

  public static final int NUM_HITS = 1024;

  private LLTempHugePqEnv env;
  private SafeCloseable hugePqQueue;

  private TestingPriorityQueue testingPriorityQueue;

  /** HitQueue ordering: lower score is "less"; score ties broken by larger doc id. */
  protected static boolean lessThan(ScoreDoc hitA, ScoreDoc hitB) {
    if (hitA.score == hitB.score) {
      return hitA.doc > hitB.doc;
    } else {
      return hitA.score < hitB.score;
    }
  }

  /** Total ordering consistent with lessThan, using shardIndex as the final tiebreak. */
  private static int compareScoreDoc(ScoreDoc hitA, ScoreDoc hitB) {
    if (hitA.score == hitB.score) {
      if (hitA.doc == hitB.doc) {
        return Integer.compare(hitA.shardIndex, hitB.shardIndex);
      } else {
        return Integer.compare(hitB.doc, hitA.doc);
      }
    } else {
      return Float.compare(hitA.score, hitB.score);
    }
  }

  // ScoreDoc has no usable equals(): compare through the LLScoreDoc record instead
  private static void assertEqualsScoreDoc(Description description, ScoreDoc expected, ScoreDoc actual) {
    org.assertj.core.api.Assertions.assertThat(toLLScoreDoc(expected)).as(description).isEqualTo(toLLScoreDoc(actual));
  }

  // Pairwise check that the actual sequence never orders before the expected one
  private static void assertEqualsScoreDoc(List<ScoreDoc> expected, List<ScoreDoc> actual) {
    var list1 = expected.iterator();
    var list2 = actual.iterator();
    Assertions.assertEquals(expected.size(), actual.size());
    while (list1.hasNext() && list2.hasNext()) {
      Assertions.assertFalse(lessThan(list1.next(), list2.next()));
    }
  }

  @BeforeEach
  public void beforeEach() throws IOException {
    this.env = new LLTempHugePqEnv();
    var hugePqQueue = new HugePqPriorityQueue<ScoreDoc>(env, new HugePqCodec<>() {

      @Override
      public Buffer serialize(Function<Integer, Buffer> allocator, ScoreDoc data) {
        // Layout: [score][doc][shardIndex], lex-encoded so byte order == queue order
        var buf = allocator.apply(Float.BYTES + Integer.BYTES + Integer.BYTES);
        buf.writerOffset(Float.BYTES + Integer.BYTES + Integer.BYTES);
        setScore(buf, data.score);
        setDoc(buf, data.doc);
        setShardIndex(buf, data.shardIndex);
        return buf;
      }

      @Override
      public ScoreDoc deserialize(Buffer buf) {
        return new ScoreDoc(getDoc(buf), getScore(buf), getShardIndex(buf));
      }

      private static float getScore(Buffer hit) {
        return HugePqCodec.getLexFloat(hit, 0, false);
      }

      private static int getDoc(Buffer hit) {
        // doc is stored inverted (true flag) so larger doc ids sort first on score ties
        return HugePqCodec.getLexInt(hit, Float.BYTES, true);
      }

      private static int getShardIndex(Buffer hit) {
        return HugePqCodec.getLexInt(hit, Float.BYTES + Integer.BYTES, false);
      }

      private static void setScore(Buffer hit, float score) {
        HugePqCodec.setLexFloat(hit, 0, false, score);
      }

      private static void setDoc(Buffer hit, int doc) {
        HugePqCodec.setLexInt(hit, Float.BYTES, true, doc);
      }

      private static void setShardIndex(Buffer hit, int shardIndex) {
        HugePqCodec.setLexInt(hit, Float.BYTES + Integer.BYTES, false, shardIndex);
      }

      @Override
      public ScoreDoc clone(ScoreDoc obj) {
        return new ScoreDoc(obj.doc, obj.score, obj.shardIndex);
      }
    });
    this.hugePqQueue = hugePqQueue;
    PriorityQueueAdaptor<ScoreDoc> hitQueue = new PriorityQueueAdaptor<>(new HitQueue(NUM_HITS, false));
    Assertions.assertEquals(0, hugePqQueue.size());
    Assertions.assertEquals(0, hitQueue.size());
    // Wrap both queues so every operation is mirrored and cross-checked
    this.testingPriorityQueue = new TestingPriorityQueue(hitQueue, hugePqQueue);
  }

  /** Setup and teardown alone must succeed. */
  @Test
  public void testNoOp() {
  }

  @Test
  public void testEmptyTop() {
    Assertions.assertNull(testingPriorityQueue.top());
  }

  @Test
  public void testAddSingle() {
    var item = new ScoreDoc(0, 0, 0);
    testingPriorityQueue.add(item);
    assertEqualsScoreDoc(new TextDescription("top value of %s", testingPriorityQueue), item, testingPriorityQueue.top());
  }

  @Test
  public void testAddMulti() {
    for (int i = 0; i < 1000; i++) {
      var item = new ScoreDoc(i, i >> 1, -1);
      testingPriorityQueue.addUnsafe(item);
    }
    // (0,0) and (1,0) tie on score; the larger doc id (1) orders first
    assertEqualsScoreDoc(new TextDescription("top value of %s", testingPriorityQueue), new ScoreDoc(1, 0, -1), testingPriorityQueue.top());
  }

  @Test
  public void testAddMultiRandom() {
    var list = new ArrayList<Integer>(1000);
    for (int i = 0; i < 1000; i++) {
      var ri = ThreadLocalRandom.current().nextInt(0, 20);
      list.add(ri);
      var item = new ScoreDoc(ri, ri << 1, ri % 4);
      testingPriorityQueue.addUnsafe(item);
    }
    // Pops must come out in ascending order of the generated values
    list.sort(Comparator.reverseOrder());
    for (int i = 0; i < 1000; i++) {
      var top = list.remove(list.size() - 1);
      assertEqualsScoreDoc(new TextDescription("%d value of %s", i, testingPriorityQueue), new ScoreDoc(top, top << 1, top % 4), testingPriorityQueue.pop());
    }
  }

  @Test
  public void testAddMultiClear() {
    for (int i = 0; i < 1000; i++) {
      var item = new ScoreDoc(i, i >> 1, -1);
      testingPriorityQueue.addUnsafe(item);
    }
    testingPriorityQueue.clear();
    Assertions.assertNull(testingPriorityQueue.top());
  }

  @Test
  public void testAddRemove() {
    var item = new ScoreDoc(0, 0, -1);
    testingPriorityQueue.add(item);
    testingPriorityQueue.remove(item);
    Assertions.assertNull(testingPriorityQueue.top());
  }

  @Test
  public void testAddRemoveNonexistent() {
    // Removing an element that was never added must leave the queue untouched
    var item = new ScoreDoc(0, 0, 0);
    testingPriorityQueue.addUnsafe(item);
    testingPriorityQueue.remove(new ScoreDoc(2, 0, 0));
    assertEqualsScoreDoc(new TextDescription("top value of %s", testingPriorityQueue), item, testingPriorityQueue.top());
  }

  @Test
  public void testAddMultiRemove1() {
    ScoreDoc toRemove = null;
    ScoreDoc top = null;
    for (int i = 0; i < 1000; i++) {
      var item = new ScoreDoc(i, i >> 1, -1);
      if (i == 1) {
        toRemove = item;
      } else if (i == 0) {
        top = item;
      }
      testingPriorityQueue.addUnsafe(item);
    }
    // Removing the current minimum promotes the score-tied sibling to the top
    testingPriorityQueue.removeUnsafe(toRemove);
    assertEqualsScoreDoc(new TextDescription("top value of %s", testingPriorityQueue), top, testingPriorityQueue.top());
  }

  @Test
  public void testAddMultiRemove2() {
    ScoreDoc toRemove = null;
    ScoreDoc top = null;
    for (int i = 0; i < 1000; i++) {
      var item = new ScoreDoc(i, i >> 1, -1);
      if (i == 0) {
        toRemove = item;
      } else if (i == 1) {
        top = item;
      }
      testingPriorityQueue.addUnsafe(item);
    }
    // Removal by value: an equal but distinct ScoreDoc instance must match
    testingPriorityQueue.removeUnsafe(new ScoreDoc(0, 0, -1));
    assertEqualsScoreDoc(new TextDescription("top value of %s", testingPriorityQueue), top, testingPriorityQueue.top());
  }

  @Test
  public void testSort() {
    // A shuffled insert sequence must pop back out in sorted order
    var sortedNumbers = new ArrayList<ScoreDoc>();
    for (int i = 0; i < 1000; i++) {
      sortedNumbers.add(new ScoreDoc(i, i >> 1, -1));
    }
    sortedNumbers.sort(TestHugePqHitQueue::compareScoreDoc);
    var shuffledNumbers = new ArrayList<>(sortedNumbers);
    Collections.shuffle(shuffledNumbers, new Random(1000));
    org.assertj.core.api.Assertions.assertThat(testingPriorityQueue.size()).isEqualTo(0);
    for (ScoreDoc scoreDoc : shuffledNumbers) {
      testingPriorityQueue.addUnsafe(scoreDoc);
    }
    org.assertj.core.api.Assertions.assertThat(testingPriorityQueue.size()).isEqualTo(sortedNumbers.size());
    var newSortedNumbers = new ArrayList<ScoreDoc>();
    ScoreDoc popped;
    while ((popped = testingPriorityQueue.popUnsafe()) != null) {
      newSortedNumbers.add(popped);
    }
    org.assertj.core.api.Assertions.assertThat(testingPriorityQueue.size()).isEqualTo(0);
    assertEqualsScoreDoc(sortedNumbers, newSortedNumbers);
  }

  @AfterEach
  public void afterEach() throws IOException {
    // Close the queue before its backing environment
    hugePqQueue.close();
    env.close();
  }

  /**
   * PriorityQueue that forwards every call to both the reference queue and the
   * queue under test. The *Unsafe variants skip the cross-check for bulk setup.
   */
  private static class TestingPriorityQueue implements PriorityQueue<ScoreDoc> {

    private final PriorityQueue<ScoreDoc> referenceQueue;
    private final PriorityQueue<ScoreDoc> myQueue;

    public TestingPriorityQueue(PriorityQueue<ScoreDoc> referenceQueue, PriorityQueue<ScoreDoc> myQueue) {
      this.referenceQueue = referenceQueue;
      this.myQueue = myQueue;
    }

    @Override
    public void add(ScoreDoc element) {
      referenceQueue.add(element);
      myQueue.add(element);
      // Full-content comparison after each checked add
      ensureEquality();
    }

    // Mirrors add() without the O(n) equality sweep
    public void addUnsafe(ScoreDoc element) {
      referenceQueue.add(element);
      myQueue.add(element);
    }

    @Override
    public ScoreDoc top() {
      var top1 = referenceQueue.top();
      var top2 = myQueue.top();
      assertEqualsScoreDoc(new TextDescription("top value of %s", myQueue), top1, top2);
      return top2;
    }

    public ScoreDoc topUnsafe() {
      var top1 = referenceQueue.top();
      var top2 = myQueue.top();
      return top2;
    }

    @Override
    public ScoreDoc pop() {
      var top1 = referenceQueue.pop();
      var top2 = myQueue.pop();
      assertEqualsScoreDoc(new TextDescription("top value of %s", myQueue), top1, top2);
      return top2;
    }

    public ScoreDoc popUnsafe() {
      var top1 = referenceQueue.pop();
      var top2 = myQueue.pop();
      return top2;
    }

    @Override
    public void replaceTop(ScoreDoc oldTop, ScoreDoc newTop) {
      referenceQueue.replaceTop(oldTop, newTop);
      myQueue.replaceTop(oldTop, newTop);
    }

    @Override
    public long size() {
      var size1 = referenceQueue.size();
      var size2 = myQueue.size();
      Assertions.assertEquals(size1, size2);
      return size2;
    }

    @Override
    public void clear() {
      referenceQueue.clear();
      myQueue.clear();
    }

    @Override
    public boolean remove(ScoreDoc element) {
      var removedRef = referenceQueue.remove(element);
      var removedMy = myQueue.remove(element);
      Assertions.assertEquals(removedRef, removedMy);
      return removedMy;
    }

    public boolean removeUnsafe(ScoreDoc element) {
      var removed1 = referenceQueue.remove(element);
      var removed2 = myQueue.remove(element);
      return removed2;
    }

    @Override
    public Flux<ScoreDoc> iterate() {
      //noinspection BlockingMethodInNonBlockingContext
      var it1 = referenceQueue.iterate().collectList().blockOptional().orElseThrow();
      //noinspection BlockingMethodInNonBlockingContext
      var it2 = myQueue.iterate().collectList().blockOptional().orElseThrow();
      assertEqualsScoreDoc(it1, it2);
      return Flux.fromIterable(it2);
    }

    @Override
    public void close() {
      referenceQueue.close();
      myQueue.close();
    }

    /** Asserts both queues hold the same elements in the same iteration order. */
    private void ensureEquality() {
      Assertions.assertEquals(referenceQueue.size(), myQueue.size());
      var referenceQueueElements = Lists.newArrayList(referenceQueue
          .iterate()
          .map(TestHugePqHitQueue::toLLScoreDoc)
          .toIterable());
      var testQueueElements = Lists.newArrayList(myQueue
          .iterate()
          .map(TestHugePqHitQueue::toLLScoreDoc)
          .toIterable());
      Assertions.assertEquals(referenceQueueElements, testQueueElements);
    }
  }

  /** Converts a ScoreDoc to the value-comparable LLScoreDoc record (null-safe). */
  public static LLScoreDoc toLLScoreDoc(ScoreDoc scoreDoc) {
    if (scoreDoc == null) return null;
    return new LLScoreDoc(scoreDoc.doc, scoreDoc.score, scoreDoc.shardIndex);
  }
}

View File

@ -1,283 +0,0 @@
package it.cavallium.dbengine;
import static it.cavallium.dbengine.DbTestUtils.destroyAllocator;
import static it.cavallium.dbengine.DbTestUtils.ensureNoLeaks;
import static it.cavallium.dbengine.DbTestUtils.newAllocator;
import static it.cavallium.dbengine.SyncUtils.run;
import static it.cavallium.dbengine.SyncUtils.runVoid;
import static org.junit.jupiter.api.Assertions.assertEquals;
import io.netty5.buffer.Buffer;
import io.netty5.util.Resource;
import io.netty5.util.Send;
import it.cavallium.dbengine.DbTestUtils.TempDb;
import it.cavallium.dbengine.DbTestUtils.TestAllocator;
import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.LLDictionaryResultType;
import it.cavallium.dbengine.database.LLKeyValueDatabase;
import it.cavallium.dbengine.database.LLRange;
import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.database.UpdateMode;
import it.cavallium.dbengine.database.UpdateReturnMode;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Objects;
import java.util.stream.Stream;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import reactor.core.publisher.Mono;
public abstract class TestLLDictionary {
private final Logger log = LogManager.getLogger(this.getClass());
private static final Mono<LLRange> RANGE_ALL = Mono.fromCallable(LLRange::all);
private TestAllocator allocator;
private TempDb tempDb;
private LLKeyValueDatabase db;
protected abstract TemporaryDbGenerator getTempDbGenerator();
@BeforeEach
public void beforeEach() {
  this.allocator = newAllocator();
  // Verify the fresh allocator starts clean before opening the database
  ensureNoLeaks(allocator.allocator(), false, false);
  tempDb = Objects.requireNonNull(getTempDbGenerator().openTempDb(allocator).block(), "TempDB");
  db = tempDb.db();
}
@AfterEach
public void afterEach() {
  getTempDbGenerator().closeTempDb(tempDb).block();
  // Strict check: every buffer allocated during the test must have been released
  ensureNoLeaks(allocator.allocator(), true, false);
  destroyAllocator(allocator);
}
/** One Arguments entry per {@link UpdateMode}. */
public static Stream<Arguments> provideArguments() {
  return Stream.of(UpdateMode.values()).map(Arguments::of);
}
/** Cartesian product of every UpdateMode with every LLDictionaryResultType. */
public static Stream<Arguments> providePutArguments() {
  return Stream.of(UpdateMode.values())
      .flatMap(updateMode -> Stream.of(LLDictionaryResultType.values())
          .map(resultType -> Arguments.of(updateMode, resultType)));
}
/** Cartesian product of every UpdateMode with every UpdateReturnMode. */
public static Stream<Arguments> provideUpdateArguments() {
  return Stream.of(UpdateMode.values())
      .flatMap(updateMode -> Stream.of(UpdateReturnMode.values())
          .map(returnMode -> Arguments.of(updateMode, returnMode)));
}
/**
 * Opens a temp dictionary with the given update mode and seeds it with four
 * known keys ("test-key-1".."test-key-4"), all mapped to "test-value".
 */
private LLDictionary getDict(UpdateMode updateMode) {
  var dict = DbTestUtils.tempDictionary(db, updateMode).blockOptional().orElseThrow();
  var key1 = Mono.fromCallable(() -> fromString("test-key-1"));
  var key2 = Mono.fromCallable(() -> fromString("test-key-2"));
  var key3 = Mono.fromCallable(() -> fromString("test-key-3"));
  var key4 = Mono.fromCallable(() -> fromString("test-key-4"));
  var value = Mono.fromCallable(() -> fromString("test-value"));
  // VOID: we do not care about previous values while seeding
  dict.put(key1, value, LLDictionaryResultType.VOID).block();
  dict.put(key2, value, LLDictionaryResultType.VOID).block();
  dict.put(key3, value, LLDictionaryResultType.VOID).block();
  dict.put(key4, value, LLDictionaryResultType.VOID).block();
  return dict;
}
/**
 * Encodes the string as UTF-8 into a two-component composite buffer whose
 * readable region starts at offset 3 (bytes 0-2 are skipped padding).
 * The split/recompose dance deliberately produces a multi-component buffer,
 * exercising composite-buffer handling in the code under test.
 */
private Buffer fromString(String s) {
  var sb = s.getBytes(StandardCharsets.UTF_8);
  // +3 pad bytes before the payload, +13 extra capacity left in the tail component
  try (var b = db.getAllocator().allocate(sb.length + 3 + 13)) {
    assert b.writerOffset() == 0;
    assert b.readerOffset() == 0;
    b.writerOffset(3).writeBytes(sb);
    b.readerOffset(3);
    assert b.readableBytes() == sb.length;
    var part1 = b.split();
    // NOTE(review): b is send()-transferred into the composite while still under
    // try-with-resources — relies on closing an already-sent buffer being safe; verify.
    return LLUtils.compositeBuffer(db.getAllocator(), part1.send(), b.send());
  }
}
/** Decodes the readable bytes of the buffer as UTF-8 and closes it. */
private String toString(Buffer bb) {
    try (bb) {
        int length = bb.readableBytes();
        var bytes = new byte[length];
        bb.copyInto(bb.readerOffset(), bytes, 0, length);
        return new String(bytes, StandardCharsets.UTF_8);
    }
}
// Sanity check: setup and teardown alone must not leak or fail.
@Test
public void testNoOp() {
}
// Allocating, sending, receiving and closing buffers of increasing sizes
// (including a zero-sized one) must not leak.
@Test
public void testNoOpAllocation() {
    for (int iteration = 0; iteration < 10; iteration++) {
        var buffer = allocator.allocator().allocate(iteration * 512);
        buffer.send().receive().close();
    }
}
@ParameterizedTest
@MethodSource("provideArguments")
public void testGetDict(UpdateMode updateMode) {
    // The dictionary factory must always return a non-null dictionary.
    Assertions.assertNotNull(getDict(updateMode));
}
@ParameterizedTest
@MethodSource("provideArguments")
public void testGetColumnName(UpdateMode updateMode) {
    // The test dictionary is created on a fixed, known column name.
    var dictionary = getDict(updateMode);
    Assertions.assertEquals("hash_map_testmap", dictionary.getColumnName());
}
@ParameterizedTest
@MethodSource("provideArguments")
public void testGetAllocator(UpdateMode updateMode) {
    var dict = getDict(updateMode);
    var alloc = dict.getAllocator();
    // The original assertion compared `alloc` to itself, which can never fail.
    // Assert something meaningful instead: the allocator is present and stable
    // across repeated calls on the same dictionary.
    Assertions.assertNotNull(alloc);
    Assertions.assertEquals(alloc, dict.getAllocator());
}
@ParameterizedTest
@MethodSource("provideArguments")
public void testGet(UpdateMode updateMode) {
    var dict = getDict(updateMode);
    var existingKey = Mono.fromCallable(() -> fromString("test-key-1"));
    var missingKey = Mono.fromCallable(() -> fromString("test-nonexistent"));
    // Repeat each lookup three times: gets must be repeatable and side-effect free.
    for (int attempt = 0; attempt < 3; attempt++) {
        Assertions.assertEquals("test-value", run(dict.get(null, existingKey).map(this::toString)));
    }
    for (int attempt = 0; attempt < 3; attempt++) {
        Assertions.assertEquals((String) null, run(dict.get(null, missingKey).map(this::toString)));
    }
}
@ParameterizedTest
@MethodSource("providePutArguments")
public void testPutExisting(UpdateMode updateMode, LLDictionaryResultType resultType) {
    var dictionary = getDict(updateMode);
    var existingKey = Mono.fromCallable(() -> fromString("test-key-1"));
    var newValue = Mono.fromCallable(() -> fromString("test-value"));
    var sizeBefore = run(dictionary.sizeRange(null, RANGE_ALL, false));
    // Overwriting an existing key must leave the entry count unchanged.
    runVoid(dictionary.put(existingKey, newValue, resultType).then().doOnDiscard(Resource.class, Resource::close));
    var sizeAfter = run(dictionary.sizeRange(null, RANGE_ALL, false));
    Assertions.assertEquals(0, sizeAfter - sizeBefore);
}
@ParameterizedTest
@MethodSource("providePutArguments")
public void testPutNew(UpdateMode updateMode, LLDictionaryResultType resultType) {
    var dict = getDict(updateMode);
    var keyNonEx = Mono.fromCallable(() -> fromString("test-nonexistent"));
    var value = Mono.fromCallable(() -> fromString("test-value"));
    var beforeSize = run(dict.sizeRange(null, RANGE_ALL, false));
    // Inserting a brand-new key must grow the dictionary by exactly one entry.
    runVoid(dict.put(keyNonEx, value, resultType).then().doOnDiscard(Resource.class, Resource::close));
    // Consistency fix: use the shared RANGE_ALL constant like every other call
    // in this class, instead of building a fresh Mono.fromCallable(LLRange::all).
    var afterSize = run(dict.sizeRange(null, RANGE_ALL, false));
    Assertions.assertEquals(1, afterSize - beforeSize);
    // The new key must be visible both in unsorted and sorted key iteration.
    Assertions.assertTrue(run(dict.getRangeKeys(null, RANGE_ALL, false, false).map(this::toString).collectList()).contains("test-nonexistent"));
    Assertions.assertTrue(run(dict.getRangeKeys(null, RANGE_ALL, true, false).map(this::toString).collectList()).contains("test-nonexistent"));
}
@ParameterizedTest
@MethodSource("provideArguments")
public void testGetUpdateMode(UpdateMode updateMode) {
    // The dictionary must report the update mode it was created with.
    assertEquals(updateMode, getDict(updateMode).getUpdateMode());
}
@ParameterizedTest
@MethodSource("provideUpdateArguments")
public void testUpdateExisting(UpdateMode updateMode, UpdateReturnMode updateReturnMode) {
    var dict = getDict(updateMode);
    var keyEx = Mono.fromCallable(() -> fromString("test-key-1"));
    var beforeSize = run(dict.sizeRange(null, RANGE_ALL, false));
    // Updating an existing key must never change the entry count, no matter
    // how many times the update is applied. The original triplicated this
    // block verbatim; a loop expresses the same three attempts.
    // When updateMode is DISALLOW the update itself is expected to fail.
    for (int attempt = 0; attempt < 3; attempt++) {
        runVoid(updateMode == UpdateMode.DISALLOW,
            dict.update(keyEx, old -> fromString("test-value"), updateReturnMode).doOnNext(Resource::close).then()
        );
        var afterSize = run(dict.sizeRange(null, RANGE_ALL, false));
        assertEquals(0, afterSize - beforeSize);
    }
}
@ParameterizedTest
@MethodSource("provideUpdateArguments")
public void testUpdateNew(UpdateMode updateMode, UpdateReturnMode updateReturnMode) {
    // When updates are disallowed the entry must not be created at all.
    int expected = updateMode == UpdateMode.DISALLOW ? 0 : 1;
    var dict = getDict(updateMode);
    var keyNonEx = Mono.fromCallable(() -> fromString("test-nonexistent"));
    var beforeSize = run(dict.sizeRange(null, RANGE_ALL, false));
    // Repeat the update three times: the first may create the entry, the
    // following ones must leave the count unchanged. The original triplicated
    // this block verbatim; a loop expresses the same three attempts.
    for (int attempt = 0; attempt < 3; attempt++) {
        runVoid(updateMode == UpdateMode.DISALLOW,
            dict.update(keyNonEx, old -> fromString("test-value"), updateReturnMode).doOnNext(Resource::close).then()
        );
        var afterSize = run(dict.sizeRange(null, RANGE_ALL, false));
        assertEquals(expected, afterSize - beforeSize);
    }
    if (updateMode != UpdateMode.DISALLOW) {
        // The created key must be visible both in unsorted and sorted iteration.
        Assertions.assertTrue(run(dict.getRangeKeys(null, RANGE_ALL, false, false).map(this::toString).collectList()).contains(
            "test-nonexistent"));
        Assertions.assertTrue(run(dict.getRangeKeys(null, RANGE_ALL, true, false).map(this::toString).collectList()).contains(
            "test-nonexistent"));
    }
}
// Placeholder: should verify that updateAndGetDelta returns the previous and
// current values of an updated entry.
@ParameterizedTest
@MethodSource("provideArguments")
public void testUpdateAndGetDelta(UpdateMode updateMode) {
log.warn("Test not implemented");
//todo: implement
}
// Placeholder: should verify that clearing the dictionary removes all entries.
@ParameterizedTest
@MethodSource("provideArguments")
public void testClear(UpdateMode updateMode) {
log.warn("Test not implemented");
//todo: implement
}
// Placeholder: should verify removal of existing and missing keys for every
// combination of update mode and result type.
@ParameterizedTest
@MethodSource("providePutArguments")
public void testRemove(UpdateMode updateMode, LLDictionaryResultType resultType) {
log.warn("Test not implemented");
//todo: implement
}
}

View File

@ -1,183 +0,0 @@
package it.cavallium.dbengine;
import static it.cavallium.dbengine.DbTestUtils.destroyAllocator;
import static it.cavallium.dbengine.DbTestUtils.ensureNoLeaks;
import static it.cavallium.dbengine.DbTestUtils.newAllocator;
import static it.cavallium.dbengine.DbTestUtils.tempDb;
import it.cavallium.data.generator.nativedata.StringSerializer;
import it.cavallium.dbengine.DbTestUtils.TestAllocator;
import it.cavallium.dbengine.database.LLKeyValueDatabase;
import it.cavallium.dbengine.database.LLSingleton;
import it.cavallium.dbengine.database.collections.DatabaseInt;
import it.cavallium.dbengine.database.collections.DatabaseLong;
import it.cavallium.dbengine.database.collections.DatabaseSingleton;
import it.cavallium.dbengine.database.serialization.Serializer;
import java.util.stream.Stream;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import org.junit.jupiter.params.provider.ValueSource;
import reactor.core.publisher.Mono;
import reactor.test.StepVerifier;
/**
 * Tests for database-backed singleton values (int, long and string
 * singletons). Subclasses provide the concrete temporary-database backend.
 */
public abstract class TestSingletons {
// Allocator used to detect buffer leaks after each test.
private TestAllocator allocator;
// Subclasses choose the concrete database backend.
protected abstract TemporaryDbGenerator getTempDbGenerator();
// Pairs of (int value to set, number of repetitions) for testSetInteger.
private static Stream<Arguments> provideNumberWithRepeats() {
return Stream.of(
Arguments.of(Integer.MIN_VALUE, 2),
Arguments.of(-11, 2),
Arguments.of(0, 3),
Arguments.of(102, 5)
);
}
// Pairs of (long value to set, number of repetitions) for the long/singleton tests.
private static Stream<Arguments> provideLongNumberWithRepeats() {
return Stream.of(
Arguments.of(Long.MIN_VALUE, 2),
Arguments.of(-11L, 2),
Arguments.of(0L, 3),
Arguments.of(102L, 5)
);
}
@BeforeEach
public void beforeEach() {
// Start each test with a fresh, leak-free allocator.
this.allocator = newAllocator();
ensureNoLeaks(allocator.allocator(), false, false);
}
@AfterEach
public void afterEach() {
// Assert the test leaked no buffers before tearing the allocator down.
ensureNoLeaks(allocator.allocator(), true, false);
destroyAllocator(allocator);
}
// Creating and reading an integer singleton must complete without values or errors.
@Test
public void testCreateInteger() {
StepVerifier
.create(tempDb(getTempDbGenerator(), allocator, db -> tempInt(db, "test", 0)
.flatMap(dbInt -> dbInt.get(null))
.then()
))
.verifyComplete();
}
// Creating an integer singleton without reading it must also complete cleanly.
@Test
public void testCreateIntegerNoop() {
StepVerifier
.create(tempDb(getTempDbGenerator(), allocator, db -> tempInt(db, "test", 0)
.then()
))
.verifyComplete();
}
@Test
public void testCreateLong() {
StepVerifier
.create(tempDb(getTempDbGenerator(), allocator, db -> tempLong(db, "test", 0)
.flatMap(dbLong -> dbLong.get(null))
.then()
))
.verifyComplete();
}
@Test
public void testCreateSingleton() {
StepVerifier
.create(tempDb(getTempDbGenerator(), allocator, db -> tempSingleton(db, "testsingleton")
.flatMap(dbSingleton -> dbSingleton.get(null))
))
.verifyComplete();
}
// A freshly created integer singleton must return its default value.
@ParameterizedTest
@ValueSource(ints = {Integer.MIN_VALUE, -192, -2, -1, 0, 1, 2, 1292, Integer.MAX_VALUE})
public void testDefaultValueInteger(int i) {
StepVerifier
.create(tempDb(getTempDbGenerator(), allocator, db -> tempInt(db, "test", i)
.flatMap(dbInt -> dbInt.get(null))
))
.expectNext(i)
.verifyComplete();
}
// A freshly created long singleton must return its default value.
@ParameterizedTest
@ValueSource(longs = {Long.MIN_VALUE, -192, -2, -1, 0, 1, 2, 1292, Long.MAX_VALUE})
public void testDefaultValueLong(long i) {
StepVerifier
.create(tempDb(getTempDbGenerator(), allocator, db -> tempLong(db, "test", i)
.flatMap(dbLong -> dbLong.get(null))
))
.expectNext(i)
.verifyComplete();
}
// After several throwaway writes, the final written value must be the one read back.
@ParameterizedTest
@MethodSource("provideNumberWithRepeats")
public void testSetInteger(Integer i, Integer repeats) {
StepVerifier
.create(tempDb(getTempDbGenerator(), allocator, db -> tempInt(db, "test", 0)
.flatMap(dbInt -> Mono
.defer(() -> dbInt.set((int) System.currentTimeMillis()))
.repeat(repeats)
.then(dbInt.set(i))
.then(dbInt.get(null)))
))
.expectNext(i)
.verifyComplete();
}
@ParameterizedTest
@MethodSource("provideLongNumberWithRepeats")
public void testSetLong(Long i, Integer repeats) {
StepVerifier
.create(tempDb(getTempDbGenerator(), allocator, db -> tempLong(db, "test", 0)
.flatMap(dbLong -> Mono
.defer(() -> dbLong.set(System.currentTimeMillis()))
.repeat(repeats)
.then(dbLong.set(i))
.then(dbLong.get(null)))
))
.expectNext(i)
.verifyComplete();
}
@ParameterizedTest
@MethodSource("provideLongNumberWithRepeats")
public void testSetSingleton(Long i, Integer repeats) {
StepVerifier
.create(tempDb(getTempDbGenerator(), allocator, db -> tempSingleton(db, "test")
.flatMap(dbSingleton -> Mono
.defer(() -> dbSingleton.set(Long.toString(System.currentTimeMillis())))
.repeat(repeats)
.then(dbSingleton.set(Long.toString(i)))
.then(dbSingleton.get(null)))
))
.expectNext(Long.toString(i))
.verifyComplete();
}
// Creates an int singleton in the "ints" column with the given default.
public static Mono<DatabaseInt> tempInt(LLKeyValueDatabase database, String name, int defaultValue) {
return database
.getInteger("ints", name, defaultValue);
}
// Creates a long singleton in the "longs" column with the given default.
public static Mono<DatabaseLong> tempLong(LLKeyValueDatabase database, String name, long defaultValue) {
return database
.getLong("longs", name, defaultValue);
}
// Creates a string singleton.
// NOTE(review): this uses the "longs" column, unlike tempInt ("ints") — looks
// like a copy-paste leftover; confirm whether a dedicated column was intended.
public static Mono<DatabaseSingleton<String>> tempSingleton(LLKeyValueDatabase database, String name) {
return database
.getSingleton("longs", name)
.map(singleton -> new DatabaseSingleton<>(singleton, Serializer.UTF8_SERIALIZER));
}
}

View File

@ -1,289 +0,0 @@
package it.cavallium.dbengine.database.remote;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.fail;
import io.netty.handler.ssl.ClientAuth;
import io.netty.handler.ssl.util.InsecureTrustManagerFactory;
import io.netty.handler.ssl.util.SelfSignedCertificate;
import io.netty.incubator.codec.quic.InsecureQuicTokenHandler;
import io.netty.incubator.codec.quic.QuicConnectionIdGenerator;
import io.netty.incubator.codec.quic.QuicSslContext;
import io.netty.incubator.codec.quic.QuicSslContextBuilder;
import it.cavallium.dbengine.database.remote.RPCCodecs.RPCEventCodec;
import it.cavallium.dbengine.rpc.current.data.Empty;
import it.cavallium.dbengine.rpc.current.data.RPCEvent;
import it.cavallium.dbengine.rpc.current.data.SingletonGet;
import it.cavallium.dbengine.rpc.current.data.nullables.NullableLLSnapshot;
import java.net.InetSocketAddress;
import java.security.cert.CertificateException;
import java.time.Duration;
import java.util.List;
import java.util.logging.Level;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.publisher.SignalType;
import reactor.netty.Connection;
import reactor.netty.incubator.quic.QuicClient;
import reactor.netty.incubator.quic.QuicConnection;
/**
 * Tests for QuicUtils request/response helpers over a real local QUIC
 * connection. A self-signed server is started in setUp and answers each
 * received SingletonGet according to its singletonId (see constants below).
 */
class QuicUtilsTest {
// Behaviors the test server selects by SingletonGet.singletonId.
private static final int NORMAL = 0;
private static final int WAIT_TIME = 1;
private static final int FAIL_IMMEDIATELY = 2;
private static final int WAIT_TIME_THEN_FAIL = 3;
private Connection serverConn;
private QuicConnection clientConn;
private InetSocketAddress clientAddress;
private InetSocketAddress serverAddress;
@BeforeEach
void setUp() throws CertificateException {
// Self-signed certificate is enough: the client trusts everything below.
var selfSignedCert = new SelfSignedCertificate();
this.clientAddress = new InetSocketAddress("localhost", 8081);
this.serverAddress = new InetSocketAddress("localhost", 8080);
QuicSslContext sslContext = QuicSslContextBuilder
.forServer(selfSignedCert.key(), null, selfSignedCert.cert())
.applicationProtocols("db/0.9")
.clientAuth(ClientAuth.NONE)
.build();
var qs = reactor.netty.incubator.quic.QuicServer
.create()
.tokenHandler(InsecureQuicTokenHandler.INSTANCE)
.bindAddress(() -> serverAddress)
.secure(sslContext)
.idleTimeout(Duration.ofSeconds(30))
.connectionIdAddressGenerator(QuicConnectionIdGenerator.randomGenerator())
.initialSettings(spec -> spec
.maxData(10000000)
.maxStreamDataBidirectionalLocal(1000000)
.maxStreamDataBidirectionalRemote(1000000)
.maxStreamsBidirectional(100)
.maxStreamsUnidirectional(100)
)
// Echo-style handler: decode an RPCEvent, answer according to the
// requested singletonId, map errors through catchRPCErrors, and send
// the result back on the same stream.
.handleStream((in, out) -> in
.withConnection(conn -> conn.addHandler(new RPCEventCodec()))
.receiveObject()
.cast(RPCEvent.class)
.log("recv", Level.FINEST)
.flatMapSequential(req -> (switch ((int) ((SingletonGet) req).singletonId()) {
case NORMAL -> Mono.<RPCEvent>just(Empty.of());
case FAIL_IMMEDIATELY -> Mono.<RPCEvent>error(new Throwable("Expected error"));
case WAIT_TIME -> Mono.delay(Duration.ofSeconds(3)).<RPCEvent>thenReturn(Empty.of());
case WAIT_TIME_THEN_FAIL -> Mono
.delay(Duration.ofSeconds(3))
.then(Mono.<RPCEvent>error(new Throwable("Expected error")));
default -> Mono.<RPCEvent>error(new UnsupportedOperationException("Unsupported request id " + req));
}).log("Server", Level.SEVERE, SignalType.ON_ERROR).onErrorResume(QuicUtils::catchRPCErrors))
.concatMap(message -> Mono.defer(() -> out
.withConnection(conn -> conn.addHandler(new RPCEventCodec()))
.sendObject(message)
.then())
.log("send", Level.FINEST)
)
);
this.serverConn = qs.bindNow();
// Client side: trust-all SSL, same application protocol as the server.
var clientSslContext = QuicSslContextBuilder
.forClient()
.trustManager(InsecureTrustManagerFactory.INSTANCE)
.applicationProtocols("db/0.9")
.build();
this.clientConn = QuicClient.create()
.bindAddress(() -> new InetSocketAddress(0))
.remoteAddress(() -> serverAddress)
.secure(clientSslContext)
.idleTimeout(Duration.ofSeconds(30))
.initialSettings(spec -> spec
.maxData(10000000)
.maxStreamDataBidirectionalLocal(1000000)
)
.connectNow();
}
@AfterEach
void tearDown() {
// Dispose client before server; either may be null if setUp failed early.
if (clientConn != null) {
clientConn.disposeNow();
}
if (serverConn != null) {
serverConn.disposeNow();
}
}
// A single request with NORMAL behavior must yield an Empty response.
@Test
void sendSimpleRequest() {
RPCEvent response = QuicUtils.<RPCEvent, RPCEvent>sendSimpleRequest(clientConn,
RPCEventCodec::new,
RPCEventCodec::new,
new SingletonGet(NORMAL, NullableLLSnapshot.empty())
).blockOptional().orElseThrow();
assertEquals(Empty.of(), response);
}
// A flux of five requests must yield five Empty responses, in order.
@Test
void sendSimpleRequestFlux() {
List<RPCEvent> results = QuicUtils.<RPCEvent, RPCEvent>sendSimpleRequestFlux(clientConn,
RPCEventCodec::new,
RPCEventCodec::new,
Flux.just(
new SingletonGet(NORMAL, NullableLLSnapshot.empty()),
new SingletonGet(NORMAL, NullableLLSnapshot.empty()),
new SingletonGet(NORMAL, NullableLLSnapshot.empty()),
new SingletonGet(NORMAL, NullableLLSnapshot.empty()),
new SingletonGet(NORMAL, NullableLLSnapshot.empty())
)
).collectList().blockOptional().orElseThrow();
assertEquals(5, results.size());
assertEquals(List.of(Empty.of(), Empty.of(), Empty.of(), Empty.of(), Empty.of()), results);
}
// sendUpdate round-trip where both sides respond promptly.
@Test
void sendUpdateFluxNormal() {
RPCEvent results = QuicUtils.<RPCEvent>sendUpdate(clientConn,
RPCEventCodec::new,
new SingletonGet(NORMAL, NullableLLSnapshot.empty()),
serverData -> Mono.fromCallable(() -> {
assertEquals(Empty.of(), serverData);
return new SingletonGet(NORMAL, NullableLLSnapshot.empty());
})
).blockOptional().orElseThrow();
assertEquals(Empty.of(), results);
}
// sendUpdate still succeeds when the client's update function is slow.
@Test
void sendUpdateFluxSlowClient() {
RPCEvent results = QuicUtils.<RPCEvent>sendUpdate(clientConn,
RPCEventCodec::new,
new SingletonGet(NORMAL, NullableLLSnapshot.empty()),
serverData -> Mono.<RPCEvent>fromCallable(() -> {
assertEquals(Empty.of(), serverData);
return new SingletonGet(NORMAL, NullableLLSnapshot.empty());
}).delayElement(Duration.ofSeconds(2))
).blockOptional().orElseThrow();
assertEquals(Empty.of(), results);
}
// sendUpdate still succeeds when the server delays its responses.
@Test
void sendUpdateFluxSlowServer() {
RPCEvent results = QuicUtils.<RPCEvent>sendUpdate(clientConn,
RPCEventCodec::new,
new SingletonGet(WAIT_TIME, NullableLLSnapshot.empty()),
serverData -> Mono.fromCallable(() -> {
assertEquals(Empty.of(), serverData);
return new SingletonGet(WAIT_TIME, NullableLLSnapshot.empty());
})
).blockOptional().orElseThrow();
assertEquals(Empty.of(), results);
}
// sendUpdate still succeeds when both sides are slow.
@Test
void sendUpdateFluxSlowClientAndServer() {
RPCEvent results = QuicUtils.<RPCEvent>sendUpdate(clientConn,
RPCEventCodec::new,
new SingletonGet(WAIT_TIME, NullableLLSnapshot.empty()),
serverData -> Mono.<RPCEvent>fromCallable(() -> {
assertEquals(Empty.of(), serverData);
return new SingletonGet(WAIT_TIME, NullableLLSnapshot.empty());
}).delayElement(Duration.ofSeconds(2))
).blockOptional().orElseThrow();
assertEquals(Empty.of(), results);
}
// A failure in the client's update function must surface as the cause.
@Test
void sendUpdateClientFail() {
class ExpectedException extends Throwable {}
assertThrows(ExpectedException.class, () -> {
try {
RPCEvent results = QuicUtils
.<RPCEvent>sendUpdate(clientConn,
RPCEventCodec::new,
new SingletonGet(NORMAL, NullableLLSnapshot.empty()),
serverData -> Mono.error(new ExpectedException())
)
.blockOptional()
.orElseThrow();
} catch (Throwable e) {
throw e.getCause();
}
});
}
// A server failure on the first message must fail the update and skip the
// client-side update function entirely.
@Test
void sendUpdateServerFail1() {
assertThrows(RPCException.class,
() -> QuicUtils
.<RPCEvent>sendUpdate(clientConn,
RPCEventCodec::new,
new SingletonGet(FAIL_IMMEDIATELY, NullableLLSnapshot.empty()),
serverData -> Mono.fromCallable(() -> {
fail("Called update");
return new SingletonGet(NORMAL, NullableLLSnapshot.empty());
})
)
.blockOptional()
.orElseThrow()
);
}
// A server failure on the second message must also fail the update.
@Test
void sendUpdateServerFail2() {
assertThrows(RPCException.class,
() -> QuicUtils
.<RPCEvent>sendUpdate(clientConn,
RPCEventCodec::new,
new SingletonGet(NORMAL, NullableLLSnapshot.empty()),
serverData -> Mono.fromCallable(() -> {
assertEquals(Empty.of(), serverData);
return new SingletonGet(FAIL_IMMEDIATELY, NullableLLSnapshot.empty());
})
)
.blockOptional()
.orElseThrow()
);
}
// Five concurrent subscriptions to the same request Mono must all succeed.
@Test
void sendSimpleRequestConcurrently() {
// Send the request a second time
var requestMono = QuicUtils.<RPCEvent, RPCEvent>sendSimpleRequest(clientConn,
RPCEventCodec::new,
RPCEventCodec::new,
new SingletonGet(NORMAL, NullableLLSnapshot.empty())
);
var results = Flux
.merge(requestMono, requestMono, requestMono, requestMono, requestMono)
.collectList()
.blockOptional()
.orElseThrow();
assertEquals(5, results.size());
assertEquals(List.of(Empty.of(), Empty.of(), Empty.of(), Empty.of(), Empty.of()), results);
}
// A server-side failure must be surfaced to the caller as RPCException.
@Test
void sendFailedRequest() {
assertThrows(RPCException.class,
() -> QuicUtils
.<RPCEvent, RPCEvent>sendSimpleRequest(clientConn,
RPCEventCodec::new,
RPCEventCodec::new,
new SingletonGet(FAIL_IMMEDIATELY, NullableLLSnapshot.empty())
)
.blockOptional()
.orElseThrow()
);
}
// Placeholder: stream-creation test not implemented yet.
@Test
void createStream() {
}
}

View File

@ -1,174 +0,0 @@
package it.cavallium.dbengine.lucene.hugepq.search;
import static org.junit.jupiter.api.Assertions.assertEquals;
import it.cavallium.dbengine.client.query.current.data.TotalHitsCount;
import it.cavallium.dbengine.database.disk.LLTempHugePqEnv;
import it.cavallium.dbengine.lucene.LLFieldDoc;
import it.cavallium.dbengine.lucene.LLScoreDoc;
import it.cavallium.dbengine.lucene.analyzer.WordAnalyzer;
import it.cavallium.dbengine.lucene.searcher.ShardIndexSearcher;
import it.cavallium.dbengine.lucene.searcher.SharedShardStatistics;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.SortField.Type;
import org.apache.lucene.search.SortedNumericSortField;
import org.apache.lucene.search.TopFieldCollector;
import org.apache.lucene.search.TopScoreDocCollector;
import org.apache.lucene.search.TotalHits.Relation;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.util.QueryBuilder;
import org.junit.jupiter.api.Test;
/**
 * Compares HugePqFullFieldDocCollector against Lucene's standard
 * TopFieldCollector: both must return the same sorted docs and total hit
 * count for the same range query, on a single shard and across two shards.
 */
public class HugePqFullFieldDocCollectorTest {
// Sort by the "number_sort" doc-values field; query matches numbers in [-100, 100].
Sort sort = new Sort(new SortedNumericSortField("number_sort", Type.LONG));
Query luceneQuery = LongPoint.newRangeQuery("number", -100, 100);
@Test
public void testSingleShard() throws IOException {
try (var dir = new ByteBuffersDirectory(); var env = new LLTempHugePqEnv()) {
var analyzer = new WordAnalyzer(true, true);
var writer = new IndexWriter(dir, new IndexWriterConfig(analyzer));
// Index 20 documents with both a sortable doc-values field and a point
// field carrying the same number, so sorting and filtering agree.
writer.updateDocument(new Term("id", "00"), List.of(new SortedNumericDocValuesField("number_sort", 1), new LongPoint("number", 1)));
writer.updateDocument(new Term("id", "01"), List.of(new SortedNumericDocValuesField("number_sort", 44), new LongPoint("number", 44)));
writer.updateDocument(new Term("id", "02"), List.of(new SortedNumericDocValuesField("number_sort", 203), new LongPoint("number", 203)));
writer.updateDocument(new Term("id", "03"), List.of(new SortedNumericDocValuesField("number_sort", 209), new LongPoint("number", 209)));
writer.updateDocument(new Term("id", "04"), List.of(new SortedNumericDocValuesField("number_sort", -33), new LongPoint("number", -33)));
writer.updateDocument(new Term("id", "05"), List.of(new SortedNumericDocValuesField("number_sort", 0), new LongPoint("number", 0)));
writer.updateDocument(new Term("id", "06"), List.of(new SortedNumericDocValuesField("number_sort", 933), new LongPoint("number", 933)));
writer.updateDocument(new Term("id", "07"), List.of(new SortedNumericDocValuesField("number_sort", 6), new LongPoint("number", 6)));
writer.updateDocument(new Term("id", "08"), List.of(new SortedNumericDocValuesField("number_sort", -11), new LongPoint("number", -11)));
writer.updateDocument(new Term("id", "09"), List.of(new SortedNumericDocValuesField("number_sort", 9996), new LongPoint("number", 9996)));
writer.updateDocument(new Term("id", "10"), List.of(new SortedNumericDocValuesField("number_sort", 9), new LongPoint("number", 9)));
writer.updateDocument(new Term("id", "11"), List.of(new SortedNumericDocValuesField("number_sort", 66), new LongPoint("number", 66)));
writer.updateDocument(new Term("id", "12"), List.of(new SortedNumericDocValuesField("number_sort", 88), new LongPoint("number", 88)));
writer.updateDocument(new Term("id", "13"), List.of(new SortedNumericDocValuesField("number_sort", 222), new LongPoint("number", 222)));
writer.updateDocument(new Term("id", "14"), List.of(new SortedNumericDocValuesField("number_sort", -2), new LongPoint("number", -2)));
writer.updateDocument(new Term("id", "15"), List.of(new SortedNumericDocValuesField("number_sort", 7), new LongPoint("number", 7)));
writer.updateDocument(new Term("id", "16"), List.of(new SortedNumericDocValuesField("number_sort", 1010912093), new LongPoint("number", 1010912093)));
writer.updateDocument(new Term("id", "17"), List.of(new SortedNumericDocValuesField("number_sort", -3894789), new LongPoint("number", -3894789)));
writer.updateDocument(new Term("id", "18"), List.of(new SortedNumericDocValuesField("number_sort", 122), new LongPoint("number", 122)));
writer.updateDocument(new Term("id", "19"), List.of(new SortedNumericDocValuesField("number_sort", 2), new LongPoint("number", 2)));
writer.flush();
writer.commit();
try (var reader = DirectoryReader.open(writer, true, true)) {
var searcher = new IndexSearcher(reader);
// Reference results from Lucene's built-in sorted search.
var expectedResults = searcher.search(luceneQuery, 20, sort, false);
var expectedTotalHits = new TotalHitsCount(expectedResults.totalHits.value, expectedResults.totalHits.relation == Relation.EQUAL_TO);
var expectedDocs = Arrays
.stream(expectedResults.scoreDocs)
.map(sd -> (FieldDoc) sd)
.map(fieldDoc -> new LLFieldDoc(fieldDoc.doc, fieldDoc.score, fieldDoc.shardIndex, Arrays.asList(fieldDoc.fields)))
.toList();
try (var collector = HugePqFullFieldDocCollector.create(env, sort, 20, Integer.MAX_VALUE)) {
searcher.search(luceneQuery, collector);
var docs = collector.fullDocs().iterate().collectList().blockOptional().orElseThrow();
System.out.println("Expected docs:");
for (var expectedDoc : expectedDocs) {
System.out.println(expectedDoc);
}
System.out.println("");
System.out.println("Obtained docs:");
for (var doc : docs) {
System.out.println(doc);
}
// shardIndex is normalized to -1 because the reference search is
// single-shard and does not assign shard indices.
assertEquals(expectedDocs,
docs.stream().map(elem -> new LLFieldDoc(elem.doc(), elem.score(), -1, elem.fields())).toList()
);
assertEquals(expectedTotalHits, new TotalHitsCount(collector.getTotalHits(), true));
}
}
}
}
@Test
public void testMultiShard() throws IOException {
try (var dir1 = new ByteBuffersDirectory(); var dir2 = new ByteBuffersDirectory(); var env = new LLTempHugePqEnv()) {
var analyzer = new WordAnalyzer(true, true);
var writer1 = new IndexWriter(dir1, new IndexWriterConfig(analyzer));
var writer2 = new IndexWriter(dir2, new IndexWriterConfig(analyzer));
// Same 20 documents as the single-shard test, split across two indexes.
writer1.updateDocument(new Term("id", "00"), List.of(new SortedNumericDocValuesField("number_sort", 1), new LongPoint("number", 1)));
writer1.updateDocument(new Term("id", "01"), List.of(new SortedNumericDocValuesField("number_sort", 44), new LongPoint("number", 44)));
writer1.updateDocument(new Term("id", "02"), List.of(new SortedNumericDocValuesField("number_sort", 203), new LongPoint("number", 203)));
writer1.updateDocument(new Term("id", "03"), List.of(new SortedNumericDocValuesField("number_sort", 209), new LongPoint("number", 209)));
writer1.updateDocument(new Term("id", "04"), List.of(new SortedNumericDocValuesField("number_sort", -33), new LongPoint("number", -33)));
writer1.updateDocument(new Term("id", "05"), List.of(new SortedNumericDocValuesField("number_sort", 0), new LongPoint("number", 0)));
writer1.updateDocument(new Term("id", "06"), List.of(new SortedNumericDocValuesField("number_sort", 933), new LongPoint("number", 933)));
writer1.updateDocument(new Term("id", "07"), List.of(new SortedNumericDocValuesField("number_sort", 6), new LongPoint("number", 6)));
writer1.updateDocument(new Term("id", "08"), List.of(new SortedNumericDocValuesField("number_sort", -11), new LongPoint("number", -11)));
writer1.updateDocument(new Term("id", "09"), List.of(new SortedNumericDocValuesField("number_sort", 9996), new LongPoint("number", 9996)));
writer2.updateDocument(new Term("id", "10"), List.of(new SortedNumericDocValuesField("number_sort", 9), new LongPoint("number", 9)));
writer2.updateDocument(new Term("id", "11"), List.of(new SortedNumericDocValuesField("number_sort", 66), new LongPoint("number", 66)));
writer2.updateDocument(new Term("id", "12"), List.of(new SortedNumericDocValuesField("number_sort", 88), new LongPoint("number", 88)));
writer2.updateDocument(new Term("id", "13"), List.of(new SortedNumericDocValuesField("number_sort", 222), new LongPoint("number", 222)));
writer2.updateDocument(new Term("id", "14"), List.of(new SortedNumericDocValuesField("number_sort", -2), new LongPoint("number", -2)));
writer2.updateDocument(new Term("id", "15"), List.of(new SortedNumericDocValuesField("number_sort", 7), new LongPoint("number", 7)));
writer2.updateDocument(new Term("id", "16"), List.of(new SortedNumericDocValuesField("number_sort", 1010912093), new LongPoint("number", 1010912093)));
writer2.updateDocument(new Term("id", "17"), List.of(new SortedNumericDocValuesField("number_sort", -3894789), new LongPoint("number", -3894789)));
writer2.updateDocument(new Term("id", "18"), List.of(new SortedNumericDocValuesField("number_sort", 122), new LongPoint("number", 122)));
writer2.updateDocument(new Term("id", "19"), List.of(new SortedNumericDocValuesField("number_sort", 2), new LongPoint("number", 2)));
writer1.flush();
writer2.flush();
writer1.commit();
writer2.commit();
var sharedStats = new SharedShardStatistics();
try (var reader1 = DirectoryReader.open(writer1, true, true);
var reader2 = DirectoryReader.open(writer2, true, true)) {
var searcher1 = new IndexSearcher(reader1);
var searcher2 = new IndexSearcher(reader2);
var shardSearcher1 = new ShardIndexSearcher(sharedStats, List.of(searcher1, searcher2), 0);
var shardSearcher2 = new ShardIndexSearcher(sharedStats, List.of(searcher1, searcher2), 1);
// Reference results from Lucene's shared TopFieldCollector manager.
var standardSharedManager = TopFieldCollector.createSharedManager(sort, 20, null, Integer.MAX_VALUE);
var standardCollector1 = standardSharedManager.newCollector();
var standardCollector2 = standardSharedManager.newCollector();
shardSearcher1.search(luceneQuery, standardCollector1);
shardSearcher2.search(luceneQuery, standardCollector2);
var expectedResults = standardSharedManager.reduce(List.of(standardCollector1, standardCollector2));
var expectedTotalHits = new TotalHitsCount(expectedResults.totalHits.value, expectedResults.totalHits.relation == Relation.EQUAL_TO);
var expectedDocs = Arrays
.stream(expectedResults.scoreDocs)
.map(sd -> (FieldDoc) sd)
.map(fieldDoc -> new LLFieldDoc(fieldDoc.doc, fieldDoc.score, fieldDoc.shardIndex, Arrays.asList(fieldDoc.fields)))
.toList();
var collectorManager = HugePqFullFieldDocCollector.createSharedManager(env, sort, 20, Integer.MAX_VALUE);
var collector1 = collectorManager.newCollector();
var collector2 = collectorManager.newCollector();
shardSearcher1.search(luceneQuery, collector1);
shardSearcher2.search(luceneQuery, collector2);
try (var results = collectorManager.reduce(List.of(collector1, collector2))) {
var docs = results.iterate().collectList().blockOptional().orElseThrow();
System.out.println("Expected docs:");
for (var expectedDoc : expectedDocs) {
System.out.println(expectedDoc);
}
System.out.println("");
System.out.println("Obtained docs:");
for (var doc : docs) {
System.out.println(doc);
}
// shardIndex is normalized to -1 before comparing against the reference.
assertEquals(expectedDocs,
docs.stream().map(elem -> new LLFieldDoc(elem.doc(), elem.score(), -1, elem.fields())).toList()
);
assertEquals(expectedTotalHits, new TotalHitsCount(results.totalHits().value, results.totalHits().relation == Relation.EQUAL_TO));
}
}
}
}
}

View File

@ -1,178 +0,0 @@
package it.cavallium.dbengine.lucene.hugepq.search;
import static org.junit.jupiter.api.Assertions.*;
import it.cavallium.dbengine.client.query.QueryUtils;
import it.cavallium.dbengine.client.query.current.data.TotalHitsCount;
import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.database.disk.IndexSearcherManager;
import it.cavallium.dbengine.database.disk.LLTempHugePqEnv;
import it.cavallium.dbengine.lucene.LLScoreDoc;
import it.cavallium.dbengine.lucene.LuceneUtils;
import it.cavallium.dbengine.lucene.analyzer.LegacyWordAnalyzer;
import it.cavallium.dbengine.lucene.analyzer.TextFieldsAnalyzer;
import it.cavallium.dbengine.lucene.analyzer.WordAnalyzer;
import it.cavallium.dbengine.lucene.searcher.ShardIndexSearcher;
import it.cavallium.dbengine.lucene.searcher.SharedShardStatistics;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.CollectorManager;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.SearcherManager;
import org.apache.lucene.search.SimpleCollector;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopDocsCollector;
import org.apache.lucene.search.TopScoreDocCollector;
import org.apache.lucene.search.TotalHits.Relation;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.util.QueryBuilder;
import org.junit.jupiter.api.Test;
/**
 * Tests for {@code HugePqFullScoreDocCollector}: its results (score docs and total-hits count)
 * must match those of the plain Lucene {@code IndexSearcher#search} / {@code TopScoreDocCollector}
 * on the same index and query, both for a single shard and across two shards.
 */
public class HugePqFullScoreDocCollectorTest {

	/**
	 * Single-shard case: index 20 small documents, run a min-should-match query, and check that
	 * the HugePq collector returns exactly the docs and total hits that a plain
	 * {@code searcher.search(query, 20)} returns.
	 */
	@Test
	public void testSingleShard() throws IOException {
		// Both the in-memory directory and the temporary HugePq environment are closed at the end
		try (var dir = new ByteBuffersDirectory(); var env = new LLTempHugePqEnv()) {
			var analyzer = new WordAnalyzer(true, true);
			var writer = new IndexWriter(dir, new IndexWriterConfig(analyzer));
			// 20 documents keyed by "id"; text values mix exact, partial and no matches for the query below
			writer.updateDocument(new Term("id", "00"), List.of(new TextField("text", "Mario Rossi", Store.YES)));
			writer.updateDocument(new Term("id", "01"), List.of(new TextField("text", "Mario Rossi", Store.YES)));
			writer.updateDocument(new Term("id", "02"), List.of(new TextField("text", "Mario Rossi", Store.YES)));
			writer.updateDocument(new Term("id", "03"), List.of(new TextField("text", "Marios Rossi", Store.YES)));
			writer.updateDocument(new Term("id", "04"), List.of(new TextField("text", "Rossi", Store.YES)));
			writer.updateDocument(new Term("id", "05"), List.of(new TextField("text", "Rossi", Store.YES)));
			writer.updateDocument(new Term("id", "06"), List.of(new TextField("text", "Rossi", Store.YES)));
			writer.updateDocument(new Term("id", "07"), List.of(new TextField("text", "Rossi", Store.YES)));
			writer.updateDocument(new Term("id", "08"), List.of(new TextField("text", "Rossi", Store.YES)));
			writer.updateDocument(new Term("id", "09"), List.of(new TextField("text", "Rossi", Store.YES)));
			writer.updateDocument(new Term("id", "10"), List.of(new TextField("text", "ROSSI UA", Store.YES)));
			writer.updateDocument(new Term("id", "11"), List.of(new TextField("text", "Mario Barman", Store.YES)));
			writer.updateDocument(new Term("id", "12"), List.of(new TextField("text", "Mario batman", Store.YES)));
			writer.updateDocument(new Term("id", "13"), List.of(new TextField("text", "Admin Rossi desk", Store.YES)));
			writer.updateDocument(new Term("id", "14"), List.of(new TextField("text", "MRI Marios bot", Store.YES)));
			writer.updateDocument(new Term("id", "15"), List.of(new TextField("text", "Mario Rossi [beta]", Store.YES)));
			writer.updateDocument(new Term("id", "16"), List.of(new TextField("text", "Mario Music Bot", Store.YES)));
			writer.updateDocument(new Term("id", "17"), List.of(new TextField("text", "Mario night mode", Store.YES)));
			writer.updateDocument(new Term("id", "18"), List.of(new TextField("text", "Mario stats bot", Store.YES)));
			writer.updateDocument(new Term("id", "19"), List.of(new TextField("text", "Very very long text with Mario Giovanni and Rossi inside", Store.YES)));
			writer.flush();
			writer.commit();
			// Near-real-time reader over the writer, so the docs above are visible without reopening
			try (var reader = DirectoryReader.open(writer, true, true)) {
				var searcher = new IndexSearcher(reader);
				var qb = new QueryBuilder(analyzer);
				var luceneQuery = qb.createMinShouldMatchQuery("text", "Mario rossi", 0.3f);
				// Reference results produced by the stock Lucene search path
				var expectedResults = searcher.search(luceneQuery, 20);
				var expectedTotalHits = new TotalHitsCount(expectedResults.totalHits.value, expectedResults.totalHits.relation == Relation.EQUAL_TO);
				var expectedDocs = Arrays
						.stream(expectedResults.scoreDocs)
						.map(scoreDoc -> new LLScoreDoc(scoreDoc.doc, scoreDoc.score, scoreDoc.shardIndex))
						.toList();
				// Same query through the HugePq-backed collector; the collector owns off-heap state, so it is closed here
				try (var collector = HugePqFullScoreDocCollector.create(env, 20)) {
					searcher.search(luceneQuery, collector);
					var docs = collector
							.fullDocs()
							.iterate()
							.collectList()
							.transform(LLUtils::handleDiscard)
							.blockOptional()
							.orElseThrow();
					System.out.println("Expected docs:");
					for (LLScoreDoc expectedDoc : expectedDocs) {
						System.out.println(expectedDoc);
					}
					System.out.println("");
					System.out.println("Obtained docs:");
					for (LLScoreDoc doc : docs) {
						System.out.println(doc);
					}
					// shardIndex is normalized to -1 on both sides by the mapping above / by the reference list
					assertEquals(expectedDocs, docs.stream().map(elem -> new LLScoreDoc(elem.doc(), elem.score(), -1)).toList());
					assertEquals(expectedTotalHits, new TotalHitsCount(collector.getTotalHits(), true));
				}
			}
		}
	}

	/**
	 * Multi-shard case: the same 20 documents are split across two indexes (10 each), searched
	 * through {@code ShardIndexSearcher} instances that share statistics, and reduced with the
	 * shared collector manager. Results must match Lucene's {@code TopScoreDocCollector}
	 * shared-manager reduction over the same two shards.
	 */
	@Test
	public void testMultiShard() throws IOException {
		try (var dir1 = new ByteBuffersDirectory(); var dir2 = new ByteBuffersDirectory(); var env = new LLTempHugePqEnv()) {
			var analyzer = new WordAnalyzer(true, true);
			var writer1 = new IndexWriter(dir1, new IndexWriterConfig(analyzer));
			var writer2 = new IndexWriter(dir2, new IndexWriterConfig(analyzer));
			// Shard 1: ids 00-09
			writer1.updateDocument(new Term("id", "00"), List.of(new TextField("text", "Mario Rossi", Store.YES)));
			writer1.updateDocument(new Term("id", "01"), List.of(new TextField("text", "Mario Rossi", Store.YES)));
			writer1.updateDocument(new Term("id", "02"), List.of(new TextField("text", "Mario Rossi", Store.YES)));
			writer1.updateDocument(new Term("id", "03"), List.of(new TextField("text", "Marios Rossi", Store.YES)));
			writer1.updateDocument(new Term("id", "04"), List.of(new TextField("text", "Rossi", Store.YES)));
			writer1.updateDocument(new Term("id", "05"), List.of(new TextField("text", "Rossi", Store.YES)));
			writer1.updateDocument(new Term("id", "06"), List.of(new TextField("text", "Rossi", Store.YES)));
			writer1.updateDocument(new Term("id", "07"), List.of(new TextField("text", "Rossi", Store.YES)));
			writer1.updateDocument(new Term("id", "08"), List.of(new TextField("text", "Rossi", Store.YES)));
			writer1.updateDocument(new Term("id", "09"), List.of(new TextField("text", "Rossi", Store.YES)));
			// Shard 2: ids 10-19
			writer2.updateDocument(new Term("id", "10"), List.of(new TextField("text", "ROSSI UA", Store.YES)));
			writer2.updateDocument(new Term("id", "11"), List.of(new TextField("text", "Mario Barman", Store.YES)));
			writer2.updateDocument(new Term("id", "12"), List.of(new TextField("text", "Mario batman", Store.YES)));
			writer2.updateDocument(new Term("id", "13"), List.of(new TextField("text", "Admin Rossi desk", Store.YES)));
			writer2.updateDocument(new Term("id", "14"), List.of(new TextField("text", "MRI Marios bot", Store.YES)));
			writer2.updateDocument(new Term("id", "15"), List.of(new TextField("text", "Mario Rossi [beta]", Store.YES)));
			writer2.updateDocument(new Term("id", "16"), List.of(new TextField("text", "Mario Music Bot", Store.YES)));
			writer2.updateDocument(new Term("id", "17"), List.of(new TextField("text", "Mario night mode", Store.YES)));
			writer2.updateDocument(new Term("id", "18"), List.of(new TextField("text", "Mario stats bot", Store.YES)));
			writer2.updateDocument(new Term("id", "19"), List.of(new TextField("text", "Very very long text with Mario Giovanni and Rossi inside", Store.YES)));
			writer1.flush();
			writer2.flush();
			writer1.commit();
			writer2.commit();
			// Cross-shard term statistics shared by both shard searchers
			var sharedStats = new SharedShardStatistics();
			try (var reader1 = DirectoryReader.open(writer1, true, true);
					var reader2 = DirectoryReader.open(writer2, true, true)) {
				var searcher1 = new IndexSearcher(reader1);
				var searcher2 = new IndexSearcher(reader2);
				var shardSearcher1 = new ShardIndexSearcher(sharedStats, List.of(searcher1, searcher2), 0);
				var shardSearcher2 = new ShardIndexSearcher(sharedStats, List.of(searcher1, searcher2), 1);
				var qb = new QueryBuilder(analyzer);
				var luceneQuery = qb.createMinShouldMatchQuery("text", "Mario rossi", 0.3f);
				// Reference: stock Lucene shared collector manager reduced over both shards
				var standardSharedManager = TopScoreDocCollector.createSharedManager(20, null, Integer.MAX_VALUE);
				var standardCollector1 = standardSharedManager.newCollector();
				var standardCollector2 = standardSharedManager.newCollector();
				shardSearcher1.search(luceneQuery, standardCollector1);
				shardSearcher2.search(luceneQuery, standardCollector2);
				var expectedResults = standardSharedManager.reduce(List.of(standardCollector1, standardCollector2));
				var expectedTotalHits = new TotalHitsCount(expectedResults.totalHits.value, expectedResults.totalHits.relation == Relation.EQUAL_TO);
				var expectedDocs = Arrays
						.stream(expectedResults.scoreDocs)
						.map(scoreDoc -> new LLScoreDoc(scoreDoc.doc, scoreDoc.score, scoreDoc.shardIndex))
						.toList();
				// Same flow through the HugePq shared manager; collectors and reduced result hold native state, so all are closed
				var collectorManager = HugePqFullScoreDocCollector.createSharedManager(env, 20, Integer.MAX_VALUE);
				try (var collector1 = collectorManager.newCollector();
						var collector2 = collectorManager.newCollector()) {
					shardSearcher1.search(luceneQuery, collector1);
					shardSearcher2.search(luceneQuery, collector2);
					try (var results = collectorManager.reduce(List.of(collector1, collector2))) {
						var docs = results.iterate().collectList().blockOptional().orElseThrow();
						System.out.println("Expected docs:");
						for (LLScoreDoc expectedDoc : expectedDocs) {
							System.out.println(expectedDoc);
						}
						System.out.println("");
						System.out.println("Obtained docs:");
						for (LLScoreDoc doc : docs) {
							System.out.println(doc);
						}
						// shardIndex normalized to -1 before comparing, as in the single-shard test
						assertEquals(expectedDocs, docs.stream().map(elem -> new LLScoreDoc(elem.doc(), elem.score(), -1)).toList());
						assertEquals(expectedTotalHits, new TotalHitsCount(results.totalHits().value, results.totalHits().relation == Relation.EQUAL_TO));
					}
				}
			}
		}
	}
}

View File

@ -1,23 +1,17 @@
package it.cavallium.dbengine; package it.cavallium.dbengine.tests;
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
import io.netty5.buffer.Buffer; import io.netty.util.ResourceLeakDetector;
import io.netty5.buffer.MemoryManager; import io.netty.util.ResourceLeakDetector.Level;
import io.netty5.buffer.internal.LeakDetection; import it.cavallium.dbengine.buffers.BufDataInput;
import io.netty5.buffer.internal.LifecycleTracer; import it.cavallium.dbengine.buffers.BufDataOutput;
import io.netty5.buffer.pool.PoolArenaMetric;
import io.netty5.buffer.pool.PooledBufferAllocator;
import io.netty5.util.ResourceLeakDetector;
import io.netty5.util.ResourceLeakDetector.Level;
import io.netty5.util.internal.PlatformDependent;
import it.cavallium.dbengine.client.LuceneIndex; import it.cavallium.dbengine.client.LuceneIndex;
import it.cavallium.dbengine.client.LuceneIndexImpl; import it.cavallium.dbengine.client.LuceneIndexImpl;
import it.cavallium.dbengine.database.LLDatabaseConnection; import it.cavallium.dbengine.database.LLDatabaseConnection;
import it.cavallium.dbengine.database.LLDictionary; import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.LLKeyValueDatabase; import it.cavallium.dbengine.database.LLKeyValueDatabase;
import it.cavallium.dbengine.database.LLLuceneIndex; import it.cavallium.dbengine.database.LLLuceneIndex;
import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.database.UpdateMode; import it.cavallium.dbengine.database.UpdateMode;
import it.cavallium.dbengine.database.collections.DatabaseMapDictionary; import it.cavallium.dbengine.database.collections.DatabaseMapDictionary;
import it.cavallium.dbengine.database.collections.DatabaseMapDictionaryDeep; import it.cavallium.dbengine.database.collections.DatabaseMapDictionaryDeep;
@ -26,23 +20,20 @@ import it.cavallium.dbengine.database.collections.DatabaseStageEntry;
import it.cavallium.dbengine.database.collections.DatabaseStageMap; import it.cavallium.dbengine.database.collections.DatabaseStageMap;
import it.cavallium.dbengine.database.collections.SubStageGetterHashMap; import it.cavallium.dbengine.database.collections.SubStageGetterHashMap;
import it.cavallium.dbengine.database.collections.SubStageGetterMap; import it.cavallium.dbengine.database.collections.SubStageGetterMap;
import it.cavallium.dbengine.database.serialization.SerializationException;
import it.cavallium.dbengine.database.serialization.Serializer; import it.cavallium.dbengine.database.serialization.Serializer;
import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength; import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
import it.unimi.dsi.fastutil.objects.Object2ObjectSortedMap; import it.unimi.dsi.fastutil.objects.Object2ObjectSortedMap;
import java.io.IOException;
import java.nio.file.Path; import java.nio.file.Path;
import java.util.Objects; import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Function; import java.util.function.Function;
import org.apache.lucene.util.IOSupplier;
import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.NotNull;
import org.reactivestreams.Publisher; import org.junit.jupiter.api.Assertions;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
public class DbTestUtils { public class DbTestUtils {
static {
LLUtils.initHooks();
}
public static final String BIG_STRING = generateBigString(); public static final String BIG_STRING = generateBigString();
public static final int MAX_IN_MEMORY_RESULT_ENTRIES = 8192; public static final int MAX_IN_MEMORY_RESULT_ENTRIES = 8192;
@ -50,84 +41,61 @@ public class DbTestUtils {
return "0123456789".repeat(1024); return "0123456789".repeat(1024);
} }
public record TestAllocator(TestAllocatorImpl allocator) {}
public static TestAllocator newAllocator() {
return new TestAllocator(TestAllocatorImpl.create());
}
public static void destroyAllocator(TestAllocator testAllocator) {
testAllocator.allocator().close();
}
@SuppressWarnings("SameParameterValue")
private static long getActiveAllocations(TestAllocatorImpl allocator, boolean printStats) {
long activeAllocations = allocator.getActiveAllocations();
if (printStats) {
System.out.println("activeAllocations=" + activeAllocations);
}
return activeAllocations;
}
public static boolean isCIMode() { public static boolean isCIMode() {
return System.getProperty("dbengine.ci", "false").equalsIgnoreCase("true"); return System.getProperty("dbengine.ci", "false").equalsIgnoreCase("true");
} }
public static <U> Flux<U> tempDb(TemporaryDbGenerator temporaryDbGenerator, public static <U> U tempDb(TemporaryDbGenerator temporaryDbGenerator,
TestAllocator alloc, Function<LLKeyValueDatabase, U> action) throws IOException {
Function<LLKeyValueDatabase, Publisher<U>> action) { var tempDb = temporaryDbGenerator.openTempDb();
return Flux.usingWhen( try {
temporaryDbGenerator.openTempDb(alloc), return action.apply(tempDb.db());
tempDb -> Flux } finally {
.from(action.apply(tempDb.db())) temporaryDbGenerator.closeTempDb(tempDb);
.doOnDiscard(Object.class, o -> System.out.println("Discarded: " + o.getClass().getName() + ", " + o)), }
temporaryDbGenerator::closeTempDb
);
} }
public record TempDb(TestAllocator allocator, LLDatabaseConnection connection, LLKeyValueDatabase db, public static void runVoid(boolean shouldFail, Runnable consumer) {
if (shouldFail) {
Assertions.assertThrows(Throwable.class, consumer::run);
} else {
Assertions.assertDoesNotThrow(consumer::run);
}
}
public static <X> X run(boolean shouldFail, IOSupplier<X> consumer) {
AtomicReference<X> result = new AtomicReference<>(null);
if (shouldFail) {
Assertions.assertThrows(Throwable.class, consumer::get);
} else {
Assertions.assertDoesNotThrow(() -> result.set(consumer.get()));
}
return result.get();
}
public record TempDb(LLDatabaseConnection connection, LLKeyValueDatabase db,
LLLuceneIndex luceneSingle, LLLuceneIndex luceneSingle,
LLLuceneIndex luceneMulti, LLLuceneIndex luceneMulti,
SwappableLuceneSearcher swappableLuceneSearcher, SwappableLuceneSearcher swappableLuceneSearcher,
Path path) {} Path path) {}
static boolean computeCanUseNettyDirect() { public static void ensureNoLeaks(boolean printStats, boolean useClassicException) {
boolean canUse = true;
if (!PlatformDependent.hasUnsafe()) {
System.err.println("Warning! Unsafe is not available!"
+ " Netty direct buffers will not be used in tests!");
canUse = false;
}
return canUse;
}
public static void ensureNoLeaks(TestAllocatorImpl allocator, boolean printStats, boolean useClassicException) {
ResourceLeakDetector.setLevel(Level.PARANOID); ResourceLeakDetector.setLevel(Level.PARANOID);
System.gc(); System.gc();
if (allocator != null) {
var allocs = getActiveAllocations(allocator, printStats);
if (useClassicException) {
if (allocs != 0) {
throw new IllegalStateException("Active allocations: " + allocs);
}
} else {
assertEquals(0L, allocs);
}
}
} }
public static Mono<? extends LLDictionary> tempDictionary(LLKeyValueDatabase database, UpdateMode updateMode) { public static LLDictionary tempDictionary(LLKeyValueDatabase database, UpdateMode updateMode) {
return tempDictionary(database, "testmap", updateMode); return tempDictionary(database, "testmap", updateMode);
} }
public static Mono<? extends LLDictionary> tempDictionary(LLKeyValueDatabase database, public static LLDictionary tempDictionary(LLKeyValueDatabase database,
String name, String name,
UpdateMode updateMode) { UpdateMode updateMode) {
return database.getDictionary(name, updateMode); return database.getDictionary(name, updateMode);
} }
public static Mono<? extends LuceneIndex<String, String>> tempLuceneIndex(LLLuceneIndex index) { public static LuceneIndex<String, String> tempLuceneIndex(LLLuceneIndex index) {
return Mono.fromCallable(() -> new LuceneIndexImpl<>(index, new StringIndicizer())); return new LuceneIndexImpl<>(index, new StringIndicizer());
} }
@ -157,14 +125,13 @@ public class DbTestUtils {
} }
@Override @Override
public @NotNull Short deserialize(@NotNull Buffer serialized) { public @NotNull Short deserialize(@NotNull BufDataInput in) throws SerializationException {
Objects.requireNonNull(serialized); return in.readShort();
return serialized.readShort();
} }
@Override @Override
public void serialize(@NotNull Short deserialized, Buffer output) { public void serialize(@NotNull Short deserialized, BufDataOutput out) throws SerializationException {
output.writeShort(deserialized); out.writeShort(deserialized);
} }
} }
); );
@ -195,7 +162,7 @@ public class DbTestUtils {
new SubStageGetterHashMap<>(Serializer.UTF8_SERIALIZER, new SubStageGetterHashMap<>(Serializer.UTF8_SERIALIZER,
Serializer.UTF8_SERIALIZER, Serializer.UTF8_SERIALIZER,
String::hashCode, String::hashCode,
SerializerFixedBinaryLength.intSerializer(dictionary.getAllocator()) SerializerFixedBinaryLength.intSerializer()
) )
); );
} }
@ -206,7 +173,7 @@ public class DbTestUtils {
Serializer.UTF8_SERIALIZER, Serializer.UTF8_SERIALIZER,
Serializer.UTF8_SERIALIZER, Serializer.UTF8_SERIALIZER,
String::hashCode, String::hashCode,
SerializerFixedBinaryLength.intSerializer(dictionary.getAllocator()) SerializerFixedBinaryLength.intSerializer()
); );
} }
} }

View File

@ -1,4 +1,4 @@
package it.cavallium.dbengine; package it.cavallium.dbengine.tests;
import it.cavallium.dbengine.client.Sort; import it.cavallium.dbengine.client.Sort;
import it.cavallium.dbengine.client.query.BaseType; import it.cavallium.dbengine.client.query.BaseType;

View File

@ -0,0 +1,113 @@
package it.cavallium.dbengine.tests;
import static it.cavallium.dbengine.tests.DbTestUtils.MAX_IN_MEMORY_RESULT_ENTRIES;
import static it.cavallium.dbengine.tests.DbTestUtils.ensureNoLeaks;
import io.micrometer.core.instrument.simple.SimpleMeterRegistry;
import it.cavallium.data.generator.nativedata.Nullableboolean;
import it.cavallium.data.generator.nativedata.Nullabledouble;
import it.cavallium.data.generator.nativedata.Nullableint;
import it.cavallium.dbengine.tests.DbTestUtils.TempDb;
import it.cavallium.dbengine.client.DefaultDatabaseOptions;
import it.cavallium.dbengine.client.IndicizerAnalyzers;
import it.cavallium.dbengine.client.IndicizerSimilarities;
import it.cavallium.dbengine.database.ColumnUtils;
import it.cavallium.dbengine.database.LLDatabaseConnection;
import it.cavallium.dbengine.database.disk.LLLocalDatabaseConnection;
import it.cavallium.dbengine.lucene.LuceneHacks;
import it.cavallium.dbengine.lucene.LuceneUtils;
import it.cavallium.dbengine.lucene.analyzer.TextFieldsAnalyzer;
import it.cavallium.dbengine.lucene.analyzer.TextFieldsSimilarity;
import it.cavallium.dbengine.rpc.current.data.ByteBuffersDirectory;
import it.cavallium.dbengine.rpc.current.data.LuceneOptions;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.Duration;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletionException;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * {@link TemporaryDbGenerator} backed by a real on-disk database under {@code /tmp/.cache/}.
 * Each call to {@link #openTempDb()} gets a fresh, uniquely-numbered workspace directory,
 * which is wiped again when the database is closed.
 */
public class LocalTemporaryDbGenerator implements TemporaryDbGenerator {

	// Monotonic counter so concurrent/sequential tests each get their own workspace directory
	private static final AtomicInteger dbId = new AtomicInteger(0);

	// Lucene configuration shared by the single-shard and the 3-shard test indexes
	private static final LuceneOptions LUCENE_OPTS = new LuceneOptions(Map.of(),
			Duration.ofSeconds(5),
			Duration.ofSeconds(5),
			false,
			new ByteBuffersDirectory(),
			Nullableboolean.empty(),
			Nullabledouble.empty(),
			Nullableint.empty(),
			Nullableboolean.empty(),
			Nullableboolean.empty(),
			MAX_IN_MEMORY_RESULT_ENTRIES,
			LuceneUtils.getDefaultMergePolicy()
	);

	/**
	 * Deletes {@code path} recursively (children before parents) if it exists.
	 * The stream returned by {@link Files#walk} is closed via try-with-resources:
	 * it keeps directory handles open until closed, which the previous inline
	 * version leaked.
	 *
	 * @param path the root to delete; may not exist
	 * @throws IOException if the tree cannot be walked
	 * @throws CompletionException wrapping the {@link IOException} of a failed single deletion
	 */
	private static void deleteRecursively(Path path) throws IOException {
		if (!Files.exists(path)) {
			return;
		}
		try (var filesToDelete = Files.walk(path)) {
			// Reverse order ensures files are removed before their containing directories
			filesToDelete.sorted(Comparator.reverseOrder()).forEach(file -> {
				try {
					Files.delete(file);
				} catch (IOException ex) {
					throw new CompletionException(ex);
				}
			});
		}
	}

	@Override
	public TempDb openTempDb() throws IOException {
		var wrkspcPath = Path.of("/tmp/.cache/tempdb-" + dbId.incrementAndGet() + "/");
		// Start from a clean slate: remove leftovers of a previous (possibly crashed) run
		deleteRecursively(wrkspcPath);
		Files.createDirectories(wrkspcPath);
		LLDatabaseConnection conn = new LLLocalDatabaseConnection(
				new SimpleMeterRegistry(),
				wrkspcPath,
				true
		).connect();
		// The same swappable searcher is used for both local and shard searches
		SwappableLuceneSearcher searcher = new SwappableLuceneSearcher();
		var luceneHacks = new LuceneHacks(() -> searcher, () -> searcher);
		return new TempDb(conn,
				conn.getDatabase("testdb",
						List.of(ColumnUtils.dictionary("testmap"), ColumnUtils.special("ints"), ColumnUtils.special("longs")),
						DefaultDatabaseOptions.builder().build()
				),
				conn.getLuceneIndex("testluceneindex1",
						LuceneUtils.singleStructure(),
						IndicizerAnalyzers.of(TextFieldsAnalyzer.ICUCollationKey),
						IndicizerSimilarities.of(TextFieldsSimilarity.Boolean),
						LUCENE_OPTS,
						luceneHacks
				),
				conn.getLuceneIndex("testluceneindex16",
						LuceneUtils.shardsStructure(3),
						IndicizerAnalyzers.of(TextFieldsAnalyzer.ICUCollationKey),
						IndicizerSimilarities.of(TextFieldsSimilarity.Boolean),
						LUCENE_OPTS,
						luceneHacks
				),
				searcher,
				wrkspcPath
		);
	}

	@Override
	public void closeTempDb(TempDb tempDb) throws IOException {
		tempDb.db().close();
		tempDb.connection().disconnect();
		// Fails the test early if buffers are still alive after closing
		ensureNoLeaks(false, false);
		deleteRecursively(tempDb.path());
	}
}

View File

@ -1,6 +1,8 @@
package it.cavallium.dbengine.lucene.searcher; package it.cavallium.dbengine.tests;
import it.cavallium.dbengine.lucene.ExponentialPageLimits; import it.cavallium.dbengine.lucene.ExponentialPageLimits;
import it.cavallium.dbengine.lucene.searcher.LocalQueryParams;
import it.cavallium.dbengine.lucene.searcher.LuceneGenerator;
import it.unimi.dsi.fastutil.longs.LongList; import it.unimi.dsi.fastutil.longs.LongList;
import java.io.IOException; import java.io.IOException;
import java.time.Duration; import java.time.Duration;
@ -80,7 +82,7 @@ public class LuceneGeneratorTest {
var reactiveGenerator = LuceneGenerator.reactive(is, localQueryParams, -1); var reactiveGenerator = LuceneGenerator.reactive(is, localQueryParams, -1);
var results = fixResults(localQueryParams.isSorted(), var results = fixResults(localQueryParams.isSorted(),
localQueryParams.needsScores(), reactiveGenerator.collectList().block()); localQueryParams.needsScores(), reactiveGenerator.toList());
Assertions.assertNotEquals(0, results.size()); Assertions.assertNotEquals(0, results.size());
@ -98,7 +100,7 @@ public class LuceneGeneratorTest {
localQueryParams.needsScores(), List.of(is.search(query, limit).scoreDocs)); localQueryParams.needsScores(), List.of(is.search(query, limit).scoreDocs));
var reactiveGenerator = LuceneGenerator.reactive(is, localQueryParams, -1); var reactiveGenerator = LuceneGenerator.reactive(is, localQueryParams, -1);
var results = fixResults(localQueryParams.isSorted(), localQueryParams.needsScores(), reactiveGenerator.collectList().block()); var results = fixResults(localQueryParams.isSorted(), localQueryParams.needsScores(), reactiveGenerator.toList());
Assertions.assertNotEquals(0, results.size()); Assertions.assertNotEquals(0, results.size());
@ -123,7 +125,7 @@ public class LuceneGeneratorTest {
), ),
-1 -1
); );
var results = reactiveGenerator.collectList().block(); var results = reactiveGenerator.toList();
Assertions.assertNotNull(results); Assertions.assertNotNull(results);
Assertions.assertEquals(0, results.size()); Assertions.assertEquals(0, results.size());
@ -144,7 +146,7 @@ public class LuceneGeneratorTest {
), ),
-1 -1
); );
var results = reactiveGenerator.collectList().block(); var results = reactiveGenerator.toList();
Assertions.assertNotNull(results); Assertions.assertNotNull(results);
Assertions.assertEquals(limit, results.size()); Assertions.assertEquals(limit, results.size());
@ -161,7 +163,7 @@ public class LuceneGeneratorTest {
var reactiveGenerator = LuceneGenerator.reactive(is, localQueryParams, -1); var reactiveGenerator = LuceneGenerator.reactive(is, localQueryParams, -1);
var results = fixResults(localQueryParams.isSorted(), var results = fixResults(localQueryParams.isSorted(),
localQueryParams.needsScores(), reactiveGenerator.collectList().block()); localQueryParams.needsScores(), reactiveGenerator.toList());
Assertions.assertEquals(4, results.size()); Assertions.assertEquals(4, results.size());
Assertions.assertEquals(expectedResults, results); Assertions.assertEquals(expectedResults, results);

View File

@ -0,0 +1,75 @@
package it.cavallium.dbengine.tests;
import static it.cavallium.dbengine.tests.DbTestUtils.MAX_IN_MEMORY_RESULT_ENTRIES;
import io.micrometer.core.instrument.simple.SimpleMeterRegistry;
import it.cavallium.data.generator.nativedata.Nullableboolean;
import it.cavallium.data.generator.nativedata.Nullabledouble;
import it.cavallium.data.generator.nativedata.Nullableint;
import it.cavallium.dbengine.tests.DbTestUtils.TempDb;
import it.cavallium.dbengine.client.DefaultDatabaseOptions;
import it.cavallium.dbengine.client.IndicizerAnalyzers;
import it.cavallium.dbengine.client.IndicizerSimilarities;
import it.cavallium.dbengine.database.ColumnUtils;
import it.cavallium.dbengine.database.memory.LLMemoryDatabaseConnection;
import it.cavallium.dbengine.lucene.LuceneHacks;
import it.cavallium.dbengine.lucene.LuceneUtils;
import it.cavallium.dbengine.lucene.analyzer.TextFieldsAnalyzer;
import it.cavallium.dbengine.lucene.analyzer.TextFieldsSimilarity;
import it.cavallium.dbengine.rpc.current.data.ByteBuffersDirectory;
import it.cavallium.dbengine.rpc.current.data.LuceneOptions;
import java.time.Duration;
import java.util.List;
import java.util.Map;
/**
 * {@link TemporaryDbGenerator} backed entirely by in-memory storage: an
 * {@code LLMemoryDatabaseConnection} for the key-value database plus
 * byte-buffer Lucene directories, so no filesystem path is involved.
 */
public class MemoryTemporaryDbGenerator implements TemporaryDbGenerator {

	// Lucene configuration shared by the single-shard and the 3-shard test indexes
	private static final LuceneOptions LUCENE_OPTS = new LuceneOptions(Map.of(),
			Duration.ofSeconds(5),
			Duration.ofSeconds(5),
			false,
			new ByteBuffersDirectory(),
			Nullableboolean.empty(),
			Nullabledouble.empty(),
			Nullableint.empty(),
			Nullableboolean.empty(),
			Nullableboolean.empty(),
			MAX_IN_MEMORY_RESULT_ENTRIES,
			LuceneUtils.getDefaultMergePolicy()
	);

	@Override
	public TempDb openTempDb() {
		var connection = new LLMemoryDatabaseConnection(new SimpleMeterRegistry());
		// The same swappable searcher instance serves both searcher roles
		SwappableLuceneSearcher swappableSearcher = new SwappableLuceneSearcher();
		var hacks = new LuceneHacks(() -> swappableSearcher, () -> swappableSearcher);
		var database = connection.getDatabase("testdb",
				List.of(ColumnUtils.dictionary("testmap"), ColumnUtils.special("ints"), ColumnUtils.special("longs")),
				DefaultDatabaseOptions.builder().build()
		);
		var singleShardIndex = connection.getLuceneIndex("testluceneindex1",
				LuceneUtils.singleStructure(),
				IndicizerAnalyzers.of(TextFieldsAnalyzer.ICUCollationKey),
				IndicizerSimilarities.of(TextFieldsSimilarity.Boolean),
				LUCENE_OPTS,
				hacks
		);
		var multiShardIndex = connection.getLuceneIndex("testluceneindex16",
				LuceneUtils.shardsStructure(3),
				IndicizerAnalyzers.of(TextFieldsAnalyzer.ICUCollationKey),
				IndicizerSimilarities.of(TextFieldsSimilarity.Boolean),
				LUCENE_OPTS,
				hacks
		);
		// No workspace directory exists for the in-memory database, hence the null path
		return new TempDb(connection, database, singleShardIndex, multiShardIndex, swappableSearcher, null);
	}

	@Override
	public void closeTempDb(TempDb db) {
		db.db().close();
	}
}

View File

@ -1,4 +1,4 @@
package it.cavallium.dbengine; package it.cavallium.dbengine.tests;
import it.cavallium.dbengine.database.DiscardingCloseable; import it.cavallium.dbengine.database.DiscardingCloseable;
import it.cavallium.dbengine.lucene.PriorityQueue; import it.cavallium.dbengine.lucene.PriorityQueue;
@ -7,8 +7,8 @@ import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Objects; import java.util.Objects;
import java.util.stream.Stream;
import org.apache.lucene.search.HitQueue; import org.apache.lucene.search.HitQueue;
import reactor.core.publisher.Flux;
public class PriorityQueueAdaptor<T> extends SimpleResource implements PriorityQueue<T>, DiscardingCloseable { public class PriorityQueueAdaptor<T> extends SimpleResource implements PriorityQueue<T>, DiscardingCloseable {
@ -61,7 +61,7 @@ public class PriorityQueueAdaptor<T> extends SimpleResource implements PriorityQ
} }
@Override @Override
public Flux<T> iterate() { public Stream<T> iterate() {
List<T> items = new ArrayList<>(hitQueue.size()); List<T> items = new ArrayList<>(hitQueue.size());
T item; T item;
while ((item = hitQueue.pop()) != null) { while ((item = hitQueue.pop()) != null) {
@ -70,7 +70,7 @@ public class PriorityQueueAdaptor<T> extends SimpleResource implements PriorityQ
for (T t : items) { for (T t : items) {
hitQueue.insertWithOverflow(t); hitQueue.insertWithOverflow(t);
} }
return Flux.fromIterable(items); return items.stream();
} }
@Override @Override

View File

@ -1,3 +1,3 @@
package it.cavallium.dbengine; package it.cavallium.dbengine.tests;
record Scored(String key, float score) {} record Scored(String key, float score) {}

View File

@ -1,4 +1,4 @@
package it.cavallium.dbengine; package it.cavallium.dbengine.tests;
import com.google.common.primitives.Ints; import com.google.common.primitives.Ints;
import com.google.common.primitives.Longs; import com.google.common.primitives.Longs;
@ -16,30 +16,27 @@ import org.apache.lucene.document.Field.Store;
import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableField;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.NotNull;
import reactor.core.publisher.Mono;
public class StringIndicizer extends Indicizer<String, String> { public class StringIndicizer extends Indicizer<String, String> {
@Override @Override
public @NotNull Mono<LLUpdateDocument> toIndexRequest(@NotNull String key, @NotNull String value) { public @NotNull LLUpdateDocument toIndexRequest(@NotNull String key, @NotNull String value) {
return Mono.fromCallable(() -> { var fields = new LinkedList<LLItem>();
var fields = new LinkedList<LLItem>(); fields.add(LLItem.newStringField("uid", key, Field.Store.YES));
fields.add(LLItem.newStringField("uid", key, Field.Store.YES)); fields.add(LLItem.newTextField("text", value, Store.NO));
fields.add(LLItem.newTextField("text", value, Store.NO)); @SuppressWarnings("UnstableApiUsage")
@SuppressWarnings("UnstableApiUsage") var numInt = Ints.tryParse(value);
var numInt = Ints.tryParse(value); if (numInt != null) {
if (numInt != null) { fields.add(LLItem.newIntPoint("intpoint", numInt));
fields.add(LLItem.newIntPoint("intpoint", numInt)); fields.add(LLItem.newNumericDocValuesField("intsort", numInt));
fields.add(LLItem.newNumericDocValuesField("intsort", numInt)); }
} @SuppressWarnings("UnstableApiUsage")
@SuppressWarnings("UnstableApiUsage") var numLong = Longs.tryParse(value);
var numLong = Longs.tryParse(value); if (numLong != null) {
if (numLong != null) { fields.add(LLItem.newLongPoint("longpoint", numLong));
fields.add(LLItem.newLongPoint("longpoint", numLong)); fields.add(LLItem.newNumericDocValuesField("longsort", numLong));
fields.add(LLItem.newNumericDocValuesField("longsort", numLong)); }
} return new LLUpdateDocument(fields);
return new LLUpdateDocument(fields);
});
} }
@Override @Override

View File

@ -1,9 +1,8 @@
package it.cavallium.dbengine; package it.cavallium.dbengine.tests;
import static java.util.Objects.requireNonNull; import static java.util.Objects.requireNonNull;
import static java.util.Objects.requireNonNullElseGet; import static java.util.Objects.requireNonNullElseGet;
import io.netty5.util.Send;
import it.cavallium.dbengine.database.disk.LLIndexSearcher; import it.cavallium.dbengine.database.disk.LLIndexSearcher;
import it.cavallium.dbengine.database.disk.LLIndexSearchers; import it.cavallium.dbengine.database.disk.LLIndexSearchers;
import it.cavallium.dbengine.lucene.searcher.GlobalQueryRewrite; import it.cavallium.dbengine.lucene.searcher.GlobalQueryRewrite;
@ -15,7 +14,6 @@ import java.io.Closeable;
import java.io.IOException; import java.io.IOException;
import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.atomic.AtomicReference;
import org.jetbrains.annotations.Nullable; import org.jetbrains.annotations.Nullable;
import reactor.core.publisher.Mono;
public class SwappableLuceneSearcher implements LocalSearcher, MultiSearcher, Closeable { public class SwappableLuceneSearcher implements LocalSearcher, MultiSearcher, Closeable {
@ -27,7 +25,7 @@ public class SwappableLuceneSearcher implements LocalSearcher, MultiSearcher, Cl
} }
@Override @Override
public Mono<LuceneSearchResult> collect(Mono<LLIndexSearcher> indexSearcherMono, public LuceneSearchResult collect(LLIndexSearcher indexSearcher,
LocalQueryParams queryParams, LocalQueryParams queryParams,
@Nullable String keyFieldName, @Nullable String keyFieldName,
GlobalQueryRewrite transformer) { GlobalQueryRewrite transformer) {
@ -36,7 +34,7 @@ public class SwappableLuceneSearcher implements LocalSearcher, MultiSearcher, Cl
single = this.multi.get(); single = this.multi.get();
} }
requireNonNull(single, "LuceneLocalSearcher not set"); requireNonNull(single, "LuceneLocalSearcher not set");
return single.collect(indexSearcherMono, queryParams, keyFieldName, transformer); return single.collect(indexSearcher, queryParams, keyFieldName, transformer);
} }
@Override @Override
@ -55,12 +53,12 @@ public class SwappableLuceneSearcher implements LocalSearcher, MultiSearcher, Cl
} }
@Override @Override
public Mono<LuceneSearchResult> collectMulti(Mono<LLIndexSearchers> indexSearchersMono, public LuceneSearchResult collectMulti(LLIndexSearchers indexSearchers,
LocalQueryParams queryParams, LocalQueryParams queryParams,
String keyFieldName, String keyFieldName,
GlobalQueryRewrite transformer) { GlobalQueryRewrite transformer) {
var multi = requireNonNull(this.multi.get(), "LuceneMultiSearcher not set"); var multi = requireNonNull(this.multi.get(), "LuceneMultiSearcher not set");
return multi.collectMulti(indexSearchersMono, queryParams, keyFieldName, transformer); return multi.collectMulti(indexSearchers, queryParams, keyFieldName, transformer);
} }
public void setSingle(LocalSearcher single) { public void setSingle(LocalSearcher single) {

View File

@ -0,0 +1,11 @@
package it.cavallium.dbengine.tests;
import it.cavallium.dbengine.tests.DbTestUtils.TempDb;
import java.io.IOException;
public interface TemporaryDbGenerator {
TempDb openTempDb() throws IOException;
void closeTempDb(TempDb db) throws IOException;
}

View File

@ -1,4 +1,4 @@
package it.cavallium.dbengine; package it.cavallium.dbengine.tests;
import it.cavallium.dbengine.lucene.DirectNIOFSDirectory; import it.cavallium.dbengine.lucene.DirectNIOFSDirectory;
import it.cavallium.dbengine.lucene.LuceneUtils; import it.cavallium.dbengine.lucene.LuceneUtils;

View File

@ -0,0 +1,46 @@
package it.cavallium.dbengine.tests;
import static it.cavallium.dbengine.tests.DbTestUtils.ensureNoLeaks;
import static it.cavallium.dbengine.tests.DbTestUtils.isCIMode;
import static it.cavallium.dbengine.tests.DbTestUtils.tempDb;
import static it.cavallium.dbengine.tests.DbTestUtils.tempDictionary;
import it.cavallium.dbengine.database.UpdateMode;
import java.io.IOException;
import java.util.Arrays;
import java.util.stream.Stream;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
public abstract class TestDictionary {
protected abstract TemporaryDbGenerator getTempDbGenerator();
private static Stream<Arguments> provideArgumentsCreate() {
return Arrays.stream(UpdateMode.values()).map(Arguments::of);
}
@BeforeEach
public void beforeEach() {
ensureNoLeaks(false, false);
}
@AfterEach
public void afterEach() {
if (!isCIMode()) {
ensureNoLeaks(true, false);
}
}
@ParameterizedTest
@MethodSource("provideArgumentsCreate")
public void testCreate(UpdateMode updateMode) throws IOException {
tempDb(getTempDbGenerator(), db -> {
tempDictionary(db, updateMode).clear();
return null;
});
}
}

View File

@ -1,42 +1,25 @@
package it.cavallium.dbengine; package it.cavallium.dbengine.tests;
import static it.cavallium.dbengine.DbTestUtils.*; import static it.cavallium.dbengine.tests.DbTestUtils.*;
import static it.cavallium.dbengine.SyncUtils.*;
import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.DbTestUtils.TestAllocator;
import it.cavallium.dbengine.database.UpdateMode; import it.cavallium.dbengine.database.UpdateMode;
import it.unimi.dsi.fastutil.objects.Object2ObjectLinkedOpenHashMap;
import it.unimi.dsi.fastutil.objects.Object2ObjectSortedMap;
import it.unimi.dsi.fastutil.objects.Object2ObjectSortedMaps; import it.unimi.dsi.fastutil.objects.Object2ObjectSortedMaps;
import java.io.IOException;
import java.util.Arrays; import java.util.Arrays;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Stream; import java.util.stream.Stream;
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource; import org.junit.jupiter.params.provider.MethodSource;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.test.StepVerifier;
import reactor.test.StepVerifier.Step;
import reactor.util.function.Tuple2;
import reactor.util.function.Tuples;
public abstract class TestDictionaryMap { public abstract class TestDictionaryMap {
private static final Logger log = LogManager.getLogger(TestDictionaryMap.class); private static final Logger log = LogManager.getLogger(TestDictionaryMap.class);
private TestAllocator allocator;
private boolean checkLeaks = true; private boolean checkLeaks = true;
private static boolean isTestBadKeysEnabled() { private static boolean isTestBadKeysEnabled() {
@ -45,6 +28,11 @@ public abstract class TestDictionaryMap {
protected abstract TemporaryDbGenerator getTempDbGenerator(); protected abstract TemporaryDbGenerator getTempDbGenerator();
record Tuple2<X, Y>(X getT1, Y getT2) {}
record Tuple3<X, Y, Z>(X getT1, Y getT2, Y getT3) {}
record Tuple4<X, Y, Z, W>(X getT1, Y getT2, Y getT3, W getT4) {}
record Tuple5<X, Y, Z, W, X1>(X getT1, Y getT2, Y getT3, W getT4, X1 getT5) {}
private static Stream<Arguments> provideArgumentsPut() { private static Stream<Arguments> provideArgumentsPut() {
var goodKeys = List.of("12345"); var goodKeys = List.of("12345");
List<String> badKeys; List<String> badKeys;
@ -54,7 +42,7 @@ public abstract class TestDictionaryMap {
badKeys = List.of(); badKeys = List.of();
} }
List<Tuple2<String, Boolean>> keys = Stream List<Tuple2<String, Boolean>> keys = Stream
.concat(goodKeys.stream().map(s -> Tuples.of(s, false)), badKeys.stream().map(s -> Tuples.of(s, true))) .concat(goodKeys.stream().map(s -> new Tuple2<>(s, false)), badKeys.stream().map(s -> new Tuple2<>(s, true)))
.toList(); .toList();
var values = isCIMode() ? List.of("val") : List.of("", "\0", BIG_STRING); var values = isCIMode() ? List.of("val") : List.of("", "\0", BIG_STRING);
@ -67,18 +55,18 @@ public abstract class TestDictionaryMap {
} else { } else {
strm = values.stream(); strm = values.stream();
} }
return strm.map(val -> Tuples.of(keyTuple.getT1(), val, keyTuple.getT2())); return strm.map(val -> new Tuple3<>(keyTuple.getT1(), val, keyTuple.getT2()));
}) })
.flatMap(entryTuple -> Arrays.stream(UpdateMode.values()).map(updateMode -> Tuples.of(updateMode, .flatMap(entryTuple -> Arrays.stream(UpdateMode.values()).map(updateMode -> new Tuple4<>(updateMode,
entryTuple.getT1(), entryTuple.getT1(),
entryTuple.getT2(), entryTuple.getT2(),
entryTuple.getT3() entryTuple.getT3()
))) )))
.flatMap(entryTuple -> Stream.of(Tuples.of(MapType.MAP, entryTuple.getT1(), .flatMap(entryTuple -> Stream.of(new Tuple5<>(MapType.MAP, entryTuple.getT1(),
entryTuple.getT2(), entryTuple.getT2(),
entryTuple.getT3(), entryTuple.getT3(),
entryTuple.getT4() entryTuple.getT4()
), Tuples.of(MapType.HASH_MAP, entryTuple.getT1(), ), new Tuple5<>(MapType.HASH_MAP, entryTuple.getT1(),
entryTuple.getT2(), entryTuple.getT2(),
entryTuple.getT3(), entryTuple.getT3(),
false false
@ -89,36 +77,33 @@ public abstract class TestDictionaryMap {
@BeforeEach @BeforeEach
public void beforeEach() { public void beforeEach() {
this.allocator = newAllocator(); ensureNoLeaks(false, false);
ensureNoLeaks(allocator.allocator(), false, false);
} }
@AfterEach @AfterEach
public void afterEach() { public void afterEach() {
if (!isCIMode() && checkLeaks) { if (!isCIMode() && checkLeaks) {
ensureNoLeaks(allocator.allocator(), true, false); ensureNoLeaks(true, false);
} }
destroyAllocator(allocator);
} }
@ParameterizedTest @ParameterizedTest
@MethodSource("provideArgumentsPut") @MethodSource("provideArgumentsPut")
public void testPut(MapType mapType, UpdateMode updateMode, String key, String value, boolean shouldFail) { public void testPut(MapType mapType, UpdateMode updateMode, String key, String value, boolean shouldFail)
throws IOException {
var gen = getTempDbGenerator(); var gen = getTempDbGenerator();
var db = run(gen.openTempDb(allocator)); var db = gen.openTempDb();
var dict = run(tempDictionary(db.db(), updateMode)); var dict = tempDictionary(db.db(), updateMode);
var map = tempDatabaseMapDictionaryMap(dict, mapType, 5); var map = tempDatabaseMapDictionaryMap(dict, mapType, 5);
runVoid(shouldFail, map.putValue(key, value)); runVoid(shouldFail, () -> map.putValue(key, value));
var resultingMapSize = run(map.leavesCount(null, false)); var resultingMapSize = map.leavesCount(null, false);
Assertions.assertEquals(shouldFail ? 0 : 1, resultingMapSize); Assertions.assertEquals(shouldFail ? 0 : 1, resultingMapSize);
var resultingMap = run(map.get(null)); var resultingMap = map.get(null);
Assertions.assertEquals(shouldFail ? null : Object2ObjectSortedMaps.singleton(key, value), resultingMap); Assertions.assertEquals(shouldFail ? null : Object2ObjectSortedMaps.singleton(key, value), resultingMap);
map.close();
//if (shouldFail) this.checkLeaks = false; //if (shouldFail) this.checkLeaks = false;
gen.closeTempDb(db); gen.closeTempDb(db);
@ -126,47 +111,43 @@ public abstract class TestDictionaryMap {
@ParameterizedTest @ParameterizedTest
@MethodSource("provideArgumentsPut") @MethodSource("provideArgumentsPut")
public void testAtSetAtGet(MapType mapType, UpdateMode updateMode, String key, String value, boolean shouldFail) { public void testAtSetAtGet(MapType mapType, UpdateMode updateMode, String key, String value, boolean shouldFail)
var stpVer = StepVerifier throws IOException {
.create(tempDb(getTempDbGenerator(), allocator, db -> tempDictionary(db, updateMode) var result = tempDb(getTempDbGenerator(), db -> {
.map(dict -> tempDatabaseMapDictionaryMap(dict, mapType, 5)) var map = tempDatabaseMapDictionaryMap(tempDictionary(db, updateMode), mapType, 5);
.flatMap(map -> Mono return run(shouldFail, () -> {
.usingWhen(map.at(null, key), v -> v.set(value), LLUtils::finalizeResource) map.at(null, key).set(value);
.then(Mono.usingWhen(map.at(null, key), v -> v.get(null), LLUtils::finalizeResource)) return map.at(null, key).get(null);
.doFinally(s -> map.close()) });
) });
));
if (shouldFail) { if (shouldFail) {
this.checkLeaks = false; this.checkLeaks = false;
stpVer.verifyError();
} else { } else {
stpVer.expectNext(value).verifyComplete(); Assertions.assertEquals(value, result);
} }
} }
@ParameterizedTest @ParameterizedTest
@MethodSource("provideArgumentsPut") @MethodSource("provideArgumentsPut")
public void testPutAndGetPrevious(MapType mapType, UpdateMode updateMode, String key, String value, boolean shouldFail) { public void testPutAndGetPrevious(MapType mapType, UpdateMode updateMode, String key, String value, boolean shouldFail)
var stpVer = StepVerifier throws IOException {
.create(tempDb(getTempDbGenerator(), allocator, db -> tempDictionary(db, updateMode) var result = tempDb(getTempDbGenerator(), db -> {
.map(dict -> tempDatabaseMapDictionaryMap(dict, mapType, 5)) var map = tempDatabaseMapDictionaryMap(tempDictionary(db, updateMode), mapType, 5);
.flatMapMany(map -> Flux return run(shouldFail,
.concat( () -> Arrays.asList(map.putValueAndGetPrevious(key, "error?"),
map.putValueAndGetPrevious(key, "error?"), map.putValueAndGetPrevious(key, value),
map.putValueAndGetPrevious(key, value), map.putValueAndGetPrevious(key, value)
map.putValueAndGetPrevious(key, value) )
) );
.doFinally(s -> map.close()) });
)
));
if (shouldFail) { if (shouldFail) {
this.checkLeaks = false; this.checkLeaks = false;
stpVer.verifyError();
} else { } else {
stpVer.expectNext("error?").expectNext(value).verifyComplete(); Assertions.assertArrayEquals(new String[] {null, "error?", value}, result.toArray(String[]::new));
} }
} }
/*
@ParameterizedTest @ParameterizedTest
@MethodSource("provideArgumentsPut") @MethodSource("provideArgumentsPut")
public void testPutValueRemoveAndGetPrevious(MapType mapType, UpdateMode updateMode, String key, String value, boolean shouldFail) { public void testPutValueRemoveAndGetPrevious(MapType mapType, UpdateMode updateMode, String key, String value, boolean shouldFail) {
@ -370,7 +351,7 @@ public abstract class TestDictionaryMap {
badKeys = List.of(); badKeys = List.of();
} }
List<Tuple2<List<String>, Boolean>> keys = Stream List<Tuple2<List<String>, Boolean>> keys = Stream
.concat(goodKeys.stream().map(s -> Tuples.of(s, false)), badKeys.stream().map(s -> Tuples.of(s, true))) .concat(goodKeys.stream().map(s -> new Tuple2<>(s, false)), badKeys.stream().map(s -> new Tuple2<>(s, true)))
.toList(); .toList();
var values = isCIMode() ? List.of("val") : List.of("", "\0", BIG_STRING); var values = isCIMode() ? List.of("val") : List.of("", "\0", BIG_STRING);
@ -381,14 +362,14 @@ public abstract class TestDictionaryMap {
.collectMap(Tuple2::getT1, Tuple2::getT2, Object2ObjectLinkedOpenHashMap::new) .collectMap(Tuple2::getT1, Tuple2::getT2, Object2ObjectLinkedOpenHashMap::new)
.block() .block()
)) ))
.flatMap(entryTuple -> Arrays.stream(UpdateMode.values()).map(updateMode -> Tuples.of(updateMode, .flatMap(entryTuple -> Arrays.stream(UpdateMode.values()).map(updateMode -> new Tuple2<>(updateMode,
entryTuple.getT1(), entryTuple.getT1(),
entryTuple.getT2() entryTuple.getT2()
))) )))
.flatMap(entryTuple -> Stream.of(Tuples.of(MapType.MAP, entryTuple.getT1(), .flatMap(entryTuple -> Stream.of(new Tuple2<>(MapType.MAP, entryTuple.getT1(),
entryTuple.getT2(), entryTuple.getT2(),
entryTuple.getT3() entryTuple.getT3()
), Tuples.of(MapType.HASH_MAP, entryTuple.getT1(), ), new Tuple2<>(MapType.HASH_MAP, entryTuple.getT1(),
entryTuple.getT2(), entryTuple.getT2(),
false false
))) )))
@ -761,4 +742,5 @@ public abstract class TestDictionaryMap {
Assertions.assertEquals(true, result.get(2)); Assertions.assertEquals(true, result.get(2));
} }
*/
} }

View File

@ -1,29 +1,22 @@
package it.cavallium.dbengine; package it.cavallium.dbengine.tests;
import static it.cavallium.dbengine.DbTestUtils.BIG_STRING; import static it.cavallium.dbengine.tests.DbTestUtils.BIG_STRING;
import static it.cavallium.dbengine.DbTestUtils.ensureNoLeaks; import static it.cavallium.dbengine.tests.DbTestUtils.ensureNoLeaks;
import static it.cavallium.dbengine.DbTestUtils.isCIMode; import static it.cavallium.dbengine.tests.DbTestUtils.isCIMode;
import static it.cavallium.dbengine.DbTestUtils.newAllocator; import static it.cavallium.dbengine.tests.DbTestUtils.run;
import static it.cavallium.dbengine.DbTestUtils.destroyAllocator; import static it.cavallium.dbengine.tests.DbTestUtils.runVoid;
import static it.cavallium.dbengine.DbTestUtils.tempDatabaseMapDictionaryDeepMap; import static it.cavallium.dbengine.tests.DbTestUtils.tempDatabaseMapDictionaryDeepMap;
import static it.cavallium.dbengine.DbTestUtils.tempDb; import static it.cavallium.dbengine.tests.DbTestUtils.tempDictionary;
import static it.cavallium.dbengine.DbTestUtils.tempDictionary;
import static it.cavallium.dbengine.SyncUtils.*;
import static org.assertj.core.api.Assertions.*;
import io.netty5.buffer.internal.ResourceSupport; import com.google.common.collect.Streams;
import it.cavallium.dbengine.DbTestUtils.TestAllocator;
import it.cavallium.dbengine.database.UpdateMode; import it.cavallium.dbengine.database.UpdateMode;
import it.cavallium.dbengine.utils.SimpleResource;
import it.unimi.dsi.fastutil.objects.Object2ObjectLinkedOpenHashMap; import it.unimi.dsi.fastutil.objects.Object2ObjectLinkedOpenHashMap;
import it.unimi.dsi.fastutil.objects.Object2ObjectSortedMap; import it.unimi.dsi.fastutil.objects.Object2ObjectSortedMap;
import java.io.IOException;
import java.util.Arrays; import java.util.Arrays;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Set; import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import java.util.stream.Stream; import java.util.stream.Stream;
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.LogManager;
@ -36,20 +29,11 @@ import org.junit.jupiter.api.TestMethodOrder;
import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource; import org.junit.jupiter.params.provider.MethodSource;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.test.StepVerifier;
import reactor.test.StepVerifier.Step;
import reactor.util.function.Tuple2;
import reactor.util.function.Tuple3;
import reactor.util.function.Tuple4;
import reactor.util.function.Tuples;
@TestMethodOrder(MethodOrderer.MethodName.class) @TestMethodOrder(MethodOrderer.MethodName.class)
public abstract class TestDictionaryMapDeep { public abstract class TestDictionaryMapDeep {
private final Logger log = LogManager.getLogger(this.getClass()); private final Logger log = LogManager.getLogger(this.getClass());
private TestAllocator allocator;
private boolean checkLeaks = true; private boolean checkLeaks = true;
private static boolean isTestBadKeysEnabled() { private static boolean isTestBadKeysEnabled() {
@ -58,6 +42,11 @@ public abstract class TestDictionaryMapDeep {
protected abstract TemporaryDbGenerator getTempDbGenerator(); protected abstract TemporaryDbGenerator getTempDbGenerator();
record Tuple2<X, Y>(X getT1, Y getT2) {}
record Tuple3<X, Y, Z>(X getT1, Y getT2, Y getT3) {}
record Tuple4<X, Y, Z, W>(X getT1, Y getT2, Y getT3, W getT4) {}
record Tuple5<X, Y, Z, W, X1>(X getT1, Y getT2, Y getT3, W getT4, X1 getT5) {}
private static Stream<Arguments> provideArgumentsSet() { private static Stream<Arguments> provideArgumentsSet() {
var goodKeys = Set.of("12345"); var goodKeys = Set.of("12345");
Set<String> badKeys; Set<String> badKeys;
@ -67,8 +56,8 @@ public abstract class TestDictionaryMapDeep {
badKeys = Set.of(); badKeys = Set.of();
} }
Set<Tuple2<String, Boolean>> keys = Stream.concat( Set<Tuple2<String, Boolean>> keys = Stream.concat(
goodKeys.stream().map(s -> Tuples.of(s, false)), goodKeys.stream().map(s -> new Tuple2<>(s, false)),
badKeys.stream().map(s -> Tuples.of(s, true)) badKeys.stream().map(s -> new Tuple2<>(s, true))
).collect(Collectors.toSet()); ).collect(Collectors.toSet());
var values = Set.of( var values = Set.of(
new Object2ObjectLinkedOpenHashMap<>(Map.of("123456", "a", "234567", "")), new Object2ObjectLinkedOpenHashMap<>(Map.of("123456", "a", "234567", "")),
@ -84,9 +73,9 @@ public abstract class TestDictionaryMapDeep {
} else { } else {
strm = values.stream(); strm = values.stream();
} }
return strm.map(val -> Tuples.of(keyTuple.getT1(), val, keyTuple.getT2())); return strm.map(val -> new Tuple3<>(keyTuple.getT1(), val, keyTuple.getT2()));
}) })
.flatMap(entryTuple -> Arrays.stream(UpdateMode.values()).map(updateMode -> Tuples.of(updateMode, .flatMap(entryTuple -> Arrays.stream(UpdateMode.values()).map(updateMode -> new Tuple4<>(updateMode,
entryTuple.getT1(), entryTuple.getT1(),
entryTuple.getT2(), entryTuple.getT2(),
entryTuple.getT3() entryTuple.getT3()
@ -112,43 +101,38 @@ public abstract class TestDictionaryMapDeep {
var values = isCIMode() ? List.of("val") : List.of("a", "", "\0", "\0\0", "z", "azzszgzczqz", BIG_STRING); var values = isCIMode() ? List.of("val") : List.of("a", "", "\0", "\0\0", "z", "azzszgzczqz", BIG_STRING);
Flux<Tuple4<String, String, String, Boolean>> failOnKeys1 = Flux Stream<Tuple4<String, String, String, Boolean>> failOnKeys1 = badKeys1.stream()
.fromIterable(badKeys1) .map(badKey1 -> new Tuple4<>(
.map(badKey1 -> Tuples.of(
badKey1, badKey1,
goodKeys2.stream().findFirst().orElseThrow(), goodKeys2.stream().findFirst().orElseThrow(),
values.stream().findFirst().orElseThrow(), values.stream().findFirst().orElseThrow(),
true true
)); ));
Flux<Tuple4<String, String, String, Boolean>> failOnKeys2 = Flux Stream<Tuple4<String, String, String, Boolean>> failOnKeys2 = badKeys2.stream()
.fromIterable(badKeys2) .map(badKey2 -> new Tuple4<>(
.map(badKey2 -> Tuples.of(
goodKeys1.stream().findFirst().orElseThrow(), goodKeys1.stream().findFirst().orElseThrow(),
badKey2, badKey2,
values.stream().findFirst().orElseThrow(), values.stream().findFirst().orElseThrow(),
true true
)); ));
Flux<Tuple4<String, String, String, Boolean>> goodKeys1And2 = Flux Stream<Tuple4<String, String, String, Boolean>> goodKeys1And2 = values.stream()
.fromIterable(values) .map(value -> new Tuple4<>(
.map(value -> Tuples.of(
goodKeys1.stream().findFirst().orElseThrow(), goodKeys1.stream().findFirst().orElseThrow(),
goodKeys2.stream().findFirst().orElseThrow(), goodKeys2.stream().findFirst().orElseThrow(),
value, value,
false false
)); ));
Flux<Tuple4<String, String, String, Boolean>> keys1And2 = Flux Stream<Tuple4<String, String, String, Boolean>> keys1And2 = Streams.concat(
.concat(
goodKeys1And2, goodKeys1And2,
failOnKeys1, failOnKeys1,
failOnKeys2 failOnKeys2
); );
return keys1And2 return keys1And2
.concatMap(entryTuple -> Flux .flatMap(entryTuple -> Stream.of(UpdateMode.values())
.fromArray(UpdateMode.values()) .map(updateMode -> new Tuple5<>(updateMode,
.map(updateMode -> Tuples.of(updateMode,
entryTuple.getT1(), entryTuple.getT1(),
entryTuple.getT2(), entryTuple.getT2(),
entryTuple.getT3(), entryTuple.getT3(),
@ -161,22 +145,19 @@ public abstract class TestDictionaryMapDeep {
fullTuple.getT4(), fullTuple.getT4(),
fullTuple.getT5() fullTuple.getT5()
)) ))
.toStream()
.sequential(); .sequential();
} }
@BeforeEach @BeforeEach
public void beforeEach() { public void beforeEach() {
this.allocator = newAllocator(); ensureNoLeaks(false, false);
ensureNoLeaks(allocator.allocator(), false, false);
} }
@AfterEach @AfterEach
public void afterEach() { public void afterEach() {
if (!isCIMode() && checkLeaks) { if (!isCIMode() && checkLeaks) {
ensureNoLeaks(allocator.allocator(), true, false); ensureNoLeaks(true, false);
} }
destroyAllocator(allocator);
} }
@ParameterizedTest @ParameterizedTest
@ -184,25 +165,21 @@ public abstract class TestDictionaryMapDeep {
public void testPutValue(UpdateMode updateMode, public void testPutValue(UpdateMode updateMode,
String key, String key,
Object2ObjectSortedMap<String, String> value, Object2ObjectSortedMap<String, String> value,
boolean shouldFail) { boolean shouldFail) throws IOException {
var gen = getTempDbGenerator(); var gen = getTempDbGenerator();
var db = run(gen.openTempDb(allocator)); var db = gen.openTempDb();
var dict = run(tempDictionary(db.db(), updateMode)); var dict = tempDictionary(db.db(), updateMode);
var map = tempDatabaseMapDictionaryDeepMap(dict, 5, 6); var map = tempDatabaseMapDictionaryDeepMap(dict, 5, 6);
log.debug("Put \"{}\" = \"{}\"", key, value); log.debug("Put \"{}\" = \"{}\"", key, value);
runVoid(shouldFail, map.putValue(key, value)); runVoid(shouldFail, () -> map.putValue(key, value));
var resultingMapSize = run(map.leavesCount(null, false)); var resultingMapSize = map.leavesCount(null, false);
Assertions.assertEquals(shouldFail ? 0 : value.size(), resultingMapSize); Assertions.assertEquals(shouldFail ? 0 : value.size(), resultingMapSize);
var resultingMap = run(map.get(null)); var resultingMap = map.get(null);
Assertions.assertEquals(shouldFail ? null : Map.of(key, value), resultingMap); Assertions.assertEquals(shouldFail ? null : Map.of(key, value), resultingMap);
map.close();
//if (shouldFail) this.checkLeaks = false;
gen.closeTempDb(db); gen.closeTempDb(db);
} }
@ -211,27 +188,23 @@ public abstract class TestDictionaryMapDeep {
public void testGetValue(UpdateMode updateMode, public void testGetValue(UpdateMode updateMode,
String key, String key,
Object2ObjectSortedMap<String, String> value, Object2ObjectSortedMap<String, String> value,
boolean shouldFail) { boolean shouldFail) throws IOException {
var gen = getTempDbGenerator(); var gen = getTempDbGenerator();
var db = run(gen.openTempDb(allocator)); var db = gen.openTempDb();
var dict = run(tempDictionary(db.db(), updateMode)); var dict = tempDictionary(db.db(), updateMode);
var map = tempDatabaseMapDictionaryDeepMap(dict, 5, 6); var map = tempDatabaseMapDictionaryDeepMap(dict, 5, 6);
log.debug("Put \"{}\" = \"{}\"", key, value); log.debug("Put \"{}\" = \"{}\"", key, value);
runVoid(shouldFail, map.putValue(key, value)); runVoid(shouldFail, () -> map.putValue(key, value));
log.debug("Get \"{}\"", key); log.debug("Get \"{}\"", key);
var returnedValue = run(shouldFail, map.getValue(null, key)); var returnedValue = run(shouldFail, () -> map.getValue(null, key));
Assertions.assertEquals(shouldFail ? null : value, returnedValue); Assertions.assertEquals(shouldFail ? null : value, returnedValue);
map.close();
//if (shouldFail) this.checkLeaks = false;
gen.closeTempDb(db); gen.closeTempDb(db);
} }
/*
@ParameterizedTest @ParameterizedTest
@MethodSource("provideArgumentsSet") @MethodSource("provideArgumentsSet")
public void testSetValueGetAllValues(UpdateMode updateMode, String key, Object2ObjectSortedMap<String, String> value, public void testSetValueGetAllValues(UpdateMode updateMode, String key, Object2ObjectSortedMap<String, String> value,
@ -284,7 +257,7 @@ public abstract class TestDictionaryMapDeep {
.getAllStages(null, false) .getAllStages(null, false)
.flatMap(v -> v.getValue() .flatMap(v -> v.getValue()
.getAllValues(null, false) .getAllValues(null, false)
.map(result -> Tuples.of(v.getKey(), result.getKey(), result.getValue())) .map(result -> new Tuple2<>(v.getKey(), result.getKey(), result.getValue()))
.doFinally(s -> v.getValue().close()) .doFinally(s -> v.getValue().close())
) )
), ),
@ -295,9 +268,9 @@ public abstract class TestDictionaryMapDeep {
this.checkLeaks = false; this.checkLeaks = false;
stpVer.verifyError(); stpVer.verifyError();
} else { } else {
value.forEach((k, v) -> remainingEntries.add(Tuples.of(key, k, v))); value.forEach((k, v) -> remainingEntries.add(new Tuple2<>(key, k, v)));
remainingEntries.add(Tuples.of("capra", "normal", "123")); remainingEntries.add(new Tuple2<>("capra", "normal", "123"));
remainingEntries.add(Tuples.of("capra", "ormaln", "456")); remainingEntries.add(new Tuple2<>("capra", "ormaln", "456"));
for (Tuple3<String, String, String> ignored : remainingEntries) { for (Tuple3<String, String, String> ignored : remainingEntries) {
stpVer = stpVer.expectNextMatches(remainingEntries::remove); stpVer = stpVer.expectNextMatches(remainingEntries::remove);
} }
@ -771,7 +744,7 @@ public abstract class TestDictionaryMapDeep {
badKeys = List.of(); badKeys = List.of();
} }
List<Tuple2<List<String>, Boolean>> keys = Stream List<Tuple2<List<String>, Boolean>> keys = Stream
.concat(goodKeys.stream().map(s -> Tuples.of(s, false)), badKeys.stream().map(s -> Tuples.of(s, true))) .concat(goodKeys.stream().map(s -> new Tuple2<>(s, false)), badKeys.stream().map(s -> new Tuple2<>(s, true)))
.toList(); .toList();
var values = isCIMode() ? List.of(new Object2ObjectLinkedOpenHashMap<>(Map.of("123456", "val"))) : List.of( var values = isCIMode() ? List.of(new Object2ObjectLinkedOpenHashMap<>(Map.of("123456", "val"))) : List.of(
new Object2ObjectLinkedOpenHashMap<>(Map.of("123456", "a", "234567", "")), new Object2ObjectLinkedOpenHashMap<>(Map.of("123456", "a", "234567", "")),
@ -785,7 +758,7 @@ public abstract class TestDictionaryMapDeep {
.collectMap(Tuple2::getT1, Tuple2::getT2, Object2ObjectLinkedOpenHashMap::new) .collectMap(Tuple2::getT1, Tuple2::getT2, Object2ObjectLinkedOpenHashMap::new)
.block() .block()
)) ))
.flatMap(entryTuple -> Arrays.stream(UpdateMode.values()).map(updateMode -> Tuples.of(updateMode, .flatMap(entryTuple -> Arrays.stream(UpdateMode.values()).map(updateMode -> new Tuple2<>(updateMode,
entryTuple.getT1(), entryTuple.getT1(),
entryTuple.getT2() entryTuple.getT2()
))) )))
@ -1142,4 +1115,6 @@ public abstract class TestDictionaryMapDeep {
stpVer.expectNext(true, entries.isEmpty(), true).verifyComplete(); stpVer.expectNext(true, entries.isEmpty(), true).verifyComplete();
} }
} }
*/
} }

View File

@ -1,32 +1,19 @@
package it.cavallium.dbengine; package it.cavallium.dbengine.tests;
import static it.cavallium.dbengine.DbTestUtils.BIG_STRING; import static it.cavallium.dbengine.tests.DbTestUtils.BIG_STRING;
import static it.cavallium.dbengine.DbTestUtils.destroyAllocator; import static it.cavallium.dbengine.tests.DbTestUtils.ensureNoLeaks;
import static it.cavallium.dbengine.DbTestUtils.ensureNoLeaks; import static it.cavallium.dbengine.tests.DbTestUtils.isCIMode;
import static it.cavallium.dbengine.DbTestUtils.isCIMode; import static it.cavallium.dbengine.tests.DbTestUtils.tempDictionary;
import static it.cavallium.dbengine.DbTestUtils.newAllocator;
import static it.cavallium.dbengine.DbTestUtils.tempDatabaseMapDictionaryDeepMapHashMap;
import static it.cavallium.dbengine.DbTestUtils.tempDb;
import static it.cavallium.dbengine.DbTestUtils.tempDictionary;
import it.cavallium.dbengine.DbTestUtils.TestAllocator; import com.google.common.collect.Streams;
import it.cavallium.dbengine.database.UpdateMode; import it.cavallium.dbengine.database.UpdateMode;
import java.util.List; import java.util.List;
import java.util.Map.Entry;
import java.util.stream.Stream; import java.util.stream.Stream;
import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import reactor.core.publisher.Flux;
import reactor.test.StepVerifier;
import reactor.util.function.Tuple4;
import reactor.util.function.Tuples;
public abstract class TestDictionaryMapDeepHashMap { public abstract class TestDictionaryMapDeepHashMap {
private TestAllocator allocator;
private boolean checkLeaks = true; private boolean checkLeaks = true;
private static boolean isTestBadKeysEnabled() { private static boolean isTestBadKeysEnabled() {
@ -35,6 +22,11 @@ public abstract class TestDictionaryMapDeepHashMap {
protected abstract TemporaryDbGenerator getTempDbGenerator(); protected abstract TemporaryDbGenerator getTempDbGenerator();
record Tuple2<X, Y>(X getT1, Y getT2) {}
record Tuple3<X, Y, Z>(X getT1, Y getT2, Y getT3) {}
record Tuple4<X, Y, Z, W>(X getT1, Y getT2, Y getT3, W getT4) {}
record Tuple5<X, Y, Z, W, X1>(X getT1, Y getT2, Y getT3, W getT4, X1 getT5) {}
private static Stream<Arguments> provideArgumentsPut() { private static Stream<Arguments> provideArgumentsPut() {
var goodKeys1 = isCIMode() ? List.of("12345") : List.of("12345", "zebra"); var goodKeys1 = isCIMode() ? List.of("12345") : List.of("12345", "zebra");
List<String> badKeys1; List<String> badKeys1;
@ -47,34 +39,31 @@ public abstract class TestDictionaryMapDeepHashMap {
var values = isCIMode() ? List.of("val") : List.of("a", "", "\0", "\0\0", "z", "azzszgzczqz", BIG_STRING); var values = isCIMode() ? List.of("val") : List.of("a", "", "\0", "\0\0", "z", "azzszgzczqz", BIG_STRING);
Flux<Tuple4<String, String, String, Boolean>> failOnKeys1 = Flux Stream<Tuple4<String, String, String, Boolean>> failOnKeys1 = badKeys1.stream()
.fromIterable(badKeys1) .map(badKey1 -> new Tuple4<>(
.map(badKey1 -> Tuples.of(
badKey1, badKey1,
goodKeys2.stream().findAny().orElseThrow(), goodKeys2.stream().findAny().orElseThrow(),
values.stream().findAny().orElseThrow(), values.stream().findAny().orElseThrow(),
true true
)); ));
Flux<Tuple4<String, String, String, Boolean>> goodKeys1And2 = Flux Stream<Tuple4<String, String, String, Boolean>> goodKeys1And2 = values.stream()
.fromIterable(values) .map(value -> new Tuple4<>(
.map(value -> Tuples.of(
goodKeys1.stream().findAny().orElseThrow(), goodKeys1.stream().findAny().orElseThrow(),
goodKeys2.stream().findAny().orElseThrow(), goodKeys2.stream().findAny().orElseThrow(),
value, value,
false false
)); ));
Flux<Tuple4<String, String, String, Boolean>> keys1And2 = Flux Stream<Tuple4<String, String, String, Boolean>> keys1And2 = Streams
.concat( .concat(
goodKeys1And2, goodKeys1And2,
failOnKeys1 failOnKeys1
); );
return keys1And2 return keys1And2
.flatMap(entryTuple -> Flux .flatMap(entryTuple -> Stream.of(UpdateMode.values())
.fromArray(UpdateMode.values()) .map(updateMode -> new Tuple5<>(updateMode,
.map(updateMode -> Tuples.of(updateMode,
entryTuple.getT1(), entryTuple.getT1(),
entryTuple.getT2(), entryTuple.getT2(),
entryTuple.getT3(), entryTuple.getT3(),
@ -86,24 +75,22 @@ public abstract class TestDictionaryMapDeepHashMap {
fullTuple.getT3(), fullTuple.getT3(),
fullTuple.getT4(), fullTuple.getT4(),
fullTuple.getT1() != UpdateMode.ALLOW || fullTuple.getT5() fullTuple.getT1() != UpdateMode.ALLOW || fullTuple.getT5()
)) ));
.toStream();
} }
@BeforeEach @BeforeEach
public void beforeEach() { public void beforeEach() {
this.allocator = newAllocator(); ensureNoLeaks(false, false);
ensureNoLeaks(allocator.allocator(), false, false);
} }
@AfterEach @AfterEach
public void afterEach() { public void afterEach() {
if (!isCIMode() && checkLeaks) { if (!isCIMode() && checkLeaks) {
ensureNoLeaks(allocator.allocator(), true, false); ensureNoLeaks(true, false);
} }
destroyAllocator(allocator);
} }
/*
@ParameterizedTest @ParameterizedTest
@MethodSource("provideArgumentsPut") @MethodSource("provideArgumentsPut")
public void testAtPutValueGetAllValues(UpdateMode updateMode, String key1, String key2, String value, boolean shouldFail) { public void testAtPutValueGetAllValues(UpdateMode updateMode, String key1, String key2, String value, boolean shouldFail) {
@ -129,4 +116,6 @@ public abstract class TestDictionaryMapDeepHashMap {
} }
} }
*/
} }

View File

@ -1,8 +1,7 @@
package it.cavallium.dbengine; package it.cavallium.dbengine.tests;
import static java.util.Map.entry; import static java.util.Map.entry;
import io.netty5.buffer.BufferAllocator;
import it.cavallium.dbengine.database.disk.KeyMayExistGetter; import it.cavallium.dbengine.database.disk.KeyMayExistGetter;
import it.unimi.dsi.fastutil.bytes.ByteList; import it.unimi.dsi.fastutil.bytes.ByteList;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
@ -41,7 +40,7 @@ public class TestGetter {
return new String(bytes.toByteArray(), StandardCharsets.UTF_8); return new String(bytes.toByteArray(), StandardCharsets.UTF_8);
} }
public KeyMayExistGetter getter = new KeyMayExistGetter(BufferAllocator.offHeapUnpooled(), true) { public KeyMayExistGetter getter = new KeyMayExistGetter() {
@Override @Override
protected KeyMayExist keyMayExist(ReadOptions readOptions, ByteBuffer key, ByteBuffer value) { protected KeyMayExist keyMayExist(ReadOptions readOptions, ByteBuffer key, ByteBuffer value) {
return null; return null;

View File

@ -0,0 +1,247 @@
package it.cavallium.dbengine.tests;
import static it.cavallium.dbengine.tests.DbTestUtils.ensureNoLeaks;
import static it.cavallium.dbengine.tests.DbTestUtils.runVoid;
import static org.junit.jupiter.api.Assertions.assertEquals;
import it.cavallium.dbengine.tests.DbTestUtils.TempDb;
import it.cavallium.dbengine.buffers.Buf;
import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.LLDictionaryResultType;
import it.cavallium.dbengine.database.LLKeyValueDatabase;
import it.cavallium.dbengine.database.LLRange;
import it.cavallium.dbengine.database.UpdateMode;
import it.cavallium.dbengine.database.UpdateReturnMode;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Objects;
import java.util.stream.Stream;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
public abstract class TestLLDictionary {
private final Logger log = LogManager.getLogger(this.getClass());
private static final LLRange RANGE_ALL = LLRange.all();
private TempDb tempDb;
private LLKeyValueDatabase db;
protected abstract TemporaryDbGenerator getTempDbGenerator();
@BeforeEach
public void beforeEach() throws IOException {
ensureNoLeaks(false, false);
tempDb = Objects.requireNonNull(getTempDbGenerator().openTempDb(), "TempDB");
db = tempDb.db();
}
@AfterEach
public void afterEach() throws IOException {
getTempDbGenerator().closeTempDb(tempDb);
ensureNoLeaks(true, false);
}
public static Stream<Arguments> provideArguments() {
return Arrays.stream(UpdateMode.values()).map(Arguments::of);
}
public static Stream<Arguments> providePutArguments() {
var updateModes = Arrays.stream(UpdateMode.values());
return updateModes.flatMap(updateMode -> {
var resultTypes = Arrays.stream(LLDictionaryResultType.values());
return resultTypes.map(resultType -> Arguments.of(updateMode, resultType));
});
}
public static Stream<Arguments> provideUpdateArguments() {
var updateModes = Arrays.stream(UpdateMode.values());
return updateModes.flatMap(updateMode -> {
var resultTypes = Arrays.stream(UpdateReturnMode.values());
return resultTypes.map(resultType -> Arguments.of(updateMode, resultType));
});
}
private LLDictionary getDict(UpdateMode updateMode) {
var dict = DbTestUtils.tempDictionary(db, updateMode);
var key1 = fromString("test-key-1");
var key2 = fromString("test-key-2");
var key3 = fromString("test-key-3");
var key4 = fromString("test-key-4");
var value = fromString("test-value");
dict.put(key1, value, LLDictionaryResultType.VOID);
dict.put(key2, value, LLDictionaryResultType.VOID);
dict.put(key3, value, LLDictionaryResultType.VOID);
dict.put(key4, value, LLDictionaryResultType.VOID);
return dict;
}
private Buf fromString(String s) {
var sb = s.getBytes(StandardCharsets.UTF_8);
Buf b = Buf.create(sb.length + 3 + 13);
b.addElements(0, sb);
assert b.size() == sb.length;
return b;
}
private String toString(Buf bb) {
return bb != null ? bb.toString(StandardCharsets.UTF_8) : null;
}
@Test
public void testNoOp() {
}
@Test
public void testNoOpAllocation() {
for (int i = 0; i < 10; i++) {
var a = Buf.create(i * 512);
}
}
@ParameterizedTest
@MethodSource("provideArguments")
public void testGetDict(UpdateMode updateMode) {
var dict = getDict(updateMode);
Assertions.assertNotNull(dict);
}
@ParameterizedTest
@MethodSource("provideArguments")
public void testGetColumnName(UpdateMode updateMode) {
var dict = getDict(updateMode);
Assertions.assertEquals("hash_map_testmap", dict.getColumnName());
}
@ParameterizedTest
@MethodSource("provideArguments")
public void testGet(UpdateMode updateMode) {
var dict = getDict(updateMode);
var keyEx = fromString("test-key-1");
var keyNonEx = fromString("test-nonexistent");
Assertions.assertEquals("test-value", toString(dict.get(null, keyEx)));
Assertions.assertEquals("test-value", toString(dict.get(null, keyEx)));
Assertions.assertEquals("test-value", toString(dict.get(null, keyEx)));
Assertions.assertEquals((String) null, toString(dict.get(null, keyNonEx)));
Assertions.assertEquals((String) null, toString(dict.get(null, keyNonEx)));
Assertions.assertEquals((String) null, toString(dict.get(null, keyNonEx)));
}
@ParameterizedTest
@MethodSource("providePutArguments")
public void testPutExisting(UpdateMode updateMode, LLDictionaryResultType resultType) {
var dict = getDict(updateMode);
var keyEx = fromString("test-key-1");
var value = fromString("test-value");
var beforeSize = dict.sizeRange(null, RANGE_ALL, false);
dict.put(keyEx, value, resultType);
var afterSize = dict.sizeRange(null, RANGE_ALL, false);
Assertions.assertEquals(0, afterSize - beforeSize);
}
@ParameterizedTest
@MethodSource("providePutArguments")
public void testPutNew(UpdateMode updateMode, LLDictionaryResultType resultType) {
var dict = getDict(updateMode);
var keyNonEx = fromString("test-nonexistent");
var value = fromString("test-value");
var beforeSize = dict.sizeRange(null, RANGE_ALL, false);
dict.put(keyNonEx, value, resultType);
var afterSize = dict.sizeRange(null, LLRange.all(), false);
Assertions.assertEquals(1, afterSize - beforeSize);
Assertions.assertTrue(dict.getRangeKeys(null, RANGE_ALL, false, false).map(this::toString).toList().contains("test-nonexistent"));
Assertions.assertTrue(dict.getRangeKeys(null, RANGE_ALL, true, false).map(this::toString).toList().contains("test-nonexistent"));
}
@ParameterizedTest
@MethodSource("provideArguments")
public void testGetUpdateMode(UpdateMode updateMode) {
var dict = getDict(updateMode);
assertEquals(updateMode, dict.getUpdateMode());
}
@ParameterizedTest
@MethodSource("provideUpdateArguments")
public void testUpdateExisting(UpdateMode updateMode, UpdateReturnMode updateReturnMode) {
var dict = getDict(updateMode);
var keyEx = fromString("test-key-1");
var beforeSize = dict.sizeRange(null, RANGE_ALL, false);
long afterSize;
runVoid(updateMode == UpdateMode.DISALLOW, () -> dict.update(keyEx, old -> fromString("test-value"), updateReturnMode));
afterSize = dict.sizeRange(null, RANGE_ALL, false);
assertEquals(0, afterSize - beforeSize);
runVoid(updateMode == UpdateMode.DISALLOW, () -> dict.update(keyEx, old -> fromString("test-value"), updateReturnMode));
afterSize = dict.sizeRange(null, RANGE_ALL, false);
assertEquals(0, afterSize - beforeSize);
runVoid(updateMode == UpdateMode.DISALLOW, () -> dict.update(keyEx, old -> fromString("test-value"), updateReturnMode));
afterSize = dict.sizeRange(null, RANGE_ALL, false);
assertEquals(0, afterSize - beforeSize);
}
@ParameterizedTest
@MethodSource("provideUpdateArguments")
public void testUpdateNew(UpdateMode updateMode, UpdateReturnMode updateReturnMode) {
int expected = updateMode == UpdateMode.DISALLOW ? 0 : 1;
var dict = getDict(updateMode);
var keyNonEx = fromString("test-nonexistent");
var beforeSize = dict.sizeRange(null, RANGE_ALL, false);
long afterSize;
runVoid(updateMode == UpdateMode.DISALLOW, () -> dict.update(keyNonEx, old -> fromString("test-value"), updateReturnMode));
afterSize = dict.sizeRange(null, RANGE_ALL, false);
assertEquals(expected, afterSize - beforeSize);
runVoid(updateMode == UpdateMode.DISALLOW, () -> dict.update(keyNonEx, old -> fromString("test-value"), updateReturnMode));
afterSize = dict.sizeRange(null, RANGE_ALL, false);
assertEquals(expected, afterSize - beforeSize);
runVoid(updateMode == UpdateMode.DISALLOW, () -> dict.update(keyNonEx, old -> fromString("test-value"), updateReturnMode));
afterSize = dict.sizeRange(null, RANGE_ALL, false);
assertEquals(expected, afterSize - beforeSize);
if (updateMode != UpdateMode.DISALLOW) {
Assertions.assertTrue(dict
.getRangeKeys(null, RANGE_ALL, false, false)
.map(this::toString)
.toList()
.contains("test-nonexistent"));
Assertions.assertTrue(dict
.getRangeKeys(null, RANGE_ALL, true, false)
.map(this::toString)
.toList()
.contains("test-nonexistent"));
}
}
@ParameterizedTest
@MethodSource("provideArguments")
public void testUpdateAndGetDelta(UpdateMode updateMode) {
log.warn("Test not implemented");
//todo: implement
}
@ParameterizedTest
@MethodSource("provideArguments")
public void testClear(UpdateMode updateMode) {
log.warn("Test not implemented");
//todo: implement
}
@ParameterizedTest
@MethodSource("providePutArguments")
public void testRemove(UpdateMode updateMode, LLDictionaryResultType resultType) {
log.warn("Test not implemented");
//todo: implement
}
}

View File

@ -1,21 +1,18 @@
package it.cavallium.dbengine; package it.cavallium.dbengine.tests;
import static it.cavallium.dbengine.DbTestUtils.destroyAllocator; import static it.cavallium.dbengine.tests.DbTestUtils.ensureNoLeaks;
import static it.cavallium.dbengine.DbTestUtils.ensureNoLeaks; import static it.cavallium.dbengine.tests.DbTestUtils.runVoid;
import static it.cavallium.dbengine.DbTestUtils.newAllocator;
import static it.cavallium.dbengine.SyncUtils.*;
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertThrows;
import io.netty5.buffer.Buffer; import it.cavallium.dbengine.tests.DbTestUtils.TempDb;
import io.netty5.util.Send; import it.cavallium.dbengine.buffers.Buf;
import it.cavallium.dbengine.DbTestUtils.TempDb;
import it.cavallium.dbengine.DbTestUtils.TestAllocator;
import it.cavallium.dbengine.database.LLDictionary; import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.LLDictionaryResultType; import it.cavallium.dbengine.database.LLDictionaryResultType;
import it.cavallium.dbengine.database.LLKeyValueDatabase; import it.cavallium.dbengine.database.LLKeyValueDatabase;
import it.cavallium.dbengine.database.UpdateMode; import it.cavallium.dbengine.database.UpdateMode;
import it.cavallium.dbengine.database.UpdateReturnMode; import it.cavallium.dbengine.database.UpdateReturnMode;
import java.io.IOException;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.util.Arrays; import java.util.Arrays;
import java.util.Objects; import java.util.Objects;
@ -27,29 +24,25 @@ import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource; import org.junit.jupiter.params.provider.MethodSource;
import reactor.core.publisher.Mono;
public abstract class TestLLDictionaryLeaks { public abstract class TestLLDictionaryLeaks {
private TestAllocator allocator;
private TempDb tempDb; private TempDb tempDb;
private LLKeyValueDatabase db; private LLKeyValueDatabase db;
protected abstract TemporaryDbGenerator getTempDbGenerator(); protected abstract TemporaryDbGenerator getTempDbGenerator();
@BeforeEach @BeforeEach
public void beforeEach() { public void beforeEach() throws IOException {
this.allocator = newAllocator(); ensureNoLeaks(false, false);
ensureNoLeaks(allocator.allocator(), false, false); tempDb = Objects.requireNonNull(getTempDbGenerator().openTempDb(), "TempDB");
tempDb = Objects.requireNonNull(getTempDbGenerator().openTempDb(allocator).block(), "TempDB");
db = tempDb.db(); db = tempDb.db();
} }
@AfterEach @AfterEach
public void afterEach() { public void afterEach() throws IOException {
getTempDbGenerator().closeTempDb(tempDb).block(); getTempDbGenerator().closeTempDb(tempDb);
ensureNoLeaks(allocator.allocator(), true, false); ensureNoLeaks(true, false);
destroyAllocator(allocator);
} }
public static Stream<Arguments> provideArguments() { public static Stream<Arguments> provideArguments() {
@ -73,30 +66,25 @@ public abstract class TestLLDictionaryLeaks {
} }
private LLDictionary getDict(UpdateMode updateMode) { private LLDictionary getDict(UpdateMode updateMode) {
var dict = DbTestUtils.tempDictionary(db, updateMode).blockOptional().orElseThrow(); var dict = DbTestUtils.tempDictionary(db, updateMode);
var key1 = Mono.fromCallable(() -> fromString("test-key-1")); var key1 = fromString("test-key-1");
var key2 = Mono.fromCallable(() -> fromString("test-key-2")); var key2 = fromString("test-key-2");
var key3 = Mono.fromCallable(() -> fromString("test-key-3")); var key3 = fromString("test-key-3");
var key4 = Mono.fromCallable(() -> fromString("test-key-4")); var key4 = fromString("test-key-4");
var value = Mono.fromCallable(() -> fromString("test-value")); var value = fromString("test-value");
dict.put(key1, value, LLDictionaryResultType.VOID).block(); dict.put(key1, value, LLDictionaryResultType.VOID);
dict.put(key2, value, LLDictionaryResultType.VOID).block(); dict.put(key2, value, LLDictionaryResultType.VOID);
dict.put(key3, value, LLDictionaryResultType.VOID).block(); dict.put(key3, value, LLDictionaryResultType.VOID);
dict.put(key4, value, LLDictionaryResultType.VOID).block(); dict.put(key4, value, LLDictionaryResultType.VOID);
return dict; return dict;
} }
private Buffer fromString(String s) { private Buf fromString(String s) {
var sb = s.getBytes(StandardCharsets.UTF_8); var sb = s.getBytes(StandardCharsets.UTF_8);
var b = db.getAllocator().allocate(sb.length); var b = Buf.create(sb.length);
try { b.addElements(0, sb);
b.writeBytes(sb); assert b.size() == sb.length;
assert b.readableBytes() == sb.length; return b;
return b;
} catch (Throwable ex) {
b.close();
throw ex;
}
} }
@Test @Test
@ -106,8 +94,7 @@ public abstract class TestLLDictionaryLeaks {
@Test @Test
public void testNoOpAllocation() { public void testNoOpAllocation() {
for (int i = 0; i < 10; i++) { for (int i = 0; i < 10; i++) {
var a = allocator.allocator().allocate(i * 512); var a = Buf.create(i * 512);
a.send().receive().close();
} }
} }
@ -124,30 +111,23 @@ public abstract class TestLLDictionaryLeaks {
dict.getColumnName(); dict.getColumnName();
} }
@ParameterizedTest
@MethodSource("provideArguments")
public void testGetAllocator(UpdateMode updateMode) {
var dict = getDict(updateMode);
dict.getAllocator();
}
@ParameterizedTest @ParameterizedTest
@MethodSource("provideArguments") @MethodSource("provideArguments")
public void testGet(UpdateMode updateMode) { public void testGet(UpdateMode updateMode) {
var dict = getDict(updateMode); var dict = getDict(updateMode);
var key = Mono.fromCallable(() -> fromString("test")); var key = fromString("test");
runVoid(dict.get(null, key).then()); dict.get(null, key);
runVoid(dict.get(null, key).then()); dict.get(null, key);
runVoid(dict.get(null, key).then()); dict.get(null, key);
} }
@ParameterizedTest @ParameterizedTest
@MethodSource("providePutArguments") @MethodSource("providePutArguments")
public void testPut(UpdateMode updateMode, LLDictionaryResultType resultType) { public void testPut(UpdateMode updateMode, LLDictionaryResultType resultType) {
var dict = getDict(updateMode); var dict = getDict(updateMode);
var key = Mono.fromCallable(() -> fromString("test-key")); var key = fromString("test-key");
var value = Mono.fromCallable(() -> fromString("test-value")); var value = fromString("test-value");
runVoid(dict.put(key, value, resultType).then().doOnDiscard(Buffer.class, Buffer::close)); dict.put(key, value, resultType);
} }
@ParameterizedTest @ParameterizedTest
@ -161,19 +141,13 @@ public abstract class TestLLDictionaryLeaks {
@MethodSource("provideUpdateArguments") @MethodSource("provideUpdateArguments")
public void testUpdate(UpdateMode updateMode, UpdateReturnMode updateReturnMode) { public void testUpdate(UpdateMode updateMode, UpdateReturnMode updateReturnMode) {
var dict = getDict(updateMode); var dict = getDict(updateMode);
var key = Mono.fromCallable(() -> fromString("test-key")); var key = fromString("test-key");
runVoid(updateMode == UpdateMode.DISALLOW, runVoid(updateMode == UpdateMode.DISALLOW, () -> dict.update(key, this::pass, updateReturnMode));
dict.update(key, this::pass, updateReturnMode).then() runVoid(updateMode == UpdateMode.DISALLOW, () -> dict.update(key, this::pass, updateReturnMode));
); runVoid(updateMode == UpdateMode.DISALLOW, () -> dict.update(key, this::pass, updateReturnMode));
runVoid(updateMode == UpdateMode.DISALLOW,
dict.update(key, this::pass, updateReturnMode).then()
);
runVoid(updateMode == UpdateMode.DISALLOW,
dict.update(key, this::pass, updateReturnMode).then()
);
} }
private Buffer pass(@Nullable Buffer old) { private Buf pass(@Nullable Buf old) {
if (old == null) return null; if (old == null) return null;
return old.copy(); return old.copy();
} }
@ -182,30 +156,24 @@ public abstract class TestLLDictionaryLeaks {
@MethodSource("provideArguments") @MethodSource("provideArguments")
public void testUpdateAndGetDelta(UpdateMode updateMode) { public void testUpdateAndGetDelta(UpdateMode updateMode) {
var dict = getDict(updateMode); var dict = getDict(updateMode);
var key = Mono.fromCallable(() -> fromString("test-key")); var key = fromString("test-key");
runVoid(updateMode == UpdateMode.DISALLOW, runVoid(updateMode == UpdateMode.DISALLOW, () -> dict.updateAndGetDelta(key, this::pass));
dict.updateAndGetDelta(key, this::pass).then() runVoid(updateMode == UpdateMode.DISALLOW, () -> dict.updateAndGetDelta(key, this::pass));
); runVoid(updateMode == UpdateMode.DISALLOW, () -> dict.updateAndGetDelta(key, this::pass));
runVoid(updateMode == UpdateMode.DISALLOW,
dict.updateAndGetDelta(key, this::pass).then()
);
runVoid(updateMode == UpdateMode.DISALLOW,
dict.updateAndGetDelta(key, this::pass).then()
);
} }
@ParameterizedTest @ParameterizedTest
@MethodSource("provideArguments") @MethodSource("provideArguments")
public void testClear(UpdateMode updateMode) { public void testClear(UpdateMode updateMode) {
var dict = getDict(updateMode); var dict = getDict(updateMode);
runVoid(dict.clear()); dict.clear();
} }
@ParameterizedTest @ParameterizedTest
@MethodSource("providePutArguments") @MethodSource("providePutArguments")
public void testRemove(UpdateMode updateMode, LLDictionaryResultType resultType) { public void testRemove(UpdateMode updateMode, LLDictionaryResultType resultType) {
var dict = getDict(updateMode); var dict = getDict(updateMode);
var key = Mono.fromCallable(() -> fromString("test-key")); var key = fromString("test-key");
runVoid(dict.remove(key, resultType).then().doOnDiscard(Buffer.class, Buffer::close)); dict.remove(key, resultType);
} }
} }

View File

@ -1,4 +1,4 @@
package it.cavallium.dbengine; package it.cavallium.dbengine.tests;
public class TestLocalDictionary extends TestDictionary { public class TestLocalDictionary extends TestDictionary {

View File

@ -1,4 +1,4 @@
package it.cavallium.dbengine; package it.cavallium.dbengine.tests;
public class TestLocalDictionaryMap extends TestDictionaryMap { public class TestLocalDictionaryMap extends TestDictionaryMap {

View File

@ -1,4 +1,4 @@
package it.cavallium.dbengine; package it.cavallium.dbengine.tests;
public class TestLocalDictionaryMapDeep extends TestDictionaryMapDeep { public class TestLocalDictionaryMapDeep extends TestDictionaryMapDeep {

View File

@ -1,4 +1,4 @@
package it.cavallium.dbengine; package it.cavallium.dbengine.tests;
public class TestLocalDictionaryMapDeepHashMap extends TestDictionaryMapDeepHashMap { public class TestLocalDictionaryMapDeepHashMap extends TestDictionaryMapDeepHashMap {

View File

@ -1,4 +1,4 @@
package it.cavallium.dbengine; package it.cavallium.dbengine.tests;
public class TestLocalLLDictionary extends TestLLDictionary { public class TestLocalLLDictionary extends TestLLDictionary {

View File

@ -1,4 +1,4 @@
package it.cavallium.dbengine; package it.cavallium.dbengine.tests;
public class TestLocalLLDictionaryLeaks extends TestLLDictionaryLeaks { public class TestLocalLLDictionaryLeaks extends TestLLDictionaryLeaks {

View File

@ -1,4 +1,4 @@
package it.cavallium.dbengine; package it.cavallium.dbengine.tests;
public class TestLocalSingletons extends TestSingletons { public class TestLocalSingletons extends TestSingletons {

View File

@ -1,29 +1,26 @@
package it.cavallium.dbengine; package it.cavallium.dbengine.tests;
import static it.cavallium.dbengine.DbTestUtils.MAX_IN_MEMORY_RESULT_ENTRIES; import static it.cavallium.dbengine.tests.DbTestUtils.MAX_IN_MEMORY_RESULT_ENTRIES;
import static it.cavallium.dbengine.DbTestUtils.destroyAllocator; import static it.cavallium.dbengine.tests.DbTestUtils.ensureNoLeaks;
import static it.cavallium.dbengine.DbTestUtils.ensureNoLeaks;
import static it.cavallium.dbengine.DbTestUtils.newAllocator;
import static it.cavallium.dbengine.SyncUtils.*;
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail; import static org.junit.jupiter.api.Assertions.fail;
import it.cavallium.dbengine.DbTestUtils.TempDb; import it.cavallium.dbengine.tests.DbTestUtils.TempDb;
import it.cavallium.dbengine.DbTestUtils.TestAllocator; import it.cavallium.dbengine.buffers.Buf;
import it.cavallium.dbengine.client.LuceneIndex; import it.cavallium.dbengine.client.LuceneIndex;
import it.cavallium.dbengine.client.Sort; import it.cavallium.dbengine.client.Sort;
import it.cavallium.dbengine.client.query.current.data.MatchAllDocsQuery; import it.cavallium.dbengine.client.query.current.data.MatchAllDocsQuery;
import it.cavallium.dbengine.database.LLLuceneIndex; import it.cavallium.dbengine.database.LLLuceneIndex;
import it.cavallium.dbengine.database.LLScoreMode; import it.cavallium.dbengine.database.LLScoreMode;
import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.database.disk.LLTempHugePqEnv;
import it.cavallium.dbengine.lucene.searcher.AdaptiveLocalSearcher; import it.cavallium.dbengine.lucene.searcher.AdaptiveLocalSearcher;
import it.cavallium.dbengine.lucene.searcher.AdaptiveMultiSearcher; import it.cavallium.dbengine.lucene.searcher.AdaptiveMultiSearcher;
import it.cavallium.dbengine.lucene.searcher.CountMultiSearcher; import it.cavallium.dbengine.lucene.searcher.CountMultiSearcher;
import it.cavallium.dbengine.lucene.searcher.LocalSearcher; import it.cavallium.dbengine.lucene.searcher.LocalSearcher;
import it.cavallium.dbengine.lucene.searcher.MultiSearcher; import it.cavallium.dbengine.lucene.searcher.MultiSearcher;
import java.io.IOException; import java.io.IOException;
import java.util.List;
import java.util.Objects; import java.util.Objects;
import java.util.stream.IntStream;
import java.util.stream.Stream; import java.util.stream.Stream;
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
@ -37,15 +34,10 @@ import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource; import org.junit.jupiter.params.provider.MethodSource;
import reactor.core.publisher.Flux;
import reactor.util.function.Tuples;
public class TestLuceneIndex { public class TestLuceneIndex {
private final Logger log = LogManager.getLogger(this.getClass()); private final Logger log = LogManager.getLogger(this.getClass());
private static LLTempHugePqEnv ENV;
private TestAllocator allocator;
private TempDb tempDb; private TempDb tempDb;
private LLLuceneIndex luceneSingle; private LLLuceneIndex luceneSingle;
private LLLuceneIndex luceneMulti; private LLLuceneIndex luceneMulti;
@ -56,14 +48,12 @@ public class TestLuceneIndex {
@BeforeAll @BeforeAll
public static void beforeAll() throws IOException { public static void beforeAll() throws IOException {
ENV = new LLTempHugePqEnv();
} }
@BeforeEach @BeforeEach
public void beforeEach() { public void beforeEach() throws IOException {
this.allocator = newAllocator(); ensureNoLeaks(false, false);
ensureNoLeaks(allocator.allocator(), false, false); tempDb = Objects.requireNonNull(getTempDbGenerator().openTempDb(), "TempDB");
tempDb = Objects.requireNonNull(getTempDbGenerator().openTempDb(allocator).block(), "TempDB");
luceneSingle = tempDb.luceneSingle(); luceneSingle = tempDb.luceneSingle();
luceneMulti = tempDb.luceneMulti(); luceneMulti = tempDb.luceneMulti();
} }
@ -72,13 +62,13 @@ public class TestLuceneIndex {
return Stream.of(false, true).map(Arguments::of); return Stream.of(false, true).map(Arguments::of);
} }
private static final Flux<Boolean> multi = Flux.just(false, true); private static final List<Boolean> multi = List.of(false, true);
private static final Flux<LLScoreMode> scoreModes = Flux.just(LLScoreMode.NO_SCORES, private static final List<LLScoreMode> scoreModes = List.of(LLScoreMode.NO_SCORES,
LLScoreMode.TOP_SCORES, LLScoreMode.TOP_SCORES,
LLScoreMode.COMPLETE_NO_SCORES, LLScoreMode.COMPLETE_NO_SCORES,
LLScoreMode.COMPLETE LLScoreMode.COMPLETE
); );
private static final Flux<Sort> multiSort = Flux.just(Sort.score(), private static final List<Sort> multiSort = List.of(Sort.score(),
Sort.random(), Sort.random(),
Sort.no(), Sort.no(),
Sort.doc(), Sort.doc(),
@ -86,62 +76,78 @@ public class TestLuceneIndex {
Sort.numeric("longsort", true) Sort.numeric("longsort", true)
); );
record Tuple2<X, Y>(X getT1, Y getT2) {
public Object[] toArray() {
return new Object[] {getT1, getT2};
}
}
record Tuple3<X, Y, Z>(X getT1, Y getT2, Y getT3) {
public Object[] toArray() {
return new Object[] {getT1, getT2, getT3};
}
}
record Tuple4<X, Y, Z, W>(X getT1, Y getT2, Y getT3, W getT4) {
public Object[] toArray() {
return new Object[] {getT1, getT2, getT3, getT4};
}
}
record Tuple5<X, Y, Z, W, X1>(X getT1, Y getT2, Y getT3, W getT4, X1 getT5) {
public Object[] toArray() {
return new Object[] {getT1, getT2, getT3, getT4, getT5};
}
}
public static Stream<Arguments> provideQueryArgumentsScoreMode() { public static Stream<Arguments> provideQueryArgumentsScoreMode() {
return multi return multi.stream()
.concatMap(shard -> scoreModes.map(scoreMode -> Tuples.of(shard, scoreMode))) .flatMap(shard -> scoreModes.stream().map(scoreMode -> new Tuple2<>(shard, scoreMode)))
.map(tuple -> Arguments.of(tuple.toArray())) .map(tuple -> Arguments.of(tuple.toArray()));
.toStream();
} }
public static Stream<Arguments> provideQueryArgumentsSort() { public static Stream<Arguments> provideQueryArgumentsSort() {
return multi return multi.stream()
.concatMap(shard -> multiSort.map(multiSort -> Tuples.of(shard, multiSort))) .flatMap(shard -> multiSort.stream().map(multiSort -> new Tuple2<>(shard, multiSort)))
.map(tuple -> Arguments.of(tuple.toArray())) .map(tuple -> Arguments.of(tuple.toArray()));
.toStream();
} }
public static Stream<Arguments> provideQueryArgumentsScoreModeAndSort() { public static Stream<Arguments> provideQueryArgumentsScoreModeAndSort() {
return multi return multi.stream()
.concatMap(shard -> scoreModes.map(scoreMode -> Tuples.of(shard, scoreMode))) .flatMap(shard -> scoreModes.stream().map(scoreMode -> new Tuple2<>(shard, scoreMode)))
.concatMap(tuple -> multiSort.map(multiSort -> Tuples.of(tuple.getT1(), tuple.getT2(), multiSort))) .flatMap(tuple -> multiSort.stream().map(multiSort -> new Tuple3<>(tuple.getT1(), tuple.getT2(), multiSort)))
.map(tuple -> Arguments.of(tuple.toArray())) .map(tuple -> Arguments.of(tuple.toArray()));
.toStream();
} }
@AfterEach @AfterEach
public void afterEach() { public void afterEach() throws IOException {
getTempDbGenerator().closeTempDb(tempDb).block(); getTempDbGenerator().closeTempDb(tempDb);
ensureNoLeaks(allocator.allocator(), true, false); ensureNoLeaks(true, false);
destroyAllocator(allocator);
} }
@AfterAll @AfterAll
public static void afterAll() throws IOException { public static void afterAll() throws IOException {
ENV.close();
} }
private LuceneIndex<String, String> getLuceneIndex(boolean shards, @Nullable LocalSearcher customSearcher) { private LuceneIndex<String, String> getLuceneIndex(boolean shards, @Nullable LocalSearcher customSearcher) {
LuceneIndex<String, String> index = run(DbTestUtils.tempLuceneIndex(shards ? luceneSingle : luceneMulti)); LuceneIndex<String, String> index = DbTestUtils.tempLuceneIndex(shards ? luceneSingle : luceneMulti);
index.updateDocument("test-key-1", "0123456789").block(); index.updateDocument("test-key-1", "0123456789");
index.updateDocument("test-key-2", "test 0123456789 test word").block(); index.updateDocument("test-key-2", "test 0123456789 test word");
index.updateDocument("test-key-3", "0123456789 test example string").block(); index.updateDocument("test-key-3", "0123456789 test example string");
index.updateDocument("test-key-4", "hello world the quick brown fox jumps over the lazy dog").block(); index.updateDocument("test-key-4", "hello world the quick brown fox jumps over the lazy dog");
index.updateDocument("test-key-5", "hello the quick brown fox jumps over the lazy dog").block(); index.updateDocument("test-key-5", "hello the quick brown fox jumps over the lazy dog");
index.updateDocument("test-key-6", "hello the quick brown fox jumps over the world dog").block(); index.updateDocument("test-key-6", "hello the quick brown fox jumps over the world dog");
index.updateDocument("test-key-7", "the quick brown fox jumps over the world dog").block(); index.updateDocument("test-key-7", "the quick brown fox jumps over the world dog");
index.updateDocument("test-key-8", "the quick brown fox jumps over the lazy dog").block(); index.updateDocument("test-key-8", "the quick brown fox jumps over the lazy dog");
index.updateDocument("test-key-9", "Example1").block(); index.updateDocument("test-key-9", "Example1");
index.updateDocument("test-key-10", "Example2").block(); index.updateDocument("test-key-10", "Example2");
index.updateDocument("test-key-11", "Example3").block(); index.updateDocument("test-key-11", "Example3");
index.updateDocument("test-key-12", "-234").block(); index.updateDocument("test-key-12", "-234");
index.updateDocument("test-key-13", "2111").block(); index.updateDocument("test-key-13", "2111");
index.updateDocument("test-key-14", "2999").block(); index.updateDocument("test-key-14", "2999");
index.updateDocument("test-key-15", "3902").block(); index.updateDocument("test-key-15", "3902");
Flux IntStream.rangeClosed(1, 1000).forEach(i -> index.updateDocument("test-key-" + (15 + i), "" + i));
.range(1, 1000)
.concatMap(i -> index.updateDocument("test-key-" + (15 + i), "" + i))
.transform(LLUtils::handleDiscard)
.blockLast();
tempDb.swappableLuceneSearcher().setSingle(new CountMultiSearcher()); tempDb.swappableLuceneSearcher().setSingle(new CountMultiSearcher());
tempDb.swappableLuceneSearcher().setMulti(new CountMultiSearcher()); tempDb.swappableLuceneSearcher().setMulti(new CountMultiSearcher());
assertCount(index, 1000 + 15); assertCount(index, 1000 + 15);
@ -155,8 +161,8 @@ public class TestLuceneIndex {
} }
} }
} else { } else {
tempDb.swappableLuceneSearcher().setSingle(new AdaptiveLocalSearcher(ENV, true, MAX_IN_MEMORY_RESULT_ENTRIES)); tempDb.swappableLuceneSearcher().setSingle(new AdaptiveLocalSearcher(MAX_IN_MEMORY_RESULT_ENTRIES));
tempDb.swappableLuceneSearcher().setMulti(new AdaptiveMultiSearcher(ENV, true, MAX_IN_MEMORY_RESULT_ENTRIES)); tempDb.swappableLuceneSearcher().setMulti(new AdaptiveMultiSearcher(MAX_IN_MEMORY_RESULT_ENTRIES));
} }
return index; return index;
} }
@ -166,8 +172,8 @@ public class TestLuceneIndex {
} }
private long getCount(LuceneIndex<String, String> luceneIndex) { private long getCount(LuceneIndex<String, String> luceneIndex) {
luceneIndex.refresh(true).block(); luceneIndex.refresh(true);
var totalHitsCount = run(luceneIndex.count(null, new MatchAllDocsQuery())); var totalHitsCount = luceneIndex.count(null, new MatchAllDocsQuery());
Assertions.assertTrue(totalHitsCount.exact(), "Can't get count because the total hits count is not exact"); Assertions.assertTrue(totalHitsCount.exact(), "Can't get count because the total hits count is not exact");
return totalHitsCount.value(); return totalHitsCount.value();
} }
@ -179,8 +185,7 @@ public class TestLuceneIndex {
@Test @Test
public void testNoOpAllocation() { public void testNoOpAllocation() {
for (int i = 0; i < 10; i++) { for (int i = 0; i < 10; i++) {
var a = allocator.allocator().allocate(i * 512); var a = Buf.create(i * 512);
a.send().receive().close();
} }
} }
@ -195,7 +200,7 @@ public class TestLuceneIndex {
@MethodSource("provideArguments") @MethodSource("provideArguments")
public void testDeleteAll(boolean shards) { public void testDeleteAll(boolean shards) {
var luceneIndex = getLuceneIndex(shards, null); var luceneIndex = getLuceneIndex(shards, null);
runVoid(luceneIndex.deleteAll()); luceneIndex.deleteAll();
assertCount(luceneIndex, 0); assertCount(luceneIndex, 0);
} }
@ -204,7 +209,7 @@ public class TestLuceneIndex {
public void testDelete(boolean shards) { public void testDelete(boolean shards) {
var luceneIndex = getLuceneIndex(shards, null); var luceneIndex = getLuceneIndex(shards, null);
var prevCount = getCount(luceneIndex); var prevCount = getCount(luceneIndex);
runVoid(luceneIndex.deleteDocument("test-key-1")); luceneIndex.deleteDocument("test-key-1");
assertCount(luceneIndex, prevCount - 1); assertCount(luceneIndex, prevCount - 1);
} }
@ -213,7 +218,7 @@ public class TestLuceneIndex {
public void testUpdateSameDoc(boolean shards) { public void testUpdateSameDoc(boolean shards) {
var luceneIndex = getLuceneIndex(shards, null); var luceneIndex = getLuceneIndex(shards, null);
var prevCount = getCount(luceneIndex); var prevCount = getCount(luceneIndex);
runVoid(luceneIndex.updateDocument("test-key-1", "new-value")); luceneIndex.updateDocument("test-key-1", "new-value");
assertCount(luceneIndex, prevCount ); assertCount(luceneIndex, prevCount );
} }
@ -222,7 +227,7 @@ public class TestLuceneIndex {
public void testUpdateNewDoc(boolean shards) { public void testUpdateNewDoc(boolean shards) {
var luceneIndex = getLuceneIndex(shards, null); var luceneIndex = getLuceneIndex(shards, null);
var prevCount = getCount(luceneIndex); var prevCount = getCount(luceneIndex);
runVoid(luceneIndex.updateDocument("test-key-new", "new-value")); luceneIndex.updateDocument("test-key-new", "new-value");
assertCount(luceneIndex, prevCount + 1); assertCount(luceneIndex, prevCount + 1);
} }

View File

@ -1,21 +1,17 @@
package it.cavallium.dbengine; package it.cavallium.dbengine.tests;
import static it.cavallium.dbengine.DbTestUtils.MAX_IN_MEMORY_RESULT_ENTRIES; import static it.cavallium.dbengine.tests.DbTestUtils.MAX_IN_MEMORY_RESULT_ENTRIES;
import static it.cavallium.dbengine.DbTestUtils.destroyAllocator; import static it.cavallium.dbengine.tests.DbTestUtils.ensureNoLeaks;
import static it.cavallium.dbengine.DbTestUtils.ensureNoLeaks;
import static it.cavallium.dbengine.DbTestUtils.newAllocator;
import static it.cavallium.dbengine.SyncUtils.*;
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail; import static org.junit.jupiter.api.Assertions.fail;
import io.netty.buffer.PooledByteBufAllocator; import io.netty.buffer.PooledByteBufAllocator;
import it.cavallium.dbengine.DbTestUtils.TempDb; import it.cavallium.dbengine.tests.DbTestUtils.TempDb;
import it.cavallium.dbengine.DbTestUtils.TestAllocator; import it.cavallium.dbengine.tests.TestLuceneIndex.Tuple2;
import it.cavallium.dbengine.client.HitKey; import it.cavallium.dbengine.client.HitKey;
import it.cavallium.dbengine.client.Hits; import it.cavallium.dbengine.client.Hits;
import it.cavallium.dbengine.client.LuceneIndex; import it.cavallium.dbengine.client.LuceneIndex;
import it.cavallium.dbengine.client.Sort; import it.cavallium.dbengine.client.Sort;
import it.cavallium.dbengine.client.LazyHitKey;
import it.cavallium.dbengine.client.query.ClientQueryParams; import it.cavallium.dbengine.client.query.ClientQueryParams;
import it.cavallium.dbengine.client.query.ClientQueryParamsBuilder; import it.cavallium.dbengine.client.query.ClientQueryParamsBuilder;
import it.cavallium.dbengine.client.query.current.data.BooleanQuery; import it.cavallium.dbengine.client.query.current.data.BooleanQuery;
@ -29,7 +25,6 @@ import it.cavallium.dbengine.client.query.current.data.Term;
import it.cavallium.dbengine.client.query.current.data.TermQuery; import it.cavallium.dbengine.client.query.current.data.TermQuery;
import it.cavallium.dbengine.client.query.current.data.TotalHitsCount; import it.cavallium.dbengine.client.query.current.data.TotalHitsCount;
import it.cavallium.dbengine.database.LLLuceneIndex; import it.cavallium.dbengine.database.LLLuceneIndex;
import it.cavallium.dbengine.database.disk.LLTempHugePqEnv;
import it.cavallium.dbengine.lucene.searcher.AdaptiveLocalSearcher; import it.cavallium.dbengine.lucene.searcher.AdaptiveLocalSearcher;
import it.cavallium.dbengine.lucene.searcher.AdaptiveMultiSearcher; import it.cavallium.dbengine.lucene.searcher.AdaptiveMultiSearcher;
import it.cavallium.dbengine.lucene.searcher.CountMultiSearcher; import it.cavallium.dbengine.lucene.searcher.CountMultiSearcher;
@ -38,18 +33,17 @@ import it.cavallium.dbengine.lucene.searcher.MultiSearcher;
import it.cavallium.dbengine.lucene.searcher.StandardSearcher; import it.cavallium.dbengine.lucene.searcher.StandardSearcher;
import it.cavallium.dbengine.lucene.searcher.ScoredPagedMultiSearcher; import it.cavallium.dbengine.lucene.searcher.ScoredPagedMultiSearcher;
import it.cavallium.dbengine.lucene.searcher.PagedLocalSearcher; import it.cavallium.dbengine.lucene.searcher.PagedLocalSearcher;
import it.cavallium.dbengine.lucene.searcher.SortedScoredFullMultiSearcher;
import it.cavallium.dbengine.lucene.searcher.SortedByScoreFullMultiSearcher;
import it.cavallium.dbengine.lucene.searcher.UnsortedStreamingMultiSearcher; import it.cavallium.dbengine.lucene.searcher.UnsortedStreamingMultiSearcher;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections; import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.LinkedHashMap; import java.util.LinkedHashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Objects; import java.util.Objects;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream; import java.util.stream.Stream;
import org.apache.commons.lang3.function.FailableConsumer; import org.apache.commons.lang3.function.FailableConsumer;
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.LogManager;
@ -63,18 +57,12 @@ import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource; import org.junit.jupiter.params.provider.MethodSource;
import reactor.core.publisher.Flux;
import reactor.core.publisher.FluxSink.OverflowStrategy;
import reactor.core.scheduler.Schedulers;
import reactor.util.function.Tuples;
public class TestLuceneSearches { public class TestLuceneSearches {
private static final Logger log = LogManager.getLogger(TestLuceneSearches.class); private static final Logger log = LogManager.getLogger(TestLuceneSearches.class);
private static LLTempHugePqEnv ENV;
private static final MemoryTemporaryDbGenerator TEMP_DB_GENERATOR = new MemoryTemporaryDbGenerator(); private static final MemoryTemporaryDbGenerator TEMP_DB_GENERATOR = new MemoryTemporaryDbGenerator();
private static TestAllocator allocator;
private static TempDb tempDb; private static TempDb tempDb;
private static LLLuceneIndex luceneSingle; private static LLLuceneIndex luceneSingle;
private static LLLuceneIndex luceneMulti; private static LLLuceneIndex luceneMulti;
@ -102,31 +90,25 @@ public class TestLuceneSearches {
modifiableElements.put("test-key-13", "2111"); modifiableElements.put("test-key-13", "2111");
modifiableElements.put("test-key-14", "2999"); modifiableElements.put("test-key-14", "2999");
modifiableElements.put("test-key-15", "3902"); modifiableElements.put("test-key-15", "3902");
runVoid(Flux.range(1, 1000).doOnNext(i -> modifiableElements.put("test-key-" + (15 + i), "" + i)).then()); IntStream.rangeClosed(1, 1000).forEach(i -> modifiableElements.put("test-key-" + (15 + i), "" + i));
ELEMENTS = Collections.unmodifiableMap(modifiableElements); ELEMENTS = Collections.unmodifiableMap(modifiableElements);
} }
@BeforeAll @BeforeAll
public static void beforeAll() throws IOException { public static void beforeAll() throws IOException {
allocator = newAllocator(); ensureNoLeaks(false, false);
ensureNoLeaks(allocator.allocator(), false, false); tempDb = Objects.requireNonNull(TEMP_DB_GENERATOR.openTempDb(), "TempDB");
tempDb = Objects.requireNonNull(TEMP_DB_GENERATOR.openTempDb(allocator).block(), "TempDB");
luceneSingle = tempDb.luceneSingle(); luceneSingle = tempDb.luceneSingle();
luceneMulti = tempDb.luceneMulti(); luceneMulti = tempDb.luceneMulti();
ENV = new LLTempHugePqEnv();
setUpIndex(true); setUpIndex(true);
setUpIndex(false); setUpIndex(false);
} }
private static void setUpIndex(boolean shards) { private static void setUpIndex(boolean shards) {
LuceneIndex<String, String> index = run(DbTestUtils.tempLuceneIndex(shards ? luceneSingle : luceneMulti)); LuceneIndex<String, String> index = DbTestUtils.tempLuceneIndex(shards ? luceneSingle : luceneMulti);
Flux ELEMENTS.forEach(index::updateDocument);
.fromIterable(ELEMENTS.entrySet())
.concatMap(entry -> index.updateDocument(entry.getKey(), entry.getValue()))
.subscribeOn(Schedulers.boundedElastic())
.blockLast();
tempDb.swappableLuceneSearcher().setSingle(new CountMultiSearcher()); tempDb.swappableLuceneSearcher().setSingle(new CountMultiSearcher());
tempDb.swappableLuceneSearcher().setMulti(new CountMultiSearcher()); tempDb.swappableLuceneSearcher().setMulti(new CountMultiSearcher());
assertCount(index, 1000 + 15); assertCount(index, 1000 + 15);
@ -141,8 +123,8 @@ public class TestLuceneSearches {
return Stream.of(false, true).map(Arguments::of); return Stream.of(false, true).map(Arguments::of);
} }
private static final Flux<Boolean> multi = Flux.just(false, true); private static final List<Boolean> multi = List.of(false, true);
private static final Flux<Sort> multiSort = Flux.just( private static final List<Sort> multiSort = List.of(
Sort.score(), Sort.score(),
//todo: fix random sort field //todo: fix random sort field
//Sort.randomSortField(), //Sort.randomSortField(),
@ -154,50 +136,43 @@ public class TestLuceneSearches {
Sort.numeric("intsort", true) Sort.numeric("intsort", true)
); );
private static Flux<LocalSearcher> getSearchers(ExpectedQueryType info) { private static List<LocalSearcher> getSearchers(ExpectedQueryType info) {
return Flux.push(sink -> { var sink = new ArrayList<LocalSearcher>();
if (info.shard()) { if (info.shard()) {
if (info.onlyCount()) { if (info.onlyCount()) {
sink.next(new CountMultiSearcher()); sink.add(new CountMultiSearcher());
} else {
sink.next(new ScoredPagedMultiSearcher());
if (info.sorted() && !info.sortedByScore()) {
sink.next(new SortedScoredFullMultiSearcher(ENV));
} else {
sink.next(new SortedByScoreFullMultiSearcher(ENV));
}
if (!info.sorted()) {
sink.next(new UnsortedUnscoredSimpleMultiSearcher(new PagedLocalSearcher()));
sink.next(new UnsortedStreamingMultiSearcher());
}
}
sink.next(new AdaptiveMultiSearcher(ENV, true, MAX_IN_MEMORY_RESULT_ENTRIES));
} else { } else {
if (info.onlyCount()) { sink.add(new ScoredPagedMultiSearcher());
sink.next(new CountMultiSearcher()); if (!info.sorted()) {
} else { sink.add(new UnsortedUnscoredSimpleMultiSearcher(new PagedLocalSearcher()));
sink.next(new PagedLocalSearcher()); sink.add(new UnsortedStreamingMultiSearcher());
} }
sink.next(new AdaptiveLocalSearcher(ENV, true, MAX_IN_MEMORY_RESULT_ENTRIES));
} }
sink.complete(); sink.add(new AdaptiveMultiSearcher(MAX_IN_MEMORY_RESULT_ENTRIES));
}, OverflowStrategy.BUFFER); } else {
if (info.onlyCount()) {
sink.add(new CountMultiSearcher());
} else {
sink.add(new PagedLocalSearcher());
}
sink.add(new AdaptiveLocalSearcher(MAX_IN_MEMORY_RESULT_ENTRIES));
}
return sink;
} }
public static Stream<Arguments> provideQueryArgumentsScoreMode() { public static Stream<Arguments> provideQueryArgumentsScoreMode() {
return multi.map(tuple -> Arguments.of(multi)).toStream(); return multi.stream().map(tuple -> Arguments.of(multi));
} }
public static Stream<Arguments> provideQueryArgumentsScoreModeAndSort() { public static Stream<Arguments> provideQueryArgumentsScoreModeAndSort() {
return multi return multi.stream()
.concatMap(multi -> multiSort.map(multiSort -> Tuples.of(multi, multiSort))) .flatMap(multi -> multiSort.stream().map(multiSort -> new Tuple2<>(multi, multiSort)))
.map(tuple -> Arguments.of(tuple.toArray())) .map(tuple -> Arguments.of(tuple.toArray()));
.toStream();
} }
private static void runSearchers(ExpectedQueryType expectedQueryType, FailableConsumer<LocalSearcher, Throwable> consumer) private static void runSearchers(ExpectedQueryType expectedQueryType, FailableConsumer<LocalSearcher, Throwable> consumer)
throws Throwable { throws Throwable {
var searchers = run(getSearchers(expectedQueryType).collectList()); var searchers = getSearchers(expectedQueryType);
for (LocalSearcher searcher : searchers) { for (LocalSearcher searcher : searchers) {
log.info("Using searcher \"{}\"", searcher.getName()); log.info("Using searcher \"{}\"", searcher.getName());
consumer.accept(searcher); consumer.accept(searcher);
@ -214,10 +189,8 @@ public class TestLuceneSearches {
@AfterAll @AfterAll
public static void afterAll() throws IOException { public static void afterAll() throws IOException {
TEMP_DB_GENERATOR.closeTempDb(tempDb).block(); TEMP_DB_GENERATOR.closeTempDb(tempDb);
ENV.close(); ensureNoLeaks(true, false);
ensureNoLeaks(allocator.allocator(), true, false);
destroyAllocator(allocator);
} }
private LuceneIndex<String, String> getLuceneIndex(boolean shards, @Nullable LocalSearcher customSearcher) { private LuceneIndex<String, String> getLuceneIndex(boolean shards, @Nullable LocalSearcher customSearcher) {
@ -231,8 +204,8 @@ public class TestLuceneSearches {
} }
} }
} else { } else {
tempDb.swappableLuceneSearcher().setSingle(new AdaptiveLocalSearcher(ENV, true, MAX_IN_MEMORY_RESULT_ENTRIES)); tempDb.swappableLuceneSearcher().setSingle(new AdaptiveLocalSearcher(MAX_IN_MEMORY_RESULT_ENTRIES));
tempDb.swappableLuceneSearcher().setMulti(new AdaptiveMultiSearcher(ENV, true, MAX_IN_MEMORY_RESULT_ENTRIES)); tempDb.swappableLuceneSearcher().setMulti(new AdaptiveMultiSearcher(MAX_IN_MEMORY_RESULT_ENTRIES));
} }
return shards ? multiIndex : localIndex; return shards ? multiIndex : localIndex;
} }
@ -242,8 +215,8 @@ public class TestLuceneSearches {
} }
private static long getCount(LuceneIndex<String, String> luceneIndex) { private static long getCount(LuceneIndex<String, String> luceneIndex) {
luceneIndex.refresh(true).block(); luceneIndex.refresh(true);
var totalHitsCount = run(luceneIndex.count(null, new MatchAllDocsQuery())); var totalHitsCount = luceneIndex.count(null, new MatchAllDocsQuery());
Assertions.assertTrue(totalHitsCount.exact(), "Can't get count because the total hits count is not exact"); Assertions.assertTrue(totalHitsCount.exact(), "Can't get count because the total hits count is not exact");
return totalHitsCount.value(); return totalHitsCount.value();
} }
@ -266,7 +239,7 @@ public class TestLuceneSearches {
runSearchers(expectedQueryType, searcher -> { runSearchers(expectedQueryType, searcher -> {
var luceneIndex = getLuceneIndex(expectedQueryType.shard(), searcher); var luceneIndex = getLuceneIndex(expectedQueryType.shard(), searcher);
var query = queryParamsBuilder.build(); var query = queryParamsBuilder.build();
try (var results = run(luceneIndex.search(query))) { try (var results = luceneIndex.search(query)) {
var hits = results.totalHitsCount(); var hits = results.totalHitsCount();
var keys = getResults(results); var keys = getResults(results);
if (hits.exact()) { if (hits.exact()) {
@ -278,7 +251,7 @@ public class TestLuceneSearches {
var standardSearcher = new StandardSearcher(); var standardSearcher = new StandardSearcher();
luceneIndex = getLuceneIndex(expectedQueryType.shard(), standardSearcher); luceneIndex = getLuceneIndex(expectedQueryType.shard(), standardSearcher);
var officialQuery = queryParamsBuilder.limit(ELEMENTS.size() * 2L).build(); var officialQuery = queryParamsBuilder.limit(ELEMENTS.size() * 2L).build();
try (var officialResults = run(luceneIndex.search(officialQuery))) { try (var officialResults = luceneIndex.search(officialQuery)) {
var officialHits = officialResults.totalHitsCount(); var officialHits = officialResults.totalHitsCount();
var officialKeys = getResults(officialResults); var officialKeys = getResults(officialResults);
if (officialHits.exact()) { if (officialHits.exact()) {
@ -303,7 +276,7 @@ public class TestLuceneSearches {
@MethodSource("provideQueryArgumentsScoreModeAndSort") @MethodSource("provideQueryArgumentsScoreModeAndSort")
public void testSearchNoDocs(boolean shards, Sort multiSort) throws Throwable { public void testSearchNoDocs(boolean shards, Sort multiSort) throws Throwable {
var queryBuilder = ClientQueryParams var queryBuilder = ClientQueryParams
.<LazyHitKey<String>>builder() .<HitKey<String>>builder()
.query(new MatchNoDocsQuery()) .query(new MatchNoDocsQuery())
.snapshot(null) .snapshot(null)
.computePreciseHitsCount(true) .computePreciseHitsCount(true)
@ -317,7 +290,7 @@ public class TestLuceneSearches {
@MethodSource("provideQueryArgumentsScoreModeAndSort") @MethodSource("provideQueryArgumentsScoreModeAndSort")
public void testSearchAllDocs(boolean shards, Sort multiSort) throws Throwable { public void testSearchAllDocs(boolean shards, Sort multiSort) throws Throwable {
var queryBuilder = ClientQueryParams var queryBuilder = ClientQueryParams
.<LazyHitKey<String>>builder() .<HitKey<String>>builder()
.query(new MatchAllDocsQuery()) .query(new MatchAllDocsQuery())
.snapshot(null) .snapshot(null)
.computePreciseHitsCount(true) .computePreciseHitsCount(true)
@ -370,10 +343,10 @@ public class TestLuceneSearches {
} }
private List<Scored> getResults(Hits<HitKey<String>> results) { private List<Scored> getResults(Hits<HitKey<String>> results) {
return run(results return results
.results() .results()
.map(key -> new Scored(key.key(), key.score())) .map(key -> new Scored(key.key(), key.score()))
.collectList()); .toList();
} }
} }

View File

@ -1,4 +1,4 @@
package it.cavallium.dbengine; package it.cavallium.dbengine.tests;
public class TestMemoryDictionary extends TestDictionary { public class TestMemoryDictionary extends TestDictionary {

View File

@ -1,4 +1,4 @@
package it.cavallium.dbengine; package it.cavallium.dbengine.tests;
public class TestMemoryDictionaryMap extends TestDictionaryMap { public class TestMemoryDictionaryMap extends TestDictionaryMap {

View File

@ -1,4 +1,4 @@
package it.cavallium.dbengine; package it.cavallium.dbengine.tests;
public class TestMemoryLLDictionary extends TestLLDictionary { public class TestMemoryLLDictionary extends TestLLDictionary {

View File

@ -1,4 +1,4 @@
package it.cavallium.dbengine; package it.cavallium.dbengine.tests;
public class TestMemoryLLDictionaryLeaks extends TestLLDictionaryLeaks { public class TestMemoryLLDictionaryLeaks extends TestLLDictionaryLeaks {

View File

@ -1,4 +1,4 @@
package it.cavallium.dbengine; package it.cavallium.dbengine.tests;
public class TestMemorySingletons extends TestSingletons { public class TestMemorySingletons extends TestSingletons {

View File

@ -1,4 +1,4 @@
package it.cavallium.dbengine; package it.cavallium.dbengine.tests;
import it.cavallium.dbengine.database.disk.RocksLog4jLogger; import it.cavallium.dbengine.database.disk.RocksLog4jLogger;
import java.io.IOException; import java.io.IOException;
@ -23,7 +23,6 @@ import org.rocksdb.PersistentCache;
import org.rocksdb.ReadOptions; import org.rocksdb.ReadOptions;
import org.rocksdb.RocksDB; import org.rocksdb.RocksDB;
import org.rocksdb.RocksDBException; import org.rocksdb.RocksDBException;
import org.rocksdb.TableFormatConfig;
import org.rocksdb.WriteOptions; import org.rocksdb.WriteOptions;
import org.rocksdb.util.SizeUnit; import org.rocksdb.util.SizeUnit;

View File

@ -1,8 +1,8 @@
package it.cavallium.dbengine.database.collections; package it.cavallium.dbengine.tests;
import io.netty5.buffer.Buffer; import it.cavallium.dbengine.buffers.Buf;
import io.netty5.buffer.BufferAllocator;
import it.cavallium.dbengine.database.LLUtils; import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.database.collections.DatabaseMapDictionaryDeep;
import java.util.Arrays; import java.util.Arrays;
import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Assertions;
@ -11,16 +11,12 @@ import org.junit.jupiter.api.Test;
public class TestRanges { public class TestRanges {
private static BufferAllocator alloc;
@BeforeAll @BeforeAll
public static void beforeAll() { public static void beforeAll() {
alloc = BufferAllocator.offHeapPooled();
} }
@AfterAll @AfterAll
public static void afterAll() { public static void afterAll() {
alloc = BufferAllocator.offHeapPooled();
} }
@Test @Test
@ -43,17 +39,15 @@ public class TestRanges {
public void testNextRangeKey(byte[] prefixKey) { public void testNextRangeKey(byte[] prefixKey) {
byte[] firstRangeKey; byte[] firstRangeKey;
Buffer firstRangeKeyBuf = alloc.allocate(prefixKey.length).writeBytes(prefixKey); Buf firstRangeKeyBuf = Buf.create(prefixKey.length);
try (firstRangeKeyBuf) { firstRangeKeyBuf.addElements(0, prefixKey);
DatabaseMapDictionaryDeep.firstRangeKey(firstRangeKeyBuf, prefixKey.length, 7, 3); firstRangeKeyBuf = DatabaseMapDictionaryDeep.firstRangeKey(firstRangeKeyBuf, prefixKey.length, Buf.createZeroes(7 + 3));
firstRangeKey = LLUtils.toArray(firstRangeKeyBuf); firstRangeKey = firstRangeKeyBuf.asArray();
}
byte[] nextRangeKey; byte[] nextRangeKey;
Buffer nextRangeKeyBuf = alloc.allocate(prefixKey.length).writeBytes(prefixKey); Buf nextRangeKeyBuf = Buf.create(prefixKey.length);
try (nextRangeKeyBuf) { nextRangeKeyBuf.addElements(0, prefixKey);
DatabaseMapDictionaryDeep.nextRangeKey(nextRangeKeyBuf, prefixKey.length, 7, 3); nextRangeKeyBuf = DatabaseMapDictionaryDeep.nextRangeKey(nextRangeKeyBuf, prefixKey.length, Buf.createZeroes(7 + 3));
nextRangeKey = LLUtils.toArray(nextRangeKeyBuf); nextRangeKey = nextRangeKeyBuf.asArray();
}
if (Arrays.equals(prefixKey, new byte[] {(byte) 0xFF, (byte) 0xFF, (byte) 0xFF})) { if (Arrays.equals(prefixKey, new byte[] {(byte) 0xFF, (byte) 0xFF, (byte) 0xFF})) {
org.assertj.core.api.Assertions org.assertj.core.api.Assertions

View File

@ -0,0 +1,136 @@
package it.cavallium.dbengine.tests;
import static it.cavallium.dbengine.tests.DbTestUtils.ensureNoLeaks;
import static it.cavallium.dbengine.tests.DbTestUtils.tempDb;
import it.cavallium.dbengine.database.LLKeyValueDatabase;
import it.cavallium.dbengine.database.collections.DatabaseInt;
import it.cavallium.dbengine.database.collections.DatabaseLong;
import it.cavallium.dbengine.database.collections.DatabaseSingleton;
import it.cavallium.dbengine.database.serialization.Serializer;
import java.io.IOException;
import java.util.stream.Stream;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import org.junit.jupiter.params.provider.ValueSource;
public abstract class TestSingletons {
/**
 * Supplies the generator used to open and close the temporary database under
 * test. Concrete subclasses (e.g. the memory-backed variant) provide the
 * backend-specific implementation.
 */
protected abstract TemporaryDbGenerator getTempDbGenerator();
/**
 * Test cases for {@code testSetInteger}: pairs of (final value to store,
 * number of throwaway overwrites performed before the final set).
 */
private static Stream<Arguments> provideNumberWithRepeats() {
    return Stream
            .of(new Object[] {Integer.MIN_VALUE, 2},
                    new Object[] {-11, 2},
                    new Object[] {0, 3},
                    new Object[] {102, 5})
            .map(Arguments::of);
}
/**
 * Test cases for {@code testSetLong} and {@code testSetSingleton}: pairs of
 * (final value to store, number of throwaway overwrites before the final set).
 */
private static Stream<Arguments> provideLongNumberWithRepeats() {
    return Stream
            .of(new Object[] {Long.MIN_VALUE, 2},
                    new Object[] {-11L, 2},
                    new Object[] {0L, 3},
                    new Object[] {102L, 5})
            .map(Arguments::of);
}
@BeforeEach
public void beforeEach() {
// Guard against leftover leaks from a previous test polluting this one.
// NOTE(review): flag semantics (false, false) are defined in
// DbTestUtils.ensureNoLeaks — presumably a non-strict pre-test check; confirm.
ensureNoLeaks(false, false);
}
@AfterEach
public void afterEach() {
// Fail the test if it leaked any resources. The first flag differs from the
// beforeEach call (true vs false) — presumably a stricter post-test check;
// see DbTestUtils.ensureNoLeaks to confirm.
ensureNoLeaks(true, false);
}
// Creates an Integer singleton with default 0 in a fresh temp DB and reads it
// back once (null snapshot). Passing means creation + first read don't throw.
@Test
public void testCreateInteger() throws IOException {
tempDb(getTempDbGenerator(), db -> tempInt(db, "test", 0).get(null));
}
@Test
public void testCreateIntegerNoop() throws IOException {
tempDb(getTempDbGenerator(), db -> tempInt(db, "test", 0));
}
@Test
public void testCreateLong() throws IOException {
tempDb(getTempDbGenerator(), db -> tempLong(db, "test", 0).get(null));
}
@Test
public void testCreateSingleton() throws IOException {
tempDb(getTempDbGenerator(), db -> tempSingleton(db, "testsingleton").get(null));
}
@ParameterizedTest
@ValueSource(ints = {Integer.MIN_VALUE, -192, -2, -1, 0, 1, 2, 1292, Integer.MAX_VALUE})
public void testDefaultValueInteger(int i) throws IOException {
Assertions.assertEquals((Integer) i, tempDb(getTempDbGenerator(), db -> tempInt(db, "test", i).get(null)));
}
@ParameterizedTest
@ValueSource(longs = {Long.MIN_VALUE, -192, -2, -1, 0, 1, 2, 1292, Long.MAX_VALUE})
public void testDefaultValueLong(long i) throws IOException {
Assertions.assertEquals((Long) i, tempDb(getTempDbGenerator(), db -> tempLong(db, "test", i).get(null)));
}
@ParameterizedTest
@MethodSource("provideNumberWithRepeats")
public void testSetInteger(Integer i, Integer repeats) throws IOException {
Assertions.assertEquals(i, tempDb(getTempDbGenerator(), db -> {
var dbInt = tempInt(db, "test", 0);
for (int integer = 0; integer < repeats; integer++) {
dbInt.set((int) System.currentTimeMillis());
}
dbInt.set(i);
return dbInt.get(null);
}));
}
@ParameterizedTest
@MethodSource("provideLongNumberWithRepeats")
public void testSetLong(Long i, Integer repeats) throws IOException {
Assertions.assertEquals(i, tempDb(getTempDbGenerator(), db -> {
var dbLong = tempLong(db, "test", 0);
for (int integer = 0; integer < repeats; integer++) {
dbLong.set(System.currentTimeMillis());
}
dbLong.set(i);
return dbLong.get(null);
}));
}
@ParameterizedTest
@MethodSource("provideLongNumberWithRepeats")
public void testSetSingleton(Long i, Integer repeats) throws IOException {
Assertions.assertEquals(Long.toString(i), tempDb(getTempDbGenerator(), db -> {
var dbSingleton = tempSingleton(db, "test");
for (int integer = 0; integer < repeats; integer++) {
dbSingleton.set(Long.toString(System.currentTimeMillis()));
}
dbSingleton.set(Long.toString(i));
return dbSingleton.get(null);
}));
}
public static DatabaseInt tempInt(LLKeyValueDatabase database, String name, int defaultValue) {
return database.getInteger("ints", name, defaultValue);
}
public static DatabaseLong tempLong(LLKeyValueDatabase database, String name, long defaultValue) {
return database.getLong("longs", name, defaultValue);
}
public static DatabaseSingleton<String> tempSingleton(LLKeyValueDatabase database, String name) {
return new DatabaseSingleton<>(database.getSingleton("longs", name), Serializer.UTF8_SERIALIZER);
}
}

View File

@ -1,14 +1,10 @@
package it.cavallium.dbengine; package it.cavallium.dbengine.tests;
import static it.cavallium.dbengine.client.UninterruptibleScheduler.uninterruptibleScheduler;
import static it.cavallium.dbengine.database.LLUtils.singleOrClose;
import static it.cavallium.dbengine.lucene.searcher.GlobalQueryRewrite.NO_REWRITE; import static it.cavallium.dbengine.lucene.searcher.GlobalQueryRewrite.NO_REWRITE;
import io.netty5.util.Send; import com.google.common.collect.Streams;
import it.cavallium.dbengine.client.query.current.data.TotalHitsCount; import it.cavallium.dbengine.client.query.current.data.TotalHitsCount;
import it.cavallium.dbengine.database.LLKeyScore; import it.cavallium.dbengine.database.LLKeyScore;
import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.database.disk.LLIndexSearcher;
import it.cavallium.dbengine.database.disk.LLIndexSearchers; import it.cavallium.dbengine.database.disk.LLIndexSearchers;
import it.cavallium.dbengine.lucene.LuceneCloseable; import it.cavallium.dbengine.lucene.LuceneCloseable;
import it.cavallium.dbengine.lucene.LuceneUtils; import it.cavallium.dbengine.lucene.LuceneUtils;
@ -17,17 +13,13 @@ import it.cavallium.dbengine.lucene.searcher.LocalQueryParams;
import it.cavallium.dbengine.lucene.searcher.LocalSearcher; import it.cavallium.dbengine.lucene.searcher.LocalSearcher;
import it.cavallium.dbengine.lucene.searcher.LuceneSearchResult; import it.cavallium.dbengine.lucene.searcher.LuceneSearchResult;
import it.cavallium.dbengine.lucene.searcher.MultiSearcher; import it.cavallium.dbengine.lucene.searcher.MultiSearcher;
import it.cavallium.dbengine.lucene.searcher.ShardIndexSearcher;
import it.cavallium.dbengine.utils.SimpleResource; import it.cavallium.dbengine.utils.SimpleResource;
import java.io.IOException;
import java.io.UncheckedIOException; import java.io.UncheckedIOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.stream.Stream;
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Schedulers;
public class UnsortedUnscoredSimpleMultiSearcher implements MultiSearcher { public class UnsortedUnscoredSimpleMultiSearcher implements MultiSearcher {
@ -40,12 +32,12 @@ public class UnsortedUnscoredSimpleMultiSearcher implements MultiSearcher {
} }
@Override @Override
public Mono<LuceneSearchResult> collectMulti(Mono<LLIndexSearchers> indexSearchersMono, public LuceneSearchResult collectMulti(LLIndexSearchers indexSearchers,
LocalQueryParams queryParams, LocalQueryParams queryParams,
String keyFieldName, String keyFieldName,
GlobalQueryRewrite transformer) { GlobalQueryRewrite transformer) {
if (transformer != NO_REWRITE) { if (transformer != NO_REWRITE) {
return LuceneUtils.rewriteMulti(this, indexSearchersMono, queryParams, keyFieldName, transformer); return LuceneUtils.rewriteMulti(this, indexSearchers, queryParams, keyFieldName, transformer);
} }
if (queryParams.isSorted() && queryParams.limitLong() > 0) { if (queryParams.isSorted() && queryParams.limitLong() > 0) {
throw new UnsupportedOperationException( throw new UnsupportedOperationException(
@ -56,33 +48,28 @@ public class UnsortedUnscoredSimpleMultiSearcher implements MultiSearcher {
"Scored queries are not supported" + " by SimpleUnsortedUnscoredLuceneMultiSearcher"); "Scored queries are not supported" + " by SimpleUnsortedUnscoredLuceneMultiSearcher");
} }
return singleOrClose(indexSearchersMono, indexSearchers -> { var localQueryParams = getLocalQueryParams(queryParams);
var localQueryParams = getLocalQueryParams(queryParams); var results = indexSearchers.llShards().stream()
return Flux .map(searcher -> localSearcher.collect(searcher, localQueryParams, keyFieldName, transformer))
.fromIterable(indexSearchers.llShards()) .toList();
.flatMap(searcher -> localSearcher.collect(Mono.just(searcher), localQueryParams, keyFieldName, transformer)) List<LuceneSearchResult> resultsToDrop = new ArrayList<>(results.size());
.collectList() List<Stream<LLKeyScore>> resultsFluxes = new ArrayList<>(results.size());
.map(results -> { boolean exactTotalHitsCount = true;
List<LuceneSearchResult> resultsToDrop = new ArrayList<>(results.size()); long totalHitsCountValue = 0;
List<Flux<LLKeyScore>> resultsFluxes = new ArrayList<>(results.size()); for (LuceneSearchResult result : results) {
boolean exactTotalHitsCount = true; resultsToDrop.add(result);
long totalHitsCountValue = 0; resultsFluxes.add(result.results());
for (LuceneSearchResult result : results) { exactTotalHitsCount &= result.totalHitsCount().exact();
resultsToDrop.add(result); totalHitsCountValue += result.totalHitsCount().value();
resultsFluxes.add(result.results()); }
exactTotalHitsCount &= result.totalHitsCount().exact();
totalHitsCountValue += result.totalHitsCount().value();
}
var totalHitsCount = new TotalHitsCount(totalHitsCountValue, exactTotalHitsCount); var totalHitsCount = new TotalHitsCount(totalHitsCountValue, exactTotalHitsCount);
Flux<LLKeyScore> mergedFluxes = Flux //noinspection unchecked
.merge(resultsFluxes) Stream<LLKeyScore> mergedFluxes = (Stream<LLKeyScore>) (Stream) Streams.concat(resultsFluxes.toArray(Stream<?>[]::new))
.skip(queryParams.offsetLong()) .skip(queryParams.offsetLong())
.take(queryParams.limitLong(), true); .limit(queryParams.limitLong());
return new MyLuceneSearchResult(totalHitsCount, mergedFluxes, resultsToDrop, indexSearchers); return new MyLuceneSearchResult(totalHitsCount, mergedFluxes, resultsToDrop, indexSearchers);
});
});
} }
private LocalQueryParams getLocalQueryParams(LocalQueryParams queryParams) { private LocalQueryParams getLocalQueryParams(LocalQueryParams queryParams) {
@ -107,7 +94,7 @@ public class UnsortedUnscoredSimpleMultiSearcher implements MultiSearcher {
private final LLIndexSearchers indexSearchers; private final LLIndexSearchers indexSearchers;
public MyLuceneSearchResult(TotalHitsCount totalHitsCount, public MyLuceneSearchResult(TotalHitsCount totalHitsCount,
Flux<LLKeyScore> mergedFluxes, Stream<LLKeyScore> mergedFluxes,
List<LuceneSearchResult> resultsToDrop, List<LuceneSearchResult> resultsToDrop,
LLIndexSearchers indexSearchers) { LLIndexSearchers indexSearchers) {
super(totalHitsCount, mergedFluxes); super(totalHitsCount, mergedFluxes);

View File

@ -0,0 +1,19 @@
// Module descriptor for the dbengine test sources.
module dbengine.tests {
	// JUnit 5 API and parameterized-test support.
	requires org.junit.jupiter.api;
	// Module under test.
	requires dbengine;
	requires data.generator.runtime;
	// Assertion library used alongside JUnit's own assertions.
	requires org.assertj.core;
	// Lucene core and query parsing, used by the search-related tests.
	requires org.apache.lucene.core;
	requires it.unimi.dsi.fastutil;
	requires org.apache.lucene.queryparser;
	// Netty common utilities and buffers.
	requires io.netty.common;
	requires org.jetbrains.annotations;
	// Metrics facade.
	requires micrometer.core;
	requires org.junit.jupiter.params;
	requires com.google.common;
	requires org.apache.logging.log4j;
	requires io.netty.buffer;
	requires org.apache.commons.lang3;
	// RocksDB JNI bindings backing the on-disk databases.
	requires rocksdbjni;
	// Opened (not just exported) so the JUnit launcher can access tests reflectively.
	opens it.cavallium.dbengine.tests;
}