Fix getMulti

This commit is contained in:
Andrea Cavalli 2021-07-23 15:20:33 +02:00
parent aa1aa7a6fb
commit abde1d1aab
11 changed files with 71 additions and 35 deletions

View File

@@ -7,6 +7,7 @@ import it.cavallium.dbengine.database.LLSnapshottable;
import it.cavallium.dbengine.database.collections.ValueGetter; import it.cavallium.dbengine.database.collections.ValueGetter;
import it.cavallium.dbengine.database.collections.ValueTransformer; import it.cavallium.dbengine.database.collections.ValueTransformer;
import java.util.Map.Entry; import java.util.Map.Entry;
import java.util.Optional;
import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable; import org.jetbrains.annotations.Nullable;
import reactor.core.publisher.Flux; import reactor.core.publisher.Flux;
@@ -92,10 +93,12 @@ public interface LuceneIndex<T, U> extends LLSnapshottable {
private static <T, U> ValueTransformer<T, U> getValueGetterTransformer(ValueGetter<T, U> valueGetter) { private static <T, U> ValueTransformer<T, U> getValueGetterTransformer(ValueGetter<T, U> valueGetter) {
return new ValueTransformer<T, U>() { return new ValueTransformer<T, U>() {
@Override @Override
public <X> Flux<Tuple3<X, T, U>> transform(Flux<Tuple2<X, T>> keys) { public <X> Flux<Tuple3<X, T, Optional<U>>> transform(Flux<Tuple2<X, T>> keys) {
return keys.flatMapSequential(key -> valueGetter return keys.flatMapSequential(key -> valueGetter
.get(key.getT2()) .get(key.getT2())
.map(result -> Tuples.of(key.getT1(), key.getT2(), result))); .map(result -> Tuples.of(key.getT1(), key.getT2(), Optional.of(result)))
.switchIfEmpty(Mono.fromSupplier(() -> Tuples.of(key.getT1(), key.getT2(), Optional.empty())))
);
} }
}; };
} }

View File

@@ -106,8 +106,9 @@ public class LuceneIndexImpl<T, U> implements LuceneIndex<T, U> {
.flatMapSequential(signal -> signal.key().map(indicizer::getKey).map(key -> Tuples.of(signal.score(), key))); .flatMapSequential(signal -> signal.key().map(indicizer::getKey).map(key -> Tuples.of(signal.score(), key)));
var resultItemsFlux = valueTransformer var resultItemsFlux = valueTransformer
.transform(scoresWithKeysFlux) .transform(scoresWithKeysFlux)
.filter(tuple3 -> tuple3.getT3().isPresent())
.map(tuple3 -> new SearchResultItem<>(Mono.just(tuple3.getT2()), .map(tuple3 -> new SearchResultItem<>(Mono.just(tuple3.getT2()),
Mono.just(tuple3.getT3()), Mono.just(tuple3.getT3().orElseThrow()),
tuple3.getT1() tuple3.getT1()
)); ));
return Mono.fromCallable(() -> new SearchResult<>(resultItemsFlux, return Mono.fromCallable(() -> new SearchResult<>(resultItemsFlux,

View File

@@ -5,6 +5,7 @@ import io.netty.buffer.ByteBufAllocator;
import it.cavallium.dbengine.client.BadBlock; import it.cavallium.dbengine.client.BadBlock;
import java.util.List; import java.util.List;
import java.util.Map.Entry; import java.util.Map.Entry;
import java.util.Optional;
import java.util.function.BiFunction; import java.util.function.BiFunction;
import java.util.function.Function; import java.util.function.Function;
import org.jetbrains.annotations.Nullable; import org.jetbrains.annotations.Nullable;
@@ -60,11 +61,11 @@ public interface LLDictionary extends LLKeyValueDatabaseStructure {
Mono<ByteBuf> remove(ByteBuf key, LLDictionaryResultType resultType); Mono<ByteBuf> remove(ByteBuf key, LLDictionaryResultType resultType);
<K> Flux<Tuple3<K, ByteBuf, ByteBuf>> getMulti(@Nullable LLSnapshot snapshot, <K> Flux<Tuple3<K, ByteBuf, Optional<ByteBuf>>> getMulti(@Nullable LLSnapshot snapshot,
Flux<Tuple2<K, ByteBuf>> keys, Flux<Tuple2<K, ByteBuf>> keys,
boolean existsAlmostCertainly); boolean existsAlmostCertainly);
default <K> Flux<Tuple3<K, ByteBuf, ByteBuf>> getMulti(@Nullable LLSnapshot snapshot, Flux<Tuple2<K, ByteBuf>> keys) { default <K> Flux<Tuple3<K, ByteBuf, Optional<ByteBuf>>> getMulti(@Nullable LLSnapshot snapshot, Flux<Tuple2<K, ByteBuf>> keys) {
return getMulti(snapshot, keys, false); return getMulti(snapshot, keys, false);
} }

View File

@@ -18,6 +18,7 @@ import java.util.HashMap;
import java.util.Map; import java.util.Map;
import java.util.Map.Entry; import java.util.Map.Entry;
import java.util.Objects; import java.util.Objects;
import java.util.Optional;
import java.util.function.BiFunction; import java.util.function.BiFunction;
import java.util.function.Function; import java.util.function.Function;
import org.jetbrains.annotations.Nullable; import org.jetbrains.annotations.Nullable;
@@ -315,7 +316,7 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
} }
@Override @Override
public Flux<Entry<T, U>> getMulti(@Nullable CompositeSnapshot snapshot, Flux<T> keys, boolean existsAlmostCertainly) { public Flux<Entry<T, Optional<U>>> getMulti(@Nullable CompositeSnapshot snapshot, Flux<T> keys, boolean existsAlmostCertainly) {
return Flux return Flux
.defer(() -> dictionary .defer(() -> dictionary
.getMulti(resolveSnapshot(snapshot), keys.flatMap(keySuffix -> Mono.fromCallable(() -> { .getMulti(resolveSnapshot(snapshot), keys.flatMap(keySuffix -> Mono.fromCallable(() -> {
@@ -330,7 +331,7 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
.flatMapSequential(entry -> { .flatMapSequential(entry -> {
entry.getT2().release(); entry.getT2().release();
return Mono return Mono
.fromCallable(() -> Map.entry(entry.getT1(), deserialize(entry.getT3()))); .fromCallable(() -> Map.entry(entry.getT1(), entry.getT3().map(this::deserialize)));
}); });
} }

View File

@@ -120,18 +120,19 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
/** /**
* GetMulti must return the elements in sequence! * GetMulti must return the elements in sequence!
*/ */
default Flux<Entry<T, U>> getMulti(@Nullable CompositeSnapshot snapshot, Flux<T> keys, boolean existsAlmostCertainly) { default Flux<Entry<T, Optional<U>>> getMulti(@Nullable CompositeSnapshot snapshot, Flux<T> keys, boolean existsAlmostCertainly) {
return keys return keys
.flatMapSequential(key -> this .flatMapSequential(key -> this
.getValue(snapshot, key, existsAlmostCertainly) .getValue(snapshot, key, existsAlmostCertainly)
.map(value -> Map.entry(key, value)) .map(value -> Map.entry(key, Optional.of(value)))
.switchIfEmpty(Mono.fromSupplier(() -> Map.entry(key, Optional.empty())))
); );
} }
/** /**
* GetMulti must return the elements in sequence! * GetMulti must return the elements in sequence!
*/ */
default Flux<Entry<T, U>> getMulti(@Nullable CompositeSnapshot snapshot, Flux<T> keys) { default Flux<Entry<T, Optional<U>>> getMulti(@Nullable CompositeSnapshot snapshot, Flux<T> keys) {
return getMulti(snapshot, keys, false); return getMulti(snapshot, keys, false);
} }
@@ -279,7 +280,7 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
default ValueTransformer<T, U> getAsyncDbValueTransformer(@Nullable CompositeSnapshot snapshot) { default ValueTransformer<T, U> getAsyncDbValueTransformer(@Nullable CompositeSnapshot snapshot) {
return new ValueTransformer<>() { return new ValueTransformer<>() {
@Override @Override
public <X> Flux<Tuple3<X, T, U>> transform(Flux<Tuple2<X, T>> keys) { public <X> Flux<Tuple3<X, T, Optional<U>>> transform(Flux<Tuple2<X, T>> keys) {
return Flux.defer(() -> { return Flux.defer(() -> {
ConcurrentLinkedQueue<X> extraValues = new ConcurrentLinkedQueue<>(); ConcurrentLinkedQueue<X> extraValues = new ConcurrentLinkedQueue<>();
return getMulti(snapshot, keys.map(key -> { return getMulti(snapshot, keys.map(key -> {

View File

@@ -1,6 +1,7 @@
package it.cavallium.dbengine.database.collections; package it.cavallium.dbengine.database.collections;
import java.util.Map.Entry; import java.util.Map.Entry;
import java.util.Optional;
import reactor.core.publisher.Flux; import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono; import reactor.core.publisher.Mono;
import reactor.util.function.Tuple2; import reactor.util.function.Tuple2;
@@ -11,5 +12,5 @@ public interface ValueTransformer<KEY, VALUE> {
/** /**
* Can return Flux error IOException * Can return Flux error IOException
*/ */
<X> Flux<Tuple3<X, KEY, VALUE>> transform(Flux<Tuple2<X, KEY>> keys); <X> Flux<Tuple3<X, KEY, Optional<VALUE>>> transform(Flux<Tuple2<X, KEY>> keys);
} }

View File

@@ -958,7 +958,7 @@ public class LLLocalDictionary implements LLDictionary {
} }
@Override @Override
public <K> Flux<Tuple3<K, ByteBuf, ByteBuf>> getMulti(@Nullable LLSnapshot snapshot, public <K> Flux<Tuple3<K, ByteBuf, Optional<ByteBuf>>> getMulti(@Nullable LLSnapshot snapshot,
Flux<Tuple2<K, ByteBuf>> keys, Flux<Tuple2<K, ByteBuf>> keys,
boolean existsAlmostCertainly) { boolean existsAlmostCertainly) {
return keys return keys
@@ -997,16 +997,20 @@ public class LLLocalDictionary implements LLDictionary {
try { try {
var columnFamilyHandles = new RepeatedElementList<>(cfh, keysWindow.size()); var columnFamilyHandles = new RepeatedElementList<>(cfh, keysWindow.size());
var results = db.multiGetAsList(resolveSnapshot(snapshot), columnFamilyHandles, LLUtils.toArray(keyBufsWindow)); var results = db.multiGetAsList(resolveSnapshot(snapshot), columnFamilyHandles, LLUtils.toArray(keyBufsWindow));
var mappedResults = new ArrayList<Tuple3<K, ByteBuf, ByteBuf>>(results.size()); var mappedResults = new ArrayList<Tuple3<K, ByteBuf, Optional<ByteBuf>>>(results.size());
for (int i = 0; i < results.size(); i++) { for (int i = 0; i < results.size(); i++) {
var val = results.get(i); byte[] val = results.get(i);
Optional<ByteBuf> valueOpt;
if (val != null) { if (val != null) {
results.set(i, null); results.set(i, null);
mappedResults.add(Tuples.of(keysWindow.get(i).getT1(), valueOpt = Optional.of(wrappedBuffer(val));
keyBufsWindow.get(i).retain(), } else {
wrappedBuffer(val) valueOpt = Optional.empty();
));
} }
mappedResults.add(Tuples.of(keysWindow.get(i).getT1(),
keyBufsWindow.get(i).retain(),
valueOpt
));
} }
return mappedResults; return mappedResults;
} finally { } finally {
@@ -1033,9 +1037,9 @@ public class LLLocalDictionary implements LLDictionary {
}) })
.doOnDiscard(Tuple3.class, discardedEntry -> { .doOnDiscard(Tuple3.class, discardedEntry -> {
//noinspection unchecked //noinspection unchecked
var entry = (Tuple3<K, ByteBuf, ByteBuf>) discardedEntry; var entry = (Tuple3<K, ByteBuf, Optional<ByteBuf>>) discardedEntry;
entry.getT2().release(); entry.getT2().release();
entry.getT3().release(); entry.getT3().ifPresent(ReferenceCounted::release);
}); });
} }

View File

@@ -91,7 +91,7 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
Schedulers.DEFAULT_BOUNDED_ELASTIC_QUEUESIZE, Schedulers.DEFAULT_BOUNDED_ELASTIC_QUEUESIZE,
"lucene", "lucene",
Integer.MAX_VALUE, Integer.MAX_VALUE,
false true
); );
// Scheduler used to get callback values of LuceneStreamSearcher without creating deadlocks // Scheduler used to get callback values of LuceneStreamSearcher without creating deadlocks
private final Scheduler luceneSearcherScheduler = Schedulers.newBoundedElastic( private final Scheduler luceneSearcherScheduler = Schedulers.newBoundedElastic(
@@ -99,16 +99,10 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
Schedulers.DEFAULT_BOUNDED_ELASTIC_QUEUESIZE, Schedulers.DEFAULT_BOUNDED_ELASTIC_QUEUESIZE,
"lucene-searcher", "lucene-searcher",
60, 60,
false true
); );
// Scheduler used to get callback values of LuceneStreamSearcher without creating deadlocks // Scheduler used to get callback values of LuceneStreamSearcher without creating deadlocks
private final Scheduler luceneWriterScheduler = Schedulers.newBoundedElastic( private final Scheduler luceneWriterScheduler;
4,
Schedulers.DEFAULT_BOUNDED_ELASTIC_QUEUESIZE,
"lucene-writer",
60,
false
);
private final String luceneIndexName; private final String luceneIndexName;
private final SnapshotDeletionPolicy snapshotter; private final SnapshotDeletionPolicy snapshotter;
@@ -215,12 +209,20 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
indexWriterConfig.setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND); indexWriterConfig.setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND);
indexWriterConfig.setIndexDeletionPolicy(snapshotter); indexWriterConfig.setIndexDeletionPolicy(snapshotter);
indexWriterConfig.setCommitOnClose(true); indexWriterConfig.setCommitOnClose(true);
int writerSchedulerMaxThreadCount;
MergeScheduler mergeScheduler; MergeScheduler mergeScheduler;
if (lowMemory) { if (lowMemory) {
mergeScheduler = new SerialMergeScheduler(); mergeScheduler = new SerialMergeScheduler();
writerSchedulerMaxThreadCount = 1;
} else { } else {
var concurrentMergeScheduler = new ConcurrentMergeScheduler(); var concurrentMergeScheduler = new ConcurrentMergeScheduler();
concurrentMergeScheduler.enableAutoIOThrottle(); concurrentMergeScheduler.setDefaultMaxMergesAndThreads(false);
if (luceneOptions.inMemory()) {
concurrentMergeScheduler.disableAutoIOThrottle();
} else {
concurrentMergeScheduler.enableAutoIOThrottle();
}
writerSchedulerMaxThreadCount = concurrentMergeScheduler.getMaxThreadCount();
mergeScheduler = concurrentMergeScheduler; mergeScheduler = concurrentMergeScheduler;
} }
indexWriterConfig.setMergeScheduler(mergeScheduler); indexWriterConfig.setMergeScheduler(mergeScheduler);
@@ -234,6 +236,14 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
new SearcherFactory() new SearcherFactory()
); );
this.luceneWriterScheduler = Schedulers.newBoundedElastic(
writerSchedulerMaxThreadCount,
Schedulers.DEFAULT_BOUNDED_ELASTIC_QUEUESIZE,
"lucene-writer",
60,
true
);
// Create scheduled tasks lifecycle manager // Create scheduled tasks lifecycle manager
this.scheduledTasksLifecycle = new ScheduledTaskLifecycle(); this.scheduledTasksLifecycle = new ScheduledTaskLifecycle();

View File

@@ -17,6 +17,7 @@ import java.io.IOException;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Map.Entry; import java.util.Map.Entry;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentSkipListMap; import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.atomic.AtomicReference;
@@ -210,17 +211,17 @@ public class LLMemoryDictionary implements LLDictionary {
} }
@Override @Override
public <K> Flux<Tuple3<K, ByteBuf, ByteBuf>> getMulti(@Nullable LLSnapshot snapshot, public <K> Flux<Tuple3<K, ByteBuf, Optional<ByteBuf>>> getMulti(@Nullable LLSnapshot snapshot,
Flux<Tuple2<K, ByteBuf>> keys, Flux<Tuple2<K, ByteBuf>> keys,
boolean existsAlmostCertainly) { boolean existsAlmostCertainly) {
return keys return keys
.flatMapSequential(key -> { .flatMapSequential(key -> {
try { try {
ByteList v = snapshots.get(resolveSnapshot(snapshot)).get(k(key.getT2())); ByteList v = snapshots.get(resolveSnapshot(snapshot)).get(k(key.getT2()));
if (v == null) { if (v != null) {
return Flux.empty(); return Flux.just(Tuples.of(key.getT1(), key.getT2().retain(), Optional.of(kk(v))));
} else { } else {
return Flux.just(Tuples.of(key.getT1(), key.getT2().retain(), kk(v))); return Flux.just(Tuples.of(key.getT1(), key.getT2().retain(), Optional.empty()));
} }
} finally { } finally {
key.getT2().release(); key.getT2().release();

View File

@@ -8,6 +8,7 @@ import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Map.Entry; import java.util.Map.Entry;
import java.util.Objects; import java.util.Objects;
import java.util.Optional;
import java.util.Set; import java.util.Set;
import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors; import java.util.stream.Collectors;
@@ -341,6 +342,8 @@ public class TestDictionaryMap {
) )
.doAfterTerminate(map::release) .doAfterTerminate(map::release)
) )
.filter(k -> k.getValue().isPresent())
.map(k -> Map.entry(k.getKey(), k.getValue().orElseThrow()))
)); ));
if (shouldFail) { if (shouldFail) {
stpVer.verifyError(); stpVer.verifyError();
@@ -365,6 +368,8 @@ public class TestDictionaryMap {
.thenMany(map.getMulti(null, Flux.fromIterable(entries.keySet()))) .thenMany(map.getMulti(null, Flux.fromIterable(entries.keySet())))
.doAfterTerminate(map::release) .doAfterTerminate(map::release)
) )
.filter(k -> k.getValue().isPresent())
.map(k -> Map.entry(k.getKey(), k.getValue().orElseThrow()))
)); ));
if (shouldFail) { if (shouldFail) {
stpVer.verifyError(); stpVer.verifyError();
@@ -417,6 +422,8 @@ public class TestDictionaryMap {
) )
.doAfterTerminate(map::release) .doAfterTerminate(map::release)
) )
.filter(k -> k.getValue().isPresent())
.map(k -> Map.entry(k.getKey(), k.getValue().orElseThrow()))
)); ));
if (shouldFail) { if (shouldFail) {
stpVer.verifyError(); stpVer.verifyError();

View File

@@ -685,6 +685,8 @@ public class TestDictionaryMapDeep {
) )
.doAfterTerminate(map::release) .doAfterTerminate(map::release)
) )
.filter(k -> k.getValue().isPresent())
.map(k -> Map.entry(k.getKey(), k.getValue().orElseThrow()))
)); ));
if (shouldFail) { if (shouldFail) {
stpVer.verifyError(); stpVer.verifyError();
@@ -709,6 +711,8 @@ public class TestDictionaryMapDeep {
.thenMany(map.getMulti(null, Flux.fromIterable(entries.keySet()))) .thenMany(map.getMulti(null, Flux.fromIterable(entries.keySet())))
.doAfterTerminate(map::release) .doAfterTerminate(map::release)
) )
.filter(k -> k.getValue().isPresent())
.map(k -> Map.entry(k.getKey(), k.getValue().orElseThrow()))
)); ));
if (shouldFail) { if (shouldFail) {
stpVer.verifyError(); stpVer.verifyError();
@@ -761,6 +765,8 @@ public class TestDictionaryMapDeep {
) )
.doAfterTerminate(map::release) .doAfterTerminate(map::release)
) )
.filter(k -> k.getValue().isPresent())
.map(k -> Map.entry(k.getKey(), k.getValue().orElseThrow()))
)); ));
if (shouldFail) { if (shouldFail) {
stpVer.verifyError(); stpVer.verifyError();