Fix getMulti

Andrea Cavalli 2021-07-23 15:20:33 +02:00
parent aa1aa7a6fb
commit abde1d1aab
11 changed files with 71 additions and 35 deletions

View File

@@ -7,6 +7,7 @@ import it.cavallium.dbengine.database.LLSnapshottable;
import it.cavallium.dbengine.database.collections.ValueGetter;
import it.cavallium.dbengine.database.collections.ValueTransformer;
import java.util.Map.Entry;
import java.util.Optional;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import reactor.core.publisher.Flux;
@@ -92,10 +93,12 @@ public interface LuceneIndex<T, U> extends LLSnapshottable {
private static <T, U> ValueTransformer<T, U> getValueGetterTransformer(ValueGetter<T, U> valueGetter) {
return new ValueTransformer<T, U>() {
@Override
public <X> Flux<Tuple3<X, T, U>> transform(Flux<Tuple2<X, T>> keys) {
public <X> Flux<Tuple3<X, T, Optional<U>>> transform(Flux<Tuple2<X, T>> keys) {
return keys.flatMapSequential(key -> valueGetter
.get(key.getT2())
.map(result -> Tuples.of(key.getT1(), key.getT2(), result)));
.map(result -> Tuples.of(key.getT1(), key.getT2(), Optional.of(result)))
.switchIfEmpty(Mono.fromSupplier(() -> Tuples.of(key.getT1(), key.getT2(), Optional.empty())))
);
}
};
}
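
The LuceneIndexImpl change in the next file consumes this transformer by filtering on presence and unwrapping. As a standalone sketch of that pattern (class and method names here are invented for illustration, not taken from the repository):

import java.util.Optional;
import reactor.core.publisher.Flux;
import reactor.util.function.Tuple3;
import reactor.util.function.Tuples;

class PresentValues {
    // Drops tuples whose Optional value is empty and unwraps the rest.
    static <X, T, U> Flux<Tuple3<X, T, U>> keepPresent(Flux<Tuple3<X, T, Optional<U>>> transformed) {
        return transformed
                .filter(tuple -> tuple.getT3().isPresent())
                .map(tuple -> Tuples.of(tuple.getT1(), tuple.getT2(), tuple.getT3().orElseThrow()));
    }
}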

View File

@@ -106,8 +106,9 @@ public class LuceneIndexImpl<T, U> implements LuceneIndex<T, U> {
.flatMapSequential(signal -> signal.key().map(indicizer::getKey).map(key -> Tuples.of(signal.score(), key)));
var resultItemsFlux = valueTransformer
.transform(scoresWithKeysFlux)
.filter(tuple3 -> tuple3.getT3().isPresent())
.map(tuple3 -> new SearchResultItem<>(Mono.just(tuple3.getT2()),
Mono.just(tuple3.getT3()),
Mono.just(tuple3.getT3().orElseThrow()),
tuple3.getT1()
));
return Mono.fromCallable(() -> new SearchResult<>(resultItemsFlux,

View File

@@ -5,6 +5,7 @@ import io.netty.buffer.ByteBufAllocator;
import it.cavallium.dbengine.client.BadBlock;
import java.util.List;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.function.BiFunction;
import java.util.function.Function;
import org.jetbrains.annotations.Nullable;
@@ -60,11 +61,11 @@ public interface LLDictionary extends LLKeyValueDatabaseStructure {
Mono<ByteBuf> remove(ByteBuf key, LLDictionaryResultType resultType);
<K> Flux<Tuple3<K, ByteBuf, ByteBuf>> getMulti(@Nullable LLSnapshot snapshot,
<K> Flux<Tuple3<K, ByteBuf, Optional<ByteBuf>>> getMulti(@Nullable LLSnapshot snapshot,
Flux<Tuple2<K, ByteBuf>> keys,
boolean existsAlmostCertainly);
default <K> Flux<Tuple3<K, ByteBuf, ByteBuf>> getMulti(@Nullable LLSnapshot snapshot, Flux<Tuple2<K, ByteBuf>> keys) {
default <K> Flux<Tuple3<K, ByteBuf, Optional<ByteBuf>>> getMulti(@Nullable LLSnapshot snapshot, Flux<Tuple2<K, ByteBuf>> keys) {
return getMulti(snapshot, keys, false);
}
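
Because the value buffer is now optional, callers release it only when it is present, while the key buffer is always released. A minimal consumption sketch under that assumption (the helper name is invented and not part of this diff):

import io.netty.buffer.ByteBuf;
import io.netty.util.ReferenceCounted;
import java.util.Optional;
import reactor.core.publisher.Flux;
import reactor.util.function.Tuple3;

class GetMultiConsumer {
    // Emits the keys that were found, releasing every buffer exactly once.
    static <K> Flux<K> foundKeys(Flux<Tuple3<K, ByteBuf, Optional<ByteBuf>>> results) {
        return results.handle((tuple, sink) -> {
            try {
                if (tuple.getT3().isPresent()) {
                    sink.next(tuple.getT1());
                }
            } finally {
                tuple.getT2().release();
                tuple.getT3().ifPresent(ReferenceCounted::release);
            }
        });
    }
}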

View File

@@ -18,6 +18,7 @@ import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Optional;
import java.util.function.BiFunction;
import java.util.function.Function;
import org.jetbrains.annotations.Nullable;
@@ -315,7 +316,7 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
}
@Override
public Flux<Entry<T, U>> getMulti(@Nullable CompositeSnapshot snapshot, Flux<T> keys, boolean existsAlmostCertainly) {
public Flux<Entry<T, Optional<U>>> getMulti(@Nullable CompositeSnapshot snapshot, Flux<T> keys, boolean existsAlmostCertainly) {
return Flux
.defer(() -> dictionary
.getMulti(resolveSnapshot(snapshot), keys.flatMap(keySuffix -> Mono.fromCallable(() -> {
@@ -330,7 +331,7 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
.flatMapSequential(entry -> {
entry.getT2().release();
return Mono
.fromCallable(() -> Map.entry(entry.getT1(), deserialize(entry.getT3())));
.fromCallable(() -> Map.entry(entry.getT1(), entry.getT3().map(this::deserialize)));
});
}
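
The Optional.map call above keeps the deserialization step unchanged while propagating absence. A trivial standalone illustration (the integer deserializer is invented for the example):

import java.util.Optional;

class OptionalMapExample {
    static Integer deserialize(String raw) {
        return Integer.parseInt(raw);
    }

    public static void main(String[] args) {
        System.out.println(Optional.of("42").map(OptionalMapExample::deserialize));        // Optional[42]
        System.out.println(Optional.<String>empty().map(OptionalMapExample::deserialize)); // Optional.empty
    }
}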

View File

@@ -120,18 +120,19 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
/**
* GetMulti must return the elements in sequence!
*/
default Flux<Entry<T, U>> getMulti(@Nullable CompositeSnapshot snapshot, Flux<T> keys, boolean existsAlmostCertainly) {
default Flux<Entry<T, Optional<U>>> getMulti(@Nullable CompositeSnapshot snapshot, Flux<T> keys, boolean existsAlmostCertainly) {
return keys
.flatMapSequential(key -> this
.getValue(snapshot, key, existsAlmostCertainly)
.map(value -> Map.entry(key, value))
.map(value -> Map.entry(key, Optional.of(value)))
.switchIfEmpty(Mono.fromSupplier(() -> Map.entry(key, Optional.empty())))
);
}
/**
* GetMulti must return the elements in sequence!
*/
default Flux<Entry<T, U>> getMulti(@Nullable CompositeSnapshot snapshot, Flux<T> keys) {
default Flux<Entry<T, Optional<U>>> getMulti(@Nullable CompositeSnapshot snapshot, Flux<T> keys) {
return getMulti(snapshot, keys, false);
}
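
With Optional-valued entries, callers can tell which keys were missing instead of having them silently dropped. A usage sketch that assumes only the return type above (the helper itself is not part of this diff):

import java.util.List;
import java.util.Map;
import java.util.Optional;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

class MissingKeys {
    // Collects the keys whose Optional value came back empty from getMulti.
    static <T, U> Mono<List<T>> missingKeys(Flux<Map.Entry<T, Optional<U>>> getMultiResults) {
        return getMultiResults
                .filter(entry -> entry.getValue().isEmpty())
                .map(Map.Entry::getKey)
                .collectList();
    }
}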
@@ -279,7 +280,7 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
default ValueTransformer<T, U> getAsyncDbValueTransformer(@Nullable CompositeSnapshot snapshot) {
return new ValueTransformer<>() {
@Override
public <X> Flux<Tuple3<X, T, U>> transform(Flux<Tuple2<X, T>> keys) {
public <X> Flux<Tuple3<X, T, Optional<U>>> transform(Flux<Tuple2<X, T>> keys) {
return Flux.defer(() -> {
ConcurrentLinkedQueue<X> extraValues = new ConcurrentLinkedQueue<>();
return getMulti(snapshot, keys.map(key -> {

View File

@@ -1,6 +1,7 @@
package it.cavallium.dbengine.database.collections;
import java.util.Map.Entry;
import java.util.Optional;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.util.function.Tuple2;
@@ -11,5 +12,5 @@ public interface ValueTransformer<KEY, VALUE> {
/**
* Can return Flux error IOException
*/
<X> Flux<Tuple3<X, KEY, VALUE>> transform(Flux<Tuple2<X, KEY>> keys);
<X> Flux<Tuple3<X, KEY, Optional<VALUE>>> transform(Flux<Tuple2<X, KEY>> keys);
}
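
A minimal in-memory implementation of the updated contract, backed by a plain Map (this class is illustrative only and assumes it lives in the same package as ValueTransformer):

import java.util.Map;
import java.util.Optional;
import reactor.core.publisher.Flux;
import reactor.util.function.Tuple2;
import reactor.util.function.Tuple3;
import reactor.util.function.Tuples;

class MapBackedValueTransformer<KEY, VALUE> implements ValueTransformer<KEY, VALUE> {
    private final Map<KEY, VALUE> backing;

    MapBackedValueTransformer(Map<KEY, VALUE> backing) {
        this.backing = backing;
    }

    @Override
    public <X> Flux<Tuple3<X, KEY, Optional<VALUE>>> transform(Flux<Tuple2<X, KEY>> keys) {
        // One output tuple per input key; missing keys become Optional.empty() instead of being dropped.
        return keys.map(key -> Tuples.of(key.getT1(), key.getT2(), Optional.ofNullable(backing.get(key.getT2()))));
    }
}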

View File

@@ -958,7 +958,7 @@ public class LLLocalDictionary implements LLDictionary {
}
@Override
public <K> Flux<Tuple3<K, ByteBuf, ByteBuf>> getMulti(@Nullable LLSnapshot snapshot,
public <K> Flux<Tuple3<K, ByteBuf, Optional<ByteBuf>>> getMulti(@Nullable LLSnapshot snapshot,
Flux<Tuple2<K, ByteBuf>> keys,
boolean existsAlmostCertainly) {
return keys
@@ -997,16 +997,20 @@ public class LLLocalDictionary implements LLDictionary {
try {
var columnFamilyHandles = new RepeatedElementList<>(cfh, keysWindow.size());
var results = db.multiGetAsList(resolveSnapshot(snapshot), columnFamilyHandles, LLUtils.toArray(keyBufsWindow));
var mappedResults = new ArrayList<Tuple3<K, ByteBuf, ByteBuf>>(results.size());
var mappedResults = new ArrayList<Tuple3<K, ByteBuf, Optional<ByteBuf>>>(results.size());
for (int i = 0; i < results.size(); i++) {
var val = results.get(i);
byte[] val = results.get(i);
Optional<ByteBuf> valueOpt;
if (val != null) {
results.set(i, null);
mappedResults.add(Tuples.of(keysWindow.get(i).getT1(),
keyBufsWindow.get(i).retain(),
wrappedBuffer(val)
));
valueOpt = Optional.of(wrappedBuffer(val));
} else {
valueOpt = Optional.empty();
}
mappedResults.add(Tuples.of(keysWindow.get(i).getT1(),
keyBufsWindow.get(i).retain(),
valueOpt
));
}
return mappedResults;
} finally {
@@ -1033,9 +1037,9 @@ public class LLLocalDictionary implements LLDictionary {
})
.doOnDiscard(Tuple3.class, discardedEntry -> {
//noinspection unchecked
var entry = (Tuple3<K, ByteBuf, ByteBuf>) discardedEntry;
var entry = (Tuple3<K, ByteBuf, Optional<ByteBuf>>) discardedEntry;
entry.getT2().release();
entry.getT3().release();
entry.getT3().ifPresent(ReferenceCounted::release);
});
}
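
The core of the change is the null-to-Optional mapping around RocksDB's multiGetAsList result. The same idea in isolation (the helper name is invented; Unpooled.wrappedBuffer stands in for the statically imported wrappedBuffer used above):

import static io.netty.buffer.Unpooled.wrappedBuffer;

import io.netty.buffer.ByteBuf;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;

class MultiGetResults {
    // multiGetAsList returns null for keys that were not found; wrap each entry accordingly.
    static List<Optional<ByteBuf>> wrapValues(List<byte[]> rawValues) {
        var wrapped = new ArrayList<Optional<ByteBuf>>(rawValues.size());
        for (byte[] value : rawValues) {
            wrapped.add(value != null ? Optional.of(wrappedBuffer(value)) : Optional.empty());
        }
        return wrapped;
    }
}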

View File

@@ -91,7 +91,7 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
Schedulers.DEFAULT_BOUNDED_ELASTIC_QUEUESIZE,
"lucene",
Integer.MAX_VALUE,
false
true
);
// Scheduler used to get callback values of LuceneStreamSearcher without creating deadlocks
private final Scheduler luceneSearcherScheduler = Schedulers.newBoundedElastic(
@@ -99,16 +99,10 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
Schedulers.DEFAULT_BOUNDED_ELASTIC_QUEUESIZE,
"lucene-searcher",
60,
false
true
);
// Scheduler used to get callback values of LuceneStreamSearcher without creating deadlocks
private final Scheduler luceneWriterScheduler = Schedulers.newBoundedElastic(
4,
Schedulers.DEFAULT_BOUNDED_ELASTIC_QUEUESIZE,
"lucene-writer",
60,
false
);
private final Scheduler luceneWriterScheduler;
private final String luceneIndexName;
private final SnapshotDeletionPolicy snapshotter;
@@ -215,12 +209,20 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
indexWriterConfig.setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND);
indexWriterConfig.setIndexDeletionPolicy(snapshotter);
indexWriterConfig.setCommitOnClose(true);
int writerSchedulerMaxThreadCount;
MergeScheduler mergeScheduler;
if (lowMemory) {
mergeScheduler = new SerialMergeScheduler();
writerSchedulerMaxThreadCount = 1;
} else {
var concurrentMergeScheduler = new ConcurrentMergeScheduler();
concurrentMergeScheduler.enableAutoIOThrottle();
concurrentMergeScheduler.setDefaultMaxMergesAndThreads(false);
if (luceneOptions.inMemory()) {
concurrentMergeScheduler.disableAutoIOThrottle();
} else {
concurrentMergeScheduler.enableAutoIOThrottle();
}
writerSchedulerMaxThreadCount = concurrentMergeScheduler.getMaxThreadCount();
mergeScheduler = concurrentMergeScheduler;
}
indexWriterConfig.setMergeScheduler(mergeScheduler);
@@ -234,6 +236,14 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
new SearcherFactory()
);
this.luceneWriterScheduler = Schedulers.newBoundedElastic(
writerSchedulerMaxThreadCount,
Schedulers.DEFAULT_BOUNDED_ELASTIC_QUEUESIZE,
"lucene-writer",
60,
true
);
// Create scheduled tasks lifecycle manager
this.scheduledTasksLifecycle = new ScheduledTaskLifecycle();
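
The writer scheduler is now sized after the chosen merge scheduler and created as a daemon scheduler. A condensed sketch of that selection logic, based only on the lines above (the wrapper class and method are invented for illustration):

import org.apache.lucene.index.ConcurrentMergeScheduler;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.MergeScheduler;
import org.apache.lucene.index.SerialMergeScheduler;
import reactor.core.scheduler.Scheduler;
import reactor.core.scheduler.Schedulers;

class LuceneWriterSchedulerFactory {
    static Scheduler configure(IndexWriterConfig indexWriterConfig, boolean lowMemory, boolean inMemory) {
        int writerSchedulerMaxThreadCount;
        MergeScheduler mergeScheduler;
        if (lowMemory) {
            // Low-memory mode: serial merges, a single writer thread.
            mergeScheduler = new SerialMergeScheduler();
            writerSchedulerMaxThreadCount = 1;
        } else {
            var concurrentMergeScheduler = new ConcurrentMergeScheduler();
            concurrentMergeScheduler.setDefaultMaxMergesAndThreads(false);
            if (inMemory) {
                // No point throttling merge I/O for an in-memory index.
                concurrentMergeScheduler.disableAutoIOThrottle();
            } else {
                concurrentMergeScheduler.enableAutoIOThrottle();
            }
            writerSchedulerMaxThreadCount = concurrentMergeScheduler.getMaxThreadCount();
            mergeScheduler = concurrentMergeScheduler;
        }
        indexWriterConfig.setMergeScheduler(mergeScheduler);
        // Daemon bounded-elastic scheduler matching the merge scheduler's thread budget.
        return Schedulers.newBoundedElastic(writerSchedulerMaxThreadCount,
                Schedulers.DEFAULT_BOUNDED_ELASTIC_QUEUESIZE,
                "lucene-writer",
                60,
                true);
    }
}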

View File

@@ -17,6 +17,7 @@ import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.atomic.AtomicReference;
@@ -210,17 +211,17 @@ public class LLMemoryDictionary implements LLDictionary {
}
@Override
public <K> Flux<Tuple3<K, ByteBuf, ByteBuf>> getMulti(@Nullable LLSnapshot snapshot,
public <K> Flux<Tuple3<K, ByteBuf, Optional<ByteBuf>>> getMulti(@Nullable LLSnapshot snapshot,
Flux<Tuple2<K, ByteBuf>> keys,
boolean existsAlmostCertainly) {
return keys
.flatMapSequential(key -> {
try {
ByteList v = snapshots.get(resolveSnapshot(snapshot)).get(k(key.getT2()));
if (v == null) {
return Flux.empty();
if (v != null) {
return Flux.just(Tuples.of(key.getT1(), key.getT2().retain(), Optional.of(kk(v))));
} else {
return Flux.just(Tuples.of(key.getT1(), key.getT2().retain(), kk(v)));
return Flux.just(Tuples.of(key.getT1(), key.getT2().retain(), Optional.empty()));
}
} finally {
key.getT2().release();

View File

@@ -8,6 +8,7 @@ import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
@@ -341,6 +342,8 @@ public class TestDictionaryMap {
)
.doAfterTerminate(map::release)
)
.filter(k -> k.getValue().isPresent())
.map(k -> Map.entry(k.getKey(), k.getValue().orElseThrow()))
));
if (shouldFail) {
stpVer.verifyError();
@@ -365,6 +368,8 @@ public class TestDictionaryMap {
.thenMany(map.getMulti(null, Flux.fromIterable(entries.keySet())))
.doAfterTerminate(map::release)
)
.filter(k -> k.getValue().isPresent())
.map(k -> Map.entry(k.getKey(), k.getValue().orElseThrow()))
));
if (shouldFail) {
stpVer.verifyError();
@@ -417,6 +422,8 @@ public class TestDictionaryMap {
)
.doAfterTerminate(map::release)
)
.filter(k -> k.getValue().isPresent())
.map(k -> Map.entry(k.getKey(), k.getValue().orElseThrow()))
));
if (shouldFail) {
stpVer.verifyError();

View File

@@ -685,6 +685,8 @@ public class TestDictionaryMapDeep {
)
.doAfterTerminate(map::release)
)
.filter(k -> k.getValue().isPresent())
.map(k -> Map.entry(k.getKey(), k.getValue().orElseThrow()))
));
if (shouldFail) {
stpVer.verifyError();
@@ -709,6 +711,8 @@ public class TestDictionaryMapDeep {
.thenMany(map.getMulti(null, Flux.fromIterable(entries.keySet())))
.doAfterTerminate(map::release)
)
.filter(k -> k.getValue().isPresent())
.map(k -> Map.entry(k.getKey(), k.getValue().orElseThrow()))
));
if (shouldFail) {
stpVer.verifyError();
@@ -761,6 +765,8 @@ public class TestDictionaryMapDeep {
)
.doAfterTerminate(map::release)
)
.filter(k -> k.getValue().isPresent())
.map(k -> Map.entry(k.getKey(), k.getValue().orElseThrow()))
));
if (shouldFail) {
stpVer.verifyError();