Optimize performance by removing all superfluous subscribeOn calls

Andrea Cavalli 2021-09-04 16:42:47 +02:00
parent 2fe8063193
commit 1882e8b300
16 changed files with 133 additions and 217 deletions
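The pattern throughout this commit: blocking work stays wrapped in Mono.fromCallable (or a Flux factory), but the chains no longer pin themselves to a dedicated scheduler with subscribeOn, so the subscriber decides where the work runs. A minimal before/after sketch of the idea (hypothetical helper names, not code from this repository):

    // Before: every helper forced its own scheduler hop
    Mono<byte[]> readBefore(RocksDB db, byte[] key) {
        return Mono.fromCallable(() -> db.get(key)).subscribeOn(dbScheduler);
    }

    // After: the helper only describes the blocking call...
    Mono<byte[]> readAfter(RocksDB db, byte[] key) {
        return Mono.fromCallable(() -> db.get(key));
    }

    // ...and the caller picks a scheduler once, at the edge of the chain
    readAfter(db, key).subscribeOn(Schedulers.boundedElastic()).subscribe();

Since subscribeOn affects the whole upstream chain, one call near the subscriber replaces many inner ones and removes redundant thread hops.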

View File

@@ -68,6 +68,7 @@ import org.warp.commonutils.log.LoggerFactory;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
 import reactor.core.scheduler.Scheduler;
+import reactor.core.scheduler.Schedulers;
 import reactor.util.function.Tuple2;
 import reactor.util.function.Tuple3;
 import reactor.util.function.Tuple4;
@@ -142,7 +143,6 @@ public class LLLocalDictionary implements LLDictionary {
 private final ColumnFamilyHandle cfh;
 private final String databaseName;
 private final String columnName;
-private final Scheduler dbScheduler;
 private final Function<LLSnapshot, Snapshot> snapshotResolver;
 private final Striped<StampedLock> itemsLock = Striped.readWriteStampedLock(STRIPES);
 private final UpdateMode updateMode;
@@ -157,7 +157,6 @@ public class LLLocalDictionary implements LLDictionary {
 @NotNull ColumnFamilyHandle columnFamilyHandle,
 String databaseName,
 String columnName,
-Scheduler dbScheduler,
 Function<LLSnapshot, Snapshot> snapshotResolver,
 UpdateMode updateMode,
 DatabaseOptions databaseOptions) {
@@ -167,7 +166,6 @@ public class LLLocalDictionary implements LLDictionary {
 this.cfh = columnFamilyHandle;
 this.databaseName = databaseName;
 this.columnName = columnName;
-this.dbScheduler = dbScheduler;
 this.snapshotResolver = snapshotResolver;
 this.updateMode = updateMode;
 this.getRangeMultiDebugName = databaseName + "(" + columnName + ")" + "::getRangeMulti";
@@ -243,7 +241,7 @@ public class LLLocalDictionary implements LLDictionary {
 }
 private <T> @NotNull Mono<T> runOnDb(Callable<@Nullable T> callable) {
-return Mono.fromCallable(callable).subscribeOn(dbScheduler);
+return Mono.fromCallable(callable);
 }
 @Override
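With runOnDb reduced to a bare Mono.fromCallable, each RocksDB call now runs on whichever thread subscribes to it. A single subscribeOn at the point where several of these helpers are composed still moves all of them off the caller's thread; roughly (illustrative only, readKey and merge are hypothetical):

    // One subscribeOn near the subscriber covers both fromCallable sources
    Mono.zip(runOnDb(() -> readKey(a)), runOnDb(() -> readKey(b)))
        .map(t -> merge(t.getT1(), t.getT2()))
        .subscribeOn(Schedulers.boundedElastic());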
@@ -327,6 +325,7 @@ public class LLLocalDictionary implements LLDictionary {
 // If it's smaller or equals it means that RocksDB is overwriting
 // the beginning of the result buffer.
 assert resultNioBuf.limit() > assertionReadData;
+//noinspection ConstantConditions
 if (ASSERTIONS_ENABLED) {
 assertionReadData = resultNioBuf.limit();
 }
@@ -515,6 +514,8 @@ public class LLLocalDictionary implements LLDictionary {
 if (direct1 != null) PlatformDependent.freeDirectBuffer(direct1);
 if (direct2 != null) PlatformDependent.freeDirectBuffer(direct2);
 if (direct3 != null) PlatformDependent.freeDirectBuffer(direct3);
+if (slice1 != null) slice1.close();
+if (slice2 != null) slice2.close();
 }
 }).onErrorMap(cause -> new IOException("Failed to read range", cause)),
 rangeSend -> Mono.fromRunnable(rangeSend::close));
@@ -1386,7 +1387,7 @@ public class LLLocalDictionary implements LLDictionary {
 rangeSend -> Flux.using(
 () -> new LLLocalEntryReactiveRocksIterator(db, alloc, cfh, rangeSend,
 databaseOptions.allowNettyDirect(), resolveSnapshot(snapshot), getRangeMultiDebugName),
-llLocalEntryReactiveRocksIterator -> llLocalEntryReactiveRocksIterator.flux().subscribeOn(dbScheduler),
+LLLocalReactiveRocksIterator::flux,
 LLLocalReactiveRocksIterator::release
 ).transform(LLUtils::handleDiscard),
 rangeSend -> Mono.fromRunnable(rangeSend::close)
@@ -1398,7 +1399,7 @@ public class LLLocalDictionary implements LLDictionary {
 rangeSend -> Flux.using(
 () -> new LLLocalGroupedEntryReactiveRocksIterator(db, alloc, cfh, prefixLength, rangeSend,
 databaseOptions.allowNettyDirect(), resolveSnapshot(snapshot), "getRangeMultiGrouped"),
-reactiveRocksIterator -> reactiveRocksIterator.flux().subscribeOn(dbScheduler),
+LLLocalGroupedReactiveRocksIterator::flux,
 LLLocalGroupedReactiveRocksIterator::release
 ).transform(LLUtils::handleDiscard),
 rangeSend -> Mono.fromRunnable(rangeSend::close)
@@ -1429,7 +1430,7 @@ public class LLLocalDictionary implements LLDictionary {
 rangeSend -> Flux.using(
 () -> new LLLocalGroupedKeyReactiveRocksIterator(db, alloc, cfh, prefixLength, rangeSend,
 databaseOptions.allowNettyDirect(), resolveSnapshot(snapshot), "getRangeKeysGrouped"),
-reactiveRocksIterator -> reactiveRocksIterator.flux().subscribeOn(dbScheduler),
+LLLocalGroupedReactiveRocksIterator::flux,
 LLLocalGroupedReactiveRocksIterator::release
 ).transform(LLUtils::handleDiscard),
 rangeSend -> Mono.fromRunnable(rangeSend::close)
@@ -1440,7 +1441,7 @@ public class LLLocalDictionary implements LLDictionary {
 public Flux<BadBlock> badBlocks(Mono<Send<LLRange>> rangeMono) {
 return Flux.usingWhen(rangeMono,
 rangeSend -> Flux
-.<BadBlock>create(sink -> {
+.create(sink -> {
 var range = rangeSend.receive();
 sink.onDispose(range::close);
 try (var ro = new ReadOptions(getReadOptions(null))) {
@@ -1477,8 +1478,7 @@ public class LLLocalDictionary implements LLDictionary {
 } catch (Throwable ex) {
 sink.error(ex);
 }
-})
-.subscribeOn(dbScheduler),
+}),
 rangeSend -> Mono.fromRunnable(rangeSend::close)
 );
 }
@@ -1498,10 +1498,9 @@ public class LLLocalDictionary implements LLDictionary {
 true,
 "getRangeKeysGrouped"
 ),
-it -> it.flux(),
-it -> it.release()
-)
-.subscribeOn(dbScheduler),
+LLLocalKeyPrefixReactiveRocksIterator::flux,
+LLLocalKeyPrefixReactiveRocksIterator::release
+),
 rangeSend -> Mono.fromRunnable(rangeSend::close)
 );
 }
@@ -1528,7 +1527,7 @@ public class LLLocalDictionary implements LLDictionary {
 rangeSend -> Flux.using(
 () -> new LLLocalKeyReactiveRocksIterator(db, alloc, cfh, rangeSend,
 databaseOptions.allowNettyDirect(), resolveSnapshot(snapshot), getRangeKeysMultiDebugName),
-llLocalKeyReactiveRocksIterator -> llLocalKeyReactiveRocksIterator.flux().subscribeOn(dbScheduler),
+LLLocalReactiveRocksIterator::flux,
 LLLocalReactiveRocksIterator::release
 ).transform(LLUtils::handleDiscard),
 rangeSend -> Mono.fromRunnable(rangeSend::close)
@@ -1963,8 +1962,7 @@ public class LLLocalDictionary implements LLDictionary {
 return null;
 }
 })
-.onErrorMap(cause -> new IOException("Failed to clear", cause))
-.subscribeOn(dbScheduler);
+.onErrorMap(cause -> new IOException("Failed to clear", cause));
 }

View File

@@ -22,6 +22,7 @@ import java.util.HashSet;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ThreadLocalRandom;
 import java.util.concurrent.TimeUnit;
@@ -65,7 +66,6 @@ public class LLLocalKeyValueDatabase implements LLKeyValueDatabase {
 RocksDB.DEFAULT_COLUMN_FAMILY);
 private final BufferAllocator allocator;
-private final Scheduler dbScheduler;
 // Configurations
@@ -112,6 +112,7 @@ public class LLLocalKeyValueDatabase implements LLKeyValueDatabase {
 }
 // Get databases directory path
+Objects.requireNonNull(path);
 Path databasesDirPath = path.toAbsolutePath().getParent();
 String dbPathString = databasesDirPath.toString() + File.separatorChar + path.getFileName();
 Path dbPath = Paths.get(dbPathString);
@@ -127,12 +128,6 @@ public class LLLocalKeyValueDatabase implements LLKeyValueDatabase {
 // 8 or more
 threadCap = Math.max(8, Runtime.getRuntime().availableProcessors());
 }
-this.dbScheduler = Schedulers.newBoundedElastic(threadCap,
-Schedulers.DEFAULT_BOUNDED_ELASTIC_QUEUESIZE,
-"db-" + name,
-60,
-true
-);
 this.enableColumnsBug = "true".equals(databaseOptions.extraFlags().getOrDefault("enableColumnBug", "false"));
 createIfNotExists(descriptors, rocksdbOptions, databaseOptions, dbPath, dbPathString);
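The block removed above created one bounded-elastic pool per database instance, sized to the machine. Callers that still need to keep blocking RocksDB work off their own threads can offload onto the shared pool at the call site instead; a hedged sketch (database stands for an LLLocalKeyValueDatabase instance):

    // Removed: a dedicated "db-<name>" pool per database
    Scheduler dbScheduler = Schedulers.newBoundedElastic(threadCap,
            Schedulers.DEFAULT_BOUNDED_ELASTIC_QUEUESIZE,
            "db-" + name,
            60,
            true);

    // Alternative: one hop onto the shared bounded-elastic pool where needed
    database.getProperty("rocksdb.estimate-num-keys")
            .subscribeOn(Schedulers.boundedElastic())
            .subscribe();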
@@ -300,6 +295,8 @@ public class LLLocalKeyValueDatabase implements LLKeyValueDatabase {
 options.setAllowFAllocate(true);
 options.setRateLimiter(new RateLimiter(10L * 1024L * 1024L)); // 10MiB/s max compaction write speed
+Objects.requireNonNull(databasesDirPath);
+Objects.requireNonNull(path.getFileName());
 List<DbPath> paths = List.of(new DbPath(databasesDirPath.resolve(path.getFileName() + "_hot"),
 10L * 1024L * 1024L * 1024L), // 10GiB
 new DbPath(databasesDirPath.resolve(path.getFileName() + "_cold"),
@@ -455,11 +452,9 @@ public class LLLocalKeyValueDatabase implements LLKeyValueDatabase {
 (snapshot) -> snapshotsHandles.get(snapshot.getSequenceNumber()),
 LLLocalKeyValueDatabase.this.name,
 name,
-dbScheduler,
 defaultValue
 ))
-.onErrorMap(cause -> new IOException("Failed to read " + Arrays.toString(name), cause))
-.subscribeOn(dbScheduler);
+.onErrorMap(cause -> new IOException("Failed to read " + Arrays.toString(name), cause));
 }
 @Override
@@ -471,12 +466,10 @@ public class LLLocalKeyValueDatabase implements LLKeyValueDatabase {
 getCfh(columnName),
 name,
 Column.toString(columnName),
-dbScheduler,
 (snapshot) -> snapshotsHandles.get(snapshot.getSequenceNumber()),
 updateMode,
 databaseOptions
-))
-.subscribeOn(dbScheduler);
+));
 }
 private ColumnFamilyHandle getCfh(byte[] columnName) throws RocksDBException {
@@ -492,8 +485,7 @@ public class LLLocalKeyValueDatabase implements LLKeyValueDatabase {
 @Override
 public Mono<Long> getProperty(String propertyName) {
 return Mono.fromCallable(() -> db.getAggregatedLongProperty(propertyName))
-.onErrorMap(cause -> new IOException("Failed to read " + propertyName, cause))
-.subscribeOn(dbScheduler);
+.onErrorMap(cause -> new IOException("Failed to read " + propertyName, cause));
 }
 @Override
@@ -504,8 +496,7 @@ public class LLLocalKeyValueDatabase implements LLKeyValueDatabase {
 return null;
 })
 .onErrorMap(cause -> new IOException("Failed to verify checksum of database \""
-+ getDatabaseName() + "\"", cause))
-.subscribeOn(dbScheduler);
++ getDatabaseName() + "\"", cause));
 }
 @Override
@@ -521,22 +512,20 @@ public class LLLocalKeyValueDatabase implements LLKeyValueDatabase {
 long currentSnapshotSequenceNumber = nextSnapshotNumbers.getAndIncrement();
 this.snapshotsHandles.put(currentSnapshotSequenceNumber, snapshot);
 return new LLSnapshot(currentSnapshotSequenceNumber);
-})
-.subscribeOn(dbScheduler);
+});
 }
 @Override
 public Mono<Void> releaseSnapshot(LLSnapshot snapshot) {
 return Mono
-.<Void>fromCallable(() -> {
+.fromCallable(() -> {
 Snapshot dbSnapshot = this.snapshotsHandles.remove(snapshot.getSequenceNumber());
 if (dbSnapshot == null) {
 throw new IOException("Snapshot " + snapshot.getSequenceNumber() + " not found!");
 }
 db.releaseSnapshot(dbSnapshot);
 return null;
-})
-.subscribeOn(dbScheduler);
+});
 }
 @Override
@@ -551,8 +540,7 @@ public class LLLocalKeyValueDatabase implements LLKeyValueDatabase {
 }
 return null;
 })
-.onErrorMap(cause -> new IOException("Failed to close", cause))
-.subscribeOn(dbScheduler);
+.onErrorMap(cause -> new IOException("Failed to close", cause));
 }
 /**

View File

@@ -93,16 +93,6 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 Integer.MAX_VALUE,
 true
 );
-// Scheduler used to get callback values of LuceneStreamSearcher without creating deadlocks
-private final Scheduler luceneSearcherScheduler = Schedulers.newBoundedElastic(
-4,
-Schedulers.DEFAULT_BOUNDED_ELASTIC_QUEUESIZE,
-"lucene-searcher",
-60,
-true
-);
-// Scheduler used to get callback values of LuceneStreamSearcher without creating deadlocks
-private static final Scheduler luceneWriterScheduler = Schedulers.boundedElastic();
 private final String luceneIndexName;
 private final SnapshotDeletionPolicy snapshotter;
@@ -209,11 +199,9 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 indexWriterConfig.setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND);
 indexWriterConfig.setIndexDeletionPolicy(snapshotter);
 indexWriterConfig.setCommitOnClose(true);
-int writerSchedulerMaxThreadCount;
 MergeScheduler mergeScheduler;
 if (lowMemory) {
 mergeScheduler = new SerialMergeScheduler();
-writerSchedulerMaxThreadCount = 1;
 } else {
 var concurrentMergeScheduler = new ConcurrentMergeScheduler();
 concurrentMergeScheduler.setDefaultMaxMergesAndThreads(false);
@@ -222,7 +210,6 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 } else {
 concurrentMergeScheduler.enableAutoIOThrottle();
 }
-writerSchedulerMaxThreadCount = concurrentMergeScheduler.getMaxThreadCount();
 mergeScheduler = concurrentMergeScheduler;
 }
 indexWriterConfig.setMergeScheduler(mergeScheduler);
@@ -236,14 +223,6 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 new SearcherFactory()
 );
-/*this.luceneWriterScheduler = Schedulers.newBoundedElastic(
-writerSchedulerMaxThreadCount,
-Schedulers.DEFAULT_BOUNDED_ELASTIC_QUEUESIZE,
-"lucene-writer",
-60,
-true
-);*/
 // Create scheduled tasks lifecycle manager
 this.scheduledTasksLifecycle = new ScheduledTaskLifecycle();
@@ -370,23 +349,21 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 @Override
 public Mono<Void> addDocument(LLTerm key, LLDocument doc) {
-return Mono.<Void>fromCallable(() -> {
+return Mono.fromCallable(() -> {
 scheduledTasksLifecycle.startScheduledTask();
 try {
-//noinspection BlockingMethodInNonBlockingContext
 indexWriter.addDocument(LLUtils.toDocument(doc));
 return null;
 } finally {
 scheduledTasksLifecycle.endScheduledTask();
 }
-}).subscribeOn(luceneWriterScheduler);
+});
 }
 @Override
 public Mono<Void> addDocuments(Flux<Entry<LLTerm, LLDocument>> documents) {
 return documents
 .collectList()
-.publishOn(luceneWriterScheduler)
 .flatMap(documentsList -> Mono
 .fromCallable(() -> {
 scheduledTasksLifecycle.startScheduledTask();
@@ -403,30 +380,28 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 @Override
 public Mono<Void> deleteDocument(LLTerm id) {
-return Mono.<Void>fromCallable(() -> {
+return Mono.fromCallable(() -> {
 scheduledTasksLifecycle.startScheduledTask();
 try {
-//noinspection BlockingMethodInNonBlockingContext
 indexWriter.deleteDocuments(LLUtils.toTerm(id));
 return null;
 } finally {
 scheduledTasksLifecycle.endScheduledTask();
 }
-}).subscribeOn(luceneWriterScheduler);
+});
 }
 @Override
 public Mono<Void> updateDocument(LLTerm id, LLDocument document) {
-return Mono.<Void>fromCallable(() -> {
+return Mono.fromCallable(() -> {
 scheduledTasksLifecycle.startScheduledTask();
 try {
-//noinspection BlockingMethodInNonBlockingContext
 indexWriter.updateDocument(LLUtils.toTerm(id), LLUtils.toDocument(document));
 } finally {
 scheduledTasksLifecycle.endScheduledTask();
 }
 return null;
-}).subscribeOn(luceneWriterScheduler);
+});
 }
 @Override
@@ -436,21 +411,19 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 private Mono<Void> updateDocuments(Map<LLTerm, LLDocument> documentsMap) {
 return Mono
-.<Void>fromCallable(() -> {
+.fromCallable(() -> {
 scheduledTasksLifecycle.startScheduledTask();
 try {
 for (Entry<LLTerm, LLDocument> entry : documentsMap.entrySet()) {
 LLTerm key = entry.getKey();
 LLDocument value = entry.getValue();
-//noinspection BlockingMethodInNonBlockingContext
 indexWriter.updateDocument(LLUtils.toTerm(key), LLUtils.toDocument(value));
 }
 return null;
 } finally {
 scheduledTasksLifecycle.endScheduledTask();
 }
-})
-.subscribeOn(luceneWriterScheduler);
+});
 }
 @Override
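IndexWriter calls such as addDocument, deleteDocuments and updateDocument are blocking, so after this change the write Monos execute on whatever thread subscribes to them. A caller that batches writes could still offload the whole chain with a single hop, for example (hypothetical caller code; batch is an Iterable of key/document entries):

    // One scheduler hop for a whole batch of index writes
    Flux.fromIterable(batch)
        .concatMap(entry -> index.updateDocument(entry.getKey(), entry.getValue()))
        .then()
        .subscribeOn(Schedulers.boundedElastic())
        .subscribe();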
@@ -506,7 +479,7 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 .flatMap(indexSearcher -> {
 Mono<Void> releaseMono = releaseSearcherWrapper(snapshot, indexSearcher);
 return localSearcher
-.collect(indexSearcher, releaseMono, modifiedLocalQuery, keyFieldName, luceneSearcherScheduler)
+.collect(indexSearcher, releaseMono, modifiedLocalQuery, keyFieldName)
 .map(result -> new LLSearchResultShard(result.results(), result.totalHitsCount(), result.release()))
 .onErrorResume(ex -> releaseMono.then(Mono.error(ex)));
 })
@@ -522,7 +495,7 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 .flatMap(indexSearcher -> {
 Mono<Void> releaseMono = releaseSearcherWrapper(snapshot, indexSearcher);
 return shardSearcher
-.searchOn(indexSearcher, releaseMono, modifiedLocalQuery, luceneSearcherScheduler)
+.searchOn(indexSearcher, releaseMono, modifiedLocalQuery)
 .onErrorResume(ex -> releaseMono.then(Mono.error(ex)));
 })
 );
@@ -606,7 +579,7 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 .flatMap(indexSearcher -> {
 Mono<Void> releaseMono = releaseSearcherWrapper(snapshot, indexSearcher);
 return localSearcher
-.collect(indexSearcher, releaseMono, localQueryParams, keyFieldName, luceneSearcherScheduler)
+.collect(indexSearcher, releaseMono, localQueryParams, keyFieldName)
 .map(result -> new LLSearchResultShard(result.results(), result.totalHitsCount(), result.release()))
 .onErrorResume(ex -> releaseMono.then(Mono.error(ex)));
 });
@@ -619,7 +592,7 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 return this.acquireSearcherWrapper(snapshot)
 .flatMap(indexSearcher -> {
 Mono<Void> releaseMono = releaseSearcherWrapper(snapshot, indexSearcher);
-return shardSearcher.searchOn(indexSearcher, releaseMono, localQueryParams, luceneSearcherScheduler)
+return shardSearcher.searchOn(indexSearcher, releaseMono, localQueryParams)
 .onErrorResume(ex -> releaseMono.then(Mono.error(ex)));
 });
 }

View File

@@ -218,7 +218,7 @@ public class LLLocalMultiLuceneIndex implements LLLuceneIndex {
 .flatMap(luceneIndexWithSnapshot -> luceneIndexWithSnapshot.luceneIndex()
 .distributedMoreLikeThis(luceneIndexWithSnapshot.snapshot.orElse(null), queryParams, mltDocumentFields, shardSearcher))
 // Collect all the shards results into a single global result
-.then(shardSearcher.collect(localQueryParams, keyFieldName, Schedulers.boundedElastic()))
+.then(shardSearcher.collect(localQueryParams, keyFieldName))
 )
 // Fix the result type
 .map(result -> new LLSearchResultShard(result.results(), result.totalHitsCount(), result.release()));
@@ -246,7 +246,7 @@ public class LLLocalMultiLuceneIndex implements LLLuceneIndex {
 .flatMap(luceneIndexWithSnapshot -> luceneIndexWithSnapshot.luceneIndex()
 .distributedSearch(luceneIndexWithSnapshot.snapshot.orElse(null), queryParams, shardSearcher))
 // Collect all the shards results into a single global result
-.then(shardSearcher.collect(localQueryParams, keyFieldName, Schedulers.boundedElastic()))
+.then(shardSearcher.collect(localQueryParams, keyFieldName))
 )
 // Fix the result type
 .map(result -> new LLSearchResultShard(result.results(), result.totalHitsCount(), result.release()));

View File

@@ -22,20 +22,17 @@ public class LLLocalSingleton implements LLSingleton {
 private final Function<LLSnapshot, Snapshot> snapshotResolver;
 private final byte[] name;
 private final String databaseName;
-private final Scheduler dbScheduler;
 public LLLocalSingleton(RocksDB db, ColumnFamilyHandle singletonListColumn,
 Function<LLSnapshot, Snapshot> snapshotResolver,
 String databaseName,
 byte[] name,
-Scheduler dbScheduler,
 byte[] defaultValue) throws RocksDBException {
 this.db = db;
 this.cfh = singletonListColumn;
 this.databaseName = databaseName;
 this.snapshotResolver = snapshotResolver;
 this.name = name;
-this.dbScheduler = dbScheduler;
 if (db.get(cfh, this.name) == null) {
 db.put(cfh, this.name, defaultValue);
 }
@@ -53,8 +50,7 @@ public class LLLocalSingleton implements LLSingleton {
 public Mono<byte[]> get(@Nullable LLSnapshot snapshot) {
 return Mono
 .fromCallable(() -> db.get(cfh, resolveSnapshot(snapshot), name))
-.onErrorMap(cause -> new IOException("Failed to read " + Arrays.toString(name), cause))
-.subscribeOn(dbScheduler);
+.onErrorMap(cause -> new IOException("Failed to read " + Arrays.toString(name), cause));
 }
 @Override
@@ -64,8 +60,7 @@ public class LLLocalSingleton implements LLSingleton {
 db.put(cfh, name, value);
 return null;
 })
-.onErrorMap(cause -> new IOException("Failed to write " + Arrays.toString(name), cause))
-.subscribeOn(dbScheduler);
+.onErrorMap(cause -> new IOException("Failed to write " + Arrays.toString(name), cause));
 }
 @Override

View File

@@ -69,6 +69,7 @@ import org.warp.commonutils.log.LoggerFactory;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
 import reactor.core.scheduler.Scheduler;
+import reactor.core.scheduler.Schedulers;
 public class LuceneUtils {
@@ -353,7 +354,6 @@ public class LuceneUtils {
 public static Flux<LLKeyScore> convertHits(ScoreDoc[] hits,
 IndexSearchers indexSearchers,
 String keyFieldName,
-Scheduler scheduler,
 boolean preserveOrder) {
 return Flux
@@ -361,12 +361,11 @@ public class LuceneUtils {
 .transform(hitsFlux -> {
 if (preserveOrder) {
 return hitsFlux
-.publishOn(scheduler)
 .mapNotNull(hit -> mapHitBlocking(hit, indexSearchers, keyFieldName));
 } else {
 return hitsFlux
 .parallel()
-.runOn(scheduler)
+.runOn(Schedulers.parallel())
 .map(hit -> {
 var result = mapHitBlocking(hit, indexSearchers, keyFieldName);
 // The "else" value is an errored key score, to filter out next

View File

@@ -16,20 +16,18 @@ public class AdaptiveLuceneLocalSearcher implements LuceneLocalSearcher {
 public Mono<LuceneSearchResult> collect(IndexSearcher indexSearcher,
 Mono<Void> releaseIndexSearcher,
 LocalQueryParams queryParams,
-String keyFieldName,
-Scheduler scheduler) {
+String keyFieldName) {
 if (queryParams.limit() == 0) {
-return countSearcher.collect(indexSearcher, releaseIndexSearcher, queryParams, keyFieldName, scheduler);
+return countSearcher.collect(indexSearcher, releaseIndexSearcher, queryParams, keyFieldName);
 } else if (!queryParams.isScored() && queryParams.offset() == 0 && queryParams.limit() >= 2147483630
 && !queryParams.isSorted()) {
 return unscoredPagedLuceneLocalSearcher.collect(indexSearcher,
 releaseIndexSearcher,
 queryParams,
-keyFieldName,
-scheduler
+keyFieldName
 );
 } else {
-return localSearcher.collect(indexSearcher, releaseIndexSearcher, queryParams, keyFieldName, scheduler);
+return localSearcher.collect(indexSearcher, releaseIndexSearcher, queryParams, keyFieldName);
 }
 }
 }

View File

@@ -14,15 +14,12 @@ public class CountLuceneLocalSearcher implements LuceneLocalSearcher {
 public Mono<LuceneSearchResult> collect(IndexSearcher indexSearcher,
 Mono<Void> releaseIndexSearcher,
 LocalQueryParams queryParams,
-String keyFieldName,
-Scheduler scheduler) {
-//noinspection BlockingMethodInNonBlockingContext
+String keyFieldName) {
 return Mono
 .fromCallable(() -> new LuceneSearchResult(
 TotalHitsCount.of(indexSearcher.count(queryParams.query()), true),
 Flux.empty(),
 releaseIndexSearcher)
-)
-.subscribeOn(scheduler);
+);
 }
 }

View File

@@ -24,20 +24,17 @@ public class CountLuceneMultiSearcher implements LuceneMultiSearcher {
 @Override
 public Mono<Void> searchOn(IndexSearcher indexSearcher,
 Mono<Void> releaseIndexSearcher,
-LocalQueryParams queryParams,
-Scheduler scheduler) {
+LocalQueryParams queryParams) {
 return Mono
 .<Void>fromCallable(() -> {
-//noinspection BlockingMethodInNonBlockingContext
 totalHits.addAndGet(indexSearcher.count(queryParams.query()));
 release.add(releaseIndexSearcher);
 return null;
-})
-.subscribeOn(scheduler);
+});
 }
 @Override
-public Mono<LuceneSearchResult> collect(LocalQueryParams queryParams, String keyFieldName, Scheduler scheduler) {
+public Mono<LuceneSearchResult> collect(LocalQueryParams queryParams, String keyFieldName) {
 return Mono.fromCallable(() -> new LuceneSearchResult(TotalHitsCount.of(totalHits.get(), true), Flux.empty(), Mono.when(release)));
 }
 };

View File

@@ -16,13 +16,12 @@ public class LocalLuceneWrapper implements LuceneLocalSearcher {
 public Mono<LuceneSearchResult> collect(IndexSearcher indexSearcher,
 Mono<Void> releaseIndexSearcher,
 LocalQueryParams queryParams,
-String keyFieldName,
-Scheduler scheduler) {
+String keyFieldName) {
 var shardSearcher = luceneMultiSearcher.createShardSearcher(queryParams);
 return shardSearcher
 .flatMap(luceneShardSearcher -> luceneShardSearcher
-.searchOn(indexSearcher, releaseIndexSearcher, queryParams, scheduler)
-.then(luceneShardSearcher.collect(queryParams, keyFieldName, scheduler))
+.searchOn(indexSearcher, releaseIndexSearcher, queryParams)
+.then(luceneShardSearcher.collect(queryParams, keyFieldName))
 );
 }
 }

View File

@@ -11,11 +11,9 @@ public interface LuceneLocalSearcher {
 * @param indexSearcher Lucene index searcher
 * @param queryParams the query parameters
 * @param keyFieldName the name of the key field
-* @param scheduler a blocking scheduler
 */
 Mono<LuceneSearchResult> collect(IndexSearcher indexSearcher,
 Mono<Void> releaseIndexSearcher,
 LocalQueryParams queryParams,
-String keyFieldName,
-Scheduler scheduler);
+String keyFieldName);
 }

View File

@@ -10,17 +10,14 @@ public interface LuceneShardSearcher {
 /**
 * @param indexSearcher the index searcher, which contains all the lucene data
 * @param queryParams the query parameters
-* @param scheduler a blocking scheduler
 */
 Mono<Void> searchOn(IndexSearcher indexSearcher,
 Mono<Void> indexSearcherRelease,
-LocalQueryParams queryParams,
-Scheduler scheduler);
+LocalQueryParams queryParams);
 /**
 * @param queryParams the query parameters
 * @param keyFieldName the name of the key field
-* @param collectorScheduler a blocking scheduler
 */
-Mono<LuceneSearchResult> collect(LocalQueryParams queryParams, String keyFieldName, Scheduler collectorScheduler);
+Mono<LuceneSearchResult> collect(LocalQueryParams queryParams, String keyFieldName);
 }

View File

@@ -48,25 +48,22 @@ class ScoredSimpleLuceneShardSearcher implements LuceneShardSearcher {
 @Override
 public Mono<Void> searchOn(IndexSearcher indexSearcher,
 Mono<Void> releaseIndexSearcher,
-LocalQueryParams queryParams,
-Scheduler scheduler) {
-return Mono.<Void>fromCallable(() -> {
+LocalQueryParams queryParams) {
+return Mono.fromCallable(() -> {
 TopFieldCollector collector;
 synchronized (lock) {
-//noinspection BlockingMethodInNonBlockingContext
 collector = firstPageSharedManager.newCollector();
 indexSearchersArray.add(indexSearcher);
 indexSearcherReleasersArray.add(releaseIndexSearcher);
 collectors.add(collector);
 }
-//noinspection BlockingMethodInNonBlockingContext
 indexSearcher.search(luceneQuery, collector);
 return null;
-}).subscribeOn(scheduler);
+});
 }
 @Override
-public Mono<LuceneSearchResult> collect(LocalQueryParams queryParams, String keyFieldName, Scheduler collectorScheduler) {
+public Mono<LuceneSearchResult> collect(LocalQueryParams queryParams, String keyFieldName) {
 if (!queryParams.isScored()) {
 return Mono.error(
 new UnsupportedOperationException("Can't execute an unscored query with a scored lucene shard searcher")
@@ -86,7 +83,7 @@ class ScoredSimpleLuceneShardSearcher implements LuceneShardSearcher {
 indexSearchers = IndexSearchers.of(indexSearchersArray);
 }
 Flux<LLKeyScore> firstPageHits = LuceneUtils
-.convertHits(result.scoreDocs, indexSearchers, keyFieldName, collectorScheduler, true);
+.convertHits(result.scoreDocs, indexSearchers, keyFieldName, true);
 Flux<LLKeyScore> nextHits = Flux.defer(() -> {
 if (paginationInfo.forceSinglePage()
@@ -115,21 +112,24 @@ class ScoredSimpleLuceneShardSearcher implements LuceneShardSearcher {
 TopDocs pageTopDocs = Flux
 .fromIterable(indexSearchersArray)
 .index()
-.flatMapSequential(tuple -> Mono
-.fromCallable(() -> {
-long shardIndex = tuple.getT1();
-IndexSearcher indexSearcher = tuple.getT2();
-TopFieldCollector collector = sharedManager.newCollector();
-indexSearcher.search(luceneQuery, collector);
-return collector;
-})
-.subscribeOn(Schedulers.immediate())
-)
+.<TopFieldCollector>handle((tuple, sink) -> {
+try {
+IndexSearcher indexSearcher = tuple.getT2();
+TopFieldCollector collector = sharedManager.newCollector();
+indexSearcher.search(luceneQuery, collector);
+sink.next(collector);
+} catch (Exception ex) {
+sink.error(ex);
+}
+})
 .collect(Collectors.toCollection(ObjectArrayList::new))
-.flatMap(collectors -> Mono
-.fromCallable(() -> sharedManager.reduce(collectors))
-.subscribeOn(Schedulers.immediate())
-)
+.<TopDocs>handle((collectors, sink) -> {
+try {
+sink.next(sharedManager.reduce(collectors));
+} catch (Exception ex) {
+sink.error(ex);
+}
+})
 .single()
 .takeUntilOther(cancelEvent.asMono())
 .block();
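The loop above replaces fromCallable plus subscribeOn(Schedulers.immediate()) with handle, which lets a synchronous step that may throw either emit a value or signal an error without allocating an inner Mono per element. The operator in isolation (illustrative; paths is assumed to be a Flux<Path>):

    // handle(): map-like, but the callback decides whether to emit, skip, or error
    Flux<String> contents = paths.handle((path, sink) -> {
        try {
            sink.next(Files.readString(path));   // a throwing call, handled in place
        } catch (IOException ex) {
            sink.error(ex);
        }
    });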
@@ -154,9 +154,8 @@ class ScoredSimpleLuceneShardSearcher implements LuceneShardSearcher {
 emitter.onCancel(cancelEvent::tryEmitEmpty);
 })
-.subscribeOn(collectorScheduler)
 .flatMapSequential(topFieldDoc -> LuceneUtils
-.convertHits(topFieldDoc.scoreDocs, indexSearchers, keyFieldName, collectorScheduler, true)
+.convertHits(topFieldDoc.scoreDocs, indexSearchers, keyFieldName, true)
 );
 });
@@ -166,8 +165,7 @@ class ScoredSimpleLuceneShardSearcher implements LuceneShardSearcher {
 //.transform(flux -> LuceneUtils.filterTopDoc(flux, queryParams)),
 release
 );
-})
-.subscribeOn(Schedulers.boundedElastic());
+});
 }
 }

View File

@@ -22,8 +22,7 @@ public class SimpleLuceneLocalSearcher implements LuceneLocalSearcher {
 public Mono<LuceneSearchResult> collect(IndexSearcher indexSearcher,
 Mono<Void> releaseIndexSearcher,
 LocalQueryParams queryParams,
-String keyFieldName,
-Scheduler scheduler) {
+String keyFieldName) {
 return Mono
 .fromCallable(() -> {
 Objects.requireNonNull(queryParams.scoreMode(), "ScoreMode must not be null");
@@ -42,7 +41,6 @@ public class SimpleLuceneLocalSearcher implements LuceneLocalSearcher {
 LuceneUtils.totalHitsThreshold(),
 !paginationInfo.forceSinglePage(),
 queryParams.isScored());
-//noinspection BlockingMethodInNonBlockingContext
 indexSearcher.search(queryParams.query(), firstPageCollector);
 firstPageTopDocs = firstPageCollector.topDocs(LuceneUtils.safeLongToInt(paginationInfo.firstPageOffset()),
 LuceneUtils.safeLongToInt(paginationInfo.firstPageLimit())
@@ -53,7 +51,6 @@ public class SimpleLuceneLocalSearcher implements LuceneLocalSearcher {
 firstPageTopDocs.scoreDocs,
 IndexSearchers.unsharded(indexSearcher),
 keyFieldName,
-scheduler,
 true
 )
 .take(queryParams.limit(), true);
@@ -63,43 +60,40 @@ public class SimpleLuceneLocalSearcher implements LuceneLocalSearcher {
 if (paginationInfo.forceSinglePage() || paginationInfo.totalLimit() - paginationInfo.firstPageLimit() <= 0) {
 nextHits = null;
 } else {
-nextHits = Flux.defer(() -> {
-return Flux
-.<TopDocs, CurrentPageInfo>generate(
-() -> new CurrentPageInfo(LuceneUtils.getLastScoreDoc(firstPageTopDocs.scoreDocs), paginationInfo.totalLimit() - paginationInfo.firstPageLimit(), 1),
-(s, sink) -> {
-if (s.last() != null && s.remainingLimit() > 0) {
-TopDocs pageTopDocs;
-try {
-TopDocsCollector<ScoreDoc> collector = TopDocsSearcher.getTopDocsCollector(queryParams.sort(),
-s.currentPageLimit(),
-s.last(),
-LuceneUtils.totalHitsThreshold(),
-true,
-queryParams.isScored()
-);
-//noinspection BlockingMethodInNonBlockingContext
-indexSearcher.search(queryParams.query(), collector);
-pageTopDocs = collector.topDocs();
-} catch (IOException e) {
-sink.error(e);
-return EMPTY_STATUS;
-}
-var pageLastDoc = LuceneUtils.getLastScoreDoc(pageTopDocs.scoreDocs);
-sink.next(pageTopDocs);
-return new CurrentPageInfo(pageLastDoc, s.remainingLimit() - s.currentPageLimit(), s.pageIndex() + 1);
-} else {
-sink.complete();
-return EMPTY_STATUS;
-}
-},
-s -> {}
-)
-.subscribeOn(scheduler)
-.flatMapSequential(topFieldDoc -> LuceneUtils
-.convertHits(topFieldDoc.scoreDocs, IndexSearchers.unsharded(indexSearcher), keyFieldName, scheduler, true)
-);
-});
+nextHits = Flux.defer(() -> Flux
+.<TopDocs, CurrentPageInfo>generate(
+() -> new CurrentPageInfo(LuceneUtils.getLastScoreDoc(firstPageTopDocs.scoreDocs), paginationInfo.totalLimit() - paginationInfo.firstPageLimit(), 1),
+(s, sink) -> {
+if (s.last() != null && s.remainingLimit() > 0) {
+TopDocs pageTopDocs;
+try {
+TopDocsCollector<ScoreDoc> collector = TopDocsSearcher.getTopDocsCollector(queryParams.sort(),
+s.currentPageLimit(),
+s.last(),
+LuceneUtils.totalHitsThreshold(),
+true,
+queryParams.isScored()
+);
+indexSearcher.search(queryParams.query(), collector);
+pageTopDocs = collector.topDocs();
+} catch (IOException e) {
+sink.error(e);
+return EMPTY_STATUS;
+}
+var pageLastDoc = LuceneUtils.getLastScoreDoc(pageTopDocs.scoreDocs);
+sink.next(pageTopDocs);
+return new CurrentPageInfo(pageLastDoc, s.remainingLimit() - s.currentPageLimit(), s.pageIndex() + 1);
+} else {
+sink.complete();
+return EMPTY_STATUS;
+}
+},
+s -> {}
+)
+.flatMapSequential(topFieldDoc -> LuceneUtils
+.convertHits(topFieldDoc.scoreDocs, IndexSearchers.unsharded(indexSearcher), keyFieldName, true)
+)
+);
 }
 Flux<LLKeyScore> combinedFlux;
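The paging code above is driven by Flux.generate, which threads a state value through each pull: the callback inspects the current state, emits at most one item, and returns the state for the next page. The same pattern in a self-contained sketch (Row and fetchPage are hypothetical):

    // Pull one page per request; stop when a page comes back empty
    Flux<List<Row>> pages = Flux.generate(
            () -> 0,                                 // initial state: page index 0
            (page, sink) -> {
                List<Row> rows = fetchPage(page);    // blocking fetch of one page
                if (rows.isEmpty()) {
                    sink.complete();
                } else {
                    sink.next(rows);
                }
                return page + 1;                     // state for the next pull
            });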
@@ -115,7 +109,6 @@ public class SimpleLuceneLocalSearcher implements LuceneLocalSearcher {
 //.transform(flux -> LuceneUtils.filterTopDoc(flux, queryParams)),
 releaseIndexSearcher
 );
-})
-.subscribeOn(scheduler);
+});
 }
 }

View File

@@ -40,31 +40,28 @@ class UnscoredPagedLuceneShardSearcher implements LuceneShardSearcher {
 @Override
 public Mono<Void> searchOn(IndexSearcher indexSearcher,
 Mono<Void> releaseIndexSearcher,
-LocalQueryParams queryParams,
-Scheduler scheduler) {
-return Mono.<Void>fromCallable(() -> {
+LocalQueryParams queryParams) {
+return Mono.fromCallable(() -> {
 TopDocsCollector<ScoreDoc> collector;
 synchronized (lock) {
-//noinspection BlockingMethodInNonBlockingContext
 collector = firstPageUnsortedCollectorManager.newCollector();
 indexSearchersArray.add(indexSearcher);
 indexSearcherReleasersArray.add(releaseIndexSearcher);
 collectors.add(collector);
 }
-//noinspection BlockingMethodInNonBlockingContext
 indexSearcher.search(luceneQuery, collector);
 return null;
-}).subscribeOn(scheduler);
+});
 }
 @Override
-public Mono<LuceneSearchResult> collect(LocalQueryParams queryParams, String keyFieldName, Scheduler scheduler) {
+public Mono<LuceneSearchResult> collect(LocalQueryParams queryParams, String keyFieldName) {
 return Mono
 .fromCallable(() -> {
 TopDocs result;
 Mono<Void> release;
 synchronized (lock) {
-//noinspection BlockingMethodInNonBlockingContext
 result = firstPageUnsortedCollectorManager.reduce(collectors);
 release = Mono.when(indexSearcherReleasersArray);
 }
@@ -73,7 +70,7 @@ class UnscoredPagedLuceneShardSearcher implements LuceneShardSearcher {
 indexSearchers = IndexSearchers.of(indexSearchersArray);
 }
 Flux<LLKeyScore> firstPageHits = LuceneUtils
-.convertHits(result.scoreDocs, indexSearchers, keyFieldName, scheduler, false);
+.convertHits(result.scoreDocs, indexSearchers, keyFieldName, false);
 Flux<LLKeyScore> nextHits = Flux.defer(() -> {
 if (paginationInfo.forceSinglePage() || paginationInfo.totalLimit() - paginationInfo.firstPageLimit() <= 0) {
@@ -91,23 +88,19 @@ class UnscoredPagedLuceneShardSearcher implements LuceneShardSearcher {
 () -> TopDocsSearcher.getTopDocsCollector(queryParams.sort(), s.currentPageLimit(),
 s.last(), LuceneUtils.totalHitsThreshold(), true, queryParams.isScored()),
 0, s.currentPageLimit(), queryParams.sort());
-//noinspection BlockingMethodInNonBlockingContext
 TopDocs pageTopDocs = Flux
 .fromIterable(indexSearchersArray)
 .flatMapSequential(indexSearcher -> Mono
 .fromCallable(() -> {
-//noinspection BlockingMethodInNonBlockingContext
 var collector = currentPageUnsortedCollectorManager.newCollector();
-//noinspection BlockingMethodInNonBlockingContext
 indexSearcher.search(luceneQuery, collector);
 return collector;
 })
-.subscribeOn(scheduler)
 )
 .collect(Collectors.toCollection(ObjectArrayList::new))
 .flatMap(collectors -> Mono
 .fromCallable(() -> currentPageUnsortedCollectorManager.reduce(collectors))
-.subscribeOn(scheduler)
 )
 .blockOptional().orElseThrow();
@@ -122,9 +115,8 @@ class UnscoredPagedLuceneShardSearcher implements LuceneShardSearcher {
 },
 s -> {}
 )
-.subscribeOn(scheduler)
 .flatMapSequential(topFieldDoc -> LuceneUtils
-.convertHits(topFieldDoc.scoreDocs, indexSearchers, keyFieldName, scheduler, false)
+.convertHits(topFieldDoc.scoreDocs, indexSearchers, keyFieldName, false)
 );
 });
@@ -133,8 +125,7 @@ class UnscoredPagedLuceneShardSearcher implements LuceneShardSearcher {
 //.transform(flux -> LuceneUtils.filterTopDoc(flux, queryParams)),
 release
 );
-})
-.subscribeOn(scheduler);
+});
 }
 }

View File

@@ -91,11 +91,9 @@ public class UnscoredUnsortedContinuousLuceneMultiSearcher implements LuceneMultiSearcher {
 @Override
 public Mono<Void> searchOn(IndexSearcher indexSearcher,
 Mono<Void> releaseIndexSearcher,
-LocalQueryParams queryParams,
-Scheduler scheduler) {
+LocalQueryParams queryParams) {
 return Mono
-.<Void>fromCallable(() -> {
-//noinspection BlockingMethodInNonBlockingContext
+.fromCallable(() -> {
 var collector = cm.newCollector();
 int collectorShardIndex;
 synchronized (lock) {
@@ -117,14 +115,12 @@ public class UnscoredUnsortedContinuousLuceneMultiSearcher implements LuceneMultiSearcher {
 }
 });
 return null;
-})
-.subscribeOn(scheduler);
+});
 }
 @Override
 public Mono<LuceneSearchResult> collect(LocalQueryParams queryParams,
-String keyFieldName,
-Scheduler scheduler) {
+String keyFieldName) {
 return Mono
 .fromCallable(() -> {
 synchronized (scoreDocsSink) {
@@ -156,7 +152,6 @@ public class UnscoredUnsortedContinuousLuceneMultiSearcher implements LuceneMultiSearcher {
 .flatMap(scoreDocs -> LuceneUtils.convertHits(scoreDocs.toArray(ScoreDoc[]::new),
 indexSearchers,
 keyFieldName,
-scheduler,
 false
 ));