First safe search commit

parent 634880a7b2
commit 8bc0284f27
@@ -1,5 +1,6 @@
package it.cavallium.dbengine.database;

import io.net5.buffer.api.Send;
import it.cavallium.data.generator.nativedata.Nullablefloat;
import it.cavallium.dbengine.client.query.current.data.NoSort;
import it.cavallium.dbengine.client.query.current.data.Query;
@@ -40,7 +41,7 @@ public interface LLLuceneIndex extends LLSnapshottable {
   * The additional query will be used with the moreLikeThis query: "mltQuery AND additionalQuery"
   * @return the collection has one or more flux
   */
  Mono<LLSearchResultShard> moreLikeThis(@Nullable LLSnapshot snapshot,
  Mono<Send<LLSearchResultShard>> moreLikeThis(@Nullable LLSnapshot snapshot,
      QueryParams queryParams,
      String keyFieldName,
      Flux<Tuple2<String, Set<String>>> mltDocumentFields);
@@ -50,13 +51,18 @@ public interface LLLuceneIndex extends LLSnapshottable {
   * returned can be at most <code>limit * 15</code>
   * @return the collection has one or more flux
   */
  Mono<LLSearchResultShard> search(@Nullable LLSnapshot snapshot, QueryParams queryParams, String keyFieldName);
  Mono<Send<LLSearchResultShard>> search(@Nullable LLSnapshot snapshot, QueryParams queryParams, String keyFieldName);

  default Mono<TotalHitsCount> count(@Nullable LLSnapshot snapshot, Query query) {
    QueryParams params = QueryParams.of(query, 0, 0, Nullablefloat.empty(), NoSort.of(), ScoreMode.of(false, false));
    return Mono.from(this.search(snapshot, params, null)
        .flatMap(llSearchResultShard -> llSearchResultShard.release().thenReturn(llSearchResultShard.totalHitsCount()))
        .defaultIfEmpty(TotalHitsCount.of(0, true)));
        .map(llSearchResultShardToReceive -> {
          try (var llSearchResultShard = llSearchResultShardToReceive.receive()) {
            return llSearchResultShard.totalHitsCount();
          }
        })
        .defaultIfEmpty(TotalHitsCount.of(0, true))
    ).doOnDiscard(Send.class, Send::close);
  }

  boolean isLowMemoryMode();
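The reworked count() above doubles as the usage pattern for the new Send-based signatures: the caller takes ownership with receive(), reads what it needs inside try-with-resources, and relies on doOnDiscard to close shards dropped by cancellation. A minimal caller-side sketch (not part of the commit; index, snapshot and params are placeholder variables, imports as shown above):

// Sketch: counting hits through the new API from calling code.
Mono<TotalHitsCount> count = index
    .search(snapshot, params, null)
    .map(shardToReceive -> {
      // receive() transfers ownership of the shard to this consumer;
      // try-with-resources guarantees it is dropped exactly once.
      try (var shard = shardToReceive.receive()) {
        return shard.totalHitsCount();
      }
    })
    .defaultIfEmpty(TotalHitsCount.of(0, true))
    .doOnDiscard(Send.class, Send::close); // shards discarded by cancellation still get closed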
@@ -1,46 +1,41 @@
package it.cavallium.dbengine.database;

import io.net5.buffer.api.Drop;
import io.net5.buffer.api.Owned;
import io.net5.buffer.api.internal.ResourceSupport;
import it.cavallium.dbengine.client.query.current.data.TotalHitsCount;
import it.cavallium.dbengine.lucene.searcher.LuceneSearchResult;
import java.util.Objects;
import org.warp.commonutils.log.Logger;
import org.warp.commonutils.log.LoggerFactory;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

public final class LLSearchResultShard {
public final class LLSearchResultShard extends ResourceSupport<LLSearchResultShard, LLSearchResultShard> {

  private static final Logger logger = LoggerFactory.getLogger(LLSearchResultShard.class);

  private volatile boolean releaseCalled;
  private Flux<LLKeyScore> results;
  private TotalHitsCount totalHitsCount;

  private final Flux<LLKeyScore> results;
  private final TotalHitsCount totalHitsCount;
  private final Mono<Void> release;

  public LLSearchResultShard(Flux<LLKeyScore> results, TotalHitsCount totalHitsCount, Mono<Void> release) {
  public LLSearchResultShard(Flux<LLKeyScore> results, TotalHitsCount totalHitsCount, Drop<LLSearchResultShard> drop) {
    super(new LLSearchResultShard.CloseOnDrop(drop));
    this.results = results;
    this.totalHitsCount = totalHitsCount;
    this.release = Mono.fromRunnable(() -> {
      if (releaseCalled) {
        logger.warn(this.getClass().getName() + "::release has been called twice!");
      }
      releaseCalled = true;
    }).then(release);
  }

  public Flux<LLKeyScore> results() {
    if (!isOwned()) {
      throw attachTrace(new IllegalStateException("LLSearchResultShard must be owned to be used"));
    }
    return results;
  }

  public TotalHitsCount totalHitsCount() {
    if (!isOwned()) {
      throw attachTrace(new IllegalStateException("LLSearchResultShard must be owned to be used"));
    }
    return totalHitsCount;
  }

  public Mono<Void> release() {
    return release;
  }

  @Override
  public boolean equals(Object obj) {
    if (obj == this)
@@ -48,28 +43,48 @@ public final class LLSearchResultShard {
    if (obj == null || obj.getClass() != this.getClass())
      return false;
    var that = (LLSearchResultShard) obj;
    return Objects.equals(this.results, that.results) && Objects.equals(this.totalHitsCount, that.totalHitsCount)
        && Objects.equals(this.release, that.release);
    return Objects.equals(this.results, that.results) && Objects.equals(this.totalHitsCount, that.totalHitsCount);
  }

  @Override
  public int hashCode() {
    return Objects.hash(results, totalHitsCount, release);
    return Objects.hash(results, totalHitsCount);
  }

  @Override
  public String toString() {
    return "LLSearchResultShard[" + "results=" + results + ", " + "totalHitsCount=" + totalHitsCount + ", " + "release="
        + release + ']';
    return "LLSearchResultShard[" + "results=" + results + ", " + "totalHitsCount=" + totalHitsCount + ']';
  }

  @SuppressWarnings("deprecation")
  @Override
  protected void finalize() throws Throwable {
    if (!releaseCalled) {
      logger.warn(this.getClass().getName() + "::release has not been called before class finalization!");
    }
    super.finalize();
  protected RuntimeException createResourceClosedException() {
    return new IllegalStateException("Closed");
  }

  @Override
  protected Owned<LLSearchResultShard> prepareSend() {
    var results = this.results;
    var totalHitsCount = this.totalHitsCount;
    makeInaccessible();
    return drop -> new LLSearchResultShard(results, totalHitsCount, drop);
  }

  private void makeInaccessible() {
    this.results = null;
    this.totalHitsCount = null;
  }

  private static class CloseOnDrop implements Drop<LLSearchResultShard> {

    private final Drop<LLSearchResultShard> delegate;

    public CloseOnDrop(Drop<LLSearchResultShard> drop) {
      this.delegate = drop;
    }

    @Override
    public void drop(LLSearchResultShard obj) {
      delegate.drop(obj);
    }
  }
}
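For reference, the ownership life cycle that ResourceSupport gives LLSearchResultShard, as a hedged sketch using only types visible in this diff (resultsFlux, hitsCount and the empty drop callback are placeholders):

// Sketch: create a shard, transfer it across an owner boundary, then close it.
var shard = new LLSearchResultShard(resultsFlux, hitsCount, drop -> {
  // Drop callback: runs once, when the last owner closes or discards the shard.
});
Send<LLSearchResultShard> sent = shard.send();  // the original instance becomes inaccessible
try (var received = sent.receive()) {           // the receiver is now the sole owner
  var totalHits = received.totalHitsCount();    // accessors require ownership (isOwned())
}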
@@ -48,6 +48,7 @@ import org.slf4j.Marker;
import org.slf4j.MarkerFactory;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Schedulers;
import reactor.util.function.Tuple2;
import reactor.util.function.Tuple3;

@@ -337,6 +338,12 @@ public class LLUtils {
    }
  }

  public static void ensureBlocking() {
    if (Schedulers.isInNonBlockingThread()) {
      throw new UnsupportedOperationException("Called collect in a nonblocking thread");
    }
  }

  public static record DirectBuffer(@NotNull Send<Buffer> buffer, @NotNull ByteBuffer byteBuffer) {}

  @NotNull
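ensureBlocking() is the guard the reworked searchers call before doing Lucene I/O; a hedged sketch of the intended pattern (expensiveBlockingCall() is a placeholder for the actual blocking work, e.g. IndexSearcher#count):

Mono<Integer> result = Mono
    .fromCallable(() -> {
      LLUtils.ensureBlocking();        // fail fast if this runs on a Reactor non-blocking thread
      return expensiveBlockingCall();  // placeholder for the blocking Lucene call
    })
    .subscribeOn(Schedulers.boundedElastic()); // run the callable on a blocking-friendly scheduler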
@@ -1,106 +0,0 @@
package it.cavallium.dbengine.database.disk;

import java.io.IOException;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.SearcherManager;
import org.jetbrains.annotations.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class CachedIndexSearcher {

  private static final Logger logger = LoggerFactory.getLogger(CachedIndexSearcher.class);

  private final IndexSearcher indexSearcher;
  private final SearcherManager associatedSearcherManager;
  private final Runnable afterFinalization;
  private boolean inCache = true;
  private int usages = 0;

  public CachedIndexSearcher(IndexSearcher indexSearcher,
      @Nullable SearcherManager associatedSearcherManager,
      @Nullable Runnable afterFinalization) {
    this.indexSearcher = indexSearcher;
    this.associatedSearcherManager = associatedSearcherManager;
    this.afterFinalization = afterFinalization;
  }

  public void incUsage() {
    synchronized (this) {
      usages++;
    }
  }

  public void decUsage() throws IOException {
    synchronized (this) {
      if (usages > 0) {
        usages--;
        if (mustClose()) {
          try {
            close();
          } finally {
            if (afterFinalization != null) afterFinalization.run();
          }
        }
      }
    }
  }

  public void removeFromCache() throws IOException {
    synchronized (this) {
      if (inCache) {
        inCache = false;
        if (mustClose()) {
          try {
            close();
          } finally {
            if (afterFinalization != null) afterFinalization.run();
          }
        }
      }
    }
  }

  private void close() throws IOException {
    if (associatedSearcherManager != null) {
      associatedSearcherManager.release(indexSearcher);
    }
  }

  private boolean mustClose() {
    return !this.inCache && this.usages == 0;
  }

  public IndexReader getIndexReader() {
    return indexSearcher.getIndexReader();
  }

  public IndexSearcher getIndexSearcher() {
    return indexSearcher;
  }

  @SuppressWarnings("deprecation")
  @Override
  protected void finalize() throws Throwable {
    boolean failedToRelease = false;
    if (usages > 0) {
      failedToRelease = true;
      logger.error("A cached index searcher has been garbage collected, but "
          + usages + " usages have not been released");
    }
    if (inCache) {
      failedToRelease = true;
      logger.error("A cached index searcher has been garbage collected, but it's marked"
          + " as still actively cached");
    }
    if (failedToRelease) {
      try {
        this.close();
      } catch (Throwable ex) {
        logger.warn("Error when closing cached index searcher", ex);
      }
    }
    super.finalize();
  }
}
@@ -3,16 +3,14 @@ package it.cavallium.dbengine.database.disk;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import io.net5.buffer.api.Send;
import io.net5.buffer.api.internal.ResourceSupport;
import it.cavallium.dbengine.database.LLSnapshot;
import java.io.IOException;
import java.time.Duration;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Phaser;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.LockSupport;
import java.util.function.Function;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.search.IndexSearcher;
@@ -30,7 +28,7 @@ import reactor.core.publisher.Sinks;
import reactor.core.publisher.Sinks.Empty;
import reactor.core.scheduler.Schedulers;

public class CachedIndexSearcherManager {
public class CachedIndexSearcherManager implements IndexSearcherManager {

  private static final Logger logger = LoggerFactory.getLogger(CachedIndexSearcherManager.class);

@@ -41,8 +39,8 @@ public class CachedIndexSearcherManager {
  private final Phaser activeSearchers = new Phaser(1);
  private final Phaser activeRefreshes = new Phaser(1);

  private final LoadingCache<LLSnapshot, Mono<CachedIndexSearcher>> cachedSnapshotSearchers;
  private final Mono<CachedIndexSearcher> cachedMainSearcher;
  private final LoadingCache<LLSnapshot, Mono<Send<LLIndexSearcher>>> cachedSnapshotSearchers;
  private final Mono<Send<LLIndexSearcher>> cachedMainSearcher;

  private final Empty<Void> closeRequested = Sinks.empty();
  private final Empty<Void> refresherClosed = Sinks.empty();
@@ -84,7 +82,7 @@ public class CachedIndexSearcherManager {
        .maximumSize(3)
        .build(new CacheLoader<>() {
          @Override
          public Mono<CachedIndexSearcher> load(@NotNull LLSnapshot snapshot) {
          public Mono<Send<LLIndexSearcher>> load(@NotNull LLSnapshot snapshot) {
            return CachedIndexSearcherManager.this.generateCachedSearcher(snapshot);
          }
        });
@@ -129,7 +127,11 @@ public class CachedIndexSearcherManager {
    })).cache();
  }

  private Mono<CachedIndexSearcher> generateCachedSearcher(@Nullable LLSnapshot snapshot) {
  private Mono<Send<LLIndexSearcher>> generateCachedSearcher(@Nullable LLSnapshot snapshot) {
    var onClose = this.closeRequested.asMono();
    var onQueryRefresh = Mono.delay(queryRefreshDebounceTime).then();
    var onInvalidateCache = Mono.firstWithSignal(onClose, onQueryRefresh);

    return Mono.fromCallable(() -> {
      activeSearchers.register();
      IndexSearcher indexSearcher;
@@ -142,34 +144,20 @@ public class CachedIndexSearcherManager {
        indexSearcher = snapshotsManager.resolveSnapshot(snapshot).getIndexSearcher();
        associatedSearcherManager = null;
      }
      AtomicBoolean alreadyDeregistered = new AtomicBoolean(false);
      return new CachedIndexSearcher(indexSearcher, associatedSearcherManager,
          () -> {
            // This shouldn't happen more than once,
            // but I put this AtomicBoolean to be sure that this will NEVER happen more than once.
            if (alreadyDeregistered.compareAndSet(false, true)) {
              activeSearchers.arriveAndDeregister();
            } else {
              logger.error("Disposed CachedIndexSearcher twice! This is an implementation bug!");
            }
          }
      );
      return new LLIndexSearcher(indexSearcher, associatedSearcherManager, this::dropCachedIndexSearcher);
    })
        .cacheInvalidateWhen(indexSearcher -> Mono
            .firstWithSignal(
                this.closeRequested.asMono(),
                Mono.delay(queryRefreshDebounceTime).then()
            ),
            indexSearcher -> {
              try {
                // Mark as removed from cache
                indexSearcher.removeFromCache();
              } catch (Exception ex) {
                logger.error("Failed to release an old cached IndexSearcher", ex);
              }
            });
        .cacheInvalidateWhen(indexSearcher -> onInvalidateCache, ResourceSupport::close)
        .map(searcher -> searcher.copy(this::dropCachedIndexSearcher).send())
        .takeUntilOther(onClose)
        .doOnDiscard(ResourceSupport.class, ResourceSupport::close);
  }

  private void dropCachedIndexSearcher(LLIndexSearcher cachedIndexSearcher) {
    // This shouldn't happen more than once per searcher.
    activeSearchers.arriveAndDeregister();
  }

  @Override
  public void maybeRefreshBlocking() throws IOException {
    try {
      activeRefreshes.register();
@@ -181,6 +169,7 @@ public class CachedIndexSearcherManager {
    }
  }

  @Override
  public void maybeRefresh() throws IOException {
    try {
      activeRefreshes.register();
@@ -192,30 +181,26 @@ public class CachedIndexSearcherManager {
    }
  }

  @Override
  public <T> Flux<T> searchMany(@Nullable LLSnapshot snapshot, Function<IndexSearcher, Flux<T>> searcherFunction) {
    return Flux.usingWhen(
        this.captureIndexSearcher(snapshot),
        this.retrieveSearcher(snapshot).map(Send::receive),
        indexSearcher -> searcherFunction.apply(indexSearcher.getIndexSearcher()),
        this::releaseUsedIndexSearcher
        cachedIndexSearcher -> Mono.fromRunnable(cachedIndexSearcher::close)
    );
  }

  @Override
  public <T> Mono<T> search(@Nullable LLSnapshot snapshot, Function<IndexSearcher, Mono<T>> searcherFunction) {
    return Mono.usingWhen(
        this.captureIndexSearcher(snapshot),
        this.retrieveSearcher(snapshot).map(Send::receive),
        indexSearcher -> searcherFunction.apply(indexSearcher.getIndexSearcher()),
        this::releaseUsedIndexSearcher
        cachedIndexSearcher -> Mono.fromRunnable(cachedIndexSearcher::close)
    );
  }

  public Mono<CachedIndexSearcher> captureIndexSearcher(@Nullable LLSnapshot snapshot) {
    return this
        .retrieveCachedIndexSearcher(snapshot)
        // Increment reference count
        .doOnNext(CachedIndexSearcher::incUsage);
  }

  private Mono<CachedIndexSearcher> retrieveCachedIndexSearcher(LLSnapshot snapshot) {
  @Override
  public Mono<Send<LLIndexSearcher>> retrieveSearcher(@Nullable LLSnapshot snapshot) {
    if (snapshot == null) {
      return this.cachedMainSearcher;
    } else {
@@ -223,17 +208,7 @@ public class CachedIndexSearcherManager {
    }
  }

  public Mono<Void> releaseUsedIndexSearcher(CachedIndexSearcher indexSearcher) {
    return Mono.fromRunnable(() -> {
      try {
        // Decrement reference count
        indexSearcher.decUsage();
      } catch (Exception ex) {
        logger.error("Failed to release an used IndexSearcher", ex);
      }
    });
  }

  @Override
  public Mono<Void> close() {
    return closeMono;
  }
@@ -0,0 +1,25 @@
package it.cavallium.dbengine.database.disk;

import io.net5.buffer.api.Send;
import it.cavallium.dbengine.database.LLSnapshot;
import java.io.IOException;
import java.util.function.Function;
import org.apache.lucene.search.IndexSearcher;
import org.jetbrains.annotations.Nullable;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

public interface IndexSearcherManager {

  void maybeRefreshBlocking() throws IOException;

  void maybeRefresh() throws IOException;

  <T> Flux<T> searchMany(@Nullable LLSnapshot snapshot, Function<IndexSearcher, Flux<T>> searcherFunction);

  <T> Mono<T> search(@Nullable LLSnapshot snapshot, Function<IndexSearcher, Mono<T>> searcherFunction);

  Mono<Send<LLIndexSearcher>> retrieveSearcher(@Nullable LLSnapshot snapshot);

  Mono<Void> close();
}
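A hedged sketch of how a caller acquires and releases a searcher through this new interface, mirroring the usingWhen pattern used by CachedIndexSearcherManager above (manager and snapshot are placeholders):

Mono<Integer> docCount = Mono.usingWhen(
    manager.retrieveSearcher(snapshot).map(Send::receive),  // take ownership of the LLIndexSearcher
    searcher -> Mono.fromCallable(() -> searcher.getIndexSearcher().getIndexReader().numDocs()),
    searcher -> Mono.fromRunnable(searcher::close)           // always hand it back, even on error or cancel
);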
@@ -0,0 +1,94 @@
package it.cavallium.dbengine.database.disk;

import io.net5.buffer.api.Drop;
import io.net5.buffer.api.Owned;
import io.net5.buffer.api.internal.ResourceSupport;
import java.io.IOException;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.SearcherManager;
import org.jetbrains.annotations.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LLIndexSearcher extends ResourceSupport<LLIndexSearcher, LLIndexSearcher> {

  private static final Logger logger = LoggerFactory.getLogger(LLIndexSearcher.class);

  private IndexSearcher indexSearcher;
  private SearcherManager associatedSearcherManager;

  public LLIndexSearcher(IndexSearcher indexSearcher,
      @Nullable SearcherManager associatedSearcherManager,
      Drop<LLIndexSearcher> drop) {
    super(new LLIndexSearcher.CloseOnDrop(drop));
    this.indexSearcher = indexSearcher;
    this.associatedSearcherManager = associatedSearcherManager;
  }

  public IndexReader getIndexReader() {
    if (!isOwned()) {
      throw attachTrace(new IllegalStateException("CachedIndexSearcher must be owned to be used"));
    }
    return indexSearcher.getIndexReader();
  }

  public IndexSearcher getIndexSearcher() {
    if (!isOwned()) {
      throw attachTrace(new IllegalStateException("CachedIndexSearcher must be owned to be used"));
    }
    return indexSearcher;
  }

  public LLIndexSearcher copy(Drop<LLIndexSearcher> drop) {
    if (!isOwned()) {
      throw attachTrace(new IllegalStateException("CachedIndexSearcher must be owned to be used"));
    }
    var copyIndexSearcher = this.indexSearcher;
    if (associatedSearcherManager != null) {
      copyIndexSearcher.getIndexReader().incRef();
    }
    return new LLIndexSearcher(copyIndexSearcher, associatedSearcherManager, new CloseOnDrop(drop));
  }

  @Override
  protected RuntimeException createResourceClosedException() {
    return new IllegalStateException("Closed");
  }

  @Override
  protected Owned<LLIndexSearcher> prepareSend() {
    var indexSearcher = this.indexSearcher;
    var associatedSearcherManager = this.associatedSearcherManager;
    makeInaccessible();
    return drop -> new LLIndexSearcher(indexSearcher, associatedSearcherManager, drop);
  }

  private void makeInaccessible() {
    this.indexSearcher = null;
    this.associatedSearcherManager = null;
  }

  private static class CloseOnDrop implements Drop<LLIndexSearcher> {

    private final Drop<LLIndexSearcher> delegate;

    public CloseOnDrop(Drop<LLIndexSearcher> drop) {
      this.delegate = drop;
    }

    @Override
    public void drop(LLIndexSearcher obj) {
      try {
        if (obj.associatedSearcherManager != null) {
          if (obj.indexSearcher.getIndexReader().getRefCount() > 0) {
            obj.associatedSearcherManager.release(obj.indexSearcher);
          }
        }
      } catch (IOException e) {
        logger.error("Failed to drop CachedIndexSearcher", e);
      }
      delegate.drop(obj);
    }
  }
}
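The copy()/CloseOnDrop pair is what lets the manager keep one cached searcher while handing independent copies to callers; a hedged sketch of that interplay (constructor arguments and drop callbacks are placeholders, imports as in the file above):

// Sketch: the cached original and the handed-out copy are dropped independently.
try (var cached = new LLIndexSearcher(indexSearcher, searcherManager, d -> {})) {
  Send<LLIndexSearcher> forCaller = cached.copy(d -> {}).send(); // copy() bumps the reader's refCount
  try (var callerCopy = forCaller.receive()) {
    var reader = callerCopy.getIndexReader(); // legal: the caller owns this copy
  } // dropping the copy releases the searcher back to the SearcherManager while refs remain
}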
@@ -3,6 +3,7 @@ package it.cavallium.dbengine.database.disk;
import static it.cavallium.dbengine.database.LLUtils.MARKER_LUCENE;
import static it.cavallium.dbengine.database.LLUtils.MARKER_ROCKSDB;

import io.net5.buffer.api.Send;
import it.cavallium.dbengine.client.DirectIOOptions;
import it.cavallium.dbengine.client.IndicizerAnalyzers;
import it.cavallium.dbengine.client.IndicizerSimilarities;
@@ -21,10 +22,9 @@ import it.cavallium.dbengine.lucene.LuceneUtils;
import it.cavallium.dbengine.lucene.searcher.AdaptiveLuceneLocalSearcher;
import it.cavallium.dbengine.lucene.searcher.LocalQueryParams;
import it.cavallium.dbengine.lucene.searcher.LuceneLocalSearcher;
import it.cavallium.dbengine.lucene.searcher.LuceneShardSearcher;
import it.cavallium.dbengine.lucene.searcher.LuceneMultiSearcher;
import java.io.IOException;
import java.nio.file.Path;
import java.time.Duration;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
@@ -73,21 +73,12 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
   * There is only a single thread globally to not overwhelm the disk with
   * concurrent commits or concurrent refreshes.
   */
  private static final Scheduler luceneHeavyTasksScheduler = Schedulers.newBoundedElastic(1,
      Schedulers.DEFAULT_BOUNDED_ELASTIC_QUEUESIZE,
      "lucene",
      Integer.MAX_VALUE,
      true
  );
  // Scheduler used to get callback values of LuceneStreamSearcher without creating deadlocks
  protected final Scheduler luceneSearcherScheduler = LuceneUtils.newLuceneSearcherScheduler(false);
  // Scheduler used to get callback values of LuceneStreamSearcher without creating deadlocks
  private static final Scheduler luceneWriterScheduler = Schedulers.boundedElastic();
  private static final Scheduler luceneHeavyTasksScheduler = Schedulers.single(Schedulers.boundedElastic());

  private final String luceneIndexName;
  private final IndexWriter indexWriter;
  private final SnapshotsManager snapshotsManager;
  private final CachedIndexSearcherManager searcherManager;
  private final IndexSearcherManager searcherManager;
  private final Similarity similarity;
  private final Directory directory;
  private final boolean lowMemory;
@@ -166,7 +157,8 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {

    if (luceneOptions.nrtCachingOptions().isPresent()) {
      NRTCachingOptions nrtCachingOptions = luceneOptions.nrtCachingOptions().get();
      directory = new NRTCachingDirectory(directory, nrtCachingOptions.maxMergeSizeMB(), nrtCachingOptions.maxCachedMB());
      directory = new NRTCachingDirectory(directory, nrtCachingOptions.maxMergeSizeMB(),
          nrtCachingOptions.maxCachedMB());
    }

    this.directory = directory;
@@ -177,7 +169,7 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
    this.lowMemory = lowMemory;
    this.similarity = LuceneUtils.toPerFieldSimilarityWrapper(indicizerSimilarities);

    IndexWriterConfig indexWriterConfig = new IndexWriterConfig(LuceneUtils.toPerFieldAnalyzerWrapper(indicizerAnalyzers));
    var indexWriterConfig = new IndexWriterConfig(LuceneUtils.toPerFieldAnalyzerWrapper(indicizerAnalyzers));
    indexWriterConfig.setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND);
    indexWriterConfig.setIndexDeletionPolicy(snapshotter);
    indexWriterConfig.setCommitOnClose(true);
@@ -241,13 +233,12 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
    return Mono.<Void>fromCallable(() -> {
      activeTasks.register();
      try {
        //noinspection BlockingMethodInNonBlockingContext
        indexWriter.addDocument(LLUtils.toDocument(doc));
        return null;
      } finally {
        activeTasks.arriveAndDeregister();
      }
    }).subscribeOn(luceneWriterScheduler);
    }).subscribeOn(Schedulers.boundedElastic());
  }

  @Override
@@ -258,13 +249,12 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
        .<Void>fromCallable(() -> {
          activeTasks.register();
          try {
            //noinspection BlockingMethodInNonBlockingContext
            indexWriter.addDocuments(LLUtils.toDocumentsFromEntries(documentsList));
            return null;
          } finally {
            activeTasks.arriveAndDeregister();
          }
        }).subscribeOn(luceneWriterScheduler)
        }).subscribeOn(Schedulers.boundedElastic())
    );
  }

@@ -274,13 +264,12 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
    return Mono.<Void>fromCallable(() -> {
      activeTasks.register();
      try {
        //noinspection BlockingMethodInNonBlockingContext
        indexWriter.deleteDocuments(LLUtils.toTerm(id));
        return null;
      } finally {
        activeTasks.arriveAndDeregister();
      }
    }).subscribeOn(luceneWriterScheduler);
    }).subscribeOn(Schedulers.boundedElastic());
  }

  @Override
@@ -288,13 +277,12 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
    return Mono.<Void>fromCallable(() -> {
      activeTasks.register();
      try {
        //noinspection BlockingMethodInNonBlockingContext
        indexWriter.updateDocument(LLUtils.toTerm(id), LLUtils.toDocument(document));
      } finally {
        activeTasks.arriveAndDeregister();
      }
      return null;
    }).subscribeOn(luceneWriterScheduler);
    }).subscribeOn(Schedulers.boundedElastic());
  }

  @Override
@@ -310,7 +298,6 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
      for (Entry<LLTerm, LLDocument> entry : documentsMap.entrySet()) {
        LLTerm key = entry.getKey();
        LLDocument value = entry.getValue();
        //noinspection BlockingMethodInNonBlockingContext
        indexWriter.updateDocument(LLUtils.toTerm(key), LLUtils.toDocument(value));
      }
      return null;
@@ -318,7 +305,7 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
        activeTasks.arriveAndDeregister();
      }
    })
        .subscribeOn(luceneWriterScheduler);
        .subscribeOn(Schedulers.boundedElastic());
  }

  @Override
@@ -340,35 +327,32 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
  }

  @Override
  public Mono<LLSearchResultShard> moreLikeThis(@Nullable LLSnapshot snapshot,
  public Mono<Send<LLSearchResultShard>> moreLikeThis(@Nullable LLSnapshot snapshot,
      QueryParams queryParams,
      String keyFieldName,
      Flux<Tuple2<String, Set<String>>> mltDocumentFieldsFlux) {
    return getMoreLikeThisQuery(snapshot, LuceneUtils.toLocalQueryParams(queryParams), mltDocumentFieldsFlux)
        .flatMap(modifiedLocalQuery -> searcherManager.captureIndexSearcher(snapshot)
            .flatMap(indexSearcher -> {
              Mono<Void> releaseMono = searcherManager.releaseUsedIndexSearcher(indexSearcher);
              return localSearcher
                  .collect(indexSearcher.getIndexSearcher(), releaseMono, modifiedLocalQuery, keyFieldName, luceneSearcherScheduler)
                  .map(result -> new LLSearchResultShard(result.results(), result.totalHitsCount(), result.release()))
                  .onErrorResume(ex -> releaseMono.then(Mono.error(ex)));
            })
        );
        .flatMap(modifiedLocalQuery -> searcherManager
            .retrieveSearcher(snapshot)
            .flatMap(indexSearcher -> localSearcher.collect(indexSearcher, modifiedLocalQuery, keyFieldName))
        )
        .map(resultToReceive -> {
          var result = resultToReceive.receive();
          return new LLSearchResultShard(result.results(), result.totalHitsCount(), d -> result.close()).send();
        })
        .doOnDiscard(Send.class, Send::close);
  }

  public Mono<Void> distributedMoreLikeThis(@Nullable LLSnapshot snapshot,
      QueryParams queryParams,
      Flux<Tuple2<String, Set<String>>> mltDocumentFieldsFlux,
      LuceneShardSearcher shardSearcher) {
      LuceneMultiSearcher shardSearcher) {
    return getMoreLikeThisQuery(snapshot, LuceneUtils.toLocalQueryParams(queryParams), mltDocumentFieldsFlux)
        .flatMap(modifiedLocalQuery -> searcherManager.captureIndexSearcher(snapshot)
            .flatMap(indexSearcher -> {
              Mono<Void> releaseMono = searcherManager.releaseUsedIndexSearcher(indexSearcher);
              return shardSearcher
                  .searchOn(indexSearcher.getIndexSearcher(), releaseMono, modifiedLocalQuery, luceneSearcherScheduler)
                  .onErrorResume(ex -> releaseMono.then(Mono.error(ex)));
            })
        );
        .flatMap(modifiedLocalQuery -> searcherManager
            .retrieveSearcher(snapshot)
            .flatMap(indexSearcher -> shardSearcher.searchOn(indexSearcher, modifiedLocalQuery))
        )
        .doOnDiscard(Send.class, Send::close);
  }

  public Mono<LocalQueryParams> getMoreLikeThisQuery(@Nullable LLSnapshot snapshot,
@@ -437,27 +421,27 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
  }

  @Override
  public Mono<LLSearchResultShard> search(@Nullable LLSnapshot snapshot, QueryParams queryParams, String keyFieldName) {
  public Mono<Send<LLSearchResultShard>> search(@Nullable LLSnapshot snapshot, QueryParams queryParams,
      String keyFieldName) {
    LocalQueryParams localQueryParams = LuceneUtils.toLocalQueryParams(queryParams);
    return searcherManager.captureIndexSearcher(snapshot).flatMap(indexSearcher -> {
      Mono<Void> releaseMono = searcherManager.releaseUsedIndexSearcher(indexSearcher);
      return localSearcher
          .collect(indexSearcher.getIndexSearcher(), releaseMono, localQueryParams, keyFieldName, luceneSearcherScheduler)
          .map(result -> new LLSearchResultShard(result.results(), result.totalHitsCount(), result.release()))
          .onErrorResume(ex -> releaseMono.then(Mono.error(ex)));
    });
    return searcherManager
        .retrieveSearcher(snapshot)
        .flatMap(indexSearcher -> localSearcher.collect(indexSearcher, localQueryParams, keyFieldName))
        .map(resultToReceive -> {
          var result = resultToReceive.receive();
          return new LLSearchResultShard(result.results(), result.totalHitsCount(), d -> result.close()).send();
        })
        .doOnDiscard(Send.class, Send::close);
  }

  public Mono<Void> distributedSearch(@Nullable LLSnapshot snapshot,
      QueryParams queryParams,
      LuceneShardSearcher shardSearcher) {
      LuceneMultiSearcher shardSearcher) {
    LocalQueryParams localQueryParams = LuceneUtils.toLocalQueryParams(queryParams);
    return searcherManager.captureIndexSearcher(snapshot)
        .flatMap(indexSearcher -> {
          Mono<Void> releaseMono = searcherManager.releaseUsedIndexSearcher(indexSearcher);
          return shardSearcher.searchOn(indexSearcher.getIndexSearcher(), releaseMono, localQueryParams, luceneSearcherScheduler)
              .onErrorResume(ex -> releaseMono.then(Mono.error(ex)));
        });
    return searcherManager
        .retrieveSearcher(snapshot)
        .flatMap(indexSearcher -> shardSearcher.searchOn(indexSearcher, localQueryParams))
        .doOnDiscard(Send.class, Send::close);
  }

  @Override
@@ -1,29 +1,18 @@
package it.cavallium.dbengine.database.disk;

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader.InvalidCacheLoadException;
import it.cavallium.dbengine.client.IndicizerAnalyzers;
import it.cavallium.dbengine.client.IndicizerSimilarities;
import it.cavallium.dbengine.client.LuceneIndex;
import it.cavallium.dbengine.client.LuceneOptions;
import it.cavallium.dbengine.client.query.current.data.QueryParams;
import it.cavallium.dbengine.database.LLDocument;
import it.cavallium.dbengine.database.LLLuceneIndex;
import it.cavallium.dbengine.database.LLSearchResult;
import it.cavallium.dbengine.database.LLSearchResultShard;
import it.cavallium.dbengine.database.LLSnapshot;
import it.cavallium.dbengine.database.LLTerm;
import it.cavallium.dbengine.lucene.LuceneUtils;
import it.cavallium.dbengine.lucene.analyzer.TextFieldsAnalyzer;
import it.cavallium.dbengine.lucene.analyzer.TextFieldsSimilarity;
import it.cavallium.dbengine.lucene.searcher.AdaptiveLuceneMultiSearcher;
import it.cavallium.dbengine.lucene.searcher.LocalQueryParams;
import it.cavallium.dbengine.lucene.searcher.LuceneMultiSearcher;
import it.cavallium.dbengine.lucene.searcher.LuceneShardSearcher;
import it.unimi.dsi.fastutil.longs.Long2ObjectMap;
import it.unimi.dsi.fastutil.longs.Long2ObjectOpenHashMap;
import it.unimi.dsi.fastutil.objects.Object2ObjectArrayMap;
import java.io.IOException;
import java.nio.file.Path;
import java.time.Duration;
@@ -33,28 +22,15 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.lucene.search.CollectionStatistics;
import org.apache.lucene.search.IndexSearcher;
import org.jetbrains.annotations.Nullable;
import org.warp.commonutils.batch.ParallelUtils;
import org.warp.commonutils.functional.IOBiConsumer;
import reactor.core.publisher.Flux;
import reactor.core.publisher.GroupedFlux;
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Scheduler;
import reactor.core.scheduler.Schedulers;
import reactor.util.function.Tuple2;
import reactor.util.function.Tuples;

public class LLLocalMultiLuceneIndex implements LLLuceneIndex {
@@ -1,5 +1,7 @@
package it.cavallium.dbengine.lucene;

import io.net5.buffer.api.Resource;
import io.net5.buffer.api.Send;
import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.client.IndicizerAnalyzers;
import it.cavallium.dbengine.client.IndicizerSimilarities;
@@ -70,6 +72,7 @@ import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Scheduler;
import reactor.core.scheduler.Schedulers;
import reactor.util.concurrent.Queues;

public class LuceneUtils {

@@ -363,23 +366,28 @@ public class LuceneUtils {
    );
  }

  public static Flux<LLKeyScore> convertHits(Flux<ScoreDoc> hits,
  public static Flux<LLKeyScore> convertHits(Flux<ScoreDoc> hitsFlux,
      IndexSearchers indexSearchers,
      String keyFieldName,
      Scheduler scheduler,
      boolean preserveOrder) {
    if (preserveOrder) {
      return hitsFlux
          .publishOn(Schedulers.boundedElastic())
          .mapNotNull(hit -> mapHitBlocking(hit, indexSearchers, keyFieldName));
    } else {
      // Compute parallelism
      var availableProcessors = Runtime.getRuntime().availableProcessors();
      var min = Queues.XS_BUFFER_SIZE;
      var maxParallelGroups = Math.max(availableProcessors, min);

    return hits.transform(hitsFlux -> {
      if (preserveOrder) {
        return hitsFlux
            .publishOn(scheduler)
            .mapNotNull(hit -> mapHitBlocking(hit, indexSearchers, keyFieldName));
      } else {
        return hitsFlux
            .publishOn(scheduler)
            .mapNotNull(hit -> mapHitBlocking(hit, indexSearchers, keyFieldName));
      }
    });
      return hitsFlux
          .groupBy(hit -> hit.shardIndex % maxParallelGroups) // Max n groups
          .flatMap(shardHits -> shardHits
              .publishOn(Schedulers.boundedElastic())
              .mapNotNull(hit -> mapHitBlocking(hit, indexSearchers, keyFieldName)),
              maxParallelGroups // Max n concurrency. Concurrency must be >= total groups count
          );
    }
  }

  @Nullable
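The unordered branch of convertHits above bounds its parallelism by hashing hits into at most max(availableProcessors, Queues.XS_BUFFER_SIZE) groups; a self-contained hedged sketch of the same pattern, with a placeholder mapping in place of mapHitBlocking:

int groups = Math.max(Runtime.getRuntime().availableProcessors(), Queues.XS_BUFFER_SIZE);
Flux<String> mapped = Flux.range(0, 1_000)
    .groupBy(i -> i % groups)                        // at most `groups` parallel rails
    .flatMap(rail -> rail
            .publishOn(Schedulers.boundedElastic())  // blocking-friendly thread per rail
            .map(i -> "doc-" + i),                   // placeholder for mapHitBlocking(...)
        groups);                                     // concurrency must cover every group, or rails stall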
@@ -1,5 +1,8 @@
package it.cavallium.dbengine.lucene.searcher;

import io.net5.buffer.api.Send;
import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.database.disk.LLIndexSearcher;
import org.apache.lucene.search.IndexSearcher;
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Scheduler;
@@ -9,32 +12,23 @@ public class AdaptiveLuceneLocalSearcher implements LuceneLocalSearcher {

  private static final LuceneLocalSearcher localSearcher = new SimpleLuceneLocalSearcher();

  private static final LuceneLocalSearcher unscoredPagedLuceneLocalSearcher = new LocalLuceneWrapper(new UnscoredUnsortedContinuousLuceneMultiSearcher());
  private static final LuceneLocalSearcher unscoredPagedLuceneLocalSearcher = new LocalLuceneWrapper(new UnscoredUnsortedContinuousLuceneMultiSearcher(), d -> {});

  private static final LuceneLocalSearcher countSearcher = new CountLuceneLocalSearcher();

  @Override
  public Mono<LuceneSearchResult> collect(IndexSearcher indexSearcher,
      Mono<Void> releaseIndexSearcher,
  public Mono<Send<LuceneSearchResult>> collect(Mono<Send<LLIndexSearcher>> indexSearcher,
      LocalQueryParams queryParams,
      String keyFieldName,
      Scheduler scheduler) {
    if (Schedulers.isInNonBlockingThread()) {
      return releaseIndexSearcher
          .then(Mono.error(() -> new UnsupportedOperationException("Called collect in a nonblocking thread")));
    }
      String keyFieldName) {
    Mono<Send<LuceneSearchResult>> collectionMono;
    if (queryParams.limit() == 0) {
      return countSearcher.collect(indexSearcher, releaseIndexSearcher, queryParams, keyFieldName, scheduler);
      collectionMono = countSearcher.collect(indexSearcher, queryParams, keyFieldName);
    } else if (!queryParams.isScored() && queryParams.offset() == 0 && queryParams.limit() >= 2147483630
        && !queryParams.isSorted()) {
      return unscoredPagedLuceneLocalSearcher.collect(indexSearcher,
          releaseIndexSearcher,
          queryParams,
          keyFieldName,
          scheduler
      );
      collectionMono = unscoredPagedLuceneLocalSearcher.collect(indexSearcher, queryParams, keyFieldName);
    } else {
      return localSearcher.collect(indexSearcher, releaseIndexSearcher, queryParams, keyFieldName, scheduler);
      collectionMono = localSearcher.collect(indexSearcher, queryParams, keyFieldName);
    }
    return Mono.fromRunnable(LLUtils::ensureBlocking).then(collectionMono);
  }
}
@@ -1,5 +1,7 @@
package it.cavallium.dbengine.lucene.searcher;

import io.net5.buffer.api.Send;
import it.cavallium.dbengine.database.LLUtils;
import reactor.core.publisher.Mono;

public class AdaptiveLuceneMultiSearcher implements LuceneMultiSearcher {
@@ -13,15 +15,17 @@ public class AdaptiveLuceneMultiSearcher implements LuceneMultiSearcher {
  private static final LuceneMultiSearcher countLuceneMultiSearcher = new CountLuceneMultiSearcher();

  @Override
  public Mono<LuceneShardSearcher> createShardSearcher(LocalQueryParams queryParams) {
  public Mono<Send<LuceneMultiSearcher>> createShardSearcher(LocalQueryParams queryParams) {
    Mono<Send<LuceneMultiSearcher>> shardSearcherCreationMono;
    if (queryParams.limit() <= 0) {
      return countLuceneMultiSearcher.createShardSearcher(queryParams);
      shardSearcherCreationMono = countLuceneMultiSearcher.createShardSearcher(queryParams);
    } else if (queryParams.isScored()) {
      return scoredLuceneMultiSearcher.createShardSearcher(queryParams);
      shardSearcherCreationMono = scoredLuceneMultiSearcher.createShardSearcher(queryParams);
    } else if (queryParams.offset() == 0 && queryParams.limit() >= 2147483630 && !queryParams.isSorted()) {
      return unscoredIterableLuceneMultiSearcher.createShardSearcher(queryParams);
      shardSearcherCreationMono = unscoredIterableLuceneMultiSearcher.createShardSearcher(queryParams);
    } else {
      return unscoredPagedLuceneMultiSearcher.createShardSearcher(queryParams);
      shardSearcherCreationMono = unscoredPagedLuceneMultiSearcher.createShardSearcher(queryParams);
    }
    return Mono.fromRunnable(LLUtils::ensureBlocking).then(shardSearcherCreationMono);
  }
}
@@ -1,28 +1,33 @@
package it.cavallium.dbengine.lucene.searcher;

import it.cavallium.dbengine.client.query.QueryParser;
import it.cavallium.dbengine.client.query.current.data.QueryParams;
import io.net5.buffer.api.Resource;
import io.net5.buffer.api.Send;
import io.net5.buffer.api.internal.ResourceSupport;
import it.cavallium.dbengine.client.query.current.data.TotalHitsCount;
import org.apache.lucene.search.IndexSearcher;
import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.database.disk.LLIndexSearcher;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Scheduler;
import reactor.core.scheduler.Schedulers;

public class CountLuceneLocalSearcher implements LuceneLocalSearcher {

  @Override
  public Mono<LuceneSearchResult> collect(IndexSearcher indexSearcher, Mono<Void> releaseIndexSearcher,
      LocalQueryParams queryParams, String keyFieldName, Scheduler scheduler) {
    return Mono.fromCallable(() -> {
      if (Schedulers.isInNonBlockingThread()) {
        throw new UnsupportedOperationException("Called collect in a nonblocking thread");
      }
      //noinspection BlockingMethodInNonBlockingContext
      return new LuceneSearchResult(TotalHitsCount.of(indexSearcher.count(queryParams.query()), true),
          Flux.empty(),
          releaseIndexSearcher
      );
    }).subscribeOn(scheduler);
  public Mono<Send<LuceneSearchResult>> collect(Mono<Send<LLIndexSearcher>> indexSearcherMono,
      LocalQueryParams queryParams,
      String keyFieldName) {
    return Mono
        .usingWhen(
            indexSearcherMono,
            indexSearcher -> Mono.fromCallable(() -> {
              try (var is = indexSearcher.receive()) {
                LLUtils.ensureBlocking();
                return is.getIndexSearcher().count(queryParams.query());
              }
            }).subscribeOn(Schedulers.boundedElastic()),
            is -> Mono.empty()
        )
        .map(count -> new LuceneSearchResult(TotalHitsCount.of(count, true), Flux.empty(), drop -> {}).send())
        .doOnDiscard(Send.class, Send::close);
  }
}
@@ -1,55 +1,88 @@
package it.cavallium.dbengine.lucene.searcher;

import it.cavallium.dbengine.client.query.QueryParser;
import it.cavallium.dbengine.client.query.current.data.QueryParams;
import io.net5.buffer.api.Drop;
import io.net5.buffer.api.Owned;
import io.net5.buffer.api.Send;
import io.net5.buffer.api.internal.ResourceSupport;
import it.cavallium.dbengine.client.query.current.data.TotalHitsCount;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicBoolean;
import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.database.disk.LLIndexSearcher;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Scheduler;
import reactor.core.scheduler.Schedulers;

public class CountLuceneMultiSearcher implements LuceneMultiSearcher {

  @Override
  public Mono<LuceneShardSearcher> createShardSearcher(LocalQueryParams queryParams) {
    return Mono
        .fromCallable(() -> {
          AtomicLong totalHits = new AtomicLong(0);
          ConcurrentLinkedQueue<Mono<Void>> release = new ConcurrentLinkedQueue<>();
          return new LuceneShardSearcher() {
            @Override
            public Mono<Void> searchOn(IndexSearcher indexSearcher,
                Mono<Void> releaseIndexSearcher,
                LocalQueryParams queryParams,
                Scheduler scheduler) {
              return Mono
                  .<Void>fromCallable(() -> {
                    //noinspection BlockingMethodInNonBlockingContext
                    totalHits.addAndGet(indexSearcher.count(queryParams.query()));
                    release.add(releaseIndexSearcher);
                    return null;
                  })
                  .subscribeOn(scheduler);
            }
  public Mono<Send<LuceneMultiSearcher>> createShardSearcher(LocalQueryParams queryParams) {
    return Mono.fromCallable(() -> new CountLuceneShardSearcher(new AtomicLong(0), d -> {}).send());
  }

            @Override
            public Mono<LuceneSearchResult> collect(LocalQueryParams queryParams, String keyFieldName, Scheduler scheduler) {
              return Mono.fromCallable(() -> {
                if (Schedulers.isInNonBlockingThread()) {
                  throw new UnsupportedOperationException("Called collect in a nonblocking thread");
                }
                return new LuceneSearchResult(TotalHitsCount.of(totalHits.get(), true),
                    Flux.empty(),
                    Mono.when(release)
                );
              }).subscribeOn(scheduler);
  private static class CountLuceneShardSearcher extends
      ResourceSupport<LuceneMultiSearcher, CountLuceneShardSearcher> implements LuceneMultiSearcher {

    private AtomicLong totalHitsCount;

    public CountLuceneShardSearcher(AtomicLong totalHitsCount, Drop<CountLuceneShardSearcher> drop) {
      super(new CloseOnDrop(drop));
      this.totalHitsCount = totalHitsCount;
    }

    @Override
    public Mono<Void> searchOn(Send<LLIndexSearcher> indexSearcher, LocalQueryParams queryParams) {
      return Mono
          .fromCallable(() -> {
            try (var is = indexSearcher.receive()) {
              if (!isOwned()) {
                throw attachTrace(new IllegalStateException("CountLuceneMultiSearcher must be owned to be used"));
              }
              LLUtils.ensureBlocking();
              totalHitsCount.addAndGet(is.getIndexSearcher().count(queryParams.query()));
              return null;
            }
          };
        });
          });
    }

    @Override
    public Mono<Send<LuceneSearchResult>> collect(LocalQueryParams queryParams, String keyFieldName) {
      return Mono.fromCallable(() -> {
        if (!isOwned()) {
          throw attachTrace(new IllegalStateException("CountLuceneMultiSearcher must be owned to be used"));
        }
        LLUtils.ensureBlocking();
        return new LuceneSearchResult(TotalHitsCount.of(totalHitsCount.get(), true), Flux.empty(), d -> {})
            .send();
      });
    }

    @Override
    protected RuntimeException createResourceClosedException() {
      return new IllegalStateException("Closed");
    }

    @Override
    protected Owned<CountLuceneShardSearcher> prepareSend() {
      var totalHitsCount = this.totalHitsCount;
      makeInaccessible();
      return drop -> new CountLuceneShardSearcher(totalHitsCount, drop);
    }

    private void makeInaccessible() {
      this.totalHitsCount = null;
    }

    private static class CloseOnDrop implements Drop<CountLuceneShardSearcher> {

      private final Drop<CountLuceneShardSearcher> delegate;

      public CloseOnDrop(Drop<CountLuceneShardSearcher> drop) {
        this.delegate = drop;
      }

      @Override
      public void drop(CountLuceneShardSearcher obj) {
        delegate.drop(obj);
      }
    }
  }
}
@@ -1,27 +1,140 @@
package it.cavallium.dbengine.lucene.searcher;

import io.net5.buffer.UnpooledDirectByteBuf;
import io.net5.buffer.api.Buffer;
import io.net5.buffer.api.Drop;
import io.net5.buffer.api.Owned;
import io.net5.buffer.api.Resource;
import io.net5.buffer.api.Send;
import io.net5.buffer.api.internal.ResourceSupport;
import it.cavallium.dbengine.database.LLRange;
import it.cavallium.dbengine.database.disk.LLIndexSearcher;
import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.search.IndexSearcher;

public interface IndexSearchers {
public interface IndexSearchers extends Resource<IndexSearchers> {

  static IndexSearchers of(List<IndexSearcher> indexSearchers) {
    return shardIndex -> {
      if (shardIndex < 0) {
        throw new IndexOutOfBoundsException("Shard index " + shardIndex + " is invalid");
      }
      return indexSearchers.get(shardIndex);
    };
  static IndexSearchers of(List<LLIndexSearcher> indexSearchers) {
    return new ShardedIndexSearchers(indexSearchers, d -> {});
  }

  static IndexSearchers unsharded(IndexSearcher indexSearcher) {
    return shardIndex -> {
  static IndexSearchers unsharded(LLIndexSearcher indexSearcher) {
    return new UnshardedIndexSearchers(indexSearcher, d -> {});
  }

  LLIndexSearcher shard(int shardIndex);

  class UnshardedIndexSearchers extends ResourceSupport<IndexSearchers, UnshardedIndexSearchers>
      implements IndexSearchers {

    private LLIndexSearcher indexSearcher;

    public UnshardedIndexSearchers(LLIndexSearcher indexSearcher, Drop<UnshardedIndexSearchers> drop) {
      super(new CloseOnDrop(drop));
      this.indexSearcher = indexSearcher;
    }

    @Override
    public LLIndexSearcher shard(int shardIndex) {
      if (!isOwned()) {
        throw attachTrace(new IllegalStateException("UnshardedIndexSearchers must be owned to be used"));
      }
      if (shardIndex != -1) {
        throw new IndexOutOfBoundsException("Shard index " + shardIndex + " is invalid, this is a unsharded index");
      }
      return indexSearcher;
    };
    }

    @Override
    protected RuntimeException createResourceClosedException() {
      return new IllegalStateException("Closed");
    }

    @Override
    protected Owned<UnshardedIndexSearchers> prepareSend() {
      LLIndexSearcher indexSearcher = this.indexSearcher;
      this.makeInaccessible();
      return drop -> new UnshardedIndexSearchers(indexSearcher, drop);
    }

    private void makeInaccessible() {
      this.indexSearcher = null;
    }

    private static class CloseOnDrop implements Drop<UnshardedIndexSearchers> {

      private final Drop<UnshardedIndexSearchers> delegate;

      public CloseOnDrop(Drop<UnshardedIndexSearchers> drop) {
        this.delegate = drop;
      }

      @Override
      public void drop(UnshardedIndexSearchers obj) {
        try {
          if (obj.indexSearcher != null) obj.indexSearcher.close();
          delegate.drop(obj);
        } finally {
          obj.makeInaccessible();
        }
      }
    }
  }

  IndexSearcher shard(int shardIndex);
  class ShardedIndexSearchers extends ResourceSupport<IndexSearchers, ShardedIndexSearchers>
      implements IndexSearchers {

    private List<LLIndexSearcher> indexSearchers;

    public ShardedIndexSearchers(List<LLIndexSearcher> indexSearchers, Drop<ShardedIndexSearchers> drop) {
      super(new CloseOnDrop(drop));
      this.indexSearchers = indexSearchers;
    }

    @Override
    public LLIndexSearcher shard(int shardIndex) {
      if (!isOwned()) {
        throw attachTrace(new IllegalStateException("ShardedIndexSearchers must be owned to be used"));
      }
      if (shardIndex < 0) {
        throw new IndexOutOfBoundsException("Shard index " + shardIndex + " is invalid");
      }
      return indexSearchers.get(shardIndex);
    }

    @Override
    protected RuntimeException createResourceClosedException() {
      return new IllegalStateException("Closed");
    }

    @Override
    protected Owned<ShardedIndexSearchers> prepareSend() {
      List<LLIndexSearcher> indexSearchers = this.indexSearchers;
      this.makeInaccessible();
      return drop -> new ShardedIndexSearchers(indexSearchers, drop);
    }

    private void makeInaccessible() {
      this.indexSearchers = null;
    }

    private static class CloseOnDrop implements Drop<ShardedIndexSearchers> {

      private final Drop<ShardedIndexSearchers> delegate;

      public CloseOnDrop(Drop<ShardedIndexSearchers> drop) {
        this.delegate = drop;
      }

      @Override
      public void drop(ShardedIndexSearchers obj) {
        try {
          delegate.drop(obj);
        } finally {
          obj.makeInaccessible();
        }
      }
    }
  }
}
@@ -1,28 +0,0 @@
package it.cavallium.dbengine.lucene.searcher;

import org.apache.lucene.search.IndexSearcher;
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Scheduler;

public class LocalLuceneWrapper implements LuceneLocalSearcher {

  private final LuceneMultiSearcher luceneMultiSearcher;

  public LocalLuceneWrapper(LuceneMultiSearcher luceneMultiSearcher) {
    this.luceneMultiSearcher = luceneMultiSearcher;
  }

  @Override
  public Mono<LuceneSearchResult> collect(IndexSearcher indexSearcher,
      Mono<Void> releaseIndexSearcher,
      LocalQueryParams queryParams,
      String keyFieldName,
      Scheduler scheduler) {
    var shardSearcher = luceneMultiSearcher.createShardSearcher(queryParams);
    return shardSearcher
        .flatMap(luceneShardSearcher -> luceneShardSearcher
            .searchOn(indexSearcher, releaseIndexSearcher, queryParams, scheduler)
            .then(luceneShardSearcher.collect(queryParams, keyFieldName, scheduler))
        );
  }
}
@ -1,21 +1,18 @@
package it.cavallium.dbengine.lucene.searcher;

import it.cavallium.dbengine.client.query.current.data.QueryParams;
import org.apache.lucene.search.IndexSearcher;
import io.net5.buffer.api.Resource;
import io.net5.buffer.api.Send;
import it.cavallium.dbengine.database.disk.LLIndexSearcher;
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Scheduler;

public interface LuceneLocalSearcher {

	/**
	 * @param indexSearcher Lucene index searcher
	 * @param indexSearcherMono Lucene index searcher
	 * @param queryParams the query parameters
	 * @param keyFieldName the name of the key field
	 * @param scheduler a blocking scheduler
	 */
	Mono<LuceneSearchResult> collect(IndexSearcher indexSearcher,
			Mono<Void> releaseIndexSearcher,
	Mono<Send<LuceneSearchResult>> collect(Mono<Send<LLIndexSearcher>> indexSearcherMono,
			LocalQueryParams queryParams,
			String keyFieldName,
			Scheduler scheduler);
			String keyFieldName);
}
@ -1,28 +1,32 @@
package it.cavallium.dbengine.lucene.searcher;

import it.cavallium.dbengine.client.query.current.data.QueryParams;
import it.cavallium.dbengine.database.LLKeyScore;
import it.cavallium.dbengine.lucene.LuceneUtils;
import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Sort;
import org.jetbrains.annotations.Nullable;
import org.warp.commonutils.log.Logger;
import org.warp.commonutils.log.LoggerFactory;
import io.net5.buffer.api.Resource;
import io.net5.buffer.api.Send;
import it.cavallium.dbengine.database.disk.LLIndexSearcher;
import org.jetbrains.annotations.NotNull;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Scheduler;

public interface LuceneMultiSearcher {
public interface LuceneMultiSearcher extends LuceneLocalSearcher {

	/**
	 * Do a lucene query, receiving the single results using a consumer
	 * @param queryParams the query parameters
	 * @param keyFieldName the name of the key field
	 */
	Mono<LuceneShardSearcher> createShardSearcher(LocalQueryParams queryParams);
	Mono<Send<LuceneSearchResult>> collect(Flux<Send<LLIndexSearcher>> indexSearchersFlux,
			LocalQueryParams queryParams,
			String keyFieldName);

	/**
	 * @param indexSearcherMono Lucene index searcher
	 * @param queryParams the query parameters
	 * @param keyFieldName the name of the key field
	 */
	@Override
	default Mono<Send<LuceneSearchResult>> collect(Mono<Send<LLIndexSearcher>> indexSearcherMono,
			LocalQueryParams queryParams,
			String keyFieldName) {
		return this.collect(indexSearcherMono.flux(), queryParams, keyFieldName);
	}
}
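With LuceneMultiSearcher now extending LuceneLocalSearcher, a multi searcher can be used anywhere a single-shard searcher is expected: the default collect above just adapts the single Mono into a one-element Flux. A hedged usage sketch, assuming LocalQueryParams lives in the same package as the searchers (as the unqualified references in this diff suggest) and that the searcher and query values are prepared elsewhere:

import io.net5.buffer.api.Send;
import it.cavallium.dbengine.database.disk.LLIndexSearcher;
import it.cavallium.dbengine.lucene.searcher.LocalQueryParams;
import it.cavallium.dbengine.lucene.searcher.LuceneMultiSearcher;
import it.cavallium.dbengine.lucene.searcher.LuceneSearchResult;
import reactor.core.publisher.Mono;

// Hedged sketch: searching a single shard through the unified entry point.
final class SingleShardSearchExample {

	static Mono<Send<LuceneSearchResult>> searchOneShard(LuceneMultiSearcher multiSearcher,
			Mono<Send<LLIndexSearcher>> singleSearcherMono,
			LocalQueryParams queryParams,
			String keyFieldName) {
		// The default method turns the Mono into a one-element Flux and delegates
		// to the Flux-based collect, so this is equivalent to:
		// multiSearcher.collect(singleSearcherMono.flux(), queryParams, keyFieldName)
		return multiSearcher.collect(singleSearcherMono, queryParams, keyFieldName);
	}
}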
@ -1,7 +1,11 @@
package it.cavallium.dbengine.lucene.searcher;

import io.net5.buffer.api.Drop;
import io.net5.buffer.api.Owned;
import io.net5.buffer.api.internal.ResourceSupport;
import it.cavallium.dbengine.client.query.current.data.TotalHitsCount;
import it.cavallium.dbengine.database.LLKeyScore;
import it.cavallium.dbengine.database.LLSearchResultShard;
import it.cavallium.dbengine.database.disk.LLLocalKeyValueDatabase;
import java.io.IOException;
import java.util.Objects;
@ -10,39 +14,33 @@ import org.warp.commonutils.log.LoggerFactory;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

public final class LuceneSearchResult {
public final class LuceneSearchResult extends ResourceSupport<LuceneSearchResult, LuceneSearchResult> {

	private static final Logger logger = LoggerFactory.getLogger(LuceneSearchResult.class);

	private volatile boolean releaseCalled;
	private TotalHitsCount totalHitsCount;
	private Flux<LLKeyScore> results;

	private final TotalHitsCount totalHitsCount;
	private final Flux<LLKeyScore> results;
	private final Mono<Void> release;

	public LuceneSearchResult(TotalHitsCount totalHitsCount, Flux<LLKeyScore> results, Mono<Void> release) {
	public LuceneSearchResult(TotalHitsCount totalHitsCount, Flux<LLKeyScore> results, Drop<LuceneSearchResult> drop) {
		super(new LuceneSearchResult.CloseOnDrop(drop));
		this.totalHitsCount = totalHitsCount;
		this.results = results;
		this.release = Mono.fromRunnable(() -> {
			if (releaseCalled) {
				logger.warn(this.getClass().getName() + "::release has been called twice!");
			}
			releaseCalled = true;
		}).then(release);
	}

	public TotalHitsCount totalHitsCount() {
		if (!isOwned()) {
			throw attachTrace(new IllegalStateException("LuceneSearchResult must be owned to be used"));
		}
		return totalHitsCount;
	}

	public Flux<LLKeyScore> results() {
		if (!isOwned()) {
			throw attachTrace(new IllegalStateException("LuceneSearchResult must be owned to be used"));
		}
		return results;
	}

	public Mono<Void> release() {
		return release;
	}

	@Override
	public boolean equals(Object obj) {
		if (obj == this)
@ -63,13 +61,36 @@ public final class LuceneSearchResult {
		return "LuceneSearchResult[" + "totalHitsCount=" + totalHitsCount + ", " + "results=" + results + ']';
	}

	@SuppressWarnings("deprecation")
	@Override
	protected void finalize() throws Throwable {
		if (!releaseCalled) {
			logger.warn(this.getClass().getName() + "::release has not been called before class finalization!");
	protected RuntimeException createResourceClosedException() {
		return new IllegalStateException("Closed");
	}

	@Override
	protected Owned<LuceneSearchResult> prepareSend() {
		var totalHitsCount = this.totalHitsCount;
		var results = this.results;
		makeInaccessible();
		return drop -> new LuceneSearchResult(totalHitsCount, results, drop);
	}

	private void makeInaccessible() {
		this.totalHitsCount = null;
		this.results = null;
	}

	private static class CloseOnDrop implements Drop<LuceneSearchResult> {

		private final Drop<LuceneSearchResult> delegate;

		public CloseOnDrop(Drop<LuceneSearchResult> drop) {
			this.delegate = drop;
		}

		@Override
		public void drop(LuceneSearchResult obj) {
			delegate.drop(obj);
		}
		super.finalize();
	}

}
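Since LuceneSearchResult is now a ResourceSupport, consumers get it wrapped in a Send: the received copy must stay open while its hits are streamed and be closed afterwards so the Drop installed by the searcher can release the underlying index searcher, and a Send that is dropped before being received should also be closed. A hedged consumption sketch, assuming close() is available through the Resource contract that ResourceSupport implements:

import io.net5.buffer.api.Send;
import it.cavallium.dbengine.database.LLKeyScore;
import it.cavallium.dbengine.lucene.searcher.LuceneSearchResult;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

// Hedged sketch: keep the received result open while its hits are consumed,
// close it when the stream terminates, and close any discarded Send.
final class SearchResultStreamingExample {

	static Flux<LLKeyScore> streamHits(Mono<Send<LuceneSearchResult>> resultMono) {
		return Flux
				.usingWhen(
						resultMono.map(Send::receive),
						LuceneSearchResult::results,
						result -> Mono.fromRunnable(result::close)
				)
				// If the pipeline is cancelled before the Send is received, close it here.
				.doOnDiscard(Send.class, Send::close);
	}
}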
@ -1,26 +0,0 @@
package it.cavallium.dbengine.lucene.searcher;

import it.cavallium.dbengine.client.query.current.data.QueryParams;
import org.apache.lucene.search.IndexSearcher;
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Scheduler;

public interface LuceneShardSearcher {

	/**
	 * @param indexSearcher the index searcher, which contains all the lucene data
	 * @param queryParams the query parameters
	 * @param scheduler a blocking scheduler
	 */
	Mono<Void> searchOn(IndexSearcher indexSearcher,
			Mono<Void> indexSearcherRelease,
			LocalQueryParams queryParams,
			Scheduler scheduler);

	/**
	 * @param queryParams the query parameters
	 * @param keyFieldName the name of the key field
	 * @param collectorScheduler a blocking scheduler
	 */
	Mono<LuceneSearchResult> collect(LocalQueryParams queryParams, String keyFieldName, Scheduler collectorScheduler);
}
@ -8,13 +8,12 @@ import org.apache.lucene.search.CollectorManager;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopFieldCollector;
import org.apache.lucene.search.TopFieldDocs;
import reactor.core.publisher.Mono;

public class ScoredLuceneMultiSearcher implements LuceneMultiSearcher {

	@Override
	public Mono<LuceneShardSearcher> createShardSearcher(LocalQueryParams queryParams) {
	public Mono<LuceneMultiSearcher> createShardSearcher(LocalQueryParams queryParams) {
		return Mono
				.fromCallable(() -> {
					Sort luceneSort = queryParams.sort();
@ -1,7 +1,6 @@
package it.cavallium.dbengine.lucene.searcher;

import static it.cavallium.dbengine.lucene.searcher.CurrentPageInfo.EMPTY_STATUS;
import static it.cavallium.dbengine.lucene.searcher.CurrentPageInfo.TIE_BREAKER;

import it.cavallium.dbengine.database.LLKeyScore;
import it.cavallium.dbengine.lucene.LuceneUtils;
@ -10,27 +9,19 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import org.apache.lucene.search.CollectorManager;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopFieldCollector;
import org.apache.lucene.search.TopFieldDocs;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.publisher.Sinks;
import reactor.core.publisher.Sinks.Empty;
import reactor.core.scheduler.Scheduler;
import reactor.core.scheduler.Schedulers;

class ScoredSimpleLuceneShardSearcher implements LuceneShardSearcher {
class ScoredSimpleLuceneShardSearcher implements LuceneMultiSearcher {

	private final Object lock = new Object();
	private final List<IndexSearcher> indexSearchersArray = new ArrayList<>();
@ -4,116 +4,141 @@ import static it.cavallium.dbengine.lucene.searcher.CurrentPageInfo.EMPTY_STATUS
import static it.cavallium.dbengine.lucene.searcher.PaginationInfo.FIRST_PAGE_LIMIT;
import static it.cavallium.dbengine.lucene.searcher.PaginationInfo.MAX_SINGLE_SEARCH_LIMIT;

import io.net5.buffer.api.Buffer;
import io.net5.buffer.api.Resource;
import io.net5.buffer.api.Send;
import it.cavallium.dbengine.client.query.current.data.TotalHitsCount;
import it.cavallium.dbengine.database.LLKeyScore;
import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.database.disk.LLIndexSearcher;
import it.cavallium.dbengine.lucene.LuceneUtils;
import java.io.IOException;
import java.util.Arrays;
import java.util.Objects;
import java.util.Optional;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopDocsCollector;
import org.apache.lucene.search.TotalHits;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.publisher.SignalType;
import reactor.core.scheduler.Scheduler;
import reactor.core.scheduler.Schedulers;
import reactor.util.function.Tuple2;
import reactor.util.function.Tuple3;
import reactor.util.function.Tuples;

public class SimpleLuceneLocalSearcher implements LuceneLocalSearcher {

	@Override
	public Mono<LuceneSearchResult> collect(IndexSearcher indexSearcher,
			Mono<Void> releaseIndexSearcher,
	public Mono<Send<LuceneSearchResult>> collect(Mono<Send<LLIndexSearcher>> indexSearcherMono,
			LocalQueryParams queryParams,
			String keyFieldName,
			Scheduler scheduler) {
		return Mono
				.fromCallable(() -> {
					if (Schedulers.isInNonBlockingThread()) {
						throw new UnsupportedOperationException("Called collect in a nonblocking thread");
					}
					Objects.requireNonNull(queryParams.scoreMode(), "ScoreMode must not be null");
					PaginationInfo paginationInfo;
					if (queryParams.limit() <= MAX_SINGLE_SEARCH_LIMIT) {
						paginationInfo = new PaginationInfo(queryParams.limit(), queryParams.offset(), queryParams.limit(), true);
					} else {
						paginationInfo = new PaginationInfo(queryParams.limit(), queryParams.offset(), FIRST_PAGE_LIMIT, false);
					}
					TopDocs firstPageTopDocs;
					{
						TopDocsCollector<ScoreDoc> firstPageCollector = TopDocsSearcher.getTopDocsCollector(
								queryParams.sort(),
								LuceneUtils.safeLongToInt(paginationInfo.firstPageOffset() + paginationInfo.firstPageLimit()),
								null,
								LuceneUtils.totalHitsThreshold(),
								!paginationInfo.forceSinglePage(),
								queryParams.isScored());
						//noinspection BlockingMethodInNonBlockingContext
						indexSearcher.search(queryParams.query(), firstPageCollector);
						firstPageTopDocs = firstPageCollector.topDocs(LuceneUtils.safeLongToInt(paginationInfo.firstPageOffset()),
								LuceneUtils.safeLongToInt(paginationInfo.firstPageLimit())
						);
					}
					Flux<LLKeyScore> firstPageMono = LuceneUtils
							.convertHits(Flux.fromArray(firstPageTopDocs.scoreDocs), IndexSearchers.unsharded(indexSearcher),
									keyFieldName, scheduler, true)
							.take(queryParams.limit(), true);
			String keyFieldName) {

		Objects.requireNonNull(queryParams.scoreMode(), "ScoreMode must not be null");
		PaginationInfo paginationInfo;
		if (queryParams.limit() <= MAX_SINGLE_SEARCH_LIMIT) {
			paginationInfo = new PaginationInfo(queryParams.limit(), queryParams.offset(), queryParams.limit(), true);
		} else {
			paginationInfo = new PaginationInfo(queryParams.limit(), queryParams.offset(), FIRST_PAGE_LIMIT, false);
		}

		return indexSearcherMono
				.flatMap(indexSearcherToReceive -> {
					var indexSearcher = indexSearcherToReceive.receive();
					var indexSearchers = IndexSearchers.unsharded(indexSearcher);
					return Mono
							.fromCallable(() -> {
								LLUtils.ensureBlocking();
								TopDocsCollector<ScoreDoc> firstPageCollector = TopDocsSearcher.getTopDocsCollector(
										queryParams.sort(),
										LuceneUtils.safeLongToInt(paginationInfo.firstPageOffset() + paginationInfo.firstPageLimit()),
										null,
										LuceneUtils.totalHitsThreshold(),
										!paginationInfo.forceSinglePage(),
										queryParams.isScored());

								indexSearcher.getIndexSearcher().search(queryParams.query(), firstPageCollector);
								return firstPageCollector.topDocs(LuceneUtils.safeLongToInt(paginationInfo.firstPageOffset()),
										LuceneUtils.safeLongToInt(paginationInfo.firstPageLimit())
								);
							})
							.map(firstPageTopDocs -> {
								Flux<LLKeyScore> firstPageHitsFlux = LuceneUtils.convertHits(Flux.fromArray(firstPageTopDocs.scoreDocs),
										indexSearchers, keyFieldName, true)
										.take(queryParams.limit(), true);

								return Tuples.of(Optional.ofNullable(LuceneUtils.getLastScoreDoc(firstPageTopDocs.scoreDocs)),
										LuceneUtils.convertTotalHitsCount(firstPageTopDocs.totalHits), firstPageHitsFlux);
							})
							.map(firstResult -> {
								var firstPageLastScoreDoc = firstResult.getT1();
								var totalHitsCount = firstResult.getT2();
								var firstPageFlux = firstResult.getT3();


								Flux<LLKeyScore> nextHits;
								if (paginationInfo.forceSinglePage() || paginationInfo.totalLimit() - paginationInfo.firstPageLimit() <= 0) {
									nextHits = null;
								} else {
									nextHits = Flux.defer(() -> Flux
											.<TopDocs, CurrentPageInfo>generate(
													() -> new CurrentPageInfo(LuceneUtils.getLastScoreDoc(firstPageTopDocs.scoreDocs), paginationInfo.totalLimit() - paginationInfo.firstPageLimit(), 1),
													(s, sink) -> {
														if (Schedulers.isInNonBlockingThread()) {
															throw new UnsupportedOperationException("Called collect in a nonblocking thread");
														}
														if (s.last() != null && s.remainingLimit() > 0) {
															TopDocs pageTopDocs;
															try {
																TopDocsCollector<ScoreDoc> collector = TopDocsSearcher.getTopDocsCollector(queryParams.sort(),
																		s.currentPageLimit(), s.last(), LuceneUtils.totalHitsThreshold(), true,
																		queryParams.isScored());
																//noinspection BlockingMethodInNonBlockingContext
																indexSearcher.search(queryParams.query(), collector);
																pageTopDocs = collector.topDocs();
															} catch (IOException e) {
																sink.error(e);
																return EMPTY_STATUS;
															}
															var pageLastDoc = LuceneUtils.getLastScoreDoc(pageTopDocs.scoreDocs);
															sink.next(pageTopDocs);
															return new CurrentPageInfo(pageLastDoc, s.remainingLimit() - s.currentPageLimit(), s.pageIndex() + 1);
														} else {
															sink.complete();
															return EMPTY_STATUS;
														}
													},
													s -> {}
											)
											.subscribeOn(scheduler)
											.flatMapIterable(topDocs -> Arrays.asList(topDocs.scoreDocs))
											.transform(topFieldDocFlux -> LuceneUtils.convertHits(topFieldDocFlux,
													IndexSearchers.unsharded(indexSearcher), keyFieldName, scheduler, true))
									);
								}
								Flux<LLKeyScore> nextHits;
								if (paginationInfo.forceSinglePage() || paginationInfo.totalLimit() - paginationInfo.firstPageLimit() <= 0) {
									nextHits = null;
								} else {
									nextHits = Flux.defer(() -> Flux
											.<TopDocs, CurrentPageInfo>generate(
													() -> new CurrentPageInfo(firstPageLastScoreDoc.orElse(null), paginationInfo.totalLimit() - paginationInfo.firstPageLimit(), 1),
													(s, sink) -> {
														LLUtils.ensureBlocking();
														if (s.last() != null && s.remainingLimit() > 0) {
															TopDocs pageTopDocs;
															try {
																TopDocsCollector<ScoreDoc> collector = TopDocsSearcher.getTopDocsCollector(queryParams.sort(),
																		s.currentPageLimit(), s.last(), LuceneUtils.totalHitsThreshold(), true,
																		queryParams.isScored());
																indexSearcher.getIndexSearcher().search(queryParams.query(), collector);
																pageTopDocs = collector.topDocs();
															} catch (IOException e) {
																sink.error(e);
																return EMPTY_STATUS;
															}
															var pageLastDoc = LuceneUtils.getLastScoreDoc(pageTopDocs.scoreDocs);
															sink.next(pageTopDocs);
															return new CurrentPageInfo(pageLastDoc, s.remainingLimit() - s.currentPageLimit(), s.pageIndex() + 1);
														} else {
															sink.complete();
															return EMPTY_STATUS;
														}
													},
													s -> {}
											)
											.subscribeOn(Schedulers.boundedElastic())
											.flatMapIterable(topDocs -> Arrays.asList(topDocs.scoreDocs))
											.transform(topFieldDocFlux -> LuceneUtils.convertHits(topFieldDocFlux, indexSearchers,
													keyFieldName, true))
									);
								}

								Flux<LLKeyScore> combinedFlux;
								Flux<LLKeyScore> combinedFlux;

								if (nextHits != null) {
									combinedFlux = firstPageMono
											.concatWith(nextHits);
								} else {
									combinedFlux = firstPageMono;
								}

								return new LuceneSearchResult(LuceneUtils.convertTotalHitsCount(firstPageTopDocs.totalHits), combinedFlux,
										//.transform(flux -> LuceneUtils.filterTopDoc(flux, queryParams)),
										releaseIndexSearcher
								);
							})
							.subscribeOn(scheduler);
								if (nextHits != null) {
									combinedFlux = firstPageFlux
											.concatWith(nextHits);
								} else {
									combinedFlux = firstPageFlux;
								}
								return new LuceneSearchResult(totalHitsCount, combinedFlux, d -> {
									indexSearcher.close();
									indexSearchers.close();
								}).send();
							})
							.doFinally(s -> {
								// Close searchers if the search result has not been returned
								if (s != SignalType.ON_COMPLETE) {
									indexSearcher.close();
									indexSearchers.close();
								}
							});
						}
				)
				.doOnDiscard(Send.class, Send::close);
	}
}
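The searcher above guards against two leak paths: if the chain terminates without handing the result downstream (error or cancellation), doFinally closes the searchers it had already received; if the result was emitted but a downstream operator drops it, doOnDiscard(Send.class, Send::close) closes the discarded Send. A hedged, generic sketch of the same guard around an arbitrary closeable resource; acquire() and use() are hypothetical stand-ins, and use() is expected to transfer ownership of the resource into the returned Send (for example through a Drop), mirroring the code above:

import io.net5.buffer.api.Send;
import java.util.function.Function;
import reactor.core.publisher.Mono;
import reactor.core.publisher.SignalType;

// Hedged sketch of the leak guard used above: the resource is closed either by the Drop of the
// value sent downstream, by doFinally when no value was emitted, or by doOnDiscard.
final class LeakGuardExample {

	static <R extends AutoCloseable> Mono<Send<?>> guarded(Mono<R> acquire, Function<R, Send<?>> use) {
		return acquire
				.flatMap(resource -> Mono
						.fromCallable(() -> use.apply(resource))
						.doFinally(signal -> {
							// Only close here when the value was NOT emitted normally;
							// on ON_COMPLETE the Send's Drop owns the cleanup.
							if (signal != SignalType.ON_COMPLETE) {
								try {
									resource.close();
								} catch (Exception e) {
									// best-effort cleanup; a real implementation would log this
								}
							}
						}))
				.doOnDiscard(Send.class, Send::close);
	}
}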
@ -9,7 +9,7 @@ import reactor.core.publisher.Mono;
public class UnscoredPagedLuceneMultiSearcher implements LuceneMultiSearcher {

	@Override
	public Mono<LuceneShardSearcher> createShardSearcher(LocalQueryParams queryParams) {
	public Mono<LuceneMultiSearcher> createShardSearcher(LocalQueryParams queryParams) {
		return Mono
				.fromCallable(() -> {
					if (queryParams.isScored()) {
@ -10,7 +10,6 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
import org.apache.lucene.search.CollectorManager;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
@ -22,7 +21,7 @@ import reactor.core.publisher.Mono;
import reactor.core.scheduler.Scheduler;
import reactor.core.scheduler.Schedulers;

class UnscoredPagedLuceneShardSearcher implements LuceneShardSearcher {
class UnscoredPagedLuceneShardSearcher implements LuceneMultiSearcher {

	private final Object lock = new Object();
	private final List<IndexSearcher> indexSearchersArray = new ArrayList<>();
@ -2,10 +2,8 @@ package it.cavallium.dbengine.lucene.searcher;

import it.cavallium.dbengine.client.query.current.data.TotalHitsCount;
import it.cavallium.dbengine.lucene.LuceneUtils;
import it.unimi.dsi.fastutil.objects.ObjectArrayList;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
@ -18,14 +16,12 @@ import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.SimpleCollector;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.publisher.Sinks;
import reactor.core.publisher.Sinks.EmitResult;
import reactor.core.publisher.Sinks.Many;
import reactor.core.scheduler.Scheduler;
import reactor.core.scheduler.Schedulers;
import reactor.util.concurrent.Queues;

public class UnscoredUnsortedContinuousLuceneMultiSearcher implements LuceneMultiSearcher {

@ -34,7 +30,7 @@ public class UnscoredUnsortedContinuousLuceneMultiSearcher implements LuceneMult
			.availableProcessors(), Schedulers.DEFAULT_BOUNDED_ELASTIC_QUEUESIZE, "UnscoredUnsortedExecutor");

	@Override
	public Mono<LuceneShardSearcher> createShardSearcher(LocalQueryParams queryParams) {
	public Mono<LuceneMultiSearcher> createShardSearcher(LocalQueryParams queryParams) {
		return Mono
				.fromCallable(() -> {
					AtomicBoolean alreadySubscribed = new AtomicBoolean(false);
@ -92,7 +88,7 @@ public class UnscoredUnsortedContinuousLuceneMultiSearcher implements LuceneMult
						}
					};

					return new LuceneShardSearcher() {
					return new LuceneMultiSearcher() {
						private final Object lock = new Object();
						private final List<IndexSearcher> indexSearchersArray = new ArrayList<>();
						private final List<Mono<Void>> indexSearcherReleasersArray = new ArrayList<>();