Update lucene, update rocksdb
parent e03afafcee
commit 04623b754c

pom.xml: 25 lines changed
@@ -15,6 +15,7 @@
 <revision>0-SNAPSHOT</revision>
 <dbengine.ci>false</dbengine.ci>
 <micrometer.version>1.7.4</micrometer.version>
+<lucene.version>9.1.0</lucene.version>
 </properties>
 <repositories>
 <repository>
@@ -133,57 +134,57 @@
 <dependency>
 <groupId>org.apache.lucene</groupId>
 <artifactId>lucene-core</artifactId>
-<version>9.0.0</version>
+<version>${lucene.version}</version>
 </dependency>
 <dependency>
 <groupId>org.apache.lucene</groupId>
 <artifactId>lucene-join</artifactId>
-<version>9.0.0</version>
+<version>${lucene.version}</version>
 </dependency>
 <dependency>
 <groupId>org.apache.lucene</groupId>
 <artifactId>lucene-analysis-common</artifactId>
-<version>9.0.0</version>
+<version>${lucene.version}</version>
 </dependency>
 <dependency>
 <groupId>org.apache.lucene</groupId>
 <artifactId>lucene-analysis-icu</artifactId>
-<version>9.0.0</version>
+<version>${lucene.version}</version>
 </dependency>
 <dependency>
 <groupId>org.apache.lucene</groupId>
 <artifactId>lucene-codecs</artifactId>
-<version>9.0.0</version>
+<version>${lucene.version}</version>
 </dependency>
 <dependency>
 <groupId>org.apache.lucene</groupId>
 <artifactId>lucene-backward-codecs</artifactId>
-<version>9.0.0</version>
+<version>${lucene.version}</version>
 </dependency>
 <dependency>
 <groupId>org.apache.lucene</groupId>
 <artifactId>lucene-queries</artifactId>
-<version>9.0.0</version>
+<version>${lucene.version}</version>
 </dependency>
 <dependency>
 <groupId>org.apache.lucene</groupId>
 <artifactId>lucene-queryparser</artifactId>
-<version>9.0.0</version>
+<version>${lucene.version}</version>
 </dependency>
 <dependency>
 <groupId>org.apache.lucene</groupId>
 <artifactId>lucene-misc</artifactId>
-<version>9.0.0</version>
+<version>${lucene.version}</version>
 </dependency>
 <dependency>
 <groupId>org.apache.lucene</groupId>
 <artifactId>lucene-facet</artifactId>
-<version>9.0.0</version>
+<version>${lucene.version}</version>
 </dependency>
 <dependency>
 <groupId>org.apache.lucene</groupId>
 <artifactId>lucene-test-framework</artifactId>
-<version>9.0.0</version>
+<version>${lucene.version}</version>
 <scope>test</scope>
 </dependency>
 <dependency>
@@ -322,7 +323,7 @@
 <dependency>
 <groupId>org.rocksdb</groupId>
 <artifactId>rocksdbjni</artifactId>
-<version>7.1.1</version>
+<version>7.1.2</version>
 </dependency>
 <dependency>
 <groupId>org.apache.lucene</groupId>
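Every Lucene artifact now resolves through the shared lucene.version property (9.1.0), and rocksdbjni moves from 7.1.1 to 7.1.2. As a quick post-upgrade sanity check (not part of this commit; the class name below is illustrative), the Lucene version actually present on the classpath can be printed:

import org.apache.lucene.util.Version;

// Hypothetical helper class, not in this repository.
public class LuceneVersionCheck {
	public static void main(String[] args) {
		// Version.LATEST reflects the lucene-core jar on the classpath; expected to print 9.1.0 after this update.
		System.out.println(Version.LATEST);
	}
}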
@@ -41,6 +41,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Optional;
 import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ThreadLocalRandom;
 import java.util.concurrent.TimeUnit;
@@ -29,7 +29,7 @@ import it.cavallium.dbengine.lucene.LuceneHacks;
 import it.cavallium.dbengine.lucene.LuceneRocksDBManager;
 import it.cavallium.dbengine.lucene.LuceneUtils;
 import it.cavallium.dbengine.lucene.collector.Buckets;
-import it.cavallium.dbengine.lucene.directory.Lucene90CodecWithNoFieldCompression;
+import it.cavallium.dbengine.lucene.directory.Lucene91CodecWithNoFieldCompression;
 import it.cavallium.dbengine.lucene.mlt.MoreLikeThisTransformer;
 import it.cavallium.dbengine.lucene.searcher.AdaptiveLocalSearcher;
 import it.cavallium.dbengine.lucene.searcher.BucketParams;
@@ -194,7 +194,7 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 }
 if (isFilesystemCompressed) {
 indexWriterConfig.setUseCompoundFile(false);
-indexWriterConfig.setCodec(new Lucene90CodecWithNoFieldCompression());
+indexWriterConfig.setCodec(new Lucene91CodecWithNoFieldCompression());
 }
 logger.trace("WriterSchedulerMaxThreadCount: {}", writerSchedulerMaxThreadCount);
 indexWriterConfig.setMergeScheduler(mergeScheduler);
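The hunk above switches the no-compression codec to its Lucene 9.1 counterpart when the underlying filesystem already compresses data. A minimal sketch of how such a codec is plugged into an IndexWriter (analyzer, directory, and class name are illustrative, not the project's actual setup):

import it.cavallium.dbengine.lucene.directory.Lucene91CodecWithNoFieldCompression;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.ByteBuffersDirectory;

public class CodecWiringSketch {
	public static void main(String[] args) throws Exception {
		IndexWriterConfig indexWriterConfig = new IndexWriterConfig(new StandardAnalyzer());
		indexWriterConfig.setUseCompoundFile(false);
		// Same setCodec call as in the diff; the codec class comes from this repository.
		indexWriterConfig.setCodec(new Lucene91CodecWithNoFieldCompression());
		try (IndexWriter writer = new IndexWriter(new ByteBuffersDirectory(), indexWriterConfig)) {
			writer.commit();
		}
	}
}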
@@ -109,7 +109,7 @@ public interface FullDocs<T extends LLDoc> extends ResourceIterable<T> {

 for (int compIDX = 0; compIDX < sortFields.length; ++compIDX) {
 SortField sortField = sortFields[compIDX];
-comparators[compIDX] = sortField.getComparator(1, compIDX);
+comparators[compIDX] = sortField.getComparator(1, compIDX == 0);
 reverseMul[compIDX] = sortField.getReverse() ? -1 : 1;
 }

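Lucene 9.1 changed the second argument of SortField.getComparator from an int sort position to a boolean enableSkipping flag, which is why the call above now passes compIDX == 0: skipping may only be enabled for the primary sort field. A self-contained sketch of the same pattern (the sort fields used here are illustrative):

import org.apache.lucene.search.FieldComparator;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;

public class ComparatorSetupSketch {
	public static void main(String[] args) {
		Sort sort = new Sort(new SortField("timestamp", SortField.Type.LONG, true), SortField.FIELD_DOC);
		SortField[] sortFields = sort.getSort();
		FieldComparator<?>[] comparators = new FieldComparator<?>[sortFields.length];
		int[] reverseMul = new int[sortFields.length];
		for (int compIDX = 0; compIDX < sortFields.length; ++compIDX) {
			SortField sortField = sortFields[compIDX];
			// Only the primary sort field (index 0) is allowed to enable skipping.
			comparators[compIDX] = sortField.getComparator(1, compIDX == 0);
			reverseMul[compIDX] = sortField.getReverse() ? -1 : 1;
		}
	}
}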
@@ -23,7 +23,7 @@ import it.cavallium.dbengine.lucene.hugepq.search.comparators.HugePqDocComparator
 public class HugePqComparator {

 public static FieldComparator<?> getComparator(LLTempHugePqEnv env, SortField sortField,
-int numHits, int sortPos) {
+int numHits, boolean enableSkipping) {
 var sortFieldClass = sortField.getClass();
 if (sortFieldClass == org.apache.lucene.search.SortedNumericSortField.class) {
 var nf = (org.apache.lucene.search.SortedNumericSortField) sortField;
@@ -32,7 +32,7 @@ public class HugePqComparator {
 var reverse = nf.getReverse();
 var selector = nf.getSelector();
 final FieldComparator<?> fieldComparator = switch (type) {
-case INT -> new IntComparator(env, numHits, nf.getField(), (Integer) missingValue, reverse, sortPos) {
+case INT -> new IntComparator(env, numHits, nf.getField(), (Integer) missingValue, reverse, enableSkipping) {
 @Override
 public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException {
 return new IntLeafComparator(context) {
@@ -44,7 +44,7 @@ public class HugePqComparator {
 };
 }
 };
-case FLOAT -> new FloatComparator(env, numHits, nf.getField(), (Float) missingValue, reverse, sortPos) {
+case FLOAT -> new FloatComparator(env, numHits, nf.getField(), (Float) missingValue, reverse, enableSkipping) {
 @Override
 public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException {
 return new FloatLeafComparator(context) {
@@ -56,7 +56,7 @@ public class HugePqComparator {
 };
 }
 };
-case LONG -> new LongComparator(env, numHits, nf.getField(), (Long) missingValue, reverse, sortPos) {
+case LONG -> new LongComparator(env, numHits, nf.getField(), (Long) missingValue, reverse, enableSkipping) {
 @Override
 public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException {
 return new LongLeafComparator(context) {
@@ -68,7 +68,7 @@ public class HugePqComparator {
 };
 }
 };
-case DOUBLE -> new DoubleComparator(env, numHits, nf.getField(), (Double) missingValue, reverse, sortPos) {
+case DOUBLE -> new DoubleComparator(env, numHits, nf.getField(), (Double) missingValue, reverse, enableSkipping) {
 @Override
 public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException {
 return new DoubleLeafComparator(context) {
@@ -93,18 +93,18 @@
 var comparatorSource = sortField.getComparatorSource();
 return switch (sortField.getType()) {
 case SCORE -> new RelevanceComparator(env, numHits);
-case DOC -> new HugePqDocComparator(env, numHits, reverse, sortPos);
+case DOC -> new HugePqDocComparator(env, numHits, reverse, enableSkipping);
 case INT -> new IntComparator(env, numHits, field, (Integer) missingValue,
-reverse, sortPos);
+reverse, enableSkipping);
 case FLOAT -> new FloatComparator(env, numHits, field, (Float) missingValue,
-reverse, sortPos);
+reverse, enableSkipping);
 case LONG -> new LongComparator(env, numHits, field, (Long) missingValue,
-reverse, sortPos);
+reverse, enableSkipping);
 case DOUBLE -> new DoubleComparator(env, numHits, field, (Double) missingValue,
-reverse, sortPos);
+reverse, enableSkipping);
 case CUSTOM -> {
 assert comparatorSource != null;
-yield comparatorSource.newComparator(field, numHits, sortPos, reverse);
+yield comparatorSource.newComparator(field, numHits, enableSkipping, reverse);
 }
 case STRING -> new TermOrdValComparator(env, numHits, field, missingValue == STRING_LAST);
 case STRING_VAL -> throw new NotImplementedException("String val sort field not implemented");
@@ -40,7 +40,7 @@ public class LLSlotDocCodec implements HugePqCodec<LLSlotDoc>, FieldValueHitQueu
 for (int i = 0; i < numComparators; ++i) {
 SortField field = fields[i];
 reverseMul[i] = field.getReverse() ? -1 : 1;
-comparators[i] = HugePqComparator.getComparator(env, field, numHits, i);
+comparators[i] = HugePqComparator.getComparator(env, field, numHits, i == 0);
 }
 comparator = new AbstractComparator(new ComparatorOptions().setMaxReusedBufferSize(0)) {
 @Override
@@ -14,7 +14,7 @@ public class RandomFieldComparatorSource extends FieldComparatorSource {
 }

 @Override
-public FieldComparator<?> newComparator(String fieldName, int numHits, int sortPos, boolean reversed) {
+public FieldComparator<?> newComparator(String fieldName, int numHits, boolean enableSkipping, boolean reversed) {
 return new RandomFieldComparator(rand.iterator(), numHits);
 }
 }
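The same Lucene 9.1 API change applies to FieldComparatorSource: newComparator now receives boolean enableSkipping instead of int sortPos. A minimal sketch of a custom source against the new signature (the class is hypothetical and simply delegates to a stock Lucene comparator):

import org.apache.lucene.search.FieldComparator;
import org.apache.lucene.search.FieldComparatorSource;
import org.apache.lucene.search.comparators.IntComparator;

public class ExampleIntComparatorSource extends FieldComparatorSource {
	@Override
	public FieldComparator<?> newComparator(String fieldName, int numHits, boolean enableSkipping, boolean reversed) {
		// Missing values sort as 0 here; pass the skipping flag straight through.
		return new IntComparator(numHits, fieldName, 0, reversed, enableSkipping);
	}
}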
@@ -39,8 +39,8 @@ public class DoubleComparator extends NumericComparator<Double> implements SafeC
 protected double bottom;

 public DoubleComparator(LLTempHugePqEnv env,
-int numHits, String field, Double missingValue, boolean reverse, int sortPos) {
-super(field, missingValue != null ? missingValue : 0.0, reverse, sortPos, Double.BYTES);
+int numHits, String field, Double missingValue, boolean reverse, boolean enableSkipping) {
+super(field, missingValue != null ? missingValue : 0.0, reverse, enableSkipping, Double.BYTES);
 values = new HugePqArray<>(env, new DoubleCodec(), numHits, 0d);
 }

@@ -39,8 +39,8 @@ public class FloatComparator extends NumericComparator<Float> implements SafeClo
 protected float bottom;

 public FloatComparator(LLTempHugePqEnv env,
-int numHits, String field, Float missingValue, boolean reverse, int sortPos) {
-super(field, missingValue != null ? missingValue : 0.0f, reverse, sortPos, Float.BYTES);
+int numHits, String field, Float missingValue, boolean reverse, boolean enableSkipping) {
+super(field, missingValue != null ? missingValue : 0.0f, reverse, enableSkipping, Float.BYTES);
 values = new HugePqArray<>(env, new FloatCodec(), numHits, 0f);
 }

@@ -39,8 +39,8 @@ public class IntComparator extends NumericComparator<Integer> implements SafeClo
 protected int bottom;

 public IntComparator(LLTempHugePqEnv env,
-int numHits, String field, Integer missingValue, boolean reverse, int sortPos) {
-super(field, missingValue != null ? missingValue : 0, reverse, sortPos, Integer.BYTES);
+int numHits, String field, Integer missingValue, boolean reverse, boolean enableSkipping) {
+super(field, missingValue != null ? missingValue : 0, reverse, enableSkipping, Integer.BYTES);
 values = new HugePqArray<>(env, new IntCodec(), numHits, 0);
 }

@@ -42,8 +42,8 @@ public class LongComparator extends NumericComparator<Long> implements SafeClose
 protected long bottom;

 public LongComparator(LLTempHugePqEnv env,
-int numHits, String field, Long missingValue, boolean reverse, int sortPos) {
-super(field, missingValue != null ? missingValue : 0L, reverse, sortPos, Long.BYTES);
+int numHits, String field, Long missingValue, boolean reverse, boolean enableSkipping) {
+super(field, missingValue != null ? missingValue : 0L, reverse, enableSkipping, Long.BYTES);
 values = new HugePqArray<>(env, new LongCodec(), numHits, 0L);
 }

@@ -1,15 +1,14 @@
 package it.cavallium.dbengine.lucene.directory;

+import org.apache.lucene.backward_codecs.lucene90.Lucene90Codec;
 import org.apache.lucene.codecs.FilterCodec;
 import org.apache.lucene.codecs.StoredFieldsFormat;
-import org.apache.lucene.codecs.lucene90.Lucene90Codec;
-import org.apache.lucene.codecs.lucene90.Lucene90StoredFieldsFormat;

-public final class Lucene90CodecWithNoFieldCompression extends FilterCodec {
+public final class Lucene91CodecWithNoFieldCompression extends FilterCodec {

 private final StoredFieldsFormat storedFieldsFormat;

-public Lucene90CodecWithNoFieldCompression() {
+public Lucene91CodecWithNoFieldCompression() {
 super("Lucene410CodecWithNoFieldCompression", new Lucene90Codec());
 storedFieldsFormat = new Lucene90NoCompressionStoredFieldsFormat();
 }
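The renamed codec still wraps Lucene90Codec, now imported from the lucene-backward-codecs module, and only replaces the stored-fields format. The hunk ends before the override, but the usual FilterCodec shape looks roughly like the following sketch (class name, constructor parameter, and the storedFieldsFormat() override are assumptions, not the project's exact code):

import org.apache.lucene.backward_codecs.lucene90.Lucene90Codec;
import org.apache.lucene.codecs.FilterCodec;
import org.apache.lucene.codecs.StoredFieldsFormat;

public final class NoFieldCompressionCodecSketch extends FilterCodec {

	private final StoredFieldsFormat storedFieldsFormat;

	public NoFieldCompressionCodecSketch(StoredFieldsFormat uncompressedStoredFields) {
		// The name passed here is what gets registered with Lucene's codec SPI.
		super("NoFieldCompressionCodecSketch", new Lucene90Codec());
		this.storedFieldsFormat = uncompressedStoredFields;
	}

	@Override
	public StoredFieldsFormat storedFieldsFormat() {
		return storedFieldsFormat;
	}
}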
@@ -43,11 +43,11 @@ public class HugePqDocComparator extends org.apache.lucene.search.comparators.DocComparator
 private boolean hitsThresholdReached;

 /** Creates a new comparator based on document ids for {@code numHits} */
-public HugePqDocComparator(LLTempHugePqEnv env, int numHits, boolean reverse, int sortPost) {
-super(0, reverse, sortPost);
+public HugePqDocComparator(LLTempHugePqEnv env, int numHits, boolean reverse, boolean enableSkipping) {
+super(0, reverse, enableSkipping);
 this.docIDs = new HugePqArray<>(env, new IntCodec(), numHits, 0);
 // skipping functionality is enabled if we are sorting by _doc in asc order as a primary sort
-this.enableSkipping = (!reverse && sortPost == 0);
+this.enableSkipping = (!reverse && enableSkipping);
 }

 @Override
@@ -1 +1 @@
-it.cavallium.dbengine.lucene.directory.Lucene90CodecWithNoFieldCompression
+it.cavallium.dbengine.lucene.directory.Lucene91CodecWithNoFieldCompression
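Lucene discovers codecs by name through Java SPI, so renaming the class also requires updating the services registration above (presumably META-INF/services/org.apache.lucene.codecs.Codec); otherwise indexes written with the codec cannot be reopened. An illustrative check (not part of the commit) that the registration resolves:

import org.apache.lucene.codecs.Codec;

public class CodecSpiCheck {
	public static void main(String[] args) {
		// Codec.forName looks up the name passed to the FilterCodec constructor, "Lucene410CodecWithNoFieldCompression".
		Codec codec = Codec.forName("Lucene410CodecWithNoFieldCompression");
		System.out.println(codec.getClass().getName()); // expected: ...Lucene91CodecWithNoFieldCompression
	}
}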