diff --git a/pom.xml b/pom.xml index 1abce4f..6642cf9 100644 --- a/pom.xml +++ b/pom.xml @@ -26,6 +26,23 @@ https://mvn.mchv.eu/repository/mchv-snapshot + + + mchv-release-distribution + MCHV Release Apache Maven Packages Distribution + https://mvn.mchv.eu/repository/mchv + + + mchv-snapshot-distribution + MCHV Snapshot Apache Maven Packages Distribution + https://mvn.mchv.eu/repository/mchv-snapshot + + + + scm:git:https://git.ignuranza.net/andreacavalli/CavalliumDBEngine.git + scm:git:https://git.ignuranza.net/andreacavalli/CavalliumDBEngine.git + HEAD + org.slf4j diff --git a/src/example/java/it.cavallium.dbengine.client/SpeedExample.java b/src/example/java/it.cavallium.dbengine.client/SpeedExample.java index c64e771..371210a 100644 --- a/src/example/java/it.cavallium.dbengine.client/SpeedExample.java +++ b/src/example/java/it.cavallium.dbengine.client/SpeedExample.java @@ -8,13 +8,12 @@ import it.cavallium.dbengine.database.collections.DatabaseMapDictionary; import it.cavallium.dbengine.database.collections.DatabaseMapDictionaryDeep; import it.cavallium.dbengine.database.collections.DatabaseStageEntry; import it.cavallium.dbengine.database.collections.DatabaseStageMap; -import it.cavallium.dbengine.database.collections.QueryableBuilder; -import it.cavallium.dbengine.database.serialization.Serializer; -import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength; import it.cavallium.dbengine.database.collections.SubStageGetterMap; import it.cavallium.dbengine.database.collections.SubStageGetterMapDeep; import it.cavallium.dbengine.database.collections.SubStageGetterSingleBytes; import it.cavallium.dbengine.database.disk.LLLocalDatabaseConnection; +import it.cavallium.dbengine.database.serialization.Serializer; +import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; @@ -55,36 +54,6 @@ public class SpeedExample { .blockOptional(); } - private static Mono testCreateQueryable() { - var ssg = new SubStageGetterSingleBytes(); - var ser = SerializerFixedBinaryLength.noop(4); - var itemKey = new byte[]{0, 1, 2, 3}; - var newValue = new byte[]{4, 5, 6, 7}; - return test("Create Queryable", - tempDb() - .flatMap(db -> db.getDictionary("testmap").map(dict -> Tuples.of(db, dict))) - .map(tuple -> tuple.mapT2(dict -> { - var builder = new QueryableBuilder(2); - return builder.wrap(DatabaseMapDictionaryDeep.simple(dict, builder.serializer(), builder.tail(ssg, ser))); - })), - tuple -> Flux.range(0, batchSize).flatMap(n -> Mono - .defer(() -> Mono - .fromRunnable(() -> { - if (printPreviousValue) - System.out.println("Setting new value at key " + Arrays.toString(itemKey) + ": " + Arrays.toString(newValue)); - }) - .then(tuple.getT2().at(null, itemKey)) - .flatMap(handle -> handle.setAndGetPrevious(newValue)) - .doOnSuccess(oldValue -> { - if (printPreviousValue) - System.out.println("Old value: " + (oldValue == null ? 
"None" : Arrays.toString(oldValue))); - }) - )) - .then(), - numRepeats, - tuple -> tuple.getT1().close()); - } - private static Mono test2LevelPut() { var k1ser = SerializerFixedBinaryLength.noop(4); var k2ser = SerializerFixedBinaryLength.noop(4); diff --git a/src/main/java/it/cavallium/dbengine/client/CompositeDatabasePartLocation.java b/src/main/java/it/cavallium/dbengine/client/CompositeDatabasePartLocation.java index 6b41dcb..c1f6b42 100644 --- a/src/main/java/it/cavallium/dbengine/client/CompositeDatabasePartLocation.java +++ b/src/main/java/it/cavallium/dbengine/client/CompositeDatabasePartLocation.java @@ -3,6 +3,7 @@ package it.cavallium.dbengine.client; import java.util.Objects; import java.util.StringJoiner; +@SuppressWarnings("unused") public class CompositeDatabasePartLocation { private final CompositeDatabasePartType partType; private final String partName; diff --git a/src/main/java/it/cavallium/dbengine/database/LLDatabaseConnection.java b/src/main/java/it/cavallium/dbengine/database/LLDatabaseConnection.java index 569c568..1f383f4 100644 --- a/src/main/java/it/cavallium/dbengine/database/LLDatabaseConnection.java +++ b/src/main/java/it/cavallium/dbengine/database/LLDatabaseConnection.java @@ -5,6 +5,7 @@ import java.time.Duration; import java.util.List; import reactor.core.publisher.Mono; +@SuppressWarnings("UnusedReturnValue") public interface LLDatabaseConnection { Mono connect(); diff --git a/src/main/java/it/cavallium/dbengine/database/LLDictionary.java b/src/main/java/it/cavallium/dbengine/database/LLDictionary.java index e2fc98f..cd6d2d3 100644 --- a/src/main/java/it/cavallium/dbengine/database/LLDictionary.java +++ b/src/main/java/it/cavallium/dbengine/database/LLDictionary.java @@ -8,6 +8,7 @@ import org.warp.commonutils.concurrency.atomicity.NotAtomic; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; +@SuppressWarnings("unused") @NotAtomic public interface LLDictionary extends LLKeyValueDatabaseStructure { diff --git a/src/main/java/it/cavallium/dbengine/database/LLDictionaryResultType.java b/src/main/java/it/cavallium/dbengine/database/LLDictionaryResultType.java index f09809f..d6b5d04 100644 --- a/src/main/java/it/cavallium/dbengine/database/LLDictionaryResultType.java +++ b/src/main/java/it/cavallium/dbengine/database/LLDictionaryResultType.java @@ -1,5 +1,5 @@ package it.cavallium.dbengine.database; public enum LLDictionaryResultType { - VOID, VALUE_CHANGED, PREVIOUS_VALUE; + VOID, VALUE_CHANGED, PREVIOUS_VALUE } diff --git a/src/main/java/it/cavallium/dbengine/database/LLItem.java b/src/main/java/it/cavallium/dbengine/database/LLItem.java index 55f1c8c..10ba409 100644 --- a/src/main/java/it/cavallium/dbengine/database/LLItem.java +++ b/src/main/java/it/cavallium/dbengine/database/LLItem.java @@ -8,17 +8,16 @@ import java.util.Arrays; import java.util.Objects; import java.util.StringJoiner; import org.apache.lucene.document.Field; -import org.jetbrains.annotations.Nullable; public class LLItem { private final LLType type; private final String name; private final byte[] data; - @Nullable + // nullable private final byte[] data2; - public LLItem(LLType type, String name, byte[] data, @Nullable byte[] data2) { + public LLItem(LLType type, String name, byte[] data, byte[] data2) { this.type = type; this.name = name; this.data = data; diff --git a/src/main/java/it/cavallium/dbengine/database/collections/DatabaseMapDictionary.java b/src/main/java/it/cavallium/dbengine/database/collections/DatabaseMapDictionary.java index 1467132..9403749 
100644 --- a/src/main/java/it/cavallium/dbengine/database/collections/DatabaseMapDictionary.java +++ b/src/main/java/it/cavallium/dbengine/database/collections/DatabaseMapDictionary.java @@ -74,10 +74,6 @@ public class DatabaseMapDictionary extends DatabaseMapDictionaryDeep stripPrefix(Entry entry) { - return Map.entry(stripPrefix(entry.getKey()), entry.getValue()); - } - @Override public Mono> clearAndGetPrevious() { return dictionary diff --git a/src/main/java/it/cavallium/dbengine/database/collections/DatabaseMapDictionaryDeep.java b/src/main/java/it/cavallium/dbengine/database/collections/DatabaseMapDictionaryDeep.java index 47e8deb..b1071e8 100644 --- a/src/main/java/it/cavallium/dbengine/database/collections/DatabaseMapDictionaryDeep.java +++ b/src/main/java/it/cavallium/dbengine/database/collections/DatabaseMapDictionaryDeep.java @@ -14,7 +14,6 @@ import reactor.core.publisher.Mono; import reactor.util.function.Tuples; // todo: implement optimized methods -@SuppressWarnings("Convert2MethodRef") public class DatabaseMapDictionaryDeep> implements DatabaseStageMap { public static final byte[] EMPTY_BYTES = new byte[0]; @@ -143,15 +142,6 @@ public class DatabaseMapDictionaryDeep> implem return Arrays.copyOfRange(key, this.keyPrefix.length, key.length); } - /** - * Remove ext from suffix - */ - protected byte[] trimSuffix(byte[] keySuffix) { - if (keySuffix.length == keySuffixLength) - return keySuffix; - return Arrays.copyOf(keySuffix, keySuffixLength); - } - /** * Remove ext from full key */ @@ -170,15 +160,6 @@ public class DatabaseMapDictionaryDeep> implem return result; } - /** - * Remove suffix from keySuffix, returning probably an empty byte array - */ - protected byte[] stripSuffix(byte[] keySuffix) { - if (keySuffix.length == this.keySuffixLength) - return EMPTY_BYTES; - return Arrays.copyOfRange(keySuffix, this.keySuffixLength, keySuffix.length); - } - protected LLSnapshot resolveSnapshot(@Nullable CompositeSnapshot snapshot) { if (snapshot == null) { return null; diff --git a/src/main/java/it/cavallium/dbengine/database/collections/DatabaseStage.java b/src/main/java/it/cavallium/dbengine/database/collections/DatabaseStage.java index 2b3b6bb..f3e7191 100644 --- a/src/main/java/it/cavallium/dbengine/database/collections/DatabaseStage.java +++ b/src/main/java/it/cavallium/dbengine/database/collections/DatabaseStage.java @@ -5,7 +5,7 @@ import java.util.Objects; import org.jetbrains.annotations.Nullable; import reactor.core.publisher.Mono; -public interface DatabaseStage extends DatabaseEntryable { +public interface DatabaseStage extends DatabaseStageWithEntry { Mono get(@Nullable CompositeSnapshot snapshot); diff --git a/src/main/java/it/cavallium/dbengine/database/collections/DatabaseStageQueryable.java b/src/main/java/it/cavallium/dbengine/database/collections/DatabaseStageQueryable.java deleted file mode 100644 index 08faa47..0000000 --- a/src/main/java/it/cavallium/dbengine/database/collections/DatabaseStageQueryable.java +++ /dev/null @@ -1,3 +0,0 @@ -package it.cavallium.dbengine.database.collections; - -public interface DatabaseStageQueryable {} diff --git a/src/main/java/it/cavallium/dbengine/database/collections/DatabaseEntryable.java b/src/main/java/it/cavallium/dbengine/database/collections/DatabaseStageWithEntry.java similarity index 66% rename from src/main/java/it/cavallium/dbengine/database/collections/DatabaseEntryable.java rename to src/main/java/it/cavallium/dbengine/database/collections/DatabaseStageWithEntry.java index 49379de..9ffdb89 100644 --- 
a/src/main/java/it/cavallium/dbengine/database/collections/DatabaseEntryable.java +++ b/src/main/java/it/cavallium/dbengine/database/collections/DatabaseStageWithEntry.java @@ -1,6 +1,6 @@ package it.cavallium.dbengine.database.collections; -public interface DatabaseEntryable { +public interface DatabaseStageWithEntry { DatabaseStageEntry entry(); } diff --git a/src/main/java/it/cavallium/dbengine/database/collections/JoinerBlocking.java b/src/main/java/it/cavallium/dbengine/database/collections/JoinerBlocking.java index da72042..331bb6a 100644 --- a/src/main/java/it/cavallium/dbengine/database/collections/JoinerBlocking.java +++ b/src/main/java/it/cavallium/dbengine/database/collections/JoinerBlocking.java @@ -2,6 +2,7 @@ package it.cavallium.dbengine.database.collections; import java.io.IOException; +@SuppressWarnings("SpellCheckingInspection") public interface JoinerBlocking { interface ValueGetterBlocking { diff --git a/src/main/java/it/cavallium/dbengine/database/collections/QueryableBuilder.java b/src/main/java/it/cavallium/dbengine/database/collections/QueryableBuilder.java deleted file mode 100644 index b4f6fe1..0000000 --- a/src/main/java/it/cavallium/dbengine/database/collections/QueryableBuilder.java +++ /dev/null @@ -1,24 +0,0 @@ -package it.cavallium.dbengine.database.collections; - -import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength; - -public class QueryableBuilder { - - public QueryableBuilder(int stagesNumber) { - - } - - public SerializerFixedBinaryLength serializer() { - return null; - } - - public > SubStageGetterSingleBytes tail(U ssg, - SerializerFixedBinaryLength ser) { - return null; - - } - - public , M extends DatabaseStageMap> M wrap(M map) { - return null; - } -} diff --git a/src/main/java/it/cavallium/dbengine/database/collections/SubStageGetterSingle.java b/src/main/java/it/cavallium/dbengine/database/collections/SubStageGetterSingle.java index 5e2f2af..a7acef3 100644 --- a/src/main/java/it/cavallium/dbengine/database/collections/SubStageGetterSingle.java +++ b/src/main/java/it/cavallium/dbengine/database/collections/SubStageGetterSingle.java @@ -21,12 +21,10 @@ public class SubStageGetterSingle implements SubStageGetter keyFlux) { - //System.out.println(Thread.currentThread() + "subStageGetterSingle1"); return keyFlux .singleOrEmpty() .flatMap(key -> Mono .>fromCallable(() -> { - //System.out.println(Thread.currentThread() + "subStageGetterSingle2"); if (!Arrays.equals(keyPrefix, key)) { throw new IndexOutOfBoundsException("Found more than one element!"); } @@ -34,8 +32,7 @@ public class SubStageGetterSingle implements SubStageGetter { - //System.out.println(Thread.currentThread() + "subStageGetterSingle3"); - return new DatabaseSingle(dictionary, + return new DatabaseSingle<>(dictionary, keyPrefix, serializer ); @@ -47,13 +44,4 @@ public class SubStageGetterSingle implements SubStageGetter getDatabase(String name, List columns, boolean lowMemory) { return Mono - .fromCallable(() -> new LLLocalKeyValueDatabase(name, + .fromCallable(() -> new LLLocalKeyValueDatabase(name, basePath.resolve("database_" + name), columns, new LinkedList<>(), diff --git a/src/main/java/it/cavallium/dbengine/database/disk/LLLocalDictionary.java b/src/main/java/it/cavallium/dbengine/database/disk/LLLocalDictionary.java index ba8bb71..58b5fcd 100644 --- a/src/main/java/it/cavallium/dbengine/database/disk/LLLocalDictionary.java +++ b/src/main/java/it/cavallium/dbengine/database/disk/LLLocalDictionary.java @@ -224,7 +224,7 @@ public class LLLocalDictionary 
implements LLDictionary { .window(MULTI_GET_WINDOW) .flatMap(keysWindowFlux -> keysWindowFlux.collectList() .flatMapMany(keysWindow -> Mono - .>>fromCallable(() -> { + .fromCallable(() -> { var handlesArray = new ColumnFamilyHandle[keysWindow.size()]; Arrays.fill(handlesArray, cfh); var handles = ObjectArrayList.wrap(handlesArray, handlesArray.length); @@ -240,7 +240,7 @@ public class LLLocalDictionary implements LLDictionary { return mappedResults; }) .subscribeOn(dbScheduler) - .>flatMapMany(Flux::fromIterable) + .flatMapMany(Flux::fromIterable) ) ) .onErrorMap(IOException::new); @@ -292,31 +292,6 @@ public class LLLocalDictionary implements LLDictionary { .map(oldValue -> Map.entry(newEntry.getKey(), oldValue))); } - @NotNull - private Flux> putEntryToWriteBatch(List> newEntries, boolean getOldValues, - CappedWriteBatch writeBatch) { - return Flux - .from(Flux - .defer(() -> { - if (getOldValues) { - return getMulti(null, Flux.fromIterable(newEntries).map(Entry::getKey)); - } else { - return Flux.empty(); - } - }) - .concatWith(Mono - .>fromCallable(() -> { - synchronized (writeBatch) { - for (Entry newEntry : newEntries) { - writeBatch.put(cfh, newEntry.getKey(), newEntry.getValue()); - } - } - return null; - }).subscribeOn(dbScheduler) - ) - ); - } - @Override public Flux> getRange(@Nullable LLSnapshot snapshot, LLRange range) { return Flux.defer(() -> { @@ -408,7 +383,7 @@ public class LLLocalDictionary implements LLDictionary { if (!currentGroupValues.isEmpty()) { sink.next(currentGroupValues); } - } finally {; + } finally { sink.complete(); } }) @@ -448,16 +423,14 @@ public class LLLocalDictionary implements LLDictionary { if (range.hasMax() && Arrays.compareUnsigned(key, range.getMax()) > 0) { break; } - if (Arrays.equals(firstGroupKey, 0, prefixLength, key, 0, prefixLength)) { - currentGroupValues.add(key); - } else { + if (!Arrays.equals(firstGroupKey, 0, prefixLength, key, 0, prefixLength)) { if (!currentGroupValues.isEmpty()) { sink.next(currentGroupValues); } firstGroupKey = key; currentGroupValues = new ArrayList<>(); - currentGroupValues.add(key); } + currentGroupValues.add(key); rocksIterator.next(); } if (!currentGroupValues.isEmpty()) { diff --git a/src/main/java/it/cavallium/dbengine/database/disk/LLLocalLuceneIndex.java b/src/main/java/it/cavallium/dbengine/database/disk/LLLocalLuceneIndex.java index 7714cba..5612a11 100644 --- a/src/main/java/it/cavallium/dbengine/database/disk/LLLocalLuceneIndex.java +++ b/src/main/java/it/cavallium/dbengine/database/disk/LLLocalLuceneIndex.java @@ -61,9 +61,9 @@ public class LLLocalLuceneIndex implements LLLuceneIndex { /** * Global lucene index scheduler. * There is only a single thread globally to not overwhelm the disk with - * parallel commits or parallel refreshes. + * concurrent commits or concurrent refreshes. 
*/ - private static final Scheduler luceneScheduler = Schedulers.newBoundedElastic(1, + private static final Scheduler luceneBlockingScheduler = Schedulers.newBoundedElastic(1, Schedulers.DEFAULT_BOUNDED_ELASTIC_QUEUESIZE, "Lucene", 120, @@ -124,7 +124,7 @@ public class LLLocalLuceneIndex implements LLLuceneIndex { } private void registerScheduledFixedTask(Runnable task, Duration duration) { - scheduledTasksLifecycle.registerScheduledTask(luceneScheduler.schedulePeriodically(() -> { + scheduledTasksLifecycle.registerScheduledTask(luceneBlockingScheduler.schedulePeriodically(() -> { scheduledTasksLifecycle.startScheduledTask(); try { task.run(); @@ -143,14 +143,14 @@ public class LLLocalLuceneIndex implements LLLuceneIndex { public Mono takeSnapshot() { return Mono .fromCallable(lastSnapshotSeqNo::incrementAndGet) - .subscribeOn(luceneScheduler) + .subscribeOn(luceneBlockingScheduler) .flatMap(snapshotSeqNo -> takeLuceneSnapshot() .flatMap(snapshot -> Mono .fromCallable(() -> { this.snapshots.put(snapshotSeqNo, new LuceneIndexSnapshot(snapshot)); return new LLSnapshot(snapshotSeqNo); }) - .subscribeOn(luceneScheduler) + .subscribeOn(luceneBlockingScheduler) ) ); } @@ -160,18 +160,23 @@ public class LLLocalLuceneIndex implements LLLuceneIndex { * avoiding the exception. */ private Mono takeLuceneSnapshot() { - return Mono.fromCallable(() -> { - try { - return snapshotter.snapshot(); - } catch (IllegalStateException ex) { - if ("No index commit to snapshot".equals(ex.getMessage())) { - indexWriter.commit(); - return snapshotter.snapshot(); - } else { - throw ex; - } - } - }).subscribeOn(luceneScheduler); + return Mono + .fromCallable(() -> { + try { + //noinspection BlockingMethodInNonBlockingContext + return snapshotter.snapshot(); + } catch (IllegalStateException ex) { + if ("No index commit to snapshot".equals(ex.getMessage())) { + //noinspection BlockingMethodInNonBlockingContext + indexWriter.commit(); + //noinspection BlockingMethodInNonBlockingContext + return snapshotter.snapshot(); + } else { + throw ex; + } + } + }) + .subscribeOn(luceneBlockingScheduler); } @Override @@ -182,22 +187,26 @@ public class LLLocalLuceneIndex implements LLLuceneIndex { throw new IOException("Snapshot " + snapshot.getSequenceNumber() + " not found!"); } + //noinspection BlockingMethodInNonBlockingContext indexSnapshot.close(); var luceneIndexSnapshot = indexSnapshot.getSnapshot(); + //noinspection BlockingMethodInNonBlockingContext snapshotter.release(luceneIndexSnapshot); // Delete unused files after releasing the snapshot + //noinspection BlockingMethodInNonBlockingContext indexWriter.deleteUnusedFiles(); return null; - }).subscribeOn(luceneScheduler); + }).subscribeOn(luceneBlockingScheduler); } @Override public Mono addDocument(LLTerm key, LLDocument doc) { return Mono.fromCallable(() -> { + //noinspection BlockingMethodInNonBlockingContext indexWriter.addDocument(LLUtils.toDocument(doc)); return null; - }).subscribeOn(luceneScheduler); + }).subscribeOn(luceneBlockingScheduler); } @Override @@ -207,10 +216,11 @@ public class LLLocalLuceneIndex implements LLLuceneIndex { .collectList() .flatMap(docs -> Mono .fromCallable(() -> { + //noinspection BlockingMethodInNonBlockingContext indexWriter.addDocuments(LLUtils.toDocuments(docs)); return null; }) - .subscribeOn(luceneScheduler)) + .subscribeOn(luceneBlockingScheduler)) ) .then(); } @@ -219,17 +229,19 @@ public class LLLocalLuceneIndex implements LLLuceneIndex { @Override public Mono deleteDocument(LLTerm id) { return Mono.fromCallable(() -> { + 
//noinspection BlockingMethodInNonBlockingContext indexWriter.deleteDocuments(LLUtils.toTerm(id)); return null; - }).subscribeOn(luceneScheduler); + }).subscribeOn(luceneBlockingScheduler); } @Override public Mono updateDocument(LLTerm id, LLDocument document) { return Mono.fromCallable(() -> { + //noinspection BlockingMethodInNonBlockingContext indexWriter.updateDocument(LLUtils.toTerm(id), LLUtils.toDocument(document)); return null; - }).subscribeOn(luceneScheduler); + }).subscribeOn(luceneBlockingScheduler); } @Override @@ -243,45 +255,53 @@ public class LLLocalLuceneIndex implements LLLuceneIndex { .collectList() .flatMap(luceneDocuments -> Mono .fromCallable(() -> { + //noinspection BlockingMethodInNonBlockingContext indexWriter.updateDocuments(LLUtils.toTerm(documents.key()), luceneDocuments); return null; }) - .subscribeOn(luceneScheduler) + .subscribeOn(luceneBlockingScheduler) ); } @Override public Mono deleteAll() { return Mono.fromCallable(() -> { + //noinspection BlockingMethodInNonBlockingContext indexWriter.deleteAll(); + //noinspection BlockingMethodInNonBlockingContext indexWriter.commit(); + //noinspection BlockingMethodInNonBlockingContext indexWriter.forceMergeDeletes(true); + //noinspection BlockingMethodInNonBlockingContext indexWriter.flush(); + //noinspection BlockingMethodInNonBlockingContext indexWriter.commit(); return null; - }).subscribeOn(luceneScheduler); + }).subscribeOn(luceneBlockingScheduler); } private Mono acquireSearcherWrapper(LLSnapshot snapshot) { return Mono.fromCallable(() -> { if (snapshot == null) { + //noinspection BlockingMethodInNonBlockingContext return searcherManager.acquire(); } else { return resolveSnapshot(snapshot).getIndexSearcher(); } - }).subscribeOn(luceneScheduler); + }).subscribeOn(luceneBlockingScheduler); } private Mono releaseSearcherWrapper(LLSnapshot snapshot, IndexSearcher indexSearcher) { return Mono.fromRunnable(() -> { if (snapshot == null) { try { + //noinspection BlockingMethodInNonBlockingContext searcherManager.release(indexSearcher); } catch (IOException e) { e.printStackTrace(); } } - }).subscribeOn(luceneScheduler); + }).subscribeOn(luceneBlockingScheduler); } @SuppressWarnings({"Convert2MethodRef", "unchecked", "rawtypes"}) @@ -308,9 +328,10 @@ public class LLLocalLuceneIndex implements LLLuceneIndex { mlt.setBoost(true); // Get the reference doc and apply it to MoreLikeThis, to generate the query + //noinspection BlockingMethodInNonBlockingContext return mlt.like((Map) mltDocumentFields); }) - .subscribeOn(luceneScheduler) + .subscribeOn(luceneBlockingScheduler) .flatMap(query -> Mono .fromCallable(() -> { One totalHitsCountSink = Sinks.one(); @@ -319,7 +340,7 @@ public class LLLocalLuceneIndex implements LLLuceneIndex { .unicast() .onBackpressureBuffer(new ArrayBlockingQueue<>(1000)); - luceneScheduler.schedule(() -> { + luceneBlockingScheduler.schedule(() -> { try { streamSearcher.search(indexSearcher, query, @@ -347,7 +368,7 @@ public class LLLocalLuceneIndex implements LLLuceneIndex { }); return new LLSearchResult(totalHitsCountSink.asMono(), Flux.just(topKeysSink.asFlux())); - }).subscribeOn(luceneScheduler) + }).subscribeOn(luceneBlockingScheduler) ).then() .materialize() .flatMap(value -> releaseSearcherWrapper(snapshot, indexSearcher).thenReturn(value)) @@ -369,7 +390,7 @@ public class LLLocalLuceneIndex implements LLLuceneIndex { org.apache.lucene.search.ScoreMode luceneScoreMode = LLUtils.toScoreMode(scoreMode); return Tuples.of(luceneQuery, Optional.ofNullable(luceneSort), luceneScoreMode); }) - 
.subscribeOn(luceneScheduler) + .subscribeOn(luceneBlockingScheduler) .flatMap(tuple -> Mono .fromCallable(() -> { Query luceneQuery = tuple.getT1(); @@ -382,7 +403,7 @@ public class LLLocalLuceneIndex implements LLLuceneIndex { .unicast() .onBackpressureBuffer(new ArrayBlockingQueue<>(PagedStreamSearcher.MAX_ITEMS_PER_PAGE)); - luceneScheduler.schedule(() -> { + luceneBlockingScheduler.schedule(() -> { try { streamSearcher.search(indexSearcher, luceneQuery, @@ -410,7 +431,7 @@ public class LLLocalLuceneIndex implements LLLuceneIndex { }); return new LLSearchResult(totalHitsCountSink.asMono(), Flux.just(topKeysSink.asFlux())); - }).subscribeOn(luceneScheduler) + }).subscribeOn(luceneBlockingScheduler) ) .materialize() .flatMap(value -> releaseSearcherWrapper(snapshot, indexSearcher).thenReturn(value)) @@ -423,11 +444,13 @@ public class LLLocalLuceneIndex implements LLLuceneIndex { return Mono .fromCallable(() -> { scheduledTasksLifecycle.cancelAndWait(); + //noinspection BlockingMethodInNonBlockingContext indexWriter.close(); + //noinspection BlockingMethodInNonBlockingContext directory.close(); return null; }) - .subscribeOn(luceneScheduler); + .subscribeOn(luceneBlockingScheduler); } @Override @@ -436,14 +459,16 @@ public class LLLocalLuceneIndex implements LLLuceneIndex { .fromCallable(() -> { scheduledTasksLifecycle.startScheduledTask(); try { + //noinspection BlockingMethodInNonBlockingContext indexWriter.commit(); + //noinspection BlockingMethodInNonBlockingContext indexWriter.flush(); } finally { scheduledTasksLifecycle.endScheduledTask(); } return null; }) - .subscribeOn(luceneScheduler); + .subscribeOn(luceneBlockingScheduler); } @Override @@ -452,13 +477,14 @@ public class LLLocalLuceneIndex implements LLLuceneIndex { .fromCallable(() -> { scheduledTasksLifecycle.startScheduledTask(); try { + //noinspection BlockingMethodInNonBlockingContext searcherManager.maybeRefreshBlocking(); } finally { scheduledTasksLifecycle.endScheduledTask(); } return null; }) - .subscribeOn(luceneScheduler); + .subscribeOn(luceneBlockingScheduler); } private void scheduledCommit() { diff --git a/src/main/java/it/cavallium/dbengine/database/disk/LLLocalMultiLuceneIndex.java b/src/main/java/it/cavallium/dbengine/database/disk/LLLocalMultiLuceneIndex.java index 83a6da5..59cef6e 100644 --- a/src/main/java/it/cavallium/dbengine/database/disk/LLLocalMultiLuceneIndex.java +++ b/src/main/java/it/cavallium/dbengine/database/disk/LLLocalMultiLuceneIndex.java @@ -9,15 +9,11 @@ import it.cavallium.dbengine.database.LLSort; import it.cavallium.dbengine.database.LLTerm; import it.cavallium.dbengine.database.analyzer.TextFieldsAnalyzer; import it.cavallium.dbengine.lucene.serializer.Query; -import it.unimi.dsi.fastutil.ints.Int2ObjectMap; -import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap; import it.unimi.dsi.fastutil.longs.Long2ObjectMap; import it.unimi.dsi.fastutil.longs.Long2ObjectOpenHashMap; import java.io.IOException; import java.nio.file.Path; import java.time.Duration; -import java.util.ArrayList; -import java.util.List; import java.util.Optional; import java.util.Set; import java.util.concurrent.CopyOnWriteArrayList; @@ -25,7 +21,6 @@ import java.util.concurrent.atomic.AtomicLong; import org.jetbrains.annotations.Nullable; import org.warp.commonutils.batch.ParallelUtils; import org.warp.commonutils.functional.IOBiConsumer; -import org.warp.commonutils.functional.TriFunction; import reactor.core.publisher.Flux; import reactor.core.publisher.GroupedFlux; import reactor.core.publisher.Mono; @@ -95,34 
+90,6 @@ public class LLLocalMultiLuceneIndex implements LLLuceneIndex { return documents.flatMap(docs -> getLuceneIndex(docs.key()).addDocuments(documents)).then(); } - private Mono runPerInstance(Iterable keys, - Iterable documents, - TriFunction, Iterable, Mono> consumer) { - var keysIt = keys.iterator(); - var docsIt = documents.iterator(); - - Int2ObjectMap> perInstanceKeys = new Int2ObjectOpenHashMap<>(); - Int2ObjectMap> perInstanceDocs = new Int2ObjectOpenHashMap<>(); - - while (keysIt.hasNext()) { - LLTerm key = keysIt.next(); - LLDocument doc = docsIt.next(); - var instanceId = getLuceneIndexId(key); - - perInstanceKeys.computeIfAbsent(instanceId, iid -> new ArrayList<>()).add(key); - perInstanceDocs.computeIfAbsent(instanceId, iid -> new ArrayList<>()).add(doc); - } - - return Flux - .fromIterable(perInstanceKeys.int2ObjectEntrySet()) - .flatMap(currentInstanceEntry -> { - int instanceId = currentInstanceEntry.getIntKey(); - List currentInstanceKeys = currentInstanceEntry.getValue(); - return consumer.apply(this.luceneIndices[instanceId], currentInstanceKeys, perInstanceDocs.get(instanceId)); - }) - .then(); - } - @Override public Mono deleteDocument(LLTerm id) { return getLuceneIndex(id).deleteDocument(id); diff --git a/src/main/java/it/cavallium/dbengine/database/disk/LuceneIndexSnapshot.java b/src/main/java/it/cavallium/dbengine/database/disk/LuceneIndexSnapshot.java index 93ae43f..4ed387c 100644 --- a/src/main/java/it/cavallium/dbengine/database/disk/LuceneIndexSnapshot.java +++ b/src/main/java/it/cavallium/dbengine/database/disk/LuceneIndexSnapshot.java @@ -5,6 +5,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexCommit; import org.apache.lucene.search.IndexSearcher; +@SuppressWarnings("unused") public class LuceneIndexSnapshot { private final IndexCommit snapshot; diff --git a/src/main/java/it/cavallium/dbengine/database/indicizer/JoinedIndicizerWriter.java b/src/main/java/it/cavallium/dbengine/database/indicizer/JoinedIndicizerWriter.java index 1a061d6..e2338d5 100644 --- a/src/main/java/it/cavallium/dbengine/database/indicizer/JoinedIndicizerWriter.java +++ b/src/main/java/it/cavallium/dbengine/database/indicizer/JoinedIndicizerWriter.java @@ -11,6 +11,7 @@ import org.jetbrains.annotations.Nullable; import it.cavallium.dbengine.client.CompositeSnapshot; import reactor.core.publisher.Mono; +@SuppressWarnings("SpellCheckingInspection") public class JoinedIndicizerWriter implements LuceneIndicizerWriter { private final LuceneIndicizerWriter indicizerWriter; diff --git a/src/main/java/it/cavallium/dbengine/database/indicizer/LuceneIndicizerWriter.java b/src/main/java/it/cavallium/dbengine/database/indicizer/LuceneIndicizerWriter.java index d6f353f..c5fa86e 100644 --- a/src/main/java/it/cavallium/dbengine/database/indicizer/LuceneIndicizerWriter.java +++ b/src/main/java/it/cavallium/dbengine/database/indicizer/LuceneIndicizerWriter.java @@ -12,6 +12,7 @@ import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import reactor.util.function.Tuple2; +@SuppressWarnings("unused") public interface LuceneIndicizerWriter { Mono add(T key, U value); diff --git a/src/main/java/it/cavallium/dbengine/database/serialization/SerializerFixedBinaryLength.java b/src/main/java/it/cavallium/dbengine/database/serialization/SerializerFixedBinaryLength.java index a751619..c034d00 100644 --- a/src/main/java/it/cavallium/dbengine/database/serialization/SerializerFixedBinaryLength.java +++ 
b/src/main/java/it/cavallium/dbengine/database/serialization/SerializerFixedBinaryLength.java @@ -4,6 +4,7 @@ import com.google.common.primitives.Ints; import com.google.common.primitives.Longs; import org.jetbrains.annotations.NotNull; +@SuppressWarnings("unused") public interface SerializerFixedBinaryLength extends Serializer { int getSerializedBinaryLength(); diff --git a/src/main/java/it/cavallium/dbengine/lucene/RandomFieldComparator.java b/src/main/java/it/cavallium/dbengine/lucene/RandomFieldComparator.java index d6371fb..b66b607 100644 --- a/src/main/java/it/cavallium/dbengine/lucene/RandomFieldComparator.java +++ b/src/main/java/it/cavallium/dbengine/lucene/RandomFieldComparator.java @@ -65,7 +65,7 @@ public class RandomFieldComparator extends FieldComparator implements Lea var randomizedScorer = new Scorable() { @Override - public float score() throws IOException { + public float score() { return randomize(scorer.docID()); } @@ -81,9 +81,10 @@ public class RandomFieldComparator extends FieldComparator implements Lea } } + @SuppressWarnings("RedundantCast") @Override public Float value(int slot) { - return Float.valueOf(scores[slot]); + return (float) scores[slot]; } // Override because we sort reverse of natural Float order: diff --git a/src/main/java/it/cavallium/dbengine/lucene/RandomFieldComparatorSource.java b/src/main/java/it/cavallium/dbengine/lucene/RandomFieldComparatorSource.java index 9eaf61e..988d372 100644 --- a/src/main/java/it/cavallium/dbengine/lucene/RandomFieldComparatorSource.java +++ b/src/main/java/it/cavallium/dbengine/lucene/RandomFieldComparatorSource.java @@ -14,7 +14,7 @@ public class RandomFieldComparatorSource extends FieldComparatorSource { } @Override - public FieldComparator newComparator(String fieldname, int numHits, int sortPos, boolean reversed) { + public FieldComparator newComparator(String fieldName, int numHits, int sortPos, boolean reversed) { return new RandomFieldComparator(rand.iterator(), numHits); } } diff --git a/src/main/java/it/cavallium/dbengine/lucene/searcher/PagedStreamSearcher.java b/src/main/java/it/cavallium/dbengine/lucene/searcher/PagedStreamSearcher.java index 80fdba4..4cf1f85 100644 --- a/src/main/java/it/cavallium/dbengine/lucene/searcher/PagedStreamSearcher.java +++ b/src/main/java/it/cavallium/dbengine/lucene/searcher/PagedStreamSearcher.java @@ -49,7 +49,7 @@ public class PagedStreamSearcher implements LuceneStreamSearcher { totalHitsConsumer.accept(lastTopDocs.totalHits.value); if (lastTopDocs.scoreDocs.length > 0) { ScoreDoc lastScoreDoc = getLastItem(lastTopDocs.scoreDocs); - consumeHits(currentAllowedResults, lastTopDocs.scoreDocs, indexSearcher, scoreMode, keyFieldName, resultsConsumer); + consumeHits(currentAllowedResults, lastTopDocs.scoreDocs, indexSearcher, keyFieldName, resultsConsumer); // Run the searches for each page until the end boolean finished = currentAllowedResults.var <= 0; @@ -57,7 +57,7 @@ public class PagedStreamSearcher implements LuceneStreamSearcher { lastTopDocs = indexSearcher.searchAfter(lastScoreDoc, query, MAX_ITEMS_PER_PAGE, luceneSort, scoreMode != ScoreMode.COMPLETE_NO_SCORES); if (lastTopDocs.scoreDocs.length > 0) { lastScoreDoc = getLastItem(lastTopDocs.scoreDocs); - consumeHits(currentAllowedResults, lastTopDocs.scoreDocs, indexSearcher, scoreMode, keyFieldName, resultsConsumer); + consumeHits(currentAllowedResults, lastTopDocs.scoreDocs, indexSearcher, keyFieldName, resultsConsumer); } if (lastTopDocs.scoreDocs.length < MAX_ITEMS_PER_PAGE || currentAllowedResults.var <= 0) { 
finished = true; @@ -69,7 +69,6 @@ public class PagedStreamSearcher implements LuceneStreamSearcher { private void consumeHits(IntWrapper currentAllowedResults, ScoreDoc[] hits, IndexSearcher indexSearcher, - ScoreMode scoreMode, String keyFieldName, Consumer resultsConsumer) throws IOException { for (ScoreDoc hit : hits) { diff --git a/src/main/java/it/cavallium/dbengine/lucene/serializer/BooleanQuery.java b/src/main/java/it/cavallium/dbengine/lucene/serializer/BooleanQuery.java index 736a205..69748ec 100644 --- a/src/main/java/it/cavallium/dbengine/lucene/serializer/BooleanQuery.java +++ b/src/main/java/it/cavallium/dbengine/lucene/serializer/BooleanQuery.java @@ -2,6 +2,7 @@ package it.cavallium.dbengine.lucene.serializer; import java.util.Collection; +@SuppressWarnings("unused") public class BooleanQuery implements Query { private final BooleanQueryPart[] parts; @@ -26,8 +27,8 @@ public class BooleanQuery implements Query { StringifyUtils.stringifyInt(data, minShouldMatch); StringBuilder listData = new StringBuilder(); listData.append(parts.length).append('|'); - for (int i = 0; i < parts.length; i++) { - parts[i].stringify(listData); + for (BooleanQueryPart part : parts) { + part.stringify(listData); } StringifyUtils.writeHeader(data, QueryConstructorType.BOOLEAN_QUERY_INFO_LIST, listData); StringifyUtils.writeHeader(output, QueryConstructorType.BOOLEAN_QUERY, data); diff --git a/src/main/java/it/cavallium/dbengine/lucene/serializer/FuzzyQuery.java b/src/main/java/it/cavallium/dbengine/lucene/serializer/FuzzyQuery.java index 9ef6ca3..77c33fc 100644 --- a/src/main/java/it/cavallium/dbengine/lucene/serializer/FuzzyQuery.java +++ b/src/main/java/it/cavallium/dbengine/lucene/serializer/FuzzyQuery.java @@ -4,6 +4,7 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.util.automaton.LevenshteinAutomata; +@SuppressWarnings("unused") public class FuzzyQuery implements Query { private final Term term; diff --git a/src/main/java/it/cavallium/dbengine/lucene/serializer/Occur.java b/src/main/java/it/cavallium/dbengine/lucene/serializer/Occur.java index 24b628c..fcc7a82 100644 --- a/src/main/java/it/cavallium/dbengine/lucene/serializer/Occur.java +++ b/src/main/java/it/cavallium/dbengine/lucene/serializer/Occur.java @@ -2,6 +2,7 @@ package it.cavallium.dbengine.lucene.serializer; import org.apache.lucene.search.BooleanClause; +@SuppressWarnings("unused") public class Occur implements SerializedQueryObject { private final BooleanClause.Occur occur; diff --git a/src/main/java/it/cavallium/dbengine/lucene/serializer/PhraseQuery.java b/src/main/java/it/cavallium/dbengine/lucene/serializer/PhraseQuery.java index d589bc7..044c266 100644 --- a/src/main/java/it/cavallium/dbengine/lucene/serializer/PhraseQuery.java +++ b/src/main/java/it/cavallium/dbengine/lucene/serializer/PhraseQuery.java @@ -14,8 +14,8 @@ public class PhraseQuery implements Query { StringBuilder data = new StringBuilder(); StringBuilder listData = new StringBuilder(); listData.append(parts.length).append('|'); - for (int i = 0; i < parts.length; i++) { - StringifyUtils.stringifyTermPosition(listData, parts[i]); + for (TermPosition part : parts) { + StringifyUtils.stringifyTermPosition(listData, part); } StringifyUtils.writeHeader(data, QueryConstructorType.TERM_POSITION_LIST, listData); StringifyUtils.writeHeader(output, QueryConstructorType.PHRASE_QUERY, data); diff --git a/src/main/java/it/cavallium/dbengine/lucene/serializer/Query.java 
b/src/main/java/it/cavallium/dbengine/lucene/serializer/Query.java index 0d97367..3ef6fa3 100644 --- a/src/main/java/it/cavallium/dbengine/lucene/serializer/Query.java +++ b/src/main/java/it/cavallium/dbengine/lucene/serializer/Query.java @@ -16,7 +16,7 @@ import org.apache.lucene.index.Term; public interface Query extends SerializedQueryObject { - static Query approximativeSearch(TextFieldsAnalyzer yotsubaAnalyzer, String field, String text) { + static Query approximateSearch(TextFieldsAnalyzer yotsubaAnalyzer, String field, String text) { try { var terms = getTerms(yotsubaAnalyzer, field, text); diff --git a/src/main/java/it/cavallium/dbengine/lucene/serializer/QueryParser.java b/src/main/java/it/cavallium/dbengine/lucene/serializer/QueryParser.java index 0657789..b91d0ff 100644 --- a/src/main/java/it/cavallium/dbengine/lucene/serializer/QueryParser.java +++ b/src/main/java/it/cavallium/dbengine/lucene/serializer/QueryParser.java @@ -23,8 +23,7 @@ public class QueryParser { public static Query parse(String text) throws ParseException { try { - var builtQuery = (Query) parse(text, new AtomicInteger(0)); - return builtQuery; + return (Query) parse(text, new AtomicInteger(0)); } catch (Exception e) { throw new ParseException(e); } @@ -72,10 +71,13 @@ public class QueryParser { switch (type) { case TERM_QUERY: Term term = (Term) parse(completeText, position); + assert term != null; return new TermQuery(term); case BOOST_QUERY: Query query = (Query) parse(completeText, position); Float numb = (Float) parse(completeText, position); + assert query != null; + assert numb != null; return new BoostQuery(query, numb); case FUZZY_QUERY: Term fqTerm = (Term) parse(completeText, position); @@ -83,11 +85,16 @@ public class QueryParser { Integer numb2 = (Integer) parse(completeText, position); Integer numb3 = (Integer) parse(completeText, position); Boolean bool1 = (Boolean) parse(completeText, position); + assert fqTerm != null; + assert numb1 != null; + assert numb2 != null; + assert numb3 != null; + assert bool1 != null; return new FuzzyQuery(fqTerm, numb1, numb2, numb3, bool1); case PHRASE_QUERY: - //noinspection unchecked TermPosition[] pqTerms = (TermPosition[]) parse(completeText, position); var pqB = new PhraseQuery.Builder(); + assert pqTerms != null; for (TermPosition pqTerm : pqTerms) { if (pqTerm != null) { pqB.add(pqTerm.getTerm(), pqTerm.getPosition()); @@ -99,7 +106,6 @@ public class QueryParser { //noinspection ConstantConditions int minShouldMatch = (Integer) parse(completeText, position); bqB.setMinimumNumberShouldMatch(minShouldMatch); - //noinspection unchecked BooleanQueryInfo[] bqTerms = (BooleanQueryInfo[]) parse(completeText, position); assert bqTerms != null; for (BooleanQueryInfo bqTerm : bqTerms) { @@ -113,26 +119,39 @@ public class QueryParser { case INT_POINT_EXACT_QUERY: String string1 = (String) parse(completeText, position); Integer int1 = (Integer) parse(completeText, position); + assert string1 != null; + assert int1 != null; return IntPoint.newExactQuery(string1, int1); case LONG_POINT_EXACT_QUERY: String string5 = (String) parse(completeText, position); Long long3 = (Long) parse(completeText, position); + assert string5 != null; + assert long3 != null; return LongPoint.newExactQuery(string5, long3); case SORTED_SLOW_RANGE_QUERY: String string2 = (String) parse(completeText, position); Long long1 = (Long) parse(completeText, position); Long long2 = (Long) parse(completeText, position); + assert string2 != null; + assert long1 != null; + assert long2 != null; return 
SortedNumericDocValuesField.newSlowRangeQuery(string2, long1, long2); case LONG_POINT_RANGE_QUERY: - String stringx2 = (String) parse(completeText, position); - Long longx1 = (Long) parse(completeText, position); - Long longx2 = (Long) parse(completeText, position); - return LongPoint.newRangeQuery(stringx2, longx1, longx2); + String stringX2 = (String) parse(completeText, position); + Long longX1 = (Long) parse(completeText, position); + Long longX2 = (Long) parse(completeText, position); + assert stringX2 != null; + assert longX1 != null; + assert longX2 != null; + return LongPoint.newRangeQuery(stringX2, longX1, longX2); case INT_POINT_RANGE_QUERY: - String stringx3 = (String) parse(completeText, position); - Integer intx1 = (Integer) parse(completeText, position); - Integer intx2 = (Integer) parse(completeText, position); - return IntPoint.newRangeQuery(stringx3, intx1, intx2); + String stringX3 = (String) parse(completeText, position); + Integer intX1 = (Integer) parse(completeText, position); + Integer intX2 = (Integer) parse(completeText, position); + assert stringX3 != null; + assert intX1 != null; + assert intX2 != null; + return IntPoint.newRangeQuery(stringX3, intX1, intX2); case INT: position.addAndGet(toParse.length()); return Integer.parseInt(toParse); @@ -142,11 +161,13 @@ public class QueryParser { case TERM: String string3 = (String) parse(completeText, position); String string4 = (String) parse(completeText, position); + assert string4 != null; return new Term(string3, string4); case TERM_POSITION: Term term1 = (Term) parse(completeText, position); - Integer intx3 = (Integer) parse(completeText, position); - return new TermPosition(term1, intx3); + Integer intX3 = (Integer) parse(completeText, position); + assert intX3 != null; + return new TermPosition(term1, intX3); case FLOAT: position.addAndGet(toParse.length()); return Float.parseFloat(toParse); diff --git a/src/main/java/it/cavallium/dbengine/lucene/serializer/SerializedQueryObject.java b/src/main/java/it/cavallium/dbengine/lucene/serializer/SerializedQueryObject.java index 7258ee6..38919da 100644 --- a/src/main/java/it/cavallium/dbengine/lucene/serializer/SerializedQueryObject.java +++ b/src/main/java/it/cavallium/dbengine/lucene/serializer/SerializedQueryObject.java @@ -3,7 +3,7 @@ package it.cavallium.dbengine.lucene.serializer; public interface SerializedQueryObject { /** - * @return length|type|---data--- + * returns length|type|---data--- */ void stringify(StringBuilder output); } diff --git a/src/main/java/it/cavallium/dbengine/lucene/serializer/SortedNumericDocValuesFieldSlowRangeQuery.java b/src/main/java/it/cavallium/dbengine/lucene/serializer/SortedNumericDocValuesFieldSlowRangeQuery.java index 96bf824..b450929 100644 --- a/src/main/java/it/cavallium/dbengine/lucene/serializer/SortedNumericDocValuesFieldSlowRangeQuery.java +++ b/src/main/java/it/cavallium/dbengine/lucene/serializer/SortedNumericDocValuesFieldSlowRangeQuery.java @@ -1,5 +1,6 @@ package it.cavallium.dbengine.lucene.serializer; +@SuppressWarnings("unused") public class SortedNumericDocValuesFieldSlowRangeQuery implements Query { private final String name; diff --git a/src/main/java/it/cavallium/dbengine/lucene/serializer/StringifyUtils.java b/src/main/java/it/cavallium/dbengine/lucene/serializer/StringifyUtils.java index 4c4871b..3b57ce2 100644 --- a/src/main/java/it/cavallium/dbengine/lucene/serializer/StringifyUtils.java +++ b/src/main/java/it/cavallium/dbengine/lucene/serializer/StringifyUtils.java @@ -4,6 +4,7 @@ import 
java.nio.charset.StandardCharsets; import java.util.Base64; import org.apache.lucene.index.Term; +@SuppressWarnings("unused") public class StringifyUtils { public static void stringifyFloat(StringBuilder output, float value) {
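
The LLLocalDictionary change that replaces the if/else around currentGroupValues.add(key) with a single unconditional add is a small but easy-to-miss simplification of the prefix-grouping loop. Below is a minimal standalone sketch of the same logic outside the RocksDB iterator, assuming the keys arrive already sorted and are at least prefixLength bytes long; the class and method names are hypothetical and not part of this library.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Illustrative sketch: group a sorted sequence of keys by their first prefixLength bytes.
// Mirrors the refactored loop, where add(key) now runs exactly once per iteration.
public final class PrefixGroupingSketch {

	public static List<List<byte[]>> groupByPrefix(List<byte[]> sortedKeys, int prefixLength) {
		List<List<byte[]>> groups = new ArrayList<>();
		byte[] firstGroupKey = null;
		List<byte[]> currentGroupValues = new ArrayList<>();
		for (byte[] key : sortedKeys) {
			// Start a new group on the first key, or whenever the prefix changes.
			if (firstGroupKey == null
					|| !Arrays.equals(firstGroupKey, 0, prefixLength, key, 0, prefixLength)) {
				if (!currentGroupValues.isEmpty()) {
					groups.add(currentGroupValues);
				}
				firstGroupKey = key;
				currentGroupValues = new ArrayList<>();
			}
			// Unconditional add: the key always belongs to the (possibly new) current group.
			currentGroupValues.add(key);
		}
		if (!currentGroupValues.isEmpty()) {
			groups.add(currentGroupValues);
		}
		return groups;
	}
}

Behavior matches the pre-refactor version; the rewrite only removes the duplicated add call from both branches of the original if/else.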
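
The LLLocalLuceneIndex changes (renaming luceneScheduler to luceneBlockingScheduler and annotating each Lucene call with BlockingMethodInNonBlockingContext) all follow one pattern: blocking IndexWriter/SearcherManager work is wrapped in Mono.fromCallable(...) and subscribed on a single-threaded scheduler, so commits and refreshes are serialized and never run on a reactive thread. The sketch below illustrates that pattern only; the class, method, and thread-pool names are invented for the example, the daemon flag is an assumption (the diff cuts off after the 120-second TTL argument), and it presumes Reactor 3 and Lucene on the classpath. It is not code from this repository.

import java.util.concurrent.Callable;

import org.apache.lucene.index.IndexWriter;
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Scheduler;
import reactor.core.scheduler.Schedulers;

// Illustrative sketch, not the project's code: one global single-threaded scheduler so that
// blocking Lucene operations (commit, flush, snapshot) are serialized and kept off
// reactive event-loop threads.
public final class BlockingLuceneSchedulerSketch {

	// Same parameters as the scheduler visible in the diff: 1 thread, default queue size,
	// 120-second idle TTL; the trailing daemon flag and the thread-pool name are assumed.
	private static final Scheduler LUCENE_BLOCKING_SCHEDULER = Schedulers.newBoundedElastic(1,
			Schedulers.DEFAULT_BOUNDED_ELASTIC_QUEUESIZE,
			"lucene-blocking",
			120,
			true);

	private final IndexWriter indexWriter;

	public BlockingLuceneSchedulerSketch(IndexWriter indexWriter) {
		this.indexWriter = indexWriter;
	}

	// Generic wrapper: the blocking call runs on the dedicated scheduler, callers only
	// ever see a non-blocking Mono.
	private <T> Mono<T> runBlocking(Callable<T> blockingCall) {
		return Mono.fromCallable(blockingCall).subscribeOn(LUCENE_BLOCKING_SCHEDULER);
	}

	// Example: commit and flush the index without blocking the subscriber's thread.
	public Mono<Void> commitAndFlush() {
		return runBlocking(() -> {
			indexWriter.commit(); // blocking I/O, safe on the dedicated thread
			indexWriter.flush();
			return null;
		}).then();
	}
}

Using a one-thread bounded-elastic scheduler, rather than the shared Schedulers.boundedElastic(), is what provides the "never more than one concurrent commit or refresh" guarantee described in the scheduler's javadoc.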
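
The getMulti hunk in LLLocalDictionary keeps the same windowed batching shape and only drops the now-unnecessary explicit generic parameters on fromCallable and flatMapMany. Stripped of the RocksDB details, the shape is roughly the following; MULTI_GET_WINDOW's real value is not visible in this diff (500 is a placeholder), DB_SCHEDULER stands in for dbScheduler, and multiGet stands in for the blocking multi-get call, so treat this as an assumption-laden sketch rather than the actual implementation.

import java.util.List;
import java.util.function.Function;

import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Scheduler;
import reactor.core.scheduler.Schedulers;

// Illustrative sketch: batch a stream of keys into fixed-size windows and resolve each
// window with one blocking multi-get call on a database scheduler.
public final class WindowedMultiGetSketch {

	private static final int MULTI_GET_WINDOW = 500; // placeholder batch size
	private static final Scheduler DB_SCHEDULER = Schedulers.boundedElastic(); // stand-in for dbScheduler

	public static <K, V> Flux<V> getMultiBatched(Flux<K> keys, Function<List<K>, List<V>> multiGet) {
		return keys
				.window(MULTI_GET_WINDOW)                              // split the key stream into batches
				.flatMap(window -> window
						.collectList()                                 // materialize one batch
						.flatMapMany(batch -> Mono
								.fromCallable(() -> multiGet.apply(batch)) // one blocking lookup per batch
								.subscribeOn(DB_SCHEDULER)                 // keep it off reactive threads
								.flatMapMany(Flux::fromIterable)));        // re-flatten the batch results
	}
}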