Fix compilation errors
parent a9857f7553
commit cd15f8d23d
@@ -54,7 +54,7 @@ public interface LLDictionary extends LLKeyValueDatabaseStructure {
     Stream<Buf> getRangeKeys(@Nullable LLSnapshot snapshot,
             LLRange range,
             boolean reverse,
-            boolean smallRange) throws RocksDBException, IOException;
+            boolean smallRange);
 
     Stream<List<Buf>> getRangeKeysGrouped(@Nullable LLSnapshot snapshot,
             LLRange range,
@@ -14,8 +14,7 @@ import org.rocksdb.RocksDBException;
 public interface LLKeyValueDatabase extends LLSnapshottable, LLKeyValueDatabaseStructure, DatabaseProperties,
         IBackuppable, DatabaseOperations {
 
-    LLSingleton getSingleton(byte[] singletonListColumnName, byte[] name, byte @Nullable [] defaultValue)
-            throws IOException;
+    LLSingleton getSingleton(byte[] singletonListColumnName, byte[] name, byte @Nullable [] defaultValue);
 
     LLDictionary getDictionary(byte[] columnName, UpdateMode updateMode);
 
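Note: the two interface hunks above remove the checked throws clauses (RocksDBException, IOException) from LLDictionary and LLKeyValueDatabase, so callers compile without try/catch around these calls; later hunks in this commit push the remaining checked exceptions behind unchecked wrappers. A minimal standalone sketch of what the signature change means for a caller (generic names, not the project's API):

    // After the change there is no checked exception in the signature.
    interface KeyStore {
        String get(String key);
    }

    final class KeyStoreCaller {
        // Callers can use the value directly; I/O failures are assumed to
        // arrive as unchecked exceptions (DBException in this codebase).
        static int valueLength(KeyStore store, String key) {
            return store.get(key).length();
        }
    }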
@@ -145,13 +145,8 @@ public class LLMultiDatabaseConnection implements LLDatabaseConnection {
         var indices = connectionToShardMap.entrySet().stream().flatMap(entry -> {
             var connectionIndexStructure = indexStructure.setActiveShards(new IntArrayList(entry.getValue()));
 
-            LLLuceneIndex connIndex;
-            try {
-                connIndex = entry.getKey().getLuceneIndex(clusterName, connectionIndexStructure,
+            LLLuceneIndex connIndex = entry.getKey().getLuceneIndex(clusterName, connectionIndexStructure,
                     indicizerAnalyzers, indicizerSimilarities, luceneOptions, luceneHacks);
-            } catch (IOException e) {
-                throw new CompletionException(e);
-            }
 
             return entry.getValue().intStream().mapToObj(shard -> new ShardToIndex(shard, connIndex));
         }).toList();
@@ -63,7 +63,11 @@ public class CachedIndexSearcherManager extends SimpleResource implements IndexS
         this.similarity = similarity;
         this.queryRefreshDebounceTime = queryRefreshDebounceTime;
 
+        try {
             this.searcherManager = new SearcherManager(indexWriter, applyAllDeletes, writeAllDeletes, SEARCHER_FACTORY);
+        } catch (IOException e) {
+            throw new DBException(e);
+        }
 
         refreshSubscription = luceneHeavyTasksScheduler.scheduleAtFixedRate(() -> {
             try {
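Note: the searcher-manager hunks apply the recurring pattern of this commit: calls that still throw the checked IOException are wrapped locally and rethrown as the project's unchecked database exception. A generic, self-contained sketch of that pattern (UncheckedIOException stands in for DBException, which is assumed to be unchecked here):

    import java.io.IOException;
    import java.io.UncheckedIOException;
    import java.util.concurrent.Callable;

    final class Unchecked {
        // Run an I/O action and rethrow any IOException unchecked,
        // mirroring the try/catch blocks added in the hunks above.
        static <T> T wrapIo(Callable<T> action) {
            try {
                return action.call();
            } catch (IOException e) {
                throw new UncheckedIOException(e); // the commit throws DBException instead
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
    }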
@@ -98,7 +102,11 @@ public class CachedIndexSearcherManager extends SimpleResource implements IndexS
         IndexSearcher indexSearcher;
         boolean fromSnapshot;
         if (snapshotsManager == null || snapshot == null) {
+            try {
                 indexSearcher = searcherManager.acquire();
+            } catch (IOException ex) {
+                throw new DBException(ex);
+            }
             fromSnapshot = false;
         } else {
             indexSearcher = snapshotsManager.resolveSnapshot(snapshot).getIndexSearcher(SEARCH_EXECUTOR);
@@ -131,6 +139,8 @@ public class CachedIndexSearcherManager extends SimpleResource implements IndexS
             searcherManager.maybeRefreshBlocking();
         } catch (AlreadyClosedException ignored) {
 
+        } catch (IOException e) {
+            throw new DBException(e);
         } finally {
             activeRefreshes.decrementAndGet();
         }
@@ -143,6 +153,8 @@ public class CachedIndexSearcherManager extends SimpleResource implements IndexS
             searcherManager.maybeRefresh();
         } catch (AlreadyClosedException ignored) {
 
+        } catch (IOException e) {
+            throw new DBException(e);
         } finally {
             activeRefreshes.decrementAndGet();
         }
@@ -478,13 +478,7 @@ public class LLLocalDictionary implements LLDictionary {
 
     @Override
     public Stream<OptionalBuf> getMulti(@Nullable LLSnapshot snapshot, Stream<Buf> keys) {
-        return keys.map(key -> {
-            try {
-                return OptionalBuf.ofNullable(getSync(snapshot, key));
-            } catch (IOException e) {
-                throw new CompletionException(e);
-            }
-        });
+        return keys.map(key -> OptionalBuf.ofNullable(getSync(snapshot, key)));
     }
 
     @Override
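Note: once getSync no longer declares IOException, the per-key try/catch inside the stream disappears and the lambda collapses to a single expression, as the hunk above shows. A simplified, standalone sketch of the same shape using plain strings instead of the project's Buf/OptionalBuf types:

    import java.util.Optional;
    import java.util.stream.Stream;

    final class MultiGetExample {
        // Hypothetical lookup: returns empty when the key is missing and is
        // assumed to throw an unchecked exception on I/O errors, like getSync after this commit.
        static Optional<String> lookup(String key) {
            return key.isEmpty() ? Optional.empty() : Optional.of(key.toUpperCase());
        }

        static Stream<Optional<String>> getMulti(Stream<String> keys) {
            // No checked exception to catch, so map() takes a one-line lambda.
            return keys.map(MultiGetExample::lookup);
        }
    }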
@@ -854,13 +848,9 @@ public class LLLocalDictionary implements LLDictionary {
             });
 
             entries.forEach(entry -> {
-                try {
                     if (entry.getKey() != null && entry.getValue() != null) {
                         this.putInternal(entry.getKey(), entry.getValue());
                     }
-                } catch (IOException ex) {
-                    throw new CompletionException(new DBException("Failed to write range", ex));
-                }
             });
         }
     }
@@ -861,8 +861,8 @@ public class LLLocalKeyValueDatabase extends Backuppable implements LLKeyValueDa
         }
     }
 
-    private static OptionsWithCache openRocksDb(@Nullable Path path, DatabaseOptions databaseOptions, RocksDBRefs refs)
-            throws IOException {
+    private static OptionsWithCache openRocksDb(@Nullable Path path, DatabaseOptions databaseOptions, RocksDBRefs refs) {
+        try {
         // Get databases directory path
         Path databasesDirPath;
         if (path != null) {
@@ -1054,6 +1054,9 @@ public class LLLocalKeyValueDatabase extends Backuppable implements LLKeyValueDa
         }
 
         return new OptionsWithCache(options, blockCache, compressedCache);
+        } catch (IOException e) {
+            throw new DBException(e);
+        }
     }
 
     record DbPathRecord(Path path, long targetSize) {}
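Note: the two LLLocalKeyValueDatabase hunks above (at lines 861 and 1054 of that file) are halves of one change to openRocksDb: the throws IOException clause is dropped, a try { is opened right after the signature, and the matching catch converting to DBException is added just before the closing brace. Each hunk looks unbalanced in isolation; together the braces match. A small runnable sketch of the resulting method shape, with Files.size standing in for the real RocksDB options setup:

    import java.io.IOException;
    import java.io.UncheckedIOException;
    import java.nio.file.Files;
    import java.nio.file.Path;

    final class OpenExample {
        // Same shape as openRocksDb after this commit: no throws clause,
        // whole body inside try, checked IOException rethrown unchecked at the end.
        static long openAndMeasure(Path path) {
            try {
                return Files.size(path); // stands in for the options/cache construction
            } catch (IOException e) {
                throw new UncheckedIOException(e); // the commit throws DBException here
            }
        }
    }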
@@ -1316,13 +1319,7 @@ public class LLLocalKeyValueDatabase extends Backuppable implements LLKeyValueDa
 
     @Override
     public Stream<ColumnProperty<Map<String, String>>> getMapColumnProperties(RocksDBMapProperty property) {
-        return getAllColumnFamilyHandles().keySet().stream().map(c -> {
-            try {
-                return new ColumnProperty<>(c.name(), property.getName(), this.getMapProperty(c, property));
-            } catch (IOException e) {
-                throw new CompletionException(e);
-            }
-        });
+        return getAllColumnFamilyHandles().keySet().stream().map(c -> new ColumnProperty<>(c.name(), property.getName(), this.getMapProperty(c, property)));
     }
 
     @Override
@@ -1347,11 +1344,7 @@ public class LLLocalKeyValueDatabase extends Backuppable implements LLKeyValueDa
     @Override
     public Stream<ColumnProperty<String>> getStringColumnProperties(RocksDBStringProperty property) {
         return getAllColumnFamilyHandles().keySet().stream().map(c -> {
-            try {
                 return new ColumnProperty<>(c.name(), property.getName(), this.getStringProperty(c, property));
-            } catch (IOException e) {
-                throw new CompletionException(e);
-            }
         });
     }
 
@@ -1377,11 +1370,7 @@ public class LLLocalKeyValueDatabase extends Backuppable implements LLKeyValueDa
     @Override
     public Stream<ColumnProperty<Long>> getLongColumnProperties(RocksDBLongProperty property) {
         return getAllColumnFamilyHandles().keySet().stream().map(c -> {
-            try {
                 return new ColumnProperty<>(c.name(), property.getName(), this.getLongProperty(c, property));
-            } catch (IOException e) {
-                throw new CompletionException(e);
-            }
         });
     }
 
@@ -60,7 +60,11 @@ public class SimpleIndexSearcherManager extends SimpleResource implements IndexS
         this.similarity = similarity;
         this.queryRefreshDebounceTime = queryRefreshDebounceTime;
 
+        try {
             this.searcherManager = new SearcherManager(indexWriter, applyAllDeletes, writeAllDeletes, SEARCHER_FACTORY);
+        } catch (IOException e) {
+            throw new DBException(e);
+        }
 
         refreshSubscription = luceneHeavyTasksScheduler.scheduleAtFixedRate(() -> {
             try {
@@ -83,6 +87,8 @@ public class SimpleIndexSearcherManager extends SimpleResource implements IndexS
             searcherManager.maybeRefreshBlocking();
         } catch (AlreadyClosedException ignored) {
 
+        } catch (IOException e) {
+            throw new DBException(e);
         } finally {
             activeRefreshes.decrementAndGet();
         }
@@ -95,6 +101,8 @@ public class SimpleIndexSearcherManager extends SimpleResource implements IndexS
             searcherManager.maybeRefresh();
         } catch (AlreadyClosedException ignored) {
 
+        } catch (IOException e) {
+            throw new DBException(e);
         } finally {
             activeRefreshes.decrementAndGet();
         }
@@ -10,7 +10,7 @@ import org.jetbrains.annotations.NotNull;
 public class BufSerializer implements DataSerializer<Buf> {
 
     @Override
-    public void serialize(DataOutput dataOutput, @NotNull Buf bytes) {
+    public void serialize(DataOutput dataOutput, @NotNull Buf bytes) throws IOException {
         dataOutput.writeInt(bytes.size());
         for (Byte aByte : bytes) {
             dataOutput.writeByte(aByte);
@@ -18,7 +18,7 @@ public class BufSerializer implements DataSerializer<Buf> {
     }
 
     @Override
-    public @NotNull Buf deserialize(DataInput dataInput) {
+    public @NotNull Buf deserialize(DataInput dataInput) throws IOException {
         var size = dataInput.readInt();
         var bal = Buf.create(size);
         for (int i = 0; i < size; i++) {
@@ -10,12 +10,12 @@ import org.jetbrains.annotations.NotNull;
 public class CompressionSerializer implements DataSerializer<Compression> {
 
     @Override
-    public void serialize(DataOutput dataOutput, @NotNull Compression compression) {
+    public void serialize(DataOutput dataOutput, @NotNull Compression compression) throws IOException {
         dataOutput.writeInt(compression.ordinal());
     }
 
     @Override
-    public @NotNull Compression deserialize(DataInput dataInput) {
+    public @NotNull Compression deserialize(DataInput dataInput) throws IOException {
         return Compression.values()[dataInput.readInt()];
     }
 }
@@ -11,7 +11,7 @@ import org.jetbrains.annotations.NotNull;
 public class DurationSerializer implements DataSerializer<Duration> {
 
     @Override
-    public void serialize(DataOutput dataOutput, @NotNull Duration duration) {
+    public void serialize(DataOutput dataOutput, @NotNull Duration duration) throws IOException {
         var units = duration.getUnits();
         var smallestUnit = (ChronoUnit) units.get(units.size() - 1);
         dataOutput.writeInt(smallestUnit.ordinal());
@@ -19,7 +19,7 @@ public class DurationSerializer implements DataSerializer<Duration> {
     }
 
     @Override
-    public @NotNull Duration deserialize(DataInput dataInput) {
+    public @NotNull Duration deserialize(DataInput dataInput) throws IOException {
         var smallestUnit = ChronoUnit.values()[dataInput.readInt()];
         return Duration.of(dataInput.readLong(), smallestUnit);
     }
@@ -10,12 +10,12 @@ import org.jetbrains.annotations.NotNull;
 public class LLSnapshotSerializer implements DataSerializer<LLSnapshot> {
 
     @Override
-    public void serialize(DataOutput dataOutput, @NotNull LLSnapshot llSnapshot) {
+    public void serialize(DataOutput dataOutput, @NotNull LLSnapshot llSnapshot) throws IOException {
         dataOutput.writeLong(llSnapshot.getSequenceNumber());
     }
 
     @Override
-    public @NotNull LLSnapshot deserialize(DataInput dataInput) {
+    public @NotNull LLSnapshot deserialize(DataInput dataInput) throws IOException {
         return new LLSnapshot(dataInput.readLong());
     }
 }
@@ -10,12 +10,12 @@ import org.jetbrains.annotations.NotNull;
 public class PathSerializer implements DataSerializer<Path> {
 
     @Override
-    public void serialize(DataOutput dataOutput, @NotNull Path path) {
+    public void serialize(DataOutput dataOutput, @NotNull Path path) throws IOException {
         dataOutput.writeUTF(path.toString());
     }
 
     @Override
-    public @NotNull Path deserialize(DataInput dataInput) {
+    public @NotNull Path deserialize(DataInput dataInput) throws IOException {
         return Path.of(dataInput.readUTF());
     }
 }
@@ -26,7 +26,7 @@ public class String2FieldAnalyzerMapSerializer implements DataSerializer<Map<Str
     }
 
     @Override
-    public @NotNull Map<String, TextFieldsAnalyzer> deserialize(DataInput dataInput) {
+    public @NotNull Map<String, TextFieldsAnalyzer> deserialize(DataInput dataInput) throws IOException {
         var size = dataInput.readInt();
         var result = new HashMap<String, TextFieldsAnalyzer>(size);
         for (int i = 0; i < size; i++) {
@@ -26,7 +26,7 @@ public class String2FieldSimilarityMapSerializer implements DataSerializer<Map<S
     }
 
     @Override
-    public @NotNull Map<String, TextFieldsSimilarity> deserialize(DataInput dataInput) {
+    public @NotNull Map<String, TextFieldsSimilarity> deserialize(DataInput dataInput) throws IOException {
         var size = dataInput.readInt();
         var result = new HashMap<String, TextFieldsSimilarity>(size);
         for (int i = 0; i < size; i++) {
@@ -11,13 +11,13 @@ import org.jetbrains.annotations.NotNull;
 public class StringEntrySerializer implements DataSerializer<Map.Entry> {
 
     @Override
-    public void serialize(DataOutput dataOutput, @NotNull Map.Entry entry) {
+    public void serialize(DataOutput dataOutput, @NotNull Map.Entry entry) throws IOException {
         dataOutput.writeUTF((String) entry.getKey());
         dataOutput.writeUTF((String) entry.getValue());
     }
 
     @Override
-    public @NotNull Map.Entry deserialize(DataInput dataInput) {
+    public @NotNull Map.Entry deserialize(DataInput dataInput) throws IOException {
         return Map.entry(dataInput.readUTF(), dataInput.readUTF());
     }
 }
@@ -23,7 +23,7 @@ public class StringMapSerializer implements DataSerializer<Map<String, String>>
     }
 
     @Override
-    public @NotNull Map<String, String> deserialize(DataInput dataInput) {
+    public @NotNull Map<String, String> deserialize(DataInput dataInput) throws IOException {
         var size = dataInput.readInt();
         var result = new HashMap<String, String>(size);
         for (int i = 0; i < size; i++) {
@@ -10,12 +10,12 @@ import org.jetbrains.annotations.NotNull;
 public class TextFieldsAnalyzerSerializer implements DataSerializer<TextFieldsAnalyzer> {
 
     @Override
-    public void serialize(DataOutput dataOutput, @NotNull TextFieldsAnalyzer textFieldsAnalyzer) {
+    public void serialize(DataOutput dataOutput, @NotNull TextFieldsAnalyzer textFieldsAnalyzer) throws IOException {
         dataOutput.writeInt(textFieldsAnalyzer.ordinal());
     }
 
     @Override
-    public @NotNull TextFieldsAnalyzer deserialize(DataInput dataInput) {
+    public @NotNull TextFieldsAnalyzer deserialize(DataInput dataInput) throws IOException {
         return TextFieldsAnalyzer.values()[dataInput.readInt()];
     }
 }
@@ -10,12 +10,12 @@ import org.jetbrains.annotations.NotNull;
 public class TextFieldsSimilaritySerializer implements DataSerializer<TextFieldsSimilarity> {
 
     @Override
-    public void serialize(DataOutput dataOutput, @NotNull TextFieldsSimilarity textFieldsSimilarity) {
+    public void serialize(DataOutput dataOutput, @NotNull TextFieldsSimilarity textFieldsSimilarity) throws IOException {
         dataOutput.writeInt(textFieldsSimilarity.ordinal());
     }
 
     @Override
-    public @NotNull TextFieldsSimilarity deserialize(DataInput dataInput) {
+    public @NotNull TextFieldsSimilarity deserialize(DataInput dataInput) throws IOException {
         return TextFieldsSimilarity.values()[dataInput.readInt()];
     }
 }
@@ -10,12 +10,12 @@ import org.jetbrains.annotations.NotNull;
 public class UpdateReturnModeSerializer implements DataSerializer<UpdateReturnMode> {
 
     @Override
-    public void serialize(DataOutput dataOutput, @NotNull UpdateReturnMode updateReturnMode) {
+    public void serialize(DataOutput dataOutput, @NotNull UpdateReturnMode updateReturnMode) throws IOException {
         dataOutput.writeInt(updateReturnMode.ordinal());
     }
 
     @Override
-    public @NotNull UpdateReturnMode deserialize(DataInput dataInput) {
+    public @NotNull UpdateReturnMode deserialize(DataInput dataInput) throws IOException {
         return UpdateReturnMode.values()[dataInput.readInt()];
     }
 }
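Note: the serializer hunks above go the other way: the DataSerializer methods now declare throws IOException again, so DataOutput/DataInput failures can propagate instead of being swallowed. A minimal, runnable sketch of the same enum-by-ordinal shape using a standard Java enum (DayOfWeek stands in for the project's enums; this is not the project's DataSerializer interface):

    import java.io.DataInput;
    import java.io.DataOutput;
    import java.io.IOException;
    import java.time.DayOfWeek;

    final class DayOfWeekSerializer {
        void serialize(DataOutput out, DayOfWeek value) throws IOException {
            out.writeInt(value.ordinal()); // same ordinal encoding as the serializers above
        }

        DayOfWeek deserialize(DataInput in) throws IOException {
            return DayOfWeek.values()[in.readInt()];
        }
    }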
@@ -46,7 +46,6 @@ public class CodecSerializer<A> implements Serializer<A> {
 
     @Override
     public @NotNull A deserialize(@NotNull BufDataInput is) throws SerializationException {
-        try {
             int codecId;
             if (microCodecs) {
                 codecId = is.readUnsignedByte();
@@ -55,25 +54,16 @@ public class CodecSerializer<A> implements Serializer<A> {
             }
             var serializer = deserializationCodecs.getCodec(codecId);
             return serializer.deserialize(is);
-        } catch (IOException ex) {
-            // This shouldn't happen
-            throw new IOError(ex);
-        }
     }
 
     @Override
     public void serialize(@NotNull A deserialized, BufDataOutput os) throws SerializationException {
-        try {
             if (microCodecs) {
                 os.writeByte(serializationCodecId);
             } else {
                 os.writeInt(serializationCodecId);
             }
             serializationCodec.serialize(os, deserialized);
-        } catch (IOException ex) {
-            // This shouldn't happen
-            throw new IOError(ex);
-        }
     }
 
     @SuppressWarnings("unused")
@@ -9,11 +9,11 @@ import org.apache.lucene.store.IOContext;
 
 public class AlwaysDirectIOFSDirectory extends DirectIODirectory {
 
-    public AlwaysDirectIOFSDirectory(Path path, int mergeBufferSize, long minBytesDirect) {
+    public AlwaysDirectIOFSDirectory(Path path, int mergeBufferSize, long minBytesDirect) throws IOException {
         super(FSDirectory.open(path), mergeBufferSize, minBytesDirect);
     }
 
-    public AlwaysDirectIOFSDirectory(Path path) {
+    public AlwaysDirectIOFSDirectory(Path path) throws IOException {
         super(FSDirectory.open(path));
     }
 
@@ -19,7 +19,7 @@ public class CheckIndexOutput extends IndexOutput {
     }
 
     @Override
-    public void close() {
+    public void close() throws IOException {
         warnLuceneThread();
         output.close();
     }
@@ -31,19 +31,19 @@ public class CheckIndexOutput extends IndexOutput {
     }
 
     @Override
-    public long getChecksum() {
+    public long getChecksum() throws IOException {
         checkThread();
         return output.getChecksum();
     }
 
     @Override
-    public void writeByte(byte b) {
+    public void writeByte(byte b) throws IOException {
         checkThread();
         output.writeByte(b);
     }
 
     @Override
-    public void writeBytes(byte[] b, int offset, int length) {
+    public void writeBytes(byte[] b, int offset, int length) throws IOException {
         checkThread();
         output.writeBytes(b, offset, length);
     }
@@ -2,6 +2,7 @@ package it.cavallium.dbengine.lucene;
 
 import static it.cavallium.dbengine.lucene.LuceneUtils.warnLuceneThread;
 
+import it.cavallium.dbengine.utils.DBException;
 import java.io.IOException;
 import java.util.Collection;
 import java.util.Set;
@@ -21,69 +22,117 @@ public class CheckOutputDirectory extends Directory {
 
     @Override
     public String[] listAll() {
+        try {
             return directory.listAll();
+        } catch (IOException e) {
+            throw new DBException(e);
+        }
     }
 
     @Override
     public void deleteFile(String name) {
+        try {
             directory.deleteFile(name);
+        } catch (IOException e) {
+            throw new DBException(e);
+        }
     }
 
     @Override
     public long fileLength(String name) {
+        try {
             return directory.fileLength(name);
+        } catch (IOException e) {
+            throw new DBException(e);
+        }
     }
 
     @Override
     public IndexOutput createOutput(String name, IOContext context) {
         LuceneUtils.checkLuceneThread();
+        try {
             return new CheckIndexOutput(directory.createOutput(name, context));
+        } catch (IOException e) {
+            throw new DBException(e);
+        }
     }
 
     @Override
     public IndexOutput createTempOutput(String prefix, String suffix, IOContext context) {
         LuceneUtils.checkLuceneThread();
+        try {
             return new CheckIndexOutput(directory.createTempOutput(prefix, suffix, context));
+        } catch (IOException e) {
+            throw new DBException(e);
+        }
     }
 
     @Override
     public void sync(Collection<String> names) {
         LuceneUtils.checkLuceneThread();
+        try {
             directory.sync(names);
+        } catch (IOException e) {
+            throw new DBException(e);
+        }
     }
 
     @Override
    public void syncMetaData() {
         LuceneUtils.checkLuceneThread();
+        try {
             directory.syncMetaData();
+        } catch (IOException e) {
+            throw new DBException(e);
+        }
     }
 
     @Override
     public void rename(String source, String dest) {
         LuceneUtils.checkLuceneThread();
+        try {
             directory.rename(source, dest);
+        } catch (IOException e) {
+            throw new DBException(e);
+        }
     }
 
     @Override
     public IndexInput openInput(String name, IOContext context) {
         LuceneUtils.checkLuceneThread();
+        try {
             return new CheckIndexInput(directory.openInput(name, context));
+        } catch (IOException e) {
+            throw new DBException(e);
+        }
     }
 
     @Override
     public Lock obtainLock(String name) {
         LuceneUtils.checkLuceneThread();
+        try {
             return directory.obtainLock(name);
+        } catch (IOException e) {
+            throw new DBException(e);
+        }
     }
 
     @Override
     public void close() {
         warnLuceneThread();
+        try {
             directory.close();
+        } catch (IOException e) {
+            throw new DBException(e);
+        }
     }
 
     @Override
     public Set<String> getPendingDeletions() {
+        try {
             return directory.getPendingDeletions();
+        } catch (IOException e) {
+            throw new DBException(e);
+        }
     }
 }
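Note: CheckOutputDirectory keeps unchecked override signatures and instead wraps every delegated call, converting IOException into the project's unchecked DBException; each method in the hunk above follows the same shape. A generic, self-contained sketch of that delegation shape (UncheckedIOException standing in for DBException):

    import java.io.IOException;
    import java.io.UncheckedIOException;

    final class IoDelegate {
        @FunctionalInterface
        interface IoCall<T> {
            T run() throws IOException;
        }

        // Call the wrapped object and convert IOException to an unchecked exception,
        // mirroring each override in CheckOutputDirectory.
        static <T> T delegate(IoCall<T> call) {
            try {
                return call.run();
            } catch (IOException e) {
                throw new UncheckedIOException(e); // the commit throws DBException here
            }
        }
    }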
@@ -543,7 +543,12 @@ public class LuceneUtils {
         }
 
         // Get the reference docId and apply it to MoreLikeThis, to generate the query
-        Query mltQuery = mlt.like(mltDocumentFields);
+        Query mltQuery = null;
+        try {
+            mltQuery = mlt.like(mltDocumentFields);
+        } catch (IOException e) {
+            throw new DBException(e);
+        }
         Query luceneQuery;
         if (!(luceneAdditionalQuery instanceof MatchAllDocsQuery)) {
             luceneQuery = new Builder()
@@ -31,14 +31,14 @@ public class RandomFieldComparator extends FieldComparator<Float> implements Lea
     }
 
     @Override
-    public int compareBottom(int doc) {
+    public int compareBottom(int doc) throws IOException {
         float score = scorer.score();
         assert !Float.isNaN(score);
         return Float.compare(score, bottom);
     }
 
     @Override
-    public void copy(int slot, int doc) {
+    public void copy(int slot, int doc) throws IOException {
         scores[slot] = scorer.score();
         assert !Float.isNaN(scores[slot]);
     }
@@ -93,7 +93,7 @@ public class RandomFieldComparator extends FieldComparator<Float> implements Lea
     }
 
     @Override
-    public int compareTop(int doc) {
+    public int compareTop(int doc) throws IOException {
         float docValue = scorer.score();
         assert !Float.isNaN(docValue);
         return Float.compare(docValue, topValue);
@@ -114,7 +114,7 @@ public class DecimalBucketMultiCollectorManager implements CollectorMultiManager
         return new double[buckets];
     }
 
-    public Buckets search(IndexSearcher indexSearcher) {
+    public Buckets search(IndexSearcher indexSearcher) throws IOException {
         Query query;
         if (USE_SINGLE_FACET_COLLECTOR && normalizationQuery != null) {
             query = normalizationQuery;
@@ -17,7 +17,7 @@ public interface FacetsCollector extends Collector {
     }
 
     @Override
-    public LeafCollector getLeafCollector(LeafReaderContext context) {
+    public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
         return facetsCollector.getLeafCollector(context);
     }
 
@@ -30,7 +30,7 @@ public class FastFacetsCollectorManager implements CollectorManager<FacetsCollec
     }
 
     @Override
-    public FacetsCollector reduce(Collection<FacetsCollector> collectors) {
+    public FacetsCollector reduce(Collection<FacetsCollector> collectors) throws IOException {
         return FacetsCollector.wrap(facetsCollectorManager.reduce(collectors
                 .stream()
                 .map(facetsCollector -> facetsCollector.getLuceneFacetsCollector())
@@ -61,23 +61,23 @@ public class FastFacetsCollectorManager implements CollectorManager<FacetsCollec
     }
 
     @Override
-    public LeafCollector getLeafCollector(LeafReaderContext context) {
+    public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
         var leafCollector = collector.getLeafCollector(context);
         return new LeafCollector() {
             @Override
-            public void setScorer(Scorable scorer) {
+            public void setScorer(Scorable scorer) throws IOException {
                 leafCollector.setScorer(scorer);
             }
 
             @Override
-            public void collect(int doc) {
+            public void collect(int doc) throws IOException {
                 if (collectionRate == 1 || hash.hashCode(doc) % collectionRate == 0) {
                     leafCollector.collect(doc);
                 }
             }
 
             @Override
-            public DocIdSetIterator competitiveIterator() {
+            public DocIdSetIterator competitiveIterator() throws IOException {
                 return leafCollector.competitiveIterator();
             }
         };
@@ -34,17 +34,17 @@ public class FastRandomSamplingFacetsCollector extends SimpleCollector implement
     }
 
     @Override
-    protected void doSetNextReader(LeafReaderContext context) {
+    protected void doSetNextReader(LeafReaderContext context) throws IOException {
         collector.getLeafCollector(context);
     }
 
     @Override
-    public void setScorer(Scorable scorer) {
+    public void setScorer(Scorable scorer) throws IOException {
         collector.setScorer(scorer);
     }
 
     @Override
-    public void collect(int doc) {
+    public void collect(int doc) throws IOException {
         if (collectionRate == 1 || hash.hashCode(doc) % collectionRate == 0) {
             collector.collect(doc);
         }
@@ -84,12 +84,12 @@ public class ScoringShardsCollectorMultiManager implements CollectorMultiManager
     public CollectorManager<TopFieldCollector, TopDocs> get(IndexSearcher indexSearcher, int shardIndex) {
         return new CollectorManager<>() {
             @Override
-            public TopFieldCollector newCollector() {
+            public TopFieldCollector newCollector() throws IOException {
                 return sharedCollectorManager.newCollector();
             }
 
             @Override
-            public TopDocs reduce(Collection<TopFieldCollector> collectors) {
+            public TopDocs reduce(Collection<TopFieldCollector> collectors) throws IOException {
                 if (LLUtils.isInNonBlockingThread()) {
                     throw new UnsupportedOperationException("Called reduce in a nonblocking thread");
                 }
@@ -83,7 +83,7 @@ public class TopDocsCollectorMultiManager implements CollectorMultiManager<TopDo
     }
 
     @Override
-    public TopDocs reduce(Collection<TopDocsCollector<?>> collectors) {
+    public TopDocs reduce(Collection<TopDocsCollector<?>> collectors) throws IOException {
         TopDocs[] docsArray;
         boolean needsSort = luceneSort != null;
         boolean needsScores = luceneSort != null && luceneSort.needsScores();
@@ -48,7 +48,7 @@ public class TotalHitCountCollectorManager implements CollectorManager<TimeLimit
     }
 
     @Override
-    public LeafCollector getLeafCollector(LeafReaderContext context) {
+    public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
         return timeLimitingCollector.getLeafCollector(context);
     }
 
@@ -77,7 +77,7 @@ public class Lucene90NoCompressionStoredFieldsFormat extends StoredFieldsFormat
     private static final Compressor DUMMY_COMPRESSOR = new Compressor() {
 
         @Override
-        public void compress(ByteBuffersDataInput byteBuffersDataInput, DataOutput dataOutput) {
+        public void compress(ByteBuffersDataInput byteBuffersDataInput, DataOutput dataOutput) throws IOException {
             dataOutput.copyBytes(byteBuffersDataInput, byteBuffersDataInput.size());
         }
 
@@ -96,7 +96,7 @@ public class Lucene90NoCompressionStoredFieldsFormat extends StoredFieldsFormat
     }
 
     @Override
-    public StoredFieldsWriter fieldsWriter(Directory directory, SegmentInfo si, IOContext context) {
+    public StoredFieldsWriter fieldsWriter(Directory directory, SegmentInfo si, IOContext context) throws IOException {
         return impl().fieldsWriter(directory, si, context);
     }
 
@@ -86,7 +86,7 @@ public class BigCompositeReader<R extends IndexReader> {
         }
     }
 
-    public long getDocCount(String field) {
+    public long getDocCount(String field) throws IOException {
         this.ensureOpen();
         long total = 0;
 
@@ -103,7 +103,7 @@ public class BigCompositeReader<R extends IndexReader> {
         return total;
     }
 
-    public long docFreq(Term term) {
+    public long docFreq(Term term) throws IOException {
         this.ensureOpen();
         long total = 0;
 
@@ -137,7 +137,7 @@ public class BigCompositeReader<R extends IndexReader> {
         return numDocs;
     }
 
-    public Fields getTermVectors(long docID) {
+    public Fields getTermVectors(long docID) throws IOException {
         this.ensureOpen();
         int i = this.readerIndex(docID);
         return this.subReaders[i].getTermVectors(Math.toIntExact(docID - this.starts[i]));
@@ -177,19 +177,19 @@ public class BigCompositeReader<R extends IndexReader> {
         return hi;
     }
 
-    public final void document(long docID, StoredFieldVisitor visitor) {
+    public final void document(long docID, StoredFieldVisitor visitor) throws IOException {
         this.ensureOpen();
         int i = this.readerIndex(docID);
         this.subReaders[i].document(Math.toIntExact(docID - this.starts[i]), visitor);
     }
 
-    public final Document document(long docID) {
+    public final Document document(long docID) throws IOException {
         DocumentStoredFieldVisitor visitor = new DocumentStoredFieldVisitor();
         this.document(docID, visitor);
         return visitor.getDocument();
     }
 
-    public final Document document(long docID, Set<String> fieldsToLoad) {
+    public final Document document(long docID, Set<String> fieldsToLoad) throws IOException {
         DocumentStoredFieldVisitor visitor = new DocumentStoredFieldVisitor(fieldsToLoad);
         this.document(docID, visitor);
         return visitor.getDocument();
@@ -549,7 +549,7 @@ public final class MultiMoreLikeThis {
      * @param docNum the documentID of the lucene doc to generate the 'More Like This" query for.
      * @return a query that will return docs like the passed lucene document ID.
      */
-    public Query like(long docNum) {
+    public Query like(long docNum) throws IOException {
         if (fieldNames == null) {
             // gather list of valid fields from lucene
             Collection<String> fields;
@@ -564,7 +564,7 @@ public final class MultiMoreLikeThis {
      * @param filteredDocument Document with field values extracted for selected fields.
      * @return More Like This query for the passed document.
      */
-    public Query like(Map<String, ? extends Collection<?>> filteredDocument) {
+    public Query like(Map<String, ? extends Collection<?>> filteredDocument) throws IOException {
         if (fieldNames == null) {
             // gather list of valid fields from lucene
             Collection<String> fields = BigCompositeReader.getIndexedFields(ir);
@@ -579,7 +579,7 @@ public final class MultiMoreLikeThis {
      *
      * @return a query that will return docs like the passed Readers.
      */
-    public Query like(String fieldName, Reader... readers) {
+    public Query like(String fieldName, Reader... readers) throws IOException {
         Map<String, Map<String, Long>> perFieldTermFrequencies = new HashMap<>();
         for (Reader r : readers) {
             addTermFrequencies(r, perFieldTermFrequencies, fieldName);
@@ -622,7 +622,7 @@ public final class MultiMoreLikeThis {
      * objects as the values.
      */
     private PriorityQueue<ScoreTerm> createQueue(
-            Map<String, Map<String, Long>> perFieldTermFrequencies) {
+            Map<String, Map<String, Long>> perFieldTermFrequencies) throws IOException {
         // have collected all words in doc and their freqs
         final long limit = Math.min(maxQueryTerms, this.getTermsCount(perFieldTermFrequencies));
         FreqQ queue = new FreqQ(Math.toIntExact(limit)); // will order words by score
@@ -709,7 +709,7 @@ public final class MultiMoreLikeThis {
      *
      * @param docNum the id of the lucene document from which to find terms
      */
-    private PriorityQueue<ScoreTerm> retrieveTerms(long docNum) {
+    private PriorityQueue<ScoreTerm> retrieveTerms(long docNum) throws IOException {
         Map<String, Map<String, Long>> field2termFreqMap = new HashMap<>();
         retrieveTermsOfIndexReader(ir, docNum, field2termFreqMap);
 
@@ -879,14 +879,14 @@ public final class MultiMoreLikeThis {
      * or best entry, first
      * @see #retrieveInterestingTerms
      */
-    private PriorityQueue<ScoreTerm> retrieveTerms(Reader r, String fieldName) {
+    private PriorityQueue<ScoreTerm> retrieveTerms(Reader r, String fieldName) throws IOException {
         Map<String, Map<String, Long>> field2termFreqMap = new HashMap<>();
         addTermFrequencies(r, field2termFreqMap, fieldName);
         return createQueue(field2termFreqMap);
     }
 
     /** @see #retrieveInterestingTerms(java.io.Reader, String) */
-    public String[] retrieveInterestingTerms(long docNum) {
+    public String[] retrieveInterestingTerms(long docNum) throws IOException {
         ArrayList<String> al = new ArrayList<>(Math.toIntExact(maxQueryTerms));
         PriorityQueue<ScoreTerm> pq = retrieveTerms(docNum);
         ScoreTerm scoreTerm;
@@ -911,7 +911,7 @@ public final class MultiMoreLikeThis {
      * @see #retrieveTerms(java.io.Reader, String)
      * @see #setMaxQueryTerms
      */
-    public String[] retrieveInterestingTerms(Reader r, String fieldName) {
+    public String[] retrieveInterestingTerms(Reader r, String fieldName) throws IOException {
         ArrayList<String> al = new ArrayList<>(Math.toIntExact(maxQueryTerms));
         PriorityQueue<ScoreTerm> pq = retrieveTerms(r, fieldName);
         ScoreTerm scoreTerm;
@@ -32,11 +32,7 @@ public class AdaptiveLocalSearcher implements LocalSearcher {
             @Nullable String keyFieldName,
             GlobalQueryRewrite transformer) {
         if (transformer != NO_REWRITE) {
-            try {
                 return LuceneUtils.rewrite(this, indexSearcher, queryParams, keyFieldName, transformer);
-            } catch (IOException e) {
-                throw new RuntimeException(e);
-            }
         }
         return transformedCollect(indexSearcher, queryParams, keyFieldName, transformer);
     }
@@ -32,11 +32,7 @@ public class AdaptiveMultiSearcher implements MultiSearcher {
             @Nullable String keyFieldName,
             GlobalQueryRewrite transformer) {
         if (transformer != NO_REWRITE) {
-            try {
                 return LuceneUtils.rewriteMulti(this, indexSearchers, queryParams, keyFieldName, transformer);
-            } catch (IOException e) {
-                throw new RuntimeException(e);
-            }
         }
         return transformedCollectMulti(indexSearchers, queryParams, keyFieldName, transformer);
     }
@@ -4,6 +4,7 @@ import com.google.common.collect.Streams;
 import it.cavallium.dbengine.database.disk.LLIndexSearchers;
 import it.cavallium.dbengine.lucene.collector.Buckets;
 import it.cavallium.dbengine.lucene.collector.DecimalBucketMultiCollectorManager;
+import it.cavallium.dbengine.utils.DBException;
 import java.io.IOException;
 import java.util.List;
 import org.apache.logging.log4j.LogManager;
@@ -43,11 +44,11 @@ public class DecimalBucketMultiSearcher {
                 bucketParams.collectionRate(),
                 bucketParams.sampleSize()
         );
-        return cmm.reduce(Streams.stream(indexSearchers).parallel().map(shard -> {
+        return cmm.reduce(Streams.stream(indexSearchers).parallel().map(indexSearcher -> {
             try {
-                return cmm.search(shard);
+                return cmm.search(indexSearcher);
             } catch (IOException e) {
-                throw new RuntimeException(e);
+                throw new DBException(e);
             }
         }).toList());
     }
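Note: the DecimalBucketMultiSearcher hunk renames the lambda parameter and switches the wrapper from RuntimeException to DBException while keeping the same map-then-reduce shape over the shards. A standalone sketch of that shape, with a summed integer standing in for cmm.search/cmm.reduce (the helper names here are illustrative only):

    import java.io.IOException;
    import java.io.UncheckedIOException;
    import java.util.List;

    final class ShardSearchExample {
        // Hypothetical per-shard search that may fail with a checked IOException.
        static int searchShard(String shard) throws IOException {
            if (shard.isEmpty()) throw new IOException("empty shard name");
            return shard.length();
        }

        static int searchAll(List<String> shards) {
            return shards.parallelStream().map(shard -> {
                try {
                    return searchShard(shard);
                } catch (IOException e) {
                    throw new UncheckedIOException(e); // the commit throws DBException here
                }
            }).mapToInt(Integer::intValue).sum();
        }
    }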
|
@ -72,7 +72,7 @@ public class LuceneGenerator implements Supplier<ScoreDoc> {
|
|||||||
remainingOffset--;
|
remainingOffset--;
|
||||||
}
|
}
|
||||||
|
|
||||||
private Weight createWeight() {
|
private Weight createWeight() throws IOException {
|
||||||
ScoreMode scoreMode = computeScores ? ScoreMode.COMPLETE : ScoreMode.COMPLETE_NO_SCORES;
|
ScoreMode scoreMode = computeScores ? ScoreMode.COMPLETE : ScoreMode.COMPLETE_NO_SCORES;
|
||||||
return shard.createWeight(shard.rewrite(query), scoreMode, 1f);
|
return shard.createWeight(shard.rewrite(query), scoreMode, 1f);
|
||||||
}
|
}
|
||||||
@@ -93,7 +93,7 @@ public class LuceneGenerator implements Supplier<ScoreDoc> {
         }
     }
 
-    private ScoreDoc getWeightedNext() {
+    private ScoreDoc getWeightedNext() throws IOException {
         while (tryAdvanceDocIdSetIterator()) {
             LeafReader reader = leaf.reader();
             Bits liveDocs = reader.getLiveDocs();
@@ -109,7 +109,7 @@ public class LuceneGenerator implements Supplier<ScoreDoc> {
         clearState();
         return null;
     }
-    private boolean tryAdvanceDocIdSetIterator() {
+    private boolean tryAdvanceDocIdSetIterator() throws IOException {
         if (docIdSetIterator != null) {
             return true;
         }
@@ -127,7 +127,7 @@ public class LuceneGenerator implements Supplier<ScoreDoc> {
         return false;
     }
 
-    private ScoreDoc transformDoc(int doc) {
+    private ScoreDoc transformDoc(int doc) throws IOException {
         return new ScoreDoc(leaf.docBase + doc, scorer.score(), shardIndex);
     }
 
@@ -77,7 +77,7 @@ public class ShardIndexSearcher extends IndexSearcher {
     }
 
     @Override
-    public Query rewrite(Query original) {
+    public Query rewrite(Query original) throws IOException {
         final IndexSearcher localSearcher = new IndexSearcher(getIndexReader());
         original = localSearcher.rewrite(original);
         final Set<Term> terms = new HashSet<>();
@ -112,7 +112,7 @@ public class ShardIndexSearcher extends IndexSearcher {
|
|||||||
|
|
||||||
// Mock: in a real env, this would hit the wire and get
|
// Mock: in a real env, this would hit the wire and get
|
||||||
// term stats from remote node
|
// term stats from remote node
|
||||||
Map<Term, TermStatistics> getNodeTermStats(Set<Term> terms, int nodeID) {
|
Map<Term, TermStatistics> getNodeTermStats(Set<Term> terms, int nodeID) throws IOException {
|
||||||
var s = searchers[nodeID];
|
var s = searchers[nodeID];
|
||||||
final Map<Term, TermStatistics> stats = new HashMap<>();
|
final Map<Term, TermStatistics> stats = new HashMap<>();
|
||||||
if (s == null) {
|
if (s == null) {
|
||||||
@ -157,7 +157,7 @@ public class ShardIndexSearcher extends IndexSearcher {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public CollectionStatistics collectionStatistics(String field) {
|
public CollectionStatistics collectionStatistics(String field) throws IOException {
|
||||||
// TODO: we could compute this on init and cache,
|
// TODO: we could compute this on init and cache,
|
||||||
// since we are re-inited whenever any nodes have a
|
// since we are re-inited whenever any nodes have a
|
||||||
// new reader
|
// new reader
|
||||||
@ -204,7 +204,7 @@ public class ShardIndexSearcher extends IndexSearcher {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private CollectionStatistics computeNodeCollectionStatistics(FieldAndShar fieldAndShard) {
|
private CollectionStatistics computeNodeCollectionStatistics(FieldAndShar fieldAndShard) throws IOException {
|
||||||
var searcher = searchers[fieldAndShard.nodeID];
|
var searcher = searchers[fieldAndShard.nodeID];
|
||||||
return searcher.collectionStatistics(fieldAndShard.field);
|
return searcher.collectionStatistics(fieldAndShard.field);
|
||||||
}
|
}
|
||||||
|
@ -13,7 +13,7 @@ import org.jetbrains.annotations.Nullable;
 public class BooleanListJsonAdapter extends JsonAdapter<BooleanList> {
 
   @Override
-  public @NotNull BooleanList fromJson(@NotNull JsonReader reader) {
+  public @NotNull BooleanList fromJson(@NotNull JsonReader reader) throws IOException {
     reader.beginArray();
     BooleanArrayList modifiableOutput = new BooleanArrayList();
     while (reader.hasNext()) {
@ -24,7 +24,7 @@ public class BooleanListJsonAdapter extends JsonAdapter<BooleanList> {
   }
 
   @Override
-  public void toJson(@NotNull JsonWriter writer, @Nullable BooleanList value) {
+  public void toJson(@NotNull JsonWriter writer, @Nullable BooleanList value) throws IOException {
     if (value == null) {
       writer.nullValue();
       return;
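The same throws IOException change is applied to each of the fastutil list adapters below. A hedged usage sketch for the boolean variant, assuming the project's BooleanListJsonAdapter is on the classpath; the Moshi registration shown here is an assumption for illustration and is not part of this diff:

    import com.squareup.moshi.JsonAdapter;
    import com.squareup.moshi.Moshi;
    import it.unimi.dsi.fastutil.booleans.BooleanArrayList;
    import it.unimi.dsi.fastutil.booleans.BooleanList;
    import java.io.IOException;

    public class BooleanListAdapterUsage {
        public static void main(String[] args) throws IOException {
            Moshi moshi = new Moshi.Builder()
                    .add(BooleanList.class, new BooleanListJsonAdapter())
                    .build();
            JsonAdapter<BooleanList> adapter = moshi.adapter(BooleanList.class);
            String json = adapter.toJson(new BooleanArrayList(new boolean[] {true, false, true}));
            BooleanList parsed = adapter.fromJson(json); // fromJson(String) declares IOException
            System.out.println(json + " -> " + parsed);
        }
    }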
@ -11,7 +11,7 @@ import org.jetbrains.annotations.Nullable;
 public class ByteListJsonAdapter extends JsonAdapter<Buf> {
 
   @Override
-  public @NotNull Buf fromJson(@NotNull JsonReader reader) {
+  public @NotNull Buf fromJson(@NotNull JsonReader reader) throws IOException {
     reader.beginArray();
     var modifiableOutput = Buf.create();
     while (reader.hasNext()) {
@ -22,7 +22,7 @@ public class ByteListJsonAdapter extends JsonAdapter<Buf> {
   }
 
   @Override
-  public void toJson(@NotNull JsonWriter writer, @Nullable Buf value) {
+  public void toJson(@NotNull JsonWriter writer, @Nullable Buf value) throws IOException {
     if (value == null) {
       writer.nullValue();
       return;
@ -13,7 +13,7 @@ import org.jetbrains.annotations.Nullable;
 public class CharListJsonAdapter extends JsonAdapter<CharList> {
 
   @Override
-  public @NotNull CharList fromJson(@NotNull JsonReader reader) {
+  public @NotNull CharList fromJson(@NotNull JsonReader reader) throws IOException {
     reader.beginArray();
     CharArrayList modifiableOutput = new CharArrayList();
     while (reader.hasNext()) {
@ -24,7 +24,7 @@ public class CharListJsonAdapter extends JsonAdapter<CharList> {
   }
 
   @Override
-  public void toJson(@NotNull JsonWriter writer, @Nullable CharList value) {
+  public void toJson(@NotNull JsonWriter writer, @Nullable CharList value) throws IOException {
     if (value == null) {
       writer.nullValue();
       return;
@ -13,7 +13,7 @@ import org.jetbrains.annotations.Nullable;
 public class IntListJsonAdapter extends JsonAdapter<IntList> {
 
   @Override
-  public @NotNull IntList fromJson(@NotNull JsonReader reader) {
+  public @NotNull IntList fromJson(@NotNull JsonReader reader) throws IOException {
     reader.beginArray();
     IntArrayList modifiableOutput = new IntArrayList();
     while (reader.hasNext()) {
@ -24,7 +24,7 @@ public class IntListJsonAdapter extends JsonAdapter<IntList> {
   }
 
   @Override
-  public void toJson(@NotNull JsonWriter writer, @Nullable IntList value) {
+  public void toJson(@NotNull JsonWriter writer, @Nullable IntList value) throws IOException {
     if (value == null) {
       writer.nullValue();
       return;
@ -13,7 +13,7 @@ import org.jetbrains.annotations.Nullable;
 public class LongListJsonAdapter extends JsonAdapter<LongList> {
 
   @Override
-  public @NotNull LongList fromJson(@NotNull JsonReader reader) {
+  public @NotNull LongList fromJson(@NotNull JsonReader reader) throws IOException {
     reader.beginArray();
     LongArrayList modifiableOutput = new LongArrayList();
     while (reader.hasNext()) {
@ -24,7 +24,7 @@ public class LongListJsonAdapter extends JsonAdapter<LongList> {
   }
 
   @Override
-  public void toJson(@NotNull JsonWriter writer, @Nullable LongList value) {
+  public void toJson(@NotNull JsonWriter writer, @Nullable LongList value) throws IOException {
     if (value == null) {
       writer.nullValue();
       return;
@ -132,7 +132,7 @@ public abstract class MoshiPolymorphic<OBJ> {
 
   @Nullable
   @Override
-  public T fromJson(@NotNull JsonReader jsonReader) {
+  public T fromJson(@NotNull JsonReader jsonReader) throws IOException {
     String type = null;
 
     jsonReader.beginObject();
@ -165,7 +165,7 @@ public abstract class MoshiPolymorphic<OBJ> {
   }
 
   @Override
-  public void toJson(@NotNull JsonWriter jsonWriter, @Nullable T t) {
+  public void toJson(@NotNull JsonWriter jsonWriter, @Nullable T t) throws IOException {
     if (t == null) {
       jsonWriter.nullValue();
     } else {
@ -259,7 +259,7 @@ public abstract class MoshiPolymorphic<OBJ> {
 
   @Nullable
   @Override
-  public T fromJson(@NotNull JsonReader jsonReader) {
+  public T fromJson(@NotNull JsonReader jsonReader) throws IOException {
     try {
       Object instance;
       Object[] fields;
@ -304,7 +304,7 @@ public abstract class MoshiPolymorphic<OBJ> {
   }
 
   @Override
-  public void toJson(@NotNull JsonWriter jsonWriter, @Nullable T t) {
+  public void toJson(@NotNull JsonWriter jsonWriter, @Nullable T t) throws IOException {
     if (t == null) {
       jsonWriter.nullValue();
     } else {
@ -339,7 +339,7 @@ public abstract class MoshiPolymorphic<OBJ> {
 
   @Nullable
   @Override
-  public List<T> fromJson(@NotNull JsonReader jsonReader) {
+  public List<T> fromJson(@NotNull JsonReader jsonReader) throws IOException {
     jsonReader.beginArray();
     var result = new ArrayList<T>();
     while (jsonReader.hasNext()) {
@ -350,7 +350,7 @@ public abstract class MoshiPolymorphic<OBJ> {
   }
 
   @Override
-  public void toJson(@NotNull JsonWriter jsonWriter, @Nullable List<T> ts) {
+  public void toJson(@NotNull JsonWriter jsonWriter, @Nullable List<T> ts) throws IOException {
     if (ts == null) {
       jsonWriter.nullValue();
     } else {
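MoshiPolymorphic's fromJson begins by walking the JSON object and picking out a type discriminator before dispatching to the concrete adapter. A self-contained sketch of that discriminator-reading step, assuming the property is literally named "type" as the local variable above suggests (the helper class is hypothetical):

    import com.squareup.moshi.JsonReader;
    import java.io.IOException;

    final class TypeFieldReader {

        // Reads one JSON object and returns the value of its "type" property, skipping everything else.
        static String readTypeField(JsonReader reader) throws IOException {
            String type = null;
            reader.beginObject();
            while (reader.hasNext()) {
                if ("type".equals(reader.nextName())) {
                    type = reader.nextString();
                } else {
                    reader.skipValue();
                }
            }
            reader.endObject();
            return type;
        }
    }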
@ -13,7 +13,7 @@ import org.jetbrains.annotations.Nullable;
 public class ShortListJsonAdapter extends JsonAdapter<ShortList> {
 
   @Override
-  public @NotNull ShortList fromJson(@NotNull JsonReader reader) {
+  public @NotNull ShortList fromJson(@NotNull JsonReader reader) throws IOException {
     reader.beginArray();
     ShortArrayList modifiableOutput = new ShortArrayList();
     while (reader.hasNext()) {
@ -24,7 +24,7 @@ public class ShortListJsonAdapter extends JsonAdapter<ShortList> {
   }
 
   @Override
-  public void toJson(@NotNull JsonWriter writer, @Nullable ShortList value) {
+  public void toJson(@NotNull JsonWriter writer, @Nullable ShortList value) throws IOException {
     if (value == null) {
       writer.nullValue();
       return;
@ -6,13 +6,13 @@ import java.io.IOException;
 import java.nio.charset.StandardCharsets;
 
 public class UTFUtils {
-  public static void writeUTF(DataOutput out, String utf) {
+  public static void writeUTF(DataOutput out, String utf) throws IOException {
     byte[] bytes = utf.getBytes(StandardCharsets.UTF_8);
     out.writeInt(bytes.length);
     out.write(bytes);
   }
 
-  public static String readUTF(DataInput in) {
+  public static String readUTF(DataInput in) throws IOException {
     int len = in.readInt();
     byte[] data = new byte[len];
     in.readFully(data, 0, len);
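writeUTF emits an int length prefix followed by the raw UTF-8 bytes, and readUTF reads the prefix and then the payload. Assuming readUTF finishes by decoding those bytes as UTF-8 (its tail is cut off by the hunk boundary), a round trip with plain java.io streams looks like this sketch:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    public class UTFUtilsRoundTrip {
        public static void main(String[] args) throws IOException {
            ByteArrayOutputStream buffer = new ByteArrayOutputStream();
            try (DataOutputStream out = new DataOutputStream(buffer)) {
                UTFUtils.writeUTF(out, "caffè"); // int length prefix + raw UTF-8 bytes
            }
            try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(buffer.toByteArray()))) {
                System.out.println(UTFUtils.readUTF(in)); // expected to print "caffè"
            }
        }
    }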
@ -26,148 +26,148 @@ public class DataInputOutputImpl implements DataInputOutput {
   }
 
   @Override
-  public void readFully(byte @NotNull [] bytes) {
+  public void readFully(byte @NotNull [] bytes) throws IOException {
     in.readFully(bytes);
   }
 
   @Override
-  public void readFully(byte @NotNull [] bytes, int i, int i1) {
+  public void readFully(byte @NotNull [] bytes, int i, int i1) throws IOException {
     in.readFully(bytes, i, i1);
   }
 
   @Override
-  public int skipBytes(int i) {
+  public int skipBytes(int i) throws IOException {
     return in.skipBytes(i);
   }
 
   @Override
-  public boolean readBoolean() {
+  public boolean readBoolean() throws IOException {
     return in.readBoolean();
   }
 
   @Override
-  public byte readByte() {
+  public byte readByte() throws IOException {
     return in.readByte();
   }
 
   @Override
-  public int readUnsignedByte() {
+  public int readUnsignedByte() throws IOException {
     return in.readUnsignedByte();
   }
 
   @Override
-  public short readShort() {
+  public short readShort() throws IOException {
     return in.readShort();
   }
 
   @Override
-  public int readUnsignedShort() {
+  public int readUnsignedShort() throws IOException {
     return in.readUnsignedShort();
   }
 
   @Override
-  public char readChar() {
+  public char readChar() throws IOException {
     return in.readChar();
   }
 
   @Override
-  public int readInt() {
+  public int readInt() throws IOException {
     return in.readInt();
   }
 
   @Override
-  public long readLong() {
+  public long readLong() throws IOException {
     return in.readLong();
   }
 
   @Override
-  public float readFloat() {
+  public float readFloat() throws IOException {
     return in.readFloat();
   }
 
   @Override
-  public double readDouble() {
+  public double readDouble() throws IOException {
     return in.readDouble();
   }
 
   @Override
-  public String readLine() {
+  public String readLine() throws IOException {
     return in.readLine();
   }
 
   @NotNull
   @Override
-  public String readUTF() {
+  public String readUTF() throws IOException {
     return in.readUTF();
   }
 
   @Override
-  public void write(int i) {
+  public void write(int i) throws IOException {
     out.write(i);
   }
 
   @Override
-  public void write(byte @NotNull [] bytes) {
+  public void write(byte @NotNull [] bytes) throws IOException {
     out.write(bytes);
   }
 
   @Override
-  public void write(byte @NotNull [] bytes, int i, int i1) {
+  public void write(byte @NotNull [] bytes, int i, int i1) throws IOException {
     out.write(bytes, i, i1);
   }
 
   @Override
-  public void writeBoolean(boolean b) {
+  public void writeBoolean(boolean b) throws IOException {
     out.writeBoolean(b);
   }
 
   @Override
-  public void writeByte(int i) {
+  public void writeByte(int i) throws IOException {
     out.writeByte(i);
   }
 
   @Override
-  public void writeShort(int i) {
+  public void writeShort(int i) throws IOException {
     out.writeShort(i);
   }
 
   @Override
-  public void writeChar(int i) {
+  public void writeChar(int i) throws IOException {
     out.writeChar(i);
   }
 
   @Override
-  public void writeInt(int i) {
+  public void writeInt(int i) throws IOException {
     out.writeInt(i);
   }
 
   @Override
-  public void writeLong(long l) {
+  public void writeLong(long l) throws IOException {
     out.writeLong(l);
   }
 
   @Override
-  public void writeFloat(float v) {
+  public void writeFloat(float v) throws IOException {
     out.writeFloat(v);
   }
 
   @Override
-  public void writeDouble(double v) {
+  public void writeDouble(double v) throws IOException {
     out.writeDouble(v);
   }
 
   @Override
-  public void writeBytes(@NotNull String s) {
+  public void writeBytes(@NotNull String s) throws IOException {
     out.writeBytes(s);
   }
 
   @Override
-  public void writeChars(@NotNull String s) {
+  public void writeChars(@NotNull String s) throws IOException {
     out.writeChars(s);
   }
 
   @Override
-  public void writeUTF(@NotNull String s) {
+  public void writeUTF(@NotNull String s) throws IOException {
     out.writeUTF(s);
   }
 }
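Every override in DataInputOutputImpl above, and in DataInputOutputStream below, simply delegates to a wrapped java.io.DataInput or DataOutput, whose methods all declare IOException; a pass-through wrapper therefore has to re-declare the exception on each delegating method. A stripped-down sketch of that delegation pattern (the DelegatingIo class is illustrative only):

    import java.io.DataInput;
    import java.io.DataOutput;
    import java.io.IOException;

    final class DelegatingIo {
        private final DataInput in;
        private final DataOutput out;

        DelegatingIo(DataInput in, DataOutput out) {
            this.in = in;
            this.out = out;
        }

        // DataInput.readInt() declares IOException, so the delegating method must re-declare it.
        int readInt() throws IOException {
            return in.readInt();
        }

        // Same for DataOutput.writeInt(int).
        void writeInt(int i) throws IOException {
            out.writeInt(i);
        }
    }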
@ -25,79 +25,79 @@ public class DataInputOutputStream extends DataOutputStream implements DataInput
   }
 
   @Override
-  public void readFully(byte @NotNull [] bytes) {
+  public void readFully(byte @NotNull [] bytes) throws IOException {
     in.readFully(bytes);
   }
 
   @Override
-  public void readFully(byte @NotNull [] bytes, int i, int i1) {
+  public void readFully(byte @NotNull [] bytes, int i, int i1) throws IOException {
     in.readFully(bytes, i, i1);
   }
 
   @Override
-  public int skipBytes(int i) {
+  public int skipBytes(int i) throws IOException {
     return in.skipBytes(i);
   }
 
   @Override
-  public boolean readBoolean() {
+  public boolean readBoolean() throws IOException {
     return in.readBoolean();
   }
 
   @Override
-  public byte readByte() {
+  public byte readByte() throws IOException {
     return in.readByte();
   }
 
   @Override
-  public int readUnsignedByte() {
+  public int readUnsignedByte() throws IOException {
     return in.readUnsignedByte();
   }
 
   @Override
-  public short readShort() {
+  public short readShort() throws IOException {
     return in.readShort();
   }
 
   @Override
-  public int readUnsignedShort() {
+  public int readUnsignedShort() throws IOException {
     return in.readUnsignedShort();
   }
 
   @Override
-  public char readChar() {
+  public char readChar() throws IOException {
     return in.readChar();
   }
 
   @Override
-  public int readInt() {
+  public int readInt() throws IOException {
     return in.readInt();
   }
 
   @Override
-  public long readLong() {
+  public long readLong() throws IOException {
     return in.readLong();
   }
 
   @Override
-  public float readFloat() {
+  public float readFloat() throws IOException {
     return in.readFloat();
   }
 
   @Override
-  public double readDouble() {
+  public double readDouble() throws IOException {
     return in.readDouble();
   }
 
   @Deprecated
   @Override
-  public String readLine() {
+  public String readLine() throws IOException {
     return in.readLine();
   }
 
   @NotNull
   @Override
-  public String readUTF() {
+  public String readUTF() throws IOException {
     return in.readUTF();
   }
 }