Fix compilation errors

This commit is contained in:
parent a9857f7553
commit cd15f8d23d

Changed files:
src/main/java
	it/cavallium/dbengine
		database
			LLDictionary.java
			LLKeyValueDatabase.java
			LLMultiDatabaseConnection.java
			disk
				CachedIndexSearcherManager.java
				LLLocalDictionary.java
				LLLocalKeyValueDatabase.java
				SimpleIndexSearcherManager.java
			remote
				BufSerializer.java
				CompressionSerializer.java
				DurationSerializer.java
				LLSnapshotSerializer.java
				PathSerializer.java
				String2FieldAnalyzerMapSerializer.java
				String2FieldSimilarityMapSerializer.java
				StringEntrySerializer.java
				StringMapSerializer.java
				TextFieldsAnalyzerSerializer.java
				TextFieldsSimilaritySerializer.java
				UpdateReturnModeSerializer.java
			serialization
		lucene
			AlwaysDirectIOFSDirectory.java
			CheckIndexOutput.java
			CheckOutputDirectory.java
			LuceneUtils.java
			RandomFieldComparator.java
			collector
				DecimalBucketMultiCollectorManager.java
				FacetsCollector.java
				FastFacetsCollectorManager.java
				FastRandomSamplingFacetsCollector.java
				ScoringShardsCollectorMultiManager.java
				TopDocsCollectorMultiManager.java
				TotalHitCountCollectorManager.java
			directory
			mlt
			searcher
		utils
	org/warp/commonutils/stream
@@ -54,7 +54,7 @@ public interface LLDictionary extends LLKeyValueDatabaseStructure {
 	Stream<Buf> getRangeKeys(@Nullable LLSnapshot snapshot,
 			LLRange range,
 			boolean reverse,
-			boolean smallRange) throws RocksDBException, IOException;
+			boolean smallRange);
 
 	Stream<List<Buf>> getRangeKeysGrouped(@Nullable LLSnapshot snapshot,
 			LLRange range,
@@ -14,8 +14,7 @@ import org.rocksdb.RocksDBException;
 public interface LLKeyValueDatabase extends LLSnapshottable, LLKeyValueDatabaseStructure, DatabaseProperties,
 		IBackuppable, DatabaseOperations {
 
-	LLSingleton getSingleton(byte[] singletonListColumnName, byte[] name, byte @Nullable [] defaultValue)
-			throws IOException;
+	LLSingleton getSingleton(byte[] singletonListColumnName, byte[] name, byte @Nullable [] defaultValue);
 
 	LLDictionary getDictionary(byte[] columnName, UpdateMode updateMode);
 
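With the checked exception gone from getSingleton, call sites shrink accordingly. A caller-side sketch of the effect, using hypothetical column and singleton names (not taken from this commit):

    // Before this commit the call had to be wrapped in try/catch (IOException);
    // now failures surface as unchecked exceptions instead.
    byte[] column = "singletons".getBytes(StandardCharsets.UTF_8); // assumed name
    byte[] name = "doc-counter".getBytes(StandardCharsets.UTF_8);  // assumed name
    LLSingleton counter = database.getSingleton(column, name, null);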
@@ -145,13 +145,8 @@ public class LLMultiDatabaseConnection implements LLDatabaseConnection {
 		var indices = connectionToShardMap.entrySet().stream().flatMap(entry -> {
 			var connectionIndexStructure = indexStructure.setActiveShards(new IntArrayList(entry.getValue()));
 
-			LLLuceneIndex connIndex;
-			try {
-				connIndex = entry.getKey().getLuceneIndex(clusterName, connectionIndexStructure,
-						indicizerAnalyzers, indicizerSimilarities, luceneOptions, luceneHacks);
-			} catch (IOException e) {
-				throw new CompletionException(e);
-			}
+			LLLuceneIndex connIndex = entry.getKey().getLuceneIndex(clusterName, connectionIndexStructure,
+					indicizerAnalyzers, indicizerSimilarities, luceneOptions, luceneHacks);
 
 			return entry.getValue().intStream().mapToObj(shard -> new ShardToIndex(shard, connIndex));
 		}).toList();
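The deleted block wrapped the IOException in a CompletionException, the conventional wrapper inside CompletableFuture pipelines; once getLuceneIndex stopped declaring the checked exception, the wrapper became dead code. A standalone sketch (not from this repository) of why that wrapper exists in asynchronous code:

    CompletableFuture<String> f = CompletableFuture.supplyAsync(() -> {
        try {
            return riskyIo(); // hypothetical method declaring IOException
        } catch (IOException e) {
            // Lambdas passed to supplyAsync cannot throw checked exceptions,
            // so the convention is to rethrow wrapped in CompletionException.
            throw new CompletionException(e);
        }
    });
    // f.join() rethrows the CompletionException with the IOException as its cause.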
@@ -63,7 +63,11 @@ public class CachedIndexSearcherManager extends SimpleResource implements IndexS
 		this.similarity = similarity;
 		this.queryRefreshDebounceTime = queryRefreshDebounceTime;
 
-		this.searcherManager = new SearcherManager(indexWriter, applyAllDeletes, writeAllDeletes, SEARCHER_FACTORY);
+		try {
+			this.searcherManager = new SearcherManager(indexWriter, applyAllDeletes, writeAllDeletes, SEARCHER_FACTORY);
+		} catch (IOException e) {
+			throw new DBException(e);
+		}
 
 		refreshSubscription = luceneHeavyTasksScheduler.scheduleAtFixedRate(() -> {
 			try {
@@ -98,7 +102,11 @@ public class CachedIndexSearcherManager extends SimpleResource implements IndexS
 		IndexSearcher indexSearcher;
 		boolean fromSnapshot;
 		if (snapshotsManager == null || snapshot == null) {
-			indexSearcher = searcherManager.acquire();
+			try {
+				indexSearcher = searcherManager.acquire();
+			} catch (IOException ex) {
+				throw new DBException(ex);
+			}
 			fromSnapshot = false;
 		} else {
 			indexSearcher = snapshotsManager.resolveSnapshot(snapshot).getIndexSearcher(SEARCH_EXECUTOR);
@@ -131,6 +139,8 @@ public class CachedIndexSearcherManager extends SimpleResource implements IndexS
 			searcherManager.maybeRefreshBlocking();
 		} catch (AlreadyClosedException ignored) {
 
+		} catch (IOException e) {
+			throw new DBException(e);
 		} finally {
 			activeRefreshes.decrementAndGet();
 		}
@@ -143,6 +153,8 @@ public class CachedIndexSearcherManager extends SimpleResource implements IndexS
 			searcherManager.maybeRefresh();
 		} catch (AlreadyClosedException ignored) {
 
+		} catch (IOException e) {
+			throw new DBException(e);
 		} finally {
 			activeRefreshes.decrementAndGet();
 		}
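Note on DBException: the new catch blocks in this file, and throughout the commit, rethrow as it.cavallium.dbengine.utils.DBException. Its definition is not part of this diff; the pattern only requires it to be unchecked. An assumed minimal shape, labeled as a guess:

    // Assumed shape of DBException; the real class may differ.
    // It must extend RuntimeException so that constructors, overridden
    // methods, and scheduler lambdas can throw it without a `throws` clause.
    public class DBException extends RuntimeException {
        public DBException(Throwable cause) {
            super(cause);
        }
        public DBException(String message, Throwable cause) {
            super(message, cause);
        }
    }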
@@ -478,13 +478,7 @@ public class LLLocalDictionary implements LLDictionary {
 
 	@Override
 	public Stream<OptionalBuf> getMulti(@Nullable LLSnapshot snapshot, Stream<Buf> keys) {
-		return keys.map(key -> {
-			try {
-				return OptionalBuf.ofNullable(getSync(snapshot, key));
-			} catch (IOException e) {
-				throw new CompletionException(e);
-			}
-		});
+		return keys.map(key -> OptionalBuf.ofNullable(getSync(snapshot, key)));
 	}
 
 	@Override
@@ -854,12 +848,8 @@ public class LLLocalDictionary implements LLDictionary {
 		});
 
 		entries.forEach(entry -> {
-			try {
-				if (entry.getKey() != null && entry.getValue() != null) {
-					this.putInternal(entry.getKey(), entry.getValue());
-				}
-			} catch (IOException ex) {
-				throw new CompletionException(new DBException("Failed to write range", ex));
+			if (entry.getKey() != null && entry.getValue() != null) {
+				this.putInternal(entry.getKey(), entry.getValue());
 			}
 		});
 	}
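Both LLLocalDictionary hunks fall out of the same constraint: the java.util.stream and java.util.function interfaces declare no checked exceptions, so a lambda passed to Stream.map or forEach cannot throw IOException. Once getSync and putInternal stopped declaring it, the try/catch scaffolding inside the lambdas could be removed. A contrast sketch:

    // Does not compile if getSync declares `throws IOException`:
    // the Function passed to map() may not throw checked exceptions.
    Stream<OptionalBuf> values = keys.map(key -> OptionalBuf.ofNullable(getSync(snapshot, key)));
    // With the checked exception gone from getSync, the one-liner above is legal.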
@@ -861,199 +861,202 @@ public class LLLocalKeyValueDatabase extends Backuppable implements LLKeyValueDa
 		}
 	}
 
-	private static OptionsWithCache openRocksDb(@Nullable Path path, DatabaseOptions databaseOptions, RocksDBRefs refs)
-			throws IOException {
-		// Get databases directory path
-		Path databasesDirPath;
-		if (path != null) {
-			databasesDirPath = path.toAbsolutePath().getParent();
-			// Create base directories
-			if (Files.notExists(databasesDirPath)) {
-				Files.createDirectories(databasesDirPath);
-			}
-		} else {
-			databasesDirPath = null;
-		}
-		//noinspection ConstantConditions
-		if (databaseOptions.persistentCaches() != null) {
-			for (var persistentCache : databaseOptions.persistentCaches()) {
-				var persistentCachePath = Paths.get(persistentCache.path());
-				if (Files.notExists(persistentCachePath)) {
-					Files.createDirectories(persistentCachePath);
-					if (!Files.isDirectory(persistentCachePath)) {
-						throw new IllegalArgumentException(
-								"Persistent cache \"" + persistentCache.id() + "\" path \"" + persistentCachePath
-										+ "\" is not a directory!");
-					}
-				}
-			}
-		}
-
-		// the Options class contains a set of configurable DB options
-		// that determines the behaviour of the database.
-		var options = new DBOptions();
-		refs.track(options);
-		options.setEnablePipelinedWrite(true);
-		var maxSubCompactions = Integer.parseInt(System.getProperty("it.cavallium.dbengine.compactions.max.sub", "-1"));
-		if (maxSubCompactions >= 0) {
-			options.setMaxSubcompactions(maxSubCompactions);
-		}
-		var customWriteRate = Long.parseLong(System.getProperty("it.cavallium.dbengine.write.delayedrate", "-1"));
-		if (customWriteRate >= 0) {
-			options.setDelayedWriteRate(customWriteRate);
-		}
-		if (databaseOptions.logPath().isPresent()) {
-			options.setDbLogDir(databaseOptions.logPath().get());
-		}
-		if (databaseOptions.walPath().isPresent()) {
-			options.setWalDir(databaseOptions.walPath().get());
-		}
-		options.setCreateIfMissing(true);
-		options.setSkipStatsUpdateOnDbOpen(true);
-		options.setCreateMissingColumnFamilies(true);
-		options.setInfoLogLevel(InfoLogLevel.WARN_LEVEL);
-		options.setAvoidFlushDuringShutdown(false); // Flush all WALs during shutdown
-		options.setAvoidFlushDuringRecovery(true); // Flush all WALs during startup
-		options.setWalRecoveryMode(databaseOptions.absoluteConsistency()
-				? WALRecoveryMode.AbsoluteConsistency
-				: WALRecoveryMode.PointInTimeRecovery); // Crash if the WALs are corrupted.Default: TolerateCorruptedTailRecords
-		options.setDeleteObsoleteFilesPeriodMicros(20 * 1000000); // 20 seconds
-		options.setKeepLogFileNum(10);
-
-		requireNonNull(databasesDirPath);
-		requireNonNull(path.getFileName());
-		List<DbPath> paths = convertPaths(databasesDirPath, path.getFileName(), databaseOptions.volumes())
-				.stream()
-				.map(p -> new DbPath(p.path, p.targetSize))
-				.toList();
-		options.setDbPaths(paths);
-		options.setMaxOpenFiles(databaseOptions.maxOpenFiles().orElse(-1));
-		if (databaseOptions.spinning()) {
-			// https://nightlies.apache.org/flink/flink-docs-release-1.3/api/java/org/apache/flink/contrib/streaming/state/PredefinedOptions.html
-			options.setUseFsync(false);
-		}
-
-		long writeBufferManagerSize;
-		if (databaseOptions.writeBufferManager().isPresent()) {
-			writeBufferManagerSize = databaseOptions.writeBufferManager().get();
-		} else {
-			writeBufferManagerSize = 0;
-		}
-
-		if (isDisableAutoCompactions()) {
-			options.setMaxBackgroundCompactions(0);
-			options.setMaxBackgroundJobs(0);
-		} else {
-			var backgroundJobs = Integer.parseInt(System.getProperty("it.cavallium.dbengine.jobs.background.num", "-1"));
-			if (backgroundJobs >= 0) {
-				options.setMaxBackgroundJobs(backgroundJobs);
-			}
-		}
-
-		Cache blockCache;
-		//todo: compressed cache will be replaced with SecondaryCache in the future
-		Cache compressedCache;
-		final boolean useDirectIO = databaseOptions.useDirectIO();
-		final boolean allowMmapReads = !useDirectIO && databaseOptions.allowMemoryMapping();
-		final boolean allowMmapWrites = !useDirectIO && (databaseOptions.allowMemoryMapping()
-				|| parseBoolean(System.getProperty("it.cavallium.dbengine.mmapwrites.enable", "false")));
-		if (databaseOptions.lowMemory()) {
-			// LOW MEMORY
-			options
-					.setBytesPerSync(0) // default
-					.setWalBytesPerSync(0) // default
-					.setIncreaseParallelism(1)
-					.setDbWriteBufferSize(8 * SizeUnit.MB)
-					.setWalTtlSeconds(60)
-					.setMaxTotalWalSize(10 * SizeUnit.GB)
-			;
-			blockCache = CACHE_FACTORY.newCache(writeBufferManagerSize + databaseOptions.blockCache().orElse(8L * SizeUnit.MB));
-			refs.track(blockCache);
-			if (databaseOptions.compressedBlockCache().isPresent()) {
-				compressedCache = CACHE_FACTORY.newCache(databaseOptions.compressedBlockCache().get());
-				refs.track(compressedCache);
-			} else {
-				compressedCache = null;
-			}
-
-			if (useDirectIO) {
-				options
-						// Option to enable readahead in compaction
-						// If not set, it will be set to 2MB internally
-						.setCompactionReadaheadSize(2 * SizeUnit.MB) // recommend at least 2MB
-						// Option to tune write buffer for direct writes
-						.setWritableFileMaxBufferSize(SizeUnit.MB)
-				;
-			}
-			if (databaseOptions.spinning()) {
-				options
-						// method documentation
-						.setCompactionReadaheadSize(4 * SizeUnit.MB)
-						// guessed
-						.setWritableFileMaxBufferSize(2 * SizeUnit.MB);
-			}
-		} else {
-			// HIGH MEMORY
-			options
-					//.setDbWriteBufferSize(64 * SizeUnit.MB)
-					.setBytesPerSync(64 * SizeUnit.MB)
-					.setWalBytesPerSync(64 * SizeUnit.MB)
-
-					.setWalTtlSeconds(60) // Auto
-					.setWalSizeLimitMB(0) // Auto
-					.setMaxTotalWalSize(10 * SizeUnit.GB)
-			;
-			blockCache = CACHE_FACTORY.newCache(writeBufferManagerSize + databaseOptions.blockCache().orElse( 512 * SizeUnit.MB));
-			refs.track(blockCache);
-			if (databaseOptions.compressedBlockCache().isPresent()) {
-				compressedCache = CACHE_FACTORY.newCache(databaseOptions.compressedBlockCache().get());
-				refs.track(compressedCache);
-			} else {
-				compressedCache = null;
-			}
-
-			if (useDirectIO) {
-				options
-						// Option to enable readahead in compaction
-						// If not set, it will be set to 2MB internally
-						.setCompactionReadaheadSize(4 * SizeUnit.MB) // recommend at least 2MB
-						// Option to tune write buffer for direct writes
-						.setWritableFileMaxBufferSize(2 * SizeUnit.MB)
-				;
-			}
-			if (databaseOptions.spinning()) {
-				options
-						// method documentation
-						.setCompactionReadaheadSize(16 * SizeUnit.MB)
-						// guessed
-						.setWritableFileMaxBufferSize(8 * SizeUnit.MB);
-			}
-			options.setIncreaseParallelism(Runtime.getRuntime().availableProcessors());
-		}
-
-		if (databaseOptions.writeBufferManager().isPresent()) {
-			var writeBufferManager = new WriteBufferManager(writeBufferManagerSize, blockCache, false);
-			refs.track(writeBufferManager);
-			options.setWriteBufferManager(writeBufferManager);
-		}
-
-		if (useDirectIO) {
-			options
-					.setAllowMmapReads(false)
-					.setAllowMmapWrites(false)
-					.setUseDirectReads(true)
-			;
-		} else {
-			options
-					.setAllowMmapReads(allowMmapReads)
-					.setAllowMmapWrites(allowMmapWrites);
-		}
-
-		if (useDirectIO || !allowMmapWrites) {
-			options.setUseDirectIoForFlushAndCompaction(true);
-		}
-
-		return new OptionsWithCache(options, blockCache, compressedCache);
-	}
+	private static OptionsWithCache openRocksDb(@Nullable Path path, DatabaseOptions databaseOptions, RocksDBRefs refs) {
+		try {
+			// Get databases directory path
+			Path databasesDirPath;
+			if (path != null) {
+				databasesDirPath = path.toAbsolutePath().getParent();
+				// Create base directories
+				if (Files.notExists(databasesDirPath)) {
+					Files.createDirectories(databasesDirPath);
+				}
+			} else {
+				databasesDirPath = null;
+			}
+			//noinspection ConstantConditions
+			if (databaseOptions.persistentCaches() != null) {
+				for (var persistentCache : databaseOptions.persistentCaches()) {
+					var persistentCachePath = Paths.get(persistentCache.path());
+					if (Files.notExists(persistentCachePath)) {
+						Files.createDirectories(persistentCachePath);
+						if (!Files.isDirectory(persistentCachePath)) {
+							throw new IllegalArgumentException(
+									"Persistent cache \"" + persistentCache.id() + "\" path \"" + persistentCachePath
+											+ "\" is not a directory!");
+						}
+					}
+				}
+			}
+
+			// the Options class contains a set of configurable DB options
+			// that determines the behaviour of the database.
+			var options = new DBOptions();
+			refs.track(options);
+			options.setEnablePipelinedWrite(true);
+			var maxSubCompactions = Integer.parseInt(System.getProperty("it.cavallium.dbengine.compactions.max.sub", "-1"));
+			if (maxSubCompactions >= 0) {
+				options.setMaxSubcompactions(maxSubCompactions);
+			}
+			var customWriteRate = Long.parseLong(System.getProperty("it.cavallium.dbengine.write.delayedrate", "-1"));
+			if (customWriteRate >= 0) {
+				options.setDelayedWriteRate(customWriteRate);
+			}
+			if (databaseOptions.logPath().isPresent()) {
+				options.setDbLogDir(databaseOptions.logPath().get());
+			}
+			if (databaseOptions.walPath().isPresent()) {
+				options.setWalDir(databaseOptions.walPath().get());
+			}
+			options.setCreateIfMissing(true);
+			options.setSkipStatsUpdateOnDbOpen(true);
+			options.setCreateMissingColumnFamilies(true);
+			options.setInfoLogLevel(InfoLogLevel.WARN_LEVEL);
+			options.setAvoidFlushDuringShutdown(false); // Flush all WALs during shutdown
+			options.setAvoidFlushDuringRecovery(true); // Flush all WALs during startup
+			options.setWalRecoveryMode(databaseOptions.absoluteConsistency()
+					? WALRecoveryMode.AbsoluteConsistency
+					: WALRecoveryMode.PointInTimeRecovery); // Crash if the WALs are corrupted.Default: TolerateCorruptedTailRecords
+			options.setDeleteObsoleteFilesPeriodMicros(20 * 1000000); // 20 seconds
+			options.setKeepLogFileNum(10);
+
+			requireNonNull(databasesDirPath);
+			requireNonNull(path.getFileName());
+			List<DbPath> paths = convertPaths(databasesDirPath, path.getFileName(), databaseOptions.volumes())
+					.stream()
+					.map(p -> new DbPath(p.path, p.targetSize))
+					.toList();
+			options.setDbPaths(paths);
+			options.setMaxOpenFiles(databaseOptions.maxOpenFiles().orElse(-1));
+			if (databaseOptions.spinning()) {
+				// https://nightlies.apache.org/flink/flink-docs-release-1.3/api/java/org/apache/flink/contrib/streaming/state/PredefinedOptions.html
+				options.setUseFsync(false);
+			}
+
+			long writeBufferManagerSize;
+			if (databaseOptions.writeBufferManager().isPresent()) {
+				writeBufferManagerSize = databaseOptions.writeBufferManager().get();
+			} else {
+				writeBufferManagerSize = 0;
+			}
+
+			if (isDisableAutoCompactions()) {
+				options.setMaxBackgroundCompactions(0);
+				options.setMaxBackgroundJobs(0);
+			} else {
+				var backgroundJobs = Integer.parseInt(System.getProperty("it.cavallium.dbengine.jobs.background.num", "-1"));
+				if (backgroundJobs >= 0) {
+					options.setMaxBackgroundJobs(backgroundJobs);
+				}
+			}
+
+			Cache blockCache;
+			//todo: compressed cache will be replaced with SecondaryCache in the future
+			Cache compressedCache;
+			final boolean useDirectIO = databaseOptions.useDirectIO();
+			final boolean allowMmapReads = !useDirectIO && databaseOptions.allowMemoryMapping();
+			final boolean allowMmapWrites = !useDirectIO && (databaseOptions.allowMemoryMapping()
+					|| parseBoolean(System.getProperty("it.cavallium.dbengine.mmapwrites.enable", "false")));
+			if (databaseOptions.lowMemory()) {
+				// LOW MEMORY
+				options
+						.setBytesPerSync(0) // default
+						.setWalBytesPerSync(0) // default
+						.setIncreaseParallelism(1)
+						.setDbWriteBufferSize(8 * SizeUnit.MB)
+						.setWalTtlSeconds(60)
+						.setMaxTotalWalSize(10 * SizeUnit.GB)
+				;
+				blockCache = CACHE_FACTORY.newCache(writeBufferManagerSize + databaseOptions.blockCache().orElse(8L * SizeUnit.MB));
+				refs.track(blockCache);
+				if (databaseOptions.compressedBlockCache().isPresent()) {
+					compressedCache = CACHE_FACTORY.newCache(databaseOptions.compressedBlockCache().get());
+					refs.track(compressedCache);
+				} else {
+					compressedCache = null;
+				}
+
+				if (useDirectIO) {
+					options
+							// Option to enable readahead in compaction
+							// If not set, it will be set to 2MB internally
+							.setCompactionReadaheadSize(2 * SizeUnit.MB) // recommend at least 2MB
+							// Option to tune write buffer for direct writes
+							.setWritableFileMaxBufferSize(SizeUnit.MB)
+					;
+				}
+				if (databaseOptions.spinning()) {
+					options
+							// method documentation
+							.setCompactionReadaheadSize(4 * SizeUnit.MB)
+							// guessed
+							.setWritableFileMaxBufferSize(2 * SizeUnit.MB);
+				}
+			} else {
+				// HIGH MEMORY
+				options
+						//.setDbWriteBufferSize(64 * SizeUnit.MB)
+						.setBytesPerSync(64 * SizeUnit.MB)
+						.setWalBytesPerSync(64 * SizeUnit.MB)
+
+						.setWalTtlSeconds(60) // Auto
+						.setWalSizeLimitMB(0) // Auto
+						.setMaxTotalWalSize(10 * SizeUnit.GB)
+				;
+				blockCache = CACHE_FACTORY.newCache(writeBufferManagerSize + databaseOptions.blockCache().orElse( 512 * SizeUnit.MB));
+				refs.track(blockCache);
+				if (databaseOptions.compressedBlockCache().isPresent()) {
+					compressedCache = CACHE_FACTORY.newCache(databaseOptions.compressedBlockCache().get());
+					refs.track(compressedCache);
+				} else {
+					compressedCache = null;
+				}
+
+				if (useDirectIO) {
+					options
+							// Option to enable readahead in compaction
+							// If not set, it will be set to 2MB internally
+							.setCompactionReadaheadSize(4 * SizeUnit.MB) // recommend at least 2MB
+							// Option to tune write buffer for direct writes
+							.setWritableFileMaxBufferSize(2 * SizeUnit.MB)
+					;
+				}
+				if (databaseOptions.spinning()) {
+					options
+							// method documentation
+							.setCompactionReadaheadSize(16 * SizeUnit.MB)
+							// guessed
+							.setWritableFileMaxBufferSize(8 * SizeUnit.MB);
+				}
+				options.setIncreaseParallelism(Runtime.getRuntime().availableProcessors());
+			}
+
+			if (databaseOptions.writeBufferManager().isPresent()) {
+				var writeBufferManager = new WriteBufferManager(writeBufferManagerSize, blockCache, false);
+				refs.track(writeBufferManager);
+				options.setWriteBufferManager(writeBufferManager);
+			}
+
+			if (useDirectIO) {
+				options
+						.setAllowMmapReads(false)
+						.setAllowMmapWrites(false)
+						.setUseDirectReads(true)
+				;
+			} else {
+				options
+						.setAllowMmapReads(allowMmapReads)
+						.setAllowMmapWrites(allowMmapWrites);
+			}
+
+			if (useDirectIO || !allowMmapWrites) {
+				options.setUseDirectIoForFlushAndCompaction(true);
+			}
+
+			return new OptionsWithCache(options, blockCache, compressedCache);
+		} catch (IOException e) {
+			throw new DBException(e);
+		}
+	}
 
 	record DbPathRecord(Path path, long targetSize) {}
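Rather than wrapping each filesystem call separately, openRocksDb now converts IOException to DBException once, at the method boundary, leaving the tuning logic untouched apart from one level of indentation. A condensed sketch of the new shape (the elided body is the unchanged option setup above):

    private static OptionsWithCache openRocksDb(@Nullable Path path, DatabaseOptions databaseOptions, RocksDBRefs refs) {
        try {
            // ... directory creation, DBOptions tuning, cache setup (unchanged) ...
            return new OptionsWithCache(options, blockCache, compressedCache);
        } catch (IOException e) {
            // Single conversion point: every checked I/O failure in the body
            // becomes the engine-wide unchecked DBException.
            throw new DBException(e);
        }
    }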
@@ -1316,13 +1319,7 @@ public class LLLocalKeyValueDatabase extends Backuppable implements LLKeyValueDa
 
 	@Override
 	public Stream<ColumnProperty<Map<String, String>>> getMapColumnProperties(RocksDBMapProperty property) {
-		return getAllColumnFamilyHandles().keySet().stream().map(c -> {
-			try {
-				return new ColumnProperty<>(c.name(), property.getName(), this.getMapProperty(c, property));
-			} catch (IOException e) {
-				throw new CompletionException(e);
-			}
-		});
+		return getAllColumnFamilyHandles().keySet().stream().map(c -> new ColumnProperty<>(c.name(), property.getName(), this.getMapProperty(c, property)));
 	}
 
 	@Override
@@ -1347,11 +1344,7 @@ public class LLLocalKeyValueDatabase extends Backuppable implements LLKeyValueDa
 	@Override
 	public Stream<ColumnProperty<String>> getStringColumnProperties(RocksDBStringProperty property) {
 		return getAllColumnFamilyHandles().keySet().stream().map(c -> {
-			try {
-				return new ColumnProperty<>(c.name(), property.getName(), this.getStringProperty(c, property));
-			} catch (IOException e) {
-				throw new CompletionException(e);
-			}
+			return new ColumnProperty<>(c.name(), property.getName(), this.getStringProperty(c, property));
 		});
 	}
 
@@ -1377,11 +1370,7 @@ public class LLLocalKeyValueDatabase extends Backuppable implements LLKeyValueDa
 	@Override
 	public Stream<ColumnProperty<Long>> getLongColumnProperties(RocksDBLongProperty property) {
 		return getAllColumnFamilyHandles().keySet().stream().map(c -> {
-			try {
-				return new ColumnProperty<>(c.name(), property.getName(), this.getLongProperty(c, property));
-			} catch (IOException e) {
-				throw new CompletionException(e);
-			}
+			return new ColumnProperty<>(c.name(), property.getName(), this.getLongProperty(c, property));
 		});
 	}
 
@@ -60,7 +60,11 @@ public class SimpleIndexSearcherManager extends SimpleResource implements IndexS
 		this.similarity = similarity;
 		this.queryRefreshDebounceTime = queryRefreshDebounceTime;
 
-		this.searcherManager = new SearcherManager(indexWriter, applyAllDeletes, writeAllDeletes, SEARCHER_FACTORY);
+		try {
+			this.searcherManager = new SearcherManager(indexWriter, applyAllDeletes, writeAllDeletes, SEARCHER_FACTORY);
+		} catch (IOException e) {
+			throw new DBException(e);
+		}
 
 		refreshSubscription = luceneHeavyTasksScheduler.scheduleAtFixedRate(() -> {
 			try {
@@ -83,6 +87,8 @@ public class SimpleIndexSearcherManager extends SimpleResource implements IndexS
 			searcherManager.maybeRefreshBlocking();
 		} catch (AlreadyClosedException ignored) {
 
+		} catch (IOException e) {
+			throw new DBException(e);
 		} finally {
 			activeRefreshes.decrementAndGet();
 		}
@@ -95,6 +101,8 @@ public class SimpleIndexSearcherManager extends SimpleResource implements IndexS
 			searcherManager.maybeRefresh();
 		} catch (AlreadyClosedException ignored) {
 
+		} catch (IOException e) {
+			throw new DBException(e);
 		} finally {
 			activeRefreshes.decrementAndGet();
 		}
@@ -10,7 +10,7 @@ import org.jetbrains.annotations.NotNull;
 public class BufSerializer implements DataSerializer<Buf> {
 
 	@Override
-	public void serialize(DataOutput dataOutput, @NotNull Buf bytes) {
+	public void serialize(DataOutput dataOutput, @NotNull Buf bytes) throws IOException {
 		dataOutput.writeInt(bytes.size());
 		for (Byte aByte : bytes) {
 			dataOutput.writeByte(aByte);
@@ -18,7 +18,7 @@ public class BufSerializer implements DataSerializer<Buf> {
 	}
 
 	@Override
-	public @NotNull Buf deserialize(DataInput dataInput) {
+	public @NotNull Buf deserialize(DataInput dataInput) throws IOException {
 		var size = dataInput.readInt();
 		var bal = Buf.create(size);
 		for (int i = 0; i < size; i++) {
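Every serializer hunk below makes the same mechanical change: serialize and deserialize gain a `throws IOException` clause, because the DataOutput/DataInput calls inside them throw it. This implies the DataSerializer interface itself now declares the exception; a sketch of the assumed interface, reconstructed from the signatures in this diff rather than copied from the repository:

    public interface DataSerializer<T> {
        void serialize(DataOutput dataOutput, @NotNull T value) throws IOException;
        @NotNull T deserialize(DataInput dataInput) throws IOException;
    }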
@@ -10,12 +10,12 @@ import org.jetbrains.annotations.NotNull;
 public class CompressionSerializer implements DataSerializer<Compression> {
 
 	@Override
-	public void serialize(DataOutput dataOutput, @NotNull Compression compression) {
+	public void serialize(DataOutput dataOutput, @NotNull Compression compression) throws IOException {
 		dataOutput.writeInt(compression.ordinal());
 	}
 
 	@Override
-	public @NotNull Compression deserialize(DataInput dataInput) {
+	public @NotNull Compression deserialize(DataInput dataInput) throws IOException {
 		return Compression.values()[dataInput.readInt()];
 	}
 }
@@ -11,7 +11,7 @@ import org.jetbrains.annotations.NotNull;
 public class DurationSerializer implements DataSerializer<Duration> {
 
 	@Override
-	public void serialize(DataOutput dataOutput, @NotNull Duration duration) {
+	public void serialize(DataOutput dataOutput, @NotNull Duration duration) throws IOException {
 		var units = duration.getUnits();
 		var smallestUnit = (ChronoUnit) units.get(units.size() - 1);
 		dataOutput.writeInt(smallestUnit.ordinal());
@@ -19,7 +19,7 @@ public class DurationSerializer implements DataSerializer<Duration> {
 	}
 
 	@Override
-	public @NotNull Duration deserialize(DataInput dataInput) {
+	public @NotNull Duration deserialize(DataInput dataInput) throws IOException {
 		var smallestUnit = ChronoUnit.values()[dataInput.readInt()];
 		return Duration.of(dataInput.readLong(), smallestUnit);
 	}
@@ -10,12 +10,12 @@ import org.jetbrains.annotations.NotNull;
 public class LLSnapshotSerializer implements DataSerializer<LLSnapshot> {
 
 	@Override
-	public void serialize(DataOutput dataOutput, @NotNull LLSnapshot llSnapshot) {
+	public void serialize(DataOutput dataOutput, @NotNull LLSnapshot llSnapshot) throws IOException {
 		dataOutput.writeLong(llSnapshot.getSequenceNumber());
 	}
 
 	@Override
-	public @NotNull LLSnapshot deserialize(DataInput dataInput) {
+	public @NotNull LLSnapshot deserialize(DataInput dataInput) throws IOException {
 		return new LLSnapshot(dataInput.readLong());
 	}
 }
@@ -10,12 +10,12 @@ import org.jetbrains.annotations.NotNull;
 public class PathSerializer implements DataSerializer<Path> {
 
 	@Override
-	public void serialize(DataOutput dataOutput, @NotNull Path path) {
+	public void serialize(DataOutput dataOutput, @NotNull Path path) throws IOException {
 		dataOutput.writeUTF(path.toString());
 	}
 
 	@Override
-	public @NotNull Path deserialize(DataInput dataInput) {
+	public @NotNull Path deserialize(DataInput dataInput) throws IOException {
 		return Path.of(dataInput.readUTF());
 	}
 }
@@ -26,7 +26,7 @@ public class String2FieldAnalyzerMapSerializer implements DataSerializer<Map<Str
 	}
 
 	@Override
-	public @NotNull Map<String, TextFieldsAnalyzer> deserialize(DataInput dataInput) {
+	public @NotNull Map<String, TextFieldsAnalyzer> deserialize(DataInput dataInput) throws IOException {
 		var size = dataInput.readInt();
 		var result = new HashMap<String, TextFieldsAnalyzer>(size);
 		for (int i = 0; i < size; i++) {
@@ -26,7 +26,7 @@ public class String2FieldSimilarityMapSerializer implements DataSerializer<Map<S
 	}
 
 	@Override
-	public @NotNull Map<String, TextFieldsSimilarity> deserialize(DataInput dataInput) {
+	public @NotNull Map<String, TextFieldsSimilarity> deserialize(DataInput dataInput) throws IOException {
 		var size = dataInput.readInt();
 		var result = new HashMap<String, TextFieldsSimilarity>(size);
 		for (int i = 0; i < size; i++) {
@@ -11,13 +11,13 @@ import org.jetbrains.annotations.NotNull;
 public class StringEntrySerializer implements DataSerializer<Map.Entry> {
 
 	@Override
-	public void serialize(DataOutput dataOutput, @NotNull Map.Entry entry) {
+	public void serialize(DataOutput dataOutput, @NotNull Map.Entry entry) throws IOException {
 		dataOutput.writeUTF((String) entry.getKey());
 		dataOutput.writeUTF((String) entry.getValue());
 	}
 
 	@Override
-	public @NotNull Map.Entry deserialize(DataInput dataInput) {
+	public @NotNull Map.Entry deserialize(DataInput dataInput) throws IOException {
 		return Map.entry(dataInput.readUTF(), dataInput.readUTF());
 	}
 }
@@ -23,7 +23,7 @@ public class StringMapSerializer implements DataSerializer<Map<String, String>>
 	}
 
 	@Override
-	public @NotNull Map<String, String> deserialize(DataInput dataInput) {
+	public @NotNull Map<String, String> deserialize(DataInput dataInput) throws IOException {
 		var size = dataInput.readInt();
 		var result = new HashMap<String, String>(size);
 		for (int i = 0; i < size; i++) {
@@ -10,12 +10,12 @@ import org.jetbrains.annotations.NotNull;
 public class TextFieldsAnalyzerSerializer implements DataSerializer<TextFieldsAnalyzer> {
 
 	@Override
-	public void serialize(DataOutput dataOutput, @NotNull TextFieldsAnalyzer textFieldsAnalyzer) {
+	public void serialize(DataOutput dataOutput, @NotNull TextFieldsAnalyzer textFieldsAnalyzer) throws IOException {
 		dataOutput.writeInt(textFieldsAnalyzer.ordinal());
 	}
 
 	@Override
-	public @NotNull TextFieldsAnalyzer deserialize(DataInput dataInput) {
+	public @NotNull TextFieldsAnalyzer deserialize(DataInput dataInput) throws IOException {
 		return TextFieldsAnalyzer.values()[dataInput.readInt()];
 	}
 }
@@ -10,12 +10,12 @@ import org.jetbrains.annotations.NotNull;
 public class TextFieldsSimilaritySerializer implements DataSerializer<TextFieldsSimilarity> {
 
 	@Override
-	public void serialize(DataOutput dataOutput, @NotNull TextFieldsSimilarity textFieldsSimilarity) {
+	public void serialize(DataOutput dataOutput, @NotNull TextFieldsSimilarity textFieldsSimilarity) throws IOException {
 		dataOutput.writeInt(textFieldsSimilarity.ordinal());
 	}
 
 	@Override
-	public @NotNull TextFieldsSimilarity deserialize(DataInput dataInput) {
+	public @NotNull TextFieldsSimilarity deserialize(DataInput dataInput) throws IOException {
 		return TextFieldsSimilarity.values()[dataInput.readInt()];
 	}
 }
@@ -10,12 +10,12 @@ import org.jetbrains.annotations.NotNull;
 public class UpdateReturnModeSerializer implements DataSerializer<UpdateReturnMode> {
 
 	@Override
-	public void serialize(DataOutput dataOutput, @NotNull UpdateReturnMode updateReturnMode) {
+	public void serialize(DataOutput dataOutput, @NotNull UpdateReturnMode updateReturnMode) throws IOException {
 		dataOutput.writeInt(updateReturnMode.ordinal());
 	}
 
 	@Override
-	public @NotNull UpdateReturnMode deserialize(DataInput dataInput) {
+	public @NotNull UpdateReturnMode deserialize(DataInput dataInput) throws IOException {
 		return UpdateReturnMode.values()[dataInput.readInt()];
 	}
 }
@@ -46,34 +46,24 @@ public class CodecSerializer<A> implements Serializer<A> {
 
 	@Override
 	public @NotNull A deserialize(@NotNull BufDataInput is) throws SerializationException {
-		try {
-			int codecId;
-			if (microCodecs) {
-				codecId = is.readUnsignedByte();
-			} else {
-				codecId = is.readInt();
-			}
-			var serializer = deserializationCodecs.getCodec(codecId);
-			return serializer.deserialize(is);
-		} catch (IOException ex) {
-			// This shouldn't happen
-			throw new IOError(ex);
-		}
+		int codecId;
+		if (microCodecs) {
+			codecId = is.readUnsignedByte();
+		} else {
+			codecId = is.readInt();
+		}
+		var serializer = deserializationCodecs.getCodec(codecId);
+		return serializer.deserialize(is);
 	}
 
 	@Override
 	public void serialize(@NotNull A deserialized, BufDataOutput os) throws SerializationException {
-		try {
-			if (microCodecs) {
-				os.writeByte(serializationCodecId);
-			} else {
-				os.writeInt(serializationCodecId);
-			}
-			serializationCodec.serialize(os, deserialized);
-		} catch (IOException ex) {
-			// This shouldn't happen
-			throw new IOError(ex);
-		}
+		if (microCodecs) {
+			os.writeByte(serializationCodecId);
+		} else {
+			os.writeInt(serializationCodecId);
+		}
+		serializationCodec.serialize(os, deserialized);
 	}
 
 	@SuppressWarnings("unused")
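CodecSerializer goes the other way: the try/catch is deleted outright. BufDataInput and BufDataOutput appear to be in-memory buffer types, so once their read/write methods stopped declaring IOException, the catch clause itself became a compilation error, since Java rejects catching a checked exception that nothing in the try block can throw. A minimal illustration of that rule:

    try {
        int x = 1 + 1; // nothing here can throw IOException
    } catch (IOException e) {
        // Compile error: "exception IOException is never thrown
        // in body of corresponding try statement".
        throw new IOError(e);
    }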
@@ -9,11 +9,11 @@ import org.apache.lucene.store.IOContext;
 
 public class AlwaysDirectIOFSDirectory extends DirectIODirectory {
 
-	public AlwaysDirectIOFSDirectory(Path path, int mergeBufferSize, long minBytesDirect) {
+	public AlwaysDirectIOFSDirectory(Path path, int mergeBufferSize, long minBytesDirect) throws IOException {
 		super(FSDirectory.open(path), mergeBufferSize, minBytesDirect);
 	}
 
-	public AlwaysDirectIOFSDirectory(Path path) {
+	public AlwaysDirectIOFSDirectory(Path path) throws IOException {
 		super(FSDirectory.open(path));
 	}
 
@@ -19,7 +19,7 @@ public class CheckIndexOutput extends IndexOutput {
 	}
 
 	@Override
-	public void close() {
+	public void close() throws IOException {
 		warnLuceneThread();
 		output.close();
 	}
@@ -31,19 +31,19 @@ public class CheckIndexOutput extends IndexOutput {
 	}
 
 	@Override
-	public long getChecksum() {
+	public long getChecksum() throws IOException {
 		checkThread();
 		return output.getChecksum();
 	}
 
 	@Override
-	public void writeByte(byte b) {
+	public void writeByte(byte b) throws IOException {
 		checkThread();
 		output.writeByte(b);
 	}
 
 	@Override
-	public void writeBytes(byte[] b, int offset, int length) {
+	public void writeBytes(byte[] b, int offset, int length) throws IOException {
 		checkThread();
 		output.writeBytes(b, offset, length);
 	}
@@ -2,6 +2,7 @@ package it.cavallium.dbengine.lucene;
 
 import static it.cavallium.dbengine.lucene.LuceneUtils.warnLuceneThread;
 
+import it.cavallium.dbengine.utils.DBException;
 import java.io.IOException;
 import java.util.Collection;
 import java.util.Set;
@@ -21,69 +22,117 @@ public class CheckOutputDirectory extends Directory {
 
 	@Override
 	public String[] listAll() {
-		return directory.listAll();
+		try {
+			return directory.listAll();
+		} catch (IOException e) {
+			throw new DBException(e);
+		}
 	}
 
 	@Override
 	public void deleteFile(String name) {
-		directory.deleteFile(name);
+		try {
+			directory.deleteFile(name);
+		} catch (IOException e) {
+			throw new DBException(e);
+		}
 	}
 
 	@Override
 	public long fileLength(String name) {
-		return directory.fileLength(name);
+		try {
+			return directory.fileLength(name);
+		} catch (IOException e) {
+			throw new DBException(e);
+		}
 	}
 
 	@Override
 	public IndexOutput createOutput(String name, IOContext context) {
 		LuceneUtils.checkLuceneThread();
-		return new CheckIndexOutput(directory.createOutput(name, context));
+		try {
+			return new CheckIndexOutput(directory.createOutput(name, context));
+		} catch (IOException e) {
+			throw new DBException(e);
+		}
 	}
 
 	@Override
 	public IndexOutput createTempOutput(String prefix, String suffix, IOContext context) {
 		LuceneUtils.checkLuceneThread();
-		return new CheckIndexOutput(directory.createTempOutput(prefix, suffix, context));
+		try {
+			return new CheckIndexOutput(directory.createTempOutput(prefix, suffix, context));
+		} catch (IOException e) {
+			throw new DBException(e);
+		}
 	}
 
 	@Override
 	public void sync(Collection<String> names) {
 		LuceneUtils.checkLuceneThread();
-		directory.sync(names);
+		try {
+			directory.sync(names);
+		} catch (IOException e) {
+			throw new DBException(e);
+		}
 	}
 
 	@Override
 	public void syncMetaData() {
 		LuceneUtils.checkLuceneThread();
-		directory.syncMetaData();
+		try {
+			directory.syncMetaData();
+		} catch (IOException e) {
+			throw new DBException(e);
+		}
 	}
 
 	@Override
 	public void rename(String source, String dest) {
 		LuceneUtils.checkLuceneThread();
-		directory.rename(source, dest);
+		try {
+			directory.rename(source, dest);
+		} catch (IOException e) {
+			throw new DBException(e);
+		}
 	}
 
 	@Override
 	public IndexInput openInput(String name, IOContext context) {
 		LuceneUtils.checkLuceneThread();
-		return new CheckIndexInput(directory.openInput(name, context));
+		try {
+			return new CheckIndexInput(directory.openInput(name, context));
+		} catch (IOException e) {
+			throw new DBException(e);
+		}
 	}
 
 	@Override
 	public Lock obtainLock(String name) {
 		LuceneUtils.checkLuceneThread();
-		return directory.obtainLock(name);
+		try {
+			return directory.obtainLock(name);
+		} catch (IOException e) {
+			throw new DBException(e);
+		}
 	}
 
 	@Override
 	public void close() {
 		warnLuceneThread();
-		directory.close();
+		try {
+			directory.close();
+		} catch (IOException e) {
+			throw new DBException(e);
+		}
 	}
 
 	@Override
 	public Set<String> getPendingDeletions() {
-		return directory.getPendingDeletions();
+		try {
+			return directory.getPendingDeletions();
+		} catch (IOException e) {
+			throw new DBException(e);
+		}
 	}
 }
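The twelve near-identical try/catch blocks above could in principle be collapsed by a small helper. A hypothetical utility, not present in this commit, sketching that alternative:

    // Hypothetical helper: run an I/O action, rethrowing IOException unchecked.
    static <T> T wrapIo(java.util.concurrent.Callable<T> action) {
        try {
            return action.call();
        } catch (IOException e) {
            throw new DBException(e);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    // Usage inside an override: return wrapIo(() -> directory.listAll());

The repetitive inline form used by the commit keeps stack traces one frame shallower and avoids allocating a lambda per call.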
@@ -543,7 +543,12 @@ public class LuceneUtils {
 		}
 
 		// Get the reference docId and apply it to MoreLikeThis, to generate the query
-		Query mltQuery = mlt.like(mltDocumentFields);
+		Query mltQuery = null;
+		try {
+			mltQuery = mlt.like(mltDocumentFields);
+		} catch (IOException e) {
+			throw new DBException(e);
+		}
 		Query luceneQuery;
 		if (!(luceneAdditionalQuery instanceof MatchAllDocsQuery)) {
 			luceneQuery = new Builder()
@@ -31,14 +31,14 @@ public class RandomFieldComparator extends FieldComparator<Float> implements Lea
 	}
 
 	@Override
-	public int compareBottom(int doc) {
+	public int compareBottom(int doc) throws IOException {
 		float score = scorer.score();
 		assert !Float.isNaN(score);
 		return Float.compare(score, bottom);
 	}
 
 	@Override
-	public void copy(int slot, int doc) {
+	public void copy(int slot, int doc) throws IOException {
 		scores[slot] = scorer.score();
 		assert !Float.isNaN(scores[slot]);
 	}
@@ -93,7 +93,7 @@ public class RandomFieldComparator extends FieldComparator<Float> implements Lea
 	}
 
 	@Override
-	public int compareTop(int doc) {
+	public int compareTop(int doc) throws IOException {
 		float docValue = scorer.score();
 		assert !Float.isNaN(docValue);
 		return Float.compare(docValue, topValue);
@@ -114,7 +114,7 @@ public class DecimalBucketMultiCollectorManager implements CollectorMultiManager
 		return new double[buckets];
 	}
 
-	public Buckets search(IndexSearcher indexSearcher) {
+	public Buckets search(IndexSearcher indexSearcher) throws IOException {
 		Query query;
 		if (USE_SINGLE_FACET_COLLECTOR && normalizationQuery != null) {
 			query = normalizationQuery;
@@ -17,7 +17,7 @@ public interface FacetsCollector extends Collector {
 	}
 
 	@Override
-	public LeafCollector getLeafCollector(LeafReaderContext context) {
+	public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
 		return facetsCollector.getLeafCollector(context);
 	}
 
@@ -30,7 +30,7 @@ public class FastFacetsCollectorManager implements CollectorManager<FacetsCollec
 	}
 
 	@Override
-	public FacetsCollector reduce(Collection<FacetsCollector> collectors) {
+	public FacetsCollector reduce(Collection<FacetsCollector> collectors) throws IOException {
 		return FacetsCollector.wrap(facetsCollectorManager.reduce(collectors
 				.stream()
 				.map(facetsCollector -> facetsCollector.getLuceneFacetsCollector())
@@ -61,23 +61,23 @@ public class FastFacetsCollectorManager implements CollectorManager<FacetsCollec
 	}
 
 	@Override
-	public LeafCollector getLeafCollector(LeafReaderContext context) {
+	public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
 		var leafCollector = collector.getLeafCollector(context);
 		return new LeafCollector() {
 			@Override
-			public void setScorer(Scorable scorer) {
+			public void setScorer(Scorable scorer) throws IOException {
 				leafCollector.setScorer(scorer);
 			}
 
 			@Override
-			public void collect(int doc) {
+			public void collect(int doc) throws IOException {
 				if (collectionRate == 1 || hash.hashCode(doc) % collectionRate == 0) {
 					leafCollector.collect(doc);
 				}
 			}
 
 			@Override
-			public DocIdSetIterator competitiveIterator() {
+			public DocIdSetIterator competitiveIterator() throws IOException {
 				return leafCollector.competitiveIterator();
 			}
 		};
@@ -34,17 +34,17 @@ public class FastRandomSamplingFacetsCollector extends SimpleCollector implement
 	}
 
 	@Override
-	protected void doSetNextReader(LeafReaderContext context) {
+	protected void doSetNextReader(LeafReaderContext context) throws IOException {
 		collector.getLeafCollector(context);
 	}
 
 	@Override
-	public void setScorer(Scorable scorer) {
+	public void setScorer(Scorable scorer) throws IOException {
 		collector.setScorer(scorer);
 	}
 
 	@Override
-	public void collect(int doc) {
+	public void collect(int doc) throws IOException {
 		if (collectionRate == 1 || hash.hashCode(doc) % collectionRate == 0) {
 			collector.collect(doc);
 		}
@@ -84,12 +84,12 @@ public class ScoringShardsCollectorMultiManager implements CollectorMultiManager
 	public CollectorManager<TopFieldCollector, TopDocs> get(IndexSearcher indexSearcher, int shardIndex) {
 		return new CollectorManager<>() {
 			@Override
-			public TopFieldCollector newCollector() {
+			public TopFieldCollector newCollector() throws IOException {
 				return sharedCollectorManager.newCollector();
 			}
 
 			@Override
-			public TopDocs reduce(Collection<TopFieldCollector> collectors) {
+			public TopDocs reduce(Collection<TopFieldCollector> collectors) throws IOException {
 				if (LLUtils.isInNonBlockingThread()) {
 					throw new UnsupportedOperationException("Called reduce in a nonblocking thread");
 				}
@@ -83,7 +83,7 @@ public class TopDocsCollectorMultiManager implements CollectorMultiManager<TopDo
 	}
 
 	@Override
-	public TopDocs reduce(Collection<TopDocsCollector<?>> collectors) {
+	public TopDocs reduce(Collection<TopDocsCollector<?>> collectors) throws IOException {
 		TopDocs[] docsArray;
 		boolean needsSort = luceneSort != null;
 		boolean needsScores = luceneSort != null && luceneSort.needsScores();
@@ -48,7 +48,7 @@ public class TotalHitCountCollectorManager implements CollectorManager<TimeLimit
 	}
 
 	@Override
-	public LeafCollector getLeafCollector(LeafReaderContext context) {
+	public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
 		return timeLimitingCollector.getLeafCollector(context);
 	}
 
@@ -77,7 +77,7 @@ public class Lucene90NoCompressionStoredFieldsFormat extends StoredFieldsFormat
 	private static final Compressor DUMMY_COMPRESSOR = new Compressor() {
 
 		@Override
-		public void compress(ByteBuffersDataInput byteBuffersDataInput, DataOutput dataOutput) {
+		public void compress(ByteBuffersDataInput byteBuffersDataInput, DataOutput dataOutput) throws IOException {
 			dataOutput.copyBytes(byteBuffersDataInput, byteBuffersDataInput.size());
 		}
 
@@ -96,7 +96,7 @@ public class Lucene90NoCompressionStoredFieldsFormat extends StoredFieldsFormat
 	}
 
 	@Override
-	public StoredFieldsWriter fieldsWriter(Directory directory, SegmentInfo si, IOContext context) {
+	public StoredFieldsWriter fieldsWriter(Directory directory, SegmentInfo si, IOContext context) throws IOException {
 		return impl().fieldsWriter(directory, si, context);
 	}
 
@@ -86,7 +86,7 @@ public class BigCompositeReader<R extends IndexReader> {
 		}
 	}
 
-	public long getDocCount(String field) {
+	public long getDocCount(String field) throws IOException {
 		this.ensureOpen();
 		long total = 0;
 
@@ -103,7 +103,7 @@ public class BigCompositeReader<R extends IndexReader> {
 		return total;
 	}
 
-	public long docFreq(Term term) {
+	public long docFreq(Term term) throws IOException {
 		this.ensureOpen();
 		long total = 0;
 
@@ -137,7 +137,7 @@ public class BigCompositeReader<R extends IndexReader> {
 		return numDocs;
 	}
 
-	public Fields getTermVectors(long docID) {
+	public Fields getTermVectors(long docID) throws IOException {
 		this.ensureOpen();
 		int i = this.readerIndex(docID);
 		return this.subReaders[i].getTermVectors(Math.toIntExact(docID - this.starts[i]));
@@ -177,19 +177,19 @@ public class BigCompositeReader<R extends IndexReader> {
 		return hi;
 	}
 
-	public final void document(long docID, StoredFieldVisitor visitor) {
+	public final void document(long docID, StoredFieldVisitor visitor) throws IOException {
 		this.ensureOpen();
 		int i = this.readerIndex(docID);
 		this.subReaders[i].document(Math.toIntExact(docID - this.starts[i]), visitor);
 	}
 
-	public final Document document(long docID) {
+	public final Document document(long docID) throws IOException {
 		DocumentStoredFieldVisitor visitor = new DocumentStoredFieldVisitor();
 		this.document(docID, visitor);
 		return visitor.getDocument();
 	}
 
-	public final Document document(long docID, Set<String> fieldsToLoad) {
+	public final Document document(long docID, Set<String> fieldsToLoad) throws IOException {
 		DocumentStoredFieldVisitor visitor = new DocumentStoredFieldVisitor(fieldsToLoad);
 		this.document(docID, visitor);
 		return visitor.getDocument();
@@ -549,7 +549,7 @@ public final class MultiMoreLikeThis {
 	 * @param docNum the documentID of the lucene doc to generate the 'More Like This" query for.
 	 * @return a query that will return docs like the passed lucene document ID.
 	 */
-	public Query like(long docNum) {
+	public Query like(long docNum) throws IOException {
 		if (fieldNames == null) {
 			// gather list of valid fields from lucene
 			Collection<String> fields;
@@ -564,7 +564,7 @@ public final class MultiMoreLikeThis {
 	 * @param filteredDocument Document with field values extracted for selected fields.
 	 * @return More Like This query for the passed document.
 	 */
-	public Query like(Map<String, ? extends Collection<?>> filteredDocument) {
+	public Query like(Map<String, ? extends Collection<?>> filteredDocument) throws IOException {
 		if (fieldNames == null) {
 			// gather list of valid fields from lucene
 			Collection<String> fields = BigCompositeReader.getIndexedFields(ir);
@@ -579,7 +579,7 @@ public final class MultiMoreLikeThis {
 	 *
 	 * @return a query that will return docs like the passed Readers.
 	 */
-	public Query like(String fieldName, Reader... readers) {
+	public Query like(String fieldName, Reader... readers) throws IOException {
 		Map<String, Map<String, Long>> perFieldTermFrequencies = new HashMap<>();
 		for (Reader r : readers) {
 			addTermFrequencies(r, perFieldTermFrequencies, fieldName);
@@ -622,7 +622,7 @@ public final class MultiMoreLikeThis {
 	 * objects as the values.
 	 */
 	private PriorityQueue<ScoreTerm> createQueue(
-			Map<String, Map<String, Long>> perFieldTermFrequencies) {
+			Map<String, Map<String, Long>> perFieldTermFrequencies) throws IOException {
 		// have collected all words in doc and their freqs
 		final long limit = Math.min(maxQueryTerms, this.getTermsCount(perFieldTermFrequencies));
 		FreqQ queue = new FreqQ(Math.toIntExact(limit)); // will order words by score
@@ -709,7 +709,7 @@ public final class MultiMoreLikeThis {
 	 *
 	 * @param docNum the id of the lucene document from which to find terms
	 */
-	private PriorityQueue<ScoreTerm> retrieveTerms(long docNum) {
+	private PriorityQueue<ScoreTerm> retrieveTerms(long docNum) throws IOException {
 		Map<String, Map<String, Long>> field2termFreqMap = new HashMap<>();
 		retrieveTermsOfIndexReader(ir, docNum, field2termFreqMap);
 
@@ -879,14 +879,14 @@ public final class MultiMoreLikeThis {
 	 * or best entry, first
 	 * @see #retrieveInterestingTerms
 	 */
-	private PriorityQueue<ScoreTerm> retrieveTerms(Reader r, String fieldName) {
+	private PriorityQueue<ScoreTerm> retrieveTerms(Reader r, String fieldName) throws IOException {
 		Map<String, Map<String, Long>> field2termFreqMap = new HashMap<>();
 		addTermFrequencies(r, field2termFreqMap, fieldName);
 		return createQueue(field2termFreqMap);
 	}
 
 	/** @see #retrieveInterestingTerms(java.io.Reader, String) */
-	public String[] retrieveInterestingTerms(long docNum) {
+	public String[] retrieveInterestingTerms(long docNum) throws IOException {
 		ArrayList<String> al = new ArrayList<>(Math.toIntExact(maxQueryTerms));
 		PriorityQueue<ScoreTerm> pq = retrieveTerms(docNum);
 		ScoreTerm scoreTerm;
@@ -911,7 +911,7 @@ public final class MultiMoreLikeThis {
 	 * @see #retrieveTerms(java.io.Reader, String)
 	 * @see #setMaxQueryTerms
 	 */
-	public String[] retrieveInterestingTerms(Reader r, String fieldName) {
+	public String[] retrieveInterestingTerms(Reader r, String fieldName) throws IOException {
 		ArrayList<String> al = new ArrayList<>(Math.toIntExact(maxQueryTerms));
 		PriorityQueue<ScoreTerm> pq = retrieveTerms(r, fieldName);
 		ScoreTerm scoreTerm;
@@ -32,11 +32,7 @@ public class AdaptiveLocalSearcher implements LocalSearcher {
 			@Nullable String keyFieldName,
 			GlobalQueryRewrite transformer) {
 		if (transformer != NO_REWRITE) {
-			try {
-				return LuceneUtils.rewrite(this, indexSearcher, queryParams, keyFieldName, transformer);
-			} catch (IOException e) {
-				throw new RuntimeException(e);
-			}
+			return LuceneUtils.rewrite(this, indexSearcher, queryParams, keyFieldName, transformer);
 		}
 		return transformedCollect(indexSearcher, queryParams, keyFieldName, transformer);
 	}
@@ -32,11 +32,7 @@ public class AdaptiveMultiSearcher implements MultiSearcher {
 			@Nullable String keyFieldName,
 			GlobalQueryRewrite transformer) {
 		if (transformer != NO_REWRITE) {
-			try {
-				return LuceneUtils.rewriteMulti(this, indexSearchers, queryParams, keyFieldName, transformer);
-			} catch (IOException e) {
-				throw new RuntimeException(e);
-			}
+			return LuceneUtils.rewriteMulti(this, indexSearchers, queryParams, keyFieldName, transformer);
 		}
 		return transformedCollectMulti(indexSearchers, queryParams, keyFieldName, transformer);
 	}
@@ -4,6 +4,7 @@ import com.google.common.collect.Streams;
 import it.cavallium.dbengine.database.disk.LLIndexSearchers;
 import it.cavallium.dbengine.lucene.collector.Buckets;
 import it.cavallium.dbengine.lucene.collector.DecimalBucketMultiCollectorManager;
+import it.cavallium.dbengine.utils.DBException;
 import java.io.IOException;
 import java.util.List;
 import org.apache.logging.log4j.LogManager;
@@ -43,11 +44,11 @@ public class DecimalBucketMultiSearcher {
 				bucketParams.collectionRate(),
 				bucketParams.sampleSize()
 		);
-		return cmm.reduce(Streams.stream(indexSearchers).parallel().map(shard -> {
+		return cmm.reduce(Streams.stream(indexSearchers).parallel().map(indexSearcher -> {
 			try {
-				return cmm.search(shard);
+				return cmm.search(indexSearcher);
 			} catch (IOException e) {
-				throw new RuntimeException(e);
+				throw new DBException(e);
 			}
 		}).toList());
 	}
@ -72,7 +72,7 @@ public class LuceneGenerator implements Supplier<ScoreDoc> {
remainingOffset--;
}

private Weight createWeight() {
private Weight createWeight() throws IOException {
ScoreMode scoreMode = computeScores ? ScoreMode.COMPLETE : ScoreMode.COMPLETE_NO_SCORES;
return shard.createWeight(shard.rewrite(query), scoreMode, 1f);
}
@ -93,7 +93,7 @@ public class LuceneGenerator implements Supplier<ScoreDoc> {
}
}

private ScoreDoc getWeightedNext() {
private ScoreDoc getWeightedNext() throws IOException {
while (tryAdvanceDocIdSetIterator()) {
LeafReader reader = leaf.reader();
Bits liveDocs = reader.getLiveDocs();
@ -109,7 +109,7 @@ public class LuceneGenerator implements Supplier<ScoreDoc> {
clearState();
return null;
}
private boolean tryAdvanceDocIdSetIterator() {
private boolean tryAdvanceDocIdSetIterator() throws IOException {
if (docIdSetIterator != null) {
return true;
}
@ -127,7 +127,7 @@ public class LuceneGenerator implements Supplier<ScoreDoc> {
return false;
}

private ScoreDoc transformDoc(int doc) {
private ScoreDoc transformDoc(int doc) throws IOException {
return new ScoreDoc(leaf.docBase + doc, scorer.score(), shardIndex);
}

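Restoring `throws IOException` on these private helpers is safe because the class's public entry point, Supplier.get(), cannot declare it. A sketch of that boundary shape, assuming the class catches and translates at get() (the hunks above do not show that part):

import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.function.Supplier;

class CheckedCore implements Supplier<Integer> {

	// Private helpers may freely declare the checked exception again...
	private int readNext() throws IOException {
		return 42;
	}

	// ...because Supplier.get() is the single boundary where it has to be
	// translated into an unchecked exception.
	@Override
	public Integer get() {
		try {
			return readNext();
		} catch (IOException e) {
			throw new UncheckedIOException(e);
		}
	}
}
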
@ -77,7 +77,7 @@ public class ShardIndexSearcher extends IndexSearcher {
}

@Override
public Query rewrite(Query original) {
public Query rewrite(Query original) throws IOException {
final IndexSearcher localSearcher = new IndexSearcher(getIndexReader());
original = localSearcher.rewrite(original);
final Set<Term> terms = new HashSet<>();
@ -112,7 +112,7 @@ public class ShardIndexSearcher extends IndexSearcher {

// Mock: in a real env, this would hit the wire and get
// term stats from remote node
Map<Term, TermStatistics> getNodeTermStats(Set<Term> terms, int nodeID) {
Map<Term, TermStatistics> getNodeTermStats(Set<Term> terms, int nodeID) throws IOException {
var s = searchers[nodeID];
final Map<Term, TermStatistics> stats = new HashMap<>();
if (s == null) {
@ -157,7 +157,7 @@ public class ShardIndexSearcher extends IndexSearcher {
}

@Override
public CollectionStatistics collectionStatistics(String field) {
public CollectionStatistics collectionStatistics(String field) throws IOException {
// TODO: we could compute this on init and cache,
// since we are re-inited whenever any nodes have a
// new reader
@ -204,7 +204,7 @@ public class ShardIndexSearcher extends IndexSearcher {
}
}

private CollectionStatistics computeNodeCollectionStatistics(FieldAndShar fieldAndShard) {
private CollectionStatistics computeNodeCollectionStatistics(FieldAndShar fieldAndShard) throws IOException {
var searcher = searchers[fieldAndShard.nodeID];
return searcher.collectionStatistics(fieldAndShard.field);
}

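Here the overrides regain the checked exception legally: Lucene's IndexSearcher declares both rewrite(Query) and collectionStatistics(String) with `throws IOException`, and Java lets an override declare the same checked exceptions or a subset of them. A toy illustration of that rule (Base and Shard are hypothetical):

import java.io.IOException;

class Base {
	public String rewrite(String q) throws IOException { return q; }
}

class Shard extends Base {
	// An override may re-declare the superclass's checked exceptions (or fewer).
	// Dropping `throws IOException` earlier forced a try/catch inside the body;
	// restoring it lets the exception propagate to callers that expect it.
	@Override
	public String rewrite(String q) throws IOException {
		if (q == null) throw new IOException("no query");
		return q.trim();
	}
}
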
@ -13,7 +13,7 @@ import org.jetbrains.annotations.Nullable;
public class BooleanListJsonAdapter extends JsonAdapter<BooleanList> {

@Override
public @NotNull BooleanList fromJson(@NotNull JsonReader reader) {
public @NotNull BooleanList fromJson(@NotNull JsonReader reader) throws IOException {
reader.beginArray();
BooleanArrayList modifiableOutput = new BooleanArrayList();
while (reader.hasNext()) {
@ -24,7 +24,7 @@ public class BooleanListJsonAdapter extends JsonAdapter<BooleanList> {
}

@Override
public void toJson(@NotNull JsonWriter writer, @Nullable BooleanList value) {
public void toJson(@NotNull JsonWriter writer, @Nullable BooleanList value) throws IOException {
if (value == null) {
writer.nullValue();
return;

@ -11,7 +11,7 @@ import org.jetbrains.annotations.Nullable;
public class ByteListJsonAdapter extends JsonAdapter<Buf> {

@Override
public @NotNull Buf fromJson(@NotNull JsonReader reader) {
public @NotNull Buf fromJson(@NotNull JsonReader reader) throws IOException {
reader.beginArray();
var modifiableOutput = Buf.create();
while (reader.hasNext()) {
@ -22,7 +22,7 @@ public class ByteListJsonAdapter extends JsonAdapter<Buf> {
}

@Override
public void toJson(@NotNull JsonWriter writer, @Nullable Buf value) {
public void toJson(@NotNull JsonWriter writer, @Nullable Buf value) throws IOException {
if (value == null) {
writer.nullValue();
return;

@ -13,7 +13,7 @@ import org.jetbrains.annotations.Nullable;
public class CharListJsonAdapter extends JsonAdapter<CharList> {

@Override
public @NotNull CharList fromJson(@NotNull JsonReader reader) {
public @NotNull CharList fromJson(@NotNull JsonReader reader) throws IOException {
reader.beginArray();
CharArrayList modifiableOutput = new CharArrayList();
while (reader.hasNext()) {
@ -24,7 +24,7 @@ public class CharListJsonAdapter extends JsonAdapter<CharList> {
}

@Override
public void toJson(@NotNull JsonWriter writer, @Nullable CharList value) {
public void toJson(@NotNull JsonWriter writer, @Nullable CharList value) throws IOException {
if (value == null) {
writer.nullValue();
return;

@ -13,7 +13,7 @@ import org.jetbrains.annotations.Nullable;
public class IntListJsonAdapter extends JsonAdapter<IntList> {

@Override
public @NotNull IntList fromJson(@NotNull JsonReader reader) {
public @NotNull IntList fromJson(@NotNull JsonReader reader) throws IOException {
reader.beginArray();
IntArrayList modifiableOutput = new IntArrayList();
while (reader.hasNext()) {
@ -24,7 +24,7 @@ public class IntListJsonAdapter extends JsonAdapter<IntList> {
}

@Override
public void toJson(@NotNull JsonWriter writer, @Nullable IntList value) {
public void toJson(@NotNull JsonWriter writer, @Nullable IntList value) throws IOException {
if (value == null) {
writer.nullValue();
return;

@ -13,7 +13,7 @@ import org.jetbrains.annotations.Nullable;
public class LongListJsonAdapter extends JsonAdapter<LongList> {

@Override
public @NotNull LongList fromJson(@NotNull JsonReader reader) {
public @NotNull LongList fromJson(@NotNull JsonReader reader) throws IOException {
reader.beginArray();
LongArrayList modifiableOutput = new LongArrayList();
while (reader.hasNext()) {
@ -24,7 +24,7 @@ public class LongListJsonAdapter extends JsonAdapter<LongList> {
}

@Override
public void toJson(@NotNull JsonWriter writer, @Nullable LongList value) {
public void toJson(@NotNull JsonWriter writer, @Nullable LongList value) throws IOException {
if (value == null) {
writer.nullValue();
return;

@ -132,7 +132,7 @@ public abstract class MoshiPolymorphic<OBJ> {

@Nullable
@Override
public T fromJson(@NotNull JsonReader jsonReader) {
public T fromJson(@NotNull JsonReader jsonReader) throws IOException {
String type = null;

jsonReader.beginObject();
@ -165,7 +165,7 @@ public abstract class MoshiPolymorphic<OBJ> {
}

@Override
public void toJson(@NotNull JsonWriter jsonWriter, @Nullable T t) {
public void toJson(@NotNull JsonWriter jsonWriter, @Nullable T t) throws IOException {
if (t == null) {
jsonWriter.nullValue();
} else {
@ -259,7 +259,7 @@ public abstract class MoshiPolymorphic<OBJ> {

@Nullable
@Override
public T fromJson(@NotNull JsonReader jsonReader) {
public T fromJson(@NotNull JsonReader jsonReader) throws IOException {
try {
Object instance;
Object[] fields;
@ -304,7 +304,7 @@ public abstract class MoshiPolymorphic<OBJ> {
}

@Override
public void toJson(@NotNull JsonWriter jsonWriter, @Nullable T t) {
public void toJson(@NotNull JsonWriter jsonWriter, @Nullable T t) throws IOException {
if (t == null) {
jsonWriter.nullValue();
} else {
@ -339,7 +339,7 @@ public abstract class MoshiPolymorphic<OBJ> {

@Nullable
@Override
public List<T> fromJson(@NotNull JsonReader jsonReader) {
public List<T> fromJson(@NotNull JsonReader jsonReader) throws IOException {
jsonReader.beginArray();
var result = new ArrayList<T>();
while (jsonReader.hasNext()) {
@ -350,7 +350,7 @@ public abstract class MoshiPolymorphic<OBJ> {
}

@Override
public void toJson(@NotNull JsonWriter jsonWriter, @Nullable List<T> ts) {
public void toJson(@NotNull JsonWriter jsonWriter, @Nullable List<T> ts) throws IOException {
if (ts == null) {
jsonWriter.nullValue();
} else {

@ -13,7 +13,7 @@ import org.jetbrains.annotations.Nullable;
public class ShortListJsonAdapter extends JsonAdapter<ShortList> {

@Override
public @NotNull ShortList fromJson(@NotNull JsonReader reader) {
public @NotNull ShortList fromJson(@NotNull JsonReader reader) throws IOException {
reader.beginArray();
ShortArrayList modifiableOutput = new ShortArrayList();
while (reader.hasNext()) {
@ -24,7 +24,7 @@ public class ShortListJsonAdapter extends JsonAdapter<ShortList> {
}

@Override
public void toJson(@NotNull JsonWriter writer, @Nullable ShortList value) {
public void toJson(@NotNull JsonWriter writer, @Nullable ShortList value) throws IOException {
if (value == null) {
writer.nullValue();
return;

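All of the adapter hunks above are one fix repeated: Moshi's abstract JsonAdapter declares fromJson(JsonReader) and toJson(JsonWriter, T) with `throws IOException`, and every JsonReader/JsonWriter call can throw it, so the overrides must re-declare it once there is no local catch. A compilable sketch in the same shape (a hypothetical IntList adapter mirroring the hunks, with fastutil as in this codebase):

import com.squareup.moshi.JsonAdapter;
import com.squareup.moshi.JsonReader;
import com.squareup.moshi.JsonWriter;
import it.unimi.dsi.fastutil.ints.IntArrayList;
import it.unimi.dsi.fastutil.ints.IntList;
import java.io.IOException;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

public class IntListAdapterSketch extends JsonAdapter<IntList> {

	@Override
	public @NotNull IntList fromJson(@NotNull JsonReader reader) throws IOException {
		reader.beginArray(); // every JsonReader call may throw IOException
		IntArrayList out = new IntArrayList();
		while (reader.hasNext()) {
			out.add(reader.nextInt());
		}
		reader.endArray();
		return out;
	}

	@Override
	public void toJson(@NotNull JsonWriter writer, @Nullable IntList value) throws IOException {
		if (value == null) {
			writer.nullValue();
			return;
		}
		writer.beginArray();
		for (int i : value) {
			writer.value(i);
		}
		writer.endArray();
	}
}
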
@ -6,13 +6,13 @@ import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class UTFUtils {
public static void writeUTF(DataOutput out, String utf) {
public static void writeUTF(DataOutput out, String utf) throws IOException {
byte[] bytes = utf.getBytes(StandardCharsets.UTF_8);
out.writeInt(bytes.length);
out.write(bytes);
}

public static String readUTF(DataInput in) {
public static String readUTF(DataInput in) throws IOException {
int len = in.readInt();
byte[] data = new byte[len];
in.readFully(data, 0, len);

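Unlike DataOutput.writeUTF, which writes modified UTF-8 behind an unsigned 16-bit length (capping strings at 65535 encoded bytes), this helper writes a 4-byte int length followed by standard UTF-8. A round-trip sketch of the same wire format using plain JDK streams (demo class and names are illustrative only):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

class UTFUtilsDemo {

	// Same wire format as the methods above: 4-byte big-endian length, then raw UTF-8.
	static void writeUTF(DataOutputStream out, String s) throws IOException {
		byte[] bytes = s.getBytes(StandardCharsets.UTF_8);
		out.writeInt(bytes.length);
		out.write(bytes);
	}

	static String readUTF(DataInputStream in) throws IOException {
		byte[] data = new byte[in.readInt()];
		in.readFully(data);
		return new String(data, StandardCharsets.UTF_8);
	}

	public static void main(String[] args) throws IOException {
		ByteArrayOutputStream buf = new ByteArrayOutputStream();
		writeUTF(new DataOutputStream(buf), "città");
		String back = readUTF(new DataInputStream(new ByteArrayInputStream(buf.toByteArray())));
		System.out.println(back); // città
	}
}
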
@ -26,148 +26,148 @@ public class DataInputOutputImpl implements DataInputOutput {
}

@Override
public void readFully(byte @NotNull [] bytes) {
public void readFully(byte @NotNull [] bytes) throws IOException {
in.readFully(bytes);
}

@Override
public void readFully(byte @NotNull [] bytes, int i, int i1) {
public void readFully(byte @NotNull [] bytes, int i, int i1) throws IOException {
in.readFully(bytes, i, i1);
}

@Override
public int skipBytes(int i) {
public int skipBytes(int i) throws IOException {
return in.skipBytes(i);
}

@Override
public boolean readBoolean() {
public boolean readBoolean() throws IOException {
return in.readBoolean();
}

@Override
public byte readByte() {
public byte readByte() throws IOException {
return in.readByte();
}

@Override
public int readUnsignedByte() {
public int readUnsignedByte() throws IOException {
return in.readUnsignedByte();
}

@Override
public short readShort() {
public short readShort() throws IOException {
return in.readShort();
}

@Override
public int readUnsignedShort() {
public int readUnsignedShort() throws IOException {
return in.readUnsignedShort();
}

@Override
public char readChar() {
public char readChar() throws IOException {
return in.readChar();
}

@Override
public int readInt() {
public int readInt() throws IOException {
return in.readInt();
}

@Override
public long readLong() {
public long readLong() throws IOException {
return in.readLong();
}

@Override
public float readFloat() {
public float readFloat() throws IOException {
return in.readFloat();
}

@Override
public double readDouble() {
public double readDouble() throws IOException {
return in.readDouble();
}

@Override
public String readLine() {
public String readLine() throws IOException {
return in.readLine();
}

@NotNull
@Override
public String readUTF() {
public String readUTF() throws IOException {
return in.readUTF();
}

@Override
public void write(int i) {
public void write(int i) throws IOException {
out.write(i);
}

@Override
public void write(byte @NotNull [] bytes) {
public void write(byte @NotNull [] bytes) throws IOException {
out.write(bytes);
}

@Override
public void write(byte @NotNull [] bytes, int i, int i1) {
public void write(byte @NotNull [] bytes, int i, int i1) throws IOException {
out.write(bytes, i, i1);
}

@Override
public void writeBoolean(boolean b) {
public void writeBoolean(boolean b) throws IOException {
out.writeBoolean(b);
}

@Override
public void writeByte(int i) {
public void writeByte(int i) throws IOException {
out.writeByte(i);
}

@Override
public void writeShort(int i) {
public void writeShort(int i) throws IOException {
out.writeShort(i);
}

@Override
public void writeChar(int i) {
public void writeChar(int i) throws IOException {
out.writeChar(i);
}

@Override
public void writeInt(int i) {
public void writeInt(int i) throws IOException {
out.writeInt(i);
}

@Override
public void writeLong(long l) {
public void writeLong(long l) throws IOException {
out.writeLong(l);
}

@Override
public void writeFloat(float v) {
public void writeFloat(float v) throws IOException {
out.writeFloat(v);
}

@Override
public void writeDouble(double v) {
public void writeDouble(double v) throws IOException {
out.writeDouble(v);
}

@Override
public void writeBytes(@NotNull String s) {
public void writeBytes(@NotNull String s) throws IOException {
out.writeBytes(s);
}

@Override
public void writeChars(@NotNull String s) {
public void writeChars(@NotNull String s) throws IOException {
out.writeChars(s);
}

@Override
public void writeUTF(@NotNull String s) {
public void writeUTF(@NotNull String s) throws IOException {
out.writeUTF(s);
}
}

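DataInputOutputImpl is pure delegation, so the rule is the same as in the earlier hunks: DataInput and DataOutput declare IOException on every method, and a forwarding method that no longer catches it must re-declare it. A sketch of that shape (a hypothetical minimal wrapper, not the project class):

import java.io.DataInput;
import java.io.IOException;

// Delegation wrappers must mirror the delegate's checked exceptions: each
// forwarding method re-declares IOException rather than swallowing or wrapping.
class ForwardingDataInput {
	private final DataInput in;

	ForwardingDataInput(DataInput in) { this.in = in; }

	public int readInt() throws IOException {
		return in.readInt();
	}

	public String readUTF() throws IOException {
		return in.readUTF();
	}
}
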
@ -25,79 +25,79 @@ public class DataInputOutputStream extends DataOutputStream implements DataInput
}

@Override
public void readFully(byte @NotNull [] bytes) {
public void readFully(byte @NotNull [] bytes) throws IOException {
in.readFully(bytes);
}

@Override
public void readFully(byte @NotNull [] bytes, int i, int i1) {
public void readFully(byte @NotNull [] bytes, int i, int i1) throws IOException {
in.readFully(bytes, i, i1);
}

@Override
public int skipBytes(int i) {
public int skipBytes(int i) throws IOException {
return in.skipBytes(i);
}

@Override
public boolean readBoolean() {
public boolean readBoolean() throws IOException {
return in.readBoolean();
}

@Override
public byte readByte() {
public byte readByte() throws IOException {
return in.readByte();
}

@Override
public int readUnsignedByte() {
public int readUnsignedByte() throws IOException {
return in.readUnsignedByte();
}

@Override
public short readShort() {
public short readShort() throws IOException {
return in.readShort();
}

@Override
public int readUnsignedShort() {
public int readUnsignedShort() throws IOException {
return in.readUnsignedShort();
}

@Override
public char readChar() {
public char readChar() throws IOException {
return in.readChar();
}

@Override
public int readInt() {
public int readInt() throws IOException {
return in.readInt();
}

@Override
public long readLong() {
public long readLong() throws IOException {
return in.readLong();
}

@Override
public float readFloat() {
public float readFloat() throws IOException {
return in.readFloat();
}

@Override
public double readDouble() {
public double readDouble() throws IOException {
return in.readDouble();
}

@Deprecated
@Override
public String readLine() {
public String readLine() throws IOException {
return in.readLine();
}

@NotNull
@Override
public String readUTF() {
public String readUTF() throws IOException {
return in.readUTF();
}
}