Partial server implementation

parent 2022495dda
commit ed37a769e2

pom.xml (106 lines changed)
@@ -225,6 +225,74 @@
 			<groupId>io.projectreactor</groupId>
 			<artifactId>reactor-test</artifactId>
 		</dependency>
+		<dependency>
+			<groupId>io.projectreactor.netty</groupId>
+			<artifactId>reactor-netty-core</artifactId>
+		</dependency>
+		<dependency>
+			<groupId>io.netty.incubator</groupId>
+			<artifactId>netty-incubator-codec-native-quic</artifactId>
+			<version>0.0.25.Final</version>
+			<classifier>linux-x86_64</classifier>
+			<exclusions>
+				<exclusion>
+					<groupId>io.netty</groupId>
+					<artifactId>netty-common</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>io.netty</groupId>
+					<artifactId>netty-codec</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>io.netty</groupId>
+					<artifactId>netty-handler</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>io.netty</groupId>
+					<artifactId>netty-transport</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>io.netty</groupId>
+					<artifactId>netty-buffer</artifactId>
+				</exclusion>
+			</exclusions>
+		</dependency>
+		<dependency>
+			<groupId>io.projectreactor.netty.incubator</groupId>
+			<artifactId>reactor-netty-incubator-quic</artifactId>
+			<version>0.0.4</version>
+			<exclusions>
+				<exclusion>
+					<groupId>io.netty</groupId>
+					<artifactId>reactor-netty-core</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>io.netty</groupId>
+					<artifactId>netty-common</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>io.netty</groupId>
+					<artifactId>netty-codec</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>io.netty</groupId>
+					<artifactId>netty-handler</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>io.netty</groupId>
+					<artifactId>netty-transport</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>io.netty</groupId>
+					<artifactId>netty-buffer</artifactId>
+				</exclusion>
+			</exclusions>
+		</dependency>
+		<dependency>
+			<groupId>org.bouncycastle</groupId>
+			<artifactId>bcpkix-jdk15on</artifactId>
+			<version>1.69</version>
+		</dependency>
 		<dependency>
 			<groupId>org.novasearch</groupId>
 			<artifactId>lucene-relevance</artifactId>
@@ -401,22 +469,6 @@
 			<version>22.0.0</version>
 			<scope>compile</scope>
 		</dependency>
-		<dependency>
-			<groupId>io.projectreactor</groupId>
-			<artifactId>reactor-core</artifactId>
-			<version>3.4.13</version>
-		</dependency>
-		<dependency>
-			<groupId>io.projectreactor</groupId>
-			<artifactId>reactor-tools</artifactId>
-			<version>3.4.13</version>
-		</dependency>
-		<dependency>
-			<groupId>io.projectreactor</groupId>
-			<artifactId>reactor-test</artifactId>
-			<version>3.4.13</version>
-			<scope>test</scope>
-		</dependency>
 		<dependency>
 			<groupId>org.novasearch</groupId>
 			<artifactId>lucene-relevance</artifactId>
@@ -436,7 +488,7 @@
 		<dependency>
 			<groupId>it.cavallium</groupId>
 			<artifactId>data-generator-runtime</artifactId>
-			<version>1.0.43</version>
+			<version>1.0.46</version>
 		</dependency>
 		<dependency>
 			<groupId>io.micrometer</groupId>
@@ -450,6 +502,13 @@
 			<version>${micrometer.version}</version>
 			<optional>true</optional>
 		</dependency>
+		<dependency>
+			<groupId>io.projectreactor</groupId>
+			<artifactId>reactor-bom</artifactId>
+			<version>2020.0.16</version>
+			<type>pom</type>
+			<scope>import</scope>
+		</dependency>
 	</dependencies>
 </dependencyManagement>
 <build>
@@ -519,7 +578,7 @@
 	<plugin>
 		<groupId>it.cavallium</groupId>
 		<artifactId>data-generator</artifactId>
-		<version>0.9.96</version>
+		<version>0.9.102</version>
 		<executions>
 			<execution>
 				<id>generate-lucene-query-sources</id>
@@ -532,6 +591,17 @@
 				<configPath>${basedir}/src/main/data-generator/lucene-query.yaml</configPath>
 			</configuration>
 		</execution>
+		<execution>
+			<id>generate-rpc-sources</id>
+			<phase>generate-sources</phase>
+			<goals>
+				<goal>run</goal>
+			</goals>
+			<configuration>
+				<basePackageName>it.cavallium.dbengine.rpc</basePackageName>
+				<configPath>${basedir}/src/main/data-generator/quic-rpc.yaml</configPath>
+			</configuration>
+		</execution>
 	</executions>
 </plugin>
 <plugin>
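The bcpkix-jdk15on dependency is presumably needed by Netty's SelfSignedCertificate helper, which falls back to BouncyCastle to generate throwaway key pairs. As a rough sketch (an assumption, not code from this commit), a server-side QUIC TLS context could be built like this, matching the "db/0.9" ALPN string the client uses later in this diff:

// Sketch (assumption): self-signed QUIC server TLS context for testing.
import io.netty.handler.ssl.util.SelfSignedCertificate;
import io.netty.incubator.codec.quic.QuicSslContext;
import io.netty.incubator.codec.quic.QuicSslContextBuilder;

class QuicServerSslSketch {
	static QuicSslContext selfSignedServerContext() throws Exception {
		SelfSignedCertificate cert = new SelfSignedCertificate(); // generated via BouncyCastle when available
		return QuicSslContextBuilder
				.forServer(cert.privateKey(), null, cert.certificate())
				.applicationProtocols("db/0.9") // must match the client's ALPN
				.build();
	}
}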
src/main/data-generator/quic-rpc.yaml (new file, 243 lines)
@@ -0,0 +1,243 @@
# A type that starts with "-" is an optional type, otherwise it can't be null
currentVersion: "0.0.0"
interfacesData:
  StandardFSDirectoryOptions:
    extendInterfaces: [PathDirectoryOptions]
  PathDirectoryOptions:
    extendInterfaces: [LuceneDirectoryOptions]
# versions must have only numbers, lowercase letters, dots, dashes. Maximum: 99.999.9999
versions:
  0.0.0:
    details:
      changelog: "First version"
    superTypes:
      ServerBoundRequest: [
        GetDatabase,
        GetLuceneIndex,
        Disconnect,
        GetSingleton,
        SingletonGet,
        SingletonSet,
        SingletonUpdateInit,
        SingletonUpdateEnd
      ]
      ClientBoundResponse: [
        Empty,
        GeneratedEntityId,
        Binary,
        BinaryOptional
      ]
      ClientBoundRequest: [
        SingletonUpdateOldData
      ]
      ServerBoundResponse: [
        Empty
      ]
      LuceneDirectoryOptions: [
        ByteBuffersDirectory,
        MemoryMappedFSDirectory,
        NIOFSDirectory,
        DirectIOFSDirectory,
        RocksDBStandaloneDirectory,
        RocksDBSharedDirectory,
        NRTCachingDirectory
      ]
      StandardFSDirectoryOptions: [
        MemoryMappedFSDirectory,
        NIOFSDirectory
      ]
      PathDirectoryOptions: [
        MemoryMappedFSDirectory,
        NIOFSDirectory,
        RocksDBStandaloneDirectory,
        StandardFSDirectoryOptions
      ]
    customTypes:
      Path:
        javaClass: java.nio.file.Path
        serializer: it.cavallium.dbengine.database.remote.PathSerializer
      Compression:
        javaClass: it.cavallium.dbengine.client.Compression
        serializer: it.cavallium.dbengine.database.remote.CompressionSerializer
      TextFieldsAnalyzer:
        javaClass: it.cavallium.dbengine.lucene.analyzer.TextFieldsAnalyzer
        serializer: it.cavallium.dbengine.database.remote.TextFieldsAnalyzerSerializer
      TextFieldsSimilarity:
        javaClass: it.cavallium.dbengine.lucene.analyzer.TextFieldsSimilarity
        serializer: it.cavallium.dbengine.database.remote.TextFieldsSimilaritySerializer
      Duration:
        javaClass: java.time.Duration
        serializer: it.cavallium.dbengine.database.remote.DurationSerializer
      RocksDB:
        javaClass: org.rocksdb.RocksDB
        serializer: it.cavallium.dbengine.database.remote.RocksDBSerializer
      ColumnFamilyHandle:
        javaClass: org.rocksdb.ColumnFamilyHandle
        serializer: it.cavallium.dbengine.database.remote.ColumnFamilyHandleSerializer
      LuceneHacks:
        javaClass: it.cavallium.dbengine.lucene.LuceneHacks
        serializer: it.cavallium.dbengine.database.remote.LuceneHacksSerializer
      UpdateReturnMode:
        javaClass: it.cavallium.dbengine.database.UpdateReturnMode
        serializer: it.cavallium.dbengine.database.remote.UpdateReturnModeSerializer
      LLSnapshot:
        javaClass: it.cavallium.dbengine.database.LLSnapshot
        serializer: it.cavallium.dbengine.database.remote.LLSnapshotSerializer

      StringMap:
        javaClass: java.util.Map<java.lang.String, java.lang.String>
        serializer: it.cavallium.dbengine.database.remote.StringMapSerializer
      String2FieldAnalyzerMap:
        javaClass: java.util.Map<java.lang.String, it.cavallium.dbengine.lucene.analyzer.TextFieldsAnalyzer>
        serializer: it.cavallium.dbengine.database.remote.String2FieldAnalyzerMapSerializer
      String2ColumnFamilyHandleMap:
        javaClass: java.util.Map<java.lang.String, org.rocksdb.ColumnFamilyHandle>
        serializer: it.cavallium.dbengine.database.remote.String2ColumnFamilyHandleMapSerializer
    classes:

      # Server-bound requests

      BoxedServerBoundRequest:
        data:
          val: ServerBoundRequest
      GetDatabase:
        data:
          name: String
          columns: Column[]
          databaseOptions: DatabaseOptions
      GetLuceneIndex:
        data:
          clusterName: -String
          shardName: -String
          instancesCount: int
          indicizerAnalyzers: IndicizerAnalyzers
          indicizerSimilarities: IndicizerSimilarities
      Disconnect: { data: { } }
      GetSingleton:
        data:
          databaseId: long
          singletonListColumnName: byte[]
          name: byte[]
          defaultValue: byte[]
      SingletonGet:
        data:
          singletonId: long
          snapshot: -LLSnapshot
      SingletonSet:
        data:
          singletonId: long
          value: byte[]
      SingletonUpdateInit:
        data:
          singletonId: long
          updateReturnMode: UpdateReturnMode
      SingletonUpdateEnd:
        data:
          exist: boolean
          value: byte[]

      # Client-bound responses

      BoxedClientBoundResponse:
        data:
          val: ClientBoundResponse
      GeneratedEntityId:
        data:
          id: long

      # Client-bound requests

      BoxedClientBoundRequest:
        data:
          val: ClientBoundRequest
      SingletonUpdateOldData:
        data:
          exist: boolean
          oldValue: byte[]

      # Server-bound responses

      BoxedServerBoundResponse:
        data:
          val: ServerBoundResponse

      # Data

      BinaryOptional:
        data:
          val: -Binary
      Binary:
        data:
          val: byte[]
      Empty: { data: { } }
      Column:
        data:
          name: String
      DatabaseOptions:
        data:
          volumes: DatabaseVolume[]
          extraFlags: StringMap
          absoluteConsistency: boolean
          lowMemory: boolean
          useDirectIO: boolean
          allowMemoryMapping: boolean
          allowNettyDirect: boolean
          optimistic: boolean
          maxOpenFiles: -int
          memtableMemoryBudgetBytes: -long
          blockCache: -long
          setCacheIndexAndFilterBlocks: -boolean
      DatabaseVolume:
        data:
          volumePath: Path
          targetSizeBytes: long
          compression: Compression
      IndicizerAnalyzers:
        data:
          defaultAnalyzer: TextFieldsAnalyzer
          fieldAnalyzer: String2FieldAnalyzerMap
          indicizerSimilarities: IndicizerSimilarities
          luceneOptions: LuceneOptions
          luceneHacks: LuceneHacks
      IndicizerSimilarities:
        data:
          defaultSimilarity: TextFieldsSimilarity
          fieldSimilarity: String2FieldAnalyzerMap
      LuceneOptions:
        data:
          extraFlags: StringMap
          queryRefreshDebounceTime: Duration
          commitDebounceTime: Duration
          lowMemory: boolean
          directoryOptions: LuceneDirectoryOptions
          indexWriterBufferSize: long
          applyAllDeletes: boolean
          writeAllDeletes: boolean
          allowNonVolatileCollection: boolean
          maxInMemoryResultEntries: int
      ByteBuffersDirectory: { data: { } }
      MemoryMappedFSDirectory:
        data:
          managedPath: Path
      NIOFSDirectory:
        data:
          managedPath: Path
      DirectIOFSDirectory:
        data:
          delegate: StandardFSDirectoryOptions
          mergeBufferSize: -int
          minBytesDirect: -long
      RocksDBStandaloneDirectory:
        data:
          managedPath: Path
          blockSize: int
      RocksDBSharedDirectory:
        data:
          db: RocksDB
          handles: String2ColumnFamilyHandleMap
          blockSize: int
      NRTCachingDirectory:
        data:
          delegate: LuceneDirectoryOptions
          maxMergeSizeBytes: long
          maxCachedBytes: long
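Each entry under classes: is turned into a Java data class in the it.cavallium.dbengine.rpc.current.data package by the data-generator execution added above. The generated sources are not part of this diff; as an illustration only (the exact generated shape is an assumption), SingletonSet roughly corresponds to:

// Hypothetical shape of a generated class: byte[] fields map to fastutil ByteList
// (the client code below constructs them with ByteList.of(...)), and the superTypes
// section makes it a ServerBoundRequest.
package it.cavallium.dbengine.rpc.current.data;

import it.unimi.dsi.fastutil.bytes.ByteList;

public record SingletonSet(long singletonId, ByteList value) implements ServerBoundRequest {}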
@@ -1,6 +1,6 @@
package it.cavallium.dbengine.client;

-import it.cavallium.dbengine.database.Column;
+import it.cavallium.dbengine.rpc.current.data.Column;
import it.unimi.dsi.fastutil.bytes.ByteList;
import org.jetbrains.annotations.Nullable;

@@ -0,0 +1,39 @@
package it.cavallium.dbengine.client;

import com.google.common.collect.Multimap;
import com.google.common.collect.Multimaps;
import java.net.SocketAddress;
import java.nio.file.Path;
import java.util.Map;
import org.jetbrains.annotations.Nullable;

public sealed interface ConnectionSettings {

	sealed interface PrimaryConnectionSettings extends ConnectionSettings {}

	sealed interface SubConnectionSettings extends ConnectionSettings {}

	record MemoryConnectionSettings() implements PrimaryConnectionSettings, SubConnectionSettings {}

	record LocalConnectionSettings(Path dataPath) implements PrimaryConnectionSettings, SubConnectionSettings {}

	record QuicConnectionSettings(SocketAddress bindAddress, SocketAddress remoteAddress) implements
			PrimaryConnectionSettings, SubConnectionSettings {}

	record MultiConnectionSettings(Map<ConnectionPart, SubConnectionSettings> parts) implements
			PrimaryConnectionSettings {

		public Multimap<SubConnectionSettings, ConnectionPart> getConnections() {
			Multimap<SubConnectionSettings, ConnectionPart> result = com.google.common.collect.HashMultimap.create();
			parts.forEach((connectionPart, subConnectionSettings) -> result.put(subConnectionSettings, connectionPart));
			return Multimaps.unmodifiableMultimap(result);
		}
	}

	sealed interface ConnectionPart {

		record ConnectionPartLucene(@Nullable String name) implements ConnectionPart {}

		record ConnectionPartRocksDB(@Nullable String name) implements ConnectionPart {}
	}
}
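A hypothetical wiring example (names, paths and ports are made up, not taken from this commit): one local RocksDB connection as the default part plus a remote QUIC connection serving a named Lucene shard.

import com.google.common.collect.Multimap;
import it.cavallium.dbengine.client.ConnectionSettings.ConnectionPart;
import it.cavallium.dbengine.client.ConnectionSettings.ConnectionPart.ConnectionPartLucene;
import it.cavallium.dbengine.client.ConnectionSettings.ConnectionPart.ConnectionPartRocksDB;
import it.cavallium.dbengine.client.ConnectionSettings.LocalConnectionSettings;
import it.cavallium.dbengine.client.ConnectionSettings.MultiConnectionSettings;
import it.cavallium.dbengine.client.ConnectionSettings.QuicConnectionSettings;
import it.cavallium.dbengine.client.ConnectionSettings.SubConnectionSettings;
import java.net.InetSocketAddress;
import java.nio.file.Path;
import java.util.Map;

class ConnectionSettingsExample {
	public static void main(String[] args) {
		var settings = new MultiConnectionSettings(Map.of(
				new ConnectionPartRocksDB(null),      // default RocksDB part -> local disk
				new LocalConnectionSettings(Path.of("/var/lib/dbengine")),
				new ConnectionPartLucene("products"), // "products" Lucene shard -> remote QUIC server
				new QuicConnectionSettings(new InetSocketAddress(0), new InetSocketAddress("db.example.org", 8850))
		));
		// group the parts by the sub-connection that serves them
		Multimap<SubConnectionSettings, ConnectionPart> byConnection = settings.getConnections();
		byConnection.asMap().forEach((conn, parts) -> System.out.println(conn + " -> " + parts));
	}
}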
@@ -1,13 +0,0 @@
-package it.cavallium.dbengine.client;
-
-import io.soabase.recordbuilder.core.RecordBuilder;
-import java.util.List;
-import java.util.Map;
-import org.jetbrains.annotations.Nullable;
-
-@RecordBuilder
-public record DatabaseOptions(List<DatabaseVolume> volumes, Map<String, String> extraFlags, boolean absoluteConsistency,
-		boolean lowMemory, boolean inMemory, boolean useDirectIO, boolean allowMemoryMapping,
-		boolean allowNettyDirect, boolean optimistic, int maxOpenFiles,
-		@Nullable Number memtableMemoryBudgetBytes, @Nullable Number blockCache,
-		@Nullable Boolean setCacheIndexAndFilterBlocks) {}
@@ -1,14 +0,0 @@
-package it.cavallium.dbengine.client;
-
-import io.soabase.recordbuilder.core.RecordBuilder;
-import java.nio.file.Path;
-
-/**
- * A database volume is a directory in which the data of the database is stored.
- *
- * Volume path can be relative: if it's relative it will be relative to the default data directory
- *
- * Target size can be exceeded if all the volumes are full
- */
-@RecordBuilder
-public record DatabaseVolume(Path volumePath, long targetSizeBytes, Compression compression) {}
@@ -1,10 +1,13 @@
package it.cavallium.dbengine.database;

+import it.cavallium.dbengine.rpc.current.data.Column;
import java.nio.charset.StandardCharsets;
import java.util.Objects;
import java.util.StringJoiner;

-public record Column(String name) {
+public class ColumnUtils {
+
+	private ColumnUtils() {
+
+	}

	public static Column dictionary(String name) {
		return new Column("hash_map_" + name);
@@ -2,11 +2,12 @@ package it.cavallium.dbengine.database;

import io.micrometer.core.instrument.MeterRegistry;
import io.net5.buffer.api.BufferAllocator;
-import it.cavallium.dbengine.client.DatabaseOptions;
import it.cavallium.dbengine.client.IndicizerAnalyzers;
import it.cavallium.dbengine.client.IndicizerSimilarities;
import it.cavallium.dbengine.client.LuceneOptions;
import it.cavallium.dbengine.lucene.LuceneHacks;
+import it.cavallium.dbengine.rpc.current.data.Column;
+import it.cavallium.dbengine.rpc.current.data.DatabaseOptions;
import java.util.List;
import org.jetbrains.annotations.Nullable;
import reactor.core.publisher.Mono;
@@ -7,6 +7,7 @@ import io.net5.buffer.api.BufferAllocator;
import it.cavallium.dbengine.client.MemoryStats;
import it.cavallium.dbengine.database.collections.DatabaseInt;
import it.cavallium.dbengine.database.collections.DatabaseLong;
+import it.cavallium.dbengine.rpc.current.data.Column;
import java.nio.charset.StandardCharsets;
import reactor.core.publisher.Mono;

@@ -18,16 +19,16 @@ public interface LLKeyValueDatabase extends LLSnapshottable, LLKeyValueDatabaseS

	@Deprecated
	default Mono<? extends LLDictionary> getDeprecatedSet(String name, UpdateMode updateMode) {
-		return getDictionary(Column.deprecatedSet(name).name().getBytes(StandardCharsets.US_ASCII), updateMode);
+		return getDictionary(ColumnUtils.deprecatedSet(name).name().getBytes(StandardCharsets.US_ASCII), updateMode);
	}

	default Mono<? extends LLDictionary> getDictionary(String name, UpdateMode updateMode) {
-		return getDictionary(Column.dictionary(name).name().getBytes(StandardCharsets.US_ASCII), updateMode);
+		return getDictionary(ColumnUtils.dictionary(name).name().getBytes(StandardCharsets.US_ASCII), updateMode);
	}

	default Mono<DatabaseInt> getInteger(String singletonListName, String name, int defaultValue) {
		return this
-				.getSingleton(Column.special(singletonListName).name().getBytes(StandardCharsets.US_ASCII),
+				.getSingleton(ColumnUtils.special(singletonListName).name().getBytes(StandardCharsets.US_ASCII),
						name.getBytes(StandardCharsets.US_ASCII),
						Ints.toByteArray(defaultValue)
				)
@@ -36,7 +37,7 @@ public interface LLKeyValueDatabase extends LLSnapshottable, LLKeyValueDatabaseS

	default Mono<DatabaseLong> getLong(String singletonListName, String name, long defaultValue) {
		return this
-				.getSingleton(Column.special(singletonListName).name().getBytes(StandardCharsets.US_ASCII),
+				.getSingleton(ColumnUtils.special(singletonListName).name().getBytes(StandardCharsets.US_ASCII),
						name.getBytes(StandardCharsets.US_ASCII),
						Longs.toByteArray(defaultValue)
				)
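For context, a caller goes through these defaults rather than building column keys by hand; a small assumed example ("counters" and "hits" are made-up names):

// db is any LLKeyValueDatabase implementation.
Mono<DatabaseLong> hits = db.getLong("counters", "hits", 0L);
// getLong resolves the singleton list column via ColumnUtils.special("counters")
// and encodes both names as US-ASCII bytes before calling getSingleton(...).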
@@ -0,0 +1,143 @@
package it.cavallium.dbengine.database;

import com.google.common.collect.Multimap;
import io.micrometer.core.instrument.MeterRegistry;
import io.net5.buffer.api.BufferAllocator;
import it.cavallium.dbengine.client.ConnectionSettings.ConnectionPart;
import it.cavallium.dbengine.client.ConnectionSettings.ConnectionPart.ConnectionPartLucene;
import it.cavallium.dbengine.client.ConnectionSettings.ConnectionPart.ConnectionPartRocksDB;
import it.cavallium.dbengine.client.IndicizerAnalyzers;
import it.cavallium.dbengine.client.IndicizerSimilarities;
import it.cavallium.dbengine.client.LuceneOptions;
import it.cavallium.dbengine.lucene.LuceneHacks;
import it.cavallium.dbengine.rpc.current.data.Column;
import it.cavallium.dbengine.rpc.current.data.DatabaseOptions;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Set;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.jetbrains.annotations.Nullable;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

public class LLMultiDatabaseConnection implements LLDatabaseConnection {

	private static final Logger LOG = LogManager.getLogger(LLMultiDatabaseConnection.class);
	private final Map<String, LLDatabaseConnection> databaseShardConnections = new HashMap<>();
	private final Map<String, LLDatabaseConnection> luceneShardConnections = new HashMap<>();
	private final Set<LLDatabaseConnection> allConnections = new HashSet<>();
	private final LLDatabaseConnection defaultDatabaseConnection;
	private final LLDatabaseConnection defaultLuceneConnection;
	private final LLDatabaseConnection anyConnection;

	public LLMultiDatabaseConnection(Multimap<LLDatabaseConnection, ConnectionPart> subConnections) {
		LLDatabaseConnection defaultDatabaseConnection = null;
		LLDatabaseConnection defaultLuceneConnection = null;
		for (Entry<LLDatabaseConnection, ConnectionPart> entry : subConnections.entries()) {
			var subConnectionSettings = entry.getKey();
			var connectionPart = entry.getValue();
			if (connectionPart instanceof ConnectionPartLucene connectionPartLucene) {
				if (connectionPartLucene.name() == null) {
					defaultLuceneConnection = subConnectionSettings;
				} else {
					luceneShardConnections.put(connectionPartLucene.name(), subConnectionSettings);
				}
			} else if (connectionPart instanceof ConnectionPartRocksDB connectionPartRocksDB) {
				if (connectionPartRocksDB.name() == null) {
					defaultDatabaseConnection = subConnectionSettings;
				} else {
					databaseShardConnections.put(connectionPartRocksDB.name(), subConnectionSettings);
				}
			} else {
				throw new IllegalArgumentException("Unsupported connection part: " + connectionPart);
			}
		}
		this.defaultDatabaseConnection = defaultDatabaseConnection;
		this.defaultLuceneConnection = defaultLuceneConnection;
		if (defaultDatabaseConnection != null) {
			anyConnection = defaultDatabaseConnection;
		} else if (defaultLuceneConnection != null) {
			anyConnection = defaultLuceneConnection;
		} else {
			anyConnection = subConnections.keySet().stream().findAny().orElse(null);
		}
		if (defaultDatabaseConnection != null) {
			allConnections.add(defaultDatabaseConnection);
		}
		if (defaultLuceneConnection != null) {
			allConnections.add(defaultLuceneConnection);
		}
		allConnections.addAll(luceneShardConnections.values());
		allConnections.addAll(databaseShardConnections.values());
	}

	@Override
	public BufferAllocator getAllocator() {
		return anyConnection.getAllocator();
	}

	@Override
	public MeterRegistry getMeterRegistry() {
		return anyConnection.getMeterRegistry();
	}

	@Override
	public Mono<? extends LLDatabaseConnection> connect() {
		return Flux
				.fromIterable(allConnections)
				.flatMap((LLDatabaseConnection databaseConnection) -> databaseConnection
						.connect()
						.doOnError(ex -> LOG.error("Failed to open connection", ex))
				)
				.then()
				.thenReturn(this);
	}

	@Override
	public Mono<? extends LLKeyValueDatabase> getDatabase(String name,
			List<Column> columns,
			DatabaseOptions databaseOptions) {
		var conn = databaseShardConnections.getOrDefault(name, defaultDatabaseConnection);
		Objects.requireNonNull(conn, "Null connection");
		return conn.getDatabase(name, columns, databaseOptions);
	}

	@Override
	public Mono<? extends LLLuceneIndex> getLuceneIndex(@Nullable String clusterName,
			@Nullable String shardName,
			int instancesCount,
			IndicizerAnalyzers indicizerAnalyzers,
			IndicizerSimilarities indicizerSimilarities,
			LuceneOptions luceneOptions,
			@Nullable LuceneHacks luceneHacks) {
		String indexShardName = Objects.requireNonNullElse(shardName, clusterName);
		Objects.requireNonNull(indexShardName, "ClusterName and ShardName are both null");
		LLDatabaseConnection conn = luceneShardConnections.getOrDefault(indexShardName, defaultLuceneConnection);
		Objects.requireNonNull(conn, "Null connection");
		return conn.getLuceneIndex(clusterName,
				shardName,
				instancesCount,
				indicizerAnalyzers,
				indicizerSimilarities,
				luceneOptions,
				luceneHacks
		);
	}

	@Override
	public Mono<Void> disconnect() {
		return Flux
				.fromIterable(allConnections)
				.flatMap(databaseConnection -> databaseConnection
						.disconnect()
						.doOnError(ex -> LOG.error("Failed to close connection", ex))
						.onErrorResume(ex -> Mono.empty())
				)
				.then();
	}
}
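An assumed usage sketch; defaultConn and usersShardConn stand for any already-built LLDatabaseConnection instances (local, memory or QUIC):

Multimap<LLDatabaseConnection, ConnectionPart> parts = HashMultimap.create();
parts.put(defaultConn, new ConnectionPartRocksDB(null));       // default database connection
parts.put(defaultConn, new ConnectionPartLucene(null));        // default lucene connection
parts.put(usersShardConn, new ConnectionPartRocksDB("users")); // dedicated "users" shard
var multi = new LLMultiDatabaseConnection(parts);
multi.connect().block(); // connects every distinct sub-connection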
@@ -11,9 +11,9 @@ import io.net5.buffer.api.Buffer;
import io.net5.buffer.api.BufferAllocator;
import io.net5.buffer.api.WritableComponent;
import io.net5.util.internal.PlatformDependent;
-import it.cavallium.dbengine.client.DatabaseOptions;
import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.database.RepeatedElementList;
+import it.cavallium.dbengine.rpc.current.data.DatabaseOptions;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
@@ -2,15 +2,15 @@ package it.cavallium.dbengine.database.disk;

import io.micrometer.core.instrument.MeterRegistry;
import io.net5.buffer.api.BufferAllocator;
-import it.cavallium.dbengine.client.DatabaseOptions;
import it.cavallium.dbengine.client.IndicizerAnalyzers;
import it.cavallium.dbengine.client.IndicizerSimilarities;
import it.cavallium.dbengine.client.LuceneOptions;
-import it.cavallium.dbengine.database.Column;
import it.cavallium.dbengine.database.LLDatabaseConnection;
import it.cavallium.dbengine.database.LLLuceneIndex;
import it.cavallium.dbengine.lucene.LuceneHacks;
import it.cavallium.dbengine.netty.JMXNettyMonitoringManager;
+import it.cavallium.dbengine.rpc.current.data.Column;
+import it.cavallium.dbengine.rpc.current.data.DatabaseOptions;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.LinkedList;
@@ -32,12 +32,17 @@ public class LLLocalDatabaseConnection implements LLDatabaseConnection {
	private final BufferAllocator allocator;
	private final MeterRegistry meterRegistry;
	private final Path basePath;
+	private final boolean inMemory;
	private final AtomicReference<LLTempLMDBEnv> env = new AtomicReference<>();

-	public LLLocalDatabaseConnection(BufferAllocator allocator, MeterRegistry meterRegistry, Path basePath) {
+	public LLLocalDatabaseConnection(BufferAllocator allocator,
+			MeterRegistry meterRegistry,
+			Path basePath,
+			boolean inMemory) {
		this.allocator = allocator;
		this.meterRegistry = meterRegistry;
		this.basePath = basePath;
+		this.inMemory = inMemory;
	}

	@Override
@@ -77,6 +82,7 @@ public class LLLocalDatabaseConnection implements LLDatabaseConnection {
				allocator,
				meterRegistry,
				name,
+				inMemory,
				basePath.resolve("database_" + name),
				columns,
				new LinkedList<>(),
@@ -14,8 +14,7 @@ import io.net5.buffer.api.Buffer;
import io.net5.buffer.api.BufferAllocator;
import io.net5.buffer.api.Send;
import it.cavallium.dbengine.client.BadBlock;
-import it.cavallium.dbengine.client.DatabaseOptions;
-import it.cavallium.dbengine.database.Column;
+import it.cavallium.dbengine.database.ColumnUtils;
import it.cavallium.dbengine.database.LLDelta;
import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.LLDictionaryResultType;
@@ -28,6 +27,7 @@ import it.cavallium.dbengine.database.UpdateMode;
import it.cavallium.dbengine.database.UpdateReturnMode;
import it.cavallium.dbengine.database.serialization.KVSerializationFunction;
import it.cavallium.dbengine.database.serialization.SerializationFunction;
+import it.cavallium.dbengine.rpc.current.data.DatabaseOptions;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
@@ -891,7 +891,7 @@ public class LLLocalDictionary implements LLDictionary {
						rocksIterator.value(DUMMY_WRITE_ONLY_BYTE_BUFFER);
						rocksIterator.status();
					} catch (RocksDBException ex) {
-						sink.next(new BadBlock(databaseName, Column.special(columnName), null, ex));
+						sink.next(new BadBlock(databaseName, ColumnUtils.special(columnName), null, ex));
					}
					rocksIterator.next();
				}
@@ -2,25 +2,21 @@ package it.cavallium.dbengine.database.disk;

import static io.net5.buffer.api.StandardAllocationTypes.OFF_HEAP;
import static it.cavallium.dbengine.database.LLUtils.MARKER_ROCKSDB;
-import static java.util.Objects.requireNonNullElse;

import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.core.instrument.Tag;
import io.micrometer.core.instrument.Timer;
import io.net5.buffer.api.BufferAllocator;
import io.net5.buffer.api.Resource;
import io.net5.buffer.api.Send;
import io.net5.util.internal.PlatformDependent;
-import it.cavallium.dbengine.client.DatabaseOptions;
-import it.cavallium.dbengine.client.DatabaseVolume;
import it.cavallium.dbengine.client.MemoryStats;
-import it.cavallium.dbengine.database.Column;
import it.cavallium.dbengine.database.LLEntry;
+import it.cavallium.dbengine.database.ColumnUtils;
import it.cavallium.dbengine.database.LLKeyValueDatabase;
import it.cavallium.dbengine.database.LLSnapshot;
import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.database.SafeCloseable;
import it.cavallium.dbengine.database.UpdateMode;
+import it.cavallium.dbengine.rpc.current.data.Column;
+import it.cavallium.dbengine.rpc.current.data.DatabaseOptions;
+import it.cavallium.dbengine.rpc.current.data.DatabaseVolume;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
@@ -37,7 +33,6 @@ import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
@@ -72,16 +67,9 @@ import org.rocksdb.TxnDBWritePolicy;
import org.rocksdb.WALRecoveryMode;
import org.rocksdb.WriteBufferManager;
import org.rocksdb.util.SizeUnit;
import reactor.core.publisher.Hooks;
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Scheduler;
import reactor.core.scheduler.Schedulers;
import reactor.util.function.Tuple2;
import reactor.util.function.Tuple3;
import reactor.util.function.Tuple4;
import reactor.util.function.Tuple5;
import reactor.util.function.Tuple6;
import reactor.util.function.Tuple7;

public class LLLocalKeyValueDatabase implements LLKeyValueDatabase {

@@ -118,6 +106,7 @@ public class LLLocalKeyValueDatabase implements LLKeyValueDatabase {
	public LLLocalKeyValueDatabase(BufferAllocator allocator,
			MeterRegistry meterRegistry,
			String name,
+			boolean inMemory,
			@Nullable Path path,
			List<Column> columns,
			List<ColumnFamilyHandle> handles,
@@ -183,7 +172,7 @@ public class LLLocalKeyValueDatabase implements LLKeyValueDatabase {
		);*/
		this.enableColumnsBug = "true".equals(databaseOptions.extraFlags().getOrDefault("enableColumnBug", "false"));

-		createIfNotExists(descriptors, rocksdbOptions, databaseOptions, dbPath, dbPathString);
+		createIfNotExists(descriptors, rocksdbOptions, inMemory, dbPath, dbPathString);

		while (true) {
			try {
@@ -217,7 +206,7 @@ public class LLLocalKeyValueDatabase implements LLKeyValueDatabase {
			}
		}
		this.handles = new HashMap<>();
-		if (enableColumnsBug && !databaseOptions.inMemory()) {
+		if (enableColumnsBug && !inMemory) {
			for (int i = 0; i < columns.size(); i++) {
				this.handles.put(columns.get(i), handles.get(i));
			}
@@ -379,7 +368,7 @@ public class LLLocalKeyValueDatabase implements LLKeyValueDatabase {
		Objects.requireNonNull(path.getFileName());
		List<DbPath> paths = convertPaths(databasesDirPath, path.getFileName(), databaseOptions.volumes());
		options.setDbPaths(paths);
-		options.setMaxOpenFiles(databaseOptions.maxOpenFiles());
+		options.setMaxOpenFiles(databaseOptions.maxOpenFiles().orElse(-1));
		final BlockBasedTableConfig tableOptions = new BlockBasedTableConfig();
		Cache blockCache;
		if (databaseOptions.lowMemory()) {
@@ -393,12 +382,12 @@ public class LLLocalKeyValueDatabase implements LLKeyValueDatabase {
					.setWalSizeLimitMB(0) // 16MB
					.setMaxTotalWalSize(0) // automatic
			;
-			blockCache = new ClockCache(requireNonNullElse(databaseOptions.blockCache(), 8L * SizeUnit.MB).longValue(), -1, true);
+			blockCache = new ClockCache(databaseOptions.blockCache().orElse(8L * SizeUnit.MB), -1, true);
			tableOptions
					.setIndexType(IndexType.kTwoLevelIndexSearch)
					.setPartitionFilters(true)
					.setBlockCache(blockCache)
-					.setCacheIndexAndFilterBlocks(requireNonNullElse(databaseOptions.setCacheIndexAndFilterBlocks(), true))
+					.setCacheIndexAndFilterBlocks(databaseOptions.setCacheIndexAndFilterBlocks().orElse(true))
					.setCacheIndexAndFilterBlocksWithHighPriority(true)
					.setPinTopLevelIndexAndFilter(true)
					.setPinL0FilterAndIndexBlocksInCache(true)
@@ -424,12 +413,12 @@ public class LLLocalKeyValueDatabase implements LLKeyValueDatabase {
					.setWalSizeLimitMB(1024) // 1024MB
					.setMaxTotalWalSize(2L * SizeUnit.GB) // 2GiB max wal directory size
			;
-			blockCache = new ClockCache(requireNonNullElse(databaseOptions.blockCache(), 512 * SizeUnit.MB).longValue(), -1, true);
+			blockCache = new ClockCache(databaseOptions.blockCache().orElse(512 * SizeUnit.MB), -1, true);
			tableOptions
					.setIndexType(IndexType.kTwoLevelIndexSearch)
					.setPartitionFilters(true)
					.setBlockCache(blockCache)
-					.setCacheIndexAndFilterBlocks(requireNonNullElse(databaseOptions.setCacheIndexAndFilterBlocks(), true))
+					.setCacheIndexAndFilterBlocks(databaseOptions.setCacheIndexAndFilterBlocks().orElse(true))
					.setCacheIndexAndFilterBlocksWithHighPriority(true)
					.setPinTopLevelIndexAndFilter(true)
					.setPinL0FilterAndIndexBlocksInCache(true)
@@ -451,10 +440,12 @@ public class LLLocalKeyValueDatabase implements LLKeyValueDatabase {

		options.setWriteBufferManager(new WriteBufferManager(256L * 1024L * 1024L, blockCache));

		//noinspection ConstantConditions
		if (databaseOptions.memtableMemoryBudgetBytes() != null) {
			// 16MiB/256MiB of ram will be used for level style compaction
-			options.optimizeLevelStyleCompaction(requireNonNullElse(databaseOptions.memtableMemoryBudgetBytes(),
-					databaseOptions.lowMemory() ? 16L * SizeUnit.MB : 128L * SizeUnit.MB).longValue());
+			options.optimizeLevelStyleCompaction(databaseOptions
+					.memtableMemoryBudgetBytes()
+					.orElse(databaseOptions.lowMemory() ? 16L * SizeUnit.MB : 128L * SizeUnit.MB));
		}

		if (!databaseOptions.volumes().isEmpty()) {
@@ -519,10 +510,10 @@ public class LLLocalKeyValueDatabase implements LLKeyValueDatabase {

	private void createIfNotExists(List<ColumnFamilyDescriptor> descriptors,
			Options options,
-			DatabaseOptions databaseOptions,
+			boolean inMemory,
			Path dbPath,
			String dbPathString) throws RocksDBException {
-		if (databaseOptions.inMemory()) {
+		if (inMemory) {
			return;
		}
		if (Files.notExists(dbPath)) {
@@ -575,7 +566,7 @@ public class LLLocalKeyValueDatabase implements LLKeyValueDatabase {
				allocator,
				getRocksDBColumn(db, getCfh(columnName)),
				name,
-				Column.toString(columnName),
+				ColumnUtils.toString(columnName),
				dbScheduler,
				(snapshot) -> snapshotsHandles.get(snapshot.getSequenceNumber()),
				updateMode,
@@ -595,7 +586,7 @@ public class LLLocalKeyValueDatabase implements LLKeyValueDatabase {
	}

	private ColumnFamilyHandle getCfh(byte[] columnName) throws RocksDBException {
-		ColumnFamilyHandle cfh = handles.get(Column.special(Column.toString(columnName)));
+		ColumnFamilyHandle cfh = handles.get(ColumnUtils.special(ColumnUtils.toString(columnName)));
		assert enableColumnsBug || Arrays.equals(cfh.getName(), columnName);
		return cfh;
	}
@@ -7,11 +7,11 @@ import io.net5.buffer.api.Buffer;
import io.net5.buffer.api.BufferAllocator;
import io.net5.buffer.api.MemoryManager;
import io.net5.buffer.api.Send;
-import it.cavallium.dbengine.client.DatabaseOptions;
import it.cavallium.dbengine.database.LLDelta;
import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.database.serialization.SerializationFunction;
import it.cavallium.dbengine.lucene.ExponentialPageLimits;
+import it.cavallium.dbengine.rpc.current.data.DatabaseOptions;
import java.io.IOException;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.locks.LockSupport;
@@ -7,10 +7,10 @@ import io.net5.buffer.api.Buffer;
import io.net5.buffer.api.BufferAllocator;
import io.net5.buffer.api.MemoryManager;
import io.net5.buffer.api.Send;
-import it.cavallium.dbengine.client.DatabaseOptions;
import it.cavallium.dbengine.database.LLDelta;
import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.database.serialization.SerializationFunction;
+import it.cavallium.dbengine.rpc.current.data.DatabaseOptions;
import java.io.IOException;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
@@ -6,10 +6,10 @@ import io.micrometer.core.instrument.MeterRegistry;
import io.net5.buffer.api.Buffer;
import io.net5.buffer.api.BufferAllocator;
import io.net5.buffer.api.Send;
-import it.cavallium.dbengine.client.DatabaseOptions;
import it.cavallium.dbengine.database.LLDelta;
import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.database.serialization.SerializationFunction;
+import it.cavallium.dbengine.rpc.current.data.DatabaseOptions;
import java.io.IOException;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
@@ -2,13 +2,11 @@ package it.cavallium.dbengine.database.memory;

import io.micrometer.core.instrument.MeterRegistry;
import io.net5.buffer.api.BufferAllocator;
-import it.cavallium.dbengine.client.DatabaseOptions;
import it.cavallium.dbengine.client.IndicizerAnalyzers;
import it.cavallium.dbengine.client.IndicizerSimilarities;
import it.cavallium.dbengine.client.LuceneDirectoryOptions.ByteBuffersDirectory;
import it.cavallium.dbengine.client.LuceneOptions;
import it.cavallium.dbengine.client.LuceneOptionsBuilder;
-import it.cavallium.dbengine.database.Column;
import it.cavallium.dbengine.database.LLDatabaseConnection;
import it.cavallium.dbengine.database.LLKeyValueDatabase;
import it.cavallium.dbengine.database.LLLuceneIndex;
@@ -16,6 +14,8 @@ import it.cavallium.dbengine.database.disk.LLLocalLuceneIndex;
import it.cavallium.dbengine.database.disk.LLTempLMDBEnv;
import it.cavallium.dbengine.lucene.LuceneHacks;
import it.cavallium.dbengine.netty.JMXNettyMonitoringManager;
+import it.cavallium.dbengine.rpc.current.data.Column;
+import it.cavallium.dbengine.rpc.current.data.DatabaseOptions;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
@@ -3,13 +3,13 @@ package it.cavallium.dbengine.database.memory;
import io.micrometer.core.instrument.MeterRegistry;
import io.net5.buffer.api.BufferAllocator;
import it.cavallium.dbengine.client.MemoryStats;
-import it.cavallium.dbengine.database.Column;
import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.LLKeyValueDatabase;
import it.cavallium.dbengine.database.LLSingleton;
import it.cavallium.dbengine.database.LLSnapshot;
import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.database.UpdateMode;
+import it.cavallium.dbengine.rpc.current.data.Column;
import it.unimi.dsi.fastutil.bytes.ByteList;
import java.nio.charset.StandardCharsets;
import java.util.List;
@@ -0,0 +1,21 @@
package it.cavallium.dbengine.database.remote;

import it.cavallium.data.generator.DataSerializer;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.jetbrains.annotations.NotNull;
import org.rocksdb.ColumnFamilyHandle;

public class ColumnFamilyHandleSerializer implements DataSerializer<ColumnFamilyHandle> {

	@Override
	public void serialize(DataOutput dataOutput, @NotNull ColumnFamilyHandle columnFamilyHandle) throws IOException {
		throw new UnsupportedOperationException("Can't encode this type");
	}

	@Override
	public @NotNull ColumnFamilyHandle deserialize(DataInput dataInput) throws IOException {
		throw new UnsupportedOperationException("Can't encode this type");
	}
}
@@ -0,0 +1,21 @@
package it.cavallium.dbengine.database.remote;

import it.cavallium.data.generator.DataSerializer;
import it.cavallium.dbengine.client.Compression;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.jetbrains.annotations.NotNull;

public class CompressionSerializer implements DataSerializer<Compression> {

	@Override
	public void serialize(DataOutput dataOutput, @NotNull Compression compression) throws IOException {
		dataOutput.writeInt(compression.ordinal());
	}

	@Override
	public @NotNull Compression deserialize(DataInput dataInput) throws IOException {
		return Compression.values()[dataInput.readInt()];
	}
}
@@ -0,0 +1,27 @@
package it.cavallium.dbengine.database.remote;

import it.cavallium.data.generator.DataSerializer;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.time.Duration;
import java.time.temporal.ChronoUnit;
import java.time.temporal.TemporalUnit;
import org.jetbrains.annotations.NotNull;

public class DurationSerializer implements DataSerializer<Duration> {

	@Override
	public void serialize(DataOutput dataOutput, @NotNull Duration duration) throws IOException {
		var units = duration.getUnits();
		var smallestUnit = (ChronoUnit) units.get(units.size() - 1);
		dataOutput.writeInt(smallestUnit.ordinal());
		dataOutput.writeLong(duration.get(smallestUnit));
	}

	@Override
	public @NotNull Duration deserialize(DataInput dataInput) throws IOException {
		var smallestUnit = ChronoUnit.values()[dataInput.readInt()];
		return Duration.of(dataInput.readLong(), smallestUnit);
	}
}
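A round-trip sketch for these DataSerializer implementations (example values, to be run inside a method that declares IOException):

var out = new java.io.ByteArrayOutputStream();
new DurationSerializer().serialize(new java.io.DataOutputStream(out), Duration.ofMillis(250));
Duration read = new DurationSerializer().deserialize(
		new java.io.DataInputStream(new java.io.ByteArrayInputStream(out.toByteArray())));
// read == PT0.25S. Note: Duration.get(...) only exposes the nano-of-second field,
// so durations of one second or longer appear to lose their seconds component here.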
@ -0,0 +1,336 @@
|
||||
package it.cavallium.dbengine.database.remote;
|
||||
|
||||
import io.micrometer.core.instrument.MeterRegistry;
|
||||
import io.net5.buffer.api.Buffer;
|
||||
import io.net5.buffer.api.BufferAllocator;
|
||||
import io.net5.buffer.api.Send;
|
||||
import io.netty.handler.codec.PrematureChannelClosureException;
|
||||
import io.netty.handler.ssl.util.InsecureTrustManagerFactory;
|
||||
import io.netty.incubator.codec.quic.QuicSslContextBuilder;
|
||||
import it.cavallium.dbengine.client.IndicizerAnalyzers;
|
||||
import it.cavallium.dbengine.client.IndicizerSimilarities;
|
||||
import it.cavallium.dbengine.client.LuceneOptions;
|
||||
import it.cavallium.dbengine.client.MemoryStats;
|
||||
import it.cavallium.dbengine.database.LLDatabaseConnection;
|
||||
import it.cavallium.dbengine.database.LLDictionary;
|
||||
import it.cavallium.dbengine.database.LLKeyValueDatabase;
|
||||
import it.cavallium.dbengine.database.LLLuceneIndex;
|
||||
import it.cavallium.dbengine.database.LLSingleton;
|
||||
import it.cavallium.dbengine.database.LLSnapshot;
|
||||
import it.cavallium.dbengine.database.UpdateMode;
|
||||
import it.cavallium.dbengine.database.UpdateReturnMode;
|
||||
import it.cavallium.dbengine.database.remote.RPCCodecs.RPCClientBoundResponseDecoder;
|
||||
import it.cavallium.dbengine.database.remote.RPCCodecs.RPCClientAlternateDecoder;
|
||||
import it.cavallium.dbengine.database.remote.RPCCodecs.RPCServerAlternateDecoder;
|
||||
import it.cavallium.dbengine.database.remote.RPCCodecs.RPCServerBoundRequestDecoder;
|
||||
import it.cavallium.dbengine.database.serialization.SerializationFunction;
|
||||
import it.cavallium.dbengine.lucene.LuceneHacks;
|
||||
import it.cavallium.dbengine.rpc.current.data.BinaryOptional;
|
||||
import it.cavallium.dbengine.rpc.current.data.ClientBoundRequest;
|
||||
import it.cavallium.dbengine.rpc.current.data.ClientBoundResponse;
|
||||
import it.cavallium.dbengine.rpc.current.data.Column;
|
||||
import it.cavallium.dbengine.rpc.current.data.DatabaseOptions;
|
||||
import it.cavallium.dbengine.rpc.current.data.GeneratedEntityId;
|
||||
import it.cavallium.dbengine.rpc.current.data.GetDatabase;
|
||||
import it.cavallium.dbengine.rpc.current.data.GetSingleton;
|
||||
import it.cavallium.dbengine.rpc.current.data.ServerBoundRequest;
|
||||
import it.cavallium.dbengine.rpc.current.data.ServerBoundResponse;
|
||||
import it.cavallium.dbengine.rpc.current.data.SingletonGet;
|
||||
import it.cavallium.dbengine.rpc.current.data.SingletonSet;
|
||||
import it.cavallium.dbengine.rpc.current.data.nullables.NullableLLSnapshot;
|
||||
import it.unimi.dsi.fastutil.bytes.ByteArrayList;
|
||||
import it.unimi.dsi.fastutil.bytes.ByteList;
|
||||
import java.io.File;
|
||||
import java.net.SocketAddress;
|
||||
import java.time.Duration;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.function.Function;
|
||||
import java.util.logging.Level;
|
||||
import org.jetbrains.annotations.Nullable;
|
||||
import reactor.core.publisher.Flux;
|
||||
import reactor.core.publisher.Mono;
|
||||
import reactor.core.publisher.Sinks;
|
||||
import reactor.core.scheduler.Schedulers;
|
||||
import reactor.netty.incubator.quic.QuicClient;
|
||||
import reactor.netty.incubator.quic.QuicConnection;
|
||||
|
||||
public class LLQuicConnection implements LLDatabaseConnection {
|
||||
|
||||
private final BufferAllocator allocator;
|
||||
private final MeterRegistry meterRegistry;
|
||||
private final SocketAddress bindAddress;
|
||||
private final SocketAddress remoteAddress;
|
||||
private volatile QuicConnection quicConnection;
|
||||
private final ConcurrentHashMap<String, Mono<Long>> databases = new ConcurrentHashMap<>();
|
||||
private final ConcurrentHashMap<String, Mono<Long>> indexes = new ConcurrentHashMap<>();
|
||||
private Mono<Void> connectionMono = Mono.error(new IllegalStateException("Not connected"));
|
||||
|
||||
public LLQuicConnection(BufferAllocator allocator,
|
||||
MeterRegistry meterRegistry,
|
||||
SocketAddress bindAddress,
|
||||
SocketAddress remoteAddress) {
|
||||
this.allocator = allocator;
|
||||
this.meterRegistry = meterRegistry;
|
||||
this.bindAddress = bindAddress;
|
||||
this.remoteAddress = remoteAddress;
|
||||
}
|
||||
|
||||
@Override
|
||||
public BufferAllocator getAllocator() {
|
||||
return allocator;
|
||||
}
|
||||
|
||||
@Override
|
||||
public MeterRegistry getMeterRegistry() {
|
||||
return meterRegistry;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Mono<? extends LLDatabaseConnection> connect() {
|
||||
String keyFileLocation = System.getProperty("it.cavalliumdb.keyFile", null);
|
||||
String certFileLocation = System.getProperty("it.cavalliumdb.certFile", null);
|
||||
String keyStorePassword = System.getProperty("it.cavalliumdb.keyPassword", null);
|
||||
String certChainLocation = System.getProperty("it.cavalliumdb.caFile", null);
|
||||
File keyFile;
|
||||
File certFile;
|
||||
File certChain;
|
||||
if (keyFileLocation != null) {
|
||||
keyFile = new File(keyFileLocation);
|
||||
} else {
|
||||
keyFile = null;
|
||||
}
|
||||
if (certFileLocation != null) {
|
||||
certFile = new File(certFileLocation);
|
||||
} else {
|
||||
certFile = null;
|
||||
}
|
||||
if (certChainLocation != null) {
|
||||
certChain = new File(certChainLocation);
|
||||
} else {
|
||||
certChain = null;
|
||||
}
|
||||
var sslContextBuilder = QuicSslContextBuilder.forClient();
|
||||
if (keyFileLocation != null || certFileLocation != null) {
|
||||
sslContextBuilder.keyManager(keyFile, keyStorePassword, certFile);
|
||||
}
|
||||
if (certChainLocation != null) {
|
||||
sslContextBuilder.trustManager(certChain);
|
||||
} else {
|
||||
sslContextBuilder.trustManager(InsecureTrustManagerFactory.INSTANCE);
|
||||
}
|
||||
var sslContext = sslContextBuilder
|
||||
.applicationProtocols("db/0.9")
|
||||
.build();
|
||||
return QuicClient.create()
|
||||
.bindAddress(() -> bindAddress)
|
||||
.remoteAddress(() -> remoteAddress)
|
||||
.secure(sslContext)
|
||||
.idleTimeout(Duration.ofSeconds(30))
|
||||
.initialSettings(spec -> spec
|
||||
.maxData(10000000)
|
||||
.maxStreamDataBidirectionalLocal(1000000)
|
||||
)
|
||||
.connect()
|
||||
.doOnNext(conn -> quicConnection = conn)
|
||||
.thenReturn(this);
|
||||
}
|
||||
|
||||
private <T extends ClientBoundResponse> Mono<T> sendRequest(ServerBoundRequest req) {
|
||||
return Mono
|
||||
.<T>create(sink -> {
|
||||
var sub = quicConnection
|
||||
.createStream((in, out) -> {
|
||||
var writerMono = out
|
||||
.withConnection(conn -> conn.addHandler(new RPCServerBoundRequestDecoder()))
|
||||
.sendObject(req)
|
||||
.then();
|
||||
var readerMono = in
|
||||
.withConnection(conn -> conn.addHandler(new RPCClientBoundResponseDecoder()))
|
||||
.receiveObject()
|
||||
.doOnNext(result -> {
|
||||
if (result != null) {
|
||||
//noinspection unchecked
|
||||
sink.success((T) result);
|
||||
} else {
|
||||
sink.success();
|
||||
}
|
||||
})
|
||||
.take(1, true)
|
||||
.singleOrEmpty()
|
||||
.then();
|
||||
return writerMono
|
||||
.then(readerMono)
|
||||
.doOnCancel(() -> sink.error(new PrematureChannelClosureException("Request failed")));
|
||||
})
|
||||
.log("a", Level.INFO)
|
||||
.subscribeOn(Schedulers.parallel())
|
||||
.subscribe(x -> {}, sink::error);
|
||||
sink.onDispose(sub);
|
||||
})
|
||||
.log("x", Level.INFO);
|
||||
}
|
||||
|
||||
private <T extends ClientBoundResponse, U extends ClientBoundRequest> Mono<T> sendUpdateRequest(ServerBoundRequest req,
|
||||
Function<U, ServerBoundResponse> updaterFunction) {
|
||||
return Mono
|
||||
.<T>create(sink -> {
|
||||
var sub = quicConnection
|
||||
.createStream((in, out) -> {
|
||||
var inConn = in.withConnection(conn -> conn.addHandler(new RPCServerAlternateDecoder()));
|
||||
var outConn = out.withConnection(conn -> conn.addHandler(new RPCClientAlternateDecoder()));
|
||||
var request2 = Sinks.one();
|
||||
var writerMono = outConn
|
||||
.sendObject(Mono.<Object>just(req).concatWith(request2.asMono()))
|
||||
.then();
|
||||
var responseMono = inConn
|
||||
.receiveObject()
|
||||
.switchOnFirst((first, flux) -> {
|
||||
//noinspection unchecked
|
||||
var req2 = updaterFunction.apply((U) first);
|
||||
request2.tryEmitValue(req2);
|
||||
//noinspection unchecked
|
||||
return flux.skip(1).map(resp2 -> (T) resp2);
|
||||
});
|
||||
return writerMono
|
||||
.thenMany(responseMono)
|
||||
						.doOnCancel(() -> sink.error(new PrematureChannelClosureException("Request failed")))
						.then();
				})
						.log("a", Level.INFO)
						.subscribeOn(Schedulers.parallel())
						.subscribe(x -> {}, sink::error);
				sink.onDispose(sub);
			})
			.log("x", Level.INFO);
	}

	@Override
	public Mono<? extends LLKeyValueDatabase> getDatabase(String databaseName,
			List<Column> columns, DatabaseOptions databaseOptions) {
		return sendRequest(new GetDatabase(databaseName, columns, databaseOptions))
				.cast(GeneratedEntityId.class)
				.map(GeneratedEntityId::id)
				.map(id -> new LLKeyValueDatabase() {

					@Override
					public Mono<? extends LLSingleton> getSingleton(byte[] singletonListColumnName,
							byte[] name,
							byte[] defaultValue) {
						return sendRequest(new GetSingleton(id,
								ByteList.of(singletonListColumnName),
								ByteList.of(name),
								ByteList.of(defaultValue)
						)).cast(GeneratedEntityId.class).map(GeneratedEntityId::id).map(singletonId -> new LLSingleton() {

							@Override
							public BufferAllocator getAllocator() {
								return allocator;
							}

							@Override
							public Mono<byte[]> get(@Nullable LLSnapshot snapshot) {
								return sendRequest(new SingletonGet(singletonId, NullableLLSnapshot.ofNullable(snapshot)))
										.cast(BinaryOptional.class)
										.mapNotNull(b -> b.val().getNullable())
										.map(binary -> toArrayNoCopy(binary.val()));
							}

							@Override
							public Mono<Void> set(byte[] value) {
								return sendRequest(new SingletonSet(singletonId, ByteList.of(value)))
										.then();
							}

							@Override
							public Mono<Send<Buffer>> update(SerializationFunction<@Nullable Send<Buffer>, @Nullable Buffer> updater,
									UpdateReturnMode updateReturnMode) {
								return null;
							}

							@Override
							public String getDatabaseName() {
								return databaseName;
							}
						});
					}

					@Override
					public Mono<? extends LLDictionary> getDictionary(byte[] columnName, UpdateMode updateMode) {
						return null;
					}

					@Override
					public Mono<Long> getProperty(String propertyName) {
						return null;
					}

					@Override
					public Mono<MemoryStats> getMemoryStats() {
						return null;
					}

					@Override
					public Mono<Void> verifyChecksum() {
						return null;
					}

					@Override
					public BufferAllocator getAllocator() {
						return null;
					}

					@Override
					public MeterRegistry getMeterRegistry() {
						return null;
					}

					@Override
					public Mono<Void> close() {
						return null;
					}

					@Override
					public String getDatabaseName() {
						return null;
					}

					@Override
					public Mono<LLSnapshot> takeSnapshot() {
						return null;
					}

					@Override
					public Mono<Void> releaseSnapshot(LLSnapshot snapshot) {
						return null;
					}
				});
	}

	private static byte[] toArrayNoCopy(ByteList b) {
		if (b instanceof ByteArrayList bal) {
			return bal.elements();
		} else {
			return b.toByteArray();
		}
	}

	@Override
	public Mono<? extends LLLuceneIndex> getLuceneIndex(@Nullable String clusterName,
			@Nullable String shardName,
			int instancesCount,
			IndicizerAnalyzers indicizerAnalyzers,
			IndicizerSimilarities indicizerSimilarities,
			LuceneOptions luceneOptions,
			@Nullable LuceneHacks luceneHacks) {
		return null;
	}

	@Override
	public Mono<Void> disconnect() {
		return sendDisconnect().then(quicConnection.onDispose().timeout(Duration.ofMinutes(1)));
	}

	private Mono<Void> sendDisconnect() {
		return Mono.empty();
	}
}
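Note (not part of the commit): every call on the returned LLKeyValueDatabase proxy above turns into one RPC message (GetDatabase, GetSingleton, SingletonGet, SingletonSet), keyed by the GeneratedEntityId the server answers with. A minimal usage sketch under assumed names — the connection parameter type, the column and singleton names, and the options value are assumptions; only the method signatures come from the code above:

import it.cavallium.dbengine.database.ColumnUtils;
import it.cavallium.dbengine.database.LLDatabaseConnection;
import it.cavallium.dbengine.rpc.current.data.DatabaseOptions;
import java.nio.charset.StandardCharsets;
import java.util.List;
import reactor.core.publisher.Mono;

class RemoteSingletonUsageSketch {

	// "connection" is any implementation exposing the getDatabase method shown above;
	// LLDatabaseConnection as its type and the column/singleton names are assumptions.
	static Mono<byte[]> writeThenRead(LLDatabaseConnection connection, DatabaseOptions options) {
		return connection
				// GetDatabase RPC: the server answers with a GeneratedEntityId backing the proxy
				.getDatabase("testdb", List.of(ColumnUtils.special("singletons")), options)
				// GetSingleton RPC: column, name and default value travel as ByteList payloads
				.flatMap(db -> db.getSingleton(
						"singletons".getBytes(StandardCharsets.US_ASCII),
						"counter".getBytes(StandardCharsets.US_ASCII),
						new byte[8]))
				// SingletonSet followed by SingletonGet, both keyed by the returned singleton id
				.flatMap(singleton -> singleton
						.set(new byte[] {0, 0, 0, 0, 0, 0, 0, 42})
						.then(singleton.get(null)));
	}
}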
@ -0,0 +1,21 @@
package it.cavallium.dbengine.database.remote;

import it.cavallium.data.generator.DataSerializer;
import it.cavallium.dbengine.database.LLSnapshot;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.jetbrains.annotations.NotNull;

public class LLSnapshotSerializer implements DataSerializer<LLSnapshot> {

	@Override
	public void serialize(DataOutput dataOutput, @NotNull LLSnapshot llSnapshot) throws IOException {
		dataOutput.writeLong(llSnapshot.getSequenceNumber());
	}

	@Override
	public @NotNull LLSnapshot deserialize(DataInput dataInput) throws IOException {
		return new LLSnapshot(dataInput.readLong());
	}
}
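Note (not part of the commit): these DataSerializer implementations write to a plain java.io.DataOutput, so each one can be exercised in isolation with in-memory streams. A round-trip sketch for the snapshot serializer:

import it.cavallium.dbengine.database.LLSnapshot;
import it.cavallium.dbengine.database.remote.LLSnapshotSerializer;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

class LLSnapshotSerializerRoundTrip {

	public static void main(String[] args) throws IOException {
		var serializer = new LLSnapshotSerializer();
		var bytes = new ByteArrayOutputStream();
		// The wire format is a single long: the snapshot sequence number
		serializer.serialize(new DataOutputStream(bytes), new LLSnapshot(42L));
		LLSnapshot decoded = serializer.deserialize(
				new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
		System.out.println(decoded.getSequenceNumber()); // prints 42
	}
}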
@ -0,0 +1,23 @@
package it.cavallium.dbengine.database.remote;

import it.cavallium.data.generator.DataSerializer;
import it.cavallium.dbengine.lucene.LuceneHacks;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.jetbrains.annotations.NotNull;

public class LuceneHacksSerializer implements DataSerializer<LuceneHacks> {

	@Override
	public void serialize(DataOutput dataOutput, @NotNull LuceneHacks luceneHacks) throws IOException {
		if (luceneHacks.customLocalSearcher() != null || luceneHacks.customMultiSearcher() != null) {
			throw new UnsupportedOperationException("Can't encode this type");
		}
	}

	@Override
	public @NotNull LuceneHacks deserialize(DataInput dataInput) throws IOException {
		return new LuceneHacks(null, null);
	}
}
@ -0,0 +1,21 @@
package it.cavallium.dbengine.database.remote;

import it.cavallium.data.generator.DataSerializer;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.nio.file.Path;
import org.jetbrains.annotations.NotNull;

public class PathSerializer implements DataSerializer<Path> {

	@Override
	public void serialize(DataOutput dataOutput, @NotNull Path path) throws IOException {
		dataOutput.writeUTF(path.toString());
	}

	@Override
	public @NotNull Path deserialize(DataInput dataInput) throws IOException {
		return Path.of(dataInput.readUTF());
	}
}
@ -0,0 +1,197 @@
package it.cavallium.dbengine.database.remote;

import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufInputStream;
import io.netty.buffer.ByteBufOutputStream;
import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.ByteToMessageCodec;
import it.cavallium.dbengine.rpc.current.data.BoxedClientBoundRequest;
import it.cavallium.dbengine.rpc.current.data.BoxedClientBoundResponse;
import it.cavallium.dbengine.rpc.current.data.BoxedServerBoundRequest;
import it.cavallium.dbengine.rpc.current.data.BoxedServerBoundResponse;
import it.cavallium.dbengine.rpc.current.data.ClientBoundRequest;
import it.cavallium.dbengine.rpc.current.data.ClientBoundResponse;
import it.cavallium.dbengine.rpc.current.data.IBasicType;
import it.cavallium.dbengine.rpc.current.data.ServerBoundRequest;
import it.cavallium.dbengine.rpc.current.data.ServerBoundResponse;
import it.cavallium.dbengine.rpc.current.serializers.BoxedClientBoundRequestSerializer;
import it.cavallium.dbengine.rpc.current.serializers.BoxedClientBoundResponseSerializer;
import it.cavallium.dbengine.rpc.current.serializers.BoxedServerBoundRequestSerializer;
import it.cavallium.dbengine.rpc.current.serializers.BoxedServerBoundResponseSerializer;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.util.List;

public class RPCCodecs {

	public static class RPCClientBoundRequestDecoder extends ByteToMessageCodec<ClientBoundRequest> {

		public static final ChannelHandler INSTANCE = new RPCClientBoundRequestDecoder();

		@Override
		protected void encode(ChannelHandlerContext ctx, ClientBoundRequest msg, ByteBuf out) throws Exception {
			try (var bbos = new ByteBufOutputStream(out)) {
				try (var dos = new DataOutputStream(bbos)) {
					BoxedClientBoundRequestSerializer.INSTANCE.serialize(dos, BoxedClientBoundRequest.of(msg));
				}
			}
		}

		@Override
		protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List<Object> out) throws Exception {
			try (var bbis = new ByteBufInputStream(msg)) {
				try (var dis = new DataInputStream(bbis)) {
					out.add(BoxedClientBoundRequestSerializer.INSTANCE.deserialize(dis).val());
				}
			}
		}
	}

	public static class RPCServerBoundRequestDecoder extends ByteToMessageCodec<ServerBoundRequest> {

		public static final ChannelHandler INSTANCE = new RPCServerBoundRequestDecoder();

		@Override
		protected void encode(ChannelHandlerContext ctx, ServerBoundRequest msg, ByteBuf out) throws Exception {
			try (var bbos = new ByteBufOutputStream(out)) {
				try (var dos = new DataOutputStream(bbos)) {
					BoxedServerBoundRequestSerializer.INSTANCE.serialize(dos, BoxedServerBoundRequest.of(msg));
				}
			}
		}

		@Override
		protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List<Object> out) throws Exception {
			try (var bbis = new ByteBufInputStream(msg)) {
				try (var dis = new DataInputStream(bbis)) {
					out.add(BoxedServerBoundRequestSerializer.INSTANCE.deserialize(dis).val());
				}
			}
		}
	}

	public static class RPCClientBoundResponseDecoder extends ByteToMessageCodec<ClientBoundResponse> {

		public static final ChannelHandler INSTANCE = new RPCClientBoundResponseDecoder();

		@Override
		protected void encode(ChannelHandlerContext ctx, ClientBoundResponse msg, ByteBuf out) throws Exception {
			try (var bbos = new ByteBufOutputStream(out)) {
				try (var dos = new DataOutputStream(bbos)) {
					BoxedClientBoundResponseSerializer.INSTANCE.serialize(dos, BoxedClientBoundResponse.of(msg));
				}
			}
		}

		@Override
		protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List<Object> out) throws Exception {
			try (var bbis = new ByteBufInputStream(msg)) {
				try (var dis = new DataInputStream(bbis)) {
					out.add(BoxedClientBoundResponseSerializer.INSTANCE.deserialize(dis).val());
				}
			}
		}
	}

	public static class RPCServerBoundResponseDecoder extends ByteToMessageCodec<ServerBoundResponse> {

		public static final ChannelHandler INSTANCE = new RPCServerBoundResponseDecoder();

		@Override
		protected void encode(ChannelHandlerContext ctx, ServerBoundResponse msg, ByteBuf out) throws Exception {
			try (var bbos = new ByteBufOutputStream(out)) {
				try (var dos = new DataOutputStream(bbos)) {
					BoxedServerBoundResponseSerializer.INSTANCE.serialize(dos, BoxedServerBoundResponse.of(msg));
				}
			}
		}

		@Override
		protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List<Object> out) throws Exception {
			try (var bbis = new ByteBufInputStream(msg)) {
				try (var dis = new DataInputStream(bbis)) {
					out.add(BoxedServerBoundResponseSerializer.INSTANCE.deserialize(dis).val());
				}
			}
		}
	}

	public static class RPCClientAlternateDecoder extends ByteToMessageCodec<IBasicType> {

		public static final ChannelHandler INSTANCE = new RPCClientAlternateDecoder();

		private boolean alternated = false;

		@Override
		protected void encode(ChannelHandlerContext ctx, IBasicType msg, ByteBuf out) throws Exception {
			try (var bbos = new ByteBufOutputStream(out)) {
				try (var dos = new DataOutputStream(bbos)) {
					if (!alternated) {
						BoxedServerBoundRequestSerializer.INSTANCE.serialize(dos,
								BoxedServerBoundRequest.of((ServerBoundRequest) msg)
						);
					} else {
						BoxedClientBoundResponseSerializer.INSTANCE.serialize(dos,
								BoxedClientBoundResponse.of((ClientBoundResponse) msg)
						);
					}
					alternated = !alternated;
				}
			}
		}

		@Override
		protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List<Object> out) throws Exception {
			try (var bbis = new ByteBufInputStream(msg)) {
				try (var dis = new DataInputStream(bbis)) {
					if (!alternated) {
						out.add(BoxedServerBoundRequestSerializer.INSTANCE.deserialize(dis).val());
					} else {
						out.add(BoxedClientBoundResponseSerializer.INSTANCE.deserialize(dis).val());
					}
					alternated = !alternated;
				}
			}
		}
	}

	public static class RPCServerAlternateDecoder extends ByteToMessageCodec<IBasicType> {

		public static final ChannelHandler INSTANCE = new RPCServerAlternateDecoder();

		private boolean alternated = false;

		@Override
		protected void encode(ChannelHandlerContext ctx, IBasicType msg, ByteBuf out) throws Exception {
			try (var bbos = new ByteBufOutputStream(out)) {
				try (var dos = new DataOutputStream(bbos)) {
					if (!alternated) {
						BoxedClientBoundRequestSerializer.INSTANCE.serialize(dos,
								BoxedClientBoundRequest.of((ClientBoundRequest) msg)
						);
					} else {
						BoxedServerBoundResponseSerializer.INSTANCE.serialize(dos,
								BoxedServerBoundResponse.of((ServerBoundResponse) msg)
						);
					}
					alternated = !alternated;
				}
			}
		}

		@Override
		protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List<Object> out) throws Exception {
			try (var bbis = new ByteBufInputStream(msg)) {
				try (var dis = new DataInputStream(bbis)) {
					if (!alternated) {
						out.add(BoxedClientBoundRequestSerializer.INSTANCE.deserialize(dis).val());
					} else {
						out.add(BoxedServerBoundResponseSerializer.INSTANCE.deserialize(dis).val());
					}
					alternated = !alternated;
				}
			}
		}
	}
}
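Note (not part of the commit): each codec extends ByteToMessageCodec, which is stateful (it keeps a cumulation buffer, and the alternate variants also keep the alternated flag) and is not @Sharable, so the static INSTANCE fields can only ever be installed on a single channel; a fresh instance per stream is the safer wiring. A minimal sketch of installing the request codec on a Netty channel — the length-prefix framing handlers are an assumption made here so that decode() always sees exactly one serialized message, since the commit does not show how the codecs are attached to the QUIC streams:

import io.netty.channel.Channel;
import io.netty.channel.ChannelInitializer;
import io.netty.handler.codec.LengthFieldBasedFrameDecoder;
import io.netty.handler.codec.LengthFieldPrepender;
import it.cavallium.dbengine.database.remote.RPCCodecs;

class RpcRequestChannelInitializer extends ChannelInitializer<Channel> {

	@Override
	protected void initChannel(Channel ch) {
		ch.pipeline()
				// Assumed framing: prefix outgoing frames with a 4-byte length and strip it
				// on the way in, so the codec receives one whole message per decode() call
				.addLast(new LengthFieldPrepender(4))
				.addLast(new LengthFieldBasedFrameDecoder(1024 * 1024, 0, 4, 0, 4))
				// One fresh codec per channel: ByteToMessageCodec is stateful and not @Sharable
				.addLast(new RPCCodecs.RPCServerBoundRequestDecoder());
	}
}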
@ -0,0 +1,21 @@
package it.cavallium.dbengine.database.remote;

import it.cavallium.data.generator.DataSerializer;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.jetbrains.annotations.NotNull;
import org.rocksdb.RocksDB;

public class RocksDBSerializer implements DataSerializer<RocksDB> {

	@Override
	public void serialize(DataOutput dataOutput, @NotNull RocksDB rocksDB) throws IOException {
		throw new UnsupportedOperationException("Can't encode this type");
	}

	@Override
	public @NotNull RocksDB deserialize(DataInput dataInput) throws IOException {
		throw new UnsupportedOperationException("Can't encode this type");
	}
}
@ -0,0 +1,23 @@
package it.cavallium.dbengine.database.remote;

import it.cavallium.data.generator.DataSerializer;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Map;
import org.jetbrains.annotations.NotNull;
import org.rocksdb.ColumnFamilyHandle;

public class String2ColumnFamilyHandleMapSerializer implements DataSerializer<Map<String, ColumnFamilyHandle>> {

	@Override
	public void serialize(DataOutput dataOutput, @NotNull Map<String, ColumnFamilyHandle> stringColumnFamilyHandleMap)
			throws IOException {
		throw new UnsupportedOperationException("Can't encode this type");
	}

	@Override
	public @NotNull Map<String, ColumnFamilyHandle> deserialize(DataInput dataInput) throws IOException {
		throw new UnsupportedOperationException("Can't encode this type");
	}
}
@ -0,0 +1,37 @@
package it.cavallium.dbengine.database.remote;

import it.cavallium.data.generator.DataSerializer;
import it.cavallium.dbengine.lucene.analyzer.TextFieldsAnalyzer;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import org.jetbrains.annotations.NotNull;

public class String2FieldAnalyzerMapSerializer implements DataSerializer<Map<String, TextFieldsAnalyzer>> {

	private static final TextFieldsAnalyzerSerializer TEXT_FIELDS_ANALYZER_SERIALIZER = new TextFieldsAnalyzerSerializer();

	@Override
	public void serialize(DataOutput dataOutput, @NotNull Map<String, TextFieldsAnalyzer> stringTextFieldsAnalyzerMap)
			throws IOException {
		dataOutput.writeInt(stringTextFieldsAnalyzerMap.size());
		for (Entry<String, TextFieldsAnalyzer> entry : stringTextFieldsAnalyzerMap.entrySet()) {
			dataOutput.writeUTF(entry.getKey());
			TEXT_FIELDS_ANALYZER_SERIALIZER.serialize(dataOutput, entry.getValue());
		}
	}

	@Override
	public @NotNull Map<String, TextFieldsAnalyzer> deserialize(DataInput dataInput) throws IOException {
		var size = dataInput.readInt();
		var result = new HashMap<String, TextFieldsAnalyzer>(size);
		for (int i = 0; i < size; i++) {
			result.put(dataInput.readUTF(), TEXT_FIELDS_ANALYZER_SERIALIZER.deserialize(dataInput));
		}
		return Collections.unmodifiableMap(result);
	}
}
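Note (not part of the commit): the map serializer above delegates each value to the ordinal-based analyzer serializer added further down in this commit, so the wire format is a size prefix followed by (UTF key, int ordinal) pairs. A small round-trip sketch:

import it.cavallium.dbengine.database.remote.String2FieldAnalyzerMapSerializer;
import it.cavallium.dbengine.lucene.analyzer.TextFieldsAnalyzer;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Map;

class AnalyzerMapSerializerRoundTrip {

	public static void main(String[] args) throws IOException {
		var serializer = new String2FieldAnalyzerMapSerializer();
		var bytes = new ByteArrayOutputStream();
		// Wire format: writeInt(size), then writeUTF(key) + writeInt(ordinal) per entry
		serializer.serialize(new DataOutputStream(bytes),
				Map.of("title", TextFieldsAnalyzer.values()[0]));
		Map<String, TextFieldsAnalyzer> decoded = serializer.deserialize(
				new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
		System.out.println(decoded); // one entry, same key and analyzer
	}
}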
@ -0,0 +1,23 @@
package it.cavallium.dbengine.database.remote;

import it.cavallium.data.generator.DataSerializer;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Map;
import org.jetbrains.annotations.NotNull;

@SuppressWarnings("rawtypes")
public class StringEntrySerializer implements DataSerializer<Map.Entry> {

	@Override
	public void serialize(DataOutput dataOutput, @NotNull Map.Entry entry) throws IOException {
		dataOutput.writeUTF((String) entry.getKey());
		dataOutput.writeUTF((String) entry.getValue());
	}

	@Override
	public @NotNull Map.Entry deserialize(DataInput dataInput) throws IOException {
		return Map.entry(dataInput.readUTF(), dataInput.readUTF());
	}
}
@ -0,0 +1,35 @@
package it.cavallium.dbengine.database.remote;

import it.cavallium.data.generator.DataSerializer;
import it.cavallium.dbengine.lucene.analyzer.TextFieldsAnalyzer;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import org.jetbrains.annotations.NotNull;

public class StringMapSerializer implements DataSerializer<Map<String, String>> {

	@Override
	public void serialize(DataOutput dataOutput, @NotNull Map<String, String> stringTextFieldsAnalyzerMap)
			throws IOException {
		dataOutput.writeInt(stringTextFieldsAnalyzerMap.size());
		for (Entry<String, String> entry : stringTextFieldsAnalyzerMap.entrySet()) {
			dataOutput.writeUTF(entry.getKey());
			dataOutput.writeUTF(entry.getValue());
		}
	}

	@Override
	public @NotNull Map<String, String> deserialize(DataInput dataInput) throws IOException {
		var size = dataInput.readInt();
		var result = new HashMap<String, String>(size);
		for (int i = 0; i < size; i++) {
			result.put(dataInput.readUTF(), dataInput.readUTF());
		}
		return Collections.unmodifiableMap(result);
	}
}
@ -0,0 +1,21 @@
package it.cavallium.dbengine.database.remote;

import it.cavallium.data.generator.DataSerializer;
import it.cavallium.dbengine.lucene.analyzer.TextFieldsAnalyzer;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.jetbrains.annotations.NotNull;

public class TextFieldsAnalyzerSerializer implements DataSerializer<TextFieldsAnalyzer> {

	@Override
	public void serialize(DataOutput dataOutput, @NotNull TextFieldsAnalyzer textFieldsAnalyzer) throws IOException {
		dataOutput.writeInt(textFieldsAnalyzer.ordinal());
	}

	@Override
	public @NotNull TextFieldsAnalyzer deserialize(DataInput dataInput) throws IOException {
		return TextFieldsAnalyzer.values()[dataInput.readInt()];
	}
}
@ -0,0 +1,21 @@
package it.cavallium.dbengine.database.remote;

import it.cavallium.data.generator.DataSerializer;
import it.cavallium.dbengine.lucene.analyzer.TextFieldsSimilarity;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.jetbrains.annotations.NotNull;

public class TextFieldsSimilaritySerializer implements DataSerializer<TextFieldsSimilarity> {

	@Override
	public void serialize(DataOutput dataOutput, @NotNull TextFieldsSimilarity textFieldsSimilarity) throws IOException {
		dataOutput.writeInt(textFieldsSimilarity.ordinal());
	}

	@Override
	public @NotNull TextFieldsSimilarity deserialize(DataInput dataInput) throws IOException {
		return TextFieldsSimilarity.values()[dataInput.readInt()];
	}
}
@ -0,0 +1,21 @@
package it.cavallium.dbengine.database.remote;

import it.cavallium.data.generator.DataSerializer;
import it.cavallium.dbengine.database.UpdateReturnMode;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.jetbrains.annotations.NotNull;

public class UpdateReturnModeSerializer implements DataSerializer<UpdateReturnMode> {

	@Override
	public void serialize(DataOutput dataOutput, @NotNull UpdateReturnMode updateReturnMode) throws IOException {
		dataOutput.writeInt(updateReturnMode.ordinal());
	}

	@Override
	public @NotNull UpdateReturnMode deserialize(DataInput dataInput) throws IOException {
		return UpdateReturnMode.values()[dataInput.readInt()];
	}
}
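Note (not part of the commit): TextFieldsAnalyzerSerializer, TextFieldsSimilaritySerializer and UpdateReturnModeSerializer all encode the enum ordinal, which keeps the payload at four bytes but ties the wire format to the declaration order of the enum constants on both ends of the connection. A name-based variant would survive reordering at the cost of a few extra bytes; a hedged sketch of that alternative, shown for UpdateReturnMode only:

import it.cavallium.data.generator.DataSerializer;
import it.cavallium.dbengine.database.UpdateReturnMode;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.jetbrains.annotations.NotNull;

// Hypothetical alternative, not part of the commit: writing the constant name
// keeps old payloads decodable even if the enum constants are later reordered.
public class UpdateReturnModeNameSerializer implements DataSerializer<UpdateReturnMode> {

	@Override
	public void serialize(DataOutput dataOutput, @NotNull UpdateReturnMode updateReturnMode) throws IOException {
		dataOutput.writeUTF(updateReturnMode.name());
	}

	@Override
	public @NotNull UpdateReturnMode deserialize(DataInput dataInput) throws IOException {
		return UpdateReturnMode.valueOf(dataInput.readUTF());
	}
}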
@ -4,20 +4,23 @@ import static it.cavallium.dbengine.DbTestUtils.MAX_IN_MEMORY_RESULT_ENTRIES;
import static it.cavallium.dbengine.DbTestUtils.ensureNoLeaks;

import io.micrometer.core.instrument.simple.SimpleMeterRegistry;
import it.cavallium.data.generator.nativedata.Nullableboolean;
import it.cavallium.data.generator.nativedata.Nullableint;
import it.cavallium.data.generator.nativedata.Nullablelong;
import it.cavallium.dbengine.DbTestUtils.TempDb;
import it.cavallium.dbengine.DbTestUtils.TestAllocator;
import it.cavallium.dbengine.client.DatabaseOptions;
import it.cavallium.dbengine.client.IndicizerAnalyzers;
import it.cavallium.dbengine.client.IndicizerSimilarities;
import it.cavallium.dbengine.client.LuceneDirectoryOptions;
import it.cavallium.dbengine.client.LuceneDirectoryOptions.ByteBuffersDirectory;
import it.cavallium.dbengine.client.LuceneOptions;
import it.cavallium.dbengine.database.Column;
import it.cavallium.dbengine.database.ColumnUtils;
import it.cavallium.dbengine.database.LLKeyValueDatabase;
import it.cavallium.dbengine.database.disk.LLLocalDatabaseConnection;
import it.cavallium.dbengine.lucene.LuceneHacks;
import it.cavallium.dbengine.lucene.analyzer.TextFieldsAnalyzer;
import it.cavallium.dbengine.lucene.analyzer.TextFieldsSimilarity;
import it.cavallium.dbengine.rpc.current.data.DatabaseOptions;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
@ -67,26 +70,25 @@ public class LocalTemporaryDbGenerator implements TemporaryDbGenerator {
					return null;
				})
				.subscribeOn(Schedulers.boundedElastic())
				.then(new LLLocalDatabaseConnection(allocator.allocator(), new SimpleMeterRegistry(), wrkspcPath).connect())
				.then(new LLLocalDatabaseConnection(allocator.allocator(), new SimpleMeterRegistry(), wrkspcPath, true).connect())
				.flatMap(conn -> {
					SwappableLuceneSearcher searcher = new SwappableLuceneSearcher();
					var luceneHacks = new LuceneHacks(() -> searcher, () -> searcher);
					return Mono.zip(
							conn.getDatabase("testdb",
									List.of(Column.dictionary("testmap"), Column.special("ints"), Column.special("longs")),
									List.of(ColumnUtils.dictionary("testmap"), ColumnUtils.special("ints"), ColumnUtils.special("longs")),
									new DatabaseOptions(List.of(),
											Map.of(),
											true,
											false,
											true,
											false,
											true,
											canUseNettyDirect,
											false,
											-1,
											null,
											null,
											null
											Nullableint.of(-1),
											Nullablelong.empty(),
											Nullablelong.empty(),
											Nullableboolean.empty()
									)
							),
							conn.getLuceneIndex(null,
@ -3,18 +3,21 @@ package it.cavallium.dbengine;
import static it.cavallium.dbengine.DbTestUtils.MAX_IN_MEMORY_RESULT_ENTRIES;

import io.micrometer.core.instrument.simple.SimpleMeterRegistry;
import it.cavallium.data.generator.nativedata.Nullableboolean;
import it.cavallium.data.generator.nativedata.Nullableint;
import it.cavallium.data.generator.nativedata.Nullablelong;
import it.cavallium.dbengine.DbTestUtils.TempDb;
import it.cavallium.dbengine.DbTestUtils.TestAllocator;
import it.cavallium.dbengine.client.DatabaseOptions;
import it.cavallium.dbengine.client.IndicizerAnalyzers;
import it.cavallium.dbengine.client.IndicizerSimilarities;
import it.cavallium.dbengine.client.LuceneDirectoryOptions.ByteBuffersDirectory;
import it.cavallium.dbengine.client.LuceneOptions;
import it.cavallium.dbengine.database.Column;
import it.cavallium.dbengine.database.ColumnUtils;
import it.cavallium.dbengine.database.memory.LLMemoryDatabaseConnection;
import it.cavallium.dbengine.lucene.LuceneHacks;
import it.cavallium.dbengine.lucene.analyzer.TextFieldsAnalyzer;
import it.cavallium.dbengine.lucene.analyzer.TextFieldsSimilarity;
import it.cavallium.dbengine.rpc.current.data.DatabaseOptions;
import java.time.Duration;
import java.util.List;
import java.util.Map;
@ -45,20 +48,19 @@ public class MemoryTemporaryDbGenerator implements TemporaryDbGenerator {
		return Mono
				.zip(
						conn.getDatabase("testdb",
								List.of(Column.dictionary("testmap"), Column.special("ints"), Column.special("longs")),
								new DatabaseOptions(List.of(),
								List.of(ColumnUtils.dictionary("testmap"), ColumnUtils.special("ints"), ColumnUtils.special("longs")),
								DatabaseOptions.of(List.of(),
										Map.of(),
										true,
										false,
										true,
										false,
										true,
										canUseNettyDirect,
										true,
										-1,
										null,
										null,
										null
										Nullableint.of(-1),
										Nullablelong.empty(),
										Nullablelong.empty(),
										Nullableboolean.empty()
								)
						),
						conn.getLuceneIndex(null,
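Note (not part of the commit): both test generators now build the options through the generated DatabaseOptions.of factory, with the data-generator nullable wrappers replacing the raw null and -1 arguments of the old constructor. Isolated from the larger call, the wrapper usage looks like this:

import it.cavallium.data.generator.nativedata.Nullableboolean;
import it.cavallium.data.generator.nativedata.Nullableint;
import it.cavallium.data.generator.nativedata.Nullablelong;

// Minimal sketch, not part of the commit: "of" carries an explicit value,
// "empty" replaces what the old constructor accepted as a plain null.
class NullableWrappersExample {

	public static void main(String[] args) {
		Nullableint presentInt = Nullableint.of(-1);
		Nullablelong absentLong = Nullablelong.empty();
		Nullableboolean absentBoolean = Nullableboolean.empty();
		System.out.println(presentInt + " / " + absentLong + " / " + absentBoolean);
	}
}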