Remove netty buffers, remove reactive streams

Andrea Cavalli 2023-02-09 23:34:25 +01:00
parent 5c112484bd
commit a9857f7553
294 changed files with 9141 additions and 16277 deletions
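A rough before/after sketch of the calling convention this commit migrates away from; the names below (index, key, value, entries) are illustrative, not code from this repository:

    // Before: operations returned Mono/Flux and had to be composed or blocked
    index.addDocument(key, value).block();

    // After: plain synchronous methods, with java.util.stream.Stream replacing Flux
    index.addDocument(key, value);
    long added = index.addDocuments(true, entries.stream());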

pom.xml

@@ -5,7 +5,7 @@
 	<groupId>it.cavallium</groupId>
 	<artifactId>dbengine</artifactId>
-	<version>3.0.${revision}</version>
+	<version>4.0.${revision}</version>
 	<packaging>jar</packaging>
 	<properties>
@@ -13,7 +13,8 @@
 		<revision>0-SNAPSHOT</revision>
 		<dbengine.ci>false</dbengine.ci>
 		<micrometer.version>1.9.5</micrometer.version>
-		<lucene.version>9.4.2</lucene.version>
+		<lucene.version>9.5.0</lucene.version>
+		<rocksdb.version>7.9.2</rocksdb.version>
 		<junit.jupiter.version>5.9.0</junit.jupiter.version>
 		<data.generator.version>1.0.244</data.generator.version>
 	</properties>
@@ -96,13 +97,6 @@
 				<artifactId>hamcrest-library</artifactId>
 				<version>2.2</version>
 			</dependency>
-			<dependency>
-				<groupId>io.projectreactor</groupId>
-				<artifactId>reactor-bom</artifactId>
-				<version>2022.0.2</version>
-				<type>pom</type>
-				<scope>import</scope>
-			</dependency>
 			<dependency>
 				<groupId>io.netty</groupId>
 				<artifactId>netty-bom</artifactId>
@@ -113,33 +107,11 @@
 		</dependencies>
 	</dependencyManagement>
 	<dependencies>
-		<dependency>
-			<groupId>io.projectreactor</groupId>
-			<artifactId>reactor-tools</artifactId>
-			<classifier>original</classifier>
-			<scope>runtime</scope>
-			<version>3.5.1</version>
-		</dependency>
 		<dependency>
 			<groupId>com.google.guava</groupId>
 			<artifactId>guava</artifactId>
 			<version>31.1-jre</version>
 		</dependency>
-		<dependency>
-			<groupId>io.netty</groupId>
-			<artifactId>netty5-buffer</artifactId>
-			<version>5.0.0.Alpha5</version>
-			<exclusions>
-				<exclusion>
-					<groupId>io.netty</groupId>
-					<artifactId>netty-common</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>io.netty</groupId>
-					<artifactId>netty-buffer</artifactId>
-				</exclusion>
-			</exclusions>
-		</dependency>
 		<dependency>
 			<groupId>io.netty</groupId>
 			<artifactId>netty-buffer</artifactId>
@@ -155,11 +127,6 @@
 		<dependency>
 			<groupId>io.netty</groupId>
 			<artifactId>netty-handler</artifactId>
-		</dependency>
-		<dependency>
-			<groupId>io.projectreactor.netty.incubator</groupId>
-			<artifactId>reactor-netty-incubator-quic</artifactId>
-			<version>0.1.2</version>
 		</dependency>
 		<dependency>
 			<groupId>org.yaml</groupId>
@@ -266,7 +233,7 @@
 		<dependency>
 			<groupId>org.rocksdb</groupId>
 			<artifactId>rocksdbjni</artifactId>
-			<version>7.9.2</version>
+			<version>${rocksdb.version}</version>
 		</dependency>
 		<dependency>
 			<groupId>org.apache.lucene</groupId>
@@ -356,40 +323,6 @@
 				</exclusion>
 			</exclusions>
 		</dependency>
-		<dependency>
-			<groupId>io.projectreactor.netty</groupId>
-			<artifactId>reactor-netty</artifactId>
-			<exclusions>
-				<exclusion>
-					<groupId>io.netty.incubator</groupId>
-					<artifactId>netty-incubator-codec-native-quic</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>io.netty</groupId>
-					<artifactId>reactor-netty-core</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>io.netty</groupId>
-					<artifactId>netty-common</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>io.netty</groupId>
-					<artifactId>netty-codec</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>io.netty</groupId>
-					<artifactId>netty-handler</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>io.netty</groupId>
-					<artifactId>netty-transport</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>io.netty</groupId>
-					<artifactId>netty-buffer</artifactId>
-				</exclusion>
-			</exclusions>
-		</dependency>
 		<dependency>
 			<groupId>org.bouncycastle</groupId>
 			<artifactId>bcpkix-jdk15on</artifactId>
@@ -457,11 +390,6 @@
 			<version>3.12.0</version>
 			<scope>compile</scope>
 		</dependency>
-		<dependency>
-			<groupId>io.projectreactor</groupId>
-			<artifactId>reactor-test</artifactId>
-			<scope>test</scope>
-		</dependency>
 	</dependencies>
 	<build>
 		<testSourceDirectory>src/test/java</testSourceDirectory>
@@ -638,39 +566,4 @@
 			</plugins>
 		</pluginManagement>
 	</build>
-	<profiles>
-		<profile>
-			<id>reactor-agent</id>
-			<activation>
-				<activeByDefault>false</activeByDefault>
-				<property>
-					<name>reactor.agent.enable</name>
-					<value>true</value>
-				</property>
-			</activation>
-			<build>
-				<plugins>
-					<plugin>
-						<groupId>net.bytebuddy</groupId>
-						<artifactId>byte-buddy-maven-plugin</artifactId>
-						<version>1.12.22</version>
-						<executions>
-							<execution>
-								<goals>
-									<goal>transform</goal>
-								</goals>
-							</execution>
-						</executions>
-						<configuration>
-							<transformations>
-								<transformation>
-									<plugin>reactor.tools.agent.ReactorDebugByteBuddyPlugin</plugin>
-								</transformation>
-							</transformations>
-						</configuration>
-					</plugin>
-				</plugins>
-			</build>
-		</profile>
-	</profiles>
 </project>


@@ -77,8 +77,6 @@ superTypesData:
     NIOFSDirectory,
     RAFFSDirectory,
     DirectIOFSDirectory,
-    RocksDBStandaloneDirectory,
-    RocksDBSharedDirectory,
     NRTCachingDirectory
   ]
   StandardFSDirectoryOptions: [
@@ -90,7 +88,6 @@ superTypesData:
     MemoryMappedFSDirectory,
     NIOFSDirectory,
     RAFFSDirectory,
-    RocksDBStandaloneDirectory,
     StandardFSDirectoryOptions
   ]
   Filter: [
@@ -134,8 +131,8 @@ customTypesData:
     serializer: it.cavallium.dbengine.database.remote.LLSnapshotSerializer
   Bytes:
-    javaClass: it.unimi.dsi.fastutil.bytes.ByteList
-    serializer: it.cavallium.dbengine.database.remote.ByteListSerializer
+    javaClass: it.cavallium.dbengine.buffers.Buf
+    serializer: it.cavallium.dbengine.database.remote.BufSerializer
   StringMap:
     javaClass: java.util.Map<java.lang.String, java.lang.String>
     serializer: it.cavallium.dbengine.database.remote.StringMapSerializer
@@ -248,7 +245,6 @@ baseTypesData:
       lowMemory: boolean
       useDirectIO: boolean
       allowMemoryMapping: boolean
-      allowNettyDirect: boolean
       optimistic: boolean
       maxOpenFiles: -int
       blockCache: -long
@@ -331,7 +327,6 @@ baseTypesData:
      indexWriterMaxBufferedDocs: -int
       applyAllDeletes: -boolean
       writeAllDeletes: -boolean
-      allowNonVolatileCollection: boolean
       maxInMemoryResultEntries: int
       mergePolicy: TieredMergePolicy
   TieredMergePolicy:
@@ -359,14 +354,6 @@ baseTypesData:
       delegate: StandardFSDirectoryOptions
       mergeBufferSize: -int
       minBytesDirect: -long
-  RocksDBStandaloneDirectory:
-    data:
-      managedPath: Path
-      blockSize: int
-  RocksDBSharedDirectory:
-    data:
-      managedPath: Path
-      blockSize: int
   NRTCachingDirectory:
     data:
       delegate: LuceneDirectoryOptions

MetricUtils.java

@@ -1,55 +0,0 @@
package it.cavallium.dbengine;
import io.netty5.buffer.pool.PoolArenaMetric;
import io.netty5.buffer.pool.PooledBufferAllocator;
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MethodType;
import java.util.List;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/**
* Netty5 hides some metrics. This utility class can read them.
*/
public class MetricUtils {
private static final Logger LOG = LogManager.getLogger(MetricUtils.class);
private static final MethodHandle GET_ARENA_METRICS;
static {
var lookup = MethodHandles.lookup();
// Get the method handle that returns the metrics of each pool arena
MethodHandle handle = null;
try {
// Find the class
var pooledBufferClass = Class.forName("io.netty5.buffer.pool.PooledBufferAllocatorMetric");
// Find the handle of the method
handle = lookup.findVirtual(pooledBufferClass, "arenaMetrics", MethodType.methodType(List.class));
} catch (NoSuchMethodException | IllegalAccessException | ClassNotFoundException ex) {
logMetricsNotAccessible(ex);
}
GET_ARENA_METRICS = handle;
}
private static void logMetricsNotAccessible(Throwable ex) {
LOG.debug("Failed to open pooled buffer allocator metrics", ex);
}
/**
* Get the metrics of each pool arena of a pooled allocator
* @param allocator Pooled allocator
* @return A list of {@link PoolArenaMetric}
*/
@SuppressWarnings("unchecked")
public static List<PoolArenaMetric> getPoolArenaMetrics(PooledBufferAllocator allocator) {
var metric = allocator.metric();
try {
// Invoke the method to get the metrics
return (List<PoolArenaMetric>) GET_ARENA_METRICS.invoke(metric);
} catch (Throwable e) {
return List.of();
}
}
}

Buf.java

@@ -0,0 +1,182 @@
package it.cavallium.dbengine.buffers;
import com.google.common.primitives.Ints;
import com.google.common.primitives.Longs;
import it.unimi.dsi.fastutil.bytes.ByteArrayList;
import it.unimi.dsi.fastutil.bytes.ByteList;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.RandomAccess;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.warp.commonutils.stream.SafeByteArrayInputStream;
import org.warp.commonutils.stream.SafeByteArrayOutputStream;
import org.warp.commonutils.stream.SafeDataOutput;
public interface Buf extends ByteList, RandomAccess {
static Buf wrap(ByteList bytes) {
if (bytes instanceof Buf buf) {
return buf;
} else if (bytes instanceof ByteArrayList byteArrayList) {
return ByteListBuf.wrap(byteArrayList.elements(), byteArrayList.size());
} else {
return ByteListBuf.wrap(bytes.toByteArray());
}
}
static Buf wrap(ByteList bytes, int from, int to) {
if (bytes instanceof Buf buf) {
return buf.subList(from, to);
} else if (bytes instanceof ByteArrayList byteArrayList) {
return ByteListBuf.wrap(byteArrayList.elements(), byteArrayList.size()).subList(from, to);
} else {
return ByteListBuf.wrap(bytes.toByteArray()).subList(from, to);
}
}
static Buf wrap(byte[] bytes) {
return ByteListBuf.wrap(bytes);
}
static Buf wrap(byte[] bytes, int from, int to) {
return ByteListBuf.wrap(bytes, to).subList(from, to);
}
static Buf create(int initialCapacity) {
return new ByteListBuf(initialCapacity);
}
static Buf copyOf(byte[] original) {
return new ByteListBuf(original);
}
static Buf create() {
return new ByteListBuf();
}
static Buf wrap(byte[] array, int length) {
return ByteListBuf.wrap(array, length);
}
static Buf createZeroes(int length) {
return ByteListBuf.wrap(new byte[length], length);
}
/**
* Get this element as an array, converting it if needed
*/
byte @NotNull[] asArray();
/**
* Get this element as an array, only if it's already an array, otherwise return null
*/
byte @Nullable[] asArrayStrict();
/**
* Get this element as an array with equal or bigger size, converting it if needed
* The returned array may be bigger than expected!
*/
byte @Nullable[] asUnboundedArray();
/**
* Get this element as an array with equal or bigger size, only if it's already an array, otherwise return null
* The returned array may be bigger than expected!
*/
byte @Nullable[] asUnboundedArrayStrict();
boolean isMutable();
void freeze();
@Override
Buf subList(int from, int to);
Buf copy();
SafeByteArrayInputStream binaryInputStream();
void writeTo(SafeDataOutput dataOutput);
default long getLong(int i) {
return Longs.fromBytes(getByte(i),
getByte(i + 1),
getByte(i + 2),
getByte(i + 3),
getByte(i + 4),
getByte(i + 5),
getByte(i + 6),
getByte(i + 7)
);
}
default int getInt(int i) {
return Ints.fromBytes(getByte(i),
getByte(i + 1),
getByte(i + 2),
getByte(i + 3)
);
}
default float getFloat(int i) {
return Float.intBitsToFloat(getInt(i));
}
default double getDouble(int i) {
return Double.longBitsToDouble(getLong(i));
}
default boolean getBoolean(int i) {
return getByte(i) != 0;
}
default void setBoolean(int i, boolean val) {
set(i, val ? (byte) 1 : 0);
}
default void setByte(int i, byte val) {
set(i, val);
}
default void setInt(int i, int val) {
set(i, (byte) (val >> 24));
set(i + 1, (byte) (val >> 16));
set(i + 2, (byte) (val >> 8));
set(i + 3, (byte) val);
}
default void setLong(int i, long val) {
set(i, (byte) (val >> 56));
set(i + 1, (byte) (val >> 48));
set(i + 2, (byte) (val >> 40));
set(i + 3, (byte) (val >> 32));
set(i + 4, (byte) (val >> 24));
set(i + 5, (byte) (val >> 16));
set(i + 6, (byte) (val >> 8));
set(i + 7, (byte) val);
}
default void setFloat(int i, float val) {
setInt(i, Float.floatToRawIntBits(val));
}
default void setDouble(int i, double val) {
setLong(i, Double.doubleToRawLongBits(val));
}
default SafeByteArrayOutputStream binaryOutputStream() {
return binaryOutputStream(0, size());
}
default SafeByteArrayOutputStream binaryOutputStream(int from) {
return binaryOutputStream(from, size());
}
SafeByteArrayOutputStream binaryOutputStream(int from, int to);
boolean equals(int aStartIndex, Buf b, int bStartIndex, int length);
boolean equals(int aStartIndex, byte[] b, int bStartIndex, int length);
default String toString(Charset charset) {
return new String(this.asArray(), charset);
}
}
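A minimal usage sketch of the new Buf interface; it uses only methods visible in this file plus fastutil's ByteList, and assumes assertions are enabled:

    Buf buf = Buf.createZeroes(Long.BYTES + Integer.BYTES); // 12 zeroed bytes
    buf.setLong(0, 0x1122334455667788L); // big-endian, matching getLong/Longs.fromBytes
    buf.setInt(Long.BYTES, 42);
    assert buf.getLong(0) == 0x1122334455667788L;
    assert buf.getInt(Long.BYTES) == 42;
    Buf slice = buf.subList(Long.BYTES, buf.size()); // a view over the same bytes, not a copy
    slice.freeze(); // isMutable() becomes false for the slice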

BufDataInput.java

@@ -0,0 +1,42 @@
package it.cavallium.dbengine.buffers;
import org.jetbrains.annotations.NotNull;
import org.warp.commonutils.stream.SafeByteArrayInputStream;
import org.warp.commonutils.stream.SafeDataInputStream;
public class BufDataInput extends SafeDataInputStream {
/**
* Creates a DataInputStream that uses the specified underlying InputStream.
*
* @param in the specified input stream
*/
private BufDataInput(@NotNull SafeByteArrayInputStream in) {
super(in);
}
public static BufDataInput create(Buf byteList) {
return new BufDataInput(byteList.binaryInputStream());
}
@Deprecated
@Override
public void close() {
}
@Override
public void mark(int readlimit) {
throw new UnsupportedOperationException();
}
@Override
public void reset() {
throw new UnsupportedOperationException();
}
@Override
public boolean markSupported() {
return false;
}
}

BufDataOutput.java

@@ -0,0 +1,218 @@
package it.cavallium.dbengine.buffers;
import it.unimi.dsi.fastutil.Arrays;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Objects;
import org.jetbrains.annotations.NotNull;
import org.warp.commonutils.stream.SafeByteArrayOutputStream;
import org.warp.commonutils.stream.SafeDataOutputStream;
public class BufDataOutput implements DataOutput {
private final SafeByteArrayOutputStream buf;
private final SafeDataOutputStream dOut;
private final int limit;
private BufDataOutput(SafeByteArrayOutputStream buf) {
this.buf = buf;
this.dOut = new SafeDataOutputStream(buf);
limit = Integer.MAX_VALUE;
}
private BufDataOutput(SafeByteArrayOutputStream buf, int maxSize) {
this.buf = buf;
this.dOut = new SafeDataOutputStream(buf);
this.limit = maxSize;
}
public static BufDataOutput createLimited(int maxSize, int hint) {
if (hint >= 0) {
if (maxSize < 0 || maxSize == Integer.MAX_VALUE) {
return create(hint);
} else {
return new BufDataOutput(new SafeByteArrayOutputStream(Math.min(maxSize, hint)), maxSize);
}
} else {
return createLimited(maxSize);
}
}
public static BufDataOutput createLimited(int maxSize) {
if (maxSize < 0 || maxSize == Integer.MAX_VALUE) {
return create();
} else {
return new BufDataOutput(new SafeByteArrayOutputStream(maxSize), maxSize);
}
}
public static BufDataOutput create() {
return new BufDataOutput(new SafeByteArrayOutputStream());
}
public static BufDataOutput create(int hint) {
if (hint >= 0) {
return new BufDataOutput(new SafeByteArrayOutputStream(hint));
} else {
return create();
}
}
public static BufDataOutput wrap(Buf buf, int from, int to) {
Arrays.ensureFromTo(buf.size(), from, to);
if (buf.isEmpty()) {
return createLimited(0);
} else {
return new BufDataOutput(buf.binaryOutputStream(from), to - from);
}
}
public static BufDataOutput wrap(Buf buf) {
if (buf.isEmpty()) {
return createLimited(0);
} else {
return new BufDataOutput(buf.binaryOutputStream(), buf.size());
}
}
private IllegalStateException unreachable(IOException ex) {
return new IllegalStateException(ex);
}
@Override
public void write(int b) {
checkOutOfBounds(1);
dOut.write(b);
}
private void checkOutOfBounds(int delta) {
if (dOut.size() + delta > limit) {
throw new IndexOutOfBoundsException(limit);
}
}
@Override
public void write(byte @NotNull [] b) {
checkOutOfBounds(b.length);
dOut.write(b);
}
@Override
public void write(byte @NotNull [] b, int off, int len) {
checkOutOfBounds(Math.max(0, Math.min(b.length - off, len)));
dOut.write(b, off, len);
}
@Override
public void writeBoolean(boolean v) {
checkOutOfBounds(1);
dOut.writeBoolean(v);
}
@Override
public void writeByte(int v) {
checkOutOfBounds(Byte.BYTES);
dOut.writeByte(v);
}
@Override
public void writeShort(int v) {
checkOutOfBounds(Short.BYTES);
dOut.writeShort(v);
}
@Override
public void writeChar(int v) {
checkOutOfBounds(Character.BYTES);
dOut.writeChar(v);
}
@Override
public void writeInt(int v) {
checkOutOfBounds(Integer.BYTES);
dOut.writeInt(v);
}
@Override
public void writeLong(long v) {
checkOutOfBounds(Long.BYTES);
dOut.writeLong(v);
}
@Override
public void writeFloat(float v) {
checkOutOfBounds(Float.BYTES);
dOut.writeFloat(v);
}
@Override
public void writeDouble(double v) {
checkOutOfBounds(Double.BYTES);
dOut.writeDouble(v);
}
public void ensureWritable(int size) {
dOut.flush();
buf.ensureWritable(size);
}
@Override
public void writeBytes(@NotNull String s) {
checkOutOfBounds(s.length() * Byte.BYTES);
dOut.writeBytes(s);
}
// todo: check
public void writeBytes(Buf deserialized) {
checkOutOfBounds(deserialized.size());
deserialized.writeTo(dOut);
}
public void writeBytes(byte[] b, int off, int len) {
write(b, off, len);
}
@Override
public void writeChars(@NotNull String s) {
checkOutOfBounds(Character.BYTES * s.length());
dOut.writeChars(s);
}
@Override
public void writeUTF(@NotNull String s) {
throw new UnsupportedOperationException();
}
public Buf asList() {
dOut.flush();
return Buf.wrap(this.buf.array, this.buf.length);
}
@Override
public String toString() {
return dOut.toString();
}
@Override
public int hashCode() {
return dOut.hashCode();
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
BufDataOutput that = (BufDataOutput) o;
return Objects.equals(dOut, that.dOut);
}
public int size() {
return dOut.size();
}
}
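A hedged round-trip sketch pairing BufDataOutput with BufDataInput; the read methods are assumed to come from SafeDataInputStream, mirroring java.io.DataInputStream:

    BufDataOutput out = BufDataOutput.create();
    out.writeInt(7);
    out.writeLong(123456789L);
    Buf serialized = out.asList(); // wraps the internal array, no copy

    BufDataInput in = BufDataInput.create(serialized);
    int i = in.readInt();   // 7
    long l = in.readLong(); // 123456789L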

ByteListBuf.java

@@ -0,0 +1,467 @@
package it.cavallium.dbengine.buffers;
import it.unimi.dsi.fastutil.bytes.AbstractByteList;
import it.unimi.dsi.fastutil.bytes.ByteArrayList;
import it.unimi.dsi.fastutil.bytes.ByteCollection;
import it.unimi.dsi.fastutil.bytes.ByteConsumer;
import it.unimi.dsi.fastutil.bytes.ByteIterator;
import it.unimi.dsi.fastutil.bytes.ByteIterators;
import it.unimi.dsi.fastutil.bytes.ByteList;
import it.unimi.dsi.fastutil.bytes.ByteListIterator;
import it.unimi.dsi.fastutil.bytes.ByteSpliterator;
import it.unimi.dsi.fastutil.bytes.ByteSpliterators;
import java.io.Serial;
import java.nio.charset.Charset;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.NoSuchElementException;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.warp.commonutils.stream.SafeByteArrayInputStream;
import org.warp.commonutils.stream.SafeByteArrayOutputStream;
import org.warp.commonutils.stream.SafeDataOutput;
class ByteListBuf extends ByteArrayList implements Buf {
private boolean mutable = true;
protected ByteListBuf(byte[] a, boolean wrapped) {
super(a, wrapped);
}
public ByteListBuf(int capacity) {
super(capacity);
}
public ByteListBuf() {
}
public ByteListBuf(Collection<? extends Byte> c) {
super(c);
}
public ByteListBuf(ByteCollection c) {
super(c);
}
public ByteListBuf(ByteList l) {
super(l);
}
public ByteListBuf(byte[] a) {
super(a);
}
public ByteListBuf(byte[] a, int offset, int length) {
super(a, offset, length);
}
public ByteListBuf(Iterator<? extends Byte> i) {
super(i);
}
public ByteListBuf(ByteIterator i) {
super(i);
}
/**
* Wraps a given array into an array list of given size.
*
* <p>
* Note it is guaranteed that the type of the array returned by {@link #elements()} will be the same
* (see the comments in the class documentation).
*
* @param a an array to wrap.
* @param length the length of the resulting array list.
* @return a new array list of the given size, wrapping the given array.
*/
public static ByteListBuf wrap(final byte[] a, final int length) {
if (length > a.length) throw new IllegalArgumentException("The specified length (" + length + ") is greater than the array size (" + a.length + ")");
final ByteListBuf l = new ByteListBuf(a, true);
l.size = length;
return l;
}
/**
* Wraps a given array into an array list.
*
* <p>
* Note it is guaranteed that the type of the array returned by {@link #elements()} will be the same
* (see the comments in the class documentation).
*
* @param a an array to wrap.
* @return a new array list wrapping the given array.
*/
public static ByteListBuf wrap(final byte[] a) {
return wrap(a, a.length);
}
/**
* Creates a new empty array list.
*
* @return a new empty array list.
*/
public static ByteListBuf of() {
return new ByteListBuf();
}
/**
* Creates an array list using an array of elements.
*
* @param init the array that will become the new backing array of the array list.
* @return a new array list backed by the given array.
* @see #wrap
*/
public static ByteListBuf of(final byte... init) {
return wrap(init);
}
@Override
public byte @NotNull [] asArray() {
if (this.size() == a.length) {
return this.a;
} else {
return this.toByteArray();
}
}
@Override
public byte @Nullable [] asArrayStrict() {
if (this.size() == a.length) {
return a;
} else {
return null;
}
}
@Override
public byte @Nullable [] asUnboundedArray() {
return a;
}
@Override
public byte @Nullable [] asUnboundedArrayStrict() {
return a;
}
@Override
public boolean isMutable() {
return mutable;
}
@Override
public void freeze() {
mutable = false;
}
@Override
public Buf subList(int from, int to) {
if (from == 0 && to == size()) return this;
ensureIndex(from);
ensureIndex(to);
if (from > to) throw new IndexOutOfBoundsException("Start index (" + from + ") is greater than end index (" + to + ")");
return new SubList(from, to);
}
@Override
public Buf copy() {
var copied = ByteListBuf.wrap(this.a.clone());
copied.size = this.size;
return copied;
}
@Override
public SafeByteArrayInputStream binaryInputStream() {
return new SafeByteArrayInputStream(this.a, 0, this.size);
}
@Override
public void writeTo(SafeDataOutput dataOutput) {
dataOutput.write(this.a, 0, this.size);
}
@Override
public SafeByteArrayOutputStream binaryOutputStream(int from, int to) {
it.unimi.dsi.fastutil.Arrays.ensureFromTo(size, from, to);
return new SafeByteArrayOutputStream(a, from, to);
}
@Override
public boolean equals(int aStartIndex, Buf b, int bStartIndex, int length) {
return b.equals(bStartIndex, this.a, aStartIndex, length);
}
@Override
public boolean equals(int aStartIndex, byte[] b, int bStartIndex, int length) {
if (aStartIndex < 0) return false;
if (aStartIndex + length > this.size) {
return false;
}
return Arrays.equals(a, aStartIndex, aStartIndex + length, b, bStartIndex, bStartIndex + length);
}
@Override
public String toString(Charset charset) {
return new String(a, 0, size, charset);
}
private class SubList extends AbstractByteList.ByteRandomAccessSubList implements Buf {
@Serial
private static final long serialVersionUID = -3185226345314976296L;
private boolean subMutable = true;
protected SubList(int from, int to) {
super(ByteListBuf.this, from, to);
}
// Most of the inherited methods should be fine, but we can override a few of them for performance.
// Needed because we can't access the parent class' instance variables directly in a different
// instance of SubList.
private byte[] getParentArray() {
return a;
}
@Override
public @NotNull Buf subList(int from, int to) {
it.unimi.dsi.fastutil.Arrays.ensureFromTo(a.length, from, to);
if (from > to) throw new IllegalArgumentException("Start index (" + from + ") is greater than end index (" + to + ")");
// Sadly we have to rewrap this, because if there is a sublist of a sublist, and the
// subsublist adds, both sublists need to update their "to" value.
return new SubList(from, to);
}
@Override
public Buf copy() {
return Buf.wrap(Arrays.copyOfRange(a, from, to));
}
@Override
public SafeByteArrayInputStream binaryInputStream() {
return new SafeByteArrayInputStream(a, from, size());
}
@Override
public void writeTo(SafeDataOutput dataOutput) {
dataOutput.write(a, from, size());
}
@Override
public SafeByteArrayOutputStream binaryOutputStream(int from, int to) {
it.unimi.dsi.fastutil.Arrays.ensureFromTo(size(), from, to);
return new SafeByteArrayOutputStream(a, from + this.from, to + this.from);
}
@Override
public boolean equals(int aStartIndex, Buf b, int bStartIndex, int length) {
return b.equals(bStartIndex, a, aStartIndex + from, length);
}
@Override
public boolean equals(int aStartIndex, byte[] b, int bStartIndex, int length) {
var aFrom = from + aStartIndex;
var aTo = from + aStartIndex + length;
if (aFrom < from) return false;
if (aTo > to) return false;
return Arrays.equals(a, aFrom, aTo, b, bStartIndex, bStartIndex + length);
}
@Override
public byte getByte(int i) {
ensureRestrictedIndex(i);
return a[i + from];
}
@Override
public byte @NotNull [] asArray() {
if (this.from == 0 && this.to == a.length) {
return a;
} else {
return toByteArray();
}
}
@Override
public byte @Nullable [] asArrayStrict() {
if (this.from == 0 && this.to == a.length) {
return a;
} else {
return null;
}
}
@Override
public byte @Nullable [] asUnboundedArray() {
if (from == 0) {
return a;
} else {
return toByteArray();
}
}
@Override
public byte @Nullable [] asUnboundedArrayStrict() {
if (from == 0) {
return a;
} else {
return null;
}
}
@Override
public boolean isMutable() {
return mutable && subMutable;
}
@Override
public void freeze() {
subMutable = false;
}
private final class SubListIterator extends ByteIterators.AbstractIndexBasedListIterator {
// We are using pos == 0 to be 0 relative to SubList.from (meaning you need to do a[from + i] when
// accessing array).
SubListIterator(int index) {
super(0, index);
}
@Override
protected byte get(int i) {
return a[from + i];
}
@Override
protected void add(int i, byte k) {
ByteListBuf.SubList.this.add(i, k);
}
@Override
protected void set(int i, byte k) {
ByteListBuf.SubList.this.set(i, k);
}
@Override
protected void remove(int i) {
ByteListBuf.SubList.this.removeByte(i);
}
@Override
protected int getMaxPos() {
return to - from;
}
@Override
public byte nextByte() {
if (!hasNext()) throw new NoSuchElementException();
return a[from + (lastReturned = pos++)];
}
@Override
public byte previousByte() {
if (!hasPrevious()) throw new NoSuchElementException();
return a[from + (lastReturned = --pos)];
}
@Override
public void forEachRemaining(final ByteConsumer action) {
final int max = to - from;
while (pos < max) {
action.accept(a[from + (lastReturned = pos++)]);
}
}
}
@Override
public @NotNull ByteListIterator listIterator(int index) {
return new ByteListBuf.SubList.SubListIterator(index);
}
private final class SubListSpliterator extends ByteSpliterators.LateBindingSizeIndexBasedSpliterator {
// We are using pos == 0 to be 0 relative to real array 0
SubListSpliterator() {
super(from);
}
private SubListSpliterator(int pos, int maxPos) {
super(pos, maxPos);
}
@Override
protected int getMaxPosFromBackingStore() {
return to;
}
@Override
protected byte get(int i) {
return a[i];
}
@Override
protected ByteListBuf.SubList.SubListSpliterator makeForSplit(int pos, int maxPos) {
return new ByteListBuf.SubList.SubListSpliterator(pos, maxPos);
}
@Override
public boolean tryAdvance(final ByteConsumer action) {
if (pos >= getMaxPos()) return false;
action.accept(a[pos++]);
return true;
}
@Override
public void forEachRemaining(final ByteConsumer action) {
final int max = getMaxPos();
while (pos < max) {
action.accept(a[pos++]);
}
}
}
@Override
public ByteSpliterator spliterator() {
return new ByteListBuf.SubList.SubListSpliterator();
}
boolean contentsEquals(byte[] otherA, int otherAFrom, int otherATo) {
if (a == otherA && from == otherAFrom && to == otherATo) return true;
return Arrays.equals(a, from, to, otherA, otherAFrom, otherATo);
}
@Override
public boolean equals(Object o) {
if (o == this) return true;
if (o == null) return false;
if (!(o instanceof java.util.List)) return false;
if (o instanceof ByteListBuf other) {
return contentsEquals(other.a, 0, other.size());
}
if (o instanceof SubList other) {
return contentsEquals(other.getParentArray(), other.from, other.to);
}
return super.equals(o);
}
int contentsCompareTo(byte[] otherA, int otherAFrom, int otherATo) {
if (a == otherA && from == otherAFrom && to == otherATo) return 0;
return Arrays.compareUnsigned(a, from, to, otherA, otherAFrom, otherATo);
}
@Override
public int compareTo(final java.util.@NotNull List<? extends Byte> l) {
if (l instanceof ByteListBuf other) {
return contentsCompareTo(other.a, 0, other.size());
}
if (l instanceof ByteListBuf.SubList other) {
return contentsCompareTo(other.getParentArray(), other.from, other.to);
}
return super.compareTo(l);
}
@Override
public String toString(Charset charset) {
return new String(a, from, to, charset);
}
}
}
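One behavioural note worth a sketch: Buf.wrap shares the caller's array, while Buf.copyOf copies it (assuming fastutil's ByteArrayList(byte[]) constructor copies the elements, which is its documented behaviour):

    byte[] backing = {1, 2, 3};
    Buf wrapped = Buf.wrap(backing);  // shares the array
    Buf copied = Buf.copyOf(backing); // copies the elements
    backing[0] = 9;
    assert wrapped.getByte(0) == 9; // the view sees the write
    assert copied.getByte(0) == 1;  // the copy does not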

Backuppable.java

@@ -1,8 +1,7 @@
 package it.cavallium.dbengine.client;

-import java.io.IOException;
 import java.util.concurrent.atomic.AtomicInteger;
-import reactor.core.publisher.Mono;
-import reactor.core.publisher.SignalType;

 public abstract class Backuppable implements IBackuppable {
@@ -13,29 +12,29 @@ public abstract class Backuppable implements IBackuppable {
 	private final AtomicInteger state = new AtomicInteger();

 	@Override
-	public final Mono<Void> pauseForBackup() {
-		return Mono.defer(() -> {
-			if (state.compareAndSet(State.RUNNING.ordinal(), State.PAUSING.ordinal())) {
-				return onPauseForBackup().doFinally(type -> state.compareAndSet(State.PAUSING.ordinal(),
-						type == SignalType.ON_ERROR ? State.RUNNING.ordinal() : State.PAUSED.ordinal()
-				));
-			} else {
-				return Mono.empty();
-			}
-		});
+	public final void pauseForBackup() {
+		if (state.compareAndSet(State.RUNNING.ordinal(), State.PAUSING.ordinal())) {
+			try {
+				onPauseForBackup();
+				state.compareAndSet(State.PAUSING.ordinal(), State.PAUSED.ordinal());
+			} catch (Throwable ex) {
+				state.compareAndSet(State.PAUSING.ordinal(), State.RUNNING.ordinal());
+				throw ex;
+			}
+		}
 	}

 	@Override
-	public final Mono<Void> resumeAfterBackup() {
-		return Mono.defer(() -> {
-			if (state.compareAndSet(State.PAUSED.ordinal(), State.RESUMING.ordinal())) {
-				return onResumeAfterBackup().doFinally(type -> state.compareAndSet(State.RESUMING.ordinal(),
-						type == SignalType.ON_ERROR ? State.PAUSED.ordinal() : State.RUNNING.ordinal()
-				));
-			} else {
-				return Mono.empty();
-			}
-		});
+	public final void resumeAfterBackup() {
+		if (state.compareAndSet(State.PAUSED.ordinal(), State.RESUMING.ordinal())) {
+			try {
+				onResumeAfterBackup();
+				state.compareAndSet(State.RESUMING.ordinal(), State.RUNNING.ordinal());
+			} catch (Throwable ex) {
+				state.compareAndSet(State.RESUMING.ordinal(), State.PAUSED.ordinal());
+				throw ex;
+			}
+		}
 	}

 	@Override
@@ -47,9 +46,9 @@ public abstract class Backuppable implements IBackuppable {
 		return State.values()[state.get()];
 	}

-	protected abstract Mono<Void> onPauseForBackup();
+	protected abstract void onPauseForBackup();

-	protected abstract Mono<Void> onResumeAfterBackup();
+	protected abstract void onResumeAfterBackup();

 	public final void setStopped() {
 		state.set(State.STOPPED.ordinal());
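A hypothetical subclass and call site for the synchronous Backuppable above; MyStore and runBackup are illustrative. On failure, the compareAndSet in pauseForBackup/resumeAfterBackup rolls the state back:

    class MyStore extends Backuppable {
        @Override
        protected void onPauseForBackup() {
            // flush pending writes, stop background jobs
        }

        @Override
        protected void onResumeAfterBackup() {
            // restart background jobs
        }
    }

    store.pauseForBackup(); // was a Mono<Void> that had to be subscribed
    try {
        runBackup();
    } finally {
        store.resumeAfterBackup();
    }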

BadBlock.java

@@ -1,8 +1,8 @@
 package it.cavallium.dbengine.client;

+import it.cavallium.dbengine.buffers.Buf;
 import it.cavallium.dbengine.rpc.current.data.Column;
-import it.unimi.dsi.fastutil.bytes.ByteList;
 import org.jetbrains.annotations.Nullable;

-public record BadBlock(String databaseName, @Nullable Column column, @Nullable ByteList rawKey,
+public record BadBlock(String databaseName, @Nullable Column column, @Nullable Buf rawKey,
 		@Nullable Throwable ex) {}

CastMapper.java

@@ -1,14 +1,14 @@
 package it.cavallium.dbengine.client;

-import it.cavallium.dbengine.client.Mapper;
-
 public class CastMapper<T, U> implements Mapper<T, U> {

+	@SuppressWarnings("unchecked")
 	@Override
 	public U map(T key) {
 		return (U) key;
 	}

+	@SuppressWarnings("unchecked")
 	@Override
 	public T unmap(U key) {
 		return (T) key;

CompositeDatabase.java

@@ -1,36 +1,32 @@
 package it.cavallium.dbengine.client;

 import io.micrometer.core.instrument.MeterRegistry;
-import io.netty5.buffer.BufferAllocator;
 import it.cavallium.dbengine.database.DatabaseOperations;
 import it.cavallium.dbengine.database.DatabaseProperties;
-import reactor.core.publisher.Flux;
-import reactor.core.publisher.Mono;
+import java.util.stream.Stream;

 public interface CompositeDatabase extends DatabaseProperties, DatabaseOperations {

-	Mono<Void> preClose();
+	void preClose();

-	Mono<Void> close();
+	void close();

 	/**
 	 * Can return SnapshotException
 	 */
-	Mono<CompositeSnapshot> takeSnapshot();
+	CompositeSnapshot takeSnapshot();

 	/**
 	 * Can return SnapshotException
 	 */
-	Mono<Void> releaseSnapshot(CompositeSnapshot snapshot);
+	void releaseSnapshot(CompositeSnapshot snapshot);

-	BufferAllocator getAllocator();
-
 	MeterRegistry getMeterRegistry();

 	/**
 	 * Find corrupted items
 	 */
-	Flux<BadBlock> badBlocks();
+	Stream<BadBlock> badBlocks();

-	Mono<Void> verifyChecksum();
+	void verifyChecksum();
 }
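Hypothetical call sites for the synchronous CompositeDatabase above; db is illustrative:

    CompositeSnapshot snapshot = db.takeSnapshot(); // may throw SnapshotException
    try {
        // read a consistent view of the database
    } finally {
        db.releaseSnapshot(snapshot);
    }

    db.badBlocks().forEach(bad ->
            System.err.println("corrupted key in " + bad.databaseName() + ": " + bad.rawKey()));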

CountedStream.java

@@ -1,47 +0,0 @@
package it.cavallium.dbengine.client;
import java.util.Collection;
import java.util.List;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
public class CountedStream<T> {
private final Flux<T> stream;
private final long count;
public CountedStream(Flux<T> stream, long count) {
this.stream = stream;
this.count = count;
}
public Flux<T> getStream() {
return stream;
}
public long getCount() {
return count;
}
@SafeVarargs
public static <T> CountedStream<T> merge(CountedStream<T>... stream) {
return merge(List.of(stream));
}
public static <T> CountedStream<T> merge(Collection<CountedStream<T>> stream) {
return stream
.stream()
.reduce((a, b) -> new CountedStream<>(Flux.merge(a.getStream(), b.getStream()), a.getCount() + b.getCount()))
.orElseGet(() -> new CountedStream<>(Flux.empty(), 0));
}
public static <T> Mono<CountedStream<T>> merge(Flux<CountedStream<T>> stream) {
return stream
.reduce((a, b) -> new CountedStream<>(Flux.merge(a.getStream(), b.getStream()), a.getCount() + b.getCount()))
.switchIfEmpty(Mono.fromSupplier(() -> new CountedStream<>(Flux.empty(), 0)));
}
public Mono<List<T>> collectList() {
return stream.collectList();
}
}

DefaultDatabaseOptions.java

@@ -58,7 +58,6 @@ public class DefaultDatabaseOptions {
 			false,
 			false,
 			true,
-			true,
 			Nullableint.empty(),
 			Nullablelong.empty(),
 			Nullablelong.empty(),

HitEntry.java

@@ -1,9 +1,9 @@
 package it.cavallium.dbengine.client;

 import org.jetbrains.annotations.NotNull;
-import reactor.core.publisher.Mono;
+import org.jetbrains.annotations.Nullable;

-public record HitEntry<T, U>(T key, U value, float score)
+public record HitEntry<T, U>(T key, @Nullable U value, float score)
 		implements Comparable<HitEntry<T, U>> {

 	@Override

HitKey.java

@@ -1,16 +1,13 @@
 package it.cavallium.dbengine.client;

 import it.cavallium.dbengine.database.collections.DatabaseEmpty.Nothing;
-import java.util.Comparator;
 import java.util.function.Function;
 import org.jetbrains.annotations.NotNull;
-import org.jetbrains.annotations.Nullable;
-import reactor.core.publisher.Mono;

 public record HitKey<T>(T key, float score) implements Comparable<HitKey<T>> {

-	public <U> Mono<HitEntry<T, U>> withValue(Function<T, Mono<U>> valueGetter) {
-		return valueGetter.apply(key).map(value -> new HitEntry<>(key, value, score));
+	public <U> HitEntry<T, U> withValue(Function<T, U> valueGetter) {
+		return new HitEntry<>(key, valueGetter.apply(key), score);
 	}

 	public <U> HitEntry<T, U> withNullValue() {
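A small sketch of the now-synchronous withValue; loadValue is hypothetical:

    HitKey<String> hit = new HitKey<>("doc-1", 0.87f);
    HitEntry<String, String> entry = hit.withValue(id -> loadValue(id));
    // entry.value() may be null, matching HitEntry's new @Nullable value component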

Hits.java

@@ -4,29 +4,25 @@ import it.cavallium.dbengine.client.query.current.data.TotalHitsCount;
 import it.cavallium.dbengine.database.DiscardingCloseable;
 import it.cavallium.dbengine.database.SafeCloseable;
 import it.cavallium.dbengine.database.collections.ValueGetter;
-import it.cavallium.dbengine.database.collections.ValueTransformer;
 import it.cavallium.dbengine.lucene.LuceneCloseable;
 import it.cavallium.dbengine.utils.SimpleResource;
-import java.util.Map.Entry;
-import java.util.Optional;
 import java.util.function.Function;
+import java.util.stream.Stream;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
-import reactor.core.publisher.Flux;
-import reactor.core.publisher.Mono;

 public class Hits<T> extends SimpleResource implements DiscardingCloseable {

 	private static final Logger LOG = LogManager.getLogger(Hits.class);
-	private static final Hits<?> EMPTY_HITS = new Hits<>(Flux.empty(), TotalHitsCount.of(0, true), false);
+	private static final Hits<?> EMPTY_HITS = new Hits<>(Stream.empty(), TotalHitsCount.of(0, true), false);

-	private final Flux<T> results;
+	private final Stream<T> results;
 	private final TotalHitsCount totalHitsCount;

-	public Hits(Flux<T> results, TotalHitsCount totalHitsCount) {
+	public Hits(Stream<T> results, TotalHitsCount totalHitsCount) {
 		this(results, totalHitsCount, true);
 	}

-	private Hits(Flux<T> results, TotalHitsCount totalHitsCount, boolean canClose) {
+	private Hits(Stream<T> results, TotalHitsCount totalHitsCount, boolean canClose) {
 		super(canClose);
 		this.results = results;
 		this.totalHitsCount = totalHitsCount;
@@ -37,44 +33,16 @@ public class Hits<T> extends SimpleResource implements DiscardingCloseable {
 		return (Hits<T>) EMPTY_HITS;
 	}

-	public static <T, U> Function<Hits<HitKey<T>>, Hits<LazyHitEntry<T, U>>> generateMapper(
+	public static <T, U> Function<Hits<HitKey<T>>, Hits<HitEntry<T, U>>> generateMapper(
 			ValueGetter<T, U> valueGetter) {
 		return result -> {
 			var hitsToTransform = result.results()
-					.map(hit -> new LazyHitEntry<>(Mono.just(hit.key()), valueGetter.get(hit.key()), hit.score()));
+					.map(hit -> new HitEntry<>(hit.key(), valueGetter.get(hit.key()), hit.score()));
 			return Hits.withResource(hitsToTransform, result.totalHitsCount(), result);
 		};
 	}

-	public static <T, U> Function<Hits<HitKey<T>>, Hits<LazyHitEntry<T, U>>> generateMapper(
-			ValueTransformer<T, U> valueTransformer) {
-		return result -> {
-			try {
-				var sharedHitsFlux = result.results().publish().refCount(3);
-				var scoresFlux = sharedHitsFlux.map(HitKey::score);
-				var keysFlux = sharedHitsFlux.map(HitKey::key);
-				var valuesFlux = valueTransformer.transform(keysFlux);
-				var transformedFlux = Flux.zip((Object[] data) -> {
-					//noinspection unchecked
-					var keyMono = Mono.just((T) data[0]);
-					//noinspection unchecked
-					var val = (Entry<T, Optional<U>>) data[1];
-					var valMono = Mono.justOrEmpty(val.getValue());
-					var score = (Float) data[2];
-					return new LazyHitEntry<>(keyMono, valMono, score);
-				}, keysFlux, valuesFlux, scoresFlux);
-				return Hits.withResource(transformedFlux, result.totalHitsCount(), result);
-			} catch (Throwable t) {
-				result.close();
-				throw t;
-			}
-		};
-	}
-
-	public static <T> Hits<T> withResource(Flux<T> hits, TotalHitsCount count, SafeCloseable resource) {
+	public static <T> Hits<T> withResource(Stream<T> hits, TotalHitsCount count, SafeCloseable resource) {
 		if (resource instanceof LuceneCloseable luceneCloseable) {
 			return new LuceneHits<>(hits, count, luceneCloseable);
 		} else {
@@ -82,7 +50,7 @@ public class Hits<T> extends SimpleResource implements DiscardingCloseable {
 		}
 	}

-	public Flux<T> results() {
+	public Stream<T> results() {
 		ensureOpen();
 		return results;
 	}
@@ -105,7 +73,7 @@ public class Hits<T> extends SimpleResource implements DiscardingCloseable {
 		private final LuceneCloseable resource;

-		public LuceneHits(Flux<U> hits, TotalHitsCount count, LuceneCloseable resource) {
+		public LuceneHits(Stream<U> hits, TotalHitsCount count, LuceneCloseable resource) {
 			super(hits, count);
 			this.resource = resource;
 		}
@@ -125,7 +93,7 @@ public class Hits<T> extends SimpleResource implements DiscardingCloseable {
 		private final SafeCloseable resource;

-		public CloseableHits(Flux<U> hits, TotalHitsCount count, SafeCloseable resource) {
+		public CloseableHits(Stream<U> hits, TotalHitsCount count, SafeCloseable resource) {
 			super(hits, count);
 			this.resource = resource;
 		}

IBackuppable.java

@@ -1,12 +1,10 @@
 package it.cavallium.dbengine.client;

-import reactor.core.publisher.Mono;
-
 public interface IBackuppable {

-	Mono<Void> pauseForBackup();
+	void pauseForBackup();

-	Mono<Void> resumeAfterBackup();
+	void resumeAfterBackup();

 	boolean isPaused();
 }

IndexAction.java

@@ -1,128 +0,0 @@
package it.cavallium.dbengine.client;
import it.cavallium.dbengine.client.IndexAction.Add;
import it.cavallium.dbengine.client.IndexAction.AddMulti;
import it.cavallium.dbengine.client.IndexAction.Update;
import it.cavallium.dbengine.client.IndexAction.UpdateMulti;
import it.cavallium.dbengine.client.IndexAction.Delete;
import it.cavallium.dbengine.client.IndexAction.DeleteAll;
import it.cavallium.dbengine.client.IndexAction.TakeSnapshot;
import it.cavallium.dbengine.client.IndexAction.ReleaseSnapshot;
import it.cavallium.dbengine.client.IndexAction.Flush;
import it.cavallium.dbengine.client.IndexAction.Refresh;
import it.cavallium.dbengine.client.IndexAction.Close;
import it.cavallium.dbengine.database.LLUpdateDocument;
import it.cavallium.dbengine.database.LLSnapshot;
import it.cavallium.dbengine.database.LLTerm;
import java.util.Map;
import java.util.Map.Entry;
import reactor.core.publisher.Flux;
import reactor.core.publisher.MonoSink;
sealed interface IndexAction permits Add, AddMulti, Update, UpdateMulti, Delete, DeleteAll, TakeSnapshot,
ReleaseSnapshot, Flush, Refresh, Close {
IndexActionType getType();
final record Add(LLTerm key, LLUpdateDocument doc, MonoSink<Void> addedFuture) implements IndexAction {
@Override
public IndexActionType getType() {
return IndexActionType.ADD;
}
}
final record AddMulti(Flux<Entry<LLTerm, LLUpdateDocument>> docsFlux, MonoSink<Void> addedMultiFuture) implements IndexAction {
@Override
public IndexActionType getType() {
return IndexActionType.ADD_MULTI;
}
}
final record Update(LLTerm key, LLUpdateDocument doc, MonoSink<Void> updatedFuture) implements IndexAction {
@Override
public IndexActionType getType() {
return IndexActionType.UPDATE;
}
}
final record UpdateMulti(Map<LLTerm, LLUpdateDocument> docs, MonoSink<Void> updatedMultiFuture) implements IndexAction {
@Override
public IndexActionType getType() {
return IndexActionType.UPDATE_MULTI;
}
}
final record Delete(LLTerm key, MonoSink<Void> deletedFuture) implements IndexAction {
@Override
public IndexActionType getType() {
return IndexActionType.DELETE;
}
}
final record DeleteAll(MonoSink<Void> deletedAllFuture) implements IndexAction {
@Override
public IndexActionType getType() {
return IndexActionType.DELETE_ALL;
}
}
final record TakeSnapshot(MonoSink<LLSnapshot> snapshotFuture) implements IndexAction {
@Override
public IndexActionType getType() {
return IndexActionType.TAKE_SNAPSHOT;
}
}
final record ReleaseSnapshot(LLSnapshot snapshot, MonoSink<Void> releasedFuture) implements IndexAction {
@Override
public IndexActionType getType() {
return IndexActionType.RELEASE_SNAPSHOT;
}
}
final record Flush(MonoSink<Void> flushFuture) implements IndexAction {
@Override
public IndexActionType getType() {
return IndexActionType.FLUSH;
}
}
final record Refresh(boolean force, MonoSink<Void> refreshFuture) implements IndexAction {
@Override
public IndexActionType getType() {
return IndexActionType.REFRESH;
}
}
final record Close(MonoSink<Void> closeFuture) implements IndexAction {
@Override
public IndexActionType getType() {
return IndexActionType.CLOSE;
}
}
enum IndexActionType {
ADD,
ADD_MULTI,
UPDATE,
UPDATE_MULTI,
DELETE,
DELETE_ALL,
TAKE_SNAPSHOT,
RELEASE_SNAPSHOT,
FLUSH,
REFRESH,
CLOSE
}
}

Indicizer.java

@@ -4,40 +4,33 @@ import com.google.common.collect.Multimap;
 import com.google.common.collect.Multimaps;
 import it.cavallium.dbengine.database.LLIndexRequest;
 import it.cavallium.dbengine.database.LLSoftUpdateDocument;
-import it.cavallium.dbengine.database.LLUpdateDocument;
 import it.cavallium.dbengine.database.LLTerm;
+import it.cavallium.dbengine.database.LLUpdateDocument;
 import it.cavallium.dbengine.database.LLUpdateFields;
-import it.cavallium.dbengine.database.LLUtils;
 import it.cavallium.dbengine.rpc.current.data.IndicizerAnalyzers;
 import it.cavallium.dbengine.rpc.current.data.IndicizerSimilarities;
 import java.util.Map;
-import java.util.Set;
 import org.apache.lucene.index.IndexableField;
-import org.apache.lucene.util.BytesRef;
 import org.jetbrains.annotations.NotNull;
-import reactor.core.publisher.Flux;
-import reactor.core.publisher.Mono;
-import reactor.util.function.Tuple2;

 public abstract class Indicizer<T, U> {

 	/**
 	 * Transform a value to an IndexRequest.
 	 */
-	public abstract @NotNull Mono<? extends LLIndexRequest> toIndexRequest(@NotNull T key, @NotNull U value);
+	public abstract @NotNull LLIndexRequest toIndexRequest(@NotNull T key, @NotNull U value);

-	public final @NotNull Mono<LLUpdateDocument> toDocument(@NotNull T key, @NotNull U value) {
-		return toIndexRequest(key, value).map(req -> {
-			if (req instanceof LLUpdateFields updateFields) {
-				return new LLUpdateDocument(updateFields.items());
-			} else if (req instanceof LLUpdateDocument updateDocument) {
-				return updateDocument;
-			} else if (req instanceof LLSoftUpdateDocument softUpdateDocument) {
-				return new LLUpdateDocument(softUpdateDocument.items());
-			} else {
-				throw new UnsupportedOperationException("Unexpected request type: " + req);
-			}
-		});
+	public final @NotNull LLUpdateDocument toDocument(@NotNull T key, @NotNull U value) {
+		var req = toIndexRequest(key, value);
+		if (req instanceof LLUpdateFields updateFields) {
+			return new LLUpdateDocument(updateFields.items());
+		} else if (req instanceof LLUpdateDocument updateDocument) {
+			return updateDocument;
+		} else if (req instanceof LLSoftUpdateDocument softUpdateDocument) {
+			return new LLUpdateDocument(softUpdateDocument.items());
+		} else {
+			throw new UnsupportedOperationException("Unexpected request type: " + req);
+		}
 	}

 	public abstract @NotNull LLTerm toIndex(@NotNull T key);

IndicizerAnalyzers.java

@@ -1,7 +1,6 @@
 package it.cavallium.dbengine.client;

 import it.cavallium.dbengine.lucene.analyzer.TextFieldsAnalyzer;
-import it.cavallium.dbengine.rpc.current.serializers.IndicizerAnalyzersSerializer;
 import java.util.Map;

 public class IndicizerAnalyzers {


@@ -2,7 +2,6 @@ package it.cavallium.dbengine.client;

 import com.squareup.moshi.JsonReader;
 import com.squareup.moshi.JsonWriter;
-import it.cavallium.data.generator.nativedata.Int52;
 import it.unimi.dsi.fastutil.ints.IntOpenHashSet;
 import java.io.IOException;
 import org.jetbrains.annotations.NotNull;

LazyHitEntry.java

@@ -1,15 +0,0 @@
package it.cavallium.dbengine.client;
import org.jetbrains.annotations.NotNull;
import reactor.core.publisher.Mono;
public record LazyHitEntry<T, U>(Mono<T> key, Mono<U> value, float score) {
public Mono<HitEntry<T, U>> resolve() {
return Mono.zip(key, value, (k, v) -> new HitEntry<>(k, v, score));
}
public Mono<HitKey<T>> resolveKey() {
return key.map(k -> new HitKey<>(k, score));
}
}

LazyHitKey.java

@@ -1,19 +0,0 @@
package it.cavallium.dbengine.client;
import java.util.function.Function;
import reactor.core.publisher.Mono;
public record LazyHitKey<T>(Mono<T> key, float score) {
public <U> LazyHitEntry<T, U> withValue(Function<T, Mono<U>> valueGetter) {
return new LazyHitEntry<>(key, key.flatMap(valueGetter), score);
}
public Mono<HitKey<T>> resolve() {
return key.map(k -> new HitKey<>(k, score));
}
public <U> Mono<HitEntry<T, U>> resolveWithValue(Function<T, Mono<U>> valueGetter) {
return resolve().flatMap(key -> key.withValue(valueGetter));
}
}

LuceneIndex.java

@@ -1,78 +1,71 @@
 package it.cavallium.dbengine.client;

-import io.netty5.util.Send;
 import it.cavallium.dbengine.client.query.ClientQueryParams;
 import it.cavallium.dbengine.client.query.current.data.Query;
 import it.cavallium.dbengine.client.query.current.data.TotalHitsCount;
 import it.cavallium.dbengine.database.Delta;
 import it.cavallium.dbengine.database.LLSnapshottable;
-import it.cavallium.dbengine.database.collections.ValueGetter;
-import it.cavallium.dbengine.database.collections.ValueTransformer;
 import it.cavallium.dbengine.lucene.collector.Buckets;
 import it.cavallium.dbengine.lucene.searcher.BucketParams;
-import it.unimi.dsi.fastutil.doubles.DoubleArrayList;
 import java.util.List;
 import java.util.Map.Entry;
+import java.util.stream.Stream;
 import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
-import reactor.core.publisher.Flux;
-import reactor.core.publisher.Mono;

 public interface LuceneIndex<T, U> extends LLSnapshottable {

-	Mono<Void> addDocument(T key, U value);
+	void addDocument(T key, U value);

-	Mono<Long> addDocuments(boolean atomic, Flux<Entry<T, U>> entries);
+	long addDocuments(boolean atomic, Stream<Entry<T, U>> entries);

-	Mono<Void> deleteDocument(T key);
+	void deleteDocument(T key);

-	Mono<Void> updateDocument(T key, @NotNull U value);
+	void updateDocument(T key, @NotNull U value);

-	Mono<Long> updateDocuments(Flux<Entry<T, U>> entries);
+	long updateDocuments(Stream<Entry<T, U>> entries);

-	default Mono<Void> updateOrDeleteDocument(T key, @Nullable U value) {
+	default void updateOrDeleteDocument(T key, @Nullable U value) {
 		if (value == null) {
-			return deleteDocument(key);
+			deleteDocument(key);
 		} else {
-			return updateDocument(key, value);
+			updateDocument(key, value);
 		}
 	}

-	default Mono<Void> updateOrDeleteDocumentIfModified(T key, @NotNull Delta<U> delta) {
-		return updateOrDeleteDocumentIfModified(key, delta.current(), delta.isModified());
+	default void updateOrDeleteDocumentIfModified(T key, @NotNull Delta<U> delta) {
+		updateOrDeleteDocumentIfModified(key, delta.current(), delta.isModified());
 	}

-	default Mono<Void> updateOrDeleteDocumentIfModified(T key, @Nullable U currentValue, boolean modified) {
+	default void updateOrDeleteDocumentIfModified(T key, @Nullable U currentValue, boolean modified) {
 		if (modified) {
-			return updateOrDeleteDocument(key, currentValue);
-		} else {
-			return Mono.empty();
+			updateOrDeleteDocument(key, currentValue);
 		}
 	}

-	Mono<Void> deleteAll();
+	void deleteAll();

-	Mono<Hits<HitKey<T>>> moreLikeThis(ClientQueryParams queryParams, T key,
+	Hits<HitKey<T>> moreLikeThis(ClientQueryParams queryParams, T key,
 			U mltDocumentValue);

-	Mono<Hits<HitKey<T>>> search(ClientQueryParams queryParams);
+	Hits<HitKey<T>> search(ClientQueryParams queryParams);

-	Mono<Buckets> computeBuckets(@Nullable CompositeSnapshot snapshot,
+	Buckets computeBuckets(@Nullable CompositeSnapshot snapshot,
 			@NotNull List<Query> queries,
 			@Nullable Query normalizationQuery,
 			BucketParams bucketParams);

-	Mono<TotalHitsCount> count(@Nullable CompositeSnapshot snapshot, Query query);
+	TotalHitsCount count(@Nullable CompositeSnapshot snapshot, Query query);

 	boolean isLowMemoryMode();

 	void close();

-	Mono<Void> flush();
+	void flush();

-	Mono<Void> waitForMerges();
+	void waitForMerges();

-	Mono<Void> waitForLastMerges();
+	void waitForLastMerges();

-	Mono<Void> refresh(boolean force);
+	void refresh(boolean force);
 }
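Hypothetical call sites against the synchronous LuceneIndex above; entries is assumed to be a Collection<Entry<T, U>>:

    long added = index.addDocuments(true, entries.stream()); // was Flux.fromIterable(entries)
    Hits<HitKey<String>> hits = index.search(queryParams);   // was Mono<Hits<...>>
    TotalHitsCount count = index.count(null, query);
    index.flush(); // now blocks instead of returning Mono<Void>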
@@ -5,7 +5,6 @@ import it.cavallium.dbengine.client.Hits.LuceneHits;
 import it.cavallium.dbengine.client.query.ClientQueryParams;
 import it.cavallium.dbengine.client.query.current.data.Query;
 import it.cavallium.dbengine.client.query.current.data.TotalHitsCount;
-import it.cavallium.dbengine.database.DiscardingCloseable;
 import it.cavallium.dbengine.database.LLKeyScore;
 import it.cavallium.dbengine.database.LLLuceneIndex;
 import it.cavallium.dbengine.database.LLSearchResultShard;
@@ -13,8 +12,6 @@ import it.cavallium.dbengine.database.LLSearchResultShard.LuceneLLSearchResultShard;
 import it.cavallium.dbengine.database.LLSearchResultShard.ResourcesLLSearchResultShard;
 import it.cavallium.dbengine.database.LLSnapshot;
 import it.cavallium.dbengine.database.LLTerm;
-import it.cavallium.dbengine.database.LLUpdateDocument;
-import it.cavallium.dbengine.database.LLUtils;
 import it.cavallium.dbengine.database.SafeCloseable;
 import it.cavallium.dbengine.lucene.LuceneCloseable;
 import it.cavallium.dbengine.lucene.LuceneUtils;
@@ -26,14 +23,12 @@ import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Objects;
-import java.util.logging.Level;
+import java.util.function.Function;
+import java.util.stream.Stream;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
-import reactor.core.publisher.Flux;
-import reactor.core.publisher.Mono;
-import reactor.core.publisher.SignalType;

 public class LuceneIndexImpl<T, U> implements LuceneIndex<T, U> {

@@ -56,96 +51,87 @@ public class LuceneIndexImpl<T, U> implements LuceneIndex<T, U> {
   }

   @Override
-  public Mono<Void> addDocument(T key, U value) {
-    return indicizer
-        .toDocument(key, value)
-        .flatMap(doc -> luceneIndex.addDocument(indicizer.toIndex(key), doc));
+  public void addDocument(T key, U value) {
+    luceneIndex.addDocument(indicizer.toIndex(key), indicizer.toDocument(key, value));
   }

   @Override
-  public Mono<Long> addDocuments(boolean atomic, Flux<Entry<T, U>> entries) {
-    return luceneIndex.addDocuments(atomic, entries.flatMap(entry -> indicizer
-        .toDocument(entry.getKey(), entry.getValue())
-        .map(doc -> Map.entry(indicizer.toIndex(entry.getKey()), doc))));
+  public long addDocuments(boolean atomic, Stream<Entry<T, U>> entries) {
+    return luceneIndex.addDocuments(atomic, entries.map(entry ->
+        Map.entry(indicizer.toIndex(entry.getKey()), indicizer.toDocument(entry.getKey(), entry.getValue()))));
   }

   @Override
-  public Mono<Void> deleteDocument(T key) {
+  public void deleteDocument(T key) {
     LLTerm id = indicizer.toIndex(key);
-    return luceneIndex.deleteDocument(id);
+    luceneIndex.deleteDocument(id);
   }

   @Override
-  public Mono<Void> updateDocument(T key, @NotNull U value) {
-    return indicizer
-        .toIndexRequest(key, value)
-        .flatMap(doc -> luceneIndex.update(indicizer.toIndex(key), doc));
+  public void updateDocument(T key, @NotNull U value) {
+    luceneIndex.update(indicizer.toIndex(key), indicizer.toIndexRequest(key, value));
   }

   @Override
-  public Mono<Long> updateDocuments(Flux<Entry<T, U>> entries) {
-    Flux<Entry<LLTerm, LLUpdateDocument>> mappedEntries = entries
-        .flatMap(entry -> Mono
-            .zip(Mono.just(indicizer.toIndex(entry.getKey())),
-                indicizer.toDocument(entry.getKey(), entry.getValue()).single(),
-                Map::entry
-            )
-            .single()
-        )
-        .log("impl-update-documents", Level.FINEST, false, SignalType.ON_NEXT, SignalType.ON_COMPLETE);
-    return luceneIndex.updateDocuments(mappedEntries);
+  public long updateDocuments(Stream<Entry<T, U>> entries) {
+    return luceneIndex.updateDocuments(entries.map(entry ->
+        Map.entry(indicizer.toIndex(entry.getKey()), indicizer.toDocument(entry.getKey(), entry.getValue()))));
   }

   @Override
-  public Mono<Void> deleteAll() {
-    return luceneIndex.deleteAll();
+  public void deleteAll() {
+    luceneIndex.deleteAll();
   }

   @Override
-  public Mono<Hits<HitKey<T>>> moreLikeThis(ClientQueryParams queryParams,
+  public Hits<HitKey<T>> moreLikeThis(ClientQueryParams queryParams,
       T key,
       U mltDocumentValue) {
     var mltDocumentFields
         = indicizer.getMoreLikeThisDocumentFields(key, mltDocumentValue);
-    return luceneIndex
+    var results = luceneIndex
         .moreLikeThis(resolveSnapshot(queryParams.snapshot()),
             queryParams.toQueryParams(),
             indicizer.getKeyFieldName(),
             mltDocumentFields
         )
-        .collectList()
-        .mapNotNull(shards -> mergeResults(queryParams, shards))
-        .map(llSearchResult -> mapResults(llSearchResult))
-        .defaultIfEmpty(Hits.empty())
-        .doOnDiscard(DiscardingCloseable.class, LLUtils::onDiscard);
+        .toList();
+    LLSearchResultShard mergedResults = mergeResults(queryParams, results);
+    if (mergedResults != null) {
+      return mapResults(mergedResults);
+    } else {
+      return Hits.empty();
+    }
   }

   @Override
-  public Mono<Hits<HitKey<T>>> search(ClientQueryParams queryParams) {
-    return luceneIndex
+  public Hits<HitKey<T>> search(ClientQueryParams queryParams) {
+    var results = luceneIndex
         .search(resolveSnapshot(queryParams.snapshot()),
             queryParams.toQueryParams(),
             indicizer.getKeyFieldName()
         )
-        .collectList()
-        .mapNotNull(shards -> mergeResults(queryParams, shards))
-        .map(llSearchResult -> mapResults(llSearchResult))
-        .defaultIfEmpty(Hits.empty())
-        .doOnDiscard(DiscardingCloseable.class, LLUtils::onDiscard);
+        .toList();
+    var mergedResults = mergeResults(queryParams, results);
+    if (mergedResults != null) {
+      return mapResults(mergedResults);
+    } else {
+      return Hits.empty();
+    }
   }

   @Override
-  public Mono<Buckets> computeBuckets(@Nullable CompositeSnapshot snapshot,
+  public Buckets computeBuckets(@Nullable CompositeSnapshot snapshot,
       @NotNull List<Query> query,
       @Nullable Query normalizationQuery,
       BucketParams bucketParams) {
-    return luceneIndex.computeBuckets(resolveSnapshot(snapshot), query,
-        normalizationQuery, bucketParams).single();
+    return luceneIndex.computeBuckets(resolveSnapshot(snapshot), query, normalizationQuery, bucketParams);
   }

   private Hits<HitKey<T>> mapResults(LLSearchResultShard llSearchResult) {
-    Flux<HitKey<T>> scoresWithKeysFlux = llSearchResult.results()
+    Stream<HitKey<T>> scoresWithKeysFlux = llSearchResult.results()
         .map(hit -> new HitKey<>(indicizer.getKey(hit.key()), hit.score()));
     if (llSearchResult instanceof LuceneCloseable luceneCloseable) {
@@ -156,10 +142,8 @@ public class LuceneIndexImpl<T, U> implements LuceneIndex<T, U> {
   }

   @Override
-  public Mono<TotalHitsCount> count(@Nullable CompositeSnapshot snapshot, Query query) {
-    return luceneIndex
-        .count(resolveSnapshot(snapshot), query, MAX_COUNT_TIME)
-        .doOnDiscard(DiscardingCloseable.class, LLUtils::onDiscard);
+  public TotalHitsCount count(@Nullable CompositeSnapshot snapshot, Query query) {
+    return luceneIndex.count(resolveSnapshot(snapshot), query, MAX_COUNT_TIME);
   }

   @Override
@@ -176,36 +160,36 @@ public class LuceneIndexImpl<T, U> implements LuceneIndex<T, U> {
    * Flush writes to disk
    */
   @Override
-  public Mono<Void> flush() {
-    return luceneIndex.flush();
+  public void flush() {
+    luceneIndex.flush();
   }

   @Override
-  public Mono<Void> waitForMerges() {
-    return luceneIndex.waitForMerges();
+  public void waitForMerges() {
+    luceneIndex.waitForMerges();
   }

   @Override
-  public Mono<Void> waitForLastMerges() {
-    return luceneIndex.waitForLastMerges();
+  public void waitForLastMerges() {
+    luceneIndex.waitForLastMerges();
   }

   /**
    * Refresh index searcher
    */
   @Override
-  public Mono<Void> refresh(boolean force) {
-    return luceneIndex.refresh(force);
+  public void refresh(boolean force) {
+    luceneIndex.refresh(force);
   }

   @Override
-  public Mono<LLSnapshot> takeSnapshot() {
+  public LLSnapshot takeSnapshot() {
     return luceneIndex.takeSnapshot();
   }

   @Override
-  public Mono<Void> releaseSnapshot(LLSnapshot snapshot) {
-    return luceneIndex.releaseSnapshot(snapshot);
+  public void releaseSnapshot(LLSnapshot snapshot) {
+    luceneIndex.releaseSnapshot(snapshot);
   }

   @SuppressWarnings({"unchecked", "rawtypes"})
@@ -217,7 +201,7 @@ public class LuceneIndexImpl<T, U> implements LuceneIndex<T, U> {
       return shards.get(0);
     }
     TotalHitsCount count = null;
-    ObjectArrayList<Flux<LLKeyScore>> results = new ObjectArrayList<>(shards.size());
+    ObjectArrayList<Stream<LLKeyScore>> results = new ObjectArrayList<>(shards.size());
     ObjectArrayList resources = new ObjectArrayList(shards.size());
     boolean luceneResources = false;
     for (LLSearchResultShard shard : shards) {
@@ -230,17 +214,17 @@ public class LuceneIndexImpl<T, U> implements LuceneIndex<T, U> {
         count = LuceneUtils.sum(count, shard.totalHitsCount());
       }
       var maxLimit = queryParams.offset() + queryParams.limit();
-      results.add(shard.results().take(maxLimit, true));
+      results.add(shard.results().limit(maxLimit));
       resources.add(shard);
     }
     Objects.requireNonNull(count);
-    Flux<LLKeyScore> resultsFlux;
+    Stream<LLKeyScore> resultsFlux;
     if (results.size() == 0) {
-      resultsFlux = Flux.empty();
+      resultsFlux = Stream.empty();
     } else if (results.size() == 1) {
      resultsFlux = results.get(0);
     } else {
-      resultsFlux = Flux.merge(results);
+      resultsFlux = results.parallelStream().flatMap(Function.identity());
     }
     if (luceneResources) {
       return new LuceneLLSearchResultShard(resultsFlux, count, (List<LuceneCloseable>) resources);
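The shard-merging change deserves a note: Flux.merge interleaved shard emissions eagerly, while the Stream replacement flat-maps the per-shard streams from a parallel stream over the list, so shards may be consumed concurrently and cross-shard ordering is not guaranteed. A self-contained model of that merge strategy:

import java.util.List;
import java.util.function.Function;
import java.util.stream.Stream;

// Each shard stream is consumed at most once, unlike a Flux that could be re-subscribed.
List<Stream<String>> shardResults = List.of(Stream.of("a", "b"), Stream.of("c", "d"));
Stream<String> merged = shardResults.parallelStream().flatMap(Function.identity());
merged.forEach(System.out::println); // order across shards is not guaranteed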
@@ -1,11 +1,11 @@
 package it.cavallium.dbengine.client;

-import io.netty5.buffer.Buffer;
-import io.netty5.util.Send;
+import it.cavallium.dbengine.buffers.Buf;
+import it.cavallium.dbengine.buffers.BufDataInput;
+import it.cavallium.dbengine.buffers.BufDataOutput;
 import it.cavallium.dbengine.database.serialization.SerializationException;
 import it.cavallium.dbengine.database.serialization.Serializer;
 import org.jetbrains.annotations.NotNull;
-import org.jetbrains.annotations.Nullable;

 public class MappedSerializer<A, B> implements Serializer<B> {

@@ -19,13 +19,13 @@ public class MappedSerializer<A, B> implements Serializer<B> {
   }

   @Override
-  public @NotNull B deserialize(@NotNull Buffer serialized) throws SerializationException {
-    return keyMapper.map(serializer.deserialize(serialized));
+  public @NotNull B deserialize(@NotNull BufDataInput in) throws SerializationException {
+    return keyMapper.map(serializer.deserialize(in));
   }

   @Override
-  public void serialize(@NotNull B deserialized, Buffer output) throws SerializationException {
-    serializer.serialize(keyMapper.unmap(deserialized), output);
+  public void serialize(@NotNull B deserialized, BufDataOutput out) throws SerializationException {
+    serializer.serialize(keyMapper.unmap(deserialized), out);
   }

   @Override
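A hedged usage sketch: assuming Mapper<A, B> exposes the map/unmap pair used above and that the MappedSerializer constructor takes the delegate serializer followed by the mapper (both assumptions, not verified against the rest of the codebase), an existing Serializer<String> can be re-typed without touching the wire format:

// Hypothetical: wrap a Serializer<String> into a Serializer<java.util.UUID>.
Mapper<String, java.util.UUID> asUuid = new Mapper<>() {
  @Override public java.util.UUID map(String raw) { return java.util.UUID.fromString(raw); }
  @Override public String unmap(java.util.UUID value) { return value.toString(); }
};
Serializer<java.util.UUID> uuidSerializer = new MappedSerializer<>(stringSerializer, asUuid);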
@@ -1,11 +1,11 @@
 package it.cavallium.dbengine.client;

-import io.netty5.buffer.Buffer;
-import io.netty5.util.Send;
+import it.cavallium.dbengine.buffers.Buf;
+import it.cavallium.dbengine.buffers.BufDataInput;
+import it.cavallium.dbengine.buffers.BufDataOutput;
 import it.cavallium.dbengine.database.serialization.SerializationException;
 import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
 import org.jetbrains.annotations.NotNull;
-import org.jetbrains.annotations.Nullable;

 public class MappedSerializerFixedLength<A, B> implements SerializerFixedBinaryLength<B> {

@@ -19,13 +19,13 @@ public class MappedSerializerFixedLength<A, B> implements SerializerFixedBinaryLength<B> {
   }

   @Override
-  public @NotNull B deserialize(@NotNull Buffer serialized) throws SerializationException {
-    return keyMapper.map(fixedLengthSerializer.deserialize(serialized));
+  public @NotNull B deserialize(@NotNull BufDataInput in) throws SerializationException {
+    return keyMapper.map(fixedLengthSerializer.deserialize(in));
   }

   @Override
-  public void serialize(@NotNull B deserialized, Buffer output) throws SerializationException {
-    fixedLengthSerializer.serialize(keyMapper.unmap(deserialized), output);
+  public void serialize(@NotNull B deserialized, BufDataOutput out) throws SerializationException {
+    fixedLengthSerializer.serialize(keyMapper.unmap(deserialized), out);
   }

   @Override
@@ -1,7 +1,5 @@
 package it.cavallium.dbengine.client;

-import it.cavallium.dbengine.client.Mapper;

 public class NoMapper<T> implements Mapper<T, T> {

   @Override
@@ -1,90 +0,0 @@
-package it.cavallium.dbengine.client;
-
-import java.util.concurrent.TimeUnit;
-import org.jetbrains.annotations.NotNull;
-import reactor.core.Disposable;
-import reactor.core.scheduler.Scheduler;
-
-public class UninterruptibleScheduler {
-
-  public static Scheduler uninterruptibleScheduler(Scheduler scheduler) {
-    return new Scheduler() {
-      @Override
-      public @NotNull Disposable schedule(@NotNull Runnable task) {
-        scheduler.schedule(task);
-        return () -> {};
-      }
-
-      @Override
-      public @NotNull Disposable schedule(@NotNull Runnable task, long delay, @NotNull TimeUnit unit) {
-        scheduler.schedule(task, delay, unit);
-        return () -> {};
-      }
-
-      @Override
-      public @NotNull Disposable schedulePeriodically(@NotNull Runnable task,
-          long initialDelay,
-          long period,
-          @NotNull TimeUnit unit) {
-        scheduler.schedulePeriodically(task, initialDelay, period, unit);
-        return () -> {};
-      }
-
-      @Override
-      public boolean isDisposed() {
-        return scheduler.isDisposed();
-      }
-
-      @Override
-      public void dispose() {
-        scheduler.dispose();
-      }
-
-      @Override
-      public void start() {
-        scheduler.start();
-      }
-
-      @Override
-      public long now(@NotNull TimeUnit unit) {
-        return Scheduler.super.now(unit);
-      }
-
-      @Override
-      public @NotNull Worker createWorker() {
-        var worker = scheduler.createWorker();
-        return new Worker() {
-          @Override
-          public @NotNull Disposable schedule(@NotNull Runnable task) {
-            worker.schedule(task);
-            return () -> {};
-          }
-
-          @Override
-          public void dispose() {
-          }
-
-          @Override
-          public boolean isDisposed() {
-            return worker.isDisposed();
-          }
-
-          @Override
-          public @NotNull Disposable schedule(@NotNull Runnable task, long delay, @NotNull TimeUnit unit) {
-            worker.schedule(task, delay, unit);
-            return () -> {};
-          }
-
-          @Override
-          public @NotNull Disposable schedulePeriodically(@NotNull Runnable task,
-              long initialDelay,
-              long period,
-              @NotNull TimeUnit unit) {
-            worker.schedulePeriodically(task, initialDelay, period, unit);
-            return () -> {};
-          }
-        };
-      }
-    };
-  }
-}
@@ -1,7 +1,6 @@
 package it.cavallium.dbengine.client.query;

 import io.soabase.recordbuilder.core.RecordBuilder;
-import it.cavallium.data.generator.nativedata.Nullablefloat;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.client.Sort;
 import it.cavallium.dbengine.client.query.current.data.NoSort;
@@ -1,12 +1,19 @@
 package it.cavallium.dbengine.client.query;

 import com.squareup.moshi.JsonAdapter;
+import it.cavallium.dbengine.buffers.Buf;
 import it.cavallium.dbengine.client.IntOpenHashSetJsonAdapter;
 import it.cavallium.dbengine.client.query.current.CurrentVersion;
 import it.cavallium.dbengine.client.query.current.IBaseType;
 import it.cavallium.dbengine.client.query.current.IType;
+import it.cavallium.dbengine.utils.BooleanListJsonAdapter;
+import it.cavallium.dbengine.utils.ByteListJsonAdapter;
+import it.cavallium.dbengine.utils.CharListJsonAdapter;
+import it.cavallium.dbengine.utils.IntListJsonAdapter;
+import it.cavallium.dbengine.utils.LongListJsonAdapter;
+import it.cavallium.dbengine.utils.MoshiPolymorphic;
+import it.cavallium.dbengine.utils.ShortListJsonAdapter;
 import it.unimi.dsi.fastutil.booleans.BooleanList;
-import it.unimi.dsi.fastutil.bytes.ByteList;
 import it.unimi.dsi.fastutil.chars.CharList;
 import it.unimi.dsi.fastutil.ints.IntList;
 import it.unimi.dsi.fastutil.ints.IntOpenHashSet;
@@ -18,13 +25,6 @@ import it.unimi.dsi.fastutil.shorts.ShortList;
 import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
-import it.cavallium.dbengine.utils.BooleanListJsonAdapter;
-import it.cavallium.dbengine.utils.ByteListJsonAdapter;
-import it.cavallium.dbengine.utils.CharListJsonAdapter;
-import it.cavallium.dbengine.utils.IntListJsonAdapter;
-import it.cavallium.dbengine.utils.LongListJsonAdapter;
-import it.cavallium.dbengine.utils.MoshiPolymorphic;
-import it.cavallium.dbengine.utils.ShortListJsonAdapter;

 public class QueryMoshi extends MoshiPolymorphic<IType> {

@@ -57,7 +57,7 @@ public class QueryMoshi extends MoshiPolymorphic<IType> {
     this.concreteClasses = concreteClasses;
     Object2ObjectMap<Class<?>, JsonAdapter<?>> extraAdapters = new Object2ObjectOpenHashMap<>();
     extraAdapters.put(BooleanList.class, new BooleanListJsonAdapter());
-    extraAdapters.put(ByteList.class, new ByteListJsonAdapter());
+    extraAdapters.put(Buf.class, new ByteListJsonAdapter());
     extraAdapters.put(ShortList.class, new ShortListJsonAdapter());
     extraAdapters.put(CharList.class, new CharListJsonAdapter());
     extraAdapters.put(IntList.class, new IntListJsonAdapter());
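The re-keyed registration means the new Buf type is now serialized through the byte-list adapter. For orientation, a self-contained Moshi adapter of the same general shape (a generic illustration only, not the project's actual ByteListJsonAdapter, whose encoding may differ):

import com.squareup.moshi.JsonAdapter;
import com.squareup.moshi.JsonReader;
import com.squareup.moshi.JsonWriter;
import java.io.ByteArrayOutputStream;
import java.io.IOException;

class ByteArrayJsonAdapter extends JsonAdapter<byte[]> {
  @Override public byte[] fromJson(JsonReader reader) throws IOException {
    var out = new ByteArrayOutputStream();
    reader.beginArray();                     // reads a JSON array of numbers
    while (reader.hasNext()) out.write(reader.nextInt());
    reader.endArray();
    return out.toByteArray();
  }

  @Override public void toJson(JsonWriter writer, byte[] value) throws IOException {
    writer.beginArray();                     // writes each byte as a JSON number
    for (byte b : value) writer.value(b);
    writer.endArray();
  }
}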
@@ -51,16 +51,7 @@ import java.util.Map;
 import java.util.function.Function;
 import java.util.stream.Collectors;
 import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.LowerCaseFilter;
-import org.apache.lucene.analysis.StopFilter;
-import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.analysis.Tokenizer;
-import org.apache.lucene.analysis.core.KeywordTokenizer;
-import org.apache.lucene.analysis.en.EnglishPossessiveFilter;
-import org.apache.lucene.analysis.en.PorterStemFilter;
 import org.apache.lucene.analysis.miscellaneous.PerFieldAnalyzerWrapper;
-import org.apache.lucene.analysis.miscellaneous.SetKeywordMarkerFilter;
-import org.apache.lucene.analysis.standard.StandardTokenizer;
 import org.apache.lucene.document.DoublePoint;
 import org.apache.lucene.document.FloatPoint;
 import org.apache.lucene.document.IntPoint;
@@ -89,7 +80,7 @@ public class QueryParser {
       return null;
     }
     switch (query.getBaseType$()) {
-      case StandardQuery:
+      case StandardQuery -> {
         var standardQuery = (it.cavallium.dbengine.client.query.current.data.StandardQuery) query;
         // Fix the analyzer
@@ -98,19 +89,12 @@ public class QueryParser {
             .stream()
             .collect(Collectors.toMap(Function.identity(), term -> new NoOpAnalyzer()));
         analyzer = new PerFieldAnalyzerWrapper(analyzer, customAnalyzers);
         var standardQueryParser = new StandardQueryParser(analyzer);
-        standardQueryParser.setPointsConfigMap(standardQuery
-            .pointsConfig()
-            .stream()
-            .collect(Collectors.toMap(
-                PointConfig::field,
-                pointConfig -> new PointsConfig(
-                    toNumberFormat(pointConfig.data().numberFormat()),
-                    toType(pointConfig.data().type())
-                )
-            )));
+        standardQueryParser.setPointsConfigMap(standardQuery.pointsConfig().stream().collect(
+            Collectors.toMap(PointConfig::field, pointConfig ->
+                new PointsConfig(toNumberFormat(pointConfig.data().numberFormat()), toType(pointConfig.data().type()))
+            ))
+        );
         var defaultFields = standardQuery.defaultFields();
         try {
           Query parsed;
@@ -126,7 +110,8 @@ public class QueryParser {
         } catch (QueryNodeException e) {
           throw new IllegalStateException("Can't parse query expression \"" + standardQuery.query() + "\"", e);
         }
-      case BooleanQuery:
+      }
+      case BooleanQuery -> {
         var booleanQuery = (it.cavallium.dbengine.client.query.current.data.BooleanQuery) query;
         var bq = new Builder();
         for (BooleanQueryPart part : booleanQuery.parts()) {
@@ -141,101 +126,127 @@ public class QueryParser {
         }
         bq.setMinimumNumberShouldMatch(booleanQuery.minShouldMatch());
         return bq.build();
-      case IntPointExactQuery:
+      }
+      case IntPointExactQuery -> {
         var intPointExactQuery = (IntPointExactQuery) query;
         return IntPoint.newExactQuery(intPointExactQuery.field(), intPointExactQuery.value());
-      case IntNDPointExactQuery:
+      }
+      case IntNDPointExactQuery -> {
         var intndPointExactQuery = (IntNDPointExactQuery) query;
         var intndValues = intndPointExactQuery.value().toIntArray();
         return IntPoint.newRangeQuery(intndPointExactQuery.field(), intndValues, intndValues);
-      case LongPointExactQuery:
+      }
+      case LongPointExactQuery -> {
         var longPointExactQuery = (LongPointExactQuery) query;
         return LongPoint.newExactQuery(longPointExactQuery.field(), longPointExactQuery.value());
-      case FloatPointExactQuery:
+      }
+      case FloatPointExactQuery -> {
         var floatPointExactQuery = (FloatPointExactQuery) query;
         return FloatPoint.newExactQuery(floatPointExactQuery.field(), floatPointExactQuery.value());
-      case DoublePointExactQuery:
+      }
+      case DoublePointExactQuery -> {
         var doublePointExactQuery = (DoublePointExactQuery) query;
         return DoublePoint.newExactQuery(doublePointExactQuery.field(), doublePointExactQuery.value());
-      case LongNDPointExactQuery:
+      }
+      case LongNDPointExactQuery -> {
         var longndPointExactQuery = (LongNDPointExactQuery) query;
         var longndValues = longndPointExactQuery.value().toLongArray();
         return LongPoint.newRangeQuery(longndPointExactQuery.field(), longndValues, longndValues);
-      case FloatNDPointExactQuery:
+      }
+      case FloatNDPointExactQuery -> {
         var floatndPointExactQuery = (FloatNDPointExactQuery) query;
         var floatndValues = floatndPointExactQuery.value().toFloatArray();
         return FloatPoint.newRangeQuery(floatndPointExactQuery.field(), floatndValues, floatndValues);
-      case DoubleNDPointExactQuery:
+      }
+      case DoubleNDPointExactQuery -> {
         var doublendPointExactQuery = (DoubleNDPointExactQuery) query;
         var doublendValues = doublendPointExactQuery.value().toDoubleArray();
         return DoublePoint.newRangeQuery(doublendPointExactQuery.field(), doublendValues, doublendValues);
-      case IntPointSetQuery:
+      }
+      case IntPointSetQuery -> {
         var intPointSetQuery = (IntPointSetQuery) query;
         return IntPoint.newSetQuery(intPointSetQuery.field(), intPointSetQuery.values().toIntArray());
-      case LongPointSetQuery:
+      }
+      case LongPointSetQuery -> {
         var longPointSetQuery = (LongPointSetQuery) query;
         return LongPoint.newSetQuery(longPointSetQuery.field(), longPointSetQuery.values().toLongArray());
-      case FloatPointSetQuery:
+      }
+      case FloatPointSetQuery -> {
         var floatPointSetQuery = (FloatPointSetQuery) query;
         return FloatPoint.newSetQuery(floatPointSetQuery.field(), floatPointSetQuery.values().toFloatArray());
-      case DoublePointSetQuery:
+      }
+      case DoublePointSetQuery -> {
         var doublePointSetQuery = (DoublePointSetQuery) query;
         return DoublePoint.newSetQuery(doublePointSetQuery.field(), doublePointSetQuery.values().toDoubleArray());
-      case TermQuery:
+      }
+      case TermQuery -> {
         var termQuery = (TermQuery) query;
         return new org.apache.lucene.search.TermQuery(toTerm(termQuery.term()));
-      case IntTermQuery:
+      }
+      case IntTermQuery -> {
         var intTermQuery = (IntTermQuery) query;
         return new org.apache.lucene.search.TermQuery(new Term(intTermQuery.field(),
             IntPoint.pack(intTermQuery.value())
         ));
-      case IntNDTermQuery:
+      }
+      case IntNDTermQuery -> {
         var intNDTermQuery = (IntNDTermQuery) query;
         return new org.apache.lucene.search.TermQuery(new Term(intNDTermQuery.field(),
             IntPoint.pack(intNDTermQuery.value().toIntArray())
         ));
-      case LongTermQuery:
+      }
+      case LongTermQuery -> {
         var longTermQuery = (LongTermQuery) query;
         return new org.apache.lucene.search.TermQuery(new Term(longTermQuery.field(),
             LongPoint.pack(longTermQuery.value())
         ));
-      case LongNDTermQuery:
+      }
+      case LongNDTermQuery -> {
         var longNDTermQuery = (LongNDTermQuery) query;
         return new org.apache.lucene.search.TermQuery(new Term(longNDTermQuery.field(),
             LongPoint.pack(longNDTermQuery.value().toLongArray())
         ));
-      case FloatTermQuery:
+      }
+      case FloatTermQuery -> {
         var floatTermQuery = (FloatTermQuery) query;
         return new org.apache.lucene.search.TermQuery(new Term(floatTermQuery.field(),
             FloatPoint.pack(floatTermQuery.value())
         ));
-      case FloatNDTermQuery:
+      }
+      case FloatNDTermQuery -> {
         var floatNDTermQuery = (FloatNDTermQuery) query;
         return new org.apache.lucene.search.TermQuery(new Term(floatNDTermQuery.field(),
             FloatPoint.pack(floatNDTermQuery.value().toFloatArray())
         ));
-      case DoubleTermQuery:
+      }
+      case DoubleTermQuery -> {
         var doubleTermQuery = (DoubleTermQuery) query;
         return new org.apache.lucene.search.TermQuery(new Term(doubleTermQuery.field(),
             DoublePoint.pack(doubleTermQuery.value())
         ));
-      case DoubleNDTermQuery:
+      }
+      case DoubleNDTermQuery -> {
         var doubleNDTermQuery = (DoubleNDTermQuery) query;
         return new org.apache.lucene.search.TermQuery(new Term(doubleNDTermQuery.field(),
             DoublePoint.pack(doubleNDTermQuery.value().toDoubleArray())
         ));
-      case FieldExistsQuery:
+      }
+      case FieldExistsQuery -> {
         var fieldExistQuery = (FieldExistsQuery) query;
         return new org.apache.lucene.search.FieldExistsQuery(fieldExistQuery.field());
-      case BoostQuery:
+      }
+      case BoostQuery -> {
         var boostQuery = (BoostQuery) query;
         return new org.apache.lucene.search.BoostQuery(toQuery(boostQuery.query(), analyzer), boostQuery.scoreBoost());
-      case ConstantScoreQuery:
+      }
+      case ConstantScoreQuery -> {
         var constantScoreQuery = (ConstantScoreQuery) query;
         return new org.apache.lucene.search.ConstantScoreQuery(toQuery(constantScoreQuery.query(), analyzer));
-      case BoxedQuery:
+      }
+      case BoxedQuery -> {
         return toQuery(((BoxedQuery) query).query(), analyzer);
-      case FuzzyQuery:
+      }
+      case FuzzyQuery -> {
         var fuzzyQuery = (it.cavallium.dbengine.client.query.current.data.FuzzyQuery) query;
         return new FuzzyQuery(toTerm(fuzzyQuery.term()),
             fuzzyQuery.maxEdits(),
@@ -243,56 +254,67 @@ public class QueryParser {
             fuzzyQuery.maxExpansions(),
             fuzzyQuery.transpositions()
         );
-      case IntPointRangeQuery:
+      }
+      case IntPointRangeQuery -> {
         var intPointRangeQuery = (IntPointRangeQuery) query;
         return IntPoint.newRangeQuery(intPointRangeQuery.field(), intPointRangeQuery.min(), intPointRangeQuery.max());
-      case IntNDPointRangeQuery:
+      }
+      case IntNDPointRangeQuery -> {
         var intndPointRangeQuery = (IntNDPointRangeQuery) query;
         return IntPoint.newRangeQuery(intndPointRangeQuery.field(),
             intndPointRangeQuery.min().toIntArray(),
             intndPointRangeQuery.max().toIntArray()
         );
-      case LongPointRangeQuery:
+      }
+      case LongPointRangeQuery -> {
         var longPointRangeQuery = (LongPointRangeQuery) query;
         return LongPoint.newRangeQuery(longPointRangeQuery.field(),
             longPointRangeQuery.min(),
             longPointRangeQuery.max()
         );
-      case FloatPointRangeQuery:
+      }
+      case FloatPointRangeQuery -> {
         var floatPointRangeQuery = (FloatPointRangeQuery) query;
         return FloatPoint.newRangeQuery(floatPointRangeQuery.field(),
             floatPointRangeQuery.min(),
             floatPointRangeQuery.max()
         );
-      case DoublePointRangeQuery:
+      }
+      case DoublePointRangeQuery -> {
         var doublePointRangeQuery = (DoublePointRangeQuery) query;
         return DoublePoint.newRangeQuery(doublePointRangeQuery.field(),
             doublePointRangeQuery.min(),
             doublePointRangeQuery.max()
         );
-      case LongNDPointRangeQuery:
+      }
+      case LongNDPointRangeQuery -> {
         var longndPointRangeQuery = (LongNDPointRangeQuery) query;
         return LongPoint.newRangeQuery(longndPointRangeQuery.field(),
             longndPointRangeQuery.min().toLongArray(),
             longndPointRangeQuery.max().toLongArray()
         );
-      case FloatNDPointRangeQuery:
+      }
+      case FloatNDPointRangeQuery -> {
         var floatndPointRangeQuery = (FloatNDPointRangeQuery) query;
         return FloatPoint.newRangeQuery(floatndPointRangeQuery.field(),
             floatndPointRangeQuery.min().toFloatArray(),
             floatndPointRangeQuery.max().toFloatArray()
         );
-      case DoubleNDPointRangeQuery:
+      }
+      case DoubleNDPointRangeQuery -> {
         var doublendPointRangeQuery = (DoubleNDPointRangeQuery) query;
         return DoublePoint.newRangeQuery(doublendPointRangeQuery.field(),
             doublendPointRangeQuery.min().toDoubleArray(),
             doublendPointRangeQuery.max().toDoubleArray()
         );
-      case MatchAllDocsQuery:
+      }
+      case MatchAllDocsQuery -> {
         return new MatchAllDocsQuery();
-      case MatchNoDocsQuery:
+      }
+      case MatchNoDocsQuery -> {
         return new MatchNoDocsQuery();
-      case PhraseQuery:
+      }
+      case PhraseQuery -> {
         var phraseQuery = (PhraseQuery) query;
         var pqb = new org.apache.lucene.search.PhraseQuery.Builder();
         for (TermPosition phrase : phraseQuery.phrase()) {
@@ -300,27 +322,31 @@ public class QueryParser {
         }
         pqb.setSlop(phraseQuery.slop());
         return pqb.build();
-      case SortedDocFieldExistsQuery:
+      }
+      case SortedDocFieldExistsQuery -> {
         var sortedDocFieldExistsQuery = (SortedDocFieldExistsQuery) query;
         return new DocValuesFieldExistsQuery(sortedDocFieldExistsQuery.field());
-      case SynonymQuery:
+      }
+      case SynonymQuery -> {
         var synonymQuery = (SynonymQuery) query;
         var sqb = new org.apache.lucene.search.SynonymQuery.Builder(synonymQuery.field());
         for (TermAndBoost part : synonymQuery.parts()) {
           sqb.addTerm(toTerm(part.term()), part.boost());
         }
         return sqb.build();
-      case SortedNumericDocValuesFieldSlowRangeQuery:
+      }
+      case SortedNumericDocValuesFieldSlowRangeQuery -> {
         var sortedNumericDocValuesFieldSlowRangeQuery = (SortedNumericDocValuesFieldSlowRangeQuery) query;
         return SortedNumericDocValuesField.newSlowRangeQuery(sortedNumericDocValuesFieldSlowRangeQuery.field(),
             sortedNumericDocValuesFieldSlowRangeQuery.min(),
             sortedNumericDocValuesFieldSlowRangeQuery.max()
         );
-      case WildcardQuery:
+      }
+      case WildcardQuery -> {
         var wildcardQuery = (WildcardQuery) query;
         return new org.apache.lucene.search.WildcardQuery(new Term(wildcardQuery.field(), wildcardQuery.pattern()));
-      default:
-        throw new IllegalStateException("Unexpected value: " + query.getBaseType$());
+      }
+      default -> throw new IllegalStateException("Unexpected value: " + query.getBaseType$());
     }
   }
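The conversion above swaps C-style colon labels for arrow labels. Arrow labels (Java 14+) never fall through, so each branch body is self-contained and can return directly without a trailing break. A compact standalone illustration of the same construct:

static String statusText(int code) {
  return switch (code) {
    case 200 -> "ok";          // no fall-through, no break needed
    case 404 -> "not found";
    default -> "unexpected status " + code;
  };
}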
@@ -17,7 +17,6 @@ import it.cavallium.dbengine.lucene.LuceneUtils;
 import it.cavallium.dbengine.lucene.analyzer.TextFieldsAnalyzer;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.stream.Collectors;
 import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.util.QueryBuilder;
 import org.jetbrains.annotations.NotNull;
@@ -63,29 +62,17 @@ public class QueryUtils {
     for (BooleanClause booleanClause : booleanQuery) {
       org.apache.lucene.search.Query queryPartQuery = booleanClause.getQuery();
-      Occur occur;
-      switch (booleanClause.getOccur()) {
-        case MUST:
-          occur = OccurMust.of();
-          break;
-        case FILTER:
-          occur = OccurFilter.of();
-          break;
-        case SHOULD:
-          occur = OccurShould.of();
-          break;
-        case MUST_NOT:
-          occur = OccurMustNot.of();
-          break;
-        default:
-          throw new IllegalArgumentException();
-      }
+      Occur occur = switch (booleanClause.getOccur()) {
+        case MUST -> OccurMust.of();
+        case FILTER -> OccurFilter.of();
+        case SHOULD -> OccurShould.of();
+        case MUST_NOT -> OccurMustNot.of();
+      };
       queryParts.add(BooleanQueryPart.of(transformQuery(field, queryPartQuery), occur));
     }
     return BooleanQuery.of(List.copyOf(queryParts), booleanQuery.getMinimumNumberShouldMatch());
   }
-  if (luceneQuery instanceof org.apache.lucene.search.PhraseQuery) {
-    var phraseQuery = (org.apache.lucene.search.PhraseQuery) luceneQuery;
+  if (luceneQuery instanceof org.apache.lucene.search.PhraseQuery phraseQuery) {
     int slop = phraseQuery.getSlop();
     var terms = phraseQuery.getTerms();
     var positions = phraseQuery.getPositions();
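Two language features carry this refactor: a switch expression over an enum, which is exhaustive when every constant is covered and therefore needs no default arm (the old IllegalArgumentException fallback disappears), and pattern matching for instanceof (Java 16+), which binds the cast in the condition. A standalone illustration of both:

enum Occur { MUST, FILTER, SHOULD, MUST_NOT }

static int clauseWeight(Occur occur) {
  return switch (occur) {          // covers every constant, so no default is required
    case MUST, FILTER -> 2;
    case SHOULD -> 1;
    case MUST_NOT -> 0;
  };
}

static int slopOf(Object query) {
  if (query instanceof org.apache.lucene.search.PhraseQuery phraseQuery) {
    return phraseQuery.getSlop();  // phraseQuery is already bound and typed here
  }
  return 0;
}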
@@ -1,62 +0,0 @@
-package it.cavallium.dbengine.database;
-
-import io.netty5.buffer.Buffer;
-import io.netty5.util.Send;
-import java.util.function.Supplier;
-
-public abstract class BufSupplier implements SafeCloseable, DiscardingCloseable, Supplier<Buffer> {
-
-  public static BufSupplier of(Supplier<Buffer> supplier) {
-    return new SimpleBufSupplier(supplier);
-  }
-
-  public static BufSupplier of(Send<Buffer> supplier) {
-    return new CopyBufSupplier(supplier.receive());
-  }
-
-  public static BufSupplier ofOwned(Buffer supplier) {
-    return new CopyBufSupplier(supplier);
-  }
-
-  public static BufSupplier ofShared(Buffer supplier) {
-    return new SimpleBufSupplier(() -> supplier.copy());
-  }
-
-  private static final class SimpleBufSupplier extends BufSupplier {
-
-    private final Supplier<Buffer> supplier;
-
-    public SimpleBufSupplier(Supplier<Buffer> supplier) {
-      this.supplier = supplier;
-    }
-
-    @Override
-    public Buffer get() {
-      return supplier.get();
-    }
-
-    @Override
-    public void close() {
-    }
-  }
-
-  private static final class CopyBufSupplier extends BufSupplier {
-
-    private final Buffer supplier;
-
-    public CopyBufSupplier(Buffer supplier) {
-      this.supplier = supplier;
-    }
-
-    @Override
-    public Buffer get() {
-      return supplier.copy();
-    }
-
-    @Override
-    public void close() {
-      supplier.close();
-    }
-  }
-}
@@ -2,10 +2,9 @@ package it.cavallium.dbengine.database;
 import it.cavallium.dbengine.rpc.current.data.Column;
 import java.nio.file.Path;
-import org.reactivestreams.Publisher;
-import reactor.core.publisher.Mono;
+import java.util.stream.Stream;

 public interface DatabaseOperations {

-  Mono<Void> ingestSST(Column column, Publisher<Path> files, boolean replaceExisting);
+  void ingestSST(Column column, Stream<Path> files, boolean replaceExisting);
 }
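A hedged usage sketch of the new signature: Files.list produces exactly the Stream<Path> the method wants, and try-with-resources closes it afterwards (the db and column arguments are placeholders for objects obtained elsewhere):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.stream.Stream;

static void ingestDirectory(DatabaseOperations db, Column column, Path dir) throws IOException {
  try (Stream<Path> sstFiles = Files.list(dir).filter(p -> p.toString().endsWith(".sst"))) {
    db.ingestSST(column, sstFiles, true); // blocks until ingestion completes
  }
}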
@@ -2,30 +2,30 @@ package it.cavallium.dbengine.database;
 import it.cavallium.dbengine.client.MemoryStats;
 import it.cavallium.dbengine.rpc.current.data.Column;
+import java.io.IOException;
 import java.util.Map;
+import java.util.stream.Stream;
 import org.jetbrains.annotations.Nullable;
-import reactor.core.publisher.Flux;
-import reactor.core.publisher.Mono;

 public interface DatabaseProperties {

-  Mono<MemoryStats> getMemoryStats();
+  MemoryStats getMemoryStats();

-  Mono<String> getRocksDBStats();
+  String getRocksDBStats();

-  Mono<Map<String, String>> getMapProperty(@Nullable Column column, RocksDBMapProperty property);
+  Map<String, String> getMapProperty(@Nullable Column column, RocksDBMapProperty property);

-  Flux<ColumnProperty<Map<String, String>>> getMapColumnProperties(RocksDBMapProperty property);
+  Stream<ColumnProperty<Map<String, String>>> getMapColumnProperties(RocksDBMapProperty property);

-  Mono<String> getStringProperty(@Nullable Column column, RocksDBStringProperty property);
+  String getStringProperty(@Nullable Column column, RocksDBStringProperty property);

-  Flux<ColumnProperty<String>> getStringColumnProperties(RocksDBStringProperty property);
+  Stream<ColumnProperty<String>> getStringColumnProperties(RocksDBStringProperty property);

-  Mono<Long> getLongProperty(@Nullable Column column, RocksDBLongProperty property);
+  Long getLongProperty(@Nullable Column column, RocksDBLongProperty property);

-  Flux<ColumnProperty<Long>> getLongColumnProperties(RocksDBLongProperty property);
+  Stream<ColumnProperty<Long>> getLongColumnProperties(RocksDBLongProperty property);

-  Mono<Long> getAggregatedLongProperty(RocksDBLongProperty property);
+  Long getAggregatedLongProperty(RocksDBLongProperty property);

-  Flux<TableWithProperties> getTableProperties();
+  Stream<TableWithProperties> getTableProperties();
 }
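Since the getters now return materialized values and java.util.stream.Stream, a monitoring routine reduces to plain iteration. A minimal hedged sketch, taking the property as a parameter rather than guessing at any concrete RocksDBLongProperty constant:

static void dumpLongProperties(DatabaseProperties props, RocksDBLongProperty property) {
  // one aggregated value across all columns, then the per-column breakdown
  System.out.println("aggregated = " + props.getAggregatedLongProperty(property));
  props.getLongColumnProperties(property).forEach(columnValue -> System.out.println(columnValue));
}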
@@ -1,39 +1,34 @@
 package it.cavallium.dbengine.database;

 import io.micrometer.core.instrument.MeterRegistry;
-import io.netty5.buffer.BufferAllocator;
 import it.cavallium.dbengine.lucene.LuceneHacks;
-import it.cavallium.dbengine.lucene.LuceneRocksDBManager;
 import it.cavallium.dbengine.rpc.current.data.Column;
 import it.cavallium.dbengine.rpc.current.data.DatabaseOptions;
 import it.cavallium.dbengine.rpc.current.data.IndicizerAnalyzers;
 import it.cavallium.dbengine.rpc.current.data.IndicizerSimilarities;
 import it.cavallium.dbengine.rpc.current.data.LuceneIndexStructure;
 import it.cavallium.dbengine.rpc.current.data.LuceneOptions;
-import java.io.IOException;
 import java.util.List;
-import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
-import reactor.core.publisher.Mono;

 @SuppressWarnings("UnusedReturnValue")
 public interface LLDatabaseConnection {

-  BufferAllocator getAllocator();
-
   MeterRegistry getMeterRegistry();

-  Mono<? extends LLDatabaseConnection> connect();
+  LLDatabaseConnection connect();

-  Mono<? extends LLKeyValueDatabase> getDatabase(String name,
+  LLKeyValueDatabase getDatabase(String name,
       List<Column> columns,
       DatabaseOptions databaseOptions);

-  Mono<? extends LLLuceneIndex> getLuceneIndex(String clusterName,
+  LLLuceneIndex getLuceneIndex(String clusterName,
       LuceneIndexStructure indexStructure,
       IndicizerAnalyzers indicizerAnalyzers,
       IndicizerSimilarities indicizerSimilarities,
       LuceneOptions luceneOptions,
       @Nullable LuceneHacks luceneHacks);

-  Mono<Void> disconnect();
+  void disconnect();
 }
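The connection lifecycle is now ordinary sequential code. A hedged sketch against the signatures above (the concrete connection instance, the column list, and the options object are placeholders for values built elsewhere):

LLDatabaseConnection connection = newConnection.connect(); // blocks until ready
try {
  LLKeyValueDatabase database = connection.getDatabase("main", columns, databaseOptions);
  // ... use the database synchronously ...
} finally {
  connection.disconnect(); // blocks until closed
}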
@@ -1,71 +1,37 @@
 package it.cavallium.dbengine.database;

-import io.netty5.buffer.Buffer;
-import io.netty5.buffer.Drop;
-import io.netty5.buffer.Owned;
-import io.netty5.util.Send;
-import io.netty5.buffer.internal.ResourceSupport;
-import it.cavallium.dbengine.utils.SimpleResource;
+import static it.cavallium.dbengine.database.LLUtils.unmodifiableBytes;
+
+import it.cavallium.dbengine.buffers.Buf;
 import java.util.StringJoiner;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
 import org.jetbrains.annotations.Nullable;

-public class LLDelta extends SimpleResource implements DiscardingCloseable {
+public class LLDelta {

   @Nullable
-  private final Buffer previous;
+  private final Buf previous;
   @Nullable
-  private final Buffer current;
+  private final Buf current;

-  private LLDelta(@Nullable Buffer previous, @Nullable Buffer current) {
+  private LLDelta(@Nullable Buf previous, @Nullable Buf current) {
     super();
-    this.previous = previous != null ? previous.makeReadOnly() : null;
-    this.current = current != null ? current.makeReadOnly() : null;
+    this.previous = unmodifiableBytes(previous);
+    this.current = unmodifiableBytes(current);
   }

-  @Override
-  protected void ensureOpen() {
-    super.ensureOpen();
-    assert previous == null || previous.isAccessible();
-    assert current == null || current.isAccessible();
-  }
-
-  @Override
-  protected void onClose() {
-    if (previous != null && previous.isAccessible()) {
-      previous.close();
-    }
-    if (current != null && current.isAccessible()) {
-      current.close();
-    }
-  }
-
-  public static LLDelta of(Buffer previous, Buffer current) {
+  public static LLDelta of(Buf previous, Buf current) {
     assert (previous == null && current == null) || (previous != current);
     return new LLDelta(previous, current);
   }

-  public Send<Buffer> previous() {
-    ensureOpen();
-    return previous != null ? previous.copy().send() : null;
-  }
-
-  public Send<Buffer> current() {
-    ensureOpen();
-    return current != null ? current.copy().send() : null;
-  }
-
-  public Buffer currentUnsafe() {
-    ensureOpen();
-    return current;
-  }
-
-  public Buffer previousUnsafe() {
-    ensureOpen();
+  public Buf previous() {
     return previous;
   }

+  public Buf current() {
+    return current;
+  }
+
   public boolean isModified() {
     return !LLUtils.equals(previous, current);
   }
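A small sketch of how a delta is meant to be consumed; updateAndGetDelta comes from the LLDictionary interface below, and the updater body plus the two helper methods are placeholders (assuming BinarySerializationFunction is a Buf-to-Buf function, which is not verified here):

// Hypothetical consumer: "delta" reports both sides of the write.
LLDelta delta = dictionary.updateAndGetDelta(key, previousValue -> computeNextValue(previousValue));
if (delta.isModified()) { // content comparison of previous() vs current()
  propagateChange(delta.previous(), delta.current());
}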
@ -1,105 +1,93 @@
package it.cavallium.dbengine.database; package it.cavallium.dbengine.database;
import io.netty5.buffer.Buffer; import it.cavallium.dbengine.buffers.Buf;
import io.netty5.buffer.BufferAllocator;
import io.netty5.util.Send;
import it.cavallium.dbengine.client.BadBlock; import it.cavallium.dbengine.client.BadBlock;
import it.cavallium.dbengine.database.disk.BinarySerializationFunction; import it.cavallium.dbengine.database.disk.BinarySerializationFunction;
import it.cavallium.dbengine.database.serialization.KVSerializationFunction; import it.cavallium.dbengine.database.serialization.KVSerializationFunction;
import it.cavallium.dbengine.database.serialization.SerializationFunction; import java.io.IOException;
import java.util.List; import java.util.List;
import java.util.Optional;
import java.util.function.Function; import java.util.function.Function;
import java.util.stream.Stream;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable; import org.jetbrains.annotations.Nullable;
import reactor.core.publisher.Flux; import org.rocksdb.RocksDBException;
import reactor.core.publisher.Mono;
@SuppressWarnings("unused") @SuppressWarnings("unused")
 public interface LLDictionary extends LLKeyValueDatabaseStructure {
 	String getColumnName();
-	BufferAllocator getAllocator();
-	Mono<Buffer> get(@Nullable LLSnapshot snapshot, Mono<Buffer> key);
-	Mono<Buffer> put(Mono<Buffer> key, Mono<Buffer> value, LLDictionaryResultType resultType);
+	Buf get(@Nullable LLSnapshot snapshot, Buf key);
+	Buf put(Buf key, Buf value, LLDictionaryResultType resultType);
 	UpdateMode getUpdateMode();
-	default Mono<Buffer> update(Mono<Buffer> key,
-			BinarySerializationFunction updater,
-			UpdateReturnMode updateReturnMode) {
-		return this
-				.updateAndGetDelta(key, updater)
-				.transform(prev -> LLUtils.resolveLLDelta(prev, updateReturnMode));
+	default Buf update(Buf key, BinarySerializationFunction updater, UpdateReturnMode updateReturnMode) {
+		LLDelta prev = this.updateAndGetDelta(key, updater);
+		return LLUtils.resolveLLDelta(prev, updateReturnMode);
 	}
-	Mono<LLDelta> updateAndGetDelta(Mono<Buffer> key, BinarySerializationFunction updater);
-	Mono<Void> clear();
-	Mono<Buffer> remove(Mono<Buffer> key, LLDictionaryResultType resultType);
-	Flux<OptionalBuf> getMulti(@Nullable LLSnapshot snapshot, Flux<Buffer> keys);
-	Mono<Void> putMulti(Flux<LLEntry> entries);
-	<K> Flux<Boolean> updateMulti(Flux<K> keys, Flux<Buffer> serializedKeys,
-			KVSerializationFunction<K, @Nullable Buffer, @Nullable Buffer> updateFunction);
+	LLDelta updateAndGetDelta(Buf key, BinarySerializationFunction updater);
+	void clear();
+	Buf remove(Buf key, LLDictionaryResultType resultType);
+	Stream<OptionalBuf> getMulti(@Nullable LLSnapshot snapshot, Stream<Buf> keys);
+	void putMulti(Stream<LLEntry> entries);
+	<K> Stream<Boolean> updateMulti(Stream<K> keys, Stream<Buf> serializedKeys,
+			KVSerializationFunction<K, @Nullable Buf, @Nullable Buf> updateFunction);
-	Flux<LLEntry> getRange(@Nullable LLSnapshot snapshot,
-			Mono<LLRange> range,
+	Stream<LLEntry> getRange(@Nullable LLSnapshot snapshot,
+			LLRange range,
 			boolean reverse,
 			boolean smallRange);
-	Flux<List<LLEntry>> getRangeGrouped(@Nullable LLSnapshot snapshot,
-			Mono<LLRange> range,
+	Stream<List<LLEntry>> getRangeGrouped(@Nullable LLSnapshot snapshot,
+			LLRange range,
 			int prefixLength,
 			boolean smallRange);
-	Flux<Buffer> getRangeKeys(@Nullable LLSnapshot snapshot,
-			Mono<LLRange> range,
-			boolean reverse,
-			boolean smallRange);
+	Stream<Buf> getRangeKeys(@Nullable LLSnapshot snapshot,
+			LLRange range,
+			boolean reverse,
+			boolean smallRange) throws RocksDBException, IOException;
-	Flux<List<Buffer>> getRangeKeysGrouped(@Nullable LLSnapshot snapshot,
-			Mono<LLRange> range,
+	Stream<List<Buf>> getRangeKeysGrouped(@Nullable LLSnapshot snapshot,
+			LLRange range,
 			int prefixLength,
 			boolean smallRange);
-	Flux<Buffer> getRangeKeyPrefixes(@Nullable LLSnapshot snapshot,
-			Mono<LLRange> range,
+	Stream<Buf> getRangeKeyPrefixes(@Nullable LLSnapshot snapshot,
+			LLRange range,
 			int prefixLength,
 			boolean smallRange);
-	Flux<BadBlock> badBlocks(Mono<LLRange> range);
-	Mono<Void> setRange(Mono<LLRange> range, Flux<LLEntry> entries, boolean smallRange);
-	default Mono<Void> replaceRange(Mono<LLRange> range,
+	Stream<BadBlock> badBlocks(LLRange range);
+	void setRange(LLRange range, Stream<LLEntry> entries, boolean smallRange);
+	default void replaceRange(LLRange range,
 			boolean canKeysChange,
-			Function<LLEntry, Mono<LLEntry>> entriesReplacer,
+			Function<@NotNull LLEntry, @NotNull LLEntry> entriesReplacer,
 			boolean smallRange) {
-		return Mono.defer(() -> {
-			if (canKeysChange) {
-				return this
-						.setRange(range, this
-								.getRange(null, range, false, smallRange)
-								.flatMap(entriesReplacer), smallRange);
-			} else {
-				return this.putMulti(this.getRange(null, range, false, smallRange).flatMap(entriesReplacer));
-			}
-		});
+		if (canKeysChange) {
+			this.setRange(range, this.getRange(null, range, false, smallRange).map(entriesReplacer), smallRange);
+		} else {
+			this.putMulti(this.getRange(null, range, false, smallRange).map(entriesReplacer));
+		}
 	}
-	Mono<Boolean> isRangeEmpty(@Nullable LLSnapshot snapshot, Mono<LLRange> range, boolean fillCache);
-	Mono<Long> sizeRange(@Nullable LLSnapshot snapshot, Mono<LLRange> range, boolean fast);
-	Mono<LLEntry> getOne(@Nullable LLSnapshot snapshot, Mono<LLRange> range);
-	Mono<Buffer> getOneKey(@Nullable LLSnapshot snapshot, Mono<LLRange> range);
-	Mono<LLEntry> removeOne(Mono<LLRange> range);
+	boolean isRangeEmpty(@Nullable LLSnapshot snapshot, LLRange range, boolean fillCache);
+	long sizeRange(@Nullable LLSnapshot snapshot, LLRange range, boolean fast);
+	LLEntry getOne(@Nullable LLSnapshot snapshot, LLRange range);
+	Buf getOneKey(@Nullable LLSnapshot snapshot, LLRange range);
+	LLEntry removeOne(LLRange range);
 }
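Note: a minimal usage sketch of the migrated blocking interface, assuming an already-obtained LLDictionary; the key/value bytes and the LLDictionaryResultType constant are illustrative, not part of this commit:

    static void dictionaryExample(LLDictionary dict) {
        Buf key = Buf.wrap(new byte[] {1, 2, 3});
        Buf value = Buf.wrap(new byte[] {42});
        // put() now returns the previous value directly (possibly null) instead of a Mono<Buffer>
        Buf previous = dict.put(key, value, LLDictionaryResultType.PREVIOUS_VALUE);
        // ranged reads return java.util.stream.Stream, which should be closed after use
        try (Stream<LLEntry> entries = dict.getRange(null, LLRange.all(), false, true)) {
            entries.forEach(entry -> System.out.println(LLUtils.toStringSafe(entry.getKey())));
        }
    }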

LLEntry.java

@@ -1,67 +1,37 @@
 package it.cavallium.dbengine.database;
-import io.netty5.buffer.Buffer;
-import io.netty5.buffer.Drop;
-import io.netty5.buffer.Owned;
-import io.netty5.util.Resource;
-import io.netty5.util.Send;
-import io.netty5.buffer.internal.ResourceSupport;
-import it.cavallium.dbengine.utils.SimpleResource;
+import it.cavallium.dbengine.buffers.Buf;
 import java.util.Objects;
 import java.util.StringJoiner;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.jetbrains.annotations.NotNull;
-import org.jetbrains.annotations.Nullable;
-public class LLEntry extends SimpleResource implements DiscardingCloseable {
+public class LLEntry {
 	private static final Logger logger = LogManager.getLogger(LLEntry.class);
-	private Buffer key;
-	private Buffer value;
-	private LLEntry(@NotNull Send<Buffer> key, @NotNull Send<Buffer> value) {
-		this.key = key.receive();
-		this.value = value.receive();
-		assert isAllAccessible();
-	}
-	private LLEntry(@NotNull Buffer key, @NotNull Buffer value) {
+	private final Buf key;
+	private final Buf value;
+	private LLEntry(@NotNull Buf key, @NotNull Buf value) {
 		this.key = key;
 		this.value = value;
-		assert isAllAccessible();
 	}
-	private boolean isAllAccessible() {
-		assert key != null && key.isAccessible();
-		assert value != null && value.isAccessible();
-		return true;
-	}
-	public static LLEntry of(@NotNull Buffer key, @NotNull Buffer value) {
+	public static LLEntry of(@NotNull Buf key, @NotNull Buf value) {
 		return new LLEntry(key, value);
 	}
-	public Send<Buffer> getKey() {
-		ensureOwned();
-		return Objects.requireNonNull(key).copy().send();
+	public static LLEntry copyOf(Buf keyView, Buf valueView) {
+		return new LLEntry(keyView.copy(), valueView.copy());
 	}
-	public Buffer getKeyUnsafe() {
-		return key;
+	public Buf getKey() {
+		return Objects.requireNonNull(key);
 	}
-	public Send<Buffer> getValue() {
-		ensureOwned();
-		return Objects.requireNonNull(value).copy().send();
-	}
-	public Buffer getValueUnsafe() {
-		return value;
-	}
-	private void ensureOwned() {
-		assert isAllAccessible();
+	public Buf getValue() {
+		return Objects.requireNonNull(value);
 	}
 	@Override
@@ -90,24 +60,4 @@ public class LLEntry extends SimpleResource implements DiscardingCloseable {
 				.add("value=" + LLUtils.toString(value))
 				.toString();
 	}
-	@Override
-	protected void onClose() {
-		try {
-			if (key != null && key.isAccessible()) {
-				key.close();
-			}
-		} catch (Throwable ex) {
-			logger.error("Failed to close key", ex);
-		}
-		try {
-			if (value != null && value.isAccessible()) {
-				value.close();
-			}
-		} catch (Throwable ex) {
-			logger.error("Failed to close value", ex);
-		}
-		key = null;
-		value = null;
-	}
 }
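Note: with the resource tracking removed, the difference between the two factories is plain aliasing versus copying; a sketch (key and value are hypothetical Buf instances):

    LLEntry aliased = LLEntry.of(key, value);       // keeps the given Buf views
    LLEntry detached = LLEntry.copyOf(key, value);  // snapshots them via Buf.copy()
    // mutating `key` afterwards can be observed through `aliased`, never through `detached`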

@@ -1,13 +1,6 @@
 package it.cavallium.dbengine.database;
-import com.google.common.primitives.Floats;
-import com.google.common.primitives.Ints;
-import com.google.common.primitives.Longs;
-import java.nio.Buffer;
 import java.nio.ByteBuffer;
-import java.nio.FloatBuffer;
-import java.nio.charset.StandardCharsets;
-import java.util.Arrays;
 import java.util.Objects;
 import java.util.StringJoiner;
 import org.apache.lucene.document.Field;

LLKeyScore.java

@@ -1,10 +1,6 @@
 package it.cavallium.dbengine.database;
-import java.util.Objects;
-import java.util.StringJoiner;
 import org.apache.lucene.index.IndexableField;
-import org.apache.lucene.util.BytesRef;
 import org.jetbrains.annotations.Nullable;
-import reactor.core.publisher.Mono;
 public record LLKeyScore(int docId, int shardId, float score, @Nullable IndexableField key) {}

LLKeyValueDatabase.java

@@ -3,69 +3,67 @@ package it.cavallium.dbengine.database;
 import com.google.common.primitives.Ints;
 import com.google.common.primitives.Longs;
 import io.micrometer.core.instrument.MeterRegistry;
-import io.netty5.buffer.BufferAllocator;
 import it.cavallium.dbengine.client.IBackuppable;
-import it.cavallium.dbengine.client.MemoryStats;
 import it.cavallium.dbengine.database.collections.DatabaseInt;
 import it.cavallium.dbengine.database.collections.DatabaseLong;
-import it.cavallium.dbengine.rpc.current.data.Column;
+import java.io.IOException;
 import java.nio.charset.StandardCharsets;
-import java.util.Map;
 import org.jetbrains.annotations.Nullable;
-import reactor.core.publisher.Flux;
-import reactor.core.publisher.Mono;
+import org.rocksdb.RocksDBException;
 public interface LLKeyValueDatabase extends LLSnapshottable, LLKeyValueDatabaseStructure, DatabaseProperties,
 		IBackuppable, DatabaseOperations {
-	Mono<? extends LLSingleton> getSingleton(byte[] singletonListColumnName, byte[] name, byte @Nullable[] defaultValue);
+	LLSingleton getSingleton(byte[] singletonListColumnName, byte[] name, byte @Nullable [] defaultValue)
+			throws IOException;
-	Mono<? extends LLDictionary> getDictionary(byte[] columnName, UpdateMode updateMode);
+	LLDictionary getDictionary(byte[] columnName, UpdateMode updateMode);
 	@Deprecated
-	default Mono<? extends LLDictionary> getDeprecatedSet(String name, UpdateMode updateMode) {
+	default LLDictionary getDeprecatedSet(String name, UpdateMode updateMode) {
 		return getDictionary(ColumnUtils.deprecatedSet(name).name().getBytes(StandardCharsets.US_ASCII), updateMode);
 	}
-	default Mono<? extends LLDictionary> getDictionary(String name, UpdateMode updateMode) {
+	default LLDictionary getDictionary(String name, UpdateMode updateMode) {
 		return getDictionary(ColumnUtils.dictionary(name).name().getBytes(StandardCharsets.US_ASCII), updateMode);
 	}
-	default Mono<? extends LLSingleton> getSingleton(String singletonListName, String name) {
+	default LLSingleton getSingleton(String singletonListName, String name) {
 		return getSingleton(ColumnUtils.special(singletonListName).name().getBytes(StandardCharsets.US_ASCII),
 				name.getBytes(StandardCharsets.US_ASCII),
 				null
 		);
 	}
-	default Mono<DatabaseInt> getInteger(String singletonListName, String name, int defaultValue) {
-		return this
-				.getSingleton(ColumnUtils.special(singletonListName).name().getBytes(StandardCharsets.US_ASCII),
-						name.getBytes(StandardCharsets.US_ASCII),
-						Ints.toByteArray(defaultValue)
-				)
-				.map(DatabaseInt::new);
+	default DatabaseInt getInteger(String singletonListName, String name, int defaultValue) {
+		return new DatabaseInt(this.getSingleton(ColumnUtils
+				.special(singletonListName)
+				.name()
+				.getBytes(StandardCharsets.US_ASCII),
+				name.getBytes(StandardCharsets.US_ASCII),
+				Ints.toByteArray(defaultValue)
+		));
 	}
-	default Mono<DatabaseLong> getLong(String singletonListName, String name, long defaultValue) {
-		return this
-				.getSingleton(ColumnUtils.special(singletonListName).name().getBytes(StandardCharsets.US_ASCII),
-						name.getBytes(StandardCharsets.US_ASCII),
-						Longs.toByteArray(defaultValue)
-				)
-				.map(DatabaseLong::new);
+	default DatabaseLong getLong(String singletonListName, String name, long defaultValue) {
+		return new DatabaseLong(this.getSingleton(ColumnUtils
+				.special(singletonListName)
+				.name()
+				.getBytes(StandardCharsets.US_ASCII),
+				name.getBytes(StandardCharsets.US_ASCII),
+				Longs.toByteArray(defaultValue)
+		));
 	}
-	Mono<Void> verifyChecksum();
-	Mono<Void> compact();
-	Mono<Void> flush();
-	BufferAllocator getAllocator();
+	void verifyChecksum();
+	void compact() throws RocksDBException;
+	void flush();
 	MeterRegistry getMeterRegistry();
-	Mono<Void> preClose();
-	Mono<Void> close();
+	void preClose();
+	void close();
 }
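Note: the singleton helpers now construct their wrappers directly instead of mapping over a Mono; a sketch of the call sites (only the construction below is shown by this diff, any accessor methods on DatabaseLong/DatabaseInt are assumptions):

    DatabaseLong visits = db.getLong("counters", "visits", 0L); // blocking, may perform I/O
    DatabaseInt flags = db.getInteger("counters", "flags", 0);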

LLLuceneIndex.java

@@ -8,30 +8,29 @@ import it.cavallium.dbengine.client.query.current.data.QueryParams;
 import it.cavallium.dbengine.client.query.current.data.TotalHitsCount;
 import it.cavallium.dbengine.lucene.collector.Buckets;
 import it.cavallium.dbengine.lucene.searcher.BucketParams;
+import java.io.IOException;
 import java.time.Duration;
 import java.util.List;
-import java.util.Map;
 import java.util.Map.Entry;
+import java.util.stream.Stream;
 import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
-import reactor.core.publisher.Flux;
-import reactor.core.publisher.Mono;
 public interface LLLuceneIndex extends LLSnapshottable, IBackuppable, SafeCloseable {
 	String getLuceneIndexName();
-	Mono<Void> addDocument(LLTerm id, LLUpdateDocument doc);
-	Mono<Long> addDocuments(boolean atomic, Flux<Entry<LLTerm, LLUpdateDocument>> documents);
-	Mono<Void> deleteDocument(LLTerm id);
-	Mono<Void> update(LLTerm id, LLIndexRequest request);
-	Mono<Long> updateDocuments(Flux<Entry<LLTerm, LLUpdateDocument>> documents);
-	Mono<Void> deleteAll();
+	void addDocument(LLTerm id, LLUpdateDocument doc);
+	long addDocuments(boolean atomic, Stream<Entry<LLTerm, LLUpdateDocument>> documents);
+	void deleteDocument(LLTerm id);
+	void update(LLTerm id, LLIndexRequest request);
+	long updateDocuments(Stream<Entry<LLTerm, LLUpdateDocument>> documents);
+	void deleteAll();
 	/**
 	 * @param queryParams the limit is valid for each lucene instance. If you have 15 instances, the number of elements
@@ -40,7 +39,7 @@ public interface LLLuceneIndex extends LLSnapshottable, IBackuppable, SafeCloseable {
 	 * The additional query will be used with the moreLikeThis query: "mltQuery AND additionalQuery"
 	 * @return the collection has one or more flux
 	 */
-	Flux<LLSearchResultShard> moreLikeThis(@Nullable LLSnapshot snapshot,
+	Stream<LLSearchResultShard> moreLikeThis(@Nullable LLSnapshot snapshot,
 			QueryParams queryParams,
 			@Nullable String keyFieldName,
 			Multimap<String, String> mltDocumentFields);
@@ -50,19 +49,19 @@ public interface LLLuceneIndex extends LLSnapshottable, IBackuppable, SafeCloseable {
 	 * returned can be at most <code>limit * 15</code>
 	 * @return the collection has one or more flux
 	 */
-	Flux<LLSearchResultShard> search(@Nullable LLSnapshot snapshot,
+	Stream<LLSearchResultShard> search(@Nullable LLSnapshot snapshot,
 			QueryParams queryParams,
 			@Nullable String keyFieldName);
 	/**
 	 * @return buckets with each value collected into one of the buckets
 	 */
-	Mono<Buckets> computeBuckets(@Nullable LLSnapshot snapshot,
+	Buckets computeBuckets(@Nullable LLSnapshot snapshot,
 			@NotNull List<Query> queries,
 			@Nullable Query normalizationQuery,
 			BucketParams bucketParams);
-	default Mono<TotalHitsCount> count(@Nullable LLSnapshot snapshot, Query query, @Nullable Duration timeout) {
+	default TotalHitsCount count(@Nullable LLSnapshot snapshot, Query query, @Nullable Duration timeout) {
 		QueryParams params = QueryParams.of(query,
 				0,
 				0,
@@ -70,12 +69,11 @@ public interface LLLuceneIndex extends LLSnapshottable, IBackuppable, SafeCloseable {
 				false,
 				timeout == null ? Long.MAX_VALUE : timeout.toMillis()
 		);
-		return Mono
-				.usingWhen(this.search(snapshot, params, null).singleOrEmpty(),
-						llSearchResultShard -> Mono.just(llSearchResultShard.totalHitsCount()),
-						LLUtils::finalizeResource
-				)
-				.defaultIfEmpty(TotalHitsCount.of(0, true));
+		return this
+				.search(snapshot, params, null)
+				.parallel()
+				.map(LLSearchResultShard::totalHitsCount)
+				.reduce(TotalHitsCount.of(0, true), (a, b) -> TotalHitsCount.of(a.value() + b.value(), a.exact() && b.exact()));
 	}
 	boolean isLowMemoryMode();
@@ -84,18 +82,18 @@ public interface LLLuceneIndex extends LLSnapshottable, IBackuppable, SafeCloseable {
 	 * Flush writes to disk.
 	 * This does not commit, it syncs the data to the disk
 	 */
-	Mono<Void> flush();
-	Mono<Void> waitForMerges();
+	void flush();
+	void waitForMerges();
 	/**
 	 * Wait for the latest pending merge
 	 * This disables future merges until shutdown!
 	 */
-	Mono<Void> waitForLastMerges();
+	void waitForLastMerges();
 	/**
 	 * Refresh index searcher
 	 */
-	Mono<Void> refresh(boolean force);
+	void refresh(boolean force);
 }
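Note: the rewritten count() folds per-shard counts with an explicit reduction instead of usingWhen. The merge rule visible above is worth spelling out, since exactness is only preserved when every shard reports an exact count:

    TotalHitsCount a = TotalHitsCount.of(10, true);
    TotalHitsCount b = TotalHitsCount.of(5, false);
    TotalHitsCount merged = TotalHitsCount.of(a.value() + b.value(), a.exact() && b.exact());
    // merged: value 15, exact false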

LLMultiDatabaseConnection.java

@@ -2,14 +2,10 @@ package it.cavallium.dbengine.database;
 import com.google.common.collect.Multimap;
 import io.micrometer.core.instrument.MeterRegistry;
-import io.netty5.buffer.BufferAllocator;
 import it.cavallium.dbengine.client.ConnectionSettings.ConnectionPart;
 import it.cavallium.dbengine.client.ConnectionSettings.ConnectionPart.ConnectionPartLucene;
 import it.cavallium.dbengine.client.ConnectionSettings.ConnectionPart.ConnectionPartRocksDB;
-import it.cavallium.dbengine.client.IndicizerAnalyzers;
-import it.cavallium.dbengine.client.IndicizerSimilarities;
 import it.cavallium.dbengine.lucene.LuceneHacks;
-import it.cavallium.dbengine.lucene.LuceneRocksDBManager;
 import it.cavallium.dbengine.lucene.LuceneUtils;
 import it.cavallium.dbengine.rpc.current.data.Column;
 import it.cavallium.dbengine.rpc.current.data.DatabaseOptions;
@@ -18,6 +14,7 @@ import it.cavallium.dbengine.rpc.current.data.LuceneOptions;
 import it.unimi.dsi.fastutil.ints.IntArrayList;
 import it.unimi.dsi.fastutil.ints.IntOpenHashSet;
 import it.unimi.dsi.fastutil.ints.IntSet;
+import java.io.IOException;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
@@ -25,12 +22,10 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Objects;
 import java.util.Set;
+import java.util.concurrent.CompletionException;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.jetbrains.annotations.Nullable;
-import reactor.core.publisher.Flux;
-import reactor.core.publisher.Mono;
-import reactor.util.function.Tuple2;
 public class LLMultiDatabaseConnection implements LLDatabaseConnection {
@@ -83,30 +78,26 @@ public class LLMultiDatabaseConnection implements LLDatabaseConnection {
 		allConnections.addAll(databaseShardConnections.values());
 	}
-	@Override
-	public BufferAllocator getAllocator() {
-		return anyConnection.getAllocator();
-	}
 	@Override
 	public MeterRegistry getMeterRegistry() {
 		return anyConnection.getMeterRegistry();
 	}
 	@Override
-	public Mono<? extends LLDatabaseConnection> connect() {
-		return Flux
-				.fromIterable(allConnections)
-				.flatMap((LLDatabaseConnection databaseConnection) -> databaseConnection
-						.connect()
-						.doOnError(ex -> LOG.error("Failed to open connection", ex))
-				)
-				.then()
-				.thenReturn(this);
+	public LLDatabaseConnection connect() {
+		// todo: parallelize?
+		for (LLDatabaseConnection connection : allConnections) {
+			try {
+				connection.connect();
+			} catch (Exception ex) {
+				LOG.error("Failed to open connection", ex);
+			}
+		}
+		return this;
 	}
 	@Override
-	public Mono<? extends LLKeyValueDatabase> getDatabase(String name,
+	public LLKeyValueDatabase getDatabase(String name,
 			List<Column> columns,
 			DatabaseOptions databaseOptions) {
 		var conn = databaseShardConnections.getOrDefault(name, defaultDatabaseConnection);
@@ -115,7 +106,7 @@ public class LLMultiDatabaseConnection implements LLDatabaseConnection {
 	}
 	@Override
-	public Mono<? extends LLLuceneIndex> getLuceneIndex(String clusterName,
+	public LLLuceneIndex getLuceneIndex(String clusterName,
 			LuceneIndexStructure indexStructure,
 			it.cavallium.dbengine.rpc.current.data.IndicizerAnalyzers indicizerAnalyzers,
 			it.cavallium.dbengine.rpc.current.data.IndicizerSimilarities indicizerSimilarities,
@@ -150,51 +141,44 @@ public class LLMultiDatabaseConnection implements LLDatabaseConnection {
 					luceneHacks
 			);
 		} else {
-			return Flux
-					.fromIterable(connectionToShardMap.entrySet())
-					.flatMap(entry -> {
-						var connectionIndexStructure = indexStructure
-								.setActiveShards(new IntArrayList(entry.getValue()));
-						Flux<LLLuceneIndex> connIndex = entry.getKey()
-								.getLuceneIndex(clusterName,
-										connectionIndexStructure,
-										indicizerAnalyzers,
-										indicizerSimilarities,
-										luceneOptions,
-										luceneHacks
-								).cast(LLLuceneIndex.class).cache().repeat();
-						return Flux
-								.fromIterable(entry.getValue())
-								.zipWith(connIndex);
-					})
-					.collectList()
-					.map(indices -> {
-						var luceneIndices = new LLLuceneIndex[indexStructure.totalShards()];
-						for (var index : indices) {
-							luceneIndices[index.getT1()] = index.getT2();
-						}
-						return new LLMultiLuceneIndex(clusterName,
-								indexStructure,
-								indicizerAnalyzers,
-								indicizerSimilarities,
-								luceneOptions,
-								luceneHacks,
-								luceneIndices
-						);
-					});
+			record ShardToIndex(int shard, LLLuceneIndex connIndex) {}
+			var indices = connectionToShardMap.entrySet().stream().flatMap(entry -> {
+				var connectionIndexStructure = indexStructure.setActiveShards(new IntArrayList(entry.getValue()));
+				LLLuceneIndex connIndex;
+				try {
+					connIndex = entry.getKey().getLuceneIndex(clusterName, connectionIndexStructure,
+							indicizerAnalyzers, indicizerSimilarities, luceneOptions, luceneHacks);
+				} catch (IOException e) {
+					throw new CompletionException(e);
+				}
+				return entry.getValue().intStream().mapToObj(shard -> new ShardToIndex(shard, connIndex));
+			}).toList();
+			var luceneIndices = new LLLuceneIndex[indexStructure.totalShards()];
+			for (var index : indices) {
+				luceneIndices[index.shard] = index.connIndex;
+			}
+			return new LLMultiLuceneIndex(clusterName,
+					indexStructure,
+					indicizerAnalyzers,
+					indicizerSimilarities,
+					luceneOptions,
+					luceneHacks,
+					luceneIndices
+			);
 		}
 	}
 	@Override
-	public Mono<Void> disconnect() {
-		return Flux
-				.fromIterable(allConnections)
-				.flatMap(databaseConnection -> databaseConnection
-						.disconnect()
-						.doOnError(ex -> LOG.error("Failed to close connection", ex))
-						.onErrorResume(ex -> Mono.empty())
-				)
-				.then();
+	public void disconnect() {
+		// todo: parallelize?
+		for (LLDatabaseConnection connection : allConnections) {
+			try {
+				connection.disconnect();
+			} catch (Exception ex) {
+				LOG.error("Failed to close connection", ex);
+			}
+		}
 	}
 }
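Note: connect() and disconnect() keep a best-effort sequential loop behind a "todo: parallelize?" marker. One possible parallel form (a sketch, not part of this commit) keeps the same per-connection error isolation:

    allConnections.parallelStream().forEach(connection -> {
        try {
            connection.connect();
        } catch (Exception ex) {
            LOG.error("Failed to open connection", ex);
        }
    });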

LLMultiLuceneIndex.java

@@ -1,35 +1,31 @@
 package it.cavallium.dbengine.database;
-import com.google.common.collect.Iterables;
+import static it.cavallium.dbengine.lucene.LuceneUtils.getLuceneIndexId;
+import static java.util.stream.Collectors.groupingBy;
 import com.google.common.collect.Multimap;
 import it.cavallium.dbengine.client.IBackuppable;
-import it.cavallium.dbengine.rpc.current.data.IndicizerAnalyzers;
-import it.cavallium.dbengine.rpc.current.data.IndicizerSimilarities;
 import it.cavallium.dbengine.client.query.current.data.Query;
 import it.cavallium.dbengine.client.query.current.data.QueryParams;
-import it.cavallium.dbengine.client.query.current.data.TotalHitsCount;
 import it.cavallium.dbengine.lucene.LuceneHacks;
-import it.cavallium.dbengine.lucene.LuceneUtils;
 import it.cavallium.dbengine.lucene.collector.Buckets;
 import it.cavallium.dbengine.lucene.searcher.BucketParams;
+import it.cavallium.dbengine.rpc.current.data.IndicizerAnalyzers;
+import it.cavallium.dbengine.rpc.current.data.IndicizerSimilarities;
 import it.cavallium.dbengine.rpc.current.data.LuceneIndexStructure;
 import it.cavallium.dbengine.rpc.current.data.LuceneOptions;
 import it.unimi.dsi.fastutil.doubles.DoubleArrayList;
-import it.unimi.dsi.fastutil.objects.ObjectArrayList;
+import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap;
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map.Entry;
-import java.util.Objects;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.atomic.AtomicLong;
-import java.util.logging.Level;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
 import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
-import org.reactivestreams.Publisher;
-import reactor.core.publisher.Flux;
-import reactor.core.publisher.Mono;
-import reactor.core.publisher.SignalType;
 public class LLMultiLuceneIndex implements LLLuceneIndex {
@@ -46,7 +42,6 @@ public class LLMultiLuceneIndex implements LLLuceneIndex {
 	private final LLLuceneIndex[] luceneIndicesById;
 	private final List<LLLuceneIndex> luceneIndicesSet;
 	private final int totalShards;
-	private final Flux<LLLuceneIndex> luceneIndicesFlux;
 	public LLMultiLuceneIndex(String clusterName,
 			LuceneIndexStructure indexStructure,
@@ -70,7 +65,6 @@ public class LLMultiLuceneIndex implements LLLuceneIndex {
 			}
 		}
 		this.luceneIndicesSet = new ArrayList<>(luceneIndicesSet);
-		this.luceneIndicesFlux = Flux.fromIterable(luceneIndicesSet);
 	}
 	@Override
@@ -79,108 +73,115 @@ public class LLMultiLuceneIndex implements LLLuceneIndex {
 	}
 	private LLLuceneIndex getLuceneIndex(LLTerm id) {
-		return luceneIndicesById[LuceneUtils.getLuceneIndexId(id, totalShards)];
+		return luceneIndicesById[getLuceneIndexId(id, totalShards)];
 	}
 	@Override
-	public Mono<Void> addDocument(LLTerm id, LLUpdateDocument doc) {
-		return getLuceneIndex(id).addDocument(id, doc);
+	public void addDocument(LLTerm id, LLUpdateDocument doc) {
+		getLuceneIndex(id).addDocument(id, doc);
 	}
 	@Override
-	public Mono<Long> addDocuments(boolean atomic, Flux<Entry<LLTerm, LLUpdateDocument>> documents) {
-		return documents
-				.groupBy(term -> LuceneUtils.getLuceneIndexId(term.getKey(), totalShards))
-				.flatMap(group -> {
-					var index = luceneIndicesById[group.key()];
-					return index.addDocuments(atomic, group);
-				})
+	public long addDocuments(boolean atomic, Stream<Entry<LLTerm, LLUpdateDocument>> documents) {
+		var groupedRequests = documents
+				.collect(groupingBy(term -> getLuceneIndexId(term.getKey(), totalShards),
+						Int2ObjectOpenHashMap::new,
+						Collectors.toList()
+				));
+		return groupedRequests
+				.int2ObjectEntrySet()
+				.stream()
+				.map(entry -> luceneIndicesById[entry.getIntKey()].addDocuments(atomic, entry.getValue().stream()))
 				.reduce(0L, Long::sum);
 	}
 	@Override
-	public Mono<Void> deleteDocument(LLTerm id) {
-		return getLuceneIndex(id).deleteDocument(id);
+	public void deleteDocument(LLTerm id) {
+		getLuceneIndex(id).deleteDocument(id);
 	}
 	@Override
-	public Mono<Void> update(LLTerm id, LLIndexRequest request) {
-		return getLuceneIndex(id).update(id, request);
+	public void update(LLTerm id, LLIndexRequest request) {
+		getLuceneIndex(id).update(id, request);
 	}
 	@Override
-	public Mono<Long> updateDocuments(Flux<Entry<LLTerm, LLUpdateDocument>> documents) {
-		return documents
-				.log("multi-update-documents", Level.FINEST, false, SignalType.ON_NEXT, SignalType.ON_COMPLETE)
-				.groupBy(term -> getLuceneIndex(term.getKey()))
-				.flatMap(groupFlux -> groupFlux.key().updateDocuments(groupFlux))
+	public long updateDocuments(Stream<Entry<LLTerm, LLUpdateDocument>> documents) {
+		var groupedRequests = documents
+				.collect(groupingBy(term -> getLuceneIndexId(term.getKey(), totalShards),
+						Int2ObjectOpenHashMap::new,
+						Collectors.toList()
+				));
+		return groupedRequests
+				.int2ObjectEntrySet()
+				.stream()
+				.map(entry -> luceneIndicesById[entry.getIntKey()].updateDocuments(entry.getValue().stream()))
 				.reduce(0L, Long::sum);
 	}
 	@Override
-	public Mono<Void> deleteAll() {
-		Iterable<Mono<Void>> it = () -> luceneIndicesSet.stream().map(llLuceneIndex -> llLuceneIndex.deleteAll()).iterator();
-		return Mono.whenDelayError(it);
+	public void deleteAll() {
+		luceneIndicesSet.forEach(LLLuceneIndex::deleteAll);
 	}
 	@Override
-	public Flux<LLSearchResultShard> moreLikeThis(@Nullable LLSnapshot snapshot,
+	public Stream<LLSearchResultShard> moreLikeThis(@Nullable LLSnapshot snapshot,
 			QueryParams queryParams,
 			@Nullable String keyFieldName,
 			Multimap<String, String> mltDocumentFields) {
-		return luceneIndicesFlux.flatMap(luceneIndex -> luceneIndex.moreLikeThis(snapshot,
+		return luceneIndicesSet.parallelStream().flatMap(luceneIndex -> luceneIndex.moreLikeThis(snapshot,
 				queryParams,
 				keyFieldName,
 				mltDocumentFields
-		)).doOnDiscard(DiscardingCloseable.class, LLUtils::onDiscard);
+		));
 	}
-	private Mono<Buckets> mergeShards(List<Buckets> shards) {
-		return Mono.fromCallable(() -> {
-			List<DoubleArrayList> seriesValues = new ArrayList<>();
-			DoubleArrayList totals = new DoubleArrayList(shards.get(0).totals());
+	private Buckets mergeShards(List<Buckets> shards) {
+		List<DoubleArrayList> seriesValues = new ArrayList<>();
+		DoubleArrayList totals = new DoubleArrayList(shards.get(0).totals());
 		for (Buckets shard : shards) {
 			if (seriesValues.isEmpty()) {
 				seriesValues.addAll(shard.seriesValues());
 			} else {
 				for (int serieIndex = 0; serieIndex < seriesValues.size(); serieIndex++) {
 					DoubleArrayList mergedSerieValues = seriesValues.get(serieIndex);
 					for (int dataIndex = 0; dataIndex < mergedSerieValues.size(); dataIndex++) {
 						mergedSerieValues.set(dataIndex, mergedSerieValues.getDouble(dataIndex)
 								+ shard.seriesValues().get(serieIndex).getDouble(dataIndex)
 						);
 					}
 				}
 			}
 			for (int i = 0; i < totals.size(); i++) {
 				totals.set(i, totals.getDouble(i) + shard.totals().getDouble(i));
 			}
 		}
-			return new Buckets(seriesValues, totals);
-		});
+		return new Buckets(seriesValues, totals);
 	}
 	@Override
-	public Flux<LLSearchResultShard> search(@Nullable LLSnapshot snapshot,
+	public Stream<LLSearchResultShard> search(@Nullable LLSnapshot snapshot,
 			QueryParams queryParams,
 			@Nullable String keyFieldName) {
-		return luceneIndicesFlux.flatMap(luceneIndex -> luceneIndex.search(snapshot,
+		return luceneIndicesSet.parallelStream().flatMap(luceneIndex -> luceneIndex.search(snapshot,
 				queryParams,
 				keyFieldName
-		)).doOnDiscard(DiscardingCloseable.class, LLUtils::onDiscard);
+		));
 	}
 	@Override
-	public Mono<Buckets> computeBuckets(@Nullable LLSnapshot snapshot,
+	public Buckets computeBuckets(@Nullable LLSnapshot snapshot,
 			@NotNull List<Query> queries,
 			@Nullable Query normalizationQuery,
 			BucketParams bucketParams) {
-		return luceneIndicesFlux.flatMap(luceneIndex -> luceneIndex.computeBuckets(snapshot,
+		return mergeShards(luceneIndicesSet.parallelStream().map(luceneIndex -> luceneIndex.computeBuckets(snapshot,
 				queries,
 				normalizationQuery,
 				bucketParams
-		)).collectList().flatMap(this::mergeShards).doOnDiscard(DiscardingCloseable.class, LLUtils::onDiscard);
+		)).toList());
 	}
 	@Override
@@ -190,78 +191,60 @@ public class LLMultiLuceneIndex implements LLLuceneIndex {
 	@Override
 	public void close() {
-		Iterable<Mono<Void>> it = () -> luceneIndicesSet.stream().map(e -> Mono.<Void>fromRunnable(e::close)).iterator();
-		Mono.whenDelayError(it).transform(LLUtils::handleDiscard).block();
+		luceneIndicesSet.parallelStream().forEach(SafeCloseable::close);
 	}
 	@Override
-	public Mono<Void> flush() {
-		Iterable<Mono<Void>> it = () -> luceneIndicesSet.stream().map(LLLuceneIndex::flush).iterator();
-		return Mono.whenDelayError(it);
+	public void flush() {
+		luceneIndicesSet.parallelStream().forEach(LLLuceneIndex::flush);
 	}
 	@Override
-	public Mono<Void> waitForMerges() {
-		Iterable<Mono<Void>> it = () -> luceneIndicesSet.stream().map(LLLuceneIndex::waitForMerges).iterator();
-		return Mono.whenDelayError(it);
+	public void waitForMerges() {
+		luceneIndicesSet.parallelStream().forEach(LLLuceneIndex::waitForMerges);
 	}
 	@Override
-	public Mono<Void> waitForLastMerges() {
-		Iterable<Mono<Void>> it = () -> luceneIndicesSet.stream().map(LLLuceneIndex::waitForLastMerges).iterator();
-		return Mono.whenDelayError(it);
+	public void waitForLastMerges() {
		luceneIndicesSet.parallelStream().forEach(LLLuceneIndex::waitForLastMerges);
 	}
 	@Override
-	public Mono<Void> refresh(boolean force) {
-		Iterable<Mono<Void>> it = () -> luceneIndicesSet.stream().map(index -> index.refresh(force)).iterator();
-		return Mono.whenDelayError(it);
+	public void refresh(boolean force) {
+		luceneIndicesSet.parallelStream().forEach(index -> index.refresh(force));
 	}
 	@Override
-	public Mono<LLSnapshot> takeSnapshot() {
-		return Mono
-				// Generate next snapshot index
-				.fromCallable(nextSnapshotNumber::getAndIncrement)
-				.flatMap(snapshotIndex -> luceneIndicesFlux
-						.flatMapSequential(llLuceneIndex -> llLuceneIndex.takeSnapshot())
-						.collectList()
-						.doOnNext(instancesSnapshotsArray -> registeredSnapshots.put(snapshotIndex, instancesSnapshotsArray))
-						.thenReturn(new LLSnapshot(snapshotIndex))
-				);
+	public LLSnapshot takeSnapshot() {
+		// Generate next snapshot index
+		var snapshotIndex = nextSnapshotNumber.getAndIncrement();
+		var snapshot = luceneIndicesSet.parallelStream().map(LLSnapshottable::takeSnapshot).toList();
+		registeredSnapshots.put(snapshotIndex, snapshot);
+		return new LLSnapshot(snapshotIndex);
 	}
 	@Override
-	public Mono<Void> releaseSnapshot(LLSnapshot snapshot) {
-		return Mono
-				.fromCallable(() -> registeredSnapshots.remove(snapshot.getSequenceNumber()))
-				.flatMapIterable(list -> list)
-				.index()
-				.flatMap(tuple -> {
-					int index = (int) (long) tuple.getT1();
-					LLSnapshot instanceSnapshot = tuple.getT2();
-					return luceneIndicesSet.get(index).releaseSnapshot(instanceSnapshot);
-				})
-				.then();
+	public void releaseSnapshot(LLSnapshot snapshot) {
+		var list = registeredSnapshots.remove(snapshot.getSequenceNumber());
+		for (int shardIndex = 0; shardIndex < list.size(); shardIndex++) {
+			var luceneIndex = luceneIndicesSet.get(shardIndex);
+			LLSnapshot instanceSnapshot = list.get(shardIndex);
+			luceneIndex.releaseSnapshot(instanceSnapshot);
+		}
 	}
 	@Override
-	public Mono<Void> pauseForBackup() {
-		return Mono.whenDelayError(Iterables.transform(this.luceneIndicesSet, IBackuppable::pauseForBackup));
+	public void pauseForBackup() {
+		this.luceneIndicesSet.forEach(IBackuppable::pauseForBackup);
 	}
 	@Override
-	public Mono<Void> resumeAfterBackup() {
-		return Mono.whenDelayError(Iterables.transform(this.luceneIndicesSet, IBackuppable::resumeAfterBackup));
+	public void resumeAfterBackup() {
+		this.luceneIndicesSet.forEach(IBackuppable::resumeAfterBackup);
 	}
 	@Override
 	public boolean isPaused() {
-		for (LLLuceneIndex llLuceneIndex : this.luceneIndicesSet) {
-			if (llLuceneIndex.isPaused()) {
-				return true;
-			}
-		}
-		return false;
+		return this.luceneIndicesSet.stream().anyMatch(IBackuppable::isPaused);
 	}
 }
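Note: mergeShards() sums the per-shard bucket series element-wise, and the totals likewise; a worked sketch with arbitrary numbers:

    Buckets left = new Buckets(List.of(DoubleArrayList.wrap(new double[] {1, 2})), DoubleArrayList.wrap(new double[] {3}));
    Buckets right = new Buckets(List.of(DoubleArrayList.wrap(new double[] {3, 4})), DoubleArrayList.wrap(new double[] {7}));
    // mergeShards(List.of(left, right)) -> series [[4.0, 6.0]], totals [10.0]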

LLRange.java

@@ -1,195 +1,92 @@
 package it.cavallium.dbengine.database;
-import io.netty5.buffer.Buffer;
-import io.netty5.buffer.Drop;
-import io.netty5.buffer.Owned;
-import io.netty5.util.Send;
-import io.netty5.buffer.internal.ResourceSupport;
-import it.cavallium.dbengine.utils.SimpleResource;
+import it.cavallium.dbengine.buffers.Buf;
 import java.util.StringJoiner;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
 import org.jetbrains.annotations.Nullable;
 /**
  * Range of data, from min (inclusive), to max (exclusive)
  */
-public class LLRange extends SimpleResource {
-	private static final LLRange RANGE_ALL = new LLRange(null, null, (Buffer) null, false);
+public class LLRange {
+	private static final LLRange RANGE_ALL = new LLRange(null, null, (Buf) null);
 	@Nullable
-	private final Buffer min;
+	private final Buf min;
 	@Nullable
-	private final Buffer max;
+	private final Buf max;
 	@Nullable
-	private final Buffer single;
-	private LLRange(Send<Buffer> min, Send<Buffer> max, Send<Buffer> single, boolean closeable) {
-		super(closeable);
-		assert single == null || (min == null && max == null);
-		this.min = min != null ? min.receive().makeReadOnly() : null;
-		this.max = max != null ? max.receive().makeReadOnly() : null;
-		this.single = single != null ? single.receive().makeReadOnly() : null;
-	}
-	private LLRange(Buffer min, Buffer max, Buffer single, boolean closeable) {
-		super(closeable);
+	private final Buf single;
+	private LLRange(@Nullable Buf min, @Nullable Buf max, @Nullable Buf single) {
 		assert single == null || (min == null && max == null);
-		this.min = min != null ? min.makeReadOnly() : null;
-		this.max = max != null ? max.makeReadOnly() : null;
-		this.single = single != null ? single.makeReadOnly() : null;
+		this.min = min;
+		this.max = max;
+		this.single = single;
 	}
 	public static LLRange all() {
 		return RANGE_ALL;
 	}
-	public static LLRange from(Send<Buffer> min) {
-		return new LLRange(min, null, null, true);
+	public static LLRange from(Buf min) {
+		return new LLRange(min, null, null);
 	}
-	public static LLRange to(Send<Buffer> max) {
-		return new LLRange(null, max, null, true);
+	public static LLRange to(Buf max) {
+		return new LLRange(null, max, null);
 	}
-	public static LLRange single(Send<Buffer> single) {
-		return new LLRange(null, null, single, true);
+	public static LLRange single(Buf single) {
+		return new LLRange(null, null, single);
 	}
-	public static LLRange singleUnsafe(Buffer single) {
-		return new LLRange(null, null, single, true);
-	}
-	public static LLRange of(Send<Buffer> min, Send<Buffer> max) {
-		return new LLRange(min, max, null, true);
-	}
-	public static LLRange ofUnsafe(Buffer min, Buffer max) {
-		return new LLRange(min, max, null, true);
+	public static LLRange of(Buf min, Buf max) {
+		return new LLRange(min, max, null);
 	}
 	public boolean isAll() {
-		ensureOpen();
 		return min == null && max == null && single == null;
 	}
 	public boolean isSingle() {
-		ensureOpen();
 		return single != null;
 	}
 	public boolean hasMin() {
-		ensureOpen();
 		return min != null || single != null;
 	}
-	public Send<Buffer> getMin() {
-		ensureOpen();
-		if (min != null) {
-			// todo: use a read-only copy
-			return min.copy().send();
-		} else if (single != null) {
-			// todo: use a read-only copy
-			return single.copy().send();
-		} else {
-			return null;
-		}
-	}
-	public Buffer getMinUnsafe() {
-		ensureOpen();
+	public Buf getMin() {
+		// todo: use a read-only copy
 		if (min != null) {
 			return min;
-		} else if (single != null) {
-			return single;
-		} else {
-			return null;
-		}
-	}
-	public Buffer getMinCopy() {
-		ensureOpen();
-		if (min != null) {
-			return min.copy();
-		} else if (single != null) {
-			return single.copy();
-		} else {
-			return null;
+		} else {
+			return single;
 		}
 	}
 	public boolean hasMax() {
-		ensureOpen();
 		return max != null || single != null;
 	}
-	public Send<Buffer> getMax() {
-		ensureOpen();
-		if (max != null) {
-			// todo: use a read-only copy
-			return max.copy().send();
-		} else if (single != null) {
-			// todo: use a read-only copy
-			return single.copy().send();
-		} else {
-			return null;
-		}
-	}
-	public Buffer getMaxUnsafe() {
-		ensureOpen();
+	public Buf getMax() {
+		// todo: use a read-only copy
 		if (max != null) {
 			return max;
-		} else if (single != null) {
-			return single;
-		} else {
-			return null;
+		} else {
+			return single;
 		}
 	}
-	public Buffer getMaxCopy() {
-		ensureOpen();
-		if (max != null) {
-			return max.copy();
-		} else if (single != null) {
-			return single.copy();
-		} else {
-			return null;
-		}
-	}
-	public Send<Buffer> getSingle() {
-		ensureOpen();
+	public Buf getSingle() {
 		assert isSingle();
 		// todo: use a read-only copy
-		return single != null ? single.copy().send() : null;
-	}
-	public Buffer getSingleUnsafe() {
-		ensureOpen();
-		assert isSingle();
 		return single;
 	}
-	@Override
-	protected void ensureOpen() {
-		super.ensureOpen();
-		assert min == null || min.isAccessible() : "Range min not owned";
-		assert max == null || max.isAccessible() : "Range max not owned";
-		assert single == null || single.isAccessible() : "Range single not owned";
-	}
-	@Override
-	protected void onClose() {
-		if (min != null && min.isAccessible()) {
-			min.close();
-		}
-		if (max != null && max.isAccessible()) {
-			max.close();
-		}
-		if (single != null && single.isAccessible()) {
-			single.close();
-		}
+	public Buf getSingleUnsafe() {
+		assert isSingle();
+		return single;
 	}
 	@Override
@@ -220,12 +117,7 @@ public class LLRange extends SimpleResource {
 	}
 	public LLRange copy() {
-		ensureOpen();
 		// todo: use a read-only copy
-		return new LLRange(min != null ? min.copy().send() : null,
-				max != null ? max.copy().send() : null,
-				single != null ? single.copy().send() : null,
-				true
-		);
+		return new LLRange(min, max, single);
 	}
 }
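Note: LLRange is now a plain immutable value, with nothing to close and no Send ownership transfer. A sketch of the remaining factories (byte contents are illustrative):

    LLRange everything = LLRange.all();                            // ξ
    LLRange tail = LLRange.from(Buf.wrap(new byte[] {0x05}));      // [min, *)
    LLRange head = LLRange.to(Buf.wrap(new byte[] {0x10}));        // [*, max)
    LLRange oneKey = LLRange.single(Buf.wrap(new byte[] {0x07}));  // exactly one key
    LLRange window = LLRange.of(Buf.wrap(new byte[] {0x05}), Buf.wrap(new byte[] {0x10})); // [min, max)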

LLSearchResult.java

@@ -1,13 +1,13 @@
 package it.cavallium.dbengine.database;
 import java.util.function.BiFunction;
+import java.util.stream.Stream;
 import org.jetbrains.annotations.NotNull;
-import reactor.core.publisher.Flux;
-public record LLSearchResult(Flux<LLSearchResultShard> results) {
+public record LLSearchResult(Stream<LLSearchResultShard> results) {
 	@NotNull
 	public static BiFunction<LLSearchResult, LLSearchResult, LLSearchResult> accumulator() {
-		return (a, b) -> new LLSearchResult(Flux.merge(a.results, b.results));
+		return (a, b) -> new LLSearchResult(Stream.concat(a.results, b.results));
 	}
 }
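Note: Stream.concat is lazy like Flux.merge, but it is ordered (left then right) and single-use; a sketch:

    LLSearchResult combined = LLSearchResult.accumulator().apply(partial1, partial2);
    // combined.results() yields all of partial1's shards, then partial2's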

LLSearchResultShard.java

@@ -1,32 +1,27 @@
 package it.cavallium.dbengine.database;
-import io.netty5.buffer.Drop;
-import io.netty5.buffer.Owned;
-import io.netty5.buffer.internal.ResourceSupport;
-import it.cavallium.dbengine.client.LuceneIndexImpl;
 import it.cavallium.dbengine.client.query.current.data.TotalHitsCount;
 import it.cavallium.dbengine.lucene.LuceneCloseable;
 import it.cavallium.dbengine.utils.SimpleResource;
-import it.unimi.dsi.fastutil.objects.ObjectArrayList;
 import java.util.List;
 import java.util.Objects;
+import java.util.stream.Stream;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
-import reactor.core.publisher.Flux;
 public class LLSearchResultShard extends SimpleResource implements DiscardingCloseable {
 	private static final Logger LOG = LogManager.getLogger(LLSearchResultShard.class);
-	private final Flux<LLKeyScore> results;
+	private final Stream<LLKeyScore> results;
 	private final TotalHitsCount totalHitsCount;
-	public LLSearchResultShard(Flux<LLKeyScore> results, TotalHitsCount totalHitsCount) {
+	public LLSearchResultShard(Stream<LLKeyScore> results, TotalHitsCount totalHitsCount) {
 		this.results = results;
 		this.totalHitsCount = totalHitsCount;
 	}
-	public static LLSearchResultShard withResource(Flux<LLKeyScore> results,
+	public static LLSearchResultShard withResource(Stream<LLKeyScore> results,
 			TotalHitsCount totalHitsCount,
 			SafeCloseable closeableResource) {
 		if (closeableResource instanceof LuceneCloseable luceneCloseable) {
@@ -36,7 +31,7 @@ public class LLSearchResultShard extends SimpleResource implements DiscardingCloseable {
 		}
 	}
-	public Flux<LLKeyScore> results() {
+	public Stream<LLKeyScore> results() {
 		ensureOpen();
 		return results;
 	}
@@ -74,7 +69,7 @@ public class LLSearchResultShard extends SimpleResource implements DiscardingCloseable {
 		private final List<SafeCloseable> resources;
-		public ResourcesLLSearchResultShard(Flux<LLKeyScore> resultsFlux,
+		public ResourcesLLSearchResultShard(Stream<LLKeyScore> resultsFlux,
 				TotalHitsCount count,
 				List<SafeCloseable> resources) {
 			super(resultsFlux, count);
@@ -102,7 +97,7 @@ public class LLSearchResultShard extends SimpleResource implements DiscardingCloseable {
 		private final List<LuceneCloseable> resources;
-		public LuceneLLSearchResultShard(Flux<LLKeyScore> resultsFlux,
+		public LuceneLLSearchResultShard(Stream<LLKeyScore> resultsFlux,
 				TotalHitsCount count,
 				List<LuceneCloseable> resources) {
 			super(resultsFlux, count);

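Note: unlike a replayable Flux, the Stream handed out by results() supports exactly one terminal operation, so callers must consume it once:

    Stream<LLKeyScore> hits = shard.results();
    long n = hits.count();  // first terminal operation: fine
    // hits.count();        // would throw IllegalStateException (stream already operated upon)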
LLSingleton.java

@@ -1,32 +1,22 @@
 package it.cavallium.dbengine.database;
-import io.netty5.buffer.Buffer;
-import io.netty5.buffer.BufferAllocator;
-import io.netty5.util.Send;
+import it.cavallium.dbengine.buffers.Buf;
 import it.cavallium.dbengine.database.disk.BinarySerializationFunction;
-import it.cavallium.dbengine.database.serialization.SerializationFunction;
-import it.unimi.dsi.fastutil.bytes.ByteList;
-import java.util.function.Function;
+import java.io.IOException;
 import org.jetbrains.annotations.Nullable;
-import reactor.core.publisher.Mono;
 public interface LLSingleton extends LLKeyValueDatabaseStructure {
-	BufferAllocator getAllocator();
-	Mono<Buffer> get(@Nullable LLSnapshot snapshot);
-	Mono<Void> set(Mono<Buffer> value);
-	default Mono<Buffer> update(BinarySerializationFunction updater,
-			UpdateReturnMode updateReturnMode) {
-		return this
-				.updateAndGetDelta(updater)
-				.transform(prev -> LLUtils.resolveLLDelta(prev, updateReturnMode));
+	Buf get(@Nullable LLSnapshot snapshot);
+	void set(Buf value);
+	default Buf update(BinarySerializationFunction updater, UpdateReturnMode updateReturnMode) {
+		var prev = this.updateAndGetDelta(updater);
+		return LLUtils.resolveLLDelta(prev, updateReturnMode);
 	}
-	Mono<LLDelta> updateAndGetDelta(BinarySerializationFunction updater);
+	LLDelta updateAndGetDelta(BinarySerializationFunction updater);
 	String getColumnName();
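Note: a sketch of a blocking counter increment on a singleton. The UpdateReturnMode constant, the shape of BinarySerializationFunction (Buf in, Buf out), and the Guava Longs round-trip are assumptions, not shown by this diff:

    static long increment(LLSingleton counter) {
        Buf updated = counter.update(prev -> {
            long current = prev == null ? 0L : Longs.fromByteArray(prev.asArray());
            return Buf.wrap(Longs.toByteArray(current + 1));
        }, UpdateReturnMode.GET_NEW_VALUE);
        return Longs.fromByteArray(updated.asArray());
    }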

LLSnapshottable.java

@@ -1,10 +1,10 @@
 package it.cavallium.dbengine.database;
-import reactor.core.publisher.Mono;
+import java.io.IOException;
 public interface LLSnapshottable {
-	Mono<LLSnapshot> takeSnapshot();
-	Mono<Void> releaseSnapshot(LLSnapshot snapshot);
+	LLSnapshot takeSnapshot();
+	void releaseSnapshot(LLSnapshot snapshot);
 }
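Note: the snapshot lifecycle is now plain call/return; a try/finally block replaces the old reactive usingWhen-style resource management:

    LLSnapshot snapshot = snapshottable.takeSnapshot();
    try {
        // ... run reads against a consistent point in time ...
    } finally {
        snapshottable.releaseSnapshot(snapshot);
    }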

LLTerm.java

@@ -1,7 +1,6 @@
 package it.cavallium.dbengine.database;
 import java.util.Objects;
-import org.apache.lucene.index.Term;
 import org.apache.lucene.util.BytesRef;
 public class LLTerm {

LLUtils.java

@@ -1,22 +1,11 @@
 package it.cavallium.dbengine.database;
-import static io.netty5.buffer.StandardAllocationTypes.OFF_HEAP;
-import static io.netty5.buffer.internal.InternalBufferUtils.NO_OP_DROP;
-import static it.cavallium.dbengine.lucene.LuceneUtils.luceneScheduler;
 import static org.apache.commons.lang3.ArrayUtils.EMPTY_BYTE_ARRAY;
 import com.google.common.primitives.Ints;
 import com.google.common.primitives.Longs;
-import io.netty5.buffer.AllocatorControl;
-import io.netty5.buffer.Buffer;
-import io.netty5.buffer.BufferAllocator;
-import io.netty5.buffer.BufferComponent;
-import io.netty5.buffer.CompositeBuffer;
-import io.netty5.buffer.Drop;
-import io.netty5.util.Resource;
-import io.netty5.util.Send;
-import io.netty5.util.IllegalReferenceCountException;
-import it.cavallium.dbengine.database.serialization.SerializationException;
+import io.netty.util.IllegalReferenceCountException;
+import it.cavallium.dbengine.buffers.Buf;
 import it.cavallium.dbengine.database.serialization.SerializationFunction;
 import it.cavallium.dbengine.lucene.LuceneCloseable;
 import it.cavallium.dbengine.lucene.LuceneUtils;
@@ -26,8 +15,6 @@ import java.lang.invoke.MethodHandles;
 import java.lang.invoke.MethodHandles.Lookup;
 import java.lang.invoke.MethodType;
 import java.nio.ByteBuffer;
-import java.nio.charset.Charset;
-import java.time.Duration;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
@@ -36,11 +23,8 @@ import java.util.Map.Entry;
 import java.util.Objects;
 import java.util.Optional;
 import java.util.Set;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicReference;
-import java.util.function.Function;
-import java.util.function.ToIntFunction;
+import java.util.function.Consumer;
+import java.util.stream.Stream;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.Marker;
@@ -69,16 +53,7 @@ import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
 import org.rocksdb.AbstractImmutableNativeReference;
 import org.rocksdb.AbstractNativeReference;
-import org.rocksdb.ColumnFamilyHandle;
 import org.rocksdb.ReadOptions;
-import org.rocksdb.RocksDB;
-import reactor.core.Disposable;
-import reactor.core.Fuseable.QueueSubscription;
-import reactor.core.publisher.Flux;
-import reactor.core.publisher.Hooks;
-import reactor.core.publisher.Mono;
-import reactor.core.scheduler.Scheduler;
-import reactor.core.scheduler.Schedulers;
 @SuppressWarnings("unused")
 public class LLUtils {
@@ -89,13 +64,11 @@ public class LLUtils {
 	public static final int INITIAL_DIRECT_READ_BYTE_BUF_SIZE_BYTES = 4096;
 	public static final ByteBuffer EMPTY_BYTE_BUFFER = ByteBuffer.allocateDirect(0).asReadOnlyBuffer();
-	private static final AllocatorControl NO_OP_ALLOCATION_CONTROL = (AllocatorControl) BufferAllocator.offHeapUnpooled();
 	private static final byte[] RESPONSE_TRUE = new byte[]{1};
 	private static final byte[] RESPONSE_FALSE = new byte[]{0};
 	private static final byte[] RESPONSE_TRUE_BUF = new byte[]{1};
 	private static final byte[] RESPONSE_FALSE_BUF = new byte[]{0};
 	public static final byte[][] LEXICONOGRAPHIC_ITERATION_SEEKS = new byte[256][1];
-	public static final AtomicBoolean hookRegistered = new AtomicBoolean();
 	public static final boolean MANUAL_READAHEAD = false;
 	public static final boolean ALLOW_STATIC_OPTIONS = false;
@@ -111,29 +84,38 @@ public class LLUtils {
 	private static final MethodHandle IS_ACCESSIBLE_METHOD_HANDLE;
+	private static final MethodHandle IS_IN_NON_BLOCKING_THREAD_MH;
+	private static final Consumer<Object> NULL_CONSUMER = ignored -> {};
 	static {
 		for (int i1 = 0; i1 < 256; i1++) {
 			var b = LEXICONOGRAPHIC_ITERATION_SEEKS[i1];
 			b[0] = (byte) i1;
 		}
-		var methodType = MethodType.methodType(boolean.class);
-		MethodHandle isAccessibleMethodHandle = null;
-		try {
-			isAccessibleMethodHandle = PUBLIC_LOOKUP.findVirtual(AbstractNativeReference.class, "isAccessible", methodType);
-		} catch (NoSuchMethodException e) {
-			logger.debug("Failed to find isAccessible(): no such method");
-		} catch (IllegalAccessException e) {
-			logger.debug("Failed to find isAccessible()", e);
+		{
+			var methodType = MethodType.methodType(boolean.class);
+			MethodHandle isAccessibleMethodHandle = null;
+			try {
+				isAccessibleMethodHandle = PUBLIC_LOOKUP.findVirtual(AbstractNativeReference.class, "isAccessible", methodType);
+			} catch (NoSuchMethodException e) {
+				logger.debug("Failed to find isAccessible(): no such method");
+			} catch (IllegalAccessException e) {
+				logger.debug("Failed to find isAccessible()", e);
+			}
+			IS_ACCESSIBLE_METHOD_HANDLE = isAccessibleMethodHandle;
 		}
-		IS_ACCESSIBLE_METHOD_HANDLE = isAccessibleMethodHandle;
-		initHooks();
-	}
-	public static void initHooks() {
-		if (hookRegistered.compareAndSet(false, true)) {
-			Hooks.onNextDropped(LLUtils::onNextDropped);
-			//todo: add Hooks.onDiscard when it will be implemented
-			// Hooks.onDiscard(LLUtils::onDiscard);
+		{
+			MethodHandle isInNonBlockingThreadMethodHandle = null;
+			try {
+				var clz = Objects.requireNonNull(PUBLIC_LOOKUP.findClass("reactor.core.scheduler.Schedulers"),
+						"reactor.core.scheduler.Schedulers not found");
+				var methodType = MethodType.methodType(boolean.class);
+				isInNonBlockingThreadMethodHandle = PUBLIC_LOOKUP.findStatic(clz, "isInNonBlockingThread", methodType);
+			} catch (NoSuchMethodException | ClassNotFoundException | IllegalAccessException | NullPointerException e) {
+				logger.debug("Failed to obtain access to reactor core schedulers");
+			}
+			IS_IN_NON_BLOCKING_THREAD_MH = isInNonBlockingThreadMethodHandle;
 		}
 	}
@ -141,26 +123,17 @@ public class LLUtils {
return response[0] == 1; return response[0] == 1;
} }
public static boolean responseToBoolean(Send<Buffer> responseToReceive) {
try (var response = responseToReceive.receive()) {
assert response.readableBytes() == 1;
return response.getByte(response.readerOffset()) == 1;
}
}

public static boolean responseToBoolean(Buffer response) {
try (response) {
assert response.readableBytes() == 1;
return response.getByte(response.readerOffset()) == 1;
}
}

public static boolean responseToBoolean(Buf response) {
assert response.size() == 1;
return response.getBoolean(0);
}

public static byte[] booleanToResponse(boolean bool) {
return bool ? RESPONSE_TRUE : RESPONSE_FALSE;
}

public static Buffer booleanToResponseByteBuffer(BufferAllocator alloc, boolean bool) {
return alloc.allocate(1).writeByte(bool ? (byte) 1 : 0);
}

public static Buf booleanToResponseByteBuffer(boolean bool) {
return Buf.wrap(new byte[] {bool ? (byte) 1 : 0});
}
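Editor's note: a minimal sketch of how the two new Buf-based helpers pair up; this example is not part of the commit.

// Round-trip: encode a boolean into a 1-byte Buf, then decode it again.
Buf response = LLUtils.booleanToResponseByteBuffer(true);
assert LLUtils.responseToBoolean(response); // reads getBoolean(0)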
@Nullable
@@ -307,9 +280,9 @@ public class LLUtils {
return new it.cavallium.dbengine.database.LLKeyScore(hit.docId(), hit.shardId(), hit.score(), hit.key());
}
public static String toStringSafe(byte @Nullable[] key) {
try {
if (key == null) {
return toString(key);
} else {
return "(released)";
@@ -319,7 +292,7 @@ public class LLUtils {
}
}

public static String toStringSafe(@Nullable Buf key) {
try {
if (key == null) {
return toString(key);
@@ -333,7 +306,7 @@ public class LLUtils {

public static String toStringSafe(@Nullable LLRange range) {
try {
if (range == null) {
return toString(range);
} else {
return "(released)";
@@ -349,60 +322,21 @@ public class LLUtils {
} else if (range.isAll()) {
return "ξ";
} else if (range.hasMin() && range.hasMax()) {
return "[" + toStringSafe(range.getMin()) + "," + toStringSafe(range.getMax()) + ")";
} else if (range.hasMin()) {
return "[" + toStringSafe(range.getMin()) + ",*)";
} else if (range.hasMax()) {
return "[*," + toStringSafe(range.getMax()) + ")";
} else {
return "";
}
}
public static String toString(@Nullable Buffer key) {
if (key == null) {
return "null";
} else {
int startIndex = key.readerOffset();
int iMax = key.readableBytes() - 1;
int iLimit = 128;
if (iMax <= -1) {
return "[]";
} else {
StringBuilder arraySB = new StringBuilder();
StringBuilder asciiSB = new StringBuilder();
boolean isAscii = true;
arraySB.append('[');
int i = 0;
while (true) {
var byteVal = key.getUnsignedByte(startIndex + i);
arraySB.append(byteVal);
if (isAscii) {
if (byteVal >= 32 && byteVal < 127) {
asciiSB.append((char) byteVal);
} else if (byteVal == 0) {
asciiSB.append('␀');
} else {
isAscii = false;
asciiSB = null;
}
}
if (i == iLimit) {
arraySB.append("");
}
if (i == iMax || i == iLimit) {
if (isAscii) {
return asciiSB.insert(0, "\"").append("\"").toString();
} else {
return arraySB.append(']').toString();
}
}
arraySB.append(", ");
++i;
}
}
}
}

public static String toString(@Nullable Buf key) {
if (key == null) {
return "null";
} else {
return toString(key.asArray());
}
}
@@ -453,21 +387,11 @@ public class LLUtils {
}
}

public static boolean equals(Buffer a, Buffer b) {
if (a == null && b == null) {
return true;
} else if (a != null && b != null) {
var aCur = a.openCursor();
var bCur = b.openCursor();
if (aCur.bytesLeft() != bCur.bytesLeft()) {
return false;
}
while (aCur.readByte() && bCur.readByte()) {
if (aCur.getByte() != bCur.getByte()) {
return false;
}
}
return true;
} else {
return false;
}
}

public static boolean equals(Buf a, Buf b) {
if (a == null && b == null) {
return true;
} else if (a != null && b != null) {
return a.equals(b);
} else {
return false;
}
@@ -481,123 +405,27 @@ public class LLUtils {
 * <p>
 * {@code a[aStartIndex : aStartIndex + length] == b[bStartIndex : bStartIndex + length]}
 */
public static boolean equals(Buffer a, int aStartIndex, Buffer b, int bStartIndex, int length) {
var aCur = a.openCursor(aStartIndex, length);
var bCur = b.openCursor(bStartIndex, length);
if (aCur.bytesLeft() != bCur.bytesLeft()) {
return false;
}
while (aCur.readByte() && bCur.readByte()) {
if (aCur.getByte() != bCur.getByte()) {
return false;
}
}
return true;
}

public static boolean equals(Buf a, int aStartIndex, Buf b, int bStartIndex, int length) {
return a.equals(aStartIndex, b, bStartIndex, length);
}
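Editor's example of the region-based comparison above (Buf.wrap is shown elsewhere in this commit):

Buf a = Buf.wrap(new byte[] {9, 1, 2, 3});
Buf b = Buf.wrap(new byte[] {1, 2, 3, 7});
boolean eq = LLUtils.equals(a, 1, b, 0, 3); // compares {1, 2, 3} with {1, 2, 3}, so true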
public static byte[] toArray(@Nullable Buffer key) {
if (key == null) {
return EMPTY_BYTE_ARRAY;
}
byte[] array = new byte[key.readableBytes()];
key.copyInto(key.readerOffset(), array, 0, key.readableBytes());
return array;
}

/**
 * @return the inner array, DO NOT MODIFY IT
 */
public static byte[] asArray(@Nullable Buf key) {
if (key == null) {
return EMPTY_BYTE_ARRAY;
}
return key.asArray();
}
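Because asArray returns the backing array rather than a copy, callers that need to mutate the bytes should copy first. Editor's sketch:

byte[] raw = LLUtils.asArray(key); // aliases the Buf's internal storage
byte[] safe = java.util.Arrays.copyOf(raw, raw.length); // mutate this one instead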
public static List<byte[]> toArray(List<Buffer> input) {
List<byte[]> result = new ArrayList<>(input.size());
for (Buffer byteBuf : input) {
result.add(toArray(byteBuf));
}
return result;
}

public static int hashCode(Buffer buf) {
if (buf == null) {
return 0;
}
int result = 1;
var cur = buf.openCursor();
while (cur.readByte()) {
var element = cur.getByte();
result = 31 * result + element;
}
return result;
}

public static int hashCode(Buf buf) {
if (buf == null) {
return 0;
}
return buf.hashCode();
}
/**
* @return null if size is equal to RocksDB.NOT_FOUND
*/
@Nullable
public static Buffer readNullableDirectNioBuffer(BufferAllocator alloc, ToIntFunction<ByteBuffer> reader) {
if (alloc.getAllocationType() != OFF_HEAP) {
throw new UnsupportedOperationException("Allocator type is not direct: " + alloc);
}
var directBuffer = alloc.allocate(INITIAL_DIRECT_READ_BYTE_BUF_SIZE_BYTES);
try {
assert directBuffer.readerOffset() == 0;
assert directBuffer.writerOffset() == 0;
var directBufferWriter = ((BufferComponent) directBuffer).writableBuffer();
assert directBufferWriter.position() == 0;
assert directBufferWriter.capacity() >= directBuffer.capacity();
assert directBufferWriter.isDirect();
int trueSize = reader.applyAsInt(directBufferWriter);
if (trueSize == RocksDB.NOT_FOUND) {
directBuffer.close();
return null;
}
int readSize = directBufferWriter.limit();
if (trueSize < readSize) {
throw new IllegalStateException();
} else if (trueSize == readSize) {
return directBuffer.writerOffset(directBufferWriter.limit());
} else {
assert directBuffer.readerOffset() == 0;
directBuffer.ensureWritable(trueSize);
assert directBuffer.writerOffset() == 0;
directBufferWriter = ((BufferComponent) directBuffer).writableBuffer();
assert directBufferWriter.position() == 0;
assert directBufferWriter.isDirect();
reader.applyAsInt(directBufferWriter.position(0));
return directBuffer.writerOffset(trueSize);
}
} catch (Throwable t) {
directBuffer.close();
throw t;
}
}
public static void ensureBlocking() {
if (Schedulers.isInNonBlockingThread()) {
throw new UnsupportedOperationException("Called collect in a nonblocking thread");
}
}
// todo: remove this ugly method
/**
* cleanup resource
* @param cleanupOnSuccess if true the resource will be cleaned up if the function is successful
*/
public static <U, T extends Resource<T>> Mono<U> usingSendResource(Mono<Send<T>> resourceSupplier,
Function<T, Mono<U>> resourceClosure,
boolean cleanupOnSuccess) {
return Mono.usingWhen(resourceSupplier.map(Send::receive), resourceClosure, r -> {
if (cleanupOnSuccess) {
return Mono.fromRunnable(() -> r.close());
} else {
return Mono.empty();
}
}, (r, ex) -> Mono.fromRunnable(() -> {
if (r.isAccessible()) {
r.close();
}
}), r -> Mono.fromRunnable(() -> {
if (r.isAccessible()) {
r.close();
}
}));
} }
public static boolean isSet(ScoreDoc[] scoreDocs) {
@@ -609,26 +437,6 @@ public class LLUtils {
return true;
}
public static Send<Buffer> empty(BufferAllocator allocator) {
try {
return allocator.allocate(0).send();
} catch (Exception ex) {
try (var empty = CompositeBuffer.compose(allocator)) {
assert empty.readableBytes() == 0;
assert empty.capacity() == 0;
return empty.send();
}
}
}
public static Send<Buffer> copy(BufferAllocator allocator, Buffer buf) {
if (CompositeBuffer.isComposite(buf) && buf.capacity() == 0) {
return empty(allocator);
} else {
return buf.copy().send();
}
}
public static boolean isBoundedRange(LLRange rangeShared) {
return rangeShared.hasMin() && rangeShared.hasMax();
}

@@ -649,124 +457,26 @@ public class LLUtils {
//noinspection resource
readOptions = new ReadOptions();
}
var hugeRange = !boundedRange && !smallRange;
if (hugeRange) {
if (readOptions.readaheadSize() <= 0) {
readOptions.setReadaheadSize(4 * 1024 * 1024); // 4MiB
}
}
readOptions.setFillCache(canFillCache && !hugeRange);
readOptions.setVerifyChecksums(!FORCE_DISABLE_CHECKSUM_VERIFICATION && !hugeRange);
return readOptions;
}
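The effect for a huge (unbounded, non-small) scan is equivalent to tuning a plain RocksDB ReadOptions by hand. Editor's sketch using the same standard RocksJava setters that appear above:

try (var scanOptions = new ReadOptions()) {
scanOptions.setReadaheadSize(4 * 1024 * 1024); // large sequential reads
scanOptions.setFillCache(false); // don't evict hot blocks for a one-off scan
scanOptions.setVerifyChecksums(false); // skip per-block verification for speed
// ... run the iterator with scanOptions ...
}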
public static Mono<Void> finalizeResource(Resource<?> resource) {
Mono<Void> runnable = Mono.fromRunnable(() -> LLUtils.finalizeResourceNow(resource));
if (resource instanceof LuceneCloseable) {
return runnable.transform(LuceneUtils::scheduleLucene);
} else {
return runnable;
}
}

public static Mono<Void> finalizeResource(SafeCloseable resource) {
Mono<Void> runnable = Mono.fromRunnable(resource::close);
if (resource instanceof LuceneCloseable) {
return runnable.transform(LuceneUtils::scheduleLucene);
} else {
return runnable;
}
}

public static void finalizeResourceNow(Resource<?> resource) {
if (resource.isAccessible()) {
resource.close();
}
}

public static void finalizeResource(SafeCloseable resource) {
resource.close();
}

public static void finalizeResourceNow(SafeCloseable resource) {
resource.close();
}
public static <V> Flux<V> handleDiscard(Flux<V> flux) {
return flux.doOnDiscard(Object.class, LLUtils::onDiscard);
}
public static <V> Mono<V> handleDiscard(Mono<V> flux) {
return flux.doOnDiscard(Object.class, LLUtils::onDiscard);
}
/**
* Obtain the resource, then run the closure.
* If the closure publisher returns a single element, then the resource is kept open,
* otherwise it is closed.
*/
public static <T extends AutoCloseable, U> Mono<U> singleOrClose(Mono<T> resourceMono,
Function<T, Mono<U>> closure) {
return Mono.usingWhen(resourceMono, resource -> {
if (resource instanceof LuceneCloseable) {
return closure.apply(resource).publishOn(luceneScheduler()).doOnSuccess(s -> {
if (s == null) {
try {
resource.close();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}).publishOn(Schedulers.parallel());
} else {
return closure.apply(resource).doOnSuccess(s -> {
if (s == null) {
try {
resource.close();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
});
}
}, resource -> Mono.empty(), (resource, ex) -> Mono.fromCallable(() -> {
resource.close();
return null;
}), r -> (r instanceof SafeCloseable s) ? LLUtils.finalizeResource(s) : Mono.fromCallable(() -> {
r.close();
return null;
}));
}
public static Disposable scheduleRepeated(Scheduler scheduler, Runnable action, Duration delay) {
var currentDisposable = new AtomicReference<Disposable>();
var disposed = new AtomicBoolean(false);
scheduleRepeatedInternal(scheduler, action, delay, currentDisposable, disposed);
return () -> {
disposed.set(true);
currentDisposable.get().dispose();
};
}
private static void scheduleRepeatedInternal(Scheduler scheduler,
Runnable action,
Duration delay,
AtomicReference<Disposable> currentDisposable,
AtomicBoolean disposed) {
if (disposed.get()) return;
currentDisposable.set(scheduler.schedule(() -> {
if (disposed.get()) return;
try {
action.run();
} catch (Throwable ex) {
logger.error(ex);
}
scheduleRepeatedInternal(scheduler, action, delay, currentDisposable, disposed);
}, delay.toMillis(), TimeUnit.MILLISECONDS));
}
public static boolean isAccessible(AbstractNativeReference abstractNativeReference) {
if (IS_ACCESSIBLE_METHOD_HANDLE != null) {
try {
@@ -778,220 +488,116 @@ public class LLUtils {
return true;
}

@Deprecated
public record DirectBuffer(@NotNull Buffer buffer, @NotNull ByteBuffer byteBuffer) {}

@NotNull
public static ByteBuffer newDirect(int size) {
return ByteBuffer.allocateDirect(size);
}

private static Drop<Buffer> drop() {
// We cannot reliably drop unsafe memory. We have to rely on the cleaner to do that.
return NO_OP_DROP;
}

public static Buf unmodifiableBytes(Buf previous) {
previous.freeze();
return previous;
}
public static boolean isReadOnlyDirect(Buffer inputBuffer) {
return inputBuffer instanceof BufferComponent component && component.readableNativeAddress() != 0;
}
public static ByteBuffer getReadOnlyDirect(Buffer inputBuffer) {
assert isReadOnlyDirect(inputBuffer);
return ((BufferComponent) inputBuffer).readableBuffer();
}
public static Buffer fromByteArray(BufferAllocator alloc, byte[] array) {
Buffer result = alloc.allocate(array.length);
result.writeBytes(array);
return result;
}
@NotNull
public static Buffer readDirectNioBuffer(BufferAllocator alloc, ToIntFunction<ByteBuffer> reader) {
var nullable = readNullableDirectNioBuffer(alloc, reader);
if (nullable == null) {
throw new IllegalStateException("A non-nullable buffer read operation tried to return a \"not found\" element");
}
return nullable;
}
public static Buffer compositeBuffer(BufferAllocator alloc, Send<Buffer> buffer) {
return buffer.receive();
}
@NotNull
public static Buffer compositeBuffer(BufferAllocator alloc,
@NotNull Send<Buffer> buffer1,
@NotNull Send<Buffer> buffer2) {
var b1 = buffer1.receive();
try (var b2 = buffer2.receive()) {
if (b1.writerOffset() < b1.capacity() || b2.writerOffset() < b2.capacity()) {
b1.ensureWritable(b2.readableBytes(), b2.readableBytes(), true);
b2.copyInto(b2.readerOffset(), b1, b1.writerOffset(), b2.readableBytes());
b1.writerOffset(b1.writerOffset() + b2.readableBytes());
return b1;
} else {
return alloc.compose(List.of(b1.send(), b2.send()));
}
}
}
@NotNull
public static Buffer compositeBuffer(BufferAllocator alloc,
@NotNull Send<Buffer> buffer1,
@NotNull Send<Buffer> buffer2,
@NotNull Send<Buffer> buffer3) {
var b1 = buffer1.receive();
try (var b2 = buffer2.receive()) {
try (var b3 = buffer3.receive()) {
if (b1.writerOffset() < b1.capacity()
|| b2.writerOffset() < b2.capacity()
|| b3.writerOffset() < b3.capacity()) {
b1.ensureWritable(b2.readableBytes(), b2.readableBytes(), true);
b2.copyInto(b2.readerOffset(), b1, b1.writerOffset(), b2.readableBytes());
b1.writerOffset(b1.writerOffset() + b2.readableBytes());
b1.ensureWritable(b3.readableBytes(), b3.readableBytes(), true);
b3.copyInto(b3.readerOffset(), b1, b1.writerOffset(), b3.readableBytes());
b1.writerOffset(b1.writerOffset() + b3.readableBytes());
return b1;
} else {
return alloc.compose(List.of(b1.send(), b2.send(), b3.send()));
}
}
}
}
public static <T> Mono<T> resolveDelta(Mono<Delta<T>> prev, UpdateReturnMode updateReturnMode) {
return prev.handle((delta, sink) -> {
switch (updateReturnMode) {
case GET_NEW_VALUE -> {
var current = delta.current();
if (current != null) {
sink.next(current);
} else {
sink.complete();
}
}
case GET_OLD_VALUE -> {
var previous = delta.previous();
if (previous != null) {
sink.next(previous);
} else {
sink.complete();
}
}
case NOTHING -> sink.complete();
default -> sink.error(new IllegalStateException());
}
});
}
public static Mono<Buffer> resolveLLDelta(Mono<LLDelta> prev, UpdateReturnMode updateReturnMode) {
return prev.mapNotNull(delta -> {
final Buffer previous = delta.previousUnsafe();
final Buffer current = delta.currentUnsafe();
return switch (updateReturnMode) {
case GET_NEW_VALUE -> {
if (previous != null && previous.isAccessible()) {
previous.close();
}
yield current;
}
case GET_OLD_VALUE -> {
if (current != null && current.isAccessible()) {
current.close();
}
yield previous;
}
case NOTHING -> {
if (previous != null && previous.isAccessible()) {
previous.close();
}
if (current != null && current.isAccessible()) {
current.close();
}
yield null;
}
};
});
}
public static <T, U> Mono<Delta<U>> mapDelta(Mono<Delta<T>> mono,
SerializationFunction<@NotNull T, @Nullable U> mapper) {
return mono.handle((delta, sink) -> {
try {
T prev = delta.previous();
T curr = delta.current();
U newPrev;
U newCurr;
if (prev != null) {
newPrev = mapper.apply(prev);
} else {
newPrev = null;
}
if (curr != null) {
newCurr = mapper.apply(curr);
} else {
newCurr = null;
}
sink.next(new Delta<>(newPrev, newCurr));
} catch (SerializationException ex) {
sink.error(ex);
}
});
}

public static boolean isInNonBlockingThread() {
if (IS_IN_NON_BLOCKING_THREAD_MH != null) {
try {
return (boolean) IS_IN_NON_BLOCKING_THREAD_MH.invokeExact();
} catch (Throwable e) {
throw new RuntimeException(e);
}
}
return false;
}
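With reactor now an optional dependency, callers that used to call Schedulers.isInNonBlockingThread() directly can guard blocking work through this helper; when reactor is absent the check simply returns false. Editor's sketch, mirroring the removed ensureBlocking helper:

public static void ensureBlocking() {
if (LLUtils.isInNonBlockingThread()) {
throw new UnsupportedOperationException("Called a blocking operation in a nonblocking thread");
}
}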
public static <U> Mono<Delta<U>> mapLLDelta(Mono<LLDelta> mono,
SerializationFunction<@NotNull Buffer, @Nullable U> mapper) {
return Mono.usingWhen(mono, delta -> Mono.fromCallable(() -> {
Buffer prev = delta.previousUnsafe();
Buffer curr = delta.currentUnsafe();
U newPrev;
U newCurr;
if (prev != null) {
newPrev = mapper.apply(prev);
} else {
newPrev = null;
}
if (curr != null) {
newCurr = mapper.apply(curr);
} else {
newCurr = null;
}
return new Delta<>(newPrev, newCurr);
}), LLUtils::finalizeResource);
}

public static Buf copy(Buf buf) {
return buf.copy();
}

public static Buf asByteList(byte[] array) {
return Buf.wrap(array);
}

public static Buf toByteList(byte[] array) {
return Buf.copyOf(array);
}

public static Buf compositeBuffer(Buf buffer) {
return buffer;
}

@NotNull
public static Buf compositeBuffer(Buf buffer1, Buf buffer2) {
// todo: create a composite buffer without allocating a new array
var out = Buf.create(buffer1.size() + buffer2.size());
out.addAll(buffer1);
out.addAll(buffer2);
return out;
}
@NotNull
public static Buf compositeBuffer(Buf buffer1, Buf buffer2, Buf buffer3) {
// todo: create a composite buffer without allocating a new array
var out = Buf.create(buffer1.size() + buffer2.size() + buffer3.size());
out.addAll(buffer1);
out.addAll(buffer2);
out.addAll(buffer3);
return out;
}
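Usage sketch (editor's example): composing a key prefix and suffix into one buffer, as DatabaseMapDictionary.toKey does further down.

Buf prefix = Buf.wrap(new byte[] {1, 2});
Buf suffix = Buf.wrap(new byte[] {3, 4});
Buf key = LLUtils.compositeBuffer(prefix, suffix); // contents: [1, 2, 3, 4]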
public static <T> T resolveDelta(Delta<T> delta, UpdateReturnMode updateReturnMode) {
return switch (updateReturnMode) {
case GET_NEW_VALUE -> delta.current();
case GET_OLD_VALUE -> delta.previous();
case NOTHING -> null;
};
}
public static Buf resolveLLDelta(LLDelta delta, UpdateReturnMode updateReturnMode) {
final Buf previous = delta.previous();
final Buf current = delta.current();
return switch (updateReturnMode) {
case GET_NEW_VALUE -> current;
case GET_OLD_VALUE -> previous;
case NOTHING -> null;
};
}
public static <T, U> Delta<U> mapDelta(Delta<T> delta, SerializationFunction<@NotNull T, @Nullable U> mapper) {
T prev = delta.previous();
T curr = delta.current();
U newPrev;
U newCurr;
if (prev != null) {
newPrev = mapper.apply(prev);
} else {
newPrev = null;
}
if (curr != null) {
newCurr = mapper.apply(curr);
} else {
newCurr = null;
}
return new Delta<>(newPrev, newCurr);
}
public static <U> Delta<U> mapLLDelta(LLDelta delta, SerializationFunction<@NotNull Buf, @Nullable U> mapper) {
var prev = delta.previous();
var curr = delta.current();
U newPrev;
U newCurr;
if (prev != null) {
newPrev = mapper.apply(prev);
} else {
newPrev = null;
}
if (curr != null) {
newCurr = mapper.apply(curr);
} else {
newCurr = null;
}
return new Delta<>(newPrev, newCurr);
} }
public static <R, V> boolean isDeltaChanged(Delta<V> delta) {
return !Objects.equals(delta.previous(), delta.current());
}
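A concrete case (editor's example): after an update that changed 1 to 2, the two return modes pick opposite sides of the same Delta.

Delta<Long> delta = new Delta<>(1L, 2L); // previous = 1, current = 2
Long newValue = LLUtils.resolveDelta(delta, UpdateReturnMode.GET_NEW_VALUE); // 2
Long oldValue = LLUtils.resolveDelta(delta, UpdateReturnMode.GET_OLD_VALUE); // 1
boolean changed = LLUtils.isDeltaChanged(delta); // true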
public static boolean isDirect(Buffer key) {
var readableComponents = key.countReadableComponents();
if (readableComponents == 0) {
return true;
} else if (readableComponents == 1) {
return key.isDirect();
} else {
return false;
}
}
public static String deserializeString(Send<Buffer> bufferSend, int readerOffset, int length, Charset charset) {
try (var buffer = bufferSend.receive()) {
byte[] bytes = new byte[Math.min(length, buffer.readableBytes())];
buffer.copyInto(readerOffset, bytes, 0, length);
return new String(bytes, charset);
}
}
public static String deserializeString(@NotNull Buffer buffer, int readerOffset, int length, Charset charset) {
byte[] bytes = new byte[Math.min(length, buffer.readableBytes())];
buffer.copyInto(readerOffset, bytes, 0, length);
return new String(bytes, charset);
}
public static int utf8MaxBytes(String deserialized) {
return deserialized.length() * 3;
}

@@ -1015,18 +621,14 @@ public class LLUtils {
}

private static void closeResource(Object next, boolean manual) {
if (next instanceof SafeCloseable closeable) {
if (manual || closeable instanceof DiscardingCloseable) {
if (!manual && !LuceneUtils.isLuceneThread() && closeable instanceof LuceneCloseable luceneCloseable) {
luceneCloseable.close();
} else {
closeable.close();
}
}
} else if (next instanceof List<?> iterable) {
iterable.forEach(obj -> closeResource(obj, manual));
} else if (next instanceof Set<?> iterable) {
@@ -1080,4 +682,10 @@ public class LLUtils {
return term.getValueBytesRef();
}
}
public static <X> void consume(Stream<X> stream) {
try (stream) {
stream.forEach(NULL_CONSUMER);
}
}
}

View File

@@ -1,6 +1,6 @@
package it.cavallium.dbengine.database;

import it.cavallium.dbengine.buffers.Buf;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.Optional;
@@ -9,20 +9,20 @@ import java.util.function.Function;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

public final class OptionalBuf {

private static final OptionalBuf EMPTY = new OptionalBuf(null);

private final Buf buffer;

private OptionalBuf(@Nullable Buf buffer) {
this.buffer = buffer;
}

public static OptionalBuf ofNullable(@Nullable Buf buffer) {
return new OptionalBuf(buffer);
}

public static OptionalBuf of(@NotNull Buf buffer) {
Objects.requireNonNull(buffer);
return new OptionalBuf(buffer);
}
@@ -31,13 +31,6 @@ public final class OptionalBuf implements DiscardingCloseable {
return EMPTY;
}
@Override
public void close() {
if (buffer != null && buffer.isAccessible()) {
buffer.close();
}
}
@Override
public String toString() {
if (buffer != null) {
@@ -66,21 +59,21 @@ public final class OptionalBuf implements DiscardingCloseable {
return buffer != null ? buffer.hashCode() : 0;
}

public Buf get() {
if (buffer == null) {
throw new NoSuchElementException();
}
return buffer;
}

public Buf orElse(Buf alternative) {
if (buffer == null) {
return alternative;
}
return buffer;
}

public void ifPresent(Consumer<Buf> consumer) {
if (buffer != null) {
consumer.accept(buffer);
}
@@ -94,7 +87,7 @@ public final class OptionalBuf implements DiscardingCloseable {
return buffer == null;
}
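Usage sketch (editor's example): OptionalBuf now behaves like a plain Optional, with no close() to remember.

OptionalBuf maybe = OptionalBuf.ofNullable(buffer); // buffer may be null
int size = maybe.map(Buf::size).orElse(0);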
public <U> Optional<U> map(Function<Buf, U> mapper) {
if (buffer != null) {
return Optional.of(mapper.apply(buffer));
} else {

View File

@ -1,57 +0,0 @@
package it.cavallium.dbengine.database;
import io.netty5.util.Send;
import java.util.function.Supplier;
public abstract class RangeSupplier implements DiscardingCloseable, Supplier<LLRange> {
public static RangeSupplier of(Supplier<LLRange> supplier) {
return new SimpleSupplier(supplier);
}
public static RangeSupplier ofOwned(LLRange supplier) {
return new CopySupplier(supplier);
}
public static RangeSupplier ofShared(LLRange supplier) {
return new SimpleSupplier(supplier::copy);
}
private static final class SimpleSupplier extends RangeSupplier {
private final Supplier<LLRange> supplier;
public SimpleSupplier(Supplier<LLRange> supplier) {
this.supplier = supplier;
}
@Override
public LLRange get() {
return supplier.get();
}
@Override
public void close() {
}
}
private static final class CopySupplier extends RangeSupplier {
private final LLRange supplier;
public CopySupplier(LLRange supplier) {
this.supplier = supplier;
}
@Override
public LLRange get() {
return supplier.copy();
}
@Override
public void close() {
supplier.close();
}
}
}

View File

@@ -1,7 +1,6 @@
package it.cavallium.dbengine.database;

public interface SafeCloseable extends AutoCloseable {

void close();
}

View File

@@ -4,7 +4,7 @@ import it.cavallium.dbengine.database.collections.DatabaseStage;
import java.util.Map.Entry;
import java.util.Objects;

public final class SubStageEntry<T, U extends DatabaseStage<?>> implements Entry<T, U> {

private final T key;
private final U value;
@@ -14,13 +14,6 @@ public final class SubStageEntry<T, U extends DatabaseStage<?>> implements DiscardingCloseable, Entry<T, U> {
this.value = value;
}
@Override
public void close() {
if (value != null) {
value.close();
}
}
@Override
public T getKey() {
return key;

View File

@@ -1,11 +1,11 @@
package it.cavallium.dbengine.database.collections;

import it.cavallium.dbengine.buffers.Buf;
import it.cavallium.dbengine.buffers.BufDataInput;
import it.cavallium.dbengine.buffers.BufDataOutput;
import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.serialization.SerializationException;
import it.cavallium.dbengine.database.serialization.Serializer;
import org.jetbrains.annotations.NotNull;

public class DatabaseEmpty {
@@ -13,16 +13,16 @@ public class DatabaseEmpty {
@SuppressWarnings({"unused", "InstantiationOfUtilityClass"})
public static final Nothing NOTHING = new Nothing();

public static Serializer<Nothing> nothingSerializer() {
return new Serializer<>() {

@Override
public @NotNull Nothing deserialize(@NotNull BufDataInput in) throws SerializationException {
return NOTHING;
}

@Override
public void serialize(@NotNull Nothing deserialized, BufDataOutput out) throws SerializationException {

}
@@ -36,8 +36,8 @@ public class DatabaseEmpty {
private DatabaseEmpty() {
}

public static DatabaseStageEntry<Nothing> create(LLDictionary dictionary, Buf key) {
return new DatabaseMapSingle<>(dictionary, key, nothingSerializer());
}
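The Nothing serializer writes zero bytes, so an entry created this way acts as a pure presence marker. Editor's sketch; the set/get calls are assumed from the DatabaseStage contract and are not shown in this commit:

var marker = DatabaseEmpty.create(dictionary, key);
marker.set(DatabaseEmpty.NOTHING); // stores an empty value under `key`
boolean present = marker.get(null) != null; // a non-null Nothing means the key exists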
public static final class Nothing {

View File

@@ -1,14 +1,12 @@
package it.cavallium.dbengine.database.collections;

import it.cavallium.dbengine.buffers.BufDataInput;
import it.cavallium.dbengine.buffers.BufDataOutput;
import it.cavallium.dbengine.database.LLKeyValueDatabaseStructure;
import it.cavallium.dbengine.database.LLSingleton;
import it.cavallium.dbengine.database.LLSnapshot;
import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
import org.jetbrains.annotations.Nullable;

public class DatabaseInt implements LLKeyValueDatabaseStructure {
@@ -17,28 +15,18 @@ public class DatabaseInt implements LLKeyValueDatabaseStructure {
public DatabaseInt(LLSingleton singleton) {
this.singleton = singleton;
this.serializer = SerializerFixedBinaryLength.intSerializer();
}

public Integer get(@Nullable LLSnapshot snapshot) {
var result = singleton.get(snapshot);
return serializer.deserialize(BufDataInput.create(result));
}

public void set(int value) {
var buf = BufDataOutput.createLimited(Integer.BYTES);
serializer.serialize(value, buf);
singleton.set(buf.asList());
}
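With the reactive wrappers gone, reads and writes are plain calls. Editor's example; `singleton` is a hypothetical LLSingleton obtained from the database:

DatabaseInt counter = new DatabaseInt(singleton);
counter.set(42);
Integer value = counter.get(null); // null snapshot = latest value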
@Override

View File

@@ -1,16 +1,14 @@
package it.cavallium.dbengine.database.collections;

import it.cavallium.dbengine.buffers.Buf;
import it.cavallium.dbengine.buffers.BufDataInput;
import it.cavallium.dbengine.buffers.BufDataOutput;
import it.cavallium.dbengine.database.LLKeyValueDatabaseStructure;
import it.cavallium.dbengine.database.LLSingleton;
import it.cavallium.dbengine.database.LLSnapshot;
import it.cavallium.dbengine.database.UpdateReturnMode;
import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
import org.jetbrains.annotations.Nullable;

public class DatabaseLong implements LLKeyValueDatabaseStructure {
@@ -20,82 +18,63 @@ public class DatabaseLong implements LLKeyValueDatabaseStructure {
public DatabaseLong(LLSingleton singleton) {
this.singleton = singleton;
this.serializer = SerializerFixedBinaryLength.longSerializer();
this.bugSerializer = SerializerFixedBinaryLength.intSerializer();
}

public Long get(@Nullable LLSnapshot snapshot) {
var result = BufDataInput.create(singleton.get(snapshot));
if (result.available() == 4) {
return (long) (int) bugSerializer.deserialize(result);
} else {
return serializer.deserialize(result);
}
}

public Long incrementAndGet() {
return addAnd(1, UpdateReturnMode.GET_NEW_VALUE);
}

public Long getAndIncrement() {
return addAnd(1, UpdateReturnMode.GET_OLD_VALUE);
}

public Long decrementAndGet() {
return addAnd(-1, UpdateReturnMode.GET_NEW_VALUE);
}

public Long getAndDecrement() {
return addAnd(-1, UpdateReturnMode.GET_OLD_VALUE);
}

public Long addAndGet(long count) {
return addAnd(count, UpdateReturnMode.GET_NEW_VALUE);
}

public Long getAndAdd(long count) {
return addAnd(count, UpdateReturnMode.GET_OLD_VALUE);
}

private Long addAnd(long count, UpdateReturnMode updateReturnMode) {
var result = singleton.update(prev -> {
if (prev != null) {
var prevLong = prev.getLong(0);
var buf = Buf.createZeroes(Long.BYTES);
buf.setLong(0, prevLong + count);
return buf;
} else {
var buf = Buf.createZeroes(Long.BYTES);
buf.setLong(0, count);
return buf;
}
}, updateReturnMode);
return result.getLong(0);
}

public void set(long value) {
var buf = BufDataOutput.createLimited(Long.BYTES);
serializer.serialize(value, buf);
singleton.set(buf.asList());
}
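The GET_NEW_VALUE/GET_OLD_VALUE pairs follow AtomicLong naming. Editor's example; `singleton` is hypothetical as above:

DatabaseLong counter = new DatabaseLong(singleton);
counter.set(10);
Long a = counter.incrementAndGet(); // a = 11, stored value 11
Long b = counter.getAndIncrement(); // b = 11, stored value 12
Long c = counter.addAndGet(5); // c = 17, stored value 17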
@Override

View File

@@ -1,13 +1,11 @@
package it.cavallium.dbengine.database.collections;

import it.cavallium.dbengine.buffers.Buf;
import it.cavallium.dbengine.buffers.BufDataInput;
import it.cavallium.dbengine.buffers.BufDataOutput;
import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.Delta;
import it.cavallium.dbengine.database.LLDelta;
import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.LLDictionaryResultType;
import it.cavallium.dbengine.database.LLEntry;
@@ -22,22 +20,21 @@ import it.cavallium.dbengine.database.serialization.SerializationException;
import it.cavallium.dbengine.database.serialization.SerializationFunction;
import it.cavallium.dbengine.database.serialization.Serializer;
import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
import it.unimi.dsi.fastutil.objects.Object2ObjectLinkedOpenHashMap;
import it.unimi.dsi.fastutil.objects.Object2ObjectSortedMap;
import it.unimi.dsi.fastutil.objects.Object2ObjectSortedMaps;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

/**
 * Optimized implementation of "DatabaseMapDictionary with SubStageGetterSingle"
@@ -50,11 +47,11 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
private final Serializer<U> valueSerializer;

protected DatabaseMapDictionary(LLDictionary dictionary,
@Nullable Buf prefixKey,
SerializerFixedBinaryLength<T> keySuffixSerializer,
Serializer<U> valueSerializer) {
// Do not retain or release or use the prefixKey here
super(dictionary, prefixKey, keySuffixSerializer, new SubStageGetterSingle<>(valueSerializer), 0);
this.valueSerializer = valueSerializer;
}

@@ -65,65 +62,48 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
}

public static <T, U> DatabaseMapDictionary<T, U> tail(LLDictionary dictionary,
@Nullable Buf prefixKey,
SerializerFixedBinaryLength<T> keySuffixSerializer,
Serializer<U> valueSerializer) {
return new DatabaseMapDictionary<>(dictionary, prefixKey, keySuffixSerializer, valueSerializer);
}
public static <K, V> Stream<Entry<K, V>> getLeavesFrom(DatabaseMapDictionary<K, V> databaseMapDictionary,
CompositeSnapshot snapshot,
@Nullable K keyMin,
@Nullable K keyMax,
boolean reverse,
boolean smallRange) {
if (keyMin != null || keyMax != null) {
return databaseMapDictionary.getAllValues(snapshot, keyMin, keyMax, reverse, smallRange);
} else {
return databaseMapDictionary.getAllValues(snapshot, smallRange);
}
}

public static <K> Stream<K> getKeyLeavesFrom(DatabaseMapDictionary<K, ?> databaseMapDictionary,
CompositeSnapshot snapshot,
@Nullable K keyMin,
@Nullable K keyMax,
boolean reverse,
boolean smallRange) {
Stream<? extends Entry<K, ? extends DatabaseStageEntry<?>>> stagesFlux;
if (keyMin != null || keyMax != null) {
stagesFlux = databaseMapDictionary.getAllStages(snapshot, keyMin, keyMax, reverse, smallRange);
} else {
stagesFlux = databaseMapDictionary.getAllStages(snapshot, smallRange);
}
return stagesFlux.map(Entry::getKey);
}
private @Nullable U deserializeValue(T keySuffix, BufDataInput value) {
try {
return valueSerializer.deserialize(value);
} catch (IndexOutOfBoundsException ex) {
@@ -131,19 +111,16 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
if (exMessage != null && exMessage.contains("read 0 to 0, write 0 to ")) {
var totalZeroBytesErrors = this.totalZeroBytesErrors.incrementAndGet();
if (totalZeroBytesErrors < 512 || totalZeroBytesErrors % 10000 == 0) {
var keySuffixBytes = serializeKeySuffixToKey(keySuffix);
try {
LOG.error("Unexpected zero-bytes value at " + dictionary.getDatabaseName() + ":" + dictionary.getColumnName()
+ ":" + LLUtils.toStringSafe(keyPrefix) + ":" + keySuffix + "(" + LLUtils.toStringSafe(keySuffixBytes) + ") total=" + totalZeroBytesErrors);
} catch (SerializationException e) {
LOG.error("Unexpected zero-bytes value at " + dictionary.getDatabaseName() + ":" + dictionary.getColumnName()
+ ":" + LLUtils.toStringSafe(keyPrefix) + ":" + keySuffix + "(?) total=" + totalZeroBytesErrors);
}
}
}
return null;
@@ -153,139 +130,110 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
}
}

private Buf serializeValue(U value) throws SerializationException {
var valSizeHint = valueSerializer.getSerializedSizeHint();
if (valSizeHint == -1) valSizeHint = 128;
var valBuf = BufDataOutput.create(valSizeHint);
valueSerializer.serialize(value, valBuf);
return valBuf.asList();
}
private Buf serializeKeySuffixToKey(T keySuffix) throws SerializationException {
BufDataOutput keyBuf = BufDataOutput.createLimited(keyPrefixLength + keySuffixLength + keyExtLength);
if (keyPrefix != null) {
keyBuf.writeBytes(keyPrefix);
}
assert keyBuf.size() == keyPrefixLength;
serializeSuffixTo(keySuffix, keyBuf);
assert keyBuf.size() == keyPrefixLength + keySuffixLength + keyExtLength;
return keyBuf.asList();
}

private Buf toKey(Buf suffixKey) {
assert suffixKeyLengthConsistency(suffixKey.size());
if (keyPrefix != null) {
var result = keyPrefix.copy();
result.addAll(suffixKey);
assert result.size() == keyPrefixLength + keySuffixLength + keyExtLength;
return result;
} else {
assert suffixKey.size() == keyPrefixLength + keySuffixLength + keyExtLength;
return suffixKey;
}
}
@Override
public Object2ObjectSortedMap<T, U> get(@Nullable CompositeSnapshot snapshot) {
var map = dictionary
.getRange(resolveSnapshot(snapshot), range, false, true)
.map(entry -> {
Entry<T, U> deserializedEntry;
T key;
// serializedKey
var buf1 = BufDataInput.create(entry.getKey());
var serializedValue = BufDataInput.create(entry.getValue());
// after this, it becomes serializedSuffixAndExt
buf1.skipNBytes(keyPrefixLength);
suffixAndExtKeyConsistency(buf1.available());

key = deserializeSuffix(buf1);
U value = valueSerializer.deserialize(serializedValue);
deserializedEntry = Map.entry(key, value);
return deserializedEntry;
})
.collect(Collectors.toMap(Entry::getKey, Entry::getValue, (a, b) -> a, Object2ObjectLinkedOpenHashMap::new));
return map.isEmpty() ? null : map;
}
@Override
public Object2ObjectSortedMap<T, U> setAndGetPrevious(Object2ObjectSortedMap<T, U> value) {
Object2ObjectSortedMap<T, U> prev = this.get(null);
if (value == null || value.isEmpty()) {
dictionary.clear();
} else {
dictionary.setRange(range, value.entrySet().stream().map(this::serializeEntry), true);
}
return prev;
}

@Override
public Object2ObjectSortedMap<T, U> clearAndGetPrevious() {
return this.setAndGetPrevious(Object2ObjectSortedMaps.emptyMap());
}
@Override
public long leavesCount(@Nullable CompositeSnapshot snapshot, boolean fast) {
return dictionary.sizeRange(resolveSnapshot(snapshot), range, fast);
}

@Override
public boolean isEmpty(@Nullable CompositeSnapshot snapshot) {
return dictionary.isRangeEmpty(resolveSnapshot(snapshot), range, false);
}

@Override
public @NotNull DatabaseStageEntry<U> at(@Nullable CompositeSnapshot snapshot, T keySuffix) {
return new DatabaseMapSingle<>(dictionary, serializeKeySuffixToKey(keySuffix), valueSerializer);
}

@Override
public boolean containsKey(@Nullable CompositeSnapshot snapshot, T keySuffix) {
return !dictionary.isRangeEmpty(resolveSnapshot(snapshot), LLRange.single(serializeKeySuffixToKey(keySuffix)), true);
}
@Override
public U getValue(@Nullable CompositeSnapshot snapshot, T keySuffix) {
var keySuffixBuf = serializeKeySuffixToKey(keySuffix);
Buf value = dictionary.get(resolveSnapshot(snapshot), keySuffixBuf);
return deserializeValue(keySuffix, BufDataInput.create(value));
}

@Override
public void putValue(T keySuffix, U value) {
var keyMono = serializeKeySuffixToKey(keySuffix);
var valueMono = serializeValue(value);
dictionary.put(keyMono, valueMono, LLDictionaryResultType.VOID);
}
@Override
@@ -294,21 +242,19 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
}

@Override
public U updateValue(T keySuffix,
UpdateReturnMode updateReturnMode,
SerializationFunction<@Nullable U, @Nullable U> updater) {
var keyMono = serializeKeySuffixToKey(keySuffix);
var result = dictionary.update(keyMono, getSerializedUpdater(updater), updateReturnMode);
return deserializeValue(keySuffix, BufDataInput.create(result));
}

@Override
public Delta<U> updateValueAndGetDelta(T keySuffix, SerializationFunction<@Nullable U, @Nullable U> updater) {
var keyMono = serializeKeySuffixToKey(keySuffix);
LLDelta delta = dictionary.updateAndGetDelta(keyMono, getSerializedUpdater(updater));
return LLUtils.mapLLDelta(delta, in -> valueSerializer.deserialize(BufDataInput.create(in)));
}
public BinarySerializationFunction getSerializedUpdater(SerializationFunction<@Nullable U, @Nullable U> updater) { public BinarySerializationFunction getSerializedUpdater(SerializationFunction<@Nullable U, @Nullable U> updater) {
@ -317,9 +263,7 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
if (oldSerialized == null) { if (oldSerialized == null) {
result = updater.apply(null); result = updater.apply(null);
} else { } else {
try (oldSerialized) { result = updater.apply(valueSerializer.deserialize(BufDataInput.create(oldSerialized)));
result = updater.apply(valueSerializer.deserialize(oldSerialized));
}
} }
if (result == null) { if (result == null) {
return null; return null;
@ -329,101 +273,81 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
}; };
} }
-  public KVSerializationFunction<@NotNull T, @Nullable Buffer, @Nullable Buffer> getSerializedUpdater(
+  public KVSerializationFunction<@NotNull T, @Nullable Buf, @Nullable Buf> getSerializedUpdater(
       KVSerializationFunction<@NotNull T, @Nullable U, @Nullable U> updater) {
     return (key, oldSerialized) -> {
-      try (oldSerialized) {
-        U result;
-        if (oldSerialized == null) {
-          result = updater.apply(key, null);
-        } else {
-          try (oldSerialized) {
-            result = updater.apply(key, valueSerializer.deserialize(oldSerialized));
-          }
-        }
-        if (result == null) {
-          return null;
-        } else {
-          return serializeValue(result);
-        }
-      }
+      U result;
+      if (oldSerialized == null) {
+        result = updater.apply(key, null);
+      } else {
+        result = updater.apply(key, valueSerializer.deserialize(BufDataInput.create(oldSerialized)));
+      }
+      if (result == null) {
+        return null;
+      } else {
+        return serializeValue(result);
+      }
     };
   }
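Both getSerializedUpdater overloads lift a value-level updater to the byte level: deserialize the old bytes when present, apply the user function, and re-serialize a non-null result. The same lifting on strings and UTF-8 bytes (a standalone sketch; the names are illustrative, not this project's API):

import java.nio.charset.StandardCharsets;
import java.util.function.UnaryOperator;

final class UpdaterSketch {
  // wraps a value-level updater into a byte-level one, like getSerializedUpdater above
  static UnaryOperator<byte[]> serializedUpdater(UnaryOperator<String> updater) {
    return oldSerialized -> {
      String old = oldSerialized == null ? null : new String(oldSerialized, StandardCharsets.UTF_8);
      String result = updater.apply(old);
      return result == null ? null : result.getBytes(StandardCharsets.UTF_8);
    };
  }

  public static void main(String[] args) {
    var up = serializedUpdater(old -> old == null ? "v1" : old + "+");
    System.out.println(new String(up.apply(null), StandardCharsets.UTF_8)); // v1
  }
}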
   @Override
-  public Mono<U> putValueAndGetPrevious(T keySuffix, U value) {
-    var keyMono = Mono.fromCallable(() -> serializeKeySuffixToKey(keySuffix));
-    var valueMono = Mono.fromCallable(() -> serializeValue(value));
-    return Mono.usingWhen(dictionary.put(keyMono, valueMono, LLDictionaryResultType.PREVIOUS_VALUE),
-        valueBuf -> Mono.fromCallable(() -> deserializeValue(keySuffix, valueBuf)),
-        LLUtils::finalizeResource
-    );
+  public U putValueAndGetPrevious(T keySuffix, U value) {
+    var keyMono = serializeKeySuffixToKey(keySuffix);
+    var valueMono = serializeValue(value);
+    var valueBuf = dictionary.put(keyMono, valueMono, LLDictionaryResultType.PREVIOUS_VALUE);
+    return deserializeValue(keySuffix, BufDataInput.create(valueBuf));
   }

   @Override
-  public Mono<Boolean> putValueAndGetChanged(T keySuffix, U value) {
-    var keyMono = Mono.fromCallable(() -> serializeKeySuffixToKey(keySuffix));
-    var valueMono = Mono.fromCallable(() -> serializeValue(value));
-    return Mono
-        .usingWhen(dictionary.put(keyMono, valueMono, LLDictionaryResultType.PREVIOUS_VALUE),
-            valueBuf -> Mono.fromCallable(() -> deserializeValue(keySuffix, valueBuf)),
-            LLUtils::finalizeResource
-        )
-        .map(oldValue -> !Objects.equals(oldValue, value))
-        .defaultIfEmpty(value != null);
+  public boolean putValueAndGetChanged(T keySuffix, U value) {
+    var keyMono = serializeKeySuffixToKey(keySuffix);
+    var valueMono = serializeValue(value);
+    var oldValueBuf = dictionary.put(keyMono, valueMono, LLDictionaryResultType.PREVIOUS_VALUE);
+    var oldValue = deserializeValue(keySuffix, BufDataInput.create(oldValueBuf));
+    if (oldValue == null) {
+      return value != null;
+    } else {
+      return !Objects.equals(oldValue, value);
+    }
   }

   @Override
-  public Mono<Void> remove(T keySuffix) {
-    var keyMono = Mono.fromCallable(() -> serializeKeySuffixToKey(keySuffix));
-    return dictionary
-        .remove(keyMono, LLDictionaryResultType.VOID)
-        .doOnNext(LLUtils::finalizeResourceNow)
-        .then();
+  public void remove(T keySuffix) {
+    var keyMono = serializeKeySuffixToKey(keySuffix);
+    dictionary.remove(keyMono, LLDictionaryResultType.VOID);
   }

   @Override
-  public Mono<U> removeAndGetPrevious(T keySuffix) {
-    var keyMono = Mono.fromCallable(() -> serializeKeySuffixToKey(keySuffix));
-    return Mono.usingWhen(dictionary.remove(keyMono, LLDictionaryResultType.PREVIOUS_VALUE),
-        valueBuf -> Mono.fromCallable(() -> deserializeValue(keySuffix, valueBuf)),
-        LLUtils::finalizeResource
-    );
+  public U removeAndGetPrevious(T keySuffix) {
+    var keyMono = serializeKeySuffixToKey(keySuffix);
+    var valueBuf = dictionary.remove(keyMono, LLDictionaryResultType.PREVIOUS_VALUE);
+    return deserializeValue(keySuffix, BufDataInput.create(valueBuf));
   }

   @Override
-  public Mono<Boolean> removeAndGetStatus(T keySuffix) {
-    var keyMono = Mono.fromCallable(() -> serializeKeySuffixToKey(keySuffix));
-    return dictionary
-        .remove(keyMono, LLDictionaryResultType.PREVIOUS_VALUE_EXISTENCE)
-        .map(response -> LLUtils.responseToBoolean(response));
+  public boolean removeAndGetStatus(T keySuffix) {
+    var keyMono = serializeKeySuffixToKey(keySuffix);
+    return LLUtils.responseToBoolean(dictionary.remove(keyMono, LLDictionaryResultType.PREVIOUS_VALUE_EXISTENCE));
   }

   @Override
-  public Flux<Optional<U>> getMulti(@Nullable CompositeSnapshot snapshot, Flux<T> keys) {
+  public Stream<Optional<U>> getMulti(@Nullable CompositeSnapshot snapshot, Stream<T> keys) {
     var mappedKeys = keys.map(keySuffix -> serializeKeySuffixToKey(keySuffix));
     return dictionary
         .getMulti(resolveSnapshot(snapshot), mappedKeys)
         .map(valueBufOpt -> {
-          try (valueBufOpt) {
-            if (valueBufOpt.isPresent()) {
-              return Optional.of(valueSerializer.deserialize(valueBufOpt.get()));
-            } else {
-              return Optional.empty();
-            }
-          }
+          if (valueBufOpt.isPresent()) {
+            return Optional.of(valueSerializer.deserialize(BufDataInput.create(valueBufOpt.get())));
+          } else {
+            return Optional.empty();
+          }
         });
   }
   private LLEntry serializeEntry(T keySuffix, U value) throws SerializationException {
     var key = serializeKeySuffixToKey(keySuffix);
-    try {
-      var serializedValue = serializeValue(value);
-      return LLEntry.of(key, serializedValue);
-    } catch (Throwable t) {
-      key.close();
-      throw t;
-    }
+    var serializedValue = serializeValue(value);
+    return LLEntry.of(key, serializedValue);
   }

   private LLEntry serializeEntry(Entry<T, U> entry) throws SerializationException {
@@ -431,59 +355,56 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
   }

   @Override
-  public Mono<Void> putMulti(Flux<Entry<T, U>> entries) {
-    var serializedEntries = entries.map(entry -> serializeEntry(entry));
-    return dictionary.putMulti(serializedEntries);
+  public void putMulti(Stream<Entry<T, U>> entries) {
+    try (var serializedEntries = entries.map(entry -> serializeEntry(entry))) {
+      dictionary.putMulti(serializedEntries);
+    }
   }

   @Override
-  public Flux<Boolean> updateMulti(Flux<T> keys,
+  public Stream<Boolean> updateMulti(Stream<T> keys,
       KVSerializationFunction<T, @Nullable U, @Nullable U> updater) {
-    var sharedKeys = keys.publish().refCount(2);
-    var serializedKeys = sharedKeys.map(keySuffix -> serializeKeySuffixToKey(keySuffix));
+    List<T> sharedKeys = keys.toList();
+    var serializedKeys = sharedKeys.stream().map(keySuffix -> serializeKeySuffixToKey(keySuffix));
     var serializedUpdater = getSerializedUpdater(updater);
-    return dictionary.updateMulti(sharedKeys, serializedKeys, serializedUpdater);
+    return dictionary.updateMulti(sharedKeys.stream(), serializedKeys, serializedUpdater);
   }
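A java.util.stream.Stream can be consumed only once, which is why the new updateMulti first materializes the keys with toList() and then derives the two streams it needs from the list; the reactive version instead shared one Flux via publish().refCount(2). A minimal demonstration of the constraint and the workaround:

import java.util.List;
import java.util.stream.Stream;

public final class StreamReuseDemo {
  public static void main(String[] args) {
    Stream<String> keys = Stream.of("k1", "k2", "k3");
    List<String> shared = keys.toList();          // consume the one-shot stream once
    long n = shared.stream().count();             // first derived stream
    long m = shared.stream().filter(k -> k.endsWith("1")).count(); // second derived stream
    System.out.println(n + " " + m);              // 3 1
    // keys.count(); // would throw IllegalStateException: stream has already been operated upon
  }
}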
   @Override
-  public Flux<SubStageEntry<T, DatabaseStageEntry<U>>> getAllStages(@Nullable CompositeSnapshot snapshot, boolean smallRange) {
-    return getAllStages(snapshot, rangeMono, false, smallRange);
+  public Stream<SubStageEntry<T, DatabaseStageEntry<U>>> getAllStages(@Nullable CompositeSnapshot snapshot, boolean smallRange) {
+    return getAllStages(snapshot, range, false, smallRange);
   }

   private LLRange getPatchedRange(@NotNull LLRange range, @Nullable T keyMin, @Nullable T keyMax)
       throws SerializationException {
-    Buffer keyMinBuf = serializeSuffixForRange(keyMin);
+    Buf keyMinBuf = serializeSuffixForRange(keyMin);
     if (keyMinBuf == null) {
-      keyMinBuf = range.getMinCopy();
+      keyMinBuf = range.getMin();
     }
-    Buffer keyMaxBuf = serializeSuffixForRange(keyMax);
+    Buf keyMaxBuf = serializeSuffixForRange(keyMax);
     if (keyMaxBuf == null) {
-      keyMaxBuf = range.getMaxCopy();
+      keyMaxBuf = range.getMax();
     }
-    return LLRange.ofUnsafe(keyMinBuf, keyMaxBuf);
+    return LLRange.of(keyMinBuf, keyMaxBuf);
   }

-  private Buffer serializeSuffixForRange(@Nullable T key) throws SerializationException {
+  private Buf serializeSuffixForRange(@Nullable T key) throws SerializationException {
     if (key == null) {
       return null;
     }
-    var keyWithoutExtBuf =
-        keyPrefixSupplier == null ? alloc.allocate(keySuffixLength + keyExtLength) : keyPrefixSupplier.get();
-    try {
-      keyWithoutExtBuf.ensureWritable(keySuffixLength + keyExtLength);
-      serializeSuffix(key, keyWithoutExtBuf);
-      return keyWithoutExtBuf;
-    } catch (Throwable ex) {
-      keyWithoutExtBuf.close();
-      throw ex;
-    }
+    var keyWithoutExtBuf = BufDataOutput.createLimited(keyPrefixLength + keySuffixLength);
+    if (keyPrefix != null) {
+      keyWithoutExtBuf.writeBytes(keyPrefix);
+    }
+    serializeSuffixTo(key, keyWithoutExtBuf);
+    return keyWithoutExtBuf.asList();
   }

   /**
    * Get all stages
    * @param reverse if true, the results will go backwards from the specified key (inclusive)
    */
-  public Flux<SubStageEntry<T, DatabaseStageEntry<U>>> getAllStages(@Nullable CompositeSnapshot snapshot,
+  public Stream<SubStageEntry<T, DatabaseStageEntry<U>>> getAllStages(@Nullable CompositeSnapshot snapshot,
       @Nullable T keyMin,
       @Nullable T keyMax,
       boolean reverse,
@@ -491,48 +412,38 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
     if (keyMin == null && keyMax == null) {
       return getAllStages(snapshot, smallRange);
     } else {
-      Mono<LLRange> boundedRangeMono = rangeMono.map(range -> {
-        try (range) {
-          return getPatchedRange(range, keyMin, keyMax);
-        }
-      });
-      return getAllStages(snapshot, boundedRangeMono, reverse, smallRange);
+      LLRange boundedRange = getPatchedRange(range, keyMin, keyMax);
+      return getAllStages(snapshot, boundedRange, reverse, smallRange);
     }
   }

-  private Flux<SubStageEntry<T, DatabaseStageEntry<U>>> getAllStages(@Nullable CompositeSnapshot snapshot,
-      Mono<LLRange> sliceRangeMono, boolean reverse, boolean smallRange) {
+  private Stream<SubStageEntry<T, DatabaseStageEntry<U>>> getAllStages(@Nullable CompositeSnapshot snapshot,
+      LLRange sliceRange, boolean reverse, boolean smallRange) {
     return dictionary
-        .getRangeKeys(resolveSnapshot(snapshot), sliceRangeMono, reverse, smallRange)
+        .getRangeKeys(resolveSnapshot(snapshot), sliceRange, reverse, smallRange)
         .map(keyBuf -> {
-          try (keyBuf) {
-            assert keyBuf.readableBytes() == keyPrefixLength + keySuffixLength + keyExtLength;
-            // Remove prefix. Keep only the suffix and the ext
-            splitPrefix(keyBuf).close();
-            suffixKeyLengthConsistency(keyBuf.readableBytes());
-            var bufSupplier = BufSupplier.ofOwned(toKey(keyBuf.copy()));
-            try {
-              T keySuffix = deserializeSuffix(keyBuf);
-              var subStage = new DatabaseMapSingle<>(dictionary, bufSupplier, valueSerializer);
-              return new SubStageEntry<>(keySuffix, subStage);
-            } catch (Throwable ex) {
-              bufSupplier.close();
-              throw ex;
-            }
-          }
+          assert keyBuf.size() == keyPrefixLength + keySuffixLength + keyExtLength;
+          // Remove prefix. Keep only the suffix and the ext
+          var suffixAndExtIn = BufDataInput.create(keyBuf);
+          suffixAndExtIn.skipBytes(keyPrefixLength);
+          suffixKeyLengthConsistency(suffixAndExtIn.available());
+          T keySuffix = deserializeSuffix(suffixAndExtIn);
+          var subStage = new DatabaseMapSingle<>(dictionary, keyBuf, valueSerializer);
+          return new SubStageEntry<>(keySuffix, subStage);
         });
   }

   @Override
-  public Flux<Entry<T, U>> getAllValues(@Nullable CompositeSnapshot snapshot, boolean smallRange) {
-    return getAllValues(snapshot, rangeMono, false, smallRange);
+  public Stream<Entry<T, U>> getAllValues(@Nullable CompositeSnapshot snapshot, boolean smallRange) {
+    return getAllValues(snapshot, range, false, smallRange);
   }

   /**
    * Get all values
    * @param reverse if true, the results will go backwards from the specified key (inclusive)
    */
-  public Flux<Entry<T, U>> getAllValues(@Nullable CompositeSnapshot snapshot,
+  public Stream<Entry<T, U>> getAllValues(@Nullable CompositeSnapshot snapshot,
       @Nullable T keyMin,
       @Nullable T keyMax,
       boolean reverse,
@@ -540,59 +451,52 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
     if (keyMin == null && keyMax == null) {
       return getAllValues(snapshot, smallRange);
     } else {
-      Mono<LLRange> boundedRangeMono = Mono.usingWhen(rangeMono,
-          range -> Mono.fromCallable(() -> getPatchedRange(range, keyMin, keyMax)),
-          LLUtils::finalizeResource);
-      return getAllValues(snapshot, boundedRangeMono, reverse, smallRange);
+      LLRange boundedRange = getPatchedRange(range, keyMin, keyMax);
+      return getAllValues(snapshot, boundedRange, reverse, smallRange);
     }
   }

-  private Flux<Entry<T, U>> getAllValues(@Nullable CompositeSnapshot snapshot,
-      Mono<LLRange> sliceRangeMono,
+  private Stream<Entry<T, U>> getAllValues(@Nullable CompositeSnapshot snapshot,
+      LLRange sliceRangeMono,
       boolean reverse, boolean smallRange) {
     return dictionary
         .getRange(resolveSnapshot(snapshot), sliceRangeMono, reverse, smallRange)
         .map((serializedEntry) -> {
           Entry<T, U> entry;
-          try (serializedEntry) {
-            var keyBuf = serializedEntry.getKeyUnsafe();
-            assert keyBuf != null;
-            assert keyBuf.readableBytes() == keyPrefixLength + keySuffixLength + keyExtLength;
-            // Remove prefix. Keep only the suffix and the ext
-            splitPrefix(keyBuf).close();
-            assert suffixKeyLengthConsistency(keyBuf.readableBytes());
-            T keySuffix = deserializeSuffix(keyBuf);
-            assert serializedEntry.getValueUnsafe() != null;
-            U value = valueSerializer.deserialize(serializedEntry.getValueUnsafe());
-            entry = Map.entry(keySuffix, value);
-          }
+          var keyBuf = serializedEntry.getKey();
+          assert keyBuf != null;
+          assert keyBuf.size() == keyPrefixLength + keySuffixLength + keyExtLength;
+          // Remove prefix. Keep only the suffix and the ext
+          var suffixAndExtIn = BufDataInput.create(keyBuf);
+          suffixAndExtIn.skipBytes(keyPrefixLength);
+          assert suffixKeyLengthConsistency(suffixAndExtIn.available());
+          T keySuffix = deserializeSuffix(suffixAndExtIn);
+          assert serializedEntry.getValue() != null;
+          U value = valueSerializer.deserialize(BufDataInput.create(serializedEntry.getValue()));
+          entry = Map.entry(keySuffix, value);
           return entry;
         });
   }

   @Override
-  public Flux<Entry<T, U>> setAllValuesAndGetPrevious(Flux<Entry<T, U>> entries) {
-    return Flux.usingWhen(Mono.just(true),
-        b -> this.getAllValues(null, false),
-        b -> dictionary.setRange(rangeMono, entries.map(entry -> serializeEntry(entry)), false)
-    );
+  public Stream<Entry<T, U>> setAllValuesAndGetPrevious(Stream<Entry<T, U>> entries) {
+    var previous = this.getAllValues(null, false);
+    dictionary.setRange(range, entries.map(entry -> serializeEntry(entry)), false);
+    return previous;
   }

   @Override
-  public Mono<Void> clear() {
-    return Mono.using(() -> rangeSupplier.get(), range -> {
-      if (range.isAll()) {
-        return dictionary.clear();
-      } else if (range.isSingle()) {
-        return dictionary
-            .remove(Mono.fromCallable(() -> range.getSingleUnsafe()), LLDictionaryResultType.VOID)
-            .doOnNext(LLUtils::finalizeResourceNow)
-            .then();
-      } else {
-        return dictionary.setRange(rangeMono, Flux.empty(), false);
-      }
-    }, LLUtils::finalizeResourceNow);
+  public void clear() {
+    if (range.isAll()) {
+      dictionary.clear();
+    } else if (range.isSingle()) {
+      dictionary.remove(range.getSingleUnsafe(), LLDictionaryResultType.VOID);
+    } else {
+      dictionary.setRange(range, Stream.empty(), false);
+    }
   }
 }
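One subtlety in the new setAllValuesAndGetPrevious above: the previous-values stream is created before setRange runs, but Java streams are lazy views rather than snapshots, so what the caller actually observes depends on when the stream is consumed and on the snapshot guarantees of the underlying dictionary (not shown in this diff). A self-contained illustration of the general hazard, using plain collections:

import java.util.ArrayList;
import java.util.List;

public final class LazyStreamDemo {
  public static void main(String[] args) {
    List<Integer> data = new ArrayList<>(List.of(1, 2, 3));
    var previous = data.stream();          // lazy view, not a snapshot
    data.set(0, 99);                       // mutation happens before consumption
    System.out.println(previous.toList()); // [99, 2, 3], not [1, 2, 3]
  }
}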

View File

@@ -1,137 +1,106 @@
 package it.cavallium.dbengine.database.collections;

-import io.netty5.buffer.Buffer;
-import io.netty5.buffer.BufferAllocator;
-import io.netty5.buffer.DefaultBufferAllocators;
-import io.netty5.buffer.Drop;
-import io.netty5.buffer.Owned;
-import io.netty5.util.Resource;
-import io.netty5.buffer.internal.ResourceSupport;
+import it.cavallium.dbengine.buffers.Buf;
+import it.cavallium.dbengine.buffers.BufDataInput;
+import it.cavallium.dbengine.buffers.BufDataOutput;
 import it.cavallium.dbengine.client.BadBlock;
 import it.cavallium.dbengine.client.CompositeSnapshot;
-import it.cavallium.dbengine.database.BufSupplier;
 import it.cavallium.dbengine.database.LLDictionary;
 import it.cavallium.dbengine.database.LLDictionaryResultType;
 import it.cavallium.dbengine.database.LLRange;
 import it.cavallium.dbengine.database.LLSnapshot;
 import it.cavallium.dbengine.database.LLUtils;
-import it.cavallium.dbengine.database.RangeSupplier;
 import it.cavallium.dbengine.database.SubStageEntry;
 import it.cavallium.dbengine.database.UpdateMode;
 import it.cavallium.dbengine.database.collections.DatabaseEmpty.Nothing;
 import it.cavallium.dbengine.database.serialization.SerializationException;
 import it.cavallium.dbengine.database.serialization.Serializer;
 import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
-import it.cavallium.dbengine.utils.InternalMonoUtils;
-import it.cavallium.dbengine.utils.SimpleResource;
 import it.unimi.dsi.fastutil.objects.Object2ObjectSortedMap;
+import java.util.List;
 import java.util.Map.Entry;
 import java.util.Optional;
 import java.util.Set;
 import java.util.concurrent.CompletionException;
 import java.util.concurrent.atomic.AtomicLong;
+import java.util.stream.Stream;
 import org.apache.commons.lang3.function.TriFunction;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
-import reactor.core.publisher.Flux;
-import reactor.core.publisher.Mono;

 // todo: implement optimized methods (which?)
-public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> extends SimpleResource implements
-    DatabaseStageMap<T, U, US> {
+public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implements DatabaseStageMap<T, U, US> {

   private static final Logger LOG = LogManager.getLogger(DatabaseMapDictionaryDeep.class);

   protected final LLDictionary dictionary;
-  protected final BufferAllocator alloc;
   private final AtomicLong totalZeroBytesErrors = new AtomicLong();
   protected final SubStageGetter<U, US> subStageGetter;
   protected final SerializerFixedBinaryLength<T> keySuffixSerializer;
   protected final int keyPrefixLength;
   protected final int keySuffixLength;
   protected final int keyExtLength;
-  protected final Mono<LLRange> rangeMono;
-  protected RangeSupplier rangeSupplier;
-  protected BufSupplier keyPrefixSupplier;
+  protected final LLRange range;
+  protected Buf keyPrefix;

-  private static void incrementPrefix(Buffer prefix, int prefixLength) {
-    assert prefix.readableBytes() >= prefixLength;
-    assert prefix.readerOffset() == 0;
-    final var originalKeyLength = prefix.readableBytes();
+  private static void incrementPrefix(Buf modifiablePrefix, int prefixLength) {
+    assert modifiablePrefix.size() >= prefixLength;
+    final var originalKeyLength = modifiablePrefix.size();
     boolean overflowed = true;
     final int ff = 0xFF;
     int writtenBytes = 0;
     for (int i = prefixLength - 1; i >= 0; i--) {
-      int iByte = prefix.getUnsignedByte(i);
+      int iByte = Byte.toUnsignedInt(modifiablePrefix.getByte(i));
       if (iByte != ff) {
-        prefix.setUnsignedByte(i, iByte + 1);
+        modifiablePrefix.set(i, (byte) (iByte + 1));
         writtenBytes++;
         overflowed = false;
         break;
       } else {
-        prefix.setUnsignedByte(i, 0x00);
+        modifiablePrefix.set(i, (byte) 0x00);
         writtenBytes++;
       }
     }
     assert prefixLength - writtenBytes >= 0;
     if (overflowed) {
-      assert prefix.writerOffset() == originalKeyLength;
-      prefix.ensureWritable(1, 1, true);
-      prefix.writerOffset(originalKeyLength + 1);
+      modifiablePrefix.add((byte) 0);
       for (int i = 0; i < originalKeyLength; i++) {
-        prefix.setUnsignedByte(i, 0xFF);
+        modifiablePrefix.set(i, (byte) 0xFF);
      }
-      prefix.setUnsignedByte(originalKeyLength, (byte) 0x00);
+      modifiablePrefix.set(originalKeyLength, (byte) 0x00);
     }
   }
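incrementPrefix treats the first prefixLength bytes as a big-endian unsigned counter: it increments the last byte and carries leftwards; if every byte is 0xFF it appends one byte, so the result still sorts after every key that begins with the original prefix. The same logic on plain byte arrays (a sketch; it assumes Buf behaves like a growable byte list):

import java.util.Arrays;

public final class IncrementSketch {
  static byte[] increment(byte[] prefix) {
    byte[] out = Arrays.copyOf(prefix, prefix.length);
    for (int i = out.length - 1; i >= 0; i--) {
      if (out[i] != (byte) 0xFF) {
        out[i]++;          // no carry needed: done
        return out;
      }
      out[i] = 0x00;       // 0xFF rolls over, carry continues leftwards
    }
    // overflow: 0xFF...FF has no same-length successor, so grow the key
    byte[] grown = new byte[prefix.length + 1];
    Arrays.fill(grown, 0, prefix.length, (byte) 0xFF);
    grown[prefix.length] = 0x00;
    return grown;
  }

  public static void main(String[] args) {
    System.out.println(Arrays.toString(increment(new byte[] {0x00, (byte) 0xFF}))); // [1, 0]
    System.out.println(Arrays.toString(increment(new byte[] {(byte) 0xFF})));       // [-1, 0], i.e. 0xFF 0x00
  }
}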
-  static void firstRangeKey(Buffer prefixKey, int prefixLength, Buffer suffixAndExtZeroes) {
-    zeroFillKeySuffixAndExt(prefixKey, prefixLength, suffixAndExtZeroes);
+  static Buf firstRangeKey(Buf prefixKey, int prefixLength, Buf suffixAndExtZeroes) {
+    var modifiablePrefixKey = Buf.create(prefixLength + suffixAndExtZeroes.size());
+    modifiablePrefixKey.addAll(prefixKey);
+    zeroFillKeySuffixAndExt(modifiablePrefixKey, prefixLength, suffixAndExtZeroes);
+    return modifiablePrefixKey;
   }

-  static void nextRangeKey(Buffer prefixKey, int prefixLength, Buffer suffixAndExtZeroes) {
-    zeroFillKeySuffixAndExt(prefixKey, prefixLength, suffixAndExtZeroes);
-    incrementPrefix(prefixKey, prefixLength);
+  static Buf nextRangeKey(Buf prefixKey, int prefixLength, Buf suffixAndExtZeroes) {
+    var modifiablePrefixKey = Buf.create(prefixLength + suffixAndExtZeroes.size());
+    modifiablePrefixKey.addAll(prefixKey);
+    zeroFillKeySuffixAndExt(modifiablePrefixKey, prefixLength, suffixAndExtZeroes);
+    incrementPrefix(modifiablePrefixKey, prefixLength);
+    return modifiablePrefixKey;
   }

-  @Deprecated
-  static void firstRangeKey(Buffer prefixKey, int prefixLength, int suffixLength, int extLength) {
-    try (var zeroBuf = DefaultBufferAllocators.offHeapAllocator().allocate(suffixLength + extLength)) {
-      zeroBuf.fill((byte) 0);
-      zeroBuf.writerOffset(suffixLength + extLength);
-      zeroFillKeySuffixAndExt(prefixKey, prefixLength, zeroBuf);
-    }
-  }
-
-  @Deprecated
-  static void nextRangeKey(Buffer prefixKey, int prefixLength, int suffixLength, int extLength) {
-    try (var zeroBuf = DefaultBufferAllocators.offHeapAllocator().allocate(suffixLength + extLength)) {
-      zeroBuf.fill((byte) 0);
-      zeroBuf.writerOffset(suffixLength + extLength);
-      zeroFillKeySuffixAndExt(prefixKey, prefixLength, zeroBuf);
-      incrementPrefix(prefixKey, prefixLength);
-    }
-  }
-
-  protected static void zeroFillKeySuffixAndExt(@NotNull Buffer prefixKey,
-      int prefixLength, Buffer suffixAndExtZeroes) {
+  /**
+   * @param modifiablePrefixKey This field content will be modified
+   */
+  protected static void zeroFillKeySuffixAndExt(@NotNull Buf modifiablePrefixKey, int prefixLength, Buf suffixAndExtZeroes) {
     //noinspection UnnecessaryLocalVariable
-    var result = prefixKey;
-    var suffixLengthAndExtLength = suffixAndExtZeroes.readableBytes();
-    assert result.readableBytes() == prefixLength;
+    var result = modifiablePrefixKey;
+    var suffixLengthAndExtLength = suffixAndExtZeroes.size();
+    assert result.size() == prefixLength;
     assert suffixLengthAndExtLength > 0 : "Suffix length + ext length is < 0: " + suffixLengthAndExtLength;
-    prefixKey.ensureWritable(suffixLengthAndExtLength);
-    suffixAndExtZeroes.copyInto(suffixAndExtZeroes.readerOffset(),
-        prefixKey,
-        prefixKey.writerOffset(),
-        suffixLengthAndExtLength
-    );
-    prefixKey.skipWritableBytes(suffixLengthAndExtLength);
+    result.size(prefixLength + suffixLengthAndExtLength);
+    modifiablePrefixKey.addAll(suffixAndExtZeroes);
+    assert modifiablePrefixKey.size() == prefixLength + suffixAndExtZeroes.size() : "Result buffer size is wrong";
   }
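firstRangeKey and nextRangeKey use that increment to bound the half-open range holding exactly the keys that share a prefix: the lower bound is the prefix padded with zeroes, the upper bound is the incremented prefix padded with zeroes. A sketch on byte arrays (the incremented prefix is hard-coded here; see the increment sketch above):

import java.util.Arrays;

public final class RangeKeySketch {
  static byte[] withZeroes(byte[] prefix, int zeroes) {
    return Arrays.copyOf(prefix, prefix.length + zeroes); // appended bytes are 0x00
  }

  public static void main(String[] args) {
    byte[] prefix = {0x0A, (byte) 0xFF};
    byte[] first = withZeroes(prefix, 3);                  // 0A FF 00 00 00 (inclusive)
    byte[] next = withZeroes(new byte[] {0x0B, 0x00}, 3);  // 0B 00 00 00 00 (exclusive)
    System.out.println(Arrays.toString(first) + " .. " + Arrays.toString(next));
  }
}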
   /**
@@ -150,94 +119,55 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> extend
   }

   public static <T, U, US extends DatabaseStage<U>> DatabaseMapDictionaryDeep<T, U, US> deepIntermediate(
-      LLDictionary dictionary, BufSupplier prefixKey, SerializerFixedBinaryLength<T> keySuffixSerializer,
+      LLDictionary dictionary, Buf prefixKey, SerializerFixedBinaryLength<T> keySuffixSerializer,
       SubStageGetter<U, US> subStageGetter, int keyExtLength) {
     return new DatabaseMapDictionaryDeep<>(dictionary, prefixKey, keySuffixSerializer, subStageGetter, keyExtLength);
   }

-  @SuppressWarnings({"unchecked", "rawtypes"})
-  protected DatabaseMapDictionaryDeep(LLDictionary dictionary, @Nullable BufSupplier prefixKeySupplier,
+  protected DatabaseMapDictionaryDeep(LLDictionary dictionary, @Nullable Buf prefixKey,
       SerializerFixedBinaryLength<T> keySuffixSerializer, SubStageGetter<U, US> subStageGetter, int keyExtLength) {
-    try (var prefixKey = prefixKeySupplier != null ? prefixKeySupplier.get() : null) {
-      this.dictionary = dictionary;
-      this.alloc = dictionary.getAllocator();
-      this.subStageGetter = subStageGetter;
-      this.keySuffixSerializer = keySuffixSerializer;
-      this.keyPrefixLength = prefixKey != null ? prefixKey.readableBytes() : 0;
-      this.keySuffixLength = keySuffixSerializer.getSerializedBinaryLength();
-      this.keyExtLength = keyExtLength;
-      try (var keySuffixAndExtZeroBuffer = alloc
-          .allocate(keySuffixLength + keyExtLength)
-          .fill((byte) 0)
-          .writerOffset(keySuffixLength + keyExtLength)
-          .makeReadOnly()) {
-        assert keySuffixAndExtZeroBuffer.readableBytes() == keySuffixLength + keyExtLength :
-            "Key suffix and ext zero buffer readable length is not equal"
-                + " to the key suffix length + key ext length. keySuffixAndExtZeroBuffer="
-                + keySuffixAndExtZeroBuffer.readableBytes() + " keySuffixLength=" + keySuffixLength + " keyExtLength="
-                + keyExtLength;
-        assert keySuffixAndExtZeroBuffer.readableBytes() > 0;
-        var firstKey = prefixKey != null ? prefixKeySupplier.get()
-            : alloc.allocate(keyPrefixLength + keySuffixLength + keyExtLength);
-        try {
-          firstRangeKey(firstKey, keyPrefixLength, keySuffixAndExtZeroBuffer);
-          var nextRangeKey = prefixKey != null ? prefixKeySupplier.get()
-              : alloc.allocate(keyPrefixLength + keySuffixLength + keyExtLength);
-          try {
-            nextRangeKey(nextRangeKey, keyPrefixLength, keySuffixAndExtZeroBuffer);
-            assert prefixKey == null || prefixKey.isAccessible();
-            assert keyPrefixLength == 0 || !LLUtils.equals(firstKey, nextRangeKey);
-            if (keyPrefixLength == 0) {
-              this.rangeSupplier = RangeSupplier.ofOwned(LLRange.all());
-              firstKey.close();
-              nextRangeKey.close();
-            } else {
-              this.rangeSupplier = RangeSupplier.ofOwned(LLRange.ofUnsafe(firstKey, nextRangeKey));
-            }
-            this.rangeMono = Mono.fromSupplier(rangeSupplier);
-            assert subStageKeysConsistency(keyPrefixLength + keySuffixLength + keyExtLength);
-          } catch (Throwable t) {
-            nextRangeKey.close();
-            throw t;
-          }
-        } catch (Throwable t) {
-          firstKey.close();
-          throw t;
-        }
-        this.keyPrefixSupplier = prefixKeySupplier;
-      }
-    } catch (Throwable t) {
-      if (prefixKeySupplier != null) {
-        prefixKeySupplier.close();
-      }
-      throw t;
-    }
-  }
+    this.dictionary = dictionary;
+    this.subStageGetter = subStageGetter;
+    this.keySuffixSerializer = keySuffixSerializer;
+    this.keyPrefixLength = prefixKey != null ? prefixKey.size() : 0;
+    this.keySuffixLength = keySuffixSerializer.getSerializedBinaryLength();
+    this.keyExtLength = keyExtLength;
+    var keySuffixAndExtZeroBuffer = Buf.createZeroes(keySuffixLength + keyExtLength);
+    assert keySuffixAndExtZeroBuffer.size() == keySuffixLength + keyExtLength :
+        "Key suffix and ext zero buffer readable length is not equal"
+            + " to the key suffix length + key ext length. keySuffixAndExtZeroBuffer="
+            + keySuffixAndExtZeroBuffer.size() + " keySuffixLength=" + keySuffixLength + " keyExtLength="
+            + keyExtLength;
+    assert keySuffixAndExtZeroBuffer.size() > 0;
+    var firstKey = firstRangeKey(prefixKey, keyPrefixLength, keySuffixAndExtZeroBuffer);
+    var nextRangeKey = nextRangeKey(prefixKey, keyPrefixLength, keySuffixAndExtZeroBuffer);
+    assert keyPrefixLength == 0 || !LLUtils.equals(firstKey, nextRangeKey);
+    if (keyPrefixLength == 0) {
+      this.range = LLRange.all();
+    } else {
+      this.range = LLRange.of(firstKey, nextRangeKey);
+    }
+    assert subStageKeysConsistency(keyPrefixLength + keySuffixLength + keyExtLength);
+    this.keyPrefix = prefixKey;
+  }

-  @SuppressWarnings({"unchecked", "rawtypes"})
   private DatabaseMapDictionaryDeep(LLDictionary dictionary,
-      BufferAllocator alloc,
       SubStageGetter<U, US> subStageGetter,
       SerializerFixedBinaryLength<T> keySuffixSerializer,
       int keyPrefixLength,
       int keySuffixLength,
       int keyExtLength,
-      Mono<LLRange> rangeMono,
-      RangeSupplier rangeSupplier,
-      BufSupplier keyPrefixSupplier,
-      Runnable onClose) {
+      LLRange range,
+      Buf keyPrefix) {
     this.dictionary = dictionary;
-    this.alloc = alloc;
     this.subStageGetter = subStageGetter;
     this.keySuffixSerializer = keySuffixSerializer;
     this.keyPrefixLength = keyPrefixLength;
     this.keySuffixLength = keySuffixLength;
     this.keyExtLength = keyExtLength;
-    this.rangeMono = rangeMono;
-    this.rangeSupplier = rangeSupplier;
-    this.keyPrefixSupplier = keyPrefixSupplier;
+    this.range = range;
+    this.keyPrefix = keyPrefix;
   }

   @SuppressWarnings("unused")
@@ -256,16 +186,39 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> extend
   }
   /**
-   * Removes the prefix from the key
    * @return the prefix
    */
-  protected Buffer splitPrefix(Buffer key) {
-    assert key.readableBytes() == keyPrefixLength + keySuffixLength + keyExtLength
-        || key.readableBytes() == keyPrefixLength + keySuffixLength;
-    var prefix = key.readSplit(this.keyPrefixLength);
-    assert key.readableBytes() == keySuffixLength + keyExtLength
-        || key.readableBytes() == keySuffixLength;
-    return prefix;
+  protected Buf prefixSubList(Buf key) {
+    assert key.size() == keyPrefixLength + keySuffixLength + keyExtLength
+        || key.size() == keyPrefixLength + keySuffixLength;
+    return key.subList(0, this.keyPrefixLength);
+  }
+
+  /**
+   * @return the suffix
+   */
+  protected Buf suffixSubList(Buf key) {
+    assert key.size() == keyPrefixLength + keySuffixLength + keyExtLength
+        || key.size() == keyPrefixLength + keySuffixLength;
+    return key.subList(this.keyPrefixLength, keyPrefixLength + keySuffixLength);
+  }
+
+  /**
+   * @return the suffix
+   */
+  protected Buf suffixAndExtSubList(Buf key) {
+    assert key.size() == keyPrefixLength + keySuffixLength + keyExtLength
+        || key.size() == keyPrefixLength + keySuffixLength;
+    return key.subList(this.keyPrefixLength, key.size());
+  }
+
+  /**
+   * @return the ext
+   */
+  protected Buf extSubList(Buf key) {
+    assert key.size() == keyPrefixLength + keySuffixLength + keyExtLength
+        || key.size() == keyPrefixLength + keySuffixLength;
+    return key.subList(this.keyPrefixLength + this.keySuffixLength, key.size());
   }
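The subList helpers above all slice the same composite-key layout: prefix bytes, then a fixed-length suffix, then the ext. A runnable toy with assumed lengths 2/3/1 showing which slice each helper returns:

import java.util.List;

public final class KeyLayoutDemo {
  public static void main(String[] args) {
    int prefixLen = 2, suffixLen = 3;
    List<String> key = List.of("p0", "p1", "s0", "s1", "s2", "e0");
    System.out.println(key.subList(0, prefixLen));                      // prefix    [p0, p1]
    System.out.println(key.subList(prefixLen, prefixLen + suffixLen));  // suffix    [s0, s1, s2]
    System.out.println(key.subList(prefixLen, key.size()));             // suffix+ext [s0, s1, s2, e0]
    System.out.println(key.subList(prefixLen + suffixLen, key.size())); // ext       [e0]
  }
}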
   protected LLSnapshot resolveSnapshot(@Nullable CompositeSnapshot snapshot) {
@@ -277,30 +230,23 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> extend
   }

   @Override
-  public Mono<Long> leavesCount(@Nullable CompositeSnapshot snapshot, boolean fast) {
-    return dictionary.sizeRange(resolveSnapshot(snapshot), rangeMono, fast);
+  public long leavesCount(@Nullable CompositeSnapshot snapshot, boolean fast) {
+    return dictionary.sizeRange(resolveSnapshot(snapshot), range, fast);
   }

   @Override
-  public Mono<Boolean> isEmpty(@Nullable CompositeSnapshot snapshot) {
-    return dictionary.isRangeEmpty(resolveSnapshot(snapshot), rangeMono, false);
+  public boolean isEmpty(@Nullable CompositeSnapshot snapshot) {
+    return dictionary.isRangeEmpty(resolveSnapshot(snapshot), range, false);
   }

   @Override
-  public Mono<US> at(@Nullable CompositeSnapshot snapshot, T keySuffix) {
-    var suffixKeyWithoutExt = Mono.fromCallable(() -> {
-      var keyWithoutExtBuf = keyPrefixSupplier == null
-          ? alloc.allocate(keySuffixLength + keyExtLength) : keyPrefixSupplier.get();
-      try {
-        keyWithoutExtBuf.ensureWritable(keySuffixLength + keyExtLength);
-        serializeSuffix(keySuffix, keyWithoutExtBuf);
-      } catch (Throwable ex) {
-        keyWithoutExtBuf.close();
-        throw ex;
-      }
-      return keyWithoutExtBuf;
-    });
-    return this.subStageGetter.subStage(dictionary, snapshot, suffixKeyWithoutExt);
+  public @NotNull US at(@Nullable CompositeSnapshot snapshot, T keySuffix) {
+    BufDataOutput bufOutput = BufDataOutput.createLimited(keyPrefixLength + keySuffixLength + keyExtLength);
+    if (keyPrefix != null) {
+      bufOutput.writeBytes(keyPrefix);
+    }
+    serializeSuffixTo(keySuffix, bufOutput);
+    return this.subStageGetter.subStage(dictionary, snapshot, bufOutput.asList());
   }
   @Override
@@ -309,39 +255,22 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> extend
   }

   @Override
-  public Flux<BadBlock> badBlocks() {
-    return dictionary.badBlocks(rangeMono);
+  public Stream<BadBlock> badBlocks() {
+    return dictionary.badBlocks(range);
   }

   @Override
-  public Flux<SubStageEntry<T, US>> getAllStages(@Nullable CompositeSnapshot snapshot, boolean smallRange) {
+  public Stream<SubStageEntry<T, US>> getAllStages(@Nullable CompositeSnapshot snapshot, boolean smallRange) {
     return dictionary
-        .getRangeKeyPrefixes(resolveSnapshot(snapshot), rangeMono, keyPrefixLength + keySuffixLength, smallRange)
-        .flatMapSequential(groupKeyWithoutExt -> this.subStageGetter
-            .subStage(dictionary, snapshot, Mono.fromCallable(() -> groupKeyWithoutExt.copy()))
-            .map(us -> {
-              T deserializedSuffix;
-              try (var splittedGroupSuffix = splitGroupSuffix(groupKeyWithoutExt)) {
-                deserializedSuffix = this.deserializeSuffix(splittedGroupSuffix);
-                return new SubStageEntry<>(deserializedSuffix, us);
-              }
-            })
-            .doFinally(s -> groupKeyWithoutExt.close())
-        );
-  }
-
-  /**
-   * Split the input. The input will become the ext, the returned data will be the group suffix
-   * @param groupKey group key, will become ext
-   * @return group suffix
-   */
-  private Buffer splitGroupSuffix(@NotNull Buffer groupKey) {
-    assert subStageKeysConsistency(groupKey.readableBytes())
-        || subStageKeysConsistency(groupKey.readableBytes() + keyExtLength);
-    this.splitPrefix(groupKey).close();
-    assert subStageKeysConsistency(keyPrefixLength + groupKey.readableBytes())
-        || subStageKeysConsistency(keyPrefixLength + groupKey.readableBytes() + keyExtLength);
-    return groupKey.readSplit(keySuffixLength);
+        .getRangeKeyPrefixes(resolveSnapshot(snapshot), range, keyPrefixLength + keySuffixLength, smallRange)
+        .parallel()
+        .map(groupKeyWithoutExt -> {
+          T deserializedSuffix;
+          var splittedGroupSuffix = suffixSubList(groupKeyWithoutExt);
+          deserializedSuffix = this.deserializeSuffix(BufDataInput.create(splittedGroupSuffix));
+          return new SubStageEntry<>(deserializedSuffix,
+              this.subStageGetter.subStage(dictionary, snapshot, groupKeyWithoutExt));
+        });
   }

   private boolean subStageKeysConsistency(int totalKeyLength) {
@@ -357,51 +286,45 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> extend
   }

   @Override
-  public Flux<Entry<T, U>> setAllValuesAndGetPrevious(Flux<Entry<T, U>> entries) {
-    return this
-        .getAllValues(null, false)
-        .concatWith(this
-            .clear()
-            .then(this.putMulti(entries))
-            .as(InternalMonoUtils::toAny)
-        );
+  public void setAllValues(Stream<Entry<T, U>> entries) {
+    this.clear();
+    this.putMulti(entries);
   }

   @Override
-  public Mono<Void> clear() {
-    return Mono.using(() -> rangeSupplier.get(), range -> {
-      if (range.isAll()) {
-        return dictionary.clear();
-      } else if (range.isSingle()) {
-        return dictionary
-            .remove(Mono.fromCallable(() -> range.getSingleUnsafe()), LLDictionaryResultType.VOID)
-            .doOnNext(resource -> LLUtils.finalizeResourceNow(resource))
-            .then();
-      } else {
-        return dictionary.setRange(rangeMono, Flux.empty(), false);
-      }
-    }, resource -> LLUtils.finalizeResourceNow(resource));
+  public Stream<Entry<T, U>> setAllValuesAndGetPrevious(Stream<Entry<T, U>> entries) {
+    return this.getAllValues(null, false).onClose(() -> setAllValues(entries));
   }

-  protected T deserializeSuffix(@NotNull Buffer keySuffix) throws SerializationException {
-    assert suffixKeyLengthConsistency(keySuffix.readableBytes());
-    var result = keySuffixSerializer.deserialize(keySuffix);
-    return result;
+  @Override
+  public void clear() {
+    if (range.isAll()) {
+      dictionary.clear();
+    } else if (range.isSingle()) {
+      dictionary.remove(range.getSingleUnsafe(), LLDictionaryResultType.VOID);
+    } else {
+      dictionary.setRange(range, Stream.empty(), false);
+    }
   }

-  protected void serializeSuffix(T keySuffix, Buffer output) throws SerializationException {
-    output.ensureWritable(keySuffixLength);
-    var beforeWriterOffset = output.writerOffset();
+  protected T deserializeSuffix(@NotNull BufDataInput keySuffix) throws SerializationException {
+    assert suffixKeyLengthConsistency(keySuffix.available());
+    return keySuffixSerializer.deserialize(keySuffix);
+  }
+
+  protected void serializeSuffixTo(T keySuffix, BufDataOutput output) throws SerializationException {
+    assert suffixKeyLengthConsistency(output.size());
+    var beforeWriterOffset = output.size();
     keySuffixSerializer.serialize(keySuffix, output);
-    var afterWriterOffset = output.writerOffset();
+    var afterWriterOffset = output.size();
     assert suffixKeyLengthConsistency(afterWriterOffset - beforeWriterOffset)
         : "Invalid key suffix length: " + (afterWriterOffset - beforeWriterOffset) + ". Expected: " + keySuffixLength;
   }
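Note that this class's setAllValuesAndGetPrevious defers the write into Stream.onClose, so the write runs only when the caller actually closes the returned stream (for example via try-with-resources); a terminal operation alone does not trigger close handlers. Demonstration with a plain stream:

import java.util.stream.Stream;

public final class OnCloseDemo {
  public static void main(String[] args) {
    Stream<String> s = Stream.of("a", "b")
        .onClose(() -> System.out.println("closed: deferred work happens here"));
    s.forEach(System.out::println); // a b -- the terminal op does NOT close the stream
    s.close();                      // only now does the onClose action run
  }
}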
-  public static <K1, K2, V, R> Flux<R> getAllLeaves2(DatabaseMapDictionaryDeep<K1, Object2ObjectSortedMap<K2, V>, ? extends DatabaseStageMap<K2, V, DatabaseStageEntry<V>>> deepMap,
+  public static <K1, K2, V, R> Stream<R> getAllLeaves2(DatabaseMapDictionaryDeep<K1, Object2ObjectSortedMap<K2, V>, ? extends DatabaseStageMap<K2, V, DatabaseStageEntry<V>>> deepMap,
       CompositeSnapshot snapshot,
       TriFunction<K1, K2, V, R> merger,
-      @NotNull Mono<K1> savedProgressKey1) {
+      @Nullable K1 savedProgressKey1) {
     var keySuffix1Serializer = deepMap.keySuffixSerializer;
     SerializerFixedBinaryLength<?> keySuffix2Serializer;
     Serializer<?> valueSerializer;
@@ -434,64 +357,47 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> extend
       throw new IllegalArgumentException();
     }

-    var savedProgressKey1Opt = savedProgressKey1.map(value1 -> Optional.of(value1)).defaultIfEmpty(Optional.empty());
+    var firstKey = Optional.ofNullable(savedProgressKey1);
+    var fullRange = deepMap.range;
+
+    LLRange range;
+    if (firstKey.isPresent()) {
+      var key1Buf = BufDataOutput.create(keySuffix1Serializer.getSerializedBinaryLength());
+      keySuffix1Serializer.serialize(firstKey.get(), key1Buf);
+      range = LLRange.of(key1Buf.asList(), fullRange.getMax());
+    } else {
+      range = fullRange;
+    }

-    return deepMap
-        .dictionary
-        .getRange(deepMap.resolveSnapshot(snapshot), Mono.zip(savedProgressKey1Opt, deepMap.rangeMono).handle((tuple, sink) -> {
-          var firstKey = tuple.getT1();
-          var fullRange = tuple.getT2();
-          try {
-            if (firstKey.isPresent()) {
-              try (fullRange) {
-                try (var key1Buf = deepMap.alloc.allocate(keySuffix1Serializer.getSerializedBinaryLength())) {
-                  keySuffix1Serializer.serialize(firstKey.get(), key1Buf);
-                  sink.next(LLRange.of(key1Buf.send(), fullRange.getMax()));
-                } catch (SerializationException e) {
-                  sink.error(e);
-                }
-              }
-            } else {
-              sink.next(fullRange);
-            }
-          } catch (Throwable ex) {
-            try {
-              fullRange.close();
-            } catch (Throwable ex2) {
-              LOG.error(ex2);
-            }
-            sink.error(ex);
-          }
-        }), false, false)
-        .concatMapIterable(entry -> {
+    return deepMap.dictionary.getRange(deepMap.resolveSnapshot(snapshot), range, false, false)
+        .flatMap(entry -> {
           K1 key1 = null;
           Object key2 = null;
-          try (entry) {
-            var keyBuf = entry.getKeyUnsafe();
-            var valueBuf = entry.getValueUnsafe();
+          try {
+            var keyBuf = entry.getKey();
+            var valueBuf = entry.getValue();
             try {
               assert keyBuf != null;
-              keyBuf.skipReadableBytes(deepMap.keyPrefixLength);
-              try (var key1Buf = keyBuf.split(deepMap.keySuffixLength)) {
-                key1 = keySuffix1Serializer.deserialize(key1Buf);
-              }
-              key2 = keySuffix2Serializer.deserialize(keyBuf);
+              var suffix1And2 = BufDataInput.create(keyBuf.subList(deepMap.keyPrefixLength, deepMap.keyPrefixLength + deepMap.keySuffixLength + deepMap.keyExtLength));
+              key1 = keySuffix1Serializer.deserialize(suffix1And2);
+              key2 = keySuffix2Serializer.deserialize(suffix1And2);
               assert valueBuf != null;
-              Object value = valueSerializer.deserialize(valueBuf);
+              Object value = valueSerializer.deserialize(BufDataInput.create(valueBuf));
               if (isHashedSet) {
                 //noinspection unchecked
                 Set<K2> set = (Set<K2>) value;
                 K1 finalKey1 = key1;
                 //noinspection unchecked
-                return set.stream().map(e -> merger.apply(finalKey1, e, (V) Nothing.INSTANCE)).toList();
+                return set.stream().map(e -> merger.apply(finalKey1, e, (V) Nothing.INSTANCE));
               } else if (isHashed) {
                 //noinspection unchecked
                 Set<Entry<K2, V>> set = (Set<Entry<K2, V>>) value;
                 K1 finalKey1 = key1;
-                return set.stream().map(e -> merger.apply(finalKey1, e.getKey(), e.getValue())).toList();
+                return set.stream().map(e -> merger.apply(finalKey1, e.getKey(), e.getValue()));
               } else {
                 //noinspection unchecked
-                return List.of(merger.apply(key1, (K2) key2, (V) value));
+                return Stream.of(merger.apply(key1, (K2) key2, (V) value));
               }
             } catch (IndexOutOfBoundsException ex) {
               var exMessage = ex.getMessage();
@@ -504,7 +410,7 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> extend
                     + ":" + key2
                     + "](" + LLUtils.toStringSafe(keyBuf) + ") total=" + totalZeroBytesErrors);
               }
-              return List.of();
+              return Stream.empty();
             } else {
               throw ex;
             }
@@ -514,22 +420,4 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> extend
       }
     });
   }
-  @Override
-  protected void onClose() {
-    try {
-      if (rangeSupplier != null) {
-        rangeSupplier.close();
-      }
-    } catch (Throwable ex) {
-      LOG.error("Failed to close range", ex);
-    }
-    try {
-      if (keyPrefixSupplier != null) {
-        keyPrefixSupplier.close();
-      }
-    } catch (Throwable ex) {
-      LOG.error("Failed to close keyPrefix", ex);
-    }
-  }
 }

View File

@@ -1,49 +1,40 @@
 package it.cavallium.dbengine.database.collections;

-import io.netty5.buffer.BufferAllocator;
-import io.netty5.buffer.Drop;
-import io.netty5.buffer.Owned;
-import io.netty5.util.Send;
+import it.cavallium.dbengine.buffers.Buf;
 import it.cavallium.dbengine.client.BadBlock;
 import it.cavallium.dbengine.client.CompositeSnapshot;
-import it.cavallium.dbengine.database.BufSupplier;
 import it.cavallium.dbengine.database.LLDictionary;
-import it.cavallium.dbengine.database.LLUtils;
-import io.netty5.buffer.internal.ResourceSupport;
 import it.cavallium.dbengine.database.SubStageEntry;
 import it.cavallium.dbengine.database.UpdateMode;
 import it.cavallium.dbengine.database.serialization.Serializer;
 import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
-import it.cavallium.dbengine.utils.SimpleResource;
 import it.unimi.dsi.fastutil.objects.Object2ObjectLinkedOpenHashMap;
 import it.unimi.dsi.fastutil.objects.Object2ObjectSortedMap;
 import it.unimi.dsi.fastutil.objects.ObjectArraySet;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Objects;
 import java.util.Set;
 import java.util.function.Function;
+import java.util.stream.Stream;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
-import reactor.core.publisher.Flux;
-import reactor.core.publisher.Mono;
@SuppressWarnings("unused") @SuppressWarnings("unused")
public class DatabaseMapDictionaryHashed<T, U, TH> extends SimpleResource implements public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T, U, DatabaseStageEntry<U>> {
DatabaseStageMap<T, U, DatabaseStageEntry<U>> {
private static final Logger logger = LogManager.getLogger(DatabaseMapDictionaryHashed.class); private static final Logger logger = LogManager.getLogger(DatabaseMapDictionaryHashed.class);
private final BufferAllocator alloc;
private final Function<T, TH> keySuffixHashFunction; private final Function<T, TH> keySuffixHashFunction;
private final DatabaseMapDictionary<TH, ObjectArraySet<Entry<T, U>>> subDictionary; private final DatabaseMapDictionary<TH, ObjectArraySet<Entry<T, U>>> subDictionary;
protected DatabaseMapDictionaryHashed(LLDictionary dictionary, protected DatabaseMapDictionaryHashed(LLDictionary dictionary,
@Nullable BufSupplier prefixKeySupplier, @Nullable Buf prefixKeySupplier,
Serializer<T> keySuffixSerializer, Serializer<T> keySuffixSerializer,
Serializer<U> valueSerializer, Serializer<U> valueSerializer,
Function<T, TH> keySuffixHashFunction, Function<T, TH> keySuffixHashFunction,
@ -52,7 +43,6 @@ public class DatabaseMapDictionaryHashed<T, U, TH> extends SimpleResource implem
if (updateMode != UpdateMode.ALLOW) { if (updateMode != UpdateMode.ALLOW) {
throw new IllegalArgumentException("Hashed maps only works when UpdateMode is ALLOW"); throw new IllegalArgumentException("Hashed maps only works when UpdateMode is ALLOW");
} }
this.alloc = dictionary.getAllocator();
ValueWithHashSerializer<T, U> valueWithHashSerializer ValueWithHashSerializer<T, U> valueWithHashSerializer
= new ValueWithHashSerializer<>(keySuffixSerializer, valueSerializer); = new ValueWithHashSerializer<>(keySuffixSerializer, valueSerializer);
ValuesSetSerializer<Entry<T, U>> valuesSetSerializer ValuesSetSerializer<Entry<T, U>> valuesSetSerializer
@ -62,11 +52,8 @@ public class DatabaseMapDictionaryHashed<T, U, TH> extends SimpleResource implem
this.keySuffixHashFunction = keySuffixHashFunction; this.keySuffixHashFunction = keySuffixHashFunction;
} }
private DatabaseMapDictionaryHashed(BufferAllocator alloc, private DatabaseMapDictionaryHashed(Function<T, TH> keySuffixHashFunction,
Function<T, TH> keySuffixHashFunction, DatabaseStage<Object2ObjectSortedMap<TH, ObjectArraySet<Entry<T, U>>>> subDictionary) {
DatabaseStage<Object2ObjectSortedMap<TH, ObjectArraySet<Entry<T, U>>>> subDictionary,
Drop<DatabaseMapDictionaryHashed<T, U, TH>> drop) {
this.alloc = alloc;
this.keySuffixHashFunction = keySuffixHashFunction; this.keySuffixHashFunction = keySuffixHashFunction;
this.subDictionary = (DatabaseMapDictionary<TH, ObjectArraySet<Entry<T, U>>>) subDictionary; this.subDictionary = (DatabaseMapDictionary<TH, ObjectArraySet<Entry<T, U>>>) subDictionary;
@ -88,7 +75,7 @@ public class DatabaseMapDictionaryHashed<T, U, TH> extends SimpleResource implem
} }
public static <T, U, UH> DatabaseMapDictionaryHashed<T, U, UH> tail(LLDictionary dictionary, public static <T, U, UH> DatabaseMapDictionaryHashed<T, U, UH> tail(LLDictionary dictionary,
@Nullable BufSupplier prefixKeySupplier, @Nullable Buf prefixKeySupplier,
Serializer<T> keySuffixSerializer, Serializer<T> keySuffixSerializer,
Serializer<U> valueSerializer, Serializer<U> valueSerializer,
Function<T, UH> keySuffixHashFunction, Function<T, UH> keySuffixHashFunction,
@@ -121,36 +108,35 @@ public class DatabaseMapDictionaryHashed<T, U, TH> extends SimpleResource implem
   }

   @Override
-  public Mono<Object2ObjectSortedMap<T, U>> get(@Nullable CompositeSnapshot snapshot) {
-    return subDictionary.get(snapshot).map(map -> deserializeMap(map));
+  public Object2ObjectSortedMap<T, U> get(@Nullable CompositeSnapshot snapshot) {
+    var v = subDictionary.get(snapshot);
+    return v != null ? deserializeMap(v) : null;
   }

   @Override
-  public Mono<Object2ObjectSortedMap<T, U>> getOrDefault(@Nullable CompositeSnapshot snapshot,
-      Mono<Object2ObjectSortedMap<T, U>> defaultValue) {
-    return this.get(snapshot).switchIfEmpty(defaultValue);
+  public Object2ObjectSortedMap<T, U> getOrDefault(@Nullable CompositeSnapshot snapshot,
+      Object2ObjectSortedMap<T, U> defaultValue) {
+    return Objects.requireNonNullElse(this.get(snapshot), defaultValue);
   }

   @Override
-  public Mono<Void> set(Object2ObjectSortedMap<T, U> map) {
-    return Mono.fromSupplier(() -> this.serializeMap(map)).flatMap(value -> subDictionary.set(value));
+  public void set(Object2ObjectSortedMap<T, U> map) {
+    var value = this.serializeMap(map);
+    subDictionary.set(value);
   }

   @Override
-  public Mono<Boolean> setAndGetChanged(Object2ObjectSortedMap<T, U> map) {
-    return Mono
-        .fromSupplier(() -> this.serializeMap(map))
-        .flatMap(value -> subDictionary.setAndGetChanged(value))
-        .single();
+  public boolean setAndGetChanged(Object2ObjectSortedMap<T, U> map) {
+    return subDictionary.setAndGetChanged(this.serializeMap(map));
   }

   @Override
-  public Mono<Boolean> clearAndGetStatus() {
+  public boolean clearAndGetStatus() {
     return subDictionary.clearAndGetStatus();
   }

   @Override
-  public Mono<Boolean> isEmpty(@Nullable CompositeSnapshot snapshot) {
+  public boolean isEmpty(@Nullable CompositeSnapshot snapshot) {
     return subDictionary.isEmpty(snapshot);
   }
@@ -160,20 +146,17 @@ public class DatabaseMapDictionaryHashed<T, U, TH> extends SimpleResource implem
   }

   @Override
-  public Flux<BadBlock> badBlocks() {
+  public Stream<BadBlock> badBlocks() {
     return this.subDictionary.badBlocks();
   }

   @Override
-  public Mono<DatabaseStageEntry<U>> at(@Nullable CompositeSnapshot snapshot, T key) {
-    return this
-        .atPrivate(snapshot, key, keySuffixHashFunction.apply(key))
-        .map(cast -> cast);
+  public @NotNull DatabaseStageEntry<U> at(@Nullable CompositeSnapshot snapshot, T key) {
+    return this.atPrivate(snapshot, key, keySuffixHashFunction.apply(key));
   }

-  private Mono<DatabaseSingleBucket<T, U, TH>> atPrivate(@Nullable CompositeSnapshot snapshot, T key, TH hash) {
-    return subDictionary.at(snapshot, hash)
-        .map(entry -> new DatabaseSingleBucket<T, U, TH>(entry, key));
+  private DatabaseSingleBucket<T, U, TH> atPrivate(@Nullable CompositeSnapshot snapshot, T key, TH hash) {
+    return new DatabaseSingleBucket<T, U, TH>(subDictionary.at(snapshot, hash), key);
   }
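The hashed map stores, under each hash, a small bucket of (key, value) entries; at() resolves a key by hashing it and handing the matching bucket to a DatabaseSingleBucket, which scans for the exact key. A self-contained sketch of that two-step lookup (the toy hash function and bucket shape are illustrative only):

import java.util.Map;
import java.util.Objects;
import java.util.Set;

public final class BucketLookupDemo {
  public static void main(String[] args) {
    // toy hash: key length; the real code uses keySuffixHashFunction
    Map<Integer, Set<Map.Entry<String, String>>> buckets =
        Map.of(2, Set.of(Map.entry("k1", "v1"), Map.entry("kX", "vX")));
    String key = "k1";
    var bucket = buckets.get(key.length());           // step 1: hash -> bucket
    String value = bucket == null ? null : bucket.stream()
        .filter(e -> Objects.equals(e.getKey(), key)) // step 2: scan bucket for the key
        .map(Map.Entry::getValue)
        .findAny().orElse(null);
    System.out.println(value); // v1
  }
}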
   @Override
@@ -182,57 +165,55 @@ public class DatabaseMapDictionaryHashed<T, U, TH> extends SimpleResource implem
   }

   @Override
-  public Flux<SubStageEntry<T, DatabaseStageEntry<U>>> getAllStages(@Nullable CompositeSnapshot snapshot,
+  public Stream<SubStageEntry<T, DatabaseStageEntry<U>>> getAllStages(@Nullable CompositeSnapshot snapshot,
       boolean smallRange) {
     return subDictionary
         .getAllValues(snapshot, smallRange)
         .map(Entry::getValue)
         .map(Collections::unmodifiableSet)
-        .flatMap(bucket -> Flux
-            .fromIterable(bucket)
+        .flatMap(bucket -> bucket.stream()
             .map(Entry::getKey)
-            .flatMap(key -> this.at(snapshot, key).map(stage -> new SubStageEntry<>(key, stage))));
+            .map(key -> new SubStageEntry<>(key, this.at(snapshot, key))));
   }

   @Override
-  public Flux<Entry<T, U>> getAllValues(@Nullable CompositeSnapshot snapshot, boolean smallRange) {
+  public Stream<Entry<T, U>> getAllValues(@Nullable CompositeSnapshot snapshot, boolean smallRange) {
     return subDictionary
         .getAllValues(snapshot, smallRange)
         .map(Entry::getValue)
         .map(Collections::unmodifiableSet)
-        .concatMapIterable(list -> list);
+        .flatMap(Collection::stream);
   }

   @Override
-  public Flux<Entry<T, U>> setAllValuesAndGetPrevious(Flux<Entry<T, U>> entries) {
-    return entries.flatMap(entry -> Mono.usingWhen(this.at(null, entry.getKey()),
-        stage -> stage.setAndGetPrevious(entry.getValue()).map(prev -> Map.entry(entry.getKey(), prev)),
-        LLUtils::finalizeResource
-    ));
+  public Stream<Entry<T, U>> setAllValuesAndGetPrevious(Stream<Entry<T, U>> entries) {
+    return entries.mapMulti((entry, sink) -> {
+      var prev = this.at(null, entry.getKey()).setAndGetPrevious(entry.getValue());
+      if (prev != null) {
+        sink.accept(Map.entry(entry.getKey(), prev));
+      }
+    });
   }
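mapMulti (Java 16+) acts here as a flatMap that can emit zero or one element without allocating an intermediate stream: the sink is invoked only when a previous value exists. Minimal demonstration of the same null-filtering shape:

import java.util.stream.Stream;

public final class MapMultiDemo {
  public static void main(String[] args) {
    Stream.of("a", null, "b")
        .<String>mapMulti((s, sink) -> {
          if (s != null) {
            sink.accept(s.toUpperCase()); // emit only non-null results
          }
        })
        .forEach(System.out::println); // A then B
  }
}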
@Override @Override
public Mono<Void> clear() { public void clear() {
return subDictionary.clear(); subDictionary.clear();
} }
@Override @Override
public Mono<Object2ObjectSortedMap<T, U>> setAndGetPrevious(Object2ObjectSortedMap<T, U> value) { public Object2ObjectSortedMap<T, U> setAndGetPrevious(Object2ObjectSortedMap<T, U> value) {
return Mono var v = subDictionary.setAndGetPrevious(this.serializeMap(value));
.fromSupplier(() -> this.serializeMap(value)) return v != null ? deserializeMap(v) : null;
.flatMap(value1 -> subDictionary.setAndGetPrevious(value1))
.map(map -> deserializeMap(map));
} }
@Override @Override
public Mono<Object2ObjectSortedMap<T, U>> clearAndGetPrevious() { public Object2ObjectSortedMap<T, U> clearAndGetPrevious() {
return subDictionary var v = subDictionary.clearAndGetPrevious();
.clearAndGetPrevious() return v != null ? deserializeMap(v) : null;
.map(map -> deserializeMap(map));
} }
@Override @Override
public Mono<Long> leavesCount(@Nullable CompositeSnapshot snapshot, boolean fast) { public long leavesCount(@Nullable CompositeSnapshot snapshot, boolean fast) {
return subDictionary.leavesCount(snapshot, fast); return subDictionary.leavesCount(snapshot, fast);
} }
@ -245,13 +226,14 @@ public class DatabaseMapDictionaryHashed<T, U, TH> extends SimpleResource implem
@Override @Override
public ValueGetter<T, U> getAsyncDbValueGetter(@Nullable CompositeSnapshot snapshot) { public ValueGetter<T, U> getAsyncDbValueGetter(@Nullable CompositeSnapshot snapshot) {
ValueGetter<TH, ObjectArraySet<Entry<T, U>>> getter = subDictionary.getAsyncDbValueGetter(snapshot); ValueGetter<TH, ObjectArraySet<Entry<T, U>>> getter = subDictionary.getAsyncDbValueGetter(snapshot);
return key -> getter return key -> {
.get(keySuffixHashFunction.apply(key)) ObjectArraySet<Entry<T, U>> set = getter.get(keySuffixHashFunction.apply(key));
.flatMap(set -> this.extractValueTransformation(set, key)); if (set != null) {
} return this.extractValue(set, key);
} else {
private Mono<U> extractValueTransformation(ObjectArraySet<Entry<T, U>> entries, T key) { return null;
return Mono.fromCallable(() -> extractValue(entries, key)); }
};
} }
@Nullable @Nullable
@ -299,15 +281,4 @@ public class DatabaseMapDictionaryHashed<T, U, TH> extends SimpleResource implem
return null; return null;
} }
} }
@Override
protected void onClose() {
try {
if (subDictionary != null) {
subDictionary.close();
}
} catch (Throwable ex) {
logger.error("Failed to close subDictionary", ex);
}
}
} }
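Note: Stream.mapMulti (Java 16+) replaces the reactive flatMap above for 0-or-1 expansions, pushing results into a sink instead of allocating an intermediate publisher per element. A minimal standalone sketch of the pattern (names are illustrative, not part of this commit):

    import java.util.List;
    import java.util.stream.Stream;

    class MapMultiSketch {
        public static void main(String[] args) {
            List<String> words = List.of("a", "", "bb", "", "ccc");
            // Emit only non-empty words, uppercased; empty strings yield no element,
            // mirroring how null previous values are skipped in setAllValuesAndGetPrevious.
            Stream<String> result = words.stream().<String>mapMulti((word, sink) -> {
                if (!word.isEmpty()) {
                    sink.accept(word.toUpperCase());
                }
            });
            result.forEach(System.out::println); // A, BB, CCC
        }
    }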

it/cavallium/dbengine/database/collections/DatabaseMapSingle.java

@@ -1,12 +1,10 @@
 package it.cavallium.dbengine.database.collections;
-import io.netty5.buffer.Buffer;
-import io.netty5.buffer.Drop;
-import io.netty5.buffer.Owned;
-import io.netty5.buffer.internal.ResourceSupport;
+import it.cavallium.dbengine.buffers.Buf;
+import it.cavallium.dbengine.buffers.BufDataInput;
+import it.cavallium.dbengine.buffers.BufDataOutput;
 import it.cavallium.dbengine.client.BadBlock;
 import it.cavallium.dbengine.client.CompositeSnapshot;
-import it.cavallium.dbengine.database.BufSupplier;
 import it.cavallium.dbengine.database.Delta;
 import it.cavallium.dbengine.database.LLDictionary;
 import it.cavallium.dbengine.database.LLDictionaryResultType;
@@ -14,32 +12,26 @@ import it.cavallium.dbengine.database.LLRange;
 import it.cavallium.dbengine.database.LLSnapshot;
 import it.cavallium.dbengine.database.LLUtils;
 import it.cavallium.dbengine.database.UpdateReturnMode;
+import it.cavallium.dbengine.database.disk.BinarySerializationFunction;
 import it.cavallium.dbengine.database.serialization.SerializationException;
 import it.cavallium.dbengine.database.serialization.SerializationFunction;
 import it.cavallium.dbengine.database.serialization.Serializer;
-import it.cavallium.dbengine.utils.SimpleResource;
-import java.util.concurrent.atomic.AtomicLong;
+import java.util.stream.Stream;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.jetbrains.annotations.Nullable;
-import reactor.core.publisher.Flux;
-import reactor.core.publisher.Mono;
-public class DatabaseMapSingle<U> extends SimpleResource implements DatabaseStageEntry<U> {
+public final class DatabaseMapSingle<U> implements DatabaseStageEntry<U> {
 	private static final Logger LOG = LogManager.getLogger(DatabaseMapSingle.class);
-	private final AtomicLong totalZeroBytesErrors = new AtomicLong();
 	private final LLDictionary dictionary;
-	private final Mono<Buffer> keyMono;
+	private final Buf key;
 	private final Serializer<U> serializer;
-	private final BufSupplier keySupplier;
-	public DatabaseMapSingle(LLDictionary dictionary, BufSupplier keySupplier, Serializer<U> serializer) {
+	public DatabaseMapSingle(LLDictionary dictionary, Buf key, Serializer<U> serializer) {
 		this.dictionary = dictionary;
-		this.keySupplier = keySupplier;
-		this.keyMono = Mono.fromSupplier(() -> keySupplier.get());
+		this.key = key;
 		this.serializer = serializer;
 	}
@@ -51,127 +43,98 @@ public class DatabaseMapSingle<U> extends SimpleResource implements DatabaseStag
 		}
 	}
-	private U deserializeValue(Buffer value) {
+	private U deserializeValue(Buf value) {
 		try {
-			return serializer.deserialize(value);
+			return serializer.deserialize(BufDataInput.create(value));
 		} catch (IndexOutOfBoundsException ex) {
 			var exMessage = ex.getMessage();
 			if (exMessage != null && exMessage.contains("read 0 to 0, write 0 to ")) {
-				try (var key = keySupplier.get()) {
-					LOG.error("Unexpected zero-bytes value at "
-							+ dictionary.getDatabaseName() + ":" + dictionary.getColumnName() + ":" + LLUtils.toStringSafe(key));
-				}
+				LOG.error("Unexpected zero-bytes value at %s:%s:%s".formatted(dictionary.getDatabaseName(),
+						dictionary.getColumnName(),
+						LLUtils.toStringSafe(key)
+				));
 				return null;
 			} else {
 				throw ex;
 			}
-		} catch (SerializationException ex) {
-			throw ex;
 		}
 	}
-	private Buffer serializeValue(U value) throws SerializationException {
-		var valSizeHint = serializer.getSerializedSizeHint();
-		if (valSizeHint == -1) valSizeHint = 128;
-		var valBuf = dictionary.getAllocator().allocate(valSizeHint);
-		try {
-			serializer.serialize(value, valBuf);
-			return valBuf;
-		} catch (Throwable ex) {
-			valBuf.close();
-			throw ex;
-		}
+	private Buf serializeValue(U value) throws SerializationException {
+		BufDataOutput valBuf = BufDataOutput.create(serializer.getSerializedSizeHint());
+		serializer.serialize(value, valBuf);
+		return valBuf.asList();
 	}
 	@Override
-	public Mono<U> get(@Nullable CompositeSnapshot snapshot) {
-		return Mono.usingWhen(dictionary.get(resolveSnapshot(snapshot), keyMono),
-				buf -> Mono.fromSupplier(() -> deserializeValue(buf)),
-				LLUtils::finalizeResource
-		);
+	public U get(@Nullable CompositeSnapshot snapshot) {
+		var result = dictionary.get(resolveSnapshot(snapshot), key);
+		if (result != null) {
+			return deserializeValue(result);
+		} else {
+			return null;
+		}
 	}
 	@Override
-	public Mono<U> setAndGetPrevious(U value) {
-		return Mono.usingWhen(dictionary
-				.put(keyMono, Mono.fromCallable(() -> serializeValue(value)), LLDictionaryResultType.PREVIOUS_VALUE),
-				buf -> Mono.fromSupplier(() -> deserializeValue(buf)),
-				LLUtils::finalizeResource);
+	public U setAndGetPrevious(U value) {
+		var serializedKey = value != null ? serializeValue(value) : null;
+		var result = dictionary.put(key, serializedKey, LLDictionaryResultType.PREVIOUS_VALUE);
+		if (result != null) {
+			return deserializeValue(result);
+		} else {
+			return null;
+		}
 	}
 	@Override
-	public Mono<U> update(SerializationFunction<@Nullable U, @Nullable U> updater,
+	public U update(SerializationFunction<@Nullable U, @Nullable U> updater,
 			UpdateReturnMode updateReturnMode) {
-		var resultMono = dictionary
-				.update(keyMono, (oldValueSer) -> {
-					try (oldValueSer) {
-						U result;
-						if (oldValueSer == null) {
-							result = updater.apply(null);
-						} else {
-							U deserializedValue = serializer.deserialize(oldValueSer);
-							result = updater.apply(deserializedValue);
-						}
-						if (result == null) {
-							return null;
-						} else {
-							return serializeValue(result);
-						}
-					}
-				}, updateReturnMode);
-		return Mono.usingWhen(resultMono,
-				result -> Mono.fromSupplier(() -> deserializeValue(result)),
-				LLUtils::finalizeResource
-		);
+		Buf resultBytes = dictionary.update(key, this.createUpdater(updater), updateReturnMode);
+		return deserializeValue(resultBytes);
 	}
 	@Override
-	public Mono<Delta<U>> updateAndGetDelta(SerializationFunction<@Nullable U, @Nullable U> updater) {
-		return dictionary
-				.updateAndGetDelta(keyMono, (oldValueSer) -> {
-					U result;
-					if (oldValueSer == null) {
-						result = updater.apply(null);
-					} else {
-						U deserializedValue = serializer.deserialize(oldValueSer);
-						result = updater.apply(deserializedValue);
-					}
-					if (result == null) {
-						return null;
-					} else {
-						return serializeValue(result);
-					}
-				}).transform(mono -> LLUtils.mapLLDelta(mono, serialized -> serializer.deserialize(serialized)));
+	public Delta<U> updateAndGetDelta(SerializationFunction<@Nullable U, @Nullable U> updater) {
+		var delta = dictionary.updateAndGetDelta(key, this.createUpdater(updater));
+		return LLUtils.mapLLDelta(delta, bytes -> serializer.deserialize(BufDataInput.create(bytes)));
+	}
+	private BinarySerializationFunction createUpdater(SerializationFunction<U, U> updater) {
+		return oldBytes -> {
+			U result;
+			if (oldBytes == null) {
+				result = updater.apply(null);
+			} else {
+				U deserializedValue = serializer.deserialize(BufDataInput.create(oldBytes));
+				result = updater.apply(deserializedValue);
+			}
+			if (result == null) {
+				return null;
+			} else {
+				return serializeValue(result);
+			}
+		};
 	}
 	@Override
-	public Mono<U> clearAndGetPrevious() {
-		return Mono.usingWhen(dictionary.remove(keyMono, LLDictionaryResultType.PREVIOUS_VALUE),
-				result -> Mono.fromSupplier(() -> deserializeValue(result)),
-				LLUtils::finalizeResource
-		);
+	public U clearAndGetPrevious() {
+		return deserializeValue(dictionary.remove(key, LLDictionaryResultType.PREVIOUS_VALUE));
 	}
 	@Override
-	public Mono<Long> leavesCount(@Nullable CompositeSnapshot snapshot, boolean fast) {
-		return dictionary
-				.isRangeEmpty(resolveSnapshot(snapshot), keyMono.map(single -> LLRange.singleUnsafe(single)), false)
-				.map(empty -> empty ? 0L : 1L);
+	public long leavesCount(@Nullable CompositeSnapshot snapshot, boolean fast) {
+		return dictionary.isRangeEmpty(resolveSnapshot(snapshot), LLRange.single(key), false) ? 0L : 1L;
 	}
 	@Override
-	public Mono<Boolean> isEmpty(@Nullable CompositeSnapshot snapshot) {
-		return dictionary
-				.isRangeEmpty(resolveSnapshot(snapshot), keyMono.map(single -> LLRange.singleUnsafe(single)), true);
+	public boolean isEmpty(@Nullable CompositeSnapshot snapshot) {
+		return dictionary.isRangeEmpty(resolveSnapshot(snapshot), LLRange.single(key), true);
 	}
 	@Override
-	public Flux<BadBlock> badBlocks() {
-		return dictionary.badBlocks(keyMono.map(single -> LLRange.singleUnsafe(single)));
+	public Stream<BadBlock> badBlocks() {
+		return dictionary.badBlocks(LLRange.single(key));
 	}
-	@Override
-	protected void onClose() {
-		keySupplier.close();
-	}
 }
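Note: after this rewrite DatabaseMapSingle is a plain blocking facade over LLDictionary with null-means-absent semantics. A hypothetical usage sketch (dict, keyBuf and utf8Serializer are placeholders, not defined in this commit):

    // Blocking calls replace the old Mono pipeline; no subscription, no resource finalizer.
    var single = new DatabaseMapSingle<String>(dict, keyBuf, utf8Serializer);
    single.set("hello");                                // plain write
    String prev = single.setAndGetPrevious("world");    // "hello"
    String old = single.update(v -> v + "!", UpdateReturnMode.GET_OLD_VALUE); // returns "world"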

it/cavallium/dbengine/database/collections/DatabaseSetDictionary.java

@@ -1,32 +1,21 @@
 package it.cavallium.dbengine.database.collections;
-import io.netty5.buffer.Buffer;
-import io.netty5.buffer.Drop;
-import io.netty5.util.Send;
+import it.cavallium.dbengine.buffers.Buf;
 import it.cavallium.dbengine.client.CompositeSnapshot;
-import it.cavallium.dbengine.database.BufSupplier;
 import it.cavallium.dbengine.database.LLDictionary;
-import it.cavallium.dbengine.database.LLUtils;
 import it.cavallium.dbengine.database.collections.DatabaseEmpty.Nothing;
 import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
 import it.unimi.dsi.fastutil.objects.Object2ObjectLinkedOpenHashMap;
-import java.util.HashMap;
-import java.util.Map;
 import java.util.Set;
 import org.jetbrains.annotations.Nullable;
-import reactor.core.publisher.Mono;
 @SuppressWarnings("unused")
 public class DatabaseSetDictionary<T> extends DatabaseMapDictionary<T, Nothing> {
 	protected DatabaseSetDictionary(LLDictionary dictionary,
-			BufSupplier prefixKeySupplier,
+			Buf prefixKeySupplier,
 			SerializerFixedBinaryLength<T> keySuffixSerializer) {
-		super(dictionary,
-				prefixKeySupplier,
-				keySuffixSerializer,
-				DatabaseEmpty.nothingSerializer(dictionary.getAllocator())
-		);
+		super(dictionary, prefixKeySupplier, keySuffixSerializer, DatabaseEmpty.nothingSerializer());
 	}
 	public static <T> DatabaseSetDictionary<T> simple(LLDictionary dictionary,
@@ -35,24 +24,27 @@ public class DatabaseSetDictionary<T> extends DatabaseMapDictionary<T, Nothing>
 	}
 	public static <T> DatabaseSetDictionary<T> tail(LLDictionary dictionary,
-			BufSupplier prefixKeySupplier,
+			Buf prefixKeySupplier,
 			SerializerFixedBinaryLength<T> keySuffixSerializer) {
 		return new DatabaseSetDictionary<>(dictionary, prefixKeySupplier, keySuffixSerializer);
 	}
-	public Mono<Set<T>> getKeySet(@Nullable CompositeSnapshot snapshot) {
-		return get(snapshot).map(Map::keySet);
+	public Set<T> getKeySet(@Nullable CompositeSnapshot snapshot) {
+		var v = get(snapshot);
+		return v != null ? v.keySet() : null;
 	}
-	public Mono<Set<T>> setAndGetPreviousKeySet(Set<T> value) {
+	public Set<T> setAndGetPreviousKeySet(Set<T> value) {
 		var hm = new Object2ObjectLinkedOpenHashMap<T, Nothing>();
 		for (T t : value) {
 			hm.put(t, DatabaseEmpty.NOTHING);
 		}
-		return setAndGetPrevious(hm).map(Map::keySet);
+		var v = setAndGetPrevious(hm);
+		return v != null ? v.keySet() : null;
 	}
-	public Mono<Set<T>> clearAndGetPreviousKeySet() {
-		return clearAndGetPrevious().map(Map::keySet);
+	public Set<T> clearAndGetPreviousKeySet() {
+		var v = clearAndGetPrevious();
+		return v != null ? v.keySet() : null;
 	}
 }
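Note: the recurring "var v = ...; return v != null ? f(v) : null;" shape above is the synchronous translation of Mono.map. A tiny helper capturing it (my own sketch, not part of the commit):

    import java.util.function.Function;

    final class NullableUtils {
        // Apply mapper only when value is present; propagate null otherwise.
        static <T, R> R mapNullable(T value, Function<? super T, ? extends R> mapper) {
            return value != null ? mapper.apply(value) : null;
        }
    }

    // e.g. getKeySet could then read: return NullableUtils.mapNullable(get(snapshot), Map::keySet);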

it/cavallium/dbengine/database/collections/DatabaseSetDictionaryHashed.java

@@ -1,36 +1,28 @@
 package it.cavallium.dbengine.database.collections;
-import io.netty5.buffer.Buffer;
-import io.netty5.buffer.Drop;
-import io.netty5.util.Send;
+import it.cavallium.dbengine.buffers.Buf;
 import it.cavallium.dbengine.client.CompositeSnapshot;
-import it.cavallium.dbengine.database.BufSupplier;
 import it.cavallium.dbengine.database.LLDictionary;
-import it.cavallium.dbengine.database.LLUtils;
 import it.cavallium.dbengine.database.collections.DatabaseEmpty.Nothing;
 import it.cavallium.dbengine.database.serialization.Serializer;
 import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
 import it.unimi.dsi.fastutil.objects.Object2ObjectLinkedOpenHashMap;
-import java.util.HashMap;
-import java.util.Map;
 import java.util.Set;
 import java.util.function.Function;
-import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
-import reactor.core.publisher.Mono;
 @SuppressWarnings("unused")
 public class DatabaseSetDictionaryHashed<T, TH> extends DatabaseMapDictionaryHashed<T, Nothing, TH> {
 	protected DatabaseSetDictionaryHashed(LLDictionary dictionary,
-			@Nullable BufSupplier prefixKeySupplier,
+			@Nullable Buf prefixKeySupplier,
 			Serializer<T> keySuffixSerializer,
 			Function<T, TH> keySuffixHashFunction,
 			SerializerFixedBinaryLength<TH> keySuffixHashSerializer) {
 		super(dictionary,
 				prefixKeySupplier,
 				keySuffixSerializer,
-				DatabaseEmpty.nothingSerializer(dictionary.getAllocator()),
+				DatabaseEmpty.nothingSerializer(),
 				keySuffixHashFunction,
 				keySuffixHashSerializer
 		);
@@ -49,7 +41,7 @@ public class DatabaseSetDictionaryHashed<T, TH> extends DatabaseMapDictionaryHas
 	}
 	public static <T, TH> DatabaseSetDictionaryHashed<T, TH> tail(LLDictionary dictionary,
-			@Nullable BufSupplier prefixKeySupplier,
+			@Nullable Buf prefixKeySupplier,
 			Serializer<T> keySuffixSerializer,
 			Function<T, TH> keyHashFunction,
 			SerializerFixedBinaryLength<TH> keyHashSerializer) {
@@ -61,19 +53,22 @@ public class DatabaseSetDictionaryHashed<T, TH> extends DatabaseMapDictionaryHas
 		);
 	}
-	public Mono<Set<T>> getKeySet(@Nullable CompositeSnapshot snapshot) {
-		return get(snapshot).map(Map::keySet);
+	public Set<T> getKeySet(@Nullable CompositeSnapshot snapshot) {
+		var v = get(snapshot);
+		return v != null ? v.keySet() : null;
 	}
-	public Mono<Set<T>> setAndGetPreviousKeySet(Set<T> value) {
+	public Set<T> setAndGetPreviousKeySet(Set<T> value) {
 		var hm = new Object2ObjectLinkedOpenHashMap<T, Nothing>();
 		for (T t : value) {
 			hm.put(t, DatabaseEmpty.NOTHING);
 		}
-		return setAndGetPrevious(hm).map(Map::keySet);
+		var v = setAndGetPrevious(hm);
+		return v != null ? v.keySet() : null;
 	}
-	public Mono<Set<T>> clearAndGetPreviousKeySet() {
-		return clearAndGetPrevious().map(Map::keySet);
+	public Set<T> clearAndGetPreviousKeySet() {
+		var v = clearAndGetPrevious();
+		return v != null ? v.keySet() : null;
 	}
 }

it/cavallium/dbengine/database/collections/DatabaseSingleBucket.java

@@ -1,30 +1,24 @@
 package it.cavallium.dbengine.database.collections;
-import io.netty5.buffer.Drop;
-import io.netty5.buffer.Owned;
-import io.netty5.util.Send;
 import it.cavallium.dbengine.client.BadBlock;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.Delta;
 import it.cavallium.dbengine.database.LLUtils;
-import io.netty5.buffer.internal.ResourceSupport;
 import it.cavallium.dbengine.database.UpdateReturnMode;
 import it.cavallium.dbengine.database.serialization.SerializationFunction;
-import it.cavallium.dbengine.utils.SimpleResource;
 import it.unimi.dsi.fastutil.objects.ObjectArraySet;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Objects;
 import java.util.Set;
+import java.util.stream.Stream;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
-import reactor.core.publisher.Flux;
-import reactor.core.publisher.Mono;
 @SuppressWarnings("unused")
-public class DatabaseSingleBucket<K, V, TH> extends SimpleResource implements DatabaseStageEntry<V> {
+public class DatabaseSingleBucket<K, V, TH> implements DatabaseStageEntry<V> {
 	private static final Logger logger = LogManager.getLogger(DatabaseSingleBucket.class);
@@ -43,33 +37,35 @@ public class DatabaseSingleBucket<K, V, TH> extends SimpleResource implements Da
 	}
 	@Override
-	public Mono<V> get(@Nullable CompositeSnapshot snapshot) {
-		return bucketStage.get(snapshot).flatMap(entries -> extractValueTransformation(entries));
+	public V get(@Nullable CompositeSnapshot snapshot) {
+		var entries = bucketStage.get(snapshot);
+		return entries != null ? extractValue(entries) : null;
 	}
 	@Override
-	public Mono<V> getOrDefault(@Nullable CompositeSnapshot snapshot, Mono<V> defaultValue) {
-		return bucketStage.get(snapshot).flatMap(entries -> extractValueTransformation(entries)).switchIfEmpty(defaultValue);
+	public V getOrDefault(@Nullable CompositeSnapshot snapshot, V defaultValue) {
+		var entries = bucketStage.get(snapshot);
+		return entries != null ? extractValue(entries) : defaultValue;
 	}
 	@Override
-	public Mono<Void> set(V value) {
-		return this.update(prev -> value, UpdateReturnMode.NOTHING).then();
+	public void set(V value) {
+		this.update(prev -> value, UpdateReturnMode.NOTHING);
 	}
 	@Override
-	public Mono<V> setAndGetPrevious(V value) {
+	public V setAndGetPrevious(V value) {
 		return this.update(prev -> value, UpdateReturnMode.GET_OLD_VALUE);
 	}
 	@Override
-	public Mono<Boolean> setAndGetChanged(V value) {
-		return this.updateAndGetDelta(prev -> value).map(delta -> LLUtils.isDeltaChanged(delta));
+	public boolean setAndGetChanged(V value) {
+		return LLUtils.isDeltaChanged(this.updateAndGetDelta(prev -> value));
 	}
 	@Override
-	public Mono<V> update(SerializationFunction<@Nullable V, @Nullable V> updater, UpdateReturnMode updateReturnMode) {
-		return bucketStage
+	public V update(SerializationFunction<@Nullable V, @Nullable V> updater, UpdateReturnMode updateReturnMode) {
+		var result = bucketStage
 				.update(oldBucket -> {
 					V oldValue = extractValue(oldBucket);
 					V newValue = updater.apply(oldValue);
@@ -79,13 +75,13 @@ public class DatabaseSingleBucket<K, V, TH> extends SimpleResource implements Da
 					} else {
 						return this.insertValueOrCreate(oldBucket, newValue);
 					}
-				}, updateReturnMode)
-				.flatMap(entries -> extractValueTransformation(entries));
+				}, updateReturnMode);
+		return result != null ? extractValue(result) : null;
 	}
 	@Override
-	public Mono<Delta<V>> updateAndGetDelta(SerializationFunction<@Nullable V, @Nullable V> updater) {
-		return bucketStage.updateAndGetDelta(oldBucket -> {
+	public Delta<V> updateAndGetDelta(SerializationFunction<@Nullable V, @Nullable V> updater) {
+		var delta = bucketStage.updateAndGetDelta(oldBucket -> {
 			V oldValue = extractValue(oldBucket);
 			var result = updater.apply(oldValue);
 			if (result == null) {
@@ -93,32 +89,33 @@ public class DatabaseSingleBucket<K, V, TH> extends SimpleResource implements Da
 			} else {
 				return this.insertValueOrCreate(oldBucket, result);
 			}
-		}).transform(mono -> LLUtils.mapDelta(mono, entries -> extractValue(entries)));
+		});
+		return LLUtils.mapDelta(delta, this::extractValue);
 	}
 	@Override
-	public Mono<Void> clear() {
-		return this.update(prev -> null, UpdateReturnMode.NOTHING).then();
+	public void clear() {
+		this.update(prev -> null, UpdateReturnMode.NOTHING);
 	}
 	@Override
-	public Mono<V> clearAndGetPrevious() {
+	public V clearAndGetPrevious() {
 		return this.update(prev -> null, UpdateReturnMode.GET_OLD_VALUE);
 	}
 	@Override
-	public Mono<Boolean> clearAndGetStatus() {
-		return this.updateAndGetDelta(prev -> null).map(delta -> LLUtils.isDeltaChanged(delta));
+	public boolean clearAndGetStatus() {
+		return LLUtils.isDeltaChanged(this.updateAndGetDelta(prev -> null));
 	}
 	@Override
-	public Mono<Long> leavesCount(@Nullable CompositeSnapshot snapshot, boolean fast) {
-		return this.get(snapshot).map(prev -> 1L).defaultIfEmpty(0L);
+	public long leavesCount(@Nullable CompositeSnapshot snapshot, boolean fast) {
+		return this.get(snapshot) != null ? 1L : 0L;
 	}
 	@Override
-	public Mono<Boolean> isEmpty(@Nullable CompositeSnapshot snapshot) {
-		return this.get(snapshot).map(prev -> true).defaultIfEmpty(true);
+	public boolean isEmpty(@Nullable CompositeSnapshot snapshot) {
+		return this.get(snapshot) == null;
 	}
 	@Override
@@ -127,14 +124,10 @@ public class DatabaseSingleBucket<K, V, TH> extends SimpleResource implements Da
 	}
 	@Override
-	public Flux<BadBlock> badBlocks() {
+	public Stream<BadBlock> badBlocks() {
 		return bucketStage.badBlocks();
 	}
-	private Mono<V> extractValueTransformation(Set<Entry<K, V>> entries) {
-		return Mono.fromCallable(() -> extractValue(entries));
-	}
 	@Nullable
 	private V extractValue(Set<Entry<K, V>> entries) {
 		if (entries == null) return null;
@@ -188,15 +181,4 @@ public class DatabaseSingleBucket<K, V, TH> extends SimpleResource implements Da
 			return null;
 		}
 	}
-	@Override
-	protected void onClose() {
-		try {
-			if (bucketStage != null) {
-				bucketStage.close();
-			}
-		} catch (Throwable ex) {
-			logger.error("Failed to close bucketStage", ex);
-		}
-	}
 }
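Note: in this class set, clear, setAndGetChanged and clearAndGetStatus all reduce to update/updateAndGetDelta with a constant result. A minimal in-memory sketch of that update-centric contract (plain JDK, not commit code):

    import java.util.Objects;
    import java.util.concurrent.atomic.AtomicReference;
    import java.util.function.UnaryOperator;

    final class InMemoryStage<V> {
        private final AtomicReference<V> ref = new AtomicReference<>();

        // Atomically apply the updater and return the previous value.
        V updateAndGetPrevious(UnaryOperator<V> updater) {
            return ref.getAndUpdate(updater);
        }
        void set(V value) { updateAndGetPrevious(prev -> value); }
        V clearAndGetPrevious() { return updateAndGetPrevious(prev -> null); }
        boolean setAndGetChanged(V value) {
            return !Objects.equals(updateAndGetPrevious(prev -> value), value);
        }
    }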

it/cavallium/dbengine/database/collections/DatabaseSingleMapped.java

@@ -1,9 +1,5 @@
 package it.cavallium.dbengine.database.collections;
-import io.netty5.buffer.Drop;
-import io.netty5.buffer.Owned;
-import io.netty5.util.Send;
-import io.netty5.buffer.internal.ResourceSupport;
 import it.cavallium.dbengine.client.BadBlock;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.client.Mapper;
@@ -12,126 +8,108 @@ import it.cavallium.dbengine.database.LLUtils;
 import it.cavallium.dbengine.database.UpdateReturnMode;
 import it.cavallium.dbengine.database.serialization.SerializationException;
 import it.cavallium.dbengine.database.serialization.SerializationFunction;
-import it.cavallium.dbengine.utils.SimpleResource;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
+import java.util.stream.Stream;
 import org.jetbrains.annotations.Nullable;
-import reactor.core.publisher.Flux;
-import reactor.core.publisher.Mono;
-import reactor.core.publisher.SynchronousSink;
-@SuppressWarnings("unused")
-public class DatabaseSingleMapped<A, B> extends SimpleResource implements DatabaseStageEntry<A> {
-	private static final Logger logger = LogManager.getLogger(DatabaseSingleMapped.class);
+public class DatabaseSingleMapped<A, B> implements DatabaseStageEntry<A> {
 	private final Mapper<A, B> mapper;
 	private final DatabaseStageEntry<B> serializedSingle;
-	@SuppressWarnings({"unchecked", "rawtypes"})
-	public DatabaseSingleMapped(DatabaseStageEntry<B> serializedSingle, Mapper<A, B> mapper,
-			Drop<DatabaseSingleMapped<A, B>> drop) {
+	public DatabaseSingleMapped(DatabaseStageEntry<B> serializedSingle, Mapper<A, B> mapper) {
 		this.serializedSingle = serializedSingle;
 		this.mapper = mapper;
 	}
-	@SuppressWarnings({"unchecked", "rawtypes"})
-	private DatabaseSingleMapped(DatabaseStage<B> serializedSingle, Mapper<A, B> mapper,
-			Drop<DatabaseSingleMapped<A, B>> drop) {
+	private DatabaseSingleMapped(DatabaseStage<B> serializedSingle, Mapper<A, B> mapper) {
 		this.mapper = mapper;
 		this.serializedSingle = (DatabaseStageEntry<B>) serializedSingle;
 	}
-	private void deserializeSink(B value, SynchronousSink<A> sink) {
-		try {
-			sink.next(this.unMap(value));
-		} catch (SerializationException ex) {
-			sink.error(ex);
-		}
-	}
 	@Override
-	public Mono<A> get(@Nullable CompositeSnapshot snapshot) {
-		return serializedSingle.get(snapshot).handle((value, sink) -> deserializeSink(value, sink));
+	public A get(@Nullable CompositeSnapshot snapshot) {
+		var data = serializedSingle.get(snapshot);
+		if (data == null) return null;
+		return this.unMap(data);
 	}
 	@Override
-	public Mono<A> getOrDefault(@Nullable CompositeSnapshot snapshot, Mono<A> defaultValue) {
-		return serializedSingle.get(snapshot).handle((B value, SynchronousSink<A> sink) -> deserializeSink(value, sink)).switchIfEmpty(defaultValue);
+	public A getOrDefault(@Nullable CompositeSnapshot snapshot, A defaultValue) {
+		var value = serializedSingle.get(snapshot);
+		if (value == null) return defaultValue;
+		return this.unMap(value);
 	}
 	@Override
-	public Mono<Void> set(A value) {
-		return Mono
-				.fromCallable(() -> map(value))
-				.flatMap(value1 -> serializedSingle.set(value1));
+	public void set(A value) {
+		B mappedValue = value != null ? map(value) : null;
+		serializedSingle.set(mappedValue);
 	}
 	@Override
-	public Mono<A> setAndGetPrevious(A value) {
-		return Mono
-				.fromCallable(() -> map(value))
-				.flatMap(value2 -> serializedSingle.setAndGetPrevious(value2))
-				.handle((value1, sink) -> deserializeSink(value1, sink));
+	public A setAndGetPrevious(A value) {
+		var mappedValue = value != null ? map(value) : null;
+		var prev = serializedSingle.setAndGetPrevious(mappedValue);
+		return prev != null ? unMap(prev) : null;
 	}
 	@Override
-	public Mono<Boolean> setAndGetChanged(A value) {
-		return Mono
-				.fromCallable(() -> map(value))
-				.flatMap(value1 -> serializedSingle.setAndGetChanged(value1))
-				.single();
+	public boolean setAndGetChanged(A value) {
+		var mappedValue = value != null ? map(value) : null;
+		return serializedSingle.setAndGetChanged(mappedValue);
 	}
 	@Override
-	public Mono<A> update(SerializationFunction<@Nullable A, @Nullable A> updater,
-			UpdateReturnMode updateReturnMode) {
-		return serializedSingle.update(oldValue -> {
+	public A update(SerializationFunction<@Nullable A, @Nullable A> updater, UpdateReturnMode updateReturnMode) {
+		B prev = serializedSingle.update(oldValue -> {
 			var result = updater.apply(oldValue == null ? null : this.unMap(oldValue));
 			if (result == null) {
 				return null;
 			} else {
 				return this.map(result);
 			}
-		}, updateReturnMode).handle((value, sink) -> deserializeSink(value, sink));
+		}, updateReturnMode);
+		return prev != null ? unMap(prev) : null;
 	}
 	@Override
-	public Mono<Delta<A>> updateAndGetDelta(SerializationFunction<@Nullable A, @Nullable A> updater) {
-		return serializedSingle.updateAndGetDelta(oldValue -> {
+	public Delta<A> updateAndGetDelta(SerializationFunction<@Nullable A, @Nullable A> updater) {
+		var delta = serializedSingle.updateAndGetDelta(oldValue -> {
 			var result = updater.apply(oldValue == null ? null : this.unMap(oldValue));
 			if (result == null) {
 				return null;
 			} else {
 				return this.map(result);
 			}
-		}).transform(mono -> LLUtils.mapDelta(mono, bytes -> unMap(bytes)));
+		});
+		return LLUtils.mapDelta(delta, this::unMap);
 	}
 	@Override
-	public Mono<Void> clear() {
-		return serializedSingle.clear();
+	public void clear() {
+		serializedSingle.clear();
 	}
 	@Override
-	public Mono<A> clearAndGetPrevious() {
-		return serializedSingle.clearAndGetPrevious().handle((value, sink) -> deserializeSink(value, sink));
+	public A clearAndGetPrevious() {
+		var prev = serializedSingle.clearAndGetPrevious();
+		return prev != null ? unMap(prev) : null;
 	}
 	@Override
-	public Mono<Boolean> clearAndGetStatus() {
+	public boolean clearAndGetStatus() {
 		return serializedSingle.clearAndGetStatus();
 	}
 	@Override
-	public Mono<Long> leavesCount(@Nullable CompositeSnapshot snapshot, boolean fast) {
+	public long leavesCount(@Nullable CompositeSnapshot snapshot, boolean fast) {
 		return serializedSingle.leavesCount(snapshot, fast);
 	}
 	@Override
-	public Mono<Boolean> isEmpty(@Nullable CompositeSnapshot snapshot) {
+	public boolean isEmpty(@Nullable CompositeSnapshot snapshot) {
 		return serializedSingle.isEmpty(snapshot);
 	}
@@ -141,7 +119,7 @@ public class DatabaseSingleMapped<A, B> extends SimpleResource implements Databa
 	}
 	@Override
-	public Flux<BadBlock> badBlocks() {
+	public Stream<BadBlock> badBlocks() {
 		return this.serializedSingle.badBlocks();
 	}
@@ -152,9 +130,4 @@ public class DatabaseSingleMapped<A, B> extends SimpleResource implements Databa
 	private B map(A bytes) throws SerializationException {
 		return mapper.map(bytes);
 	}
-	@Override
-	protected void onClose() {
-		serializedSingle.close();
-	}
 }
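Note: DatabaseSingleMapped only needs a bidirectional Mapper between the exposed type A and the stored type B. A sketch of such a mapper (assuming Mapper declares map/unmap in the directions this file uses; that interface is not shown in this diff):

    // Expose ints while storing their decimal-string form.
    Mapper<Integer, String> intAsString = new Mapper<>() {
        @Override public String map(Integer value) { return Integer.toString(value); }
        @Override public Integer unmap(String value) { return Integer.parseInt(value); }
    };
    // new DatabaseSingleMapped<>(stringStage, intAsString) would then behave like a DatabaseStageEntry<Integer>.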

it/cavallium/dbengine/database/collections/DatabaseSingleton.java

@@ -1,9 +1,8 @@
 package it.cavallium.dbengine.database.collections;
-import io.netty5.buffer.Buffer;
-import io.netty5.buffer.Drop;
-import io.netty5.buffer.Owned;
-import io.netty5.buffer.internal.ResourceSupport;
+import it.cavallium.dbengine.buffers.Buf;
+import it.cavallium.dbengine.buffers.BufDataInput;
+import it.cavallium.dbengine.buffers.BufDataOutput;
 import it.cavallium.dbengine.client.BadBlock;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.Delta;
@@ -14,23 +13,18 @@ import it.cavallium.dbengine.database.UpdateReturnMode;
 import it.cavallium.dbengine.database.serialization.SerializationException;
 import it.cavallium.dbengine.database.serialization.SerializationFunction;
 import it.cavallium.dbengine.database.serialization.Serializer;
-import it.cavallium.dbengine.utils.InternalMonoUtils;
-import it.cavallium.dbengine.utils.SimpleResource;
+import java.util.stream.Stream;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.jetbrains.annotations.Nullable;
-import reactor.core.publisher.Flux;
-import reactor.core.publisher.Mono;
-import reactor.core.publisher.SynchronousSink;
-public class DatabaseSingleton<U> extends SimpleResource implements DatabaseStageEntry<U> {
+public class DatabaseSingleton<U> implements DatabaseStageEntry<U> {
 	private static final Logger LOG = LogManager.getLogger(DatabaseSingleton.class);
 	private final LLSingleton singleton;
 	private final Serializer<U> serializer;
-	@SuppressWarnings({"unchecked", "rawtypes"})
 	public DatabaseSingleton(LLSingleton singleton, Serializer<U> serializer) {
 		this.singleton = singleton;
 		this.serializer = serializer;
@@ -44,13 +38,9 @@ public class DatabaseSingleton<U> extends SimpleResource implements DatabaseStag
 		}
 	}
-	private U deserializeValue(Buffer value) {
+	private U deserializeValue(Buf value) {
 		try {
-			U deserializedValue;
-			try (value) {
-				deserializedValue = serializer.deserialize(value);
-			}
-			return deserializedValue;
+			return serializer.deserialize(BufDataInput.create(value));
 		} catch (IndexOutOfBoundsException ex) {
 			var exMessage = ex.getMessage();
 			if (exMessage != null && exMessage.contains("read 0 to 0, write 0 to ")) {
@@ -63,124 +53,96 @@ public class DatabaseSingleton<U> extends SimpleResource implements DatabaseStag
 		}
 	}
-	private Buffer serializeValue(U value) throws SerializationException {
+	private Buf serializeValue(U value) throws SerializationException {
 		var valSizeHint = serializer.getSerializedSizeHint();
 		if (valSizeHint == -1) valSizeHint = 128;
-		var valBuf = singleton.getAllocator().allocate(valSizeHint);
-		try {
-			serializer.serialize(value, valBuf);
-			return valBuf;
-		} catch (Throwable ex) {
-			valBuf.close();
-			throw ex;
-		}
+		var valBuf = BufDataOutput.create(valSizeHint);
+		serializer.serialize(value, valBuf);
+		return valBuf.asList();
 	}
 	@Override
-	public Mono<U> get(@Nullable CompositeSnapshot snapshot) {
-		var resultMono = singleton.get(resolveSnapshot(snapshot));
-		return Mono.usingWhen(resultMono,
-				result -> Mono.fromSupplier(() -> this.deserializeValue(result)),
-				LLUtils::finalizeResource
-		);
+	public U get(@Nullable CompositeSnapshot snapshot) {
+		Buf result = singleton.get(resolveSnapshot(snapshot));
+		return this.deserializeValue(result);
 	}
 	@Override
-	public Mono<Void> set(U value) {
-		return singleton.set(Mono.fromCallable(() -> serializeValue(value)));
+	public void set(U value) {
+		singleton.set(serializeValue(value));
 	}
 	@Override
-	public Mono<U> setAndGetPrevious(U value) {
-		var resultMono = Flux
-				.concat(singleton.get(null),
-						singleton.set(Mono.fromCallable(() -> serializeValue(value))).as(InternalMonoUtils::toAny)
-				)
-				.last();
-		return Mono.usingWhen(resultMono,
-				result -> Mono.fromSupplier(() -> this.deserializeValue(result)),
-				LLUtils::finalizeResource
-		);
+	public U setAndGetPrevious(U value) {
+		var prev = singleton.get(null);
+		singleton.set(serializeValue(value));
+		return this.deserializeValue(prev);
 	}
 	@Override
-	public Mono<U> update(SerializationFunction<@Nullable U, @Nullable U> updater,
+	public U update(SerializationFunction<@Nullable U, @Nullable U> updater,
 			UpdateReturnMode updateReturnMode) {
-		var resultMono = singleton
+		Buf resultBuf = singleton
 				.update((oldValueSer) -> {
-					try (oldValueSer) {
-						U result;
-						if (oldValueSer == null) {
-							result = updater.apply(null);
-						} else {
-							U deserializedValue = serializer.deserialize(oldValueSer);
-							result = updater.apply(deserializedValue);
-						}
-						if (result == null) {
-							return null;
-						} else {
-							return serializeValue(result);
-						}
+					U result;
+					if (oldValueSer == null) {
+						result = updater.apply(null);
+					} else {
+						U deserializedValue = serializer.deserialize(BufDataInput.create(oldValueSer));
+						result = updater.apply(deserializedValue);
+					}
+					if (result == null) {
+						return null;
+					} else {
+						return serializeValue(result);
 					}
 				}, updateReturnMode);
-		return Mono.usingWhen(resultMono,
-				result -> Mono.fromSupplier(() -> this.deserializeValue(result)),
-				LLUtils::finalizeResource
-		);
+		return this.deserializeValue(resultBuf);
 	}
 	@Override
-	public Mono<Delta<U>> updateAndGetDelta(SerializationFunction<@Nullable U, @Nullable U> updater) {
-		return singleton
-				.updateAndGetDelta((oldValueSer) -> {
-					try (oldValueSer) {
-						U result;
-						if (oldValueSer == null) {
-							result = updater.apply(null);
-						} else {
-							U deserializedValue = serializer.deserialize(oldValueSer);
-							result = updater.apply(deserializedValue);
-						}
-						if (result == null) {
-							return null;
-						} else {
-							return serializeValue(result);
-						}
-					}
-				}).transform(mono -> LLUtils.mapLLDelta(mono, serialized -> serializer.deserialize(serialized)));
+	public Delta<U> updateAndGetDelta(SerializationFunction<@Nullable U, @Nullable U> updater) {
+		var mono = singleton.updateAndGetDelta((oldValueSer) -> {
+			U result;
+			if (oldValueSer == null) {
+				result = updater.apply(null);
+			} else {
+				U deserializedValue = serializer.deserialize(BufDataInput.create(oldValueSer));
+				result = updater.apply(deserializedValue);
+			}
+			if (result == null) {
+				return null;
+			} else {
+				return serializeValue(result);
+			}
+		});
+		return LLUtils.mapLLDelta(mono, serialized -> serializer.deserialize(BufDataInput.create(serialized)));
 	}
 	@Override
-	public Mono<Void> clear() {
-		return singleton.set(Mono.empty());
+	public void clear() {
+		singleton.set(null);
 	}
 	@Override
-	public Mono<U> clearAndGetPrevious() {
-		var resultMono = Flux.concat(singleton.get(null), singleton.set(Mono.empty()).as(InternalMonoUtils::toAny)).last();
-		return Mono.usingWhen(resultMono,
-				result -> Mono.fromSupplier(() -> this.deserializeValue(result)),
-				LLUtils::finalizeResource
-		);
+	public U clearAndGetPrevious() {
+		var result = singleton.get(null);
+		singleton.set(null);
+		return this.deserializeValue(result);
 	}
 	@Override
-	public Mono<Long> leavesCount(@Nullable CompositeSnapshot snapshot, boolean fast) {
-		return singleton.get(null).map(unused -> 1L).defaultIfEmpty(0L);
+	public long leavesCount(@Nullable CompositeSnapshot snapshot, boolean fast) {
+		return singleton.get(null) != null ? 1L : 0L;
	}
 	@Override
-	public Mono<Boolean> isEmpty(@Nullable CompositeSnapshot snapshot) {
-		return singleton.get(null).map(t -> false).defaultIfEmpty(true);
+	public boolean isEmpty(@Nullable CompositeSnapshot snapshot) {
+		return singleton.get(null) == null;
 	}
 	@Override
-	public Flux<BadBlock> badBlocks() {
-		return Flux.empty();
-	}
-	@Override
-	protected void onClose() {
+	public Stream<BadBlock> badBlocks() {
+		return Stream.empty();
 	}
 }
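Note: serializeValue/deserializeValue now round-trip through BufDataOutput/BufDataInput instead of netty Buffers. A sketch of a matching Serializer (assuming the Buf data classes expose DataOutput/DataInput-style writeUTF/readUTF; only serialize, deserialize and getSerializedSizeHint are confirmed by this diff):

    Serializer<String> utf8 = new Serializer<>() {
        @Override public void serialize(String value, BufDataOutput out) { out.writeUTF(value); }
        @Override public String deserialize(BufDataInput in) { return in.readUTF(); }
        // -1 means "unknown size"; callers above then fall back to a 128-byte hint.
        @Override public int getSerializedSizeHint() { return -1; }
    };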

it/cavallium/dbengine/database/collections/DatabaseStage.java

@@ -1,64 +1,56 @@
 package it.cavallium.dbengine.database.collections;
-import io.netty5.util.Resource;
 import it.cavallium.dbengine.client.BadBlock;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.Delta;
 import it.cavallium.dbengine.database.LLUtils;
-import it.cavallium.dbengine.database.SafeCloseable;
 import it.cavallium.dbengine.database.UpdateReturnMode;
 import it.cavallium.dbengine.database.serialization.SerializationFunction;
 import java.util.Objects;
+import java.util.stream.Stream;
 import org.jetbrains.annotations.Nullable;
-import reactor.core.publisher.Flux;
-import reactor.core.publisher.Mono;
-public interface DatabaseStage<T> extends DatabaseStageWithEntry<T>, SafeCloseable {
-	Mono<T> get(@Nullable CompositeSnapshot snapshot);
-	default Mono<T> getOrDefault(@Nullable CompositeSnapshot snapshot,
-			Mono<T> defaultValue,
-			boolean existsAlmostCertainly) {
-		return get(snapshot).switchIfEmpty(defaultValue).single();
+public interface DatabaseStage<T> extends DatabaseStageWithEntry<T> {
+	@Nullable T get(@Nullable CompositeSnapshot snapshot);
+	default T getOrDefault(@Nullable CompositeSnapshot snapshot, T defaultValue, boolean existsAlmostCertainly) {
+		return Objects.requireNonNullElse(get(snapshot), defaultValue);
 	}
-	default Mono<T> getOrDefault(@Nullable CompositeSnapshot snapshot, Mono<T> defaultValue) {
+	default T getOrDefault(@Nullable CompositeSnapshot snapshot, T defaultValue) {
 		return getOrDefault(snapshot, defaultValue, false);
 	}
-	default Mono<Void> set(T value) {
-		return this
-				.setAndGetChanged(value)
-				.then();
+	default void set(@Nullable T value) {
+		this.setAndGetChanged(value);
 	}
-	Mono<T> setAndGetPrevious(T value);
+	@Nullable T setAndGetPrevious(@Nullable T value);
-	default Mono<Boolean> setAndGetChanged(T value) {
-		return this
-				.setAndGetPrevious(value)
-				.map(oldValue -> !Objects.equals(oldValue, value))
-				.switchIfEmpty(Mono.fromSupplier(() -> value != null));
+	default boolean setAndGetChanged(@Nullable T value) {
+		T oldValue = this.setAndGetPrevious(value);
+		if (oldValue != null) {
+			return !Objects.equals(oldValue, value);
+		} else {
+			return value != null;
+		}
 	}
-	default Mono<T> update(SerializationFunction<@Nullable T, @Nullable T> updater,
-			UpdateReturnMode updateReturnMode) {
-		return this
-				.updateAndGetDelta(updater)
-				.transform(prev -> LLUtils.resolveDelta(prev, updateReturnMode));
+	default @Nullable T update(SerializationFunction<@Nullable T, @Nullable T> updater, UpdateReturnMode updateReturnMode) {
+		return LLUtils.resolveDelta(this.updateAndGetDelta(updater), updateReturnMode);
 	}
-	Mono<Delta<T>> updateAndGetDelta(SerializationFunction<@Nullable T, @Nullable T> updater);
+	Delta<T> updateAndGetDelta(SerializationFunction<@Nullable T, @Nullable T> updater);
-	default Mono<Void> clear() {
-		return clearAndGetStatus().then();
+	default void clear() {
+		clearAndGetStatus();
 	}
-	Mono<T> clearAndGetPrevious();
+	@Nullable T clearAndGetPrevious();
-	default Mono<Boolean> clearAndGetStatus() {
-		return clearAndGetPrevious().map(Objects::nonNull).defaultIfEmpty(false);
+	default boolean clearAndGetStatus() {
+		return clearAndGetPrevious() != null;
 	}
 	/**
@@ -66,11 +58,11 @@ public interface DatabaseStage<T> extends DatabaseStageWithEntry<T>, SafeCloseab
 	 * If it's a nested collection the count will include all the children recursively
 	 * @param fast true to return an approximate value
 	 */
-	Mono<Long> leavesCount(@Nullable CompositeSnapshot snapshot, boolean fast);
+	long leavesCount(@Nullable CompositeSnapshot snapshot, boolean fast);
-	default Mono<Boolean> isEmpty(@Nullable CompositeSnapshot snapshot) {
-		return leavesCount(snapshot, false).map(size -> size <= 0);
+	default boolean isEmpty(@Nullable CompositeSnapshot snapshot) {
+		return leavesCount(snapshot, false) <= 0;
 	}
-	Flux<BadBlock> badBlocks();
+	Stream<BadBlock> badBlocks();
 }
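Note: across this interface an empty Mono becomes a null return, so Objects.requireNonNullElse supplies the fallback exactly where switchIfEmpty used to. A self-contained demo of that getOrDefault contract:

    import java.util.Objects;

    class GetOrDefaultDemo {
        // Stands in for stage.get(snapshot) when the stage holds no value.
        static String get() { return null; }
        public static void main(String[] args) {
            System.out.println(Objects.requireNonNullElse(get(), "fallback")); // prints "fallback"
        }
    }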

it/cavallium/dbengine/database/collections/DatabaseStageEntry.java

@@ -1,9 +1,5 @@
 package it.cavallium.dbengine.database.collections;
-import io.netty5.util.Resource;
-import it.cavallium.dbengine.client.BadBlock;
-import reactor.core.publisher.Flux;
 public interface DatabaseStageEntry<U> extends DatabaseStage<U> {
 	@Override

it/cavallium/dbengine/database/collections/DatabaseStageMap.java

@@ -1,5 +1,7 @@
 package it.cavallium.dbengine.database.collections;
+import static it.cavallium.dbengine.database.LLUtils.consume;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.Delta;
 import it.cavallium.dbengine.database.LLUtils;
@@ -7,7 +9,6 @@ import it.cavallium.dbengine.database.SubStageEntry;
 import it.cavallium.dbengine.database.UpdateMode;
 import it.cavallium.dbengine.database.UpdateReturnMode;
 import it.cavallium.dbengine.database.serialization.KVSerializationFunction;
-import it.cavallium.dbengine.database.serialization.SerializationException;
 import it.cavallium.dbengine.database.serialization.SerializationFunction;
 import it.unimi.dsi.fastutil.objects.Object2ObjectLinkedOpenHashMap;
 import it.unimi.dsi.fastutil.objects.Object2ObjectSortedMap;
@@ -16,261 +17,211 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Objects;
 import java.util.Optional;
+import java.util.function.Consumer;
 import java.util.function.Function;
+import java.util.function.Supplier;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
-import reactor.core.publisher.Flux;
-import reactor.core.publisher.Mono;
-import reactor.util.function.Tuple2;
-import reactor.util.function.Tuples;
 @SuppressWarnings("unused")
-public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends
-		DatabaseStageEntry<Object2ObjectSortedMap<T, U>> {
+public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends DatabaseStageEntry<Object2ObjectSortedMap<T, U>> {
-	Mono<US> at(@Nullable CompositeSnapshot snapshot, T key);
+	@NotNull US at(@Nullable CompositeSnapshot snapshot, T key);
-	default Mono<Boolean> containsKey(@Nullable CompositeSnapshot snapshot, T key) {
-		return Mono.usingWhen(this.at(snapshot, key),
-				stage -> stage.isEmpty(snapshot).map(empty -> !empty),
-				LLUtils::finalizeResource
-		);
+	default boolean containsKey(@Nullable CompositeSnapshot snapshot, T key) {
+		return !this.at(snapshot, key).isEmpty(snapshot);
 	}
-	default Mono<U> getValue(@Nullable CompositeSnapshot snapshot, T key) {
-		return Mono.usingWhen(this.at(snapshot, key),
-				stage -> stage.get(snapshot),
-				LLUtils::finalizeResource
-		);
+	default @Nullable U getValue(@Nullable CompositeSnapshot snapshot, T key) {
+		return this.at(snapshot, key).get(snapshot);
 	}
-	default Mono<U> getValueOrDefault(@Nullable CompositeSnapshot snapshot, T key, Mono<U> defaultValue) {
-		return getValue(snapshot, key).switchIfEmpty(defaultValue).single();
+	default U getValueOrDefault(@Nullable CompositeSnapshot snapshot, T key, U defaultValue) {
+		return Objects.requireNonNullElse(getValue(snapshot, key), defaultValue);
 	}
-	default Mono<Void> putValue(T key, U value) {
-		return Mono.usingWhen(at(null, key).single(), stage -> stage.set(value), LLUtils::finalizeResource);
+	default U getValueOrDefault(@Nullable CompositeSnapshot snapshot, T key, Supplier<U> defaultValue) {
+		return Objects.requireNonNullElseGet(getValue(snapshot, key), defaultValue);
+	}
+	default void putValue(T key, U value) {
+		at(null, key).set(value);
 	}
 	UpdateMode getUpdateMode();
-	default Mono<U> updateValue(T key,
+	default U updateValue(T key,
 			UpdateReturnMode updateReturnMode,
 			SerializationFunction<@Nullable U, @Nullable U> updater) {
-		return Mono.usingWhen(at(null, key).single(),
-				stage -> stage.update(updater, updateReturnMode),
-				LLUtils::finalizeResource
-		);
+		return at(null, key).update(updater, updateReturnMode);
 	}
-	default Flux<Boolean> updateMulti(Flux<T> keys, KVSerializationFunction<T, @Nullable U, @Nullable U> updater) {
-		return keys.flatMapSequential(key -> this.updateValue(key, prevValue -> updater.apply(key, prevValue)));
+	default Stream<Boolean> updateMulti(Stream<T> keys, KVSerializationFunction<T, @Nullable U, @Nullable U> updater) {
+		return keys.parallel().map(key -> this.updateValue(key, prevValue -> updater.apply(key, prevValue)));
 	}
-	default Mono<Boolean> updateValue(T key, SerializationFunction<@Nullable U, @Nullable U> updater) {
-		return updateValueAndGetDelta(key, updater).map(delta -> LLUtils.isDeltaChanged(delta)).single();
+	default boolean updateValue(T key, SerializationFunction<@Nullable U, @Nullable U> updater) {
+		return LLUtils.isDeltaChanged(updateValueAndGetDelta(key, updater));
 	}
-	default Mono<Delta<U>> updateValueAndGetDelta(T key,
-			SerializationFunction<@Nullable U, @Nullable U> updater) {
-		var stageMono = this.at(null, key).single();
-		return stageMono.flatMap(stage -> stage
-				.updateAndGetDelta(updater)
-				.doFinally(s -> stage.close()));
+	default Delta<U> updateValueAndGetDelta(T key, SerializationFunction<@Nullable U, @Nullable U> updater) {
+		return this.at(null, key).updateAndGetDelta(updater);
 	}
-	default Mono<U> putValueAndGetPrevious(T key, U value) {
-		return Mono.usingWhen(at(null, key).single(),
-				stage -> stage.setAndGetPrevious(value),
-				LLUtils::finalizeResource
-		);
+	default @Nullable U putValueAndGetPrevious(T key, @Nullable U value) {
+		return at(null, key).setAndGetPrevious(value);
 	}
 	/**
 	 * @return true if the key was associated with any value, false if the key didn't exist.
 	 */
-	default Mono<Boolean> putValueAndGetChanged(T key, U value) {
-		return Mono
-				.usingWhen(at(null, key).single(), stage -> stage.setAndGetChanged(value), LLUtils::finalizeResource)
-				.single();
+	default boolean putValueAndGetChanged(T key, @Nullable U value) {
+		return at(null, key).setAndGetChanged(value);
 	}
-	default Mono<Void> remove(T key) {
-		return removeAndGetStatus(key).then();
+	default void remove(T key) {
+		removeAndGetStatus(key);
 	}
-	default Mono<U> removeAndGetPrevious(T key) {
-		return Mono.usingWhen(at(null, key), us -> us.clearAndGetPrevious(), LLUtils::finalizeResource);
+	default @Nullable U removeAndGetPrevious(T key) {
+		return at(null, key).clearAndGetPrevious();
 	}
-	default Mono<Boolean> removeAndGetStatus(T key) {
-		return removeAndGetPrevious(key).map(o -> true).defaultIfEmpty(false);
+	default boolean removeAndGetStatus(T key) {
+		return removeAndGetPrevious(key) != null;
 	}
 	/**
 	 * GetMulti must return the elements in sequence!
 	 */
-	default Flux<Optional<U>> getMulti(@Nullable CompositeSnapshot snapshot, Flux<T> keys) {
-		return keys.flatMapSequential(key -> this
-				.getValue(snapshot, key)
-				.map(Optional::of)
-				.defaultIfEmpty(Optional.empty())
-		);
+	default Stream<Optional<U>> getMulti(@Nullable CompositeSnapshot snapshot, Stream<T> keys) {
+		return keys.parallel().map(key -> Optional.ofNullable(this.getValue(snapshot, key)));
 	}
-	default Mono<Void> putMulti(Flux<Entry<T, U>> entries) {
-		return entries.flatMap(entry -> this.putValue(entry.getKey(), entry.getValue())).then();
+	default void putMulti(Stream<Entry<T, U>> entries) {
+		try (var stream = entries.parallel()) {
+			stream.forEach(entry -> this.putValue(entry.getKey(), entry.getValue()));
+		}
 	}
-	Flux<SubStageEntry<T, US>> getAllStages(@Nullable CompositeSnapshot snapshot, boolean smallRange);
+	Stream<SubStageEntry<T, US>> getAllStages(@Nullable CompositeSnapshot snapshot, boolean smallRange);
-	default Flux<Entry<T, U>> getAllValues(@Nullable CompositeSnapshot snapshot, boolean smallRange) {
-		return this
-				.getAllStages(snapshot, smallRange)
-				.flatMapSequential(stage -> stage
-						.getValue()
-						.get(snapshot)
-						.map(value -> Map.entry(stage.getKey(), value))
-						.doFinally(s -> stage.getValue().close())
-				);
+	default Stream<Entry<T, U>> getAllValues(@Nullable CompositeSnapshot snapshot, boolean smallRange) {
+		return this.getAllStages(snapshot, smallRange).parallel().mapMulti((stage, mapper) -> {
+			var val = stage.getValue().get(snapshot);
+			if (val != null) {
+				mapper.accept(Map.entry(stage.getKey(), val));
+			}
+		});
 	}
-	default Mono<Void> setAllValues(Flux<Entry<T, U>> entries) {
-		return setAllValuesAndGetPrevious(entries).then();
+	default void setAllValues(Stream<Entry<T, U>> entries) {
+		consume(setAllValuesAndGetPrevious(entries));
 	}
-	Flux<Entry<T, U>> setAllValuesAndGetPrevious(Flux<Entry<T, U>> entries);
+	Stream<Entry<T, U>> setAllValuesAndGetPrevious(Stream<Entry<T, U>> entries);
-	default Mono<Void> clear() {
-		return setAllValues(Flux.empty());
+	default void clear() {
+		setAllValues(Stream.empty());
 	}
-	default Mono<Void> replaceAllValues(boolean canKeysChange,
-			Function<Entry<T, U>, Mono<Entry<T, U>>> entriesReplacer,
+	default void replaceAllValues(boolean canKeysChange,
+			Function<Entry<T, U>, @NotNull Entry<T, U>> entriesReplacer,
 			boolean smallRange) {
 		if (canKeysChange) {
-			return this.setAllValues(this.getAllValues(null, smallRange).flatMap(entriesReplacer)).then();
+			this.setAllValues(this.getAllValues(null, smallRange).map(entriesReplacer));
 		} else {
-			return this
-					.getAllValues(null, smallRange)
-					.flatMap(entriesReplacer)
-					.flatMap(replacedEntry -> this
-							.at(null, replacedEntry.getKey())
-							.flatMap(stage -> stage
-									.set(replacedEntry.getValue())
-									.doFinally(s -> stage.close())
-							)
-					)
-					.then();
+			this.getAllValues(null, smallRange).map(entriesReplacer)
+					.forEach(replacedEntry -> this.at(null, replacedEntry.getKey()).set(replacedEntry.getValue()));
 		}
 	}
-	default Mono<Void> replaceAll(Function<Entry<T, US>, Mono<Void>> entriesReplacer) {
-		return this
-				.getAllStages(null, false)
-				.flatMap(stage -> entriesReplacer.apply(stage)
-						.doFinally(s -> stage.getValue().close())
-				)
-				.then();
+	default void replaceAll(Consumer<Entry<T, US>> entriesReplacer) {
+		this.getAllStages(null, false).forEach(entriesReplacer);
 	}
@Override @Override
default Mono<Object2ObjectSortedMap<T, U>> setAndGetPrevious(Object2ObjectSortedMap<T, U> value) { default Object2ObjectSortedMap<T, U> setAndGetPrevious(Object2ObjectSortedMap<T, U> value) {
return this Object2ObjectSortedMap<T, U> map;
.setAllValuesAndGetPrevious(Flux.fromIterable(value.entrySet())) if (value == null) {
.collectMap(Entry::getKey, Entry::getValue, Object2ObjectLinkedOpenHashMap::new) map = this.clearAndGetPrevious();
.map(map -> (Object2ObjectSortedMap<T, U>) map) } else {
.filter(map -> !map.isEmpty()); map = this
.setAllValuesAndGetPrevious(value.entrySet().stream())
.collect(Collectors.toMap(Entry::getKey, Entry::getValue, (a, b) -> a, Object2ObjectLinkedOpenHashMap::new));
}
return map;
} }
@Override @Override
default Mono<Boolean> setAndGetChanged(Object2ObjectSortedMap<T, U> value) { default boolean setAndGetChanged(@Nullable Object2ObjectSortedMap<T, U> value) {
return this if (value != null && value.isEmpty()) {
.setAndGetPrevious(value) value = null;
.map(oldValue -> !Objects.equals(oldValue, value.isEmpty() ? null : value)) }
.switchIfEmpty(Mono.fromSupplier(() -> !value.isEmpty())); var prev = this.setAndGetPrevious(value);
if (prev == null) {
return value != null;
} else {
return !Objects.equals(prev, value);
}
} }
@Override @Override
default Mono<Delta<Object2ObjectSortedMap<T, U>>> updateAndGetDelta(SerializationFunction<@Nullable Object2ObjectSortedMap<T, U>, @Nullable Object2ObjectSortedMap<T, U>> updater) { default Delta<Object2ObjectSortedMap<T, U>> updateAndGetDelta(
SerializationFunction<@Nullable Object2ObjectSortedMap<T, U>, @Nullable Object2ObjectSortedMap<T, U>> updater) {
var updateMode = this.getUpdateMode(); var updateMode = this.getUpdateMode();
if (updateMode == UpdateMode.ALLOW_UNSAFE) { if (updateMode == UpdateMode.ALLOW_UNSAFE) {
return this Object2ObjectSortedMap<T, U> v = this
.getAllValues(null, true) .getAllValues(null, true)
.collectMap(Entry::getKey, Entry::getValue, Object2ObjectLinkedOpenHashMap::new) .collect(Collectors.toMap(Entry::getKey, Entry::getValue, (a, b) -> a, Object2ObjectLinkedOpenHashMap::new));
.map(map -> (Object2ObjectSortedMap<T, U>) map)
.single() if (v.isEmpty()) {
.<Tuple2<Optional<Object2ObjectSortedMap<T, U>>, Optional<Object2ObjectSortedMap<T, U>>>>handle((v, sink) -> { v = null;
if (v.isEmpty()) { }
v = null;
} var result = updater.apply(v);
try { if (result != null && result.isEmpty()) {
var result = updater.apply(v); result = null;
if (result != null && result.isEmpty()) { }
result = null; this.setAllValues(result != null ? result.entrySet().stream() : null);
} return new Delta<>(v, result);
sink.next(Tuples.of(Optional.ofNullable(v), Optional.ofNullable(result)));
} catch (SerializationException ex) {
sink.error(ex);
}
})
.flatMap(result -> Mono
.justOrEmpty(result.getT2())
.flatMap(values -> this.setAllValues(Flux.fromIterable(values.entrySet())))
.thenReturn(new Delta<>(result.getT1().orElse(null), result.getT2().orElse(null)))
);
} else if (updateMode == UpdateMode.ALLOW) { } else if (updateMode == UpdateMode.ALLOW) {
return Mono.fromCallable(() -> { throw new UnsupportedOperationException("Maps can't be updated atomically");
throw new UnsupportedOperationException("Maps can't be updated atomically");
});
} else if (updateMode == UpdateMode.DISALLOW) { } else if (updateMode == UpdateMode.DISALLOW) {
return Mono.fromCallable(() -> { throw new UnsupportedOperationException("Map can't be updated because updates are disabled");
throw new UnsupportedOperationException("Map can't be updated because updates are disabled");
});
} else { } else {
return Mono.fromCallable(() -> { throw new UnsupportedOperationException("Unknown update mode: " + updateMode);
throw new UnsupportedOperationException("Unknown update mode: " + updateMode);
});
} }
} }
@Override @Override
default Mono<Object2ObjectSortedMap<T, U>> clearAndGetPrevious() { default Object2ObjectSortedMap<T, U> clearAndGetPrevious() {
return this.setAndGetPrevious(Object2ObjectSortedMaps.emptyMap()); return this.setAndGetPrevious(Object2ObjectSortedMaps.emptyMap());
} }
@Override @Override
default Mono<Object2ObjectSortedMap<T, U>> get(@Nullable CompositeSnapshot snapshot) { default Object2ObjectSortedMap<T, U> get(@Nullable CompositeSnapshot snapshot) {
return this Object2ObjectSortedMap<T, U> map = this
.getAllValues(snapshot, true) .getAllValues(snapshot, true)
.collectMap(Entry::getKey, Entry::getValue, Object2ObjectLinkedOpenHashMap::new) .collect(Collectors.toMap(Entry::getKey, Entry::getValue, (a, b) -> a, Object2ObjectLinkedOpenHashMap::new));
.map(map -> (Object2ObjectSortedMap<T, U>) map) return map.isEmpty() ? null : map;
.filter(map -> !map.isEmpty());
} }
@Override @Override
default Mono<Long> leavesCount(@Nullable CompositeSnapshot snapshot, boolean fast) { default long leavesCount(@Nullable CompositeSnapshot snapshot, boolean fast) {
return this return this.getAllStages(snapshot, false).count();
.getAllStages(snapshot, false)
.doOnNext(stage -> stage.getValue().close())
.count();
} }
/** /**
* Value getter doesn't lock data. Please make sure to lock before getting data. * Value getter doesn't lock data. Please make sure to lock before getting data.
*/ */
default ValueGetterBlocking<T, U> getDbValueGetter(@Nullable CompositeSnapshot snapshot) { default ValueGetterBlocking<T, U> getDbValueGetter(@Nullable CompositeSnapshot snapshot) {
return k -> getValue(snapshot, k).transform(LLUtils::handleDiscard).block(); return k -> getValue(snapshot, k);
} }
default ValueGetter<T, U> getAsyncDbValueGetter(@Nullable CompositeSnapshot snapshot) { default ValueGetter<T, U> getAsyncDbValueGetter(@Nullable CompositeSnapshot snapshot) {
return k -> getValue(snapshot, k); return k -> getValue(snapshot, k);
} }
default ValueTransformer<T, U> getAsyncDbValueTransformer(@Nullable CompositeSnapshot snapshot) {
return keys -> {
var sharedKeys = keys.publish().refCount(2);
var values = DatabaseStageMap.this.getMulti(snapshot, sharedKeys);
return Flux.zip(sharedKeys, values, Map::entry);
};
}
} }
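Taken together, this hunk turns DatabaseStageMap from a Mono/Flux pipeline into plain blocking calls over java.util.stream.Stream. A minimal caller-side sketch (illustrative only; the class name and the String/String map handle are assumptions, not part of this diff):

import java.util.Map;
import java.util.Optional;
import java.util.stream.Stream;

class StageMapUsageExample {
	static void demo(DatabaseStageMap<String, String, ?> map) {
		// Direct boolean result; previously this was a Mono that had to be subscribed
		boolean changed = map.putValueAndGetChanged("k1", "v1");
		System.out.println("changed: " + changed);

		// getMulti now yields a Stream of Optionals; close it like any other Stream
		try (Stream<Optional<String>> values = map.getMulti(null, Stream.of("k1", "k2"))) {
			values.forEachOrdered(v -> System.out.println(v.orElse("<missing>")));
		}

		// putMulti consumes the entry stream eagerly instead of returning Mono<Void>
		map.putMulti(Stream.of(Map.entry("k2", "v2"), Map.entry("k3", "v3")));
	}
}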
@@ -1,9 +1,5 @@
 package it.cavallium.dbengine.database.collections;

-import io.netty5.util.Resource;
-import it.cavallium.dbengine.client.BadBlock;
-import reactor.core.publisher.Mono;

 public interface DatabaseStageWithEntry<T> {

 	DatabaseStageEntry<T> entry();
@@ -1,16 +1,12 @@
 package it.cavallium.dbengine.database.collections;

-import io.netty5.buffer.Buffer;
-import io.netty5.util.Send;
+import it.cavallium.dbengine.buffers.Buf;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.LLDictionary;
 import org.jetbrains.annotations.Nullable;
-import reactor.core.publisher.Mono;

 public interface SubStageGetter<U, US extends DatabaseStage<U>> {

-	Mono<US> subStage(LLDictionary dictionary,
-			@Nullable CompositeSnapshot snapshot,
-			Mono<Buffer> prefixKey);
+	US subStage(LLDictionary dictionary, @Nullable CompositeSnapshot snapshot, Buf prefixKey);
 }
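The synchronous signature makes implementations one-liners. A hedged sketch of what an implementation now looks like (it mirrors SubStageGetterSingle further down in this commit; the class name and serializer field are illustrative):

public class ExampleSingleGetter<T> implements SubStageGetter<T, DatabaseStageEntry<T>> {

	private final Serializer<T> serializer;

	public ExampleSingleGetter(Serializer<T> serializer) {
		this.serializer = serializer;
	}

	@Override
	public DatabaseStageEntry<T> subStage(LLDictionary dictionary,
			@Nullable CompositeSnapshot snapshot,
			Buf prefixKey) {
		// The sub-stage is built and returned directly: no Mono wrapper,
		// and no BufSupplier.ofOwned(...) ownership bookkeeping
		return new DatabaseMapSingle<>(dictionary, prefixKey, serializer);
	}
}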
@@ -1,19 +1,13 @@
 package it.cavallium.dbengine.database.collections;

-import io.netty5.buffer.Buffer;
-import io.netty5.util.Resource;
-import io.netty5.util.Send;
+import it.cavallium.dbengine.buffers.Buf;
 import it.cavallium.dbengine.client.CompositeSnapshot;
-import it.cavallium.dbengine.database.BufSupplier;
 import it.cavallium.dbengine.database.LLDictionary;
-import it.cavallium.dbengine.database.LLUtils;
 import it.cavallium.dbengine.database.serialization.Serializer;
 import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
 import it.unimi.dsi.fastutil.objects.Object2ObjectSortedMap;
-import java.util.Map;
 import java.util.function.Function;
 import org.jetbrains.annotations.Nullable;
-import reactor.core.publisher.Mono;

 @SuppressWarnings("unused")
 public class SubStageGetterHashMap<T, U, TH> implements
@@ -35,16 +29,16 @@ public class SubStageGetterHashMap<T, U, TH> implements
 	}

 	@Override
-	public Mono<DatabaseMapDictionaryHashed<T, U, TH>> subStage(LLDictionary dictionary,
+	public DatabaseMapDictionaryHashed<T, U, TH> subStage(LLDictionary dictionary,
 			@Nullable CompositeSnapshot snapshot,
-			Mono<Buffer> prefixKeyMono) {
-		return prefixKeyMono.map(prefixKey -> DatabaseMapDictionaryHashed.tail(dictionary,
-				BufSupplier.ofOwned(prefixKey),
+			Buf prefixKey) {
+		return DatabaseMapDictionaryHashed.tail(dictionary,
+				prefixKey,
 				keySerializer,
 				valueSerializer,
 				keyHashFunction,
 				keyHashSerializer
-		));
+		);
 	}

 	public int getKeyHashBinaryLength() {
@@ -1,22 +1,16 @@
 package it.cavallium.dbengine.database.collections;

-import io.netty5.buffer.Buffer;
-import io.netty5.util.Resource;
-import io.netty5.util.Send;
+import it.cavallium.dbengine.buffers.Buf;
 import it.cavallium.dbengine.client.CompositeSnapshot;
-import it.cavallium.dbengine.database.BufSupplier;
 import it.cavallium.dbengine.database.LLDictionary;
-import it.cavallium.dbengine.database.LLUtils;
 import it.cavallium.dbengine.database.collections.DatabaseEmpty.Nothing;
 import it.cavallium.dbengine.database.serialization.Serializer;
 import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
 import it.unimi.dsi.fastutil.objects.Object2ObjectSortedMap;
-import java.util.Map;
 import java.util.function.Function;
 import org.jetbrains.annotations.Nullable;
-import reactor.core.publisher.Mono;

-@SuppressWarnings({"unused", "ClassCanBeRecord"})
+@SuppressWarnings({"unused"})
 public class SubStageGetterHashSet<T, TH> implements
 		SubStageGetter<Object2ObjectSortedMap<T, Nothing>, DatabaseSetDictionaryHashed<T, TH>> {
@@ -33,15 +27,15 @@ public class SubStageGetterHashSet<T, TH> implements
 	}

 	@Override
-	public Mono<DatabaseSetDictionaryHashed<T, TH>> subStage(LLDictionary dictionary,
+	public DatabaseSetDictionaryHashed<T, TH> subStage(LLDictionary dictionary,
 			@Nullable CompositeSnapshot snapshot,
-			Mono<Buffer> prefixKeyMono) {
-		return prefixKeyMono.map(prefixKey -> DatabaseSetDictionaryHashed.tail(dictionary,
-				BufSupplier.ofOwned(prefixKey),
+			Buf prefixKey) {
+		return DatabaseSetDictionaryHashed.tail(dictionary,
+				prefixKey,
 				keySerializer,
 				keyHashFunction,
 				keyHashSerializer
-		));
+		);
 	}

 	public int getKeyHashBinaryLength() {
@@ -1,14 +1,12 @@
 package it.cavallium.dbengine.database.collections;

-import io.netty5.buffer.Buffer;
+import it.cavallium.dbengine.buffers.Buf;
 import it.cavallium.dbengine.client.CompositeSnapshot;
-import it.cavallium.dbengine.database.BufSupplier;
 import it.cavallium.dbengine.database.LLDictionary;
 import it.cavallium.dbengine.database.serialization.Serializer;
 import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
 import it.unimi.dsi.fastutil.objects.Object2ObjectSortedMap;
 import org.jetbrains.annotations.Nullable;
-import reactor.core.publisher.Mono;

 public class SubStageGetterMap<T, U> implements
 		SubStageGetter<Object2ObjectSortedMap<T, U>, DatabaseMapDictionary<T, U>> {
@@ -23,14 +21,10 @@ public class SubStageGetterMap<T, U> implements
 	}

 	@Override
-	public Mono<DatabaseMapDictionary<T, U>> subStage(LLDictionary dictionary,
+	public DatabaseMapDictionary<T, U> subStage(LLDictionary dictionary,
 			@Nullable CompositeSnapshot snapshot,
-			Mono<Buffer> prefixKeyMono) {
-		return prefixKeyMono.map(prefixKey -> DatabaseMapDictionary.tail(dictionary,
-				BufSupplier.ofOwned(prefixKey),
-				keySerializer,
-				valueSerializer
-		));
+			Buf prefixKey) {
+		return DatabaseMapDictionary.tail(dictionary, prefixKey, keySerializer, valueSerializer);
 	}

 	public int getKeyBinaryLength() {
@@ -1,17 +1,11 @@
 package it.cavallium.dbengine.database.collections;

-import io.netty5.buffer.Buffer;
-import io.netty5.util.Resource;
-import io.netty5.util.Send;
+import it.cavallium.dbengine.buffers.Buf;
 import it.cavallium.dbengine.client.CompositeSnapshot;
-import it.cavallium.dbengine.database.BufSupplier;
 import it.cavallium.dbengine.database.LLDictionary;
-import it.cavallium.dbengine.database.LLUtils;
 import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
 import it.unimi.dsi.fastutil.objects.Object2ObjectSortedMap;
-import java.util.Map;
 import org.jetbrains.annotations.Nullable;
-import reactor.core.publisher.Mono;

 public class SubStageGetterMapDeep<T, U, US extends DatabaseStage<U>> implements
 		SubStageGetter<Object2ObjectSortedMap<T, U>, DatabaseMapDictionaryDeep<T, U, US>> {
@@ -41,15 +35,15 @@ public class SubStageGetterMapDeep<T, U, US extends DatabaseStage<U>> implements
 	}

 	@Override
-	public Mono<DatabaseMapDictionaryDeep<T, U, US>> subStage(LLDictionary dictionary,
+	public DatabaseMapDictionaryDeep<T, U, US> subStage(LLDictionary dictionary,
 			@Nullable CompositeSnapshot snapshot,
-			Mono<Buffer> prefixKeyMono) {
-		return prefixKeyMono.map(prefixKey -> DatabaseMapDictionaryDeep.deepIntermediate(dictionary,
-				BufSupplier.ofOwned(prefixKey),
+			Buf prefixKey) {
+		return DatabaseMapDictionaryDeep.deepIntermediate(dictionary,
+				prefixKey,
 				keySerializer,
 				subStageGetter,
 				keyExtLength
-		));
+		);
 	}

 	public int getKeyBinaryLength() {
@@ -1,17 +1,12 @@
 package it.cavallium.dbengine.database.collections;

-import io.netty5.buffer.Buffer;
-import io.netty5.util.Resource;
-import io.netty5.util.Send;
+import it.cavallium.dbengine.buffers.Buf;
 import it.cavallium.dbengine.client.CompositeSnapshot;
-import it.cavallium.dbengine.database.BufSupplier;
 import it.cavallium.dbengine.database.LLDictionary;
 import it.cavallium.dbengine.database.collections.DatabaseEmpty.Nothing;
 import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
 import it.unimi.dsi.fastutil.objects.Object2ObjectSortedMap;
-import java.util.Map;
 import org.jetbrains.annotations.Nullable;
-import reactor.core.publisher.Mono;

 public class SubStageGetterSet<T> implements
 		SubStageGetter<Object2ObjectSortedMap<T, Nothing>, DatabaseSetDictionary<T>> {
@@ -23,13 +18,10 @@ public class SubStageGetterSet<T> implements
 	}

 	@Override
-	public Mono<DatabaseSetDictionary<T>> subStage(LLDictionary dictionary,
+	public DatabaseSetDictionary<T> subStage(LLDictionary dictionary,
 			@Nullable CompositeSnapshot snapshot,
-			Mono<Buffer> prefixKeyMono) {
-		return prefixKeyMono.map(prefixKey -> DatabaseSetDictionary.tail(dictionary,
-				BufSupplier.ofOwned(prefixKey),
-				keySerializer
-		));
+			Buf prefixKey) {
+		return DatabaseSetDictionary.tail(dictionary, prefixKey, keySerializer);
 	}

 	public int getKeyBinaryLength() {
@@ -1,13 +1,10 @@
 package it.cavallium.dbengine.database.collections;

-import io.netty5.buffer.Buffer;
-import io.netty5.util.Send;
+import it.cavallium.dbengine.buffers.Buf;
 import it.cavallium.dbengine.client.CompositeSnapshot;
-import it.cavallium.dbengine.database.BufSupplier;
 import it.cavallium.dbengine.database.LLDictionary;
 import it.cavallium.dbengine.database.serialization.Serializer;
 import org.jetbrains.annotations.Nullable;
-import reactor.core.publisher.Mono;

 public class SubStageGetterSingle<T> implements SubStageGetter<T, DatabaseStageEntry<T>> {
@@ -18,13 +15,10 @@ public class SubStageGetterSingle<T> implements SubStageGetter<T, DatabaseStageE
 	}

 	@Override
-	public Mono<DatabaseStageEntry<T>> subStage(LLDictionary dictionary,
+	public DatabaseStageEntry<T> subStage(LLDictionary dictionary,
 			@Nullable CompositeSnapshot snapshot,
-			Mono<Buffer> keyPrefixMono) {
-		return keyPrefixMono.map(keyPrefix -> new DatabaseMapSingle<>(dictionary,
-				BufSupplier.ofOwned(keyPrefix),
-				serializer
-		));
+			Buf keyPrefix) {
+		return new DatabaseMapSingle<>(dictionary, keyPrefix, serializer);
 	}
 }
@@ -1,12 +1,11 @@
 package it.cavallium.dbengine.database.collections;

-import io.netty5.buffer.Buffer;
-import io.netty5.util.Send;
+import it.cavallium.dbengine.buffers.Buf;
 import it.cavallium.dbengine.database.serialization.Serializer;

-public class SubStageGetterSingleBytes extends SubStageGetterSingle<Send<Buffer>> {
+public class SubStageGetterSingleBytes extends SubStageGetterSingle<Buf> {

 	public SubStageGetterSingleBytes() {
-		super(Serializer.NOOP_SEND_SERIALIZER);
+		super(Serializer.NOOP_SERIALIZER);
 	}
 }
@@ -1,11 +1,11 @@
 package it.cavallium.dbengine.database.collections;

-import reactor.core.publisher.Mono;
+import org.jetbrains.annotations.Nullable;

 public interface ValueGetter<KEY, VALUE> {

 	/**
 	 * Can return Mono error IOException
 	 */
-	Mono<VALUE> get(KEY key);
+	@Nullable VALUE get(KEY key);
 }
@@ -4,5 +4,5 @@ import java.io.IOException;

 public interface ValueGetterBlocking<KEY, VALUE> {

-	VALUE get(KEY key) throws IOException;
+	VALUE get(KEY key);
 }
@@ -1,16 +0,0 @@
-package it.cavallium.dbengine.database.collections;
-
-import java.util.Map.Entry;
-import java.util.Optional;
-import reactor.core.publisher.Flux;
-import reactor.core.publisher.Mono;
-import reactor.util.function.Tuple2;
-import reactor.util.function.Tuple3;
-
-public interface ValueTransformer<KEY, VALUE> {
-
-	/**
-	 * Can return Flux error IOException
-	 */
-	Flux<Entry<KEY, Optional<VALUE>>> transform(Flux<KEY> keys);
-}
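ValueTransformer is deleted outright rather than ported. A call site that previously zipped a key Flux with getMulti results can do the same with plain collections; a hypothetical sketch (the helper name and shape are illustrative, not part of this commit):

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;

class TransformExample {
	static <K, V> List<Entry<K, Optional<V>>> transform(ValueGetter<K, V> getter, List<K> keys) {
		List<Entry<K, Optional<V>>> out = new ArrayList<>(keys.size());
		for (K key : keys) {
			// One synchronous lookup per key; no Flux.zip bookkeeping needed
			out.add(Map.entry(key, Optional.ofNullable(getter.get(key))));
		}
		return out;
	}
}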
@@ -1,17 +1,13 @@
 package it.cavallium.dbengine.database.collections;

-import io.netty5.buffer.Buffer;
-import io.netty5.buffer.BufferAllocator;
-import io.netty5.buffer.CompositeBuffer;
-import io.netty5.util.Send;
-import it.cavallium.dbengine.database.LLUtils;
+import it.cavallium.dbengine.buffers.BufDataInput;
+import it.cavallium.dbengine.buffers.BufDataOutput;
 import it.cavallium.dbengine.database.serialization.SerializationException;
 import it.cavallium.dbengine.database.serialization.Serializer;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Objects;
 import org.jetbrains.annotations.NotNull;
-import org.jetbrains.annotations.Nullable;

 class ValueWithHashSerializer<X, Y> implements Serializer<Entry<X, Y>> {
@@ -26,17 +22,17 @@ class ValueWithHashSerializer<X, Y> implements Serializer<Entry<X, Y>> {
 	}

 	@Override
-	public @NotNull Entry<X, Y> deserialize(@NotNull Buffer serialized) throws SerializationException {
-		Objects.requireNonNull(serialized);
-		X deserializedKey = keySuffixSerializer.deserialize(serialized);
-		Y deserializedValue = valueSerializer.deserialize(serialized);
+	public @NotNull Entry<X, Y> deserialize(@NotNull BufDataInput in) throws SerializationException {
+		Objects.requireNonNull(in);
+		X deserializedKey = keySuffixSerializer.deserialize(in);
+		Y deserializedValue = valueSerializer.deserialize(in);
 		return Map.entry(deserializedKey, deserializedValue);
 	}

 	@Override
-	public void serialize(@NotNull Entry<X, Y> deserialized, Buffer output) throws SerializationException {
-		keySuffixSerializer.serialize(deserialized.getKey(), output);
-		valueSerializer.serialize(deserialized.getValue(), output);
+	public void serialize(@NotNull Entry<X, Y> deserialized, BufDataOutput out) throws SerializationException {
+		keySuffixSerializer.serialize(deserialized.getKey(), out);
+		valueSerializer.serialize(deserialized.getValue(), out);
 	}

 	@Override
@@ -1,8 +1,7 @@
 package it.cavallium.dbengine.database.collections;

-import io.netty5.buffer.Buffer;
-import io.netty5.buffer.BufferAllocator;
-import io.netty5.util.Send;
+import it.cavallium.dbengine.buffers.BufDataInput;
+import it.cavallium.dbengine.buffers.BufDataOutput;
 import it.cavallium.dbengine.database.serialization.SerializationException;
 import it.cavallium.dbengine.database.serialization.Serializer;
 import it.unimi.dsi.fastutil.objects.ObjectArraySet;
@@ -11,7 +10,6 @@ import java.util.Objects;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.jetbrains.annotations.NotNull;
-import org.jetbrains.annotations.Nullable;

 class ValuesSetSerializer<X> implements Serializer<ObjectArraySet<X>> {
@@ -24,17 +22,17 @@ class ValuesSetSerializer<X> implements Serializer<ObjectArraySet<X>> {
 	}

 	@Override
-	public @NotNull ObjectArraySet<X> deserialize(@NotNull Buffer serialized) throws SerializationException {
+	public @NotNull ObjectArraySet<X> deserialize(@NotNull BufDataInput in) throws SerializationException {
 		try {
-			Objects.requireNonNull(serialized);
-			if (serialized.readableBytes() == 0) {
+			Objects.requireNonNull(in);
+			if (in.available() <= 0) {
 				logger.error("Can't deserialize, 0 bytes are readable");
 				return new ObjectArraySet<>();
 			}
-			int entriesLength = serialized.readInt();
+			int entriesLength = in.readInt();
 			ArrayList<X> deserializedElements = new ArrayList<>(entriesLength);
 			for (int i = 0; i < entriesLength; i++) {
-				var deserializationResult = entrySerializer.deserialize(serialized);
+				var deserializationResult = entrySerializer.deserialize(in);
 				deserializedElements.add(deserializationResult);
 			}
 			return new ObjectArraySet<>(deserializedElements);
@@ -45,10 +43,10 @@ class ValuesSetSerializer<X> implements Serializer<ObjectArraySet<X>> {
 	}

 	@Override
-	public void serialize(@NotNull ObjectArraySet<X> deserialized, Buffer output) throws SerializationException {
-		output.writeInt(deserialized.size());
+	public void serialize(@NotNull ObjectArraySet<X> deserialized, BufDataOutput out) throws SerializationException {
+		out.writeInt(deserialized.size());
 		for (X entry : deserialized) {
-			entrySerializer.serialize(entry, output);
+			entrySerializer.serialize(entry, out);
 		}
 	}
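The Buffer-to-BufDataInput/BufDataOutput move makes serializers read like plain java.io.DataInput/DataOutput code. A hedged sketch of the same length-prefixed layout ValuesSetSerializer uses; readUTF/writeUTF are assumed DataInput/DataOutput-style members of the Buf streams and do not appear in this diff:

import java.util.ArrayList;
import java.util.List;

class StringListCodec {
	static void writeStrings(BufDataOutput out, List<String> list) {
		out.writeInt(list.size());   // size header, as in serialize() above
		for (String s : list) {
			out.writeUTF(s);           // assumed DataOutput-style helper
		}
	}

	static List<String> readStrings(BufDataInput in) {
		int size = in.readInt();     // mirrors the header written above
		List<String> out = new ArrayList<>(size);
		for (int i = 0; i < size; i++) {
			out.add(in.readUTF());     // assumed DataInput-style helper
		}
		return out;
	}
}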
@@ -1,27 +1,16 @@
 package it.cavallium.dbengine.database.disk;

-import static io.netty5.buffer.StandardAllocationTypes.OFF_HEAP;
-import static it.cavallium.dbengine.database.LLUtils.INITIAL_DIRECT_READ_BYTE_BUF_SIZE_BYTES;
-import static it.cavallium.dbengine.database.LLUtils.isReadOnlyDirect;
-import static java.lang.Boolean.parseBoolean;
-import static java.lang.System.getProperty;
 import static java.util.Objects.requireNonNull;
-import static org.rocksdb.KeyMayExist.KeyMayExistEnum.kExistsWithValue;
-import static org.rocksdb.KeyMayExist.KeyMayExistEnum.kExistsWithoutValue;

 import io.micrometer.core.instrument.Counter;
 import io.micrometer.core.instrument.DistributionSummary;
 import io.micrometer.core.instrument.MeterRegistry;
 import io.micrometer.core.instrument.Timer;
-import io.netty5.buffer.Buffer;
-import io.netty5.buffer.BufferAllocator;
-import io.netty5.buffer.BufferComponent;
-import io.netty5.buffer.DefaultBufferAllocators;
+import it.cavallium.dbengine.buffers.Buf;
 import it.cavallium.dbengine.database.LLRange;
 import it.cavallium.dbengine.database.LLUtils;
 import it.cavallium.dbengine.database.RepeatedElementList;
 import it.cavallium.dbengine.database.disk.rocksdb.RocksIteratorObj;
-import it.cavallium.dbengine.database.serialization.SerializationException;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.nio.charset.StandardCharsets;
@@ -37,12 +26,9 @@ import org.rocksdb.AbstractImmutableNativeReference;
 import org.rocksdb.AbstractSlice;
 import org.rocksdb.ColumnFamilyHandle;
 import org.rocksdb.CompactRangeOptions;
-import org.rocksdb.DirectSlice;
 import org.rocksdb.FlushOptions;
 import org.rocksdb.Holder;
 import org.rocksdb.KeyMayExist;
-import org.rocksdb.KeyMayExist.KeyMayExistEnum;
-import org.rocksdb.Range;
 import org.rocksdb.ReadOptions;
 import org.rocksdb.RocksDB;
 import org.rocksdb.RocksDBException;
@@ -53,23 +39,15 @@ import org.rocksdb.Transaction;
 import org.rocksdb.TransactionOptions;
 import org.rocksdb.WriteBatch;
 import org.rocksdb.WriteOptions;
-import reactor.core.scheduler.Schedulers;

 public sealed abstract class AbstractRocksDBColumn<T extends RocksDB> implements RocksDBColumn
 		permits StandardRocksDBColumn, OptimisticRocksDBColumn, PessimisticRocksDBColumn {

-	/**
-	 * Default: true
-	 */
-	private static final boolean USE_DIRECT_BUFFER_BOUNDS = true;
 	private static final byte[] NO_DATA = new byte[0];

 	protected static final UpdateAtomicResult RESULT_NOTHING = new UpdateAtomicResultNothing();

 	protected final Logger logger = LogManager.getLogger(this.getClass());

 	private final T db;
-	private final boolean nettyDirect;
-	private final BufferAllocator alloc;
 	private final ColumnFamilyHandle cfh;

 	protected final MeterRegistry meterRegistry;
@@ -103,15 +81,11 @@ public sealed abstract class AbstractRocksDBColumn<T extends RocksDB> implements
 	private final DBColumnKeyMayExistGetter keyMayExistGetter;

 	public AbstractRocksDBColumn(T db,
-			boolean nettyDirect,
-			BufferAllocator alloc,
 			String databaseName,
 			ColumnFamilyHandle cfh,
 			MeterRegistry meterRegistry,
 			StampedLock closeLock) {
 		this.db = db;
-		this.nettyDirect = nettyDirect && alloc.getAllocationType() == OFF_HEAP;
-		this.alloc = alloc;
 		this.cfh = cfh;
 		String columnName;
 		try {
@@ -253,18 +227,10 @@ public sealed abstract class AbstractRocksDBColumn<T extends RocksDB> implements
 	/**
 	 * This method should not modify or move the writerIndex/readerIndex of the key
 	 */
-	static AbstractSlice<?> setIterateBound(boolean allowNettyDirect,
-			ReadOptions readOpts, IterateBound boundType, Buffer key) {
+	static AbstractSlice<?> setIterateBound(ReadOptions readOpts, IterateBound boundType, Buf key) {
 		requireNonNull(key);
 		AbstractSlice<?> slice;
-		if (allowNettyDirect && USE_DIRECT_BUFFER_BOUNDS && isReadOnlyDirect(key)) {
-			ByteBuffer keyInternalByteBuffer = ((BufferComponent) key).readableBuffer();
-			assert keyInternalByteBuffer.position() == 0;
-			slice = new DirectSlice(keyInternalByteBuffer, key.readableBytes());
-			assert slice.size() == key.readableBytes();
-		} else {
-			slice = new Slice(requireNonNull(LLUtils.toArray(key)));
-		}
+		slice = new Slice(requireNonNull(LLUtils.asArray(key)));
 		if (boundType == IterateBound.LOWER) {
 			readOpts.setIterateLowerBound(slice);
 		} else {
@@ -282,23 +248,22 @@ public sealed abstract class AbstractRocksDBColumn<T extends RocksDB> implements
 	/**
 	 * This method should not modify or move the writerIndex/readerIndex of the buffers inside the range
 	 */
+	@Override
 	@NotNull
-	public RocksIteratorObj newRocksIterator(boolean allowNettyDirect,
-			ReadOptions readOptions,
-			LLRange range,
-			boolean reverse) throws RocksDBException {
-		assert !Schedulers.isInNonBlockingThread() : "Called getRocksIterator in a nonblocking thread";
-		var rocksIterator = this.newIterator(readOptions, range.getMinUnsafe(), range.getMaxUnsafe());
+	public RocksIteratorObj newRocksIterator(ReadOptions readOptions, LLRange range, boolean reverse)
+			throws RocksDBException {
+		assert !LLUtils.isInNonBlockingThread() : "Called getRocksIterator in a nonblocking thread";
+		var rocksIterator = this.newIterator(readOptions, range.getMin(), range.getMax());
 		try {
 			if (reverse) {
 				if (!LLLocalDictionary.PREFER_AUTO_SEEK_BOUND && range.hasMax()) {
-					rocksIterator.seekFrom(range.getMaxUnsafe());
+					rocksIterator.seekFrom(range.getMax());
 				} else {
 					rocksIterator.seekToLast();
 				}
 			} else {
 				if (!LLLocalDictionary.PREFER_AUTO_SEEK_BOUND && range.hasMin()) {
-					rocksIterator.seekTo(range.getMinUnsafe());
+					rocksIterator.seekTo(range.getMin());
 				} else {
 					rocksIterator.seekToFirst();
 				}
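With DirectSlice gone, every iterate bound is a byte[]-backed org.rocksdb.Slice. A standalone sketch of the bounded-iteration setup this file now performs (class and method names are illustrative):

import org.rocksdb.ReadOptions;
import org.rocksdb.Slice;

class IterateBoundExample {
	static ReadOptions boundedReadOptions(byte[] lowerKey, byte[] upperKey) {
		ReadOptions readOpts = new ReadOptions();
		// Heap-array slices: no off-heap buffer lifetime tracking is required,
		// but callers must keep the options and slices open until iteration ends
		readOpts.setIterateLowerBound(new Slice(lowerKey));
		readOpts.setIterateUpperBound(new Slice(upperKey));
		return readOpts;
	}
}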
@@ -331,14 +296,8 @@ public sealed abstract class AbstractRocksDBColumn<T extends RocksDB> implements
 		RocksDBUtils.ensureOwned(rocksObject);
 	}

-	protected void ensureOwned(Buffer buffer) {
-		if (buffer != null && !buffer.isAccessible()) {
-			throw new IllegalStateException("Buffer is not accessible");
-		}
-	}
-
 	@Override
-	public @Nullable Buffer get(@NotNull ReadOptions readOptions, Buffer key) throws RocksDBException {
+	public @Nullable Buf get(@NotNull ReadOptions readOptions, Buf key) throws RocksDBException {
 		var closeReadLock = closeLock.readLock();
 		try {
 			ensureOpen();
@@ -350,221 +309,74 @@ public sealed abstract class AbstractRocksDBColumn<T extends RocksDB> implements
 	}

 	@Override
-	public void put(@NotNull WriteOptions writeOptions, Buffer key, Buffer value) throws RocksDBException {
+	public void put(@NotNull WriteOptions writeOptions, Buf key, Buf value) throws RocksDBException {
 		var closeReadLock = closeLock.readLock();
 		try {
 			ensureOpen();
 			ensureOwned(writeOptions);
-			assert key.isAccessible();
-			assert value.isAccessible();
-			this.keyBufferSize.record(key.readableBytes());
-			this.writeValueBufferSize.record(value.readableBytes());
-			if (nettyDirect) {
-				// Get the key nio buffer to pass to RocksDB
-				ByteBuffer keyNioBuffer;
-				boolean mustCloseKey;
-				{
-					if (!LLUtils.isReadOnlyDirect(key)) {
-						// If the nio buffer is not available, copy the netty buffer into a new direct buffer
-						mustCloseKey = true;
-						var directKey = DefaultBufferAllocators.offHeapAllocator().allocate(key.readableBytes());
-						key.copyInto(key.readerOffset(), directKey, 0, key.readableBytes());
-						key = directKey;
-					} else {
-						mustCloseKey = false;
-					}
-					keyNioBuffer = ((BufferComponent) key).readableBuffer();
-					assert keyNioBuffer.isDirect();
-					assert keyNioBuffer.limit() == key.readableBytes();
-				}
-				try {
-					// Get the value nio buffer to pass to RocksDB
-					ByteBuffer valueNioBuffer;
-					boolean mustCloseValue;
-					{
-						if (!LLUtils.isReadOnlyDirect(value)) {
-							// If the nio buffer is not available, copy the netty buffer into a new direct buffer
-							mustCloseValue = true;
-							var directValue = DefaultBufferAllocators.offHeapAllocator().allocate(value.readableBytes());
-							value.copyInto(value.readerOffset(), directValue, 0, value.readableBytes());
-							value = directValue;
-						} else {
-							mustCloseValue = false;
-						}
-						valueNioBuffer = ((BufferComponent) value).readableBuffer();
-						assert valueNioBuffer.isDirect();
-						assert valueNioBuffer.limit() == value.readableBytes();
-					}
-					try {
-						db.put(cfh, writeOptions, keyNioBuffer, valueNioBuffer);
-					} finally {
-						if (mustCloseValue) {
-							value.close();
-						}
-					}
-				} finally {
-					if (mustCloseKey) {
-						key.close();
-					}
-				}
-			} else {
-				db.put(cfh, writeOptions, LLUtils.toArray(key), LLUtils.toArray(value));
-			}
+			this.keyBufferSize.record(key.size());
+			this.writeValueBufferSize.record(value.size());
+			db.put(cfh, writeOptions, LLUtils.asArray(key), LLUtils.asArray(value));
 		} finally {
 			closeLock.unlockRead(closeReadLock);
 		}
 	}
 	@Override
-	public boolean exists(@NotNull ReadOptions readOptions, Buffer key) throws RocksDBException {
+	public boolean exists(@NotNull ReadOptions readOptions, Buf key) throws RocksDBException {
 		var closeReadLock = closeLock.readLock();
 		try {
 			ensureOpen();
 			ensureOwned(readOptions);
-			if (nettyDirect) {
-				// Get the key nio buffer to pass to RocksDB
-				ByteBuffer keyNioBuffer;
-				boolean mustCloseKey;
-				{
-					if (!LLUtils.isReadOnlyDirect(key)) {
-						// If the nio buffer is not available, copy the netty buffer into a new direct buffer
-						mustCloseKey = true;
-						var directKey = DefaultBufferAllocators.offHeapAllocator().allocate(key.readableBytes());
-						key.copyInto(key.readerOffset(), directKey, 0, key.readableBytes());
-						key = directKey;
-					} else {
-						mustCloseKey = false;
-					}
-					keyNioBuffer = ((BufferComponent) key).readableBuffer();
-					assert keyNioBuffer.isDirect();
-					assert keyNioBuffer.limit() == key.readableBytes();
-				}
-				try {
-					if (db.keyMayExist(cfh, keyNioBuffer)) {
-						int size = db.get(cfh, readOptions, keyNioBuffer.position(0), LLUtils.EMPTY_BYTE_BUFFER);
-						boolean found = size != RocksDB.NOT_FOUND;
-						if (found) {
-							readValueFoundWithBloomSimpleBufferSize.record(size);
-							return true;
-						} else {
-							readValueNotFoundWithMayExistBloomBufferSize.record(0);
-							return false;
-						}
-					} else {
-						readValueNotFoundWithBloomBufferSize.record(0);
-						return false;
-					}
-				} finally {
-					if (mustCloseKey) {
-						key.close();
-					}
-				}
-			} else {
-				int size = RocksDB.NOT_FOUND;
-				byte[] keyBytes = LLUtils.toArray(key);
-				Holder<byte[]> data = new Holder<>();
-				boolean mayExistHit = false;
-				if (db.keyMayExist(cfh, readOptions, keyBytes, data)) {
-					mayExistHit = true;
-					if (data.getValue() != null) {
-						size = data.getValue().length;
-					} else {
-						size = db.get(cfh, readOptions, keyBytes, NO_DATA);
-					}
-				}
-				boolean found = size != RocksDB.NOT_FOUND;
-				if (found) {
-					readValueFoundWithBloomSimpleBufferSize.record(size);
-				} else {
-					if (mayExistHit) {
-						readValueNotFoundWithMayExistBloomBufferSize.record(0);
-					} else {
-						readValueNotFoundWithBloomBufferSize.record(0);
-					}
-				}
-				return found;
-			}
+			int size = RocksDB.NOT_FOUND;
+			byte[] keyBytes = LLUtils.asArray(key);
+			Holder<byte[]> data = new Holder<>();
+			boolean mayExistHit = false;
+			if (db.keyMayExist(cfh, readOptions, keyBytes, data)) {
+				mayExistHit = true;
+				if (data.getValue() != null) {
+					size = data.getValue().length;
+				} else {
+					size = db.get(cfh, readOptions, keyBytes, NO_DATA);
+				}
+			}
+			boolean found = size != RocksDB.NOT_FOUND;
+			if (found) {
+				readValueFoundWithBloomSimpleBufferSize.record(size);
+			} else {
+				if (mayExistHit) {
+					readValueNotFoundWithMayExistBloomBufferSize.record(0);
+				} else {
+					readValueNotFoundWithBloomBufferSize.record(0);
+				}
+			}
+			return found;
 		} finally {
 			closeLock.unlockRead(closeReadLock);
 		}
 	}
 	@Override
-	public boolean mayExists(@NotNull ReadOptions readOptions, Buffer key) throws RocksDBException {
+	public boolean mayExists(@NotNull ReadOptions readOptions, Buf key) throws RocksDBException {
 		var closeReadLock = closeLock.readLock();
 		try {
 			ensureOpen();
 			ensureOwned(readOptions);
-			if (nettyDirect) {
-				// Get the key nio buffer to pass to RocksDB
-				ByteBuffer keyNioBuffer;
-				boolean mustCloseKey;
-				{
-					if (!LLUtils.isReadOnlyDirect(key)) {
-						// If the nio buffer is not available, copy the netty buffer into a new direct buffer
-						mustCloseKey = true;
-						var directKey = DefaultBufferAllocators.offHeapAllocator().allocate(key.readableBytes());
-						key.copyInto(key.readerOffset(), directKey, 0, key.readableBytes());
-						key = directKey;
-					} else {
-						mustCloseKey = false;
-					}
-					keyNioBuffer = ((BufferComponent) key).readableBuffer();
-					assert keyNioBuffer.isDirect();
-					assert keyNioBuffer.limit() == key.readableBytes();
-				}
-				try {
-					return db.keyMayExist(cfh, readOptions, keyNioBuffer);
-				} finally {
-					if (mustCloseKey) {
-						key.close();
-					}
-				}
-			} else {
-				byte[] keyBytes = LLUtils.toArray(key);
-				return db.keyMayExist(cfh, readOptions, keyBytes, null);
-			}
+			byte[] keyBytes = LLUtils.asArray(key);
+			return db.keyMayExist(cfh, readOptions, keyBytes, null);
 		} finally {
 			closeLock.unlockRead(closeReadLock);
 		}
 	}
 	@Override
-	public void delete(WriteOptions writeOptions, Buffer key) throws RocksDBException {
+	public void delete(WriteOptions writeOptions, Buf key) throws RocksDBException {
 		var closeReadLock = closeLock.readLock();
 		try {
 			ensureOpen();
 			ensureOwned(writeOptions);
-			keyBufferSize.record(key.readableBytes());
-			if (nettyDirect) {
-				// Get the key nio buffer to pass to RocksDB
-				ByteBuffer keyNioBuffer;
-				boolean mustCloseKey;
-				{
-					if (!LLUtils.isReadOnlyDirect(key)) {
-						// If the nio buffer is not available, copy the netty buffer into a new direct buffer
-						mustCloseKey = true;
-						var directKey = DefaultBufferAllocators.offHeapAllocator().allocate(key.readableBytes());
-						key.copyInto(key.readerOffset(), directKey, 0, key.readableBytes());
-						key = directKey;
-					} else {
-						mustCloseKey = false;
-					}
-					keyNioBuffer = ((BufferComponent) key).readableBuffer();
-					assert keyNioBuffer.isDirect();
-					assert keyNioBuffer.limit() == key.readableBytes();
-				}
-				try {
-					db.delete(cfh, writeOptions, keyNioBuffer);
-				} finally {
-					if (mustCloseKey) {
-						key.close();
-					}
-				}
-			} else {
-				db.delete(cfh, writeOptions, LLUtils.toArray(key));
-			}
+			keyBufferSize.record(key.size());
+			db.delete(cfh, writeOptions, LLUtils.asArray(key));
 		} finally {
 			closeLock.unlockRead(closeReadLock);
 		}
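The byte-array fast path that exists() keeps is worth seeing in isolation. A self-contained sketch of the keyMayExist pattern, against the default column family (the class and method names are illustrative; the RocksDB calls are the real JNI API):

import org.rocksdb.Holder;
import org.rocksdb.ReadOptions;
import org.rocksdb.RocksDB;
import org.rocksdb.RocksDBException;

class ExistsExample {
	static final byte[] NO_DATA = new byte[0];

	static boolean exists(RocksDB db, ReadOptions readOptions, byte[] keyBytes) throws RocksDBException {
		int size = RocksDB.NOT_FOUND;
		Holder<byte[]> data = new Holder<>();
		if (db.keyMayExist(readOptions, keyBytes, data)) {
			if (data.getValue() != null) {
				// The probe already materialized the value (memtable/cache hit)
				size = data.getValue().length;
			} else {
				// "May exist" only: confirm with a real lookup
				size = db.get(readOptions, keyBytes, NO_DATA);
			}
		}
		return size != RocksDB.NOT_FOUND;
	}
}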
@@ -690,21 +502,17 @@ public sealed abstract class AbstractRocksDBColumn<T extends RocksDB> implements
 	@Override
 	public final @NotNull UpdateAtomicResult updateAtomic(@NotNull ReadOptions readOptions,
 			@NotNull WriteOptions writeOptions,
-			Buffer key,
+			Buf key,
 			BinarySerializationFunction updater,
-			UpdateAtomicResultMode returnMode) throws IOException {
+			UpdateAtomicResultMode returnMode) throws RocksDBException {
 		var closeReadLock = closeLock.readLock();
 		try {
 			ensureOpen();
 			ensureOwned(readOptions);
 			try {
-				keyBufferSize.record(key.readableBytes());
+				keyBufferSize.record(key.size());
 				startedUpdate.increment();
 				return updateAtomicImpl(readOptions, writeOptions, key, updater, returnMode);
-			} catch (IOException e) {
-				throw e;
-			} catch (Exception e) {
-				throw new IOException(e);
 			} finally {
 				endedUpdate.increment();
 			}
@@ -733,31 +541,29 @@ public sealed abstract class AbstractRocksDBColumn<T extends RocksDB> implements
 	protected abstract @NotNull UpdateAtomicResult updateAtomicImpl(@NotNull ReadOptions readOptions,
 			@NotNull WriteOptions writeOptions,
-			Buffer key,
+			Buf key,
 			BinarySerializationFunction updater,
-			UpdateAtomicResultMode returnMode) throws IOException;
+			UpdateAtomicResultMode returnMode) throws RocksDBException;

 	@Override
 	@NotNull
 	public RocksIteratorObj newIterator(@NotNull ReadOptions readOptions,
-			@Nullable Buffer min,
-			@Nullable Buffer max) {
+			@Nullable Buf min,
+			@Nullable Buf max) {
 		var closeReadLock = closeLock.readLock();
 		try {
 			ensureOpen();
 			ensureOwned(readOptions);
-			ensureOwned(min);
-			ensureOwned(max);
 			AbstractSlice<?> sliceMin;
 			AbstractSlice<?> sliceMax;
 			if (min != null) {
-				sliceMin = setIterateBound(nettyDirect, readOptions, IterateBound.LOWER, min);
+				sliceMin = setIterateBound(readOptions, IterateBound.LOWER, min);
 			} else {
 				sliceMin = null;
 			}
 			try {
 				if (max != null) {
-					sliceMax = setIterateBound(nettyDirect, readOptions, IterateBound.UPPER, max);
+					sliceMax = setIterateBound(readOptions, IterateBound.UPPER, max);
 				} else {
 					sliceMax = null;
 				}
@@ -769,7 +575,6 @@ public sealed abstract class AbstractRocksDBColumn<T extends RocksDB> implements
 					sliceMax,
 					min,
 					max,
-					nettyDirect,
 					this.startedIterSeek,
 					this.endedIterSeek,
 					this.iterSeekTime,
@@ -814,6 +619,8 @@ public sealed abstract class AbstractRocksDBColumn<T extends RocksDB> implements
 		try {
 			ensureOpen();
 			RocksDBUtils.forceCompaction(db, db.getName(), cfh, volumeId, logger);
+		} catch (RocksDBException e) {
+			throw new RuntimeException(e);
 		} finally {
 			closeLock.unlockRead(closeReadLock);
 		}
@@ -824,11 +631,6 @@ public sealed abstract class AbstractRocksDBColumn<T extends RocksDB> implements
 		return cfh;
 	}

-	@Override
-	public BufferAllocator getAllocator() {
-		return alloc;
-	}
-
 	public MeterRegistry getMeterRegistry() {
 		return meterRegistry;
 	}
@@ -847,10 +649,6 @@ public sealed abstract class AbstractRocksDBColumn<T extends RocksDB> implements
 	private class DBColumnKeyMayExistGetter extends KeyMayExistGetter {

-		public DBColumnKeyMayExistGetter() {
-			super(alloc, nettyDirect);
-		}
-
 		@Override
 		protected KeyMayExist keyMayExist(ReadOptions readOptions, ByteBuffer key, ByteBuffer value) {
 			return db.keyMayExist(cfh, readOptions, key, value);
@@ -1,7 +1,7 @@
 package it.cavallium.dbengine.database.disk;

-import io.netty5.buffer.Buffer;
+import it.cavallium.dbengine.buffers.Buf;
 import it.cavallium.dbengine.database.serialization.SerializationFunction;
 import org.jetbrains.annotations.Nullable;

-public interface BinarySerializationFunction extends SerializationFunction<@Nullable Buffer, @Nullable Buffer> {}
+public interface BinarySerializationFunction extends SerializationFunction<@Nullable Buf, @Nullable Buf> {}
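BinarySerializationFunction is the updater shape passed to updateAtomic above: previous raw value in, next raw value out. A hedged sketch of an updater, assuming (as the @Nullable annotations suggest, though this diff does not state it) that a null result means the key should be absent after the update:

class UpdaterExample {
	// Keep non-empty values unchanged; remove empty or missing ones
	static final BinarySerializationFunction DELETE_IF_EMPTY = prev -> {
		if (prev == null || prev.size() == 0) {
			return null; // assumed to mean "key absent after the update"
		}
		return prev;     // returning the previous Buf leaves the value as-is
	};
}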
@@ -1,23 +1,19 @@
 package it.cavallium.dbengine.database.disk;

-import static it.cavallium.dbengine.client.UninterruptibleScheduler.uninterruptibleScheduler;
-import static it.cavallium.dbengine.lucene.LuceneUtils.luceneScheduler;

 import com.google.common.cache.CacheBuilder;
 import com.google.common.cache.CacheLoader;
 import com.google.common.cache.LoadingCache;
 import it.cavallium.dbengine.database.LLSnapshot;
-import it.cavallium.dbengine.database.LLUtils;
 import it.cavallium.dbengine.lucene.LuceneCloseable;
-import it.cavallium.dbengine.lucene.LuceneUtils;
 import it.cavallium.dbengine.utils.SimpleResource;
 import java.io.IOException;
-import java.io.UncheckedIOException;
+import it.cavallium.dbengine.utils.DBException;
 import java.time.Duration;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.ScheduledFuture;
 import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.locks.LockSupport;
 import org.apache.logging.log4j.LogManager;
@@ -30,11 +26,6 @@ import org.apache.lucene.search.similarities.Similarity;
 import org.apache.lucene.store.AlreadyClosedException;
 import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
-import it.cavallium.dbengine.utils.ShortNamedThreadFactory;
-import reactor.core.Disposable;
-import reactor.core.publisher.Mono;
-import reactor.core.scheduler.Scheduler;
-import reactor.core.scheduler.Schedulers;

 // todo: deduplicate code between Cached and Simple searcher managers
 public class CachedIndexSearcherManager extends SimpleResource implements IndexSearcherManager, LuceneCloseable {
@@ -49,7 +40,7 @@ public class CachedIndexSearcherManager extends SimpleResource implements IndexS
 	@Nullable
 	private final SnapshotsManager snapshotsManager;
-	private final Scheduler luceneHeavyTasksScheduler;
+	private final ScheduledExecutorService luceneHeavyTasksScheduler;
 	private final Similarity similarity;
 	private final SearcherManager searcherManager;
 	private final Duration queryRefreshDebounceTime;
@@ -57,17 +48,16 @@ public class CachedIndexSearcherManager extends SimpleResource implements IndexS
 	private final AtomicLong activeSearchers = new AtomicLong(0);
 	private final AtomicLong activeRefreshes = new AtomicLong(0);
-	private final LoadingCache<LLSnapshot, Mono<LLIndexSearcher>> cachedSnapshotSearchers;
-	private final Mono<LLIndexSearcher> cachedMainSearcher;
-	private final Disposable refreshSubscription;
+	private final LoadingCache<LLSnapshot, LLIndexSearcher> cachedSnapshotSearchers;
+	private final ScheduledFuture<?> refreshSubscription;

 	public CachedIndexSearcherManager(IndexWriter indexWriter,
 			@Nullable SnapshotsManager snapshotsManager,
-			Scheduler luceneHeavyTasksScheduler,
+			ScheduledExecutorService luceneHeavyTasksScheduler,
 			Similarity similarity,
 			boolean applyAllDeletes,
 			boolean writeAllDeletes,
-			Duration queryRefreshDebounceTime) throws IOException {
+			Duration queryRefreshDebounceTime) {
 		this.snapshotsManager = snapshotsManager;
 		this.luceneHeavyTasksScheduler = luceneHeavyTasksScheduler;
 		this.similarity = similarity;
@@ -75,13 +65,17 @@ public class CachedIndexSearcherManager extends SimpleResource implements IndexS
 		this.searcherManager = new SearcherManager(indexWriter, applyAllDeletes, writeAllDeletes, SEARCHER_FACTORY);

-		refreshSubscription = LLUtils.scheduleRepeated(luceneHeavyTasksScheduler, () -> {
+		refreshSubscription = luceneHeavyTasksScheduler.scheduleAtFixedRate(() -> {
 			try {
 				maybeRefresh();
 			} catch (Exception ex) {
 				LOG.error("Failed to refresh the searcher manager", ex);
 			}
-		}, queryRefreshDebounceTime);
+		},
+				queryRefreshDebounceTime.toMillis(),
+				queryRefreshDebounceTime.toMillis(),
+				TimeUnit.MILLISECONDS
+		);

 		this.cachedSnapshotSearchers = CacheBuilder.newBuilder()
 				.expireAfterWrite(queryRefreshDebounceTime)
@@ -89,44 +83,40 @@ public class CachedIndexSearcherManager extends SimpleResource implements IndexS
 				.maximumSize(3)
 				.build(new CacheLoader<>() {
 					@Override
-					public Mono<LLIndexSearcher> load(@NotNull LLSnapshot snapshot) {
+					public LLIndexSearcher load(@NotNull LLSnapshot snapshot) {
 						return CachedIndexSearcherManager.this.generateCachedSearcher(snapshot);
 					}
 				});
-		this.cachedMainSearcher = this.generateCachedSearcher(null);
 	}

-	private Mono<LLIndexSearcher> generateCachedSearcher(@Nullable LLSnapshot snapshot) {
-		return Mono.fromCallable(() -> {
-			if (isClosed()) {
-				return null;
-			}
-			activeSearchers.incrementAndGet();
-			try {
-				IndexSearcher indexSearcher;
-				boolean fromSnapshot;
-				if (snapshotsManager == null || snapshot == null) {
-					indexSearcher = searcherManager.acquire();
-					fromSnapshot = false;
-				} else {
-					indexSearcher = snapshotsManager.resolveSnapshot(snapshot).getIndexSearcher(SEARCH_EXECUTOR);
-					fromSnapshot = true;
-				}
-				indexSearcher.setSimilarity(similarity);
-				assert indexSearcher.getIndexReader().getRefCount() > 0;
-				LLIndexSearcher llIndexSearcher;
-				if (fromSnapshot) {
-					llIndexSearcher = new SnapshotIndexSearcher(indexSearcher);
-				} else {
-					llIndexSearcher = new MainIndexSearcher(indexSearcher, searcherManager);
-				}
-				return llIndexSearcher;
-			} catch (Throwable ex) {
-				activeSearchers.decrementAndGet();
-				throw ex;
-			}
-		})
-				.transform(LuceneUtils::scheduleLucene);
+	private LLIndexSearcher generateCachedSearcher(@Nullable LLSnapshot snapshot) {
+		if (isClosed()) {
+			return null;
+		}
+		activeSearchers.incrementAndGet();
+		try {
+			IndexSearcher indexSearcher;
+			boolean fromSnapshot;
+			if (snapshotsManager == null || snapshot == null) {
+				indexSearcher = searcherManager.acquire();
+				fromSnapshot = false;
+			} else {
+				indexSearcher = snapshotsManager.resolveSnapshot(snapshot).getIndexSearcher(SEARCH_EXECUTOR);
+				fromSnapshot = true;
+			}
+			indexSearcher.setSimilarity(similarity);
+			assert indexSearcher.getIndexReader().getRefCount() > 0;
+			LLIndexSearcher llIndexSearcher;
+			if (fromSnapshot) {
+				llIndexSearcher = new SnapshotIndexSearcher(indexSearcher);
+			} else {
+				llIndexSearcher = new MainIndexSearcher(indexSearcher, searcherManager);
+			}
+			return llIndexSearcher;
+		} catch (Throwable ex) {
+			activeSearchers.decrementAndGet();
+			throw ex;
+		}
 	}

 	private void dropCachedIndexSearcher() {
@ -135,7 +125,7 @@ public class CachedIndexSearcherManager extends SimpleResource implements IndexS
} }
@Override @Override
public void maybeRefreshBlocking() throws IOException { public void maybeRefreshBlocking() {
try { try {
activeRefreshes.incrementAndGet(); activeRefreshes.incrementAndGet();
searcherManager.maybeRefreshBlocking(); searcherManager.maybeRefreshBlocking();
@ -147,7 +137,7 @@ public class CachedIndexSearcherManager extends SimpleResource implements IndexS
} }
@Override @Override
public void maybeRefresh() throws IOException { public void maybeRefresh() {
try { try {
activeRefreshes.incrementAndGet(); activeRefreshes.incrementAndGet();
searcherManager.maybeRefresh(); searcherManager.maybeRefresh();
@ -159,9 +149,9 @@ public class CachedIndexSearcherManager extends SimpleResource implements IndexS
} }
@Override @Override
public Mono<LLIndexSearcher> retrieveSearcher(@Nullable LLSnapshot snapshot) { public LLIndexSearcher retrieveSearcher(@Nullable LLSnapshot snapshot) {
if (snapshot == null) { if (snapshot == null) {
return this.cachedMainSearcher; return this.generateCachedSearcher(null);
} else { } else {
return this.cachedSnapshotSearchers.getUnchecked(snapshot); return this.cachedSnapshotSearchers.getUnchecked(snapshot);
} }
@ -170,10 +160,15 @@ public class CachedIndexSearcherManager extends SimpleResource implements IndexS
@Override @Override
protected void onClose() { protected void onClose() {
LOG.debug("Closing IndexSearcherManager..."); LOG.debug("Closing IndexSearcherManager...");
refreshSubscription.dispose(); long initTime = System.nanoTime();
refreshSubscription.cancel(false);
while (!refreshSubscription.isDone() && (System.nanoTime() - initTime) <= 240000000000L) {
LockSupport.parkNanos(50000000);
}
refreshSubscription.cancel(true);
LOG.debug("Closed IndexSearcherManager"); LOG.debug("Closed IndexSearcherManager");
LOG.debug("Closing refreshes..."); LOG.debug("Closing refreshes...");
long initTime = System.nanoTime(); initTime = System.nanoTime();
while (activeRefreshes.get() > 0 && (System.nanoTime() - initTime) <= 15000000000L) { while (activeRefreshes.get() > 0 && (System.nanoTime() - initTime) <= 15000000000L) {
LockSupport.parkNanos(50000000); LockSupport.parkNanos(50000000);
} }
@ -227,7 +222,7 @@ public class CachedIndexSearcherManager extends SimpleResource implements IndexS
try { try {
searcherManager.release(indexSearcher); searcherManager.release(indexSearcher);
} catch (IOException ex) { } catch (IOException ex) {
throw new UncheckedIOException(ex); throw new DBException(ex);
} }
} }
} }
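Note on the change above: the refresh loop moves from a Reactor subscription (LLUtils.scheduleRepeated returning a Disposable) to a plain ScheduledExecutorService with a two-phase cancel on close. A minimal, self-contained sketch of that schedule-then-shutdown pattern using only the JDK; the class name, the printed task body and the sleep are illustrative, not taken from the repository:

import java.time.Duration;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.LockSupport;

public final class PeriodicRefreshExample {

  public static void main(String[] args) throws InterruptedException {
    ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
    Duration debounce = Duration.ofMillis(500);

    // Schedule at a fixed rate, as the new constructor does; catch failures
    // so one bad refresh does not kill the periodic task.
    ScheduledFuture<?> refreshTask = scheduler.scheduleAtFixedRate(() -> {
      try {
        System.out.println("refreshing searcher manager");
      } catch (Exception ex) {
        ex.printStackTrace();
      }
    }, debounce.toMillis(), debounce.toMillis(), TimeUnit.MILLISECONDS);

    Thread.sleep(2000);

    // Two-phase shutdown, mirroring the new onClose(): cancel cooperatively,
    // park until the task finishes or a deadline passes, then force-cancel.
    long initTime = System.nanoTime();
    refreshTask.cancel(false);
    while (!refreshTask.isDone() && (System.nanoTime() - initTime) <= 240_000_000_000L) {
      LockSupport.parkNanos(50_000_000);
    }
    refreshTask.cancel(true);
    scheduler.shutdown();
  }
}

The deadlines match the diff: up to 240 seconds of cooperative waiting in 50 ms steps before the hard cancel.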

it/cavallium/dbengine/database/disk/CappedWriteBatch.java

@@ -1,19 +1,9 @@
 package it.cavallium.dbengine.database.disk;
-import static it.cavallium.dbengine.database.LLUtils.isDirect;
-import static it.cavallium.dbengine.database.LLUtils.isReadOnlyDirect;
+import static it.cavallium.dbengine.database.LLUtils.asArray;
-import io.netty5.buffer.Buffer;
-import io.netty5.buffer.BufferAllocator;
-import io.netty5.buffer.BufferComponent;
-import io.netty5.util.Send;
-import io.netty5.util.internal.PlatformDependent;
-import it.cavallium.dbengine.database.LLUtils;
-import it.cavallium.dbengine.database.disk.RocksDBColumn;
-import java.io.Closeable;
+import it.cavallium.dbengine.buffers.Buf;
 import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.List;
 import org.rocksdb.ColumnFamilyHandle;
 import org.rocksdb.RocksDBException;
 import org.rocksdb.WriteBatch;
@@ -21,61 +11,30 @@ import org.rocksdb.WriteOptions;
 public class CappedWriteBatch extends WriteBatch {
-  /**
-   * Default: true, Use false to debug problems with direct buffers
-   */
-  private static final boolean USE_FAST_DIRECT_BUFFERS = true;
   private final RocksDBColumn db;
-  private final BufferAllocator alloc;
   private final int cap;
   private final WriteOptions writeOptions;
-  private final List<Buffer> buffersToRelease;
-  private final List<ByteBuffer> byteBuffersToRelease;
   /**
    * @param db
    * @param cap The limit of operations
    */
   public CappedWriteBatch(RocksDBColumn db,
-      BufferAllocator alloc,
       int cap,
       int reservedWriteBatchSize,
       long maxWriteBatchSize,
       WriteOptions writeOptions) {
     super(reservedWriteBatchSize);
     this.db = db;
-    this.alloc = alloc;
     this.cap = cap;
     this.writeOptions = writeOptions;
     this.setMaxBytes(maxWriteBatchSize);
-    this.buffersToRelease = new ArrayList<>();
-    this.byteBuffersToRelease = new ArrayList<>();
   }
   private synchronized void flushIfNeeded(boolean force) throws RocksDBException {
     if (this.count() >= (force ? 1 : cap)) {
-      try {
-        db.write(writeOptions, this.getWriteBatch());
-        this.clear();
-      } finally {
-        releaseAllBuffers();
-      }
-    }
-  }
-  public synchronized void releaseAllBuffers() {
-    if (!buffersToRelease.isEmpty()) {
-      for (Buffer byteBuffer : buffersToRelease) {
-        byteBuffer.close();
-      }
-      buffersToRelease.clear();
-    }
-    if (!byteBuffersToRelease.isEmpty()) {
-      for (var byteBuffer : byteBuffersToRelease) {
-        PlatformDependent.freeDirectBuffer(byteBuffer);
-      }
-      byteBuffersToRelease.clear();
+      db.write(writeOptions, this.getWriteBatch());
+      this.clear();
     }
   }
@@ -109,29 +68,9 @@ public class CappedWriteBatch extends WriteBatch {
   }
   public synchronized void put(ColumnFamilyHandle columnFamilyHandle,
-      Send<Buffer> keyToReceive,
-      Send<Buffer> valueToReceive) throws RocksDBException {
-    var key = keyToReceive.receive();
-    var value = valueToReceive.receive();
-    if (USE_FAST_DIRECT_BUFFERS
-        && (isReadOnlyDirect(key))
-        && (isReadOnlyDirect(value))) {
-      ByteBuffer keyNioBuffer = ((BufferComponent) key).readableBuffer();
-      ByteBuffer valueNioBuffer = ((BufferComponent) value).readableBuffer();
-      buffersToRelease.add(value);
-      buffersToRelease.add(key);
-      super.put(columnFamilyHandle, keyNioBuffer, valueNioBuffer);
-    } else {
-      try {
-        byte[] keyArray = LLUtils.toArray(key);
-        byte[] valueArray = LLUtils.toArray(value);
-        super.put(columnFamilyHandle, keyArray, valueArray);
-      } finally {
-        key.close();
-        value.close();
-      }
-    }
+      Buf keyToReceive,
+      Buf valueToReceive) throws RocksDBException {
+    super.put(columnFamilyHandle, asArray(keyToReceive), asArray(valueToReceive));
     flushIfNeeded(false);
   }
@@ -159,19 +98,8 @@ public class CappedWriteBatch extends WriteBatch {
     flushIfNeeded(false);
   }
-  public synchronized void delete(ColumnFamilyHandle columnFamilyHandle, Send<Buffer> keyToReceive) throws RocksDBException {
-    var key = keyToReceive.receive();
-    if (USE_FAST_DIRECT_BUFFERS && isReadOnlyDirect(key)) {
-      ByteBuffer keyNioBuffer = ((BufferComponent) key).readableBuffer();
-      buffersToRelease.add(key);
-      delete(columnFamilyHandle, keyNioBuffer);
-    } else {
-      try {
-        super.delete(columnFamilyHandle, LLUtils.toArray(key));
-      } finally {
-        key.close();
-      }
-    }
+  public synchronized void delete(ColumnFamilyHandle columnFamilyHandle, Buf keyToDelete) throws RocksDBException {
+    super.delete(columnFamilyHandle, asArray(keyToDelete));
     flushIfNeeded(false);
   }
@@ -221,7 +149,6 @@ public class CappedWriteBatch extends WriteBatch {
   @Override
   public synchronized void clear() {
     super.clear();
-    releaseAllBuffers();
   }
   @Override
@@ -250,26 +177,11 @@ public class CappedWriteBatch extends WriteBatch {
   }
   public synchronized void writeToDbAndClose() throws RocksDBException {
-    try {
-      flushIfNeeded(true);
-      super.close();
-    } finally {
-      releaseAllBuffers();
-    }
+    flushIfNeeded(true);
+    super.close();
   }
   public void flush() throws RocksDBException {
-    try {
-      flushIfNeeded(true);
-    } finally {
-      releaseAllBuffers();
-    }
+    flushIfNeeded(true);
  }
-  /*
-  protected void disposeInternal(boolean owningHandle) {
-    super.disposeInternal(owningHandle);
-    releaseAllBuffers();
-  }
-  */
 }
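With the netty buffer tracking gone, CappedWriteBatch is reduced to its core idea: buffer operations in a RocksDB WriteBatch and write them out once a fixed operation count is reached. A standalone sketch of that capping pattern against the plain rocksdbjni API; the class name and the byte[]-based surface are illustrative (the real class extends WriteBatch directly and accepts Buf):

import org.rocksdb.ColumnFamilyHandle;
import org.rocksdb.RocksDB;
import org.rocksdb.RocksDBException;
import org.rocksdb.WriteBatch;
import org.rocksdb.WriteOptions;

// Buffer writes and flush to the DB whenever the batch reaches the cap.
public final class CappedBatchSketch implements AutoCloseable {

  private final RocksDB db;
  private final ColumnFamilyHandle cfh;
  private final WriteOptions writeOptions;
  private final WriteBatch batch = new WriteBatch();
  private final int cap;

  public CappedBatchSketch(RocksDB db, ColumnFamilyHandle cfh, WriteOptions writeOptions, int cap) {
    this.db = db;
    this.cfh = cfh;
    this.writeOptions = writeOptions;
    this.cap = cap;
  }

  public synchronized void put(byte[] key, byte[] value) throws RocksDBException {
    batch.put(cfh, key, value);
    flushIfNeeded(false);
  }

  public synchronized void delete(byte[] key) throws RocksDBException {
    batch.delete(cfh, key);
    flushIfNeeded(false);
  }

  // force=true flushes any pending operation; otherwise wait for the cap.
  private void flushIfNeeded(boolean force) throws RocksDBException {
    if (batch.count() >= (force ? 1 : cap)) {
      db.write(writeOptions, batch);
      batch.clear();
    }
  }

  @Override
  public synchronized void close() throws RocksDBException {
    flushIfNeeded(true);
    batch.close();
  }
}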

it/cavallium/dbengine/database/disk/HugePqEnv.java (deleted)

@@ -1,84 +0,0 @@
-package it.cavallium.dbengine.database.disk;
-import static it.cavallium.dbengine.database.disk.LLTempHugePqEnv.getColumnOptions;
-import com.google.common.primitives.Ints;
-import io.micrometer.core.instrument.composite.CompositeMeterRegistry;
-import io.netty5.buffer.BufferAllocator;
-import it.cavallium.dbengine.utils.SimpleResource;
-import it.unimi.dsi.fastutil.ints.Int2ObjectMap;
-import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap;
-import java.io.Closeable;
-import java.io.IOException;
-import java.io.UncheckedIOException;
-import java.util.ArrayList;
-import java.util.Objects;
-import java.util.concurrent.locks.ReentrantLock;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
-import java.util.concurrent.locks.StampedLock;
-import org.rocksdb.AbstractComparator;
-import org.rocksdb.ColumnFamilyDescriptor;
-import org.rocksdb.ColumnFamilyHandle;
-import org.rocksdb.RocksDB;
-import org.rocksdb.RocksDBException;
-public class HugePqEnv extends SimpleResource {
-  private final RocksDB db;
-  private final ArrayList<ColumnFamilyHandle> defaultCfh;
-  private final Int2ObjectMap<ColumnFamilyHandle> cfhs = new Int2ObjectOpenHashMap<>();
-  public HugePqEnv(RocksDB db, ArrayList<ColumnFamilyHandle> defaultCfh) {
-    this.db = db;
-    this.defaultCfh = defaultCfh;
-  }
-  @Override
-  protected void onClose() {
-    for (var cfh : defaultCfh) {
-      db.destroyColumnFamilyHandle(cfh);
-    }
-    try {
-      db.closeE();
-    } catch (RocksDBException e) {
-      throw new IllegalStateException(e);
-    }
-  }
-  public int createColumnFamily(int name, AbstractComparator comparator) throws RocksDBException {
-    var cfh = db.createColumnFamily(new ColumnFamilyDescriptor(Ints.toByteArray(name), getColumnOptions(comparator)));
-    synchronized (cfhs) {
-      var prev = cfhs.put(name, cfh);
-      if (prev != null) {
-        throw new UnsupportedOperationException("Db " + name + " already exists");
-      }
-      return name;
-    }
-  }
-  public void deleteColumnFamily(int db) throws RocksDBException {
-    ColumnFamilyHandle cfh;
-    synchronized (cfhs) {
-      cfh = cfhs.remove(db);
-    }
-    if (cfh != null) {
-      this.db.dropColumnFamily(cfh);
-      this.db.destroyColumnFamilyHandle(cfh);
-    }
-  }
-  public StandardRocksDBColumn openDb(int hugePqId) {
-    ColumnFamilyHandle cfh;
-    synchronized (cfhs) {
-      cfh = Objects.requireNonNull(cfhs.get(hugePqId), () -> "column " + hugePqId + " does not exist");
-    }
-    return new StandardRocksDBColumn(db,
-        true,
-        BufferAllocator.offHeapPooled(),
-        db.getName(),
-        cfh,
-        new CompositeMeterRegistry(),
-        new StampedLock()
-    );
-  }
-}
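The deleted HugePqEnv wrapped the RocksDB column-family lifecycle: create a column family whose name is a 4-byte int, use it, then drop and destroy it. A minimal sketch of that lifecycle with plain rocksdbjni and Guava's Ints; the database path and the int key are illustrative:

import com.google.common.primitives.Ints;
import org.rocksdb.ColumnFamilyDescriptor;
import org.rocksdb.ColumnFamilyHandle;
import org.rocksdb.ColumnFamilyOptions;
import org.rocksdb.Options;
import org.rocksdb.RocksDB;
import org.rocksdb.RocksDBException;

public final class ColumnFamilyLifecycleExample {

  public static void main(String[] args) throws RocksDBException {
    try (Options options = new Options().setCreateIfMissing(true);
        RocksDB db = RocksDB.open(options, "/tmp/hugepq-example")) {
      // Like createColumnFamily(int, ...): the CF name is an int serialized to bytes.
      ColumnFamilyHandle cfh = db.createColumnFamily(
          new ColumnFamilyDescriptor(Ints.toByteArray(1), new ColumnFamilyOptions()));
      db.put(cfh, new byte[] {1}, new byte[] {2});
      // Like deleteColumnFamily(int): drop the data, then destroy the handle.
      db.dropColumnFamily(cfh);
      db.destroyColumnFamilyHandle(cfh);
    }
  }
}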

it/cavallium/dbengine/database/disk/IndexSearcherManager.java

@@ -1,20 +1,16 @@
 package it.cavallium.dbengine.database.disk;
-import io.netty5.util.Send;
 import it.cavallium.dbengine.database.LLSnapshot;
 import it.cavallium.dbengine.database.SafeCloseable;
 import java.io.IOException;
-import java.util.function.Function;
+import java.util.function.Supplier;
-import org.apache.lucene.search.IndexSearcher;
 import org.jetbrains.annotations.Nullable;
-import reactor.core.publisher.Flux;
-import reactor.core.publisher.Mono;
 public interface IndexSearcherManager extends SafeCloseable {
-  void maybeRefreshBlocking() throws IOException;
-  void maybeRefresh() throws IOException;
-  Mono<LLIndexSearcher> retrieveSearcher(@Nullable LLSnapshot snapshot);
+  void maybeRefreshBlocking();
+  void maybeRefresh();
+  LLIndexSearcher retrieveSearcher(@Nullable LLSnapshot snapshot);
 }
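retrieveSearcher now hands back the searcher directly instead of a Mono. Under the hood this manager wraps Lucene's SearcherManager, whose acquire/refresh/release contract the callers ultimately rely on. A self-contained sketch of that contract with plain Lucene (the in-memory index and the query are illustrative):

import java.io.IOException;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.SearcherManager;
import org.apache.lucene.store.ByteBuffersDirectory;

public final class SearcherAcquireExample {

  public static void main(String[] args) throws IOException {
    try (ByteBuffersDirectory dir = new ByteBuffersDirectory();
        IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig())) {
      SearcherManager manager = new SearcherManager(writer, null);
      manager.maybeRefreshBlocking(); // what maybeRefreshBlocking() delegates to
      IndexSearcher searcher = manager.acquire(); // what retrieveSearcher() wraps
      try {
        long hits = searcher.search(new MatchAllDocsQuery(), 1).totalHits.value;
        System.out.println("hits: " + hits);
      } finally {
        manager.release(searcher); // closing the LLIndexSearcher releases here
      }
      manager.close();
    }
  }
}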

it/cavallium/dbengine/database/disk/KeyMayExistGetter.java

@@ -1,31 +1,19 @@
 package it.cavallium.dbengine.database.disk;
-import static it.cavallium.dbengine.database.LLUtils.INITIAL_DIRECT_READ_BYTE_BUF_SIZE_BYTES;
 import static java.lang.Boolean.parseBoolean;
 import static java.lang.System.getProperty;
 import static java.util.Objects.requireNonNull;
-import static org.rocksdb.KeyMayExist.KeyMayExistEnum.kExistsWithValue;
-import static org.rocksdb.KeyMayExist.KeyMayExistEnum.kExistsWithoutValue;
-import static org.rocksdb.KeyMayExist.KeyMayExistEnum.kNotExist;
-import io.netty5.buffer.Buffer;
-import io.netty5.buffer.BufferAllocator;
-import io.netty5.buffer.BufferComponent;
-import io.netty5.buffer.DefaultBufferAllocators;
+import it.cavallium.dbengine.buffers.Buf;
 import it.cavallium.dbengine.database.LLUtils;
 import java.nio.ByteBuffer;
-import java.util.Arrays;
-import java.util.HexFormat;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
-import org.rocksdb.ColumnFamilyHandle;
 import org.rocksdb.Holder;
 import org.rocksdb.KeyMayExist;
-import org.rocksdb.KeyMayExist.KeyMayExistEnum;
 import org.rocksdb.ReadOptions;
-import org.rocksdb.RocksDB;
 import org.rocksdb.RocksDBException;
 public abstract class KeyMayExistGetter {
@@ -40,180 +28,19 @@ public abstract class KeyMayExistGetter {
       "it.cavallium.dbengine.mayexist.strict_no_value",
       "false"
   ));
-  private static final BufferAllocator OFF_HEAP_ALLOCATOR = DefaultBufferAllocators.offHeapAllocator();
-  private final BufferAllocator bufferAllocator;
-  private final boolean nettyDirect;
-  public KeyMayExistGetter(BufferAllocator bufferAllocator, boolean nettyDirect) {
-    this.bufferAllocator = bufferAllocator;
-    this.nettyDirect = nettyDirect;
+  public KeyMayExistGetter() {
   }
-  public final @Nullable Buffer get(@NotNull ReadOptions readOptions, Buffer key) throws RocksDBException {
-    recordKeyBufferSize(key.readableBytes());
-    if (nettyDirect) {
-      return getDirect(readOptions, key);
-    } else {
-      return getHeap(readOptions, key);
-    }
+  public final @Nullable Buf get(@NotNull ReadOptions readOptions, Buf key) throws RocksDBException {
+    recordKeyBufferSize(key.size());
+    return getHeap(readOptions, key);
   }
-  private Buffer getDirect(ReadOptions readOptions, Buffer key) throws RocksDBException {
-    int readAttemptsCount = 0;
-    // Get the key nio buffer to pass to RocksDB
-    ByteBuffer keyNioBuffer;
-    boolean mustCloseKey;
-    {
-      if (!LLUtils.isReadOnlyDirect(key)) {
-        // If the nio buffer is not available, copy the netty buffer into a new direct buffer
-        mustCloseKey = true;
-        var directKey = OFF_HEAP_ALLOCATOR.allocate(key.readableBytes());
-        key.copyInto(key.readerOffset(), directKey, 0, key.readableBytes());
-        key = directKey;
-      } else {
-        mustCloseKey = false;
-      }
-      keyNioBuffer = ((BufferComponent) key).readableBuffer();
-      assert keyNioBuffer.isDirect();
-      assert keyNioBuffer.limit() == key.readableBytes();
-    }
-    try {
-      // Create a direct result buffer because RocksDB works only with direct buffers
-      var resultBuffer = bufferAllocator.allocate(INITIAL_DIRECT_READ_BYTE_BUF_SIZE_BYTES);
-      try {
-        assert resultBuffer.readerOffset() == 0;
-        assert resultBuffer.writerOffset() == 0;
-        var resultWritable = ((BufferComponent) resultBuffer).writableBuffer();
-        var keyMayExist = keyMayExist(readOptions, keyNioBuffer.rewind(), resultWritable.clear());
-        if (STRICT_MAYEXIST_NO_VALUE && keyMayExist.exists != kExistsWithValue && keyMayExist.valueLength != 0) {
-          // Create a direct result buffer because RocksDB works only with direct buffers
-          try (var realResultBuffer = bufferAllocator.allocate(INITIAL_DIRECT_READ_BYTE_BUF_SIZE_BYTES)) {
-            var resultWritableF = resultWritable;
-            var realResultWritable = ((BufferComponent) realResultBuffer).writableBuffer();
-            var realSize = get(readOptions, keyNioBuffer.rewind(), realResultWritable);
-            var hexFormat = HexFormat.ofDelimiter(" ");
-            LOG.error(
-                "KeyMayExist is {}, but value length is non-zero: {}! Disk value size is {}\nBytes from bloom cache:\n{}\nBytes from db:\n{}",
-                () -> keyMayExist.exists,
-                () -> keyMayExist.valueLength,
-                () -> realSize,
-                () -> {
-                  resultBuffer.writerOffset(resultWritableF.limit());
-                  return hexFormat.formatHex(LLUtils.toArray(resultBuffer));
-                },
-                () -> {
-                  realResultBuffer.writerOffset(realResultWritable.limit());
-                  return hexFormat.formatHex(LLUtils.toArray(realResultBuffer));
-                }
-            );
-            var sliceKME = LLUtils.toArray(resultBuffer.copy(0, Math.min(resultWritableF.limit(), realSize)));
-            var sliceDB = LLUtils.toArray(realResultBuffer.copy(0, Math.min(realResultWritable.limit(), realSize)));
-            throw new RocksDBException(
-                "KeyMayExist is " + keyMayExist.exists + ", but value length is non-zero: " + keyMayExist.valueLength
-                    + "! Disk value size is " + realSize + ". The bloom cache partial value is "
-                    + (Arrays.equals(sliceKME, sliceDB) ? "correct" : "corrupted"));
-          }
-        }
-        KeyMayExistEnum keyMayExistState = keyMayExist.exists;
-        int keyMayExistValueLength = keyMayExist.valueLength;
-        // At the beginning, size reflects the expected size, then it becomes the real data size
-        //noinspection SwitchStatementWithTooFewBranches
-        int size = switch (keyMayExistState) {
-          case kExistsWithValue -> keyMayExistValueLength;
-          default -> -1;
-        };
-        boolean isKExistsWithoutValue = false;
-        switch (keyMayExistState) {
-          case kNotExist: {
-            recordReadValueNotFoundWithBloomBufferSize(0);
-            resultBuffer.close();
-            return null;
-          }
-          // todo: kExistsWithValue is not reliable (read below),
-          //  in some cases it should be treated as kExistsWithoutValue
-          case kExistsWithValue:
-          case kExistsWithoutValue: {
-            if (keyMayExistState == kExistsWithoutValue) {
-              isKExistsWithoutValue = true;
-            } else if (WORKAROUND_MAY_EXIST_FAKE_ZERO) {
-              // todo: "size == 0 || resultWritable.limit() == 0" is checked because keyMayExist is broken,
-              //  and sometimes it returns an empty array, as if it exists
-              if (size == 0 || resultWritable.limit() == 0) {
-                isKExistsWithoutValue = true;
-              }
-            }
-            if (isKExistsWithoutValue) {
-              assert
-                  !STRICT_MAYEXIST_NO_VALUE || keyMayExistValueLength == 0 :
-                  "keyMayExist value length is " + keyMayExistValueLength + " instead of 0";
-              resultWritable.clear();
-              readAttemptsCount++;
-              // real data size
-              size = get(readOptions, keyNioBuffer.rewind(), resultWritable.clear());
-              if (keyMayExistState == kExistsWithValue && size != keyMayExistValueLength) {
-                throw new IllegalStateException("Bloom filter data is corrupted."
-                    + " Bloom value size=" + keyMayExistState + ", Real value size=" + size);
-              }
-              if (size == RocksDB.NOT_FOUND) {
-                resultBuffer.close();
-                recordReadValueNotFoundWithMayExistBloomBufferSize(0);
-                return null;
-              }
-            }
-          }
-          default: {
-            // real data size
-            assert size >= 0;
-            if (size <= resultWritable.limit()) {
-              if (isKExistsWithoutValue) {
-                recordReadValueFoundWithBloomUncachedBufferSize(size);
-              } else {
-                recordReadValueFoundWithBloomCacheBufferSize(size);
-              }
-              assert size == resultWritable.limit();
-              return resultBuffer.writerOffset(resultWritable.limit());
-            } else {
-              resultBuffer.ensureWritable(size);
-              resultWritable = ((BufferComponent) resultBuffer).writableBuffer();
-              assert resultBuffer.readerOffset() == 0;
-              assert resultBuffer.writerOffset() == 0;
-              readAttemptsCount++;
-              size = get(readOptions, keyNioBuffer.rewind(), resultWritable.clear());
-              if (size == RocksDB.NOT_FOUND) {
-                recordReadValueNotFoundWithMayExistBloomBufferSize(0);
-                resultBuffer.close();
-                return null;
-              }
-              assert size == resultWritable.limit();
-              if (isKExistsWithoutValue) {
-                recordReadValueFoundWithBloomUncachedBufferSize(size);
-              } else {
-                recordReadValueFoundWithBloomCacheBufferSize(size);
-              }
-              return resultBuffer.writerOffset(resultWritable.limit());
-            }
-          }
-        }
-      } catch (Throwable t) {
-        resultBuffer.close();
-        throw t;
-      }
-    } finally {
-      if (mustCloseKey) {
-        key.close();
-      }
-      recordReadAttempts(readAttemptsCount);
-    }
-  }
-  private Buffer getHeap(ReadOptions readOptions, Buffer key) throws RocksDBException {
+  private Buf getHeap(ReadOptions readOptions, Buf key) throws RocksDBException {
     int readAttemptsCount = 0;
     try {
-      byte[] keyArray = LLUtils.toArray(key);
+      byte[] keyArray = LLUtils.asArray(key);
       requireNonNull(keyArray);
       Holder<byte[]> data = new Holder<>();
       if (keyMayExist(readOptions, keyArray, data)) {
@@ -221,7 +48,7 @@ public abstract class KeyMayExistGetter {
         // returns an empty array, as if it exists
         if (data.getValue() != null && (!WORKAROUND_MAY_EXIST_FAKE_ZERO || data.getValue().length > 0)) {
           recordReadValueFoundWithBloomCacheBufferSize(data.getValue().length);
-          return LLUtils.fromByteArray(bufferAllocator, data.getValue());
+          return LLUtils.asByteList(data.getValue());
         } else {
           readAttemptsCount++;
           byte[] result = get(readOptions, keyArray);
@@ -234,7 +61,7 @@ public abstract class KeyMayExistGetter {
           return null;
         } else {
           recordReadValueFoundWithBloomUncachedBufferSize(0);
-          return LLUtils.fromByteArray(bufferAllocator, result);
+          return LLUtils.asByteList(result);
         }
       }
     } else {
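Only the heap path survives the rewrite: the getter asks keyMayExist for a cheap bloom-filter/memtable answer and falls back to a full get when the cheap check cannot produce the value. A minimal sketch of that two-step read with plain rocksdbjni; the length > 0 guard mirrors the WORKAROUND_MAY_EXIST_FAKE_ZERO check above, since keyMayExist can report an empty value for keys that do exist (the path and keys are illustrative):

import org.rocksdb.Holder;
import org.rocksdb.Options;
import org.rocksdb.ReadOptions;
import org.rocksdb.RocksDB;
import org.rocksdb.RocksDBException;

public final class KeyMayExistExample {

  // Fast-path read: consult the bloom filter first, then confirm if needed.
  static byte[] read(RocksDB db, ReadOptions readOptions, byte[] key) throws RocksDBException {
    Holder<byte[]> data = new Holder<>();
    if (db.keyMayExist(readOptions, key, data)) {
      if (data.getValue() != null && data.getValue().length > 0) {
        return data.getValue(); // value already produced by keyMayExist
      }
      return db.get(readOptions, key); // "may exist": do the real read
    }
    return null; // definitely absent
  }

  public static void main(String[] args) throws RocksDBException {
    try (Options options = new Options().setCreateIfMissing(true);
        RocksDB db = RocksDB.open(options, "/tmp/keymayexist-example");
        ReadOptions readOptions = new ReadOptions()) {
      db.put("k".getBytes(), "v".getBytes());
      System.out.println(new String(read(db, readOptions, "k".getBytes())));
    }
  }
}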

it/cavallium/dbengine/database/disk/LLIndexSearcher.java

@@ -1,17 +1,10 @@
 package it.cavallium.dbengine.database.disk;
-import io.netty5.buffer.Drop;
-import io.netty5.buffer.Owned;
-import io.netty5.buffer.internal.ResourceSupport;
 import it.cavallium.dbengine.database.DiscardingCloseable;
-import it.cavallium.dbengine.database.SafeCloseable;
 import it.cavallium.dbengine.utils.SimpleResource;
-import java.io.Closeable;
-import java.io.IOException;
 import java.util.concurrent.atomic.AtomicBoolean;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
-import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.search.IndexSearcher;
 public abstract class LLIndexSearcher extends SimpleResource implements DiscardingCloseable {

it/cavallium/dbengine/database/disk/LLIndexSearcherImpl.java

@@ -1,11 +1,8 @@
 package it.cavallium.dbengine.database.disk;
-import it.cavallium.dbengine.database.DiscardingCloseable;
-import it.cavallium.dbengine.utils.SimpleResource;
 import java.util.concurrent.atomic.AtomicBoolean;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
-import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.search.IndexSearcher;
 public abstract class LLIndexSearcherImpl extends LLIndexSearcher {

it/cavallium/dbengine/database/disk/LLLocalDatabaseConnection.java

@@ -1,56 +1,38 @@
 package it.cavallium.dbengine.database.disk;
-import static it.cavallium.dbengine.lucene.LuceneUtils.luceneScheduler;
 import io.micrometer.core.instrument.MeterRegistry;
-import io.netty5.buffer.BufferAllocator;
 import it.cavallium.dbengine.database.LLDatabaseConnection;
 import it.cavallium.dbengine.database.LLLuceneIndex;
 import it.cavallium.dbengine.lucene.LuceneHacks;
-import it.cavallium.dbengine.lucene.LuceneRocksDBManager;
-import it.cavallium.dbengine.lucene.LuceneUtils;
 import it.cavallium.dbengine.rpc.current.data.Column;
 import it.cavallium.dbengine.rpc.current.data.DatabaseOptions;
 import it.cavallium.dbengine.rpc.current.data.IndicizerAnalyzers;
 import it.cavallium.dbengine.rpc.current.data.IndicizerSimilarities;
 import it.cavallium.dbengine.rpc.current.data.LuceneIndexStructure;
 import it.cavallium.dbengine.rpc.current.data.LuceneOptions;
+import it.cavallium.dbengine.utils.DBException;
+import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.LinkedList;
 import java.util.List;
-import java.util.Objects;
 import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicReference;
 import org.jetbrains.annotations.Nullable;
-import reactor.core.publisher.Mono;
-import reactor.core.scheduler.Schedulers;
 public class LLLocalDatabaseConnection implements LLDatabaseConnection {
   private final AtomicBoolean connected = new AtomicBoolean();
-  private final BufferAllocator allocator;
   private final MeterRegistry meterRegistry;
   private final Path basePath;
   private final boolean inMemory;
-  private final LuceneRocksDBManager rocksDBManager;
-  private final AtomicReference<LLTempHugePqEnv> env = new AtomicReference<>();
-  public LLLocalDatabaseConnection(BufferAllocator allocator,
+  public LLLocalDatabaseConnection(
       MeterRegistry meterRegistry,
       Path basePath,
-      boolean inMemory,
-      LuceneRocksDBManager rocksDBManager) {
-    this.allocator = allocator;
+      boolean inMemory) {
     this.meterRegistry = meterRegistry;
     this.basePath = basePath;
     this.inMemory = inMemory;
-    this.rocksDBManager = rocksDBManager;
-  }
-  @Override
-  public BufferAllocator getAllocator() {
-    return allocator;
   }
   public MeterRegistry getMeterRegistry() {
@@ -58,94 +40,67 @@ public class LLLocalDatabaseConnection implements LLDatabaseConnection {
   }
   @Override
-  public Mono<LLDatabaseConnection> connect() {
-    return Mono
-        .<LLDatabaseConnection>fromCallable(() -> {
-          if (!connected.compareAndSet(false, true)) {
-            throw new IllegalStateException("Already connected");
-          }
-          if (Files.notExists(basePath)) {
-            Files.createDirectories(basePath);
-          }
-          var prev = env.getAndSet(new LLTempHugePqEnv());
-          if (prev != null) {
-            throw new IllegalStateException("Env was already set");
-          }
-          return this;
-        })
-        .subscribeOn(Schedulers.boundedElastic());
+  public LLDatabaseConnection connect() {
+    if (!connected.compareAndSet(false, true)) {
+      throw new IllegalStateException("Already connected");
+    }
+    if (Files.notExists(basePath)) {
+      try {
+        Files.createDirectories(basePath);
+      } catch (IOException e) {
+        throw new DBException(e);
+      }
+    }
+    return this;
   }
   @Override
-  public Mono<LLLocalKeyValueDatabase> getDatabase(String name,
-      List<Column> columns,
-      DatabaseOptions databaseOptions) {
-    return Mono
-        .fromCallable(() -> new LLLocalKeyValueDatabase(
-            allocator,
-            meterRegistry,
-            name,
-            inMemory,
-            basePath.resolve("database_" + name),
-            columns,
-            new LinkedList<>(),
-            databaseOptions
-        ))
-        .subscribeOn(Schedulers.boundedElastic());
+  public LLLocalKeyValueDatabase getDatabase(String name, List<Column> columns, DatabaseOptions databaseOptions) {
+    return new LLLocalKeyValueDatabase(meterRegistry,
+        name,
+        inMemory,
+        basePath.resolve("database_" + name),
+        columns,
+        new LinkedList<>(),
+        databaseOptions
+    );
   }
   @Override
-  public Mono<? extends LLLuceneIndex> getLuceneIndex(String clusterName,
+  public LLLuceneIndex getLuceneIndex(String clusterName,
       LuceneIndexStructure indexStructure,
       IndicizerAnalyzers indicizerAnalyzers,
       IndicizerSimilarities indicizerSimilarities,
       LuceneOptions luceneOptions,
       @Nullable LuceneHacks luceneHacks) {
-    return Mono
-        .fromCallable(() -> {
-          var env = this.env.get();
-          if (clusterName == null) {
-            throw new IllegalArgumentException("Cluster name must be set");
-          }
-          if (indexStructure.activeShards().size() != 1) {
-            Objects.requireNonNull(env, "Environment not set");
-            return new LLLocalMultiLuceneIndex(env,
-                meterRegistry,
-                clusterName,
-                indexStructure.activeShards(),
-                indexStructure.totalShards(),
-                indicizerAnalyzers,
-                indicizerSimilarities,
-                luceneOptions,
-                luceneHacks,
-                rocksDBManager
-            );
-          } else {
-            return new LLLocalLuceneIndex(env,
-                meterRegistry,
-                clusterName,
-                indexStructure.activeShards().getInt(0),
-                indicizerAnalyzers,
-                indicizerSimilarities,
-                luceneOptions,
-                luceneHacks,
-                rocksDBManager
-            );
-          }
-        })
-        .transform(LuceneUtils::scheduleLucene);
+    if (clusterName == null) {
+      throw new IllegalArgumentException("Cluster name must be set");
+    }
+    if (indexStructure.activeShards().size() != 1) {
+      return new LLLocalMultiLuceneIndex(meterRegistry,
+          clusterName,
+          indexStructure.activeShards(),
+          indexStructure.totalShards(),
+          indicizerAnalyzers,
+          indicizerSimilarities,
+          luceneOptions,
+          luceneHacks
+      );
+    } else {
+      return new LLLocalLuceneIndex(meterRegistry,
+          clusterName,
          indexStructure.activeShards().getInt(0),
          indicizerAnalyzers,
          indicizerSimilarities,
          luceneOptions,
          luceneHacks
      );
    }
   }
   @Override
-  public Mono<Void> disconnect() {
-    return Mono.<Void>fromCallable(() -> {
-      if (connected.compareAndSet(true, false)) {
-        var env = this.env.get();
-        if (env != null) {
-          env.close();
-        }
-      }
-      return null;
-    }).subscribeOn(Schedulers.boundedElastic());
+  public void disconnect() {
+    if (connected.compareAndSet(true, false)) {
+    }
   }
 }
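Each method above collapses a Mono.fromCallable(...).subscribeOn(...) pipeline into a plain blocking call that throws instead of signalling errors downstream. A generic sketch of that migration using only the JDK; UncheckedIOException stands in for the project's DBException, and the class name is illustrative:

import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.concurrent.atomic.AtomicBoolean;

public final class BlockingConnectExample {

  private final AtomicBoolean connected = new AtomicBoolean();
  private final Path basePath;

  BlockingConnectExample(Path basePath) {
    this.basePath = basePath;
  }

  // Before: return Mono.fromCallable(() -> { ... }).subscribeOn(Schedulers.boundedElastic());
  // After: do the work inline; the caller decides which thread runs it.
  public BlockingConnectExample connect() {
    if (!connected.compareAndSet(false, true)) {
      throw new IllegalStateException("Already connected");
    }
    if (Files.notExists(basePath)) {
      try {
        Files.createDirectories(basePath);
      } catch (IOException e) {
        // Checked exceptions become unchecked, since there is no error channel.
        throw new UncheckedIOException(e);
      }
    }
    return this;
  }

  public static void main(String[] args) {
    new BlockingConnectExample(Path.of("/tmp/dbengine-example")).connect();
  }
}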

it/cavallium/dbengine/database/disk/LLLocalEntryReactiveRocksIterator.java

@@ -1,31 +1,25 @@
 package it.cavallium.dbengine.database.disk;
-import io.netty5.buffer.Buffer;
-import io.netty5.util.Send;
+import it.cavallium.dbengine.buffers.Buf;
 import it.cavallium.dbengine.database.LLEntry;
 import it.cavallium.dbengine.database.LLRange;
 import java.util.function.Supplier;
 import org.rocksdb.ReadOptions;
-import reactor.core.publisher.Mono;
 public class LLLocalEntryReactiveRocksIterator extends LLLocalReactiveRocksIterator<LLEntry> {
   public LLLocalEntryReactiveRocksIterator(RocksDBColumn db,
-      Mono<LLRange> rangeMono,
-      boolean allowNettyDirect,
+      LLRange range,
       Supplier<ReadOptions> readOptions,
       boolean reverse,
       boolean smallRange) {
-    super(db, rangeMono, allowNettyDirect, readOptions, true, reverse, smallRange);
+    super(db, range, readOptions, true, reverse, smallRange);
   }
   @Override
-  public LLEntry getEntry(Buffer key, Buffer value) {
+  public LLEntry getEntry(Buf key, Buf value) {
     assert key != null;
     assert value != null;
-    return LLEntry.of(
-        key.touch("iteration entry key"),
-        value.touch("iteration entry value")
-    );
+    return LLEntry.of(key.copy(), value.copy());
   }
 }
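getEntry now emits key.copy() and value.copy() rather than the buffers themselves, presumably because the Buf instances handed to it are reused as the cursor advances. The iterators in this package ultimately drive a RocksDB range scan; a standalone sketch of that scan with plain rocksdbjni (bounds and contents are illustrative):

import java.nio.charset.StandardCharsets;
import org.rocksdb.Options;
import org.rocksdb.ReadOptions;
import org.rocksdb.RocksDB;
import org.rocksdb.RocksDBException;
import org.rocksdb.RocksIterator;

public final class RangeScanExample {

  public static void main(String[] args) throws RocksDBException {
    try (Options options = new Options().setCreateIfMissing(true);
        RocksDB db = RocksDB.open(options, "/tmp/rangescan-example");
        ReadOptions readOptions = new ReadOptions()) {
      db.put(bytes("a"), bytes("1"));
      db.put(bytes("b"), bytes("2"));
      db.put(bytes("c"), bytes("3"));
      // Scan the range [a, c): seek to the lower bound, stop at the upper bound.
      try (RocksIterator it = db.newIterator(readOptions)) {
        for (it.seek(bytes("a")); it.isValid(); it.next()) {
          String k = new String(it.key(), StandardCharsets.UTF_8);
          if (k.compareTo("c") >= 0) {
            break;
          }
          System.out.println(k + " = " + new String(it.value(), StandardCharsets.UTF_8));
        }
      }
    }
  }

  private static byte[] bytes(String s) {
    return s.getBytes(StandardCharsets.UTF_8);
  }
}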

it/cavallium/dbengine/database/disk/LLLocalGroupedEntryReactiveRocksIterator.java

@@ -1,32 +1,25 @@
 package it.cavallium.dbengine.database.disk;
-import io.netty5.buffer.Buffer;
-import io.netty5.util.Send;
+import it.cavallium.dbengine.buffers.Buf;
 import it.cavallium.dbengine.database.LLEntry;
 import it.cavallium.dbengine.database.LLRange;
 import java.util.function.Supplier;
 import org.rocksdb.ReadOptions;
-import reactor.core.publisher.Mono;
-public class LLLocalGroupedEntryReactiveRocksIterator extends
-    LLLocalGroupedReactiveRocksIterator<LLEntry> {
+public class LLLocalGroupedEntryReactiveRocksIterator extends LLLocalGroupedReactiveRocksIterator<LLEntry> {
   public LLLocalGroupedEntryReactiveRocksIterator(RocksDBColumn db,
       int prefixLength,
-      Mono<LLRange> rangeMono,
-      boolean allowNettyDirect,
+      LLRange range,
       Supplier<ReadOptions> readOptions,
       boolean smallRange) {
-    super(db, prefixLength, rangeMono, allowNettyDirect, readOptions, false, true, smallRange);
+    super(db, prefixLength, range, readOptions, true, true, smallRange);
   }
   @Override
-  public LLEntry getEntry(Buffer key, Buffer value) {
+  public LLEntry getEntry(Buf key, Buf value) {
     assert key != null;
     assert value != null;
-    return LLEntry.of(
-        key.touch("iteration entry key"),
-        value.touch("iteration entry value")
-    );
+    return LLEntry.of(key.copy(), value.copy());
   }
 }

it/cavallium/dbengine/database/disk/LLLocalGroupedKeyReactiveRocksIterator.java

@@ -1,28 +1,23 @@
 package it.cavallium.dbengine.database.disk;
-import io.netty5.buffer.Buffer;
-import io.netty5.util.Send;
+import it.cavallium.dbengine.buffers.Buf;
 import it.cavallium.dbengine.database.LLRange;
 import java.util.function.Supplier;
 import org.rocksdb.ReadOptions;
-import reactor.core.publisher.Mono;
-public class LLLocalGroupedKeyReactiveRocksIterator extends LLLocalGroupedReactiveRocksIterator<Buffer> {
+public class LLLocalGroupedKeyReactiveRocksIterator extends LLLocalGroupedReactiveRocksIterator<Buf> {
   public LLLocalGroupedKeyReactiveRocksIterator(RocksDBColumn db,
       int prefixLength,
-      Mono<LLRange> rangeMono,
-      boolean allowNettyDirect,
+      LLRange range,
       Supplier<ReadOptions> readOptions,
       boolean smallRange) {
-    super(db, prefixLength, rangeMono, allowNettyDirect, readOptions, true, false, smallRange);
+    super(db, prefixLength, range, readOptions, true, false, smallRange);
   }
   @Override
-  public Buf getEntry(Buffer key, Buffer value) {
-    if (value != null) {
-      value.close();
-    }
-    return key;
+  public Buf getEntry(Buf key, Buf value) {
+    assert key != null;
+    return key.copy();
   }
 }
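The grouped key iterator emits keys bucketed by a fixed-length prefix. A minimal sketch of that grouping logic over a plain RocksIterator; the prefix length and the keys are illustrative:

import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.rocksdb.Options;
import org.rocksdb.ReadOptions;
import org.rocksdb.RocksDB;
import org.rocksdb.RocksDBException;
import org.rocksdb.RocksIterator;

// Emit one batch of keys per fixed-length prefix, the behavior the
// grouped iterators implement on top of the range scan.
public final class GroupedKeysExample {

  public static void main(String[] args) throws RocksDBException {
    int prefixLength = 2;
    try (Options options = new Options().setCreateIfMissing(true);
        RocksDB db = RocksDB.open(options, "/tmp/grouped-example");
        ReadOptions readOptions = new ReadOptions()) {
      for (String k : List.of("aa1", "aa2", "ab1", "ab2", "ab3")) {
        db.put(k.getBytes(StandardCharsets.UTF_8), new byte[0]);
      }
      try (RocksIterator it = db.newIterator(readOptions)) {
        byte[] currentPrefix = null;
        List<String> group = new ArrayList<>();
        for (it.seekToFirst(); it.isValid(); it.next()) {
          byte[] prefix = Arrays.copyOf(it.key(), prefixLength);
          // A new prefix closes the current group and starts the next one.
          if (currentPrefix != null && !Arrays.equals(prefix, currentPrefix)) {
            System.out.println(new String(currentPrefix, StandardCharsets.UTF_8) + " -> " + group);
            group = new ArrayList<>();
          }
          currentPrefix = prefix;
          group.add(new String(it.key(), StandardCharsets.UTF_8));
        }
        if (currentPrefix != null) {
          System.out.println(new String(currentPrefix, StandardCharsets.UTF_8) + " -> " + group);
        }
      }
    }
  }
}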

Some files were not shown because too many files have changed in this diff.