Converted everything to netty direct buffers

parent df84562bb9
commit 2e6aceafe6

pom.xml (16)
@@ -107,7 +107,7 @@
 <dependency>
   <groupId>org.junit.jupiter</groupId>
   <artifactId>junit-jupiter-api</artifactId>
-  <version>5.7.0</version>
+  <version>5.8.0-M1</version>
   <scope>test</scope>
   <exclusions>
     <exclusion>
@@ -116,6 +116,18 @@
     </exclusion>
   </exclusions>
 </dependency>
+<dependency>
+  <groupId>org.junit.jupiter</groupId>
+  <artifactId>junit-jupiter-engine</artifactId>
+  <version>5.8.0-M1</version>
+  <scope>test</scope>
+</dependency>
+<dependency>
+  <groupId>org.junit.jupiter</groupId>
+  <artifactId>junit-jupiter-params</artifactId>
+  <version>5.8.0-M1</version>
+  <scope>test</scope>
+</dependency>
 <!-- This will get hamcrest-core automatically -->
 <dependency>
   <groupId>org.hamcrest</groupId>
@@ -138,7 +150,7 @@
 <dependency>
   <groupId>org.rocksdb</groupId>
   <artifactId>rocksdbjni</artifactId>
-  <version>6.16.4</version>
+  <version>6.19.3</version>
 </dependency>
 <dependency>
   <groupId>org.apache.lucene</groupId>
@@ -11,7 +11,7 @@ public interface LLDatabaseConnection {

 	Mono<? extends LLDatabaseConnection> connect();

-	Mono<? extends LLKeyValueDatabase> getDatabase(String name, List<Column> columns, boolean lowMemory);
+	Mono<? extends LLKeyValueDatabase> getDatabase(String name, List<Column> columns, boolean lowMemory, boolean inMemory);

 	Mono<? extends LLLuceneIndex> getLuceneIndex(String name,
 			int instancesCount,
@@ -19,7 +19,8 @@ public interface LLDatabaseConnection {
 			TextFieldsSimilarity textFieldsSimilarity,
 			Duration queryRefreshDebounceTime,
 			Duration commitDebounceTime,
-			boolean lowMemory);
+			boolean lowMemory,
+			boolean inMemory);

 	Mono<Void> disconnect();
 }
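The getDatabase() and getLuceneIndex() signatures gain an inMemory flag. A minimal caller sketch; the method name, database name, and the assumption that inMemory = true requests a non-persistent store are illustrative, not part of this commit:

import java.util.List;
import reactor.core.publisher.Mono;

class ConnectExample {
	// Opens a database with the new four-argument signature:
	// lowMemory = false, inMemory = true (assumed to mean "non-persistent").
	static Mono<? extends LLKeyValueDatabase> openInMemory(LLDatabaseConnection connection,
			String name,
			List<Column> columns) {
		return connection
				.connect()
				.flatMap(conn -> conn.getDatabase(name, columns, false, true));
	}
}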
@@ -1,8 +1,9 @@
 package it.cavallium.dbengine.database;

+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufAllocator;
 import java.util.List;
 import java.util.Map.Entry;
-import java.util.Optional;
 import java.util.function.Function;
 import org.jetbrains.annotations.Nullable;
 import org.warp.commonutils.concurrency.atomicity.NotAtomic;
@@ -13,60 +14,62 @@ import reactor.core.publisher.Mono;
 @NotAtomic
 public interface LLDictionary extends LLKeyValueDatabaseStructure {

-	Mono<byte[]> get(@Nullable LLSnapshot snapshot, byte[] key, boolean existsAlmostCertainly);
+	ByteBufAllocator getAllocator();

-	default Mono<byte[]> get(@Nullable LLSnapshot snapshot, byte[] key) {
+	Mono<ByteBuf> get(@Nullable LLSnapshot snapshot, ByteBuf key, boolean existsAlmostCertainly);
+
+	default Mono<ByteBuf> get(@Nullable LLSnapshot snapshot, ByteBuf key) {
 		return get(snapshot, key, false);
 	}

-	Mono<byte[]> put(byte[] key, byte[] value, LLDictionaryResultType resultType);
+	Mono<ByteBuf> put(ByteBuf key, ByteBuf value, LLDictionaryResultType resultType);

-	Mono<Boolean> update(byte[] key, Function<Optional<byte[]>, Optional<byte[]>> updater, boolean existsAlmostCertainly);
+	Mono<Boolean> update(ByteBuf key, Function<@Nullable ByteBuf, @Nullable ByteBuf> updater, boolean existsAlmostCertainly);

-	default Mono<Boolean> update(byte[] key, Function<Optional<byte[]>, Optional<byte[]>> updater) {
+	default Mono<Boolean> update(ByteBuf key, Function<@Nullable ByteBuf, @Nullable ByteBuf> updater) {
 		return update(key, updater, false);
 	}

 	Mono<Void> clear();

-	Mono<byte[]> remove(byte[] key, LLDictionaryResultType resultType);
+	Mono<ByteBuf> remove(ByteBuf key, LLDictionaryResultType resultType);

-	Flux<Entry<byte[], byte[]>> getMulti(@Nullable LLSnapshot snapshot, Flux<byte[]> keys, boolean existsAlmostCertainly);
+	Flux<Entry<ByteBuf, ByteBuf>> getMulti(@Nullable LLSnapshot snapshot, Flux<ByteBuf> keys, boolean existsAlmostCertainly);

-	default Flux<Entry<byte[], byte[]>> getMulti(@Nullable LLSnapshot snapshot, Flux<byte[]> keys) {
+	default Flux<Entry<ByteBuf, ByteBuf>> getMulti(@Nullable LLSnapshot snapshot, Flux<ByteBuf> keys) {
 		return getMulti(snapshot, keys, false);
 	}

-	Flux<Entry<byte[], byte[]>> putMulti(Flux<Entry<byte[], byte[]>> entries, boolean getOldValues);
+	Flux<Entry<ByteBuf, ByteBuf>> putMulti(Flux<Entry<ByteBuf, ByteBuf>> entries, boolean getOldValues);

-	Flux<Entry<byte[], byte[]>> getRange(@Nullable LLSnapshot snapshot, LLRange range, boolean existsAlmostCertainly);
+	Flux<Entry<ByteBuf, ByteBuf>> getRange(@Nullable LLSnapshot snapshot, LLRange range, boolean existsAlmostCertainly);

-	default Flux<Entry<byte[], byte[]>> getRange(@Nullable LLSnapshot snapshot, LLRange range) {
+	default Flux<Entry<ByteBuf, ByteBuf>> getRange(@Nullable LLSnapshot snapshot, LLRange range) {
 		return getRange(snapshot, range, false);
 	}

-	Flux<List<Entry<byte[], byte[]>>> getRangeGrouped(@Nullable LLSnapshot snapshot,
+	Flux<List<Entry<ByteBuf, ByteBuf>>> getRangeGrouped(@Nullable LLSnapshot snapshot,
 			LLRange range,
 			int prefixLength,
 			boolean existsAlmostCertainly);

-	default Flux<List<Entry<byte[], byte[]>>> getRangeGrouped(@Nullable LLSnapshot snapshot,
+	default Flux<List<Entry<ByteBuf, ByteBuf>>> getRangeGrouped(@Nullable LLSnapshot snapshot,
 			LLRange range,
 			int prefixLength) {
 		return getRangeGrouped(snapshot, range, prefixLength, false);
 	}

-	Flux<byte[]> getRangeKeys(@Nullable LLSnapshot snapshot, LLRange range);
+	Flux<ByteBuf> getRangeKeys(@Nullable LLSnapshot snapshot, LLRange range);

-	Flux<List<byte[]>> getRangeKeysGrouped(@Nullable LLSnapshot snapshot, LLRange range, int prefixLength);
+	Flux<List<ByteBuf>> getRangeKeysGrouped(@Nullable LLSnapshot snapshot, LLRange range, int prefixLength);

-	Flux<byte[]> getRangeKeyPrefixes(@Nullable LLSnapshot snapshot, LLRange range, int prefixLength);
+	Flux<ByteBuf> getRangeKeyPrefixes(@Nullable LLSnapshot snapshot, LLRange range, int prefixLength);

-	Flux<Entry<byte[], byte[]>> setRange(LLRange range, Flux<Entry<byte[], byte[]>> entries, boolean getOldValues);
+	Flux<Entry<ByteBuf, ByteBuf>> setRange(LLRange range, Flux<Entry<ByteBuf, ByteBuf>> entries, boolean getOldValues);

 	default Mono<Void> replaceRange(LLRange range,
 			boolean canKeysChange,
-			Function<Entry<byte[], byte[]>, Mono<Entry<byte[], byte[]>>> entriesReplacer,
+			Function<Entry<ByteBuf, ByteBuf>, Mono<Entry<ByteBuf, ByteBuf>>> entriesReplacer,
 			boolean existsAlmostCertainly) {
 		return Mono.defer(() -> {
 			if (canKeysChange) {
@@ -87,7 +90,7 @@ public interface LLDictionary extends LLKeyValueDatabaseStructure {

 	default Mono<Void> replaceRange(LLRange range,
 			boolean canKeysChange,
-			Function<Entry<byte[], byte[]>, Mono<Entry<byte[], byte[]>>> entriesReplacer) {
+			Function<Entry<ByteBuf, ByteBuf>, Mono<Entry<ByteBuf, ByteBuf>>> entriesReplacer) {
 		return replaceRange(range, canKeysChange, entriesReplacer, false);
 	}

@@ -95,9 +98,9 @@ public interface LLDictionary extends LLKeyValueDatabaseStructure {

 	Mono<Long> sizeRange(@Nullable LLSnapshot snapshot, LLRange range, boolean fast);

-	Mono<Entry<byte[], byte[]>> getOne(@Nullable LLSnapshot snapshot, LLRange range);
+	Mono<Entry<ByteBuf, ByteBuf>> getOne(@Nullable LLSnapshot snapshot, LLRange range);

-	Mono<byte[]> getOneKey(@Nullable LLSnapshot snapshot, LLRange range);
+	Mono<ByteBuf> getOneKey(@Nullable LLSnapshot snapshot, LLRange range);

-	Mono<Entry<byte[], byte[]>> removeOne(LLRange range);
+	Mono<Entry<ByteBuf, ByteBuf>> removeOne(LLRange range);
 }
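LLDictionary now exchanges Netty ByteBuf instead of byte[], which makes buffer ownership explicit. A minimal sketch of the call-site discipline this implies, inferred from the call sites later in this commit (retain() before handing a buffer to the dictionary, release() in doFinally); the exact ownership contract is an assumption, not documented in the interface:

import io.netty.buffer.ByteBuf;
import java.nio.charset.StandardCharsets;
import reactor.core.publisher.Mono;

class DictionaryGetExample {
	static Mono<String> readUtf8Value(LLDictionary dict, String key) {
		// Allocate the key from the dictionary's own (direct) allocator.
		ByteBuf keyBuf = dict.getAllocator().directBuffer();
		keyBuf.writeCharSequence(key, StandardCharsets.UTF_8);
		return dict
				.get(null, keyBuf.retain()) // the callee takes its own reference
				.map(valueBuf -> {
					try {
						return valueBuf.toString(StandardCharsets.UTF_8);
					} finally {
						valueBuf.release(); // the returned buffer belongs to the caller
					}
				})
				.doFinally(s -> keyBuf.release()); // drop the caller's reference either way
	}
}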
@@ -1,5 +1,5 @@
 package it.cavallium.dbengine.database;

 public enum LLDictionaryResultType {
-	VOID, VALUE_CHANGED, PREVIOUS_VALUE
+	VOID, PREVIOUS_VALUE_EXISTENCE, PREVIOUS_VALUE
 }
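The rename from VALUE_CHANGED to PREVIOUS_VALUE_EXISTENCE makes the one-byte response explicit: it encodes whether a previous value existed. A short sketch pairing it with the responseToBoolean(ByteBuf) helper added to LLUtils in this commit:

import io.netty.buffer.ByteBuf;
import reactor.core.publisher.Mono;

class PutExistenceExample {
	// responseToBoolean decodes the single response byte and releases the buffer.
	static Mono<Boolean> putAndCheckExisted(LLDictionary dict, ByteBuf key, ByteBuf value) {
		return dict
				.put(key, value, LLDictionaryResultType.PREVIOUS_VALUE_EXISTENCE)
				.map(LLUtils::responseToBoolean);
	}
}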
@@ -1,5 +1,10 @@
 package it.cavallium.dbengine.database;

+import static io.netty.buffer.Unpooled.wrappedBuffer;
+import static io.netty.buffer.Unpooled.wrappedUnmodifiableBuffer;
+
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufUtil;
 import java.util.Arrays;
 import java.util.StringJoiner;

@@ -9,10 +14,18 @@ import java.util.StringJoiner;
 public class LLRange {

 	private static final LLRange RANGE_ALL = new LLRange(null, null);
-	private final byte[] min;
-	private final byte[] max;
+	private final ByteBuf min;
+	private final ByteBuf max;

-	private LLRange(byte[] min, byte[] max) {
+	private LLRange(ByteBuf min, ByteBuf max) {
+		assert min == null || min.refCnt() > 0;
+		assert max == null || max.refCnt() > 0;
+		if (min != null && !min.isDirect()) {
+			throw new IllegalArgumentException("Min buffer must be direct");
+		}
+		if (max != null && !max.isDirect()) {
+			throw new IllegalArgumentException("Min buffer must be direct");
+		}
 		this.min = min;
 		this.max = max;
 	}
@@ -21,50 +34,64 @@ public class LLRange {
 		return RANGE_ALL;
 	}

-	public static LLRange from(byte[] min) {
+	public static LLRange from(ByteBuf min) {
 		return new LLRange(min, null);
 	}

-	public static LLRange to(byte[] max) {
+	public static LLRange to(ByteBuf max) {
 		return new LLRange(null, max);
 	}

-	public static LLRange single(byte[] single) {
+	public static LLRange single(ByteBuf single) {
 		return new LLRange(single, single);
 	}

-	public static LLRange of(byte[] min, byte[] max) {
+	public static LLRange of(ByteBuf min, ByteBuf max) {
 		return new LLRange(min, max);
 	}

 	public boolean isAll() {
+		assert min == null || min.refCnt() > 0;
+		assert max == null || max.refCnt() > 0;
 		return min == null && max == null;
 	}

 	public boolean isSingle() {
+		assert min == null || min.refCnt() > 0;
+		assert max == null || max.refCnt() > 0;
 		if (min == null || max == null) return false;
-		return Arrays.equals(min, max);
+		return LLUtils.equals(min, max);
 	}

 	public boolean hasMin() {
+		assert min == null || min.refCnt() > 0;
+		assert max == null || max.refCnt() > 0;
 		return min != null;
 	}

-	public byte[] getMin() {
+	public ByteBuf getMin() {
+		assert min == null || min.refCnt() > 0;
+		assert max == null || max.refCnt() > 0;
 		assert min != null;
 		return min;
 	}

 	public boolean hasMax() {
+		assert min == null || min.refCnt() > 0;
+		assert max == null || max.refCnt() > 0;
 		return max != null;
 	}

-	public byte[] getMax() {
+	public ByteBuf getMax() {
+		assert min == null || min.refCnt() > 0;
+		assert max == null || max.refCnt() > 0;
 		assert max != null;
 		return max;
 	}

-	public byte[] getSingle() {
+	public ByteBuf getSingle() {
+		assert min == null || min.refCnt() > 0;
+		assert max == null || max.refCnt() > 0;
 		assert isSingle();
 		return min;
 	}
@@ -78,21 +105,40 @@ public class LLRange {
 			return false;
 		}
 		LLRange llRange = (LLRange) o;
-		return Arrays.equals(min, llRange.min) && Arrays.equals(max, llRange.max);
+		return LLUtils.equals(min, llRange.min) && LLUtils.equals(max, llRange.max);
 	}

 	@Override
 	public int hashCode() {
-		int result = Arrays.hashCode(min);
-		result = 31 * result + Arrays.hashCode(max);
+		int result = LLUtils.hashCode(min);
+		result = 31 * result + LLUtils.hashCode(max);
 		return result;
 	}

 	@Override
 	public String toString() {
 		return new StringJoiner(", ", LLRange.class.getSimpleName() + "[", "]")
-				.add("min=" + Arrays.toString(min))
-				.add("max=" + Arrays.toString(max))
+				.add("min=" + LLUtils.toString(min))
+				.add("max=" + LLUtils.toString(max))
 				.toString();
 	}
+
+	public LLRange retain() {
+		if (min != null) {
+			min.retain();
+		}
+		if (max != null) {
+			max.retain();
+		}
+		return this;
+	}
+
+	public void release() {
+		if (min != null) {
+			min.release();
+		}
+		if (max != null) {
+			max.release();
+		}
+	}
 }
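LLRange now holds ByteBuf endpoints and takes part in reference counting through the new retain()/release() pair. A usage sketch; the convention that the factory methods take over the endpoint references passed to them is an assumption drawn from this commit's call sites (note that release() releases both endpoints, so ranges built with single() share one buffer between min and max):

import io.netty.buffer.ByteBuf;
import io.netty.buffer.PooledByteBufAllocator;

class RangeExample {
	static void scanFrom(byte firstByte) {
		ByteBuf min = PooledByteBufAllocator.DEFAULT.directBuffer(1);
		min.writeByte(firstByte);
		LLRange range = LLRange.from(min); // the range now owns this reference
		try {
			// pass range.retain() to any consumer that outlives this scope
		} finally {
			range.release(); // releases the min endpoint
		}
	}
}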
@@ -2,10 +2,18 @@ package it.cavallium.dbengine.database;

 import com.google.common.primitives.Ints;
 import com.google.common.primitives.Longs;
+import io.netty.buffer.AbstractByteBufAllocator;
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufAllocator;
+import io.netty.buffer.ByteBufUtil;
+import io.netty.buffer.CompositeByteBuf;
+import io.netty.buffer.PooledByteBufAllocator;
 import it.cavallium.dbengine.lucene.RandomSortField;
 import java.nio.ByteBuffer;
+import java.util.ArrayList;
 import java.util.LinkedList;
 import java.util.List;
+import java.util.function.ToIntFunction;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FloatPoint;
@@ -20,13 +28,20 @@ import org.apache.lucene.search.ScoreMode;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.SortedNumericSortField;
+import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
+import org.rocksdb.RocksDB;
+
+import static io.netty.buffer.Unpooled.EMPTY_BUFFER;
+import static io.netty.buffer.Unpooled.wrappedBuffer;

 @SuppressWarnings("unused")
 public class LLUtils {

 	private static final byte[] RESPONSE_TRUE = new byte[]{1};
 	private static final byte[] RESPONSE_FALSE = new byte[]{0};
+	private static final byte[] RESPONSE_TRUE_BUF = new byte[]{1};
+	private static final byte[] RESPONSE_FALSE_BUF = new byte[]{0};
 	public static final byte[][] LEXICONOGRAPHIC_ITERATION_SEEKS = new byte[256][1];

 	static {
@@ -40,10 +55,23 @@ public class LLUtils {
 		return response[0] == 1;
 	}

+	public static boolean responseToBoolean(ByteBuf response) {
+		try {
+			assert response.readableBytes() == 1;
+			return response.getByte(response.readerIndex()) == 1;
+		} finally {
+			response.release();
+		}
+	}
+
 	public static byte[] booleanToResponse(boolean bool) {
 		return bool ? RESPONSE_TRUE : RESPONSE_FALSE;
 	}

+	public static ByteBuf booleanToResponseByteBuffer(boolean bool) {
+		return wrappedBuffer(booleanToResponse(bool));
+	}
+
 	@Nullable
 	public static Sort toSort(@Nullable LLSort sort) {
 		if (sort == null) {
@@ -127,4 +155,228 @@ public class LLUtils {
 	public static it.cavallium.dbengine.database.LLKeyScore toKeyScore(LLKeyScore hit) {
 		return new it.cavallium.dbengine.database.LLKeyScore(hit.getKey(), hit.getScore());
 	}
+
+	public static String toString(ByteBuf key) {
+		if (key == null) {
+			return "null";
+		} else {
+			int startIndex = key.readerIndex();
+			int iMax = key.readableBytes() - 1;
+			int iLimit = 128;
+			if (iMax <= -1) {
+				return "[]";
+			} else {
+				StringBuilder b = new StringBuilder();
+				b.append('[');
+				int i = 0;
+
+				while (true) {
+					b.append(key.getByte(startIndex + i));
+					if (i == iLimit) {
+						b.append("…");
+					}
+					if (i == iMax || i == iLimit) {
+						return b.append(']').toString();
+					}
+
+					b.append(", ");
+					++i;
+				}
+			}
+		}
+	}
+
+	public static boolean equals(ByteBuf a, ByteBuf b) {
+		if (a == null && b == null) {
+			return true;
+		} else if (a != null && b != null) {
+			return ByteBufUtil.equals(a, b);
+		} else {
+			return false;
+		}
+	}
+
+	public static byte[] toArray(ByteBuf key) {
+		byte[] keyBytes = new byte[key.readableBytes()];
+		key.getBytes(key.readerIndex(), keyBytes, 0, key.readableBytes());
+		return keyBytes;
+	}
+
+	public static List<byte[]> toArray(List<ByteBuf> input) {
+		List<byte[]> result = new ArrayList<>(input.size());
+		for (ByteBuf byteBuf : input) {
+			result.add(toArray(byteBuf));
+		}
+		return result;
+	}
+
+	public static int hashCode(ByteBuf buf) {
+		return buf == null ? 0 : buf.hashCode();
+	}
+
+	@Nullable
+	public static ByteBuf readNullableDirectNioBuffer(ByteBufAllocator alloc, ToIntFunction<ByteBuffer> reader) {
+		ByteBuf buffer = alloc.directBuffer();
+		try {
+			ByteBuf directBuffer = null;
+			ByteBuffer nioBuffer;
+			int size;
+			Boolean mustBeCopied = null;
+			do {
+				if (mustBeCopied == null || !mustBeCopied) {
+					nioBuffer = LLUtils.toDirectFast(buffer.retain());
+					if (nioBuffer != null) {
+						nioBuffer.limit(nioBuffer.capacity());
+					}
+				} else {
+					nioBuffer = null;
+				}
+				if ((mustBeCopied != null && mustBeCopied) || nioBuffer == null) {
+					directBuffer = LLUtils.toDirectCopy(buffer.retain());
+					nioBuffer = directBuffer.nioBuffer(0, directBuffer.capacity());
+					mustBeCopied = true;
+				} else {
+					mustBeCopied = false;
+				}
+				try {
+					assert nioBuffer.isDirect();
+					size = reader.applyAsInt(nioBuffer);
+					if (size != RocksDB.NOT_FOUND) {
+						if (mustBeCopied) {
+							buffer.writerIndex(0).writeBytes(nioBuffer);
+						}
+						if (size == nioBuffer.limit()) {
+							buffer.setIndex(0, size);
+							return buffer;
+						} else {
+							assert size > nioBuffer.limit();
+							assert nioBuffer.limit() > 0;
+							buffer.capacity(size);
+						}
+					}
+				} finally {
+					if (nioBuffer != null) {
+						nioBuffer = null;
+					}
+					if (directBuffer != null) {
+						directBuffer.release();
+						directBuffer = null;
+					}
+				}
+			} while (size != RocksDB.NOT_FOUND);
+		} catch (Throwable t) {
+			buffer.release();
+			throw t;
+		}
+		return null;
+	}
+
+	@Nullable
+	public static ByteBuffer toDirectFast(ByteBuf buffer) {
+		try {
+			ByteBuffer result = buffer.nioBuffer(0, buffer.capacity());
+			if (result.isDirect()) {
+				result.limit(buffer.writerIndex());
+
+				assert result.isDirect();
+				assert result.capacity() == buffer.capacity();
+				assert buffer.readerIndex() == result.position();
+				assert result.limit() - result.position() == buffer.readableBytes();
+
+				return result;
+			} else {
+				return null;
+			}
+		} finally {
+			buffer.release();
+		}
+	}
+
+	public static ByteBuf toDirectCopy(ByteBuf buffer) {
+		try {
+			ByteBuf directCopyBuf = buffer.alloc().directBuffer(buffer.capacity(), buffer.maxCapacity());
+			directCopyBuf.writeBytes(buffer, 0, buffer.writerIndex());
+			return directCopyBuf;
+		} finally {
+			buffer.release();
+		}
+	}
+
+	public static ByteBuf convertToDirectByteBuf(AbstractByteBufAllocator alloc, ByteBuf buffer) {
+		ByteBuf result;
+		ByteBuf directCopyBuf = alloc.directBuffer(buffer.capacity(), buffer.maxCapacity());
+		directCopyBuf.writeBytes(buffer, 0, buffer.writerIndex());
+		directCopyBuf.readerIndex(buffer.readerIndex());
+		result = directCopyBuf;
+		assert result.isDirect();
+		assert result.capacity() == buffer.capacity();
+		assert buffer.readerIndex() == result.readerIndex();
+		return result;
+	}
+
+	@NotNull
+	public static ByteBuf readDirectNioBuffer(ByteBufAllocator alloc, ToIntFunction<ByteBuffer> reader) {
+		var buffer = readNullableDirectNioBuffer(alloc, reader);
+		if (buffer == null) {
+			throw new IllegalStateException("A non-nullable buffer read operation tried to return a \"not found\" element");
+		}
+		return buffer;
+	}
+
+	public static ByteBuf directCompositeBuffer(ByteBufAllocator alloc, ByteBuf buffer) {
+		return wrappedBuffer(buffer);
+	}
+
+	public static ByteBuf directCompositeBuffer(ByteBufAllocator alloc, ByteBuf buffer1, ByteBuf buffer2) {
+		assert buffer1.isDirect();
+		assert buffer1.nioBuffer().isDirect();
+		assert buffer2.isDirect();
+		assert buffer2.nioBuffer().isDirect();
+		if (buffer1.readableBytes() == 0) {
+			return wrappedBuffer(buffer2);
+		} else if (buffer2.readableBytes() == 0) {
+			return wrappedBuffer(buffer1);
+		}
+		CompositeByteBuf compositeBuffer = alloc.compositeDirectBuffer(2);
+		compositeBuffer.addComponent(true, buffer1);
+		compositeBuffer.addComponent(true, buffer2);
+		compositeBuffer.consolidate();
+		assert compositeBuffer.isDirect();
+		assert compositeBuffer.nioBuffer().isDirect();
+		return compositeBuffer;
+	}
+
+	public static ByteBuf directCompositeBuffer(ByteBufAllocator alloc, ByteBuf buffer1, ByteBuf buffer2, ByteBuf buffer3) {
+		if (buffer1.readableBytes() == 0) {
+			return directCompositeBuffer(alloc, buffer2, buffer3);
+		} else if (buffer2.readableBytes() == 0) {
+			return directCompositeBuffer(alloc, buffer1, buffer3);
+		} else if (buffer3.readableBytes() == 0) {
+			return directCompositeBuffer(alloc, buffer1, buffer2);
+		}
+		CompositeByteBuf compositeBuffer = alloc.compositeDirectBuffer(3);
+		compositeBuffer.addComponent(true, buffer1);
+		compositeBuffer.addComponent(true, buffer2);
+		compositeBuffer.addComponent(true, buffer3);
+		compositeBuffer.consolidate();
+		return compositeBuffer;
+	}
+
+	public static ByteBuf directCompositeBuffer(ByteBufAllocator alloc, ByteBuf... buffers) {
+		switch (buffers.length) {
+			case 0:
+				return EMPTY_BUFFER;
+			case 1:
+				return directCompositeBuffer(alloc, buffers[0]);
+			case 2:
+				return directCompositeBuffer(alloc, buffers[0], buffers[1]);
+			case 3:
+				return directCompositeBuffer(alloc, buffers[0], buffers[1], buffers[2]);
+			default:
+				CompositeByteBuf compositeBuffer = alloc.compositeDirectBuffer(buffers.length);
+				compositeBuffer.addComponents(true, buffers);
+				compositeBuffer.consolidate();
+				return compositeBuffer;
+		}
+	}
 }
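The new readNullableDirectNioBuffer helper retries its reader with a growing direct buffer until the value fits, and returns null when the reader reports RocksDB.NOT_FOUND. A sketch of how it can wrap RocksDB's ByteBuffer-based get overload; this wiring is an assumption for illustration, since the commit only adds the helper itself:

import io.netty.buffer.ByteBuf;
import io.netty.buffer.PooledByteBufAllocator;
import java.nio.ByteBuffer;
import org.rocksdb.ReadOptions;
import org.rocksdb.RocksDB;
import org.rocksdb.RocksDBException;

class RocksReadExample {
	// Returns the value as a direct ByteBuf, or null if the key is absent.
	static ByteBuf read(RocksDB db, ReadOptions opts, ByteBuffer directKey) {
		return LLUtils.readNullableDirectNioBuffer(PooledByteBufAllocator.DEFAULT, valueBuffer -> {
			try {
				directKey.rewind();
				// get() reports the full value size even when valueBuffer is too
				// small, which is what drives the helper's grow-and-retry loop.
				return db.get(opts, directKey, valueBuffer);
			} catch (RocksDBException ex) {
				throw new IllegalStateException(ex);
			}
		});
	}
}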
@@ -1,24 +1,25 @@
 package it.cavallium.dbengine.database.collections;

+import io.netty.buffer.ByteBuf;
 import it.cavallium.dbengine.database.LLDictionary;
 import it.cavallium.dbengine.database.serialization.Serializer;
 import java.util.function.Function;
 import org.jetbrains.annotations.NotNull;
+import static io.netty.buffer.Unpooled.*;

 public class DatabaseEmpty {

 	@SuppressWarnings({"unused", "InstantiationOfUtilityClass"})
 	public static final Nothing NOTHING = new Nothing();
-	private static final byte[] NOTHING_BYTES = new byte[0];
-	public static final Serializer<Nothing, byte[]> NOTHING_SERIALIZER = new Serializer<>() {
+	public static final Serializer<Nothing, ByteBuf> NOTHING_SERIALIZER = new Serializer<>() {
 		@Override
-		public @NotNull Nothing deserialize(byte @NotNull [] serialized) {
+		public @NotNull Nothing deserialize(@NotNull ByteBuf serialized) {
 			return NOTHING;
 		}

 		@Override
-		public byte @NotNull [] serialize(@NotNull Nothing deserialized) {
-			return NOTHING_BYTES;
+		public @NotNull ByteBuf serialize(@NotNull Nothing deserialized) {
+			return EMPTY_BUFFER;
 		}
 	};
 	public static final Function<Nothing, Nothing> NOTHING_HASH_FUNCTION = nothing -> nothing;
@@ -28,7 +29,7 @@ public class DatabaseEmpty {
 	private DatabaseEmpty() {
 	}

-	public static DatabaseStageEntry<Nothing> create(LLDictionary dictionary, byte[] key) {
+	public static DatabaseStageEntry<Nothing> create(LLDictionary dictionary, ByteBuf key) {
 		return new DatabaseSingle<>(dictionary, key, NOTHING_SERIALIZER);
 	}
@@ -1,18 +1,19 @@
 package it.cavallium.dbengine.database.collections;

+import io.netty.buffer.ByteBuf;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.LLDictionary;
 import it.cavallium.dbengine.database.LLDictionaryResultType;
 import it.cavallium.dbengine.database.LLUtils;
 import it.cavallium.dbengine.database.serialization.Serializer;
 import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
-import java.util.Arrays;
+import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Map.Entry;
-import java.util.Optional;
 import java.util.function.Function;
 import org.jetbrains.annotations.Nullable;
+import org.rocksdb.RocksDBException;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
@@ -21,40 +22,44 @@ import reactor.core.publisher.Mono;
 */
 public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U, DatabaseStageEntry<U>> {

-	private final Serializer<U, byte[]> valueSerializer;
+	private final Serializer<U, ByteBuf> valueSerializer;

 	protected DatabaseMapDictionary(LLDictionary dictionary,
-			byte[] prefixKey,
-			SerializerFixedBinaryLength<T, byte[]> keySuffixSerializer,
-			Serializer<U, byte[]> valueSerializer) {
+			ByteBuf prefixKey,
+			SerializerFixedBinaryLength<T, ByteBuf> keySuffixSerializer,
+			Serializer<U, ByteBuf> valueSerializer) {
+		// Do not retain or release or use the prefixKey here
 		super(dictionary, prefixKey, keySuffixSerializer, new SubStageGetterSingle<>(valueSerializer), 0);
+		prefixKey = null;
 		this.valueSerializer = valueSerializer;
 	}

 	public static <T, U> DatabaseMapDictionary<T, U> simple(LLDictionary dictionary,
-			SerializerFixedBinaryLength<T, byte[]> keySerializer,
-			Serializer<U, byte[]> valueSerializer) {
+			SerializerFixedBinaryLength<T, ByteBuf> keySerializer,
+			Serializer<U, ByteBuf> valueSerializer) {
 		return new DatabaseMapDictionary<>(dictionary, EMPTY_BYTES, keySerializer, valueSerializer);
 	}

 	public static <T, U> DatabaseMapDictionary<T, U> tail(LLDictionary dictionary,
-			byte[] prefixKey,
-			SerializerFixedBinaryLength<T, byte[]> keySuffixSerializer,
-			Serializer<U, byte[]> valueSerializer) {
+			ByteBuf prefixKey,
+			SerializerFixedBinaryLength<T, ByteBuf> keySuffixSerializer,
+			Serializer<U, ByteBuf> valueSerializer) {
 		return new DatabaseMapDictionary<>(dictionary, prefixKey, keySuffixSerializer, valueSerializer);
 	}

-	private byte[] toKey(byte[] suffixKey) {
-		assert suffixKeyConsistency(suffixKey.length);
-		byte[] key = Arrays.copyOf(keyPrefix, keyPrefix.length + suffixKey.length);
-		System.arraycopy(suffixKey, 0, key, keyPrefix.length, suffixKey.length);
-		return key;
+	private ByteBuf toKey(ByteBuf suffixKey) {
+		assert suffixKeyConsistency(suffixKey.readableBytes());
+		try {
+			return LLUtils.directCompositeBuffer(dictionary.getAllocator(), keyPrefix.retain(), suffixKey.retain());
+		} finally {
+			suffixKey.release();
+		}
 	}

 	@Override
 	public Mono<Map<T, U>> get(@Nullable CompositeSnapshot snapshot, boolean existsAlmostCertainly) {
 		return dictionary
-				.getRange(resolveSnapshot(snapshot), range, existsAlmostCertainly)
+				.getRange(resolveSnapshot(snapshot), range.retain(), existsAlmostCertainly)
 				.collectMap(
 						entry -> deserializeSuffix(stripPrefix(entry.getKey())),
 						entry -> deserialize(entry.getValue()),
@@ -64,7 +69,7 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 	@Override
 	public Mono<Map<T, U>> setAndGetPrevious(Map<T, U> value) {
 		return dictionary
-				.setRange(range,
+				.setRange(range.retain(),
 						Flux
 								.fromIterable(value.entrySet())
 								.map(entry -> Map.entry(serializeSuffix(entry.getKey()), serialize(entry.getValue()))),
@@ -79,7 +84,7 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 	@Override
 	public Mono<Map<T, U>> clearAndGetPrevious() {
 		return dictionary
-				.setRange(range, Flux.empty(), true)
+				.setRange(range.retain(), Flux.empty(), true)
 				.collectMap(
 						entry -> deserializeSuffix(stripPrefix(entry.getKey())),
 						entry -> deserialize(entry.getValue()),
@@ -88,96 +93,170 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 	@Override
 	public Mono<Long> leavesCount(@Nullable CompositeSnapshot snapshot, boolean fast) {
-		return dictionary.sizeRange(resolveSnapshot(snapshot), range, fast);
+		return dictionary.sizeRange(resolveSnapshot(snapshot), range.retain(), fast);
 	}

 	@Override
 	public Mono<Boolean> isEmpty(@Nullable CompositeSnapshot snapshot) {
-		return dictionary.isRangeEmpty(resolveSnapshot(snapshot), range);
+		return dictionary.isRangeEmpty(resolveSnapshot(snapshot), range.retain());
 	}

 	@Override
 	public Mono<DatabaseStageEntry<U>> at(@Nullable CompositeSnapshot snapshot, T keySuffix) {
+		ByteBuf keySuffixBuf = serializeSuffix(keySuffix);
+		ByteBuf keyBuf = toKey(keySuffixBuf.retain());
 		return Mono
-				.just(new DatabaseSingle<>(dictionary, toKey(serializeSuffix(keySuffix)), Serializer.noop()))
-				.map(entry -> new DatabaseSingleMapped<>(entry, valueSerializer));
+				.fromSupplier(() -> new DatabaseSingle<>(dictionary, keyBuf.retain(), Serializer.noop()))
+				.<DatabaseStageEntry<U>>map(entry -> new DatabaseSingleMapped<>(entry, valueSerializer))
+				.doFinally(s -> {
+					keyBuf.release();
+					keySuffixBuf.release();
+				});
 	}

 	@Override
 	public Mono<U> getValue(@Nullable CompositeSnapshot snapshot, T keySuffix, boolean existsAlmostCertainly) {
+		ByteBuf keySuffixBuf = serializeSuffix(keySuffix);
+		ByteBuf keyBuf = toKey(keySuffixBuf.retain());
 		return dictionary
-				.get(resolveSnapshot(snapshot), toKey(serializeSuffix(keySuffix)), existsAlmostCertainly)
-				.map(this::deserialize);
+				.get(resolveSnapshot(snapshot), keyBuf.retain(), existsAlmostCertainly)
+				.map(this::deserialize)
+				.doFinally(s -> {
+					keyBuf.release();
+					keySuffixBuf.release();
+				});
 	}

 	@Override
 	public Mono<Void> putValue(T keySuffix, U value) {
-		return dictionary.put(toKey(serializeSuffix(keySuffix)), serialize(value), LLDictionaryResultType.VOID).then();
+		ByteBuf keySuffixBuf = serializeSuffix(keySuffix);
+		ByteBuf keyBuf = toKey(keySuffixBuf.retain());
+		ByteBuf valueBuf = serialize(value);
+		return dictionary.put(keyBuf.retain(), valueBuf.retain(), LLDictionaryResultType.VOID).doFinally(s -> {
+			keyBuf.release();
+			keySuffixBuf.release();
+			valueBuf.release();
+		}).then();
 	}

 	@Override
 	public Mono<Boolean> updateValue(T keySuffix,
 			boolean existsAlmostCertainly,
-			Function<Optional<U>, Optional<U>> updater) {
-		return dictionary.update(toKey(serializeSuffix(keySuffix)),
-				oldSerialized -> updater.apply(oldSerialized.map(this::deserialize)).map(this::serialize),
-				existsAlmostCertainly
-		);
+			Function<@Nullable U, @Nullable U> updater) {
+		ByteBuf keySuffixBuf = serializeSuffix(keySuffix);
+		ByteBuf keyBuf = toKey(keySuffixBuf.retain());
+		return dictionary.update(keyBuf.retain(), oldSerialized -> {
+			try {
+				var result = updater.apply(oldSerialized == null ? null : this.deserialize(oldSerialized.retain()));
+				if (result == null) {
+					return null;
+				} else {
+					return this.serialize(result);
+				}
+			} finally {
+				if (oldSerialized != null) {
+					oldSerialized.release();
+				}
+			}
+		}, existsAlmostCertainly).doFinally(s -> {
+			keyBuf.release();
+			keySuffixBuf.release();
+		});
 	}

 	@Override
 	public Mono<U> putValueAndGetPrevious(T keySuffix, U value) {
+		ByteBuf keySuffixBuf = serializeSuffix(keySuffix);
+		ByteBuf keyBuf = toKey(keySuffixBuf.retain());
+		ByteBuf valueBuf = serialize(value);
 		return dictionary
-				.put(toKey(serializeSuffix(keySuffix)), serialize(value), LLDictionaryResultType.PREVIOUS_VALUE)
-				.map(this::deserialize);
+				.put(keyBuf.retain(), valueBuf.retain(), LLDictionaryResultType.PREVIOUS_VALUE)
+				.map(this::deserialize)
+				.doFinally(s -> {
+					keyBuf.release();
+					keySuffixBuf.release();
+					valueBuf.release();
+				});
 	}

 	@Override
 	public Mono<Boolean> putValueAndGetStatus(T keySuffix, U value) {
+		ByteBuf keySuffixBuf = serializeSuffix(keySuffix);
+		ByteBuf keyBuf = toKey(keySuffixBuf.retain());
+		ByteBuf valueBuf = serialize(value);
 		return dictionary
-				.put(toKey(serializeSuffix(keySuffix)), serialize(value), LLDictionaryResultType.VALUE_CHANGED)
-				.map(LLUtils::responseToBoolean);
+				.put(keyBuf.retain(), valueBuf.retain(), LLDictionaryResultType.PREVIOUS_VALUE_EXISTENCE)
+				.map(LLUtils::responseToBoolean)
+				.doFinally(s -> {
+					keyBuf.release();
+					keySuffixBuf.release();
+					valueBuf.release();
+				});
 	}

 	@Override
 	public Mono<Void> remove(T keySuffix) {
-		return dictionary.remove(toKey(serializeSuffix(keySuffix)), LLDictionaryResultType.VOID).then();
+		ByteBuf keySuffixBuf = serializeSuffix(keySuffix);
+		ByteBuf keyBuf = toKey(keySuffixBuf.retain());
+		return dictionary.remove(keyBuf.retain(), LLDictionaryResultType.VOID).doFinally(s -> {
+			keyBuf.release();
+			keySuffixBuf.release();
+		}).then();
 	}

 	@Override
 	public Mono<U> removeAndGetPrevious(T keySuffix) {
+		ByteBuf keySuffixBuf = serializeSuffix(keySuffix);
+		ByteBuf keyBuf = toKey(keySuffixBuf.retain());
 		return dictionary
-				.remove(toKey(serializeSuffix(keySuffix)), LLDictionaryResultType.PREVIOUS_VALUE)
-				.map(this::deserialize);
+				.remove(keyBuf.retain(), LLDictionaryResultType.PREVIOUS_VALUE)
+				.map(this::deserialize)
+				.doFinally(s -> {
+					keyBuf.release();
+					keySuffixBuf.release();
+				});
 	}

 	@Override
 	public Mono<Boolean> removeAndGetStatus(T keySuffix) {
+		ByteBuf keySuffixBuf = serializeSuffix(keySuffix);
+		ByteBuf keyBuf = toKey(keySuffixBuf.retain());
 		return dictionary
-				.remove(toKey(serializeSuffix(keySuffix)), LLDictionaryResultType.VALUE_CHANGED)
-				.map(LLUtils::responseToBoolean);
+				.remove(keyBuf.retain(), LLDictionaryResultType.PREVIOUS_VALUE_EXISTENCE)
+				.map(LLUtils::responseToBoolean)
+				.doFinally(s -> {
+					keyBuf.release();
+					keySuffixBuf.release();
+				});
 	}

 	@Override
 	public Flux<Entry<T, U>> getMulti(@Nullable CompositeSnapshot snapshot, Flux<T> keys, boolean existsAlmostCertainly) {
 		return dictionary
-				.getMulti(resolveSnapshot(snapshot), keys.map(keySuffix -> toKey(serializeSuffix(keySuffix))), existsAlmostCertainly)
-				.map(entry -> Map.entry(deserializeSuffix(stripPrefix(entry.getKey())), deserialize(entry.getValue())));
+				.getMulti(resolveSnapshot(snapshot), keys.flatMap(keySuffix -> Mono.fromCallable(() -> {
+					ByteBuf keySuffixBuf = serializeSuffix(keySuffix);
+					try {
+						return toKey(keySuffixBuf.retain());
+					} finally {
+						keySuffixBuf.release();
+					}
+				})), existsAlmostCertainly)
+				.flatMap(entry -> Mono.fromCallable(() -> Map.entry(deserializeSuffix(stripPrefix(entry.getKey())), deserialize(entry.getValue()))));
 	}

 	@Override
 	public Mono<Void> putMulti(Flux<Entry<T, U>> entries) {
 		return dictionary
-				.putMulti(entries
-						.map(entry -> Map
-								.entry(toKey(serializeSuffix(entry.getKey())), serialize(entry.getValue()))), false)
+				.putMulti(entries.flatMap(entry -> Mono.fromCallable(() -> Map.entry(toKey(serializeSuffix(entry.getKey())),
+						serialize(entry.getValue())
+				))), false)
 				.then();
 	}

 	@Override
 	public Flux<Entry<T, DatabaseStageEntry<U>>> getAllStages(@Nullable CompositeSnapshot snapshot) {
 		return dictionary
-				.getRangeKeys(resolveSnapshot(snapshot), range)
+				.getRangeKeys(resolveSnapshot(snapshot), range.retain())
 				.map(key -> Map.entry(deserializeSuffix(stripPrefix(key)),
 						new DatabaseSingleMapped<>(
 								new DatabaseSingle<>(dictionary,
@@ -191,7 +270,7 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 	@Override
 	public Flux<Entry<T, U>> getAllValues(@Nullable CompositeSnapshot snapshot) {
 		return dictionary
-				.getRange(resolveSnapshot(snapshot), range)
+				.getRange(resolveSnapshot(snapshot), range.retain())
 				.map(serializedEntry -> Map.entry(
 						deserializeSuffix(stripPrefix(serializedEntry.getKey())),
 						valueSerializer.deserialize(serializedEntry.getValue())
@@ -201,7 +280,7 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 	@Override
 	public Flux<Entry<T, U>> setAllValuesAndGetPrevious(Flux<Entry<T, U>> entries) {
 		return dictionary
-				.setRange(range,
+				.setRange(range.retain(),
 						entries.map(entry ->
 								Map.entry(toKey(serializeSuffix(entry.getKey())), serialize(entry.getValue()))), true)
 				.map(entry -> Map.entry(deserializeSuffix(stripPrefix(entry.getKey())), deserialize(entry.getValue())));
@@ -214,22 +293,31 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 				.clear();
 		} else if (range.isSingle()) {
 			return dictionary
-					.remove(range.getSingle(), LLDictionaryResultType.VOID)
+					.remove(range.getSingle().retain(), LLDictionaryResultType.VOID)
 					.then();
 		} else {
 			return dictionary
-					.setRange(range, Flux.empty(), false)
+					.setRange(range.retain(), Flux.empty(), false)
 					.then();
 		}
 	}

-	//todo: temporary wrapper. convert the whole class to buffers
-	private U deserialize(byte[] bytes) {
+	/**
+	 * This method is just a shorter version than valueSerializer::deserialize
+	 */
+	private U deserialize(ByteBuf bytes) {
 		return valueSerializer.deserialize(bytes);
 	}

-	//todo: temporary wrapper. convert the whole class to buffers
-	private byte[] serialize(U bytes) {
+	/**
+	 * This method is just a shorter version than valueSerializer::serialize
+	 */
+	private ByteBuf serialize(U bytes) {
 		return valueSerializer.serialize(bytes);
 	}
+
+	@Override
+	public void release() {
+		super.release();
+	}
 }
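updateValue() drops java.util.Optional in favor of nullable values, matching the new Function<@Nullable ByteBuf, @Nullable ByteBuf> updater in LLDictionary: null in means "no existing value", null out means "delete". A caller sketch; the String/Integer dictionary here is hypothetical:

import reactor.core.publisher.Mono;

class CounterExample {
	// Atomically increments a counter, creating it at 1 when absent.
	static Mono<Boolean> increment(DatabaseMapDictionary<String, Integer> counters, String key) {
		return counters.updateValue(key, false, old -> old == null ? 1 : old + 1);
	}
}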
@ -1,10 +1,15 @@
|
|||||||
package it.cavallium.dbengine.database.collections;
|
package it.cavallium.dbengine.database.collections;
|
||||||
|
|
||||||
|
import io.netty.buffer.ByteBuf;
|
||||||
|
import io.netty.buffer.ByteBufAllocator;
|
||||||
|
import io.netty.buffer.ByteBufUtil;
|
||||||
|
import io.netty.buffer.PooledByteBufAllocator;
|
||||||
import it.cavallium.dbengine.client.CompositeSnapshot;
|
import it.cavallium.dbengine.client.CompositeSnapshot;
|
||||||
import it.cavallium.dbengine.database.LLDictionary;
|
import it.cavallium.dbengine.database.LLDictionary;
|
||||||
import it.cavallium.dbengine.database.LLDictionaryResultType;
|
import it.cavallium.dbengine.database.LLDictionaryResultType;
|
||||||
import it.cavallium.dbengine.database.LLRange;
|
import it.cavallium.dbengine.database.LLRange;
|
||||||
import it.cavallium.dbengine.database.LLSnapshot;
|
import it.cavallium.dbengine.database.LLSnapshot;
|
||||||
|
import it.cavallium.dbengine.database.LLUtils;
|
||||||
import it.cavallium.dbengine.database.disk.LLLocalDictionary;
|
import it.cavallium.dbengine.database.disk.LLLocalDictionary;
|
||||||
import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
|
import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
|
||||||
import java.util.Arrays;
|
import java.util.Arrays;
|
||||||
@@ -14,88 +19,162 @@ import org.jetbrains.annotations.Nullable;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
 import reactor.util.function.Tuples;
+import static io.netty.buffer.Unpooled.*;
 
 // todo: implement optimized methods
 public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implements DatabaseStageMap<T, U, US> {
 
-	public static final byte[] EMPTY_BYTES = new byte[0];
+	public static final ByteBuf EMPTY_BYTES = unreleasableBuffer(directBuffer(0, 0));
 	protected final LLDictionary dictionary;
+	private final ByteBufAllocator alloc;
 	protected final SubStageGetter<U, US> subStageGetter;
-	protected final SerializerFixedBinaryLength<T, byte[]> keySuffixSerializer;
-	protected final byte[] keyPrefix;
+	protected final SerializerFixedBinaryLength<T, ByteBuf> keySuffixSerializer;
+	protected final ByteBuf keyPrefix;
+	protected final int keyPrefixLength;
 	protected final int keySuffixLength;
 	protected final int keyExtLength;
 	protected final LLRange range;
 
-	private static byte[] incrementPrefix(byte[] key, int prefixLength) {
-		boolean remainder = true;
-		final byte ff = (byte) 0xFF;
-		for (int i = prefixLength - 1; i >= 0; i--) {
-			if (key[i] != ff) {
-				key[i]++;
-				remainder = false;
-				break;
-			} else {
-				key[i] = 0x00;
-				remainder = true;
-			}
+	private static ByteBuf incrementPrefix(ByteBufAllocator alloc, ByteBuf originalKey, int prefixLength) {
+		try {
+			assert originalKey.readableBytes() >= prefixLength;
+			ByteBuf copiedBuf = alloc.directBuffer(originalKey.writerIndex(), originalKey.writerIndex() + 1);
+			try {
+				boolean overflowed = true;
+				final int ff = 0xFF;
+				int writtenBytes = 0;
+				copiedBuf.writerIndex(prefixLength);
+				for (int i = prefixLength - 1; i >= 0; i--) {
+					int iByte = originalKey.getUnsignedByte(i);
+					if (iByte != ff) {
+						copiedBuf.setByte(i, iByte + 1);
+						writtenBytes++;
+						overflowed = false;
+						break;
+					} else {
+						copiedBuf.setByte(i, 0x00);
+						writtenBytes++;
+						overflowed = true;
+					}
+				}
+				assert prefixLength - writtenBytes >= 0;
+				if (prefixLength - writtenBytes > 0) {
+					copiedBuf.setBytes(0, originalKey, 0, (prefixLength - writtenBytes));
+				}
+
+				copiedBuf.writerIndex(copiedBuf.capacity());
+
+				if (originalKey.writerIndex() - prefixLength > 0) {
+					copiedBuf.setBytes(prefixLength, originalKey, prefixLength, originalKey.writerIndex() - prefixLength);
+				}
+
+				if (overflowed) {
+					for (int i = 0; i < copiedBuf.writerIndex(); i++) {
+						copiedBuf.setByte(i, 0xFF);
+					}
+					copiedBuf.writeZero(1);
+				}
+				return copiedBuf.retain();
+			} finally {
+				copiedBuf.release();
 			}
-		}
-
-		if (remainder) {
-			Arrays.fill(key, 0, prefixLength, (byte) 0xFF);
-			return Arrays.copyOf(key, key.length + 1);
-		} else {
-			return key;
+		} finally {
+			originalKey.release();
 		}
 	}
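The rewritten incrementPrefix shows the reference-counting convention this commit adopts everywhere: a method that receives a ByteBuf owns it and must release it in a finally block, and it hands its result back retained so the buffer survives the inner cleanup. A minimal standalone sketch of that convention (the names below are illustrative, not from this repository):

import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;

final class OwnershipSketch {
	// Consumes "input" (releases it on every path) and returns a buffer
	// whose ownership transfers to the caller.
	static ByteBuf consumeAndCopy(ByteBufAllocator alloc, ByteBuf input) {
		try {
			ByteBuf copy = alloc.directBuffer(input.readableBytes());
			copy.writeBytes(input, input.readerIndex(), input.readableBytes());
			return copy;
		} finally {
			input.release(); // balances the retain() done by the caller
		}
	}
}

// Caller side: pass a retained duplicate if the key is still needed afterwards:
//   ByteBuf copy = OwnershipSketch.consumeAndCopy(alloc, key.retain());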
 
-	static byte[] firstRangeKey(byte[] prefixKey, int prefixLength, int suffixLength, int extLength) {
-		return zeroFillKeySuffixAndExt(prefixKey, prefixLength, suffixLength, extLength);
+	static ByteBuf firstRangeKey(ByteBufAllocator alloc, ByteBuf prefixKey, int prefixLength, int suffixLength, int extLength) {
+		return zeroFillKeySuffixAndExt(alloc, prefixKey, prefixLength, suffixLength, extLength);
 	}
 
-	static byte[] nextRangeKey(byte[] prefixKey, int prefixLength, int suffixLength, int extLength) {
-		byte[] nonIncremented = zeroFillKeySuffixAndExt(prefixKey, prefixLength, suffixLength, extLength);
-		return incrementPrefix(nonIncremented, prefixLength);
+	static ByteBuf nextRangeKey(ByteBufAllocator alloc, ByteBuf prefixKey, int prefixLength, int suffixLength, int extLength) {
+		try {
+			ByteBuf nonIncremented = zeroFillKeySuffixAndExt(alloc, prefixKey.retain(), prefixLength, suffixLength, extLength);
+			try {
+				return incrementPrefix(alloc, nonIncremented.retain(), prefixLength);
+			} finally {
+				nonIncremented.release();
+			}
+		} finally {
+			prefixKey.release();
+		}
 	}
 
-	protected static byte[] zeroFillKeySuffixAndExt(byte[] prefixKey, int prefixLength, int suffixLength, int extLength) {
-		assert prefixKey.length == prefixLength;
-		assert suffixLength > 0;
-		assert extLength >= 0;
-		byte[] result = Arrays.copyOf(prefixKey, prefixLength + suffixLength + extLength);
-		Arrays.fill(result, prefixLength, result.length, (byte) 0);
-		return result;
-	}
-
-	static byte[] firstRangeKey(byte[] prefixKey,
-			byte[] suffixKey,
-			int prefixLength,
-			int suffixLength,
-			int extLength) {
-		return zeroFillKeyExt(prefixKey, suffixKey, prefixLength, suffixLength, extLength);
-	}
-
-	static byte[] nextRangeKey(byte[] prefixKey,
-			byte[] suffixKey,
-			int prefixLength,
-			int suffixLength,
-			int extLength) {
-		byte[] nonIncremented = zeroFillKeyExt(prefixKey, suffixKey, prefixLength, suffixLength, extLength);
-		return incrementPrefix(nonIncremented, prefixLength + suffixLength);
-	}
-
-	protected static byte[] zeroFillKeyExt(byte[] prefixKey,
-			byte[] suffixKey,
-			int prefixLength,
-			int suffixLength,
-			int extLength) {
-		assert prefixKey.length == prefixLength;
-		assert suffixKey.length == suffixLength;
-		assert suffixLength > 0;
-		assert extLength >= 0;
-		byte[] result = Arrays.copyOf(prefixKey, prefixLength + suffixLength + extLength);
-		System.arraycopy(suffixKey, 0, result, prefixLength, suffixLength);
-		Arrays.fill(result, prefixLength + suffixLength, result.length, (byte) 0);
-		return result;
-	}
+	protected static ByteBuf zeroFillKeySuffixAndExt(ByteBufAllocator alloc, ByteBuf prefixKey, int prefixLength, int suffixLength, int extLength) {
+		try {
+			assert prefixKey.readableBytes() == prefixLength;
+			assert suffixLength > 0;
+			assert extLength >= 0;
+			if (!prefixKey.isDirect()) {
+				throw new IllegalArgumentException("Prefix key must be a direct buffer");
+			}
+			assert prefixKey.nioBuffer().isDirect();
+			ByteBuf zeroSuffixAndExt = alloc.directBuffer(suffixLength + extLength, suffixLength + extLength);
+			try {
+				assert zeroSuffixAndExt.isDirect();
+				assert zeroSuffixAndExt.nioBuffer().isDirect();
+				zeroSuffixAndExt.writeZero(suffixLength + extLength);
+				ByteBuf result = LLUtils.directCompositeBuffer(alloc, prefixKey.retain(), zeroSuffixAndExt.retain());
+				assert result.isDirect();
+				assert result.nioBuffer().isDirect();
+				return result;
+			} finally {
+				zeroSuffixAndExt.release();
+			}
+		} finally {
+			prefixKey.release();
+		}
+	}
+
+	static ByteBuf firstRangeKey(
+			ByteBufAllocator alloc,
+			ByteBuf prefixKey,
+			ByteBuf suffixKey,
+			int prefixLength,
+			int suffixLength,
+			int extLength) {
+		return zeroFillKeyExt(alloc, prefixKey, suffixKey, prefixLength, suffixLength, extLength);
+	}
+
+	static ByteBuf nextRangeKey(
+			ByteBufAllocator alloc,
+			ByteBuf prefixKey,
+			ByteBuf suffixKey,
+			int prefixLength,
+			int suffixLength,
+			int extLength) {
+		try {
+			ByteBuf nonIncremented = zeroFillKeyExt(alloc,
+					prefixKey.retain(),
+					suffixKey.retain(),
+					prefixLength,
+					suffixLength,
+					extLength
+			);
+			try {
+				return incrementPrefix(alloc, nonIncremented.retain(), prefixLength + suffixLength);
+			} finally {
+				nonIncremented.release();
+			}
+		} finally {
+			prefixKey.release();
+			suffixKey.release();
+		}
+	}
+
+	protected static ByteBuf zeroFillKeyExt(
+			ByteBufAllocator alloc,
+			ByteBuf prefixKey,
+			ByteBuf suffixKey,
+			int prefixLength,
+			int suffixLength,
+			int extLength) {
+		assert prefixKey.readableBytes() == prefixLength;
+		assert suffixKey.readableBytes() == suffixLength;
+		assert suffixLength > 0;
+		assert extLength >= 0;
+		var result = LLUtils.directCompositeBuffer(alloc, prefixKey, suffixKey, alloc.buffer(extLength, extLength).writeZero(extLength));
+		assert result.readableBytes() == prefixLength + suffixLength + extLength;
+		return result;
+	}
 
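firstRangeKey and nextRangeKey together turn a key prefix into a half-open scan range: the lower bound is the prefix with the suffix and ext zero-filled, the upper bound is the prefix incremented as an unsigned big-endian number and zero-filled again. Under unsigned lexicographic ordering (RocksDB's default bytewise comparator) every key starting with the prefix falls inside that range. A small self-contained illustration with plain byte arrays:

import java.util.Arrays;

public class RangeBoundsSketch {
	public static void main(String[] args) {
		// prefixLength = 2, suffixLength = 1, extLength = 1
		byte[] first = {0x05, (byte) 0xFF, 0x00, 0x00}; // prefix + zero-filled suffix and ext
		byte[] next  = {0x06, 0x00, 0x00, 0x00};        // incremented prefix + zero-filled rest
		byte[] key   = {0x05, (byte) 0xFF, 0x42, 0x01}; // an arbitrary key under the prefix

		// Unsigned lexicographic comparison, like RocksDB's bytewise comparator
		assert Arrays.compareUnsigned(first, key) <= 0 && Arrays.compareUnsigned(key, next) < 0;
	}
}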
@@ -104,41 +183,73 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
 	 */
 	@Deprecated
 	public static <T, U> DatabaseMapDictionaryDeep<T, U, DatabaseStageEntry<U>> simple(LLDictionary dictionary,
-			SerializerFixedBinaryLength<T, byte[]> keySerializer,
+			SerializerFixedBinaryLength<T, ByteBuf> keySerializer,
 			SubStageGetterSingle<U> subStageGetter) {
 		return new DatabaseMapDictionaryDeep<>(dictionary, EMPTY_BYTES, keySerializer, subStageGetter, 0);
 	}
 
 	public static <T, U, US extends DatabaseStage<U>> DatabaseMapDictionaryDeep<T, U, US> deepTail(LLDictionary dictionary,
-			SerializerFixedBinaryLength<T, byte[]> keySerializer,
+			SerializerFixedBinaryLength<T, ByteBuf> keySerializer,
 			int keyExtLength,
 			SubStageGetter<U, US> subStageGetter) {
 		return new DatabaseMapDictionaryDeep<>(dictionary, EMPTY_BYTES, keySerializer, subStageGetter, keyExtLength);
 	}
 
 	public static <T, U, US extends DatabaseStage<U>> DatabaseMapDictionaryDeep<T, U, US> deepIntermediate(LLDictionary dictionary,
-			byte[] prefixKey,
-			SerializerFixedBinaryLength<T, byte[]> keySuffixSerializer,
+			ByteBuf prefixKey,
+			SerializerFixedBinaryLength<T, ByteBuf> keySuffixSerializer,
 			SubStageGetter<U, US> subStageGetter,
 			int keyExtLength) {
 		return new DatabaseMapDictionaryDeep<>(dictionary, prefixKey, keySuffixSerializer, subStageGetter, keyExtLength);
 	}
 
 	protected DatabaseMapDictionaryDeep(LLDictionary dictionary,
-			byte[] prefixKey,
-			SerializerFixedBinaryLength<T, byte[]> keySuffixSerializer,
+			ByteBuf prefixKey,
+			SerializerFixedBinaryLength<T, ByteBuf> keySuffixSerializer,
 			SubStageGetter<U, US> subStageGetter,
 			int keyExtLength) {
-		this.dictionary = dictionary;
-		this.subStageGetter = subStageGetter;
-		this.keySuffixSerializer = keySuffixSerializer;
-		this.keyPrefix = prefixKey;
-		this.keySuffixLength = keySuffixSerializer.getSerializedBinaryLength();
-		this.keyExtLength = keyExtLength;
-		byte[] firstKey = firstRangeKey(keyPrefix, keyPrefix.length, keySuffixLength, keyExtLength);
-		byte[] nextRangeKey = nextRangeKey(keyPrefix, keyPrefix.length, keySuffixLength, keyExtLength);
-		this.range = keyPrefix.length == 0 ? LLRange.all() : LLRange.of(firstKey, nextRangeKey);
-		assert subStageKeysConsistency(keyPrefix.length + keySuffixLength + keyExtLength);
+		try {
+			this.dictionary = dictionary;
+			this.alloc = dictionary.getAllocator();
+			this.subStageGetter = subStageGetter;
+			this.keySuffixSerializer = keySuffixSerializer;
+			this.keyPrefix = wrappedUnmodifiableBuffer(prefixKey).retain();
+			this.keyPrefixLength = keyPrefix.readableBytes();
+			this.keySuffixLength = keySuffixSerializer.getSerializedBinaryLength();
+			this.keyExtLength = keyExtLength;
+			if (!keyPrefix.isDirect()) {
+				throw new IllegalArgumentException("KeyPrefix must be a direct buffer");
+			}
+			assert keyPrefix.isDirect();
+			ByteBuf firstKey = wrappedUnmodifiableBuffer(firstRangeKey(alloc,
+					keyPrefix.retain(),
+					keyPrefixLength,
+					keySuffixLength,
+					keyExtLength
+			));
+			ByteBuf nextRangeKey = wrappedUnmodifiableBuffer(nextRangeKey(alloc,
+					keyPrefix.retain(),
+					keyPrefixLength,
+					keySuffixLength,
+					keyExtLength
+			));
+			try {
+				assert keyPrefixLength == 0 || !LLUtils.equals(firstKey, nextRangeKey);
+				assert firstKey.isDirect();
+				assert nextRangeKey.isDirect();
+				assert firstKey.nioBuffer().isDirect();
+				assert nextRangeKey.nioBuffer().isDirect();
+				this.range = keyPrefixLength == 0 ? LLRange.all() : LLRange.of(firstKey.retain(), nextRangeKey.retain());
+				assert range == null || !range.hasMin() || range.getMin().isDirect();
+				assert range == null || !range.hasMax() || range.getMax().isDirect();
+				assert subStageKeysConsistency(keyPrefixLength + keySuffixLength + keyExtLength);
+			} finally {
+				firstKey.release();
+				nextRangeKey.release();
+			}
+		} finally {
+			prefixKey.release();
+		}
 	}
 
 	@SuppressWarnings("unused")
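EMPTY_BYTES above is built with Unpooled.unreleasableBuffer so the shared constant can be handed to this class's consume-and-release methods without ever being freed; the constructor similarly wraps the prefix read-only with wrappedUnmodifiableBuffer. A short sketch of the unreleasable wrapper's behavior:

import static io.netty.buffer.Unpooled.directBuffer;
import static io.netty.buffer.Unpooled.unreleasableBuffer;
import io.netty.buffer.ByteBuf;

public class UnreleasableSketch {
	public static void main(String[] args) {
		ByteBuf shared = unreleasableBuffer(directBuffer(0, 0));
		shared.release(); // no-op on the wrapper: the shared constant stays valid
		System.out.println(shared.refCnt() > 0); // true
	}
}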
@@ -159,26 +270,33 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
 	/**
 	 * Keep only suffix and ext
 	 */
-	protected byte[] stripPrefix(byte[] key) {
-		return Arrays.copyOfRange(key, this.keyPrefix.length, key.length);
+	protected ByteBuf stripPrefix(ByteBuf key) {
+		return key.slice(this.keyPrefixLength, key.readableBytes() - this.keyPrefixLength);
 	}
 
 	/**
 	 * Remove ext from full key
 	 */
-	protected byte[] removeExtFromFullKey(byte[] key) {
-		return Arrays.copyOf(key, keyPrefix.length + keySuffixLength);
+	protected ByteBuf removeExtFromFullKey(ByteBuf key) {
+		try {
+			return key.slice(key.readerIndex(), keyPrefixLength + keySuffixLength).retain();
+		} finally {
+			key.release();
+		}
 	}
 
 	/**
 	 * Add prefix to suffix
 	 */
-	protected byte[] toKeyWithoutExt(byte[] suffixKey) {
-		assert suffixKey.length == keySuffixLength;
-		byte[] result = Arrays.copyOf(keyPrefix, keyPrefix.length + keySuffixLength);
-		System.arraycopy(suffixKey, 0, result, keyPrefix.length, keySuffixLength);
-		assert result.length == keyPrefix.length + keySuffixLength;
-		return result;
+	protected ByteBuf toKeyWithoutExt(ByteBuf suffixKey) {
+		try {
+			assert suffixKey.readableBytes() == keySuffixLength;
+			ByteBuf result = LLUtils.directCompositeBuffer(alloc, keyPrefix.retain(), suffixKey.retain());
+			assert result.readableBytes() == keyPrefixLength + keySuffixLength;
+			return result;
+		} finally {
+			suffixKey.release();
+		}
 	}
 
 	protected LLSnapshot resolveSnapshot(@Nullable CompositeSnapshot snapshot) {
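stripPrefix now returns a slice instead of the copy Arrays.copyOfRange used to make. A slice is a view sharing the parent buffer's memory, so it is cheap but must not outlive the parent unless it is retained, which is why the callers above pass retained buffers and release them afterwards. A standalone illustration:

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;

public class SliceSketch {
	public static void main(String[] args) {
		ByteBuf key = Unpooled.wrappedBuffer(new byte[] {1, 2, 3, 4, 5});
		int prefixLength = 2;
		// A zero-copy view over the suffix bytes [2..5)
		ByteBuf suffix = key.slice(prefixLength, key.readableBytes() - prefixLength);
		System.out.println(suffix.getByte(0)); // 3
		// Use retainedSlice() instead if the view must outlive this scope
		key.release();
	}
}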
@@ -189,71 +307,89 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
 		}
 	}
 
-	protected LLRange toExtRange(byte[] keySuffix) {
-		byte[] first = firstRangeKey(keyPrefix, keySuffix, keyPrefix.length, keySuffixLength, keyExtLength);
-		byte[] end = nextRangeKey(keyPrefix, keySuffix, keyPrefix.length, keySuffixLength, keyExtLength);
-		return LLRange.of(first, end);
+	protected LLRange toExtRange(ByteBuf keySuffix) {
+		try {
+			ByteBuf first = firstRangeKey(alloc,
+					keyPrefix.retain(),
+					keySuffix.retain(),
+					keyPrefixLength,
+					keySuffixLength,
+					keyExtLength
+			);
+			ByteBuf end = nextRangeKey(alloc,
+					keyPrefix.retain(),
+					keySuffix.retain(),
+					keyPrefixLength,
+					keySuffixLength,
+					keyExtLength
+			);
+			return LLRange.of(first, end);
+		} finally {
+			keySuffix.release();
+		}
 	}
 
 	@Override
 	public Mono<Long> leavesCount(@Nullable CompositeSnapshot snapshot, boolean fast) {
-		return dictionary.sizeRange(resolveSnapshot(snapshot), range, fast);
+		return dictionary.sizeRange(resolveSnapshot(snapshot), range.retain(), fast);
 	}
 
 	@Override
 	public Mono<Boolean> isEmpty(@Nullable CompositeSnapshot snapshot) {
-		return dictionary.isRangeEmpty(resolveSnapshot(snapshot), range);
+		return dictionary.isRangeEmpty(resolveSnapshot(snapshot), range.retain());
 	}
 
 	@Override
 	public Mono<US> at(@Nullable CompositeSnapshot snapshot, T keySuffix) {
-		byte[] keySuffixData = serializeSuffix(keySuffix);
-		Flux<byte[]> keyFlux;
+		ByteBuf keySuffixData = serializeSuffix(keySuffix);
+		Flux<ByteBuf> keyFlux;
 		if (LLLocalDictionary.DEBUG_PREFIXES_WHEN_ASSERTIONS_ARE_ENABLED && this.subStageGetter.needsDebuggingKeyFlux()) {
-			keyFlux = this.dictionary.getRangeKeys(resolveSnapshot(snapshot), toExtRange(keySuffixData));
+			keyFlux = this.dictionary.getRangeKeys(resolveSnapshot(snapshot), toExtRange(keySuffixData.retain()));
 		} else {
 			keyFlux = Flux.empty();
 		}
 		return this.subStageGetter
-				.subStage(dictionary,
-						snapshot,
-						toKeyWithoutExt(keySuffixData),
-						keyFlux
-				);
+				.subStage(dictionary, snapshot, toKeyWithoutExt(keySuffixData.retain()), keyFlux)
+				.doFinally(s -> keySuffixData.release());
 	}
 
 	@Override
 	public Flux<Entry<T, US>> getAllStages(@Nullable CompositeSnapshot snapshot) {
 		if (LLLocalDictionary.DEBUG_PREFIXES_WHEN_ASSERTIONS_ARE_ENABLED && this.subStageGetter.needsDebuggingKeyFlux()) {
 			return dictionary
-					.getRangeKeysGrouped(resolveSnapshot(snapshot), range, keyPrefix.length + keySuffixLength)
+					.getRangeKeysGrouped(resolveSnapshot(snapshot), range.retain(), keyPrefixLength + keySuffixLength)
 					.flatMapSequential(rangeKeys -> {
 						assert this.subStageGetter.isMultiKey() || rangeKeys.size() == 1;
-						byte[] groupKeyWithExt = rangeKeys.get(0);
-						byte[] groupKeyWithoutExt = removeExtFromFullKey(groupKeyWithExt);
-						byte[] groupSuffix = this.stripPrefix(groupKeyWithoutExt);
-						assert subStageKeysConsistency(groupKeyWithExt.length);
+						ByteBuf groupKeyWithExt = rangeKeys.get(0).retain();
+						ByteBuf groupKeyWithoutExt = removeExtFromFullKey(groupKeyWithExt.retain());
+						ByteBuf groupSuffix = this.stripPrefix(groupKeyWithoutExt.retain());
+						assert subStageKeysConsistency(groupKeyWithExt.readableBytes());
 						return this.subStageGetter
 								.subStage(dictionary,
 										snapshot,
 										groupKeyWithoutExt,
 										Flux.fromIterable(rangeKeys)
 								)
-								.map(us -> Map.entry(this.deserializeSuffix(groupSuffix), us));
+								.map(us -> Map.entry(this.deserializeSuffix(wrappedUnmodifiableBuffer(groupSuffix.retain())), us))
+								.doFinally(s -> {
+									groupSuffix.release();
+									groupKeyWithoutExt.release();
+									groupKeyWithExt.release();
+								});
 					});
 		} else {
 			return dictionary
-					.getRangeKeyPrefixes(resolveSnapshot(snapshot), range, keyPrefix.length + keySuffixLength)
+					.getRangeKeyPrefixes(resolveSnapshot(snapshot), range, keyPrefixLength + keySuffixLength)
 					.flatMapSequential(groupKeyWithoutExt -> {
-						byte[] groupSuffix = this.stripPrefix(groupKeyWithoutExt);
-						assert subStageKeysConsistency(groupKeyWithoutExt.length + keyExtLength);
+						ByteBuf groupSuffix = this.stripPrefix(groupKeyWithoutExt);
+						assert subStageKeysConsistency(groupKeyWithoutExt.readableBytes() + keyExtLength);
 						return this.subStageGetter
 								.subStage(dictionary,
 										snapshot,
 										groupKeyWithoutExt,
 										Flux.empty()
 								)
-								.map(us -> Map.entry(this.deserializeSuffix(groupSuffix), us));
+								.map(us -> Map.entry(this.deserializeSuffix(wrappedUnmodifiableBuffer(groupSuffix)), us));
 					});
 		}
 	}
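The at() and getAllStages() rewrites use the pattern that recurs through the rest of this commit: retain a buffer for the duration of an asynchronous pipeline and release it in doFinally, which runs on completion, error, and cancellation alike. A generic sketch, where loadValue is a hypothetical async lookup that consumes the buffer it receives:

import io.netty.buffer.ByteBuf;
import java.util.function.Function;
import reactor.core.publisher.Mono;

final class DoFinallySketch {
	static <R> Mono<R> withBuffer(ByteBuf key, Function<ByteBuf, Mono<R>> loadValue) {
		return loadValue.apply(key.retain())        // the callee takes ownership of the retained copy
				.doFinally(signal -> key.release()); // our own reference is dropped on any terminal signal
	}
}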
@@ -261,10 +397,10 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
 	private boolean subStageKeysConsistency(int totalKeyLength) {
 		if (subStageGetter instanceof SubStageGetterMapDeep) {
 			return totalKeyLength
-					== keyPrefix.length + keySuffixLength + ((SubStageGetterMapDeep<?, ?, ?>) subStageGetter).getKeyBinaryLength();
+					== keyPrefixLength + keySuffixLength + ((SubStageGetterMapDeep<?, ?, ?>) subStageGetter).getKeyBinaryLength();
 		} else if (subStageGetter instanceof SubStageGetterMap) {
 			return totalKeyLength
-					== keyPrefix.length + keySuffixLength + ((SubStageGetterMap<?, ?>) subStageGetter).getKeyBinaryLength();
+					== keyPrefixLength + keySuffixLength + ((SubStageGetterMap<?, ?>) subStageGetter).getKeyBinaryLength();
 		} else {
 			return true;
 		}
@@ -287,25 +423,37 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
 					.clear();
 		} else if (range.isSingle()) {
 			return dictionary
-					.remove(range.getSingle(), LLDictionaryResultType.VOID)
+					.remove(range.getSingle().retain(), LLDictionaryResultType.VOID)
 					.then();
 		} else {
 			return dictionary
-					.setRange(range, Flux.empty(), false)
+					.setRange(range.retain(), Flux.empty(), false)
 					.then();
 		}
 	}
 
 	//todo: temporary wrapper. convert the whole class to buffers
-	protected T deserializeSuffix(byte[] keySuffix) {
-		assert suffixKeyConsistency(keySuffix.length);
+	protected T deserializeSuffix(ByteBuf keySuffix) {
+		assert suffixKeyConsistency(keySuffix.readableBytes());
 		return keySuffixSerializer.deserialize(keySuffix);
 	}
 
 	//todo: temporary wrapper. convert the whole class to buffers
-	protected byte[] serializeSuffix(T keySuffix) {
-		byte[] suffixData = keySuffixSerializer.serialize(keySuffix);
-		assert suffixKeyConsistency(suffixData.length);
+	protected ByteBuf serializeSuffix(T keySuffix) {
+		ByteBuf suffixData = keySuffixSerializer.serialize(keySuffix);
+		assert suffixKeyConsistency(suffixData.readableBytes());
 		return suffixData;
 	}
+
+	@Override
+	protected void finalize() throws Throwable {
+		super.finalize();
+		range.release();
+	}
+
+	@Override
+	public void release() {
+		this.range.release();
+		this.keyPrefix.release();
+	}
 }
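DatabaseMapDictionaryDeep now exposes an explicit release() for deterministic cleanup, with finalize() kept as a last-resort net for the range buffer. A sketch of the general shape of that pairing (not the project's exact code; Netty's own leak detector is usually preferred over finalizers):

import io.netty.buffer.ByteBuf;

class BufferHolderSketch {
	private final ByteBuf buf;

	BufferHolderSketch(ByteBuf buf) {
		this.buf = buf;
	}

	// Deterministic cleanup, called by whoever owns this object
	void release() {
		buf.release();
	}

	@Override
	protected void finalize() throws Throwable {
		super.finalize();
		// Safety net only: frees the buffer if the owner forgot release()
		if (buf.refCnt() > 0) {
			buf.release(buf.refCnt());
		}
	}
}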
@@ -2,18 +2,18 @@ package it.cavallium.dbengine.database.collections;
 
 import static it.cavallium.dbengine.database.collections.DatabaseMapDictionaryDeep.EMPTY_BYTES;
 
-import com.google.common.primitives.Ints;
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufAllocator;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.LLDictionary;
+import it.cavallium.dbengine.database.LLUtils;
 import it.cavallium.dbengine.database.collections.Joiner.ValueGetter;
 import it.cavallium.dbengine.database.collections.JoinerBlocking.ValueGetterBlocking;
 import it.cavallium.dbengine.database.serialization.Serializer;
 import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
-import java.util.Arrays;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Map.Entry;
-import java.util.Optional;
 import java.util.function.Function;
 import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
@@ -23,19 +23,21 @@ import reactor.core.publisher.Mono;
 @SuppressWarnings("unused")
 public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T, U, DatabaseStageEntry<U>> {
 
+	private final ByteBufAllocator alloc;
 	private final DatabaseMapDictionary<TH, Entry<T, U>> subDictionary;
 	private final Function<T, TH> keySuffixHashFunction;
 	private final Function<T, ValueMapper<T, U>> valueMapper;
 
 	protected DatabaseMapDictionaryHashed(LLDictionary dictionary,
-			byte[] prefixKey,
-			Serializer<T, byte[]> keySuffixSerializer,
-			Serializer<U, byte[]> valueSerializer,
+			ByteBuf prefixKey,
+			Serializer<T, ByteBuf> keySuffixSerializer,
+			Serializer<U, ByteBuf> valueSerializer,
 			Function<T, TH> keySuffixHashFunction,
-			SerializerFixedBinaryLength<TH, byte[]> keySuffixHashSerializer) {
+			SerializerFixedBinaryLength<TH, ByteBuf> keySuffixHashSerializer) {
 		ValueWithHashSerializer<T, U> valueWithHashSerializer = new ValueWithHashSerializer<>(keySuffixSerializer,
 				valueSerializer
 		);
+		this.alloc = dictionary.getAllocator();
 		this.valueMapper = ValueMapper::new;
 		this.subDictionary = DatabaseMapDictionary.tail(dictionary,
 				prefixKey,
@@ -44,40 +46,35 @@ public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T
 		this.keySuffixHashFunction = keySuffixHashFunction;
 	}
 
-	private static class ValueWithHashSerializer<T, U> implements Serializer<Entry<T, U>, byte[]> {
+	private class ValueWithHashSerializer<T, U> implements Serializer<Entry<T, U>, ByteBuf> {
 
-		private final Serializer<T, byte[]> keySuffixSerializer;
-		private final Serializer<U, byte[]> valueSerializer;
+		private final Serializer<T, ByteBuf> keySuffixSerializer;
+		private final Serializer<U, ByteBuf> valueSerializer;
 
-		private ValueWithHashSerializer(Serializer<T, byte[]> keySuffixSerializer, Serializer<U, byte[]> valueSerializer) {
+		private ValueWithHashSerializer(Serializer<T, ByteBuf> keySuffixSerializer, Serializer<U, ByteBuf> valueSerializer) {
 			this.keySuffixSerializer = keySuffixSerializer;
 			this.valueSerializer = valueSerializer;
 		}
 
 		@Override
-		public @NotNull Entry<T, U> deserialize(byte @NotNull [] serialized) {
-			int keySuffixLength = Ints.fromBytes(serialized[0], serialized[1], serialized[2], serialized[3]);
-			T keySuffix = keySuffixSerializer.deserialize(Arrays.copyOfRange(serialized,
-					Integer.BYTES,
-					Integer.BYTES + keySuffixLength
-			));
-			U value = valueSerializer.deserialize(Arrays.copyOfRange(serialized,
-					Integer.BYTES + keySuffixLength,
-					serialized.length
-			));
-			return Map.entry(keySuffix, value);
+		public @NotNull Entry<T, U> deserialize(@NotNull ByteBuf serialized) {
+			try {
+				int keySuffixLength = serialized.readInt();
+				T keySuffix = keySuffixSerializer.deserialize(serialized.retainedSlice(serialized.readerIndex(), keySuffixLength));
+				U value = valueSerializer.deserialize(serialized.retain());
+				return Map.entry(keySuffix, value);
+			} finally {
+				serialized.release();
+			}
 		}
 
 		@Override
-		public byte @NotNull [] serialize(@NotNull Entry<T, U> deserialized) {
-			byte[] keySuffix = keySuffixSerializer.serialize(deserialized.getKey());
-			byte[] value = valueSerializer.serialize(deserialized.getValue());
-			byte[] result = new byte[Integer.BYTES + keySuffix.length + value.length];
-			byte[] keySuffixLen = Ints.toByteArray(keySuffix.length);
-			System.arraycopy(keySuffixLen, 0, result, 0, Integer.BYTES);
-			System.arraycopy(keySuffix, 0, result, Integer.BYTES, keySuffix.length);
-			System.arraycopy(value, 0, result, Integer.BYTES + keySuffix.length, value.length);
-			return result;
+		public @NotNull ByteBuf serialize(@NotNull Entry<T, U> deserialized) {
+			ByteBuf keySuffix = keySuffixSerializer.serialize(deserialized.getKey());
+			ByteBuf value = valueSerializer.serialize(deserialized.getValue());
+			ByteBuf keySuffixLen = alloc.buffer(Integer.BYTES, Integer.BYTES);
+			keySuffixLen.writeInt(keySuffix.readableBytes());
+			return LLUtils.directCompositeBuffer(alloc, keySuffixLen, keySuffix, value);
 		}
 	}
 
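The new serializer keeps the same wire format as the byte[] version, [int keySuffixLength][keySuffix][value], but builds it from buffer components instead of arraycopies. The framing itself, sketched with plain Netty calls:

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;

public class FramingSketch {
	public static void main(String[] args) {
		byte[] keySuffix = {10, 20};
		byte[] value = {30, 40, 50};

		// Write: [int keySuffixLength][keySuffix bytes][value bytes]
		ByteBuf framed = Unpooled.buffer();
		framed.writeInt(keySuffix.length);
		framed.writeBytes(keySuffix);
		framed.writeBytes(value);

		// Read: the value is simply everything after the key suffix
		int len = framed.readInt();
		byte[] k = new byte[len];
		framed.readBytes(k);
		byte[] v = new byte[framed.readableBytes()];
		framed.readBytes(v);
		framed.release();
	}
}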
@@ -101,10 +98,10 @@ public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T
 	}
 
 	public static <T, U, UH> DatabaseMapDictionaryHashed<T, U, UH> simple(LLDictionary dictionary,
-			Serializer<T, byte[]> keySerializer,
-			Serializer<U, byte[]> valueSerializer,
+			Serializer<T, ByteBuf> keySerializer,
+			Serializer<U, ByteBuf> valueSerializer,
 			Function<T, UH> keyHashFunction,
-			SerializerFixedBinaryLength<UH, byte[]> keyHashSerializer) {
+			SerializerFixedBinaryLength<UH, ByteBuf> keyHashSerializer) {
 		return new DatabaseMapDictionaryHashed<>(dictionary,
 				EMPTY_BYTES,
 				keySerializer,
@@ -115,11 +112,11 @@ public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T
 	}
 
 	public static <T, U, UH> DatabaseMapDictionaryHashed<T, U, UH> tail(LLDictionary dictionary,
-			byte[] prefixKey,
-			Serializer<T, byte[]> keySuffixSerializer,
-			Serializer<U, byte[]> valueSerializer,
+			ByteBuf prefixKey,
+			Serializer<T, ByteBuf> keySuffixSerializer,
+			Serializer<U, ByteBuf> valueSerializer,
 			Function<T, UH> keySuffixHashFunction,
-			SerializerFixedBinaryLength<UH, byte[]> keySuffixHashSerializer) {
+			SerializerFixedBinaryLength<UH, ByteBuf> keySuffixHashSerializer) {
 		return new DatabaseMapDictionaryHashed<>(dictionary,
 				prefixKey,
 				keySuffixSerializer,
@@ -157,13 +154,20 @@ public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T
 	}
 
 	@Override
-	public Mono<Boolean> setAndGetStatus(Map<T, U> map) {
-		return Mono.fromSupplier(() -> this.serializeMap(map)).flatMap(subDictionary::setAndGetStatus);
+	public Mono<Boolean> setAndGetChanged(Map<T, U> map) {
+		return Mono.fromSupplier(() -> this.serializeMap(map)).flatMap(subDictionary::setAndGetChanged).single();
 	}
 
 	@Override
-	public Mono<Boolean> update(Function<Optional<Map<T, U>>, Optional<Map<T, U>>> updater) {
-		return subDictionary.update(old -> updater.apply(old.map(this::deserializeMap)).map(this::serializeMap));
+	public Mono<Boolean> update(Function<@Nullable Map<T, U>, @Nullable Map<T, U>> updater) {
+		return subDictionary.update(old -> {
+			var result = updater.apply(old == null ? null : this.deserializeMap(old));
+			if (result == null) {
+				return null;
+			} else {
+				return this.serializeMap(result);
+			}
+		});
 	}
 
 	@Override
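This update() rewrite is the commit's recurring API change: the updater contract moves from Optional-in/Optional-out to nullable-in/nullable-out, where null means "absent" on input and "delete or stay absent" on output. Side by side, as a sketch:

import java.util.Optional;
import java.util.function.Function;

public class UpdaterContractSketch {
	// Before: absence modeled with Optional
	static Function<Optional<String>, Optional<String>> before =
			old -> old.map(String::toUpperCase);

	// After: absence modeled with null (the @Nullable contract in the diff);
	// returning null deletes the entry or keeps it absent
	static Function<String, String> after =
			old -> old == null ? null : old.toUpperCase();
}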
@@ -186,6 +190,11 @@ public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T
 		return this;
 	}
 
+	@Override
+	public void release() {
+		this.subDictionary.release();
+	}
+
 	@Override
 	public Mono<DatabaseStageEntry<U>> at(@Nullable CompositeSnapshot snapshot, T key) {
 		return subDictionary
@@ -218,18 +227,27 @@ public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T
 	}
 
 	@Override
-	public Mono<Boolean> updateValue(T key, boolean existsAlmostCertainly, Function<Optional<U>, Optional<U>> updater) {
-		return subDictionary.updateValue(keySuffixHashFunction.apply(key),
-				existsAlmostCertainly,
-				old -> updater.apply(old.map(Entry::getValue)).map(newV -> Map.entry(key, newV))
-		);
+	public Mono<Boolean> updateValue(T key, boolean existsAlmostCertainly, Function<@Nullable U, @Nullable U> updater) {
+		return subDictionary.updateValue(keySuffixHashFunction.apply(key), existsAlmostCertainly, old -> {
+			var result = updater.apply(old == null ? null : old.getValue());
+			if (result == null) {
+				return null;
+			} else {
+				return Map.entry(key, result);
+			}
+		});
 	}
 
 	@Override
-	public Mono<Boolean> updateValue(T key, Function<Optional<U>, Optional<U>> updater) {
-		return subDictionary.updateValue(keySuffixHashFunction.apply(key),
-				old -> updater.apply(old.map(Entry::getValue)).map(newV -> Map.entry(key, newV))
-		);
+	public Mono<Boolean> updateValue(T key, Function<@Nullable U, @Nullable U> updater) {
+		return subDictionary.updateValue(keySuffixHashFunction.apply(key), old -> {
+			var result = updater.apply(old == null ? null : old.getValue());
+			if (result == null) {
+				return null;
+			} else {
+				return Map.entry(key, result);
+			}
+		});
 	}
 
 	@Override
@@ -346,10 +364,16 @@ public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T
 	}
 
 	@Override
-	public Mono<Boolean> update(Function<Optional<Map<T, U>>, Optional<Map<T, U>>> updater,
+	public Mono<Boolean> update(Function<@Nullable Map<T, U>, @Nullable Map<T, U>> updater,
 			boolean existsAlmostCertainly) {
-		return subDictionary
-				.update(item -> updater.apply(item.map(this::deserializeMap)).map(this::serializeMap), existsAlmostCertainly);
+		return subDictionary.update(item -> {
+			var result = updater.apply(item == null ? null : this.deserializeMap(item));
+			if (result == null) {
+				return null;
+			} else {
+				return this.serializeMap(result);
+			}
+		}, existsAlmostCertainly);
 	}
 
 	@Override
@@ -1,5 +1,6 @@
 package it.cavallium.dbengine.database.collections;
 
+import io.netty.buffer.ByteBuf;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.LLDictionary;
 import it.cavallium.dbengine.database.collections.DatabaseEmpty.Nothing;
@@ -14,19 +15,19 @@ import reactor.core.publisher.Mono;
 public class DatabaseSetDictionary<T> extends DatabaseMapDictionaryDeep<T, Nothing, DatabaseStageEntry<Nothing>> {
 
 	protected DatabaseSetDictionary(LLDictionary dictionary,
-			byte[] prefixKey,
-			SerializerFixedBinaryLength<T, byte[]> keySuffixSerializer) {
+			ByteBuf prefixKey,
+			SerializerFixedBinaryLength<T, ByteBuf> keySuffixSerializer) {
 		super(dictionary, prefixKey, keySuffixSerializer, DatabaseEmpty.createSubStageGetter(), 0);
 	}
 
 	public static <T> DatabaseSetDictionary<T> simple(LLDictionary dictionary,
-			SerializerFixedBinaryLength<T, byte[]> keySerializer) {
+			SerializerFixedBinaryLength<T, ByteBuf> keySerializer) {
 		return new DatabaseSetDictionary<>(dictionary, EMPTY_BYTES, keySerializer);
 	}
 
 	public static <T> DatabaseSetDictionary<T> tail(LLDictionary dictionary,
-			byte[] prefixKey,
-			SerializerFixedBinaryLength<T, byte[]> keySuffixSerializer) {
+			ByteBuf prefixKey,
+			SerializerFixedBinaryLength<T, ByteBuf> keySuffixSerializer) {
 		return new DatabaseSetDictionary<>(dictionary, prefixKey, keySuffixSerializer);
 	}
 
@@ -2,6 +2,7 @@ package it.cavallium.dbengine.database.collections;
 
 import static it.cavallium.dbengine.database.collections.DatabaseMapDictionaryDeep.EMPTY_BYTES;
 
+import io.netty.buffer.ByteBuf;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.LLDictionary;
 import it.cavallium.dbengine.database.collections.DatabaseEmpty.Nothing;
@@ -18,10 +19,10 @@ import reactor.core.publisher.Mono;
 public class DatabaseSetDictionaryHashed<T, TH> extends DatabaseMapDictionaryHashed<T, Nothing, TH> {
 
 	protected DatabaseSetDictionaryHashed(LLDictionary dictionary,
-			byte[] prefixKey,
-			Serializer<T, byte[]> keySuffixSerializer,
+			ByteBuf prefixKey,
+			Serializer<T, ByteBuf> keySuffixSerializer,
 			Function<T, TH> keySuffixHashFunction,
-			SerializerFixedBinaryLength<TH, byte[]> keySuffixHashSerializer) {
+			SerializerFixedBinaryLength<TH, ByteBuf> keySuffixHashSerializer) {
 		super(dictionary,
 				prefixKey,
 				keySuffixSerializer,
@@ -32,9 +33,9 @@ public class DatabaseSetDictionaryHashed<T, TH> extends DatabaseMapDictionaryHas
 	}
 
 	public static <T, TH> DatabaseSetDictionaryHashed<T, TH> simple(LLDictionary dictionary,
-			Serializer<T, byte[]> keySerializer,
+			Serializer<T, ByteBuf> keySerializer,
 			Function<T, TH> keyHashFunction,
-			SerializerFixedBinaryLength<TH, byte[]> keyHashSerializer) {
+			SerializerFixedBinaryLength<TH, ByteBuf> keyHashSerializer) {
 		return new DatabaseSetDictionaryHashed<>(dictionary,
 				EMPTY_BYTES,
 				keySerializer,
@@ -44,10 +45,10 @@ public class DatabaseSetDictionaryHashed<T, TH> extends DatabaseMapDictionaryHas
 	}
 
 	public static <T, TH> DatabaseSetDictionaryHashed<T, TH> tail(LLDictionary dictionary,
-			byte[] prefixKey,
-			Serializer<T, byte[]> keySuffixSerializer,
+			ByteBuf prefixKey,
+			Serializer<T, ByteBuf> keySuffixSerializer,
 			Function<T, TH> keyHashFunction,
-			SerializerFixedBinaryLength<TH, byte[]> keyHashSerializer) {
+			SerializerFixedBinaryLength<TH, ByteBuf> keyHashSerializer) {
 		return new DatabaseSetDictionaryHashed<>(dictionary,
 				prefixKey,
 				keySuffixSerializer,
@@ -1,5 +1,6 @@
 package it.cavallium.dbengine.database.collections;
 
+import io.netty.buffer.ByteBuf;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.LLDictionary;
 import it.cavallium.dbengine.database.LLDictionaryResultType;
@@ -10,15 +11,19 @@ import java.util.Optional;
 import java.util.function.Function;
 import org.jetbrains.annotations.Nullable;
 import reactor.core.publisher.Mono;
+import static io.netty.buffer.Unpooled.*;
 
 public class DatabaseSingle<U> implements DatabaseStageEntry<U> {
 
 	private final LLDictionary dictionary;
-	private final byte[] key;
-	private final Serializer<U, byte[]> serializer;
+	private final ByteBuf key;
+	private final Serializer<U, ByteBuf> serializer;
 
-	public DatabaseSingle(LLDictionary dictionary, byte[] key, Serializer<U, byte[]> serializer) {
+	public DatabaseSingle(LLDictionary dictionary, ByteBuf key, Serializer<U, ByteBuf> serializer) {
 		this.dictionary = dictionary;
+		if (!key.isDirect()) {
+			throw new IllegalArgumentException("Key must be direct");
+		}
 		this.key = key;
 		this.serializer = serializer;
 	}
@@ -33,47 +38,60 @@ public class DatabaseSingle<U> implements DatabaseStageEntry<U> {
 
 	@Override
 	public Mono<U> get(@Nullable CompositeSnapshot snapshot, boolean existsAlmostCertainly) {
-		return dictionary.get(resolveSnapshot(snapshot), key, existsAlmostCertainly).map(this::deserialize);
+		return dictionary.get(resolveSnapshot(snapshot), key.retain(), existsAlmostCertainly).map(this::deserialize);
 	}
 
 	@Override
 	public Mono<U> setAndGetPrevious(U value) {
-		return dictionary.put(key, serialize(value), LLDictionaryResultType.PREVIOUS_VALUE).map(this::deserialize);
+		ByteBuf valueByteBuf = serialize(value);
+		return dictionary
+				.put(key.retain(), valueByteBuf.retain(), LLDictionaryResultType.PREVIOUS_VALUE)
+				.map(this::deserialize)
+				.doFinally(s -> valueByteBuf.release());
 	}
 
 	@Override
-	public Mono<Boolean> update(Function<Optional<U>, Optional<U>> updater, boolean existsAlmostCertainly) {
-		return dictionary.update(key,
-				(oldValueSer) -> updater.apply(oldValueSer.map(this::deserialize)).map(this::serialize),
-				existsAlmostCertainly
-		);
+	public Mono<Boolean> update(Function<@Nullable U, @Nullable U> updater, boolean existsAlmostCertainly) {
+		return dictionary.update(key.retain(), (oldValueSer) -> {
+			var result = updater.apply(oldValueSer == null ? null : this.deserialize(oldValueSer));
+			if (result == null) {
+				return null;
+			} else {
+				return this.serialize(result);
+			}
+		}, existsAlmostCertainly);
 	}
 
 	@Override
 	public Mono<U> clearAndGetPrevious() {
-		return dictionary.remove(key, LLDictionaryResultType.PREVIOUS_VALUE).map(this::deserialize);
+		return dictionary.remove(key.retain(), LLDictionaryResultType.PREVIOUS_VALUE).map(this::deserialize);
 	}
 
 	@Override
 	public Mono<Long> leavesCount(@Nullable CompositeSnapshot snapshot, boolean fast) {
 		return dictionary
-				.isRangeEmpty(resolveSnapshot(snapshot), LLRange.single(key))
+				.isRangeEmpty(resolveSnapshot(snapshot), LLRange.single(key.retain()))
 				.map(empty -> empty ? 0L : 1L);
 	}
 
 	@Override
 	public Mono<Boolean> isEmpty(@Nullable CompositeSnapshot snapshot) {
 		return dictionary
-				.isRangeEmpty(resolveSnapshot(snapshot), LLRange.single(key));
+				.isRangeEmpty(resolveSnapshot(snapshot), LLRange.single(key.retain()));
 	}
 
 	//todo: temporary wrapper. convert the whole class to buffers
-	private U deserialize(byte[] bytes) {
+	private U deserialize(ByteBuf bytes) {
 		return serializer.deserialize(bytes);
 	}
 
 	//todo: temporary wrapper. convert the whole class to buffers
-	private byte[] serialize(U bytes) {
+	private ByteBuf serialize(U bytes) {
 		return serializer.serialize(bytes);
 	}
+
+	@Override
+	public void release() {
+		key.release();
+	}
 }
@@ -2,7 +2,6 @@ package it.cavallium.dbengine.database.collections;
 
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.serialization.Serializer;
-import java.util.Optional;
 import java.util.function.Function;
 import org.jetbrains.annotations.Nullable;
 import reactor.core.publisher.Mono;
@@ -39,14 +38,20 @@ public class DatabaseSingleMapped<A, B> implements DatabaseStageEntry<A> {
 	}
 
 	@Override
-	public Mono<Boolean> setAndGetStatus(A value) {
-		return serializedSingle.setAndGetStatus(serialize(value));
+	public Mono<Boolean> setAndGetChanged(A value) {
+		return serializedSingle.setAndGetChanged(serialize(value)).single();
 	}
 
 	@Override
-	public Mono<Boolean> update(Function<Optional<A>, Optional<A>> updater, boolean existsAlmostCertainly) {
-		return serializedSingle
-				.update(oldValue -> updater.apply(oldValue.map(this::deserialize)).map(this::serialize), existsAlmostCertainly);
+	public Mono<Boolean> update(Function<@Nullable A, @Nullable A> updater, boolean existsAlmostCertainly) {
+		return serializedSingle.update(oldValue -> {
+			var result = updater.apply(oldValue == null ? null : this.deserialize(oldValue));
+			if (result == null) {
+				return null;
+			} else {
+				return this.serialize(result);
+			}
+		}, existsAlmostCertainly);
 	}
 
 	@Override
@@ -84,6 +89,11 @@ public class DatabaseSingleMapped<A, B> implements DatabaseStageEntry<A> {
 		return this;
 	}
 
+	@Override
+	public void release() {
+		serializedSingle.release();
+	}
+
 	//todo: temporary wrapper. convert the whole class to buffers
 	private A deserialize(B bytes) {
 		return serializer.deserialize(bytes);
@@ -2,7 +2,6 @@ package it.cavallium.dbengine.database.collections;
 
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import java.util.Objects;
-import java.util.Optional;
 import java.util.function.Function;
 import org.jetbrains.annotations.Nullable;
 import reactor.core.publisher.Mono;
@@ -26,18 +25,18 @@ public interface DatabaseStage<T> extends DatabaseStageWithEntry<T> {
 	}
 
 	default Mono<Void> set(T value) {
-		return setAndGetStatus(value).then();
+		return setAndGetChanged(value).then();
 	}
 
 	Mono<T> setAndGetPrevious(T value);
 
-	default Mono<Boolean> setAndGetStatus(T value) {
-		return setAndGetPrevious(value).map(oldValue -> !Objects.equals(oldValue, value)).defaultIfEmpty(false);
+	default Mono<Boolean> setAndGetChanged(T value) {
+		return setAndGetPrevious(value).map(oldValue -> !Objects.equals(oldValue, value)).defaultIfEmpty(value != null);
 	}
 
-	Mono<Boolean> update(Function<Optional<T>, Optional<T>> updater, boolean existsAlmostCertainly);
+	Mono<Boolean> update(Function<@Nullable T, @Nullable T> updater, boolean existsAlmostCertainly);
 
-	default Mono<Boolean> update(Function<Optional<T>, Optional<T>> updater) {
+	default Mono<Boolean> update(Function<@Nullable T, @Nullable T> updater) {
 		return update(updater, false);
 	}
 
@@ -51,6 +50,8 @@ public interface DatabaseStage<T> extends DatabaseStageWithEntry<T> {
 		return clearAndGetPrevious().map(Objects::nonNull).defaultIfEmpty(false);
 	}
 
+	void release();
+
 	default Mono<Void> close() {
 		return Mono.empty();
 	}
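The renamed default also fixes a corner case: setAndGetPrevious() completes empty when no previous value existed, so the map() stage never emits and the result came from defaultIfEmpty, which was hard-coded to false. Writing a fresh non-null value is a change, hence defaultIfEmpty(value != null). A self-contained sketch of the two behaviors, where previous stands in for setAndGetPrevious():

import java.util.Objects;
import reactor.core.publisher.Mono;

public class SetAndGetChangedSketch {
	static Mono<Boolean> setAndGetChanged(Mono<String> previous, String value) {
		return previous
				.map(oldValue -> !Objects.equals(oldValue, value))
				.defaultIfEmpty(value != null); // old code: defaultIfEmpty(false)
	}

	public static void main(String[] args) {
		// No previous value, writing "a": now reports true, previously false
		System.out.println(setAndGetChanged(Mono.empty(), "a").block());
	}
}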
@@ -6,7 +6,6 @@ import it.cavallium.dbengine.database.collections.JoinerBlocking.ValueGetterBloc
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Map.Entry;
-import java.util.Optional;
 import java.util.function.Function;
 import org.jetbrains.annotations.Nullable;
 import reactor.core.publisher.Flux;
@@ -18,7 +17,7 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
 	Mono<US> at(@Nullable CompositeSnapshot snapshot, T key);
 
 	default Mono<U> getValue(@Nullable CompositeSnapshot snapshot, T key, boolean existsAlmostCertainly) {
-		return this.at(snapshot, key).flatMap(v -> v.get(snapshot, existsAlmostCertainly));
+		return this.at(snapshot, key).flatMap(v -> v.get(snapshot, existsAlmostCertainly).doFinally(s -> v.release()));
 	}
 
 	default Mono<U> getValue(@Nullable CompositeSnapshot snapshot, T key) {
@@ -30,23 +29,29 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
 	}
 
 	default Mono<Void> putValue(T key, U value) {
-		return at(null, key).single().flatMap(v -> v.set(value));
+		return at(null, key).single().flatMap(v -> v.set(value).doFinally(s -> v.release()));
 	}
 
-	default Mono<Boolean> updateValue(T key, boolean existsAlmostCertainly, Function<Optional<U>, Optional<U>> updater) {
-		return at(null, key).single().flatMap(v -> v.update(updater, existsAlmostCertainly));
+	default Mono<Boolean> updateValue(T key, boolean existsAlmostCertainly, Function<@Nullable U, @Nullable U> updater) {
+		return at(null, key).single().flatMap(v -> v.update(updater, existsAlmostCertainly).doFinally(s -> v.release()));
 	}
 
-	default Mono<Boolean> updateValue(T key, Function<Optional<U>, Optional<U>> updater) {
+	default Mono<Boolean> updateValue(T key, Function<@Nullable U, @Nullable U> updater) {
 		return updateValue(key, false, updater);
 	}
 
 	default Mono<U> putValueAndGetPrevious(T key, U value) {
-		return at(null, key).single().flatMap(v -> v.setAndGetPrevious(value));
+		return at(null, key).single().flatMap(v -> v.setAndGetPrevious(value).doFinally(s -> v.release()));
 	}
 
+	/**
+	 *
+	 * @param key
+	 * @param value
+	 * @return true if the key was associated with any value, false if the key didn't exist.
+	 */
 	default Mono<Boolean> putValueAndGetStatus(T key, U value) {
-		return at(null, key).single().flatMap(v -> v.setAndGetStatus(value));
+		return at(null, key).single().flatMap(v -> v.setAndGetChanged(value).doFinally(s -> v.release())).single();
 	}
 
 	default Mono<Void> remove(T key) {
@@ -54,7 +59,7 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
 	}
 
 	default Mono<U> removeAndGetPrevious(T key) {
-		return at(null, key).flatMap(DatabaseStage::clearAndGetPrevious);
+		return at(null, key).flatMap(v -> v.clearAndGetPrevious().doFinally(s -> v.release()));
 	}
 
 	default Mono<Boolean> removeAndGetStatus(T key) {
@ -106,7 +111,7 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
|
|||||||
.flatMap(entriesReplacer)
|
.flatMap(entriesReplacer)
|
||||||
.flatMap(replacedEntry -> this
|
.flatMap(replacedEntry -> this
|
||||||
.at(null, replacedEntry.getKey())
|
.at(null, replacedEntry.getKey())
|
||||||
.map(entry -> entry.set(replacedEntry.getValue())))
|
.map(v -> v.set(replacedEntry.getValue()).doFinally(s -> v.release())))
|
||||||
.then();
|
.then();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -126,15 +131,23 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
default Mono<Boolean> update(Function<Optional<Map<T, U>>, Optional<Map<T, U>>> updater, boolean existsAlmostCertainly) {
|
default Mono<Boolean> update(Function<@Nullable Map<T, U>, @Nullable Map<T, U>> updater, boolean existsAlmostCertainly) {
|
||||||
return this
|
return this
|
||||||
.getAllValues(null)
|
.getAllValues(null)
|
||||||
.collectMap(Entry::getKey, Entry::getValue, HashMap::new)
|
.collectMap(Entry::getKey, Entry::getValue, HashMap::new)
|
||||||
.single()
|
.single()
|
||||||
.map(v -> v.isEmpty() ? Optional.<Map<T, U>>empty() : Optional.of(v))
|
.<Map<T, U>>handle((v, sink) -> {
|
||||||
.map(updater)
|
if (v == null || v.isEmpty()) {
|
||||||
.filter(Optional::isPresent)
|
sink.complete();
|
||||||
.map(Optional::get)
|
} else {
|
||||||
|
var result = updater.apply(v);
|
||||||
|
if (result == null) {
|
||||||
|
sink.complete();
|
||||||
|
} else {
|
||||||
|
sink.next(result);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
.flatMap(values -> this.setAllValues(Flux.fromIterable(values.entrySet())))
|
.flatMap(values -> this.setAllValues(Flux.fromIterable(values.entrySet())))
|
||||||
//todo: can be optimized by calculating the correct return value
|
//todo: can be optimized by calculating the correct return value
|
||||||
.thenReturn(true);
|
.thenReturn(true);
|
||||||
|
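Note: every accessor in DatabaseStageMap now pairs the stage obtained from at(...) with doFinally(s -> v.release()), so the reference-counted stage is released on completion, error and cancellation alike, and the Optional-based updaters become @Nullable-based ones. A minimal sketch of the same idiom using only Reactor and Netty's ReferenceCounted; the helper name is illustrative, not part of this codebase:

    import io.netty.util.ReferenceCounted;
    import java.util.function.Function;
    import reactor.core.publisher.Mono;

    final class ReleaseOnTerminate {
    	// Run an action against a reference-counted resource and release that
    	// resource exactly once when the inner Mono terminates or is cancelled.
    	static <R extends ReferenceCounted, T> Mono<T> using(Mono<R> acquire, Function<R, Mono<T>> action) {
    		return acquire.flatMap(resource -> action.apply(resource)
    				.doFinally(signal -> resource.release()));
    	}
    }

doFinally fires on every terminal signal, which is what makes it safe to use as the single release point here.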
SubStageGetter.java

@@ -1,5 +1,6 @@
 package it.cavallium.dbengine.database.collections;
 
+import io.netty.buffer.ByteBuf;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.LLDictionary;
 import org.jetbrains.annotations.Nullable;
@@ -10,8 +11,8 @@ public interface SubStageGetter<U, US extends DatabaseStage<U>> {
 
 	Mono<US> subStage(LLDictionary dictionary,
 			@Nullable CompositeSnapshot snapshot,
-			byte[] prefixKey,
-			Flux<byte[]> debuggingKeyFlux);
+			ByteBuf prefixKey,
+			Flux<ByteBuf> debuggingKeyFlux);
 
 	boolean isMultiKey();
 
SubStageGetterHashMap.java

@@ -1,5 +1,6 @@
 package it.cavallium.dbengine.database.collections;
 
+import io.netty.buffer.ByteBuf;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.LLDictionary;
 import it.cavallium.dbengine.database.serialization.Serializer;
@@ -23,15 +24,15 @@ public class SubStageGetterHashMap<T, U, TH> implements
 		assertsEnabled = assertsEnabledTmp;
 	}
 
-	private final Serializer<T, byte[]> keySerializer;
-	private final Serializer<U, byte[]> valueSerializer;
+	private final Serializer<T, ByteBuf> keySerializer;
+	private final Serializer<U, ByteBuf> valueSerializer;
 	private final Function<T, TH> keyHashFunction;
-	private final SerializerFixedBinaryLength<TH, byte[]> keyHashSerializer;
+	private final SerializerFixedBinaryLength<TH, ByteBuf> keyHashSerializer;
 
-	public SubStageGetterHashMap(Serializer<T, byte[]> keySerializer,
-			Serializer<U, byte[]> valueSerializer,
+	public SubStageGetterHashMap(Serializer<T, ByteBuf> keySerializer,
+			Serializer<U, ByteBuf> valueSerializer,
 			Function<T, TH> keyHashFunction,
-			SerializerFixedBinaryLength<TH, byte[]> keyHashSerializer) {
+			SerializerFixedBinaryLength<TH, ByteBuf> keyHashSerializer) {
 		this.keySerializer = keySerializer;
 		this.valueSerializer = valueSerializer;
 		this.keyHashFunction = keyHashFunction;
@@ -41,8 +42,8 @@ public class SubStageGetterHashMap<T, U, TH> implements
 	@Override
 	public Mono<DatabaseMapDictionaryHashed<T, U, TH>> subStage(LLDictionary dictionary,
 			@Nullable CompositeSnapshot snapshot,
-			byte[] prefixKey,
-			Flux<byte[]> debuggingKeyFlux) {
+			ByteBuf prefixKey,
+			Flux<ByteBuf> debuggingKeyFlux) {
 		Mono<DatabaseMapDictionaryHashed<T, U, TH>> result = Mono.just(DatabaseMapDictionaryHashed.tail(dictionary,
 				prefixKey,
 				keySerializer,
@@ -67,9 +68,9 @@ public class SubStageGetterHashMap<T, U, TH> implements
 		return assertsEnabled;
 	}
 
-	private Mono<Void> checkKeyFluxConsistency(byte[] prefixKey, Flux<byte[]> keyFlux) {
+	private Mono<Void> checkKeyFluxConsistency(ByteBuf prefixKey, Flux<ByteBuf> keyFlux) {
 		return keyFlux.doOnNext(key -> {
-			assert key.length == prefixKey.length + getKeyHashBinaryLength();
+			assert key.readableBytes() == prefixKey.readableBytes() + getKeyHashBinaryLength();
 		}).then();
 	}
 
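Note: the consistency asserts in these getters now compare readableBytes() instead of array lengths. readableBytes() counts only the bytes between readerIndex and writerIndex, so it is the ByteBuf analogue of byte[].length only while the buffer has not been partially consumed. A small self-contained demonstration:

    import io.netty.buffer.ByteBuf;
    import io.netty.buffer.Unpooled;

    public class ReadableBytesDemo {
    	public static void main(String[] args) {
    		ByteBuf buf = Unpooled.buffer(16);           // capacity 16, nothing written yet
    		buf.writeLong(42L);                          // writerIndex = 8
    		System.out.println(buf.readableBytes());     // 8, the analogue of byte[].length here
    		buf.readInt();                               // reading advances readerIndex
    		System.out.println(buf.readableBytes());     // 4
    		buf.release();
    	}
    }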
SubStageGetterHashSet.java

@@ -1,5 +1,6 @@
 package it.cavallium.dbengine.database.collections;
 
+import io.netty.buffer.ByteBuf;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.LLDictionary;
 import it.cavallium.dbengine.database.collections.DatabaseEmpty.Nothing;
@@ -24,13 +25,13 @@ public class SubStageGetterHashSet<T, TH> implements
 		assertsEnabled = assertsEnabledTmp;
 	}
 
-	private final Serializer<T, byte[]> keySerializer;
+	private final Serializer<T, ByteBuf> keySerializer;
 	private final Function<T, TH> keyHashFunction;
-	private final SerializerFixedBinaryLength<TH, byte[]> keyHashSerializer;
+	private final SerializerFixedBinaryLength<TH, ByteBuf> keyHashSerializer;
 
-	public SubStageGetterHashSet(Serializer<T, byte[]> keySerializer,
+	public SubStageGetterHashSet(Serializer<T, ByteBuf> keySerializer,
 			Function<T, TH> keyHashFunction,
-			SerializerFixedBinaryLength<TH, byte[]> keyHashSerializer) {
+			SerializerFixedBinaryLength<TH, ByteBuf> keyHashSerializer) {
 		this.keySerializer = keySerializer;
 		this.keyHashFunction = keyHashFunction;
 		this.keyHashSerializer = keyHashSerializer;
@@ -39,8 +40,8 @@ public class SubStageGetterHashSet<T, TH> implements
 	@Override
 	public Mono<DatabaseSetDictionaryHashed<T, TH>> subStage(LLDictionary dictionary,
 			@Nullable CompositeSnapshot snapshot,
-			byte[] prefixKey,
-			Flux<byte[]> debuggingKeyFlux) {
+			ByteBuf prefixKey,
+			Flux<ByteBuf> debuggingKeyFlux) {
 		Mono<DatabaseSetDictionaryHashed<T, TH>> result = Mono.just(DatabaseSetDictionaryHashed.tail(dictionary,
 				prefixKey,
 				keySerializer,
@@ -64,9 +65,9 @@ public class SubStageGetterHashSet<T, TH> implements
 		return assertsEnabled;
 	}
 
-	private Mono<Void> checkKeyFluxConsistency(byte[] prefixKey, Flux<byte[]> keyFlux) {
+	private Mono<Void> checkKeyFluxConsistency(ByteBuf prefixKey, Flux<ByteBuf> keyFlux) {
 		return keyFlux.doOnNext(key -> {
-			assert key.length == prefixKey.length + getKeyHashBinaryLength();
+			assert key.readableBytes() == prefixKey.readableBytes() + getKeyHashBinaryLength();
 		}).then();
 	}
 
SubStageGetterMap.java

@@ -1,5 +1,6 @@
 package it.cavallium.dbengine.database.collections;
 
+import io.netty.buffer.ByteBuf;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.LLDictionary;
 import it.cavallium.dbengine.database.serialization.Serializer;
@@ -20,11 +21,11 @@ public class SubStageGetterMap<T, U> implements SubStageGetter<Map<T, U>, Databa
 		assertsEnabled = assertsEnabledTmp;
 	}
 
-	private final SerializerFixedBinaryLength<T, byte[]> keySerializer;
-	private final Serializer<U, byte[]> valueSerializer;
+	private final SerializerFixedBinaryLength<T, ByteBuf> keySerializer;
+	private final Serializer<U, ByteBuf> valueSerializer;
 
-	public SubStageGetterMap(SerializerFixedBinaryLength<T, byte[]> keySerializer,
-			Serializer<U, byte[]> valueSerializer) {
+	public SubStageGetterMap(SerializerFixedBinaryLength<T, ByteBuf> keySerializer,
+			Serializer<U, ByteBuf> valueSerializer) {
 		this.keySerializer = keySerializer;
 		this.valueSerializer = valueSerializer;
 	}
@@ -32,8 +33,8 @@ public class SubStageGetterMap<T, U> implements SubStageGetter<Map<T, U>, Databa
 	@Override
 	public Mono<DatabaseMapDictionary<T, U>> subStage(LLDictionary dictionary,
 			@Nullable CompositeSnapshot snapshot,
-			byte[] prefixKey,
-			Flux<byte[]> debuggingKeyFlux) {
+			ByteBuf prefixKey,
+			Flux<ByteBuf> debuggingKeyFlux) {
 		Mono<DatabaseMapDictionary<T, U>> result = Mono.just(DatabaseMapDictionary.tail(dictionary, prefixKey, keySerializer,
 				valueSerializer
 		));
@@ -54,9 +55,9 @@ public class SubStageGetterMap<T, U> implements SubStageGetter<Map<T, U>, Databa
 		return assertsEnabled;
 	}
 
-	private Mono<Void> checkKeyFluxConsistency(byte[] prefixKey, Flux<byte[]> keyFlux) {
+	private Mono<Void> checkKeyFluxConsistency(ByteBuf prefixKey, Flux<ByteBuf> keyFlux) {
 		return keyFlux.doOnNext(key -> {
-			assert key.length == prefixKey.length + getKeyBinaryLength();
+			assert key.readableBytes() == prefixKey.readableBytes() + getKeyBinaryLength();
 		}).then();
 	}
 
SubStageGetterMapDeep.java

@@ -1,5 +1,6 @@
 package it.cavallium.dbengine.database.collections;
 
+import io.netty.buffer.ByteBuf;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.LLDictionary;
 import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
@@ -20,11 +21,11 @@ public class SubStageGetterMapDeep<T, U, US extends DatabaseStage<U>> implements
 	}
 
 	private final SubStageGetter<U, US> subStageGetter;
-	private final SerializerFixedBinaryLength<T, byte[]> keySerializer;
+	private final SerializerFixedBinaryLength<T, ByteBuf> keySerializer;
 	private final int keyExtLength;
 
 	public SubStageGetterMapDeep(SubStageGetter<U, US> subStageGetter,
-			SerializerFixedBinaryLength<T, byte[]> keySerializer,
+			SerializerFixedBinaryLength<T, ByteBuf> keySerializer,
 			int keyExtLength) {
 		this.subStageGetter = subStageGetter;
 		this.keySerializer = keySerializer;
@@ -46,8 +47,8 @@ public class SubStageGetterMapDeep<T, U, US extends DatabaseStage<U>> implements
 	@Override
 	public Mono<DatabaseMapDictionaryDeep<T, U, US>> subStage(LLDictionary dictionary,
 			@Nullable CompositeSnapshot snapshot,
-			byte[] prefixKey,
-			Flux<byte[]> debuggingKeyFlux) {
+			ByteBuf prefixKey,
+			Flux<ByteBuf> debuggingKeyFlux) {
 		Mono<DatabaseMapDictionaryDeep<T, U, US>> result = Mono.just(DatabaseMapDictionaryDeep.deepIntermediate(dictionary,
 				prefixKey,
 				keySerializer,
@@ -71,9 +72,9 @@ public class SubStageGetterMapDeep<T, U, US extends DatabaseStage<U>> implements
 		return assertsEnabled;
 	}
 
-	private Mono<Void> checkKeyFluxConsistency(byte[] prefixKey, Flux<byte[]> keyFlux) {
+	private Mono<Void> checkKeyFluxConsistency(ByteBuf prefixKey, Flux<ByteBuf> keyFlux) {
 		return keyFlux.doOnNext(key -> {
-			assert key.length == prefixKey.length + getKeyBinaryLength();
+			assert key.readableBytes() == prefixKey.readableBytes() + getKeyBinaryLength();
 		}).then();
 	}
 
SubStageGetterSet.java

@@ -1,5 +1,6 @@
 package it.cavallium.dbengine.database.collections;
 
+import io.netty.buffer.ByteBuf;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.LLDictionary;
 import it.cavallium.dbengine.database.collections.DatabaseEmpty.Nothing;
@@ -20,22 +21,27 @@ public class SubStageGetterSet<T> implements SubStageGetter<Map<T, Nothing>, Dat
 		assertsEnabled = assertsEnabledTmp;
 	}
 
-	private final SerializerFixedBinaryLength<T, byte[]> keySerializer;
+	private final SerializerFixedBinaryLength<T, ByteBuf> keySerializer;
 
-	public SubStageGetterSet(SerializerFixedBinaryLength<T, byte[]> keySerializer) {
+	public SubStageGetterSet(SerializerFixedBinaryLength<T, ByteBuf> keySerializer) {
 		this.keySerializer = keySerializer;
 	}
 
 	@Override
 	public Mono<DatabaseSetDictionary<T>> subStage(LLDictionary dictionary,
 			@Nullable CompositeSnapshot snapshot,
-			byte[] prefixKey,
-			Flux<byte[]> debuggingKeyFlux) {
-		Mono<DatabaseSetDictionary<T>> result = Mono.just(DatabaseSetDictionary.tail(dictionary, prefixKey, keySerializer));
-		if (assertsEnabled) {
-			return checkKeyFluxConsistency(prefixKey, debuggingKeyFlux).then(result);
-		} else {
-			return result;
+			ByteBuf prefixKey,
+			Flux<ByteBuf> debuggingKeyFlux) {
+		try {
+			Mono<DatabaseSetDictionary<T>> result = Mono
+					.fromSupplier(() -> DatabaseSetDictionary.tail(dictionary, prefixKey.retain(), keySerializer));
+			if (assertsEnabled) {
+				return checkKeyFluxConsistency(prefixKey.retain(), debuggingKeyFlux).then(result);
+			} else {
+				return result;
+			}
+		} finally {
+			prefixKey.release();
 		}
 	}
 
@@ -49,10 +55,10 @@ public class SubStageGetterSet<T> implements SubStageGetter<Map<T, Nothing>, Dat
 		return assertsEnabled;
 	}
 
-	private Mono<Void> checkKeyFluxConsistency(byte[] prefixKey, Flux<byte[]> keyFlux) {
+	private Mono<Void> checkKeyFluxConsistency(ByteBuf prefixKey, Flux<ByteBuf> keyFlux) {
 		return keyFlux.doOnNext(key -> {
-			assert key.length == prefixKey.length + getKeyBinaryLength();
-		}).then();
+			assert key.readableBytes() == prefixKey.readableBytes() + getKeyBinaryLength();
+		}).doFinally(s -> prefixKey.release()).then();
 	}
 
 	public int getKeyBinaryLength() {
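Note: subStage now takes ownership of prefixKey. Each consumer that may run later gets its own retain(), and the method releases its incoming reference in the finally block, so the reference count returns to its starting value on every path. A self-contained sketch of that one-reference-per-consumer rule (the method names are illustrative):

    import io.netty.buffer.ByteBuf;
    import io.netty.buffer.Unpooled;
    import io.netty.util.CharsetUtil;

    public class RetainReleaseDemo {
    	// Each consumer of a ByteBuf owns one reference: retain() before handing
    	// it off, release() when that consumer is done.
    	static String consume(ByteBuf buf) { // takes ownership of one reference
    		try {
    			return buf.toString(CharsetUtil.UTF_8);
    		} finally {
    			buf.release();
    		}
    	}

    	public static void main(String[] args) {
    		ByteBuf key = Unpooled.copiedBuffer("prefix", CharsetUtil.UTF_8); // refCnt = 1
    		String a = consume(key.retain()); // refCnt 2 -> 1 inside consume
    		String b = consume(key);          // hands over the last reference; refCnt -> 0
    		System.out.println(a + " " + b);
    	}
    }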
SubStageGetterSingle.java

@@ -1,7 +1,9 @@
 package it.cavallium.dbengine.database.collections;
 
+import io.netty.buffer.ByteBuf;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.LLDictionary;
+import it.cavallium.dbengine.database.LLUtils;
 import it.cavallium.dbengine.database.serialization.Serializer;
 import java.util.Arrays;
 import org.jetbrains.annotations.Nullable;
@@ -19,22 +21,22 @@ public class SubStageGetterSingle<T> implements SubStageGetter<T, DatabaseStageE
 		assertsEnabled = assertsEnabledTmp;
 	}
 
-	private final Serializer<T, byte[]> serializer;
+	private final Serializer<T, ByteBuf> serializer;
 
-	public SubStageGetterSingle(Serializer<T, byte[]> serializer) {
+	public SubStageGetterSingle(Serializer<T, ByteBuf> serializer) {
 		this.serializer = serializer;
 	}
 
 	@Override
 	public Mono<DatabaseStageEntry<T>> subStage(LLDictionary dictionary,
 			@Nullable CompositeSnapshot snapshot,
-			byte[] keyPrefix,
-			Flux<byte[]> debuggingKeyFlux) {
+			ByteBuf keyPrefix,
+			Flux<ByteBuf> debuggingKeyFlux) {
 		return debuggingKeyFlux
 				.singleOrEmpty()
 				.flatMap(key -> Mono
 						.<DatabaseStageEntry<T>>fromCallable(() -> {
-							if (!Arrays.equals(keyPrefix, key)) {
+							if (!LLUtils.equals(keyPrefix, key)) {
 								throw new IndexOutOfBoundsException("Found more than one element!");
 							}
 							return null;
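Note: Arrays.equals is replaced by LLUtils.equals, a project-internal helper whose implementation is not visible in this excerpt; presumably it compares the readable regions of two buffers, as Netty's own ByteBufUtil.equals does. For reference, the stock Netty behaviour:

    import io.netty.buffer.ByteBuf;
    import io.netty.buffer.ByteBufUtil;
    import io.netty.buffer.Unpooled;

    public class BufEqualsDemo {
    	public static void main(String[] args) {
    		ByteBuf a = Unpooled.wrappedBuffer(new byte[] {1, 2, 3});
    		ByteBuf b = Unpooled.wrappedBuffer(new byte[] {0, 1, 2, 3});
    		b.readByte(); // equality is defined over the *readable* regions only
    		System.out.println(ByteBufUtil.equals(a, b)); // true
    		a.release();
    		b.release();
    	}
    }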
SubStageGetterSingleBytes.java

@@ -1,8 +1,9 @@
 package it.cavallium.dbengine.database.collections;
 
+import io.netty.buffer.ByteBuf;
 import it.cavallium.dbengine.database.serialization.Serializer;
 
-public class SubStageGetterSingleBytes extends SubStageGetterSingle<byte[]> {
+public class SubStageGetterSingleBytes extends SubStageGetterSingle<ByteBuf> {
 
 	public SubStageGetterSingleBytes() {
 		super(Serializer.noop());
IterateBound.java (new file)

@@ -0,0 +1,6 @@
+package it.cavallium.dbengine.database.disk;
+
+public enum IterateBound {
+	LOWER,
+	UPPER
+}
LLLocalDatabaseConnection.java

@@ -36,14 +36,18 @@ public class LLLocalDatabaseConnection implements LLDatabaseConnection {
 	}
 
 	@Override
-	public Mono<LLLocalKeyValueDatabase> getDatabase(String name, List<Column> columns, boolean lowMemory) {
+	public Mono<LLLocalKeyValueDatabase> getDatabase(String name,
+			List<Column> columns,
+			boolean lowMemory,
+			boolean inMemory) {
 		return Mono
 				.fromCallable(() -> new LLLocalKeyValueDatabase(name,
 						basePath.resolve("database_" + name),
 						columns,
 						new LinkedList<>(),
 						crashIfWalError,
-						lowMemory
+						lowMemory,
+						inMemory
 				))
 				.subscribeOn(Schedulers.boundedElastic());
 	}
@@ -55,7 +59,8 @@ public class LLLocalDatabaseConnection implements LLDatabaseConnection {
 			TextFieldsSimilarity textFieldsSimilarity,
 			Duration queryRefreshDebounceTime,
 			Duration commitDebounceTime,
-			boolean lowMemory) {
+			boolean lowMemory,
+			boolean inMemory) {
 		return Mono
 				.fromCallable(() -> {
 					if (instancesCount != 1) {
@@ -66,7 +71,8 @@ public class LLLocalDatabaseConnection implements LLDatabaseConnection {
 								textFieldsSimilarity,
 								queryRefreshDebounceTime,
 								commitDebounceTime,
-								lowMemory
+								lowMemory,
+								inMemory
 						);
 					} else {
 						return new LLLocalLuceneIndex(basePath.resolve("lucene"),
@@ -76,6 +82,7 @@ public class LLLocalDatabaseConnection implements LLDatabaseConnection {
 								queryRefreshDebounceTime,
 								commitDebounceTime,
 								lowMemory,
+								inMemory,
 								null
 						);
 					}
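Note: for callers of LLDatabaseConnection, the only API change is the extra trailing inMemory flag, which keeps the whole database off-disk (mainly useful for tests). A hypothetical call site, with column setup elided and project package names assumed:

    import java.util.List;
    import reactor.core.publisher.Mono;

    class InMemoryUsageSketch {
    	// Illustrative only: opens a throwaway database that never touches the filesystem.
    	static Mono<LLLocalKeyValueDatabase> openForTests(LLLocalDatabaseConnection connection) {
    		return connection.getDatabase("test-db", List.of(), false /* lowMemory */, true /* inMemory */);
    	}
    }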
(File diff suppressed because it is too large)
LLLocalEntryReactiveRocksIterator.java

@@ -1,5 +1,7 @@
 package it.cavallium.dbengine.database.disk;
 
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufAllocator;
 import it.cavallium.dbengine.database.LLRange;
 import java.util.Map;
 import java.util.Map.Entry;
@@ -7,17 +9,18 @@ import org.rocksdb.ColumnFamilyHandle;
 import org.rocksdb.ReadOptions;
 import org.rocksdb.RocksDB;
 
-public class LLLocalEntryReactiveRocksIterator extends LLLocalReactiveRocksIterator<Entry<byte[], byte[]>> {
+public class LLLocalEntryReactiveRocksIterator extends LLLocalReactiveRocksIterator<Entry<ByteBuf, ByteBuf>> {
 
 	public LLLocalEntryReactiveRocksIterator(RocksDB db,
+			ByteBufAllocator alloc,
 			ColumnFamilyHandle cfh,
 			LLRange range,
 			ReadOptions readOptions) {
-		super(db, cfh, range, readOptions, true);
+		super(db, alloc, cfh, range, readOptions, true);
 	}
 
 	@Override
-	public Entry<byte[], byte[]> getEntry(byte[] key, byte[] value) {
+	public Entry<ByteBuf, ByteBuf> getEntry(ByteBuf key, ByteBuf value) {
 		return Map.entry(key, value);
 	}
 }
LLLocalGroupedEntryReactiveRocksIterator.java

@@ -1,5 +1,7 @@
 package it.cavallium.dbengine.database.disk;
 
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufAllocator;
 import it.cavallium.dbengine.database.LLRange;
 import java.util.Map;
 import java.util.Map.Entry;
@@ -8,19 +10,18 @@ import org.rocksdb.ReadOptions;
 import org.rocksdb.RocksDB;
 
 public class LLLocalGroupedEntryReactiveRocksIterator extends
-		LLLocalGroupedReactiveRocksIterator<Entry<byte[], byte[]>> {
+		LLLocalGroupedReactiveRocksIterator<Entry<ByteBuf, ByteBuf>> {
 
-	public LLLocalGroupedEntryReactiveRocksIterator(RocksDB db,
-			ColumnFamilyHandle cfh,
+	public LLLocalGroupedEntryReactiveRocksIterator(RocksDB db, ByteBufAllocator alloc, ColumnFamilyHandle cfh,
 			int prefixLength,
 			LLRange range,
 			ReadOptions readOptions,
 			String debugName) {
-		super(db, cfh, prefixLength, range, readOptions, false, true);
+		super(db, alloc, cfh, prefixLength, range, readOptions, false, true);
 	}
 
 	@Override
-	public Entry<byte[], byte[]> getEntry(byte[] key, byte[] value) {
+	public Entry<ByteBuf, ByteBuf> getEntry(ByteBuf key, ByteBuf value) {
 		return Map.entry(key, value);
 	}
 }
LLLocalGroupedKeyReactiveRocksIterator.java

@@ -1,23 +1,26 @@
 package it.cavallium.dbengine.database.disk;
 
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufAllocator;
 import it.cavallium.dbengine.database.LLRange;
 import org.rocksdb.ColumnFamilyHandle;
 import org.rocksdb.ReadOptions;
 import org.rocksdb.RocksDB;
 
-public class LLLocalGroupedKeyReactiveRocksIterator extends LLLocalGroupedReactiveRocksIterator<byte[]> {
+public class LLLocalGroupedKeyReactiveRocksIterator extends LLLocalGroupedReactiveRocksIterator<ByteBuf> {
 
 	public LLLocalGroupedKeyReactiveRocksIterator(RocksDB db,
+			ByteBufAllocator alloc,
 			ColumnFamilyHandle cfh,
 			int prefixLength,
 			LLRange range,
 			ReadOptions readOptions,
 			String debugName) {
-		super(db, cfh, prefixLength, range, readOptions, true, false);
+		super(db, alloc, cfh, prefixLength, range, readOptions, true, false);
 	}
 
 	@Override
-	public byte[] getEntry(byte[] key, byte[] value) {
+	public ByteBuf getEntry(ByteBuf key, ByteBuf value) {
 		return key;
 	}
 }
LLLocalGroupedReactiveRocksIterator.java

@@ -2,21 +2,24 @@ package it.cavallium.dbengine.database.disk;
 
 import static it.cavallium.dbengine.database.disk.LLLocalDictionary.getRocksIterator;
 
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufAllocator;
+import io.netty.buffer.ByteBufUtil;
 import it.cavallium.dbengine.database.LLRange;
+import it.cavallium.dbengine.database.LLUtils;
 import it.unimi.dsi.fastutil.objects.ObjectArrayList;
-import java.util.Arrays;
 import java.util.List;
 import org.rocksdb.ColumnFamilyHandle;
 import org.rocksdb.ReadOptions;
 import org.rocksdb.RocksDB;
 import org.rocksdb.RocksMutableObject;
 import reactor.core.publisher.Flux;
+import static io.netty.buffer.Unpooled.*;
 
 public abstract class LLLocalGroupedReactiveRocksIterator<T> {
 
-	private static final byte[] EMPTY = new byte[0];
 
 	private final RocksDB db;
+	private final ByteBufAllocator alloc;
 	private final ColumnFamilyHandle cfh;
 	private final int prefixLength;
 	private final LLRange range;
@@ -24,14 +27,14 @@ public abstract class LLLocalGroupedReactiveRocksIterator<T> {
 	private final boolean canFillCache;
 	private final boolean readValues;
 
-	public LLLocalGroupedReactiveRocksIterator(RocksDB db,
-			ColumnFamilyHandle cfh,
+	public LLLocalGroupedReactiveRocksIterator(RocksDB db, ByteBufAllocator alloc, ColumnFamilyHandle cfh,
 			int prefixLength,
 			LLRange range,
 			ReadOptions readOptions,
 			boolean canFillCache,
 			boolean readValues) {
 		this.db = db;
+		this.alloc = alloc;
 		this.cfh = cfh;
 		this.prefixLength = prefixLength;
 		this.range = range;
@@ -50,18 +53,33 @@ public abstract class LLLocalGroupedReactiveRocksIterator<T> {
 		}, (tuple, sink) -> {
 			var rocksIterator = tuple.getT1();
 			ObjectArrayList<T> values = new ObjectArrayList<>();
-			byte[] firstGroupKey = null;
+			ByteBuf firstGroupKey = null;
 
-			while (rocksIterator.isValid()) {
-				byte[] key = rocksIterator.key();
-				if (firstGroupKey == null) {
-					firstGroupKey = key;
-				} else if (!Arrays.equals(firstGroupKey, 0, prefixLength, key, 0, prefixLength)) {
-					break;
+			try {
+				while (rocksIterator.isValid()) {
+					ByteBuf key = LLUtils.readDirectNioBuffer(alloc, rocksIterator::key);
+					try {
+						if (firstGroupKey == null) {
+							firstGroupKey = key.retainedSlice();
+						} else if (!ByteBufUtil.equals(firstGroupKey, 0, key, 0, prefixLength)) {
+							break;
+						}
+						ByteBuf value = readValues ? LLUtils.readDirectNioBuffer(alloc, rocksIterator::value) : EMPTY_BUFFER;
+						try {
+							rocksIterator.next();
+							T entry = getEntry(key.retain(), value.retain());
+							values.add(entry);
+						} finally {
+							value.release();
+						}
+					} finally {
+						key.release();
+					}
+				}
+			} finally {
+				if (firstGroupKey != null) {
+					firstGroupKey.release();
 				}
-				byte[] value = readValues ? rocksIterator.value() : EMPTY;
-				rocksIterator.next();
-				values.add(getEntry(key, value));
 			}
 			if (!values.isEmpty()) {
 				sink.next(values);
@@ -72,10 +90,11 @@ public abstract class LLLocalGroupedReactiveRocksIterator<T> {
 		}, tuple -> {
 			var rocksIterator = tuple.getT1();
 			rocksIterator.close();
-			tuple.getT2().ifPresent(RocksMutableObject::close);
-			tuple.getT3().ifPresent(RocksMutableObject::close);
+			tuple.getT2().release();
+			tuple.getT3().release();
+			range.release();
 		});
 	}
 
-	public abstract T getEntry(byte[] key, byte[] value);
+	public abstract T getEntry(ByteBuf key, ByteBuf value);
 }
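Note: LLUtils.readDirectNioBuffer is introduced here but defined in a file outside this excerpt (the large suppressed diff). A plausible shape, assuming it wraps RocksDB's RocksIterator.key(ByteBuffer)/value(ByteBuffer) overloads, which fill a caller-supplied direct buffer and return the full length of the key or value; everything below is a sketch under that assumption, not the real implementation:

    import io.netty.buffer.ByteBuf;
    import io.netty.buffer.ByteBufAllocator;
    import java.nio.ByteBuffer;
    import java.util.function.ToIntFunction;

    final class ReadDirectSketch {
    	static ByteBuf readDirectNioBuffer(ByteBufAllocator alloc, ToIntFunction<ByteBuffer> reader) {
    		ByteBuf buf = alloc.directBuffer();
    		try {
    			while (true) {
    				// Expose the writable region as a direct NIO buffer for the JNI call
    				ByteBuffer nio = buf.internalNioBuffer(buf.writerIndex(), buf.writableBytes());
    				int size = reader.applyAsInt(nio); // returns the full key/value length
    				if (size <= buf.writableBytes()) {
    					buf.writerIndex(buf.writerIndex() + size);
    					return buf;
    				}
    				buf.ensureWritable(size); // buffer was too small: grow and read again
    			}
    		} catch (Throwable t) {
    			buf.release(); // do not leak the pooled buffer on failure
    			throw t;
    		}
    	}
    }

Written this way the bytes go from RocksDB into an off-heap buffer without ever materialising as a heap byte[], which is the point of the whole commit.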
LLLocalKeyPrefixReactiveRocksIterator.java

@@ -1,18 +1,22 @@
 package it.cavallium.dbengine.database.disk;
 
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufAllocator;
+import io.netty.buffer.ByteBufUtil;
 import it.cavallium.dbengine.database.LLRange;
+import it.cavallium.dbengine.database.LLUtils;
 import java.util.Arrays;
 import org.rocksdb.ColumnFamilyHandle;
 import org.rocksdb.ReadOptions;
 import org.rocksdb.RocksDB;
 import org.rocksdb.RocksMutableObject;
 import reactor.core.publisher.Flux;
+import static io.netty.buffer.Unpooled.*;
 
 public class LLLocalKeyPrefixReactiveRocksIterator {
 
-	private static final byte[] EMPTY = new byte[0];
 
 	private final RocksDB db;
+	private final ByteBufAllocator alloc;
 	private final ColumnFamilyHandle cfh;
 	private final int prefixLength;
 	private final LLRange range;
@@ -20,14 +24,14 @@ public class LLLocalKeyPrefixReactiveRocksIterator {
 	private final boolean canFillCache;
 	private final String debugName;
 
-	public LLLocalKeyPrefixReactiveRocksIterator(RocksDB db,
-			ColumnFamilyHandle cfh,
+	public LLLocalKeyPrefixReactiveRocksIterator(RocksDB db, ByteBufAllocator alloc, ColumnFamilyHandle cfh,
 			int prefixLength,
 			LLRange range,
 			ReadOptions readOptions,
 			boolean canFillCache,
 			String debugName) {
 		this.db = db;
+		this.alloc = alloc;
 		this.cfh = cfh;
 		this.prefixLength = prefixLength;
 		this.range = range;
@@ -37,7 +41,7 @@ public class LLLocalKeyPrefixReactiveRocksIterator {
 	}
 
 
-	public Flux<byte[]> flux() {
+	public Flux<ByteBuf> flux() {
 		return Flux
 				.generate(() -> {
 					var readOptions = new ReadOptions(this.readOptions);
@@ -45,32 +49,42 @@ public class LLLocalKeyPrefixReactiveRocksIterator {
 						//readOptions.setReadaheadSize(2 * 1024 * 1024);
 						readOptions.setFillCache(canFillCache);
 					}
-					return LLLocalDictionary.getRocksIterator(readOptions, range, db, cfh);
+					return LLLocalDictionary.getRocksIterator(readOptions, range.retain(), db, cfh);
 				}, (tuple, sink) -> {
 					var rocksIterator = tuple.getT1();
-					byte[] firstGroupKey = null;
-					while (rocksIterator.isValid()) {
-						byte[] key = rocksIterator.key();
-						if (firstGroupKey == null) {
-							firstGroupKey = key;
-						} else if (!Arrays.equals(firstGroupKey, 0, prefixLength, key, 0, prefixLength)) {
-							break;
+					ByteBuf firstGroupKey = null;
+					try {
+						while (rocksIterator.isValid()) {
+							ByteBuf key = LLUtils.readDirectNioBuffer(alloc, rocksIterator::key);
+							try {
+								if (firstGroupKey == null) {
+									firstGroupKey = key.retain();
+								} else if (!ByteBufUtil.equals(firstGroupKey, 0, key, 0, prefixLength)) {
+									break;
+								}
+								rocksIterator.next();
+							} finally {
+								key.release();
+							}
+						}
+						if (firstGroupKey != null) {
+							var groupKeyPrefix = firstGroupKey.slice(0, prefixLength);
+							sink.next(groupKeyPrefix.retain());
+						} else {
+							sink.complete();
+						}
+					} finally {
+						if (firstGroupKey != null) {
+							firstGroupKey.release();
 						}
-						rocksIterator.next();
-					}
-					if (firstGroupKey != null) {
-						var groupKeyPrefix = Arrays.copyOf(firstGroupKey, prefixLength);
-						sink.next(groupKeyPrefix);
-					} else {
-						sink.complete();
 					}
 					return tuple;
 				}, tuple -> {
 					var rocksIterator = tuple.getT1();
 					rocksIterator.close();
-					tuple.getT2().ifPresent(RocksMutableObject::close);
-					tuple.getT3().ifPresent(RocksMutableObject::close);
+					tuple.getT2().release();
+					tuple.getT3().release();
+					range.release();
 				});
 	}
 
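Note: all of these iterator classes lean on the three-argument Flux.generate: the state supplier opens the native iterator (now also retaining the range), the generator callback emits one item per downstream request, and the state consumer runs exactly once at termination or cancellation to close native handles and release buffers. A tiny self-contained demonstration of that lifecycle:

    import java.util.concurrent.atomic.AtomicInteger;
    import reactor.core.publisher.Flux;

    public class GenerateLifecycleDemo {
    	public static void main(String[] args) {
    		Flux<Integer> numbers = Flux.generate(
    				() -> new AtomicInteger(0),               // state supplier: acquire the resource
    				(state, sink) -> {
    					int next = state.incrementAndGet();
    					if (next > 3) {
    						sink.complete();
    					} else {
    						sink.next(next);
    					}
    					return state;
    				},
    				state -> System.out.println("released")   // state consumer: resource cleanup
    		);
    		numbers.subscribe(System.out::println);           // prints 1 2 3 released
    	}
    }

The cleanup callback plays the role that try-with-resources would play in blocking code, which is why the RocksIterator, its bound slices and the retained range are all disposed there.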
LLLocalKeyReactiveRocksIterator.java

@@ -1,21 +1,24 @@
 package it.cavallium.dbengine.database.disk;
 
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufAllocator;
 import it.cavallium.dbengine.database.LLRange;
 import org.rocksdb.ColumnFamilyHandle;
 import org.rocksdb.ReadOptions;
 import org.rocksdb.RocksDB;
 
-public class LLLocalKeyReactiveRocksIterator extends LLLocalReactiveRocksIterator<byte[]> {
+public class LLLocalKeyReactiveRocksIterator extends LLLocalReactiveRocksIterator<ByteBuf> {
 
 	public LLLocalKeyReactiveRocksIterator(RocksDB db,
+			ByteBufAllocator alloc,
 			ColumnFamilyHandle cfh,
 			LLRange range,
 			ReadOptions readOptions) {
-		super(db, cfh, range, readOptions, false);
+		super(db, alloc, cfh, range, readOptions, false);
 	}
 
 	@Override
-	public byte[] getEntry(byte[] key, byte[] value) {
+	public ByteBuf getEntry(ByteBuf key, ByteBuf value) {
 		return key;
 	}
 }
LLLocalKeyValueDatabase.java

@@ -59,6 +59,7 @@ public class LLLocalKeyValueDatabase implements LLKeyValueDatabase {
 
 	private final Scheduler dbScheduler;
 	private final Path dbPath;
+	private final boolean inMemory;
 	private final String name;
 	private RocksDB db;
 	private final Map<Column, ColumnFamilyHandle> handles;
@@ -66,7 +67,7 @@ public class LLLocalKeyValueDatabase implements LLKeyValueDatabase {
 	private final AtomicLong nextSnapshotNumbers = new AtomicLong(1);
 
 	public LLLocalKeyValueDatabase(String name, Path path, List<Column> columns, List<ColumnFamilyHandle> handles,
-			boolean crashIfWalError, boolean lowMemory) throws IOException {
+			boolean crashIfWalError, boolean lowMemory, boolean inMemory) throws IOException {
 		Options options = openRocksDb(path, crashIfWalError, lowMemory);
 		try {
 			List<ColumnFamilyDescriptor> descriptors = new LinkedList<>();
@@ -83,6 +84,7 @@ public class LLLocalKeyValueDatabase implements LLKeyValueDatabase {
 			String dbPathString = databasesDirPath.toString() + File.separatorChar + path.getFileName();
 			Path dbPath = Paths.get(dbPathString);
 			this.dbPath = dbPath;
+			this.inMemory = inMemory;
 			this.name = name;
 			this.dbScheduler = Schedulers.newBoundedElastic(lowMemory ? Runtime.getRuntime().availableProcessors()
 					: Math.max(8, Runtime.getRuntime().availableProcessors()),
@@ -92,12 +94,17 @@ public class LLLocalKeyValueDatabase implements LLKeyValueDatabase {
 					true
 			);
 
-			createIfNotExists(descriptors, options, dbPath, dbPathString);
+			createIfNotExists(descriptors, options, inMemory, this.dbPath, dbPathString);
 			// Create all column families that don't exist
-			createAllColumns(descriptors, options, dbPathString);
+			createAllColumns(descriptors, options, inMemory, dbPathString);
 
 			// a factory method that returns a RocksDB instance
-			this.db = RocksDB.open(new DBOptions(options), dbPathString, descriptors, handles);
+			this.db = RocksDB.open(new DBOptions(options),
+					dbPathString,
+					inMemory ? List.of(DEFAULT_COLUMN_FAMILY) : descriptors,
+					handles
+			);
+			createInMemoryColumns(descriptors, inMemory, handles);
 			this.handles = new HashMap<>();
 			for (int i = 0; i < columns.size(); i++) {
 				this.handles.put(columns.get(i), handles.get(i));
@@ -252,8 +259,10 @@ public class LLLocalKeyValueDatabase implements LLKeyValueDatabase {
 		return options;
 	}
 
-	private void createAllColumns(List<ColumnFamilyDescriptor> totalDescriptors, Options options,
-			String dbPathString) throws RocksDBException {
+	private void createAllColumns(List<ColumnFamilyDescriptor> totalDescriptors, Options options, boolean inMemory, String dbPathString) throws RocksDBException {
+		if (inMemory) {
+			return;
+		}
 		List<byte[]> columnFamiliesToCreate = new LinkedList<>();
 
 		for (ColumnFamilyDescriptor descriptor : totalDescriptors) {
@@ -293,8 +302,35 @@ public class LLLocalKeyValueDatabase implements LLKeyValueDatabase {
 		flushAndCloseDb(db, handles);
 	}
 
-	private void createIfNotExists(List<ColumnFamilyDescriptor> descriptors, Options options,
-			Path dbPath, String dbPathString) throws RocksDBException {
+	private void createInMemoryColumns(List<ColumnFamilyDescriptor> totalDescriptors,
+			boolean inMemory,
+			List<ColumnFamilyHandle> handles)
+			throws RocksDBException {
+		if (!inMemory) {
+			return;
+		}
+		List<byte[]> columnFamiliesToCreate = new LinkedList<>();
+
+		for (ColumnFamilyDescriptor descriptor : totalDescriptors) {
+			columnFamiliesToCreate.add(descriptor.getName());
+		}
+
+		for (byte[] name : columnFamiliesToCreate) {
+			if (!Arrays.equals(name, DEFAULT_COLUMN_FAMILY.getName())) {
+				var descriptor = new ColumnFamilyDescriptor(name);
+				handles.add(db.createColumnFamily(descriptor));
+			}
+		}
+	}
+
+	private void createIfNotExists(List<ColumnFamilyDescriptor> descriptors,
+			Options options,
+			boolean inMemory,
+			Path dbPath,
+			String dbPathString) throws RocksDBException {
+		if (inMemory) {
+			return;
+		}
 		if (Files.notExists(dbPath)) {
 			// Check if handles are all different
 			var descriptorsSet = new HashSet<>(descriptors);
@@ -318,7 +354,9 @@ public class LLLocalKeyValueDatabase implements LLKeyValueDatabase {
 				handles.add(db.createColumnFamily(columnFamilyDescriptor));
 			}
 
-			flushAndCloseDb(db, handles);
+			if (!inMemory) {
+				flushAndCloseDb(db, handles);
+			}
 		}
 	}
 
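Note: the in-memory path opens the database with only the default column family and then creates the remaining families on the live handle, since there is nothing on disk to list at open time. A sketch of that runtime-creation step using the stock RocksDB Java API (the real createInMemoryColumns above additionally skips the default family):

    import java.util.ArrayList;
    import java.util.List;
    import org.rocksdb.ColumnFamilyDescriptor;
    import org.rocksdb.ColumnFamilyHandle;
    import org.rocksdb.RocksDB;
    import org.rocksdb.RocksDBException;

    final class RuntimeColumnFamilies {
    	// Create all wanted column families on an already-open database.
    	static List<ColumnFamilyHandle> createAll(RocksDB db, List<ColumnFamilyDescriptor> wanted)
    			throws RocksDBException {
    		List<ColumnFamilyHandle> handles = new ArrayList<>();
    		for (ColumnFamilyDescriptor descriptor : wanted) {
    			handles.add(db.createColumnFamily(descriptor));
    		}
    		return handles;
    	}
    }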
LLLocalLuceneIndex.java

@@ -53,6 +53,9 @@ import org.apache.lucene.search.similarities.Similarity;
 import org.apache.lucene.search.similarities.TFIDFSimilarity;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.store.MMapDirectory;
+import org.apache.lucene.store.RAMDirectory;
+import org.apache.solr.core.RAMDirectoryFactory;
 import org.jetbrains.annotations.Nullable;
 import org.warp.commonutils.log.Logger;
 import org.warp.commonutils.log.LoggerFactory;
@@ -113,13 +116,12 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 			TextFieldsSimilarity similarity,
 			Duration queryRefreshDebounceTime,
 			Duration commitDebounceTime,
-			boolean lowMemory,
-			@Nullable LLSearchCollectionStatisticsGetter distributedCollectionStatisticsGetter) throws IOException {
+			boolean lowMemory, boolean inMemory, @Nullable LLSearchCollectionStatisticsGetter distributedCollectionStatisticsGetter) throws IOException {
 		if (name.length() == 0) {
 			throw new IOException("Empty lucene database name");
 		}
 		Path directoryPath = luceneBasePath.resolve(name + ".lucene.db");
-		this.directory = FSDirectory.open(directoryPath);
+		this.directory = inMemory ? new RAMDirectory() : FSDirectory.open(directoryPath);
 		this.luceneIndexName = name;
 		this.snapshotter = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
 		this.lowMemory = lowMemory;
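Note: RAMDirectory has been deprecated in Lucene for some time. If the Lucene version pinned in pom.xml is 8.x or later, ByteBuffersDirectory is the suggested heap-backed replacement for exactly this use case; a sketch of that alternative, not what the commit itself does:

    import java.io.IOException;
    import java.nio.file.Path;
    import org.apache.lucene.store.ByteBuffersDirectory;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.FSDirectory;

    final class DirectoryFactory {
    	// Same selection logic as the constructor above, with the non-deprecated
    	// in-memory implementation.
    	static Directory open(Path path, boolean inMemory) throws IOException {
    		return inMemory ? new ByteBuffersDirectory() : FSDirectory.open(path);
    	}
    }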
LLLocalMultiLuceneIndex.java

@@ -58,7 +58,7 @@ public class LLLocalMultiLuceneIndex implements LLLuceneIndex {
 			TextFieldsSimilarity textFieldsSimilarity,
 			Duration queryRefreshDebounceTime,
 			Duration commitDebounceTime,
-			boolean lowMemory) throws IOException {
+			boolean lowMemory, boolean inMemory) throws IOException {
 
 		if (instancesCount <= 1 || instancesCount > 100) {
 			throw new IOException("Unsupported instances count: " + instancesCount);
@@ -79,8 +79,7 @@ public class LLLocalMultiLuceneIndex implements LLLuceneIndex {
 					textFieldsSimilarity,
 					queryRefreshDebounceTime,
 					commitDebounceTime,
-					lowMemory,
-					(indexSearcher, field, distributedPre, actionId) -> distributedCustomCollectionStatistics(finalI,
+					lowMemory, inMemory, (indexSearcher, field, distributedPre, actionId) -> distributedCustomCollectionStatistics(finalI,
 							indexSearcher,
 							field,
 							distributedPre,
LLLocalReactiveRocksIterator.java

@@ -2,29 +2,34 @@ package it.cavallium.dbengine.database.disk;
 
 import static it.cavallium.dbengine.database.disk.LLLocalDictionary.getRocksIterator;
 
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufAllocator;
 import it.cavallium.dbengine.database.LLRange;
+import it.cavallium.dbengine.database.LLUtils;
 import org.rocksdb.ColumnFamilyHandle;
 import org.rocksdb.ReadOptions;
 import org.rocksdb.RocksDB;
 import org.rocksdb.RocksMutableObject;
 import reactor.core.publisher.Flux;
+import static io.netty.buffer.Unpooled.*;
 
 public abstract class LLLocalReactiveRocksIterator<T> {
 
-	private static final byte[] EMPTY = new byte[0];
 
 	private final RocksDB db;
+	private final ByteBufAllocator alloc;
 	private final ColumnFamilyHandle cfh;
 	private final LLRange range;
 	private final ReadOptions readOptions;
 	private final boolean readValues;
 
 	public LLLocalReactiveRocksIterator(RocksDB db,
+			ByteBufAllocator alloc,
 			ColumnFamilyHandle cfh,
 			LLRange range,
 			ReadOptions readOptions,
 			boolean readValues) {
 		this.db = db;
+		this.alloc = alloc;
 		this.cfh = cfh;
 		this.range = range;
 		this.readOptions = readOptions;
@@ -39,14 +44,22 @@ public abstract class LLLocalReactiveRocksIterator<T> {
 				readOptions.setReadaheadSize(2 * 1024 * 1024);
 				readOptions.setFillCache(false);
 			}
-			return getRocksIterator(readOptions, range, db, cfh);
+			return getRocksIterator(readOptions, range.retain(), db, cfh);
 		}, (tuple, sink) -> {
 			var rocksIterator = tuple.getT1();
 			if (rocksIterator.isValid()) {
-				byte[] key = rocksIterator.key();
-				byte[] value = readValues ? rocksIterator.value() : EMPTY;
-				rocksIterator.next();
-				sink.next(getEntry(key, value));
+				ByteBuf key = LLUtils.readDirectNioBuffer(alloc, rocksIterator::key);
+				try {
+					ByteBuf value = readValues ? LLUtils.readDirectNioBuffer(alloc, rocksIterator::value) : EMPTY_BUFFER;
+					try {
+						rocksIterator.next();
+						sink.next(getEntry(key.retain(), value.retain()));
+					} finally {
+						value.release();
+					}
+				} finally {
+					key.release();
+				}
 			} else {
 				sink.complete();
 			}
@@ -54,10 +67,10 @@ public abstract class LLLocalReactiveRocksIterator<T> {
 		}, tuple -> {
 			var rocksIterator = tuple.getT1();
 			rocksIterator.close();
-			tuple.getT2().ifPresent(RocksMutableObject::close);
-			tuple.getT3().ifPresent(RocksMutableObject::close);
+			tuple.getT2().release();
+			tuple.getT3().release();
 		});
 	}
 
-	public abstract T getEntry(byte[] key, byte[] value);
+	public abstract T getEntry(ByteBuf key, ByteBuf value);
 }
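Note: when readValues is false, the value slot is now filled with EMPTY_BUFFER (brought in by the wildcard static import of io.netty.buffer.Unpooled) instead of a shared empty byte array. EMPTY_BUFFER is a zero-capacity singleton whose reference-count bookkeeping is inert, so it can flow through the same retain()/release() discipline as real buffers without ever being deallocated:

    import io.netty.buffer.ByteBuf;
    import io.netty.buffer.Unpooled;

    public class EmptyBufferDemo {
    	public static void main(String[] args) {
    		ByteBuf empty = Unpooled.EMPTY_BUFFER;
    		empty.retain();  // no-op on the singleton
    		empty.release(); // also safe; the singleton is never freed
    		System.out.println(empty.readableBytes()); // 0
    	}
    }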
@ -3,13 +3,14 @@ package it.cavallium.dbengine.database.serialization;
 import io.netty.buffer.ByteBuf;
 import io.netty.buffer.ByteBufInputStream;
 import io.netty.buffer.ByteBufOutputStream;
+import io.netty.buffer.PooledByteBufAllocator;
 import io.netty.buffer.Unpooled;
 import java.io.IOError;
 import java.io.IOException;
 import org.jetbrains.annotations.NotNull;
 import org.warp.commonutils.error.IndexOutOfBoundsException;

-public class CodecSerializer<A> implements Serializer<A, byte[]> {
+public class CodecSerializer<A> implements Serializer<A, ByteBuf> {

 	private final Codecs<A> deserializationCodecs;
 	private final Codec<A> serializationCodec;
@ -34,9 +35,8 @@ public class CodecSerializer<A> implements Serializer<A, byte[]> {
 	}

 	@Override
-	public @NotNull A deserialize(byte @NotNull [] serialized) {
-		ByteBuf buf = Unpooled.wrappedBuffer(serialized);
-		try (var is = new ByteBufInputStream(buf)) {
+	public @NotNull A deserialize(@NotNull ByteBuf serialized) {
+		try (var is = new ByteBufInputStream(serialized)) {
 			int codecId;
 			if (microCodecs) {
 				codecId = is.readUnsignedByte();
@ -48,12 +48,14 @@ public class CodecSerializer<A> implements Serializer<A, byte[]> {
 		} catch (IOException ex) {
 			// This shouldn't happen
 			throw new IOError(ex);
+		} finally {
+			serialized.release();
 		}
 	}

 	@Override
-	public byte @NotNull [] serialize(@NotNull A deserialized) {
-		ByteBuf buf = Unpooled.buffer(256);
+	public @NotNull ByteBuf serialize(@NotNull A deserialized) {
+		ByteBuf buf = PooledByteBufAllocator.DEFAULT.directBuffer();
 		try (var os = new ByteBufOutputStream(buf)) {
 			if (microCodecs) {
 				os.writeByte(serializationCodecId);
@ -61,14 +63,11 @@ public class CodecSerializer<A> implements Serializer<A, byte[]> {
 				os.writeInt(serializationCodecId);
 			}
 			serializationCodec.serialize(os, deserialized);
-			os.flush();
-			var bytes = new byte[buf.readableBytes()];
-			buf.readBytes(bytes);
-			return bytes;
 		} catch (IOException ex) {
 			// This shouldn't happen
 			throw new IOError(ex);
 		}
+		return buf;
 	}

 	@SuppressWarnings("unused")
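CodecSerializer now writes through a ByteBufOutputStream and returns the pooled direct buffer itself, instead of flushing and copying into a byte[]. A small self-contained round trip in the same stream-based style; the codec-id header here mirrors the microCodecs branch above, but the payload format is only an illustration.

    import io.netty.buffer.ByteBuf;
    import io.netty.buffer.ByteBufInputStream;
    import io.netty.buffer.ByteBufOutputStream;
    import io.netty.buffer.PooledByteBufAllocator;
    import java.io.IOException;

    public final class ByteBufStreamRoundTrip {

        static ByteBuf serialize(int codecId, String payload) throws IOException {
            ByteBuf buf = PooledByteBufAllocator.DEFAULT.directBuffer();
            try (var os = new ByteBufOutputStream(buf)) {
                os.writeByte(codecId); // codec header, as in CodecSerializer
                os.writeUTF(payload);
            }
            return buf; // the caller takes ownership of the buffer
        }

        static String deserialize(ByteBuf serialized) throws IOException {
            try (var is = new ByteBufInputStream(serialized)) {
                int codecId = is.readUnsignedByte();
                return codecId + ":" + is.readUTF();
            } finally {
                serialized.release(); // deserialization consumes its input
            }
        }

        public static void main(String[] args) throws IOException {
            System.out.println(deserialize(serialize(1, "hello")));
        }
    }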
@ -1,5 +1,9 @@
 package it.cavallium.dbengine.database.serialization;

+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufUtil;
+import io.netty.buffer.PooledByteBufAllocator;
+import java.nio.charset.StandardCharsets;
 import org.jetbrains.annotations.NotNull;

 public interface Serializer<A, B> {
@ -8,19 +12,50 @@ public interface Serializer<A, B> {

 	@NotNull B serialize(@NotNull A deserialized);

-	Serializer<byte[], byte[]> NOOP_SERIALIZER = new Serializer<>() {
+	Serializer<ByteBuf, ByteBuf> NOOP_SERIALIZER = new Serializer<>() {
 		@Override
-		public byte @NotNull [] deserialize(byte @NotNull [] serialized) {
-			return serialized;
+		public @NotNull ByteBuf deserialize(@NotNull ByteBuf serialized) {
+			try {
+				return serialized.retainedSlice();
+			} finally {
+				serialized.release();
+			}
 		}

 		@Override
-		public byte @NotNull [] serialize(byte @NotNull [] deserialized) {
-			return deserialized;
+		public @NotNull ByteBuf serialize(@NotNull ByteBuf deserialized) {
+			try {
+				return deserialized.retainedSlice();
+			} finally {
+				deserialized.release();
+			}
 		}
 	};

-	static Serializer<byte[], byte[]> noop() {
+	Serializer<String, ByteBuf> UTF8_SERIALIZER = new Serializer<>() {
+		@Override
+		public @NotNull String deserialize(@NotNull ByteBuf serialized) {
+			try {
+				return serialized.toString(StandardCharsets.UTF_8);
+			} finally {
+				serialized.release();
+			}
+		}
+
+		@Override
+		public @NotNull ByteBuf serialize(@NotNull String deserialized) {
+			// UTF-8 uses max. 3 bytes per char, so calculate the worst case.
+			ByteBuf buf = PooledByteBufAllocator.DEFAULT.directBuffer(ByteBufUtil.utf8MaxBytes(deserialized));
+			ByteBufUtil.writeUtf8(buf, deserialized);
+			return buf;
+		}
+	};
+
+	static Serializer<ByteBuf, ByteBuf> noop() {
 		return NOOP_SERIALIZER;
 	}

+	static Serializer<String, ByteBuf> utf8() {
+		return UTF8_SERIALIZER;
+	}
 }
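UTF8_SERIALIZER sizes its buffer with ByteBufUtil.utf8MaxBytes, a worst-case bound (Netty accounts up to 3 bytes per char), and writeUtf8 then advances the writer index by the actual encoded length, so the buffer never reallocates mid-write. A quick standalone demonstration of the same two calls:

    import io.netty.buffer.ByteBuf;
    import io.netty.buffer.ByteBufUtil;
    import io.netty.buffer.PooledByteBufAllocator;
    import java.nio.charset.StandardCharsets;

    public final class Utf8SerializerDemo {
        public static void main(String[] args) {
            String text = "caffè"; // 5 chars, 6 UTF-8 bytes
            // Worst-case capacity so writeUtf8 never needs to grow the buffer.
            ByteBuf buf = PooledByteBufAllocator.DEFAULT.directBuffer(ByteBufUtil.utf8MaxBytes(text));
            try {
                int written = ByteBufUtil.writeUtf8(buf, text);
                System.out.println("max=" + ByteBufUtil.utf8MaxBytes(text) + " actual=" + written);
                // Decoding back, as the deserialize side does:
                System.out.println(buf.toString(StandardCharsets.UTF_8));
            } finally {
                buf.release();
            }
        }
    }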
@ -2,6 +2,12 @@ package it.cavallium.dbengine.database.serialization;

 import com.google.common.primitives.Ints;
 import com.google.common.primitives.Longs;
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufUtil;
+import io.netty.buffer.PooledByteBufAllocator;
+import java.io.NotSerializableException;
+import java.nio.charset.StandardCharsets;
+import org.apache.commons.lang3.SerializationException;
 import org.jetbrains.annotations.NotNull;

 @SuppressWarnings("unused")
@ -9,18 +15,31 @@ public interface SerializerFixedBinaryLength<A, B> extends Serializer<A, B> {

 	int getSerializedBinaryLength();

-	static SerializerFixedBinaryLength<byte[], byte[]> noop(int length) {
+	static SerializerFixedBinaryLength<ByteBuf, ByteBuf> noop(int length) {
 		return new SerializerFixedBinaryLength<>() {
 			@Override
-			public byte @NotNull [] deserialize(byte @NotNull [] serialized) {
-				assert serialized.length == getSerializedBinaryLength();
-				return serialized;
+			public @NotNull ByteBuf deserialize(@NotNull ByteBuf serialized) {
+				try {
+					if (serialized.readableBytes() != getSerializedBinaryLength()) {
+						throw new IllegalArgumentException(
+								"Fixed serializer with " + getSerializedBinaryLength() + " bytes has tried to deserialize an element with "
+										+ serialized.readableBytes() + " bytes instead");
+					}
+					return serialized.retain();
+				} finally {
+					serialized.release();
+				}
 			}

 			@Override
-			public byte @NotNull [] serialize(byte @NotNull [] deserialized) {
-				assert deserialized.length == getSerializedBinaryLength();
-				return deserialized;
+			public @NotNull ByteBuf serialize(@NotNull ByteBuf deserialized) {
+				ByteBuf buf = deserialized.retain();
+				if (buf.readableBytes() != getSerializedBinaryLength()) {
+					throw new IllegalArgumentException(
+							"Fixed serializer with " + getSerializedBinaryLength() + " bytes has tried to serialize an element with "
+									+ buf.readableBytes() + " bytes instead");
+				}
+				return buf;
 			}

 			@Override
@ -30,17 +49,65 @@ public interface SerializerFixedBinaryLength<A, B> extends Serializer<A, B> {
 		};
 	}

-	static SerializerFixedBinaryLength<Integer, byte[]> intSerializer() {
+	static SerializerFixedBinaryLength<String, ByteBuf> utf8(int length) {
 		return new SerializerFixedBinaryLength<>() {
 			@Override
-			public @NotNull Integer deserialize(byte @NotNull [] serialized) {
-				assert serialized.length == getSerializedBinaryLength();
-				return Ints.fromByteArray(serialized);
+			public @NotNull String deserialize(@NotNull ByteBuf serialized) {
+				try {
+					if (serialized.readableBytes() != getSerializedBinaryLength()) {
+						throw new SerializationException(
+								"Fixed serializer with " + getSerializedBinaryLength() + " bytes has tried to deserialize an element with "
+										+ serialized.readableBytes() + " bytes instead");
+					}
+					return serialized.toString(StandardCharsets.UTF_8);
+				} finally {
+					serialized.release();
+				}
 			}

 			@Override
-			public byte @NotNull [] serialize(@NotNull Integer deserialized) {
-				return Ints.toByteArray(deserialized);
+			public @NotNull ByteBuf serialize(@NotNull String deserialized) {
+				// UTF-8 uses max. 3 bytes per char, so calculate the worst case.
+				ByteBuf buf = PooledByteBufAllocator.DEFAULT.directBuffer(ByteBufUtil.utf8MaxBytes(deserialized));
+				try {
+					ByteBufUtil.writeUtf8(buf, deserialized);
+					if (buf.readableBytes() != getSerializedBinaryLength()) {
+						throw new SerializationException("Fixed serializer with " + getSerializedBinaryLength() + " bytes has tried to serialize an element with "
+								+ buf.readableBytes() + " bytes instead");
+					}
+					return buf.retain();
+				} finally {
+					buf.release();
+				}
+			}
+
+			@Override
+			public int getSerializedBinaryLength() {
+				return length;
+			}
+		};
+	}
+
+	static SerializerFixedBinaryLength<Integer, ByteBuf> intSerializer() {
+		return new SerializerFixedBinaryLength<>() {
+			@Override
+			public @NotNull Integer deserialize(@NotNull ByteBuf serialized) {
+				try {
+					if (serialized.readableBytes() != getSerializedBinaryLength()) {
+						throw new IllegalArgumentException(
+								"Fixed serializer with " + getSerializedBinaryLength() + " bytes has tried to deserialize an element with "
+										+ serialized.readableBytes() + " bytes instead");
+					}
+					return serialized.readInt();
+				} finally {
+					serialized.release();
+				}
+			}
+
+			@Override
+			public @NotNull ByteBuf serialize(@NotNull Integer deserialized) {
+				ByteBuf buf = PooledByteBufAllocator.DEFAULT.directBuffer(Integer.BYTES, Integer.BYTES);
+				return buf.writeInt(deserialized);
 			}

 			@Override
@ -50,17 +117,26 @@ public interface SerializerFixedBinaryLength<A, B> extends Serializer<A, B> {
 		};
 	}

-	static SerializerFixedBinaryLength<Long, byte[]> longSerializer() {
+	static SerializerFixedBinaryLength<Long, ByteBuf> longSerializer() {
 		return new SerializerFixedBinaryLength<>() {
 			@Override
-			public @NotNull Long deserialize(byte @NotNull [] serialized) {
-				assert serialized.length == getSerializedBinaryLength();
-				return Longs.fromByteArray(serialized);
+			public @NotNull Long deserialize(@NotNull ByteBuf serialized) {
+				try {
+					if (serialized.readableBytes() != getSerializedBinaryLength()) {
+						throw new IllegalArgumentException(
+								"Fixed serializer with " + getSerializedBinaryLength() + " bytes has tried to deserialize an element with "
+										+ serialized.readableBytes() + " bytes instead");
+					}
+					return serialized.readLong();
+				} finally {
+					serialized.release();
+				}
 			}

 			@Override
-			public byte @NotNull [] serialize(@NotNull Long deserialized) {
-				return Longs.toByteArray(deserialized);
+			public @NotNull ByteBuf serialize(@NotNull Long deserialized) {
+				ByteBuf buf = PooledByteBufAllocator.DEFAULT.directBuffer(Long.BYTES, Long.BYTES);
+				return buf.writeLong(deserialized);
 			}

 			@Override
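The fixed-length serializers now validate readableBytes() and throw instead of relying on an assert, so a wrong-sized key fails loudly rather than only under -ea. A round trip through the int serializer's shape (a standalone sketch with free functions, not the project's interface):

    import io.netty.buffer.ByteBuf;
    import io.netty.buffer.PooledByteBufAllocator;

    public final class FixedIntSerializerDemo {

        static ByteBuf serializeInt(int value) {
            // Fixed capacity: exactly Integer.BYTES, as in intSerializer().
            ByteBuf buf = PooledByteBufAllocator.DEFAULT.directBuffer(Integer.BYTES, Integer.BYTES);
            return buf.writeInt(value);
        }

        static int deserializeInt(ByteBuf serialized) {
            try {
                if (serialized.readableBytes() != Integer.BYTES) {
                    throw new IllegalArgumentException(
                            "Expected " + Integer.BYTES + " bytes, got " + serialized.readableBytes());
                }
                return serialized.readInt();
            } finally {
                serialized.release(); // deserialization consumes the buffer
            }
        }

        public static void main(String[] args) {
            System.out.println(deserializeInt(serializeInt(123456)));
        }
    }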
@ -1,6 +1,11 @@
-package it.cavallium.dbengine.database.disk;
+package org.rocksdb;

+import io.netty.buffer.ByteBuf;
+import it.cavallium.dbengine.database.LLUtils;
 import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+import org.rocksdb.AbstractWriteBatch;
 import org.rocksdb.ColumnFamilyHandle;
 import org.rocksdb.RocksDB;
 import org.rocksdb.RocksDBException;
@ -10,13 +15,13 @@ import org.rocksdb.WriteOptions;
 import org.warp.commonutils.concurrency.atomicity.NotAtomic;

 @NotAtomic
-public class CappedWriteBatch implements WriteBatchInterface, AutoCloseable {
+public class CappedWriteBatch extends WriteBatch {

 	private final RocksDB db;
 	private final int cap;
 	private final WriteOptions writeOptions;

-	private final WriteBatch writeBatch;
+	private final List<ByteBuf> buffersToRelease;

 	/**
 	 * @param cap The limit of operations
@ -26,158 +31,224 @@ public class CappedWriteBatch implements WriteBatchInterface, AutoCloseable {
 			int reservedWriteBatchSize,
 			long maxWriteBatchSize,
 			WriteOptions writeOptions) {
+		super(reservedWriteBatchSize);
 		this.db = db;
 		this.cap = cap;
 		this.writeOptions = writeOptions;
-		this.writeBatch = new WriteBatch(reservedWriteBatchSize);
-		this.writeBatch.setMaxBytes(maxWriteBatchSize);
+		this.setMaxBytes(maxWriteBatchSize);
+		this.buffersToRelease = new ArrayList<>();
 	}

 	private synchronized void flushIfNeeded(boolean force) throws RocksDBException {
-		if (this.writeBatch.count() >= (force ? 1 : cap)) {
-			db.write(writeOptions, this.writeBatch);
-			this.writeBatch.clear();
+		if (this.count() >= (force ? 1 : cap)) {
+			db.write(writeOptions, this);
+			this.clear();
+			releaseAllBuffers();
 		}
 	}

+	private synchronized void releaseAllBuffers() {
+		for (ByteBuf byteBuffer : buffersToRelease) {
+			byteBuffer.release();
+		}
+		buffersToRelease.clear();
+	}
+
 	@Override
 	public synchronized int count() {
-		return writeBatch.count();
+		return super.count();
 	}

 	@Override
 	public synchronized void put(byte[] key, byte[] value) throws RocksDBException {
-		writeBatch.put(key, value);
+		super.put(key, value);
 		flushIfNeeded(false);
 	}

 	@Override
 	public synchronized void put(ColumnFamilyHandle columnFamilyHandle, byte[] key, byte[] value) throws RocksDBException {
-		writeBatch.put(columnFamilyHandle, key, value);
+		super.put(columnFamilyHandle, key, value);
 		flushIfNeeded(false);
 	}

 	@Override
 	public synchronized void put(ByteBuffer key, ByteBuffer value) throws RocksDBException {
-		writeBatch.put(key, value);
+		super.put(key, value);
 		flushIfNeeded(false);
 	}

 	@Override
 	public synchronized void put(ColumnFamilyHandle columnFamilyHandle, ByteBuffer key, ByteBuffer value) throws RocksDBException {
-		writeBatch.put(columnFamilyHandle, key, value);
+		super.put(columnFamilyHandle, key, value);
+		flushIfNeeded(false);
+	}
+
+	public synchronized void put(ColumnFamilyHandle columnFamilyHandle, ByteBuf key, ByteBuf value) throws RocksDBException {
+		buffersToRelease.add(key);
+		buffersToRelease.add(value);
+		ByteBuf keyDirectBuf = key.retain();
+		ByteBuffer keyNioBuffer = LLUtils.toDirectFast(keyDirectBuf.retain());
+		if (keyNioBuffer == null) {
+			keyDirectBuf.release();
+			keyDirectBuf = LLUtils.toDirectCopy(key.retain());
+			keyNioBuffer = keyDirectBuf.nioBuffer();
+		}
+		try {
+			assert keyNioBuffer.isDirect();
+
+			ByteBuf valueDirectBuf = value.retain();
+			ByteBuffer valueNioBuffer = LLUtils.toDirectFast(valueDirectBuf.retain());
+			if (valueNioBuffer == null) {
+				valueDirectBuf.release();
+				valueDirectBuf = LLUtils.toDirectCopy(value.retain());
+				valueNioBuffer = valueDirectBuf.nioBuffer();
+			}
+			try {
+				assert valueNioBuffer.isDirect();
+				super.put(columnFamilyHandle, keyNioBuffer, valueNioBuffer);
+			} finally {
+				buffersToRelease.add(valueDirectBuf);
+			}
+		} finally {
+			buffersToRelease.add(keyDirectBuf);
+		}
 		flushIfNeeded(false);
 	}

 	@Override
 	public synchronized void merge(byte[] key, byte[] value) throws RocksDBException {
-		writeBatch.merge(key, value);
+		super.merge(key, value);
 		flushIfNeeded(false);
 	}

 	@Override
 	public synchronized void merge(ColumnFamilyHandle columnFamilyHandle, byte[] key, byte[] value) throws RocksDBException {
-		writeBatch.merge(columnFamilyHandle, key, value);
+		super.merge(columnFamilyHandle, key, value);
 		flushIfNeeded(false);
 	}

 	@Deprecated
 	@Override
 	public synchronized void remove(byte[] key) throws RocksDBException {
-		writeBatch.remove(key);
+		super.remove(key);
 		flushIfNeeded(false);
 	}

 	@Deprecated
 	@Override
 	public synchronized void remove(ColumnFamilyHandle columnFamilyHandle, byte[] key) throws RocksDBException {
-		writeBatch.remove(columnFamilyHandle, key);
+		super.remove(columnFamilyHandle, key);
 		flushIfNeeded(false);
 	}

 	@Override
 	public synchronized void delete(byte[] key) throws RocksDBException {
-		writeBatch.delete(key);
+		super.delete(key);
 		flushIfNeeded(false);
 	}

 	@Override
 	public synchronized void delete(ColumnFamilyHandle columnFamilyHandle, byte[] key) throws RocksDBException {
-		writeBatch.delete(columnFamilyHandle, key);
+		super.delete(columnFamilyHandle, key);
+		flushIfNeeded(false);
+	}
+
+	public synchronized void delete(ColumnFamilyHandle columnFamilyHandle, ByteBuf key) throws RocksDBException {
+		buffersToRelease.add(key);
+		ByteBuf keyDirectBuf = key.retain();
+		ByteBuffer keyNioBuffer = LLUtils.toDirectFast(keyDirectBuf.retain());
+		if (keyNioBuffer == null) {
+			keyDirectBuf.release();
+			keyDirectBuf = LLUtils.toDirectCopy(key.retain());
+			keyNioBuffer = keyDirectBuf.nioBuffer();
+		}
+		try {
+			assert keyNioBuffer.isDirect();
+			removeDirect(nativeHandle_,
+					keyNioBuffer,
+					keyNioBuffer.position(),
+					keyNioBuffer.remaining(),
+					columnFamilyHandle.nativeHandle_
+			);
+			keyNioBuffer.position(keyNioBuffer.limit());
+		} finally {
+			buffersToRelease.add(keyDirectBuf);
+		}
 		flushIfNeeded(false);
 	}

 	@Override
 	public synchronized void singleDelete(byte[] key) throws RocksDBException {
-		writeBatch.singleDelete(key);
+		super.singleDelete(key);
 		flushIfNeeded(false);
 	}

 	@Override
 	public synchronized void singleDelete(ColumnFamilyHandle columnFamilyHandle, byte[] key) throws RocksDBException {
-		writeBatch.singleDelete(columnFamilyHandle, key);
+		super.singleDelete(columnFamilyHandle, key);
 		flushIfNeeded(false);
 	}

 	@Override
 	public synchronized void remove(ByteBuffer key) throws RocksDBException {
-		writeBatch.remove(key);
+		super.remove(key);
 		flushIfNeeded(false);
 	}

 	@Override
 	public synchronized void remove(ColumnFamilyHandle columnFamilyHandle, ByteBuffer key) throws RocksDBException {
-		writeBatch.remove(columnFamilyHandle, key);
+		super.remove(columnFamilyHandle, key);
 		flushIfNeeded(false);
 	}

 	@Override
 	public synchronized void deleteRange(byte[] beginKey, byte[] endKey) throws RocksDBException {
-		writeBatch.deleteRange(beginKey, endKey);
+		super.deleteRange(beginKey, endKey);
 		flushIfNeeded(false);
 	}

 	@Override
 	public synchronized void deleteRange(ColumnFamilyHandle columnFamilyHandle, byte[] beginKey, byte[] endKey)
 			throws RocksDBException {
-		writeBatch.deleteRange(columnFamilyHandle, beginKey, endKey);
+		super.deleteRange(columnFamilyHandle, beginKey, endKey);
 		flushIfNeeded(false);
 	}

 	@Override
 	public synchronized void putLogData(byte[] blob) throws RocksDBException {
-		writeBatch.putLogData(blob);
+		super.putLogData(blob);
 		flushIfNeeded(false);
 	}

 	@Override
 	public synchronized void clear() {
-		writeBatch.clear();
+		super.clear();
+		releaseAllBuffers();
 	}

 	@Override
 	public synchronized void setSavePoint() {
-		writeBatch.setSavePoint();
+		super.setSavePoint();
 	}

 	@Override
 	public synchronized void rollbackToSavePoint() throws RocksDBException {
-		writeBatch.rollbackToSavePoint();
+		super.rollbackToSavePoint();
 	}

 	@Override
 	public synchronized void popSavePoint() throws RocksDBException {
-		writeBatch.popSavePoint();
+		super.popSavePoint();
 	}

 	@Override
 	public synchronized void setMaxBytes(long maxBytes) {
-		writeBatch.setMaxBytes(maxBytes);
+		super.setMaxBytes(maxBytes);
 	}

 	@Override
 	public synchronized WriteBatch getWriteBatch() {
-		return writeBatch;
+		return this;
 	}

 	public synchronized void writeToDbAndClose() throws RocksDBException {
@ -186,6 +257,7 @@ public class CappedWriteBatch implements WriteBatchInterface, AutoCloseable {

 	@Override
 	public synchronized void close() {
-		writeBatch.close();
+		super.close();
+		releaseAllBuffers();
 	}
 }
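CappedWriteBatch leans on two project helpers, LLUtils.toDirectFast and LLUtils.toDirectCopy, to hand RocksDB's JNI layer a direct NIO buffer: try a zero-copy view first, fall back to a copy if the ByteBuf is not directly addressable. Their implementation is outside this diff; the sketch below is an assumption about the fallback logic they appear to encode, matching the call sites above where each helper consumes the reference it is passed.

    import io.netty.buffer.ByteBuf;
    import io.netty.buffer.PooledByteBufAllocator;
    import java.nio.ByteBuffer;
    import org.jetbrains.annotations.Nullable;

    public final class DirectBufferSketch {

        // Returns a direct ByteBuffer view if one can be had without copying, else null.
        // Consumes the reference it was given; the caller must keep its own reference
        // alive for as long as it uses the returned view.
        @Nullable
        static ByteBuffer toDirectFast(ByteBuf buf) {
            try {
                if (buf.isDirect() && buf.nioBufferCount() == 1) {
                    return buf.nioBuffer();
                }
                return null; // caller must fall back to toDirectCopy
            } finally {
                buf.release();
            }
        }

        // Copies the readable bytes into a freshly allocated direct buffer.
        // Also consumes the reference it was given.
        static ByteBuf toDirectCopy(ByteBuf buf) {
            try {
                ByteBuf direct = PooledByteBufAllocator.DEFAULT.directBuffer(buf.readableBytes());
                direct.writeBytes(buf, buf.readerIndex(), buf.readableBytes());
                return direct;
            } finally {
                buf.release();
            }
        }
    }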
src/test/java/it/cavallium/dbengine/client/DbTestUtils.java (new file, 106 lines)
@ -0,0 +1,106 @@
+package it.cavallium.dbengine.client;
+
+import it.cavallium.dbengine.database.Column;
+import it.cavallium.dbengine.database.LLDictionary;
+import it.cavallium.dbengine.database.LLKeyValueDatabase;
+import it.cavallium.dbengine.database.UpdateMode;
+import it.cavallium.dbengine.database.collections.DatabaseMapDictionary;
+import it.cavallium.dbengine.database.collections.DatabaseMapDictionaryDeep;
+import it.cavallium.dbengine.database.collections.DatabaseMapDictionaryHashed;
+import it.cavallium.dbengine.database.collections.SubStageGetterHashMap;
+import it.cavallium.dbengine.database.collections.SubStageGetterMap;
+import it.cavallium.dbengine.database.disk.LLLocalDatabaseConnection;
+import it.cavallium.dbengine.database.serialization.Serializer;
+import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Comparator;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.CompletionException;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.function.Function;
+import org.reactivestreams.Publisher;
+import reactor.core.publisher.Flux;
+import reactor.core.publisher.Mono;
+import reactor.core.scheduler.Schedulers;
+
+public class DbTestUtils {
+
+	public static final AtomicInteger dbId = new AtomicInteger(0);
+
+	public static <U> Flux<U> tempDb(Function<LLKeyValueDatabase, Publisher<U>> action) {
+		var wrkspcPath = Path.of("/tmp/.cache/tempdb-" + dbId.incrementAndGet() + "/");
+		return Flux.usingWhen(Mono
+						.<LLKeyValueDatabase>fromCallable(() -> {
+							if (Files.exists(wrkspcPath)) {
+								Files.walk(wrkspcPath).sorted(Comparator.reverseOrder()).forEach(file -> {
+									try {
+										Files.delete(file);
+									} catch (IOException ex) {
+										throw new CompletionException(ex);
+									}
+								});
+							}
+							Files.createDirectories(wrkspcPath);
+							return null;
+						})
+						.subscribeOn(Schedulers.boundedElastic())
+						.then(new LLLocalDatabaseConnection(wrkspcPath, true).connect())
+						.flatMap(conn -> conn.getDatabase("testdb",
+								List.of(Column.dictionary("testmap"), Column.special("ints"), Column.special("longs")),
+								false, true
+						)),
+				action,
+				db -> db.close().then(Mono.fromCallable(() -> {
+					if (Files.exists(wrkspcPath)) {
+						Files.walk(wrkspcPath).sorted(Comparator.reverseOrder()).forEach(file -> {
+							try {
+								Files.delete(file);
+							} catch (IOException ex) {
+								throw new CompletionException(ex);
+							}
+						});
+					}
+					return null;
+				}).subscribeOn(Schedulers.boundedElastic()))
+		);
+	}
+
+	public static Mono<? extends LLDictionary> tempDictionary(LLKeyValueDatabase database, UpdateMode updateMode) {
+		return tempDictionary(database, "testmap", updateMode);
+	}
+
+	public static Mono<? extends LLDictionary> tempDictionary(LLKeyValueDatabase database,
+			String name,
+			UpdateMode updateMode) {
+		return database.getDictionary(name, updateMode);
+	}
+
+	public static DatabaseMapDictionary<String, String> tempDatabaseMapDictionaryMap(
+			LLDictionary dictionary,
+			int keyBytes) {
+		return DatabaseMapDictionary.simple(dictionary, SerializerFixedBinaryLength.utf8(keyBytes), Serializer.utf8());
+	}
+
+	public static <T, U> DatabaseMapDictionaryDeep<String, Map<String, String>, DatabaseMapDictionary<String, String>> tempDatabaseMapDictionaryDeepMap(
+			LLDictionary dictionary,
+			int key1Bytes,
+			int key2Bytes) {
+		return DatabaseMapDictionaryDeep.deepTail(dictionary,
+				SerializerFixedBinaryLength.utf8(key1Bytes),
+				key2Bytes,
+				new SubStageGetterMap<>(SerializerFixedBinaryLength.utf8(key2Bytes), Serializer.UTF8_SERIALIZER)
+		);
+	}
+
+	public static <T, U> DatabaseMapDictionaryHashed<String, String, Integer> tempDatabaseMapDictionaryHashMap(
+			LLDictionary dictionary) {
+		return DatabaseMapDictionaryHashed.simple(dictionary,
+				Serializer.utf8(),
+				Serializer.utf8(),
+				String::hashCode,
+				SerializerFixedBinaryLength.intSerializer()
+		);
+	}
+}
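DbTestUtils.tempDb wraps database creation and cleanup in Flux.usingWhen, so each test gets a fresh RocksDB under /tmp (the dbId counter keeps workspaces distinct) and the directory is deleted even when the test pipeline errors. A typical call from a test body would look like this; an assumed usage that mirrors the tests later in this commit:

    import static it.cavallium.dbengine.client.DbTestUtils.tempDb;
    import static it.cavallium.dbengine.client.DbTestUtils.tempDictionary;

    import it.cavallium.dbengine.database.UpdateMode;
    import reactor.test.StepVerifier;

    public class TempDbUsageExample {

        public void exampleUsage() {
            StepVerifier
                    .create(tempDb(db -> tempDictionary(db, UpdateMode.DISALLOW)
                            // ... exercise the dictionary here ...
                            .then()))
                    .verifyComplete(); // cleanup runs regardless of the outcome
        }
    }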
@ -2,15 +2,19 @@ package it.cavallium.dbengine.client;

 import static it.cavallium.dbengine.client.CompositeDatabasePartLocation.CompositeDatabasePartType.KV_DATABASE;

+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.Unpooled;
 import it.cavallium.dbengine.database.Column;
 import it.cavallium.dbengine.database.LLKeyValueDatabase;
 import it.cavallium.dbengine.database.UpdateMode;
+import it.cavallium.dbengine.database.collections.DatabaseMapDictionary;
 import it.cavallium.dbengine.database.collections.DatabaseMapDictionaryDeep;
 import it.cavallium.dbengine.database.collections.SubStageGetterMap;
 import it.cavallium.dbengine.database.disk.LLLocalDatabaseConnection;
 import it.cavallium.dbengine.database.serialization.Serializer;
 import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
 import java.io.IOException;
+import java.nio.ByteBuffer;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
@ -20,6 +24,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.concurrent.CompletionException;
+import java.util.concurrent.atomic.AtomicInteger;
 import java.util.stream.Collectors;
 import org.jetbrains.annotations.NotNull;
 import org.junit.jupiter.api.Test;
@ -29,7 +34,61 @@ import reactor.core.scheduler.Schedulers;
 import reactor.test.StepVerifier;
 import reactor.util.function.Tuples;

-public class Database {
+public class OldDatabaseTests {
+
+	@Test
+	public void testDatabaseAddKeysAndCheckSize() {
+		LinkedHashSet<String> originalKeys = new LinkedHashSet<>(List.of("K1a", "K1b", "K1c"));
+
+		StepVerifier
+				.create(
+						tempDb()
+								.flatMap(db -> db
+										.getDictionary("testmap", UpdateMode.DISALLOW)
+										.map(dictionary -> DatabaseMapDictionary.simple(dictionary,
+												new FixedStringSerializer(3),
+												Serializer.noop()
+										))
+										.flatMap(collection -> Flux
+												.fromIterable(originalKeys)
+												.flatMap(k1 -> collection.putValue(k1, DUMMY_VALUE))
+												.then(collection.leavesCount(null, false))
+										)
+								)
+				)
+				.expectNext((long) originalKeys.size())
+				.verifyComplete();
+	}
+
+	@Test
+	public void testDeepDatabaseAddKeysAndCheckSize() {
+		LinkedHashSet<String> originalSuperKeys = new LinkedHashSet<>(List.of("K1a", "K1b", "K1c"));
+		LinkedHashSet<String> originalSubKeys = new LinkedHashSet<>(List.of("K2aa", "K2bb", "K2cc"));
+
+		StepVerifier
+				.create(
+						tempDb()
+								.flatMap(db -> db
+										.getDictionary("testmap", UpdateMode.DISALLOW)
+										.map(dictionary -> DatabaseMapDictionaryDeep.deepTail(dictionary,
+												new FixedStringSerializer(3),
+												4,
+												new SubStageGetterMap<>(new FixedStringSerializer(4), Serializer.noop())
+										))
+										.flatMap(collection -> Flux
+												.fromIterable(originalSuperKeys)
+												.flatMap(k1 -> collection.at(null, k1))
+												.flatMap(k1at -> Flux
+														.fromIterable(originalSubKeys)
+														.flatMap(k2 -> k1at.putValue(k2, DUMMY_VALUE))
+												)
+												.then(collection.leavesCount(null, false))
+										)
+								)
+				)
+				.expectNext((long) originalSuperKeys.size() * originalSubKeys.size())
+				.verifyComplete();
+	}

 	@Test
 	public void testDeepDatabaseAddKeysAndConvertToLongerOnes() {
@ -53,7 +112,7 @@ public class Database {
 	}

 	public static <U> Mono<? extends LLKeyValueDatabase> tempDb() {
-		var wrkspcPath = Path.of("/tmp/.cache/tempdb/");
+		var wrkspcPath = Path.of("/tmp/.cache/tempdb-" + DbTestUtils.dbId.incrementAndGet() + "/");
 		return Mono
 				.fromCallable(() -> {
 					if (Files.exists(wrkspcPath)) {
@ -72,10 +131,16 @@ public class Database {
 				})
 				.subscribeOn(Schedulers.boundedElastic())
 				.then(new LLLocalDatabaseConnection(wrkspcPath, true).connect())
-				.flatMap(conn -> conn.getDatabase("testdb", List.of(Column.dictionary("testmap")), false));
+				.flatMap(conn -> conn.getDatabase("testdb", List.of(Column.dictionary("testmap")), false, true));
 	}

-	private static final byte[] DUMMY_VALUE = new byte[] {0x01, 0x03};
+	private static final ByteBuf DUMMY_VALUE;
+	static {
+		ByteBuf buf = Unpooled.directBuffer(2, 2);
+		buf.writeByte(0x01);
+		buf.writeByte(0x03);
+		DUMMY_VALUE = buf;
+	}

 	private Flux<Entry<String, String>> addKeysAndConvertToLongerOnes(LLKeyValueDatabase db,
 			LinkedHashSet<String> originalSuperKeys,
@ -157,7 +222,7 @@ public class Database {
 		);
 	}

-	private static class FixedStringSerializer implements SerializerFixedBinaryLength<String, byte[]> {
+	private static class FixedStringSerializer implements SerializerFixedBinaryLength<String, ByteBuf> {

 		private final int size;

@ -171,13 +236,21 @@ public class Database {
 		}

 		@Override
-		public @NotNull String deserialize(byte @NotNull [] serialized) {
-			return new String(serialized, StandardCharsets.US_ASCII);
+		public @NotNull String deserialize(ByteBuf serialized) {
+			try {
+				return serialized.toString(StandardCharsets.US_ASCII);
+			} finally {
+				serialized.release();
+			}
 		}

 		@Override
-		public byte @NotNull [] serialize(@NotNull String deserialized) {
-			return deserialized.getBytes(StandardCharsets.US_ASCII);
+		public ByteBuf serialize(@NotNull String deserialized) {
+			var serialized = deserialized.getBytes(StandardCharsets.US_ASCII);
+			var serializedBuf = Unpooled.directBuffer(serialized.length, serialized.length);
+			serializedBuf.writeBytes(serialized);
+			assert serializedBuf.isDirect();
+			return serializedBuf;
 		}
 	}
 }
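One consequence of DUMMY_VALUE becoming a ByteBuf: it is now a single reference-counted object shared across test iterations, so any consumer that releases what it is given must be handed a retained duplicate rather than the constant itself. A minimal illustration of the safe way to share such a constant (a sketch, not code from this commit):

    import io.netty.buffer.ByteBuf;
    import io.netty.buffer.Unpooled;

    public final class SharedConstantBufferDemo {

        private static final ByteBuf DUMMY;
        static {
            ByteBuf buf = Unpooled.directBuffer(2, 2);
            buf.writeByte(0x01);
            buf.writeByte(0x03);
            DUMMY = buf;
        }

        // Each caller gets its own reference with independent reader/writer
        // indexes, leaving the shared constant untouched.
        static ByteBuf dummyValue() {
            return DUMMY.retainedDuplicate();
        }

        public static void main(String[] args) {
            ByteBuf a = dummyValue();
            ByteBuf b = dummyValue();
            System.out.println(a.readByte() + " " + b.readByte()); // 1 1
            a.release();
            b.release();
            // DUMMY itself still holds its original reference.
        }
    }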
@ -0,0 +1,622 @@
|
|||||||
|
package it.cavallium.dbengine.client;
|
||||||
|
|
||||||
|
import static it.cavallium.dbengine.client.DbTestUtils.*;
|
||||||
|
|
||||||
|
import it.cavallium.dbengine.database.LLDictionary;
|
||||||
|
import it.cavallium.dbengine.database.UpdateMode;
|
||||||
|
import java.util.Arrays;
|
||||||
|
import java.util.HashMap;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.Map.Entry;
|
||||||
|
import java.util.Objects;
|
||||||
|
import java.util.Set;
|
||||||
|
import java.util.concurrent.ConcurrentHashMap;
|
||||||
|
import java.util.concurrent.ConcurrentSkipListSet;
|
||||||
|
import java.util.stream.Collectors;
|
||||||
|
import java.util.stream.Stream;
|
||||||
|
import org.junit.jupiter.params.ParameterizedTest;
|
||||||
|
import org.junit.jupiter.params.provider.Arguments;
|
||||||
|
import org.junit.jupiter.params.provider.MethodSource;
|
||||||
|
import reactor.core.publisher.Flux;
|
||||||
|
import reactor.core.publisher.Mono;
|
||||||
|
import reactor.test.StepVerifier;
|
||||||
|
import reactor.test.StepVerifier.FirstStep;
|
||||||
|
import reactor.test.StepVerifier.Step;
|
||||||
|
import reactor.util.function.Tuple2;
|
||||||
|
import reactor.util.function.Tuple3;
|
||||||
|
import reactor.util.function.Tuple4;
|
||||||
|
import reactor.util.function.Tuples;
|
||||||
|
|
||||||
|
public class TestDictionaryMap {
|
||||||
|
|
||||||
|
private static Stream<Arguments> provideArgumentsCreate() {
|
||||||
|
return Arrays.stream(UpdateMode.values()).map(Arguments::of);
|
||||||
|
}
|
||||||
|
|
||||||
|
@ParameterizedTest
|
||||||
|
@MethodSource("provideArgumentsCreate")
|
||||||
|
public void testCreate(UpdateMode updateMode) {
|
||||||
|
StepVerifier
|
||||||
|
.create(tempDb(db -> tempDictionary(db, updateMode)
|
||||||
|
.flatMap(LLDictionary::clear)
|
||||||
|
.then()
|
||||||
|
))
|
||||||
|
.verifyComplete();
|
||||||
|
}
|
||||||
|
|
||||||
|
private static Stream<Arguments> provideArgumentsPut() {
|
||||||
|
var goodKeys = Set.of("12345", "zebra");
|
||||||
|
var badKeys = Set.of("", "a", "aaaa", "aaaaaa");
|
||||||
|
Set<Tuple2<String, Boolean>> keys = Stream.concat(
|
||||||
|
goodKeys.stream().map(s -> Tuples.of(s, false)),
|
||||||
|
badKeys.stream().map(s -> Tuples.of(s, true))
|
||||||
|
).collect(Collectors.toSet());
|
||||||
|
var values = Set.of("a", "", "\0", "\0\0", "z", "azzszgzczqz", "bzzazazqzeztzgzzhz!");
|
||||||
|
|
||||||
|
return keys
|
||||||
|
.stream()
|
||||||
|
.flatMap(keyTuple -> {
|
||||||
|
Stream<String> strm;
|
||||||
|
if (keyTuple.getT2()) {
|
||||||
|
strm = values.stream().limit(1);
|
||||||
|
} else {
|
||||||
|
strm = values.stream();
|
||||||
|
}
|
||||||
|
return strm.map(val -> Tuples.of(keyTuple.getT1(), val, keyTuple.getT2()));
|
||||||
|
})
|
||||||
|
.flatMap(entryTuple -> Arrays.stream(UpdateMode.values()).map(updateMode -> Tuples.of(updateMode,
|
||||||
|
entryTuple.getT1(),
|
||||||
|
entryTuple.getT2(),
|
||||||
|
entryTuple.getT3()
|
||||||
|
)))
|
||||||
|
.map(fullTuple -> Arguments.of(fullTuple.getT1(), fullTuple.getT2(), fullTuple.getT3(), fullTuple.getT4()));
|
||||||
|
}
|
||||||
|
|
||||||
|
@ParameterizedTest
|
||||||
|
@MethodSource("provideArgumentsPut")
|
||||||
|
public void testPut(UpdateMode updateMode, String key, String value, boolean shouldFail) {
|
||||||
|
var stpVer = StepVerifier
|
||||||
|
.create(tempDb(db -> tempDictionary(db, updateMode)
|
||||||
|
.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
|
||||||
|
.flatMap(map -> map
|
||||||
|
.putValue(key, value)
|
||||||
|
.then(map.getValue(null, key))
|
||||||
|
.doFinally(s -> map.release())
|
||||||
|
)
|
||||||
|
));
|
||||||
|
if (shouldFail) {
|
||||||
|
stpVer.verifyError();
|
||||||
|
} else {
|
||||||
|
stpVer.expectNext(value).verifyComplete();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@ParameterizedTest
|
||||||
|
@MethodSource("provideArgumentsPut")
|
||||||
|
public void testAtSetAtGet(UpdateMode updateMode, String key, String value, boolean shouldFail) {
|
||||||
|
var stpVer = StepVerifier
|
||||||
|
.create(tempDb(db -> tempDictionary(db, updateMode)
|
||||||
|
.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
|
||||||
|
.flatMap(map -> map
|
||||||
|
.at(null, key).flatMap(v -> v.set(value).doFinally(s -> v.release()))
|
||||||
|
.then(map.at(null, key).flatMap(v -> v.get(null).doFinally(s -> v.release())))
|
||||||
|
.doFinally(s -> map.release())
|
||||||
|
)
|
||||||
|
));
|
||||||
|
if (shouldFail) {
|
||||||
|
stpVer.verifyError();
|
||||||
|
} else {
|
||||||
|
stpVer.expectNext(value).verifyComplete();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@ParameterizedTest
|
||||||
|
@MethodSource("provideArgumentsPut")
|
||||||
|
public void testPutAndGetPrevious(UpdateMode updateMode, String key, String value, boolean shouldFail) {
|
||||||
|
var stpVer = StepVerifier
|
||||||
|
.create(tempDb(db -> tempDictionary(db, updateMode)
|
||||||
|
.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
|
||||||
|
.flatMapMany(map -> Flux
|
||||||
|
.concat(
|
||||||
|
map.putValueAndGetPrevious(key, "error?"),
|
||||||
|
map.putValueAndGetPrevious(key, value),
|
||||||
|
map.putValueAndGetPrevious(key, value)
|
||||||
|
)
|
||||||
|
.doFinally(s -> map.release())
|
||||||
|
)
|
||||||
|
));
|
||||||
|
if (shouldFail) {
|
||||||
|
stpVer.verifyError();
|
||||||
|
} else {
|
||||||
|
stpVer.expectNext("error?").expectNext(value).verifyComplete();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@ParameterizedTest
|
||||||
|
@MethodSource("provideArgumentsPut")
|
||||||
|
public void testPutValueRemoveAndGetPrevious(UpdateMode updateMode, String key, String value, boolean shouldFail) {
|
||||||
|
var stpVer = StepVerifier
|
||||||
|
.create(tempDb(db -> tempDictionary(db, updateMode)
|
||||||
|
.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
|
||||||
|
.flatMapMany(map -> Flux
|
||||||
|
.concat(
|
||||||
|
map.removeAndGetPrevious(key),
|
||||||
|
map.putValue(key, value).then(map.removeAndGetPrevious(key)),
|
||||||
|
map.removeAndGetPrevious(key)
|
||||||
|
)
|
||||||
|
.doFinally(s -> map.release())
|
||||||
|
)
|
||||||
|
));
|
||||||
|
if (shouldFail) {
|
||||||
|
stpVer.verifyError();
|
||||||
|
} else {
|
||||||
|
stpVer.expectNext(value).verifyComplete();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@ParameterizedTest
|
||||||
|
@MethodSource("provideArgumentsPut")
|
||||||
|
public void testPutValueRemoveAndGetStatus(UpdateMode updateMode, String key, String value, boolean shouldFail) {
|
||||||
|
var stpVer = StepVerifier
|
||||||
|
.create(tempDb(db -> tempDictionary(db, updateMode)
|
||||||
|
.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
|
||||||
|
.flatMapMany(map -> Flux
|
||||||
|
.concat(
|
||||||
|
map.removeAndGetStatus(key),
|
||||||
|
map.putValue(key, value).then(map.removeAndGetStatus(key)),
|
||||||
|
map.removeAndGetStatus(key)
|
||||||
|
)
|
||||||
|
.doFinally(s -> map.release())
|
||||||
|
)
|
||||||
|
));
|
||||||
|
if (shouldFail) {
|
||||||
|
stpVer.verifyError();
|
||||||
|
} else {
|
||||||
|
stpVer.expectNext(false, true, false).verifyComplete();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@ParameterizedTest
|
||||||
|
@MethodSource("provideArgumentsPut")
|
||||||
|
public void testUpdate(UpdateMode updateMode, String key, String value, boolean shouldFail) {
|
||||||
|
var stpVer = StepVerifier
|
||||||
|
.create(tempDb(db -> tempDictionary(db, updateMode)
|
||||||
|
.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
|
||||||
|
.flatMapMany(map -> Flux
|
||||||
|
.concat(
|
||||||
|
map.updateValue(key, old -> {
|
||||||
|
assert old == null;
|
||||||
|
return "error?";
|
||||||
|
}),
|
||||||
|
map.updateValue(key, false, old -> {
|
||||||
|
assert Objects.equals(old, "error?");
|
||||||
|
return "error?";
|
||||||
|
}),
|
||||||
|
map.updateValue(key, true, old -> {
|
||||||
|
assert Objects.equals(old, "error?");
|
||||||
|
return "error?";
|
||||||
|
}),
|
||||||
|
map.updateValue(key, true, old -> {
|
||||||
|
assert Objects.equals(old, "error?");
|
||||||
|
return value;
|
||||||
|
}),
|
||||||
|
map.updateValue(key, true, old -> {
|
||||||
|
assert Objects.equals(old, value);
|
||||||
|
return value;
|
||||||
|
})
|
||||||
|
)
|
||||||
|
.doFinally(s -> map.release())
|
||||||
|
)
|
||||||
|
));
|
||||||
|
if (updateMode == UpdateMode.DISALLOW || shouldFail) {
|
||||||
|
stpVer.verifyError();
|
||||||
|
} else {
|
||||||
|
stpVer.expectNext(true, false, false, true, false).verifyComplete();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@ParameterizedTest
|
||||||
|
@MethodSource("provideArgumentsPut")
|
||||||
|
public void testUpdateGet(UpdateMode updateMode, String key, String value, boolean shouldFail) {
|
||||||
|
var stpVer = StepVerifier
|
||||||
|
.create(tempDb(db -> tempDictionary(db, updateMode)
|
||||||
|
.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
|
||||||
|
.flatMapMany(map -> Flux
|
||||||
|
.concat(
|
||||||
|
map.updateValue(key, old -> {
|
||||||
|
assert old == null;
|
||||||
|
return "error?";
|
||||||
|
}).then(map.getValue(null, key)),
|
||||||
|
map.updateValue(key, false, old -> {
|
||||||
|
assert Objects.equals(old, "error?");
|
||||||
|
return "error?";
|
||||||
|
}).then(map.getValue(null, key)),
|
||||||
|
map.updateValue(key, true, old -> {
|
||||||
|
assert Objects.equals(old, "error?");
|
||||||
|
return "error?";
|
||||||
|
}).then(map.getValue(null, key)),
|
||||||
|
map.updateValue(key, true, old -> {
|
||||||
|
assert Objects.equals(old, "error?");
|
||||||
|
return value;
|
||||||
|
}).then(map.getValue(null, key)),
|
||||||
|
map.updateValue(key, true, old -> {
|
||||||
|
assert Objects.equals(old, value);
|
||||||
|
return value;
|
||||||
|
}).then(map.getValue(null, key))
|
||||||
|
)
|
||||||
|
.doFinally(s -> map.release())
|
||||||
|
)
|
||||||
|
));
|
||||||
|
if (updateMode == UpdateMode.DISALLOW || shouldFail) {
|
||||||
|
stpVer.verifyError();
|
||||||
|
} else {
|
||||||
|
stpVer.expectNext("error?", "error?", "error?", value, value).verifyComplete();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@ParameterizedTest
|
||||||
|
@MethodSource("provideArgumentsPut")
|
||||||
|
public void testPutAndGetStatus(UpdateMode updateMode, String key, String value, boolean shouldFail) {
|
||||||
|
var stpVer = StepVerifier
|
||||||
|
.create(tempDb(db -> tempDictionary(db, updateMode)
|
||||||
|
.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
|
||||||
|
.flatMapMany(map -> Flux
|
||||||
|
.concat(
|
||||||
|
map.putValueAndGetStatus(key, "error?").single(),
|
||||||
|
map.putValueAndGetStatus(key, value).single(),
|
||||||
|
map.putValueAndGetStatus(key, value).single(),
|
||||||
|
map.remove(key),
|
||||||
|
map.putValueAndGetStatus(key, "error?").single()
|
||||||
|
)
|
||||||
|
.doFinally(s -> map.release())
|
||||||
|
)
|
||||||
|
));
|
||||||
|
if (shouldFail) {
|
||||||
|
stpVer.verifyError();
|
||||||
|
} else {
|
||||||
|
stpVer.expectNext(false, true, true, false).verifyComplete();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private static Stream<Arguments> provideArgumentsPutMulti() {
|
||||||
|
var goodKeys = Set.of(Set.of("12345", "67890"), Set.of("zebra"), Set.<String>of());
|
||||||
|
var badKeys = Set.of(Set.of("", "12345"), Set.of("12345", "a"), Set.of("45678", "aaaa"), Set.of("aaaaaa", "capra"));
|
||||||
|
Set<Tuple2<Set<String>, Boolean>> keys = Stream.concat(
|
||||||
|
goodKeys.stream().map(s -> Tuples.of(s, false)),
|
||||||
|
badKeys.stream().map(s -> Tuples.of(s, true))
|
||||||
|
).collect(Collectors.toSet());
|
||||||
|
var values = Set.of("a", "", "\0", "\0\0", "z", "azzszgzczqz", "bzzazazqzeztzgzzhz!");
|
||||||
|
|
||||||
|
return keys
|
||||||
|
.stream()
|
||||||
|
.map(keyTuple -> keyTuple.mapT1(ks -> Flux
|
||||||
|
.zip(Flux.fromIterable(ks), Flux.fromIterable(values))
|
||||||
|
.collectMap(Tuple2::getT1, Tuple2::getT2)
|
||||||
|
.block()
|
||||||
|
))
|
||||||
|
.flatMap(entryTuple -> Arrays.stream(UpdateMode.values()).map(updateMode -> Tuples.of(updateMode,
|
||||||
|
entryTuple.getT1(),
|
||||||
|
entryTuple.getT2()
|
||||||
|
)))
|
||||||
|
.map(fullTuple -> Arguments.of(fullTuple.getT1(), fullTuple.getT2(), fullTuple.getT3()));
|
||||||
|
}
|
||||||
|
|
||||||
|
@ParameterizedTest
|
||||||
|
@MethodSource("provideArgumentsPutMulti")
|
||||||
|
public void testPutMultiGetMulti(UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
|
||||||
|
var remainingEntries = new ConcurrentHashMap<Entry<String, String>, Boolean>().keySet(true);
|
||||||
|
Step<Entry<String, String>> stpVer = StepVerifier
|
||||||
|
.create(tempDb(db -> tempDictionary(db, updateMode)
|
||||||
|
.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
|
||||||
|
.flatMapMany(map -> Flux
|
||||||
|
.concat(
|
||||||
|
map.putMulti(Flux.fromIterable(entries.entrySet())).then(Mono.empty()),
|
||||||
|
map.getMulti(null, Flux.fromIterable(entries.keySet()))
|
||||||
|
)
|
||||||
|
.doFinally(s -> map.release())
|
||||||
|
)
|
||||||
|
));
|
||||||
|
if (shouldFail) {
|
||||||
|
stpVer.verifyError();
|
||||||
|
} else {
|
||||||
|
entries.forEach((k, v) -> remainingEntries.add(Map.entry(k, v)));
|
||||||
|
for (Entry<String, String> ignored : remainingEntries) {
|
||||||
|
stpVer = stpVer.expectNextMatches(remainingEntries::remove);
|
||||||
|
}
|
||||||
|
stpVer.verifyComplete();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@ParameterizedTest
|
||||||
|
@MethodSource("provideArgumentsPutMulti")
|
||||||
|
public void testSetAllValuesGetMulti(UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
|
||||||
|
var remainingEntries = new ConcurrentHashMap<Entry<String, String>, Boolean>().keySet(true);
|
||||||
|
Step<Entry<String, String>> stpVer = StepVerifier
|
||||||
|
.create(tempDb(db -> tempDictionary(db, updateMode)
|
||||||
|
.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
|
||||||
|
.flatMapMany(map -> map
|
||||||
|
.setAllValues(Flux.fromIterable(entries.entrySet()))
|
||||||
|
.thenMany(map.getMulti(null, Flux.fromIterable(entries.keySet())))
|
||||||
|
.doFinally(s -> map.release())
|
||||||
|
)
|
||||||
|
));
|
||||||
|
if (shouldFail) {
|
||||||
|
stpVer.verifyError();
|
||||||
|
} else {
|
||||||
|
entries.forEach((k, v) -> remainingEntries.add(Map.entry(k, v)));
|
||||||
|
for (Entry<String, String> ignored : remainingEntries) {
|
||||||
|
stpVer = stpVer.expectNextMatches(remainingEntries::remove);
|
||||||
|
}
|
||||||
|
stpVer.verifyComplete();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@ParameterizedTest
|
||||||
|
@MethodSource("provideArgumentsPutMulti")
|
||||||
|
public void testSetAllValuesAndGetPrevious(UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
|
||||||
|
var remainingEntries = new ConcurrentHashMap<Entry<String, String>, Boolean>().keySet(true);
|
||||||
|
Step<Entry<String, String>> stpVer = StepVerifier
|
||||||
|
.create(tempDb(db -> tempDictionary(db, updateMode)
|
||||||
|
.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
|
||||||
|
.flatMapMany(map -> Flux
|
||||||
|
.concat(
|
||||||
|
map.setAllValuesAndGetPrevious(Flux.fromIterable(entries.entrySet())),
|
||||||
|
map.setAllValuesAndGetPrevious(Flux.fromIterable(entries.entrySet()))
|
||||||
|
)
|
||||||
|
.doFinally(s -> map.release())
|
||||||
|
)
|
||||||
|
));
|
||||||
|
if (shouldFail) {
|
||||||
|
stpVer.verifyError();
|
||||||
|
} else {
|
||||||
|
entries.forEach((k, v) -> remainingEntries.add(Map.entry(k, v)));
|
||||||
|
for (Entry<String, String> ignored : remainingEntries) {
|
||||||
|
stpVer = stpVer.expectNextMatches(remainingEntries::remove);
|
||||||
|
}
|
||||||
|
stpVer.verifyComplete();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
	@ParameterizedTest
	@MethodSource("provideArgumentsPutMulti")
	public void testSetGetMulti(UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
		var remainingEntries = new ConcurrentHashMap<Entry<String, String>, Boolean>().keySet(true);
		Step<Entry<String, String>> stpVer = StepVerifier
				.create(tempDb(db -> tempDictionary(db, updateMode)
						.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
						.flatMapMany(map -> Flux
								.concat(
										map.set(entries).then(Mono.empty()),
										map.getMulti(null, Flux.fromIterable(entries.keySet()))
								)
								.doFinally(s -> map.release())
						)
				));
		if (shouldFail) {
			stpVer.verifyError();
		} else {
			entries.forEach((k, v) -> remainingEntries.add(Map.entry(k, v)));
			for (Entry<String, String> ignored : remainingEntries) {
				stpVer = stpVer.expectNextMatches(remainingEntries::remove);
			}
			stpVer.verifyComplete();
		}
	}

	@ParameterizedTest
	@MethodSource("provideArgumentsPutMulti")
	public void testSetAndGetStatus(UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
		Step<Boolean> stpVer = StepVerifier
				.create(tempDb(db -> tempDictionary(db, updateMode)
						.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
						.flatMapMany(map -> {
							Mono<Void> removalMono;
							if (entries.isEmpty()) {
								removalMono = Mono.empty();
							} else {
								removalMono = map.remove(entries.keySet().stream().findAny().orElseThrow());
							}
							return Flux
									.concat(
											map.setAndGetChanged(entries).single(),
											map.setAndGetChanged(entries).single(),
											removalMono.then(Mono.empty()),
											map.setAndGetChanged(entries).single()
									)
									.doFinally(s -> map.release());
						})
				));
		if (shouldFail) {
			stpVer.verifyError();
		} else {
			// The first set changes the map (unless entries is empty), the second set
			// is a no-op, and after one key is removed the third set changes it again.
			stpVer.expectNext(!entries.isEmpty(), false, !entries.isEmpty()).verifyComplete();
		}
	}

	@ParameterizedTest
	@MethodSource("provideArgumentsPutMulti")
	public void testSetAndGetPrevious(UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
		var remainingEntries = new ConcurrentHashMap<Entry<String, String>, Boolean>().keySet(true);
		Step<Entry<String, String>> stpVer = StepVerifier
				.create(tempDb(db -> tempDictionary(db, updateMode)
						.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
						.flatMapMany(map -> Flux
								.concat(map.setAndGetPrevious(entries), map.setAndGetPrevious(entries))
								.map(Map::entrySet)
								.flatMap(Flux::fromIterable)
								.doFinally(s -> map.release())
						)
				));
		if (shouldFail) {
			stpVer.verifyError();
		} else {
			entries.forEach((k, v) -> remainingEntries.add(Map.entry(k, v)));
			for (Entry<String, String> ignored : remainingEntries) {
				stpVer = stpVer.expectNextMatches(remainingEntries::remove);
			}
			stpVer.verifyComplete();
		}
	}

	@ParameterizedTest
	@MethodSource("provideArgumentsPutMulti")
	public void testSetClearAndGetPreviousGet(UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
		var remainingEntries = new ConcurrentHashMap<Entry<String, String>, Boolean>().keySet(true);
		Step<Entry<String, String>> stpVer = StepVerifier
				.create(tempDb(db -> tempDictionary(db, updateMode)
						.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
						.flatMapMany(map -> Flux
								.concat(map.set(entries).then(Mono.empty()), map.clearAndGetPrevious(), map.get(null))
								.map(Map::entrySet)
								.flatMap(Flux::fromIterable)
								.doFinally(s -> map.release())
						)
				));
		if (shouldFail) {
			stpVer.verifyError();
		} else {
			entries.forEach((k, v) -> remainingEntries.add(Map.entry(k, v)));
			for (Entry<String, String> ignored : remainingEntries) {
				stpVer = stpVer.expectNextMatches(remainingEntries::remove);
			}
			stpVer.verifyComplete();
		}
	}

	@ParameterizedTest
	@MethodSource("provideArgumentsPutMulti")
	public void testPutMultiGetAllValues(UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
		var remainingEntries = new ConcurrentHashMap<Entry<String, String>, Boolean>().keySet(true);
		Step<Entry<String, String>> stpVer = StepVerifier
				.create(tempDb(db -> tempDictionary(db, updateMode)
						.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
						.flatMapMany(map -> Flux
								.concat(
										map.putMulti(Flux.fromIterable(entries.entrySet())).then(Mono.empty()),
										map.getAllValues(null)
								)
								.doFinally(s -> map.release())
						)
				));
		if (shouldFail) {
			stpVer.verifyError();
		} else {
			entries.forEach((k, v) -> remainingEntries.add(Map.entry(k, v)));
			for (Entry<String, String> ignored : remainingEntries) {
				stpVer = stpVer.expectNextMatches(remainingEntries::remove);
			}
			stpVer.verifyComplete();
		}
	}

	@ParameterizedTest
	@MethodSource("provideArgumentsPutMulti")
	public void testPutMultiGet(UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
		var remainingEntries = new ConcurrentHashMap<Entry<String, String>, Boolean>().keySet(true);
		Step<Entry<String, String>> stpVer = StepVerifier
				.create(tempDb(db -> tempDictionary(db, updateMode)
						.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
						.flatMapMany(map -> Flux
								.concat(
										map.putMulti(Flux.fromIterable(entries.entrySet())).then(Mono.empty()),
										map.get(null)
												.map(Map::entrySet)
												.flatMapMany(Flux::fromIterable)
								)
								.doFinally(s -> map.release())
						)
				));
		if (shouldFail) {
			stpVer.verifyError();
		} else {
			entries.forEach((k, v) -> remainingEntries.add(Map.entry(k, v)));
			for (Entry<String, String> ignored : remainingEntries) {
				stpVer = stpVer.expectNextMatches(remainingEntries::remove);
			}
			stpVer.verifyComplete();
		}
	}

	@ParameterizedTest
	@MethodSource("provideArgumentsPutMulti")
	public void testPutMultiGetAllStagesGet(UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
		var remainingEntries = new ConcurrentHashMap<Entry<String, String>, Boolean>().keySet(true);
		Step<Entry<String, String>> stpVer = StepVerifier
				.create(tempDb(db -> tempDictionary(db, updateMode)
						.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
						.flatMapMany(map -> Flux
								.concat(
										map.putMulti(Flux.fromIterable(entries.entrySet())).then(Mono.empty()),
										map
												.getAllStages(null)
												.flatMap(stage -> stage
														.getValue()
														.get(null)
														.map(val -> Map.entry(stage.getKey(), val))
														.doFinally(s -> stage.getValue().release())
												)
								)
								.doFinally(s -> map.release())
						)
				));
		if (shouldFail) {
			stpVer.verifyError();
		} else {
			entries.forEach((k, v) -> remainingEntries.add(Map.entry(k, v)));
			for (Entry<String, String> ignored : remainingEntries) {
				stpVer = stpVer.expectNextMatches(remainingEntries::remove);
			}
			stpVer.verifyComplete();
		}
	}

	@ParameterizedTest
	@MethodSource("provideArgumentsPutMulti")
	public void testPutMultiIsEmpty(UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
		Step<Boolean> stpVer = StepVerifier
				.create(tempDb(db -> tempDictionary(db, updateMode)
						.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
						.flatMapMany(map -> Flux
								.concat(
										map.isEmpty(null),
										map.putMulti(Flux.fromIterable(entries.entrySet())).then(Mono.empty()),
										map.isEmpty(null)
								)
								.doFinally(s -> map.release())
						)
				));
		if (shouldFail) {
			stpVer.expectNext(true).verifyError();
		} else {
			// The map starts empty; after putMulti it is empty only if no entries were given.
			stpVer.expectNext(true, entries.isEmpty()).verifyComplete();
		}
	}

	@ParameterizedTest
	@MethodSource("provideArgumentsPutMulti")
	public void testPutMultiClear(UpdateMode updateMode, Map<String, String> entries, boolean shouldFail) {
		Step<Boolean> stpVer = StepVerifier
				.create(tempDb(db -> tempDictionary(db, updateMode)
						.map(dict -> tempDatabaseMapDictionaryMap(dict, 5))
						.flatMapMany(map -> Flux
								.concat(
										map.isEmpty(null),
										map.putMulti(Flux.fromIterable(entries.entrySet())).then(Mono.empty()),
										map.isEmpty(null),
										map.clear().then(Mono.empty()),
										map.isEmpty(null)
								)
								.doFinally(s -> map.release())
						)
				));
		if (shouldFail) {
			stpVer.expectNext(true).verifyError();
		} else {
			// Empty at the start, non-empty after putMulti (unless entries is empty),
			// and empty again after clear().
			stpVer.expectNext(true, entries.isEmpty(), true).verifyComplete();
		}
	}
}
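
A note on the verification idiom repeated in every map test above: the expected entries go into a concurrent set, and each emitted entry must match and remove one of them, so the tests pass regardless of emission order. Below is a self-contained sketch of the same idiom in plain Reactor, with Flux.just standing in for the database map (the key/value literals are made up for the example):

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import reactor.core.publisher.Flux;
import reactor.test.StepVerifier;

public class OrderInsensitiveVerifyExample {

	public static void main(String[] args) {
		var remaining = new ConcurrentHashMap<Map.Entry<String, String>, Boolean>().keySet(true);
		remaining.add(Map.entry("k1", "v1"));
		remaining.add(Map.entry("k2", "v2"));

		// Emission order differs from insertion order on purpose.
		StepVerifier.Step<Map.Entry<String, String>> step =
				StepVerifier.create(Flux.just(Map.entry("k2", "v2"), Map.entry("k1", "v1")));

		// expectNextMatches only registers a predicate; remove() runs later, during
		// verifyComplete(), so iterating the same set here is safe. remove() returns
		// false (failing the step) for a duplicate or unexpected entry.
		for (var ignored : remaining) {
			step = step.expectNextMatches(remaining::remove);
		}
		step.verifyComplete();
	}
}
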
120
src/test/java/it/cavallium/dbengine/client/TestSingletons.java
Normal file
@ -0,0 +1,120 @@
package it.cavallium.dbengine.client;

import static it.cavallium.dbengine.client.DbTestUtils.tempDb;

import it.cavallium.dbengine.database.LLKeyValueDatabase;
import it.cavallium.dbengine.database.collections.DatabaseInt;
import it.cavallium.dbengine.database.collections.DatabaseLong;
import java.util.stream.Stream;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import org.junit.jupiter.params.provider.ValueSource;
import reactor.core.publisher.Mono;
import reactor.test.StepVerifier;

public class TestSingletons {

	private static Stream<Arguments> provideNumberWithRepeats() {
		return Stream.of(
				Arguments.of(Integer.MIN_VALUE, 2),
				Arguments.of(-11, 2),
				Arguments.of(0, 3),
				Arguments.of(102, 5)
		);
	}

	private static Stream<Arguments> provideLongNumberWithRepeats() {
		return Stream.of(
				Arguments.of(Long.MIN_VALUE, 2),
				Arguments.of(-11L, 2),
				Arguments.of(0L, 3),
				Arguments.of(102L, 5)
		);
	}

	@Test
	public void testCreateInteger() {
		StepVerifier
				.create(tempDb(db -> tempInt(db, "test", 0)
						.flatMap(dbInt -> dbInt.get(null))
						.then()
				))
				.verifyComplete();
	}

	@Test
	public void testCreateLong() {
		StepVerifier
				.create(tempDb(db -> tempLong(db, "test", 0)
						.flatMap(dbLong -> dbLong.get(null))
						.then()
				))
				.verifyComplete();
	}

	@ParameterizedTest
	@ValueSource(ints = {Integer.MIN_VALUE, -192, -2, -1, 0, 1, 2, 1292, Integer.MAX_VALUE})
	public void testDefaultValueInteger(int i) {
		StepVerifier
				.create(tempDb(db -> tempInt(db, "test", i)
						.flatMap(dbInt -> dbInt.get(null))
				))
				.expectNext(i)
				.verifyComplete();
	}

	@ParameterizedTest
	@ValueSource(longs = {Long.MIN_VALUE, -192, -2, -1, 0, 1, 2, 1292, Long.MAX_VALUE})
	public void testDefaultValueLong(long i) {
		StepVerifier
				.create(tempDb(db -> tempLong(db, "test", i)
						.flatMap(dbLong -> dbLong.get(null))
				))
				.expectNext(i)
				.verifyComplete();
	}

	@ParameterizedTest
	@MethodSource("provideNumberWithRepeats")
	public void testSetInteger(Integer i, Integer repeats) {
		StepVerifier
				.create(tempDb(db -> tempInt(db, "test", 0)
						.flatMap(dbInt -> Mono
								.defer(() -> dbInt.set((int) System.currentTimeMillis()))
								.repeat(repeats)
								.then(dbInt.set(i))
								.then(dbInt.get(null)))
				))
				.expectNext(i)
				.verifyComplete();
	}

	@ParameterizedTest
	@MethodSource("provideLongNumberWithRepeats")
	public void testSetLong(Long i, Integer repeats) {
		StepVerifier
				.create(tempDb(db -> tempLong(db, "test", 0)
						.flatMap(dbLong -> Mono
								.defer(() -> dbLong.set(System.currentTimeMillis()))
								.repeat(repeats)
								.then(dbLong.set(i))
								.then(dbLong.get(null)))
				))
				.expectNext(i)
				.verifyComplete();
	}

	public static Mono<DatabaseInt> tempInt(LLKeyValueDatabase database, String name, int defaultValue) {
		return database
				.getInteger("ints", name, defaultValue);
	}

	public static Mono<DatabaseLong> tempLong(LLKeyValueDatabase database, String name, long defaultValue) {
		return database
				.getLong("longs", name, defaultValue);
	}
}
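
For reference, the set/get round-trip these tests exercise condenses into one sketch. This is a minimal example written as it would appear inside the class above, using only the getLong/set/get calls shown; the column name "counters" and singleton name "visits" are made up for the example:

	@Test
	public void exampleCounterRoundTrip() {
		StepVerifier
				.create(tempDb(db -> db
						.getLong("counters", "visits", 0)
						.flatMap(counter -> counter
								.set(42L)
								.then(counter.get(null)))))
				.expectNext(42L)
				.verifyComplete();
	}
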
@ -1,10 +1,21 @@
 package it.cavallium.dbengine.database.collections;
 
+import io.netty.buffer.ByteBufAllocator;
+import io.netty.buffer.PooledByteBufAllocator;
+import io.netty.buffer.Unpooled;
+import it.cavallium.dbengine.database.LLUtils;
 import java.util.Arrays;
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.Test;
+import static io.netty.buffer.Unpooled.*;
 
 public class TestRanges {
+
+	@Test
+	public void testDirectBuffer() {
+		Assertions.assertTrue(wrappedBuffer(Unpooled.directBuffer(10, 10), Unpooled.buffer(10, 10)).isDirect());
+	}
+
 	@Test
 	public void testNextRangeKey() {
 		testNextRangeKey(new byte[] {0x00, 0x00, 0x00});
@ -21,11 +32,21 @@ public class TestRanges {
 
 	public void testNextRangeKey(byte[] prefixKey) {
 
-		byte[] firstRangeKey = DatabaseMapDictionaryDeep.firstRangeKey(prefixKey, prefixKey.length, 7, 3);
-		byte[] nextRangeKey = DatabaseMapDictionaryDeep.nextRangeKey(prefixKey, prefixKey.length, 7, 3);
+		byte[] firstRangeKey = LLUtils.toArray(DatabaseMapDictionaryDeep.firstRangeKey(PooledByteBufAllocator.DEFAULT,
+				LLUtils.convertToDirectByteBuf(PooledByteBufAllocator.DEFAULT, wrappedBuffer(prefixKey)),
+				prefixKey.length,
+				7,
+				3
+		));
+		byte[] nextRangeKey = LLUtils.toArray(DatabaseMapDictionaryDeep.nextRangeKey(PooledByteBufAllocator.DEFAULT,
+				LLUtils.convertToDirectByteBuf(PooledByteBufAllocator.DEFAULT, wrappedBuffer(prefixKey)),
+				prefixKey.length,
+				7,
+				3
+		));
 
 		if (Arrays.equals(prefixKey, new byte[] {(byte) 0xFF, (byte) 0xFF, (byte) 0xFF})) {
-			Assertions.assertArrayEquals(new byte[] {(byte) 0xFF, (byte) 0xFF, (byte) 0xFF, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}, nextRangeKey);
+			Assertions.assertArrayEquals(new byte[] {(byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, 0}, nextRangeKey);
 		} else {
 			long biPrefix = 0;
 			var s = 0;
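
The new API takes direct ByteBufs, so the test wraps each heap byte[] and converts it with LLUtils.convertToDirectByteBuf. Below is a minimal sketch of such a heap-to-direct conversion in plain Netty; the call site above shows the real helper's signature, but its internals are not shown in this diff, so the toDirect method here is a hypothetical stand-in:

import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
import io.netty.buffer.Unpooled;

public class HeapToDirectExample {

	// Copies a heap buffer's readable bytes into a freshly allocated direct buffer.
	static ByteBuf toDirect(ByteBufAllocator alloc, ByteBuf buf) {
		if (buf.isDirect()) {
			return buf.retain();
		}
		ByteBuf direct = alloc.directBuffer(buf.readableBytes());
		direct.writeBytes(buf, buf.readerIndex(), buf.readableBytes());
		return direct;
	}

	public static void main(String[] args) {
		ByteBuf heap = Unpooled.wrappedBuffer(new byte[] {1, 2, 3});
		ByteBuf direct = toDirect(ByteBufAllocator.DEFAULT, heap);
		System.out.println(direct.isDirect()); // true
		direct.release();
	}
}
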
@ -64,11 +85,23 @@ public class TestRanges {
 
 	public void testNextRangeKeyWithSuffix(byte[] prefixKey, byte[] suffixKey) {
 
-		byte[] firstRangeKey = DatabaseMapDictionaryDeep.firstRangeKey(prefixKey, suffixKey, prefixKey.length, 3, 7);
-		byte[] nextRangeKey = DatabaseMapDictionaryDeep.nextRangeKey(prefixKey, suffixKey, prefixKey.length, 3, 7);
+		byte[] firstRangeKey = LLUtils.toArray(DatabaseMapDictionaryDeep.firstRangeKey(ByteBufAllocator.DEFAULT,
+				LLUtils.convertToDirectByteBuf(PooledByteBufAllocator.DEFAULT, wrappedBuffer(prefixKey)),
+				LLUtils.convertToDirectByteBuf(PooledByteBufAllocator.DEFAULT, wrappedBuffer(suffixKey)),
+				prefixKey.length,
+				3,
+				7
+		));
+		byte[] nextRangeKey = LLUtils.toArray(DatabaseMapDictionaryDeep.nextRangeKey(ByteBufAllocator.DEFAULT,
+				LLUtils.convertToDirectByteBuf(PooledByteBufAllocator.DEFAULT, wrappedBuffer(prefixKey)),
+				LLUtils.convertToDirectByteBuf(PooledByteBufAllocator.DEFAULT, wrappedBuffer(suffixKey)),
+				prefixKey.length,
+				3,
+				7
+		));
 
 		if (Arrays.equals(prefixKey, new byte[] {(byte) 0xFF, (byte) 0xFF, (byte) 0xFF}) && Arrays.equals(suffixKey, new byte[] {(byte) 0xFF, (byte) 0xFF, (byte) 0xFF})) {
-			Assertions.assertArrayEquals(new byte[] {(byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, 0, 0, 0, 0, 0, 0, 0, 0}, nextRangeKey);
+			Assertions.assertArrayEquals(new byte[] {(byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, 0}, nextRangeKey);
 		} else {
 			long biPrefix = 0;
 			var s = 0;
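
Background on what these assertions check: firstRangeKey and nextRangeKey derive the inclusive lower and exclusive upper bounds of a prefix scan over fixed-length keys, and the all-0xFF cases pin down the library's padding and overflow behavior. As a generic sketch of the underlying increment-the-prefix technique (the general idea only, not the library's exact implementation, which pads keys to a fixed total length as the assertions show):

import java.util.Arrays;

public class NextPrefixExample {

	// Returns the exclusive upper bound for keys starting with prefix,
	// or null if the prefix is all 0xFF (scan to the end of the keyspace).
	static byte[] nextPrefix(byte[] prefix) {
		byte[] next = Arrays.copyOf(prefix, prefix.length);
		for (int i = next.length - 1; i >= 0; i--) {
			if (next[i] != (byte) 0xFF) {
				next[i]++;
				return Arrays.copyOf(next, i + 1); // drop the wrapped trailing bytes
			}
		}
		return null;
	}

	public static void main(String[] args) {
		// {0x00, 0x01, 0xFF} -> {0x00, 0x02}
		System.out.println(Arrays.toString(nextPrefix(new byte[] {0x00, 0x01, (byte) 0xFF})));
		// all-0xFF -> null (unbounded)
		System.out.println(Arrays.toString(nextPrefix(new byte[] {(byte) 0xFF, (byte) 0xFF})));
	}
}
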