Update Example.java, DatabaseMapDictionary.java, and 9 more files...

Andrea Cavalli 2021-02-01 12:19:17 +01:00
parent 1b9b2b187a
commit 023bc3b0dd
11 changed files with 64 additions and 136 deletions

Example.java

@@ -1,8 +1,6 @@
 package it.cavallium.dbengine.client;
 import com.google.common.primitives.Ints;
-import io.netty.buffer.ByteBuf;
-import io.netty.buffer.Unpooled;
 import it.cavallium.dbengine.database.Column;
 import it.cavallium.dbengine.database.LLKeyValueDatabase;
 import it.cavallium.dbengine.database.collections.DatabaseMapDictionary;
@@ -65,7 +63,6 @@ public class Example {
 var ser = SerializerFixedBinaryLength.noop(4);
 var itemKey = new byte[]{0, 1, 2, 3};
 var newValue = new byte[]{4, 5, 6, 7};
-var itemKeyBuffer = Unpooled.wrappedBuffer(itemKey);
 return test("MapDictionaryDeep::at::put (same key, same value, " + batchSize + " times)",
 tempDb()
 .flatMap(db -> db.getDictionary("testmap").map(dict -> Tuples.of(db, dict)))
@@ -76,7 +73,7 @@ public class Example {
 if (printPreviousValue)
 System.out.println("Setting new value at key " + Arrays.toString(itemKey) + ": " + Arrays.toString(newValue));
 })
-.then(tuple.getT2().at(null, itemKeyBuffer))
+.then(tuple.getT2().at(null, itemKey))
 .flatMap(handle -> handle.setAndGetPrevious(newValue))
 .doOnSuccess(oldValue -> {
 if (printPreviousValue)
@@ -93,7 +90,6 @@ public class Example {
 var ser = SerializerFixedBinaryLength.noop(4);
 var itemKey = new byte[]{0, 1, 2, 3};
 var newValue = new byte[]{4, 5, 6, 7};
-var itemKeyBuffer = Unpooled.wrappedBuffer(itemKey);
 return test("MapDictionaryDeep::putValueAndGetPrevious (same key, same value, " + batchSize + " times)",
 tempDb()
 .flatMap(db -> db.getDictionary("testmap").map(dict -> Tuples.of(db, dict)))
@@ -104,7 +100,7 @@ public class Example {
 if (printPreviousValue)
 System.out.println("Setting new value at key " + Arrays.toString(itemKey) + ": " + Arrays.toString(newValue));
 })
-.then(tuple.getT2().putValueAndGetPrevious(itemKeyBuffer, newValue))
+.then(tuple.getT2().putValueAndGetPrevious(itemKey, newValue))
 .doOnSuccess(oldValue -> {
 if (printPreviousValue)
 System.out.println("Old value: " + (oldValue == null ? "None" : Arrays.toString(oldValue)));
@@ -120,7 +116,6 @@ public class Example {
 var ser = SerializerFixedBinaryLength.noop(4);
 var itemKey = new byte[]{0, 1, 2, 3};
 var newValue = new byte[]{4, 5, 6, 7};
-var itemKeyBuffer = Unpooled.wrappedBuffer(itemKey);
 return test("MapDictionaryDeep::putValue (same key, same value, " + batchSize + " times)",
 tempDb()
 .flatMap(db -> db.getDictionary("testmap").map(dict -> Tuples.of(db, dict)))
@@ -131,7 +126,7 @@ public class Example {
 if (printPreviousValue)
 System.out.println("Setting new value at key " + Arrays.toString(itemKey) + ": " + Arrays.toString(newValue));
 })
-.then(tuple.getT2().putValue(itemKeyBuffer, newValue))
+.then(tuple.getT2().putValue(itemKey, newValue))
 ))
 .then(),
 numRepeats,
@@ -141,9 +136,9 @@ public class Example {
 private static Mono<Void> testPutMulti() {
 var ssg = new SubStageGetterSingleBytes();
 var ser = SerializerFixedBinaryLength.noop(4);
-HashMap<ByteBuf, byte[]> keysToPut = new HashMap<>();
+HashMap<byte[], byte[]> keysToPut = new HashMap<>();
 for (int i = 0; i < batchSize; i++) {
-keysToPut.put(Unpooled.wrappedBuffer(Ints.toByteArray(i * 3)), Ints.toByteArray(i * 11));
+keysToPut.put(Ints.toByteArray(i * 3), Ints.toByteArray(i * 11));
 }
 var putMultiFlux = Flux.fromIterable(keysToPut.entrySet());
 return test("MapDictionaryDeep::putMulti (batch of " + batchSize + " entries)",
@@ -159,10 +154,9 @@ public class Example {
 private static Mono<Void> rangeTestAtPut() {
 var ser = SerializerFixedBinaryLength.noop(4);
-var vser = Serializer.noopBytes();
+var vser = Serializer.noop();
 var itemKey = new byte[]{0, 1, 2, 3};
 var newValue = new byte[]{4, 5, 6, 7};
-var itemKeyBuffer = Unpooled.wrappedBuffer(itemKey);
 return test("MapDictionary::at::put (same key, same value, " + batchSize + " times)",
 tempDb()
 .flatMap(db -> db.getDictionary("testmap").map(dict -> Tuples.of(db, dict)))
@@ -173,7 +167,7 @@ public class Example {
 if (printPreviousValue)
 System.out.println("Setting new value at key " + Arrays.toString(itemKey) + ": " + Arrays.toString(newValue));
 })
-.then(tuple.getT2().at(null, itemKeyBuffer))
+.then(tuple.getT2().at(null, itemKey))
 .flatMap(handle -> handle.setAndGetPrevious(newValue))
 .doOnSuccess(oldValue -> {
 if (printPreviousValue)
@@ -187,10 +181,9 @@ public class Example {
 private static Mono<Void> rangeTestPutValueAndGetPrevious() {
 var ser = SerializerFixedBinaryLength.noop(4);
-var vser = Serializer.noopBytes();
+var vser = Serializer.noop();
 var itemKey = new byte[]{0, 1, 2, 3};
 var newValue = new byte[]{4, 5, 6, 7};
-var itemKeyBuffer = Unpooled.wrappedBuffer(itemKey);
 return test("MapDictionary::putValueAndGetPrevious (same key, same value, " + batchSize + " times)",
 tempDb()
 .flatMap(db -> db.getDictionary("testmap").map(dict -> Tuples.of(db, dict)))
@@ -201,7 +194,7 @@ public class Example {
 if (printPreviousValue)
 System.out.println("Setting new value at key " + Arrays.toString(itemKey) + ": " + Arrays.toString(newValue));
 })
-.then(tuple.getT2().putValueAndGetPrevious(itemKeyBuffer, newValue))
+.then(tuple.getT2().putValueAndGetPrevious(itemKey, newValue))
 .doOnSuccess(oldValue -> {
 if (printPreviousValue)
 System.out.println("Old value: " + (oldValue == null ? "None" : Arrays.toString(oldValue)));
@@ -214,10 +207,9 @@ public class Example {
 private static Mono<Void> rangeTestPutValue() {
 var ser = SerializerFixedBinaryLength.noop(4);
-var vser = Serializer.noopBytes();
+var vser = Serializer.noop();
 var itemKey = new byte[]{0, 1, 2, 3};
 var newValue = new byte[]{4, 5, 6, 7};
-var itemKeyBuffer = Unpooled.wrappedBuffer(itemKey);
 return test("MapDictionary::putValue (same key, same value, " + batchSize + " times)",
 tempDb()
 .flatMap(db -> db.getDictionary("testmap").map(dict -> Tuples.of(db, dict)))
@@ -228,7 +220,7 @@ public class Example {
 if (printPreviousValue)
 System.out.println("Setting new value at key " + Arrays.toString(itemKey) + ": " + Arrays.toString(newValue));
 })
-.then(tuple.getT2().putValue(itemKeyBuffer, newValue))
+.then(tuple.getT2().putValue(itemKey, newValue))
 ))
 .then(),
 numRepeats,
@@ -237,10 +229,10 @@ public class Example {
 private static Mono<Void> rangeTestPutMulti() {
 var ser = SerializerFixedBinaryLength.noop(4);
-var vser = Serializer.noopBytes();
+var vser = Serializer.noop();
-HashMap<ByteBuf, byte[]> keysToPut = new HashMap<>();
+HashMap<byte[], byte[]> keysToPut = new HashMap<>();
 for (int i = 0; i < batchSize; i++) {
-keysToPut.put(Unpooled.wrappedBuffer(Ints.toByteArray(i * 3)), Ints.toByteArray(i * 11));
+keysToPut.put(Ints.toByteArray(i * 3), Ints.toByteArray(i * 11));
 }
 var putMultiFlux = Flux.fromIterable(keysToPut.entrySet());
 return test("MapDictionary::putMulti (batch of " + batchSize + " entries)",

DatabaseMapDictionary.java

@@ -1,7 +1,5 @@
 package it.cavallium.dbengine.database.collections;
-import io.netty.buffer.ByteBuf;
-import io.netty.buffer.Unpooled;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.LLDictionary;
 import it.cavallium.dbengine.database.LLDictionaryResultType;
@@ -19,25 +17,25 @@ import reactor.core.publisher.Mono;
 */
 public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U, DatabaseStageEntry<U>> {
-private final Serializer<U, ByteBuf> valueSerializer;
+private final Serializer<U, byte[]> valueSerializer;
 protected DatabaseMapDictionary(LLDictionary dictionary,
 byte[] prefixKey,
-SerializerFixedBinaryLength<T, ByteBuf> keySuffixSerializer,
+SerializerFixedBinaryLength<T, byte[]> keySuffixSerializer,
-Serializer<U, ByteBuf> valueSerializer) {
+Serializer<U, byte[]> valueSerializer) {
 super(dictionary, new SubStageGetterSingle<>(valueSerializer), keySuffixSerializer, prefixKey, 0);
 this.valueSerializer = valueSerializer;
 }
 public static <T, U> DatabaseMapDictionary<T, U> simple(LLDictionary dictionary,
-SerializerFixedBinaryLength<T, ByteBuf> keySerializer,
+SerializerFixedBinaryLength<T, byte[]> keySerializer,
-Serializer<U, ByteBuf> valueSerializer) {
+Serializer<U, byte[]> valueSerializer) {
 return new DatabaseMapDictionary<>(dictionary, EMPTY_BYTES, keySerializer, valueSerializer);
 }
 public static <T, U> DatabaseMapDictionary<T, U> tail(LLDictionary dictionary,
-SerializerFixedBinaryLength<T, ByteBuf> keySuffixSerializer,
+SerializerFixedBinaryLength<T, byte[]> keySuffixSerializer,
-Serializer<U, ByteBuf> valueSerializer,
+Serializer<U, byte[]> valueSerializer,
 byte[] prefixKey) {
 return new DatabaseMapDictionary<>(dictionary, prefixKey, keySuffixSerializer, valueSerializer);
 }
@@ -97,7 +95,7 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 @Override
 public Mono<DatabaseStageEntry<U>> at(@Nullable CompositeSnapshot snapshot, T keySuffix) {
 return Mono
-.just(new DatabaseSingle<>(dictionary, toKey(serializeSuffix(keySuffix)), Serializer.noopBytes()))
+.just(new DatabaseSingle<>(dictionary, toKey(serializeSuffix(keySuffix)), Serializer.noop()))
 .map(entry -> new DatabaseSingleMapped<>(entry, valueSerializer));
 }
@@ -168,7 +166,7 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 new DatabaseSingleMapped<>(
 new DatabaseSingle<>(dictionary,
 toKey(stripPrefix(keySuffix)),
-Serializer.noopBytes()),
+Serializer.noop()),
 valueSerializer
 )
 ));
@@ -184,18 +182,11 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 //todo: temporary wrapper. convert the whole class to buffers
 private U deserialize(byte[] bytes) {
-var serialized = Unpooled.wrappedBuffer(bytes);
-return valueSerializer.deserialize(serialized);
+return valueSerializer.deserialize(bytes);
 }
 //todo: temporary wrapper. convert the whole class to buffers
 private byte[] serialize(U bytes) {
-var output = Unpooled.buffer();
-valueSerializer.serialize(bytes, output);
-output.resetReaderIndex();
-int length = output.readableBytes();
-var outputBytes = new byte[length];
-output.getBytes(0, outputBytes, 0, length);
-return outputBytes;
+return valueSerializer.serialize(bytes);
 }
 }
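With both serializer type parameters now fixed to byte[], wiring a map only needs the noop factories touched by this commit. A minimal sketch, assuming dictionary is an existing LLDictionary (for example obtained from LLKeyValueDatabase.getDictionary, as in Example.java):

// Sketch only: dictionary is assumed to be an LLDictionary obtained elsewhere.
// The key length 4 is illustrative, matching the noop(4) calls in Example.java.
SerializerFixedBinaryLength<byte[], byte[]> keySer = SerializerFixedBinaryLength.noop(4);
Serializer<byte[], byte[]> valueSer = Serializer.noop();
DatabaseMapDictionary<byte[], byte[]> map = DatabaseMapDictionary.simple(dictionary, keySer, valueSer);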

DatabaseMapDictionaryDeep.java

@@ -1,7 +1,5 @@
 package it.cavallium.dbengine.database.collections;
-import io.netty.buffer.ByteBuf;
-import io.netty.buffer.Unpooled;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.LLDictionary;
 import it.cavallium.dbengine.database.LLRange;
@@ -21,7 +19,7 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
 public static final byte[] EMPTY_BYTES = new byte[0];
 protected final LLDictionary dictionary;
 protected final SubStageGetter<U, US> subStageGetter;
-protected final SerializerFixedBinaryLength<T, ByteBuf> keySuffixSerializer;
+protected final SerializerFixedBinaryLength<T, byte[]> keySuffixSerializer;
 protected final byte[] keyPrefix;
 protected final int keySuffixLength;
 protected final int keyExtLength;
@@ -87,14 +85,14 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
 public static <T, U> DatabaseMapDictionaryDeep<T, U, DatabaseStageEntry<U>> simple(
 LLDictionary dictionary,
 SubStageGetterSingle<U> subStageGetter,
-SerializerFixedBinaryLength<T, ByteBuf> keySerializer) {
+SerializerFixedBinaryLength<T, byte[]> keySerializer) {
 return new DatabaseMapDictionaryDeep<>(dictionary, subStageGetter, keySerializer, EMPTY_BYTES, 0);
 }
 public static <T, U, US extends DatabaseStage<U>> DatabaseMapDictionaryDeep<T, U, US> deepTail(
 LLDictionary dictionary,
 SubStageGetter<U, US> subStageGetter,
-SerializerFixedBinaryLength<T, ByteBuf> keySerializer,
+SerializerFixedBinaryLength<T, byte[]> keySerializer,
 int keyExtLength) {
 return new DatabaseMapDictionaryDeep<>(dictionary, subStageGetter, keySerializer, EMPTY_BYTES, keyExtLength);
 }
@@ -102,7 +100,7 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
 public static <T, U, US extends DatabaseStage<U>> DatabaseMapDictionaryDeep<T, U, US> deepIntermediate(
 LLDictionary dictionary,
 SubStageGetter<U, US> subStageGetter,
-SerializerFixedBinaryLength<T, ByteBuf> keySuffixSerializer,
+SerializerFixedBinaryLength<T, byte[]> keySuffixSerializer,
 byte[] prefixKey,
 int keyExtLength) {
 return new DatabaseMapDictionaryDeep<>(dictionary, subStageGetter, keySuffixSerializer, prefixKey, keyExtLength);
@@ -110,7 +108,7 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
 protected DatabaseMapDictionaryDeep(LLDictionary dictionary,
 SubStageGetter<U, US> subStageGetter,
-SerializerFixedBinaryLength<T, ByteBuf> keySuffixSerializer,
+SerializerFixedBinaryLength<T, byte[]> keySuffixSerializer,
 byte[] prefixKey,
 int keyExtLength) {
 this.dictionary = dictionary;
@@ -230,16 +228,11 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
 //todo: temporary wrapper. convert the whole class to buffers
 protected T deserializeSuffix(byte[] keySuffix) {
-var serialized = Unpooled.wrappedBuffer(keySuffix);
-return keySuffixSerializer.deserialize(serialized);
+return keySuffixSerializer.deserialize(keySuffix);
 }
 //todo: temporary wrapper. convert the whole class to buffers
 protected byte[] serializeSuffix(T keySuffix) {
-var output = Unpooled.buffer(keySuffixLength, keySuffixLength);
-var outputBytes = new byte[keySuffixLength];
-keySuffixSerializer.serialize(keySuffix, output);
-output.getBytes(0, outputBytes, 0, keySuffixLength);
-return outputBytes;
+return keySuffixSerializer.serialize(keySuffix);
 }
 }

DatabaseSingle.java

@@ -1,7 +1,5 @@
 package it.cavallium.dbengine.database.collections;
-import io.netty.buffer.ByteBuf;
-import io.netty.buffer.Unpooled;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.LLDictionary;
 import it.cavallium.dbengine.database.LLDictionaryResultType;
@@ -14,9 +12,9 @@ public class DatabaseSingle<U> implements DatabaseStageEntry<U> {
 private final LLDictionary dictionary;
 private final byte[] key;
-private final Serializer<U, ByteBuf> serializer;
+private final Serializer<U, byte[]> serializer;
-public DatabaseSingle(LLDictionary dictionary, byte[] key, Serializer<U, ByteBuf> serializer) {
+public DatabaseSingle(LLDictionary dictionary, byte[] key, Serializer<U, byte[]> serializer) {
 this.dictionary = dictionary;
 this.key = key;
 this.serializer = serializer;
@@ -60,18 +58,11 @@ public class DatabaseSingle<U> implements DatabaseStageEntry<U> {
 //todo: temporary wrapper. convert the whole class to buffers
 private U deserialize(byte[] bytes) {
-var serialized = Unpooled.wrappedBuffer(bytes);
-return serializer.deserialize(serialized);
+return serializer.deserialize(bytes);
 }
 //todo: temporary wrapper. convert the whole class to buffers
 private byte[] serialize(U bytes) {
-var output = Unpooled.buffer();
-serializer.serialize(bytes, output);
-output.resetReaderIndex();
-int length = output.readableBytes();
-var outputBytes = new byte[length];
-output.getBytes(0, outputBytes, 0, length);
-return outputBytes;
+return serializer.serialize(bytes);
 }
 }

DatabaseSingleMapped.java

@@ -1,7 +1,5 @@
 package it.cavallium.dbengine.database.collections;
-import io.netty.buffer.ByteBuf;
-import io.netty.buffer.Unpooled;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import org.jetbrains.annotations.Nullable;
 import reactor.core.publisher.Mono;
@@ -9,9 +7,9 @@ import reactor.core.publisher.Mono;
 public class DatabaseSingleMapped<U> implements DatabaseStageEntry<U> {
 private final DatabaseSingle<byte[]> serializedSingle;
-private final Serializer<U, ByteBuf> serializer;
+private final Serializer<U, byte[]> serializer;
-public DatabaseSingleMapped(DatabaseSingle<byte[]> serializedSingle, Serializer<U, ByteBuf> serializer) {
+public DatabaseSingleMapped(DatabaseSingle<byte[]> serializedSingle, Serializer<U, byte[]> serializer) {
 this.serializedSingle = serializedSingle;
 this.serializer = serializer;
 }
@@ -78,18 +76,11 @@ public class DatabaseSingleMapped<U> implements DatabaseStageEntry<U> {
 //todo: temporary wrapper. convert the whole class to buffers
 private U deserialize(byte[] bytes) {
-var serialized = Unpooled.wrappedBuffer(bytes);
-return serializer.deserialize(serialized);
+return serializer.deserialize(bytes);
 }
 //todo: temporary wrapper. convert the whole class to buffers
 private byte[] serialize(U bytes) {
-var output = Unpooled.buffer();
-serializer.serialize(bytes, output);
-output.resetReaderIndex();
-int length = output.readableBytes();
-var outputBytes = new byte[length];
-output.getBytes(0, outputBytes, 0, length);
-return outputBytes;
+return serializer.serialize(bytes);
 }
 }

Serializer.java

@@ -1,40 +1,21 @@
 package it.cavallium.dbengine.database.collections;
-import io.netty.buffer.ByteBuf;
 public interface Serializer<A, B> {
 A deserialize(B serialized);
-void serialize(A deserialized, B output);
+B serialize(A deserialized);
-static Serializer<ByteBuf, ByteBuf> noop() {
+static Serializer<byte[], byte[]> noop() {
 return new Serializer<>() {
 @Override
-public ByteBuf deserialize(ByteBuf serialized) {
+public byte[] deserialize(byte[] serialized) {
-return serialized.readSlice(serialized.readableBytes());
+return serialized;
 }
 @Override
-public void serialize(ByteBuf deserialized, ByteBuf output) {
+public byte[] serialize(byte[] deserialized) {
-deserialized.resetReaderIndex();
-output.writeBytes(deserialized, deserialized.readableBytes());
+return deserialized;
-}
-};
-}
-static Serializer<byte[], ByteBuf> noopBytes() {
-return new Serializer<>() {
-@Override
-public byte[] deserialize(ByteBuf serialized) {
-var result = new byte[serialized.readableBytes()];
-serialized.readBytes(result);
-return result;
-}
-@Override
-public void serialize(byte[] deserialized, ByteBuf output) {
-output.writeBytes(deserialized);
 }
 };
 }
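The contract above now returns the serialized bytes instead of writing into a caller-supplied ByteBuf. A minimal sketch of a custom implementation under the new signature (this UTF-8 String serializer is illustrative only and not part of the commit):

import java.nio.charset.StandardCharsets;

// Hypothetical example of the new byte[]-based Serializer contract.
class Utf8StringSerializer implements Serializer<String, byte[]> {

	@Override
	public String deserialize(byte[] serialized) {
		// Decode the stored bytes back into a String
		return new String(serialized, StandardCharsets.UTF_8);
	}

	@Override
	public byte[] serialize(String deserialized) {
		// Return the encoded bytes directly; there is no output buffer parameter anymore
		return deserialized.getBytes(StandardCharsets.UTF_8);
	}
}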

SerializerFixedBinaryLength.java

@@ -1,21 +1,21 @@
 package it.cavallium.dbengine.database.collections;
-import io.netty.buffer.ByteBuf;
 public interface SerializerFixedBinaryLength<A, B> extends Serializer<A, B> {
 int getSerializedBinaryLength();
-static SerializerFixedBinaryLength<ByteBuf, ByteBuf> noop(int length) {
+static SerializerFixedBinaryLength<byte[], byte[]> noop(int length) {
 return new SerializerFixedBinaryLength<>() {
 @Override
-public ByteBuf deserialize(ByteBuf serialized) {
+public byte[] deserialize(byte[] serialized) {
-return serialized.readSlice(length);
+assert serialized.length == getSerializedBinaryLength();
+return serialized;
 }
 @Override
-public void serialize(ByteBuf deserialized, ByteBuf output) {
+public byte[] serialize(byte[] deserialized) {
-output.writeBytes(deserialized.slice(), length);
+assert deserialized.length == getSerializedBinaryLength();
+return deserialized;
 }
 @Override
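The same pattern extends to typed fixed-length keys; a sketch of a 4-byte integer key serializer against the new interface (illustrative only, using Guava's Ints, which Example.java already imports):

import com.google.common.primitives.Ints;

// Hypothetical example: fixed-length int keys serialized to exactly 4 bytes.
class IntKeySerializer implements SerializerFixedBinaryLength<Integer, byte[]> {

	@Override
	public int getSerializedBinaryLength() {
		return Integer.BYTES; // always 4
	}

	@Override
	public Integer deserialize(byte[] serialized) {
		assert serialized.length == getSerializedBinaryLength();
		return Ints.fromByteArray(serialized); // big-endian, 4 bytes
	}

	@Override
	public byte[] serialize(Integer deserialized) {
		return Ints.toByteArray(deserialized); // always returns 4 bytes
	}
}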

SubStageGetterMap.java

@@ -1,6 +1,5 @@
 package it.cavallium.dbengine.database.collections;
-import io.netty.buffer.ByteBuf;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.LLDictionary;
 import java.util.Map;
@@ -10,11 +9,11 @@
 public class SubStageGetterMap<T, U> implements SubStageGetter<Map<T, U>, DatabaseStageEntry<Map<T, U>>> {
-private final SerializerFixedBinaryLength<T, ByteBuf> keySerializer;
+private final SerializerFixedBinaryLength<T, byte[]> keySerializer;
-private final Serializer<U, ByteBuf> valueSerializer;
+private final Serializer<U, byte[]> valueSerializer;
-public SubStageGetterMap(SerializerFixedBinaryLength<T, ByteBuf> keySerializer,
+public SubStageGetterMap(SerializerFixedBinaryLength<T, byte[]> keySerializer,
-Serializer<U, ByteBuf> valueSerializer) {
+Serializer<U, byte[]> valueSerializer) {
 this.keySerializer = keySerializer;
 this.valueSerializer = valueSerializer;
 }

SubStageGetterMapDeep.java

@@ -1,6 +1,5 @@
 package it.cavallium.dbengine.database.collections;
-import io.netty.buffer.ByteBuf;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.LLDictionary;
 import java.util.Map;
@@ -12,11 +11,11 @@ public class SubStageGetterMapDeep<T, U, US extends DatabaseStage<U>> implements
 SubStageGetter<Map<T, U>, DatabaseStageEntry<Map<T, U>>> {
 private final SubStageGetter<U, US> subStageGetter;
-private final SerializerFixedBinaryLength<T, ByteBuf> keySerializer;
+private final SerializerFixedBinaryLength<T, byte[]> keySerializer;
 private final int keyExtLength;
 public SubStageGetterMapDeep(SubStageGetter<U, US> subStageGetter,
-SerializerFixedBinaryLength<T, ByteBuf> keySerializer,
+SerializerFixedBinaryLength<T, byte[]> keySerializer,
 int keyExtLength) {
 this.subStageGetter = subStageGetter;
 this.keySerializer = keySerializer;

SubStageGetterSingle.java

@@ -1,7 +1,5 @@
 package it.cavallium.dbengine.database.collections;
-import io.netty.buffer.ByteBuf;
-import io.netty.buffer.Unpooled;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.LLDictionary;
 import java.util.Arrays;
@@ -11,9 +9,9 @@ import reactor.core.publisher.Mono;
 public class SubStageGetterSingle<T> implements SubStageGetter<T, DatabaseStageEntry<T>> {
-private final Serializer<T, ByteBuf> serializer;
+private final Serializer<T, byte[]> serializer;
-public SubStageGetterSingle(Serializer<T, ByteBuf> serializer) {
+public SubStageGetterSingle(Serializer<T, byte[]> serializer) {
 this.serializer = serializer;
 }
@@ -32,18 +30,11 @@ public class SubStageGetterSingle<T> implements SubStageGetter<T, DatabaseStageE
 //todo: temporary wrapper. convert the whole class to buffers
 private T deserialize(byte[] bytes) {
-var serialized = Unpooled.wrappedBuffer(bytes);
-return serializer.deserialize(serialized);
+return serializer.deserialize(bytes);
 }
 //todo: temporary wrapper. convert the whole class to buffers
 private byte[] serialize(T bytes) {
-var output = Unpooled.buffer();
-serializer.serialize(bytes, output);
-output.resetReaderIndex();
-int length = output.readableBytes();
-var outputBytes = new byte[length];
-output.getBytes(0, outputBytes, 0, length);
-return outputBytes;
+return serializer.serialize(bytes);
 }
 }

SubStageGetterSingleBytes.java

@@ -3,6 +3,6 @@ package it.cavallium.dbengine.database.collections;
 public class SubStageGetterSingleBytes extends SubStageGetterSingle<byte[]> {
 public SubStageGetterSingleBytes() {
-super(Serializer.noopBytes());
+super(Serializer.noop());
 }
 }