Update Example.java, DatabaseMapDictionary.java, and 9 more files...
commit 023bc3b0dd
parent 1b9b2b187a
Example.java
@@ -1,8 +1,6 @@
 package it.cavallium.dbengine.client;
 
 import com.google.common.primitives.Ints;
-import io.netty.buffer.ByteBuf;
-import io.netty.buffer.Unpooled;
 import it.cavallium.dbengine.database.Column;
 import it.cavallium.dbengine.database.LLKeyValueDatabase;
 import it.cavallium.dbengine.database.collections.DatabaseMapDictionary;
@@ -65,7 +63,6 @@ public class Example {
 var ser = SerializerFixedBinaryLength.noop(4);
 var itemKey = new byte[]{0, 1, 2, 3};
 var newValue = new byte[]{4, 5, 6, 7};
-var itemKeyBuffer = Unpooled.wrappedBuffer(itemKey);
 return test("MapDictionaryDeep::at::put (same key, same value, " + batchSize + " times)",
 tempDb()
 .flatMap(db -> db.getDictionary("testmap").map(dict -> Tuples.of(db, dict)))
@@ -76,7 +73,7 @@ public class Example {
 if (printPreviousValue)
 System.out.println("Setting new value at key " + Arrays.toString(itemKey) + ": " + Arrays.toString(newValue));
 })
-.then(tuple.getT2().at(null, itemKeyBuffer))
+.then(tuple.getT2().at(null, itemKey))
 .flatMap(handle -> handle.setAndGetPrevious(newValue))
 .doOnSuccess(oldValue -> {
 if (printPreviousValue)
@@ -93,7 +90,6 @@ public class Example {
 var ser = SerializerFixedBinaryLength.noop(4);
 var itemKey = new byte[]{0, 1, 2, 3};
 var newValue = new byte[]{4, 5, 6, 7};
-var itemKeyBuffer = Unpooled.wrappedBuffer(itemKey);
 return test("MapDictionaryDeep::putValueAndGetPrevious (same key, same value, " + batchSize + " times)",
 tempDb()
 .flatMap(db -> db.getDictionary("testmap").map(dict -> Tuples.of(db, dict)))
@@ -104,7 +100,7 @@ public class Example {
 if (printPreviousValue)
 System.out.println("Setting new value at key " + Arrays.toString(itemKey) + ": " + Arrays.toString(newValue));
 })
-.then(tuple.getT2().putValueAndGetPrevious(itemKeyBuffer, newValue))
+.then(tuple.getT2().putValueAndGetPrevious(itemKey, newValue))
 .doOnSuccess(oldValue -> {
 if (printPreviousValue)
 System.out.println("Old value: " + (oldValue == null ? "None" : Arrays.toString(oldValue)));
@@ -120,7 +116,6 @@ public class Example {
 var ser = SerializerFixedBinaryLength.noop(4);
 var itemKey = new byte[]{0, 1, 2, 3};
 var newValue = new byte[]{4, 5, 6, 7};
-var itemKeyBuffer = Unpooled.wrappedBuffer(itemKey);
 return test("MapDictionaryDeep::putValue (same key, same value, " + batchSize + " times)",
 tempDb()
 .flatMap(db -> db.getDictionary("testmap").map(dict -> Tuples.of(db, dict)))
@@ -131,7 +126,7 @@ public class Example {
 if (printPreviousValue)
 System.out.println("Setting new value at key " + Arrays.toString(itemKey) + ": " + Arrays.toString(newValue));
 })
-.then(tuple.getT2().putValue(itemKeyBuffer, newValue))
+.then(tuple.getT2().putValue(itemKey, newValue))
 ))
 .then(),
 numRepeats,
@@ -141,9 +136,9 @@ public class Example {
 private static Mono<Void> testPutMulti() {
 var ssg = new SubStageGetterSingleBytes();
 var ser = SerializerFixedBinaryLength.noop(4);
-HashMap<ByteBuf, byte[]> keysToPut = new HashMap<>();
+HashMap<byte[], byte[]> keysToPut = new HashMap<>();
 for (int i = 0; i < batchSize; i++) {
-keysToPut.put(Unpooled.wrappedBuffer(Ints.toByteArray(i * 3)), Ints.toByteArray(i * 11));
+keysToPut.put(Ints.toByteArray(i * 3), Ints.toByteArray(i * 11));
 }
 var putMultiFlux = Flux.fromIterable(keysToPut.entrySet());
 return test("MapDictionaryDeep::putMulti (batch of " + batchSize + " entries)",
@@ -159,10 +154,9 @@ public class Example {
 
 private static Mono<Void> rangeTestAtPut() {
 var ser = SerializerFixedBinaryLength.noop(4);
-var vser = Serializer.noopBytes();
+var vser = Serializer.noop();
 var itemKey = new byte[]{0, 1, 2, 3};
 var newValue = new byte[]{4, 5, 6, 7};
-var itemKeyBuffer = Unpooled.wrappedBuffer(itemKey);
 return test("MapDictionary::at::put (same key, same value, " + batchSize + " times)",
 tempDb()
 .flatMap(db -> db.getDictionary("testmap").map(dict -> Tuples.of(db, dict)))
@@ -173,7 +167,7 @@ public class Example {
 if (printPreviousValue)
 System.out.println("Setting new value at key " + Arrays.toString(itemKey) + ": " + Arrays.toString(newValue));
 })
-.then(tuple.getT2().at(null, itemKeyBuffer))
+.then(tuple.getT2().at(null, itemKey))
 .flatMap(handle -> handle.setAndGetPrevious(newValue))
 .doOnSuccess(oldValue -> {
 if (printPreviousValue)
@@ -187,10 +181,9 @@ public class Example {
 
 private static Mono<Void> rangeTestPutValueAndGetPrevious() {
 var ser = SerializerFixedBinaryLength.noop(4);
-var vser = Serializer.noopBytes();
+var vser = Serializer.noop();
 var itemKey = new byte[]{0, 1, 2, 3};
 var newValue = new byte[]{4, 5, 6, 7};
-var itemKeyBuffer = Unpooled.wrappedBuffer(itemKey);
 return test("MapDictionary::putValueAndGetPrevious (same key, same value, " + batchSize + " times)",
 tempDb()
 .flatMap(db -> db.getDictionary("testmap").map(dict -> Tuples.of(db, dict)))
@@ -201,7 +194,7 @@ public class Example {
 if (printPreviousValue)
 System.out.println("Setting new value at key " + Arrays.toString(itemKey) + ": " + Arrays.toString(newValue));
 })
-.then(tuple.getT2().putValueAndGetPrevious(itemKeyBuffer, newValue))
+.then(tuple.getT2().putValueAndGetPrevious(itemKey, newValue))
 .doOnSuccess(oldValue -> {
 if (printPreviousValue)
 System.out.println("Old value: " + (oldValue == null ? "None" : Arrays.toString(oldValue)));
@@ -214,10 +207,9 @@ public class Example {
 
 private static Mono<Void> rangeTestPutValue() {
 var ser = SerializerFixedBinaryLength.noop(4);
-var vser = Serializer.noopBytes();
+var vser = Serializer.noop();
 var itemKey = new byte[]{0, 1, 2, 3};
 var newValue = new byte[]{4, 5, 6, 7};
-var itemKeyBuffer = Unpooled.wrappedBuffer(itemKey);
 return test("MapDictionary::putValue (same key, same value, " + batchSize + " times)",
 tempDb()
 .flatMap(db -> db.getDictionary("testmap").map(dict -> Tuples.of(db, dict)))
@@ -228,7 +220,7 @@ public class Example {
 if (printPreviousValue)
 System.out.println("Setting new value at key " + Arrays.toString(itemKey) + ": " + Arrays.toString(newValue));
 })
-.then(tuple.getT2().putValue(itemKeyBuffer, newValue))
+.then(tuple.getT2().putValue(itemKey, newValue))
 ))
 .then(),
 numRepeats,
@@ -237,10 +229,10 @@ public class Example {
 
 private static Mono<Void> rangeTestPutMulti() {
 var ser = SerializerFixedBinaryLength.noop(4);
-var vser = Serializer.noopBytes();
-HashMap<ByteBuf, byte[]> keysToPut = new HashMap<>();
+var vser = Serializer.noop();
+HashMap<byte[], byte[]> keysToPut = new HashMap<>();
 for (int i = 0; i < batchSize; i++) {
-keysToPut.put(Unpooled.wrappedBuffer(Ints.toByteArray(i * 3)), Ints.toByteArray(i * 11));
+keysToPut.put(Ints.toByteArray(i * 3), Ints.toByteArray(i * 11));
 }
 var putMultiFlux = Flux.fromIterable(keysToPut.entrySet());
 return test("MapDictionary::putMulti (batch of " + batchSize + " entries)",
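Note on the putMulti tests above: switching the key map from HashMap<ByteBuf, byte[]> to HashMap<byte[], byte[]> also changes key semantics, because byte[] does not override equals/hashCode and is therefore compared by identity. That is harmless here (every Ints.toByteArray(i * 3) key is distinct and the map is only iterated into a Flux), but logically equal keys would no longer collapse. A minimal sketch of the difference; the class name is illustrative and not part of the commit:

import com.google.common.primitives.Ints;
import java.util.Arrays;
import java.util.HashMap;
import java.util.TreeMap;

public class ByteArrayKeyNote {
	public static void main(String[] args) {
		// byte[] uses identity-based equals/hashCode, so two equal arrays are two entries.
		HashMap<byte[], byte[]> identityKeyed = new HashMap<>();
		identityKeyed.put(Ints.toByteArray(42), Ints.toByteArray(1));
		identityKeyed.put(Ints.toByteArray(42), Ints.toByteArray(2));
		System.out.println(identityKeyed.size()); // prints 2, not 1

		// A comparator-based map collapses logically equal keys if that is ever needed.
		TreeMap<byte[], byte[]> contentKeyed = new TreeMap<>(Arrays::compare);
		contentKeyed.put(Ints.toByteArray(42), Ints.toByteArray(1));
		contentKeyed.put(Ints.toByteArray(42), Ints.toByteArray(2));
		System.out.println(contentKeyed.size()); // prints 1
	}
}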
DatabaseMapDictionary.java
@@ -1,7 +1,5 @@
 package it.cavallium.dbengine.database.collections;
 
-import io.netty.buffer.ByteBuf;
-import io.netty.buffer.Unpooled;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.LLDictionary;
 import it.cavallium.dbengine.database.LLDictionaryResultType;
@@ -19,25 +17,25 @@ import reactor.core.publisher.Mono;
 */
 public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U, DatabaseStageEntry<U>> {
 
-private final Serializer<U, ByteBuf> valueSerializer;
+private final Serializer<U, byte[]> valueSerializer;
 
 protected DatabaseMapDictionary(LLDictionary dictionary,
 byte[] prefixKey,
-SerializerFixedBinaryLength<T, ByteBuf> keySuffixSerializer,
-Serializer<U, ByteBuf> valueSerializer) {
+SerializerFixedBinaryLength<T, byte[]> keySuffixSerializer,
+Serializer<U, byte[]> valueSerializer) {
 super(dictionary, new SubStageGetterSingle<>(valueSerializer), keySuffixSerializer, prefixKey, 0);
 this.valueSerializer = valueSerializer;
 }
 
 public static <T, U> DatabaseMapDictionary<T, U> simple(LLDictionary dictionary,
-SerializerFixedBinaryLength<T, ByteBuf> keySerializer,
-Serializer<U, ByteBuf> valueSerializer) {
+SerializerFixedBinaryLength<T, byte[]> keySerializer,
+Serializer<U, byte[]> valueSerializer) {
 return new DatabaseMapDictionary<>(dictionary, EMPTY_BYTES, keySerializer, valueSerializer);
 }
 
 public static <T, U> DatabaseMapDictionary<T, U> tail(LLDictionary dictionary,
-SerializerFixedBinaryLength<T, ByteBuf> keySuffixSerializer,
-Serializer<U, ByteBuf> valueSerializer,
+SerializerFixedBinaryLength<T, byte[]> keySuffixSerializer,
+Serializer<U, byte[]> valueSerializer,
 byte[] prefixKey) {
 return new DatabaseMapDictionary<>(dictionary, prefixKey, keySuffixSerializer, valueSerializer);
 }
@@ -97,7 +95,7 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 @Override
 public Mono<DatabaseStageEntry<U>> at(@Nullable CompositeSnapshot snapshot, T keySuffix) {
 return Mono
-.just(new DatabaseSingle<>(dictionary, toKey(serializeSuffix(keySuffix)), Serializer.noopBytes()))
+.just(new DatabaseSingle<>(dictionary, toKey(serializeSuffix(keySuffix)), Serializer.noop()))
 .map(entry -> new DatabaseSingleMapped<>(entry, valueSerializer));
 }
 
@@ -168,7 +166,7 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 new DatabaseSingleMapped<>(
 new DatabaseSingle<>(dictionary,
 toKey(stripPrefix(keySuffix)),
-Serializer.noopBytes()),
+Serializer.noop()),
 valueSerializer
 )
 ));
@@ -184,18 +182,11 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 
 //todo: temporary wrapper. convert the whole class to buffers
 private U deserialize(byte[] bytes) {
-var serialized = Unpooled.wrappedBuffer(bytes);
-return valueSerializer.deserialize(serialized);
+return valueSerializer.deserialize(bytes);
 }
 
 //todo: temporary wrapper. convert the whole class to buffers
 private byte[] serialize(U bytes) {
-var output = Unpooled.buffer();
-valueSerializer.serialize(bytes, output);
-output.resetReaderIndex();
-int length = output.readableBytes();
-var outputBytes = new byte[length];
-output.getBytes(0, outputBytes, 0, length);
-return outputBytes;
+return valueSerializer.serialize(bytes);
 }
 }
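With the constructors above now taking SerializerFixedBinaryLength<T, byte[]> and Serializer<U, byte[]>, wiring up a map stage no longer involves ByteBuf at all. A hypothetical factory method as a sketch (the class and method names are illustrative; only the simple(...) and noop(...) signatures come from this commit):

import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.collections.DatabaseMapDictionary;
import it.cavallium.dbengine.database.collections.Serializer;
import it.cavallium.dbengine.database.collections.SerializerFixedBinaryLength;

final class RawMapFactory {

	// Builds a map stage whose keys are fixed 4-byte arrays and whose values are
	// raw byte arrays, using the byte[]-based no-op serializers from this commit.
	static DatabaseMapDictionary<byte[], byte[]> rawMap(LLDictionary dictionary) {
		SerializerFixedBinaryLength<byte[], byte[]> keySerializer = SerializerFixedBinaryLength.noop(4);
		Serializer<byte[], byte[]> valueSerializer = Serializer.noop();
		return DatabaseMapDictionary.simple(dictionary, keySerializer, valueSerializer);
	}
}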
DatabaseMapDictionaryDeep.java
@@ -1,7 +1,5 @@
 package it.cavallium.dbengine.database.collections;
 
-import io.netty.buffer.ByteBuf;
-import io.netty.buffer.Unpooled;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.LLDictionary;
 import it.cavallium.dbengine.database.LLRange;
@@ -21,7 +19,7 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
 public static final byte[] EMPTY_BYTES = new byte[0];
 protected final LLDictionary dictionary;
 protected final SubStageGetter<U, US> subStageGetter;
-protected final SerializerFixedBinaryLength<T, ByteBuf> keySuffixSerializer;
+protected final SerializerFixedBinaryLength<T, byte[]> keySuffixSerializer;
 protected final byte[] keyPrefix;
 protected final int keySuffixLength;
 protected final int keyExtLength;
@@ -87,14 +85,14 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
 public static <T, U> DatabaseMapDictionaryDeep<T, U, DatabaseStageEntry<U>> simple(
 LLDictionary dictionary,
 SubStageGetterSingle<U> subStageGetter,
-SerializerFixedBinaryLength<T, ByteBuf> keySerializer) {
+SerializerFixedBinaryLength<T, byte[]> keySerializer) {
 return new DatabaseMapDictionaryDeep<>(dictionary, subStageGetter, keySerializer, EMPTY_BYTES, 0);
 }
 
 public static <T, U, US extends DatabaseStage<U>> DatabaseMapDictionaryDeep<T, U, US> deepTail(
 LLDictionary dictionary,
 SubStageGetter<U, US> subStageGetter,
-SerializerFixedBinaryLength<T, ByteBuf> keySerializer,
+SerializerFixedBinaryLength<T, byte[]> keySerializer,
 int keyExtLength) {
 return new DatabaseMapDictionaryDeep<>(dictionary, subStageGetter, keySerializer, EMPTY_BYTES, keyExtLength);
 }
@@ -102,7 +100,7 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
 public static <T, U, US extends DatabaseStage<U>> DatabaseMapDictionaryDeep<T, U, US> deepIntermediate(
 LLDictionary dictionary,
 SubStageGetter<U, US> subStageGetter,
-SerializerFixedBinaryLength<T, ByteBuf> keySuffixSerializer,
+SerializerFixedBinaryLength<T, byte[]> keySuffixSerializer,
 byte[] prefixKey,
 int keyExtLength) {
 return new DatabaseMapDictionaryDeep<>(dictionary, subStageGetter, keySuffixSerializer, prefixKey, keyExtLength);
@@ -110,7 +108,7 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
 
 protected DatabaseMapDictionaryDeep(LLDictionary dictionary,
 SubStageGetter<U, US> subStageGetter,
-SerializerFixedBinaryLength<T, ByteBuf> keySuffixSerializer,
+SerializerFixedBinaryLength<T, byte[]> keySuffixSerializer,
 byte[] prefixKey,
 int keyExtLength) {
 this.dictionary = dictionary;
@@ -230,16 +228,11 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
 
 //todo: temporary wrapper. convert the whole class to buffers
 protected T deserializeSuffix(byte[] keySuffix) {
-var serialized = Unpooled.wrappedBuffer(keySuffix);
-return keySuffixSerializer.deserialize(serialized);
+return keySuffixSerializer.deserialize(keySuffix);
 }
 
 //todo: temporary wrapper. convert the whole class to buffers
 protected byte[] serializeSuffix(T keySuffix) {
-var output = Unpooled.buffer(keySuffixLength, keySuffixLength);
-var outputBytes = new byte[keySuffixLength];
-keySuffixSerializer.serialize(keySuffix, output);
-output.getBytes(0, outputBytes, 0, keySuffixLength);
-return outputBytes;
+return keySuffixSerializer.serialize(keySuffix);
 }
 }
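One behavioural nuance of the simplified serializeSuffix above: the old version wrote into Unpooled.buffer(keySuffixLength, keySuffixLength), so a suffix serializer that produced the wrong number of bytes failed immediately, while the new version returns whatever the serializer hands back. If that guarantee matters, a small helper along these lines could restore it (a sketch only, not part of the commit; the class and method names are hypothetical):

import it.cavallium.dbengine.database.collections.SerializerFixedBinaryLength;

final class FixedLengthChecks {

	// Hypothetical helper: serialize a value and verify it matches the declared fixed length,
	// mirroring the guarantee the old fixed-capacity Unpooled.buffer gave implicitly.
	static <T> byte[] serializeExact(SerializerFixedBinaryLength<T, byte[]> serializer, T value) {
		byte[] data = serializer.serialize(value);
		if (data.length != serializer.getSerializedBinaryLength()) {
			throw new IllegalStateException("Serialized " + data.length + " bytes, expected "
					+ serializer.getSerializedBinaryLength());
		}
		return data;
	}
}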
DatabaseSingle.java
@@ -1,7 +1,5 @@
 package it.cavallium.dbengine.database.collections;
 
-import io.netty.buffer.ByteBuf;
-import io.netty.buffer.Unpooled;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.LLDictionary;
 import it.cavallium.dbengine.database.LLDictionaryResultType;
@@ -14,9 +12,9 @@ public class DatabaseSingle<U> implements DatabaseStageEntry<U> {
 
 private final LLDictionary dictionary;
 private final byte[] key;
-private final Serializer<U, ByteBuf> serializer;
+private final Serializer<U, byte[]> serializer;
 
-public DatabaseSingle(LLDictionary dictionary, byte[] key, Serializer<U, ByteBuf> serializer) {
+public DatabaseSingle(LLDictionary dictionary, byte[] key, Serializer<U, byte[]> serializer) {
 this.dictionary = dictionary;
 this.key = key;
 this.serializer = serializer;
@@ -60,18 +58,11 @@ public class DatabaseSingle<U> implements DatabaseStageEntry<U> {
 
 //todo: temporary wrapper. convert the whole class to buffers
 private U deserialize(byte[] bytes) {
-var serialized = Unpooled.wrappedBuffer(bytes);
-return serializer.deserialize(serialized);
+return serializer.deserialize(bytes);
 }
 
 //todo: temporary wrapper. convert the whole class to buffers
 private byte[] serialize(U bytes) {
-var output = Unpooled.buffer();
-serializer.serialize(bytes, output);
-output.resetReaderIndex();
-int length = output.readableBytes();
-var outputBytes = new byte[length];
-output.getBytes(0, outputBytes, 0, length);
-return outputBytes;
+return serializer.serialize(bytes);
 }
 }
DatabaseSingleMapped.java
@@ -1,7 +1,5 @@
 package it.cavallium.dbengine.database.collections;
 
-import io.netty.buffer.ByteBuf;
-import io.netty.buffer.Unpooled;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import org.jetbrains.annotations.Nullable;
 import reactor.core.publisher.Mono;
@@ -9,9 +7,9 @@ import reactor.core.publisher.Mono;
 public class DatabaseSingleMapped<U> implements DatabaseStageEntry<U> {
 
 private final DatabaseSingle<byte[]> serializedSingle;
-private final Serializer<U, ByteBuf> serializer;
+private final Serializer<U, byte[]> serializer;
 
-public DatabaseSingleMapped(DatabaseSingle<byte[]> serializedSingle, Serializer<U, ByteBuf> serializer) {
+public DatabaseSingleMapped(DatabaseSingle<byte[]> serializedSingle, Serializer<U, byte[]> serializer) {
 this.serializedSingle = serializedSingle;
 this.serializer = serializer;
 }
@@ -78,18 +76,11 @@ public class DatabaseSingleMapped<U> implements DatabaseStageEntry<U> {
 
 //todo: temporary wrapper. convert the whole class to buffers
 private U deserialize(byte[] bytes) {
-var serialized = Unpooled.wrappedBuffer(bytes);
-return serializer.deserialize(serialized);
+return serializer.deserialize(bytes);
 }
 
 //todo: temporary wrapper. convert the whole class to buffers
 private byte[] serialize(U bytes) {
-var output = Unpooled.buffer();
-serializer.serialize(bytes, output);
-output.resetReaderIndex();
-int length = output.readableBytes();
-var outputBytes = new byte[length];
-output.getBytes(0, outputBytes, 0, length);
-return outputBytes;
+return serializer.serialize(bytes);
 }
 }
Serializer.java
@@ -1,40 +1,21 @@
 package it.cavallium.dbengine.database.collections;
 
-import io.netty.buffer.ByteBuf;
-
 public interface Serializer<A, B> {
 
 A deserialize(B serialized);
 
-void serialize(A deserialized, B output);
+B serialize(A deserialized);
 
-static Serializer<ByteBuf, ByteBuf> noop() {
+static Serializer<byte[], byte[]> noop() {
 return new Serializer<>() {
 @Override
-public ByteBuf deserialize(ByteBuf serialized) {
-return serialized.readSlice(serialized.readableBytes());
+public byte[] deserialize(byte[] serialized) {
+return serialized;
 }
 
 @Override
-public void serialize(ByteBuf deserialized, ByteBuf output) {
-deserialized.resetReaderIndex();
-output.writeBytes(deserialized, deserialized.readableBytes());
-}
-};
-}
-
-static Serializer<byte[], ByteBuf> noopBytes() {
-return new Serializer<>() {
-@Override
-public byte[] deserialize(ByteBuf serialized) {
-var result = new byte[serialized.readableBytes()];
-serialized.readBytes(result);
-return result;
-}
-
-@Override
-public void serialize(byte[] deserialized, ByteBuf output) {
-output.writeBytes(deserialized);
+public byte[] serialize(byte[] deserialized) {
+return deserialized;
 }
 };
 }
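Under the reworked interface above, serialize now returns a byte[] instead of writing into a caller-supplied ByteBuf, so a custom serializer reduces to a pair of pure functions. A hypothetical UTF-8 string serializer against the new contract (not part of the commit):

import it.cavallium.dbengine.database.collections.Serializer;
import java.nio.charset.StandardCharsets;

final class Utf8Serializer implements Serializer<String, byte[]> {

	@Override
	public String deserialize(byte[] serialized) {
		// Decode the stored bytes back into a String.
		return new String(serialized, StandardCharsets.UTF_8);
	}

	@Override
	public byte[] serialize(String deserialized) {
		// Encode the String; the caller owns the returned array.
		return deserialized.getBytes(StandardCharsets.UTF_8);
	}
}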
SerializerFixedBinaryLength.java
@@ -1,21 +1,21 @@
 package it.cavallium.dbengine.database.collections;
 
-import io.netty.buffer.ByteBuf;
-
 public interface SerializerFixedBinaryLength<A, B> extends Serializer<A, B> {
 
 int getSerializedBinaryLength();
 
-static SerializerFixedBinaryLength<ByteBuf, ByteBuf> noop(int length) {
+static SerializerFixedBinaryLength<byte[], byte[]> noop(int length) {
 return new SerializerFixedBinaryLength<>() {
 @Override
-public ByteBuf deserialize(ByteBuf serialized) {
-return serialized.readSlice(length);
+public byte[] deserialize(byte[] serialized) {
+assert serialized.length == getSerializedBinaryLength();
+return serialized;
 }
 
 @Override
-public void serialize(ByteBuf deserialized, ByteBuf output) {
-output.writeBytes(deserialized.slice(), length);
+public byte[] serialize(byte[] deserialized) {
+assert deserialized.length == getSerializedBinaryLength();
+return deserialized;
 }
 
 @Override
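SerializerFixedBinaryLength keeps the same byte[] contract and adds getSerializedBinaryLength(). A hypothetical fixed-length serializer for 4-byte big-endian integers, reusing the Guava Ints helpers already imported in Example.java (not part of the commit):

import com.google.common.primitives.Ints;
import it.cavallium.dbengine.database.collections.SerializerFixedBinaryLength;

final class IntSerializer implements SerializerFixedBinaryLength<Integer, byte[]> {

	@Override
	public Integer deserialize(byte[] serialized) {
		assert serialized.length == getSerializedBinaryLength();
		return Ints.fromByteArray(serialized);
	}

	@Override
	public byte[] serialize(Integer deserialized) {
		// Always exactly 4 bytes, matching getSerializedBinaryLength().
		return Ints.toByteArray(deserialized);
	}

	@Override
	public int getSerializedBinaryLength() {
		return Integer.BYTES;
	}
}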
SubStageGetterMap.java
@@ -1,6 +1,5 @@
 package it.cavallium.dbengine.database.collections;
 
-import io.netty.buffer.ByteBuf;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.LLDictionary;
 import java.util.Map;
@@ -10,11 +9,11 @@ import reactor.core.publisher.Mono;
 
 public class SubStageGetterMap<T, U> implements SubStageGetter<Map<T, U>, DatabaseStageEntry<Map<T, U>>> {
 
-private final SerializerFixedBinaryLength<T, ByteBuf> keySerializer;
-private final Serializer<U, ByteBuf> valueSerializer;
+private final SerializerFixedBinaryLength<T, byte[]> keySerializer;
+private final Serializer<U, byte[]> valueSerializer;
 
-public SubStageGetterMap(SerializerFixedBinaryLength<T, ByteBuf> keySerializer,
-Serializer<U, ByteBuf> valueSerializer) {
+public SubStageGetterMap(SerializerFixedBinaryLength<T, byte[]> keySerializer,
+Serializer<U, byte[]> valueSerializer) {
 this.keySerializer = keySerializer;
 this.valueSerializer = valueSerializer;
 }
SubStageGetterMapDeep.java
@@ -1,6 +1,5 @@
 package it.cavallium.dbengine.database.collections;
 
-import io.netty.buffer.ByteBuf;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.LLDictionary;
 import java.util.Map;
@@ -12,11 +11,11 @@ public class SubStageGetterMapDeep<T, U, US extends DatabaseStage<U>> implements
 SubStageGetter<Map<T, U>, DatabaseStageEntry<Map<T, U>>> {
 
 private final SubStageGetter<U, US> subStageGetter;
-private final SerializerFixedBinaryLength<T, ByteBuf> keySerializer;
+private final SerializerFixedBinaryLength<T, byte[]> keySerializer;
 private final int keyExtLength;
 
 public SubStageGetterMapDeep(SubStageGetter<U, US> subStageGetter,
-SerializerFixedBinaryLength<T, ByteBuf> keySerializer,
+SerializerFixedBinaryLength<T, byte[]> keySerializer,
 int keyExtLength) {
 this.subStageGetter = subStageGetter;
 this.keySerializer = keySerializer;
SubStageGetterSingle.java
@@ -1,7 +1,5 @@
 package it.cavallium.dbengine.database.collections;
 
-import io.netty.buffer.ByteBuf;
-import io.netty.buffer.Unpooled;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.LLDictionary;
 import java.util.Arrays;
@@ -11,9 +9,9 @@ import reactor.core.publisher.Mono;
 
 public class SubStageGetterSingle<T> implements SubStageGetter<T, DatabaseStageEntry<T>> {
 
-private final Serializer<T, ByteBuf> serializer;
+private final Serializer<T, byte[]> serializer;
 
-public SubStageGetterSingle(Serializer<T, ByteBuf> serializer) {
+public SubStageGetterSingle(Serializer<T, byte[]> serializer) {
 this.serializer = serializer;
 }
 
@@ -32,18 +30,11 @@ public class SubStageGetterSingle<T> implements SubStageGetter<T, DatabaseStageE
 
 //todo: temporary wrapper. convert the whole class to buffers
 private T deserialize(byte[] bytes) {
-var serialized = Unpooled.wrappedBuffer(bytes);
-return serializer.deserialize(serialized);
+return serializer.deserialize(bytes);
 }
 
 //todo: temporary wrapper. convert the whole class to buffers
 private byte[] serialize(T bytes) {
-var output = Unpooled.buffer();
-serializer.serialize(bytes, output);
-output.resetReaderIndex();
-int length = output.readableBytes();
-var outputBytes = new byte[length];
-output.getBytes(0, outputBytes, 0, length);
-return outputBytes;
+return serializer.serialize(bytes);
 }
 }
SubStageGetterSingleBytes.java
@@ -3,6 +3,6 @@ package it.cavallium.dbengine.database.collections;
 public class SubStageGetterSingleBytes extends SubStageGetterSingle<byte[]> {
 
 public SubStageGetterSingleBytes() {
-super(Serializer.noopBytes());
+super(Serializer.noop());
 }
 }