(unfinished) Netty 5 refactoring

This commit is contained in:
Andrea Cavalli 2021-08-29 23:18:03 +02:00
parent 9984cfff73
commit 3b55e8bd24
56 changed files with 2457 additions and 2269 deletions

View File

@ -32,6 +32,13 @@
<releases><enabled>false</enabled></releases> <releases><enabled>false</enabled></releases>
<snapshots><enabled>true</enabled></snapshots> <snapshots><enabled>true</enabled></snapshots>
</repository> </repository>
<repository>
<id>netty5-snapshots</id>
<name>Netty 5 snapshots</name>
<url>https://oss.sonatype.org/content/repositories/snapshots</url>
<releases><enabled>true</enabled></releases>
<snapshots><enabled>true</enabled></snapshots>
</repository>
</repositories> </repositories>
<pluginRepositories> <pluginRepositories>
<pluginRepository> <pluginRepository>
@ -245,7 +252,7 @@
<dependency> <dependency>
<groupId>io.netty</groupId> <groupId>io.netty</groupId>
<artifactId>netty-buffer</artifactId> <artifactId>netty-buffer</artifactId>
<version>4.1.63.Final</version> <version>5.0.0.Final-SNAPSHOT</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>javax.xml.bind</groupId> <groupId>javax.xml.bind</groupId>

View File

@ -1,6 +1,6 @@
package it.cavallium.dbengine.client; package it.cavallium.dbengine.client;
import io.netty.buffer.ByteBufAllocator; import io.netty.buffer.api.BufferAllocator;
import reactor.core.publisher.Flux; import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono; import reactor.core.publisher.Mono;
@ -18,7 +18,7 @@ public interface CompositeDatabase {
*/ */
Mono<Void> releaseSnapshot(CompositeSnapshot snapshot); Mono<Void> releaseSnapshot(CompositeSnapshot snapshot);
ByteBufAllocator getAllocator(); BufferAllocator getAllocator();
/** /**
* Find corrupted items * Find corrupted items

View File

@ -1,23 +1,23 @@
package it.cavallium.dbengine.client; package it.cavallium.dbengine.client;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Buffer;
import it.cavallium.dbengine.database.serialization.SerializationException; import it.cavallium.dbengine.database.serialization.SerializationException;
import it.cavallium.dbengine.database.serialization.Serializer; import it.cavallium.dbengine.database.serialization.Serializer;
import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.NotNull;
public class MappedSerializer<A, B> implements Serializer<B, ByteBuf> { public class MappedSerializer<A, B> implements Serializer<B, Buffer> {
private final Serializer<A, ByteBuf> serializer; private final Serializer<A, Buffer> serializer;
private final Mapper<A, B> keyMapper; private final Mapper<A, B> keyMapper;
public MappedSerializer(Serializer<A, ByteBuf> serializer, public MappedSerializer(Serializer<A, Buffer> serializer,
Mapper<A, B> keyMapper) { Mapper<A, B> keyMapper) {
this.serializer = serializer; this.serializer = serializer;
this.keyMapper = keyMapper; this.keyMapper = keyMapper;
} }
@Override @Override
public @NotNull B deserialize(@NotNull ByteBuf serialized) throws SerializationException { public @NotNull B deserialize(@NotNull Buffer serialized) throws SerializationException {
try { try {
return keyMapper.map(serializer.deserialize(serialized.retain())); return keyMapper.map(serializer.deserialize(serialized.retain()));
} finally { } finally {
@ -26,7 +26,7 @@ public class MappedSerializer<A, B> implements Serializer<B, ByteBuf> {
} }
@Override @Override
public @NotNull ByteBuf serialize(@NotNull B deserialized) throws SerializationException { public @NotNull Buffer serialize(@NotNull B deserialized) throws SerializationException {
return serializer.serialize(keyMapper.unmap(deserialized)); return serializer.serialize(keyMapper.unmap(deserialized));
} }
} }

View File

@ -1,23 +1,23 @@
package it.cavallium.dbengine.client; package it.cavallium.dbengine.client;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Buffer;
import it.cavallium.dbengine.database.serialization.SerializationException; import it.cavallium.dbengine.database.serialization.SerializationException;
import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength; import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.NotNull;
public class MappedSerializerFixedLength<A, B> implements SerializerFixedBinaryLength<B, ByteBuf> { public class MappedSerializerFixedLength<A, B> implements SerializerFixedBinaryLength<B, Buffer> {
private final SerializerFixedBinaryLength<A, ByteBuf> fixedLengthSerializer; private final SerializerFixedBinaryLength<A, Buffer> fixedLengthSerializer;
private final Mapper<A, B> keyMapper; private final Mapper<A, B> keyMapper;
public MappedSerializerFixedLength(SerializerFixedBinaryLength<A, ByteBuf> fixedLengthSerializer, public MappedSerializerFixedLength(SerializerFixedBinaryLength<A, Buffer> fixedLengthSerializer,
Mapper<A, B> keyMapper) { Mapper<A, B> keyMapper) {
this.fixedLengthSerializer = fixedLengthSerializer; this.fixedLengthSerializer = fixedLengthSerializer;
this.keyMapper = keyMapper; this.keyMapper = keyMapper;
} }
@Override @Override
public @NotNull B deserialize(@NotNull ByteBuf serialized) throws SerializationException { public @NotNull B deserialize(@NotNull Buffer serialized) throws SerializationException {
try { try {
return keyMapper.map(fixedLengthSerializer.deserialize(serialized.retain())); return keyMapper.map(fixedLengthSerializer.deserialize(serialized.retain()));
} finally { } finally {
@ -26,7 +26,7 @@ public class MappedSerializerFixedLength<A, B> implements SerializerFixedBinaryL
} }
@Override @Override
public @NotNull ByteBuf serialize(@NotNull B deserialized) throws SerializationException { public @NotNull Buffer serialize(@NotNull B deserialized) throws SerializationException {
return fixedLengthSerializer.serialize(keyMapper.unmap(deserialized)); return fixedLengthSerializer.serialize(keyMapper.unmap(deserialized));
} }

View File

@ -3,9 +3,46 @@ package it.cavallium.dbengine.database;
import java.util.Objects; import java.util.Objects;
import org.jetbrains.annotations.Nullable; import org.jetbrains.annotations.Nullable;
public record Delta<T>(@Nullable T previous, @Nullable T current) { public class Delta<T> {
private final @Nullable T previous;
private final @Nullable T current;
public Delta(@Nullable T previous, @Nullable T current) {
this.previous = previous;
this.current = current;
}
public boolean isModified() { public boolean isModified() {
return !Objects.equals(previous, current); return !Objects.equals(previous, current);
} }
public @Nullable T previous() {
return previous;
}
public @Nullable T current() {
return current;
}
@Override
public boolean equals(Object obj) {
if (obj == this)
return true;
if (obj == null || obj.getClass() != this.getClass())
return false;
var that = (Delta) obj;
return Objects.equals(this.previous, that.previous) && Objects.equals(this.current, that.current);
}
@Override
public int hashCode() {
return Objects.hash(previous, current);
}
@Override
public String toString() {
return "Delta[" + "previous=" + previous + ", " + "current=" + current + ']';
}
} }

View File

@ -1,6 +1,6 @@
package it.cavallium.dbengine.database; package it.cavallium.dbengine.database;
import io.netty.buffer.ByteBufAllocator; import io.netty.buffer.api.BufferAllocator;
import it.cavallium.dbengine.client.DatabaseOptions; import it.cavallium.dbengine.client.DatabaseOptions;
import it.cavallium.dbengine.client.IndicizerAnalyzers; import it.cavallium.dbengine.client.IndicizerAnalyzers;
import it.cavallium.dbengine.client.IndicizerSimilarities; import it.cavallium.dbengine.client.IndicizerSimilarities;
@ -11,7 +11,7 @@ import reactor.core.publisher.Mono;
@SuppressWarnings("UnusedReturnValue") @SuppressWarnings("UnusedReturnValue")
public interface LLDatabaseConnection { public interface LLDatabaseConnection {
ByteBufAllocator getAllocator(); BufferAllocator getAllocator();
Mono<? extends LLDatabaseConnection> connect(); Mono<? extends LLDatabaseConnection> connect();

View File

@ -0,0 +1,129 @@
package it.cavallium.dbengine.database;
import io.netty.buffer.api.Buffer;
import io.netty.buffer.api.Drop;
import io.netty.buffer.api.Owned;
import io.netty.buffer.api.Send;
import io.netty.buffer.api.internal.ResourceSupport;
import java.util.StringJoiner;
import org.jetbrains.annotations.Nullable;
/**
 * Holds the previous and current value of an update as read-only {@link Buffer}s.
 * Either side may be null (e.g. a freshly-created or a deleted entry).
 * Ownership follows the Netty 5 {@link Send}/{@link ResourceSupport} lifecycle:
 * both buffers are received in the constructor and closed on drop.
 */
public class LLDelta extends ResourceSupport<LLDelta, LLDelta> {

	@Nullable
	private final Buffer previous;
	@Nullable
	private final Buffer current;

	private LLDelta(@Nullable Send<Buffer> previous, @Nullable Send<Buffer> current, Drop<LLDelta> drop) {
		super(new LLDelta.CloseOnDrop(drop));
		assert isAllAccessible();
		// Take ownership of the sent buffers and freeze them: a delta is an immutable snapshot.
		this.previous = previous != null ? previous.receive().makeReadOnly() : null;
		this.current = current != null ? current.receive().makeReadOnly() : null;
	}

	// Assertion-only invariant check: both buffers (if present) and this resource
	// must be accessible and owned. Always returns true so it can sit inside `assert`.
	private boolean isAllAccessible() {
		assert previous == null || previous.isAccessible();
		assert current == null || current.isAccessible();
		assert this.isAccessible();
		assert this.isOwned();
		return true;
	}

	/**
	 * Creates a delta taking ownership of both sides.
	 *
	 * @param previous the value before the update, or null if absent
	 * @param current  the value after the update, or null if absent
	 */
	public static LLDelta of(Send<Buffer> previous, Send<Buffer> current) {
		return new LLDelta(previous, current, d -> {});
	}

	/**
	 * @return a sendable copy of the previous value, or null if absent
	 */
	public Send<Buffer> previous() {
		ensureOwned();
		return previous != null ? previous.copy().send() : null;
	}

	/**
	 * @return a sendable copy of the current value, or null if absent
	 */
	public Send<Buffer> current() {
		ensureOwned();
		return current != null ? current.copy().send() : null;
	}

	/**
	 * @return true if the previous and current buffers differ in content
	 */
	public boolean isModified() {
		return !LLUtils.equals(previous, current);
	}

	// Throws if this resource has been closed or sent away; must precede any buffer access.
	private void ensureOwned() {
		assert isAllAccessible();
		if (!isOwned()) {
			if (!isAccessible()) {
				throw this.createResourceClosedException();
			} else {
				throw new IllegalStateException("Resource not owned");
			}
		}
	}

	@Override
	public boolean equals(Object o) {
		if (this == o) {
			return true;
		}
		if (o == null || getClass() != o.getClass()) {
			return false;
		}
		// NOTE: do not shadow the class name with the local variable.
		LLDelta that = (LLDelta) o;
		return LLUtils.equals(previous, that.previous) && LLUtils.equals(current, that.current);
	}

	@Override
	public int hashCode() {
		int result = LLUtils.hashCode(previous);
		result = 31 * result + LLUtils.hashCode(current);
		return result;
	}

	@Override
	public String toString() {
		// Field labels match the accessor names (previous/current), not min/max.
		return new StringJoiner(", ", LLDelta.class.getSimpleName() + "[", "]")
				.add("previous=" + LLUtils.toString(previous))
				.add("current=" + LLUtils.toString(current))
				.toString();
	}

	/**
	 * @return an independent owned copy of this delta (buffers are deep-copied)
	 */
	public LLDelta copy() {
		ensureOwned();
		return new LLDelta(previous != null ? previous.copy().send() : null,
				current != null ? current.copy().send() : null,
				d -> {}
		);
	}

	@Override
	protected RuntimeException createResourceClosedException() {
		return new IllegalStateException("Closed");
	}

	@Override
	protected Owned<LLDelta> prepareSend() {
		// Transfer ownership of both buffers to the receiving thread.
		Send<Buffer> previousSend = this.previous != null ? this.previous.send() : null;
		Send<Buffer> currentSend = this.current != null ? this.current.send() : null;
		return drop -> new LLDelta(previousSend, currentSend, drop);
	}

	// Closes both owned buffers before delegating to the user-supplied drop.
	private static class CloseOnDrop implements Drop<LLDelta> {

		private final Drop<LLDelta> delegate;

		public CloseOnDrop(Drop<LLDelta> drop) {
			this.delegate = drop;
		}

		@Override
		public void drop(LLDelta obj) {
			if (obj.previous != null) {
				obj.previous.close();
			}
			if (obj.current != null) {
				obj.current.close();
			}
			delegate.drop(obj);
		}
	}
}

View File

@ -1,7 +1,8 @@
package it.cavallium.dbengine.database; package it.cavallium.dbengine.database;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Buffer;
import io.netty.buffer.ByteBufAllocator; import io.netty.buffer.api.BufferAllocator;
import io.netty.buffer.api.Send;
import it.cavallium.dbengine.client.BadBlock; import it.cavallium.dbengine.client.BadBlock;
import it.cavallium.dbengine.database.serialization.BiSerializationFunction; import it.cavallium.dbengine.database.serialization.BiSerializationFunction;
import it.cavallium.dbengine.database.serialization.SerializationFunction; import it.cavallium.dbengine.database.serialization.SerializationFunction;
@ -23,89 +24,90 @@ public interface LLDictionary extends LLKeyValueDatabaseStructure {
String getColumnName(); String getColumnName();
ByteBufAllocator getAllocator(); BufferAllocator getAllocator();
Mono<ByteBuf> get(@Nullable LLSnapshot snapshot, Mono<ByteBuf> key, boolean existsAlmostCertainly); Mono<Send<Buffer>> get(@Nullable LLSnapshot snapshot, Mono<Send<Buffer>> key, boolean existsAlmostCertainly);
default Mono<ByteBuf> get(@Nullable LLSnapshot snapshot, Mono<ByteBuf> key) { default Mono<Send<Buffer>> get(@Nullable LLSnapshot snapshot, Mono<Send<Buffer>> key) {
return get(snapshot, key, false); return get(snapshot, key, false);
} }
Mono<ByteBuf> put(Mono<ByteBuf> key, Mono<ByteBuf> value, LLDictionaryResultType resultType); Mono<Send<Buffer>> put(Mono<Send<Buffer>> key, Mono<Send<Buffer>> value, LLDictionaryResultType resultType);
Mono<UpdateMode> getUpdateMode(); Mono<UpdateMode> getUpdateMode();
default Mono<ByteBuf> update(Mono<ByteBuf> key, default Mono<Send<Buffer>> update(Mono<Send<Buffer>> key,
SerializationFunction<@Nullable ByteBuf, @Nullable ByteBuf> updater, SerializationFunction<@Nullable Send<Buffer>, @Nullable Send<Buffer>> updater,
UpdateReturnMode updateReturnMode, UpdateReturnMode updateReturnMode,
boolean existsAlmostCertainly) { boolean existsAlmostCertainly) {
return this return this
.updateAndGetDelta(key, updater, existsAlmostCertainly) .updateAndGetDelta(key, updater, existsAlmostCertainly)
.transform(prev -> LLUtils.resolveDelta(prev, updateReturnMode)); .transform(prev -> LLUtils.resolveLLDelta(prev, updateReturnMode));
} }
default Mono<ByteBuf> update(Mono<ByteBuf> key, default Mono<Send<Buffer>> update(Mono<Send<Buffer>> key,
SerializationFunction<@Nullable ByteBuf, @Nullable ByteBuf> updater, SerializationFunction<@Nullable Send<Buffer>, @Nullable Send<Buffer>> updater,
UpdateReturnMode returnMode) { UpdateReturnMode returnMode) {
return update(key, updater, returnMode, false); return update(key, updater, returnMode, false);
} }
Mono<Delta<ByteBuf>> updateAndGetDelta(Mono<ByteBuf> key, Mono<LLDelta> updateAndGetDelta(Mono<Send<Buffer>> key,
SerializationFunction<@Nullable ByteBuf, @Nullable ByteBuf> updater, SerializationFunction<@Nullable Send<Buffer>, @Nullable Send<Buffer>> updater,
boolean existsAlmostCertainly); boolean existsAlmostCertainly);
default Mono<Delta<ByteBuf>> updateAndGetDelta(Mono<ByteBuf> key, default Mono<LLDelta> updateAndGetDelta(Mono<Send<Buffer>> key,
SerializationFunction<@Nullable ByteBuf, @Nullable ByteBuf> updater) { SerializationFunction<@Nullable Send<Buffer>, @Nullable Send<Buffer>> updater) {
return updateAndGetDelta(key, updater, false); return updateAndGetDelta(key, updater, false);
} }
Mono<Void> clear(); Mono<Void> clear();
Mono<ByteBuf> remove(Mono<ByteBuf> key, LLDictionaryResultType resultType); Mono<Send<Buffer>> remove(Mono<Send<Buffer>> key, LLDictionaryResultType resultType);
<K> Flux<Tuple3<K, ByteBuf, Optional<ByteBuf>>> getMulti(@Nullable LLSnapshot snapshot, <K> Flux<Tuple3<K, Send<Buffer>, Optional<Send<Buffer>>>> getMulti(@Nullable LLSnapshot snapshot,
Flux<Tuple2<K, ByteBuf>> keys, Flux<Tuple2<K, Send<Buffer>>> keys,
boolean existsAlmostCertainly); boolean existsAlmostCertainly);
default <K> Flux<Tuple3<K, ByteBuf, Optional<ByteBuf>>> getMulti(@Nullable LLSnapshot snapshot, Flux<Tuple2<K, ByteBuf>> keys) { default <K> Flux<Tuple3<K, Send<Buffer>, Optional<Send<Buffer>>>> getMulti(@Nullable LLSnapshot snapshot,
Flux<Tuple2<K, Send<Buffer>>> keys) {
return getMulti(snapshot, keys, false); return getMulti(snapshot, keys, false);
} }
Flux<LLEntry> putMulti(Flux<LLEntry> entries, boolean getOldValues); Flux<Send<LLEntry>> putMulti(Flux<Send<LLEntry>> entries, boolean getOldValues);
<X> Flux<ExtraKeyOperationResult<ByteBuf, X>> updateMulti(Flux<Tuple2<ByteBuf, X>> entries, <X> Flux<ExtraKeyOperationResult<Send<Buffer>, X>> updateMulti(Flux<Tuple2<Send<Buffer>, X>> entries,
BiSerializationFunction<ByteBuf, X, ByteBuf> updateFunction); BiSerializationFunction<Send<Buffer>, X, Send<Buffer>> updateFunction);
Flux<LLEntry> getRange(@Nullable LLSnapshot snapshot, Mono<LLRange> range, boolean existsAlmostCertainly); Flux<Send<LLEntry>> getRange(@Nullable LLSnapshot snapshot, Mono<Send<LLRange>> range, boolean existsAlmostCertainly);
default Flux<LLEntry> getRange(@Nullable LLSnapshot snapshot, Mono<LLRange> range) { default Flux<Send<LLEntry>> getRange(@Nullable LLSnapshot snapshot, Mono<Send<LLRange>> range) {
return getRange(snapshot, range, false); return getRange(snapshot, range, false);
} }
Flux<List<LLEntry>> getRangeGrouped(@Nullable LLSnapshot snapshot, Flux<List<Send<LLEntry>>> getRangeGrouped(@Nullable LLSnapshot snapshot,
Mono<LLRange> range, Mono<Send<LLRange>> range,
int prefixLength, int prefixLength,
boolean existsAlmostCertainly); boolean existsAlmostCertainly);
default Flux<List<LLEntry>> getRangeGrouped(@Nullable LLSnapshot snapshot, default Flux<List<Send<LLEntry>>> getRangeGrouped(@Nullable LLSnapshot snapshot,
Mono<LLRange> range, Mono<Send<LLRange>> range,
int prefixLength) { int prefixLength) {
return getRangeGrouped(snapshot, range, prefixLength, false); return getRangeGrouped(snapshot, range, prefixLength, false);
} }
Flux<ByteBuf> getRangeKeys(@Nullable LLSnapshot snapshot, Mono<LLRange> range); Flux<Send<Buffer>> getRangeKeys(@Nullable LLSnapshot snapshot, Mono<Send<LLRange>> range);
Flux<List<ByteBuf>> getRangeKeysGrouped(@Nullable LLSnapshot snapshot, Mono<LLRange> range, int prefixLength); Flux<List<Send<Buffer>>> getRangeKeysGrouped(@Nullable LLSnapshot snapshot, Mono<Send<LLRange>> range, int prefixLength);
Flux<ByteBuf> getRangeKeyPrefixes(@Nullable LLSnapshot snapshot, Mono<LLRange> range, int prefixLength); Flux<Send<Buffer>> getRangeKeyPrefixes(@Nullable LLSnapshot snapshot, Mono<Send<LLRange>> range, int prefixLength);
Flux<BadBlock> badBlocks(Mono<LLRange> range); Flux<BadBlock> badBlocks(Mono<Send<LLRange>> range);
Mono<Void> setRange(Mono<LLRange> range, Flux<LLEntry> entries); Mono<Void> setRange(Mono<Send<LLRange>> range, Flux<Send<LLEntry>> entries);
default Mono<Void> replaceRange(Mono<LLRange> range, default Mono<Void> replaceRange(Mono<Send<LLRange>> range,
boolean canKeysChange, boolean canKeysChange,
Function<LLEntry, Mono<LLEntry>> entriesReplacer, Function<Send<LLEntry>, Mono<Send<LLEntry>>> entriesReplacer,
boolean existsAlmostCertainly) { boolean existsAlmostCertainly) {
return Mono.defer(() -> { return Mono.defer(() -> {
if (canKeysChange) { if (canKeysChange) {
@ -124,19 +126,19 @@ public interface LLDictionary extends LLKeyValueDatabaseStructure {
}); });
} }
default Mono<Void> replaceRange(Mono<LLRange> range, default Mono<Void> replaceRange(Mono<Send<LLRange>> range,
boolean canKeysChange, boolean canKeysChange,
Function<LLEntry, Mono<LLEntry>> entriesReplacer) { Function<Send<LLEntry>, Mono<Send<LLEntry>>> entriesReplacer) {
return replaceRange(range, canKeysChange, entriesReplacer, false); return replaceRange(range, canKeysChange, entriesReplacer, false);
} }
Mono<Boolean> isRangeEmpty(@Nullable LLSnapshot snapshot, Mono<LLRange> range); Mono<Boolean> isRangeEmpty(@Nullable LLSnapshot snapshot, Mono<Send<LLRange>> range);
Mono<Long> sizeRange(@Nullable LLSnapshot snapshot, Mono<LLRange> range, boolean fast); Mono<Long> sizeRange(@Nullable LLSnapshot snapshot, Mono<Send<LLRange>> range, boolean fast);
Mono<LLEntry> getOne(@Nullable LLSnapshot snapshot, Mono<LLRange> range); Mono<Send<LLEntry>> getOne(@Nullable LLSnapshot snapshot, Mono<Send<LLRange>> range);
Mono<ByteBuf> getOneKey(@Nullable LLSnapshot snapshot, Mono<LLRange> range); Mono<Send<Buffer>> getOneKey(@Nullable LLSnapshot snapshot, Mono<Send<LLRange>> range);
Mono<LLEntry> removeOne(Mono<LLRange> range); Mono<Send<LLEntry>> removeOne(Mono<Send<LLRange>> range);
} }

View File

@ -1,74 +1,127 @@
package it.cavallium.dbengine.database; package it.cavallium.dbengine.database;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Buffer;
import io.netty.util.IllegalReferenceCountException; import io.netty.buffer.api.Drop;
import java.util.Map; import io.netty.buffer.api.Owned;
import java.util.concurrent.atomic.AtomicInteger; import io.netty.buffer.api.Send;
import org.warp.commonutils.log.Logger; import io.netty.buffer.api.internal.ResourceSupport;
import org.warp.commonutils.log.LoggerFactory; import java.util.StringJoiner;
import org.jetbrains.annotations.NotNull;
public class LLEntry { public class LLEntry extends ResourceSupport<LLEntry, LLEntry> {
@NotNull
private final Buffer key;
@NotNull
private final Buffer value;
private static final Logger logger = LoggerFactory.getLogger(LLEntry.class); private LLEntry(Send<Buffer> key, Send<Buffer> value, Drop<LLEntry> drop) {
super(new LLEntry.CloseOnDrop(drop));
private final AtomicInteger refCnt = new AtomicInteger(1); assert isAllAccessible();
this.key = key.receive().makeReadOnly();
private final ByteBuf key; this.value = value.receive().makeReadOnly();
private final ByteBuf value;
public LLEntry(ByteBuf key, ByteBuf value) {
try {
this.key = key.retain();
this.value = value.retain();
} finally {
key.release();
value.release();
}
} }
public ByteBuf getKey() { private boolean isAllAccessible() {
if (refCnt.get() <= 0) { assert key.isAccessible();
throw new IllegalReferenceCountException(refCnt.get()); assert value.isAccessible();
} assert this.isAccessible();
assert this.isOwned();
return true;
}
public static LLEntry of(Send<Buffer> key, Send<Buffer> value) {
return new LLEntry(key, value, d -> {});
}
public Send<Buffer> getKey() {
ensureOwned();
return key.copy().send();
}
public Buffer getKeyUnsafe() {
return key; return key;
} }
public ByteBuf getValue() { public Send<Buffer> getValue() {
if (refCnt.get() <= 0) { ensureOwned();
throw new IllegalReferenceCountException(refCnt.get()); return value.copy().send();
} }
public Buffer getValueUnsafe() {
return value; return value;
} }
public void retain() { private void ensureOwned() {
if (refCnt.getAndIncrement() <= 0) { assert isAllAccessible();
throw new IllegalReferenceCountException(refCnt.get(), 1); if (!isOwned()) {
if (!isAccessible()) {
throw this.createResourceClosedException();
} else {
throw new IllegalStateException("Resource not owned");
}
} }
key.retain();
value.retain();
}
public void release() {
if (refCnt.decrementAndGet() < 0) {
throw new IllegalReferenceCountException(refCnt.get(), -1);
}
if (key.refCnt() > 0) {
key.release();
}
if (value.refCnt() > 0) {
value.release();
}
}
public boolean isReleased() {
return refCnt.get() <= 0;
} }
@Override @Override
protected void finalize() throws Throwable { public boolean equals(Object o) {
if (refCnt.get() > 0) { if (this == o) {
logger.warn(this.getClass().getName() + "::release has not been called!"); return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
LLEntry LLEntry = (LLEntry) o;
return LLUtils.equals(key, LLEntry.key) && LLUtils.equals(value, LLEntry.value);
}
@Override
public int hashCode() {
int result = LLUtils.hashCode(key);
result = 31 * result + LLUtils.hashCode(value);
return result;
}
@Override
public String toString() {
return new StringJoiner(", ", LLEntry.class.getSimpleName() + "[", "]")
.add("key=" + LLUtils.toString(key))
.add("value=" + LLUtils.toString(value))
.toString();
}
public LLEntry copy() {
ensureOwned();
return new LLEntry(key.copy().send(), value.copy().send(), d -> {});
}
@Override
protected RuntimeException createResourceClosedException() {
return new IllegalStateException("Closed");
}
@Override
protected Owned<LLEntry> prepareSend() {
Send<Buffer> keySend;
Send<Buffer> valueSend;
keySend = this.key.send();
valueSend = this.value.send();
return drop -> new LLEntry(keySend, valueSend, drop);
}
private static class CloseOnDrop implements Drop<LLEntry> {
private final Drop<LLEntry> delegate;
public CloseOnDrop(Drop<LLEntry> drop) {
this.delegate = drop;
}
@Override
public void drop(LLEntry obj) {
obj.key.close();
obj.value.close();
delegate.drop(obj);
} }
super.finalize();
} }
} }

View File

@ -2,7 +2,7 @@ package it.cavallium.dbengine.database;
import com.google.common.primitives.Ints; import com.google.common.primitives.Ints;
import com.google.common.primitives.Longs; import com.google.common.primitives.Longs;
import io.netty.buffer.ByteBufAllocator; import io.netty.buffer.api.BufferAllocator;
import it.cavallium.dbengine.database.collections.DatabaseInt; import it.cavallium.dbengine.database.collections.DatabaseInt;
import it.cavallium.dbengine.database.collections.DatabaseLong; import it.cavallium.dbengine.database.collections.DatabaseLong;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
@ -46,7 +46,7 @@ public interface LLKeyValueDatabase extends LLSnapshottable, LLKeyValueDatabaseS
Mono<Void> verifyChecksum(); Mono<Void> verifyChecksum();
ByteBufAllocator getAllocator(); BufferAllocator getAllocator();
Mono<Void> close(); Mono<Void> close();
} }

View File

@ -1,117 +1,146 @@
package it.cavallium.dbengine.database; package it.cavallium.dbengine.database;
import static io.netty.buffer.Unpooled.wrappedBuffer; import static io.netty.buffer.Unpooled.wrappedBuffer;
import static io.netty.buffer.Unpooled.wrappedUnmodifiableBuffer;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Buffer;
import io.netty.buffer.ByteBufUtil; import io.netty.buffer.api.Drop;
import io.netty.util.IllegalReferenceCountException; import io.netty.buffer.api.Owned;
import java.util.Arrays; import io.netty.buffer.api.Send;
import io.netty.buffer.api.internal.ResourceSupport;
import java.util.StringJoiner; import java.util.StringJoiner;
import java.util.concurrent.atomic.AtomicInteger;
/** /**
* Range of data, from min (inclusive),to max (exclusive) * Range of data, from min (inclusive),to max (exclusive)
*/ */
public class LLRange { public class LLRange extends ResourceSupport<LLRange, LLRange> {
private static final LLRange RANGE_ALL = new LLRange(null, null, false); private static final LLRange RANGE_ALL = new LLRange(null, null, null, d -> {});
private final ByteBuf min; private Buffer min;
private final ByteBuf max; private Buffer max;
private final boolean releasable; private Buffer single;
private final AtomicInteger refCnt = new AtomicInteger(1);
private LLRange(ByteBuf min, ByteBuf max, boolean releasable) { private LLRange(Send<Buffer> min, Send<Buffer> max, Send<Buffer> single, Drop<LLRange> drop) {
assert min == null || min.refCnt() > 0; super(new CloseOnDrop(drop));
assert max == null || max.refCnt() > 0; assert isAllAccessible();
this.min = min; assert single == null || (min == null && max == null);
this.max = max; this.min = min != null ? min.receive().makeReadOnly() : null;
this.releasable = releasable; this.max = max != null ? max.receive().makeReadOnly() : null;
this.single = single != null ? single.receive().makeReadOnly() : null;
}
private boolean isAllAccessible() {
assert min == null || min.isAccessible();
assert max == null || max.isAccessible();
assert single == null || single.isAccessible();
assert this.isAccessible();
assert this.isOwned();
return true;
} }
public static LLRange all() { public static LLRange all() {
return RANGE_ALL; return RANGE_ALL.copy();
} }
public static LLRange from(ByteBuf min) { public static LLRange from(Send<Buffer> min) {
return new LLRange(min, null, true); return new LLRange(min, null, null, d -> {});
} }
public static LLRange to(ByteBuf max) { public static LLRange to(Send<Buffer> max) {
return new LLRange(null, max, true); return new LLRange(null, max, null, d -> {});
} }
public static LLRange single(ByteBuf single) { public static LLRange single(Send<Buffer> single) {
try { return new LLRange(null, null, single, d -> {});
return new LLRange(single.retain(), single.retain(), true);
} finally {
single.release();
}
} }
public static LLRange of(ByteBuf min, ByteBuf max) { public static LLRange of(Send<Buffer> min, Send<Buffer> max) {
return new LLRange(min, max, true); return new LLRange(min, max, null, d -> {});
} }
public boolean isAll() { public boolean isAll() {
checkReleased(); ensureOwned();
assert min == null || min.refCnt() > 0; return min == null && max == null && single == null;
assert max == null || max.refCnt() > 0;
return min == null && max == null;
} }
public boolean isSingle() { public boolean isSingle() {
checkReleased(); ensureOwned();
assert min == null || min.refCnt() > 0; return single != null;
assert max == null || max.refCnt() > 0;
if (min == null || max == null) return false;
return LLUtils.equals(min, max);
} }
public boolean hasMin() { public boolean hasMin() {
checkReleased(); ensureOwned();
assert min == null || min.refCnt() > 0; return min != null || single != null;
assert max == null || max.refCnt() > 0;
return min != null;
} }
public ByteBuf getMin() { public Send<Buffer> getMin() {
checkReleased(); ensureOwned();
assert min == null || min.refCnt() > 0; if (min != null) {
assert max == null || max.refCnt() > 0; return min.copy().send();
assert min != null; } else if (single != null) {
return min; return single.copy().send();
} else {
return null;
}
}
public Buffer getMinUnsafe() {
ensureOwned();
if (min != null) {
return min;
} else if (single != null) {
return single;
} else {
return null;
}
} }
public boolean hasMax() { public boolean hasMax() {
checkReleased(); ensureOwned();
assert min == null || min.refCnt() > 0; return max != null || single != null;
assert max == null || max.refCnt() > 0;
return max != null;
} }
public ByteBuf getMax() { public Send<Buffer> getMax() {
checkReleased(); ensureOwned();
assert min == null || min.refCnt() > 0; if (max != null) {
assert max == null || max.refCnt() > 0; return max.copy().send();
assert max != null; } else if (single != null) {
return max; return single.copy().send();
} } else {
return null;
public ByteBuf getSingle() {
checkReleased();
assert min == null || min.refCnt() > 0;
assert max == null || max.refCnt() > 0;
assert isSingle();
return min;
}
private void checkReleased() {
if (!releasable) {
return;
} }
if (refCnt.get() <= 0) { }
throw new IllegalReferenceCountException(0);
public Buffer getMaxUnsafe() {
ensureOwned();
if (max != null) {
return max;
} else if (single != null) {
return single;
} else {
return null;
}
}
public Send<Buffer> getSingle() {
ensureOwned();
assert isSingle();
return single != null ? single.copy().send() : null;
}
public Buffer getSingleUnsafe() {
ensureOwned();
assert isSingle();
return single;
}
private void ensureOwned() {
assert isAllAccessible();
if (!isOwned()) {
if (!isAccessible()) {
throw this.createResourceClosedException();
} else {
throw new IllegalStateException("Resource not owned");
}
} }
} }
@ -142,34 +171,53 @@ public class LLRange {
.toString(); .toString();
} }
public LLRange retain() { public LLRange copy() {
if (!releasable) { ensureOwned();
return this; return new LLRange(min != null ? min.copy().send() : null,
} max != null ? max.copy().send() : null,
if (refCnt.updateAndGet(refCnt -> refCnt <= 0 ? 0 : (refCnt + 1)) <= 0) { single != null ? single.copy().send(): null,
throw new IllegalReferenceCountException(0, 1); d -> {}
} );
if (min != null) {
min.retain();
}
if (max != null) {
max.retain();
}
return this;
} }
public void release() { @Override
if (!releasable) { protected RuntimeException createResourceClosedException() {
return; return new IllegalStateException("Closed");
}
@Override
protected Owned<LLRange> prepareSend() {
Send<Buffer> minSend;
Send<Buffer> maxSend;
Send<Buffer> singleSend;
minSend = this.min != null ? this.min.send() : null;
maxSend = this.max != null ? this.max.send() : null;
singleSend = this.single != null ? this.single.send() : null;
this.makeInaccessible();
return drop -> new LLRange(minSend, maxSend, singleSend, drop);
}
private void makeInaccessible() {
this.min = null;
this.max = null;
this.single = null;
}
private static class CloseOnDrop implements Drop<LLRange> {
private final Drop<LLRange> delegate;
public CloseOnDrop(Drop<LLRange> drop) {
this.delegate = drop;
} }
if (refCnt.decrementAndGet() < 0) {
throw new IllegalReferenceCountException(0, -1); @Override
} public void drop(LLRange obj) {
if (min != null) { if (obj.min != null) obj.min.close();
min.release(); if (obj.max != null) obj.max.close();
} if (obj.single != null) obj.single.close();
if (max != null) { obj.makeInaccessible();
max.release(); delegate.drop(obj);
} }
} }
} }

View File

@ -2,21 +2,17 @@ package it.cavallium.dbengine.database;
import com.google.common.primitives.Ints; import com.google.common.primitives.Ints;
import com.google.common.primitives.Longs; import com.google.common.primitives.Longs;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Buffer;
import io.netty.buffer.ByteBufAllocator; import io.netty.buffer.api.BufferAllocator;
import io.netty.buffer.ByteBufUtil; import io.netty.buffer.api.CompositeBuffer;
import io.netty.buffer.CompositeByteBuf; import io.netty.buffer.api.Send;
import io.netty.buffer.Unpooled;
import io.netty.util.AbstractReferenceCounted;
import io.netty.util.IllegalReferenceCountException; import io.netty.util.IllegalReferenceCountException;
import io.netty.util.ReferenceCounted;
import it.cavallium.dbengine.database.disk.ReleasableSlice;
import it.cavallium.dbengine.database.serialization.SerializationException; import it.cavallium.dbengine.database.serialization.SerializationException;
import it.cavallium.dbengine.database.serialization.SerializationFunction; import it.cavallium.dbengine.database.serialization.SerializationFunction;
import it.cavallium.dbengine.lucene.RandomSortField; import it.cavallium.dbengine.lucene.RandomSortField;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection; import java.util.Collection;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@ -24,7 +20,8 @@ import java.util.Map.Entry;
import java.util.Objects; import java.util.Objects;
import java.util.Optional; import java.util.Optional;
import java.util.concurrent.Callable; import java.util.concurrent.Callable;
import java.util.function.Function; import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.ToIntFunction; import java.util.function.ToIntFunction;
import org.apache.lucene.document.Document; import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field; import org.apache.lucene.document.Field;
@ -45,7 +42,6 @@ import org.jetbrains.annotations.Nullable;
import org.rocksdb.RocksDB; import org.rocksdb.RocksDB;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.warp.commonutils.functional.IOFunction;
import reactor.core.publisher.Flux; import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono; import reactor.core.publisher.Mono;
import reactor.util.function.Tuple2; import reactor.util.function.Tuple2;
@ -56,6 +52,7 @@ public class LLUtils {
private static final Logger logger = LoggerFactory.getLogger(LLUtils.class); private static final Logger logger = LoggerFactory.getLogger(LLUtils.class);
private static final ByteBuffer EMPTY_BYTE_BUFFER = ByteBuffer.allocateDirect(0);
private static final byte[] RESPONSE_TRUE = new byte[]{1}; private static final byte[] RESPONSE_TRUE = new byte[]{1};
private static final byte[] RESPONSE_FALSE = new byte[]{0}; private static final byte[] RESPONSE_FALSE = new byte[]{0};
private static final byte[] RESPONSE_TRUE_BUF = new byte[]{1}; private static final byte[] RESPONSE_TRUE_BUF = new byte[]{1};
@ -73,12 +70,10 @@ public class LLUtils {
return response[0] == 1; return response[0] == 1;
} }
public static boolean responseToBoolean(ByteBuf response) { public static boolean responseToBoolean(Buffer response) {
try { try (response) {
assert response.readableBytes() == 1; assert response.readableBytes() == 1;
return response.getByte(response.readerIndex()) == 1; return response.getByte(response.readerOffset()) == 1;
} finally {
response.release();
} }
} }
@ -86,8 +81,8 @@ public class LLUtils {
return bool ? RESPONSE_TRUE : RESPONSE_FALSE; return bool ? RESPONSE_TRUE : RESPONSE_FALSE;
} }
public static ByteBuf booleanToResponseByteBuffer(boolean bool) { public static Buffer booleanToResponseByteBuffer(BufferAllocator alloc, boolean bool) {
return Unpooled.wrappedBuffer(booleanToResponse(bool)); return alloc.allocate(1).writeByte(bool ? (byte) 1 : 0);
} }
@Nullable @Nullable
@ -171,9 +166,9 @@ public class LLUtils {
return new it.cavallium.dbengine.database.LLKeyScore(hit.docId(), hit.score(), hit.key()); return new it.cavallium.dbengine.database.LLKeyScore(hit.docId(), hit.score(), hit.key());
} }
public static String toStringSafe(ByteBuf key) { public static String toStringSafe(Buffer key) {
try { try {
if (key.refCnt() > 0) { if (key.isAccessible()) {
return toString(key); return toString(key);
} else { } else {
return "(released)"; return "(released)";
@ -183,11 +178,11 @@ public class LLUtils {
} }
} }
public static String toString(ByteBuf key) { public static String toString(Buffer key) {
if (key == null) { if (key == null) {
return "null"; return "null";
} else { } else {
int startIndex = key.readerIndex(); int startIndex = key.readerOffset();
int iMax = key.readableBytes() - 1; int iMax = key.readableBytes() - 1;
int iLimit = 128; int iLimit = 128;
if (iMax <= -1) { if (iMax <= -1) {
@ -213,111 +208,117 @@ public class LLUtils {
} }
} }
public static boolean equals(ByteBuf a, ByteBuf b) { public static boolean equals(Buffer a, Buffer b) {
if (a == null && b == null) { if (a == null && b == null) {
return true; return true;
} else if (a != null && b != null) { } else if (a != null && b != null) {
return ByteBufUtil.equals(a, b); var aCur = a.openCursor();
var bCur = b.openCursor();
if (aCur.bytesLeft() != bCur.bytesLeft()) {
return false;
}
while (aCur.readByte() && bCur.readByte()) {
if (aCur.getByte() != bCur.getByte()) {
return false;
}
}
return true;
} else { } else {
return false; return false;
} }
} }
public static byte[] toArray(ByteBuf key) { public static byte[] toArray(Buffer key) {
if (key.hasArray()) { byte[] array = new byte[key.readableBytes()];
return Arrays.copyOfRange(key.array(), key.arrayOffset() + key.readerIndex(), key.arrayOffset() + key.writerIndex()); key.copyInto(key.readerOffset(), array, 0, key.readableBytes());
} else { return array;
byte[] keyBytes = new byte[key.readableBytes()];
key.getBytes(key.readerIndex(), keyBytes, 0, key.readableBytes());
return keyBytes;
}
} }
public static List<byte[]> toArray(List<ByteBuf> input) { public static List<byte[]> toArray(List<Buffer> input) {
List<byte[]> result = new ArrayList<>(input.size()); List<byte[]> result = new ArrayList<>(input.size());
for (ByteBuf byteBuf : input) { for (Buffer byteBuf : input) {
result.add(toArray(byteBuf)); result.add(toArray(byteBuf));
} }
return result; return result;
} }
public static int hashCode(ByteBuf buf) { public static int hashCode(Buffer buf) {
return buf == null ? 0 : buf.hashCode(); if (buf == null)
return 0;
int result = 1;
var cur = buf.openCursor();
while (cur.readByte()) {
var element = cur.getByte();
result = 31 * result + element;
}
return result;
} }
/**
*
* @return null if size is equal to RocksDB.NOT_FOUND
*/
@Nullable @Nullable
public static ByteBuf readNullableDirectNioBuffer(ByteBufAllocator alloc, ToIntFunction<ByteBuffer> reader) { public static Buffer readNullableDirectNioBuffer(BufferAllocator alloc, ToIntFunction<ByteBuffer> reader) {
ByteBuf buffer = alloc.directBuffer(); Buffer buffer = alloc.allocate(4096);
ByteBuf directBuffer = null;
ByteBuffer nioBuffer; ByteBuffer nioBuffer;
int size; int size;
Boolean mustBeCopied = null;
do { do {
if (mustBeCopied == null || !mustBeCopied) { nioBuffer = LLUtils.toDirect(buffer);
nioBuffer = LLUtils.toDirectFast(buffer); nioBuffer.limit(nioBuffer.capacity());
if (nioBuffer != null) { assert nioBuffer.isDirect();
nioBuffer.limit(nioBuffer.capacity()); size = reader.applyAsInt(nioBuffer);
} if (size != RocksDB.NOT_FOUND) {
} else { if (size == nioBuffer.limit()) {
nioBuffer = null; buffer.readerOffset(0).writerOffset(size);
} return buffer;
if ((mustBeCopied != null && mustBeCopied) || nioBuffer == null) { } else {
directBuffer = buffer; assert size > nioBuffer.limit();
nioBuffer = directBuffer.nioBuffer(0, directBuffer.capacity()); assert nioBuffer.limit() > 0;
mustBeCopied = true; buffer.ensureWritable(size);
} else {
mustBeCopied = false;
}
try {
assert nioBuffer.isDirect();
size = reader.applyAsInt(nioBuffer);
if (size != RocksDB.NOT_FOUND) {
if (mustBeCopied) {
buffer.writerIndex(0).writeBytes(nioBuffer);
}
if (size == nioBuffer.limit()) {
buffer.setIndex(0, size);
return buffer;
} else {
assert size > nioBuffer.limit();
assert nioBuffer.limit() > 0;
buffer.capacity(size);
}
}
} finally {
if (nioBuffer != null) {
nioBuffer = null;
}
if(directBuffer != null) {
directBuffer.release();
directBuffer = null;
} }
} }
} while (size != RocksDB.NOT_FOUND); } while (size != RocksDB.NOT_FOUND);
// Return null if size is equal to RocksDB.NOT_FOUND
return null; return null;
} }
@Nullable @Nullable
public static ByteBuffer toDirectFast(ByteBuf buffer) { public static ByteBuffer toDirectFast(Buffer buffer) {
ByteBuffer result = buffer.nioBuffer(0, buffer.capacity()); int readableComponents = buffer.countReadableComponents();
if (result.isDirect()) { if (readableComponents > 0) {
result.limit(buffer.writerIndex()); AtomicReference<ByteBuffer> byteBufferReference = new AtomicReference<>(null);
buffer.forEachReadable(0, (index, component) -> {
byteBufferReference.setPlain(component.readableBuffer());
return false;
});
ByteBuffer byteBuffer = byteBufferReference.getPlain();
if (byteBuffer != null && byteBuffer.isDirect()) {
byteBuffer.limit(buffer.writerOffset());
assert result.isDirect(); assert byteBuffer.isDirect();
assert result.capacity() == buffer.capacity(); assert byteBuffer.capacity() == buffer.capacity();
assert buffer.readerIndex() == result.position(); assert buffer.readerOffset() == byteBuffer.position();
assert result.limit() - result.position() == buffer.readableBytes(); assert byteBuffer.limit() - byteBuffer.position() == buffer.readableBytes();
return result; return byteBuffer;
} else {
return null;
}
} else if (readableComponents == 0) {
return EMPTY_BYTE_BUFFER;
} else { } else {
return null; return null;
} }
} }
public static ByteBuffer toDirect(ByteBuf buffer) { public static ByteBuffer toDirect(Buffer buffer) {
ByteBuffer result = toDirectFast(buffer); ByteBuffer result = toDirectFast(buffer);
if (result == null) { if (result == null) {
throw new IllegalArgumentException("The supplied ByteBuf is not direct " throw new IllegalArgumentException("The supplied Buffer is not direct "
+ "(if it's a CompositeByteBuf it must be consolidated before)"); + "(if it's a CompositeByteBuf it must be consolidated before)");
} }
assert result.isDirect(); assert result.isDirect();
@ -325,9 +326,9 @@ public class LLUtils {
} }
/* /*
public static ByteBuf toDirectCopy(ByteBuf buffer) { public static Buffer toDirectCopy(Buffer buffer) {
try { try {
ByteBuf directCopyBuf = buffer.alloc().buffer(buffer.capacity(), buffer.maxCapacity()); Buffer directCopyBuf = buffer.alloc().buffer(buffer.capacity(), buffer.maxCapacity());
directCopyBuf.writeBytes(buffer, 0, buffer.writerIndex()); directCopyBuf.writeBytes(buffer, 0, buffer.writerIndex());
return directCopyBuf; return directCopyBuf;
} finally { } finally {
@ -336,26 +337,14 @@ public class LLUtils {
} }
*/ */
public static ByteBuf convertToDirectByteBuf(ByteBufAllocator alloc, ByteBuf buffer) { public static Buffer fromByteArray(BufferAllocator alloc, byte[] array) {
ByteBuf result; Buffer result = alloc.allocate(array.length);
ByteBuf directCopyBuf = alloc.buffer(buffer.capacity(), buffer.maxCapacity());
directCopyBuf.writeBytes(buffer, 0, buffer.writerIndex());
directCopyBuf.readerIndex(buffer.readerIndex());
result = directCopyBuf;
assert result.isDirect();
assert result.capacity() == buffer.capacity();
assert buffer.readerIndex() == result.readerIndex();
return result;
}
public static ByteBuf fromByteArray(ByteBufAllocator alloc, byte[] array) {
ByteBuf result = alloc.buffer(array.length);
result.writeBytes(array); result.writeBytes(array);
return result; return result;
} }
@NotNull @NotNull
public static ByteBuf readDirectNioBuffer(ByteBufAllocator alloc, ToIntFunction<ByteBuffer> reader) { public static Buffer readDirectNioBuffer(BufferAllocator alloc, ToIntFunction<ByteBuffer> reader) {
var buffer = readNullableDirectNioBuffer(alloc, reader); var buffer = readNullableDirectNioBuffer(alloc, reader);
if (buffer == null) { if (buffer == null) {
throw new IllegalStateException("A non-nullable buffer read operation tried to return a \"not found\" element"); throw new IllegalStateException("A non-nullable buffer read operation tried to return a \"not found\" element");
@ -363,81 +352,54 @@ public class LLUtils {
return buffer; return buffer;
} }
public static ByteBuf compositeBuffer(ByteBufAllocator alloc, ByteBuf buffer) { public static Send<Buffer> compositeBuffer(BufferAllocator alloc, Send<Buffer> buffer) {
return buffer; try (var composite = buffer.receive().compact()) {
} assert composite.countReadableComponents() == 1 || composite.countReadableComponents() == 0;
return composite.send();
public static ByteBuf compositeBuffer(ByteBufAllocator alloc, ByteBuf buffer1, ByteBuf buffer2) {
try {
if (buffer1.readableBytes() == 0) {
return compositeBuffer(alloc, buffer2.retain());
} else if (buffer2.readableBytes() == 0) {
return compositeBuffer(alloc, buffer1.retain());
}
CompositeByteBuf result = alloc.compositeBuffer(2);
try {
result.addComponent(true, buffer1.retain());
result.addComponent(true, buffer2.retain());
return result.consolidate().retain();
} finally {
result.release();
}
} finally {
buffer1.release();
buffer2.release();
} }
} }
public static ByteBuf compositeBuffer(ByteBufAllocator alloc, ByteBuf buffer1, ByteBuf buffer2, ByteBuf buffer3) { public static Send<Buffer> compositeBuffer(BufferAllocator alloc, Send<Buffer> buffer1, Send<Buffer> buffer2) {
try { try (buffer1) {
if (buffer1.readableBytes() == 0) { try (buffer2) {
return compositeBuffer(alloc, buffer2.retain(), buffer3.retain()); try (var composite = CompositeBuffer.compose(alloc, buffer1, buffer2).compact()) {
} else if (buffer2.readableBytes() == 0) { assert composite.countReadableComponents() == 1 || composite.countReadableComponents() == 0;
return compositeBuffer(alloc, buffer1.retain(), buffer3.retain()); return composite.send();
} else if (buffer3.readableBytes() == 0) { }
return compositeBuffer(alloc, buffer1.retain(), buffer2.retain());
} }
CompositeByteBuf result = alloc.compositeBuffer(3);
try {
result.addComponent(true, buffer1.retain());
result.addComponent(true, buffer2.retain());
result.addComponent(true, buffer3.retain());
return result.consolidate().retain();
} finally {
result.release();
}
} finally {
buffer1.release();
buffer2.release();
buffer3.release();
} }
} }
public static ByteBuf compositeBuffer(ByteBufAllocator alloc, ByteBuf... buffers) { public static Send<Buffer> compositeBuffer(BufferAllocator alloc, Send<Buffer> buffer1, Send<Buffer> buffer2, Send<Buffer> buffer3) {
try { try (buffer1) {
switch (buffers.length) { try (buffer2) {
case 0: try (buffer3) {
return alloc.buffer(0); try (var composite = CompositeBuffer.compose(alloc, buffer1, buffer2, buffer3).compact()) {
case 1: assert composite.countReadableComponents() == 1 || composite.countReadableComponents() == 0;
return compositeBuffer(alloc, buffers[0].retain().retain()); return composite.send();
case 2:
return compositeBuffer(alloc, buffers[0].retain(), buffers[1].retain());
case 3:
return compositeBuffer(alloc, buffers[0].retain(), buffers[1].retain(), buffers[2].retain());
default:
CompositeByteBuf result = alloc.compositeBuffer(buffers.length);
try {
for (ByteBuf buffer : buffers) {
result.addComponent(true, buffer.retain());
}
return result.consolidate().retain();
} finally {
result.release();
} }
}
} }
}
}
public static Send<Buffer> compositeBuffer(BufferAllocator alloc, Send<Buffer>... buffers) {
try {
return switch (buffers.length) {
case 0 -> alloc.allocate(0).send();
case 1 -> compositeBuffer(alloc, buffers[0]);
case 2 -> compositeBuffer(alloc, buffers[0], buffers[1]);
case 3 -> compositeBuffer(alloc, buffers[0], buffers[1], buffers[2]);
default -> {
try (var composite = CompositeBuffer.compose(alloc, buffers).compact()) {
assert composite.countReadableComponents() == 1 || composite.countReadableComponents() == 0;
yield composite.send();
}
}
};
} finally { } finally {
for (ByteBuf buffer : buffers) { for (Send<Buffer> buffer : buffers) {
buffer.release(); buffer.close();
} }
} }
} }
@ -467,6 +429,33 @@ public class LLUtils {
}); });
} }
public static Mono<Send<Buffer>> resolveLLDelta(Mono<LLDelta> prev, UpdateReturnMode updateReturnMode) {
return prev.handle((delta, sink) -> {
try (delta) {
switch (updateReturnMode) {
case GET_NEW_VALUE -> {
var current = delta.current();
if (current != null) {
sink.next(current);
} else {
sink.complete();
}
}
case GET_OLD_VALUE -> {
var previous = delta.previous();
if (previous != null) {
sink.next(previous);
} else {
sink.complete();
}
}
case NOTHING -> sink.complete();
default -> sink.error(new IllegalStateException());
}
}
});
}
public static <T, U> Mono<Delta<U>> mapDelta(Mono<Delta<T>> mono, public static <T, U> Mono<Delta<U>> mapDelta(Mono<Delta<T>> mono,
SerializationFunction<@NotNull T, @Nullable U> mapper) { SerializationFunction<@NotNull T, @Nullable U> mapper) {
return mono.handle((delta, sink) -> { return mono.handle((delta, sink) -> {
@ -492,38 +481,57 @@ public class LLUtils {
}); });
} }
public static <U> Mono<Delta<U>> mapLLDelta(Mono<LLDelta> mono,
SerializationFunction<@NotNull Send<Buffer>, @Nullable U> mapper) {
return mono.handle((delta, sink) -> {
try {
try (Send<Buffer> prev = delta.previous()) {
try (Send<Buffer> curr = delta.current()) {
U newPrev;
U newCurr;
if (prev != null) {
newPrev = mapper.apply(prev);
} else {
newPrev = null;
}
if (curr != null) {
newCurr = mapper.apply(curr);
} else {
newCurr = null;
}
sink.next(new Delta<>(newPrev, newCurr));
}
}
} catch (SerializationException ex) {
sink.error(ex);
}
});
}
public static <R, V> boolean isDeltaChanged(Delta<V> delta) { public static <R, V> boolean isDeltaChanged(Delta<V> delta) {
return !Objects.equals(delta.previous(), delta.current()); return !Objects.equals(delta.previous(), delta.current());
} }
public static Mono<ByteBuf> lazyRetain(ByteBuf buf) { public static Mono<Send<Buffer>> lazyRetain(Buffer buf) {
return Mono.just(buf).map(ByteBuf::retain); return Mono.just(buf).map(b -> b.copy().send());
} }
public static Mono<LLRange> lazyRetainRange(LLRange range) { public static Mono<Send<LLRange>> lazyRetainRange(LLRange range) {
return Mono.just(range).map(LLRange::retain); return Mono.just(range).map(r -> r.copy().send());
} }
public static Mono<ByteBuf> lazyRetain(Callable<ByteBuf> bufCallable) { public static Mono<Send<Buffer>> lazyRetain(Callable<Send<Buffer>> bufCallable) {
return Mono.fromCallable(bufCallable).cacheInvalidateIf(byteBuf -> { return Mono.fromCallable(bufCallable);
// Retain if the value has been cached previously
byteBuf.retain();
return false;
});
} }
public static Mono<LLRange> lazyRetainRange(Callable<LLRange> rangeCallable) { public static Mono<Send<LLRange>> lazyRetainRange(Callable<Send<LLRange>> rangeCallable) {
return Mono.fromCallable(rangeCallable).cacheInvalidateIf(range -> { return Mono.fromCallable(rangeCallable);
// Retain if the value has been cached previously
range.retain();
return false;
});
} }
public static <T> Mono<T> handleDiscard(Mono<T> mono) { public static <T> Mono<T> handleDiscard(Mono<T> mono) {
return mono return mono
.doOnDiscard(Object.class, obj -> { .doOnDiscard(Object.class, obj -> {
if (obj instanceof ReferenceCounted o) { if (obj instanceof SafeCloseable o) {
discardRefCounted(o); discardRefCounted(o);
} else if (obj instanceof Entry o) { } else if (obj instanceof Entry o) {
discardEntry(o); discardEntry(o);
@ -539,13 +547,15 @@ public class LLUtils {
discardLLRange(o); discardLLRange(o);
} else if (obj instanceof Delta o) { } else if (obj instanceof Delta o) {
discardDelta(o); discardDelta(o);
} else if (obj instanceof Send o) {
discardSend(o);
} else if (obj instanceof Map o) { } else if (obj instanceof Map o) {
discardMap(o); discardMap(o);
} }
}); });
// todo: check if the single object discard hook is more performant // todo: check if the single object discard hook is more performant
/* /*
.doOnDiscard(ReferenceCounted.class, LLUtils::discardRefCounted) .doOnDiscard(SafeCloseable.class, LLUtils::discardRefCounted)
.doOnDiscard(Map.Entry.class, LLUtils::discardEntry) .doOnDiscard(Map.Entry.class, LLUtils::discardEntry)
.doOnDiscard(Collection.class, LLUtils::discardCollection) .doOnDiscard(Collection.class, LLUtils::discardCollection)
.doOnDiscard(Tuple2.class, LLUtils::discardTuple2) .doOnDiscard(Tuple2.class, LLUtils::discardTuple2)
@ -553,6 +563,7 @@ public class LLUtils {
.doOnDiscard(LLEntry.class, LLUtils::discardLLEntry) .doOnDiscard(LLEntry.class, LLUtils::discardLLEntry)
.doOnDiscard(LLRange.class, LLUtils::discardLLRange) .doOnDiscard(LLRange.class, LLUtils::discardLLRange)
.doOnDiscard(Delta.class, LLUtils::discardDelta) .doOnDiscard(Delta.class, LLUtils::discardDelta)
.doOnDiscard(Send.class, LLUtils::discardSend)
.doOnDiscard(Map.class, LLUtils::discardMap); .doOnDiscard(Map.class, LLUtils::discardMap);
*/ */
@ -561,7 +572,7 @@ public class LLUtils {
public static <T> Flux<T> handleDiscard(Flux<T> mono) { public static <T> Flux<T> handleDiscard(Flux<T> mono) {
return mono return mono
.doOnDiscard(Object.class, obj -> { .doOnDiscard(Object.class, obj -> {
if (obj instanceof ReferenceCounted o) { if (obj instanceof SafeCloseable o) {
discardRefCounted(o); discardRefCounted(o);
} else if (obj instanceof Entry o) { } else if (obj instanceof Entry o) {
discardEntry(o); discardEntry(o);
@ -577,15 +588,15 @@ public class LLUtils {
discardLLRange(o); discardLLRange(o);
} else if (obj instanceof Delta o) { } else if (obj instanceof Delta o) {
discardDelta(o); discardDelta(o);
} else if (obj instanceof Send o) {
discardSend(o);
} else if (obj instanceof Map o) { } else if (obj instanceof Map o) {
discardMap(o); discardMap(o);
} else {
System.err.println(obj.getClass().getName());
} }
}); });
// todo: check if the single object discard hook is more performant // todo: check if the single object discard hook is more performant
/* /*
.doOnDiscard(ReferenceCounted.class, LLUtils::discardRefCounted) .doOnDiscard(SafeCloseable.class, LLUtils::discardRefCounted)
.doOnDiscard(Map.Entry.class, LLUtils::discardEntry) .doOnDiscard(Map.Entry.class, LLUtils::discardEntry)
.doOnDiscard(Collection.class, LLUtils::discardCollection) .doOnDiscard(Collection.class, LLUtils::discardCollection)
.doOnDiscard(Tuple2.class, LLUtils::discardTuple2) .doOnDiscard(Tuple2.class, LLUtils::discardTuple2)
@ -593,113 +604,78 @@ public class LLUtils {
.doOnDiscard(LLEntry.class, LLUtils::discardLLEntry) .doOnDiscard(LLEntry.class, LLUtils::discardLLEntry)
.doOnDiscard(LLRange.class, LLUtils::discardLLRange) .doOnDiscard(LLRange.class, LLUtils::discardLLRange)
.doOnDiscard(Delta.class, LLUtils::discardDelta) .doOnDiscard(Delta.class, LLUtils::discardDelta)
.doOnDiscard(Send.class, LLUtils::discardSend)
.doOnDiscard(Map.class, LLUtils::discardMap); .doOnDiscard(Map.class, LLUtils::discardMap);
*/ */
} }
private static void discardLLEntry(LLEntry entry) { private static void discardLLEntry(LLEntry entry) {
logger.trace("Releasing discarded ByteBuf"); logger.trace("Releasing discarded Buffer");
entry.release(); entry.close();
} }
private static void discardLLRange(LLRange range) { private static void discardLLRange(LLRange range) {
logger.trace("Releasing discarded ByteBuf"); logger.trace("Releasing discarded Buffer");
range.release(); range.close();
} }
private static void discardEntry(Map.Entry<?, ?> e) { private static void discardEntry(Map.Entry<?, ?> e) {
if (e.getKey() instanceof ByteBuf bb) { if (e.getKey() instanceof Buffer bb) {
if (bb.refCnt() > 0) { bb.close();
logger.trace("Releasing discarded ByteBuf");
bb.release();
}
} }
if (e.getValue() instanceof ByteBuf bb) { if (e.getValue() instanceof Buffer bb) {
if (bb.refCnt() > 0) { bb.close();
logger.trace("Releasing discarded ByteBuf");
bb.release();
}
} }
} }
private static void discardTuple2(Tuple2<?, ?> e) { private static void discardTuple2(Tuple2<?, ?> e) {
if (e.getT1() instanceof ByteBuf bb) { if (e.getT1() instanceof Buffer bb) {
if (bb.refCnt() > 0) { bb.close();
logger.trace("Releasing discarded ByteBuf");
bb.release();
}
} }
if (e.getT2() instanceof ByteBuf bb) { if (e.getT2() instanceof Buffer bb) {
if (bb.refCnt() > 0) { bb.close();
logger.trace("Releasing discarded ByteBuf");
bb.release();
}
} }
} }
private static void discardTuple3(Tuple3<?, ?, ?> e) { private static void discardTuple3(Tuple3<?, ?, ?> e) {
if (e.getT1() instanceof ByteBuf bb) { if (e.getT1() instanceof Buffer bb) {
if (bb.refCnt() > 0) { bb.close();
logger.trace("Releasing discarded ByteBuf");
bb.release();
}
} else if (e.getT1() instanceof Optional opt) { } else if (e.getT1() instanceof Optional opt) {
if (opt.isPresent() && opt.get() instanceof ByteBuf bb) { if (opt.isPresent() && opt.get() instanceof Buffer bb) {
logger.trace("Releasing discarded ByteBuf"); bb.close();
bb.release();
} }
} }
if (e.getT2() instanceof ByteBuf bb) { if (e.getT2() instanceof Buffer bb) {
if (bb.refCnt() > 0) { bb.close();
logger.trace("Releasing discarded ByteBuf");
bb.release();
}
} else if (e.getT1() instanceof Optional opt) { } else if (e.getT1() instanceof Optional opt) {
if (opt.isPresent() && opt.get() instanceof ByteBuf bb) { if (opt.isPresent() && opt.get() instanceof Buffer bb) {
logger.trace("Releasing discarded ByteBuf"); bb.close();
bb.release();
} }
} }
if (e.getT3() instanceof ByteBuf bb) { if (e.getT3() instanceof Buffer bb) {
if (bb.refCnt() > 0) { bb.close();
logger.trace("Releasing discarded ByteBuf");
bb.release();
}
} else if (e.getT1() instanceof Optional opt) { } else if (e.getT1() instanceof Optional opt) {
if (opt.isPresent() && opt.get() instanceof ByteBuf bb) { if (opt.isPresent() && opt.get() instanceof Buffer bb) {
logger.trace("Releasing discarded ByteBuf"); bb.close();
bb.release();
} }
} }
} }
private static void discardRefCounted(ReferenceCounted referenceCounted) { private static void discardRefCounted(SafeCloseable safeCloseable) {
if (referenceCounted.refCnt() > 0) { safeCloseable.close();
logger.trace("Releasing discarded ByteBuf");
referenceCounted.release();
}
} }
private static void discardCollection(Collection<?> collection) { private static void discardCollection(Collection<?> collection) {
for (Object o : collection) { for (Object o : collection) {
if (o instanceof ReferenceCounted referenceCounted) { if (o instanceof SafeCloseable safeCloseable) {
if (referenceCounted.refCnt() > 0) { safeCloseable.close();
logger.trace("Releasing discarded ByteBuf");
referenceCounted.release();
}
} else if (o instanceof Map.Entry entry) { } else if (o instanceof Map.Entry entry) {
if (entry.getKey() instanceof ReferenceCounted bb) { if (entry.getKey() instanceof SafeCloseable bb) {
if (bb.refCnt() > 0) { bb.close();
logger.trace("Releasing discarded ByteBuf");
bb.release();
}
} }
if (entry.getValue() instanceof ReferenceCounted bb) { if (entry.getValue() instanceof SafeCloseable bb) {
if (bb.refCnt() > 0) { bb.close();
logger.trace("Releasing discarded ByteBuf");
bb.release();
}
} }
} else { } else {
break; break;
@ -708,35 +684,27 @@ public class LLUtils {
} }
private static void discardDelta(Delta<?> delta) { private static void discardDelta(Delta<?> delta) {
if (delta.previous() instanceof ByteBuf bb) { if (delta.previous() instanceof Buffer bb) {
if (bb.refCnt() > 0) { bb.close();
logger.trace("Releasing discarded ByteBuf");
bb.release();
}
} }
if (delta.current() instanceof ByteBuf bb) { if (delta.current() instanceof Buffer bb) {
if (bb.refCnt() > 0) { bb.close();
logger.trace("Releasing discarded ByteBuf");
bb.release();
}
} }
} }
private static void discardSend(Send<?> send) {
send.close();
}
private static void discardMap(Map<?, ?> map) { private static void discardMap(Map<?, ?> map) {
for (Entry<?, ?> entry : map.entrySet()) { for (Entry<?, ?> entry : map.entrySet()) {
boolean hasByteBuf = false; boolean hasByteBuf = false;
if (entry.getKey() instanceof ByteBuf bb) { if (entry.getKey() instanceof Buffer bb) {
if (bb.refCnt() > 0) { bb.close();
logger.trace("Releasing discarded ByteBuf");
bb.release();
}
hasByteBuf = true; hasByteBuf = true;
} }
if (entry.getValue() instanceof ByteBuf bb) { if (entry.getValue() instanceof Buffer bb) {
if (bb.refCnt() > 0) { bb.close();
logger.trace("Releasing discarded ByteBuf");
bb.release();
}
hasByteBuf = true; hasByteBuf = true;
} }
if (!hasByteBuf) { if (!hasByteBuf) {
@ -744,4 +712,28 @@ public class LLUtils {
} }
} }
} }
public static boolean isDirect(Buffer key) {
if (key.countReadableComponents() == 1) {
return key.forEachReadable(0, (index, component) -> component.readableBuffer().isDirect()) >= 0;
} else {
return false;
}
}
public static String deserializeString(Send<Buffer> bufferSend, int readerOffset, int length, Charset charset) {
try (var buffer = bufferSend.receive()) {
byte[] bytes = new byte[Math.min(length, buffer.readableBytes())];
buffer.copyInto(readerOffset, bytes, 0, length);
return new String(bytes, charset);
}
}
public static int utf8MaxBytes(String deserialized) {
return deserialized.length() * 3;
}
public static void writeString(Buffer buf, String deserialized, Charset charset) {
buf.writeBytes(deserialized.getBytes(charset));
}
} }

View File

@ -0,0 +1,7 @@
package it.cavallium.dbengine.database;
public interface SafeCloseable extends AutoCloseable {
@Override
void close();
}

View File

@ -1,6 +1,6 @@
package it.cavallium.dbengine.database.collections; package it.cavallium.dbengine.database.collections;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Buffer;
import it.cavallium.dbengine.database.LLDictionary; import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.serialization.Serializer; import it.cavallium.dbengine.database.serialization.Serializer;
import java.util.function.Function; import java.util.function.Function;
@ -11,9 +11,9 @@ public class DatabaseEmpty {
@SuppressWarnings({"unused", "InstantiationOfUtilityClass"}) @SuppressWarnings({"unused", "InstantiationOfUtilityClass"})
public static final Nothing NOTHING = new Nothing(); public static final Nothing NOTHING = new Nothing();
public static final Serializer<Nothing, ByteBuf> NOTHING_SERIALIZER = new Serializer<>() { public static final Serializer<Nothing, Buffer> NOTHING_SERIALIZER = new Serializer<>() {
@Override @Override
public @NotNull Nothing deserialize(@NotNull ByteBuf serialized) { public @NotNull Nothing deserialize(@NotNull Buffer serialized) {
try { try {
return NOTHING; return NOTHING;
} finally { } finally {
@ -22,7 +22,7 @@ public class DatabaseEmpty {
} }
@Override @Override
public @NotNull ByteBuf serialize(@NotNull Nothing deserialized) { public @NotNull Buffer serialize(@NotNull Nothing deserialized) {
return EMPTY_BUFFER; return EMPTY_BUFFER;
} }
}; };
@ -33,7 +33,7 @@ public class DatabaseEmpty {
private DatabaseEmpty() { private DatabaseEmpty() {
} }
public static DatabaseStageEntry<Nothing> create(LLDictionary dictionary, ByteBuf key) { public static DatabaseStageEntry<Nothing> create(LLDictionary dictionary, Buffer key) {
return new DatabaseSingle<>(dictionary, key, NOTHING_SERIALIZER); return new DatabaseSingle<>(dictionary, key, NOTHING_SERIALIZER);
} }

View File

@ -1,6 +1,7 @@
package it.cavallium.dbengine.database.collections; package it.cavallium.dbengine.database.collections;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Buffer;
import io.netty.buffer.api.Send;
import io.netty.util.ReferenceCounted; import io.netty.util.ReferenceCounted;
import it.cavallium.dbengine.client.CompositeSnapshot; import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.Delta; import it.cavallium.dbengine.database.Delta;
@ -38,40 +39,38 @@ import reactor.util.function.Tuples;
*/ */
public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U, DatabaseStageEntry<U>> { public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U, DatabaseStageEntry<U>> {
private final Serializer<U, ByteBuf> valueSerializer; private final Serializer<U, Send<Buffer>> valueSerializer;
protected DatabaseMapDictionary(LLDictionary dictionary, protected DatabaseMapDictionary(LLDictionary dictionary,
ByteBuf prefixKey, Send<Buffer> prefixKey,
SerializerFixedBinaryLength<T, ByteBuf> keySuffixSerializer, SerializerFixedBinaryLength<T, Send<Buffer>> keySuffixSerializer,
Serializer<U, ByteBuf> valueSerializer) { Serializer<U, Send<Buffer>> valueSerializer) {
// Do not retain or release or use the prefixKey here // Do not retain or release or use the prefixKey here
super(dictionary, prefixKey, keySuffixSerializer, new SubStageGetterSingle<>(valueSerializer), 0); super(dictionary, prefixKey, keySuffixSerializer, new SubStageGetterSingle<>(valueSerializer), 0);
this.valueSerializer = valueSerializer; this.valueSerializer = valueSerializer;
} }
public static <T, U> DatabaseMapDictionary<T, U> simple(LLDictionary dictionary, public static <T, U> DatabaseMapDictionary<T, U> simple(LLDictionary dictionary,
SerializerFixedBinaryLength<T, ByteBuf> keySerializer, SerializerFixedBinaryLength<T, Send<Buffer>> keySerializer,
Serializer<U, ByteBuf> valueSerializer) { Serializer<U, Send<Buffer>> valueSerializer) {
return new DatabaseMapDictionary<>(dictionary, dictionary.getAllocator().buffer(0), keySerializer, valueSerializer); return new DatabaseMapDictionary<>(dictionary, dictionary.getAllocator().allocate(0).send(), keySerializer, valueSerializer);
} }
public static <T, U> DatabaseMapDictionary<T, U> tail(LLDictionary dictionary, public static <T, U> DatabaseMapDictionary<T, U> tail(LLDictionary dictionary,
ByteBuf prefixKey, Send<Buffer> prefixKey,
SerializerFixedBinaryLength<T, ByteBuf> keySuffixSerializer, SerializerFixedBinaryLength<T, Send<Buffer>> keySuffixSerializer,
Serializer<U, ByteBuf> valueSerializer) { Serializer<U, Send<Buffer>> valueSerializer) {
return new DatabaseMapDictionary<>(dictionary, prefixKey, keySuffixSerializer, valueSerializer); return new DatabaseMapDictionary<>(dictionary, prefixKey, keySuffixSerializer, valueSerializer);
} }
private ByteBuf toKey(ByteBuf suffixKey) { private Send<Buffer> toKey(Send<Buffer> suffixKeyToSend) {
try { try (var suffixKey = suffixKeyToSend.receive()) {
assert suffixKeyConsistency(suffixKey.readableBytes()); assert suffixKeyConsistency(suffixKey.readableBytes());
return LLUtils.compositeBuffer(dictionary.getAllocator(), keyPrefix.retain(), suffixKey.retain()); return LLUtils.compositeBuffer(dictionary.getAllocator(), keyPrefix.copy().send(), suffixKey.send());
} finally {
suffixKey.release();
} }
} }
private void deserializeValue(ByteBuf value, SynchronousSink<U> sink) { private void deserializeValue(Send<Buffer> value, SynchronousSink<U> sink) {
try { try {
sink.next(valueSerializer.deserialize(value)); sink.next(valueSerializer.deserialize(value));
} catch (SerializationException ex) { } catch (SerializationException ex) {
@ -202,7 +201,7 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
); );
} }
public SerializationFunction<@Nullable ByteBuf, @Nullable ByteBuf> getSerializedUpdater(SerializationFunction<@Nullable U, @Nullable U> updater) { public SerializationFunction<@Nullable Buffer, @Nullable Buffer> getSerializedUpdater(SerializationFunction<@Nullable U, @Nullable U> updater) {
return oldSerialized -> { return oldSerialized -> {
try { try {
U result; U result;
@ -224,7 +223,7 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
}; };
} }
public <X> BiSerializationFunction<@Nullable ByteBuf, X, @Nullable ByteBuf> getSerializedUpdater( public <X> BiSerializationFunction<@Nullable Buffer, X, @Nullable Buffer> getSerializedUpdater(
BiSerializationFunction<@Nullable U, X, @Nullable U> updater) { BiSerializationFunction<@Nullable U, X, @Nullable U> updater) {
return (oldSerialized, extra) -> { return (oldSerialized, extra) -> {
try { try {
@ -336,7 +335,7 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
@Override @Override
public Flux<Entry<T, Optional<U>>> getMulti(@Nullable CompositeSnapshot snapshot, Flux<T> keys, boolean existsAlmostCertainly) { public Flux<Entry<T, Optional<U>>> getMulti(@Nullable CompositeSnapshot snapshot, Flux<T> keys, boolean existsAlmostCertainly) {
return dictionary.getMulti(resolveSnapshot(snapshot), keys.flatMap(keySuffix -> Mono.fromCallable(() -> { return dictionary.getMulti(resolveSnapshot(snapshot), keys.flatMap(keySuffix -> Mono.fromCallable(() -> {
ByteBuf keySuffixBuf = serializeSuffix(keySuffix); Buffer keySuffixBuf = serializeSuffix(keySuffix);
try { try {
var key = toKey(keySuffixBuf.retain()); var key = toKey(keySuffixBuf.retain());
try { try {
@ -367,9 +366,9 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
} }
private LLEntry serializeEntry(T key, U value) throws SerializationException { private LLEntry serializeEntry(T key, U value) throws SerializationException {
ByteBuf serializedKey = toKey(serializeSuffix(key)); Buffer serializedKey = toKey(serializeSuffix(key));
try { try {
ByteBuf serializedValue = valueSerializer.serialize(value); Buffer serializedValue = valueSerializer.serialize(value);
try { try {
return new LLEntry(serializedKey.retain(), serializedValue.retain()); return new LLEntry(serializedKey.retain(), serializedValue.retain());
} finally { } finally {
@ -403,15 +402,15 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
@Override @Override
public <X> Flux<ExtraKeyOperationResult<T, X>> updateMulti(Flux<Tuple2<T, X>> entries, public <X> Flux<ExtraKeyOperationResult<T, X>> updateMulti(Flux<Tuple2<T, X>> entries,
BiSerializationFunction<@Nullable U, X, @Nullable U> updater) { BiSerializationFunction<@Nullable U, X, @Nullable U> updater) {
Flux<Tuple2<ByteBuf, X>> serializedEntries = entries Flux<Tuple2<Buffer, X>> serializedEntries = entries
.flatMap(entry -> Mono .flatMap(entry -> Mono
.fromCallable(() -> Tuples.of(serializeSuffix(entry.getT1()), entry.getT2())) .fromCallable(() -> Tuples.of(serializeSuffix(entry.getT1()), entry.getT2()))
) )
.doOnDiscard(Tuple2.class, uncastedEntry -> { .doOnDiscard(Tuple2.class, uncastedEntry -> {
if (uncastedEntry.getT1() instanceof ByteBuf byteBuf) { if (uncastedEntry.getT1() instanceof Buffer byteBuf) {
byteBuf.release(); byteBuf.release();
} }
if (uncastedEntry.getT2() instanceof ByteBuf byteBuf) { if (uncastedEntry.getT2() instanceof Buffer byteBuf) {
byteBuf.release(); byteBuf.release();
} }
}); });
@ -435,7 +434,7 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
.getRangeKeys(resolveSnapshot(snapshot), rangeMono) .getRangeKeys(resolveSnapshot(snapshot), rangeMono)
.handle((key, sink) -> { .handle((key, sink) -> {
try { try {
ByteBuf keySuffixWithExt = stripPrefix(key.retain(), false); Buffer keySuffixWithExt = stripPrefix(key.retain(), false);
try { try {
sink.next(Map.entry(deserializeSuffix(keySuffixWithExt.retainedSlice()), sink.next(Map.entry(deserializeSuffix(keySuffixWithExt.retainedSlice()),
new DatabaseSingleMapped<>(new DatabaseSingle<>(dictionary, new DatabaseSingleMapped<>(new DatabaseSingle<>(dictionary,
@ -459,10 +458,10 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
return dictionary return dictionary
.getRange(resolveSnapshot(snapshot), rangeMono) .getRange(resolveSnapshot(snapshot), rangeMono)
.<Entry<T, U>>handle((serializedEntry, sink) -> { .<Entry<T, U>>handle((serializedEntry, sink) -> {
ByteBuf key = serializedEntry.getKey(); Buffer key = serializedEntry.getKey();
ByteBuf value = serializedEntry.getValue(); Buffer value = serializedEntry.getValue();
try { try {
ByteBuf keySuffix = stripPrefix(key.retain(), false); Buffer keySuffix = stripPrefix(key.retain(), false);
try { try {
sink.next(Map.entry(deserializeSuffix(keySuffix.retain()), sink.next(Map.entry(deserializeSuffix(keySuffix.retain()),
valueSerializer.deserialize(value.retain()))); valueSerializer.deserialize(value.retain())));
@ -477,12 +476,12 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
} }
}) })
.doOnDiscard(Entry.class, uncastedEntry -> { .doOnDiscard(Entry.class, uncastedEntry -> {
if (uncastedEntry.getKey() instanceof ByteBuf byteBuf) { if (uncastedEntry.getKey() instanceof Buffer byteBuf) {
if (byteBuf.refCnt() > 0) { if (byteBuf.refCnt() > 0) {
byteBuf.release(); byteBuf.release();
} }
} }
if (uncastedEntry.getValue() instanceof ByteBuf byteBuf) { if (uncastedEntry.getValue() instanceof Buffer byteBuf) {
if (byteBuf.refCnt() > 0) { if (byteBuf.refCnt() > 0) {
byteBuf.release(); byteBuf.release();
} }
@ -496,9 +495,9 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
this.getAllValues(null), this.getAllValues(null),
dictionary.setRange(rangeMono, entries.handle((entry, sink) -> { dictionary.setRange(rangeMono, entries.handle((entry, sink) -> {
try { try {
ByteBuf serializedKey = toKey(serializeSuffix(entry.getKey())); Buffer serializedKey = toKey(serializeSuffix(entry.getKey()));
try { try {
ByteBuf serializedValue = valueSerializer.serialize(entry.getValue()); Buffer serializedValue = valueSerializer.serialize(entry.getValue());
try { try {
sink.next(new LLEntry(serializedKey.retain(), serializedValue.retain())); sink.next(new LLEntry(serializedKey.retain(), serializedValue.retain()));
} finally { } finally {

View File

@ -1,7 +1,9 @@
package it.cavallium.dbengine.database.collections; package it.cavallium.dbengine.database.collections;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Buffer;
import io.netty.buffer.ByteBufAllocator; import io.netty.buffer.api.BufferAllocator;
import io.netty.buffer.api.Resource;
import io.netty.buffer.api.Send;
import io.netty.util.IllegalReferenceCountException; import io.netty.util.IllegalReferenceCountException;
import io.netty.util.ReferenceCounted; import io.netty.util.ReferenceCounted;
import it.cavallium.dbengine.client.BadBlock; import it.cavallium.dbengine.client.BadBlock;
@ -28,178 +30,155 @@ import reactor.util.function.Tuples;
public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implements DatabaseStageMap<T, U, US> { public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implements DatabaseStageMap<T, U, US> {
protected final LLDictionary dictionary; protected final LLDictionary dictionary;
private final ByteBufAllocator alloc; private final BufferAllocator alloc;
protected final SubStageGetter<U, US> subStageGetter; protected final SubStageGetter<U, US> subStageGetter;
protected final SerializerFixedBinaryLength<T, ByteBuf> keySuffixSerializer; protected final SerializerFixedBinaryLength<T, Send<Buffer>> keySuffixSerializer;
protected final ByteBuf keyPrefix; protected final Buffer keyPrefix;
protected final int keyPrefixLength; protected final int keyPrefixLength;
protected final int keySuffixLength; protected final int keySuffixLength;
protected final int keyExtLength; protected final int keyExtLength;
protected final LLRange range; protected final LLRange range;
protected final Mono<LLRange> rangeMono; protected final Mono<Send<LLRange>> rangeMono;
private volatile boolean released; private volatile boolean released;
private static ByteBuf incrementPrefix(ByteBufAllocator alloc, ByteBuf originalKey, int prefixLength) { private static Send<Buffer> incrementPrefix(BufferAllocator alloc, Send<Buffer> originalKeySend, int prefixLength) {
try { try (var originalKey = originalKeySend.receive()) {
assert originalKey.readableBytes() >= prefixLength; assert originalKey.readableBytes() >= prefixLength;
ByteBuf copiedBuf = alloc.buffer(originalKey.writerIndex(), originalKey.writerIndex() + 1); try (Buffer copiedBuf = alloc.allocate(originalKey.writerOffset())) {
try {
boolean overflowed = true; boolean overflowed = true;
final int ff = 0xFF; final int ff = 0xFF;
int writtenBytes = 0; int writtenBytes = 0;
copiedBuf.writerIndex(prefixLength); copiedBuf.writerOffset(prefixLength);
for (int i = prefixLength - 1; i >= 0; i--) { for (int i = prefixLength - 1; i >= 0; i--) {
int iByte = originalKey.getUnsignedByte(i); int iByte = originalKey.getUnsignedByte(i);
if (iByte != ff) { if (iByte != ff) {
copiedBuf.setByte(i, iByte + 1); copiedBuf.setUnsignedByte(i, iByte + 1);
writtenBytes++; writtenBytes++;
overflowed = false; overflowed = false;
break; break;
} else { } else {
copiedBuf.setByte(i, 0x00); copiedBuf.setUnsignedByte(i, 0x00);
writtenBytes++; writtenBytes++;
overflowed = true; overflowed = true;
} }
} }
assert prefixLength - writtenBytes >= 0; assert prefixLength - writtenBytes >= 0;
if (prefixLength - writtenBytes > 0) { if (prefixLength - writtenBytes > 0) {
copiedBuf.setBytes(0, originalKey, 0, (prefixLength - writtenBytes)); originalKey.copyInto(0, copiedBuf, 0, (prefixLength - writtenBytes));
} }
copiedBuf.writerIndex(copiedBuf.capacity()); copiedBuf.writerOffset(copiedBuf.capacity());
if (originalKey.writerIndex() - prefixLength > 0) { if (originalKey.writerOffset() - prefixLength > 0) {
copiedBuf.setBytes(prefixLength, originalKey, prefixLength, originalKey.writerIndex() - prefixLength); originalKey.copyInto(prefixLength, copiedBuf, prefixLength, originalKey.writerOffset() - prefixLength);
} }
if (overflowed) { if (overflowed) {
for (int i = 0; i < copiedBuf.writerIndex(); i++) { for (int i = 0; i < copiedBuf.writerOffset(); i++) {
copiedBuf.setByte(i, 0xFF); copiedBuf.setUnsignedByte(i, 0xFF);
} }
copiedBuf.writeZero(1); copiedBuf.writeByte((byte) 0x00);
} }
return copiedBuf.retain(); return copiedBuf.send();
} finally {
copiedBuf.release();
} }
} finally {
originalKey.release();
} }
} }
static ByteBuf firstRangeKey(ByteBufAllocator alloc, static Send<Buffer> firstRangeKey(BufferAllocator alloc,
ByteBuf prefixKey, Send<Buffer> prefixKey,
int prefixLength, int prefixLength,
int suffixLength, int suffixLength,
int extLength) { int extLength) {
return zeroFillKeySuffixAndExt(alloc, prefixKey, prefixLength, suffixLength, extLength); return zeroFillKeySuffixAndExt(alloc, prefixKey, prefixLength, suffixLength, extLength);
} }
static ByteBuf nextRangeKey(ByteBufAllocator alloc, static Send<Buffer> nextRangeKey(BufferAllocator alloc,
ByteBuf prefixKey, Send<Buffer> prefixKey,
int prefixLength, int prefixLength,
int suffixLength, int suffixLength,
int extLength) { int extLength) {
try { try (prefixKey) {
ByteBuf nonIncremented = zeroFillKeySuffixAndExt(alloc, prefixKey.retain(), prefixLength, suffixLength, extLength); try (Send<Buffer> nonIncremented = zeroFillKeySuffixAndExt(alloc, prefixKey, prefixLength, suffixLength,
try { extLength)) {
return incrementPrefix(alloc, nonIncremented.retain(), prefixLength); return incrementPrefix(alloc, nonIncremented, prefixLength);
} finally {
nonIncremented.release();
} }
} finally {
prefixKey.release();
} }
} }
protected static ByteBuf zeroFillKeySuffixAndExt(ByteBufAllocator alloc, protected static Send<Buffer> zeroFillKeySuffixAndExt(BufferAllocator alloc,
ByteBuf prefixKey, Send<Buffer> prefixKeySend,
int prefixLength, int prefixLength,
int suffixLength, int suffixLength,
int extLength) { int extLength) {
try { try (var prefixKey = prefixKeySend.receive()) {
assert prefixKey.readableBytes() == prefixLength; assert prefixKey.readableBytes() == prefixLength;
assert suffixLength > 0; assert suffixLength > 0;
assert extLength >= 0; assert extLength >= 0;
ByteBuf zeroSuffixAndExt = alloc.buffer(suffixLength + extLength, suffixLength + extLength); try (Buffer zeroSuffixAndExt = alloc.allocate(suffixLength + extLength)) {
try { for (int i = 0; i < suffixLength + extLength; i++) {
zeroSuffixAndExt.writeZero(suffixLength + extLength); zeroSuffixAndExt.writeByte((byte) 0x0);
ByteBuf result = LLUtils.compositeBuffer(alloc, prefixKey.retain(), zeroSuffixAndExt.retain()); }
try { try (Send<Buffer> result = LLUtils.compositeBuffer(alloc, prefixKey.send(), zeroSuffixAndExt.send())) {
return result.retain(); return result;
} finally {
result.release();
} }
} finally {
zeroSuffixAndExt.release();
} }
} finally {
prefixKey.release();
} }
} }
static ByteBuf firstRangeKey( static Send<Buffer> firstRangeKey(
ByteBufAllocator alloc, BufferAllocator alloc,
ByteBuf prefixKey, Send<Buffer> prefixKey,
ByteBuf suffixKey, Send<Buffer> suffixKey,
int prefixLength, int prefixLength,
int suffixLength, int suffixLength,
int extLength) { int extLength) {
return zeroFillKeyExt(alloc, prefixKey, suffixKey, prefixLength, suffixLength, extLength); return zeroFillKeyExt(alloc, prefixKey, suffixKey, prefixLength, suffixLength, extLength);
} }
static ByteBuf nextRangeKey( static Send<Buffer> nextRangeKey(
ByteBufAllocator alloc, BufferAllocator alloc,
ByteBuf prefixKey, Send<Buffer> prefixKey,
ByteBuf suffixKey, Send<Buffer> suffixKey,
int prefixLength, int prefixLength,
int suffixLength, int suffixLength,
int extLength) { int extLength) {
try { try (Send<Buffer> nonIncremented = zeroFillKeyExt(alloc,
ByteBuf nonIncremented = zeroFillKeyExt(alloc, prefixKey,
prefixKey.retain(), suffixKey,
suffixKey.retain(), prefixLength,
prefixLength, suffixLength,
suffixLength, extLength
extLength )) {
); return incrementPrefix(alloc, nonIncremented, prefixLength + suffixLength);
try {
return incrementPrefix(alloc, nonIncremented.retain(), prefixLength + suffixLength);
} finally {
nonIncremented.release();
}
} finally {
prefixKey.release();
suffixKey.release();
} }
} }
protected static ByteBuf zeroFillKeyExt( protected static Send<Buffer> zeroFillKeyExt(
ByteBufAllocator alloc, BufferAllocator alloc,
ByteBuf prefixKey, Send<Buffer> prefixKeySend,
ByteBuf suffixKey, Send<Buffer> suffixKeySend,
int prefixLength, int prefixLength,
int suffixLength, int suffixLength,
int extLength) { int extLength) {
try { try (var prefixKey = prefixKeySend.receive()) {
assert prefixKey.readableBytes() == prefixLength; try (var suffixKey = suffixKeySend.receive()) {
assert suffixKey.readableBytes() == suffixLength; assert prefixKey.readableBytes() == prefixLength;
assert suffixLength > 0; assert suffixKey.readableBytes() == suffixLength;
assert extLength >= 0; assert suffixLength > 0;
ByteBuf result = LLUtils.compositeBuffer(alloc, assert extLength >= 0;
prefixKey.retain(),
suffixKey.retain(), try (var ext = alloc.allocate(extLength)) {
alloc.buffer(extLength, extLength).writeZero(extLength) for (int i = 0; i < extLength; i++) {
); ext.writeByte((byte) 0);
try { }
assert result.readableBytes() == prefixLength + suffixLength + extLength;
return result.retain(); try (Buffer result = LLUtils.compositeBuffer(alloc, prefixKey.send(), suffixKey.send(), ext.send())
} finally { .receive()) {
result.release(); assert result.readableBytes() == prefixLength + suffixLength + extLength;
return result.send();
}
}
} }
} finally {
prefixKey.release();
suffixKey.release();
} }
} }
@ -208,22 +187,18 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
*/ */
@Deprecated @Deprecated
public static <T, U> DatabaseMapDictionaryDeep<T, U, DatabaseStageEntry<U>> simple(LLDictionary dictionary, public static <T, U> DatabaseMapDictionaryDeep<T, U, DatabaseStageEntry<U>> simple(LLDictionary dictionary,
SerializerFixedBinaryLength<T, ByteBuf> keySerializer, SerializerFixedBinaryLength<T, Send<Buffer>> keySerializer,
SubStageGetterSingle<U> subStageGetter) { SubStageGetterSingle<U> subStageGetter) {
return new DatabaseMapDictionaryDeep<>(dictionary, return new DatabaseMapDictionaryDeep<>(dictionary, dictionary.getAllocator().allocate(0).send(),
dictionary.getAllocator().buffer(0), keySerializer, subStageGetter, 0);
keySerializer,
subStageGetter,
0
);
} }
public static <T, U, US extends DatabaseStage<U>> DatabaseMapDictionaryDeep<T, U, US> deepTail(LLDictionary dictionary, public static <T, U, US extends DatabaseStage<U>> DatabaseMapDictionaryDeep<T, U, US> deepTail(LLDictionary dictionary,
SerializerFixedBinaryLength<T, ByteBuf> keySerializer, SerializerFixedBinaryLength<T, Send<Buffer>> keySerializer,
int keyExtLength, int keyExtLength,
SubStageGetter<U, US> subStageGetter) { SubStageGetter<U, US> subStageGetter) {
return new DatabaseMapDictionaryDeep<>(dictionary, return new DatabaseMapDictionaryDeep<>(dictionary,
dictionary.getAllocator().buffer(0), dictionary.getAllocator().allocate(0).send(),
keySerializer, keySerializer,
subStageGetter, subStageGetter,
keyExtLength keyExtLength
@ -231,56 +206,45 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
} }
public static <T, U, US extends DatabaseStage<U>> DatabaseMapDictionaryDeep<T, U, US> deepIntermediate(LLDictionary dictionary, public static <T, U, US extends DatabaseStage<U>> DatabaseMapDictionaryDeep<T, U, US> deepIntermediate(LLDictionary dictionary,
ByteBuf prefixKey, Send<Buffer> prefixKey,
SerializerFixedBinaryLength<T, ByteBuf> keySuffixSerializer, SerializerFixedBinaryLength<T, Send<Buffer>> keySuffixSerializer,
SubStageGetter<U, US> subStageGetter, SubStageGetter<U, US> subStageGetter,
int keyExtLength) { int keyExtLength) {
return new DatabaseMapDictionaryDeep<>(dictionary, prefixKey, keySuffixSerializer, subStageGetter, keyExtLength); return new DatabaseMapDictionaryDeep<>(dictionary, prefixKey, keySuffixSerializer, subStageGetter, keyExtLength);
} }
protected DatabaseMapDictionaryDeep(LLDictionary dictionary, protected DatabaseMapDictionaryDeep(LLDictionary dictionary,
ByteBuf prefixKey, Send<Buffer> prefixKey,
SerializerFixedBinaryLength<T, ByteBuf> keySuffixSerializer, SerializerFixedBinaryLength<T, Send<Buffer>> keySuffixSerializer,
SubStageGetter<U, US> subStageGetter, SubStageGetter<U, US> subStageGetter,
int keyExtLength) { int keyExtLength) {
try { this.dictionary = dictionary;
this.dictionary = dictionary; this.alloc = dictionary.getAllocator();
this.alloc = dictionary.getAllocator(); this.subStageGetter = subStageGetter;
this.subStageGetter = subStageGetter; this.keySuffixSerializer = keySuffixSerializer;
this.keySuffixSerializer = keySuffixSerializer; this.keyPrefix = prefixKey.receive();
assert prefixKey.refCnt() > 0; assert keyPrefix.isAccessible();
this.keyPrefix = prefixKey.retain(); this.keyPrefixLength = keyPrefix.readableBytes();
assert keyPrefix.refCnt() > 0; this.keySuffixLength = keySuffixSerializer.getSerializedBinaryLength();
this.keyPrefixLength = keyPrefix.readableBytes(); this.keyExtLength = keyExtLength;
this.keySuffixLength = keySuffixSerializer.getSerializedBinaryLength(); try (Buffer firstKey = firstRangeKey(alloc,
this.keyExtLength = keyExtLength; keyPrefix.copy().send(),
ByteBuf firstKey = firstRangeKey(alloc, keyPrefixLength,
keyPrefix.retain(), keySuffixLength,
keyExtLength
).receive()) {
try (Buffer nextRangeKey = nextRangeKey(alloc,
keyPrefix.copy().send(),
keyPrefixLength, keyPrefixLength,
keySuffixLength, keySuffixLength,
keyExtLength keyExtLength
); ).receive()) {
try { assert keyPrefix.isAccessible();
ByteBuf nextRangeKey = nextRangeKey(alloc, assert keyPrefixLength == 0 || !LLUtils.equals(firstKey, nextRangeKey);
keyPrefix.retain(), this.range = keyPrefixLength == 0 ? LLRange.all() : LLRange.of(firstKey.send(), nextRangeKey.send());
keyPrefixLength, this.rangeMono = LLUtils.lazyRetainRange(this.range);
keySuffixLength, assert subStageKeysConsistency(keyPrefixLength + keySuffixLength + keyExtLength);
keyExtLength
);
try {
assert keyPrefix.refCnt() > 0;
assert keyPrefixLength == 0 || !LLUtils.equals(firstKey, nextRangeKey);
this.range = keyPrefixLength == 0 ? LLRange.all() : LLRange.of(firstKey.retain(), nextRangeKey.retain());
this.rangeMono = LLUtils.lazyRetainRange(this.range);
assert subStageKeysConsistency(keyPrefixLength + keySuffixLength + keyExtLength);
} finally {
nextRangeKey.release();
}
} finally {
firstKey.release();
} }
} finally {
prefixKey.release();
} }
} }
@ -302,49 +266,31 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
/** /**
* Keep only suffix and ext * Keep only suffix and ext
*/ */
protected ByteBuf stripPrefix(ByteBuf key, boolean slice) { protected Send<Buffer> stripPrefix(Send<Buffer> keyToReceive) {
try { try (var key = keyToReceive.receive()) {
if (slice) { return key.copy(this.keyPrefixLength, key.readableBytes() - this.keyPrefixLength).send();
return key.retainedSlice(this.keyPrefixLength, key.readableBytes() - this.keyPrefixLength);
} else {
return key.retain().readerIndex(key.readerIndex() + keyPrefixLength);
}
} finally {
key.release();
} }
} }
/** /**
* Remove ext from full key * Remove ext from full key
*/ */
protected ByteBuf removeExtFromFullKey(ByteBuf key, boolean slice) { protected Send<Buffer> removeExtFromFullKey(Send<Buffer> keyToReceive) {
try { try (var key = keyToReceive.receive()) {
if (slice) { return key.copy(key.readerOffset(), keyPrefixLength + keySuffixLength).send();
return key.retainedSlice(key.readerIndex(), keyPrefixLength + keySuffixLength);
} else {
return key.retain().writerIndex(key.writerIndex() - (keyPrefixLength + keySuffixLength));
}
} finally {
key.release();
} }
} }
/** /**
* Add prefix to suffix * Add prefix to suffix
*/ */
protected ByteBuf toKeyWithoutExt(ByteBuf suffixKey) { protected Send<Buffer> toKeyWithoutExt(Send<Buffer> suffixKeyToReceive) {
try { try (var suffixKey = suffixKeyToReceive.receive()) {
assert suffixKey.readableBytes() == keySuffixLength; assert suffixKey.readableBytes() == keySuffixLength;
ByteBuf result = LLUtils.compositeBuffer(alloc, keyPrefix.retain(), suffixKey.retain()); try (Buffer result = LLUtils.compositeBuffer(alloc, keyPrefix.copy().send(), suffixKey.send()).receive()) {
assert keyPrefix.refCnt() > 0;
try {
assert result.readableBytes() == keyPrefixLength + keySuffixLength; assert result.readableBytes() == keyPrefixLength + keySuffixLength;
return result.retain(); return result.send();
} finally {
result.release();
} }
} finally {
suffixKey.release();
} }
} }
@ -356,26 +302,23 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
} }
} }
protected LLRange toExtRange(ByteBuf keySuffix) { protected Send<LLRange> toExtRange(Buffer keySuffix) {
try { try (Buffer first = firstRangeKey(alloc,
ByteBuf first = firstRangeKey(alloc, keyPrefix.copy().send(),
keyPrefix.retain(), keySuffix.copy().send(),
keySuffix.retain(), keyPrefixLength,
keySuffixLength,
keyExtLength
).receive()) {
try (Buffer end = nextRangeKey(alloc,
keyPrefix.copy().send(),
keySuffix.copy().send(),
keyPrefixLength, keyPrefixLength,
keySuffixLength, keySuffixLength,
keyExtLength keyExtLength
); ).receive()) {
ByteBuf end = nextRangeKey(alloc, return LLRange.of(first.send(), end.send()).send();
keyPrefix.retain(), }
keySuffix.retain(),
keyPrefixLength,
keySuffixLength,
keyExtLength
);
assert keyPrefix.refCnt() > 0;
return LLRange.of(first, end);
} finally {
keySuffix.release();
} }
} }
@ -392,16 +335,14 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
@Override @Override
public Mono<US> at(@Nullable CompositeSnapshot snapshot, T keySuffix) { public Mono<US> at(@Nullable CompositeSnapshot snapshot, T keySuffix) {
return Mono.using( return Mono.using(
() -> serializeSuffix(keySuffix), () -> serializeSuffix(keySuffix).receive(),
keySuffixData -> { keySuffixData -> Mono.using(
return Mono.using( () -> toKeyWithoutExt(keySuffixData.send()).receive(),
() -> toKeyWithoutExt(keySuffixData.retain()), keyWithoutExt -> this.subStageGetter
keyWithoutExt -> this.subStageGetter .subStage(dictionary, snapshot, LLUtils.lazyRetain(keyWithoutExt)),
.subStage(dictionary, snapshot, LLUtils.lazyRetain(keyWithoutExt)), Resource::close
ReferenceCounted::release ),
); Resource::close
},
ReferenceCounted::release
).transform(LLUtils::handleDiscard).doOnDiscard(DatabaseStage.class, DatabaseStage::release); ).transform(LLUtils::handleDiscard).doOnDiscard(DatabaseStage.class, DatabaseStage::release);
} }
@ -415,26 +356,21 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
return dictionary.badBlocks(rangeMono); return dictionary.badBlocks(rangeMono);
} }
private static record GroupBuffers(ByteBuf groupKeyWithExt, ByteBuf groupKeyWithoutExt, ByteBuf groupSuffix) {} private static record GroupBuffers(Buffer groupKeyWithExt, Buffer groupKeyWithoutExt, Buffer groupSuffix) {}
@Override @Override
public Flux<Entry<T, US>> getAllStages(@Nullable CompositeSnapshot snapshot) { public Flux<Entry<T, US>> getAllStages(@Nullable CompositeSnapshot snapshot) {
return Flux return Flux
.defer(() -> dictionary.getRangeKeyPrefixes(resolveSnapshot(snapshot), rangeMono, keyPrefixLength + keySuffixLength)) .defer(() -> dictionary.getRangeKeyPrefixes(resolveSnapshot(snapshot), rangeMono, keyPrefixLength + keySuffixLength))
.flatMapSequential(groupKeyWithoutExt -> Mono .flatMapSequential(groupKeyWithoutExtSend -> Mono
.using( .using(
() -> { () -> {
try { try (var groupKeyWithoutExt = groupKeyWithoutExtSend.receive()) {
var groupSuffix = this.stripPrefix(groupKeyWithoutExt.retain(), true); try (var groupSuffix = this.stripPrefix(groupKeyWithoutExt.copy().send()).receive()) {
try {
assert subStageKeysConsistency(groupKeyWithoutExt.readableBytes() + keyExtLength); assert subStageKeysConsistency(groupKeyWithoutExt.readableBytes() + keyExtLength);
return Tuples.of(groupKeyWithoutExt.retain(), groupSuffix.retain()); return Tuples.of(groupKeyWithoutExt, groupSuffix);
} finally {
groupSuffix.release();
} }
} finally {
groupKeyWithoutExt.release();
} }
}, },
groupKeyWithoutExtAndGroupSuffix -> this.subStageGetter groupKeyWithoutExtAndGroupSuffix -> this.subStageGetter
@ -444,14 +380,15 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
) )
.<Entry<T, US>>handle((us, sink) -> { .<Entry<T, US>>handle((us, sink) -> {
try { try {
sink.next(Map.entry(this.deserializeSuffix(groupKeyWithoutExtAndGroupSuffix.getT2().retain()), us)); sink.next(Map.entry(this.deserializeSuffix(groupKeyWithoutExtAndGroupSuffix.getT2().send()),
us));
} catch (SerializationException ex) { } catch (SerializationException ex) {
sink.error(ex); sink.error(ex);
} }
}), }),
entry -> { entry -> {
entry.getT1().release(); entry.getT1().close();
entry.getT2().release(); entry.getT2().close();
} }
) )
) )
@ -489,8 +426,8 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
return dictionary.clear(); return dictionary.clear();
} else if (range.isSingle()) { } else if (range.isSingle()) {
return dictionary return dictionary
.remove(LLUtils.lazyRetain(range.getSingle()), LLDictionaryResultType.VOID) .remove(LLUtils.lazyRetain(range::getSingle), LLDictionaryResultType.VOID)
.doOnNext(ReferenceCounted::release) .doOnNext(Send::close)
.then(); .then();
} else { } else {
return dictionary.setRange(LLUtils.lazyRetainRange(range), Flux.empty()); return dictionary.setRange(LLUtils.lazyRetainRange(range), Flux.empty());
@ -499,31 +436,30 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
} }
//todo: temporary wrapper. convert the whole class to buffers //todo: temporary wrapper. convert the whole class to buffers
protected T deserializeSuffix(ByteBuf keySuffix) throws SerializationException { protected T deserializeSuffix(Send<Buffer> keySuffixToReceive) throws SerializationException {
try { try (var keySuffix = keySuffixToReceive.receive()) {
assert suffixKeyConsistency(keySuffix.readableBytes()); assert suffixKeyConsistency(keySuffix.readableBytes());
var result = keySuffixSerializer.deserialize(keySuffix.retain()); var result = keySuffixSerializer.deserialize(keySuffix.send());
assert keyPrefix.refCnt() > 0; assert keyPrefix.isAccessible();
return result; return result;
} finally {
keySuffix.release();
} }
} }
//todo: temporary wrapper. convert the whole class to buffers //todo: temporary wrapper. convert the whole class to buffers
protected ByteBuf serializeSuffix(T keySuffix) throws SerializationException { protected Send<Buffer> serializeSuffix(T keySuffix) throws SerializationException {
ByteBuf suffixData = keySuffixSerializer.serialize(keySuffix); try (Buffer suffixData = keySuffixSerializer.serialize(keySuffix).receive()) {
assert suffixKeyConsistency(suffixData.readableBytes()); assert suffixKeyConsistency(suffixData.readableBytes());
assert keyPrefix.refCnt() > 0; assert keyPrefix.isAccessible();
return suffixData; return suffixData.send();
}
} }
@Override @Override
public void release() { public void release() {
if (!released) { if (!released) {
released = true; released = true;
this.range.release(); this.range.close();
this.keyPrefix.release(); this.keyPrefix.close();
} else { } else {
throw new IllegalReferenceCountException(0, -1); throw new IllegalReferenceCountException(0, -1);
} }

View File

@ -1,7 +1,8 @@
package it.cavallium.dbengine.database.collections; package it.cavallium.dbengine.database.collections;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Buffer;
import io.netty.buffer.ByteBufAllocator; import io.netty.buffer.api.BufferAllocator;
import io.netty.buffer.api.Send;
import it.cavallium.dbengine.client.BadBlock; import it.cavallium.dbengine.client.BadBlock;
import it.cavallium.dbengine.client.CompositeSnapshot; import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.Delta; import it.cavallium.dbengine.database.Delta;
@ -33,41 +34,37 @@ import reactor.util.function.Tuples;
@SuppressWarnings("unused") @SuppressWarnings("unused")
public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T, U, DatabaseStageEntry<U>> { public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T, U, DatabaseStageEntry<U>> {
private final ByteBufAllocator alloc; private final BufferAllocator alloc;
private final DatabaseMapDictionary<TH, ObjectArraySet<Entry<T, U>>> subDictionary; private final DatabaseMapDictionary<TH, ObjectArraySet<Entry<T, U>>> subDictionary;
private final Function<T, TH> keySuffixHashFunction; private final Function<T, TH> keySuffixHashFunction;
protected DatabaseMapDictionaryHashed(LLDictionary dictionary, protected DatabaseMapDictionaryHashed(LLDictionary dictionary,
ByteBuf prefixKey, Send<Buffer> prefixKey,
Serializer<T, ByteBuf> keySuffixSerializer, Serializer<T, Send<Buffer>> keySuffixSerializer,
Serializer<U, ByteBuf> valueSerializer, Serializer<U, Send<Buffer>> valueSerializer,
Function<T, TH> keySuffixHashFunction, Function<T, TH> keySuffixHashFunction,
SerializerFixedBinaryLength<TH, ByteBuf> keySuffixHashSerializer) { SerializerFixedBinaryLength<TH, Buffer> keySuffixHashSerializer) {
try { if (dictionary.getUpdateMode().block() != UpdateMode.ALLOW) {
if (dictionary.getUpdateMode().block() != UpdateMode.ALLOW) { throw new IllegalArgumentException("Hashed maps only works when UpdateMode is ALLOW");
throw new IllegalArgumentException("Hashed maps only works when UpdateMode is ALLOW");
}
this.alloc = dictionary.getAllocator();
ValueWithHashSerializer<T, U> valueWithHashSerializer
= new ValueWithHashSerializer<>(alloc, keySuffixSerializer, valueSerializer);
ValuesSetSerializer<Entry<T, U>> valuesSetSerializer
= new ValuesSetSerializer<>(alloc, valueWithHashSerializer);
this.subDictionary = DatabaseMapDictionary.tail(dictionary,
prefixKey.retain(),
keySuffixHashSerializer,
valuesSetSerializer
);
this.keySuffixHashFunction = keySuffixHashFunction;
} finally {
prefixKey.release();
} }
this.alloc = dictionary.getAllocator();
ValueWithHashSerializer<T, U> valueWithHashSerializer
= new ValueWithHashSerializer<>(alloc, keySuffixSerializer, valueSerializer);
ValuesSetSerializer<Entry<T, U>> valuesSetSerializer
= new ValuesSetSerializer<>(alloc, valueWithHashSerializer);
this.subDictionary = DatabaseMapDictionary.tail(dictionary,
prefixKey,
keySuffixHashSerializer,
valuesSetSerializer
);
this.keySuffixHashFunction = keySuffixHashFunction;
} }
public static <T, U, UH> DatabaseMapDictionaryHashed<T, U, UH> simple(LLDictionary dictionary, public static <T, U, UH> DatabaseMapDictionaryHashed<T, U, UH> simple(LLDictionary dictionary,
Serializer<T, ByteBuf> keySerializer, Serializer<T, Buffer> keySerializer,
Serializer<U, ByteBuf> valueSerializer, Serializer<U, Buffer> valueSerializer,
Function<T, UH> keyHashFunction, Function<T, UH> keyHashFunction,
SerializerFixedBinaryLength<UH, ByteBuf> keyHashSerializer) { SerializerFixedBinaryLength<UH, Buffer> keyHashSerializer) {
return new DatabaseMapDictionaryHashed<>( return new DatabaseMapDictionaryHashed<>(
dictionary, dictionary,
dictionary.getAllocator().buffer(0), dictionary.getAllocator().buffer(0),
@ -79,11 +76,11 @@ public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T
} }
public static <T, U, UH> DatabaseMapDictionaryHashed<T, U, UH> tail(LLDictionary dictionary, public static <T, U, UH> DatabaseMapDictionaryHashed<T, U, UH> tail(LLDictionary dictionary,
ByteBuf prefixKey, Buffer prefixKey,
Serializer<T, ByteBuf> keySuffixSerializer, Serializer<T, Buffer> keySuffixSerializer,
Serializer<U, ByteBuf> valueSerializer, Serializer<U, Buffer> valueSerializer,
Function<T, UH> keySuffixHashFunction, Function<T, UH> keySuffixHashFunction,
SerializerFixedBinaryLength<UH, ByteBuf> keySuffixHashSerializer) { SerializerFixedBinaryLength<UH, Buffer> keySuffixHashSerializer) {
return new DatabaseMapDictionaryHashed<>(dictionary, return new DatabaseMapDictionaryHashed<>(dictionary,
prefixKey, prefixKey,
keySuffixSerializer, keySuffixSerializer,

View File

@ -1,6 +1,6 @@
package it.cavallium.dbengine.database.collections; package it.cavallium.dbengine.database.collections;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Buffer;
import it.cavallium.dbengine.client.CompositeSnapshot; import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.LLDictionary; import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.collections.DatabaseEmpty.Nothing; import it.cavallium.dbengine.database.collections.DatabaseEmpty.Nothing;
@ -15,13 +15,13 @@ import reactor.core.publisher.Mono;
public class DatabaseSetDictionary<T> extends DatabaseMapDictionary<T, Nothing> { public class DatabaseSetDictionary<T> extends DatabaseMapDictionary<T, Nothing> {
protected DatabaseSetDictionary(LLDictionary dictionary, protected DatabaseSetDictionary(LLDictionary dictionary,
ByteBuf prefixKey, Buffer prefixKey,
SerializerFixedBinaryLength<T, ByteBuf> keySuffixSerializer) { SerializerFixedBinaryLength<T, Buffer> keySuffixSerializer) {
super(dictionary, prefixKey, keySuffixSerializer, DatabaseEmpty.NOTHING_SERIALIZER); super(dictionary, prefixKey, keySuffixSerializer, DatabaseEmpty.NOTHING_SERIALIZER);
} }
public static <T> DatabaseSetDictionary<T> simple(LLDictionary dictionary, public static <T> DatabaseSetDictionary<T> simple(LLDictionary dictionary,
SerializerFixedBinaryLength<T, ByteBuf> keySerializer) { SerializerFixedBinaryLength<T, Buffer> keySerializer) {
var buf = dictionary.getAllocator().buffer(0); var buf = dictionary.getAllocator().buffer(0);
try { try {
return new DatabaseSetDictionary<>(dictionary, buf, keySerializer); return new DatabaseSetDictionary<>(dictionary, buf, keySerializer);
@ -31,8 +31,8 @@ public class DatabaseSetDictionary<T> extends DatabaseMapDictionary<T, Nothing>
} }
public static <T> DatabaseSetDictionary<T> tail(LLDictionary dictionary, public static <T> DatabaseSetDictionary<T> tail(LLDictionary dictionary,
ByteBuf prefixKey, Buffer prefixKey,
SerializerFixedBinaryLength<T, ByteBuf> keySuffixSerializer) { SerializerFixedBinaryLength<T, Buffer> keySuffixSerializer) {
return new DatabaseSetDictionary<>(dictionary, prefixKey, keySuffixSerializer); return new DatabaseSetDictionary<>(dictionary, prefixKey, keySuffixSerializer);
} }

View File

@ -1,6 +1,6 @@
package it.cavallium.dbengine.database.collections; package it.cavallium.dbengine.database.collections;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Buffer;
import it.cavallium.dbengine.client.CompositeSnapshot; import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.LLDictionary; import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.collections.DatabaseEmpty.Nothing; import it.cavallium.dbengine.database.collections.DatabaseEmpty.Nothing;
@ -17,10 +17,10 @@ import reactor.core.publisher.Mono;
public class DatabaseSetDictionaryHashed<T, TH> extends DatabaseMapDictionaryHashed<T, Nothing, TH> { public class DatabaseSetDictionaryHashed<T, TH> extends DatabaseMapDictionaryHashed<T, Nothing, TH> {
protected DatabaseSetDictionaryHashed(LLDictionary dictionary, protected DatabaseSetDictionaryHashed(LLDictionary dictionary,
ByteBuf prefixKey, Buffer prefixKey,
Serializer<T, ByteBuf> keySuffixSerializer, Serializer<T, Buffer> keySuffixSerializer,
Function<T, TH> keySuffixHashFunction, Function<T, TH> keySuffixHashFunction,
SerializerFixedBinaryLength<TH, ByteBuf> keySuffixHashSerializer) { SerializerFixedBinaryLength<TH, Buffer> keySuffixHashSerializer) {
super(dictionary, super(dictionary,
prefixKey, prefixKey,
keySuffixSerializer, keySuffixSerializer,
@ -31,9 +31,9 @@ public class DatabaseSetDictionaryHashed<T, TH> extends DatabaseMapDictionaryHas
} }
public static <T, TH> DatabaseSetDictionaryHashed<T, TH> simple(LLDictionary dictionary, public static <T, TH> DatabaseSetDictionaryHashed<T, TH> simple(LLDictionary dictionary,
Serializer<T, ByteBuf> keySerializer, Serializer<T, Buffer> keySerializer,
Function<T, TH> keyHashFunction, Function<T, TH> keyHashFunction,
SerializerFixedBinaryLength<TH, ByteBuf> keyHashSerializer) { SerializerFixedBinaryLength<TH, Buffer> keyHashSerializer) {
return new DatabaseSetDictionaryHashed<>(dictionary, return new DatabaseSetDictionaryHashed<>(dictionary,
dictionary.getAllocator().buffer(0), dictionary.getAllocator().buffer(0),
keySerializer, keySerializer,
@ -43,10 +43,10 @@ public class DatabaseSetDictionaryHashed<T, TH> extends DatabaseMapDictionaryHas
} }
public static <T, TH> DatabaseSetDictionaryHashed<T, TH> tail(LLDictionary dictionary, public static <T, TH> DatabaseSetDictionaryHashed<T, TH> tail(LLDictionary dictionary,
ByteBuf prefixKey, Buffer prefixKey,
Serializer<T, ByteBuf> keySuffixSerializer, Serializer<T, Buffer> keySuffixSerializer,
Function<T, TH> keyHashFunction, Function<T, TH> keyHashFunction,
SerializerFixedBinaryLength<TH, ByteBuf> keyHashSerializer) { SerializerFixedBinaryLength<TH, Buffer> keyHashSerializer) {
return new DatabaseSetDictionaryHashed<>(dictionary, return new DatabaseSetDictionaryHashed<>(dictionary,
prefixKey, prefixKey,
keySuffixSerializer, keySuffixSerializer,

View File

@ -1,6 +1,8 @@
package it.cavallium.dbengine.database.collections; package it.cavallium.dbengine.database.collections;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Buffer;
import io.netty.buffer.api.Send;
import io.netty.buffer.api.internal.ResourceSupport;
import io.netty.util.ReferenceCounted; import io.netty.util.ReferenceCounted;
import it.cavallium.dbengine.client.BadBlock; import it.cavallium.dbengine.client.BadBlock;
import it.cavallium.dbengine.client.CompositeSnapshot; import it.cavallium.dbengine.client.CompositeSnapshot;
@ -23,18 +25,16 @@ import reactor.core.publisher.SynchronousSink;
public class DatabaseSingle<U> implements DatabaseStageEntry<U> { public class DatabaseSingle<U> implements DatabaseStageEntry<U> {
private final LLDictionary dictionary; private final LLDictionary dictionary;
private final ByteBuf key; private final Buffer key;
private final Mono<ByteBuf> keyMono; private final Mono<Send<Buffer>> keyMono;
private final Serializer<U, ByteBuf> serializer; private final Serializer<U, Send<Buffer>> serializer;
public DatabaseSingle(LLDictionary dictionary, ByteBuf key, Serializer<U, ByteBuf> serializer) { public DatabaseSingle(LLDictionary dictionary, Send<Buffer> key, Serializer<U, Send<Buffer>> serializer) {
try { try (key) {
this.dictionary = dictionary; this.dictionary = dictionary;
this.key = key.retain(); this.key = key.receive();
this.keyMono = LLUtils.lazyRetain(this.key); this.keyMono = LLUtils.lazyRetain(this.key);
this.serializer = serializer; this.serializer = serializer;
} finally {
key.release();
} }
} }
@ -46,7 +46,7 @@ public class DatabaseSingle<U> implements DatabaseStageEntry<U> {
} }
} }
private void deserializeValue(ByteBuf value, SynchronousSink<U> sink) { private void deserializeValue(Send<Buffer> value, SynchronousSink<U> sink) {
try { try {
sink.next(serializer.deserialize(value)); sink.next(serializer.deserialize(value));
} catch (SerializationException ex) { } catch (SerializationException ex) {
@ -63,13 +63,9 @@ public class DatabaseSingle<U> implements DatabaseStageEntry<U> {
@Override @Override
public Mono<U> setAndGetPrevious(U value) { public Mono<U> setAndGetPrevious(U value) {
return Mono return dictionary
.using(() -> serializer.serialize(value), .put(keyMono, Mono.fromCallable(() -> serializer.serialize(value)), LLDictionaryResultType.PREVIOUS_VALUE)
valueByteBuf -> dictionary .handle(this::deserializeValue);
.put(keyMono, LLUtils.lazyRetain(valueByteBuf), LLDictionaryResultType.PREVIOUS_VALUE)
.handle(this::deserializeValue),
ReferenceCounted::release
);
} }
@Override @Override
@ -99,7 +95,7 @@ public class DatabaseSingle<U> implements DatabaseStageEntry<U> {
} else { } else {
return serializer.serialize(result); return serializer.serialize(result);
} }
}, existsAlmostCertainly).transform(mono -> LLUtils.mapDelta(mono, serializer::deserialize)); }, existsAlmostCertainly).transform(mono -> LLUtils.mapLLDelta(mono, serializer::deserialize));
} }
@Override @Override
@ -112,23 +108,23 @@ public class DatabaseSingle<U> implements DatabaseStageEntry<U> {
@Override @Override
public Mono<Long> leavesCount(@Nullable CompositeSnapshot snapshot, boolean fast) { public Mono<Long> leavesCount(@Nullable CompositeSnapshot snapshot, boolean fast) {
return dictionary return dictionary
.isRangeEmpty(resolveSnapshot(snapshot), keyMono.map(LLRange::single)) .isRangeEmpty(resolveSnapshot(snapshot), keyMono.map(LLRange::single).map(ResourceSupport::send))
.map(empty -> empty ? 0L : 1L); .map(empty -> empty ? 0L : 1L);
} }
@Override @Override
public Mono<Boolean> isEmpty(@Nullable CompositeSnapshot snapshot) { public Mono<Boolean> isEmpty(@Nullable CompositeSnapshot snapshot) {
return dictionary return dictionary
.isRangeEmpty(resolveSnapshot(snapshot), keyMono.map(LLRange::single)); .isRangeEmpty(resolveSnapshot(snapshot), keyMono.map(LLRange::single).map(ResourceSupport::send));
} }
@Override @Override
public void release() { public void release() {
key.release(); key.close();
} }
@Override @Override
public Flux<BadBlock> badBlocks() { public Flux<BadBlock> badBlocks() {
return dictionary.badBlocks(keyMono.map(LLRange::single)); return dictionary.badBlocks(keyMono.map(LLRange::single).map(ResourceSupport::send));
} }
} }

View File

@ -1,6 +1,6 @@
package it.cavallium.dbengine.database.collections; package it.cavallium.dbengine.database.collections;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Buffer;
import it.cavallium.dbengine.client.BadBlock; import it.cavallium.dbengine.client.BadBlock;
import it.cavallium.dbengine.client.CompositeSnapshot; import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.Delta; import it.cavallium.dbengine.database.Delta;

View File

@ -1,6 +1,7 @@
package it.cavallium.dbengine.database.collections; package it.cavallium.dbengine.database.collections;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Buffer;
import io.netty.buffer.api.Send;
import it.cavallium.dbengine.client.CompositeSnapshot; import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.LLDictionary; import it.cavallium.dbengine.database.LLDictionary;
import java.util.Collection; import java.util.Collection;
@ -13,7 +14,7 @@ public interface SubStageGetter<U, US extends DatabaseStage<U>> {
Mono<US> subStage(LLDictionary dictionary, Mono<US> subStage(LLDictionary dictionary,
@Nullable CompositeSnapshot snapshot, @Nullable CompositeSnapshot snapshot,
Mono<ByteBuf> prefixKey); Mono<Send<Buffer>> prefixKey);
boolean isMultiKey(); boolean isMultiKey();
} }

View File

@ -1,6 +1,6 @@
package it.cavallium.dbengine.database.collections; package it.cavallium.dbengine.database.collections;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Buffer;
import it.cavallium.dbengine.client.CompositeSnapshot; import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.LLDictionary; import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.serialization.Serializer; import it.cavallium.dbengine.database.serialization.Serializer;
@ -16,15 +16,15 @@ import reactor.core.publisher.Mono;
public class SubStageGetterHashMap<T, U, TH> implements public class SubStageGetterHashMap<T, U, TH> implements
SubStageGetter<Map<T, U>, DatabaseMapDictionaryHashed<T, U, TH>> { SubStageGetter<Map<T, U>, DatabaseMapDictionaryHashed<T, U, TH>> {
private final Serializer<T, ByteBuf> keySerializer; private final Serializer<T, Buffer> keySerializer;
private final Serializer<U, ByteBuf> valueSerializer; private final Serializer<U, Buffer> valueSerializer;
private final Function<T, TH> keyHashFunction; private final Function<T, TH> keyHashFunction;
private final SerializerFixedBinaryLength<TH, ByteBuf> keyHashSerializer; private final SerializerFixedBinaryLength<TH, Buffer> keyHashSerializer;
public SubStageGetterHashMap(Serializer<T, ByteBuf> keySerializer, public SubStageGetterHashMap(Serializer<T, Buffer> keySerializer,
Serializer<U, ByteBuf> valueSerializer, Serializer<U, Buffer> valueSerializer,
Function<T, TH> keyHashFunction, Function<T, TH> keyHashFunction,
SerializerFixedBinaryLength<TH, ByteBuf> keyHashSerializer) { SerializerFixedBinaryLength<TH, Buffer> keyHashSerializer) {
this.keySerializer = keySerializer; this.keySerializer = keySerializer;
this.valueSerializer = valueSerializer; this.valueSerializer = valueSerializer;
this.keyHashFunction = keyHashFunction; this.keyHashFunction = keyHashFunction;
@ -34,7 +34,7 @@ public class SubStageGetterHashMap<T, U, TH> implements
@Override @Override
public Mono<DatabaseMapDictionaryHashed<T, U, TH>> subStage(LLDictionary dictionary, public Mono<DatabaseMapDictionaryHashed<T, U, TH>> subStage(LLDictionary dictionary,
@Nullable CompositeSnapshot snapshot, @Nullable CompositeSnapshot snapshot,
Mono<ByteBuf> prefixKeyMono) { Mono<Buffer> prefixKeyMono) {
return Mono.usingWhen( return Mono.usingWhen(
prefixKeyMono, prefixKeyMono,
prefixKey -> Mono prefixKey -> Mono

View File

@ -1,6 +1,6 @@
package it.cavallium.dbengine.database.collections; package it.cavallium.dbengine.database.collections;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Buffer;
import it.cavallium.dbengine.client.CompositeSnapshot; import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.LLDictionary; import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.collections.DatabaseEmpty.Nothing; import it.cavallium.dbengine.database.collections.DatabaseEmpty.Nothing;
@ -16,13 +16,13 @@ import reactor.core.publisher.Mono;
public class SubStageGetterHashSet<T, TH> implements public class SubStageGetterHashSet<T, TH> implements
SubStageGetter<Map<T, Nothing>, DatabaseSetDictionaryHashed<T, TH>> { SubStageGetter<Map<T, Nothing>, DatabaseSetDictionaryHashed<T, TH>> {
private final Serializer<T, ByteBuf> keySerializer; private final Serializer<T, Buffer> keySerializer;
private final Function<T, TH> keyHashFunction; private final Function<T, TH> keyHashFunction;
private final SerializerFixedBinaryLength<TH, ByteBuf> keyHashSerializer; private final SerializerFixedBinaryLength<TH, Buffer> keyHashSerializer;
public SubStageGetterHashSet(Serializer<T, ByteBuf> keySerializer, public SubStageGetterHashSet(Serializer<T, Buffer> keySerializer,
Function<T, TH> keyHashFunction, Function<T, TH> keyHashFunction,
SerializerFixedBinaryLength<TH, ByteBuf> keyHashSerializer) { SerializerFixedBinaryLength<TH, Buffer> keyHashSerializer) {
this.keySerializer = keySerializer; this.keySerializer = keySerializer;
this.keyHashFunction = keyHashFunction; this.keyHashFunction = keyHashFunction;
this.keyHashSerializer = keyHashSerializer; this.keyHashSerializer = keyHashSerializer;
@ -31,7 +31,7 @@ public class SubStageGetterHashSet<T, TH> implements
@Override @Override
public Mono<DatabaseSetDictionaryHashed<T, TH>> subStage(LLDictionary dictionary, public Mono<DatabaseSetDictionaryHashed<T, TH>> subStage(LLDictionary dictionary,
@Nullable CompositeSnapshot snapshot, @Nullable CompositeSnapshot snapshot,
Mono<ByteBuf> prefixKeyMono) { Mono<Buffer> prefixKeyMono) {
return Mono.usingWhen(prefixKeyMono, return Mono.usingWhen(prefixKeyMono,
prefixKey -> Mono prefixKey -> Mono
.fromSupplier(() -> DatabaseSetDictionaryHashed .fromSupplier(() -> DatabaseSetDictionaryHashed

View File

@ -1,6 +1,6 @@
package it.cavallium.dbengine.database.collections; package it.cavallium.dbengine.database.collections;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Buffer;
import io.netty.util.ReferenceCounted; import io.netty.util.ReferenceCounted;
import it.cavallium.dbengine.client.CompositeSnapshot; import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.LLDictionary; import it.cavallium.dbengine.database.LLDictionary;
@ -14,11 +14,11 @@ import reactor.core.publisher.Mono;
public class SubStageGetterMap<T, U> implements SubStageGetter<Map<T, U>, DatabaseMapDictionary<T, U>> { public class SubStageGetterMap<T, U> implements SubStageGetter<Map<T, U>, DatabaseMapDictionary<T, U>> {
private final SerializerFixedBinaryLength<T, ByteBuf> keySerializer; private final SerializerFixedBinaryLength<T, Buffer> keySerializer;
private final Serializer<U, ByteBuf> valueSerializer; private final Serializer<U, Buffer> valueSerializer;
public SubStageGetterMap(SerializerFixedBinaryLength<T, ByteBuf> keySerializer, public SubStageGetterMap(SerializerFixedBinaryLength<T, Buffer> keySerializer,
Serializer<U, ByteBuf> valueSerializer) { Serializer<U, Buffer> valueSerializer) {
this.keySerializer = keySerializer; this.keySerializer = keySerializer;
this.valueSerializer = valueSerializer; this.valueSerializer = valueSerializer;
} }
@ -26,7 +26,7 @@ public class SubStageGetterMap<T, U> implements SubStageGetter<Map<T, U>, Databa
@Override @Override
public Mono<DatabaseMapDictionary<T, U>> subStage(LLDictionary dictionary, public Mono<DatabaseMapDictionary<T, U>> subStage(LLDictionary dictionary,
@Nullable CompositeSnapshot snapshot, @Nullable CompositeSnapshot snapshot,
Mono<ByteBuf> prefixKeyMono) { Mono<Buffer> prefixKeyMono) {
return Mono.usingWhen(prefixKeyMono, return Mono.usingWhen(prefixKeyMono,
prefixKey -> Mono prefixKey -> Mono
.fromSupplier(() -> DatabaseMapDictionary .fromSupplier(() -> DatabaseMapDictionary

View File

@ -1,6 +1,6 @@
package it.cavallium.dbengine.database.collections; package it.cavallium.dbengine.database.collections;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Buffer;
import io.netty.util.ReferenceCounted; import io.netty.util.ReferenceCounted;
import it.cavallium.dbengine.client.CompositeSnapshot; import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.LLDictionary; import it.cavallium.dbengine.database.LLDictionary;
@ -15,11 +15,11 @@ public class SubStageGetterMapDeep<T, U, US extends DatabaseStage<U>> implements
SubStageGetter<Map<T, U>, DatabaseMapDictionaryDeep<T, U, US>> { SubStageGetter<Map<T, U>, DatabaseMapDictionaryDeep<T, U, US>> {
private final SubStageGetter<U, US> subStageGetter; private final SubStageGetter<U, US> subStageGetter;
private final SerializerFixedBinaryLength<T, ByteBuf> keySerializer; private final SerializerFixedBinaryLength<T, Buffer> keySerializer;
private final int keyExtLength; private final int keyExtLength;
public SubStageGetterMapDeep(SubStageGetter<U, US> subStageGetter, public SubStageGetterMapDeep(SubStageGetter<U, US> subStageGetter,
SerializerFixedBinaryLength<T, ByteBuf> keySerializer, SerializerFixedBinaryLength<T, Buffer> keySerializer,
int keyExtLength) { int keyExtLength) {
this.subStageGetter = subStageGetter; this.subStageGetter = subStageGetter;
this.keySerializer = keySerializer; this.keySerializer = keySerializer;
@ -41,7 +41,7 @@ public class SubStageGetterMapDeep<T, U, US extends DatabaseStage<U>> implements
@Override @Override
public Mono<DatabaseMapDictionaryDeep<T, U, US>> subStage(LLDictionary dictionary, public Mono<DatabaseMapDictionaryDeep<T, U, US>> subStage(LLDictionary dictionary,
@Nullable CompositeSnapshot snapshot, @Nullable CompositeSnapshot snapshot,
Mono<ByteBuf> prefixKeyMono) { Mono<Buffer> prefixKeyMono) {
return Mono.usingWhen(prefixKeyMono, return Mono.usingWhen(prefixKeyMono,
prefixKey -> Mono prefixKey -> Mono
.fromSupplier(() -> DatabaseMapDictionaryDeep .fromSupplier(() -> DatabaseMapDictionaryDeep
@ -61,16 +61,16 @@ public class SubStageGetterMapDeep<T, U, US extends DatabaseStage<U>> implements
return true; return true;
} }
private Mono<Void> checkKeyFluxConsistency(ByteBuf prefixKey, List<ByteBuf> keys) { private Mono<Void> checkKeyFluxConsistency(Buffer prefixKey, List<Buffer> keys) {
return Mono return Mono
.fromCallable(() -> { .fromCallable(() -> {
try { try {
for (ByteBuf key : keys) { for (Buffer key : keys) {
assert key.readableBytes() == prefixKey.readableBytes() + getKeyBinaryLength(); assert key.readableBytes() == prefixKey.readableBytes() + getKeyBinaryLength();
} }
} finally { } finally {
prefixKey.release(); prefixKey.release();
for (ByteBuf key : keys) { for (Buffer key : keys) {
key.release(); key.release();
} }
} }

View File

@ -1,6 +1,6 @@
package it.cavallium.dbengine.database.collections; package it.cavallium.dbengine.database.collections;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Buffer;
import io.netty.util.ReferenceCounted; import io.netty.util.ReferenceCounted;
import it.cavallium.dbengine.client.CompositeSnapshot; import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.LLDictionary; import it.cavallium.dbengine.database.LLDictionary;
@ -14,16 +14,16 @@ import reactor.core.publisher.Mono;
public class SubStageGetterSet<T> implements SubStageGetter<Map<T, Nothing>, DatabaseSetDictionary<T>> { public class SubStageGetterSet<T> implements SubStageGetter<Map<T, Nothing>, DatabaseSetDictionary<T>> {
private final SerializerFixedBinaryLength<T, ByteBuf> keySerializer; private final SerializerFixedBinaryLength<T, Buffer> keySerializer;
public SubStageGetterSet(SerializerFixedBinaryLength<T, ByteBuf> keySerializer) { public SubStageGetterSet(SerializerFixedBinaryLength<T, Buffer> keySerializer) {
this.keySerializer = keySerializer; this.keySerializer = keySerializer;
} }
@Override @Override
public Mono<DatabaseSetDictionary<T>> subStage(LLDictionary dictionary, public Mono<DatabaseSetDictionary<T>> subStage(LLDictionary dictionary,
@Nullable CompositeSnapshot snapshot, @Nullable CompositeSnapshot snapshot,
Mono<ByteBuf> prefixKeyMono) { Mono<Buffer> prefixKeyMono) {
return Mono.usingWhen(prefixKeyMono, return Mono.usingWhen(prefixKeyMono,
prefixKey -> Mono prefixKey -> Mono
.fromSupplier(() -> DatabaseSetDictionary.tail(dictionary, prefixKey.retain(), keySerializer)), .fromSupplier(() -> DatabaseSetDictionary.tail(dictionary, prefixKey.retain(), keySerializer)),

View File

@ -1,6 +1,7 @@
package it.cavallium.dbengine.database.collections; package it.cavallium.dbengine.database.collections;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Buffer;
import io.netty.buffer.api.Send;
import it.cavallium.dbengine.client.CompositeSnapshot; import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.LLDictionary; import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.LLUtils; import it.cavallium.dbengine.database.LLUtils;
@ -13,21 +14,21 @@ import reactor.core.publisher.Mono;
public class SubStageGetterSingle<T> implements SubStageGetter<T, DatabaseStageEntry<T>> { public class SubStageGetterSingle<T> implements SubStageGetter<T, DatabaseStageEntry<T>> {
private final Serializer<T, ByteBuf> serializer; private final Serializer<T, Send<Buffer>> serializer;
public SubStageGetterSingle(Serializer<T, ByteBuf> serializer) { public SubStageGetterSingle(Serializer<T, Send<Buffer>> serializer) {
this.serializer = serializer; this.serializer = serializer;
} }
@Override @Override
public Mono<DatabaseStageEntry<T>> subStage(LLDictionary dictionary, public Mono<DatabaseStageEntry<T>> subStage(LLDictionary dictionary,
@Nullable CompositeSnapshot snapshot, @Nullable CompositeSnapshot snapshot,
Mono<ByteBuf> keyPrefixMono) { Mono<Send<Buffer>> keyPrefixMono) {
return Mono.usingWhen( return Mono.usingWhen(
keyPrefixMono, keyPrefixMono,
keyPrefix -> Mono keyPrefix -> Mono
.<DatabaseStageEntry<T>>fromSupplier(() -> new DatabaseSingle<>(dictionary, keyPrefix.retain(), serializer)), .<DatabaseStageEntry<T>>fromSupplier(() -> new DatabaseSingle<>(dictionary, keyPrefix, serializer)),
keyPrefix -> Mono.fromRunnable(keyPrefix::release) keyPrefix -> Mono.fromRunnable(keyPrefix::close)
); );
} }

View File

@ -1,9 +1,9 @@
package it.cavallium.dbengine.database.collections; package it.cavallium.dbengine.database.collections;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Buffer;
import it.cavallium.dbengine.database.serialization.Serializer; import it.cavallium.dbengine.database.serialization.Serializer;
public class SubStageGetterSingleBytes extends SubStageGetterSingle<ByteBuf> { public class SubStageGetterSingleBytes extends SubStageGetterSingle<Buffer> {
public SubStageGetterSingleBytes() { public SubStageGetterSingleBytes() {
super(Serializer.noop()); super(Serializer.noop());

View File

@ -1,7 +1,8 @@
package it.cavallium.dbengine.database.collections; package it.cavallium.dbengine.database.collections;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Buffer;
import io.netty.buffer.ByteBufAllocator; import io.netty.buffer.api.BufferAllocator;
import io.netty.buffer.api.Send;
import it.cavallium.dbengine.database.LLUtils; import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.database.serialization.SerializationException; import it.cavallium.dbengine.database.serialization.SerializationException;
import it.cavallium.dbengine.database.serialization.Serializer; import it.cavallium.dbengine.database.serialization.Serializer;
@ -9,43 +10,35 @@ import java.util.Map;
import java.util.Map.Entry; import java.util.Map.Entry;
import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.NotNull;
class ValueWithHashSerializer<X, Y> implements Serializer<Entry<X, Y>, ByteBuf> { class ValueWithHashSerializer<X, Y> implements Serializer<Entry<X, Y>, Send<Buffer>> {
private final ByteBufAllocator allocator; private final BufferAllocator allocator;
private final Serializer<X, ByteBuf> keySuffixSerializer; private final Serializer<X, Send<Buffer>> keySuffixSerializer;
private final Serializer<Y, ByteBuf> valueSerializer; private final Serializer<Y, Send<Buffer>> valueSerializer;
ValueWithHashSerializer(ByteBufAllocator allocator, ValueWithHashSerializer(BufferAllocator allocator,
Serializer<X, ByteBuf> keySuffixSerializer, Serializer<X, Send<Buffer>> keySuffixSerializer,
Serializer<Y, ByteBuf> valueSerializer) { Serializer<Y, Send<Buffer>> valueSerializer) {
this.allocator = allocator; this.allocator = allocator;
this.keySuffixSerializer = keySuffixSerializer; this.keySuffixSerializer = keySuffixSerializer;
this.valueSerializer = valueSerializer; this.valueSerializer = valueSerializer;
} }
@Override @Override
public @NotNull Entry<X, Y> deserialize(@NotNull ByteBuf serialized) throws SerializationException { public @NotNull Entry<X, Y> deserialize(@NotNull Send<Buffer> serializedToReceive) throws SerializationException {
try { try (var serialized = serializedToReceive.receive()) {
X deserializedKey = keySuffixSerializer.deserialize(serialized.retain()); X deserializedKey = keySuffixSerializer.deserialize(serialized.copy().send());
Y deserializedValue = valueSerializer.deserialize(serialized.retain()); Y deserializedValue = valueSerializer.deserialize(serialized.send());
return Map.entry(deserializedKey, deserializedValue); return Map.entry(deserializedKey, deserializedValue);
} finally {
serialized.release();
} }
} }
@Override @Override
public @NotNull ByteBuf serialize(@NotNull Entry<X, Y> deserialized) throws SerializationException { public @NotNull Send<Buffer> serialize(@NotNull Entry<X, Y> deserialized) throws SerializationException {
ByteBuf keySuffix = keySuffixSerializer.serialize(deserialized.getKey()); try (Buffer keySuffix = keySuffixSerializer.serialize(deserialized.getKey()).receive()) {
try { try (Buffer value = valueSerializer.serialize(deserialized.getValue()).receive()) {
ByteBuf value = valueSerializer.serialize(deserialized.getValue()); return LLUtils.compositeBuffer(allocator, keySuffix.send(), value.send());
try {
return LLUtils.compositeBuffer(allocator, keySuffix.retain(), value.retain());
} finally {
value.release();
} }
} finally {
keySuffix.release();
} }
} }
} }

View File

@ -1,7 +1,8 @@
package it.cavallium.dbengine.database.collections; package it.cavallium.dbengine.database.collections;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Buffer;
import io.netty.buffer.ByteBufAllocator; import io.netty.buffer.api.BufferAllocator;
import io.netty.buffer.api.Send;
import it.cavallium.dbengine.database.serialization.SerializationException; import it.cavallium.dbengine.database.serialization.SerializationException;
import it.cavallium.dbengine.database.serialization.Serializer; import it.cavallium.dbengine.database.serialization.Serializer;
import it.unimi.dsi.fastutil.objects.ObjectArraySet; import it.unimi.dsi.fastutil.objects.ObjectArraySet;
@ -13,47 +14,39 @@ import java.util.HashSet;
import java.util.Set; import java.util.Set;
import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.NotNull;
class ValuesSetSerializer<X> implements Serializer<ObjectArraySet<X>, ByteBuf> { class ValuesSetSerializer<X> implements Serializer<ObjectArraySet<X>, Send<Buffer>> {
private final ByteBufAllocator allocator; private final BufferAllocator allocator;
private final Serializer<X, ByteBuf> entrySerializer; private final Serializer<X, Send<Buffer>> entrySerializer;
ValuesSetSerializer(ByteBufAllocator allocator, Serializer<X, ByteBuf> entrySerializer) { ValuesSetSerializer(BufferAllocator allocator, Serializer<X, Send<Buffer>> entrySerializer) {
this.allocator = allocator; this.allocator = allocator;
this.entrySerializer = entrySerializer; this.entrySerializer = entrySerializer;
} }
@Override @Override
public @NotNull ObjectArraySet<X> deserialize(@NotNull ByteBuf serialized) throws SerializationException { public @NotNull ObjectArraySet<X> deserialize(@NotNull Send<Buffer> serializedToReceive) throws SerializationException {
try { try (var serialized = serializedToReceive.receive()) {
int entriesLength = serialized.readInt(); int entriesLength = serialized.readInt();
ArrayList<X> deserializedElements = new ArrayList<>(entriesLength); ArrayList<X> deserializedElements = new ArrayList<>(entriesLength);
for (int i = 0; i < entriesLength; i++) { for (int i = 0; i < entriesLength; i++) {
X entry = entrySerializer.deserialize(serialized.retain()); X entry = entrySerializer.deserialize(serialized.send());
deserializedElements.add(entry); deserializedElements.add(entry);
} }
return new ObjectArraySet<>(deserializedElements); return new ObjectArraySet<>(deserializedElements);
} finally {
serialized.release();
} }
} }
@Override @Override
public @NotNull ByteBuf serialize(@NotNull ObjectArraySet<X> deserialized) throws SerializationException { public @NotNull Send<Buffer> serialize(@NotNull ObjectArraySet<X> deserialized) throws SerializationException {
ByteBuf output = allocator.buffer(); try (Buffer output = allocator.allocate(64)) {
try {
output.writeInt(deserialized.size()); output.writeInt(deserialized.size());
for (X entry : deserialized) { for (X entry : deserialized) {
ByteBuf serialized = entrySerializer.serialize(entry); try (Buffer serialized = entrySerializer.serialize(entry).receive()) {
try {
output.writeBytes(serialized); output.writeBytes(serialized);
} finally {
serialized.release();
} }
} }
return output.retain(); return output.send();
} finally {
output.release();
} }
} }
} }

View File

@ -1,6 +1,6 @@
package it.cavallium.dbengine.database.disk; package it.cavallium.dbengine.database.disk;
import io.netty.buffer.ByteBufAllocator; import io.netty.buffer.api.BufferAllocator;
import it.cavallium.dbengine.client.IndicizerAnalyzers; import it.cavallium.dbengine.client.IndicizerAnalyzers;
import it.cavallium.dbengine.client.IndicizerSimilarities; import it.cavallium.dbengine.client.IndicizerSimilarities;
import it.cavallium.dbengine.client.LuceneOptions; import it.cavallium.dbengine.client.LuceneOptions;
@ -23,16 +23,16 @@ public class LLLocalDatabaseConnection implements LLDatabaseConnection {
JMXNettyMonitoringManager.initialize(); JMXNettyMonitoringManager.initialize();
} }
private final ByteBufAllocator allocator; private final BufferAllocator allocator;
private final Path basePath; private final Path basePath;
public LLLocalDatabaseConnection(ByteBufAllocator allocator, Path basePath) { public LLLocalDatabaseConnection(BufferAllocator allocator, Path basePath) {
this.allocator = allocator; this.allocator = allocator;
this.basePath = basePath; this.basePath = basePath;
} }
@Override @Override
public ByteBufAllocator getAllocator() { public BufferAllocator getAllocator() {
return allocator; return allocator;
} }

View File

@ -1,7 +1,8 @@
package it.cavallium.dbengine.database.disk; package it.cavallium.dbengine.database.disk;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Buffer;
import io.netty.buffer.ByteBufAllocator; import io.netty.buffer.api.BufferAllocator;
import io.netty.buffer.api.Send;
import it.cavallium.dbengine.database.LLEntry; import it.cavallium.dbengine.database.LLEntry;
import it.cavallium.dbengine.database.LLRange; import it.cavallium.dbengine.database.LLRange;
import java.util.Map; import java.util.Map;
@ -10,12 +11,12 @@ import org.rocksdb.ColumnFamilyHandle;
import org.rocksdb.ReadOptions; import org.rocksdb.ReadOptions;
import org.rocksdb.RocksDB; import org.rocksdb.RocksDB;
public class LLLocalEntryReactiveRocksIterator extends LLLocalReactiveRocksIterator<LLEntry> { public class LLLocalEntryReactiveRocksIterator extends LLLocalReactiveRocksIterator<Send<LLEntry>> {
public LLLocalEntryReactiveRocksIterator(RocksDB db, public LLLocalEntryReactiveRocksIterator(RocksDB db,
ByteBufAllocator alloc, BufferAllocator alloc,
ColumnFamilyHandle cfh, ColumnFamilyHandle cfh,
LLRange range, Send<LLRange> range,
boolean allowNettyDirect, boolean allowNettyDirect,
ReadOptions readOptions, ReadOptions readOptions,
String debugName) { String debugName) {
@ -23,7 +24,7 @@ public class LLLocalEntryReactiveRocksIterator extends LLLocalReactiveRocksItera
} }
@Override @Override
public LLEntry getEntry(ByteBuf key, ByteBuf value) { public Send<LLEntry> getEntry(Send<Buffer> key, Send<Buffer> value) {
return new LLEntry(key, value); return LLEntry.of(key, value).send();
} }
} }

View File

@ -1,7 +1,8 @@
package it.cavallium.dbengine.database.disk; package it.cavallium.dbengine.database.disk;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Buffer;
import io.netty.buffer.ByteBufAllocator; import io.netty.buffer.api.BufferAllocator;
import io.netty.buffer.api.Send;
import it.cavallium.dbengine.database.LLEntry; import it.cavallium.dbengine.database.LLEntry;
import it.cavallium.dbengine.database.LLRange; import it.cavallium.dbengine.database.LLRange;
import java.util.Map; import java.util.Map;
@ -11,11 +12,11 @@ import org.rocksdb.ReadOptions;
import org.rocksdb.RocksDB; import org.rocksdb.RocksDB;
public class LLLocalGroupedEntryReactiveRocksIterator extends public class LLLocalGroupedEntryReactiveRocksIterator extends
LLLocalGroupedReactiveRocksIterator<LLEntry> { LLLocalGroupedReactiveRocksIterator<Send<LLEntry>> {
public LLLocalGroupedEntryReactiveRocksIterator(RocksDB db, ByteBufAllocator alloc, ColumnFamilyHandle cfh, public LLLocalGroupedEntryReactiveRocksIterator(RocksDB db, BufferAllocator alloc, ColumnFamilyHandle cfh,
int prefixLength, int prefixLength,
LLRange range, Send<LLRange> range,
boolean allowNettyDirect, boolean allowNettyDirect,
ReadOptions readOptions, ReadOptions readOptions,
String debugName) { String debugName) {
@ -23,7 +24,7 @@ public class LLLocalGroupedEntryReactiveRocksIterator extends
} }
@Override @Override
public LLEntry getEntry(ByteBuf key, ByteBuf value) { public Send<LLEntry> getEntry(Send<Buffer> key, Send<Buffer> value) {
return new LLEntry(key, value); return LLEntry.of(key, value).send();
} }
} }

View File

@ -1,19 +1,20 @@
package it.cavallium.dbengine.database.disk; package it.cavallium.dbengine.database.disk;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Buffer;
import io.netty.buffer.ByteBufAllocator; import io.netty.buffer.api.BufferAllocator;
import io.netty.buffer.api.Send;
import it.cavallium.dbengine.database.LLRange; import it.cavallium.dbengine.database.LLRange;
import org.rocksdb.ColumnFamilyHandle; import org.rocksdb.ColumnFamilyHandle;
import org.rocksdb.ReadOptions; import org.rocksdb.ReadOptions;
import org.rocksdb.RocksDB; import org.rocksdb.RocksDB;
public class LLLocalGroupedKeyReactiveRocksIterator extends LLLocalGroupedReactiveRocksIterator<ByteBuf> { public class LLLocalGroupedKeyReactiveRocksIterator extends LLLocalGroupedReactiveRocksIterator<Send<Buffer>> {
public LLLocalGroupedKeyReactiveRocksIterator(RocksDB db, public LLLocalGroupedKeyReactiveRocksIterator(RocksDB db,
ByteBufAllocator alloc, BufferAllocator alloc,
ColumnFamilyHandle cfh, ColumnFamilyHandle cfh,
int prefixLength, int prefixLength,
LLRange range, Send<LLRange> range,
boolean allowNettyDirect, boolean allowNettyDirect,
ReadOptions readOptions, ReadOptions readOptions,
String debugName) { String debugName) {
@ -21,9 +22,9 @@ public class LLLocalGroupedKeyReactiveRocksIterator extends LLLocalGroupedReacti
} }
@Override @Override
public ByteBuf getEntry(ByteBuf key, ByteBuf value) { public Send<Buffer> getEntry(Send<Buffer> key, Send<Buffer> value) {
if (value != null) { if (value != null) {
value.release(); value.close();
} }
return key; return key;
} }

View File

@ -1,8 +1,9 @@
package it.cavallium.dbengine.database.disk; package it.cavallium.dbengine.database.disk;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Buffer;
import io.netty.buffer.ByteBufAllocator; import io.netty.buffer.api.BufferAllocator;
import io.netty.buffer.ByteBufUtil; import io.netty.buffer.api.BufferUtil;
import io.netty.buffer.api.Send;
import it.cavallium.dbengine.database.LLRange; import it.cavallium.dbengine.database.LLRange;
import it.cavallium.dbengine.database.LLUtils; import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.database.collections.DatabaseMapDictionaryDeep; import it.cavallium.dbengine.database.collections.DatabaseMapDictionaryDeep;
@ -20,7 +21,7 @@ import static io.netty.buffer.Unpooled.*;
public abstract class LLLocalGroupedReactiveRocksIterator<T> { public abstract class LLLocalGroupedReactiveRocksIterator<T> {
private final RocksDB db; private final RocksDB db;
private final ByteBufAllocator alloc; private final BufferAllocator alloc;
private final ColumnFamilyHandle cfh; private final ColumnFamilyHandle cfh;
private final int prefixLength; private final int prefixLength;
private final LLRange range; private final LLRange range;
@ -29,9 +30,9 @@ public abstract class LLLocalGroupedReactiveRocksIterator<T> {
private final boolean canFillCache; private final boolean canFillCache;
private final boolean readValues; private final boolean readValues;
public LLLocalGroupedReactiveRocksIterator(RocksDB db, ByteBufAllocator alloc, ColumnFamilyHandle cfh, public LLLocalGroupedReactiveRocksIterator(RocksDB db, BufferAllocator alloc, ColumnFamilyHandle cfh,
int prefixLength, int prefixLength,
LLRange range, Send<LLRange> range,
boolean allowNettyDirect, boolean allowNettyDirect,
ReadOptions readOptions, ReadOptions readOptions,
boolean canFillCache, boolean canFillCache,
@ -59,18 +60,18 @@ public abstract class LLLocalGroupedReactiveRocksIterator<T> {
try { try {
var rocksIterator = tuple.getT1(); var rocksIterator = tuple.getT1();
ObjectArrayList<T> values = new ObjectArrayList<>(); ObjectArrayList<T> values = new ObjectArrayList<>();
ByteBuf firstGroupKey = null; Buffer firstGroupKey = null;
try { try {
rocksIterator.status(); rocksIterator.status();
while (rocksIterator.isValid()) { while (rocksIterator.isValid()) {
ByteBuf key = LLUtils.readDirectNioBuffer(alloc, rocksIterator::key); Buffer key = LLUtils.readDirectNioBuffer(alloc, rocksIterator::key);
try { try {
if (firstGroupKey == null) { if (firstGroupKey == null) {
firstGroupKey = key.retain(); firstGroupKey = key.retain();
} else if (!ByteBufUtil.equals(firstGroupKey, firstGroupKey.readerIndex(), key, key.readerIndex(), prefixLength)) { } else if (!ByteBufUtil.equals(firstGroupKey, firstGroupKey.readerIndex(), key, key.readerIndex(), prefixLength)) {
break; break;
} }
ByteBuf value; Buffer value;
if (readValues) { if (readValues) {
value = LLUtils.readDirectNioBuffer(alloc, rocksIterator::value); value = LLUtils.readDirectNioBuffer(alloc, rocksIterator::value);
} else { } else {
@ -112,7 +113,7 @@ public abstract class LLLocalGroupedReactiveRocksIterator<T> {
}); });
} }
public abstract T getEntry(ByteBuf key, ByteBuf value); public abstract T getEntry(Send<Buffer> key, Send<Buffer> value);
public void release() { public void release() {
range.release(); range.release();

View File

@ -1,8 +1,9 @@
package it.cavallium.dbengine.database.disk; package it.cavallium.dbengine.database.disk;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Buffer;
import io.netty.buffer.ByteBufAllocator; import io.netty.buffer.api.BufferAllocator;
import io.netty.buffer.ByteBufUtil; import io.netty.buffer.api.BufferUtil;
import io.netty.buffer.api.Send;
import it.cavallium.dbengine.database.LLRange; import it.cavallium.dbengine.database.LLRange;
import it.cavallium.dbengine.database.LLUtils; import it.cavallium.dbengine.database.LLUtils;
import java.util.Arrays; import java.util.Arrays;
@ -17,7 +18,7 @@ import static io.netty.buffer.Unpooled.*;
public class LLLocalKeyPrefixReactiveRocksIterator { public class LLLocalKeyPrefixReactiveRocksIterator {
private final RocksDB db; private final RocksDB db;
private final ByteBufAllocator alloc; private final BufferAllocator alloc;
private final ColumnFamilyHandle cfh; private final ColumnFamilyHandle cfh;
private final int prefixLength; private final int prefixLength;
private final LLRange range; private final LLRange range;
@ -26,9 +27,9 @@ public class LLLocalKeyPrefixReactiveRocksIterator {
private final boolean canFillCache; private final boolean canFillCache;
private final String debugName; private final String debugName;
public LLLocalKeyPrefixReactiveRocksIterator(RocksDB db, ByteBufAllocator alloc, ColumnFamilyHandle cfh, public LLLocalKeyPrefixReactiveRocksIterator(RocksDB db, BufferAllocator alloc, ColumnFamilyHandle cfh,
int prefixLength, int prefixLength,
LLRange range, Send<LLRange> range,
boolean allowNettyDirect, boolean allowNettyDirect,
ReadOptions readOptions, ReadOptions readOptions,
boolean canFillCache, boolean canFillCache,
@ -45,7 +46,7 @@ public class LLLocalKeyPrefixReactiveRocksIterator {
} }
public Flux<ByteBuf> flux() { public Flux<Send<Buffer>> flux() {
return Flux return Flux
.generate(() -> { .generate(() -> {
var readOptions = new ReadOptions(this.readOptions); var readOptions = new ReadOptions(this.readOptions);
@ -59,10 +60,10 @@ public class LLLocalKeyPrefixReactiveRocksIterator {
try { try {
var rocksIterator = tuple.getT1(); var rocksIterator = tuple.getT1();
rocksIterator.status(); rocksIterator.status();
ByteBuf firstGroupKey = null; Buffer firstGroupKey = null;
try { try {
while (rocksIterator.isValid()) { while (rocksIterator.isValid()) {
ByteBuf key = LLUtils.readDirectNioBuffer(alloc, rocksIterator::key); Buffer key = LLUtils.readDirectNioBuffer(alloc, rocksIterator::key);
try { try {
if (firstGroupKey == null) { if (firstGroupKey == null) {
firstGroupKey = key.retain(); firstGroupKey = key.retain();

View File

@ -1,18 +1,19 @@
package it.cavallium.dbengine.database.disk; package it.cavallium.dbengine.database.disk;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Buffer;
import io.netty.buffer.ByteBufAllocator; import io.netty.buffer.api.BufferAllocator;
import io.netty.buffer.api.Send;
import it.cavallium.dbengine.database.LLRange; import it.cavallium.dbengine.database.LLRange;
import org.rocksdb.ColumnFamilyHandle; import org.rocksdb.ColumnFamilyHandle;
import org.rocksdb.ReadOptions; import org.rocksdb.ReadOptions;
import org.rocksdb.RocksDB; import org.rocksdb.RocksDB;
public class LLLocalKeyReactiveRocksIterator extends LLLocalReactiveRocksIterator<ByteBuf> { public class LLLocalKeyReactiveRocksIterator extends LLLocalReactiveRocksIterator<Send<Buffer>> {
public LLLocalKeyReactiveRocksIterator(RocksDB db, public LLLocalKeyReactiveRocksIterator(RocksDB db,
ByteBufAllocator alloc, BufferAllocator alloc,
ColumnFamilyHandle cfh, ColumnFamilyHandle cfh,
LLRange range, Send<LLRange> range,
boolean allowNettyDirect, boolean allowNettyDirect,
ReadOptions readOptions, ReadOptions readOptions,
String debugName) { String debugName) {
@ -20,9 +21,9 @@ public class LLLocalKeyReactiveRocksIterator extends LLLocalReactiveRocksIterato
} }
@Override @Override
public ByteBuf getEntry(ByteBuf key, ByteBuf value) { public Send<Buffer> getEntry(Send<Buffer> key, Send<Buffer> value) {
if (value != null) { if (value != null) {
value.release(); value.close();
} }
return key; return key;
} }

View File

@ -1,6 +1,6 @@
package it.cavallium.dbengine.database.disk; package it.cavallium.dbengine.database.disk;
import io.netty.buffer.ByteBufAllocator; import io.netty.buffer.api.BufferAllocator;
import it.cavallium.dbengine.database.Column; import it.cavallium.dbengine.database.Column;
import it.cavallium.dbengine.client.DatabaseOptions; import it.cavallium.dbengine.client.DatabaseOptions;
import it.cavallium.dbengine.database.LLKeyValueDatabase; import it.cavallium.dbengine.database.LLKeyValueDatabase;
@ -65,7 +65,7 @@ public class LLLocalKeyValueDatabase implements LLKeyValueDatabase {
private static final ColumnFamilyDescriptor DEFAULT_COLUMN_FAMILY = new ColumnFamilyDescriptor( private static final ColumnFamilyDescriptor DEFAULT_COLUMN_FAMILY = new ColumnFamilyDescriptor(
RocksDB.DEFAULT_COLUMN_FAMILY); RocksDB.DEFAULT_COLUMN_FAMILY);
private final ByteBufAllocator allocator; private final BufferAllocator allocator;
private final Scheduler dbScheduler; private final Scheduler dbScheduler;
// Configurations // Configurations
@ -81,7 +81,7 @@ public class LLLocalKeyValueDatabase implements LLKeyValueDatabase {
private final AtomicLong nextSnapshotNumbers = new AtomicLong(1); private final AtomicLong nextSnapshotNumbers = new AtomicLong(1);
@SuppressWarnings("SwitchStatementWithTooFewBranches") @SuppressWarnings("SwitchStatementWithTooFewBranches")
public LLLocalKeyValueDatabase(ByteBufAllocator allocator, public LLLocalKeyValueDatabase(BufferAllocator allocator,
String name, String name,
@Nullable Path path, @Nullable Path path,
List<Column> columns, List<Column> columns,
@ -497,7 +497,7 @@ public class LLLocalKeyValueDatabase implements LLKeyValueDatabase {
} }
@Override @Override
public ByteBufAllocator getAllocator() { public BufferAllocator getAllocator() {
return allocator; return allocator;
} }

View File

@ -2,8 +2,9 @@ package it.cavallium.dbengine.database.disk;
import static it.cavallium.dbengine.database.disk.LLLocalDictionary.getRocksIterator; import static it.cavallium.dbengine.database.disk.LLLocalDictionary.getRocksIterator;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Buffer;
import io.netty.buffer.ByteBufAllocator; import io.netty.buffer.api.BufferAllocator;
import io.netty.buffer.api.Send;
import io.netty.util.IllegalReferenceCountException; import io.netty.util.IllegalReferenceCountException;
import it.cavallium.dbengine.database.LLRange; import it.cavallium.dbengine.database.LLRange;
import it.cavallium.dbengine.database.LLUtils; import it.cavallium.dbengine.database.LLUtils;
@ -27,7 +28,7 @@ public abstract class LLLocalReactiveRocksIterator<T> {
private final AtomicBoolean released = new AtomicBoolean(false); private final AtomicBoolean released = new AtomicBoolean(false);
private final RocksDB db; private final RocksDB db;
private final ByteBufAllocator alloc; private final BufferAllocator alloc;
private final ColumnFamilyHandle cfh; private final ColumnFamilyHandle cfh;
private final LLRange range; private final LLRange range;
private final boolean allowNettyDirect; private final boolean allowNettyDirect;
@ -36,9 +37,9 @@ public abstract class LLLocalReactiveRocksIterator<T> {
private final String debugName; private final String debugName;
public LLLocalReactiveRocksIterator(RocksDB db, public LLLocalReactiveRocksIterator(RocksDB db,
ByteBufAllocator alloc, BufferAllocator alloc,
ColumnFamilyHandle cfh, ColumnFamilyHandle cfh,
LLRange range, Send<LLRange> range,
boolean allowNettyDirect, boolean allowNettyDirect,
ReadOptions readOptions, ReadOptions readOptions,
boolean readValues, boolean readValues,
@ -46,7 +47,7 @@ public abstract class LLLocalReactiveRocksIterator<T> {
this.db = db; this.db = db;
this.alloc = alloc; this.alloc = alloc;
this.cfh = cfh; this.cfh = cfh;
this.range = range; this.range = range.receive();
this.allowNettyDirect = allowNettyDirect; this.allowNettyDirect = allowNettyDirect;
this.readOptions = readOptions; this.readOptions = readOptions;
this.readValues = readValues; this.readValues = readValues;
@ -55,59 +56,53 @@ public abstract class LLLocalReactiveRocksIterator<T> {
public Flux<T> flux() { public Flux<T> flux() {
return Flux return Flux
.<T, @NotNull Tuple3<RocksIterator, ReleasableSlice, ReleasableSlice>>generate(() -> { .generate(() -> {
var readOptions = new ReadOptions(this.readOptions); var readOptions = new ReadOptions(this.readOptions);
if (!range.hasMin() || !range.hasMax()) { if (!range.hasMin() || !range.hasMax()) {
readOptions.setReadaheadSize(32 * 1024); // 32KiB readOptions.setReadaheadSize(32 * 1024); // 32KiB
readOptions.setFillCache(false); readOptions.setFillCache(false);
} }
return getRocksIterator(allowNettyDirect, readOptions, range.retain(), db, cfh); return getRocksIterator(allowNettyDirect, readOptions, range.copy().send(), db, cfh);
}, (tuple, sink) -> { }, (tuple, sink) -> {
range.retain();
try { try {
var rocksIterator = tuple.getT1(); var rocksIterator = tuple.getT1();
rocksIterator.status(); rocksIterator.status();
if (rocksIterator.isValid()) { if (rocksIterator.isValid()) {
ByteBuf key = LLUtils.readDirectNioBuffer(alloc, rocksIterator::key); try (Buffer key = LLUtils.readDirectNioBuffer(alloc, rocksIterator::key)) {
try { Buffer value;
ByteBuf value;
if (readValues) { if (readValues) {
value = LLUtils.readDirectNioBuffer(alloc, rocksIterator::value); value = LLUtils.readDirectNioBuffer(alloc, rocksIterator::value);
} else { } else {
value = alloc.buffer(0); value = alloc.allocate(0);
} }
try { try {
rocksIterator.next(); rocksIterator.next();
rocksIterator.status(); rocksIterator.status();
sink.next(getEntry(key.retain(), value.retain())); sink.next(getEntry(key.send(), value.send()));
} finally { } finally {
value.release(); value.close();
} }
} finally {
key.release();
} }
} else { } else {
sink.complete(); sink.complete();
} }
} catch (RocksDBException ex) { } catch (RocksDBException ex) {
sink.error(ex); sink.error(ex);
} finally {
range.release();
} }
return tuple; return tuple;
}, tuple -> { }, tuple -> {
var rocksIterator = tuple.getT1(); var rocksIterator = tuple.getT1();
rocksIterator.close(); rocksIterator.close();
tuple.getT2().release(); tuple.getT2().close();
tuple.getT3().release(); tuple.getT3().close();
}); });
} }
public abstract T getEntry(ByteBuf key, ByteBuf value); public abstract T getEntry(Send<Buffer> key, Send<Buffer> value);
public void release() { public void release() {
if (released.compareAndSet(false, true)) { if (released.compareAndSet(false, true)) {
range.release(); range.close();
} else { } else {
throw new IllegalReferenceCountException(0, -1); throw new IllegalReferenceCountException(0, -1);
} }

View File

@ -1,17 +1,20 @@
package it.cavallium.dbengine.database.disk; package it.cavallium.dbengine.database.disk;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Buffer;
import io.netty.buffer.api.Resource;
import it.cavallium.dbengine.database.SafeCloseable;
import org.rocksdb.AbstractSlice; import org.rocksdb.AbstractSlice;
public interface ReleasableSlice { public interface ReleasableSlice extends SafeCloseable {
default void release() { @Override
default void close() {
} }
AbstractSlice<?> slice(); AbstractSlice<?> slice();
ByteBuf byteBuf(); Buffer byteBuf();
Object additionalData(); Object additionalData();
} }

View File

@ -1,6 +1,6 @@
package it.cavallium.dbengine.database.memory; package it.cavallium.dbengine.database.memory;
import io.netty.buffer.ByteBufAllocator; import io.netty.buffer.api.BufferAllocator;
import it.cavallium.dbengine.client.DatabaseOptions; import it.cavallium.dbengine.client.DatabaseOptions;
import it.cavallium.dbengine.client.IndicizerAnalyzers; import it.cavallium.dbengine.client.IndicizerAnalyzers;
import it.cavallium.dbengine.client.IndicizerSimilarities; import it.cavallium.dbengine.client.IndicizerSimilarities;

View File

@ -1,7 +1,7 @@
package it.cavallium.dbengine.database.memory; package it.cavallium.dbengine.database.memory;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Buffer;
import io.netty.buffer.ByteBufAllocator; import io.netty.buffer.api.BufferAllocator;
import it.cavallium.dbengine.client.BadBlock; import it.cavallium.dbengine.client.BadBlock;
import it.cavallium.dbengine.database.Delta; import it.cavallium.dbengine.database.Delta;
import it.cavallium.dbengine.database.ExtraKeyOperationResult; import it.cavallium.dbengine.database.ExtraKeyOperationResult;
@ -81,7 +81,7 @@ public class LLMemoryDictionary implements LLDictionary {
} }
} }
private Mono<ByteBuf> transformResult(Mono<ByteList> result, LLDictionaryResultType resultType) { private Mono<Buffer> transformResult(Mono<ByteList> result, LLDictionaryResultType resultType) {
if (resultType == LLDictionaryResultType.PREVIOUS_VALUE) { if (resultType == LLDictionaryResultType.PREVIOUS_VALUE) {
// Don't retain the result because it has been removed from the skip list // Don't retain the result because it has been removed from the skip list
return result.map(this::kk); return result.map(this::kk);
@ -95,11 +95,11 @@ public class LLMemoryDictionary implements LLDictionary {
} }
} }
private ByteList k(ByteBuf buf) { private ByteList k(Buffer buf) {
return new BinaryLexicographicList(LLUtils.toArray(buf)); return new BinaryLexicographicList(LLUtils.toArray(buf));
} }
private ByteBuf kk(ByteList bytesList) { private Buffer kk(ByteList bytesList) {
var buffer = getAllocator().buffer(bytesList.size()); var buffer = getAllocator().buffer(bytesList.size());
buffer.writeBytes(bytesList.toByteArray()); buffer.writeBytes(bytesList.toByteArray());
return buffer; return buffer;
@ -139,7 +139,7 @@ public class LLMemoryDictionary implements LLDictionary {
} }
@Override @Override
public Mono<ByteBuf> get(@Nullable LLSnapshot snapshot, Mono<ByteBuf> keyMono, boolean existsAlmostCertainly) { public Mono<Buffer> get(@Nullable LLSnapshot snapshot, Mono<Buffer> keyMono, boolean existsAlmostCertainly) {
return Mono.usingWhen(keyMono, return Mono.usingWhen(keyMono,
key -> Mono key -> Mono
.fromCallable(() -> snapshots.get(resolveSnapshot(snapshot)).get(k(key))) .fromCallable(() -> snapshots.get(resolveSnapshot(snapshot)).get(k(key)))
@ -150,7 +150,7 @@ public class LLMemoryDictionary implements LLDictionary {
} }
@Override @Override
public Mono<ByteBuf> put(Mono<ByteBuf> keyMono, Mono<ByteBuf> valueMono, LLDictionaryResultType resultType) { public Mono<Buffer> put(Mono<Buffer> keyMono, Mono<Buffer> valueMono, LLDictionaryResultType resultType) {
return Mono.usingWhen(keyMono, return Mono.usingWhen(keyMono,
key -> Mono.usingWhen(valueMono, key -> Mono.usingWhen(valueMono,
value -> Mono value -> Mono
@ -169,17 +169,17 @@ public class LLMemoryDictionary implements LLDictionary {
} }
@Override @Override
public Mono<Delta<ByteBuf>> updateAndGetDelta(Mono<ByteBuf> keyMono, public Mono<Delta<Buffer>> updateAndGetDelta(Mono<Buffer> keyMono,
SerializationFunction<@Nullable ByteBuf, @Nullable ByteBuf> updater, SerializationFunction<@Nullable Buffer, @Nullable Buffer> updater,
boolean existsAlmostCertainly) { boolean existsAlmostCertainly) {
return Mono.usingWhen(keyMono, return Mono.usingWhen(keyMono,
key -> Mono.fromCallable(() -> { key -> Mono.fromCallable(() -> {
AtomicReference<ByteBuf> oldRef = new AtomicReference<>(null); AtomicReference<Buffer> oldRef = new AtomicReference<>(null);
var newValue = mainDb.compute(k(key), (_unused, old) -> { var newValue = mainDb.compute(k(key), (_unused, old) -> {
if (old != null) { if (old != null) {
oldRef.set(kk(old)); oldRef.set(kk(old));
} }
ByteBuf v = null; Buffer v = null;
try { try {
v = updater.apply(old != null ? kk(old) : null); v = updater.apply(old != null ? kk(old) : null);
} catch (SerializationException e) { } catch (SerializationException e) {
@ -205,7 +205,7 @@ public class LLMemoryDictionary implements LLDictionary {
} }
@Override @Override
public Mono<ByteBuf> remove(Mono<ByteBuf> keyMono, LLDictionaryResultType resultType) { public Mono<Buffer> remove(Mono<Buffer> keyMono, LLDictionaryResultType resultType) {
return Mono.usingWhen(keyMono, return Mono.usingWhen(keyMono,
key -> Mono key -> Mono
.fromCallable(() -> mainDb.remove(k(key))) .fromCallable(() -> mainDb.remove(k(key)))
@ -228,8 +228,8 @@ public class LLMemoryDictionary implements LLDictionary {
} }
@Override @Override
public <K> Flux<Tuple3<K, ByteBuf, Optional<ByteBuf>>> getMulti(@Nullable LLSnapshot snapshot, public <K> Flux<Tuple3<K, Buffer, Optional<Buffer>>> getMulti(@Nullable LLSnapshot snapshot,
Flux<Tuple2<K, ByteBuf>> keys, Flux<Tuple2<K, Buffer>> keys,
boolean existsAlmostCertainly) { boolean existsAlmostCertainly) {
return keys return keys
.flatMapSequential(key -> { .flatMapSequential(key -> {
@ -267,8 +267,8 @@ public class LLMemoryDictionary implements LLDictionary {
} }
@Override @Override
public <X> Flux<ExtraKeyOperationResult<ByteBuf, X>> updateMulti(Flux<Tuple2<ByteBuf, X>> entries, public <X> Flux<ExtraKeyOperationResult<Buffer, X>> updateMulti(Flux<Tuple2<Buffer, X>> entries,
BiSerializationFunction<ByteBuf, X, ByteBuf> updateFunction) { BiSerializationFunction<Buffer, X, Buffer> updateFunction) {
return Flux.error(new UnsupportedOperationException("Not implemented")); return Flux.error(new UnsupportedOperationException("Not implemented"));
} }
@ -304,7 +304,7 @@ public class LLMemoryDictionary implements LLDictionary {
} }
@Override @Override
public Flux<ByteBuf> getRangeKeys(@Nullable LLSnapshot snapshot, Mono<LLRange> rangeMono) { public Flux<Buffer> getRangeKeys(@Nullable LLSnapshot snapshot, Mono<LLRange> rangeMono) {
return Flux.usingWhen(rangeMono, return Flux.usingWhen(rangeMono,
range -> { range -> {
if (range.isSingle()) { if (range.isSingle()) {
@ -325,7 +325,7 @@ public class LLMemoryDictionary implements LLDictionary {
} }
@Override @Override
public Flux<List<ByteBuf>> getRangeKeysGrouped(@Nullable LLSnapshot snapshot, public Flux<List<Buffer>> getRangeKeysGrouped(@Nullable LLSnapshot snapshot,
Mono<LLRange> rangeMono, Mono<LLRange> rangeMono,
int prefixLength) { int prefixLength) {
return getRangeKeys(snapshot, rangeMono) return getRangeKeys(snapshot, rangeMono)
@ -333,7 +333,7 @@ public class LLMemoryDictionary implements LLDictionary {
} }
@Override @Override
public Flux<ByteBuf> getRangeKeyPrefixes(@Nullable LLSnapshot snapshot, Mono<LLRange> rangeMono, int prefixLength) { public Flux<Buffer> getRangeKeyPrefixes(@Nullable LLSnapshot snapshot, Mono<LLRange> rangeMono, int prefixLength) {
return getRangeKeys(snapshot, rangeMono) return getRangeKeys(snapshot, rangeMono)
.distinctUntilChanged(k -> k.slice(k.readerIndex(), prefixLength), (a, b) -> { .distinctUntilChanged(k -> k.slice(k.readerIndex(), prefixLength), (a, b) -> {
if (LLUtils.equals(a, b)) { if (LLUtils.equals(a, b)) {
@ -376,7 +376,7 @@ public class LLMemoryDictionary implements LLDictionary {
} }
@Override @Override
public Mono<ByteBuf> getOneKey(@Nullable LLSnapshot snapshot, Mono<LLRange> rangeMono) { public Mono<Buffer> getOneKey(@Nullable LLSnapshot snapshot, Mono<LLRange> rangeMono) {
return Mono.error(new UnsupportedOperationException("Not implemented")); return Mono.error(new UnsupportedOperationException("Not implemented"));
} }

View File

@ -1,7 +1,7 @@
package it.cavallium.dbengine.database.memory; package it.cavallium.dbengine.database.memory;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Buffer;
import io.netty.buffer.ByteBufAllocator; import io.netty.buffer.api.BufferAllocator;
import it.cavallium.dbengine.client.DatabaseOptions; import it.cavallium.dbengine.client.DatabaseOptions;
import it.cavallium.dbengine.database.Column; import it.cavallium.dbengine.database.Column;
import it.cavallium.dbengine.database.LLDictionary; import it.cavallium.dbengine.database.LLDictionary;

View File

@ -1,6 +1,6 @@
package it.cavallium.dbengine.database.memory; package it.cavallium.dbengine.database.memory;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Buffer;
import io.netty.buffer.Unpooled; import io.netty.buffer.Unpooled;
import it.cavallium.dbengine.database.LLDictionaryResultType; import it.cavallium.dbengine.database.LLDictionaryResultType;
import it.cavallium.dbengine.database.LLSingleton; import it.cavallium.dbengine.database.LLSingleton;
@ -13,13 +13,13 @@ public class LLMemorySingleton implements LLSingleton {
private final LLMemoryDictionary dict; private final LLMemoryDictionary dict;
private final byte[] singletonName; private final byte[] singletonName;
private final Mono<ByteBuf> singletonNameBufMono; private final Mono<Buffer> singletonNameBufMono;
public LLMemorySingleton(LLMemoryDictionary dict, byte[] singletonName) { public LLMemorySingleton(LLMemoryDictionary dict, byte[] singletonName) {
this.dict = dict; this.dict = dict;
this.singletonName = singletonName; this.singletonName = singletonName;
ByteBuf singletonNameBuf = Unpooled.wrappedBuffer(singletonName); Buffer singletonNameBuf = Unpooled.wrappedBuffer(singletonName);
this.singletonNameBufMono = Mono.just(singletonNameBuf).map(ByteBuf::retain); this.singletonNameBufMono = Mono.just(singletonNameBuf).map(Buffer::retain);
} }
@Override @Override
@ -42,8 +42,8 @@ public class LLMemorySingleton implements LLSingleton {
@Override @Override
public Mono<Void> set(byte[] value) { public Mono<Void> set(byte[] value) {
var bbKey = Mono.just(Unpooled.wrappedBuffer(singletonName)).map(ByteBuf::retain); var bbKey = Mono.just(Unpooled.wrappedBuffer(singletonName)).map(Buffer::retain);
var bbVal = Mono.just(Unpooled.wrappedBuffer(value)).map(ByteBuf::retain); var bbVal = Mono.just(Unpooled.wrappedBuffer(value)).map(Buffer::retain);
return dict return dict
.put(bbKey, bbVal, LLDictionaryResultType.VOID) .put(bbKey, bbVal, LLDictionaryResultType.VOID)
.then(); .then();

View File

@ -1,7 +1,7 @@
package it.cavallium.dbengine.database.serialization; package it.cavallium.dbengine.database.serialization;
import io.netty.buffer.ByteBufInputStream; import io.netty.buffer.api.BufferInputStream;
import io.netty.buffer.ByteBufOutputStream; import io.netty.buffer.api.BufferOutputStream;
import java.io.IOException; import java.io.IOException;
import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.NotNull;

View File

@ -1,9 +1,9 @@
package it.cavallium.dbengine.database.serialization; package it.cavallium.dbengine.database.serialization;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Buffer;
import io.netty.buffer.ByteBufAllocator; import io.netty.buffer.api.BufferAllocator;
import io.netty.buffer.ByteBufInputStream; import io.netty.buffer.api.BufferInputStream;
import io.netty.buffer.ByteBufOutputStream; import io.netty.buffer.api.BufferOutputStream;
import io.netty.buffer.PooledByteBufAllocator; import io.netty.buffer.PooledByteBufAllocator;
import io.netty.buffer.Unpooled; import io.netty.buffer.Unpooled;
import java.io.IOError; import java.io.IOError;
@ -11,7 +11,7 @@ import java.io.IOException;
import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.NotNull;
import org.warp.commonutils.error.IndexOutOfBoundsException; import org.warp.commonutils.error.IndexOutOfBoundsException;
public class CodecSerializer<A> implements Serializer<A, ByteBuf> { public class CodecSerializer<A> implements Serializer<A, Buffer> {
private final ByteBufAllocator allocator; private final ByteBufAllocator allocator;
private final Codecs<A> deserializationCodecs; private final Codecs<A> deserializationCodecs;
@ -40,7 +40,7 @@ public class CodecSerializer<A> implements Serializer<A, ByteBuf> {
} }
@Override @Override
public @NotNull A deserialize(@NotNull ByteBuf serialized) { public @NotNull A deserialize(@NotNull Buffer serialized) {
try (var is = new ByteBufInputStream(serialized)) { try (var is = new ByteBufInputStream(serialized)) {
int codecId; int codecId;
if (microCodecs) { if (microCodecs) {
@ -59,8 +59,8 @@ public class CodecSerializer<A> implements Serializer<A, ByteBuf> {
} }
@Override @Override
public @NotNull ByteBuf serialize(@NotNull A deserialized) { public @NotNull Buffer serialize(@NotNull A deserialized) {
ByteBuf buf = allocator.buffer(); Buffer buf = allocator.buffer();
try (var os = new ByteBufOutputStream(buf)) { try (var os = new ByteBufOutputStream(buf)) {
if (microCodecs) { if (microCodecs) {
os.writeByte(serializationCodecId); os.writeByte(serializationCodecId);

View File

@ -1,9 +1,9 @@
package it.cavallium.dbengine.database.serialization; package it.cavallium.dbengine.database.serialization;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Buffer;
import io.netty.buffer.ByteBufAllocator; import io.netty.buffer.api.BufferAllocator;
import io.netty.buffer.ByteBufUtil; import io.netty.buffer.api.Send;
import io.netty.buffer.PooledByteBufAllocator; import it.cavallium.dbengine.database.LLUtils;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.NotNull;
@ -13,52 +13,41 @@ public interface Serializer<A, B> {
@NotNull B serialize(@NotNull A deserialized) throws SerializationException; @NotNull B serialize(@NotNull A deserialized) throws SerializationException;
Serializer<ByteBuf, ByteBuf> NOOP_SERIALIZER = new Serializer<>() { Serializer<Send<Buffer>, Send<Buffer>> NOOP_SERIALIZER = new Serializer<>() {
@Override @Override
public @NotNull ByteBuf deserialize(@NotNull ByteBuf serialized) { public @NotNull Send<Buffer> deserialize(@NotNull Send<Buffer> serialized) {
try { return serialized;
return serialized.retainedSlice();
} finally {
serialized.release();
}
} }
@Override @Override
public @NotNull ByteBuf serialize(@NotNull ByteBuf deserialized) { public @NotNull Send<Buffer> serialize(@NotNull Send<Buffer> deserialized) {
try { return deserialized;
return deserialized.retainedSlice();
} finally {
deserialized.release();
}
} }
}; };
static Serializer<ByteBuf, ByteBuf> noop() { static Serializer<Send<Buffer>, Send<Buffer>> noop() {
return NOOP_SERIALIZER; return NOOP_SERIALIZER;
} }
static Serializer<String, ByteBuf> utf8(ByteBufAllocator allocator) { static Serializer<String, Send<Buffer>> utf8(BufferAllocator allocator) {
return new Serializer<>() { return new Serializer<>() {
@Override @Override
public @NotNull String deserialize(@NotNull ByteBuf serialized) { public @NotNull String deserialize(@NotNull Send<Buffer> serializedToReceive) {
try { try (Buffer serialized = serializedToReceive.receive()) {
var length = serialized.readInt(); int length = serialized.readInt();
var result = serialized.toString(serialized.readerIndex(), length, StandardCharsets.UTF_8); return LLUtils.deserializeString(serialized.send(), serialized.readerOffset(), length, StandardCharsets.UTF_8);
serialized.readerIndex(serialized.readerIndex() + length);
return result;
} finally {
serialized.release();
} }
} }
@Override @Override
public @NotNull ByteBuf serialize(@NotNull String deserialized) { public @NotNull Send<Buffer> serialize(@NotNull String deserialized) {
// UTF-8 uses max. 3 bytes per char, so calculate the worst case. // UTF-8 uses max. 3 bytes per char, so calculate the worst case.
int length = ByteBufUtil.utf8Bytes(deserialized); int length = LLUtils.utf8MaxBytes(deserialized);
ByteBuf buf = allocator.buffer(Integer.BYTES + length); try (Buffer buf = allocator.allocate(Integer.BYTES + length)) {
buf.writeInt(length); buf.writeInt(length);
ByteBufUtil.writeUtf8(buf, deserialized); LLUtils.writeString(buf, deserialized, StandardCharsets.UTF_8);
return buf; return buf.send();
}
} }
}; };
} }

View File

@ -1,12 +1,9 @@
package it.cavallium.dbengine.database.serialization; package it.cavallium.dbengine.database.serialization;
import com.google.common.primitives.Ints; import io.netty.buffer.api.Buffer;
import com.google.common.primitives.Longs; import io.netty.buffer.api.BufferAllocator;
import io.netty.buffer.ByteBuf; import io.netty.buffer.api.Send;
import io.netty.buffer.ByteBufAllocator; import it.cavallium.dbengine.database.LLUtils;
import io.netty.buffer.ByteBufUtil;
import io.netty.buffer.PooledByteBufAllocator;
import java.io.NotSerializableException;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.NotNull;
@ -15,31 +12,30 @@ public interface SerializerFixedBinaryLength<A, B> extends Serializer<A, B> {
int getSerializedBinaryLength(); int getSerializedBinaryLength();
static SerializerFixedBinaryLength<ByteBuf, ByteBuf> noop(int length) { static SerializerFixedBinaryLength<Send<Buffer>, Send<Buffer>> noop(int length) {
return new SerializerFixedBinaryLength<>() { return new SerializerFixedBinaryLength<>() {
@Override @Override
public @NotNull ByteBuf deserialize(@NotNull ByteBuf serialized) { public @NotNull Send<Buffer> deserialize(@NotNull Send<Buffer> serialized) {
try { try (var buf = serialized.receive()) {
if (serialized.readableBytes() != getSerializedBinaryLength()) { if (buf.readableBytes() != getSerializedBinaryLength()) {
throw new IllegalArgumentException( throw new IllegalArgumentException(
"Fixed serializer with " + getSerializedBinaryLength() + " bytes has tried to deserialize an element with " "Fixed serializer with " + getSerializedBinaryLength() + " bytes has tried to deserialize an element with "
+ serialized.readableBytes() + " bytes instead"); + buf.readableBytes() + " bytes instead");
} }
return serialized.retain(); return buf.send();
} finally {
serialized.release();
} }
} }
@Override @Override
public @NotNull ByteBuf serialize(@NotNull ByteBuf deserialized) { public @NotNull Send<Buffer> serialize(@NotNull Send<Buffer> deserialized) {
ByteBuf buf = deserialized.retain(); try (Buffer buf = deserialized.receive()) {
if (buf.readableBytes() != getSerializedBinaryLength()) { if (buf.readableBytes() != getSerializedBinaryLength()) {
throw new IllegalArgumentException( throw new IllegalArgumentException(
"Fixed serializer with " + getSerializedBinaryLength() + " bytes has tried to serialize an element with " "Fixed serializer with " + getSerializedBinaryLength() + " bytes has tried to serialize an element with "
+ buf.readableBytes() + " bytes instead"); + buf.readableBytes() + " bytes instead");
}
return buf.send();
} }
return buf;
} }
@Override @Override
@ -49,38 +45,32 @@ public interface SerializerFixedBinaryLength<A, B> extends Serializer<A, B> {
}; };
} }
static SerializerFixedBinaryLength<String, ByteBuf> utf8(ByteBufAllocator allocator, int length) { static SerializerFixedBinaryLength<String, Send<Buffer>> utf8(BufferAllocator allocator, int length) {
return new SerializerFixedBinaryLength<>() { return new SerializerFixedBinaryLength<>() {
@Override @Override
public @NotNull String deserialize(@NotNull ByteBuf serialized) throws SerializationException { public @NotNull String deserialize(@NotNull Send<Buffer> serializedToReceive) throws SerializationException {
try { try (var serialized = serializedToReceive.receive()) {
if (serialized.readableBytes() != getSerializedBinaryLength()) { if (serialized.readableBytes() != getSerializedBinaryLength()) {
throw new SerializationException( throw new SerializationException(
"Fixed serializer with " + getSerializedBinaryLength() + " bytes has tried to deserialize an element with " "Fixed serializer with " + getSerializedBinaryLength() + " bytes has tried to deserialize an element with "
+ serialized.readableBytes() + " bytes instead"); + serialized.readableBytes() + " bytes instead");
} }
var result = serialized.toString(StandardCharsets.UTF_8); var readerOffset = serialized.readerOffset();
serialized.readerIndex(serialized.writerIndex()); return LLUtils.deserializeString(serialized.send(), readerOffset, length, StandardCharsets.UTF_8);
return result;
} finally {
serialized.release();
} }
} }
@Override @Override
public @NotNull ByteBuf serialize(@NotNull String deserialized) throws SerializationException { public @NotNull Send<Buffer> serialize(@NotNull String deserialized) throws SerializationException {
// UTF-8 uses max. 3 bytes per char, so calculate the worst case. // UTF-8 uses max. 3 bytes per char, so calculate the worst case.
ByteBuf buf = allocator.buffer(ByteBufUtil.utf8MaxBytes(deserialized)); try (Buffer buf = allocator.allocate(LLUtils.utf8MaxBytes(deserialized))) {
try { LLUtils.writeString(buf, deserialized, StandardCharsets.UTF_8);
ByteBufUtil.writeUtf8(buf, deserialized);
if (buf.readableBytes() != getSerializedBinaryLength()) { if (buf.readableBytes() != getSerializedBinaryLength()) {
throw new SerializationException("Fixed serializer with " + getSerializedBinaryLength() throw new SerializationException("Fixed serializer with " + getSerializedBinaryLength()
+ " bytes has tried to serialize an element with " + " bytes has tried to serialize an element with "
+ buf.readableBytes() + " bytes instead"); + buf.readableBytes() + " bytes instead");
} }
return buf.retain(); return buf.send();
} finally {
buf.release();
} }
} }
@ -91,26 +81,25 @@ public interface SerializerFixedBinaryLength<A, B> extends Serializer<A, B> {
}; };
} }
static SerializerFixedBinaryLength<Integer, ByteBuf> intSerializer(ByteBufAllocator allocator) { static SerializerFixedBinaryLength<Integer, Send<Buffer>> intSerializer(BufferAllocator allocator) {
return new SerializerFixedBinaryLength<>() { return new SerializerFixedBinaryLength<>() {
@Override @Override
public @NotNull Integer deserialize(@NotNull ByteBuf serialized) { public @NotNull Integer deserialize(@NotNull Send<Buffer> serializedToReceive) {
try { try (var serialized = serializedToReceive.receive()) {
if (serialized.readableBytes() != getSerializedBinaryLength()) { if (serialized.readableBytes() != getSerializedBinaryLength()) {
throw new IllegalArgumentException( throw new IllegalArgumentException(
"Fixed serializer with " + getSerializedBinaryLength() + " bytes has tried to deserialize an element with " "Fixed serializer with " + getSerializedBinaryLength() + " bytes has tried to deserialize an element with "
+ serialized.readableBytes() + " bytes instead"); + serialized.readableBytes() + " bytes instead");
} }
return serialized.readInt(); return serialized.readInt();
} finally {
serialized.release();
} }
} }
@Override @Override
public @NotNull ByteBuf serialize(@NotNull Integer deserialized) { public @NotNull Send<Buffer> serialize(@NotNull Integer deserialized) {
ByteBuf buf = allocator.buffer(Integer.BYTES); try (Buffer buf = allocator.allocate(Integer.BYTES)) {
return buf.writeInt(deserialized); return buf.writeInt(deserialized).send();
}
} }
@Override @Override
@ -120,26 +109,25 @@ public interface SerializerFixedBinaryLength<A, B> extends Serializer<A, B> {
}; };
} }
static SerializerFixedBinaryLength<Long, ByteBuf> longSerializer(ByteBufAllocator allocator) { static SerializerFixedBinaryLength<Long, Send<Buffer>> longSerializer(BufferAllocator allocator) {
return new SerializerFixedBinaryLength<>() { return new SerializerFixedBinaryLength<>() {
@Override @Override
public @NotNull Long deserialize(@NotNull ByteBuf serialized) { public @NotNull Long deserialize(@NotNull Send<Buffer> serializedToReceive) {
try { try (var serialized = serializedToReceive.receive()) {
if (serialized.readableBytes() != getSerializedBinaryLength()) { if (serialized.readableBytes() != getSerializedBinaryLength()) {
throw new IllegalArgumentException( throw new IllegalArgumentException(
"Fixed serializer with " + getSerializedBinaryLength() + " bytes has tried to deserialize an element with " "Fixed serializer with " + getSerializedBinaryLength() + " bytes has tried to deserialize an element with "
+ serialized.readableBytes() + " bytes instead"); + serialized.readableBytes() + " bytes instead");
} }
return serialized.readLong(); return serialized.readLong();
} finally {
serialized.release();
} }
} }
@Override @Override
public @NotNull ByteBuf serialize(@NotNull Long deserialized) { public @NotNull Send<Buffer> serialize(@NotNull Long deserialized) {
ByteBuf buf = allocator.buffer(Long.BYTES); try (Buffer buf = allocator.allocate(Long.BYTES)) {
return buf.writeLong(deserialized); return buf.writeLong(deserialized).send();
}
} }
@Override @Override

View File

@ -75,7 +75,7 @@ public class RandomFieldComparator extends FieldComparator<Float> implements Lea
} }
}; };
if (!(scorer instanceof ScoreCachingWrappingScorer)) { if (!(scorer instanceof ScoreCachingWrappingScorer)) {
this.scorer = new ScoreCachingWrappingScorer(randomizedScorer); this.scorer = ScoreCachingWrappingScorer.wrap(randomizedScorer);
} else { } else {
this.scorer = randomizedScorer; this.scorer = randomizedScorer;
} }

View File

@ -13,7 +13,6 @@ import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.HitQueue; import org.apache.lucene.search.HitQueue;
import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.MultiCollectorManager.Collectors;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorable; import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.ScoreDoc;

View File

@ -1,13 +1,13 @@
package it.cavallium.dbengine.netty; package it.cavallium.dbengine.netty;
import io.netty.buffer.ByteBufAllocatorMetric; import io.netty.buffer.api.BufferAllocatorMetric;
public class JMXNettyMonitoring implements JMXNettyMonitoringMBean { public class JMXNettyMonitoring implements JMXNettyMonitoringMBean {
private final String name; private final String name;
private final ByteBufAllocatorMetric metric; private final ByteBufAllocatorMetric metric;
public JMXNettyMonitoring(String name, io.netty.buffer.ByteBufAllocatorMetric metric) { public JMXNettyMonitoring(String name, io.netty.buffer.api.BufferAllocatorMetric metric) {
this.name = name; this.name = name;
this.metric = metric; this.metric = metric;
} }

View File

@ -1,6 +1,6 @@
package it.cavallium.dbengine.netty; package it.cavallium.dbengine.netty;
import io.netty.buffer.ByteBufAllocatorMetric; import io.netty.buffer.api.BufferAllocatorMetric;
import io.netty.buffer.PoolArenaMetric; import io.netty.buffer.PoolArenaMetric;
import io.netty.buffer.PooledByteBufAllocator; import io.netty.buffer.PooledByteBufAllocator;
import io.netty.buffer.PooledByteBufAllocatorMetric; import io.netty.buffer.PooledByteBufAllocatorMetric;

View File

@ -1,6 +1,6 @@
package it.cavallium.dbengine.netty; package it.cavallium.dbengine.netty;
import io.netty.buffer.ByteBufAllocatorMetric; import io.netty.buffer.api.BufferAllocatorMetric;
import io.netty.buffer.PooledByteBufAllocatorMetric; import io.netty.buffer.PooledByteBufAllocatorMetric;
public class JMXPooledNettyMonitoring extends JMXNettyMonitoring implements JMXNettyMonitoringMBean { public class JMXPooledNettyMonitoring extends JMXNettyMonitoring implements JMXNettyMonitoringMBean {

View File

@ -1,6 +1,9 @@
package org.rocksdb; package org.rocksdb;
import io.netty.buffer.ByteBuf; import static it.cavallium.dbengine.database.LLUtils.isDirect;
import io.netty.buffer.api.Buffer;
import io.netty.buffer.api.Send;
import it.cavallium.dbengine.database.LLUtils; import it.cavallium.dbengine.database.LLUtils;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.util.ArrayList; import java.util.ArrayList;
@ -25,7 +28,7 @@ public class CappedWriteBatch extends WriteBatch {
private final int cap; private final int cap;
private final WriteOptions writeOptions; private final WriteOptions writeOptions;
private final List<ByteBuf> buffersToRelease; private final List<Buffer> buffersToRelease;
/** /**
* @param cap The limit of operations * @param cap The limit of operations
@ -53,9 +56,8 @@ public class CappedWriteBatch extends WriteBatch {
private synchronized void releaseAllBuffers() { private synchronized void releaseAllBuffers() {
if (!buffersToRelease.isEmpty()) { if (!buffersToRelease.isEmpty()) {
for (ByteBuf byteBuffer : buffersToRelease) { for (Buffer byteBuffer : buffersToRelease) {
assert byteBuffer.refCnt() > 0; byteBuffer.close();
byteBuffer.release();
} }
buffersToRelease.clear(); buffersToRelease.clear();
} }
@ -90,8 +92,12 @@ public class CappedWriteBatch extends WriteBatch {
flushIfNeeded(false); flushIfNeeded(false);
} }
public synchronized void put(ColumnFamilyHandle columnFamilyHandle, ByteBuf key, ByteBuf value) throws RocksDBException { public synchronized void put(ColumnFamilyHandle columnFamilyHandle,
if (USE_FAST_DIRECT_BUFFERS && key.isDirect() && value.isDirect()) { Send<Buffer> keyToReceive,
Send<Buffer> valueToReceive) throws RocksDBException {
var key = keyToReceive.receive();
var value = valueToReceive.receive();
if (USE_FAST_DIRECT_BUFFERS && isDirect(key) && isDirect(value)) {
buffersToRelease.add(key); buffersToRelease.add(key);
buffersToRelease.add(value); buffersToRelease.add(value);
ByteBuffer keyNioBuffer = LLUtils.toDirect(key); ByteBuffer keyNioBuffer = LLUtils.toDirect(key);
@ -106,8 +112,8 @@ public class CappedWriteBatch extends WriteBatch {
byte[] valueArray = LLUtils.toArray(value); byte[] valueArray = LLUtils.toArray(value);
super.put(columnFamilyHandle, keyArray, valueArray); super.put(columnFamilyHandle, keyArray, valueArray);
} finally { } finally {
key.release(); key.close();
value.release(); value.close();
} }
} }
flushIfNeeded(false); flushIfNeeded(false);
@ -151,7 +157,8 @@ public class CappedWriteBatch extends WriteBatch {
flushIfNeeded(false); flushIfNeeded(false);
} }
public synchronized void delete(ColumnFamilyHandle columnFamilyHandle, ByteBuf key) throws RocksDBException { public synchronized void delete(ColumnFamilyHandle columnFamilyHandle, Send<Buffer> keyToReceive) throws RocksDBException {
var key = keyToReceive.receive();
if (USE_FAST_DIRECT_BUFFERS) { if (USE_FAST_DIRECT_BUFFERS) {
buffersToRelease.add(key); buffersToRelease.add(key);
ByteBuffer keyNioBuffer = LLUtils.toDirect(key); ByteBuffer keyNioBuffer = LLUtils.toDirect(key);
@ -167,7 +174,7 @@ public class CappedWriteBatch extends WriteBatch {
try { try {
super.delete(columnFamilyHandle, LLUtils.toArray(key)); super.delete(columnFamilyHandle, LLUtils.toArray(key));
} finally { } finally {
key.release(); key.close();
} }
} }
flushIfNeeded(false); flushIfNeeded(false);