Remove some leaks

Andrea Cavalli 2022-05-20 23:59:56 +02:00
parent d253111233
commit 18d5ddf6e1
13 changed files with 461 additions and 410 deletions

pom.xml

@@ -90,13 +90,20 @@
 			<dependency>
 				<groupId>io.projectreactor</groupId>
 				<artifactId>reactor-bom</artifactId>
-				<version>2020.0.18</version>
+				<version>2020.0.19</version>
 				<type>pom</type>
 				<scope>import</scope>
 			</dependency>
 		</dependencies>
 	</dependencyManagement>
 	<dependencies>
+		<dependency>
+			<groupId>io.projectreactor</groupId>
+			<artifactId>reactor-tools</artifactId>
+			<classifier>original</classifier>
+			<scope>runtime</scope>
+			<version>3.4.18</version>
+		</dependency>
 		<dependency>
 			<groupId>com.google.guava</groupId>
 			<artifactId>guava</artifactId>
@@ -106,6 +113,16 @@
 			<groupId>io.netty</groupId>
 			<artifactId>netty5-buffer</artifactId>
 			<version>5.0.0.Alpha2</version>
+			<exclusions>
+				<exclusion>
+					<groupId>io.netty</groupId>
+					<artifactId>netty-common</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>io.netty</groupId>
+					<artifactId>netty-buffer</artifactId>
+				</exclusion>
+			</exclusions>
 		</dependency>
 		<dependency>
 			<groupId>org.yaml</groupId>
@@ -418,11 +435,11 @@
 			<version>3.12.0</version>
 			<scope>compile</scope>
 		</dependency>
 		<dependency>
 			<groupId>io.projectreactor</groupId>
 			<artifactId>reactor-test</artifactId>
 			<scope>test</scope>
 		</dependency>
 	</dependencies>
 	<build>
 		<testSourceDirectory>src/test/java</testSourceDirectory>
@@ -600,6 +617,18 @@
 					</lifecycleMappingMetadata>
 				</configuration>
 			</plugin>
+			<plugin>
+				<groupId>net.bytebuddy</groupId>
+				<artifactId>byte-buddy-maven-plugin</artifactId>
+				<version>1.12.10</version>
+				<configuration>
+					<transformations>
+						<transformation>
+							<plugin>reactor.tools.agent.ReactorDebugByteBuddyPlugin</plugin>
+						</transformation>
+					</transformations>
+				</configuration>
+			</plugin>
 		</plugins>
 	</pluginManagement>
 </build>
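
The new reactor-tools runtime dependency pairs with the byte-buddy-maven-plugin above: the ReactorDebugByteBuddyPlugin transformation bakes Reactor's assembly-tracking instrumentation into the compiled classes, so operator stack traces point back at the original assembly sites. As a rough sketch (not part of this commit), the same instrumentation can instead be attached at runtime; ReactorDebugAgent is the real reactor-tools entry point, the surrounding class is hypothetical:

import reactor.core.publisher.Flux;
import reactor.tools.agent.ReactorDebugAgent;

public class ReactorDebugExample {
	public static void main(String[] args) {
		// Attach the debug agent before any reactive pipeline is assembled.
		ReactorDebugAgent.init();
		ReactorDebugAgent.processExistingClasses();
		Flux.just(1, 0)
				.map(i -> 10 / i) // ArithmeticException for 0; the trace now names this line
				.subscribe(System.out::println, Throwable::printStackTrace);
	}
}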

LLUtils.java

@@ -797,29 +797,21 @@ public class LLUtils {
 	}
 	public static Mono<Buffer> resolveLLDelta(Mono<LLDelta> prev, UpdateReturnMode updateReturnMode) {
-		return prev.handle((delta, sink) -> {
+		return prev.mapNotNull(delta -> {
 			final Buffer previous = delta.previousUnsafe();
 			final Buffer current = delta.currentUnsafe();
-			switch (updateReturnMode) {
+			return switch (updateReturnMode) {
 				case GET_NEW_VALUE -> {
 					if (previous != null && previous.isAccessible()) {
 						previous.close();
 					}
-					if (current != null) {
-						sink.next(current);
-					} else {
-						sink.complete();
-					}
+					yield current;
 				}
 				case GET_OLD_VALUE -> {
 					if (current != null && current.isAccessible()) {
 						current.close();
 					}
-					if (previous != null) {
-						sink.next(previous);
-					} else {
-						sink.complete();
-					}
+					yield previous;
 				}
 				case NOTHING -> {
 					if (previous != null && previous.isAccessible()) {
@@ -828,10 +820,9 @@ public class LLUtils {
 					if (current != null && current.isAccessible()) {
 						current.close();
 					}
-					sink.complete();
+					yield null;
 				}
-				default -> sink.error(new IllegalStateException());
-			}
+			};
 		});
 	}
@@ -862,27 +853,23 @@ public class LLUtils {
 	public static <U> Mono<Delta<U>> mapLLDelta(Mono<LLDelta> mono,
 			SerializationFunction<@NotNull Buffer, @Nullable U> mapper) {
-		return mono.handle((delta, sink) -> {
-			try (delta) {
-				Buffer prev = delta.previousUnsafe();
-				Buffer curr = delta.currentUnsafe();
-				U newPrev;
-				U newCurr;
-				if (prev != null) {
-					newPrev = mapper.apply(prev);
-				} else {
-					newPrev = null;
-				}
-				if (curr != null) {
-					newCurr = mapper.apply(curr);
-				} else {
-					newCurr = null;
-				}
-				sink.next(new Delta<>(newPrev, newCurr));
-			} catch (SerializationException ex) {
-				sink.error(ex);
-			}
-		});
+		return Mono.usingWhen(mono, delta -> Mono.fromCallable(() -> {
+			Buffer prev = delta.previousUnsafe();
+			Buffer curr = delta.currentUnsafe();
+			U newPrev;
+			U newCurr;
+			if (prev != null) {
+				newPrev = mapper.apply(prev);
+			} else {
+				newPrev = null;
+			}
+			if (curr != null) {
+				newCurr = mapper.apply(curr);
+			} else {
+				newCurr = null;
+			}
+			return new Delta<>(newPrev, newCurr);
+		}), delta -> Mono.fromRunnable(delta::close));
 	}
 	public static <R, V> boolean isDeltaChanged(Delta<V> delta) {
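
The recurring fix in this commit: replace handle plus try-with-resources, which leaks the buffer when the subscriber cancels or never materializes, with Mono.usingWhen, which ties cleanup to the subscription itself. A minimal self-contained sketch of the operator's contract (the Resource class below is a stand-in, not from this codebase):

import reactor.core.publisher.Mono;

public class UsingWhenExample {
	// Hypothetical resource standing in for a native Buffer.
	static class Resource {
		String read() { return "value"; }
		void close() { System.out.println("closed"); }
	}

	public static void main(String[] args) {
		Mono<String> value = Mono.usingWhen(
				Mono.fromCallable(Resource::new),       // acquire on subscribe
				res -> Mono.fromCallable(res::read),    // use: may also complete empty or error
				res -> Mono.fromRunnable(res::close));  // cleanup on complete, error and cancel
		System.out.println(value.block()); // "value"; close() has run as part of completion
	}
}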

SubStageEntry.java (new file)

@@ -0,0 +1,60 @@
+package it.cavallium.dbengine.database;
+
+import it.cavallium.dbengine.database.collections.DatabaseStage;
+import java.util.Map.Entry;
+import java.util.Objects;
+
+public final class SubStageEntry<T, U extends DatabaseStage<?>> implements SafeCloseable, Entry<T, U> {
+
+	private final T key;
+	private final U value;
+
+	public SubStageEntry(T key, U value) {
+		this.key = key;
+		this.value = value;
+	}
+
+	@Override
+	public void close() {
+		if (value != null && value.isAccessible()) {
+			value.close();
+		}
+	}
+
+	@Override
+	public T getKey() {
+		return key;
+	}
+
+	@Override
+	public U getValue() {
+		return value;
+	}
+
+	@Override
+	public U setValue(U value) {
+		throw new UnsupportedOperationException();
+	}
+
+	@Override
+	public boolean equals(Object obj) {
+		if (obj == this)
+			return true;
+		if (obj == null || obj.getClass() != this.getClass())
+			return false;
+		//noinspection rawtypes
+		var that = (SubStageEntry) obj;
+		return Objects.equals(this.key, that.key) && Objects.equals(this.value, that.value);
+	}
+
+	@Override
+	public int hashCode() {
+		return Objects.hash(key, value);
+	}
+
+	@Override
+	public String toString() {
+		return "SubStageEntry[" + "key=" + key + ", " + "value=" + value + ']';
+	}
+}

DatabaseMapDictionary.java

@@ -4,7 +4,6 @@ import static java.util.Objects.requireNonNullElseGet;
 import io.netty5.buffer.api.Buffer;
 import io.netty5.buffer.api.Resource;
-import io.netty5.buffer.api.Send;
 import io.netty5.buffer.api.internal.ResourceSupport;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.BufSupplier;
@@ -14,6 +13,7 @@ import it.cavallium.dbengine.database.LLDictionaryResultType;
 import it.cavallium.dbengine.database.LLEntry;
 import it.cavallium.dbengine.database.LLRange;
 import it.cavallium.dbengine.database.LLUtils;
+import it.cavallium.dbengine.database.SubStageEntry;
 import it.cavallium.dbengine.database.UpdateMode;
 import it.cavallium.dbengine.database.UpdateReturnMode;
 import it.cavallium.dbengine.database.disk.BinarySerializationFunction;
@@ -126,32 +126,31 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 		});
 	}
-	private void deserializeValue(T keySuffix, Buffer value, SynchronousSink<U> sink) {
+	private @Nullable U deserializeValue(T keySuffix, Buffer value) {
 		try {
-			sink.next(valueSerializer.deserialize(value));
+			return valueSerializer.deserialize(value);
 		} catch (IndexOutOfBoundsException ex) {
 			var exMessage = ex.getMessage();
 			if (exMessage != null && exMessage.contains("read 0 to 0, write 0 to ")) {
 				var totalZeroBytesErrors = this.totalZeroBytesErrors.incrementAndGet();
 				if (totalZeroBytesErrors < 512 || totalZeroBytesErrors % 10000 == 0) {
 					try (var keySuffixBytes = serializeKeySuffixToKey(keySuffix)) {
-						LOG.error("Unexpected zero-bytes value at " + dictionary.getDatabaseName()
-								+ ":" + dictionary.getColumnName()
-								+ ":" + LLUtils.toStringSafe(this.keyPrefix)
-								+ ":" + keySuffix + "(" + LLUtils.toStringSafe(keySuffixBytes) + ") total=" + totalZeroBytesErrors);
+						LOG.error(
+								"Unexpected zero-bytes value at "
+										+ dictionary.getDatabaseName() + ":" + dictionary.getColumnName()
+										+ ":" + LLUtils.toStringSafe(this.keyPrefix) + ":" + keySuffix
+										+ "(" + LLUtils.toStringSafe(keySuffixBytes) + ") total=" + totalZeroBytesErrors);
 					} catch (SerializationException e) {
-						LOG.error("Unexpected zero-bytes value at " + dictionary.getDatabaseName()
-								+ ":" + dictionary.getColumnName()
-								+ ":" + LLUtils.toStringSafe(this.keyPrefix)
-								+ ":" + keySuffix + "(?) total=" + totalZeroBytesErrors);
+						LOG.error(
+								"Unexpected zero-bytes value at " + dictionary.getDatabaseName() + ":" + dictionary.getColumnName()
+										+ ":" + LLUtils.toStringSafe(this.keyPrefix) + ":" + keySuffix + "(?) total="
+										+ totalZeroBytesErrors);
 					}
 				}
-				sink.complete();
+				return null;
 			} else {
-				sink.error(ex);
+				throw ex;
 			}
-		} catch (Throwable ex) {
-			sink.error(ex);
 		}
 	}
@@ -278,13 +277,10 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 	@Override
 	public Mono<U> getValue(@Nullable CompositeSnapshot snapshot, T keySuffix, boolean existsAlmostCertainly) {
-		return dictionary
-				.get(resolveSnapshot(snapshot), Mono.fromCallable(() -> serializeKeySuffixToKey(keySuffix)))
-				.handle((valueToReceive, sink) -> {
-					try (valueToReceive) {
-						deserializeValue(keySuffix, valueToReceive, sink);
-					}
-				});
+		return Mono.usingWhen(dictionary
+						.get(resolveSnapshot(snapshot), Mono.fromCallable(() -> serializeKeySuffixToKey(keySuffix))),
+				value -> Mono.fromCallable(() -> deserializeValue(keySuffix, value)),
+				value -> Mono.fromRunnable(value::close));
 	}
 	@Override
@@ -306,13 +302,10 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 	public Mono<U> updateValue(T keySuffix, UpdateReturnMode updateReturnMode,
 			SerializationFunction<@Nullable U, @Nullable U> updater) {
 		var keyMono = Mono.fromCallable(() -> serializeKeySuffixToKey(keySuffix));
-		return dictionary
-				.update(keyMono, getSerializedUpdater(updater), updateReturnMode)
-				.handle((valueToReceive, sink) -> {
-					try (valueToReceive) {
-						deserializeValue(keySuffix, valueToReceive, sink);
-					}
-				});
+		return Mono.usingWhen(dictionary.update(keyMono, getSerializedUpdater(updater), updateReturnMode),
+				result -> Mono.fromCallable(() -> deserializeValue(keySuffix, result)),
+				result -> Mono.fromRunnable(result::close)
+		);
 	}
 	@Override
@@ -320,11 +313,7 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 		var keyMono = Mono.fromCallable(() -> serializeKeySuffixToKey(keySuffix));
 		return dictionary
 				.updateAndGetDelta(keyMono, getSerializedUpdater(updater))
-				.transform(mono -> LLUtils.mapLLDelta(mono, serialized -> {
-					try (serialized) {
-						return valueSerializer.deserialize(serialized);
-					}
-				}));
+				.transform(mono -> LLUtils.mapLLDelta(mono, valueSerializer::deserialize));
 	}
 	public BinarySerializationFunction getSerializedUpdater(SerializationFunction<@Nullable U, @Nullable U> updater) {
@@ -368,26 +357,21 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 	public Mono<U> putValueAndGetPrevious(T keySuffix, U value) {
 		var keyMono = Mono.fromCallable(() -> serializeKeySuffixToKey(keySuffix));
 		var valueMono = Mono.fromCallable(() -> serializeValue(value));
-		return dictionary
-				.put(keyMono, valueMono, LLDictionaryResultType.PREVIOUS_VALUE)
-				.handle((valueToReceive, sink) -> {
-					try (valueToReceive) {
-						deserializeValue(keySuffix, valueToReceive, sink);
-					}
-				});
+		return Mono.usingWhen(dictionary.put(keyMono, valueMono, LLDictionaryResultType.PREVIOUS_VALUE),
+				valueBuf -> Mono.fromCallable(() -> deserializeValue(keySuffix, valueBuf)),
+				valueBuf -> Mono.fromRunnable(valueBuf::close)
+		);
 	}
 	@Override
 	public Mono<Boolean> putValueAndGetChanged(T keySuffix, U value) {
 		var keyMono = Mono.fromCallable(() -> serializeKeySuffixToKey(keySuffix));
 		var valueMono = Mono.fromCallable(() -> serializeValue(value));
-		return dictionary
-				.put(keyMono, valueMono, LLDictionaryResultType.PREVIOUS_VALUE)
-				.handle((Buffer valueBuf, SynchronousSink<U> sink) -> {
-					try (valueBuf) {
-						deserializeValue(keySuffix, valueBuf, sink);
-					}
-				})
+		return Mono
+				.usingWhen(dictionary.put(keyMono, valueMono, LLDictionaryResultType.PREVIOUS_VALUE),
+						valueBuf -> Mono.fromCallable(() -> deserializeValue(keySuffix, valueBuf)),
+						valueBuf -> Mono.fromRunnable(valueBuf::close)
+				)
 				.map(oldValue -> !Objects.equals(oldValue, value))
 				.defaultIfEmpty(value != null);
 	}
@@ -404,13 +388,10 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 	@Override
 	public Mono<U> removeAndGetPrevious(T keySuffix) {
 		var keyMono = Mono.fromCallable(() -> serializeKeySuffixToKey(keySuffix));
-		return dictionary
-				.remove(keyMono, LLDictionaryResultType.PREVIOUS_VALUE)
-				.handle((valueToReceive, sink) -> {
-					try (valueToReceive) {
-						deserializeValue(keySuffix, valueToReceive, sink);
-					}
-				});
+		return Mono.usingWhen(dictionary.remove(keyMono, LLDictionaryResultType.PREVIOUS_VALUE),
+				valueBuf -> Mono.fromCallable(() -> deserializeValue(keySuffix, valueBuf)),
+				valueBuf -> Mono.fromRunnable(valueBuf::close)
+		);
 	}
 	@Override
@@ -433,15 +414,9 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 		});
 		return dictionary
 				.getMulti(resolveSnapshot(snapshot), mappedKeys)
-				.<Optional<U>>handle((valueBufOpt, sink) -> {
+				.handle((valueBufOpt, sink) -> {
 					try {
-						Optional<U> valueOpt;
-						if (valueBufOpt.isPresent()) {
-							valueOpt = Optional.of(valueSerializer.deserialize(valueBufOpt.get()));
-						} else {
-							valueOpt = Optional.empty();
-						}
-						sink.next(valueOpt);
+						sink.next(valueBufOpt.map(valueSerializer::deserialize));
 					} catch (Throwable ex) {
 						sink.error(ex);
 					} finally {
@@ -499,7 +474,7 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 	}
 	@Override
-	public Flux<Entry<T, DatabaseStageEntry<U>>> getAllStages(@Nullable CompositeSnapshot snapshot, boolean smallRange) {
+	public Flux<SubStageEntry<T, DatabaseStageEntry<U>>> getAllStages(@Nullable CompositeSnapshot snapshot, boolean smallRange) {
 		return getAllStages(snapshot, rangeMono, false, smallRange);
 	}
@@ -530,9 +505,8 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 	/**
 	 * Get all stages
 	 * @param reverse if true, the results will go backwards from the specified key (inclusive)
-	 * @param smallRange
 	 */
-	public Flux<Entry<T, DatabaseStageEntry<U>>> getAllStages(@Nullable CompositeSnapshot snapshot,
+	public Flux<SubStageEntry<T, DatabaseStageEntry<U>>> getAllStages(@Nullable CompositeSnapshot snapshot,
 			@Nullable T keyMin,
 			@Nullable T keyMax,
 			boolean reverse,
@@ -540,39 +514,41 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 		if (keyMin == null && keyMax == null) {
 			return getAllStages(snapshot, smallRange);
 		} else {
-			Mono<LLRange> boundedRangeMono = rangeMono
-					.handle((fullRange, sink) -> {
-						try (fullRange) {
-							sink.next(getPatchedRange(fullRange, keyMin, keyMax));
-						} catch (SerializationException e) {
-							sink.error(e);
-						}
-					});
+			Mono<LLRange> boundedRangeMono = Mono.usingWhen(rangeMono,
+					range -> Mono.fromCallable(() -> getPatchedRange(range, keyMin, keyMax)),
+					range -> Mono.fromRunnable(range::close)
+			);
 			return getAllStages(snapshot, boundedRangeMono, reverse, smallRange);
 		}
 	}
-	private Flux<Entry<T, DatabaseStageEntry<U>>> getAllStages(@Nullable CompositeSnapshot snapshot,
+	private Flux<SubStageEntry<T, DatabaseStageEntry<U>>> getAllStages(@Nullable CompositeSnapshot snapshot,
 			Mono<LLRange> sliceRangeMono, boolean reverse, boolean smallRange) {
 		return dictionary
 				.getRangeKeys(resolveSnapshot(snapshot), sliceRangeMono, reverse, smallRange)
-				.handle((keyBuf, sink) -> {
-					try {
-						assert keyBuf.readableBytes() == keyPrefixLength + keySuffixLength + keyExtLength;
-						// Remove prefix. Keep only the suffix and the ext
-						splitPrefix(keyBuf).close();
-						suffixKeyLengthConsistency(keyBuf.readableBytes());
-						T keySuffix;
-						try (var keyBufCopy = keyBuf.copy()) {
-							keySuffix = deserializeSuffix(keyBufCopy);
-						}
-						var subStage = new DatabaseMapSingle<>(dictionary, BufSupplier.ofOwned(toKey(keyBuf)), valueSerializer, null);
-						sink.next(Map.entry(keySuffix, subStage));
-					} catch (Throwable ex) {
-						keyBuf.close();
-						sink.error(ex);
-					}
-				});
+				.flatMapSequential(keyBuf -> Mono
+						.<SubStageEntry<T, DatabaseStageEntry<U>>>fromCallable(() -> {
+							assert keyBuf.readableBytes() == keyPrefixLength + keySuffixLength + keyExtLength;
+							// Remove prefix. Keep only the suffix and the ext
+							splitPrefix(keyBuf).close();
+							suffixKeyLengthConsistency(keyBuf.readableBytes());
+							T keySuffix;
+							try (var keyBufCopy = keyBuf.copy()) {
+								keySuffix = deserializeSuffix(keyBufCopy);
+							}
+							var bufSupplier = BufSupplier.ofOwned(toKey(keyBuf));
+							var subStage = new DatabaseMapSingle<>(dictionary, bufSupplier, valueSerializer, null);
+							return new SubStageEntry<>(keySuffix, subStage);
+						}).doOnCancel(() -> {
+							if (keyBuf.isAccessible()) {
+								keyBuf.close();
+							}
+						}).doOnError(ex -> {
+							if (keyBuf.isAccessible()) {
+								keyBuf.close();
+							}
+						})
+				);
 	}
 	@Override
@@ -583,7 +559,6 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 	/**
 	 * Get all values
 	 * @param reverse if true, the results will go backwards from the specified key (inclusive)
-	 * @param smallRange
 	 */
 	public Flux<Entry<T, U>> getAllValues(@Nullable CompositeSnapshot snapshot,
 			@Nullable T keyMin,
@@ -593,14 +568,9 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 		if (keyMin == null && keyMax == null) {
 			return getAllValues(snapshot, smallRange);
 		} else {
-			Mono<LLRange> boundedRangeMono = rangeMono
-					.handle((fullRange, sink) -> {
-						try (fullRange) {
-							sink.next(getPatchedRange(fullRange, keyMin, keyMax));
-						} catch (SerializationException e) {
-							sink.error(e);
-						}
-					});
+			Mono<LLRange> boundedRangeMono = Mono.usingWhen(rangeMono,
+					range -> Mono.fromCallable(() -> getPatchedRange(range, keyMin, keyMax)),
+					range -> Mono.fromRunnable(range::close));
 			return getAllValues(snapshot, boundedRangeMono, reverse, smallRange);
 		}
 	}
@@ -610,7 +580,7 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 			boolean reverse, boolean smallRange) {
 		return dictionary
 				.getRange(resolveSnapshot(snapshot), sliceRangeMono, reverse, smallRange)
-				.<Entry<T, U>>handle((serializedEntry, sink) -> {
+				.handle((serializedEntry, sink) -> {
 					try {
 						Entry<T, U> entry;
 						try (serializedEntry) {
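
A detail worth noting in the new getAllStages above: the SubStageEntry produced inside fromCallable owns keyBuf, but if the subscriber cancels first (or the callable throws) nothing downstream ever receives it, so the buffer is also released in doOnCancel and doOnError. A standalone sketch of that pattern, reusing the MemoryManager.unsafeWrap call seen in the RocksDB columns below (the surrounding class is illustrative only):

import io.netty5.buffer.api.Buffer;
import io.netty5.buffer.api.MemoryManager;
import reactor.core.publisher.Mono;

public class CancelSafeBufferExample {
	public static void main(String[] args) {
		Buffer keyBuf = MemoryManager.unsafeWrap(new byte[] {1, 2, 3, 4});
		Mono<Integer> length = Mono
				.fromCallable(keyBuf::readableBytes)
				// If the subscriber cancels, or the callable throws, nobody downstream
				// owns keyBuf anymore: release it here, mirroring getAllStages.
				.doOnCancel(() -> {
					if (keyBuf.isAccessible()) {
						keyBuf.close();
					}
				})
				.doOnError(ex -> {
					if (keyBuf.isAccessible()) {
						keyBuf.close();
					}
				});
		System.out.println(length.block()); // 4; on success the consumer closes keyBuf
		keyBuf.close();
	}
}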

DatabaseMapDictionaryDeep.java

@@ -16,6 +16,7 @@ import it.cavallium.dbengine.database.LLRange;
 import it.cavallium.dbengine.database.LLSnapshot;
 import it.cavallium.dbengine.database.LLUtils;
 import it.cavallium.dbengine.database.RangeSupplier;
+import it.cavallium.dbengine.database.SubStageEntry;
 import it.cavallium.dbengine.database.UpdateMode;
 import it.cavallium.dbengine.database.collections.DatabaseEmpty.Nothing;
 import it.cavallium.dbengine.database.serialization.SerializationException;
@@ -370,7 +371,7 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> extend
 	}
 	@Override
-	public Flux<Entry<T, US>> getAllStages(@Nullable CompositeSnapshot snapshot, boolean smallRange) {
+	public Flux<SubStageEntry<T, US>> getAllStages(@Nullable CompositeSnapshot snapshot, boolean smallRange) {
 		return dictionary
 				.getRangeKeyPrefixes(resolveSnapshot(snapshot), rangeMono, keyPrefixLength + keySuffixLength, smallRange)
 				.flatMapSequential(groupKeyWithoutExtSend_ -> Mono.using(
@@ -381,7 +382,7 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> extend
 					T deserializedSuffix;
 					try (var splittedGroupSuffix = splitGroupSuffix(groupKeyWithoutExtSend)) {
 						deserializedSuffix = this.deserializeSuffix(splittedGroupSuffix);
-						sink.next(Map.entry(deserializedSuffix, us));
+						sink.next(new SubStageEntry<>(deserializedSuffix, us));
 					} catch (SerializationException ex) {
 						sink.error(ex);
 					}

DatabaseMapDictionaryHashed.java

@@ -10,6 +10,7 @@ import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.LLDictionary;
 import it.cavallium.dbengine.database.LLUtils;
 import io.netty5.buffer.api.internal.ResourceSupport;
+import it.cavallium.dbengine.database.SubStageEntry;
 import it.cavallium.dbengine.database.UpdateMode;
 import it.cavallium.dbengine.database.serialization.Serializer;
 import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
@@ -195,7 +196,7 @@ public class DatabaseMapDictionaryHashed<T, U, TH> extends
 	public Mono<DatabaseStageEntry<U>> at(@Nullable CompositeSnapshot snapshot, T key) {
 		return this
 				.atPrivate(snapshot, key, keySuffixHashFunction.apply(key))
-				.map(cast -> (DatabaseStageEntry<U>) cast);
+				.map(cast -> cast);
 	}
 	private Mono<DatabaseSingleBucket<T, U, TH>> atPrivate(@Nullable CompositeSnapshot snapshot, T key, TH hash) {
@@ -210,7 +211,8 @@ public class DatabaseMapDictionaryHashed<T, U, TH> extends
 	}
 	@Override
-	public Flux<Entry<T, DatabaseStageEntry<U>>> getAllStages(@Nullable CompositeSnapshot snapshot, boolean smallRange) {
+	public Flux<SubStageEntry<T, DatabaseStageEntry<U>>> getAllStages(@Nullable CompositeSnapshot snapshot,
+			boolean smallRange) {
 		return subDictionary
 				.getAllValues(snapshot, smallRange)
 				.map(Entry::getValue)
@@ -218,8 +220,7 @@ public class DatabaseMapDictionaryHashed<T, U, TH> extends
 				.flatMap(bucket -> Flux
 						.fromIterable(bucket)
 						.map(Entry::getKey)
-						.flatMap(key -> this.at(snapshot, key).map(stage -> Map.entry(key, stage)))
-				);
+						.flatMap(key -> this.at(snapshot, key).map(stage -> new SubStageEntry<>(key, stage))));
 	}
 	@Override

DatabaseMapSingle.java

@@ -167,11 +167,7 @@ public class DatabaseMapSingle<U> extends ResourceSupport<DatabaseStage<U>, Data
 			} else {
 				return serializeValue(result);
 			}
-		}).transform(mono -> LLUtils.mapLLDelta(mono, serialized -> {
-			try (serialized) {
-				return serializer.deserialize(serialized);
-			}
-		}));
+		}).transform(mono -> LLUtils.mapLLDelta(mono, serializer::deserialize));
 	}
 	@Override

DatabaseSingleBucket.java

@@ -124,17 +124,15 @@ public class DatabaseSingleBucket<K, V, TH>
 	@Override
 	public Mono<Delta<V>> updateAndGetDelta(SerializationFunction<@Nullable V, @Nullable V> updater) {
-		return bucketStage
-				.updateAndGetDelta(oldBucket -> {
-					V oldValue = extractValue(oldBucket);
-					var result = updater.apply(oldValue);
-					if (result == null) {
-						return this.removeValueOrDelete(oldBucket);
-					} else {
-						return this.insertValueOrCreate(oldBucket, result);
-					}
-				})
-				.transform(mono -> LLUtils.mapDelta(mono, this::extractValue));
+		return bucketStage.updateAndGetDelta(oldBucket -> {
+			V oldValue = extractValue(oldBucket);
+			var result = updater.apply(oldValue);
+			if (result == null) {
+				return this.removeValueOrDelete(oldBucket);
+			} else {
+				return this.insertValueOrCreate(oldBucket, result);
+			}
+		}).transform(mono -> LLUtils.mapDelta(mono, this::extractValue));
 	}
 	@Override

DatabaseSingleton.java

@@ -164,11 +164,7 @@ public class DatabaseSingleton<U> extends ResourceSupport<DatabaseStage<U>, Data
 				return serializeValue(result);
 			}
 		}
-		}).transform(mono -> LLUtils.mapLLDelta(mono, serialized -> {
-			try (serialized) {
-				return serializer.deserialize(serialized);
-			}
-		}));
+		}).transform(mono -> LLUtils.mapLLDelta(mono, serializer::deserialize));
 	}
 	@Override

DatabaseStageMap.java

@@ -3,6 +3,7 @@ package it.cavallium.dbengine.database.collections;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.Delta;
 import it.cavallium.dbengine.database.LLUtils;
+import it.cavallium.dbengine.database.SubStageEntry;
 import it.cavallium.dbengine.database.UpdateMode;
 import it.cavallium.dbengine.database.UpdateReturnMode;
 import it.cavallium.dbengine.database.serialization.KVSerializationFunction;
@@ -120,7 +121,7 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends
 		return entries.flatMap(entry -> this.putValue(entry.getKey(), entry.getValue())).then();
 	}
-	Flux<Entry<T, US>> getAllStages(@Nullable CompositeSnapshot snapshot, boolean smallRange);
+	Flux<SubStageEntry<T, US>> getAllStages(@Nullable CompositeSnapshot snapshot, boolean smallRange);
 	default Flux<Entry<T, U>> getAllValues(@Nullable CompositeSnapshot snapshot, boolean smallRange) {
 		return this

LLLocalDictionary.java

@@ -421,26 +421,33 @@ public class LLLocalDictionary implements LLDictionary {
 				case GET_NEW_VALUE -> UpdateAtomicResultMode.CURRENT;
 				case GET_OLD_VALUE -> UpdateAtomicResultMode.PREVIOUS;
 			};
-			UpdateAtomicResult result;
-			var readOptions = generateReadOptionsOrStatic(null);
-			startedUpdates.increment();
-			try (var writeOptions = new WriteOptions()) {
-				result = updateTime.recordCallable(() -> db.updateAtomic(readOptions, writeOptions, key, updater, returnMode));
-			} finally {
-				endedUpdates.increment();
-				if (readOptions != EMPTY_READ_OPTIONS) {
-					readOptions.close();
-				}
-			}
-			assert result != null;
-			return switch (updateReturnMode) {
-				case NOTHING -> {
-					result.close();
-					yield null;
-				}
-				case GET_NEW_VALUE -> ((UpdateAtomicResultCurrent) result).current();
-				case GET_OLD_VALUE -> ((UpdateAtomicResultPrevious) result).previous();
-			};
+			UpdateAtomicResult result = null;
+			try {
+				var readOptions = generateReadOptionsOrStatic(null);
+				startedUpdates.increment();
+				try (var writeOptions = new WriteOptions()) {
+					result = updateTime.recordCallable(() -> db.updateAtomic(readOptions, writeOptions, key, updater, returnMode));
+				} finally {
+					endedUpdates.increment();
+					if (readOptions != EMPTY_READ_OPTIONS) {
+						readOptions.close();
+					}
+				}
+				assert result != null;
+				return switch (updateReturnMode) {
+					case NOTHING -> {
+						result.close();
+						yield null;
+					}
+					case GET_NEW_VALUE -> ((UpdateAtomicResultCurrent) result).current();
+					case GET_OLD_VALUE -> ((UpdateAtomicResultPrevious) result).previous();
+				};
+			} catch (Throwable ex) {
+				if (result != null) {
+					result.close();
+				}
+				throw ex;
+			}
 		}), key -> Mono.fromRunnable(key::close));
 	}
@@ -458,19 +465,27 @@ public class LLLocalDictionary implements LLDictionary {
 					+ "safe atomic operations");
 			}
-			UpdateAtomicResult result;
-			var readOptions = generateReadOptionsOrStatic(null);
-			startedUpdates.increment();
-			try (var writeOptions = new WriteOptions()) {
-				result = updateTime.recordCallable(() -> db.updateAtomic(readOptions, writeOptions, key, updater, DELTA));
-			} finally {
-				endedUpdates.increment();
-				if (readOptions != EMPTY_READ_OPTIONS) {
-					readOptions.close();
-				}
-			}
-			assert result != null;
-			return ((UpdateAtomicResultDelta) result).delta();
+			UpdateAtomicResultDelta result = null;
+			try {
+				var readOptions = generateReadOptionsOrStatic(null);
+				startedUpdates.increment();
+				try (var writeOptions = new WriteOptions()) {
+					result = updateTime.recordCallable(() ->
+							(UpdateAtomicResultDelta) db.updateAtomic(readOptions, writeOptions, key, updater, DELTA));
+				} finally {
+					endedUpdates.increment();
+					if (readOptions != EMPTY_READ_OPTIONS) {
+						readOptions.close();
+					}
+				}
+				assert result != null;
+				return result.delta();
+			} catch (Throwable ex) {
+				if (result != null && result.delta().isAccessible()) {
+					result.close();
+				}
+				throw ex;
+			}
 		}), key -> Mono.fromRunnable(key::close));
 	}
@@ -938,7 +953,7 @@ public class LLLocalDictionary implements LLDictionary {
 		if (USE_WINDOW_IN_SET_RANGE) {
 			return Mono
 					.usingWhen(rangeMono, range -> runOnDb(true, () -> {
-						try (var writeOptions = new WriteOptions(); range) {
+						try (var writeOptions = new WriteOptions()) {
 							assert !Schedulers.isInNonBlockingThread() : "Called setRange in a nonblocking thread";
 							if (!USE_WRITE_BATCH_IN_SET_RANGE_DELETE || !USE_WRITE_BATCHES_IN_SET_RANGE) {
 								try (var opts = LLUtils.generateCustomReadOptions(null, true, isBoundedRange(range), smallRange)) {
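
Both update methods above now follow the same shape: declare the result as null before the try, and on any Throwable close whatever was already assigned before rethrowing, so a failure between producing the UpdateAtomicResult and returning it can no longer leak. A generic sketch of that close-on-failure shape (Resource and the functional types below are stand-ins, not project API):

import java.util.function.Function;
import java.util.function.Supplier;

public class CloseOnFailureSketch {
	interface Resource extends AutoCloseable {
		@Override
		void close(); // unchecked close, like UpdateAtomicResult (assumed)
	}

	static <R> R produceThenMap(Supplier<Resource> produce, Function<Resource, R> map) {
		Resource result = null;
		try {
			result = produce.get();
			return map.apply(result); // may throw, e.g. a failing cast or deserialization
		} catch (Throwable ex) {
			if (result != null) {
				result.close(); // release the already-produced result before rethrowing
			}
			throw ex;
		}
	}
}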

OptimisticRocksDBColumn.java

@@ -96,33 +96,37 @@ public final class OptimisticRocksDBColumn extends AbstractRocksDBColumn<Optimis
 			boolean committedSuccessfully;
 			int retries = 0;
 			ExponentialPageLimits retryTime = null;
-			Buffer sentPrevData = null;
-			Buffer sentCurData = null;
-			boolean changed;
-			do {
-				var prevDataArray = tx.getForUpdate(readOptions, cfh, keyArray, true);
-				if (logger.isTraceEnabled()) {
-					logger.trace(MARKER_ROCKSDB,
-							"Reading {}: {} (before update)",
-							LLUtils.toStringSafe(key),
-							LLUtils.toStringSafe(prevDataArray)
-					);
-				}
-				Buffer prevData;
-				if (prevDataArray != null) {
-					prevData = MemoryManager.unsafeWrap(prevDataArray);
-					prevDataArray = null;
-				} else {
-					prevData = null;
-				}
-				try (prevData) {
+			Buffer prevData = null;
+			Buffer newData = null;
+			try {
+				boolean changed;
+				do {
+					if (prevData != null && prevData.isAccessible()) {
+						prevData.close();
+					}
+					if (newData != null && newData.isAccessible()) {
+						newData.close();
+					}
+					var prevDataArray = tx.getForUpdate(readOptions, cfh, keyArray, true);
+					if (logger.isTraceEnabled()) {
+						logger.trace(MARKER_ROCKSDB,
+								"Reading {}: {} (before update)",
+								LLUtils.toStringSafe(key),
+								LLUtils.toStringSafe(prevDataArray)
+						);
+					}
+					if (prevDataArray != null) {
+						prevData = MemoryManager.unsafeWrap(prevDataArray);
+						prevDataArray = null;
+					} else {
+						prevData = null;
+					}
 					Buffer prevDataToSendToUpdater;
 					if (prevData != null) {
 						prevDataToSendToUpdater = prevData.copy().makeReadOnly();
 					} else {
 						prevDataToSendToUpdater = null;
 					}
-					@Nullable Buffer newData;
 					try {
 						newData = updater.apply(prevDataToSendToUpdater);
 					} finally {
@@ -130,119 +134,111 @@ public final class OptimisticRocksDBColumn extends AbstractRocksDBColumn<Optimis
 						prevDataToSendToUpdater.close();
 					}
 				}
-				try (newData) {
-					var newDataArray = newData == null ? null : LLUtils.toArray(newData);
-					if (logger.isTraceEnabled()) {
-						logger.trace(MARKER_ROCKSDB,
-								"Updating {}. previous data: {}, updated data: {}",
-								LLUtils.toStringSafe(key),
-								LLUtils.toStringSafe(prevDataArray),
-								LLUtils.toStringSafe(newDataArray)
-						);
-					}
-					if (prevData != null && newData == null) {
-						if (logger.isTraceEnabled()) {
-							logger.trace(MARKER_ROCKSDB, "Deleting {} (after update)", LLUtils.toStringSafe(key));
-						}
-						tx.delete(cfh, keyArray, true);
-						changed = true;
-						committedSuccessfully = commitOptimistically(tx);
-					} else if (newData != null && (prevData == null || !LLUtils.equals(prevData, newData))) {
-						if (logger.isTraceEnabled()) {
-							logger.trace(MARKER_ROCKSDB,
-									"Writing {}: {} (after update)",
-									LLUtils.toStringSafe(key),
-									LLUtils.toStringSafe(newData)
-							);
-						}
-						tx.put(cfh, keyArray, newDataArray);
-						changed = true;
-						committedSuccessfully = commitOptimistically(tx);
-					} else {
-						changed = false;
-						committedSuccessfully = true;
-						tx.rollback();
-					}
-					if (sentPrevData != null && sentPrevData.isAccessible()) {
-						sentPrevData.close();
-					}
-					if (sentCurData != null && sentCurData.isAccessible()) {
-						sentCurData.close();
-					}
-					sentPrevData = prevData == null ? null : prevData.copy();
-					sentCurData = newData == null ? null : newData.copy();
-					if (!committedSuccessfully) {
-						tx.undoGetForUpdate(cfh, keyArray);
-						tx.rollback();
-						if (sentPrevData != null && sentPrevData.isAccessible()) {
-							sentPrevData.close();
-						}
-						if (sentCurData != null && sentCurData.isAccessible()) {
-							sentCurData.close();
-						}
-						retries++;
+					var newDataArray = newData == null ? null : LLUtils.toArray(newData);
+					if (logger.isTraceEnabled()) {
+						logger.trace(MARKER_ROCKSDB,
+								"Updating {}. previous data: {}, updated data: {}",
+								LLUtils.toStringSafe(key),
+								LLUtils.toStringSafe(prevDataArray),
+								LLUtils.toStringSafe(newDataArray)
+						);
+					}
+					if (prevData != null && newData == null) {
+						if (logger.isTraceEnabled()) {
+							logger.trace(MARKER_ROCKSDB, "Deleting {} (after update)", LLUtils.toStringSafe(key));
+						}
+						tx.delete(cfh, keyArray, true);
+						changed = true;
+						committedSuccessfully = commitOptimistically(tx);
+					} else if (newData != null && (prevData == null || !LLUtils.equals(prevData, newData))) {
+						if (logger.isTraceEnabled()) {
+							logger.trace(MARKER_ROCKSDB,
+									"Writing {}: {} (after update)",
+									LLUtils.toStringSafe(key),
+									LLUtils.toStringSafe(newData)
+							);
+						}
+						tx.put(cfh, keyArray, newDataArray);
+						changed = true;
+						committedSuccessfully = commitOptimistically(tx);
+					} else {
+						changed = false;
+						committedSuccessfully = true;
+						tx.rollback();
+					}
+					if (!committedSuccessfully) {
+						tx.undoGetForUpdate(cfh, keyArray);
+						tx.rollback();
+						retries++;
 						if (retries == 1) {
 							retryTime = new ExponentialPageLimits(0, 2, 2000);
 						}
 						long retryNs = 1000000L * retryTime.getPageLimit(retries);
 						// +- 30%
 						retryNs = retryNs + ThreadLocalRandom.current().nextLong(-retryNs * 30L / 100L, retryNs * 30L / 100L);
 						if (retries >= 5 && retries % 5 == 0 || ALWAYS_PRINT_OPTIMISTIC_RETRIES) {
 							logger.warn(MARKER_ROCKSDB, "Failed optimistic transaction {} (update):"
 									+ " waiting {} ms before retrying for the {} time", LLUtils.toStringSafe(key), retryNs / 1000000d, retries);
 						} else if (logger.isDebugEnabled(MARKER_ROCKSDB)) {
 							logger.debug(MARKER_ROCKSDB, "Failed optimistic transaction {} (update):"
 									+ " waiting {} ms before retrying for the {} time", LLUtils.toStringSafe(key), retryNs / 1000000d, retries);
 						}
 						// Wait for n milliseconds
 						if (retryNs > 0) {
 							LockSupport.parkNanos(retryNs);
 						}
 					}
-				}
-			} while (!committedSuccessfully);
-			if (retries > 5) {
-				logger.warn(MARKER_ROCKSDB, "Took {} retries to update key {}", retries, LLUtils.toStringSafe(key));
-			}
-			recordAtomicUpdateTime(changed, sentPrevData != null, sentCurData != null, initNanoTime);
-			optimisticAttempts.record(retries);
-			return switch (returnMode) {
-				case NOTHING -> {
-					if (sentPrevData != null) {
-						sentPrevData.close();
-					}
-					if (sentCurData != null) {
-						sentCurData.close();
-					}
-					yield RESULT_NOTHING;
-				}
-				case CURRENT -> {
-					if (sentPrevData != null) {
-						sentPrevData.close();
-					}
-					yield new UpdateAtomicResultCurrent(sentCurData);
-				}
-				case PREVIOUS -> {
-					if (sentCurData != null) {
-						sentCurData.close();
-					}
-					yield new UpdateAtomicResultPrevious(sentPrevData);
-				}
-				case BINARY_CHANGED -> {
-					if (sentPrevData != null) {
-						sentPrevData.close();
-					}
-					if (sentCurData != null) {
-						sentCurData.close();
-					}
-					yield new UpdateAtomicResultBinaryChanged(changed);
-				}
-				case DELTA -> new UpdateAtomicResultDelta(LLDelta.of(sentPrevData, sentCurData));
-			};
+				} while (!committedSuccessfully);
+				if (retries > 5) {
+					logger.warn(MARKER_ROCKSDB, "Took {} retries to update key {}", retries, LLUtils.toStringSafe(key));
+				}
+				recordAtomicUpdateTime(changed, prevData != null, newData != null, initNanoTime);
+				optimisticAttempts.record(retries);
+				return switch (returnMode) {
+					case NOTHING -> {
+						if (prevData != null) {
+							prevData.close();
+						}
+						if (newData != null) {
+							newData.close();
+						}
+						yield RESULT_NOTHING;
+					}
+					case CURRENT -> {
+						if (prevData != null) {
+							prevData.close();
+						}
+						yield new UpdateAtomicResultCurrent(newData);
+					}
+					case PREVIOUS -> {
+						if (newData != null) {
+							newData.close();
+						}
+						yield new UpdateAtomicResultPrevious(prevData);
+					}
+					case BINARY_CHANGED -> {
+						if (prevData != null) {
+							prevData.close();
+						}
+						if (newData != null) {
+							newData.close();
+						}
+						yield new UpdateAtomicResultBinaryChanged(changed);
+					}
+					case DELTA -> new UpdateAtomicResultDelta(LLDelta.of(prevData, newData));
+				};
+			} catch (Throwable ex) {
+				if (prevData != null && prevData.isAccessible()) {
+					prevData.close();
+				}
+				if (newData != null && newData.isAccessible()) {
+					newData.close();
+				}
+				throw ex;
+			}
 		} catch (Throwable ex) {
 			throw new IOException("Failed to update key " + LLUtils.toStringSafe(key), ex);
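
The retry pacing above (unchanged by this commit, but now wrapped in the new try/catch) is exponential backoff with +-30% jitter, so competing optimistic writers do not retry in lockstep. A standalone model of the arithmetic follows; ExponentialPageLimits is internal to this project, so the doubling-capped-at-2000 stand-in below is an assumption:

import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.locks.LockSupport;

public class BackoffSketch {
	public static void main(String[] args) {
		for (int retries = 1; retries <= 6; retries++) {
			long pageLimit = Math.min(2000L, 2L << retries); // stand-in for retryTime.getPageLimit(retries)
			long retryNs = 1_000_000L * pageLimit;
			// +- 30% jitter, as in the code above
			retryNs += ThreadLocalRandom.current().nextLong(-retryNs * 30L / 100L, retryNs * 30L / 100L);
			System.out.printf("retry %d: waiting %.2f ms%n", retries, retryNs / 1e6);
			LockSupport.parkNanos(retryNs);
		}
	}
}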

PessimisticRocksDBColumn.java

@@ -64,30 +64,28 @@ public final class PessimisticRocksDBColumn extends AbstractRocksDBColumn<Transa
 			}
 			try (var txOpts = new TransactionOptions();
 					var tx = beginTransaction(writeOptions, txOpts)) {
-				Buffer sentPrevData;
-				Buffer sentCurData;
-				boolean changed;
-				if (logger.isTraceEnabled()) {
-					logger.trace(MARKER_ROCKSDB, "Reading {} (before update lock)", LLUtils.toStringSafe(key));
-				}
-				var prevDataArray = tx.getForUpdate(readOptions, cfh, keyArray, true);
-				try {
-					if (logger.isTraceEnabled()) {
-						logger.trace(MARKER_ROCKSDB,
-								"Reading {}: {} (before update)",
-								LLUtils.toStringSafe(key),
-								LLUtils.toStringSafe(prevDataArray)
-						);
-					}
-					Buffer prevData;
-					if (prevDataArray != null) {
-						readValueFoundWithoutBloomBufferSize.record(prevDataArray.length);
-						prevData = MemoryManager.unsafeWrap(prevDataArray);
-					} else {
-						readValueNotFoundWithoutBloomBufferSize.record(0);
-						prevData = null;
-					}
-					try (prevData) {
+				Buffer prevData = null;
+				Buffer newData = null;
+				try {
+					boolean changed;
+					if (logger.isTraceEnabled()) {
+						logger.trace(MARKER_ROCKSDB, "Reading {} (before update lock)", LLUtils.toStringSafe(key));
+					}
+					var prevDataArray = tx.getForUpdate(readOptions, cfh, keyArray, true);
+					try {
+						if (logger.isTraceEnabled()) {
+							logger.trace(MARKER_ROCKSDB,
+									"Reading {}: {} (before update)",
+									LLUtils.toStringSafe(key),
+									LLUtils.toStringSafe(prevDataArray)
+							);
+						}
+						if (prevDataArray != null) {
+							readValueFoundWithoutBloomBufferSize.record(prevDataArray.length);
+							prevData = MemoryManager.unsafeWrap(prevDataArray);
+						} else {
+							readValueNotFoundWithoutBloomBufferSize.record(0);
+						}
 						Buffer prevDataToSendToUpdater;
 						if (prevData != null) {
 							prevDataToSendToUpdater = prevData.copy().makeReadOnly();
@@ -95,7 +93,6 @@ public final class PessimisticRocksDBColumn extends AbstractRocksDBColumn<Transa
 							prevDataToSendToUpdater = null;
 						}
-						@Nullable Buffer newData;
 						try {
 							newData = updater.apply(prevDataToSendToUpdater);
 						} finally {
@@ -103,81 +100,85 @@ public final class PessimisticRocksDBColumn extends AbstractRocksDBColumn<Transa
 								prevDataToSendToUpdater.close();
 							}
 						}
-						try (newData) {
-							var newDataArray = newData == null ? null : LLUtils.toArray(newData);
-							if (logger.isTraceEnabled()) {
-								logger.trace(MARKER_ROCKSDB,
-										"Updating {}. previous data: {}, updated data: {}",
-										LLUtils.toStringSafe(key),
-										LLUtils.toStringSafe(prevDataArray),
-										LLUtils.toStringSafe(newDataArray)
-								);
-							}
-							if (prevData != null && newData == null) {
-								if (logger.isTraceEnabled()) {
-									logger.trace(MARKER_ROCKSDB, "Deleting {} (after update)", LLUtils.toStringSafe(key));
-								}
-								writeValueBufferSize.record(0);
-								tx.delete(cfh, keyArray, true);
-								changed = true;
-								tx.commit();
-							} else if (newData != null && (prevData == null || !LLUtils.equals(prevData, newData))) {
-								if (logger.isTraceEnabled()) {
-									logger.trace(MARKER_ROCKSDB,
-											"Writing {}: {} (after update)",
-											LLUtils.toStringSafe(key),
-											LLUtils.toStringSafe(newData)
-									);
-								}
-								writeValueBufferSize.record(newDataArray.length);
-								tx.put(cfh, keyArray, newDataArray);
-								changed = true;
-								tx.commit();
-							} else {
-								changed = false;
-								tx.rollback();
-							}
-							sentPrevData = prevData == null ? null : prevData.copy();
-							sentCurData = newData == null ? null : newData.copy();
-						}
-					}
-				} finally {
-					tx.undoGetForUpdate(cfh, keyArray);
-				}
-				recordAtomicUpdateTime(changed, sentPrevData != null, sentCurData != null, initNanoTime);
-				return switch (returnMode) {
-					case NOTHING -> {
-						if (sentPrevData != null) {
-							sentPrevData.close();
-						}
-						if (sentCurData != null) {
-							sentCurData.close();
-						}
-						yield RESULT_NOTHING;
-					}
-					case CURRENT -> {
-						if (sentPrevData != null) {
-							sentPrevData.close();
-						}
-						yield new UpdateAtomicResultCurrent(sentCurData);
-					}
-					case PREVIOUS -> {
-						if (sentCurData != null) {
-							sentCurData.close();
-						}
-						yield new UpdateAtomicResultPrevious(sentPrevData);
-					}
-					case BINARY_CHANGED -> {
-						if (sentPrevData != null) {
-							sentPrevData.close();
-						}
-						if (sentCurData != null) {
-							sentCurData.close();
-						}
-						yield new UpdateAtomicResultBinaryChanged(changed);
-					}
-					case DELTA -> new UpdateAtomicResultDelta(LLDelta.of(sentPrevData, sentCurData));
-				};
+						var newDataArray = newData == null ? null : LLUtils.toArray(newData);
+						if (logger.isTraceEnabled()) {
+							logger.trace(MARKER_ROCKSDB,
+									"Updating {}. previous data: {}, updated data: {}",
+									LLUtils.toStringSafe(key),
+									LLUtils.toStringSafe(prevDataArray),
+									LLUtils.toStringSafe(newDataArray)
+							);
+						}
+						if (prevData != null && newData == null) {
+							if (logger.isTraceEnabled()) {
+								logger.trace(MARKER_ROCKSDB, "Deleting {} (after update)", LLUtils.toStringSafe(key));
+							}
+							writeValueBufferSize.record(0);
+							tx.delete(cfh, keyArray, true);
+							changed = true;
+							tx.commit();
+						} else if (newData != null && (prevData == null || !LLUtils.equals(prevData, newData))) {
+							if (logger.isTraceEnabled()) {
+								logger.trace(MARKER_ROCKSDB,
+										"Writing {}: {} (after update)",
+										LLUtils.toStringSafe(key),
+										LLUtils.toStringSafe(newData)
+								);
+							}
+							writeValueBufferSize.record(newDataArray.length);
+							tx.put(cfh, keyArray, newDataArray);
+							changed = true;
+							tx.commit();
+						} else {
+							changed = false;
+							tx.rollback();
+						}
+					} finally {
+						tx.undoGetForUpdate(cfh, keyArray);
+					}
+					recordAtomicUpdateTime(changed, prevData != null, newData != null, initNanoTime);
+					return switch (returnMode) {
+						case NOTHING -> {
+							if (prevData != null) {
+								prevData.close();
+							}
+							if (newData != null) {
+								newData.close();
+							}
+							yield RESULT_NOTHING;
+						}
+						case CURRENT -> {
+							if (prevData != null) {
+								prevData.close();
+							}
+							yield new UpdateAtomicResultCurrent(newData);
+						}
+						case PREVIOUS -> {
+							if (newData != null) {
+								newData.close();
+							}
+							yield new UpdateAtomicResultPrevious(prevData);
+						}
+						case BINARY_CHANGED -> {
+							if (prevData != null) {
+								prevData.close();
+							}
+							if (newData != null) {
+								newData.close();
+							}
+							yield new UpdateAtomicResultBinaryChanged(changed);
+						}
+						case DELTA -> new UpdateAtomicResultDelta(LLDelta.of(prevData, newData));
+					};
+				} catch (Throwable ex) {
+					if (prevData != null && prevData.isAccessible()) {
+						prevData.close();
+					}
+					if (newData != null && newData.isAccessible()) {
+						newData.close();
+					}
+					throw ex;
+				}
 			}
 		} catch (Throwable ex) {
 			throw new IOException("Failed to update key " + LLUtils.toStringSafe(key), ex);