Low-level sst entry

Andrea Cavalli 2024-10-02 12:18:33 +02:00
parent 86377a4e65
commit 6af06ca90e
4 changed files with 68 additions and 36 deletions

it/cavallium/dbengine/database/collections/DatabaseMapDictionary.java

@@ -2,7 +2,6 @@ package it.cavallium.dbengine.database.collections;
import static it.cavallium.dbengine.utils.StreamUtils.resourceStream;
import com.google.common.collect.Collections2;
import com.google.common.collect.Lists;
import it.cavallium.buffer.Buf;
import it.cavallium.buffer.BufDataInput;
@@ -20,7 +19,6 @@ import it.cavallium.dbengine.database.UpdateMode;
import it.cavallium.dbengine.database.UpdateReturnMode;
import it.cavallium.dbengine.database.disk.CachedSerializationFunction;
import it.cavallium.dbengine.database.disk.LLLocalDictionary;
-import it.cavallium.dbengine.database.disk.RocksDBFile;
import it.cavallium.dbengine.database.disk.RocksDBFile.RocksDBFileIterationKeyState.RocksDBFileIterationStateKeyError;
import it.cavallium.dbengine.database.disk.RocksDBFile.RocksDBFileIterationKeyState.RocksDBFileIterationStateKeyOk;
import it.cavallium.dbengine.database.disk.RocksDBFile.RocksDBFileIterationState.RocksDBFileIterationStateBegin;
@@ -36,7 +34,6 @@ import it.cavallium.dbengine.utils.StreamUtils;
import it.unimi.dsi.fastutil.objects.Object2ObjectLinkedOpenHashMap;
import it.unimi.dsi.fastutil.objects.Object2ObjectSortedMap;
import it.unimi.dsi.fastutil.objects.Object2ObjectSortedMaps;
-import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
@@ -568,37 +565,52 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
		}
	}

-	public static <T, U> List<Stream<Entry<T, U>>> getAllEntriesFastUnsafe(DatabaseMapDictionary<T, U> dict,
-			BiConsumer<Entry<Buf, Buf>, Throwable> deserializationErrorHandler) {
+	public static <T, U> List<Stream<UnsafeSSTEntry<T, U>>> getAllEntriesFastUnsafe(DatabaseMapDictionary<T, U> dict,
+			boolean disableRocksdbChecks,
+			BiConsumer<UnsafeRawSSTEntry<T, U>, Throwable> deserializationErrorHandler) {
		try {
			var liveFiles = ((LLLocalDictionary) dict.dictionary).getAllLiveFiles();
-			return Lists.transform(liveFiles, file -> file.iterate(new SSTRangeFull()).map(state -> switch (state) {
-				case RocksDBFileIterationStateBegin rocksDBFileIterationStateBegin:
-					yield null;
-				case RocksDBFileIterationStateEnd rocksDBFileIterationStateEnd:
-					yield null;
-				case RocksDBFileIterationStateKey rocksDBFileIterationStateKey:
-					yield switch (rocksDBFileIterationStateKey.state()) {
-						case RocksDBFileIterationStateKeyError e -> null;
-						case RocksDBFileIterationStateKeyOk rocksDBFileIterationStateKeyOk -> {
-							try {
-								yield Map.entry(dict.deserializeSuffix(BufDataInput.create(rocksDBFileIterationStateKey.key())),
-										dict.deserializeValue(rocksDBFileIterationStateKeyOk.value())
-								);
-							} catch (Throwable t) {
-								if (deserializationErrorHandler != null) {
-									deserializationErrorHandler.accept(Map.entry(rocksDBFileIterationStateKey.key().copy(),
-											rocksDBFileIterationStateKeyOk.value().copy()
-									), t);
-									yield null;
-								} else {
-									throw t;
-								}
-							}
-						}
-					};
-			}).filter(Objects::nonNull));
+			return Lists.transform(liveFiles, file -> file.iterate(new SSTRangeFull(), disableRocksdbChecks)
+					.map(state -> switch (state) {
+						case RocksDBFileIterationStateBegin rocksDBFileIterationStateBegin:
+							yield null;
+						case RocksDBFileIterationStateEnd rocksDBFileIterationStateEnd:
+							yield null;
+						case RocksDBFileIterationStateKey rocksDBFileIterationStateKey:
+							yield switch (rocksDBFileIterationStateKey.state()) {
+								case RocksDBFileIterationStateKeyError e -> null;
+								case RocksDBFileIterationStateKeyOk rocksDBFileIterationStateKeyOk -> {
+									var key = rocksDBFileIterationStateKey.key();
+									var value = rocksDBFileIterationStateKeyOk.value();
+									try {
+										var deserializedKey = dict.deserializeSuffix(BufDataInput.create(key));
+										var deserializedValue = dict.deserializeValue(value);
+										yield new UnsafeSSTEntry<>(file,
+												deserializedKey,
+												deserializedValue,
+												key,
+												value,
+												k -> dict.deserializeSuffix(BufDataInput.create(k)),
+												dict::deserializeValue
+										);
+									} catch (Throwable t) {
+										if (deserializationErrorHandler != null) {
+											deserializationErrorHandler.accept(new UnsafeRawSSTEntry<>(file,
+													key,
+													value,
+													k -> dict.deserializeSuffix(BufDataInput.create(k)),
+													dict::deserializeValue
+											), t);
+											yield null;
+										} else {
+											throw t;
+										}
+									}
+								}
+							};
+					})
+					.filter(Objects::nonNull));
} catch (RocksDBException e) {
throw new RuntimeException(e);
}

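Taken together, the hunk above swaps the copied `Map.entry` results for the new `UnsafeSSTEntry`/`UnsafeRawSSTEntry` records and lets callers opt out of RocksDB's own block checks. A minimal caller sketch, not part of this commit (`dict`, `logger`, and `process` are hypothetical names):

// Sketch only: assumes an existing DatabaseMapDictionary<String, String> "dict".
List<Stream<UnsafeSSTEntry<String, String>>> streams =
		DatabaseMapDictionary.getAllEntriesFastUnsafe(dict,
				false, // disableRocksdbChecks: keep RocksDB's own checks enabled
				(rawEntry, error) -> logger.warn("Undeserializable entry in " + rawEntry.file(), error));
for (Stream<UnsafeSSTEntry<String, String>> stream : streams) {
	try (stream) { // each stream is backed by an SST reader, so close it when done
		stream.forEach(entry -> process(entry.key(), entry.value()));
	}
}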
it/cavallium/dbengine/database/collections/UnsafeRawSSTEntry.java

@@ -0,0 +1,10 @@
+package it.cavallium.dbengine.database.collections;
+
+import it.cavallium.buffer.Buf;
+import it.cavallium.dbengine.database.disk.RocksDBFile;
+import java.util.function.Function;
+
+public record UnsafeRawSSTEntry<T, U>(RocksDBFile file,
+		Buf rawKey, Buf rawValue,
+		Function<Buf, T> keyDeserializer,
+		Function<Buf, U> valueDeserializer) {}

it/cavallium/dbengine/database/collections/UnsafeSSTEntry.java

@@ -0,0 +1,11 @@
+package it.cavallium.dbengine.database.collections;
+
+import it.cavallium.buffer.Buf;
+import it.cavallium.dbengine.database.disk.RocksDBFile;
+import java.util.function.Function;
+
+public record UnsafeSSTEntry<T, U>(RocksDBFile file,
+		T key, U value,
+		Buf rawKey, Buf rawValue,
+		Function<Buf, T> keyDeserializer,
+		Function<Buf, U> valueDeserializer) {}

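Both records carry the deserializer functions alongside the raw buffers, so an entry that failed to decode can be retried later. A hypothetical recovery snippet, assuming a previously captured `UnsafeRawSSTEntry<String, String> raw`:

// Sketch only: re-decode a failed entry, e.g. after a serialization bug is fixed.
String key = raw.keyDeserializer().apply(raw.rawKey());
String value = raw.valueDeserializer().apply(raw.rawValue());
System.out.println(raw.file() + ": " + key + " -> " + value);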
it/cavallium/dbengine/database/disk/RocksDBFile.java

@@ -55,7 +55,6 @@ public class RocksDBFile implements Comparable<RocksDBFile> {
		Long sstNumber = null;
		if (extensionIndex != -1) {
			String numberRaw = fileName.substring(0, extensionIndex);
-			//noinspection UnstableApiUsage
			this.sstNumber = Longs.tryParse(numberRaw);
		} else {
			this.sstNumber = null;
@@ -95,7 +94,7 @@ public class RocksDBFile implements Comparable<RocksDBFile> {
	public Stream<SSTVerificationProgress> verify(SSTRange range) {
		AtomicLong fileScanned = new AtomicLong();
		AtomicLong fileTotal = new AtomicLong();
-		return iterate(range).map(state -> switch (state) {
+		return iterate(range, true).map(state -> switch (state) {
			case RocksDBFileIterationStateBegin begin -> {
				var countEstimate = begin.metadata().countEstimate();
				if (countEstimate != null) {
@@ -115,10 +114,10 @@ public class RocksDBFile implements Comparable<RocksDBFile> {
		});
	}

-	public Stream<SSTDumpProgress> readAllSST(SSTRange range, boolean failOnError) {
+	public Stream<SSTDumpProgress> readAllSST(SSTRange range, boolean failOnError, boolean disableRocksdbChecks) {
		AtomicLong fileScanned = new AtomicLong();
		AtomicLong fileTotal = new AtomicLong();
-		return iterate(range).<SSTDumpProgress>mapMulti((state, consumer) -> {
+		return iterate(range, disableRocksdbChecks).<SSTDumpProgress>mapMulti((state, consumer) -> {
			switch (state) {
				case RocksDBFileIterationStateBegin begin -> {
					var countEstimate = begin.metadata().countEstimate();
@@ -151,7 +150,7 @@ public class RocksDBFile implements Comparable<RocksDBFile> {
		}).takeWhile(data -> !(data instanceof SSTBlockFail));
	}

-	public Stream<RocksDBFileIterationState> iterate(SSTRange rangeFull) {
+	public Stream<RocksDBFileIterationState> iterate(SSTRange rangeFull, boolean disableRocksdbChecks) {
		var intersectedRange = RocksDBFile.intersectWithMetadata(metadata.keysRange(), rangeFull);
		Path filePath = metadata.filePath();
@@ -171,7 +170,7 @@ public class RocksDBFile implements Comparable<RocksDBFile> {
		AtomicLong fileScanned = new AtomicLong();
		AtomicBoolean mustSeek = new AtomicBoolean(true);
		try {
-			streamContent = resourceStream(() -> new LLSstFileReader(false, filePathString),
+			streamContent = resourceStream(() -> new LLSstFileReader(!disableRocksdbChecks, filePathString),
					r -> resourceStream(() -> LLUtils.generateCustomReadOptions(null, false, intersectedRange.isBounded(), false),
							ro -> {
								long skipToIndex;
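Note how the flag is threaded through: `verify(...)` pins `disableRocksdbChecks` to `true`, so `LLSstFileReader` is still constructed with `false` as its first argument, exactly as before this commit; only `readAllSST(...)` and direct `iterate(...)` callers can now turn the reader's checks back on. A hypothetical dump call with checks enabled, assuming an existing `RocksDBFile file`:

// Sketch only: dump a whole SST, failing fast, with RocksDB's checks left on.
try (Stream<SSTDumpProgress> dump = file.readAllSST(new SSTRangeFull(),
		true, // failOnError
		false // disableRocksdbChecks
)) {
	dump.forEach(System.out::println);
}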