Update Example.java, LLDictionary.java, and 6 more files...

Andrea Cavalli 2021-02-02 00:09:46 +01:00
parent 023bc3b0dd
commit dbca36b3aa
8 changed files with 455 additions and 299 deletions

Example.java

@@ -5,8 +5,8 @@ import it.cavallium.dbengine.database.Column;
 import it.cavallium.dbengine.database.LLKeyValueDatabase;
 import it.cavallium.dbengine.database.collections.DatabaseMapDictionary;
 import it.cavallium.dbengine.database.collections.DatabaseMapDictionaryDeep;
-import it.cavallium.dbengine.database.collections.SerializerFixedBinaryLength;
 import it.cavallium.dbengine.database.collections.Serializer;
+import it.cavallium.dbengine.database.collections.SerializerFixedBinaryLength;
 import it.cavallium.dbengine.database.collections.SubStageGetterSingleBytes;
 import it.cavallium.dbengine.database.disk.LLLocalDatabaseConnection;
 import java.io.IOException;
@@ -21,6 +21,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Locale;
 import java.util.concurrent.CompletionException;
+import java.util.concurrent.atomic.AtomicInteger;
 import java.util.function.Function;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
@@ -32,7 +33,7 @@ import reactor.util.function.Tuples;
 public class Example {

 	private static final boolean printPreviousValue = false;
-	private static final int numRepeats = 100;
+	private static final int numRepeats = 1000;
 	private static final int batchSize = 10000;

 	public static void main(String[] args) throws InterruptedException {
@@ -52,8 +53,8 @@ public class Example {
 		 */
-		testPutMulti()
-				.then(rangeTestPutMulti())
+		rangeTestPutMultiProgressive()
+				.then(rangeTestPutMultiSame())
 				.subscribeOn(Schedulers.parallel())
 				.blockOptional();
 	}
@@ -145,11 +146,14 @@ public class Example {
 				tempDb()
 						.flatMap(db -> db.getDictionary("testmap").map(dict -> Tuples.of(db, dict)))
 						.map(tuple -> tuple.mapT2(dict -> DatabaseMapDictionaryDeep.simple(dict, ssg, ser))),
-				tuple -> Mono
-						.defer(() -> tuple.getT2().putMulti(putMultiFlux)
-						),
+				tuple -> Mono.defer(() -> tuple.getT2().putMulti(putMultiFlux)),
 				numRepeats,
-				tuple -> tuple.getT1().close());
+				tuple -> Mono
+						.fromRunnable(() -> System.out.println("Calculating size"))
+						.then(tuple.getT2().size(null, false))
+						.doOnNext(s -> System.out.println("Size after: " + s))
+						.then(tuple.getT1().close())
+		);
 	}

 	private static Mono<Void> rangeTestAtPut() {
@@ -227,23 +231,56 @@ public class Example {
 				tuple -> tuple.getT1().close());
 	}

-	private static Mono<Void> rangeTestPutMulti() {
+	private static Mono<Void> rangeTestPutMultiSame() {
 		var ser = SerializerFixedBinaryLength.noop(4);
 		var vser = Serializer.noop();
 		HashMap<byte[], byte[]> keysToPut = new HashMap<>();
 		for (int i = 0; i < batchSize; i++) {
 			keysToPut.put(Ints.toByteArray(i * 3), Ints.toByteArray(i * 11));
 		}
-		var putMultiFlux = Flux.fromIterable(keysToPut.entrySet());
 		return test("MapDictionary::putMulti (batch of " + batchSize + " entries)",
 				tempDb()
 						.flatMap(db -> db.getDictionary("testmap").map(dict -> Tuples.of(db, dict)))
 						.map(tuple -> tuple.mapT2(dict -> DatabaseMapDictionary.simple(dict, ser, vser))),
 				tuple -> Mono
-						.defer(() -> tuple.getT2().putMulti(putMultiFlux)
+						.defer(() -> tuple.getT2().putMulti(Flux.fromIterable(keysToPut.entrySet()))
 						),
 				numRepeats,
-				tuple -> tuple.getT1().close());
+				tuple -> Mono
+						.fromRunnable(() -> System.out.println("Calculating size"))
+						.then(tuple.getT2().size(null, false))
+						.doOnNext(s -> System.out.println("Size after: " + s))
+						.then(tuple.getT1().close())
+		);
+	}
+
+	private static Mono<Void> rangeTestPutMultiProgressive() {
+		var ser = SerializerFixedBinaryLength.noop(4);
+		var vser = Serializer.noop();
+		AtomicInteger ai = new AtomicInteger(0);
+		return test("MapDictionary::putMulti (batch of " + batchSize + " entries)",
+				tempDb()
+						.flatMap(db -> db.getDictionary("testmap").map(dict -> Tuples.of(db, dict)))
+						.map(tuple -> tuple.mapT2(dict -> DatabaseMapDictionary.simple(dict, ser, vser))),
+				tuple -> Mono
+						.defer(() -> {
+							var aiv = ai.incrementAndGet();
+							HashMap<byte[], byte[]> keysToPut = new HashMap<>();
+							for (int i = 0; i < batchSize; i++) {
+								keysToPut.put(
+										Ints.toByteArray(i * 3 + (batchSize * aiv)),
+										Ints.toByteArray(i * 11 + (batchSize * aiv))
+								);
+							}
+							return tuple.getT2().putMulti(Flux.fromIterable(keysToPut.entrySet()));
+						}),
+				numRepeats,
+				tuple -> Mono
+						.fromRunnable(() -> System.out.println("Calculating size"))
+						.then(tuple.getT2().size(null, false))
+						.doOnNext(s -> System.out.println("Size after: " + s))
+						.then(tuple.getT1().close())
+		);
 	}

 	private static <U> Mono<? extends LLKeyValueDatabase> tempDb() {
@@ -282,18 +319,20 @@ public class Example {
 		Duration WAIT_TIME_END = Duration.ofSeconds(5);
 		return Mono
 				.delay(WAIT_TIME)
+				.doOnSuccess(s -> {
+					System.out.println("----------------------------------------------------------------------");
+					System.out.println(name);
+				})
 				.then(Mono.fromRunnable(() -> instantInit.tryEmitValue(now())))
 				.then(setup)
 				.doOnSuccess(s -> instantInitTest.tryEmitValue(now()))
-				.flatMap(a ->Mono.defer(() -> test.apply(a)).repeat(numRepeats)
+				.flatMap(a -> Mono.defer(() -> test.apply(a)).repeat(numRepeats - 1)
 						.then()
 						.doOnSuccess(s -> instantEndTest.tryEmitValue(now()))
 						.then(close.apply(a)))
 				.doOnSuccess(s -> instantEnd.tryEmitValue(now()))
 				.then(Mono.zip(instantInit.asMono(), instantInitTest.asMono(), instantEndTest.asMono(), instantEnd.asMono()))
 				.doOnSuccess(tuple -> {
-					System.out.println("----------------------------------------------------------------------");
-					System.out.println(name);
 					System.out.println(
 							"\t - Executed " + DecimalFormat.getInstance(Locale.ITALY).format((numRepeats * batchSize)) + " times:");
 					System.out.println("\t - Test time: " + DecimalFormat

LLDictionary.java

@@ -1,5 +1,6 @@
 package it.cavallium.dbengine.database;

+import java.util.List;
 import java.util.Map.Entry;
 import java.util.function.Function;
 import org.jetbrains.annotations.Nullable;
@@ -22,21 +23,30 @@ public interface LLDictionary extends LLKeyValueDatabaseStructure {
 	Flux<Entry<byte[], byte[]>> getRange(@Nullable LLSnapshot snapshot, LLRange range);

+	Flux<List<Entry<byte[], byte[]>>> getRangeGrouped(@Nullable LLSnapshot snapshot, LLRange range, int prefixLength);
+
 	Flux<byte[]> getRangeKeys(@Nullable LLSnapshot snapshot, LLRange range);

+	Flux<List<byte[]>> getRangeKeysGrouped(@Nullable LLSnapshot snapshot, LLRange range, int prefixLength);
+
 	Flux<Entry<byte[], byte[]>> setRange(LLRange range, Flux<Entry<byte[], byte[]>> entries, boolean getOldValues);

 	default Mono<Void> replaceRange(LLRange range, boolean canKeysChange, Function<Entry<byte[], byte[]>, Mono<Entry<byte[], byte[]>>> entriesReplacer) {
-		Flux<Entry<byte[], byte[]>> replacedFlux = this.getRange(null, range).flatMap(entriesReplacer);
-		if (canKeysChange) {
-			return this
-					.setRange(range, replacedFlux, false)
-					.then();
-		} else {
-			return this
-					.putMulti(replacedFlux, false)
-					.then();
-		}
+		return Mono.defer(() -> {
+			if (canKeysChange) {
+				return this
+						.setRange(range, this
+								.getRange(null, range)
+								.flatMap(entriesReplacer), false)
+						.then();
+			} else {
+				return this
+						.putMulti(this
+								.getRange(null, range)
+								.flatMap(entriesReplacer), false)
+						.then();
+			}
+		});
 	}

 	Mono<Boolean> isRangeEmpty(@Nullable LLSnapshot snapshot, LLRange range);
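
The replaceRange rewrite above also moves the getRange call inside Mono.defer, so the read pipeline is assembled once per subscription instead of once at call time; a retry or repeat then rescans the range instead of reusing a stale Flux. A minimal sketch of the difference, with expensiveSetup() as a hypothetical stand-in for building the getRange pipeline:

	import reactor.core.publisher.Mono;

	public class DeferDemo {
		// Hypothetical stand-in for assembling a getRange(...) pipeline.
		static Mono<String> expensiveSetup() {
			System.out.println("pipeline assembled");
			return Mono.just("result");
		}

		public static void main(String[] args) {
			// Eager: "pipeline assembled" prints here, once, no matter
			// how often (or whether) the Mono is subscribed later.
			Mono<String> eager = expensiveSetup();

			// Deferred: assembly runs on each subscription, so every
			// retry or repeat sees a freshly built pipeline.
			Mono<String> deferred = Mono.defer(DeferDemo::expensiveSetup);

			deferred.block(); // prints "pipeline assembled"
			deferred.block(); // prints it again
			eager.block();    // no new print; it was assembled above
		}
	}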

DatabaseMapDictionary.java

@@ -73,8 +73,7 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 	}

 	private Entry<byte[], byte[]> stripPrefix(Entry<byte[], byte[]> entry) {
-		byte[] keySuffix = stripPrefix(entry.getKey());
-		return Map.entry(keySuffix, entry.getValue());
+		return Map.entry(stripPrefix(entry.getKey()), entry.getValue());
 	}

 	@Override
@@ -89,7 +88,7 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 	@Override
 	public Mono<Long> size(@Nullable CompositeSnapshot snapshot, boolean fast) {
-		return dictionary.sizeRange(resolveSnapshot(snapshot), range, true);
+		return dictionary.sizeRange(resolveSnapshot(snapshot), range, fast);
 	}

 	@Override
@@ -174,9 +173,10 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 	@Override
 	public Flux<Entry<T, U>> setAllValuesAndGetPrevious(Flux<Entry<T, U>> entries) {
-		var serializedEntries = entries
-				.map(entry -> Map.entry(toKey(serializeSuffix(entry.getKey())), serialize(entry.getValue())));
-		return dictionary.setRange(range, serializedEntries, true)
+		return dictionary
+				.setRange(range,
+						entries.map(entry ->
+								Map.entry(toKey(serializeSuffix(entry.getKey())), serialize(entry.getValue()))), true)
 				.map(entry -> Map.entry(deserializeSuffix(stripPrefix(entry.getKey())), deserialize(entry.getValue())));
 	}

DatabaseMapDictionaryDeep.java

@@ -9,11 +9,11 @@ import java.util.Map;
 import java.util.Map.Entry;
 import org.jetbrains.annotations.Nullable;
 import reactor.core.publisher.Flux;
-import reactor.core.publisher.GroupedFlux;
 import reactor.core.publisher.Mono;
 import reactor.util.function.Tuples;

 // todo: implement optimized methods
+@SuppressWarnings("Convert2MethodRef")
 public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implements DatabaseStageMap<T, U, US> {

 	public static final byte[] EMPTY_BYTES = new byte[0];
@@ -196,34 +196,40 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
 	@Override
 	public Mono<US> at(@Nullable CompositeSnapshot snapshot, T keySuffix) {
 		byte[] keySuffixData = serializeSuffix(keySuffix);
-		Flux<byte[]> rangeKeys = this
-				.dictionary.getRangeKeys(resolveSnapshot(snapshot), toExtRange(keySuffixData)
-		);
 		return this.subStageGetter
-				.subStage(dictionary, snapshot, toKeyWithoutExt(keySuffixData), rangeKeys);
+				.subStage(dictionary,
+						snapshot,
+						toKeyWithoutExt(keySuffixData),
+						this.dictionary.getRangeKeys(resolveSnapshot(snapshot), toExtRange(keySuffixData))
+				);
 	}

 	@Override
 	public Flux<Entry<T, US>> getAllStages(@Nullable CompositeSnapshot snapshot) {
-		Flux<GroupedFlux<byte[], byte[]>> groupedFlux = dictionary
-				.getRangeKeys(resolveSnapshot(snapshot), range)
-				.groupBy(this::removeExtFromFullKey);
-		return groupedFlux
-				.flatMap(rangeKeys -> this.subStageGetter
-						.subStage(dictionary, snapshot, rangeKeys.key(), rangeKeys)
-						.map(us -> Map.entry(this.deserializeSuffix(this.stripPrefix(rangeKeys.key())), us))
-				);
+		return dictionary
+				.getRangeKeysGrouped(resolveSnapshot(snapshot), range, keyPrefix.length + keySuffixLength)
+				.flatMap(rangeKeys -> {
+					//System.out.println(Thread.currentThread() + "\tkReceived range key flux");
+					byte[] groupKeyWithoutExt = removeExtFromFullKey(rangeKeys.get(0));
+					byte[] groupSuffix = this.stripPrefix(groupKeyWithoutExt);
+					return this.subStageGetter
+							.subStage(dictionary, snapshot, groupKeyWithoutExt, Flux.fromIterable(rangeKeys))
+							//.doOnSuccess(s -> System.out.println(Thread.currentThread() + "\tObtained stage for a key"))
+							.map(us -> Map.entry(this.deserializeSuffix(groupSuffix), us));
+							//.doOnSuccess(s -> System.out.println(Thread.currentThread() + "\tMapped stage for a key"));
+				}
+				);
+				//.doOnNext(s -> System.out.println(Thread.currentThread() + "\tNext stage"))
 	}

 	@Override
 	public Flux<Entry<T, U>> setAllValuesAndGetPrevious(Flux<Entry<T, U>> entries) {
-		var newValues = entries
-				.flatMap(entry -> at(null, entry.getKey()).map(us -> Tuples.of(us, entry.getValue())))
-				.flatMap(tuple -> tuple.getT1().set(tuple.getT2()));
-
 		return getAllStages(null)
 				.flatMap(stage -> stage.getValue().get(null).map(val -> Map.entry(stage.getKey(), val)))
-				.concatWith(newValues.then(Mono.empty()));
+				.concatWith(entries
+						.flatMap(entry -> at(null, entry.getKey()).map(us -> Tuples.of(us, entry.getValue())))
+						.flatMap(tuple -> tuple.getT1().set(tuple.getT2()))
+						.then(Mono.empty()));
 	}

 	//todo: temporary wrapper. convert the whole class to buffers
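
In the new setAllValuesAndGetPrevious above, the writes are appended with concatWith(... .then(Mono.empty())): subscribers first receive every previous value from getAllStages, and only after that flux completes does the write pipeline run, contributing no elements of its own. A small sketch of the idiom with plain string fluxes (all names here are illustrative):

	import reactor.core.publisher.Flux;
	import reactor.core.publisher.Mono;

	public class ConcatWithWrites {
		public static void main(String[] args) {
			// Stand-ins for the "previous values" read and the writes.
			Flux<String> previous = Flux.just("old1", "old2");
			Flux<String> writes = Flux.just("new1", "new2")
					.doOnNext(v -> System.out.println("writing " + v));

			// concatWith subscribes to the writes only after the reads
			// complete; .then(Mono.empty()) keeps the written values out
			// of the result while preserving side effects and errors.
			previous
					.concatWith(writes.then(Mono.<String>empty()))
					.doOnNext(v -> System.out.println("previous " + v))
					.blockLast();
		}
	}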

DatabaseStageMap.java

@@ -30,7 +30,7 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
 	}

 	default Mono<U> putValueAndGetPrevious(T key, U value) {
-		return at(null, key).flatMap(v -> v.setAndGetPrevious(value));
+		return at(null, key).single().flatMap(v -> v.setAndGetPrevious(value));
 	}

 	default Mono<Boolean> putValueAndGetStatus(T key, U value) {
@@ -76,21 +76,19 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
 	}

 	default Mono<Void> replaceAllValues(boolean canKeysChange, Function<Entry<T, U>, Mono<Entry<T, U>>> entriesReplacer) {
-		Flux<Entry<T, U>> replacedFlux = this
-				.getAllValues(null)
-				.flatMap(entriesReplacer);
-		if (canKeysChange) {
-			return this
-					.setAllValues(replacedFlux)
-					.then();
-		} else {
-			return replacedFlux
-					.flatMap(replacedEntry -> this
-							.at(null, replacedEntry.getKey())
-							.map(entry -> entry.set(replacedEntry.getValue()))
-					)
-					.then();
-		}
+		return Mono.defer(() -> {
+			if (canKeysChange) {
+				return this.setAllValues(this.getAllValues(null).flatMap(entriesReplacer)).then();
+			} else {
+				return this
+						.getAllValues(null)
+						.flatMap(entriesReplacer)
+						.flatMap(replacedEntry -> this
+								.at(null, replacedEntry.getKey())
+								.map(entry -> entry.set(replacedEntry.getValue())))
+						.then();
+			}
+		});
 	}

 	default Mono<Void> replaceAll(Function<Entry<T, US>, Mono<Void>> entriesReplacer) {

SubStageGetterSingle.java

@@ -20,12 +20,25 @@ public class SubStageGetterSingle<T> implements SubStageGetter<T, DatabaseStageE
 			@Nullable CompositeSnapshot snapshot,
 			byte[] keyPrefix,
 			Flux<byte[]> keyFlux) {
-		return keyFlux.singleOrEmpty().flatMap(key -> Mono.fromCallable(() -> {
-			if (!Arrays.equals(keyPrefix, key)) {
-				throw new IndexOutOfBoundsException("Found more than one element!");
-			}
-			return null;
-		})).thenReturn(new DatabaseSingle<>(dictionary, keyPrefix, serializer));
+		//System.out.println(Thread.currentThread() + "subStageGetterSingle1");
+		return keyFlux
+				.singleOrEmpty()
+				.flatMap(key -> Mono
+						.<DatabaseStageEntry<T>>fromCallable(() -> {
+							//System.out.println(Thread.currentThread() + "subStageGetterSingle2");
+							if (!Arrays.equals(keyPrefix, key)) {
+								throw new IndexOutOfBoundsException("Found more than one element!");
+							}
+							return null;
+						})
+				)
+				.then(Mono.fromSupplier(() -> {
+					//System.out.println(Thread.currentThread() + "subStageGetterSingle3");
+					return new DatabaseSingle<T>(dictionary,
+							keyPrefix,
+							serializer
+					);
+				}));
 	}

 	//todo: temporary wrapper. convert the whole class to buffers
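
The change above swaps thenReturn(new DatabaseSingle<>(...)) for then(Mono.fromSupplier(...)). The difference is evaluation time: an argument passed to thenReturn is constructed eagerly while the pipeline is assembled, whereas a fromSupplier body runs only once the upstream validation has completed without error. A standalone sketch, with build() as a hypothetical stand-in for the DatabaseSingle constructor:

	import reactor.core.publisher.Mono;

	public class EagerVsLazyResult {
		static String build(String label) {
			System.out.println("building " + label);
			return label;
		}

		public static void main(String[] args) {
			Mono<Void> validation = Mono.empty();

			// thenReturn evaluates its argument immediately, during
			// assembly: "building eager" prints on this line.
			Mono<String> eager = validation.thenReturn(build("eager"));

			// fromSupplier defers the call until validation completes:
			// nothing prints until the Mono is subscribed.
			Mono<String> lazy = validation.then(Mono.fromSupplier(() -> build("lazy")));

			System.out.println("subscribing...");
			eager.block();
			lazy.block(); // "building lazy" appears only here
		}
	}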

LLLocalDictionary.java

@@ -9,7 +9,6 @@ import it.unimi.dsi.fastutil.objects.ObjectArrayList;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
@@ -27,7 +26,6 @@ import org.rocksdb.RocksIterator;
 import org.rocksdb.Snapshot;
 import org.rocksdb.WriteOptions;
 import org.warp.commonutils.concurrency.atomicity.NotAtomic;
-import org.warp.commonutils.type.VariableWrapper;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
 import reactor.core.scheduler.Scheduler;
@@ -159,55 +157,59 @@ public class LLLocalDictionary implements LLDictionary {
 	@Override
 	public Mono<byte[]> put(byte[] key, byte[] value, LLDictionaryResultType resultType) {
-		Mono<byte[]> response = getPrevValue(key, resultType);
-		return Mono
-				.fromCallable(() -> {
-					db.put(cfh, key, value);
-					return null;
-				})
-				.onErrorMap(IOException::new)
-				.subscribeOn(dbScheduler)
-				.then(response);
+		return getPrevValue(key, resultType)
+				.concatWith(Mono
+						.fromCallable(() -> {
+							db.put(cfh, key, value);
+							return null;
+						})
+						.onErrorMap(IOException::new)
+						.subscribeOn(dbScheduler)
+						.then(Mono.empty())
+				).singleOrEmpty();
 	}

 	@Override
 	public Mono<byte[]> remove(byte[] key, LLDictionaryResultType resultType) {
-		Mono<byte[]> response = getPrevValue(key, resultType);
-		return Mono
-				.fromCallable(() -> {
-					db.delete(cfh, key);
-					return null;
-				})
-				.onErrorMap(IOException::new)
-				.subscribeOn(dbScheduler)
-				.then(response);
+		return getPrevValue(key, resultType)
+				.concatWith(Mono
+						.fromCallable(() -> {
+							db.delete(cfh, key);
+							return null;
+						})
+						.onErrorMap(IOException::new)
+						.subscribeOn(dbScheduler)
+						.then(Mono.empty())
+				).singleOrEmpty();
 	}

 	private Mono<byte[]> getPrevValue(byte[] key, LLDictionaryResultType resultType) {
-		switch (resultType) {
-			case VALUE_CHANGED:
-				return containsKey(null, key).single().map(LLUtils::booleanToResponse);
-			case PREVIOUS_VALUE:
-				return Mono
-						.fromCallable(() -> {
-							var data = new Holder<byte[]>();
-							if (db.keyMayExist(cfh, key, data)) {
-								if (data.getValue() != null) {
-									return data.getValue();
-								} else {
-									return db.get(cfh, key);
-								}
-							} else {
-								return null;
-							}
-						})
-						.onErrorMap(IOException::new)
-						.subscribeOn(dbScheduler);
-			case VOID:
-				return Mono.empty();
-			default:
-				return Mono.error(new IllegalStateException("Unexpected value: " + resultType));
-		}
+		return Mono.defer(() -> {
+			switch (resultType) {
+				case VALUE_CHANGED:
+					return containsKey(null, key).single().map(LLUtils::booleanToResponse);
+				case PREVIOUS_VALUE:
+					return Mono
+							.fromCallable(() -> {
+								var data = new Holder<byte[]>();
+								if (db.keyMayExist(cfh, key, data)) {
+									if (data.getValue() != null) {
+										return data.getValue();
+									} else {
+										return db.get(cfh, key);
+									}
+								} else {
+									return null;
+								}
+							})
+							.onErrorMap(IOException::new)
+							.subscribeOn(dbScheduler);
+				case VOID:
+					return Mono.empty();
+				default:
+					return Mono.error(new IllegalStateException("Unexpected value: " + resultType));
+			}
+		});
 	}

 	@Override
@@ -242,11 +244,12 @@ public class LLLocalDictionary implements LLDictionary {
 	public Flux<Entry<byte[], byte[]>> putMulti(Flux<Entry<byte[], byte[]>> entries, boolean getOldValues) {
 		return entries
 				.window(Math.min(MULTI_GET_WINDOW, CAPPED_WRITE_BATCH_CAP))
-				.publishOn(dbScheduler)
 				.flatMap(Flux::collectList)
 				.flatMap(entriesWindow -> this
 						.getMulti(null, Flux.fromIterable(entriesWindow).map(Entry::getKey))
+						.publishOn(dbScheduler)
 						.concatWith(Mono.fromCallable(() -> {
+							//System.out.println(Thread.currentThread()+"\tTest");
 							var batch = new CappedWriteBatch(db,
 									CAPPED_WRITE_BATCH_CAP,
 									RESERVED_WRITE_BATCH_SIZE,
@@ -311,11 +314,26 @@ public class LLLocalDictionary implements LLDictionary {
 	@Override
 	public Flux<Entry<byte[], byte[]>> getRange(@Nullable LLSnapshot snapshot, LLRange range) {
-		if (range.isSingle()) {
-			return getRangeSingle(snapshot, range.getMin());
-		} else {
-			return getRangeMulti(snapshot, range);
-		}
+		return Flux.defer(() -> {
+			if (range.isSingle()) {
+				return getRangeSingle(snapshot, range.getMin());
+			} else {
+				return getRangeMulti(snapshot, range);
+			}
+		});
+	}
+
+	@Override
+	public Flux<List<Entry<byte[], byte[]>>> getRangeGrouped(@Nullable LLSnapshot snapshot,
+			LLRange range,
+			int prefixLength) {
+		return Flux.defer(() -> {
+			if (range.isSingle()) {
+				return getRangeSingle(snapshot, range.getMin()).map(List::of);
+			} else {
+				return getRangeMultiGrouped(snapshot, range, prefixLength);
+			}
+		});
 	}

 	private Flux<Entry<byte[],byte[]>> getRangeSingle(LLSnapshot snapshot, byte[] key) {
@@ -326,67 +344,138 @@ public class LLLocalDictionary implements LLDictionary {
 	}

 	private Flux<Entry<byte[],byte[]>> getRangeMulti(LLSnapshot snapshot, LLRange range) {
-		return Mono
-				.fromCallable(() -> {
-					var iter = db.newIterator(cfh, resolveSnapshot(snapshot));
-					if (range.hasMin()) {
-						iter.seek(range.getMin());
-					} else {
-						iter.seekToFirst();
-					}
-					return iter;
-				})
-				.subscribeOn(dbScheduler)
-				.flatMapMany(rocksIterator -> Flux
-						.<Entry<byte[], byte[]>>fromIterable(() -> {
-							VariableWrapper<byte[]> nextKey = new VariableWrapper<>(null);
-							VariableWrapper<byte[]> nextValue = new VariableWrapper<>(null);
-							return new Iterator<>() {
-								@Override
-								public boolean hasNext() {
-									assert nextKey.var == null;
-									assert nextValue.var == null;
-									if (!rocksIterator.isValid()) {
-										nextKey.var = null;
-										nextValue.var = null;
-										return false;
-									}
-									var key = rocksIterator.key();
-									var value = rocksIterator.value();
-									if (range.hasMax() && Arrays.compareUnsigned(key, range.getMax()) > 0) {
-										nextKey.var = null;
-										nextValue.var = null;
-										return false;
-									}
-									nextKey.var = key;
-									nextValue.var = value;
-									return true;
-								}
-
-								@Override
-								public Entry<byte[], byte[]> next() {
-									var key = nextKey.var;
-									var val = nextValue.var;
-									assert key != null;
-									assert val != null;
-									nextKey.var = null;
-									nextValue.var = null;
-									return Map.entry(key, val);
-								}
-							};
-						})
-						.doFinally(signalType -> rocksIterator.close())
-						.subscribeOn(dbScheduler)
-				);
+		return Flux
+				.<Entry<byte[], byte[]>>push(sink -> {
+					//System.out.println(Thread.currentThread() + "\tPreparing Read rande item");
+					try (var rocksIterator = db.newIterator(cfh, resolveSnapshot(snapshot))) {
+						if (range.hasMin()) {
+							rocksIterator.seek(range.getMin());
+						} else {
+							rocksIterator.seekToFirst();
+						}
+						byte[] key;
+						while (rocksIterator.isValid()) {
+							key = rocksIterator.key();
+							if (range.hasMax() && Arrays.compareUnsigned(key, range.getMax()) > 0) {
+								break;
+							}
+							//System.out.println(Thread.currentThread() + "\tRead rande item");
+							sink.next(Map.entry(key, rocksIterator.value()));
+							rocksIterator.next();
+						}
+					} finally {
+						//System.out.println(Thread.currentThread() + "\tFinish Read rande item");
+						sink.complete();
+					}
+				})
+				.subscribeOn(dbScheduler);
+	}
+
+	private Flux<List<Entry<byte[],byte[]>>> getRangeMultiGrouped(LLSnapshot snapshot, LLRange range, int prefixLength) {
+		return Flux
+				.<List<Entry<byte[], byte[]>>>push(sink -> {
+					//System.out.println(Thread.currentThread() + "\tPreparing Read rande item");
+					try (var rocksIterator = db.newIterator(cfh, resolveSnapshot(snapshot))) {
+						if (range.hasMin()) {
+							rocksIterator.seek(range.getMin());
+						} else {
+							rocksIterator.seekToFirst();
+						}
+						byte[] firstGroupKey = null;
+						List<Entry<byte[], byte[]>> currentGroupValues = new ArrayList<>();
+						byte[] key;
+						while (rocksIterator.isValid()) {
+							key = rocksIterator.key();
+							if (firstGroupKey == null) { // Fix first value
+								firstGroupKey = key;
+							}
+							if (range.hasMax() && Arrays.compareUnsigned(key, range.getMax()) > 0) {
+								break;
+							}
+							if (Arrays.equals(firstGroupKey, 0, prefixLength, key, 0, prefixLength)) {
+								currentGroupValues.add(Map.entry(key, rocksIterator.value()));
+							} else {
+								if (!currentGroupValues.isEmpty()) {
+									//System.out.println(Thread.currentThread() + "\tRead rande item");
+									sink.next(currentGroupValues);
+								}
+								firstGroupKey = key;
+								currentGroupValues = new ArrayList<>();
+							}
+							rocksIterator.next();
+						}
+						if (!currentGroupValues.isEmpty()) {
+							//System.out.println(Thread.currentThread() + "\tRead rande item");
+							sink.next(currentGroupValues);
+						}
+					} finally {
+						//System.out.println(Thread.currentThread() + "\tFinish Read rande item");
+						sink.complete();
+					}
+				})
+				.subscribeOn(dbScheduler);
 	}

 	@Override
 	public Flux<byte[]> getRangeKeys(@Nullable LLSnapshot snapshot, LLRange range) {
-		if (range.isSingle()) {
-			return getRangeKeysSingle(snapshot, range.getMin());
-		} else {
-			return getRangeKeysMulti(snapshot, range);
-		}
+		return Flux.defer(() -> {
+			if (range.isSingle()) {
+				//System.out.println(Thread.currentThread() + "getRangeKeys single");
+				return getRangeKeysSingle(snapshot, range.getMin()).doOnTerminate(() -> {}/*System.out.println(Thread.currentThread() + "getRangeKeys single end")*/);
+			} else {
+				//System.out.println(Thread.currentThread() + "getRangeKeys multi");
+				return getRangeKeysMulti(snapshot, range);
+			}
+		});
+	}
+
+	@Override
+	public Flux<List<byte[]>> getRangeKeysGrouped(@Nullable LLSnapshot snapshot, LLRange range, int prefixLength) {
+		return Flux
+				.<List<byte[]>>push(sink -> {
+					//System.out.println(Thread.currentThread() + "\tPreparing Read rande item");
+					try (var rocksIterator = db.newIterator(cfh, resolveSnapshot(snapshot))) {
+						if (range.hasMin()) {
+							rocksIterator.seek(range.getMin());
+						} else {
+							rocksIterator.seekToFirst();
+						}
+						byte[] firstGroupKey = null;
+						List<byte[]> currentGroupValues = new ArrayList<>();
+						byte[] key;
+						while (rocksIterator.isValid()) {
+							key = rocksIterator.key();
+							if (firstGroupKey == null) { // Fix first value
+								firstGroupKey = key;
+							}
+							if (range.hasMax() && Arrays.compareUnsigned(key, range.getMax()) > 0) {
+								break;
+							}
+							if (Arrays.equals(firstGroupKey, 0, prefixLength, key, 0, prefixLength)) {
+								currentGroupValues.add(key);
+							} else {
+								if (!currentGroupValues.isEmpty()) {
+									//System.out.println(Thread.currentThread() + "\tRead rande item");
+									sink.next(currentGroupValues);
+								}
+								firstGroupKey = key;
+								currentGroupValues = new ArrayList<>();
+								currentGroupValues.add(key);
+							}
+							rocksIterator.next();
+						}
+						if (!currentGroupValues.isEmpty()) {
+							//System.out.println(Thread.currentThread() + "\tRead rande item");
+							sink.next(currentGroupValues);
+						}
+					} finally {
+						//System.out.println(Thread.currentThread() + "\tFinish Read rande item");
+						sink.complete();
+					}
+				})
+				.subscribeOn(dbScheduler);
 	}

 	private Flux<byte[]> getRangeKeysSingle(LLSnapshot snapshot, byte[] key) {
@@ -398,105 +487,90 @@ public class LLLocalDictionary implements LLDictionary {
 	}

 	private Flux<byte[]> getRangeKeysMulti(LLSnapshot snapshot, LLRange range) {
-		return Mono
-				.fromCallable(() -> {
-					var iter = db.newIterator(cfh, resolveSnapshot(snapshot));
-					if (range.hasMin()) {
-						iter.seek(range.getMin());
-					} else {
-						iter.seekToFirst();
-					}
-					return iter;
-				})
-				.subscribeOn(dbScheduler)
-				.flatMapMany(rocksIterator -> Flux
-						.<byte[]>fromIterable(() -> {
-							VariableWrapper<byte[]> nextKey = new VariableWrapper<>(null);
-							return new Iterator<>() {
-								@Override
-								public boolean hasNext() {
-									assert nextKey.var == null;
-									if (!rocksIterator.isValid()) {
-										nextKey.var = null;
-										return false;
-									}
-									var key = rocksIterator.key();
-									var value = rocksIterator.value();
-									if (range.hasMax() && Arrays.compareUnsigned(key, range.getMax()) > 0) {
-										nextKey.var = null;
-										return false;
-									}
-									nextKey.var = key;
-									return true;
-								}
-
-								@Override
-								public byte[] next() {
-									var key = nextKey.var;
-									assert key != null;
-									nextKey.var = null;
-									return key;
-								}
-							};
-						})
-						.doFinally(signalType -> rocksIterator.close())
-						.subscribeOn(dbScheduler)
-				);
+		return Flux
+				.<byte[]>push(sink -> {
+					//System.out.println(Thread.currentThread() + "\tkPreparing Read rande item");
+					try (var rocksIterator = db.newIterator(cfh, resolveSnapshot(snapshot))) {
+						if (range.hasMin()) {
+							rocksIterator.seek(range.getMin());
+						} else {
+							rocksIterator.seekToFirst();
+						}
+						byte[] key;
+						sink.onRequest(l -> {}/*System.out.println(Thread.currentThread() + "\tkRequested " + l)*/);
+						while (rocksIterator.isValid()) {
+							key = rocksIterator.key();
+							if (range.hasMax() && Arrays.compareUnsigned(key, range.getMax()) > 0) {
+								break;
+							}
+							//System.out.println(Thread.currentThread() + "\tkRead rande item");
+							sink.next(key);
+							rocksIterator.next();
+						}
+					} finally {
+						//System.out.println(Thread.currentThread() + "\tkFinish Read rande item");
+						sink.complete();
+					}
+					//System.out.println(Thread.currentThread() + "\tkFinish end Read rande item");
+				})
+				.subscribeOn(dbScheduler);
 	}

 	@Override
 	public Flux<Entry<byte[], byte[]>> setRange(LLRange range,
 			Flux<Entry<byte[], byte[]>> entries,
 			boolean getOldValues) {
-		if (range.isAll()) {
-			return clear().thenMany(Flux.empty());
-		} else {
-			return Mono
-					.fromCallable(() -> new CappedWriteBatch(db,
-							CAPPED_WRITE_BATCH_CAP,
-							RESERVED_WRITE_BATCH_SIZE,
-							MAX_WRITE_BATCH_SIZE,
-							BATCH_WRITE_OPTIONS
-					))
-					.subscribeOn(dbScheduler)
-					.flatMapMany(writeBatch -> Mono
-							.fromCallable(() -> {
-								synchronized (writeBatch) {
-									if (range.hasMin() && range.hasMax()) {
-										writeBatch.deleteRange(cfh, range.getMin(), range.getMax());
-										writeBatch.delete(cfh, range.getMax());
-									} else if (range.hasMax()) {
-										writeBatch.deleteRange(cfh, FIRST_KEY, range.getMax());
-										writeBatch.delete(cfh, range.getMax());
-									} else {
-										try (var it = db.newIterator(cfh, getReadOptions(null))) {
-											it.seekToLast();
-											if (it.isValid()) {
-												writeBatch.deleteRange(cfh, range.getMin(), it.key());
-												writeBatch.delete(cfh, it.key());
-											}
-										}
-									}
-								}
-								return null;
-							})
-							.subscribeOn(dbScheduler)
-							.thenMany(entries)
-							.flatMap(newEntry -> putEntryToWriteBatch(newEntry, getOldValues, writeBatch))
-							.concatWith(Mono.<Entry<byte[], byte[]>>fromCallable(() -> {
-								synchronized (writeBatch) {
-									writeBatch.writeToDbAndClose();
-									writeBatch.close();
-								}
-								return null;
-							}).subscribeOn(dbScheduler))
-							.doFinally(signalType -> {
-								synchronized (writeBatch) {
-									writeBatch.close();
-								}
-							}))
-					.onErrorMap(IOException::new);
-		}
+		return Flux.defer(() -> {
+			if (range.isAll()) {
+				return clear().thenMany(Flux.empty());
+			} else {
+				return Mono
+						.fromCallable(() -> new CappedWriteBatch(db,
+								CAPPED_WRITE_BATCH_CAP,
+								RESERVED_WRITE_BATCH_SIZE,
+								MAX_WRITE_BATCH_SIZE,
+								BATCH_WRITE_OPTIONS
+						))
+						.subscribeOn(dbScheduler)
+						.flatMapMany(writeBatch -> Mono
+								.fromCallable(() -> {
+									synchronized (writeBatch) {
+										if (range.hasMin() && range.hasMax()) {
+											writeBatch.deleteRange(cfh, range.getMin(), range.getMax());
+											writeBatch.delete(cfh, range.getMax());
+										} else if (range.hasMax()) {
+											writeBatch.deleteRange(cfh, FIRST_KEY, range.getMax());
+											writeBatch.delete(cfh, range.getMax());
+										} else {
+											try (var it = db.newIterator(cfh, getReadOptions(null))) {
+												it.seekToLast();
+												if (it.isValid()) {
+													writeBatch.deleteRange(cfh, range.getMin(), it.key());
+													writeBatch.delete(cfh, it.key());
+												}
+											}
+										}
+									}
+									return null;
+								})
+								.subscribeOn(dbScheduler)
+								.thenMany(entries)
+								.flatMap(newEntry -> putEntryToWriteBatch(newEntry, getOldValues, writeBatch))
+								.concatWith(Mono.<Entry<byte[], byte[]>>fromCallable(() -> {
+									synchronized (writeBatch) {
+										writeBatch.writeToDbAndClose();
+										writeBatch.close();
+									}
+									return null;
+								}).subscribeOn(dbScheduler))
+								.doFinally(signalType -> {
+									synchronized (writeBatch) {
+										writeBatch.close();
+									}
+								}))
+						.onErrorMap(IOException::new);
+			}
+		});
 	}

 	public Mono<Void> clear() {
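
The range scans above drop the hand-rolled Iterator bridge (Mono.fromCallable plus Flux.fromIterable over a stateful Iterator) in favor of Flux.push, where a single producer thread walks the RocksIterator inside try-with-resources and emits through a sink, with completion guaranteed in the finally block. The grouped variants batch consecutive keys sharing a prefixLength-byte prefix. A self-contained sketch of the same grouping loop, with an in-memory sorted map as a hypothetical stand-in for the RocksDB iterator; note that the key which ends one group opens the next and must be added to the fresh group, as getRangeKeysGrouped does (the entry variant in getRangeMultiGrouped above skips that add, so a group-opening entry is dropped):

	import java.util.ArrayList;
	import java.util.Arrays;
	import java.util.List;
	import java.util.NavigableMap;
	import java.util.TreeMap;
	import reactor.core.publisher.Flux;

	public class PushGrouping {
		// Groups sorted keys by a fixed-length prefix, mirroring the
		// getRangeKeysGrouped loop over a stand-in sorted map.
		static Flux<List<byte[]>> groupedKeys(NavigableMap<byte[], byte[]> db, int prefixLength) {
			return Flux.push(sink -> {
				byte[] firstGroupKey = null;
				List<byte[]> group = new ArrayList<>();
				for (byte[] key : db.keySet()) {
					if (firstGroupKey == null) {
						firstGroupKey = key;
					}
					if (Arrays.equals(firstGroupKey, 0, prefixLength, key, 0, prefixLength)) {
						group.add(key);
					} else {
						if (!group.isEmpty()) {
							sink.next(group);
						}
						firstGroupKey = key;
						group = new ArrayList<>();
						// The key that triggered the prefix change belongs
						// to the new group and must not be dropped.
						group.add(key);
					}
				}
				if (!group.isEmpty()) {
					sink.next(group);
				}
				sink.complete();
			});
		}

		public static void main(String[] args) {
			NavigableMap<byte[], byte[]> db = new TreeMap<>(Arrays::compareUnsigned);
			db.put(new byte[] {0, 1}, new byte[0]);
			db.put(new byte[] {0, 2}, new byte[0]);
			db.put(new byte[] {1, 9}, new byte[0]);
			groupedKeys(db, 1)
					.map(g -> "group of " + g.size())
					.doOnNext(System.out::println) // group of 2, group of 1
					.blockLast();
		}
	}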

LLLocalLuceneIndex.java

@@ -319,24 +319,32 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 					.unicast()
 					.onBackpressureBuffer(new ArrayBlockingQueue<>(1000));

-			streamSearcher.search(indexSearcher,
-					query,
-					limit,
-					null,
-					ScoreMode.COMPLETE,
-					keyFieldName,
-					keyScore -> {
-						EmitResult result = topKeysSink.tryEmitNext(keyScore);
-						if (result.isFailure()) {
-							throw new EmissionException(result);
-						}
-					},
-					totalHitsCount -> {
-						EmitResult result = totalHitsCountSink.tryEmitValue(totalHitsCount);
-						if (result.isFailure()) {
-							throw new EmissionException(result);
-						}
-					});
+			luceneScheduler.schedule(() -> {
+				try {
+					streamSearcher.search(indexSearcher,
+							query,
+							limit,
+							null,
+							ScoreMode.COMPLETE,
+							keyFieldName,
+							keyScore -> {
+								EmitResult result = topKeysSink.tryEmitNext(keyScore);
+								if (result.isFailure()) {
+									throw new EmissionException(result);
+								}
+							},
+							totalHitsCount -> {
+								EmitResult result = totalHitsCountSink.tryEmitValue(totalHitsCount);
+								if (result.isFailure()) {
+									throw new EmissionException(result);
+								}
+							});
+					topKeysSink.tryEmitComplete();
+				} catch (IOException e) {
+					topKeysSink.tryEmitError(e);
+					totalHitsCountSink.tryEmitError(e);
+				}
+			});

 			return new LLSearchResult(totalHitsCountSink.asMono(), Flux.just(topKeysSink.asFlux()));
 		}).subscribeOn(luceneScheduler)
@@ -374,24 +382,32 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 					.unicast()
 					.onBackpressureBuffer(new ArrayBlockingQueue<>(PagedStreamSearcher.MAX_ITEMS_PER_PAGE));

-			streamSearcher.search(indexSearcher,
-					query,
-					limit,
-					luceneSort,
-					luceneScoreMode,
-					keyFieldName,
-					keyScore -> {
-						EmitResult result = topKeysSink.tryEmitNext(keyScore);
-						if (result.isFailure()) {
-							throw new EmissionException(result);
-						}
-					},
-					totalHitsCount -> {
-						EmitResult result = totalHitsCountSink.tryEmitValue(totalHitsCount);
-						if (result.isFailure()) {
-							throw new EmissionException(result);
-						}
-					});
+			luceneScheduler.schedule(() -> {
+				try {
+					streamSearcher.search(indexSearcher,
+							query,
+							limit,
+							luceneSort,
+							luceneScoreMode,
+							keyFieldName,
+							keyScore -> {
+								EmitResult result = topKeysSink.tryEmitNext(keyScore);
+								if (result.isFailure()) {
+									throw new EmissionException(result);
+								}
+							},
+							totalHitsCount -> {
+								EmitResult result = totalHitsCountSink.tryEmitValue(totalHitsCount);
+								if (result.isFailure()) {
+									throw new EmissionException(result);
+								}
+							});
+					topKeysSink.tryEmitComplete();
+				} catch (IOException e) {
+					topKeysSink.tryEmitError(e);
+					totalHitsCountSink.tryEmitError(e);
+				}
+			});

 			return new LLSearchResult(totalHitsCountSink.asMono(), Flux.just(topKeysSink.asFlux()));
 		}).subscribeOn(luceneScheduler)
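
Both search hunks follow the same pattern: the blocking streamSearcher.search call is moved onto luceneScheduler, hits and the hit count are pushed through Sinks via tryEmit*, and the scheduled task explicitly completes the key sink or routes the IOException to both sinks so neither subscriber waits forever. A reduced sketch of that callback-to-sinks bridge, with blockingSearch() as a hypothetical stand-in for the LuceneStreamSearcher API; EmitResult.orThrow() used here raises Sinks.EmissionException on failure, equivalent to the explicit isFailure() checks above:

	import java.io.IOException;
	import java.util.function.Consumer;
	import java.util.function.LongConsumer;
	import reactor.core.publisher.Flux;
	import reactor.core.publisher.Mono;
	import reactor.core.publisher.Sinks;
	import reactor.core.scheduler.Scheduler;
	import reactor.core.scheduler.Schedulers;

	public class CallbackBridge {
		// Hypothetical blocking, callback-based search API.
		static void blockingSearch(Consumer<String> onHit, LongConsumer onTotal) throws IOException {
			onHit.accept("doc1");
			onHit.accept("doc2");
			onTotal.accept(2);
		}

		public static void main(String[] args) {
			Scheduler searchScheduler = Schedulers.boundedElastic();
			Sinks.Many<String> hits = Sinks.many().unicast().onBackpressureBuffer();
			Sinks.One<Long> total = Sinks.one();

			// Run the blocking call off the caller thread; complete or
			// fail BOTH sinks so no subscriber is left hanging.
			searchScheduler.schedule(() -> {
				try {
					blockingSearch(
							hit -> hits.tryEmitNext(hit).orThrow(),
							t -> total.tryEmitValue(t).orThrow());
					hits.tryEmitComplete();
				} catch (IOException e) {
					hits.tryEmitError(e);
					total.tryEmitError(e);
				}
			});

			Mono<Long> totalMono = total.asMono();
			Flux<String> hitsFlux = hits.asFlux();
			System.out.println(hitsFlux.collectList().block() + " total=" + totalMono.block());
		}
	}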