package it.cavallium.dbengine.database.collections;

import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
import it.cavallium.dbengine.client.BadBlock;
import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.Delta;
import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.UpdateMode;
import it.cavallium.dbengine.database.collections.Joiner.ValueGetter;
import it.cavallium.dbengine.database.collections.JoinerBlocking.ValueGetterBlocking;
import it.cavallium.dbengine.database.serialization.Serializer;
import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
import it.unimi.dsi.fastutil.objects.ObjectArraySet;
import it.unimi.dsi.fastutil.objects.ObjectSets;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Set;
import java.util.function.Function;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

@SuppressWarnings("unused")
public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T, U, DatabaseStageEntry<U>> {

	private final ByteBufAllocator alloc;
	private final DatabaseMapDictionary<TH, Set<Entry<T, U>>> subDictionary;
	private final Function<T, TH> keySuffixHashFunction;

	protected DatabaseMapDictionaryHashed(LLDictionary dictionary,
			ByteBuf prefixKey,
			Serializer<T, ByteBuf> keySuffixSerializer,
			Serializer<U, ByteBuf> valueSerializer,
			Function<T, TH> keySuffixHashFunction,
			SerializerFixedBinaryLength<TH, ByteBuf> keySuffixHashSerializer) {
		try {
			if (dictionary.getUpdateMode().block() != UpdateMode.ALLOW) {
				throw new IllegalArgumentException("Hashed maps only work when UpdateMode is ALLOW");
			}
			this.alloc = dictionary.getAllocator();
			ValueWithHashSerializer<T, U> valueWithHashSerializer
					= new ValueWithHashSerializer<>(alloc, keySuffixSerializer, valueSerializer);
			ValuesSetSerializer<Entry<T, U>> valuesSetSerializer
					= new ValuesSetSerializer<>(alloc, valueWithHashSerializer);
			// The backing dictionary maps each key hash to the set of (key, value) entries in that bucket
			this.subDictionary = DatabaseMapDictionary.tail(dictionary,
					prefixKey.retain(),
					keySuffixHashSerializer,
					valuesSetSerializer
			);
			this.keySuffixHashFunction = keySuffixHashFunction;
		} finally {
			prefixKey.release();
		}
	}

	public static <T, U, TH> DatabaseMapDictionaryHashed<T, U, TH> simple(LLDictionary dictionary,
			Serializer<T, ByteBuf> keySerializer,
			Serializer<U, ByteBuf> valueSerializer,
			Function<T, TH> keyHashFunction,
			SerializerFixedBinaryLength<TH, ByteBuf> keyHashSerializer) {
		return new DatabaseMapDictionaryHashed<>(dictionary,
				dictionary.getAllocator().buffer(0),
				keySerializer,
				valueSerializer,
				keyHashFunction,
				keyHashSerializer
		);
	}

	public static <T, U, TH> DatabaseMapDictionaryHashed<T, U, TH> tail(LLDictionary dictionary,
			ByteBuf prefixKey,
			Serializer<T, ByteBuf> keySuffixSerializer,
			Serializer<U, ByteBuf> valueSerializer,
			Function<T, TH> keySuffixHashFunction,
			SerializerFixedBinaryLength<TH, ByteBuf> keySuffixHashSerializer) {
		return new DatabaseMapDictionaryHashed<>(dictionary,
				prefixKey,
				keySuffixSerializer,
				valueSerializer,
				keySuffixHashFunction,
				keySuffixHashSerializer
		);
	}

	private Map<TH, Set<Entry<T, U>>> serializeMap(Map<T, U> map) {
		var newMap = new HashMap<TH, Set<Entry<T, U>>>(map.size());
		// Group every (key, value) entry into the bucket addressed by the hash of its key
		map.forEach((key, value) -> newMap.compute(keySuffixHashFunction.apply(key), (hash, prev) -> {
			if (prev == null) {
				prev = new HashSet<>();
			}
			prev.add(Map.entry(key, value));
			return prev;
		}));
		return newMap;
	}

	private Map<T, U> deserializeMap(Map<TH, Set<Entry<T, U>>> map) {
		var newMap = new HashMap<T, U>(map.size());
		// Flatten the buckets back into a plain key-value map, discarding the hashes
		map.forEach((hash, set) -> set.forEach(entry -> newMap.put(entry.getKey(), entry.getValue())));
		return newMap;
	}
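	/*
	 * Illustrative sketch, not part of the original source: how serializeMap/deserializeMap behave,
	 * assuming a simple hash function that truncates hashCode() to a Short. Two distinct keys that
	 * happen to share a hash end up as two entries inside the same bucket set, and deserializeMap
	 * flattens those buckets back into a plain map:
	 *
	 *   Function<String, Short> hash = key -> (short) key.hashCode();
	 *   // serializeMap(Map.of("a", 1, "b", 2)) produces roughly:
	 *   //   { hash("a") -> {("a", 1)}, hash("b") -> {("b", 2)} }
	 *   // and, on a collision (hash("a") == hash("b")), instead:
	 *   //   { hash("a") -> {("a", 1), ("b", 2)} }
	 *   // deserializeMap(...) of either shape returns {"a" -> 1, "b" -> 2} again.
	 */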
	@Override
	public Mono<Map<T, U>> get(@Nullable CompositeSnapshot snapshot) {
		return subDictionary.get(snapshot).map(this::deserializeMap);
	}

	@Override
	public Mono<Map<T, U>> getOrDefault(@Nullable CompositeSnapshot snapshot, Mono<Map<T, U>> defaultValue) {
		return this.get(snapshot).switchIfEmpty(defaultValue);
	}

	@Override
	public Mono<Void> set(Map<T, U> map) {
		return Mono.fromSupplier(() -> this.serializeMap(map)).flatMap(subDictionary::set);
	}

	@Override
	public Mono<Boolean> setAndGetChanged(Map<T, U> map) {
		return Mono.fromSupplier(() -> this.serializeMap(map)).flatMap(subDictionary::setAndGetChanged).single();
	}

	@Override
	public Mono<Boolean> clearAndGetStatus() {
		return subDictionary.clearAndGetStatus();
	}

	@Override
	public Mono<Void> close() {
		return subDictionary.close();
	}

	@Override
	public Mono<Boolean> isEmpty(@Nullable CompositeSnapshot snapshot) {
		return subDictionary.isEmpty(snapshot);
	}

	@Override
	public DatabaseStageEntry<Map<T, U>> entry() {
		return this;
	}

	@Override
	public Flux<BadBlock> badBlocks() {
		return this.subDictionary.badBlocks();
	}

	@Override
	public void release() {
		this.subDictionary.release();
	}

	@Override
	public Mono<DatabaseStageEntry<U>> at(@Nullable CompositeSnapshot snapshot, T key) {
		return this
				.atPrivate(snapshot, key, keySuffixHashFunction.apply(key))
				.map(cast -> cast);
	}

	private Mono<DatabaseSingleBucket<T, U, TH>> atPrivate(@Nullable CompositeSnapshot snapshot, T key, TH hash) {
		return subDictionary
				.at(snapshot, hash)
				.map(entry -> new DatabaseSingleBucket<>(entry, key));
	}

	@Override
	public Mono<UpdateMode> getUpdateMode() {
		return subDictionary.getUpdateMode();
	}

	@Override
	public Flux<Entry<T, DatabaseStageEntry<U>>> getAllStages(@Nullable CompositeSnapshot snapshot) {
		return subDictionary
				.getAllValues(snapshot)
				.map(Entry::getValue)
				.map(Collections::unmodifiableSet)
				.flatMap(bucket -> Flux
						.fromIterable(bucket)
						.map(Entry::getKey)
						.flatMap(key -> this
								.at(snapshot, key)
								.flatMap(stage -> Mono.just(Map.entry(key, stage)).doAfterTerminate(stage::release))
						)
				);
	}

	@Override
	public Flux<Entry<T, U>> getAllValues(@Nullable CompositeSnapshot snapshot) {
		return subDictionary
				.getAllValues(snapshot)
				.map(Entry::getValue)
				.map(Collections::unmodifiableSet)
				.flatMap(Flux::fromIterable);
	}

	@Override
	public Flux<Entry<T, U>> setAllValuesAndGetPrevious(Flux<Entry<T, U>> entries) {
		return entries
				.flatMap(entry -> this
						.at(null, entry.getKey())
						.flatMap(stage -> stage
								.setAndGetPrevious(entry.getValue())
								.map(prev -> Map.entry(entry.getKey(), prev))
								.doAfterTerminate(stage::release))
				);
	}

	@Override
	public Mono<Void> clear() {
		return subDictionary.clear();
	}

	@Override
	public Mono<Map<T, U>> setAndGetPrevious(Map<T, U> value) {
		return Mono
				.fromSupplier(() -> this.serializeMap(value))
				.flatMap(subDictionary::setAndGetPrevious)
				.map(this::deserializeMap);
	}

	@Override
	public Mono<Map<T, U>> clearAndGetPrevious() {
		return subDictionary
				.clearAndGetPrevious()
				.map(this::deserializeMap);
	}

	@Override
	public Mono<Map<T, U>> get(@Nullable CompositeSnapshot snapshot, boolean existsAlmostCertainly) {
		return subDictionary
				.get(snapshot, existsAlmostCertainly)
				.map(this::deserializeMap);
	}

	@Override
	public Mono<Long> leavesCount(@Nullable CompositeSnapshot snapshot, boolean fast) {
		return subDictionary.leavesCount(snapshot, fast);
	}

	@Override
	public ValueGetterBlocking<T, U> getDbValueGetter(@Nullable CompositeSnapshot snapshot) {
		ValueGetterBlocking<TH, Set<Entry<T, U>>> getter = subDictionary.getDbValueGetter(snapshot);
		return key -> extractValue(getter.get(keySuffixHashFunction.apply(key)), key);
	}

	@Override
	public ValueGetter<T, U> getAsyncDbValueGetter(@Nullable CompositeSnapshot snapshot) {
		ValueGetter<TH, Set<Entry<T, U>>> getter = subDictionary.getAsyncDbValueGetter(snapshot);
		return key -> getter
				.get(keySuffixHashFunction.apply(key))
				.flatMap(set -> this.extractValueTransformation(set, key));
	}
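	/*
	 * Illustrative sketch, not part of the original source: a lookup through the async value getter
	 * resolves in two steps, first fetching the bucket stored under the key's hash, then scanning
	 * that bucket with extractValue for the entry whose key matches exactly:
	 *
	 *   ValueGetter<String, Integer> getter = hashedMap.getAsyncDbValueGetter(null);
	 *   getter.get("some-key").subscribe(value -> System.out.println("some-key -> " + value));
	 *
	 * An empty Mono means either no bucket exists for that hash or the bucket holds no entry with
	 * that exact key ("hashedMap" is a hypothetical DatabaseMapDictionaryHashed<String, Integer, ?>).
	 */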
	private Mono<U> extractValueTransformation(Set<Entry<T, U>> entries, T key) {
		return Mono.fromCallable(() -> extractValue(entries, key));
	}

	@Nullable
	private U extractValue(Set<Entry<T, U>> entries, T key) {
		if (entries == null) {
			return null;
		}
		// Linear scan of the bucket: on a hash collision multiple keys share the same bucket
		for (Entry<T, U> entry : entries) {
			if (Objects.equals(entry.getKey(), key)) {
				return entry.getValue();
			}
		}
		return null;
	}

	@NotNull
	private Set<Entry<T, U>> insertValueOrCreate(@Nullable Set<Entry<T, U>> entries, T key, U value) {
		if (entries != null) {
			entries.add(Map.entry(key, value));
			return entries;
		} else {
			var oas = new HashSet<Entry<T, U>>(1);
			oas.add(Map.entry(key, value));
			return oas;
		}
	}

	@Nullable
	private Set<Entry<T, U>> removeValueOrDelete(@Nullable Set<Entry<T, U>> entries, T key) {
		if (entries != null) {
			var it = entries.iterator();
			while (it.hasNext()) {
				var entry = it.next();
				if (Objects.equals(entry.getKey(), key)) {
					it.remove();
					break;
				}
			}
			// An emptied bucket is represented as null so the caller can delete it entirely
			if (entries.isEmpty()) {
				return null;
			} else {
				return entries;
			}
		} else {
			return null;
		}
	}
}
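/*
 * Usage sketch, assumptions only: "dictionary", "keySerializer", "valueSerializer" and
 * "keyHashSerializer" are placeholders for objects defined elsewhere in this codebase, and the
 * hash function shown is just one possible choice. The sketch builds a hashed map over an
 * LLDictionary and writes a single key through its per-key bucket stage:
 *
 *   DatabaseMapDictionaryHashed<String, String, Short> map = DatabaseMapDictionaryHashed.simple(
 *       dictionary,
 *       keySerializer,                  // Serializer<String, ByteBuf>
 *       valueSerializer,                // Serializer<String, ByteBuf>
 *       key -> (short) key.hashCode(),  // collisions are allowed, buckets keep the full key
 *       keyHashSerializer               // SerializerFixedBinaryLength<Short, ByteBuf>
 *   );
 *
 *   map.at(null, "some-key")
 *       .flatMap(stage -> stage.setAndGetPrevious("some-value").doAfterTerminate(stage::release))
 *       .block();
 */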