package it.cavallium.dbengine.database.collections;

import io.net5.buffer.api.Buffer;
import io.net5.buffer.api.BufferAllocator;
import io.net5.buffer.api.Drop;
import io.net5.buffer.api.Owned;
import io.net5.buffer.api.Resource;
import io.net5.buffer.api.Send;
import io.net5.buffer.api.internal.ResourceSupport;
import it.cavallium.dbengine.client.BadBlock;
import it.cavallium.dbengine.client.CompositeSnapshot;
import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.database.UpdateMode;
import it.cavallium.dbengine.database.serialization.Serializer;
import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
import it.unimi.dsi.fastutil.objects.Object2ObjectLinkedOpenHashMap;
import it.unimi.dsi.fastutil.objects.Object2ObjectSortedMap;
import it.unimi.dsi.fastutil.objects.ObjectArraySet;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Set;
import java.util.function.Function;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

/**
 * A {@link DatabaseStageMap} backed by hash buckets: each logical key {@code T} is mapped through
 * {@code keySuffixHashFunction} to a fixed-length hash {@code TH}, and all entries sharing the same
 * hash are stored together as an {@link ObjectArraySet} bucket inside the backing
 * {@link DatabaseMapDictionary}.
 */
@SuppressWarnings("unused")
public class DatabaseMapDictionaryHashed<T, U, TH> extends
		ResourceSupport<DatabaseStage<Object2ObjectSortedMap<T, U>>, DatabaseMapDictionaryHashed<T, U, TH>>
		implements DatabaseStageMap<T, U, DatabaseStageEntry<U>> {

	private static final Logger logger = LogManager.getLogger(DatabaseMapDictionaryHashed.class);

	private static final Drop<DatabaseMapDictionaryHashed<?, ?, ?>> DROP = new Drop<>() {
		@Override
		public void drop(DatabaseMapDictionaryHashed<?, ?, ?> obj) {
			try {
				if (obj.subDictionary != null) {
					obj.subDictionary.close();
				}
			} catch (Throwable ex) {
				logger.error("Failed to close subDictionary", ex);
			}
		}

		@Override
		public Drop<DatabaseMapDictionaryHashed<?, ?, ?>> fork() {
			return this;
		}

		@Override
		public void attach(DatabaseMapDictionaryHashed<?, ?, ?> obj) {

		}
	};

	private final BufferAllocator alloc;
	private final Function<T, TH> keySuffixHashFunction;

	private DatabaseMapDictionary<TH, ObjectArraySet<Entry<T, U>>> subDictionary;

	@SuppressWarnings({"unchecked", "rawtypes"})
	protected DatabaseMapDictionaryHashed(LLDictionary dictionary,
			@Nullable Buffer prefixKey,
			Serializer<T> keySuffixSerializer,
			Serializer<U> valueSerializer,
			Function<T, TH> keySuffixHashFunction,
			SerializerFixedBinaryLength<TH> keySuffixHashSerializer,
			Runnable onClose) {
		super((Drop<DatabaseMapDictionaryHashed<T, U, TH>>) (Drop) DROP);
		if (dictionary.getUpdateMode().block() != UpdateMode.ALLOW) {
			throw new IllegalArgumentException("Hashed maps only work when UpdateMode is ALLOW");
		}
		this.alloc = dictionary.getAllocator();
		ValueWithHashSerializer<T, U> valueWithHashSerializer
				= new ValueWithHashSerializer<>(keySuffixSerializer, valueSerializer);
		ValuesSetSerializer<Entry<T, U>> valuesSetSerializer
				= new ValuesSetSerializer<>(valueWithHashSerializer);
		this.subDictionary = DatabaseMapDictionary.tail(dictionary,
				prefixKey,
				keySuffixHashSerializer,
				valuesSetSerializer,
				onClose
		);
		this.keySuffixHashFunction = keySuffixHashFunction;
	}

	@SuppressWarnings({"unchecked", "rawtypes"})
	private DatabaseMapDictionaryHashed(BufferAllocator alloc,
			Function<T, TH> keySuffixHashFunction,
			Send<DatabaseStage<Object2ObjectSortedMap<TH, ObjectArraySet<Entry<T, U>>>>> subDictionary,
			Drop<DatabaseMapDictionaryHashed<T, U, TH>> drop) {
		super((Drop<DatabaseMapDictionaryHashed<T, U, TH>>) (Drop) DROP);
		this.alloc = alloc;
		this.keySuffixHashFunction = keySuffixHashFunction;

		this.subDictionary = (DatabaseMapDictionary<TH, ObjectArraySet<Entry<T, U>>>) subDictionary.receive();
	}

	public static <T, U, UH> DatabaseMapDictionaryHashed<T, U, UH> simple(LLDictionary dictionary,
			Serializer<T> keySerializer,
			Serializer<U> valueSerializer,
			Function<T, UH> keyHashFunction,
			SerializerFixedBinaryLength<UH> keyHashSerializer,
			Runnable onClose) {
		return new DatabaseMapDictionaryHashed<>(
				dictionary,
				null,
				keySerializer,
				valueSerializer,
				keyHashFunction,
				keyHashSerializer,
				onClose
		);
	}

	public static <T, U, UH> DatabaseMapDictionaryHashed<T, U, UH> tail(LLDictionary dictionary,
			@Nullable Buffer prefixKey,
			Serializer<T> keySuffixSerializer,
			Serializer<U> valueSerializer,
			Function<T, UH> keySuffixHashFunction,
			SerializerFixedBinaryLength<UH> keySuffixHashSerializer,
			Runnable onClose) {
		return new DatabaseMapDictionaryHashed<>(dictionary,
				prefixKey,
				keySuffixSerializer,
				valueSerializer,
				keySuffixHashFunction,
				keySuffixHashSerializer,
				onClose
		);
	}

	/**
	 * Groups the plain key-value map into buckets keyed by the hash of each key.
	 */
	private Object2ObjectSortedMap<TH, ObjectArraySet<Entry<T, U>>> serializeMap(Object2ObjectSortedMap<T, U> map) {
		var newMap = new Object2ObjectLinkedOpenHashMap<TH, ObjectArraySet<Entry<T, U>>>(map.size());
		map.forEach((key, value) -> newMap.compute(keySuffixHashFunction.apply(key), (hash, prev) -> {
			if (prev == null) {
				prev = new ObjectArraySet<>();
			}
			prev.add(Map.entry(key, value));
			return prev;
		}));
		return newMap;
	}

	/**
	 * Flattens the bucketed representation back into a plain key-value map.
	 */
	private Object2ObjectSortedMap<T, U> deserializeMap(Object2ObjectSortedMap<TH, ObjectArraySet<Entry<T, U>>> map) {
		var newMap = new Object2ObjectLinkedOpenHashMap<T, U>(map.size());
		map.forEach((hash, set) -> set.forEach(entry -> newMap.put(entry.getKey(), entry.getValue())));
		return newMap;
	}

	@Override
	public Mono<Object2ObjectSortedMap<T, U>> get(@Nullable CompositeSnapshot snapshot) {
		return subDictionary.get(snapshot).map(this::deserializeMap);
	}

	@Override
	public Mono<Object2ObjectSortedMap<T, U>> getOrDefault(@Nullable CompositeSnapshot snapshot,
			Mono<Object2ObjectSortedMap<T, U>> defaultValue) {
		return this.get(snapshot).switchIfEmpty(defaultValue);
	}

	@Override
	public Mono<Void> set(Object2ObjectSortedMap<T, U> map) {
		return Mono.fromSupplier(() -> this.serializeMap(map)).flatMap(subDictionary::set);
	}

	@Override
	public Mono<Boolean> setAndGetChanged(Object2ObjectSortedMap<T, U> map) {
		return Mono.fromSupplier(() -> this.serializeMap(map)).flatMap(subDictionary::setAndGetChanged).single();
	}

	@Override
	public Mono<Boolean> clearAndGetStatus() {
		return subDictionary.clearAndGetStatus();
	}

	@Override
	public Mono<Boolean> isEmpty(@Nullable CompositeSnapshot snapshot) {
		return subDictionary.isEmpty(snapshot);
	}

	@Override
	public DatabaseStageEntry<Object2ObjectSortedMap<T, U>> entry() {
		return this;
	}

	@Override
	public Flux<BadBlock> badBlocks() {
		return this.subDictionary.badBlocks();
	}

	@Override
	public Mono<DatabaseStageEntry<U>> at(@Nullable CompositeSnapshot snapshot, T key) {
		return this
				.atPrivate(snapshot, key, keySuffixHashFunction.apply(key))
				.map(cast -> (DatabaseStageEntry<U>) cast);
	}

	private Mono<DatabaseSingleBucket<T, U, TH>> atPrivate(@Nullable CompositeSnapshot snapshot, T key, TH hash) {
		return subDictionary
				.at(snapshot, hash)
				.map(entry -> new DatabaseSingleBucket<T, U, TH>(entry, key, null));
	}

	@Override
	public Mono<UpdateMode> getUpdateMode() {
		return subDictionary.getUpdateMode();
	}

	@Override
	public Flux<Entry<T, DatabaseStageEntry<U>>> getAllStages(@Nullable CompositeSnapshot snapshot) {
		return subDictionary
				.getAllValues(snapshot)
				.map(Entry::getValue)
				.map(Collections::unmodifiableSet)
				.flatMap(bucket -> Flux
						.fromIterable(bucket)
						.map(Entry::getKey)
						.flatMap(key -> this.at(snapshot, key).map(stage -> Map.entry(key, stage)))
				);
	}

	@Override
	public Flux<Entry<T, U>> getAllValues(@Nullable CompositeSnapshot snapshot) {
		return subDictionary
				.getAllValues(snapshot)
				.map(Entry::getValue)
				.map(Collections::unmodifiableSet)
				.concatMapIterable(list -> list);
	}

	@Override
	public Flux<Entry<T, U>> setAllValuesAndGetPrevious(Flux<Entry<T, U>> entries) {
		return entries
				.flatMap(entry -> LLUtils.usingResource(this.at(null, entry.getKey()),
						stage -> stage
								.setAndGetPrevious(entry.getValue())
								.map(prev -> Map.entry(entry.getKey(), prev)),
						true)
				);
	}

	@Override
	public Mono<Void> clear() {
		return subDictionary.clear();
	}

	@Override
	public Mono<Object2ObjectSortedMap<T, U>> setAndGetPrevious(Object2ObjectSortedMap<T, U> value) {
		return Mono
				.fromSupplier(() -> this.serializeMap(value))
				.flatMap(subDictionary::setAndGetPrevious)
				.map(this::deserializeMap);
	}

	@Override
	public Mono<Object2ObjectSortedMap<T, U>> clearAndGetPrevious() {
		return subDictionary
				.clearAndGetPrevious()
				.map(this::deserializeMap);
	}

	@Override
	public Mono<Object2ObjectSortedMap<T, U>> get(@Nullable CompositeSnapshot snapshot, boolean existsAlmostCertainly) {
		return subDictionary
				.get(snapshot, existsAlmostCertainly)
				.map(this::deserializeMap);
	}

	@Override
	public Mono<Long> leavesCount(@Nullable CompositeSnapshot snapshot, boolean fast) {
		return subDictionary.leavesCount(snapshot, fast);
	}

	@Override
	public ValueGetterBlocking<T, U> getDbValueGetter(@Nullable CompositeSnapshot snapshot) {
		ValueGetterBlocking<TH, ObjectArraySet<Entry<T, U>>> getter = subDictionary.getDbValueGetter(snapshot);
		return key -> extractValue(getter.get(keySuffixHashFunction.apply(key)), key);
	}

	@Override
	public ValueGetter<T, U> getAsyncDbValueGetter(@Nullable CompositeSnapshot snapshot) {
		ValueGetter<TH, ObjectArraySet<Entry<T, U>>> getter = subDictionary.getAsyncDbValueGetter(snapshot);
		return key -> getter
				.get(keySuffixHashFunction.apply(key))
				.flatMap(set -> this.extractValueTransformation(set, key));
	}

	private Mono<U> extractValueTransformation(ObjectArraySet<Entry<T, U>> entries, T key) {
		return Mono.fromCallable(() -> extractValue(entries, key));
	}

	/**
	 * Finds the value associated with {@code key} inside a bucket, or {@code null} if it is absent.
	 */
	@Nullable
	private U extractValue(ObjectArraySet<Entry<T, U>> entries, T key) {
		if (entries == null) {
			return null;
		}
		for (Entry<T, U> entry : entries) {
			if (Objects.equals(entry.getKey(), key)) {
				return entry.getValue();
			}
		}
		return null;
	}

	/**
	 * Returns a copy of the bucket with the entry added, creating a new bucket if none exists.
	 */
	@NotNull
	private ObjectArraySet<Entry<T, U>> insertValueOrCreate(@Nullable ObjectArraySet<Entry<T, U>> entries,
			T key,
			U value) {
		if (entries != null) {
			var clonedEntries = entries.clone();
			clonedEntries.add(Map.entry(key, value));
			return clonedEntries;
		} else {
			var oas = new ObjectArraySet<Entry<T, U>>(1);
			oas.add(Map.entry(key, value));
			return oas;
		}
	}

	/**
	 * Returns a copy of the bucket with the entry for {@code key} removed,
	 * or {@code null} if the resulting bucket would be empty.
	 */
	@Nullable
	private Set<Entry<T, U>> removeValueOrDelete(@Nullable ObjectArraySet<Entry<T, U>> entries, T key) {
		if (entries != null) {
			var clonedEntries = entries.clone();
			var it = clonedEntries.iterator();
			while (it.hasNext()) {
				var entry = it.next();
				if (Objects.equals(entry.getKey(), key)) {
					it.remove();
					break;
				}
			}
			if (clonedEntries.size() == 0) {
				return null;
			} else {
				return clonedEntries;
			}
		} else {
			return null;
		}
	}

	@Override
	protected RuntimeException createResourceClosedException() {
		throw new IllegalStateException("Closed");
	}

	@Override
	protected Owned<DatabaseMapDictionaryHashed<T, U, TH>> prepareSend() {
		var subDictionary = this.subDictionary.send();
		return drop -> new DatabaseMapDictionaryHashed<>(alloc, keySuffixHashFunction, subDictionary, drop);
	}

	@Override
	protected void makeInaccessible() {
		this.subDictionary = null;
	}
}
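
// Usage sketch (illustrative only): a minimal, hypothetical example of obtaining a hashed map over an
// LLDictionary via the simple(...) factory defined above. The identifiers keySerializer, valueSerializer
// and intHashSerializer are placeholders for whatever Serializer / SerializerFixedBinaryLength
// implementations the surrounding project provides; they are assumptions, not APIs confirmed by this file.
//
//   DatabaseMapDictionaryHashed<String, String, Integer> hashedMap = DatabaseMapDictionaryHashed.simple(
//           dictionary,           // LLDictionary opened with UpdateMode.ALLOW (required by the constructor check)
//           keySerializer,        // Serializer<String> for the logical key T
//           valueSerializer,      // Serializer<String> for the value U
//           String::hashCode,     // Function<String, Integer> mapping each key to its bucket hash TH
//           intHashSerializer,    // SerializerFixedBinaryLength<Integer> for the fixed-size hash
//           () -> {}              // onClose callback
//   );
//
// Keys that collide on the same hash end up in the same ObjectArraySet bucket; get/at resolve the exact
// key inside the bucket, so collisions affect only bucket size, not correctness.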