Fix bug with map access

This commit is contained in:
Andrea Cavalli 2021-06-06 02:23:51 +02:00
parent 187274071f
commit adf3bef488
6 changed files with 22 additions and 10 deletions

View File

@@ -11,6 +11,7 @@ import it.cavallium.dbengine.database.UpdateMode;
import it.cavallium.dbengine.database.UpdateReturnMode;
import it.cavallium.dbengine.database.serialization.Serializer;
import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
@@ -81,7 +82,7 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
b -> dictionary
.setRange(range.retain(),
Flux
.fromIterable(value.entrySet())
.fromIterable(Collections.unmodifiableMap(value).entrySet())
.map(entry -> Map
.entry(this.toKey(serializeSuffix(entry.getKey())), serialize(entry.getValue()))
)

View File

@@ -12,6 +12,7 @@ import it.cavallium.dbengine.database.serialization.Serializer;
import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
import it.unimi.dsi.fastutil.objects.ObjectArraySet;
import it.unimi.dsi.fastutil.objects.ObjectSets;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
@@ -172,8 +173,10 @@ public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T
public Flux<Entry<T, DatabaseStageEntry<U>>> getAllStages(@Nullable CompositeSnapshot snapshot) {
return subDictionary
.getAllValues(snapshot)
.map(Entry::getValue)
.map(Collections::unmodifiableSet)
.flatMap(bucket -> Flux
.fromIterable(bucket.getValue())
.fromIterable(bucket)
.map(Entry::getKey)
.flatMap(key -> this
.at(snapshot, key)
@@ -184,7 +187,11 @@ public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T
@Override
public Flux<Entry<T, U>> getAllValues(@Nullable CompositeSnapshot snapshot) {
return subDictionary.getAllValues(snapshot).flatMap(s -> Flux.fromIterable(s.getValue()));
return subDictionary
.getAllValues(snapshot)
.map(Entry::getValue)
.map(Collections::unmodifiableSet)
.flatMap(Flux::fromIterable);
}
@Override

View File

@@ -8,6 +8,7 @@ import it.cavallium.dbengine.database.UpdateMode;
import it.cavallium.dbengine.database.UpdateReturnMode;
import it.cavallium.dbengine.database.collections.Joiner.ValueGetter;
import it.cavallium.dbengine.database.collections.JoinerBlocking.ValueGetterBlocking;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
@@ -170,7 +171,7 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
@Override
default Mono<Map<T, U>> setAndGetPrevious(Map<T, U> value) {
return this
.setAllValuesAndGetPrevious(Flux.fromIterable(value.entrySet()))
.setAllValuesAndGetPrevious(Flux.fromIterable(Collections.unmodifiableMap(value).entrySet()))
.collectMap(Entry::getKey, Entry::getValue, HashMap::new);
}
@@ -206,7 +207,7 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
})
.flatMap(result -> Mono
.justOrEmpty(result.getT2())
.flatMap(values -> this.setAllValues(Flux.fromIterable(values.entrySet())))
.flatMap(values -> this.setAllValues(Flux.fromIterable(Collections.unmodifiableMap(values).entrySet())))
.thenReturn(new Delta<>(result.getT1().orElse(null), result.getT2().orElse(null)))
);
} else if (updateMode == UpdateMode.ALLOW) {

View File

@@ -18,6 +18,7 @@ import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
@@ -955,7 +956,7 @@ public class LLLocalDictionary implements LLDictionary {
}
})
.subscribeOn(dbScheduler)
.flatMapMany(Flux::fromIterable)
.flatMapMany(entries -> Flux.fromIterable(entries))
.onErrorMap(cause -> new IOException("Failed to read keys "
+ Arrays.deepToString(keysWindow.toArray(ByteBuf[]::new)), cause))
.doAfterTerminate(() -> keysWindow.forEach(ReferenceCounted::release))
@@ -990,6 +991,7 @@ public class LLLocalDictionary implements LLDictionary {
castedEntry.getValue().release();
}
})
.map(Collections::unmodifiableList)
.flatMap(ew -> Mono
.using(
() -> ew,

View File

@@ -164,12 +164,13 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
this.directory = directory;
}
this.luceneIndexName = name;
this.snapshotter = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
this.lowMemory = lowMemory;
this.similarity = LuceneUtils.toPerFieldSimilarityWrapper(indicizerSimilarities);
this.distributedCollectionStatisticsGetter = distributedCollectionStatisticsGetter;
;
IndexWriterConfig indexWriterConfig = new IndexWriterConfig(LuceneUtils.toPerFieldAnalyzerWrapper(indicizerAnalyzers));
indexWriterConfig.setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND);
indexWriterConfig.setIndexDeletionPolicy(snapshotter);
@@ -183,8 +184,7 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
}
indexWriterConfig.setSimilarity(getSimilarity());
this.indexWriter = new IndexWriter(directory, indexWriterConfig);
this.searcherManager
= new SearcherManager(indexWriter, false, false, null);
this.searcherManager = new SearcherManager(indexWriter, false, false, null);
// Create scheduled tasks lifecycle manager
this.scheduledTasksLifecycle = new ScheduledTaskLifecycle();

View File

@@ -21,6 +21,7 @@ import java.io.IOException;
import java.nio.file.Path;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -227,7 +228,7 @@ public class LLLocalMultiLuceneIndex implements LLLuceneIndex {
.computeIfAbsent(getLuceneIndex(key), _unused -> new HashMap<>())
.put(key, value)
);
return Flux.fromIterable(sortedMap.entrySet());
return Flux.fromIterable(Collections.unmodifiableMap(sortedMap).entrySet());
})
.flatMap(luceneIndexWithNewDocuments -> {
var luceneIndex = luceneIndexWithNewDocuments.getKey();