Merge branch 'safe-search'
commit 9c86a51a69

pom.xml (27 lines changed)
@@ -99,6 +99,10 @@
 <groupId>io.net5</groupId>
 <artifactId>netty-buffer</artifactId>
 </dependency>
+<dependency>
+<groupId>io.net5.incubator</groupId>
+<artifactId>netty-incubator-buffer-memseg</artifactId>
+</dependency>
 <dependency>
 <groupId>javax.xml.bind</groupId>
 <artifactId>jaxb-api</artifactId>
@@ -168,6 +172,11 @@
 <artifactId>log4j-slf4j-impl</artifactId>
 <scope>test</scope>
 </dependency>
+<dependency>
+<groupId>com.lmax</groupId>
+<artifactId>disruptor</artifactId>
+<version>3.3.4</version>
+</dependency>
 <dependency>
 <groupId>org.rocksdb</groupId>
 <artifactId>rocksdbjni</artifactId>
@@ -266,6 +275,11 @@
 <artifactId>netty-buffer</artifactId>
 <version>5.0.0.Final-SNAPSHOT</version>
 </dependency>
+<dependency>
+<groupId>io.net5.incubator</groupId>
+<artifactId>netty-incubator-buffer-memseg</artifactId>
+<version>0.0.1.Final-SNAPSHOT</version>
+</dependency>
 <dependency>
 <groupId>javax.xml.bind</groupId>
 <artifactId>jaxb-api</artifactId>
@@ -343,6 +357,11 @@
 <artifactId>log4j-slf4j-impl</artifactId>
 <version>2.14.1</version>
 </dependency>
+<dependency>
+<groupId>com.lmax</groupId>
+<artifactId>disruptor</artifactId>
+<scope>test</scope>
+</dependency>
 <dependency>
 <groupId>org.rocksdb</groupId>
 <artifactId>rocksdbjni</artifactId>
@@ -484,7 +503,7 @@
 <artifactId>maven-compiler-plugin</artifactId>
 <version>3.8.1</version>
 <configuration>
-<release>16</release>
+<release>17</release>
 <annotationProcessorPaths>
 <annotationProcessorPath>
 <groupId>io.soabase.record-builder</groupId>
@@ -499,8 +518,8 @@
 <compilerArgs>--enable-preview
 <arg>--add-opens=java.base/jdk.internal.misc=ALL-UNNAMED</arg>
 </compilerArgs>
-<source>16</source>
-<target>16</target>
+<source>17</source>
+<target>17</target>
 </configuration>
 </plugin>
 <plugin>
@@ -533,7 +552,7 @@
 </dependency>
 </dependencies>
 <configuration>
-<argLine>--enable-preview --add-modules jdk.incubator.foreign -Dforeign.restricted=permit --add-opens=java.base/jdk.internal.misc=ALL-UNNAMED</argLine>
+<argLine>--enable-preview --add-modules jdk.incubator.foreign -Dforeign.restricted=permit --add-opens=java.base/jdk.internal.misc=ALL-UNNAMED --enable-native-access=ALL-UNNAMED</argLine>
 <systemProperties>
 <property>
 <name>ci</name>
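Context note, not part of the commit: the compiler release/source/target bump from 16 to 17 goes hand in hand with the new --enable-native-access=ALL-UNNAMED entry in the test argLine. On JDK 17 that flag gates the restricted operations of the incubating jdk.incubator.foreign API that the newly added netty-incubator-buffer-memseg dependency builds on, while the older JDK 16 spelling -Dforeign.restricted=permit is kept alongside it. A minimal, hedged sketch of that API (class name and values are illustrative, not taken from this repository):

import jdk.incubator.foreign.MemorySegment;
import jdk.incubator.foreign.ResourceScope;

// Compile and run with --add-modules jdk.incubator.foreign (JDK 17).
// Plain native allocation like this is unrestricted; --enable-native-access
// matters once code reaches restricted operations such as wrapping raw addresses.
public final class ForeignMemoryDemo {
  public static void main(String[] args) {
    try (ResourceScope scope = ResourceScope.newConfinedScope()) {
      MemorySegment segment = MemorySegment.allocateNative(64, scope);
      segment.asByteBuffer().putLong(0, 42L);
      System.out.println("off-heap bytes: " + segment.byteSize());
    } // closing the scope frees the native memory
  }
}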
@@ -1,5 +1,6 @@
 package it.cavallium.dbengine.client;
 
+import io.net5.buffer.api.Send;
 import it.cavallium.dbengine.client.query.ClientQueryParams;
 import it.cavallium.dbengine.client.query.current.data.Query;
 import it.cavallium.dbengine.client.query.current.data.TotalHitsCount;
@@ -52,9 +53,9 @@ public interface LuceneIndex<T, U> extends LLSnapshottable {
 
 Mono<Void> deleteAll();
 
-Mono<SearchResultKeys<T>> moreLikeThis(ClientQueryParams<SearchResultKey<T>> queryParams, T key, U mltDocumentValue);
+Mono<Send<SearchResultKeys<T>>> moreLikeThis(ClientQueryParams<SearchResultKey<T>> queryParams, T key, U mltDocumentValue);
 
-default Mono<SearchResult<T, U>> moreLikeThisWithValues(ClientQueryParams<SearchResultItem<T, U>> queryParams,
+default Mono<Send<SearchResult<T, U>>> moreLikeThisWithValues(ClientQueryParams<SearchResultItem<T, U>> queryParams,
 T key,
 U mltDocumentValue,
 ValueGetter<T, U> valueGetter) {
@@ -64,21 +65,19 @@ public interface LuceneIndex<T, U> extends LLSnapshottable {
 getValueGetterTransformer(valueGetter));
 }
 
-Mono<SearchResult<T, U>> moreLikeThisWithTransformer(ClientQueryParams<SearchResultItem<T, U>> queryParams,
+Mono<Send<SearchResult<T, U>>> moreLikeThisWithTransformer(ClientQueryParams<SearchResultItem<T, U>> queryParams,
 T key,
 U mltDocumentValue,
 ValueTransformer<T, U> valueTransformer);
 
-Mono<SearchResultKeys<T>> search(ClientQueryParams<SearchResultKey<T>> queryParams);
+Mono<Send<SearchResultKeys<T>>> search(ClientQueryParams<SearchResultKey<T>> queryParams);
 
-default Mono<SearchResult<T, U>> searchWithValues(ClientQueryParams<SearchResultItem<T, U>> queryParams,
+default Mono<Send<SearchResult<T, U>>> searchWithValues(ClientQueryParams<SearchResultItem<T, U>> queryParams,
 ValueGetter<T, U> valueGetter) {
-return this.searchWithTransformer(queryParams,
-getValueGetterTransformer(valueGetter)
-);
+return this.searchWithTransformer(queryParams, getValueGetterTransformer(valueGetter));
 }
 
-Mono<SearchResult<T, U>> searchWithTransformer(ClientQueryParams<SearchResultItem<T, U>> queryParams,
+Mono<Send<SearchResult<T, U>>> searchWithTransformer(ClientQueryParams<SearchResultItem<T, U>> queryParams,
 ValueTransformer<T, U> valueTransformer);
 
 Mono<TotalHitsCount> count(@Nullable CompositeSnapshot snapshot, Query query);
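Note, not part of the diff: every result-bearing method of this interface now returns its payload wrapped in Send<...>, making buffer ownership explicit across the reactive boundary. A hedged caller-side sketch, mirroring the count() implementation further down in this commit (the index and queryParams parameters are hypothetical placeholders):

// Hypothetical helper showing the new calling convention.
static <T, U> Mono<TotalHitsCount> countHits(LuceneIndex<T, U> index,
    ClientQueryParams<SearchResultKey<T>> queryParams) {
  return index
      .search(queryParams)
      .map(send -> {
        // receive() takes ownership of the SearchResultKeys; try-with-resources
        // closes it exactly once, even if totalHitsCount() throws.
        try (var searchResultKeys = send.receive()) {
          return searchResultKeys.totalHitsCount();
        }
      });
}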
@@ -1,5 +1,6 @@
 package it.cavallium.dbengine.client;
 
+import io.net5.buffer.api.Send;
 import it.cavallium.dbengine.client.query.ClientQueryParams;
 import it.cavallium.dbengine.client.query.current.data.Query;
 import it.cavallium.dbengine.client.query.current.data.TotalHitsCount;
@@ -84,58 +85,73 @@ public class LuceneIndexImpl<T, U> implements LuceneIndex<T, U> {
 return luceneIndex.deleteAll();
 }
 
-private Mono<SearchResultKeys<T>> transformLuceneResultWithTransformer(LLSearchResultShard llSearchResult) {
-return Mono.just(new SearchResultKeys<>(llSearchResult.results()
-.map(signal -> new SearchResultKey<>(Mono.fromCallable(signal::key).map(indicizer::getKey), signal.score())),
-llSearchResult.totalHitsCount(),
-llSearchResult.release()
-));
+private Mono<Send<SearchResultKeys<T>>> transformLuceneResultWithTransformer(
+Mono<Send<LLSearchResultShard>> llSearchResultMono) {
+return llSearchResultMono.map(llSearchResultToReceive -> {
+var llSearchResult = llSearchResultToReceive.receive();
+return new SearchResultKeys<>(llSearchResult.results()
+.map(signal -> new SearchResultKey<>(Mono
+.fromCallable(signal::key)
+.map(indicizer::getKey), signal.score())),
+llSearchResult.totalHitsCount(),
+d -> llSearchResult.close()
+).send();
+});
 }
 
-private Mono<SearchResult<T, U>> transformLuceneResultWithValues(LLSearchResultShard llSearchResult,
+private Mono<Send<SearchResult<T, U>>> transformLuceneResultWithValues(
+Mono<Send<LLSearchResultShard>> llSearchResultMono,
 ValueGetter<T, U> valueGetter) {
-return Mono.fromCallable(() -> new SearchResult<>(llSearchResult.results().map(signal -> {
-var key = Mono.fromCallable(signal::key).map(indicizer::getKey);
-return new SearchResultItem<>(key, key.flatMap(valueGetter::get), signal.score());
-}), llSearchResult.totalHitsCount(), llSearchResult.release()));
+return llSearchResultMono.map(llSearchResultToReceive -> {
+var llSearchResult = llSearchResultToReceive.receive();
+return new SearchResult<>(llSearchResult.results().map(signal -> {
+var key = Mono.fromCallable(signal::key).map(indicizer::getKey);
+return new SearchResultItem<>(key, key.flatMap(valueGetter::get), signal.score());
+}), llSearchResult.totalHitsCount(), d -> llSearchResult.close()).send();
+});
 }
 
-private Mono<SearchResult<T, U>> transformLuceneResultWithTransformer(LLSearchResultShard llSearchResult,
+private Mono<Send<SearchResult<T, U>>> transformLuceneResultWithTransformer(
+Mono<Send<LLSearchResultShard>> llSearchResultMono,
 ValueTransformer<T, U> valueTransformer) {
-var scoresWithKeysFlux = llSearchResult
-.results()
-.flatMapSequential(signal -> Mono
-.fromCallable(signal::key)
-.map(indicizer::getKey)
-.map(key -> Tuples.of(signal.score(), key))
-);
-var resultItemsFlux = valueTransformer
-.transform(scoresWithKeysFlux)
-.filter(tuple3 -> tuple3.getT3().isPresent())
-.map(tuple3 -> new SearchResultItem<>(Mono.just(tuple3.getT2()),
-Mono.just(tuple3.getT3().orElseThrow()),
-tuple3.getT1()
-));
-return Mono.fromCallable(() -> new SearchResult<>(resultItemsFlux,
-llSearchResult.totalHitsCount(),
-llSearchResult.release()
-));
+return llSearchResultMono
+.map(llSearchResultToReceive -> {
+var llSearchResult = llSearchResultToReceive.receive();
+var scoresWithKeysFlux = llSearchResult
+.results()
+.flatMapSequential(signal -> Mono
+.fromCallable(signal::key)
+.map(indicizer::getKey)
+.map(key -> Tuples.of(signal.score(), key))
+);
+var resultItemsFlux = valueTransformer
+.transform(scoresWithKeysFlux)
+.filter(tuple3 -> tuple3.getT3().isPresent())
+.map(tuple3 -> new SearchResultItem<>(Mono.just(tuple3.getT2()),
+Mono.just(tuple3.getT3().orElseThrow()),
+tuple3.getT1()
+));
+return new SearchResult<>(resultItemsFlux,
+llSearchResult.totalHitsCount(),
+d -> llSearchResult.close()
+).send();
+});
 }
 
 @Override
-public Mono<SearchResultKeys<T>> moreLikeThis(ClientQueryParams<SearchResultKey<T>> queryParams,
+public Mono<Send<SearchResultKeys<T>>> moreLikeThis(ClientQueryParams<SearchResultKey<T>> queryParams,
 T key,
 U mltDocumentValue) {
 Flux<Tuple2<String, Set<String>>> mltDocumentFields
 = indicizer.getMoreLikeThisDocumentFields(key, mltDocumentValue);
 return luceneIndex
 .moreLikeThis(resolveSnapshot(queryParams.snapshot()), queryParams.toQueryParams(), indicizer.getKeyFieldName(), mltDocumentFields)
-.flatMap(this::transformLuceneResultWithTransformer);
+.transform(this::transformLuceneResultWithTransformer);
 
 }
 
 @Override
-public Mono<SearchResult<T, U>> moreLikeThisWithValues(ClientQueryParams<SearchResultItem<T, U>> queryParams,
+public Mono<Send<SearchResult<T, U>>> moreLikeThisWithValues(ClientQueryParams<SearchResultItem<T, U>> queryParams,
 T key,
 U mltDocumentValue,
 ValueGetter<T, U> valueGetter) {
@@ -147,13 +163,12 @@ public class LuceneIndexImpl<T, U> implements LuceneIndex<T, U> {
 indicizer.getKeyFieldName(),
 mltDocumentFields
 )
-.flatMap(llSearchResult -> this.transformLuceneResultWithValues(llSearchResult,
-valueGetter
-));
+.transform(llSearchResult -> this.transformLuceneResultWithValues(llSearchResult,
+valueGetter));
 }
 
 @Override
-public Mono<SearchResult<T, U>> moreLikeThisWithTransformer(ClientQueryParams<SearchResultItem<T, U>> queryParams,
+public Mono<Send<SearchResult<T, U>>> moreLikeThisWithTransformer(ClientQueryParams<SearchResultItem<T, U>> queryParams,
 T key,
 U mltDocumentValue,
 ValueTransformer<T, U> valueTransformer) {
@@ -165,40 +180,51 @@ public class LuceneIndexImpl<T, U> implements LuceneIndex<T, U> {
 indicizer.getKeyFieldName(),
 mltDocumentFields
 )
-.flatMap(llSearchResult -> this.transformLuceneResultWithTransformer(llSearchResult, valueTransformer));
+.transform(llSearchResult -> this.transformLuceneResultWithTransformer(llSearchResult,
+valueTransformer));
 }
 
 @Override
-public Mono<SearchResultKeys<T>> search(ClientQueryParams<SearchResultKey<T>> queryParams) {
+public Mono<Send<SearchResultKeys<T>>> search(ClientQueryParams<SearchResultKey<T>> queryParams) {
 return luceneIndex
 .search(resolveSnapshot(queryParams.snapshot()),
 queryParams.toQueryParams(),
 indicizer.getKeyFieldName()
 )
-.flatMap(this::transformLuceneResultWithTransformer);
+.transform(this::transformLuceneResultWithTransformer);
 }
 
 @Override
-public Mono<SearchResult<T, U>> searchWithValues(ClientQueryParams<SearchResultItem<T, U>> queryParams,
+public Mono<Send<SearchResult<T, U>>> searchWithValues(
+ClientQueryParams<SearchResultItem<T, U>> queryParams,
 ValueGetter<T, U> valueGetter) {
 return luceneIndex
-.search(resolveSnapshot(queryParams.snapshot()), queryParams.toQueryParams(), indicizer.getKeyFieldName())
-.flatMap(llSearchResult -> this.transformLuceneResultWithValues(llSearchResult, valueGetter));
+.search(resolveSnapshot(queryParams.snapshot()), queryParams.toQueryParams(),
+indicizer.getKeyFieldName())
+.transform(llSearchResult -> this.transformLuceneResultWithValues(llSearchResult,
+valueGetter));
 }
 
 @Override
-public Mono<SearchResult<T, U>> searchWithTransformer(ClientQueryParams<SearchResultItem<T, U>> queryParams,
+public Mono<Send<SearchResult<T, U>>> searchWithTransformer(
+ClientQueryParams<SearchResultItem<T, U>> queryParams,
 ValueTransformer<T, U> valueTransformer) {
 return luceneIndex
-.search(resolveSnapshot(queryParams.snapshot()), queryParams.toQueryParams(), indicizer.getKeyFieldName())
-.flatMap(llSearchResult -> this.transformLuceneResultWithTransformer(llSearchResult, valueTransformer));
+.search(resolveSnapshot(queryParams.snapshot()), queryParams.toQueryParams(),
+indicizer.getKeyFieldName())
+.transform(llSearchResult -> this.transformLuceneResultWithTransformer(llSearchResult,
+valueTransformer));
 }
 
 @Override
 public Mono<TotalHitsCount> count(@Nullable CompositeSnapshot snapshot, Query query) {
 return this
 .search(ClientQueryParams.<SearchResultKey<T>>builder().snapshot(snapshot).query(query).limit(0).build())
-.flatMap(tSearchResultKeys -> tSearchResultKeys.release().thenReturn(tSearchResultKeys.totalHitsCount()));
+.map(searchResultKeysSend -> {
+try (var searchResultKeys = searchResultKeysSend.receive()) {
+return searchResultKeys.totalHitsCount();
+}
+});
 }
 
 @Override
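Note, not part of the diff: several call sites above switch from .flatMap(this::transformLuceneResult...) to .transform(this::transformLuceneResult...), matching the helpers' new signatures that accept the whole Mono<Send<LLSearchResultShard>> rather than an emitted value. A minimal stand-alone illustration of the operator difference (Reactor assumed on the classpath; names are hypothetical):

import reactor.core.publisher.Mono;

public final class TransformDemo {
  // transform() hands the publisher itself to the function once, at
  // assembly time; flatMap() would instead invoke it per emitted value.
  static Mono<Integer> doubled(Mono<Integer> source) {
    return source.map(x -> x * 2);
  }

  public static void main(String[] args) {
    Mono.just(21)
        .transform(TransformDemo::doubled)
        .subscribe(v -> System.out.println("result: " + v)); // prints: result: 42
  }
}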
@@ -5,6 +5,7 @@ import io.net5.buffer.api.Send;
 import it.cavallium.dbengine.database.serialization.SerializationException;
 import it.cavallium.dbengine.database.serialization.Serializer;
 import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
 
 public class MappedSerializer<A, B> implements Serializer<B> {
 

@@ -5,6 +5,7 @@ import io.net5.buffer.api.Send;
 import it.cavallium.dbengine.database.serialization.SerializationException;
 import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
 import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
 
 public class MappedSerializerFixedLength<A, B> implements SerializerFixedBinaryLength<B> {
 
@@ -1,42 +1,31 @@
 package it.cavallium.dbengine.client;
 
+import io.net5.buffer.api.Drop;
+import io.net5.buffer.api.Owned;
+import io.net5.buffer.api.internal.ResourceSupport;
 import it.cavallium.dbengine.client.query.current.data.TotalHitsCount;
 import it.cavallium.dbengine.database.LLSearchResultShard;
+import it.cavallium.dbengine.database.LiveResourceSupport;
 import java.util.Objects;
 import org.warp.commonutils.log.Logger;
 import org.warp.commonutils.log.LoggerFactory;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
 
-public final class SearchResult<T, U> {
+public final class SearchResult<T, U> extends LiveResourceSupport<SearchResult<T, U>, SearchResult<T, U>> {
 
-private static final Logger logger = LoggerFactory.getLogger(SearchResult.class);
-
-private volatile boolean releaseCalled;
-
-private final Flux<SearchResultItem<T, U>> results;
-private final TotalHitsCount totalHitsCount;
-private final Mono<Void> release;
-
-public SearchResult(Flux<SearchResultItem<T, U>> results, TotalHitsCount totalHitsCount, Mono<Void> release) {
+private Flux<SearchResultItem<T, U>> results;
+private TotalHitsCount totalHitsCount;
+
+public SearchResult(Flux<SearchResultItem<T, U>> results, TotalHitsCount totalHitsCount,
+Drop<SearchResult<T, U>> drop) {
+super(drop);
 this.results = results;
 this.totalHitsCount = totalHitsCount;
-this.release = Mono.fromRunnable(() -> {
-if (releaseCalled) {
-logger.warn(this.getClass().getName() + "::release has been called twice!");
-}
-releaseCalled = true;
-}).then(release);
 }
 
 public static <T, U> SearchResult<T, U> empty() {
-var sr = new SearchResult<T, U>(Flux.empty(), TotalHitsCount.of(0, true), Mono.empty());
-sr.releaseCalled = true;
-return sr;
-}
+return new SearchResult<T, U>(Flux.empty(), TotalHitsCount.of(0, true), d -> {});
 
-public Flux<SearchResultItem<T, U>> resultsThenRelease() {
-return Flux.usingWhen(Mono.just(true), _unused -> results, _unused -> release);
 }
 
 public Flux<SearchResultItem<T, U>> results() {
@@ -47,39 +36,25 @@ public final class SearchResult<T, U> {
 return totalHitsCount;
 }
 
-public Mono<Void> release() {
-return release;
-}
-
-@Override
-public boolean equals(Object obj) {
-if (obj == this)
-return true;
-if (obj == null || obj.getClass() != this.getClass())
-return false;
-var that = (SearchResult) obj;
-return Objects.equals(this.results, that.results) && Objects.equals(this.totalHitsCount, that.totalHitsCount)
-&& Objects.equals(this.release, that.release);
-}
-
-@Override
-public int hashCode() {
-return Objects.hash(results, totalHitsCount, release);
-}
-
 @Override
 public String toString() {
-return "SearchResult[" + "results=" + results + ", " + "totalHitsCount=" + totalHitsCount + ", " + "release="
-+ release + ']';
+return "SearchResult[" + "results=" + results + ", " + "totalHitsCount=" + totalHitsCount + ']';
 }
 
-@SuppressWarnings("deprecation")
 @Override
-protected void finalize() throws Throwable {
-if (!releaseCalled) {
-logger.warn(this.getClass().getName() + "::release has not been called before class finalization!");
-}
-super.finalize();
+protected RuntimeException createResourceClosedException() {
+return new IllegalStateException("Closed");
 }
 
+@Override
+protected Owned<SearchResult<T, U>> prepareSend() {
+var results = this.results;
+var totalHitsCount = this.totalHitsCount;
+return drop -> new SearchResult<>(results, totalHitsCount, drop);
+}
+
+protected void makeInaccessible() {
+this.results = null;
+this.totalHitsCount = null;
+}
 }
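Note, not part of the diff: SearchResult now delegates its lifecycle to the netty5-style ResourceSupport model instead of a hand-rolled release() Mono — send() calls prepareSend() to detach the state into an Owned factory, and the Drop given to the constructor runs when the last owner closes the resource. A hedged sketch of the same pattern on a toy type, modeled directly on the methods shown above (whether the io.net5 fork demands further overrides is not shown here):

import io.net5.buffer.api.Drop;
import io.net5.buffer.api.Owned;
import io.net5.buffer.api.internal.ResourceSupport;

// Toy resource following the SearchResult lifecycle: state is handed to a
// fresh instance on send(), and the Drop observes the final close().
final class ToyResource extends ResourceSupport<ToyResource, ToyResource> {
  private String payload;

  ToyResource(String payload, Drop<ToyResource> drop) {
    super(drop);
    this.payload = payload;
  }

  String payload() {
    return payload;
  }

  @Override
  protected RuntimeException createResourceClosedException() {
    return new IllegalStateException("Closed");
  }

  @Override
  protected Owned<ToyResource> prepareSend() {
    var payload = this.payload;
    this.payload = null; // make the old owner's copy inaccessible
    return drop -> new ToyResource(payload, drop);
  }
}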
@@ -1,7 +1,11 @@
 package it.cavallium.dbengine.client;
 
+import io.net5.buffer.api.Drop;
+import io.net5.buffer.api.Owned;
+import io.net5.buffer.api.internal.ResourceSupport;
 import it.cavallium.dbengine.client.query.current.data.TotalHitsCount;
 import it.cavallium.dbengine.database.LLSearchResultShard;
+import it.cavallium.dbengine.database.LiveResourceSupport;
 import it.cavallium.dbengine.database.collections.ValueGetter;
 import java.util.Objects;
 import org.reactivestreams.Publisher;
@@ -11,42 +15,29 @@ import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
 
 @SuppressWarnings("unused")
-public final class SearchResultKeys<T> {
+public final class SearchResultKeys<T> extends LiveResourceSupport<SearchResultKeys<T>, SearchResultKeys<T>> {
 
 private static final Logger logger = LoggerFactory.getLogger(SearchResultKeys.class);
 
-private volatile boolean releaseCalled;
-
-private final Flux<SearchResultKey<T>> results;
-private final TotalHitsCount totalHitsCount;
-private final Mono<Void> release;
-
-public SearchResultKeys(Flux<SearchResultKey<T>> results, TotalHitsCount totalHitsCount, Mono<Void> release) {
+private Flux<SearchResultKey<T>> results;
+private TotalHitsCount totalHitsCount;
+
+public SearchResultKeys(Flux<SearchResultKey<T>> results, TotalHitsCount totalHitsCount,
+Drop<SearchResultKeys<T>> drop) {
+super(drop);
 this.results = results;
 this.totalHitsCount = totalHitsCount;
-this.release = Mono.fromRunnable(() -> {
-if (releaseCalled) {
-logger.warn(this.getClass().getName() + "::release has been called twice!");
-}
-releaseCalled = true;
-}).then(release);
 }
 
 public static <T> SearchResultKeys<T> empty() {
-var sr = new SearchResultKeys<T>(Flux.empty(), TotalHitsCount.of(0, true), Mono.empty());
-sr.releaseCalled = true;
-return sr;
+return new SearchResultKeys<T>(Flux.empty(), TotalHitsCount.of(0, true), d -> {});
 }
 
 public <U> SearchResult<T, U> withValues(ValueGetter<T, U> valuesGetter) {
 return new SearchResult<>(results.map(item -> new SearchResultItem<>(item.key(),
 item.key().flatMap(valuesGetter::get),
 item.score()
-)), totalHitsCount, release);
-}
+)), totalHitsCount, d -> this.close());
 
-public Flux<SearchResultKey<T>> resultsThenRelease() {
-return Flux.usingWhen(Mono.just(true), _unused -> results, _unused -> release);
 }
 
 public Flux<SearchResultKey<T>> results() {
@@ -57,39 +48,27 @@ public final class SearchResultKeys<T> {
 return totalHitsCount;
 }
 
-public Mono<Void> release() {
-return release;
-}
-
-@Override
-public boolean equals(Object obj) {
-if (obj == this)
-return true;
-if (obj == null || obj.getClass() != this.getClass())
-return false;
-var that = (SearchResultKeys) obj;
-return Objects.equals(this.results, that.results) && Objects.equals(this.totalHitsCount, that.totalHitsCount)
-&& Objects.equals(this.release, that.release);
-}
-
-@Override
-public int hashCode() {
-return Objects.hash(results, totalHitsCount, release);
-}
-
 @Override
 public String toString() {
-return "SearchResultKeys[" + "results=" + results + ", " + "totalHitsCount=" + totalHitsCount + ", " + "release="
-+ release + ']';
+return "SearchResultKeys[" + "results=" + results + ", " + "totalHitsCount=" + totalHitsCount + ']';
 }
 
-@SuppressWarnings("deprecation")
 @Override
-protected void finalize() throws Throwable {
-if (!releaseCalled) {
-logger.warn(this.getClass().getName() + "::release has not been called before class finalization!");
-}
-super.finalize();
+protected RuntimeException createResourceClosedException() {
+return new IllegalStateException("Closed");
 }
 
+@Override
+protected Owned<SearchResultKeys<T>> prepareSend() {
+var results = this.results;
+var totalHitsCount = this.totalHitsCount;
+makeInaccessible();
+return drop -> new SearchResultKeys<>(results, totalHitsCount, drop);
+}
+
+protected void makeInaccessible() {
+this.results = null;
+this.totalHitsCount = null;
 }
 
 }
@@ -8,7 +8,7 @@ import io.net5.buffer.api.internal.ResourceSupport;
 import java.util.StringJoiner;
 import org.jetbrains.annotations.Nullable;
 
-public class LLDelta extends ResourceSupport<LLDelta, LLDelta> {
+public class LLDelta extends LiveResourceSupport<LLDelta, LLDelta> {
 @Nullable
 private final Buffer previous;
 @Nullable

@@ -7,14 +7,15 @@ import io.net5.buffer.api.Send;
 import io.net5.buffer.api.internal.ResourceSupport;
 import java.util.StringJoiner;
 import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
 
-public class LLEntry extends ResourceSupport<LLEntry, LLEntry> {
+public class LLEntry extends LiveResourceSupport<LLEntry, LLEntry> {
 @NotNull
 private final Buffer key;
 @NotNull
 private final Buffer value;
 
-private LLEntry(Send<Buffer> key, Send<Buffer> value, Drop<LLEntry> drop) {
+private LLEntry(@NotNull Send<Buffer> key, @NotNull Send<Buffer> value, Drop<LLEntry> drop) {
 super(new LLEntry.CloseOnDrop(drop));
 this.key = key.receive().makeReadOnly();
 this.value = value.receive().makeReadOnly();
@@ -29,7 +30,7 @@ public class LLEntry extends ResourceSupport<LLEntry, LLEntry> {
 return true;
 }
 
-public static LLEntry of(Send<Buffer> key, Send<Buffer> value) {
+public static LLEntry of(@NotNull Send<Buffer> key, @NotNull Send<Buffer> value) {
 return new LLEntry(key, value, d -> {});
 }
 
@@ -1,5 +1,6 @@
 package it.cavallium.dbengine.database;
 
+import io.net5.buffer.api.Send;
 import it.cavallium.data.generator.nativedata.Nullablefloat;
 import it.cavallium.dbengine.client.query.current.data.NoSort;
 import it.cavallium.dbengine.client.query.current.data.Query;
@@ -40,7 +41,7 @@ public interface LLLuceneIndex extends LLSnapshottable {
 * The additional query will be used with the moreLikeThis query: "mltQuery AND additionalQuery"
 * @return the collection has one or more flux
 */
-Mono<LLSearchResultShard> moreLikeThis(@Nullable LLSnapshot snapshot,
+Mono<Send<LLSearchResultShard>> moreLikeThis(@Nullable LLSnapshot snapshot,
 QueryParams queryParams,
 String keyFieldName,
 Flux<Tuple2<String, Set<String>>> mltDocumentFields);
@@ -50,13 +51,18 @@ public interface LLLuceneIndex extends LLSnapshottable {
 * returned can be at most <code>limit * 15</code>
 * @return the collection has one or more flux
 */
-Mono<LLSearchResultShard> search(@Nullable LLSnapshot snapshot, QueryParams queryParams, String keyFieldName);
+Mono<Send<LLSearchResultShard>> search(@Nullable LLSnapshot snapshot, QueryParams queryParams, String keyFieldName);
 
 default Mono<TotalHitsCount> count(@Nullable LLSnapshot snapshot, Query query) {
 QueryParams params = QueryParams.of(query, 0, 0, Nullablefloat.empty(), NoSort.of(), ScoreMode.of(false, false));
 return Mono.from(this.search(snapshot, params, null)
-.flatMap(llSearchResultShard -> llSearchResultShard.release().thenReturn(llSearchResultShard.totalHitsCount()))
-.defaultIfEmpty(TotalHitsCount.of(0, true)));
+.map(llSearchResultShardToReceive -> {
+try (var llSearchResultShard = llSearchResultShardToReceive.receive()) {
+return llSearchResultShard.totalHitsCount();
+}
+})
+.defaultIfEmpty(TotalHitsCount.of(0, true))
+).doOnDiscard(Send.class, Send::close);
 }
 
 boolean isLowMemoryMode();
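Note, not part of the diff: the trailing .doOnDiscard(Send.class, Send::close) in count() is a leak guard — if the subscriber cancels after the Send was emitted but before map consumed it, Reactor routes the orphaned value through the discard hook instead of silently dropping it. A minimal stand-alone illustration with a plain value (toy types, not dbengine classes):

import java.util.concurrent.atomic.AtomicBoolean;
import reactor.core.publisher.Mono;

public final class DiscardDemo {
  public static void main(String[] args) {
    AtomicBoolean discarded = new AtomicBoolean();
    Mono.just("orphaned-resource")
        .filter(v -> false) // the value is dropped by this operator...
        .doOnDiscard(String.class, v -> discarded.set(true)) // ...and lands here
        .subscribe();
    System.out.println("discard hook ran: " + discarded.get()); // prints: true
  }
}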
@@ -12,7 +12,7 @@ import java.util.StringJoiner;
 /**
 * Range of data, from min (inclusive),to max (exclusive)
 */
-public class LLRange extends ResourceSupport<LLRange, LLRange> {
+public class LLRange extends LiveResourceSupport<LLRange, LLRange> {
 
 private static final LLRange RANGE_ALL = new LLRange(null, null, null, d -> {});
 private Buffer min;
@@ -193,11 +193,10 @@ public class LLRange extends ResourceSupport<LLRange, LLRange> {
 minSend = this.min != null ? this.min.send() : null;
 maxSend = this.max != null ? this.max.send() : null;
 singleSend = this.single != null ? this.single.send() : null;
-this.makeInaccessible();
 return drop -> new LLRange(minSend, maxSend, singleSend, drop);
 }
 
-private void makeInaccessible() {
+protected void makeInaccessible() {
 this.min = null;
 this.max = null;
 this.single = null;
@@ -213,10 +212,9 @@ public class LLRange extends ResourceSupport<LLRange, LLRange> {
 
 @Override
 public void drop(LLRange obj) {
-if (obj.min != null) obj.min.close();
-if (obj.max != null) obj.max.close();
-if (obj.single != null) obj.single.close();
-obj.makeInaccessible();
+if (obj.min != null && obj.min.isAccessible()) obj.min.close();
+if (obj.max != null && obj.max.isAccessible()) obj.max.close();
+if (obj.single != null && obj.single.isAccessible()) obj.single.close();
 delegate.drop(obj);
 }
 }
@@ -1,46 +1,41 @@
 package it.cavallium.dbengine.database;
 
+import io.net5.buffer.api.Drop;
+import io.net5.buffer.api.Owned;
+import io.net5.buffer.api.internal.ResourceSupport;
 import it.cavallium.dbengine.client.query.current.data.TotalHitsCount;
-import it.cavallium.dbengine.lucene.searcher.LuceneSearchResult;
 import java.util.Objects;
 import org.warp.commonutils.log.Logger;
 import org.warp.commonutils.log.LoggerFactory;
 import reactor.core.publisher.Flux;
-import reactor.core.publisher.Mono;
 
-public final class LLSearchResultShard {
+public final class LLSearchResultShard extends LiveResourceSupport<LLSearchResultShard, LLSearchResultShard> {
 
 private static final Logger logger = LoggerFactory.getLogger(LLSearchResultShard.class);
 
-private volatile boolean releaseCalled;
-
-private final Flux<LLKeyScore> results;
-private final TotalHitsCount totalHitsCount;
-private final Mono<Void> release;
-
-public LLSearchResultShard(Flux<LLKeyScore> results, TotalHitsCount totalHitsCount, Mono<Void> release) {
+private Flux<LLKeyScore> results;
+private TotalHitsCount totalHitsCount;
+
+public LLSearchResultShard(Flux<LLKeyScore> results, TotalHitsCount totalHitsCount, Drop<LLSearchResultShard> drop) {
+super(drop);
 this.results = results;
 this.totalHitsCount = totalHitsCount;
-this.release = Mono.fromRunnable(() -> {
-if (releaseCalled) {
-logger.warn(this.getClass().getName() + "::release has been called twice!");
-}
-releaseCalled = true;
-}).then(release);
 }
 
 public Flux<LLKeyScore> results() {
+if (!isOwned()) {
+throw attachTrace(new IllegalStateException("LLSearchResultShard must be owned to be used"));
+}
 return results;
 }
 
 public TotalHitsCount totalHitsCount() {
+if (!isOwned()) {
+throw attachTrace(new IllegalStateException("LLSearchResultShard must be owned to be used"));
+}
 return totalHitsCount;
 }
 
-public Mono<Void> release() {
-return release;
-}
-
 @Override
 public boolean equals(Object obj) {
 if (obj == this)
@@ -48,28 +43,33 @@ public final class LLSearchResultShard {
 if (obj == null || obj.getClass() != this.getClass())
 return false;
 var that = (LLSearchResultShard) obj;
-return Objects.equals(this.results, that.results) && Objects.equals(this.totalHitsCount, that.totalHitsCount)
-&& Objects.equals(this.release, that.release);
+return Objects.equals(this.results, that.results) && Objects.equals(this.totalHitsCount, that.totalHitsCount);
 }
 
 @Override
 public int hashCode() {
-return Objects.hash(results, totalHitsCount, release);
+return Objects.hash(results, totalHitsCount);
 }
 
 @Override
 public String toString() {
-return "LLSearchResultShard[" + "results=" + results + ", " + "totalHitsCount=" + totalHitsCount + ", " + "release="
-+ release + ']';
+return "LLSearchResultShard[" + "results=" + results + ", " + "totalHitsCount=" + totalHitsCount + ']';
 }
 
-@SuppressWarnings("deprecation")
 @Override
-protected void finalize() throws Throwable {
-if (!releaseCalled) {
-logger.warn(this.getClass().getName() + "::release has not been called before class finalization!");
-}
-super.finalize();
+protected RuntimeException createResourceClosedException() {
+return new IllegalStateException("Closed");
 }
 
+@Override
+protected Owned<LLSearchResultShard> prepareSend() {
+var results = this.results;
+var totalHitsCount = this.totalHitsCount;
+return drop -> new LLSearchResultShard(results, totalHitsCount, drop);
+}
+
+protected void makeInaccessible() {
+this.results = null;
+this.totalHitsCount = null;
+}
 }
@@ -1,10 +1,13 @@
 package it.cavallium.dbengine.database;
 
+import static org.apache.commons.lang3.ArrayUtils.EMPTY_BYTE_ARRAY;
+
 import com.google.common.primitives.Ints;
 import com.google.common.primitives.Longs;
 import io.net5.buffer.api.Buffer;
 import io.net5.buffer.api.BufferAllocator;
 import io.net5.buffer.api.CompositeBuffer;
+import io.net5.buffer.api.Resource;
 import io.net5.buffer.api.Send;
 import io.net5.util.IllegalReferenceCountException;
 import io.net5.util.internal.PlatformDependent;
@@ -15,6 +18,8 @@ import it.cavallium.dbengine.database.serialization.SerializationFunction;
 import it.cavallium.dbengine.lucene.RandomSortField;
 import java.nio.ByteBuffer;
 import java.nio.charset.Charset;
+import java.nio.charset.CharsetEncoder;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
@@ -24,6 +29,7 @@ import java.util.Objects;
 import java.util.Optional;
 import java.util.concurrent.Callable;
 import java.util.concurrent.atomic.AtomicLong;
+import java.util.function.Function;
 import java.util.function.ToIntFunction;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
@@ -35,6 +41,7 @@ import org.apache.lucene.document.StringField;
 import org.apache.lucene.document.TextField;
 import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.index.Term;
+import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.ScoreMode;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.SortField;
@@ -48,6 +55,7 @@ import org.slf4j.Marker;
 import org.slf4j.MarkerFactory;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
+import reactor.core.scheduler.Schedulers;
 import reactor.util.function.Tuple2;
 import reactor.util.function.Tuple3;
 
@@ -173,9 +181,9 @@ public class LLUtils {
 return new it.cavallium.dbengine.database.LLKeyScore(hit.docId(), hit.score(), hit.key());
 }
 
-public static String toStringSafe(Buffer key) {
+public static String toStringSafe(@Nullable Buffer key) {
 try {
-if (key.isAccessible()) {
+if (key == null || key.isAccessible()) {
 return toString(key);
 } else {
 return "(released)";
@@ -185,7 +193,35 @@ public class LLUtils {
 }
 }
 
-public static String toString(Buffer key) {
+public static String toStringSafe(@Nullable LLRange range) {
+try {
+if (range == null || range.isAccessible()) {
+return toString(range);
+} else {
+return "(released)";
+}
+} catch (IllegalReferenceCountException ex) {
+return "(released)";
+}
+}
+
+public static String toString(@Nullable LLRange range) {
+if (range == null) {
+return "null";
+} else if (range.isAll()) {
+return "ξ";
+} else if (range.hasMin() && range.hasMax()) {
+return "[" + toStringSafe(range.getMinUnsafe()) + "," + toStringSafe(range.getMaxUnsafe()) + ")";
+} else if (range.hasMin()) {
+return "[" + toStringSafe(range.getMinUnsafe()) + ",*)";
+} else if (range.hasMax()) {
+return "[*," + toStringSafe(range.getMaxUnsafe()) + ")";
+} else {
+return "∅";
+}
+}
+
+public static String toString(@Nullable Buffer key) {
 if (key == null) {
 return "null";
 } else {
@@ -195,20 +231,37 @@ public class LLUtils {
 if (iMax <= -1) {
 return "[]";
 } else {
-StringBuilder b = new StringBuilder();
-b.append('[');
+StringBuilder arraySB = new StringBuilder();
+StringBuilder asciiSB = new StringBuilder();
+boolean isAscii = true;
+arraySB.append('[');
 int i = 0;
 
 while (true) {
-b.append(key.getByte(startIndex + i));
+var byteVal = key.getUnsignedByte(startIndex + i);
+arraySB.append(byteVal);
+if (isAscii) {
+if (byteVal >= 32 && byteVal < 127) {
+asciiSB.append((char) byteVal);
+} else if (byteVal == 0) {
+asciiSB.append('␀');
+} else {
+isAscii = false;
+asciiSB = null;
+}
+}
 if (i == iLimit) {
-b.append("…");
+arraySB.append("…");
 }
 if (i == iMax || i == iLimit) {
-return b.append(']').toString();
+if (isAscii) {
+return asciiSB.insert(0, "\"").append("\"").toString();
+} else {
+return arraySB.append(']').toString();
+}
 }
 
-b.append(", ");
+arraySB.append(", ");
 ++i;
 }
 }
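Note, not part of the diff: with the change above, LLUtils.toString(Buffer) renders a key whose bytes are all printable ASCII as a quoted string (NUL shown as '␀'), and falls back to the previous numeric [..] form as soon as a non-printable byte appears, which makes logged keys considerably easier to scan.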
@ -257,7 +310,10 @@ public class LLUtils {
|
|||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
public static byte[] toArray(Buffer key) {
|
public static byte[] toArray(@Nullable Buffer key) {
|
||||||
|
if (key == null) {
|
||||||
|
return EMPTY_BYTE_ARRAY;
|
||||||
|
}
|
||||||
byte[] array = new byte[key.readableBytes()];
|
byte[] array = new byte[key.readableBytes()];
|
||||||
key.copyInto(key.readerOffset(), array, 0, key.readableBytes());
|
key.copyInto(key.readerOffset(), array, 0, key.readableBytes());
|
||||||
return array;
|
return array;
|
||||||
@ -291,7 +347,7 @@ public class LLUtils {
|
|||||||
*/
|
*/
|
||||||
@SuppressWarnings("ConstantConditions")
|
@SuppressWarnings("ConstantConditions")
|
||||||
@Nullable
|
@Nullable
|
||||||
public static Send<Buffer> readNullableDirectNioBuffer(BufferAllocator alloc, ToIntFunction<ByteBuffer> reader) {
|
public static Buffer readNullableDirectNioBuffer(BufferAllocator alloc, ToIntFunction<ByteBuffer> reader) {
|
||||||
ByteBuffer directBuffer;
|
ByteBuffer directBuffer;
|
||||||
Buffer buffer;
|
Buffer buffer;
|
||||||
{
|
{
|
||||||
@ -308,7 +364,7 @@ public class LLUtils {
|
|||||||
if (size != RocksDB.NOT_FOUND) {
|
if (size != RocksDB.NOT_FOUND) {
|
||||||
if (size == directBuffer.limit()) {
|
if (size == directBuffer.limit()) {
|
||||||
buffer.readerOffset(0).writerOffset(size);
|
buffer.readerOffset(0).writerOffset(size);
|
||||||
return buffer.send();
|
return buffer;
|
||||||
} else {
|
} else {
|
||||||
assert size > directBuffer.limit();
|
assert size > directBuffer.limit();
|
||||||
assert directBuffer.limit() > 0;
|
assert directBuffer.limit() > 0;
|
||||||
@ -318,7 +374,7 @@ public class LLUtils {
|
|||||||
PlatformDependent.freeDirectBuffer(directBuffer);
|
PlatformDependent.freeDirectBuffer(directBuffer);
|
||||||
directBuffer = null;
|
directBuffer = null;
|
||||||
}
|
}
|
||||||
directBuffer = LLUtils.obtainDirect(buffer);
|
directBuffer = LLUtils.obtainDirect(buffer, true);
|
||||||
buffer.ensureWritable(size);
|
buffer.ensureWritable(size);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -333,7 +389,131 @@ public class LLUtils {
|
|||||||
PlatformDependent.freeDirectBuffer(directBuffer);
|
PlatformDependent.freeDirectBuffer(directBuffer);
|
||||||
directBuffer = null;
|
directBuffer = null;
|
||||||
}
|
}
|
||||||
buffer.close();
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public static void ensureBlocking() {
|
||||||
|
if (Schedulers.isInNonBlockingThread()) {
|
||||||
|
throw new UnsupportedOperationException("Called collect in a nonblocking thread");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* cleanup resource
|
||||||
|
* @param cleanupOnSuccess if true the resource will be cleaned up if the function is successful
|
||||||
|
*/
|
||||||
|
public static <U, T extends Resource<T>> Mono<U> usingSend(Mono<Send<T>> resourceSupplier,
|
||||||
|
Function<Send<T>, Mono<U>> resourceClosure,
|
||||||
|
boolean cleanupOnSuccess) {
|
||||||
|
		return Mono.usingWhen(resourceSupplier, resourceClosure, r -> {
			if (cleanupOnSuccess) {
				return Mono.fromRunnable(() -> r.close());
			} else {
				return Mono.empty();
			}
		}, (r, ex) -> Mono.fromRunnable(() -> r.close()), r -> Mono.fromRunnable(() -> r.close()))
				.doOnDiscard(Send.class, send -> send.close());
	}

	/**
	 * Applies the closure to the resource, then cleans the resource up.
	 *
	 * @param cleanupOnSuccess if true, the resource is cleaned up also when the closure completes successfully
	 */
	public static <U, T extends Resource<T>, V extends T> Mono<U> usingResource(Mono<V> resourceSupplier,
			Function<V, Mono<U>> resourceClosure,
			boolean cleanupOnSuccess) {
		return Mono.usingWhen(resourceSupplier, resourceClosure, r -> {
			if (cleanupOnSuccess) {
				return Mono.fromRunnable(() -> r.close());
			} else {
				return Mono.empty();
			}
		}, (r, ex) -> Mono.fromRunnable(() -> r.close()), r -> Mono.fromRunnable(() -> r.close()))
				.doOnDiscard(Resource.class, resource -> resource.close())
				.doOnDiscard(Send.class, send -> send.close());
	}

	/**
	 * Applies the closure to each resource of the flux, then cleans each resource up.
	 *
	 * @param cleanupOnSuccess if true, each resource is cleaned up also when its closure completes successfully
	 */
	public static <U, T extends Resource<T>, V extends T> Flux<U> usingEachResource(Flux<V> resourceSupplier,
			Function<V, Mono<U>> resourceClosure,
			boolean cleanupOnSuccess) {
		return resourceSupplier
				.concatMap(resource -> Mono.usingWhen(Mono.just(resource), resourceClosure, r -> {
					if (cleanupOnSuccess) {
						return Mono.fromRunnable(() -> r.close());
					} else {
						return Mono.empty();
					}
				}, (r, ex) -> Mono.fromRunnable(() -> r.close()), r -> Mono.fromRunnable(() -> r.close())))
				.doOnDiscard(Resource.class, resource -> resource.close())
				.doOnDiscard(Send.class, send -> send.close());
	}

	/**
	 * Receives the sent resource, applies the closure to it, then cleans the resource up.
	 *
	 * @param cleanupOnSuccess if true, the resource is cleaned up also when the closure completes successfully
	 */
	public static <U, T extends Resource<T>> Mono<U> usingSendResource(Mono<Send<T>> resourceSupplier,
			Function<T, Mono<U>> resourceClosure,
			boolean cleanupOnSuccess) {
		return Mono.usingWhen(resourceSupplier.map(Send::receive), resourceClosure, r -> {
			if (cleanupOnSuccess) {
				return Mono.fromRunnable(() -> r.close());
			} else {
				return Mono.empty();
			}
		}, (r, ex) -> Mono.fromRunnable(() -> r.close()), r -> Mono.fromRunnable(() -> r.close()))
				.doOnDiscard(Resource.class, resource -> resource.close())
				.doOnDiscard(Send.class, send -> send.close());
	}

	/**
	 * Receives the sent resource, applies the flux closure to it, then cleans the resource up.
	 *
	 * @param cleanupOnSuccess if true, the resource is cleaned up also when the closure completes successfully
	 */
	public static <U, T extends Resource<T>> Flux<U> usingSendResources(Mono<Send<T>> resourceSupplier,
			Function<T, Flux<U>> resourceClosure,
			boolean cleanupOnSuccess) {
		return Flux.usingWhen(resourceSupplier.map(Send::receive), resourceClosure, r -> {
			if (cleanupOnSuccess) {
				return Mono.fromRunnable(() -> r.close());
			} else {
				return Mono.empty();
			}
		}, (r, ex) -> Mono.fromRunnable(() -> r.close()), r -> Mono.fromRunnable(() -> r.close()))
				.doOnDiscard(Resource.class, resource -> resource.close())
				.doOnDiscard(Send.class, send -> send.close());
	}

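For orientation, a brief usage sketch of the helpers above. This is not part of the commit; the allocator wiring and the readLength name are assumptions made only for illustration.

    // Hypothetical usage sketch: run a read against a buffer that travels as a
    // Send<Buffer>, closing it afterwards on success, error and cancellation.
    import io.net5.buffer.api.Buffer;
    import io.net5.buffer.api.BufferAllocator;
    import io.net5.buffer.api.Send;
    import reactor.core.publisher.Mono;

    class UsingResourceExample {
        static Mono<Integer> readLength(BufferAllocator alloc) {
            Mono<Send<Buffer>> bufferMono = Mono.fromCallable(() -> {
                Buffer buf = alloc.allocate(8);
                buf.writeLong(42L);
                return buf.send(); // ownership travels with the Send
            });
            // cleanupOnSuccess = true: the buffer is closed in every termination case
            return LLUtils.usingSendResource(bufferMono, buf -> Mono.just(buf.readableBytes()), true);
        }
    }
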
	public static boolean isSet(ScoreDoc[] scoreDocs) {
		// True only when every slot of the array has been filled in
		for (ScoreDoc scoreDoc : scoreDocs) {
			if (scoreDoc == null) {
				return false;
			}
		}
		return true;
	}

	public static Send<Buffer> empty(BufferAllocator allocator) {
		try {
			return allocator.allocate(0).send();
		} catch (Exception ex) {
			// Some allocators refuse zero-sized allocations: fall back to an empty composite buffer
			try (var empty = CompositeBuffer.compose(allocator)) {
				assert empty.readableBytes() == 0;
				assert empty.capacity() == 0;
				return empty.send();
			}
		}
	}

	public static Send<Buffer> copy(BufferAllocator allocator, Buffer buf) {
		if (CompositeBuffer.isComposite(buf) && buf.capacity() == 0) {
			return empty(allocator);
		} else {
			return buf.copy().send();
		}
	}

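A short, hypothetical usage sketch for empty() and copy(); again not part of the commit, and the buffer contents are invented:

    // Hypothetical sketch: empty() always yields a zero-byte buffer; copy()
    // produces an independent copy of the readable bytes of a normal buffer.
    import io.net5.buffer.api.Buffer;
    import io.net5.buffer.api.BufferAllocator;

    class EmptyCopyExample {
        static void demo(BufferAllocator alloc) {
            try (Buffer empty = LLUtils.empty(alloc).receive()) {
                assert empty.readableBytes() == 0; // always a zero-byte buffer
            }
            try (Buffer buf = alloc.allocate(4)) {
                buf.writeInt(7);
                try (Buffer copied = LLUtils.copy(alloc, buf).receive()) {
                    assert copied.readableBytes() == 4; // independent copy
                }
            }
        }
    }
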
@@ -342,29 +522,33 @@ public class LLUtils {
 	@NotNull
 	public static DirectBuffer newDirect(BufferAllocator allocator, int size) {
 		try (var buf = allocator.allocate(size)) {
-			var direct = obtainDirect(buf);
+			var direct = obtainDirect(buf, true);
 			return new DirectBuffer(buf.send(), direct);
 		}
 	}
 
 	@NotNull
-	public static DirectBuffer convertToDirect(BufferAllocator allocator, Send<Buffer> content) {
+	public static DirectBuffer convertToReadableDirect(BufferAllocator allocator, Send<Buffer> content) {
 		try (var buf = content.receive()) {
-			if (buf.countComponents() != 0) {
-				var direct = obtainDirect(buf);
-				return new DirectBuffer(buf.send(), direct);
+			DirectBuffer result;
+			if (buf.countComponents() == 1) {
+				var direct = obtainDirect(buf, false);
+				result = new DirectBuffer(buf.send(), direct);
 			} else {
 				var direct = newDirect(allocator, buf.readableBytes());
 				try (var buf2 = direct.buffer().receive()) {
 					buf.copyInto(buf.readerOffset(), buf2, buf2.writerOffset(), buf.readableBytes());
-					return new DirectBuffer(buf2.send(), direct.byteBuffer());
+					buf2.writerOffset(buf2.writerOffset() + buf.readableBytes());
+					assert buf2.readableBytes() == buf.readableBytes();
+					result = new DirectBuffer(buf2.send(), direct.byteBuffer());
 				}
 			}
+			return result;
 		}
 	}
 
 	@NotNull
-	public static ByteBuffer obtainDirect(Buffer buffer) {
+	public static ByteBuffer obtainDirect(Buffer buffer, boolean writable) {
 		if (!PlatformDependent.hasUnsafe()) {
 			throw new UnsupportedOperationException("Please enable unsafe support or disable netty direct buffers",
 					PlatformDependent.getUnsafeUnavailabilityCause()
@@ -372,15 +556,33 @@ public class LLUtils {
 		}
 		if (!MemorySegmentUtils.isSupported()) {
 			throw new UnsupportedOperationException("Foreign Memory Access API support is disabled."
-					+ " Please set \"--enable-preview --add-modules jdk.incubator.foreign -Dforeign.restricted=permit\"");
+					+ " Please set \"" + MemorySegmentUtils.getSuggestedArgs() + "\"",
+					MemorySegmentUtils.getUnsupportedCause()
+			);
 		}
 		assert buffer.isAccessible();
+		buffer.compact();
+		assert buffer.readerOffset() == 0;
 		AtomicLong nativeAddress = new AtomicLong(0);
-		if (buffer.countComponents() == 1 && buffer.countReadableComponents() == 1) {
-			buffer.forEachReadable(0, (i, c) -> {
-				nativeAddress.setPlain(c.readableNativeAddress());
-				return false;
-			});
+		if (buffer.countComponents() == 1) {
+			if (writable) {
+				if (buffer.countWritableComponents() == 1) {
+					buffer.forEachWritable(0, (i, c) -> {
+						assert c.writableNativeAddress() != 0;
+						nativeAddress.setPlain(c.writableNativeAddress());
+						return false;
+					});
+				}
+			} else {
+				var readableComponents = buffer.countReadableComponents();
+				if (readableComponents == 1) {
+					buffer.forEachReadable(0, (i, c) -> {
+						assert c.readableNativeAddress() != 0;
+						nativeAddress.setPlain(c.readableNativeAddress());
+						return false;
+					});
+				}
+			}
 		}
 		if (nativeAddress.getPlain() == 0) {
 			if (buffer.capacity() == 0) {
@@ -391,7 +593,7 @@ public class LLUtils {
 			}
 			throw new IllegalStateException("Buffer is not direct");
 		}
-		return MemorySegmentUtils.directBuffer(nativeAddress.getPlain(), buffer.capacity());
+		return MemorySegmentUtils.directBuffer(nativeAddress.getPlain(), writable ? buffer.capacity() : buffer.writerOffset());
 	}
 
 	public static Buffer fromByteArray(BufferAllocator alloc, byte[] array) {
@@ -401,83 +603,57 @@ public class LLUtils {
 	}
 
 	@NotNull
-	public static Send<Buffer> readDirectNioBuffer(BufferAllocator alloc, ToIntFunction<ByteBuffer> reader) {
-		var nullableSend = readNullableDirectNioBuffer(alloc, reader);
-		try (var buffer = nullableSend != null ? nullableSend.receive() : null) {
-			if (buffer == null) {
-				throw new IllegalStateException("A non-nullable buffer read operation tried to return a \"not found\" element");
-			}
-			return buffer.send();
-		}
+	public static Buffer readDirectNioBuffer(BufferAllocator alloc, ToIntFunction<ByteBuffer> reader) {
+		var nullable = readNullableDirectNioBuffer(alloc, reader);
+		if (nullable == null) {
+			throw new IllegalStateException("A non-nullable buffer read operation tried to return a \"not found\" element");
+		}
+		return nullable;
 	}
 
-	public static Send<Buffer> compositeBuffer(BufferAllocator alloc, Send<Buffer> buffer) {
-		try (var composite = buffer.receive()) {
-			return composite.send();
-		}
+	public static Buffer compositeBuffer(BufferAllocator alloc, Send<Buffer> buffer) {
+		return buffer.receive();
 	}
 
-	public static Send<Buffer> compositeBuffer(BufferAllocator alloc, Send<Buffer> buffer1, Send<Buffer> buffer2) {
-		try (var buf1 = buffer1.receive()) {
-			try (var buf2 = buffer2.receive()) {
-				try (var composite = CompositeBuffer.compose(alloc, buf1.split().send(), buf2.split().send())) {
-					return composite.send();
-				}
-			}
-		}
-	}
+	@NotNull
+	public static Buffer compositeBuffer(BufferAllocator alloc,
+			@NotNull Send<Buffer> buffer1,
+			@NotNull Send<Buffer> buffer2) {
+		var b1 = buffer1.receive();
+		try (var b2 = buffer2.receive()) {
+			if (b1.writerOffset() < b1.capacity() || b2.writerOffset() < b2.capacity()) {
+				b1.ensureWritable(b2.readableBytes(), b2.readableBytes(), true);
+				b2.copyInto(b2.readerOffset(), b1, b1.writerOffset(), b2.readableBytes());
+				b1.writerOffset(b1.writerOffset() + b2.readableBytes());
+				return b1;
+			} else {
+				return CompositeBuffer.compose(alloc, b1.send(), b2.send());
+			}
+		}
+	}
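The two-argument compositeBuffer above now appends the second buffer into the first when spare capacity allows, and only builds a composite otherwise. An illustrative standalone sketch of that same decision using plain NIO ByteBuffers (not the net5 API this diff uses; both buffers are assumed to be in read mode):

    // Sketch only: append b2 into b1 when capacity allows, otherwise allocate
    // a combined buffer - the same trade-off as the new compositeBuffer.
    import java.nio.ByteBuffer;

    class ConcatSketch {
        static ByteBuffer concat(ByteBuffer b1, ByteBuffer b2) {
            if (b1.capacity() - b1.limit() >= b2.remaining()) {
                // Enough spare room: extend b1's limit and copy b2 in place
                int oldLimit = b1.limit();
                b1.limit(oldLimit + b2.remaining());
                b1.position(oldLimit);
                b1.put(b2);
                b1.position(0);
                return b1;
            } else {
                // No room: allocate a new buffer holding both payloads
                ByteBuffer merged = ByteBuffer.allocate(b1.remaining() + b2.remaining());
                merged.put(b1).put(b2).flip();
                return merged;
            }
        }
    }

The in-place branch avoids both the copy of a fresh allocation and the bookkeeping of a composite; the fallback keeps correctness when the first buffer is already full.
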
-	public static Send<Buffer> compositeBuffer(BufferAllocator alloc,
-			Send<Buffer> buffer1,
-			Send<Buffer> buffer2,
-			Send<Buffer> buffer3) {
-		try (var buf1 = buffer1.receive()) {
-			try (var buf2 = buffer2.receive()) {
-				try (var buf3 = buffer3.receive()) {
-					try (var composite = CompositeBuffer.compose(alloc,
-							buf1.split().send(),
-							buf2.split().send(),
-							buf3.split().send()
-					)) {
-						return composite.send();
-					}
-				}
-			}
-		}
-	}
+	@NotNull
+	public static Buffer compositeBuffer(BufferAllocator alloc,
+			@NotNull Send<Buffer> buffer1,
+			@NotNull Send<Buffer> buffer2,
+			@NotNull Send<Buffer> buffer3) {
+		var b1 = buffer1.receive();
+		try (var b2 = buffer2.receive()) {
+			try (var b3 = buffer3.receive()) {
+				if (b1.writerOffset() < b1.capacity()
+						|| b2.writerOffset() < b2.capacity()
+						|| b3.writerOffset() < b3.capacity()) {
+					b1.ensureWritable(b2.readableBytes(), b2.readableBytes(), true);
+					b2.copyInto(b2.readerOffset(), b1, b1.writerOffset(), b2.readableBytes());
+					b1.writerOffset(b1.writerOffset() + b2.readableBytes());
+					b1.ensureWritable(b3.readableBytes(), b3.readableBytes(), true);
+					b3.copyInto(b3.readerOffset(), b1, b1.writerOffset(), b3.readableBytes());
+					b1.writerOffset(b1.writerOffset() + b3.readableBytes());
+					return b1;
+				} else {
+					return CompositeBuffer.compose(alloc, b1.send(), b2.send(), b3.send());
+				}
+			}
+		}
+	}
 
-	@SafeVarargs
-	public static Send<Buffer> compositeBuffer(BufferAllocator alloc, Send<Buffer>... buffers) {
-		try {
-			return switch (buffers.length) {
-				case 0 -> alloc.allocate(0).send();
-				case 1 -> compositeBuffer(alloc, buffers[0]);
-				case 2 -> compositeBuffer(alloc, buffers[0], buffers[1]);
-				case 3 -> compositeBuffer(alloc, buffers[0], buffers[1], buffers[2]);
-				default -> {
-					Buffer[] bufs = new Buffer[buffers.length];
-					for (int i = 0; i < buffers.length; i++) {
-						bufs[i] = buffers[i].receive();
-					}
-					try {
-						//noinspection unchecked
-						Send<Buffer>[] sentBufs = new Send[buffers.length];
-						for (int i = 0; i < buffers.length; i++) {
-							sentBufs[i] = bufs[i].split().send();
-						}
-						try (var composite = CompositeBuffer.compose(alloc, sentBufs)) {
-							yield composite.send();
-						}
-					} finally {
-						for (Buffer buf : bufs) {
-							buf.close();
-						}
-					}
-				}
-			};
-		} finally {
-			for (Send<Buffer> buffer : buffers) {
-				buffer.close();
-			}
-		}
-	}
@@ -807,7 +983,9 @@ public class LLUtils {
 	}
 
 	private static void discardStage(DatabaseStage<?> stage) {
-		stage.release();
+		if (stage != null && stage.isAccessible()) {
+			stage.close();
+		}
 	}
 
 	public static boolean isDirect(Buffer key) {
@@ -0,0 +1,32 @@
+package it.cavallium.dbengine.database;
+
+import io.net5.buffer.api.Drop;
+import io.net5.buffer.api.Owned;
+import io.net5.buffer.api.Resource;
+import io.net5.buffer.api.internal.LifecycleTracer;
+import io.net5.buffer.api.internal.ResourceSupport;
+import org.warp.commonutils.log.Logger;
+import org.warp.commonutils.log.LoggerFactory;
+
+public abstract class LiveResourceSupport<I extends Resource<I>, T extends LiveResourceSupport<I, T>> extends ResourceSupport<I, T> {
+
+	private static final Logger logger = LoggerFactory.getLogger(LiveResourceSupport.class);
+
+	protected LiveResourceSupport(Drop<T> drop) {
+		super(drop);
+	}
+
+	@Override
+	protected void finalize() throws Throwable {
+		if (this.isAccessible()) {
+			var ise = new IllegalStateException("Resource not released");
+			ise.setStackTrace(new StackTraceElement[0]);
+			logger.error("Resource not released: {}", this, attachTrace(ise));
+			try {
+				this.close();
+			} catch (Throwable ignored) {
+			}
+		}
+		super.finalize();
+	}
+}
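LiveResourceSupport logs and force-closes any resource that reaches the garbage collector while still accessible. A hedged, self-contained sketch of the same leak-detection idea on a plain AutoCloseable follows; the names are hypothetical and the real class builds on ResourceSupport internals instead. On modern JVMs, java.lang.ref.Cleaner is the usual replacement for finalize().

    // Hypothetical standalone sketch: if the object is garbage-collected before
    // close() was called, report the leak and close it as a last resort.
    import java.util.logging.Logger;

    abstract class LeakReportingResource implements AutoCloseable {

        private static final Logger logger = Logger.getLogger(LeakReportingResource.class.getName());
        private volatile boolean closed;

        @Override
        public void close() {
            closed = true;
        }

        @SuppressWarnings("deprecation")
        @Override
        protected void finalize() throws Throwable {
            if (!closed) {
                logger.severe("Resource not released: " + this);
                try {
                    close();
                } catch (Throwable ignored) {
                    // Best effort: never let cleanup failures escape the finalizer
                }
            }
            super.finalize();
        }
    }
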
@@ -46,7 +46,7 @@ public class RepeatedElementList<T> implements List<T> {
 
 	@NotNull
 	@Override
-	public Object[] toArray() {
+	public Object @NotNull [] toArray() {
 		var arr = new Object[size];
 		Arrays.fill(arr, element);
 		return arr;
@@ -54,7 +54,7 @@ public class RepeatedElementList<T> implements List<T> {
 
 	@NotNull
 	@Override
-	public <T1> T1[] toArray(@NotNull T1[] a) {
+	public <T1> T1 @NotNull [] toArray(@NotNull T1 @NotNull [] a) {
 		var arr = Arrays.copyOf(a, size);
 		Arrays.fill(arr, element);
 		return arr;
@@ -152,8 +152,9 @@ public class RepeatedElementList<T> implements List<T> {
 	@NotNull
 	@Override
 	public ListIterator<T> listIterator(int index) {
-		return new ListIterator<T>() {
+		return new ListIterator<>() {
 			int position = index - 1;
 
 			@Override
 			public boolean hasNext() {
 				return position + 1 < size;
@@ -2,11 +2,14 @@ package it.cavallium.dbengine.database.collections;
 
 import io.net5.buffer.api.Buffer;
 import io.net5.buffer.api.BufferAllocator;
+import io.net5.buffer.api.Drop;
 import io.net5.buffer.api.Send;
 import it.cavallium.dbengine.database.LLDictionary;
+import it.cavallium.dbengine.database.LLUtils;
 import it.cavallium.dbengine.database.serialization.Serializer;
 import it.cavallium.dbengine.database.serialization.Serializer.DeserializationResult;
 import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
 
 public class DatabaseEmpty {
 
@@ -17,7 +20,7 @@ public class DatabaseEmpty {
 	public static Serializer<Nothing> nothingSerializer(BufferAllocator bufferAllocator) {
 		return new Serializer<>() {
 			@Override
-			public @NotNull DeserializationResult<Nothing> deserialize(@NotNull Send<Buffer> serialized) {
+			public @NotNull DeserializationResult<Nothing> deserialize(@Nullable Send<Buffer> serialized) {
 				try (serialized) {
 					return NOTHING_RESULT;
 				}
@@ -25,7 +28,7 @@ public class DatabaseEmpty {
 
 			@Override
 			public @NotNull Send<Buffer> serialize(@NotNull Nothing deserialized) {
-				return bufferAllocator.allocate(0).send();
+				return LLUtils.empty(bufferAllocator);
 			}
 		};
 	}
@@ -33,8 +36,10 @@ public class DatabaseEmpty {
 	private DatabaseEmpty() {
 	}
 
-	public static DatabaseStageEntry<Nothing> create(LLDictionary dictionary, Send<Buffer> key) {
-		return new DatabaseSingle<>(dictionary, key, nothingSerializer(dictionary.getAllocator()));
+	public static DatabaseStageEntry<Nothing> create(LLDictionary dictionary,
+			Send<Buffer> key,
+			Drop<DatabaseSingle<Nothing>> drop) {
+		return new DatabaseSingle<>(dictionary, key, nothingSerializer(dictionary.getAllocator()), drop);
 	}
 
 	public static final class Nothing {
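The Nothing serializer above writes zero bytes and maps any input back to a singleton result. A self-contained sketch of that zero-payload idea with simplified stand-in interfaces (the real contract uses Send<Buffer> and DeserializationResult):

    // Self-contained sketch: a serializer for a value type that carries no data.
    interface ByteSerializer<T> {
        byte[] serialize(T value);
        T deserialize(byte[] data);
    }

    final class NothingSketch {
        static final NothingSketch INSTANCE = new NothingSketch();
        private NothingSketch() {}

        static ByteSerializer<NothingSketch> serializer() {
            return new ByteSerializer<>() {
                @Override
                public byte[] serialize(NothingSketch value) {
                    return new byte[0]; // the value carries no payload
                }

                @Override
                public NothingSketch deserialize(byte[] data) {
                    return INSTANCE; // whatever was stored, the result is the singleton
                }
            };
        }
    }
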
@@ -1,6 +1,7 @@
 package it.cavallium.dbengine.database.collections;
 
 import io.net5.buffer.api.Buffer;
+import io.net5.buffer.api.Drop;
 import io.net5.buffer.api.Send;
 import io.net5.buffer.api.internal.ResourceSupport;
 import it.cavallium.dbengine.client.CompositeSnapshot;
@@ -24,6 +25,7 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Objects;
 import java.util.Optional;
+import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
@@ -39,31 +41,46 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 	private final Serializer<U> valueSerializer;
 
 	protected DatabaseMapDictionary(LLDictionary dictionary,
-			Send<Buffer> prefixKey,
+			@NotNull Send<Buffer> prefixKey,
 			SerializerFixedBinaryLength<T> keySuffixSerializer,
-			Serializer<U> valueSerializer) {
+			Serializer<U> valueSerializer,
+			Drop<DatabaseMapDictionaryDeep<T, U, DatabaseStageEntry<U>>> drop) {
 		// Do not retain or release or use the prefixKey here
-		super(dictionary, prefixKey, keySuffixSerializer, new SubStageGetterSingle<>(valueSerializer), 0);
+		super(dictionary, prefixKey, keySuffixSerializer, new SubStageGetterSingle<>(valueSerializer), 0, drop);
 		this.valueSerializer = valueSerializer;
 	}
 
 	public static <T, U> DatabaseMapDictionary<T, U> simple(LLDictionary dictionary,
 			SerializerFixedBinaryLength<T> keySerializer,
-			Serializer<U> valueSerializer) {
-		return new DatabaseMapDictionary<>(dictionary, dictionary.getAllocator().allocate(0).send(), keySerializer, valueSerializer);
+			Serializer<U> valueSerializer,
+			Drop<DatabaseMapDictionaryDeep<T, U, DatabaseStageEntry<U>>> drop) {
+		return new DatabaseMapDictionary<>(dictionary, LLUtils.empty(dictionary.getAllocator()), keySerializer,
+				valueSerializer, drop);
 	}
 
 	public static <T, U> DatabaseMapDictionary<T, U> tail(LLDictionary dictionary,
 			Send<Buffer> prefixKey,
 			SerializerFixedBinaryLength<T> keySuffixSerializer,
-			Serializer<U> valueSerializer) {
-		return new DatabaseMapDictionary<>(dictionary, prefixKey, keySuffixSerializer, valueSerializer);
+			Serializer<U> valueSerializer,
+			Drop<DatabaseMapDictionaryDeep<T, U, DatabaseStageEntry<U>>> drop) {
+		return new DatabaseMapDictionary<>(dictionary, prefixKey, keySuffixSerializer, valueSerializer, drop);
 	}
 
 	private Send<Buffer> toKey(Send<Buffer> suffixKeyToSend) {
 		try (var suffixKey = suffixKeyToSend.receive()) {
 			assert suffixKeyConsistency(suffixKey.readableBytes());
-			return LLUtils.compositeBuffer(dictionary.getAllocator(), keyPrefix.copy().send(), suffixKey.send());
+			if (keyPrefix.readableBytes() > 0) {
+				try (var result = LLUtils.compositeBuffer(dictionary.getAllocator(),
+						LLUtils.copy(dictionary.getAllocator(), keyPrefix),
+						suffixKey.send()
+				)) {
+					assert result.readableBytes() == keyPrefixLength + keySuffixLength + keyExtLength;
+					return result.send();
+				}
+			} else {
+				assert suffixKey.readableBytes() == keyPrefixLength + keySuffixLength + keyExtLength;
+				return suffixKey.send();
+			}
 		}
 	}
 
@@ -81,7 +98,12 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 				.getRange(resolveSnapshot(snapshot), rangeMono, existsAlmostCertainly)
 				.<Entry<T, U>>handle((entrySend, sink) -> {
 					try (var entry = entrySend.receive()) {
-						var key = deserializeSuffix(stripPrefix(entry.getKey()));
+						T key;
+						try (var serializedKey = entry.getKey().receive()) {
+							removePrefix(serializedKey);
+							suffixKeyConsistency(serializedKey.readableBytes());
+							key = deserializeSuffix(serializedKey.send());
+						}
 						var value = valueSerializer.deserialize(entry.getValue()).deserializedData();
 						sink.next(Map.entry(key, value));
 					} catch (SerializationException ex) {
@@ -130,20 +152,23 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 	@Override
 	public Mono<DatabaseStageEntry<U>> at(@Nullable CompositeSnapshot snapshot, T keySuffix) {
 		return Mono.fromCallable(() ->
-				new DatabaseSingle<>(dictionary, toKey(serializeSuffix(keySuffix)), valueSerializer));
+				new DatabaseSingle<>(dictionary, toKey(serializeSuffix(keySuffix)), valueSerializer, d -> {}));
 	}
 
 	@Override
 	public Mono<U> getValue(@Nullable CompositeSnapshot snapshot, T keySuffix, boolean existsAlmostCertainly) {
 		return dictionary
-				.get(resolveSnapshot(snapshot), Mono.fromCallable(() -> toKey(serializeSuffix(keySuffix))), existsAlmostCertainly)
+				.get(resolveSnapshot(snapshot),
+						Mono.fromCallable(() -> toKey(serializeSuffix(keySuffix))),
+						existsAlmostCertainly
+				)
 				.handle((value, sink) -> deserializeValue(value, sink));
 	}
 
 	@Override
 	public Mono<Void> putValue(T keySuffix, U value) {
-		var keyMono = Mono.fromCallable(() -> toKey(serializeSuffix(keySuffix)));
-		var valueMono = Mono.fromCallable(() -> valueSerializer.serialize(value));
+		var keyMono = Mono.fromCallable(() -> toKey(serializeSuffix(keySuffix))).single();
+		var valueMono = Mono.fromCallable(() -> valueSerializer.serialize(value)).single();
 		return dictionary
 				.put(keyMono, valueMono, LLDictionaryResultType.VOID)
 				.doOnNext(Send::close)
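putValue now appends .single() to both callables. A standalone Reactor snippet showing what that operator adds, with illustrative values:

    // Standalone illustration: Mono.single() turns an unexpected empty
    // completion into an error instead of silently skipping the downstream put().
    import reactor.core.publisher.Mono;

    class SingleOperatorDemo {
        public static void main(String[] args) {
            Mono<String> present = Mono.fromCallable(() -> "key-bytes").single();
            System.out.println(present.block()); // prints "key-bytes"

            Mono<String> absent = Mono.<String>fromCallable(() -> null).single();
            try {
                absent.block();
            } catch (RuntimeException ex) {
                // fromCallable treats a null result as empty; single() rejects that
                System.out.println("empty mono rejected: " + ex);
            }
        }
    }
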
@@ -297,9 +322,10 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 	}
 
 	private Send<LLEntry> serializeEntry(T key, U value) throws SerializationException {
-		try (var serializedKey = toKey(serializeSuffix(key)).receive()) {
-			try (var serializedValue = valueSerializer.serialize(value).receive()) {
-				return LLEntry.of(serializedKey.send(), serializedValue.send()).send();
+		try (var serializedKey = toKey(serializeSuffix(key))) {
+			var serializedValueToReceive = valueSerializer.serialize(value);
+			try (var serializedValue = serializedValueToReceive.receive()) {
+				return LLEntry.of(serializedKey, serializedValue.send()).send();
 			}
 		}
 	}
@@ -368,16 +394,15 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 	public Flux<Entry<T, DatabaseStageEntry<U>>> getAllStages(@Nullable CompositeSnapshot snapshot) {
 		return dictionary
 				.getRangeKeys(resolveSnapshot(snapshot), rangeMono)
-				.handle((key, sink) -> {
-					try (key) {
-						try (var keySuffixWithExt = stripPrefix(key).receive()) {
-							sink.next(Map.entry(deserializeSuffix(keySuffixWithExt.copy().send()),
-									new DatabaseSingle<>(dictionary,
-											toKey(keySuffixWithExt.send()),
-											valueSerializer
-									)
-							));
-						}
+				.handle((keyBufToReceive, sink) -> {
+					try (var keyBuf = keyBufToReceive.receive()) {
+						assert keyBuf.readableBytes() == keyPrefixLength + keySuffixLength + keyExtLength;
+						// Remove prefix. Keep only the suffix and the ext
+						removePrefix(keyBuf);
+						suffixKeyConsistency(keyBuf.readableBytes());
+						sink.next(Map.entry(deserializeSuffix(keyBuf.copy().send()),
+								new DatabaseSingle<>(dictionary, toKey(keyBuf.send()), valueSerializer, d -> {})
+						));
 					} catch (SerializationException ex) {
 						sink.error(ex);
 					}
@@ -390,8 +415,14 @@ public class DatabaseMapDictionary<T, U> extends DatabaseMapDictionaryDeep<T, U,
 				.getRange(resolveSnapshot(snapshot), rangeMono)
 				.<Entry<T, U>>handle((serializedEntryToReceive, sink) -> {
 					try (var serializedEntry = serializedEntryToReceive.receive()) {
-						sink.next(Map.entry(deserializeSuffix(stripPrefix(serializedEntry.getKey())),
-								valueSerializer.deserialize(serializedEntry.getValue()).deserializedData()));
+						try (var keyBuf = serializedEntry.getKey().receive()) {
+							assert keyBuf.readableBytes() == keyPrefixLength + keySuffixLength + keyExtLength;
+							// Remove prefix. Keep only the suffix and the ext
+							removePrefix(keyBuf);
+							suffixKeyConsistency(keyBuf.readableBytes());
+							sink.next(Map.entry(deserializeSuffix(keyBuf.send()),
+									valueSerializer.deserialize(serializedEntry.getValue()).deserializedData()));
+						}
 					} catch (SerializationException e) {
 						sink.error(e);
 					}
@@ -2,8 +2,11 @@ package it.cavallium.dbengine.database.collections;
 
 import io.net5.buffer.api.Buffer;
 import io.net5.buffer.api.BufferAllocator;
+import io.net5.buffer.api.Drop;
+import io.net5.buffer.api.Owned;
 import io.net5.buffer.api.Resource;
 import io.net5.buffer.api.Send;
+import io.net5.buffer.api.internal.ResourceSupport;
 import io.net5.util.IllegalReferenceCountException;
 import it.cavallium.dbengine.client.BadBlock;
 import it.cavallium.dbengine.client.CompositeSnapshot;
@@ -12,152 +15,116 @@ import it.cavallium.dbengine.database.LLDictionaryResultType;
 import it.cavallium.dbengine.database.LLRange;
 import it.cavallium.dbengine.database.LLSnapshot;
 import it.cavallium.dbengine.database.LLUtils;
+import it.cavallium.dbengine.database.LiveResourceSupport;
 import it.cavallium.dbengine.database.UpdateMode;
 import it.cavallium.dbengine.database.serialization.SerializationException;
 import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
 import java.util.Map;
 import java.util.Map.Entry;
+import java.util.Objects;
+import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
 
 // todo: implement optimized methods (which?)
-public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implements DatabaseStageMap<T, U, US> {
+public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> extends
+		LiveResourceSupport<DatabaseStage<Map<T, U>>, DatabaseMapDictionaryDeep<T, U, US>>
+		implements DatabaseStageMap<T, U, US> {
 
 	protected final LLDictionary dictionary;
 	private final BufferAllocator alloc;
 	protected final SubStageGetter<U, US> subStageGetter;
 	protected final SerializerFixedBinaryLength<T> keySuffixSerializer;
-	protected final Buffer keyPrefix;
 	protected final int keyPrefixLength;
 	protected final int keySuffixLength;
 	protected final int keyExtLength;
-	protected final LLRange range;
 	protected final Mono<Send<LLRange>> rangeMono;
-	private volatile boolean released;
 
-	private static Send<Buffer> incrementPrefix(BufferAllocator alloc, Send<Buffer> originalKeySend, int prefixLength) {
-		try (var originalKey = originalKeySend.receive()) {
-			assert originalKey.readableBytes() >= prefixLength;
-			var originalKeyLength = originalKey.readableBytes();
-			try (Buffer copiedBuf = alloc.allocate(originalKey.readableBytes())) {
-				boolean overflowed = true;
-				final int ff = 0xFF;
-				int writtenBytes = 0;
-				copiedBuf.writerOffset(prefixLength);
-				for (int i = prefixLength - 1; i >= 0; i--) {
-					int iByte = originalKey.getUnsignedByte(i);
-					if (iByte != ff) {
-						copiedBuf.setUnsignedByte(i, iByte + 1);
-						writtenBytes++;
-						overflowed = false;
-						break;
-					} else {
-						copiedBuf.setUnsignedByte(i, 0x00);
-						writtenBytes++;
-						overflowed = true;
-					}
-				}
-				assert prefixLength - writtenBytes >= 0;
-				if (prefixLength - writtenBytes > 0) {
-					originalKey.copyInto(0, copiedBuf, 0, (prefixLength - writtenBytes));
-				}
-
-				copiedBuf.writerOffset(originalKeyLength);
-
-				if (originalKeyLength - prefixLength > 0) {
-					originalKey.copyInto(prefixLength, copiedBuf, prefixLength, originalKeyLength - prefixLength);
-				}
-
-				if (overflowed) {
-					copiedBuf.ensureWritable(originalKeyLength + 1);
-					copiedBuf.writerOffset(originalKeyLength + 1);
-					for (int i = 0; i < originalKeyLength; i++) {
-						copiedBuf.setUnsignedByte(i, 0xFF);
-					}
-					copiedBuf.setUnsignedByte(originalKeyLength, (byte) 0x00);
-				}
-				return copiedBuf.send();
-			}
-		}
-	}
+	protected LLRange range;
+	protected Buffer keyPrefix;
+
+	private static void incrementPrefix(Buffer prefix, int prefixLength) {
+		assert prefix.readableBytes() >= prefixLength;
+		assert prefix.readerOffset() == 0;
+		final var originalKeyLength = prefix.readableBytes();
+		boolean overflowed = true;
+		final int ff = 0xFF;
+		int writtenBytes = 0;
+		for (int i = prefixLength - 1; i >= 0; i--) {
+			int iByte = prefix.getUnsignedByte(i);
+			if (iByte != ff) {
+				prefix.setUnsignedByte(i, iByte + 1);
+				writtenBytes++;
+				overflowed = false;
+				break;
+			} else {
+				prefix.setUnsignedByte(i, 0x00);
+				writtenBytes++;
+			}
+		}
+		assert prefixLength - writtenBytes >= 0;
+
+		if (overflowed) {
+			assert prefix.writerOffset() == originalKeyLength;
+			prefix.ensureWritable(1, 1, true);
+			prefix.writerOffset(originalKeyLength + 1);
+			for (int i = 0; i < originalKeyLength; i++) {
+				prefix.setUnsignedByte(i, 0xFF);
+			}
+			prefix.setUnsignedByte(originalKeyLength, (byte) 0x00);
+		}
+	}
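The rewritten incrementPrefix computes, in place, the smallest byte string that sorts after every key sharing the prefix: increment the prefix as a big-endian counter and, if every byte was 0xFF, extend the key. A self-contained sketch of the same algorithm on plain byte arrays:

    // Standalone sketch of the prefix-increment algorithm used above:
    // treat the first prefixLength bytes as a big-endian counter and add one.
    import java.util.Arrays;

    class PrefixIncrementSketch {
        static byte[] increment(byte[] key, int prefixLength) {
            byte[] out = Arrays.copyOf(key, key.length);
            for (int i = prefixLength - 1; i >= 0; i--) {
                if ((out[i] & 0xFF) != 0xFF) {
                    out[i]++;          // no carry: done
                    return out;
                }
                out[i] = 0x00;         // 0xFF rolls over, carry into the next byte
            }
            // Every prefix byte was 0xFF: extend the key so it still sorts after
            // all keys with the old prefix (mirrors the overflow branch above).
            byte[] extended = new byte[key.length + 1];
            Arrays.fill(extended, 0, key.length, (byte) 0xFF);
            extended[key.length] = 0x00;
            return extended;
        }

        public static void main(String[] args) {
            System.out.println(Arrays.toString(increment(new byte[] {1, 2, 0x30}, 2)));      // [1, 3, 48]
            System.out.println(Arrays.toString(increment(new byte[] {1, (byte) 0xFF}, 2)));  // [2, 0]
            System.out.println(Arrays.toString(increment(new byte[] {(byte) 0xFF}, 1)));     // [-1, 0] i.e. FF 00
        }
    }
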
-	static Send<Buffer> firstRangeKey(BufferAllocator alloc,
-			Send<Buffer> prefixKey,
-			int prefixLength,
-			int suffixLength,
-			int extLength) {
+	static Buffer firstRangeKey(BufferAllocator alloc, Send<Buffer> prefixKey, int prefixLength, int suffixLength,
+			int extLength) {
 		return zeroFillKeySuffixAndExt(alloc, prefixKey, prefixLength, suffixLength, extLength);
 	}
 
-	static Send<Buffer> nextRangeKey(BufferAllocator alloc,
-			Send<Buffer> prefixKey,
-			int prefixLength,
-			int suffixLength,
-			int extLength) {
+	static Buffer nextRangeKey(BufferAllocator alloc, Send<Buffer> prefixKey, int prefixLength, int suffixLength,
+			int extLength) {
 		try (prefixKey) {
-			try (Send<Buffer> nonIncremented = zeroFillKeySuffixAndExt(alloc, prefixKey, prefixLength, suffixLength,
-					extLength)) {
-				return incrementPrefix(alloc, nonIncremented, prefixLength);
-			}
+			Buffer nonIncremented = zeroFillKeySuffixAndExt(alloc, prefixKey, prefixLength, suffixLength, extLength);
+			incrementPrefix(nonIncremented, prefixLength);
+			return nonIncremented;
 		}
 	}
 
-	protected static Send<Buffer> zeroFillKeySuffixAndExt(BufferAllocator alloc,
-			Send<Buffer> prefixKeySend,
-			int prefixLength,
-			int suffixLength,
-			int extLength) {
-		try (var prefixKey = prefixKeySend.receive()) {
-			assert prefixKey.readableBytes() == prefixLength;
-			assert suffixLength > 0;
-			assert extLength >= 0;
-			try (Buffer zeroSuffixAndExt = alloc.allocate(suffixLength + extLength)) {
-				for (int i = 0; i < suffixLength + extLength; i++) {
-					zeroSuffixAndExt.writeByte((byte) 0x0);
-				}
-				try (Buffer result = LLUtils.compositeBuffer(alloc, prefixKey.send(), zeroSuffixAndExt.send()).receive()) {
-					return result.send();
-				}
-			}
-		}
-	}
+	protected static Buffer zeroFillKeySuffixAndExt(BufferAllocator alloc, @NotNull Send<Buffer> prefixKeySend,
+			int prefixLength, int suffixLength, int extLength) {
+		var result = prefixKeySend.receive();
+		if (result == null) {
+			assert prefixLength == 0;
+			var buf = alloc.allocate(prefixLength + suffixLength + extLength);
+			buf.writerOffset(prefixLength + suffixLength + extLength);
+			buf.fill((byte) 0);
+			return buf;
+		} else {
+			assert result.readableBytes() == prefixLength;
+			assert suffixLength > 0;
+			assert extLength >= 0;
+			result.ensureWritable(suffixLength + extLength, suffixLength + extLength, true);
+			for (int i = 0; i < suffixLength + extLength; i++) {
+				result.writeByte((byte) 0x0);
+			}
+			return result;
+		}
+	}
 
-	static Send<Buffer> firstRangeKey(
-			BufferAllocator alloc,
-			Send<Buffer> prefixKey,
-			Send<Buffer> suffixKey,
-			int prefixLength,
-			int suffixLength,
-			int extLength) {
+	static Buffer firstRangeKey(BufferAllocator alloc, Send<Buffer> prefixKey, Send<Buffer> suffixKey, int prefixLength,
+			int suffixLength, int extLength) {
 		return zeroFillKeyExt(alloc, prefixKey, suffixKey, prefixLength, suffixLength, extLength);
 	}
 
-	static Send<Buffer> nextRangeKey(
-			BufferAllocator alloc,
-			Send<Buffer> prefixKey,
-			Send<Buffer> suffixKey,
-			int prefixLength,
-			int suffixLength,
-			int extLength) {
-		try (Send<Buffer> nonIncremented = zeroFillKeyExt(alloc,
-				prefixKey,
-				suffixKey,
-				prefixLength,
-				suffixLength,
-				extLength
-		)) {
-			return incrementPrefix(alloc, nonIncremented, prefixLength + suffixLength);
-		}
-	}
+	static Buffer nextRangeKey(BufferAllocator alloc, Send<Buffer> prefixKey, Send<Buffer> suffixKey, int prefixLength,
+			int suffixLength, int extLength) {
+		Buffer nonIncremented = zeroFillKeyExt(alloc, prefixKey, suffixKey, prefixLength, suffixLength, extLength);
+		incrementPrefix(nonIncremented, prefixLength + suffixLength);
+		return nonIncremented;
+	}
 
-	protected static Send<Buffer> zeroFillKeyExt(
-			BufferAllocator alloc,
-			Send<Buffer> prefixKeySend,
-			Send<Buffer> suffixKeySend,
-			int prefixLength,
-			int suffixLength,
-			int extLength) {
+	protected static Buffer zeroFillKeyExt(BufferAllocator alloc, Send<Buffer> prefixKeySend, Send<Buffer> suffixKeySend,
+			int prefixLength, int suffixLength, int extLength) {
 		try (var prefixKey = prefixKeySend.receive()) {
 			try (var suffixKey = suffixKeySend.receive()) {
 				assert prefixKey.readableBytes() == prefixLength;
@@ -165,17 +132,14 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
 				assert suffixLength > 0;
 				assert extLength >= 0;
 
-				try (var ext = alloc.allocate(extLength)) {
-					for (int i = 0; i < extLength; i++) {
-						ext.writeByte((byte) 0);
-					}
-					try (Buffer result = LLUtils.compositeBuffer(alloc, prefixKey.send(), suffixKey.send(), ext.send())
-							.receive()) {
-						assert result.readableBytes() == prefixLength + suffixLength + extLength;
-						return result.send();
-					}
-				}
+				Buffer result = LLUtils.compositeBuffer(alloc, prefixKey.send(), suffixKey.send());
+				result.ensureWritable(extLength, extLength, true);
+				for (int i = 0; i < extLength; i++) {
+					result.writeByte((byte) 0);
+				}
+
+				assert result.readableBytes() == prefixLength + suffixLength + extLength;
+				return result;
 			}
 		}
 	}
@@ -185,69 +149,82 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
 	 */
 	@Deprecated
 	public static <T, U> DatabaseMapDictionaryDeep<T, U, DatabaseStageEntry<U>> simple(LLDictionary dictionary,
-			SerializerFixedBinaryLength<T> keySerializer,
-			SubStageGetterSingle<U> subStageGetter) {
-		return new DatabaseMapDictionaryDeep<>(dictionary, dictionary.getAllocator().allocate(0).send(),
-				keySerializer, subStageGetter, 0);
+			SerializerFixedBinaryLength<T> keySerializer, SubStageGetterSingle<U> subStageGetter,
+			Drop<DatabaseMapDictionaryDeep<T, U, DatabaseStageEntry<U>>> drop) {
+		return new DatabaseMapDictionaryDeep<>(dictionary, LLUtils.empty(dictionary.getAllocator()), keySerializer,
+				subStageGetter, 0, drop);
 	}
 
-	public static <T, U, US extends DatabaseStage<U>> DatabaseMapDictionaryDeep<T, U, US> deepTail(LLDictionary dictionary,
-			SerializerFixedBinaryLength<T> keySerializer,
-			int keyExtLength,
-			SubStageGetter<U, US> subStageGetter) {
-		return new DatabaseMapDictionaryDeep<>(dictionary,
-				dictionary.getAllocator().allocate(0).send(),
-				keySerializer,
-				subStageGetter,
-				keyExtLength
-		);
+	public static <T, U, US extends DatabaseStage<U>> DatabaseMapDictionaryDeep<T, U, US> deepTail(
+			LLDictionary dictionary, SerializerFixedBinaryLength<T> keySerializer, int keyExtLength,
+			SubStageGetter<U, US> subStageGetter, Drop<DatabaseMapDictionaryDeep<T, U, US>> drop) {
+		return new DatabaseMapDictionaryDeep<>(dictionary, LLUtils.empty(dictionary.getAllocator()), keySerializer,
+				subStageGetter, keyExtLength, drop);
 	}
 
-	public static <T, U, US extends DatabaseStage<U>> DatabaseMapDictionaryDeep<T, U, US> deepIntermediate(LLDictionary dictionary,
-			Send<Buffer> prefixKey,
-			SerializerFixedBinaryLength<T> keySuffixSerializer,
-			SubStageGetter<U, US> subStageGetter,
-			int keyExtLength) {
-		return new DatabaseMapDictionaryDeep<>(dictionary, prefixKey, keySuffixSerializer, subStageGetter, keyExtLength);
+	public static <T, U, US extends DatabaseStage<U>> DatabaseMapDictionaryDeep<T, U, US> deepIntermediate(
+			LLDictionary dictionary, Send<Buffer> prefixKey, SerializerFixedBinaryLength<T> keySuffixSerializer,
+			SubStageGetter<U, US> subStageGetter, int keyExtLength, Drop<DatabaseMapDictionaryDeep<T, U, US>> drop) {
+		return new DatabaseMapDictionaryDeep<>(dictionary, prefixKey, keySuffixSerializer, subStageGetter,
+				keyExtLength, drop);
 	}
 
-	protected DatabaseMapDictionaryDeep(LLDictionary dictionary,
-			Send<Buffer> prefixKeyToReceive,
-			SerializerFixedBinaryLength<T> keySuffixSerializer,
-			SubStageGetter<U, US> subStageGetter,
-			int keyExtLength) {
+	protected DatabaseMapDictionaryDeep(LLDictionary dictionary, @NotNull Send<Buffer> prefixKeyToReceive,
+			SerializerFixedBinaryLength<T> keySuffixSerializer, SubStageGetter<U, US> subStageGetter, int keyExtLength,
+			Drop<DatabaseMapDictionaryDeep<T, U, US>> drop) {
+		super(new CloseOnDrop<>(drop));
 		try (var prefixKey = prefixKeyToReceive.receive()) {
 			this.dictionary = dictionary;
 			this.alloc = dictionary.getAllocator();
 			this.subStageGetter = subStageGetter;
 			this.keySuffixSerializer = keySuffixSerializer;
-			this.keyPrefix = prefixKey.copy();
-			assert keyPrefix.isAccessible();
-			this.keyPrefixLength = keyPrefix.readableBytes();
+			assert prefixKey.isAccessible();
+			this.keyPrefixLength = prefixKey.readableBytes();
 			this.keySuffixLength = keySuffixSerializer.getSerializedBinaryLength();
 			this.keyExtLength = keyExtLength;
-			try (Buffer firstKey = firstRangeKey(alloc,
-					prefixKey.copy().send(),
-					keyPrefixLength,
-					keySuffixLength,
-					keyExtLength
-			).receive().compact()) {
-				try (Buffer nextRangeKey = nextRangeKey(alloc,
-						prefixKey.copy().send(),
-						keyPrefixLength,
-						keySuffixLength,
-						keyExtLength
-				).receive().compact()) {
-					assert keyPrefix.isAccessible();
+			Buffer firstKey = firstRangeKey(alloc, LLUtils.copy(alloc, prefixKey), keyPrefixLength,
+					keySuffixLength, keyExtLength);
+			try (firstKey) {
+				var nextRangeKey = nextRangeKey(alloc, LLUtils.copy(alloc, prefixKey),
+						keyPrefixLength, keySuffixLength, keyExtLength);
+				try (nextRangeKey) {
+					assert prefixKey.isAccessible();
 					assert keyPrefixLength == 0 || !LLUtils.equals(firstKey, nextRangeKey);
 					this.range = keyPrefixLength == 0 ? LLRange.all() : LLRange.of(firstKey.send(), nextRangeKey.send());
 					this.rangeMono = LLUtils.lazyRetainRange(this.range);
 					assert subStageKeysConsistency(keyPrefixLength + keySuffixLength + keyExtLength);
 				}
 			}
+
+			this.keyPrefix = prefixKey.send().receive();
 		}
 	}
 
+	private DatabaseMapDictionaryDeep(LLDictionary dictionary,
+			BufferAllocator alloc,
+			SubStageGetter<U, US> subStageGetter,
+			SerializerFixedBinaryLength<T> keySuffixSerializer,
+			int keyPrefixLength,
+			int keySuffixLength,
+			int keyExtLength,
+			Mono<Send<LLRange>> rangeMono,
+			Send<LLRange> range,
+			Send<Buffer> keyPrefix,
+			Drop<DatabaseMapDictionaryDeep<T, U, US>> drop) {
+		super(new CloseOnDrop<>(drop));
+		this.dictionary = dictionary;
+		this.alloc = alloc;
+		this.subStageGetter = subStageGetter;
+		this.keySuffixSerializer = keySuffixSerializer;
+		this.keyPrefixLength = keyPrefixLength;
+		this.keySuffixLength = keySuffixLength;
+		this.keyExtLength = keyExtLength;
+		this.rangeMono = rangeMono;
+
+		this.range = range.receive();
+		this.keyPrefix = keyPrefix.receive();
+	}
 
 	@SuppressWarnings("unused")
 	protected boolean suffixKeyConsistency(int keySuffixLength) {
 		return this.keySuffixLength == keySuffixLength;
@@ -264,21 +241,30 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
 	}
 
 	/**
-	 * Keep only suffix and ext
+	 * Removes the prefix from the key
 	 */
-	protected Send<Buffer> stripPrefix(Send<Buffer> keyToReceive) {
-		try (var key = keyToReceive.receive()) {
-			return key.copy(this.keyPrefixLength, key.readableBytes() - this.keyPrefixLength).send();
-		}
+	protected void removePrefix(Buffer key) {
+		assert key.readableBytes() == keyPrefixLength + keySuffixLength + keyExtLength
+				|| key.readableBytes() == keyPrefixLength + keySuffixLength;
+		key.readerOffset(key.readerOffset() + this.keyPrefixLength);
+		assert key.readableBytes() == keySuffixLength + keyExtLength
+				|| key.readableBytes() == keySuffixLength;
 	}
 
 	/**
-	 * Add prefix to suffix
+	 * Removes the ext from the key
 	 */
+	protected void removeExt(Buffer key) {
+		assert key.readableBytes() == keyPrefixLength + keySuffixLength + keyExtLength;
+		key.writerOffset(keyPrefixLength + keySuffixLength);
+		assert key.readableBytes() == keyPrefixLength + keySuffixLength;
+	}
+
 	protected Send<Buffer> toKeyWithoutExt(Send<Buffer> suffixKeyToReceive) {
 		try (var suffixKey = suffixKeyToReceive.receive()) {
 			assert suffixKey.readableBytes() == keySuffixLength;
-			try (Buffer result = LLUtils.compositeBuffer(alloc, keyPrefix.copy().send(), suffixKey.send()).receive()) {
+			try (var result = Objects.requireNonNull(LLUtils.compositeBuffer(alloc,
+					LLUtils.copy(alloc, keyPrefix), suffixKey.send()))) {
 				assert result.readableBytes() == keyPrefixLength + keySuffixLength;
 				return result.send();
 			}
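Keys handled by this class are laid out as prefix | suffix | ext, and the new removePrefix/removeExt are pure offset moves over that layout. A standalone sketch with explicit offsets on plain arrays (values are illustrative; the real code moves readerOffset/writerOffset on a net5 Buffer instead of slicing):

    // Standalone sketch of the prefix|suffix|ext key layout handled above.
    import java.util.Arrays;

    class KeyLayoutSketch {
        final int prefixLength;
        final int suffixLength;
        final int extLength;

        KeyLayoutSketch(int prefixLength, int suffixLength, int extLength) {
            this.prefixLength = prefixLength;
            this.suffixLength = suffixLength;
            this.extLength = extLength;
        }

        byte[] removePrefix(byte[] key) {
            // Equivalent of advancing readerOffset past the prefix
            return Arrays.copyOfRange(key, prefixLength, key.length);
        }

        byte[] removeExt(byte[] key) {
            // Equivalent of pulling writerOffset back before the ext
            return Arrays.copyOfRange(key, 0, prefixLength + suffixLength);
        }

        public static void main(String[] args) {
            var layout = new KeyLayoutSketch(2, 3, 1);
            byte[] key = {9, 9, 1, 2, 3, 7}; // prefix=[9,9], suffix=[1,2,3], ext=[7]
            System.out.println(Arrays.toString(layout.removePrefix(key))); // [1, 2, 3, 7]
            System.out.println(Arrays.toString(layout.removeExt(key)));    // [9, 9, 1, 2, 3]
        }
    }
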
@ -305,10 +291,11 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
|
|||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Mono<US> at(@Nullable CompositeSnapshot snapshot, T keySuffix) {
|
public Mono<US> at(@Nullable CompositeSnapshot snapshot, T keySuffix) {
|
||||||
|
var suffixKeyWithoutExt = Mono.fromCallable(() -> toKeyWithoutExt(serializeSuffix(keySuffix)));
|
||||||
return this.subStageGetter
|
return this.subStageGetter
|
||||||
.subStage(dictionary, snapshot, Mono.fromCallable(() -> toKeyWithoutExt(serializeSuffix(keySuffix))))
|
.subStage(dictionary, snapshot, suffixKeyWithoutExt)
|
||||||
.transform(LLUtils::handleDiscard)
|
.transform(LLUtils::handleDiscard)
|
||||||
.doOnDiscard(DatabaseStage.class, DatabaseStage::release);
|
.doOnDiscard(DatabaseStage.class, DatabaseStage::close);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
@ -328,11 +315,10 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
|
|||||||
.flatMapSequential(groupKeyWithoutExtSend_ -> Mono.using(
|
.flatMapSequential(groupKeyWithoutExtSend_ -> Mono.using(
|
||||||
groupKeyWithoutExtSend_::receive,
|
groupKeyWithoutExtSend_::receive,
|
||||||
groupKeyWithoutExtSend -> this.subStageGetter
|
groupKeyWithoutExtSend -> this.subStageGetter
|
||||||
.subStage(dictionary, snapshot, getGroupKeyWithoutExt(groupKeyWithoutExtSend.copy().send()))
|
.subStage(dictionary, snapshot, Mono.fromCallable(() -> groupKeyWithoutExtSend.copy().send()))
|
||||||
.<Entry<T, US>>handle((us, sink) -> {
|
.<Entry<T, US>>handle((us, sink) -> {
|
||||||
try {
|
try {
|
||||||
sink.next(Map.entry(this.deserializeSuffix(getGroupSuffix(groupKeyWithoutExtSend.send())),
|
sink.next(Map.entry(this.deserializeSuffix(getGroupSuffix(groupKeyWithoutExtSend.send())), us));
|
||||||
us));
|
|
||||||
} catch (SerializationException ex) {
|
} catch (SerializationException ex) {
|
||||||
sink.error(ex);
|
sink.error(ex);
|
||||||
}
|
}
|
||||||
@@ -342,22 +328,22 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
                .transform(LLUtils::handleDiscard);
    }

-   private Send<Buffer> getGroupSuffix(Send<Buffer> groupKeyWithoutExtSend) {
-       try (var groupKeyWithoutExt = groupKeyWithoutExtSend.receive()) {
-           try (var groupSuffix = this.stripPrefix(groupKeyWithoutExt.copy().send()).receive()) {
-               assert subStageKeysConsistency(groupKeyWithoutExt.readableBytes() + keyExtLength);
-               return groupSuffix.send();
-           }
-       }
-   }
+   private Send<Buffer> getGroupSuffix(Send<Buffer> groupKeyWithoutExt) {
+       try (var buffer = groupKeyWithoutExt.receive()) {
+           assert subStageKeysConsistency(buffer.readableBytes() + keyExtLength);
+           this.removePrefix(buffer);
+           assert subStageKeysConsistency(keyPrefixLength + buffer.readableBytes() + keyExtLength);
+           return buffer.send();
+       }
+   }

-   private Mono<Send<Buffer>> getGroupKeyWithoutExt(Send<Buffer> groupKeyWithoutExtSend) {
-       return Mono.fromCallable(() -> {
-           try (var groupKeyWithoutExt = groupKeyWithoutExtSend.receive()) {
-               assert subStageKeysConsistency(groupKeyWithoutExt.readableBytes() + keyExtLength);
-               return groupKeyWithoutExt.send();
-           }
-       });
-   }
+   private Send<Buffer> getGroupWithoutExt(Send<Buffer> groupKeyWithExtSend) {
+       try (var buffer = groupKeyWithExtSend.receive()) {
+           assert subStageKeysConsistency(buffer.readableBytes());
+           this.removeExt(buffer);
+           assert subStageKeysConsistency(buffer.readableBytes() + keyExtLength);
+           return buffer.send();
+       }
+   }

    private boolean subStageKeysConsistency(int totalKeyLength) {
@@ -401,7 +387,7 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
    }

    //todo: temporary wrapper. convert the whole class to buffers
-   protected T deserializeSuffix(Send<Buffer> keySuffixToReceive) throws SerializationException {
+   protected T deserializeSuffix(@NotNull Send<Buffer> keySuffixToReceive) throws SerializationException {
        try (var keySuffix = keySuffixToReceive.receive()) {
            assert suffixKeyConsistency(keySuffix.readableBytes());
            var result = keySuffixSerializer.deserialize(keySuffix.send());
@@ -411,22 +397,54 @@ public class DatabaseMapDictionaryDeep<T, U, US extends DatabaseStage<U>> implem
    }

    //todo: temporary wrapper. convert the whole class to buffers
+   @NotNull
    protected Send<Buffer> serializeSuffix(T keySuffix) throws SerializationException {
-       try (Buffer suffixData = keySuffixSerializer.serialize(keySuffix).receive()) {
-           assert suffixKeyConsistency(suffixData.readableBytes());
-           assert keyPrefix.isAccessible();
-           return suffixData.send();
+       try (var suffixDataToReceive = keySuffixSerializer.serialize(keySuffix)) {
+           try (Buffer suffixData = suffixDataToReceive.receive()) {
+               assert suffixKeyConsistency(suffixData.readableBytes());
+               assert keyPrefix.isAccessible();
+               return suffixData.send();
+           }
        }
    }

    @Override
-   public void release() {
-       if (!released) {
-           released = true;
-           this.range.close();
-           this.keyPrefix.close();
-       } else {
-           throw new IllegalReferenceCountException(0, -1);
+   protected RuntimeException createResourceClosedException() {
+       throw new IllegalStateException("Closed");
+   }
+
+   @Override
+   protected Owned<DatabaseMapDictionaryDeep<T, U, US>> prepareSend() {
+       var keyPrefix = this.keyPrefix.send();
+       var range = this.range.send();
+       return drop -> new DatabaseMapDictionaryDeep<>(dictionary, alloc, subStageGetter, keySuffixSerializer,
+               keyPrefixLength, keySuffixLength, keyExtLength, rangeMono, range, keyPrefix, drop);
+   }
+
+   @Override
+   protected void makeInaccessible() {
+       this.keyPrefix = null;
+       this.range = null;
+   }
+
+   private static class CloseOnDrop<T, U, US extends DatabaseStage<U>> implements
+           Drop<DatabaseMapDictionaryDeep<T, U, US>> {
+
+       private final Drop<DatabaseMapDictionaryDeep<T,U,US>> delegate;
+
+       public CloseOnDrop(Drop<DatabaseMapDictionaryDeep<T, U, US>> drop) {
+           this.delegate = drop;
+       }
+
+       @Override
+       public void drop(DatabaseMapDictionaryDeep<T, U, US> obj) {
+           if (obj.range != null) {
+               obj.range.close();
+           }
+           if (obj.keyPrefix != null) {
+               obj.keyPrefix.close();
+           }
+           delegate.drop(obj);
        }
    }
}
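The hunk above swaps the hand-rolled release() bookkeeping, with its released flag and IllegalReferenceCountException, for the io.net5 buffer API lifecycle: the class frees its buffers through a Drop callback, and the CloseOnDrop wrapper closes the object's own buffers before delegating to the caller-supplied drop. The following is a minimal sketch of that lifecycle under the same API; ExampleStage and its field are hypothetical and not part of dbengine:

import io.net5.buffer.api.Buffer;
import io.net5.buffer.api.BufferAllocator;
import io.net5.buffer.api.Drop;
import io.net5.buffer.api.Owned;
import io.net5.buffer.api.Send;
import io.net5.buffer.api.internal.ResourceSupport;

public class ExampleStage extends ResourceSupport<ExampleStage, ExampleStage> {

    private Buffer state; // owned buffer, nulled once this instance is sent away

    public ExampleStage(BufferAllocator alloc, Drop<ExampleStage> drop) {
        super(new CloseOnDrop(drop));
        this.state = alloc.allocate(16);
    }

    private ExampleStage(Send<Buffer> state, Drop<ExampleStage> drop) {
        super(new CloseOnDrop(drop));
        this.state = state.receive();
    }

    @Override
    protected RuntimeException createResourceClosedException() {
        return new IllegalStateException("Closed");
    }

    @Override
    protected Owned<ExampleStage> prepareSend() {
        Send<Buffer> state = this.state.send(); // detach the buffer from this instance
        return drop -> new ExampleStage(state, drop); // rebuild it on the receiving side
    }

    @Override
    protected void makeInaccessible() {
        this.state = null; // this instance may no longer touch the buffer
    }

    // Close the owned buffer first, then run the caller-supplied drop.
    private static class CloseOnDrop implements Drop<ExampleStage> {

        private final Drop<ExampleStage> delegate;

        CloseOnDrop(Drop<ExampleStage> drop) {
            this.delegate = drop;
        }

        @Override
        public void drop(ExampleStage obj) {
            if (obj.state != null) {
                obj.state.close();
            }
            delegate.drop(obj);
        }
    }
}

Closing a live ExampleStage, or discarding an unclaimed Send of it, ends up in CloseOnDrop.drop, so the buffer cannot leak regardless of which owner abandons it.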
@@ -2,10 +2,14 @@ package it.cavallium.dbengine.database.collections;

 import io.net5.buffer.api.Buffer;
 import io.net5.buffer.api.BufferAllocator;
+import io.net5.buffer.api.Drop;
+import io.net5.buffer.api.Owned;
 import io.net5.buffer.api.Send;
 import it.cavallium.dbengine.client.BadBlock;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.LLDictionary;
+import it.cavallium.dbengine.database.LLUtils;
+import it.cavallium.dbengine.database.LiveResourceSupport;
 import it.cavallium.dbengine.database.UpdateMode;
 import it.cavallium.dbengine.database.serialization.Serializer;
 import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
@@ -23,18 +27,23 @@ import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;

 @SuppressWarnings("unused")
-public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T, U, DatabaseStageEntry<U>> {
+public class DatabaseMapDictionaryHashed<T, U, TH> extends
+       LiveResourceSupport<DatabaseStage<Map<T, U>>, DatabaseMapDictionaryHashed<T, U, TH>>
+       implements DatabaseStageMap<T, U, DatabaseStageEntry<U>> {

    private final BufferAllocator alloc;
-   private final DatabaseMapDictionary<TH, ObjectArraySet<Entry<T, U>>> subDictionary;
    private final Function<T, TH> keySuffixHashFunction;

+   private DatabaseMapDictionary<TH, ObjectArraySet<Entry<T, U>>> subDictionary;
+
    protected DatabaseMapDictionaryHashed(LLDictionary dictionary,
-           Send<Buffer> prefixKey,
+           @NotNull Send<Buffer> prefixKey,
            Serializer<T> keySuffixSerializer,
            Serializer<U> valueSerializer,
            Function<T, TH> keySuffixHashFunction,
-           SerializerFixedBinaryLength<TH> keySuffixHashSerializer) {
+           SerializerFixedBinaryLength<TH> keySuffixHashSerializer,
+           Drop<DatabaseMapDictionaryHashed<T, U, TH>> drop) {
+       super(new DatabaseMapDictionaryHashed.CloseOnDrop<>(drop));
        if (dictionary.getUpdateMode().block() != UpdateMode.ALLOW) {
            throw new IllegalArgumentException("Hashed maps only works when UpdateMode is ALLOW");
        }
@@ -43,26 +52,36 @@ public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T
                = new ValueWithHashSerializer<>(alloc, keySuffixSerializer, valueSerializer);
        ValuesSetSerializer<Entry<T, U>> valuesSetSerializer
                = new ValuesSetSerializer<>(alloc, valueWithHashSerializer);
-       this.subDictionary = DatabaseMapDictionary.tail(dictionary,
-               prefixKey,
-               keySuffixHashSerializer,
-               valuesSetSerializer
-       );
+       this.subDictionary = DatabaseMapDictionary.tail(dictionary, prefixKey, keySuffixHashSerializer,
+               valuesSetSerializer, d -> {});
        this.keySuffixHashFunction = keySuffixHashFunction;
    }

+   private DatabaseMapDictionaryHashed(BufferAllocator alloc,
+           Function<T, TH> keySuffixHashFunction,
+           Send<DatabaseStage<Map<TH, ObjectArraySet<Entry<T, U>>>>> subDictionary,
+           Drop<DatabaseMapDictionaryHashed<T, U, TH>> drop) {
+       super(new CloseOnDrop<>(drop));
+       this.alloc = alloc;
+       this.keySuffixHashFunction = keySuffixHashFunction;
+
+       this.subDictionary = (DatabaseMapDictionary<TH, ObjectArraySet<Entry<T, U>>>) subDictionary.receive();
+   }
+
    public static <T, U, UH> DatabaseMapDictionaryHashed<T, U, UH> simple(LLDictionary dictionary,
            Serializer<T> keySerializer,
            Serializer<U> valueSerializer,
            Function<T, UH> keyHashFunction,
-           SerializerFixedBinaryLength<UH> keyHashSerializer) {
+           SerializerFixedBinaryLength<UH> keyHashSerializer,
+           Drop<DatabaseMapDictionaryHashed<T, U, UH>> drop) {
        return new DatabaseMapDictionaryHashed<>(
                dictionary,
-               dictionary.getAllocator().allocate(0).send(),
+               LLUtils.empty(dictionary.getAllocator()),
                keySerializer,
                valueSerializer,
                keyHashFunction,
-               keyHashSerializer
+               keyHashSerializer,
+               drop
        );
    }

@@ -71,13 +90,15 @@ public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T
            Serializer<T> keySuffixSerializer,
            Serializer<U> valueSerializer,
            Function<T, UH> keySuffixHashFunction,
-           SerializerFixedBinaryLength<UH> keySuffixHashSerializer) {
+           SerializerFixedBinaryLength<UH> keySuffixHashSerializer,
+           Drop<DatabaseMapDictionaryHashed<T, U, UH>> drop) {
        return new DatabaseMapDictionaryHashed<>(dictionary,
                prefixKey,
                keySuffixSerializer,
                valueSerializer,
                keySuffixHashFunction,
-               keySuffixHashSerializer
+               keySuffixHashSerializer,
+               drop
        );
    }

@@ -124,11 +145,6 @@ public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T
        return subDictionary.clearAndGetStatus();
    }

-   @Override
-   public Mono<Void> close() {
-       return subDictionary.close();
-   }
-
    @Override
    public Mono<Boolean> isEmpty(@Nullable CompositeSnapshot snapshot) {
        return subDictionary.isEmpty(snapshot);
@@ -144,11 +160,6 @@ public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T
        return this.subDictionary.badBlocks();
    }

-   @Override
-   public void release() {
-       this.subDictionary.release();
-   }
-
    @Override
    public Mono<DatabaseStageEntry<U>> at(@Nullable CompositeSnapshot snapshot, T key) {
        return this
@@ -159,7 +170,7 @@ public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T
    private Mono<DatabaseSingleBucket<T, U, TH>> atPrivate(@Nullable CompositeSnapshot snapshot, T key, TH hash) {
        return subDictionary
                .at(snapshot, hash)
-               .map(entry -> new DatabaseSingleBucket<>(entry, key));
+               .map(entry -> new DatabaseSingleBucket<>(entry, key, d -> {}));
    }

    @Override
@@ -192,13 +203,11 @@ public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T
    @Override
    public Flux<Entry<T, U>> setAllValuesAndGetPrevious(Flux<Entry<T, U>> entries) {
        return entries
-               .flatMap(entry -> Flux.usingWhen(
-                       this.at(null, entry.getKey()),
+               .flatMap(entry -> LLUtils.usingResource(this.at(null, entry.getKey()),
                        stage -> stage
                                .setAndGetPrevious(entry.getValue())
-                               .map(prev -> Map.entry(entry.getKey(), prev)),
-                       stage -> Mono.fromRunnable(stage::release)
-               ));
+                               .map(prev -> Map.entry(entry.getKey(), prev)), true)
+               );
    }

    @Override
@@ -296,4 +305,37 @@ public class DatabaseMapDictionaryHashed<T, U, TH> implements DatabaseStageMap<T
        return null;
    }

+   @Override
+   protected RuntimeException createResourceClosedException() {
+       throw new IllegalStateException("Closed");
+   }
+
+   @Override
+   protected Owned<DatabaseMapDictionaryHashed<T, U, TH>> prepareSend() {
+       var subDictionary = this.subDictionary.send();
+       return drop -> new DatabaseMapDictionaryHashed<>(alloc, keySuffixHashFunction, subDictionary, drop);
+   }
+
+   @Override
+   protected void makeInaccessible() {
+       this.subDictionary = null;
+   }
+
+   private static class CloseOnDrop<T, U, TH> implements Drop<DatabaseMapDictionaryHashed<T,U,TH>> {
+
+       private final Drop<DatabaseMapDictionaryHashed<T,U,TH>> delegate;
+
+       public CloseOnDrop(Drop<DatabaseMapDictionaryHashed<T,U,TH>> drop) {
+           this.delegate = drop;
+       }
+
+       @Override
+       public void drop(DatabaseMapDictionaryHashed<T, U, TH> obj) {
+           if (obj.subDictionary != null) {
+               obj.subDictionary.close();
+           }
+           delegate.drop(obj);
+       }
+   }
 }
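prepareSend() together with the new private receiving constructor above implements the transfer protocol of this buffer API: send() detaches a resource from its current owner exactly once, and receive() re-attaches it on the other side, possibly on another thread. The unchecked cast in the receiving constructor is safe only because prepareSend() always wraps a DatabaseMapDictionary; the Send type erases that detail. A small runnable illustration with a plain Buffer follows; the allocator factory name is an assumption, since io.net5 snapshots have renamed these methods over time:

import io.net5.buffer.api.Buffer;
import io.net5.buffer.api.BufferAllocator;
import io.net5.buffer.api.Send;

public final class SendExample {
    public static void main(String[] args) {
        // onHeapUnpooled() is assumed; some versions of this API call it heap()
        try (BufferAllocator alloc = BufferAllocator.onHeapUnpooled()) {
            Buffer buf = alloc.allocate(Long.BYTES).writeLong(42L);
            Send<Buffer> sent = buf.send(); // 'buf' is inaccessible from here on
            try (Buffer received = sent.receive()) { // ownership re-attached here
                System.out.println(received.readLong()); // prints 42
            }
        }
    }
}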
@@ -1,9 +1,11 @@
 package it.cavallium.dbengine.database.collections;

 import io.net5.buffer.api.Buffer;
+import io.net5.buffer.api.Drop;
 import io.net5.buffer.api.Send;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.LLDictionary;
+import it.cavallium.dbengine.database.LLUtils;
 import it.cavallium.dbengine.database.collections.DatabaseEmpty.Nothing;
 import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
 import java.util.HashMap;
@@ -17,21 +19,22 @@ public class DatabaseSetDictionary<T> extends DatabaseMapDictionary<T, Nothing>

    protected DatabaseSetDictionary(LLDictionary dictionary,
            Send<Buffer> prefixKey,
-           SerializerFixedBinaryLength<T> keySuffixSerializer) {
-       super(dictionary, prefixKey, keySuffixSerializer, DatabaseEmpty.nothingSerializer(dictionary.getAllocator()));
+           SerializerFixedBinaryLength<T> keySuffixSerializer,
+           Drop<DatabaseMapDictionaryDeep<T, Nothing, DatabaseStageEntry<Nothing>>> drop) {
+       super(dictionary, prefixKey, keySuffixSerializer, DatabaseEmpty.nothingSerializer(dictionary.getAllocator()), drop);
    }

    public static <T> DatabaseSetDictionary<T> simple(LLDictionary dictionary,
-           SerializerFixedBinaryLength<T> keySerializer) {
-       try (var buf = dictionary.getAllocator().allocate(0)) {
-           return new DatabaseSetDictionary<>(dictionary, buf.send(), keySerializer);
-       }
+           SerializerFixedBinaryLength<T> keySerializer,
+           Drop<DatabaseMapDictionaryDeep<T, Nothing, DatabaseStageEntry<Nothing>>> drop) {
+       return new DatabaseSetDictionary<>(dictionary, LLUtils.empty(dictionary.getAllocator()), keySerializer, drop);
    }

    public static <T> DatabaseSetDictionary<T> tail(LLDictionary dictionary,
            Send<Buffer> prefixKey,
-           SerializerFixedBinaryLength<T> keySuffixSerializer) {
-       return new DatabaseSetDictionary<>(dictionary, prefixKey, keySuffixSerializer);
+           SerializerFixedBinaryLength<T> keySuffixSerializer,
+           Drop<DatabaseMapDictionaryDeep<T, Nothing, DatabaseStageEntry<Nothing>>> drop) {
+       return new DatabaseSetDictionary<>(dictionary, prefixKey, keySuffixSerializer, drop);
    }

    public Mono<Set<T>> getKeySet(@Nullable CompositeSnapshot snapshot) {
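The simple() factories now take their zero-length prefix from LLUtils.empty instead of allocating and sending inline; the previous try-with-resources in simple() also closed a buffer whose ownership had already been handed off by send(), which the stricter Resource lifecycle is expected to reject. LLUtils.empty itself is not shown in this commit, but from its call sites its shape is presumably just:

// Assumed shape of LLUtils.empty, inferred from its call sites in this diff:
public static Send<Buffer> empty(BufferAllocator allocator) {
    // Allocate a zero-length buffer and immediately hand off its ownership.
    return allocator.allocate(0).send();
}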
@@ -1,9 +1,11 @@
 package it.cavallium.dbengine.database.collections;

 import io.net5.buffer.api.Buffer;
+import io.net5.buffer.api.Drop;
 import io.net5.buffer.api.Send;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.LLDictionary;
+import it.cavallium.dbengine.database.LLUtils;
 import it.cavallium.dbengine.database.collections.DatabaseEmpty.Nothing;
 import it.cavallium.dbengine.database.serialization.Serializer;
 import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
@@ -11,6 +13,7 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.Set;
 import java.util.function.Function;
+import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
 import reactor.core.publisher.Mono;

@@ -18,28 +21,32 @@ import reactor.core.publisher.Mono;
 public class DatabaseSetDictionaryHashed<T, TH> extends DatabaseMapDictionaryHashed<T, Nothing, TH> {

    protected DatabaseSetDictionaryHashed(LLDictionary dictionary,
-           Send<Buffer> prefixKey,
+           @NotNull Send<Buffer> prefixKey,
            Serializer<T> keySuffixSerializer,
            Function<T, TH> keySuffixHashFunction,
-           SerializerFixedBinaryLength<TH> keySuffixHashSerializer) {
+           SerializerFixedBinaryLength<TH> keySuffixHashSerializer,
+           Drop<DatabaseMapDictionaryHashed<T, Nothing, TH>> drop) {
        super(dictionary,
                prefixKey,
                keySuffixSerializer,
                DatabaseEmpty.nothingSerializer(dictionary.getAllocator()),
                keySuffixHashFunction,
-               keySuffixHashSerializer
+               keySuffixHashSerializer,
+               drop
        );
    }

    public static <T, TH> DatabaseSetDictionaryHashed<T, TH> simple(LLDictionary dictionary,
            Serializer<T> keySerializer,
            Function<T, TH> keyHashFunction,
-           SerializerFixedBinaryLength<TH> keyHashSerializer) {
+           SerializerFixedBinaryLength<TH> keyHashSerializer,
+           Drop<DatabaseMapDictionaryHashed<T, Nothing, TH>> drop) {
        return new DatabaseSetDictionaryHashed<>(dictionary,
-               dictionary.getAllocator().allocate(0).send(),
+               LLUtils.empty(dictionary.getAllocator()),
                keySerializer,
                keyHashFunction,
-               keyHashSerializer
+               keyHashSerializer,
+               drop
        );
    }

@@ -47,12 +54,13 @@ public class DatabaseSetDictionaryHashed<T, TH> extends DatabaseMapDictionaryHas
            Send<Buffer> prefixKey,
            Serializer<T> keySuffixSerializer,
            Function<T, TH> keyHashFunction,
-           SerializerFixedBinaryLength<TH> keyHashSerializer) {
+           SerializerFixedBinaryLength<TH> keyHashSerializer, Drop<DatabaseMapDictionaryHashed<T, Nothing, TH>> drop) {
        return new DatabaseSetDictionaryHashed<>(dictionary,
                prefixKey,
                keySuffixSerializer,
                keyHashFunction,
-               keyHashSerializer
+               keyHashSerializer,
+               drop
        );
    }

@@ -1,6 +1,8 @@
 package it.cavallium.dbengine.database.collections;

 import io.net5.buffer.api.Buffer;
+import io.net5.buffer.api.Drop;
+import io.net5.buffer.api.Owned;
 import io.net5.buffer.api.Send;
 import io.net5.buffer.api.internal.ResourceSupport;
 import it.cavallium.dbengine.client.BadBlock;
@@ -20,14 +22,18 @@ import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
 import reactor.core.publisher.SynchronousSink;

-public class DatabaseSingle<U> implements DatabaseStageEntry<U> {
+public class DatabaseSingle<U> extends ResourceSupport<DatabaseStage<U>, DatabaseSingle<U>> implements
+       DatabaseStageEntry<U> {

    private final LLDictionary dictionary;
-   private final Buffer key;
    private final Mono<Send<Buffer>> keyMono;
    private final Serializer<U> serializer;

-   public DatabaseSingle(LLDictionary dictionary, Send<Buffer> key, Serializer<U> serializer) {
+   private Buffer key;
+
+   public DatabaseSingle(LLDictionary dictionary, Send<Buffer> key, Serializer<U> serializer,
+           Drop<DatabaseSingle<U>> drop) {
+       super(new CloseOnDrop<>(drop));
        try (key) {
            this.dictionary = dictionary;
            this.key = key.receive();
@@ -124,13 +130,41 @@ public class DatabaseSingle<U> implements DatabaseStageEntry<U> {
                .isRangeEmpty(resolveSnapshot(snapshot), keyMono.map(LLRange::single).map(ResourceSupport::send));
    }

-   @Override
-   public void release() {
-       key.close();
-   }
-
    @Override
    public Flux<BadBlock> badBlocks() {
        return dictionary.badBlocks(keyMono.map(LLRange::single).map(ResourceSupport::send));
    }

+   @Override
+   protected RuntimeException createResourceClosedException() {
+       throw new IllegalStateException("Closed");
+   }
+
+   @Override
+   protected Owned<DatabaseSingle<U>> prepareSend() {
+       var key = this.key.send();
+       return drop -> new DatabaseSingle<>(dictionary, key, serializer, drop);
+   }
+
+   @Override
+   protected void makeInaccessible() {
+       this.key = null;
+   }
+
+   private static class CloseOnDrop<U> implements Drop<DatabaseSingle<U>> {
+
+       private final Drop<DatabaseSingle<U>> delegate;
+
+       public CloseOnDrop(Drop<DatabaseSingle<U>> drop) {
+           this.delegate = drop;
+       }
+
+       @Override
+       public void drop(DatabaseSingle<U> obj) {
+           if (obj.key != null) {
+               obj.key.close();
+           }
+           delegate.drop(obj);
+       }
+   }
 }
@@ -1,10 +1,14 @@
 package it.cavallium.dbengine.database.collections;

+import io.net5.buffer.api.Drop;
+import io.net5.buffer.api.Owned;
+import io.net5.buffer.api.Send;
 import it.cavallium.dbengine.client.BadBlock;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.Column;
 import it.cavallium.dbengine.database.Delta;
 import it.cavallium.dbengine.database.LLUtils;
+import it.cavallium.dbengine.database.LiveResourceSupport;
 import it.cavallium.dbengine.database.UpdateReturnMode;
 import it.cavallium.dbengine.database.serialization.SerializationFunction;
 import it.unimi.dsi.fastutil.objects.ObjectArraySet;
@@ -23,14 +27,26 @@ import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;

 @SuppressWarnings("unused")
-public class DatabaseSingleBucket<K, V, TH> implements DatabaseStageEntry<V> {
+public class DatabaseSingleBucket<K, V, TH>
+       extends LiveResourceSupport<DatabaseStage<V>, DatabaseSingleBucket<K, V, TH>>
+       implements DatabaseStageEntry<V> {

-   private final DatabaseStageEntry<ObjectArraySet<Entry<K, V>>> bucketStage;
    private final K key;

-   public DatabaseSingleBucket(DatabaseStageEntry<ObjectArraySet<Entry<K, V>>> bucketStage, K key) {
-       this.bucketStage = bucketStage;
+   private DatabaseStageEntry<ObjectArraySet<Entry<K, V>>> bucketStage;
+
+   public DatabaseSingleBucket(DatabaseStageEntry<ObjectArraySet<Entry<K, V>>> bucketStage, K key,
+           Drop<DatabaseSingleBucket<K, V, TH>> drop) {
+       super(new CloseOnDrop<>(drop));
        this.key = key;
+       this.bucketStage = bucketStage;
+   }
+
+   private DatabaseSingleBucket(Send<DatabaseStage<ObjectArraySet<Entry<K, V>>>> bucketStage, K key,
+           Drop<DatabaseSingleBucket<K, V, TH>> drop) {
+       super(new CloseOnDrop<>(drop));
+       this.key = key;
+       this.bucketStage = (DatabaseStageEntry<ObjectArraySet<Entry<K, V>>>) bucketStage.receive();
    }

    @Override
@@ -77,7 +93,8 @@ public class DatabaseSingleBucket<K, V, TH> implements DatabaseStageEntry<V> {
    }

    @Override
-   public Mono<Delta<V>> updateAndGetDelta(SerializationFunction<@Nullable V, @Nullable V> updater, boolean existsAlmostCertainly) {
+   public Mono<Delta<V>> updateAndGetDelta(SerializationFunction<@Nullable V, @Nullable V> updater,
+           boolean existsAlmostCertainly) {
        return bucketStage
                .updateAndGetDelta(oldBucket -> {
                    V oldValue = extractValue(oldBucket);
@@ -106,11 +123,6 @@ public class DatabaseSingleBucket<K, V, TH> implements DatabaseStageEntry<V> {
        return this.updateAndGetDelta(prev -> null).map(LLUtils::isDeltaChanged);
    }

-   @Override
-   public Mono<Void> close() {
-       return bucketStage.close();
-   }
-
    @Override
    public Mono<Long> leavesCount(@Nullable CompositeSnapshot snapshot, boolean fast) {
        return this.get(snapshot).map(prev -> 1L).defaultIfEmpty(0L);
@@ -131,11 +143,6 @@ public class DatabaseSingleBucket<K, V, TH> implements DatabaseStageEntry<V> {
        return bucketStage.badBlocks();
    }

-   @Override
-   public void release() {
-       bucketStage.release();
-   }
-
    private Mono<V> extractValueTransformation(Set<Entry<K, V>> entries) {
        return Mono.fromCallable(() -> extractValue(entries));
    }
@@ -193,4 +200,38 @@ public class DatabaseSingleBucket<K, V, TH> implements DatabaseStageEntry<V> {
        return null;
    }

+   @Override
+   protected RuntimeException createResourceClosedException() {
+       throw new IllegalStateException("Closed");
+   }
+
+   @Override
+   protected Owned<DatabaseSingleBucket<K, V, TH>> prepareSend() {
+       var bucketStage = this.bucketStage.send();
+       return drop -> new DatabaseSingleBucket<>(bucketStage, key, drop);
+   }
+
+   @Override
+   protected void makeInaccessible() {
+       this.bucketStage = null;
+   }
+
+   private static class CloseOnDrop<K, V, TH> implements
+           Drop<DatabaseSingleBucket<K, V, TH>> {
+
+       private final Drop<DatabaseSingleBucket<K, V, TH>> delegate;
+
+       public CloseOnDrop(Drop<DatabaseSingleBucket<K, V, TH>> drop) {
+           this.delegate = drop;
+       }
+
+       @Override
+       public void drop(DatabaseSingleBucket<K, V, TH> obj) {
+           if (obj.bucketStage != null) {
+               obj.bucketStage.close();
+           }
+           delegate.drop(obj);
+       }
+   }
 }
@@ -1,5 +1,9 @@
 package it.cavallium.dbengine.database.collections;

+import io.net5.buffer.api.Drop;
+import io.net5.buffer.api.Owned;
+import io.net5.buffer.api.Send;
+import io.net5.buffer.api.internal.ResourceSupport;
 import it.cavallium.dbengine.client.BadBlock;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.client.Mapper;
@@ -14,16 +18,28 @@ import reactor.core.publisher.Mono;
 import reactor.core.publisher.SynchronousSink;

 @SuppressWarnings("unused")
-public class DatabaseSingleMapped<A, B> implements DatabaseStageEntry<A> {
+public class DatabaseSingleMapped<A, B> extends ResourceSupport<DatabaseStage<A>, DatabaseSingleMapped<A, B>>
+       implements DatabaseStageEntry<A> {

-   private final DatabaseStageEntry<B> serializedSingle;
    private final Mapper<A, B> mapper;

-   public DatabaseSingleMapped(DatabaseStageEntry<B> serializedSingle, Mapper<A, B> mapper) {
+   private DatabaseStageEntry<B> serializedSingle;
+
+   public DatabaseSingleMapped(DatabaseStageEntry<B> serializedSingle, Mapper<A, B> mapper,
+           Drop<DatabaseSingleMapped<A, B>> drop) {
+       super(new CloseOnDrop<>(drop));
        this.serializedSingle = serializedSingle;
        this.mapper = mapper;
    }

+   private DatabaseSingleMapped(Send<DatabaseStage<B>> serializedSingle, Mapper<A, B> mapper,
+           Drop<DatabaseSingleMapped<A, B>> drop) {
+       super(new CloseOnDrop<>(drop));
+       this.mapper = mapper;
+
+       this.serializedSingle = (DatabaseStageEntry<B>) serializedSingle.receive();
+   }
+
    private void deserializeSink(B value, SynchronousSink<A> sink) {
        try {
            sink.next(this.unMap(value));
@@ -107,11 +123,6 @@ public class DatabaseSingleMapped<A, B> implements DatabaseStageEntry<A> {
        return serializedSingle.clearAndGetStatus();
    }

-   @Override
-   public Mono<Void> close() {
-       return serializedSingle.close();
-   }
-
    @Override
    public Mono<Long> leavesCount(@Nullable CompositeSnapshot snapshot, boolean fast) {
        return serializedSingle.leavesCount(snapshot, fast);
@@ -132,11 +143,6 @@ public class DatabaseSingleMapped<A, B> implements DatabaseStageEntry<A> {
        return this.serializedSingle.badBlocks();
    }

-   @Override
-   public void release() {
-       serializedSingle.release();
-   }
-
    //todo: temporary wrapper. convert the whole class to buffers
    private A unMap(B bytes) throws SerializationException {
        return mapper.unmap(bytes);
@@ -146,4 +152,37 @@ public class DatabaseSingleMapped<A, B> implements DatabaseStageEntry<A> {
    private B map(A bytes) throws SerializationException {
        return mapper.map(bytes);
    }

+   @Override
+   protected RuntimeException createResourceClosedException() {
+       throw new IllegalStateException("Closed");
+   }
+
+   @Override
+   protected Owned<DatabaseSingleMapped<A, B>> prepareSend() {
+       var serializedSingle = this.serializedSingle.send();
+       return drop -> new DatabaseSingleMapped<>(serializedSingle, mapper, drop);
+   }
+
+   @Override
+   protected void makeInaccessible() {
+       this.serializedSingle = null;
+   }
+
+   private static class CloseOnDrop<A, B> implements Drop<DatabaseSingleMapped<A, B>> {
+
+       private final Drop<DatabaseSingleMapped<A, B>> delegate;
+
+       public CloseOnDrop(Drop<DatabaseSingleMapped<A, B>> drop) {
+           this.delegate = drop;
+       }
+
+       @Override
+       public void drop(DatabaseSingleMapped<A, B> obj) {
+           if (obj.serializedSingle != null) {
+               obj.serializedSingle.close();
+           }
+           delegate.drop(obj);
+       }
+   }
 }
@@ -1,5 +1,6 @@
 package it.cavallium.dbengine.database.collections;

+import io.net5.buffer.api.Resource;
 import it.cavallium.dbengine.client.BadBlock;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.Delta;
@@ -12,7 +13,7 @@ import org.jetbrains.annotations.Nullable;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;

-public interface DatabaseStage<T> extends DatabaseStageWithEntry<T> {
+public interface DatabaseStage<T> extends DatabaseStageWithEntry<T>, Resource<DatabaseStage<T>> {

    default Mono<T> get(@Nullable CompositeSnapshot snapshot) {
        return get(snapshot, false);
@@ -74,12 +75,6 @@ public interface DatabaseStage<T> extends DatabaseStageWithEntry<T> {
        return clearAndGetPrevious().map(Objects::nonNull).defaultIfEmpty(false);
    }

-   void release();
-
-   default Mono<Void> close() {
-       return Mono.empty();
-   }
-
    /**
     * Count all the elements.
     * If it's a nested collection the count will include all the children recursively
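With release() and the default Mono<Void> close() removed, every DatabaseStage is now a Resource and inherits the single close()/send()/isAccessible() lifecycle of the buffer API, so a stage can sit in an ordinary try-with-resources block. A hedged fragment; openStageSomehow() is a placeholder for however the stage is actually obtained, and only the DatabaseStage calls are from this diff:

// Hypothetical caller of the new interface:
try (DatabaseStage<String> stage = openStageSomehow()) {
    String value = stage.get(null).block(); // block() used here only for illustration
    System.out.println(value);
} // close() runs the stage's Drop chain, replacing the old release()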
@@ -1,5 +1,6 @@
 package it.cavallium.dbengine.database.collections;

+import io.net5.buffer.api.Resource;
 import it.cavallium.dbengine.client.BadBlock;
 import reactor.core.publisher.Flux;

@@ -34,11 +34,8 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
    Mono<US> at(@Nullable CompositeSnapshot snapshot, T key);

    default Mono<U> getValue(@Nullable CompositeSnapshot snapshot, T key, boolean existsAlmostCertainly) {
-       return Mono.usingWhen(
-               this.at(snapshot, key),
-               stage -> stage.get(snapshot, existsAlmostCertainly),
-               stage -> Mono.fromRunnable(stage::release)
-       );
+       return LLUtils.usingResource(this.at(snapshot, key),
+               stage -> stage.get(snapshot, existsAlmostCertainly), true);
    }

    default Mono<U> getValue(@Nullable CompositeSnapshot snapshot, T key) {
@@ -50,11 +47,8 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
    }

    default Mono<Void> putValue(T key, U value) {
-       return Mono.usingWhen(
-               at(null, key).single(),
-               stage -> stage.set(value),
-               stage -> Mono.fromRunnable(stage::release)
-       );
+       return LLUtils.usingResource(at(null, key).single(),
+               stage -> stage.set(value), true);
    }

    Mono<UpdateMode> getUpdateMode();
@@ -63,11 +57,8 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
            UpdateReturnMode updateReturnMode,
            boolean existsAlmostCertainly,
            SerializationFunction<@Nullable U, @Nullable U> updater) {
-       return Mono.usingWhen(
-               this.at(null, key).single(),
-               stage -> stage.update(updater, updateReturnMode, existsAlmostCertainly),
-               stage -> Mono.fromRunnable(stage::release)
-       );
+       return LLUtils.usingResource(this.at(null, key).single(),
+               stage -> stage.update(updater, updateReturnMode, existsAlmostCertainly), true);
    }

    default <X> Flux<ExtraKeyOperationResult<T, X>> updateMulti(Flux<Tuple2<T, X>> entries,
@@ -94,11 +85,8 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
    default Mono<Delta<U>> updateValueAndGetDelta(T key,
            boolean existsAlmostCertainly,
            SerializationFunction<@Nullable U, @Nullable U> updater) {
-       return Mono.usingWhen(
-               this.at(null, key).single(),
-               stage -> stage.updateAndGetDelta(updater, existsAlmostCertainly),
-               stage -> Mono.fromRunnable(stage::release)
-       );
+       return LLUtils.usingResource(this.at(null, key).single(),
+               stage -> stage.updateAndGetDelta(updater, existsAlmostCertainly), true);
    }

    default Mono<Delta<U>> updateValueAndGetDelta(T key, SerializationFunction<@Nullable U, @Nullable U> updater) {
@@ -106,22 +94,14 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
    }

    default Mono<U> putValueAndGetPrevious(T key, U value) {
-       return Mono.usingWhen(
-               at(null, key).single(),
-               stage -> stage.setAndGetPrevious(value),
-               stage -> Mono.fromRunnable(stage::release)
-       );
+       return LLUtils.usingResource(at(null, key).single(), stage -> stage.setAndGetPrevious(value), true);
    }

    /**
     * @return true if the key was associated with any value, false if the key didn't exist.
     */
    default Mono<Boolean> putValueAndGetChanged(T key, U value) {
-       return Mono.usingWhen(
-               at(null, key).single(),
-               stage -> stage.setAndGetChanged(value),
-               stage -> Mono.fromRunnable(stage::release)
-       ).single();
+       return LLUtils.usingResource(at(null, key).single(), stage -> stage.setAndGetChanged(value), true).single();
    }

    default Mono<Void> remove(T key) {
@@ -129,11 +109,7 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
    }

    default Mono<U> removeAndGetPrevious(T key) {
-       return Mono.usingWhen(
-               at(null, key),
-               DatabaseStage::clearAndGetPrevious,
-               stage -> Mono.fromRunnable(stage::release)
-       );
+       return LLUtils.usingResource(at(null, key), DatabaseStage::clearAndGetPrevious, true);
    }

    default Mono<Boolean> removeAndGetStatus(T key) {
@@ -175,11 +151,11 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
    default Flux<Entry<T, U>> getAllValues(@Nullable CompositeSnapshot snapshot) {
        return this
                .getAllStages(snapshot)
-               .flatMapSequential(entry -> entry
+               .flatMapSequential(stage -> stage
                        .getValue()
                        .get(snapshot, true)
-                       .map(value -> Map.entry(entry.getKey(), value))
-                       .doAfterTerminate(() -> entry.getValue().release())
+                       .map(value -> Map.entry(stage.getKey(), value))
+                       .doFinally(s -> stage.getValue().close())
                );
    }

@@ -193,7 +169,8 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
        return setAllValues(Flux.empty());
    }

-   default Mono<Void> replaceAllValues(boolean canKeysChange, Function<Entry<T, U>, Mono<Entry<T, U>>> entriesReplacer) {
+   default Mono<Void> replaceAllValues(boolean canKeysChange, Function<Entry<T, U>,
+           Mono<Entry<T, U>>> entriesReplacer) {
        if (canKeysChange) {
            return this.setAllValues(this.getAllValues(null).flatMap(entriesReplacer)).then();
        } else {
@@ -202,7 +179,11 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
                    .flatMap(entriesReplacer)
                    .flatMap(replacedEntry -> this
                            .at(null, replacedEntry.getKey())
-                           .flatMap(v -> v.set(replacedEntry.getValue()).doAfterTerminate(v::release)))
+                           .flatMap(stage -> stage
+                                   .set(replacedEntry.getValue())
+                                   .doFinally(s -> stage.close())
+                           )
+                   )
                    .then();
        }
    }
@@ -210,9 +191,8 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
    default Mono<Void> replaceAll(Function<Entry<T, US>, Mono<Void>> entriesReplacer) {
        return this
                .getAllStages(null)
-               .flatMap(stage -> Mono
-                       .defer(() -> entriesReplacer.apply(stage))
-                       .doAfterTerminate(() -> stage.getValue().release())
+               .flatMap(stage -> entriesReplacer.apply(stage)
+                       .doFinally(s -> stage.getValue().close())
                )
                .then();
    }
@@ -221,14 +201,15 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat
    default Mono<Map<T, U>> setAndGetPrevious(Map<T, U> value) {
        return this
                .setAllValuesAndGetPrevious(Flux.fromIterable(Map.copyOf(value).entrySet()))
-               .collectMap(Entry::getKey, Entry::getValue, HashMap::new);
+               .collectMap(Entry::getKey, Entry::getValue, HashMap::new)
+               .filter(map -> !map.isEmpty());
    }

    @Override
    default Mono<Boolean> setAndGetChanged(Map<T, U> value) {
        return this
                .setAndGetPrevious(value)
-               .map(oldValue -> !Objects.equals(oldValue, value))
+               .map(oldValue -> !Objects.equals(oldValue, value.isEmpty() ? null : value))
                .switchIfEmpty(Mono.fromSupplier(() -> !value.isEmpty()));
    }

@@ -286,17 +267,17 @@ public interface DatabaseStageMap<T, U, US extends DatabaseStage<U>> extends Dat

    @Override
    default Mono<Map<T, U>> get(@Nullable CompositeSnapshot snapshot, boolean existsAlmostCertainly) {
-       return getAllValues(snapshot)
-               .collectMap(Entry::getKey, Entry::getValue, HashMap::new);
+       return this
+               .getAllValues(snapshot)
+               .collectMap(Entry::getKey, Entry::getValue, HashMap::new)
+               .filter(map -> !map.isEmpty());
    }

    @Override
    default Mono<Long> leavesCount(@Nullable CompositeSnapshot snapshot, boolean fast) {
-       return getAllStages(snapshot)
-               .flatMap(stage -> Mono
-                       .fromRunnable(() -> stage.getValue().release())
-                       .thenReturn(true)
-               )
+       return this
+               .getAllStages(snapshot)
+               .doOnNext(stage -> stage.getValue().close())
                .count();
    }

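Every Mono.usingWhen(resource, use, release-based cleanup) in this interface collapses into LLUtils.usingResource(resource, use, true). That helper is not part of this diff; judging by the call sites, it is presumably a thin wrapper over Mono.usingWhen that closes the Resource on success, error and cancellation alike:

// Assumed shape of LLUtils.usingResource, inferred from the call sites above
// (needs reactor.core.publisher.Mono, java.util.function.Function and
// io.net5.buffer.api.Resource). The boolean presumably controls cleanup on success.
public static <T extends Resource<?>, U> Mono<U> usingResource(Mono<T> resourceMono,
        Function<T, Mono<U>> resourceClosure,
        boolean cleanupOnSuccess) {
    return Mono.usingWhen(resourceMono,
            resourceClosure,
            r -> cleanupOnSuccess ? Mono.<Void>fromRunnable(r::close) : Mono.<Void>empty(),
            (r, ex) -> Mono.fromRunnable(r::close),
            r -> Mono.fromRunnable(r::close));
}

Also visible in the hunks above: setAndGetPrevious and get now filter out empty maps, so an empty nested map is reported as an absent value rather than an empty one, and setAndGetChanged compensates by comparing the previous value against null whenever the new value is empty.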
@@ -1,5 +1,6 @@
 package it.cavallium.dbengine.database.collections;

+import io.net5.buffer.api.Resource;
 import it.cavallium.dbengine.client.BadBlock;
 import reactor.core.publisher.Mono;

@@ -4,6 +4,7 @@ import io.net5.buffer.api.Buffer;
 import io.net5.buffer.api.Send;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.LLDictionary;
+import it.cavallium.dbengine.database.LLUtils;
 import it.cavallium.dbengine.database.serialization.Serializer;
 import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
 import java.util.Map;
@@ -34,20 +35,9 @@ public class SubStageGetterHashMap<T, U, TH> implements
    public Mono<DatabaseMapDictionaryHashed<T, U, TH>> subStage(LLDictionary dictionary,
            @Nullable CompositeSnapshot snapshot,
            Mono<Send<Buffer>> prefixKeyMono) {
-       return Mono.usingWhen(
-               prefixKeyMono,
-               prefixKey -> Mono
-                       .fromSupplier(() -> DatabaseMapDictionaryHashed
-                               .tail(dictionary,
-                                       prefixKey,
-                                       keySerializer,
-                                       valueSerializer,
-                                       keyHashFunction,
-                                       keyHashSerializer
-                               )
-                       ),
-               prefixKey -> Mono.fromRunnable(prefixKey::close)
-       );
+       return LLUtils.usingSend(prefixKeyMono, prefixKey -> Mono.just(DatabaseMapDictionaryHashed
+               .tail(dictionary, prefixKey, keySerializer, valueSerializer, keyHashFunction,
+                       keyHashSerializer, d -> {})), true);
    }

    public int getKeyHashBinaryLength() {
@@ -34,13 +34,8 @@ public class SubStageGetterHashSet<T, TH> implements
            Mono<Send<Buffer>> prefixKeyMono) {
        return Mono.usingWhen(prefixKeyMono,
                prefixKey -> Mono
-                       .fromSupplier(() -> DatabaseSetDictionaryHashed
-                               .tail(dictionary,
-                                       prefixKey,
-                                       keySerializer,
-                                       keyHashFunction,
-                                       keyHashSerializer
-                               )
+                       .fromSupplier(() -> DatabaseSetDictionaryHashed.tail(dictionary, prefixKey, keySerializer,
+                               keyHashFunction, keyHashSerializer, d -> {})
                ),
                prefixKey -> Mono.fromRunnable(prefixKey::close)
        );
@@ -4,6 +4,7 @@ import io.net5.buffer.api.Buffer;
 import io.net5.buffer.api.Send;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.LLDictionary;
+import it.cavallium.dbengine.database.LLUtils;
 import it.cavallium.dbengine.database.serialization.Serializer;
 import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
 import java.util.Map;
@@ -25,11 +26,9 @@ public class SubStageGetterMap<T, U> implements SubStageGetter<Map<T, U>, Databa
    public Mono<DatabaseMapDictionary<T, U>> subStage(LLDictionary dictionary,
            @Nullable CompositeSnapshot snapshot,
            Mono<Send<Buffer>> prefixKeyMono) {
-       return Mono.usingWhen(prefixKeyMono,
+       return LLUtils.usingSend(prefixKeyMono,
                prefixKey -> Mono.fromSupplier(() -> DatabaseMapDictionary
-                       .tail(dictionary, prefixKey, keySerializer, valueSerializer)),
-               prefixKey -> Mono.fromRunnable(prefixKey::close)
-       );
+                       .tail(dictionary, prefixKey, keySerializer, valueSerializer, d -> {})), true);
    }

    public int getKeyBinaryLength() {
@@ -4,6 +4,7 @@ import io.net5.buffer.api.Buffer;
 import io.net5.buffer.api.Send;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.database.LLDictionary;
+import it.cavallium.dbengine.database.LLUtils;
 import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
 import java.util.Map;
 import org.jetbrains.annotations.Nullable;
@@ -40,11 +41,9 @@ public class SubStageGetterMapDeep<T, U, US extends DatabaseStage<U>> implements
    public Mono<DatabaseMapDictionaryDeep<T, U, US>> subStage(LLDictionary dictionary,
            @Nullable CompositeSnapshot snapshot,
            Mono<Send<Buffer>> prefixKeyMono) {
-       return Mono.usingWhen(prefixKeyMono,
-               prefixKey -> Mono.fromSupplier(() -> DatabaseMapDictionaryDeep
-                       .deepIntermediate(dictionary, prefixKey, keySerializer, subStageGetter, keyExtLength)),
-               prefixKey -> Mono.fromRunnable(prefixKey::close)
-       );
+       return LLUtils.usingSend(prefixKeyMono, prefixKey -> Mono.just(DatabaseMapDictionaryDeep
+               .deepIntermediate(dictionary, prefixKey, keySerializer, subStageGetter, keyExtLength,
+                       d -> {})), true);
    }

    public int getKeyBinaryLength() {
@@ -24,7 +24,7 @@ public class SubStageGetterSet<T> implements SubStageGetter<Map<T, Nothing>, Dat
            Mono<Send<Buffer>> prefixKeyMono) {
        return Mono.usingWhen(prefixKeyMono,
                prefixKey -> Mono
-                       .fromSupplier(() -> DatabaseSetDictionary.tail(dictionary, prefixKey, keySerializer)),
+                       .fromSupplier(() -> DatabaseSetDictionary.tail(dictionary, prefixKey, keySerializer, d -> {})),
                prefixKey -> Mono.fromRunnable(prefixKey::close)
        );
    }
@@ -20,12 +20,7 @@ public class SubStageGetterSingle<T> implements SubStageGetter<T, DatabaseStageE
    public Mono<DatabaseStageEntry<T>> subStage(LLDictionary dictionary,
            @Nullable CompositeSnapshot snapshot,
            Mono<Send<Buffer>> keyPrefixMono) {
-       return Mono.usingWhen(
-               keyPrefixMono,
-               keyPrefix -> Mono
-                       .<DatabaseStageEntry<T>>fromSupplier(() -> new DatabaseSingle<>(dictionary, keyPrefix, serializer)),
-               keyPrefix -> Mono.fromRunnable(keyPrefix::close)
-       );
+       return keyPrefixMono.map(keyPrefix -> new DatabaseSingle<>(dictionary, keyPrefix, serializer, d -> {}));
    }

 }
@@ -2,13 +2,16 @@ package it.cavallium.dbengine.database.collections;
 
 import io.net5.buffer.api.Buffer;
 import io.net5.buffer.api.BufferAllocator;
+import io.net5.buffer.api.CompositeBuffer;
 import io.net5.buffer.api.Send;
 import it.cavallium.dbengine.database.LLUtils;
 import it.cavallium.dbengine.database.serialization.SerializationException;
 import it.cavallium.dbengine.database.serialization.Serializer;
 import java.util.Map;
 import java.util.Map.Entry;
+import java.util.Objects;
 import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
 
 class ValueWithHashSerializer<X, Y> implements Serializer<Entry<X, Y>> {
 
@@ -25,8 +28,9 @@ class ValueWithHashSerializer<X, Y> implements Serializer<Entry<X, Y>> {
 	}
 
 	@Override
-	public @NotNull DeserializationResult<Entry<X, Y>> deserialize(@NotNull Send<Buffer> serializedToReceive)
+	public @NotNull DeserializationResult<Entry<X, Y>> deserialize(@Nullable Send<Buffer> serializedToReceive)
 			throws SerializationException {
+		Objects.requireNonNull(serializedToReceive);
 		try (var serialized = serializedToReceive.receive()) {
 			DeserializationResult<X> deserializedKey = keySuffixSerializer.deserialize(serialized.copy().send());
 			DeserializationResult<Y> deserializedValue = valueSerializer.deserialize(serialized
@@ -41,10 +45,8 @@ class ValueWithHashSerializer<X, Y> implements Serializer<Entry<X, Y>> {
 
 	@Override
 	public @NotNull Send<Buffer> serialize(@NotNull Entry<X, Y> deserialized) throws SerializationException {
-		try (Buffer keySuffix = keySuffixSerializer.serialize(deserialized.getKey()).receive()) {
-			try (Buffer value = valueSerializer.serialize(deserialized.getValue()).receive()) {
-				return LLUtils.compositeBuffer(allocator, keySuffix.send(), value.send());
-			}
-		}
+		var keySuffix = keySuffixSerializer.serialize(deserialized.getKey());
+		var value = valueSerializer.serialize(deserialized.getValue());
+		return LLUtils.compositeBuffer(allocator, keySuffix, value).send();
 	}
 }
@@ -7,7 +7,9 @@ import it.cavallium.dbengine.database.serialization.SerializationException;
 import it.cavallium.dbengine.database.serialization.Serializer;
 import it.unimi.dsi.fastutil.objects.ObjectArraySet;
 import java.util.ArrayList;
+import java.util.Objects;
 import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
 
 class ValuesSetSerializer<X> implements Serializer<ObjectArraySet<X>> {
 
@@ -20,7 +22,8 @@ class ValuesSetSerializer<X> implements Serializer<ObjectArraySet<X>> {
 	}
 
 	@Override
-	public @NotNull DeserializationResult<ObjectArraySet<X>> deserialize(@NotNull Send<Buffer> serializedToReceive) throws SerializationException {
+	public @NotNull DeserializationResult<ObjectArraySet<X>> deserialize(@Nullable Send<Buffer> serializedToReceive) throws SerializationException {
+		Objects.requireNonNull(serializedToReceive);
 		try (var serialized = serializedToReceive.receive()) {
 			int initialReaderOffset = serialized.readerOffset();
 			int entriesLength = serialized.readInt();
@@ -41,9 +44,12 @@ class ValuesSetSerializer<X> implements Serializer<ObjectArraySet<X>> {
 		try (Buffer output = allocator.allocate(64)) {
 			output.writeInt(deserialized.size());
 			for (X entry : deserialized) {
-				try (Buffer serialized = entrySerializer.serialize(entry).receive()) {
-					output.ensureWritable(serialized.readableBytes());
-					output.writeBytes(serialized);
+				var serializedToReceive = entrySerializer.serialize(entry);
+				try (Buffer serialized = serializedToReceive.receive()) {
+					if (serialized.readableBytes() > 0) {
+						output.ensureWritable(serialized.readableBytes());
+						output.writeBytes(serialized);
+					}
 				}
 			}
 			return output.send();
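Aside on the serializer changes above: serialize(..) now hands back a Send<Buffer> whose consumer must call receive() and close the buffer exactly once. A minimal sketch of that contract, assuming the io.net5 buffer API (the allocator factory name varies between netty-buffer snapshots; this is illustrative code, not part of the commit):

    import io.net5.buffer.api.Buffer;
    import io.net5.buffer.api.BufferAllocator;
    import io.net5.buffer.api.Send;

    final class SendReceiveSketch {
    	// receive() transfers ownership into this scope; try-with-resources then
    	// guarantees the buffer is closed exactly once, mirroring how the new
    	// ValuesSetSerializer.serialize(..) consumes each entry.
    	static int readLength(Send<Buffer> serializedToReceive) {
    		try (Buffer serialized = serializedToReceive.receive()) {
    			return serialized.readInt();
    		}
    	}

    	public static void main(String[] args) {
    		BufferAllocator alloc = BufferAllocator.onHeapUnpooled(); // factory name is an assumption
    		Buffer buf = alloc.allocate(Integer.BYTES);
    		buf.writeInt(42);
    		System.out.println(readLength(buf.send())); // prints 42
    	}
    }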
@@ -1,106 +0,0 @@
-package it.cavallium.dbengine.database.disk;
-
-import java.io.IOException;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.SearcherManager;
-import org.jetbrains.annotations.Nullable;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class CachedIndexSearcher {
-
-	private static final Logger logger = LoggerFactory.getLogger(CachedIndexSearcher.class);
-
-	private final IndexSearcher indexSearcher;
-	private final SearcherManager associatedSearcherManager;
-	private final Runnable afterFinalization;
-	private boolean inCache = true;
-	private int usages = 0;
-
-	public CachedIndexSearcher(IndexSearcher indexSearcher,
-			@Nullable SearcherManager associatedSearcherManager,
-			@Nullable Runnable afterFinalization) {
-		this.indexSearcher = indexSearcher;
-		this.associatedSearcherManager = associatedSearcherManager;
-		this.afterFinalization = afterFinalization;
-	}
-
-	public void incUsage() {
-		synchronized (this) {
-			usages++;
-		}
-	}
-
-	public void decUsage() throws IOException {
-		synchronized (this) {
-			if (usages > 0) {
-				usages--;
-				if (mustClose()) {
-					try {
-						close();
-					} finally {
-						if (afterFinalization != null) afterFinalization.run();
-					}
-				}
-			}
-		}
-	}
-
-	public void removeFromCache() throws IOException {
-		synchronized (this) {
-			if (inCache) {
-				inCache = false;
-				if (mustClose()) {
-					try {
-						close();
-					} finally {
-						if (afterFinalization != null) afterFinalization.run();
-					}
-				}
-			}
-		}
-	}
-
-	private void close() throws IOException {
-		if (associatedSearcherManager != null) {
-			associatedSearcherManager.release(indexSearcher);
-		}
-	}
-
-	private boolean mustClose() {
-		return !this.inCache && this.usages == 0;
-	}
-
-	public IndexReader getIndexReader() {
-		return indexSearcher.getIndexReader();
-	}
-
-	public IndexSearcher getIndexSearcher() {
-		return indexSearcher;
-	}
-
-	@SuppressWarnings("deprecation")
-	@Override
-	protected void finalize() throws Throwable {
-		boolean failedToRelease = false;
-		if (usages > 0) {
-			failedToRelease = true;
-			logger.error("A cached index searcher has been garbage collected, but "
-					+ usages + " usages have not been released");
-		}
-		if (inCache) {
-			failedToRelease = true;
-			logger.error("A cached index searcher has been garbage collected, but it's marked"
-					+ " as still actively cached");
-		}
-		if (failedToRelease) {
-			try {
-				this.close();
-			} catch (Throwable ex) {
-				logger.warn("Error when closing cached index searcher", ex);
-			}
-		}
-		super.finalize();
-	}
-}
@@ -3,16 +3,16 @@ package it.cavallium.dbengine.database.disk;
 import com.google.common.cache.CacheBuilder;
 import com.google.common.cache.CacheLoader;
 import com.google.common.cache.LoadingCache;
+import io.net5.buffer.api.Send;
+import io.net5.buffer.api.internal.ResourceSupport;
 import it.cavallium.dbengine.database.LLSnapshot;
 import java.io.IOException;
+import java.io.UncheckedIOException;
 import java.time.Duration;
-import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.Phaser;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
-import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.locks.LockSupport;
 import java.util.function.Function;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.search.IndexSearcher;
@@ -29,8 +29,9 @@ import reactor.core.publisher.Mono;
 import reactor.core.publisher.Sinks;
 import reactor.core.publisher.Sinks.Empty;
 import reactor.core.scheduler.Schedulers;
+import reactor.util.function.Tuples;
 
-public class CachedIndexSearcherManager {
+public class CachedIndexSearcherManager implements IndexSearcherManager {
 
 	private static final Logger logger = LoggerFactory.getLogger(CachedIndexSearcherManager.class);
 
@@ -41,8 +42,8 @@ public class CachedIndexSearcherManager {
 	private final Phaser activeSearchers = new Phaser(1);
 	private final Phaser activeRefreshes = new Phaser(1);
 
-	private final LoadingCache<LLSnapshot, Mono<CachedIndexSearcher>> cachedSnapshotSearchers;
-	private final Mono<CachedIndexSearcher> cachedMainSearcher;
+	private final LoadingCache<LLSnapshot, Mono<Send<LLIndexSearcher>>> cachedSnapshotSearchers;
+	private final Mono<Send<LLIndexSearcher>> cachedMainSearcher;
 
 	private final Empty<Void> closeRequested = Sinks.empty();
 	private final Empty<Void> refresherClosed = Sinks.empty();
@@ -84,7 +85,7 @@ public class CachedIndexSearcherManager {
 				.maximumSize(3)
 				.build(new CacheLoader<>() {
 					@Override
-					public Mono<CachedIndexSearcher> load(@NotNull LLSnapshot snapshot) {
+					public Mono<Send<LLIndexSearcher>> load(@NotNull LLSnapshot snapshot) {
 						return CachedIndexSearcherManager.this.generateCachedSearcher(snapshot);
 					}
 				});
@@ -129,47 +130,39 @@ public class CachedIndexSearcherManager {
 				})).cache();
 	}
 
-	private Mono<CachedIndexSearcher> generateCachedSearcher(@Nullable LLSnapshot snapshot) {
-		return Mono.fromCallable(() -> {
-			activeSearchers.register();
-			IndexSearcher indexSearcher;
-			SearcherManager associatedSearcherManager;
-			if (snapshot == null) {
-				indexSearcher = searcherManager.acquire();
+	private Mono<Send<LLIndexSearcher>> generateCachedSearcher(@Nullable LLSnapshot snapshot) {
+		// todo: check if defer is really needed
+		return Mono.defer(() -> {
+			var onClose = this.closeRequested.asMono();
+			var onQueryRefresh = Mono.delay(queryRefreshDebounceTime).then();
+			var onInvalidateCache = Mono.firstWithSignal(onClose, onQueryRefresh).doOnNext(s -> System.err.println("Invalidation triggered"));
+			return Mono.fromCallable(() -> {
+				activeSearchers.register();
+				IndexSearcher indexSearcher;
+				if (snapshot == null) {
+					indexSearcher = searcherManager.acquire();
+				} else {
+					indexSearcher = snapshotsManager.resolveSnapshot(snapshot).getIndexSearcher();
+				}
 				indexSearcher.setSimilarity(similarity);
-				associatedSearcherManager = searcherManager;
-			} else {
-				indexSearcher = snapshotsManager.resolveSnapshot(snapshot).getIndexSearcher();
-				associatedSearcherManager = null;
-			}
-			AtomicBoolean alreadyDeregistered = new AtomicBoolean(false);
-			return new CachedIndexSearcher(indexSearcher, associatedSearcherManager,
-					() -> {
-						// This shouldn't happen more than once,
-						// but I put this AtomicBoolean to be sure that this will NEVER happen more than once.
-						if (alreadyDeregistered.compareAndSet(false, true)) {
-							activeSearchers.arriveAndDeregister();
-						} else {
-							logger.error("Disposed CachedIndexSearcher twice! This is an implementation bug!");
-						}
-					}
-			);
-		})
-				.cacheInvalidateWhen(indexSearcher -> Mono
-						.firstWithSignal(
-								this.closeRequested.asMono(),
-								Mono.delay(queryRefreshDebounceTime).then()
-						),
-						indexSearcher -> {
-							try {
-								// Mark as removed from cache
-								indexSearcher.removeFromCache();
-							} catch (Exception ex) {
-								logger.error("Failed to release an old cached IndexSearcher", ex);
-							}
-						});
+				assert indexSearcher.getIndexReader().getRefCount() > 0;
+				return indexSearcher;
+			})
+					// todo: re-enable caching if needed
+					//.cacheInvalidateWhen(tuple -> onInvalidateCache)
+					.map(indexSearcher -> new LLIndexSearcher(indexSearcher, this::dropCachedIndexSearcher).send())
+					.takeUntilOther(onClose)
+					.doOnDiscard(Send.class, Send::close);
+		});
 	}
 
+	private void dropCachedIndexSearcher(LLIndexSearcher cachedIndexSearcher) {
+		// This shouldn't happen more than once per searcher.
+		activeSearchers.arriveAndDeregister();
+	}
+
+	@Override
 	public void maybeRefreshBlocking() throws IOException {
 		try {
 			activeRefreshes.register();
@@ -181,6 +174,7 @@ public class CachedIndexSearcherManager {
 		}
 	}
 
+	@Override
 	public void maybeRefresh() throws IOException {
 		try {
 			activeRefreshes.register();
@@ -192,30 +186,8 @@ public class CachedIndexSearcherManager {
 		}
 	}
 
-	public <T> Flux<T> searchMany(@Nullable LLSnapshot snapshot, Function<IndexSearcher, Flux<T>> searcherFunction) {
-		return Flux.usingWhen(
-				this.captureIndexSearcher(snapshot),
-				indexSearcher -> searcherFunction.apply(indexSearcher.getIndexSearcher()),
-				this::releaseUsedIndexSearcher
-		);
-	}
-
-	public <T> Mono<T> search(@Nullable LLSnapshot snapshot, Function<IndexSearcher, Mono<T>> searcherFunction) {
-		return Mono.usingWhen(
-				this.captureIndexSearcher(snapshot),
-				indexSearcher -> searcherFunction.apply(indexSearcher.getIndexSearcher()),
-				this::releaseUsedIndexSearcher
-		);
-	}
-
-	public Mono<CachedIndexSearcher> captureIndexSearcher(@Nullable LLSnapshot snapshot) {
-		return this
-				.retrieveCachedIndexSearcher(snapshot)
-				// Increment reference count
-				.doOnNext(CachedIndexSearcher::incUsage);
-	}
-
-	private Mono<CachedIndexSearcher> retrieveCachedIndexSearcher(LLSnapshot snapshot) {
+	@Override
+	public Mono<Send<LLIndexSearcher>> retrieveSearcher(@Nullable LLSnapshot snapshot) {
 		if (snapshot == null) {
 			return this.cachedMainSearcher;
 		} else {
@@ -223,17 +195,7 @@ public class CachedIndexSearcherManager {
 		}
 	}
 
-	public Mono<Void> releaseUsedIndexSearcher(CachedIndexSearcher indexSearcher) {
-		return Mono.fromRunnable(() -> {
-			try {
-				// Decrement reference count
-				indexSearcher.decUsage();
-			} catch (Exception ex) {
-				logger.error("Failed to release an used IndexSearcher", ex);
-			}
-		});
-	}
-
 	public Mono<Void> close() {
 		return closeMono;
 	}
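The manager above tracks outstanding searchers with a Phaser: register() when a searcher is handed out, arriveAndDeregister() when it is dropped, and an await on shutdown. A stripped-down sketch of the pattern (illustrative names, not this codebase's API):

    import java.util.concurrent.Phaser;

    final class SearcherTracker {
    	private final Phaser activeSearchers = new Phaser(1); // party 0 is the tracker itself

    	// One registered party per live searcher; the returned Runnable must be
    	// invoked exactly once on release, like dropCachedIndexSearcher above.
    	Runnable acquire() {
    		activeSearchers.register();
    		return activeSearchers::arriveAndDeregister;
    	}

    	// Arrive with the tracker's own party and block until every outstanding
    	// searcher has been released.
    	void closeAndAwait() {
    		activeSearchers.arriveAndAwaitAdvance();
    	}
    }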
@@ -0,0 +1,21 @@
+package it.cavallium.dbengine.database.disk;
+
+import io.net5.buffer.api.Send;
+import it.cavallium.dbengine.database.LLSnapshot;
+import java.io.IOException;
+import java.util.function.Function;
+import org.apache.lucene.search.IndexSearcher;
+import org.jetbrains.annotations.Nullable;
+import reactor.core.publisher.Flux;
+import reactor.core.publisher.Mono;
+
+public interface IndexSearcherManager {
+
+	void maybeRefreshBlocking() throws IOException;
+
+	void maybeRefresh() throws IOException;
+
+	Mono<Send<LLIndexSearcher>> retrieveSearcher(@Nullable LLSnapshot snapshot);
+
+	Mono<Void> close();
+}
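One plausible way a caller consumes retrieveSearcher(..) — a sketch under stated assumptions (same-package access; countDocs is a hypothetical helper, not code from this commit):

    import io.net5.buffer.api.Send;
    import reactor.core.publisher.Mono;

    final class RetrieveSearcherSketch {
    	// The Send is received and the searcher closed inside the same lambda,
    	// so ownership never escapes the mapping function.
    	static Mono<Integer> countDocs(IndexSearcherManager manager) {
    		return manager.retrieveSearcher(null).map((Send<LLIndexSearcher> send) -> {
    			try (LLIndexSearcher searcher = send.receive()) {
    				return searcher.getIndexReader().numDocs();
    			}
    		});
    	}
    }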
@@ -0,0 +1,52 @@
+package it.cavallium.dbengine.database.disk;
+
+import io.net5.buffer.api.Drop;
+import io.net5.buffer.api.Owned;
+import io.net5.buffer.api.internal.ResourceSupport;
+import it.cavallium.dbengine.database.LiveResourceSupport;
+import java.io.IOException;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.SearcherManager;
+import org.jetbrains.annotations.Nullable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class LLIndexSearcher extends LiveResourceSupport<LLIndexSearcher, LLIndexSearcher> {
+
+	private IndexSearcher indexSearcher;
+
+	public LLIndexSearcher(IndexSearcher indexSearcher, Drop<LLIndexSearcher> drop) {
+		super(drop);
+		this.indexSearcher = indexSearcher;
+	}
+
+	public IndexReader getIndexReader() {
+		if (!isOwned()) {
+			throw attachTrace(new IllegalStateException("LLIndexSearcher must be owned to be used"));
+		}
+		return indexSearcher.getIndexReader();
+	}
+
+	public IndexSearcher getIndexSearcher() {
+		if (!isOwned()) {
+			throw attachTrace(new IllegalStateException("LLIndexSearcher must be owned to be used"));
+		}
+		return indexSearcher;
+	}
+
+	@Override
+	protected RuntimeException createResourceClosedException() {
+		return new IllegalStateException("Closed");
+	}
+
+	@Override
+	protected Owned<LLIndexSearcher> prepareSend() {
+		var indexSearcher = this.indexSearcher;
+		return drop -> new LLIndexSearcher(indexSearcher, drop);
+	}
+
+	protected void makeInaccessible() {
+		this.indexSearcher = null;
+	}
+}
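For context, prepareSend() above is what makes LLIndexSearcher transferable across threads: send() detaches the wrapped IndexSearcher from the current owner and receive() re-attaches it, so exactly one owner exists at any time. A hypothetical sketch (assumes same-package access and an IndexSearcher instance; not part of the commit):

    import io.net5.buffer.api.Send;
    import org.apache.lucene.search.IndexSearcher;

    final class OwnershipSketch {
    	static int useElsewhere(IndexSearcher indexSearcher) {
    		LLIndexSearcher searcher = new LLIndexSearcher(indexSearcher, d -> {});
    		Send<LLIndexSearcher> sent = searcher.send(); // 'searcher' is now inaccessible here
    		try (LLIndexSearcher received = sent.receive()) {
    			return received.getIndexReader().numDocs(); // allowed: 'received' is owned
    		}
    	}
    }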
@@ -0,0 +1,207 @@
+package it.cavallium.dbengine.database.disk;
+
+import io.net5.buffer.api.Drop;
+import io.net5.buffer.api.Owned;
+import io.net5.buffer.api.Resource;
+import io.net5.buffer.api.Send;
+import io.net5.buffer.api.internal.ResourceSupport;
+import it.cavallium.dbengine.database.LiveResourceSupport;
+import it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap;
+import java.io.IOException;
+import java.io.UncheckedIOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import org.apache.lucene.index.Fields;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexReaderContext;
+import org.apache.lucene.index.MultiReader;
+import org.apache.lucene.index.StoredFieldVisitor;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.IndexSearcher;
+
+public interface LLIndexSearchers extends Resource<LLIndexSearchers> {
+
+	static LLIndexSearchers of(List<Send<LLIndexSearcher>> indexSearchers) {
+		return new ShardedIndexSearchers(indexSearchers, d -> {});
+	}
+
+	static UnshardedIndexSearchers unsharded(Send<LLIndexSearcher> indexSearcher) {
+		return new UnshardedIndexSearchers(indexSearcher, d -> {});
+	}
+
+	List<IndexSearcher> shards();
+
+	IndexSearcher shard(int shardIndex);
+
+	IndexReader allShards();
+
+	class UnshardedIndexSearchers extends LiveResourceSupport<LLIndexSearchers, UnshardedIndexSearchers>
+			implements LLIndexSearchers {
+
+		private LLIndexSearcher indexSearcher;
+
+		public UnshardedIndexSearchers(Send<LLIndexSearcher> indexSearcher, Drop<UnshardedIndexSearchers> drop) {
+			super(new CloseOnDrop(drop));
+			this.indexSearcher = indexSearcher.receive();
+		}
+
+		@Override
+		public List<IndexSearcher> shards() {
+			return List.of(indexSearcher.getIndexSearcher());
+		}
+
+		@Override
+		public IndexSearcher shard(int shardIndex) {
+			if (!isOwned()) {
+				throw attachTrace(new IllegalStateException("UnshardedIndexSearchers must be owned to be used"));
+			}
+			if (shardIndex != -1) {
+				throw new IndexOutOfBoundsException("Shard index " + shardIndex + " is invalid, this is a unsharded index");
+			}
+			return indexSearcher.getIndexSearcher();
+		}
+
+		@Override
+		public IndexReader allShards() {
+			return indexSearcher.getIndexReader();
+		}
+
+		public IndexSearcher shard() {
+			return this.shard(-1);
+		}
+
+		@Override
+		protected RuntimeException createResourceClosedException() {
+			return new IllegalStateException("Closed");
+		}
+
+		@Override
+		protected Owned<UnshardedIndexSearchers> prepareSend() {
+			Send<LLIndexSearcher> indexSearcher = this.indexSearcher.send();
+			return drop -> new UnshardedIndexSearchers(indexSearcher, drop);
+		}
+
+		protected void makeInaccessible() {
+			this.indexSearcher = null;
+		}
+
+		private static class CloseOnDrop implements Drop<UnshardedIndexSearchers> {
+
+			private final Drop<UnshardedIndexSearchers> delegate;
+
+			public CloseOnDrop(Drop<UnshardedIndexSearchers> drop) {
+				this.delegate = drop;
+			}
+
+			@Override
+			public void drop(UnshardedIndexSearchers obj) {
+				if (obj.indexSearcher != null) obj.indexSearcher.close();
+				delegate.drop(obj);
+			}
+		}
+	}
+
+	class ShardedIndexSearchers extends LiveResourceSupport<LLIndexSearchers, ShardedIndexSearchers>
+			implements LLIndexSearchers {
+
+		private List<LLIndexSearcher> indexSearchers;
+		private List<IndexSearcher> indexSearchersVals;
+
+		public ShardedIndexSearchers(List<Send<LLIndexSearcher>> indexSearchers, Drop<ShardedIndexSearchers> drop) {
+			super(new CloseOnDrop(drop));
+			this.indexSearchers = new ArrayList<>(indexSearchers.size());
+			this.indexSearchersVals = new ArrayList<>(indexSearchers.size());
+			for (Send<LLIndexSearcher> llIndexSearcher : indexSearchers) {
+				var indexSearcher = llIndexSearcher.receive();
+				this.indexSearchers.add(indexSearcher);
+				this.indexSearchersVals.add(indexSearcher.getIndexSearcher());
+			}
+		}
+
+		@Override
+		public List<IndexSearcher> shards() {
+			if (!isOwned()) {
+				throw attachTrace(new IllegalStateException("ShardedIndexSearchers must be owned to be used"));
+			}
+			return Collections.unmodifiableList(indexSearchersVals);
+		}
+
+		@Override
+		public IndexSearcher shard(int shardIndex) {
+			if (!isOwned()) {
+				throw attachTrace(new IllegalStateException("ShardedIndexSearchers must be owned to be used"));
+			}
+			if (shardIndex < 0) {
+				throw new IndexOutOfBoundsException("Shard index " + shardIndex + " is invalid");
+			}
+			return indexSearchersVals.get(shardIndex);
+		}
+
+		@Override
+		public IndexReader allShards() {
+			if (!isOwned()) {
+				throw attachTrace(new IllegalStateException("ShardedIndexSearchers must be owned to be used"));
+			}
+			var irs = new IndexReader[indexSearchersVals.size()];
+			for (int i = 0, s = indexSearchersVals.size(); i < s; i++) {
+				irs[i] = indexSearchersVals.get(i).getIndexReader();
+			}
+			Object2IntOpenHashMap<IndexReader> indexes = new Object2IntOpenHashMap<>();
+			for (int i = 0; i < irs.length; i++) {
+				indexes.put(irs[i], i);
+			}
+			try {
+				return new MultiReader(irs, Comparator.comparingInt(indexes::getInt), false);
+			} catch (IOException ex) {
+				// This shouldn't happen
+				throw new UncheckedIOException(ex);
+			}
+		}
+
+		@Override
+		protected RuntimeException createResourceClosedException() {
+			return new IllegalStateException("Closed");
+		}
+
+		@Override
+		protected Owned<ShardedIndexSearchers> prepareSend() {
+			List<Send<LLIndexSearcher>> indexSearchers = new ArrayList<>(this.indexSearchers.size());
+			for (LLIndexSearcher indexSearcher : this.indexSearchers) {
+				indexSearchers.add(indexSearcher.send());
+			}
+			return drop -> new ShardedIndexSearchers(indexSearchers, drop);
+		}
+
+		protected void makeInaccessible() {
+			this.indexSearchers = null;
+			this.indexSearchersVals = null;
+		}
+
+		private static class CloseOnDrop implements Drop<ShardedIndexSearchers> {
+
+			private volatile boolean dropped = false;
+			private final Drop<ShardedIndexSearchers> delegate;
+
+			public CloseOnDrop(Drop<ShardedIndexSearchers> drop) {
+				this.delegate = drop;
+			}
+
+			@Override
+			public void drop(ShardedIndexSearchers obj) {
+				assert !dropped;
+				if (obj.indexSearchers != null) {
+					for (LLIndexSearcher indexSearcher : obj.indexSearchers) {
+						if (indexSearcher.isAccessible()) {
+							indexSearcher.close();
+						}
+					}
+				}
+				dropped = true;
+				delegate.drop(obj);
+			}
+		}
+	}
+}
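allShards() above merges the per-shard readers into a single logical reader without taking ownership of them. A reduced sketch (the real code additionally passes a comparator to keep shard order stable; this version is illustrative only):

    import java.io.IOException;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.index.MultiReader;

    final class MergeReadersSketch {
    	// closeSubReaders = false leaves each LLIndexSearcher responsible for
    	// closing its own reader, matching the Drop-based cleanup above.
    	static IndexReader merge(IndexReader[] shards) throws IOException {
    		return new MultiReader(shards, false);
    	}
    }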
@@ -267,10 +267,23 @@ public class LLLocalDictionary implements LLDictionary {
 					stamp = 0;
 				}
 				try {
+					Buffer logKey;
 					if (logger.isTraceEnabled(MARKER_ROCKSDB)) {
-						logger.trace(MARKER_ROCKSDB, "Reading {}", LLUtils.toStringSafe(key));
+						logKey = key.copy();
+					} else {
+						logKey = null;
+					}
+					try (logKey) {
+						var result = dbGet(cfh, resolveSnapshot(snapshot), key.send(), existsAlmostCertainly);
+						if (logger.isTraceEnabled(MARKER_ROCKSDB)) {
+							try (var result2 = result == null ? null : result.receive()) {
+								logger.trace(MARKER_ROCKSDB, "Reading {}: {}", LLUtils.toStringSafe(logKey), LLUtils.toString(result2));
+								return result2 == null ? null : result2.send();
+							}
+						} else {
+							return result;
+						}
 					}
-					return dbGet(cfh, resolveSnapshot(snapshot), key.send(), existsAlmostCertainly);
 				} finally {
 					if (updateMode == UpdateMode.ALLOW) {
 						lock.unlockRead(stamp);
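The logKey copy introduced above exists because key.send() transfers the buffer away, after which it may no longer be read for logging. A self-contained sketch of the idiom (hypothetical names, not this commit's code):

    import io.net5.buffer.api.Buffer;
    import io.net5.buffer.api.Send;

    final class LogThenSendSketch {
    	// Copy before send: the copy stays owned here and is closed by the
    	// try-with-resources (a null resource is simply skipped).
    	static Send<Buffer> passAlong(Buffer key, boolean trace) {
    		Buffer logKey = trace ? key.copy() : null;
    		try (logKey) {
    			Send<Buffer> sent = key.send(); // 'key' ownership leaves this scope
    			if (logKey != null) {
    				System.out.println("Reading " + logKey.readableBytes() + " bytes");
    			}
    			return sent;
    		}
    	}
    }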
@ -300,7 +313,7 @@ public class LLLocalDictionary implements LLDictionary {
|
|||||||
// Unfortunately it's not feasible until RocksDB implements keyMayExist with buffers
|
// Unfortunately it's not feasible until RocksDB implements keyMayExist with buffers
|
||||||
|
|
||||||
// Create the key nio buffer to pass to RocksDB
|
// Create the key nio buffer to pass to RocksDB
|
||||||
var keyNioBuffer = LLUtils.convertToDirect(alloc, key.send());
|
var keyNioBuffer = LLUtils.convertToReadableDirect(alloc, key.send());
|
||||||
// Create a direct result buffer because RocksDB works only with direct buffers
|
// Create a direct result buffer because RocksDB works only with direct buffers
|
||||||
try (Buffer resultBuf = alloc.allocate(INITIAL_DIRECT_READ_BYTE_BUF_SIZE_BYTES)) {
|
try (Buffer resultBuf = alloc.allocate(INITIAL_DIRECT_READ_BYTE_BUF_SIZE_BYTES)) {
|
||||||
int valueSize;
|
int valueSize;
|
||||||
@ -308,7 +321,7 @@ public class LLLocalDictionary implements LLDictionary {
|
|||||||
ByteBuffer resultNioBuf;
|
ByteBuffer resultNioBuf;
|
||||||
do {
|
do {
|
||||||
// Create the result nio buffer to pass to RocksDB
|
// Create the result nio buffer to pass to RocksDB
|
||||||
resultNioBuf = LLUtils.obtainDirect(resultBuf);
|
resultNioBuf = LLUtils.obtainDirect(resultBuf, true);
|
||||||
assert keyNioBuffer.byteBuffer().isDirect();
|
assert keyNioBuffer.byteBuffer().isDirect();
|
||||||
assert resultNioBuf.isDirect();
|
assert resultNioBuf.isDirect();
|
||||||
valueSize = db.get(cfh,
|
valueSize = db.get(cfh,
|
||||||
@ -414,11 +427,13 @@ public class LLLocalDictionary implements LLDictionary {
|
|||||||
if (Schedulers.isInNonBlockingThread()) {
|
if (Schedulers.isInNonBlockingThread()) {
|
||||||
throw new UnsupportedOperationException("Called dbPut in a nonblocking thread");
|
throw new UnsupportedOperationException("Called dbPut in a nonblocking thread");
|
||||||
}
|
}
|
||||||
|
assert key.isAccessible();
|
||||||
|
assert value.isAccessible();
|
||||||
if (databaseOptions.allowNettyDirect()) {
|
if (databaseOptions.allowNettyDirect()) {
|
||||||
var keyNioBuffer = LLUtils.convertToDirect(alloc, key.send());
|
var keyNioBuffer = LLUtils.convertToReadableDirect(alloc, key.send());
|
||||||
try (var ignored1 = keyNioBuffer.buffer().receive()) {
|
try (var ignored1 = keyNioBuffer.buffer().receive()) {
|
||||||
assert keyNioBuffer.byteBuffer().isDirect();
|
assert keyNioBuffer.byteBuffer().isDirect();
|
||||||
var valueNioBuffer = LLUtils.convertToDirect(alloc, value.send());
|
var valueNioBuffer = LLUtils.convertToReadableDirect(alloc, value.send());
|
||||||
try (var ignored2 = valueNioBuffer.buffer().receive()) {
|
try (var ignored2 = valueNioBuffer.buffer().receive()) {
|
||||||
assert valueNioBuffer.byteBuffer().isDirect();
|
assert valueNioBuffer.byteBuffer().isDirect();
|
||||||
db.put(cfh, validWriteOptions, keyNioBuffer.byteBuffer(), valueNioBuffer.byteBuffer());
|
db.put(cfh, validWriteOptions, keyNioBuffer.byteBuffer(), valueNioBuffer.byteBuffer());
|
||||||
@ -479,7 +494,7 @@ public class LLLocalDictionary implements LLDictionary {
|
|||||||
if (range.hasMin()) {
|
if (range.hasMin()) {
|
||||||
try (var rangeMin = range.getMin().receive()) {
|
try (var rangeMin = range.getMin().receive()) {
|
||||||
if (databaseOptions.allowNettyDirect()) {
|
if (databaseOptions.allowNettyDirect()) {
|
||||||
var directBuf = LLUtils.convertToDirect(alloc, rangeMin.send());
|
var directBuf = LLUtils.convertToReadableDirect(alloc, rangeMin.send());
|
||||||
cloned1 = directBuf.buffer().receive();
|
cloned1 = directBuf.buffer().receive();
|
||||||
direct1 = directBuf.byteBuffer();
|
direct1 = directBuf.byteBuffer();
|
||||||
readOpts.setIterateLowerBound(slice1 = new DirectSlice(directBuf.byteBuffer()));
|
readOpts.setIterateLowerBound(slice1 = new DirectSlice(directBuf.byteBuffer()));
|
||||||
@ -491,7 +506,7 @@ public class LLLocalDictionary implements LLDictionary {
|
|||||||
if (range.hasMax()) {
|
if (range.hasMax()) {
|
||||||
try (var rangeMax = range.getMax().receive()) {
|
try (var rangeMax = range.getMax().receive()) {
|
||||||
if (databaseOptions.allowNettyDirect()) {
|
if (databaseOptions.allowNettyDirect()) {
|
||||||
var directBuf = LLUtils.convertToDirect(alloc, rangeMax.send());
|
var directBuf = LLUtils.convertToReadableDirect(alloc, rangeMax.send());
|
||||||
cloned2 = directBuf.buffer().receive();
|
cloned2 = directBuf.buffer().receive();
|
||||||
direct2 = directBuf.byteBuffer();
|
direct2 = directBuf.byteBuffer();
|
||||||
readOpts.setIterateUpperBound(slice2 = new DirectSlice(directBuf.byteBuffer()));
|
readOpts.setIterateUpperBound(slice2 = new DirectSlice(directBuf.byteBuffer()));
|
||||||
@ -504,7 +519,7 @@ public class LLLocalDictionary implements LLDictionary {
|
|||||||
if (!LLLocalDictionary.PREFER_SEEK_TO_FIRST && range.hasMin()) {
|
if (!LLLocalDictionary.PREFER_SEEK_TO_FIRST && range.hasMin()) {
|
||||||
try (var rangeMin = range.getMin().receive()) {
|
try (var rangeMin = range.getMin().receive()) {
|
||||||
if (databaseOptions.allowNettyDirect()) {
|
if (databaseOptions.allowNettyDirect()) {
|
||||||
var directBuf = LLUtils.convertToDirect(alloc, rangeMin.send());
|
var directBuf = LLUtils.convertToReadableDirect(alloc, rangeMin.send());
|
||||||
cloned3 = directBuf.buffer().receive();
|
cloned3 = directBuf.buffer().receive();
|
||||||
direct3 = directBuf.byteBuffer();
|
direct3 = directBuf.byteBuffer();
|
||||||
rocksIterator.seek(directBuf.byteBuffer());
|
rocksIterator.seek(directBuf.byteBuffer());
|
||||||
@ -592,6 +607,8 @@ public class LLLocalDictionary implements LLDictionary {
|
|||||||
valueSend -> this.<Send<Buffer>>runOnDb(() -> {
|
valueSend -> this.<Send<Buffer>>runOnDb(() -> {
|
||||||
try (var key = keySend.receive()) {
|
try (var key = keySend.receive()) {
|
||||||
try (var value = valueSend.receive()) {
|
try (var value = valueSend.receive()) {
|
||||||
|
assert key.isAccessible();
|
||||||
|
assert value.isAccessible();
|
||||||
StampedLock lock;
|
StampedLock lock;
|
||||||
long stamp;
|
long stamp;
|
||||||
if (updateMode == UpdateMode.ALLOW) {
|
if (updateMode == UpdateMode.ALLOW) {
|
||||||
@ -656,9 +673,6 @@ public class LLLocalDictionary implements LLDictionary {
|
|||||||
stamp = 0;
|
stamp = 0;
|
||||||
}
|
}
|
||||||
try {
|
try {
|
||||||
if (logger.isTraceEnabled()) {
|
|
||||||
logger.trace(MARKER_ROCKSDB, "Reading {}", LLUtils.toStringSafe(key));
|
|
||||||
}
|
|
||||||
while (true) {
|
while (true) {
|
||||||
@Nullable Buffer prevData;
|
@Nullable Buffer prevData;
|
||||||
var prevDataHolder = existsAlmostCertainly ? null : new Holder<byte[]>();
|
var prevDataHolder = existsAlmostCertainly ? null : new Holder<byte[]>();
|
||||||
@ -682,19 +696,37 @@ public class LLLocalDictionary implements LLDictionary {
|
|||||||
} else {
|
} else {
|
||||||
prevData = null;
|
prevData = null;
|
||||||
}
|
}
|
||||||
|
if (logger.isTraceEnabled()) {
|
||||||
|
logger.trace(MARKER_ROCKSDB,
|
||||||
|
"Reading {}: {} (before update)",
|
||||||
|
LLUtils.toStringSafe(key),
|
||||||
|
LLUtils.toStringSafe(prevData)
|
||||||
|
);
|
||||||
|
}
|
||||||
try {
|
try {
|
||||||
@Nullable Buffer newData;
|
@Nullable Buffer newData;
|
||||||
try (Buffer prevDataToSendToUpdater = prevData == null ? null : prevData.copy()) {
|
try (Buffer prevDataToSendToUpdater = prevData == null ? null : prevData.copy()) {
|
||||||
try (var newDataToReceive = updater.apply(
|
try (var sentData = prevDataToSendToUpdater == null ? null
|
||||||
prevDataToSendToUpdater == null ? null : prevDataToSendToUpdater.send())) {
|
: prevDataToSendToUpdater.send()) {
|
||||||
if (newDataToReceive != null) {
|
try (var newDataToReceive = updater.apply(sentData)) {
|
||||||
newData = newDataToReceive.receive();
|
if (newDataToReceive != null) {
|
||||||
} else {
|
newData = newDataToReceive.receive();
|
||||||
newData = null;
|
} else {
|
||||||
|
newData = null;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
assert newData == null || newData.isAccessible();
|
||||||
try {
|
try {
|
||||||
|
if (logger.isTraceEnabled()) {
|
||||||
|
logger.trace(MARKER_ROCKSDB,
|
||||||
|
"Updating {}. previous data: {}, updated data: {}",
|
||||||
|
LLUtils.toStringSafe(key),
|
||||||
|
LLUtils.toStringSafe(prevData),
|
||||||
|
LLUtils.toStringSafe(newData)
|
||||||
|
);
|
||||||
|
}
|
||||||
if (prevData != null && newData == null) {
|
if (prevData != null && newData == null) {
|
||||||
//noinspection DuplicatedCode
|
//noinspection DuplicatedCode
|
||||||
if (updateMode == UpdateMode.ALLOW) {
|
if (updateMode == UpdateMode.ALLOW) {
|
||||||
@ -709,7 +741,7 @@ public class LLLocalDictionary implements LLDictionary {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (logger.isTraceEnabled()) {
|
if (logger.isTraceEnabled()) {
|
||||||
logger.trace(MARKER_ROCKSDB, "Deleting {}", LLUtils.toStringSafe(key));
|
logger.trace(MARKER_ROCKSDB, "Deleting {} (after update)", LLUtils.toStringSafe(key));
|
||||||
}
|
}
|
||||||
dbDelete(cfh, null, key.send());
|
dbDelete(cfh, null, key.send());
|
||||||
} else if (newData != null
|
} else if (newData != null
|
||||||
@ -727,7 +759,11 @@ public class LLLocalDictionary implements LLDictionary {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (logger.isTraceEnabled()) {
|
if (logger.isTraceEnabled()) {
|
||||||
logger.trace(MARKER_ROCKSDB, "Writing {}: {}", LLUtils.toStringSafe(key), LLUtils.toStringSafe(newData));
|
logger.trace(MARKER_ROCKSDB,
|
||||||
|
"Writing {}: {} (after update)",
|
||||||
|
LLUtils.toStringSafe(key),
|
||||||
|
LLUtils.toStringSafe(newData)
|
||||||
|
);
|
||||||
}
|
}
|
||||||
Buffer dataToPut;
|
Buffer dataToPut;
|
||||||
if (updateReturnMode == UpdateReturnMode.GET_NEW_VALUE) {
|
if (updateReturnMode == UpdateReturnMode.GET_NEW_VALUE) {
|
||||||
@ -779,7 +815,7 @@ public class LLLocalDictionary implements LLDictionary {
|
|||||||
SerializationFunction<@Nullable Send<Buffer>, @Nullable Send<Buffer>> updater,
|
SerializationFunction<@Nullable Send<Buffer>, @Nullable Send<Buffer>> updater,
|
||||||
boolean existsAlmostCertainly) {
|
boolean existsAlmostCertainly) {
|
||||||
return Mono.usingWhen(keyMono,
|
return Mono.usingWhen(keyMono,
|
||||||
keySend -> this.runOnDb(() -> {
|
keySend -> runOnDb(() -> {
|
||||||
try (var key = keySend.receive()) {
|
try (var key = keySend.receive()) {
|
||||||
if (Schedulers.isInNonBlockingThread()) {
|
if (Schedulers.isInNonBlockingThread()) {
|
||||||
throw new UnsupportedOperationException("Called update in a nonblocking thread");
|
throw new UnsupportedOperationException("Called update in a nonblocking thread");
|
||||||
@ -798,9 +834,6 @@ public class LLLocalDictionary implements LLDictionary {
|
|||||||
stamp = 0;
|
stamp = 0;
|
||||||
}
|
}
|
||||||
try {
|
try {
|
||||||
if (logger.isTraceEnabled()) {
|
|
||||||
logger.trace(MARKER_ROCKSDB, "Reading {}", LLUtils.toStringSafe(key));
|
|
||||||
}
|
|
||||||
while (true) {
|
while (true) {
|
||||||
@Nullable Buffer prevData;
|
@Nullable Buffer prevData;
|
||||||
var prevDataHolder = existsAlmostCertainly ? null : new Holder<byte[]>();
|
var prevDataHolder = existsAlmostCertainly ? null : new Holder<byte[]>();
|
||||||
@ -824,19 +857,37 @@ public class LLLocalDictionary implements LLDictionary {
|
|||||||
} else {
|
} else {
|
||||||
prevData = null;
|
prevData = null;
|
||||||
}
|
}
|
||||||
|
if (logger.isTraceEnabled()) {
|
||||||
|
logger.trace(MARKER_ROCKSDB,
|
||||||
|
"Reading {}: {} (before update)",
|
||||||
|
LLUtils.toStringSafe(key),
|
||||||
|
LLUtils.toStringSafe(prevData)
|
||||||
|
);
|
||||||
|
}
|
||||||
try {
|
try {
|
||||||
@Nullable Buffer newData;
|
@Nullable Buffer newData;
|
||||||
try (Buffer prevDataToSendToUpdater = prevData == null ? null : prevData.copy()) {
|
try (Buffer prevDataToSendToUpdater = prevData == null ? null : prevData.copy()) {
|
||||||
try (var newDataToReceive = updater.apply(
|
try (var sentData = prevDataToSendToUpdater == null ? null
|
||||||
prevDataToSendToUpdater == null ? null : prevDataToSendToUpdater.send())) {
|
: prevDataToSendToUpdater.send()) {
|
||||||
if (newDataToReceive != null) {
|
try (var newDataToReceive = updater.apply(sentData)) {
|
||||||
newData = newDataToReceive.receive();
|
if (newDataToReceive != null) {
|
||||||
} else {
|
newData = newDataToReceive.receive();
|
||||||
newData = null;
|
} else {
|
||||||
|
newData = null;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
assert newData == null || newData.isAccessible();
|
||||||
try {
|
try {
|
||||||
|
if (logger.isTraceEnabled()) {
|
||||||
|
logger.trace(MARKER_ROCKSDB,
|
||||||
|
"Updating {}. previous data: {}, updated data: {}",
|
||||||
|
LLUtils.toStringSafe(key),
|
||||||
|
LLUtils.toStringSafe(prevData),
|
||||||
|
LLUtils.toStringSafe(newData)
|
||||||
|
);
|
||||||
|
}
|
||||||
if (prevData != null && newData == null) {
|
if (prevData != null && newData == null) {
|
||||||
//noinspection DuplicatedCode
|
//noinspection DuplicatedCode
|
||||||
if (updateMode == UpdateMode.ALLOW) {
|
if (updateMode == UpdateMode.ALLOW) {
|
||||||
@ -851,7 +902,7 @@ public class LLLocalDictionary implements LLDictionary {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (logger.isTraceEnabled()) {
|
if (logger.isTraceEnabled()) {
|
||||||
logger.trace(MARKER_ROCKSDB, "Deleting {}", LLUtils.toStringSafe(key));
|
logger.trace(MARKER_ROCKSDB, "Deleting {} (after update)", LLUtils.toStringSafe(key));
|
||||||
}
|
}
|
||||||
dbDelete(cfh, null, key.send());
|
dbDelete(cfh, null, key.send());
|
||||||
} else if (newData != null
|
} else if (newData != null
|
||||||
@ -869,8 +920,11 @@ public class LLLocalDictionary implements LLDictionary {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (logger.isTraceEnabled()) {
|
if (logger.isTraceEnabled()) {
|
||||||
logger.trace(MARKER_ROCKSDB, "Writing {}: {}",
|
logger.trace(MARKER_ROCKSDB,
|
||||||
LLUtils.toStringSafe(key), LLUtils.toStringSafe(newData));
|
"Writing {}: {} (after update)",
|
||||||
|
LLUtils.toStringSafe(key),
|
||||||
|
LLUtils.toStringSafe(newData)
|
||||||
|
);
|
||||||
}
|
}
|
||||||
assert key.isAccessible();
|
assert key.isAccessible();
|
||||||
assert newData.isAccessible();
|
assert newData.isAccessible();
|
||||||
@ -910,7 +964,7 @@ public class LLLocalDictionary implements LLDictionary {
|
|||||||
}
|
}
|
||||||
var validWriteOptions = Objects.requireNonNullElse(writeOptions, EMPTY_WRITE_OPTIONS);
|
var validWriteOptions = Objects.requireNonNullElse(writeOptions, EMPTY_WRITE_OPTIONS);
|
||||||
if (databaseOptions.allowNettyDirect()) {
|
if (databaseOptions.allowNettyDirect()) {
|
||||||
var keyNioBuffer = LLUtils.convertToDirect(alloc, key.send());
|
var keyNioBuffer = LLUtils.convertToReadableDirect(alloc, key.send());
|
||||||
try {
|
try {
|
||||||
db.delete(cfh, validWriteOptions, keyNioBuffer.byteBuffer());
|
db.delete(cfh, validWriteOptions, keyNioBuffer.byteBuffer());
|
||||||
} finally {
|
} finally {
|
||||||
@ -986,18 +1040,24 @@ public class LLLocalDictionary implements LLDictionary {
|
|||||||
stamp = 0;
|
stamp = 0;
|
||||||
}
|
}
|
||||||
try {
|
try {
|
||||||
if (logger.isTraceEnabled()) {
|
|
||||||
logger.trace(MARKER_ROCKSDB, "Reading {}", LLUtils.toArray(key));
|
|
||||||
}
|
|
||||||
var data = new Holder<byte[]>();
|
var data = new Holder<byte[]>();
|
||||||
|
Buffer bufferResult;
|
||||||
if (db.keyMayExist(cfh, LLUtils.toArray(key), data)) {
|
if (db.keyMayExist(cfh, LLUtils.toArray(key), data)) {
|
||||||
if (data.getValue() != null) {
|
if (data.getValue() != null) {
|
||||||
return LLUtils.fromByteArray(alloc, data.getValue()).send();
|
bufferResult = LLUtils.fromByteArray(alloc, data.getValue());
|
||||||
} else {
|
} else {
|
||||||
return dbGet(cfh, null, key.send(), true);
|
try (var bufferResultToReceive = dbGet(cfh, null, key.send(), true)) {
|
||||||
|
bufferResult = bufferResultToReceive == null ? null : bufferResultToReceive.receive();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
return null;
|
bufferResult = null;
|
||||||
|
}
|
||||||
|
try (bufferResult) {
|
||||||
|
if (logger.isTraceEnabled()) {
|
||||||
|
logger.trace(MARKER_ROCKSDB, "Reading {}: {}", LLUtils.toStringSafe(key), LLUtils.toStringSafe(bufferResult));
|
||||||
|
}
|
||||||
|
return bufferResult == null ? null : bufferResult.send();
|
||||||
}
|
}
|
||||||
} finally {
|
} finally {
|
||||||
if (updateMode == UpdateMode.ALLOW) {
|
if (updateMode == UpdateMode.ALLOW) {
|
||||||
@ -1176,9 +1236,9 @@ public class LLLocalDictionary implements LLDictionary {
|
|||||||
batch.close();
|
batch.close();
|
||||||
} else {
|
} else {
|
||||||
for (LLEntry entry : entriesWindow) {
|
for (LLEntry entry : entriesWindow) {
|
||||||
var k = LLUtils.convertToDirect(alloc, entry.getKey());
|
var k = LLUtils.convertToReadableDirect(alloc, entry.getKey());
|
||||||
try {
|
try {
|
||||||
var v = LLUtils.convertToDirect(alloc, entry.getValue());
|
var v = LLUtils.convertToReadableDirect(alloc, entry.getValue());
|
||||||
try {
|
try {
|
||||||
db.put(cfh, EMPTY_WRITE_OPTIONS, k.byteBuffer(), v.byteBuffer());
|
db.put(cfh, EMPTY_WRITE_OPTIONS, k.byteBuffer(), v.byteBuffer());
|
||||||
} finally {
|
} finally {
|
||||||
@ -1309,9 +1369,9 @@ public class LLLocalDictionary implements LLDictionary {
|
|||||||
} else {
|
} else {
|
||||||
int i = 0;
|
int i = 0;
|
||||||
for (Tuple2<Buffer, X> entry : entriesWindow) {
|
for (Tuple2<Buffer, X> entry : entriesWindow) {
|
||||||
var k = LLUtils.convertToDirect(alloc, entry.getT1().send());
|
var k = LLUtils.convertToReadableDirect(alloc, entry.getT1().send());
|
||||||
try {
|
try {
|
||||||
var v = LLUtils.convertToDirect(alloc, updatedValuesToWrite.get(i));
|
var v = LLUtils.convertToReadableDirect(alloc, updatedValuesToWrite.get(i));
|
||||||
try {
|
try {
|
||||||
db.put(cfh, EMPTY_WRITE_OPTIONS, k.byteBuffer(), v.byteBuffer());
|
db.put(cfh, EMPTY_WRITE_OPTIONS, k.byteBuffer(), v.byteBuffer());
|
||||||
} finally {
|
} finally {
|
||||||
@ -1565,7 +1625,8 @@ public class LLLocalDictionary implements LLDictionary {
|
|||||||
return Flux.usingWhen(rangeMono,
|
return Flux.usingWhen(rangeMono,
|
||||||
rangeSend -> Flux.using(
|
rangeSend -> Flux.using(
|
||||||
() -> new LLLocalKeyReactiveRocksIterator(db, alloc, cfh, rangeSend,
|
() -> new LLLocalKeyReactiveRocksIterator(db, alloc, cfh, rangeSend,
|
||||||
databaseOptions.allowNettyDirect(), resolveSnapshot(snapshot), getRangeKeysMultiDebugName),
|
databaseOptions.allowNettyDirect(), resolveSnapshot(snapshot)
|
||||||
|
),
|
||||||
llLocalKeyReactiveRocksIterator -> llLocalKeyReactiveRocksIterator.flux().subscribeOn(dbScheduler),
|
llLocalKeyReactiveRocksIterator -> llLocalKeyReactiveRocksIterator.flux().subscribeOn(dbScheduler),
|
||||||
LLLocalReactiveRocksIterator::release
|
LLLocalReactiveRocksIterator::release
|
||||||
).transform(LLUtils::handleDiscard),
|
).transform(LLUtils::handleDiscard),
|
||||||
@ -1679,9 +1740,9 @@ public class LLLocalDictionary implements LLDictionary {
|
|||||||
if (!USE_WRITE_BATCHES_IN_SET_RANGE) {
|
if (!USE_WRITE_BATCHES_IN_SET_RANGE) {
|
||||||
for (LLEntry entry : entriesList) {
|
for (LLEntry entry : entriesList) {
|
||||||
assert entry.isAccessible();
|
assert entry.isAccessible();
|
||||||
var k = LLUtils.convertToDirect(alloc, entry.getKey());
|
var k = LLUtils.convertToReadableDirect(alloc, entry.getKey());
|
||||||
try {
|
try {
|
||||||
var v = LLUtils.convertToDirect(alloc, entry.getValue());
|
var v = LLUtils.convertToReadableDirect(alloc, entry.getValue());
|
||||||
try {
|
try {
|
||||||
db.put(cfh, EMPTY_WRITE_OPTIONS, k.byteBuffer(), v.byteBuffer());
|
db.put(cfh, EMPTY_WRITE_OPTIONS, k.byteBuffer(), v.byteBuffer());
|
||||||
} finally {
|
} finally {
|
||||||
@ -1799,7 +1860,7 @@ public class LLLocalDictionary implements LLDictionary {
|
|||||||
try {
|
try {
|
||||||
rocksIterator.status();
|
rocksIterator.status();
|
||||||
while (rocksIterator.isValid()) {
|
while (rocksIterator.isValid()) {
|
||||||
writeBatch.delete(cfh, LLUtils.readDirectNioBuffer(alloc, rocksIterator::key));
|
writeBatch.delete(cfh, LLUtils.readDirectNioBuffer(alloc, rocksIterator::key).send());
|
||||||
rocksIterator.next();
|
rocksIterator.next();
|
||||||
rocksIterator.status();
|
rocksIterator.status();
|
||||||
}
|
}
|
||||||
@ -1874,7 +1935,7 @@ public class LLLocalDictionary implements LLDictionary {
|
|||||||
Send<Buffer> bufferToReceive) {
|
Send<Buffer> bufferToReceive) {
|
||||||
try (var buffer = bufferToReceive.receive()) {
|
try (var buffer = bufferToReceive.receive()) {
|
||||||
if (allowNettyDirect) {
|
if (allowNettyDirect) {
|
||||||
var direct = LLUtils.convertToDirect(alloc, buffer.send());
|
var direct = LLUtils.convertToReadableDirect(alloc, buffer.send());
|
||||||
assert direct.byteBuffer().isDirect();
|
assert direct.byteBuffer().isDirect();
|
||||||
rocksIterator.seek(direct.byteBuffer());
|
rocksIterator.seek(direct.byteBuffer());
|
||||||
return () -> {
|
return () -> {
|
||||||
@ -1895,7 +1956,7 @@ public class LLLocalDictionary implements LLDictionary {
|
|||||||
requireNonNull(buffer);
|
requireNonNull(buffer);
|
||||||
AbstractSlice<?> slice;
|
AbstractSlice<?> slice;
|
||||||
if (allowNettyDirect && LLLocalDictionary.USE_DIRECT_BUFFER_BOUNDS) {
|
if (allowNettyDirect && LLLocalDictionary.USE_DIRECT_BUFFER_BOUNDS) {
|
||||||
var direct = LLUtils.convertToDirect(alloc, buffer.send());
|
var direct = LLUtils.convertToReadableDirect(alloc, buffer.send());
|
||||||
buffer = direct.buffer().receive();
|
buffer = direct.buffer().receive();
|
||||||
assert direct.byteBuffer().isDirect();
|
assert direct.byteBuffer().isDirect();
|
||||||
slice = new DirectSlice(direct.byteBuffer(), buffer.readableBytes());
|
slice = new DirectSlice(direct.byteBuffer(), buffer.readableBytes());
|
||||||
@ -2123,8 +2184,8 @@ public class LLLocalDictionary implements LLDictionary {
|
|||||||
try {
|
try {
|
||||||
rocksIterator.status();
|
rocksIterator.status();
|
||||||
if (rocksIterator.isValid()) {
|
if (rocksIterator.isValid()) {
|
||||||
try (var key = LLUtils.readDirectNioBuffer(alloc, rocksIterator::key).receive()) {
|
try (var key = LLUtils.readDirectNioBuffer(alloc, rocksIterator::key)) {
|
||||||
try (var value = LLUtils.readDirectNioBuffer(alloc, rocksIterator::value).receive()) {
|
try (var value = LLUtils.readDirectNioBuffer(alloc, rocksIterator::value)) {
|
||||||
return LLEntry.of(key.send(), value.send()).send();
|
return LLEntry.of(key.send(), value.send()).send();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -2184,7 +2245,7 @@ public class LLLocalDictionary implements LLDictionary {
|
|||||||
try {
|
try {
|
||||||
rocksIterator.status();
|
rocksIterator.status();
|
||||||
if (rocksIterator.isValid()) {
|
if (rocksIterator.isValid()) {
|
||||||
return LLUtils.readDirectNioBuffer(alloc, rocksIterator::key);
|
return LLUtils.readDirectNioBuffer(alloc, rocksIterator::key).send();
|
||||||
} else {
|
} else {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
@ -2354,8 +2415,8 @@ public class LLLocalDictionary implements LLDictionary {
|
|||||||
if (!rocksIterator.isValid()) {
|
if (!rocksIterator.isValid()) {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
try (Buffer key = LLUtils.readDirectNioBuffer(alloc, rocksIterator::key).receive()) {
|
try (Buffer key = LLUtils.readDirectNioBuffer(alloc, rocksIterator::key)) {
|
||||||
try (Buffer value = LLUtils.readDirectNioBuffer(alloc, rocksIterator::value).receive()) {
|
try (Buffer value = LLUtils.readDirectNioBuffer(alloc, rocksIterator::value)) {
|
||||||
dbDelete(cfh, null, key.copy().send());
|
dbDelete(cfh, null, key.copy().send());
|
||||||
return LLEntry.of(key.send(), value.send()).send();
|
return LLEntry.of(key.send(), value.send()).send();
|
||||||
}
|
}
|
||||||
|
@ -18,7 +18,7 @@ public class LLLocalEntryReactiveRocksIterator extends LLLocalReactiveRocksItera
|
|||||||
boolean allowNettyDirect,
|
boolean allowNettyDirect,
|
||||||
ReadOptions readOptions,
|
ReadOptions readOptions,
|
||||||
String debugName) {
|
String debugName) {
|
||||||
super(db, alloc, cfh, range, allowNettyDirect, readOptions, true, debugName);
|
super(db, alloc, cfh, range, allowNettyDirect, readOptions, true);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
@@ -1,5 +1,7 @@
 package it.cavallium.dbengine.database.disk;
 
+import static it.cavallium.dbengine.database.LLUtils.MARKER_ROCKSDB;
+
 import io.net5.buffer.api.Buffer;
 import io.net5.buffer.api.BufferAllocator;
 import io.net5.buffer.api.Send;
@@ -7,14 +9,18 @@ import it.cavallium.dbengine.database.LLRange;
 import it.cavallium.dbengine.database.LLUtils;
 import it.unimi.dsi.fastutil.objects.ObjectArrayList;
 import java.util.List;
+import org.jetbrains.annotations.Nullable;
 import org.rocksdb.ColumnFamilyHandle;
 import org.rocksdb.ReadOptions;
 import org.rocksdb.RocksDB;
 import org.rocksdb.RocksDBException;
+import org.warp.commonutils.log.Logger;
+import org.warp.commonutils.log.LoggerFactory;
 import reactor.core.publisher.Flux;
 
 public abstract class LLLocalGroupedReactiveRocksIterator<T> {
 
+	protected static final Logger logger = LoggerFactory.getLogger(LLLocalGroupedReactiveRocksIterator.class);
 	private final RocksDB db;
 	private final BufferAllocator alloc;
 	private final ColumnFamilyHandle cfh;
@@ -51,6 +57,9 @@ public abstract class LLLocalGroupedReactiveRocksIterator<T> {
 		.generate(() -> {
 			var readOptions = new ReadOptions(this.readOptions);
 			readOptions.setFillCache(canFillCache && range.hasMin() && range.hasMax());
+			if (logger.isTraceEnabled()) {
+				logger.trace(MARKER_ROCKSDB, "Range {} started", LLUtils.toStringSafe(range));
+			}
 			return LLLocalDictionary.getRocksIterator(alloc, allowNettyDirect, readOptions, range.copy().send(), db, cfh);
 		}, (tuple, sink) -> {
 			try {
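The added logging follows the guarded-trace idiom: the `logger.isTraceEnabled()` check skips the `LLUtils.toStringSafe(...)` rendering entirely when TRACE is off, which matters because rendering a range or buffer is itself costly. A minimal sketch of the idiom, assuming the SLF4J-style facade imported above (the `toStringSafe` body here is an illustrative stand-in for the LLUtils helper):

```java
import org.warp.commonutils.log.Logger;
import org.warp.commonutils.log.LoggerFactory;

final class GuardedTraceSketch {

	private static final Logger logger = LoggerFactory.getLogger(GuardedTraceSketch.class);

	static void onRangeStarted(Object range) {
		// Without the guard, toStringSafe(range) would run on every call
		// even when TRACE is disabled and the message is discarded.
		if (logger.isTraceEnabled()) {
			logger.trace("Range {} started", toStringSafe(range));
		}
	}

	// Stand-in for LLUtils.toStringSafe: never throws while rendering.
	private static String toStringSafe(Object o) {
		try {
			return String.valueOf(o);
		} catch (Throwable t) {
			return "<unreadable>";
		}
	}
}
```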
@@ -60,26 +69,38 @@ public abstract class LLLocalGroupedReactiveRocksIterator<T> {
 			try {
 				rocksIterator.status();
 				while (rocksIterator.isValid()) {
-					try (Buffer key = LLUtils.readDirectNioBuffer(alloc, rocksIterator::key).receive()) {
+					try (Buffer key = LLUtils.readDirectNioBuffer(alloc, rocksIterator::key)) {
 						if (firstGroupKey == null) {
 							firstGroupKey = key.copy();
 						} else if (!LLUtils.equals(firstGroupKey, firstGroupKey.readerOffset(),
 								key, key.readerOffset(), prefixLength)) {
 							break;
 						}
-						Buffer value;
+						@Nullable Buffer value;
 						if (readValues) {
-							value = LLUtils.readDirectNioBuffer(alloc, rocksIterator::value).receive();
+							value = LLUtils.readDirectNioBuffer(alloc, rocksIterator::value);
 						} else {
-							value = alloc.allocate(0);
+							value = null;
 						}
+
+						if (logger.isTraceEnabled()) {
+							logger.trace(MARKER_ROCKSDB,
+									"Range {} is reading {}: {}",
+									LLUtils.toStringSafe(range),
+									LLUtils.toStringSafe(key),
+									LLUtils.toStringSafe(value)
+							);
+						}
+
 						try {
 							rocksIterator.next();
 							rocksIterator.status();
-							T entry = getEntry(key.send(), value.send());
+							T entry = getEntry(key.send(), value == null ? null : value.send());
 							values.add(entry);
 						} finally {
-							value.close();
+							if (value != null) {
+								value.close();
+							}
 						}
 					}
 				}
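When `readValues` is false the iterator now carries `null` instead of allocating a zero-length throwaway buffer, so consumers must null-check both on use and on close. A minimal sketch of that contract, assuming the io.net5 buffer API (names here are illustrative, not the project's API):

```java
import io.net5.buffer.api.Buffer;
import io.net5.buffer.api.BufferAllocator;
import org.jetbrains.annotations.Nullable;

final class NullableValueSketch {

	static void emit(BufferAllocator alloc, boolean readValues) {
		@Nullable Buffer value = readValues
				? alloc.allocate(Integer.BYTES).writeInt(7) // stand-in for reading the real value
				: null;                                     // no empty-buffer allocation when values are skipped
		try {
			use(value);
		} finally {
			if (value != null) { // close only what was actually allocated
				value.close();
			}
		}
	}

	static void use(@Nullable Buffer value) {
		System.out.println(value == null
				? "key-only entry"
				: "entry with " + value.readableBytes() + " value bytes");
	}
}
```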
@@ -91,9 +112,15 @@ public abstract class LLLocalGroupedReactiveRocksIterator<T> {
 				if (!values.isEmpty()) {
 					sink.next(values);
 				} else {
+					if (logger.isTraceEnabled()) {
+						logger.trace(MARKER_ROCKSDB, "Range {} ended", LLUtils.toStringSafe(range));
+					}
 					sink.complete();
 				}
 			} catch (RocksDBException ex) {
+				if (logger.isTraceEnabled()) {
+					logger.trace(MARKER_ROCKSDB, "Range {} failed", LLUtils.toStringSafe(range));
+				}
 				sink.error(ex);
 			}
 			return tuple;
@@ -106,7 +133,7 @@ public abstract class LLLocalGroupedReactiveRocksIterator<T> {
 		});
 	}
 
-	public abstract T getEntry(Send<Buffer> key, Send<Buffer> value);
+	public abstract T getEntry(@Nullable Send<Buffer> key, @Nullable Send<Buffer> value);
 
 	public void release() {
 		range.close();
@@ -1,5 +1,7 @@
 package it.cavallium.dbengine.database.disk;
 
+import static it.cavallium.dbengine.database.LLUtils.MARKER_ROCKSDB;
+
 import io.net5.buffer.api.Buffer;
 import io.net5.buffer.api.BufferAllocator;
 import io.net5.buffer.api.Send;
@@ -9,10 +11,13 @@ import org.rocksdb.ColumnFamilyHandle;
 import org.rocksdb.ReadOptions;
 import org.rocksdb.RocksDB;
 import org.rocksdb.RocksDBException;
+import org.warp.commonutils.log.Logger;
+import org.warp.commonutils.log.LoggerFactory;
 import reactor.core.publisher.Flux;
 
 public class LLLocalKeyPrefixReactiveRocksIterator {
 
+	protected static final Logger logger = LoggerFactory.getLogger(LLLocalKeyPrefixReactiveRocksIterator.class);
 	private final RocksDB db;
 	private final BufferAllocator alloc;
 	private final ColumnFamilyHandle cfh;
@@ -54,6 +59,9 @@ public class LLLocalKeyPrefixReactiveRocksIterator {
 				readOptions.setReadaheadSize(32 * 1024); // 32KiB
 				readOptions.setFillCache(canFillCache);
 			}
+			if (logger.isTraceEnabled()) {
+				logger.trace(MARKER_ROCKSDB, "Range {} started", LLUtils.toStringSafe(range));
+			}
 			return LLLocalDictionary.getRocksIterator(alloc, allowNettyDirect, readOptions, rangeSend, db, cfh);
 		}, (tuple, sink) -> {
 			try {
@@ -64,7 +72,7 @@ public class LLLocalKeyPrefixReactiveRocksIterator {
 				while (rocksIterator.isValid()) {
 					Buffer key;
 					if (allowNettyDirect) {
-						key = LLUtils.readDirectNioBuffer(alloc, rocksIterator::key).receive();
+						key = LLUtils.readDirectNioBuffer(alloc, rocksIterator::key);
 					} else {
 						key = LLUtils.fromByteArray(alloc, rocksIterator.key());
 					}
@@ -79,11 +87,24 @@ public class LLLocalKeyPrefixReactiveRocksIterator {
 						rocksIterator.status();
 					}
 				}
+
 				if (firstGroupKey != null) {
 					var groupKeyPrefix = firstGroupKey.copy(firstGroupKey.readerOffset(), prefixLength);
 					assert groupKeyPrefix.isAccessible();
+
+					if (logger.isTraceEnabled()) {
+						logger.trace(MARKER_ROCKSDB,
+								"Range {} is reading prefix {}",
+								LLUtils.toStringSafe(range),
+								LLUtils.toStringSafe(groupKeyPrefix)
+						);
+					}
+
 					sink.next(groupKeyPrefix.send());
 				} else {
+					if (logger.isTraceEnabled()) {
+						logger.trace(MARKER_ROCKSDB, "Range {} ended", LLUtils.toStringSafe(range));
+					}
 					sink.complete();
 				}
 			} finally {
@@ -92,6 +113,9 @@ public class LLLocalKeyPrefixReactiveRocksIterator {
 			}
 		}
 	} catch (RocksDBException ex) {
+		if (logger.isTraceEnabled()) {
+			logger.trace(MARKER_ROCKSDB, "Range {} failed", LLUtils.toStringSafe(range));
+		}
 		sink.error(ex);
 	}
 	return tuple;
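The prefix iterator compares the first `prefixLength` bytes of each key against the first key of the current group and emits one prefix per group. A small sketch of the same grouping logic over plain byte arrays, a simplification of the Buffer-based code above (it assumes every key is at least `prefixLength` bytes long):

```java
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

final class PrefixGroupingSketch {

	// Emits the distinct prefix of each run of keys sharing the first prefixLength bytes.
	static List<byte[]> groupPrefixes(List<byte[]> sortedKeys, int prefixLength) {
		List<byte[]> prefixes = new ArrayList<>();
		byte[] firstGroupKey = null;
		for (byte[] key : sortedKeys) {
			if (firstGroupKey == null
					|| !Arrays.equals(firstGroupKey, 0, prefixLength, key, 0, prefixLength)) {
				firstGroupKey = key;
				// like firstGroupKey.copy(firstGroupKey.readerOffset(), prefixLength) in the diff
				prefixes.add(Arrays.copyOf(key, prefixLength));
			}
		}
		return prefixes;
	}
}
```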
@@ -15,9 +15,8 @@ public class LLLocalKeyReactiveRocksIterator extends LLLocalReactiveRocksIterator
 		ColumnFamilyHandle cfh,
 		Send<LLRange> range,
 		boolean allowNettyDirect,
-		ReadOptions readOptions,
-		String debugName) {
-	super(db, alloc, cfh, range, allowNettyDirect, readOptions, false, debugName);
+		ReadOptions readOptions) {
+	super(db, alloc, cfh, range, allowNettyDirect, readOptions, false);
 }
 
 @Override
@@ -100,7 +100,9 @@ public class LLLocalKeyValueDatabase implements LLKeyValueDatabase {
 	}
 	if (!MemorySegmentUtils.isSupported()) {
 		throw new UnsupportedOperationException("Foreign Memory Access API support is disabled."
-				+ " Please set \"--enable-preview --add-modules jdk.incubator.foreign -Dforeign.restricted=permit\"");
+				+ " Please set \"" + MemorySegmentUtils.getSuggestedArgs() + "\"",
+				MemorySegmentUtils.getUnsupportedCause()
+		);
 	}
 }
 
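The rewritten error path keeps the original lookup failure as the exception cause instead of swallowing it, and builds the suggested JVM flags in one place. A hedged sketch of the same shape; `getSuggestedArgs` and `getUnsupportedCause` are the accessors named in the diff, but their bodies below are illustrative:

```java
final class UnsupportedAccessSketch {

	private static final Throwable UNSUPPORTED_CAUSE =
			new ClassNotFoundException("jdk.incubator.foreign.MemorySegment");

	static String getSuggestedArgs() {
		// Illustrative: the real implementation can derive this from the running JDK.
		return "--enable-preview --add-modules jdk.incubator.foreign -Dforeign.restricted=permit";
	}

	static Throwable getUnsupportedCause() {
		return UNSUPPORTED_CAUSE;
	}

	static void requireMemorySegmentSupport(boolean supported) {
		if (!supported) {
			// Chaining the cause preserves the original failure in the stack trace.
			throw new UnsupportedOperationException(
					"Foreign Memory Access API support is disabled."
							+ " Please set \"" + getSuggestedArgs() + "\"",
					getUnsupportedCause());
		}
	}
}
```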
@@ -1,15 +1,15 @@
 package it.cavallium.dbengine.database.disk;
 
 import static it.cavallium.dbengine.database.LLUtils.MARKER_LUCENE;
-import static it.cavallium.dbengine.database.LLUtils.MARKER_ROCKSDB;
+import static it.cavallium.dbengine.lucene.searcher.LLSearchTransformer.NO_TRANSFORMATION;
 
+import io.net5.buffer.api.Send;
 import it.cavallium.dbengine.client.DirectIOOptions;
 import it.cavallium.dbengine.client.IndicizerAnalyzers;
 import it.cavallium.dbengine.client.IndicizerSimilarities;
 import it.cavallium.dbengine.client.LuceneOptions;
 import it.cavallium.dbengine.client.NRTCachingOptions;
 import it.cavallium.dbengine.client.query.current.data.QueryParams;
-import it.cavallium.dbengine.database.EnglishItalianStopFilter;
 import it.cavallium.dbengine.database.LLDocument;
 import it.cavallium.dbengine.database.LLLuceneIndex;
 import it.cavallium.dbengine.database.LLSearchResultShard;
@@ -21,16 +21,16 @@ import it.cavallium.dbengine.lucene.LuceneUtils;
 import it.cavallium.dbengine.lucene.searcher.AdaptiveLuceneLocalSearcher;
 import it.cavallium.dbengine.lucene.searcher.LocalQueryParams;
 import it.cavallium.dbengine.lucene.searcher.LuceneLocalSearcher;
-import it.cavallium.dbengine.lucene.searcher.LuceneShardSearcher;
+import it.cavallium.dbengine.lucene.searcher.LuceneMultiSearcher;
+import it.cavallium.dbengine.lucene.searcher.LLSearchTransformer;
 import java.io.IOException;
 import java.nio.file.Path;
-import java.time.Duration;
-import java.util.HashMap;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
 import java.util.concurrent.Phaser;
 import java.util.concurrent.TimeUnit;
+import org.apache.lucene.analysis.miscellaneous.PerFieldAnalyzerWrapper;
 import org.apache.lucene.index.ConcurrentMergeScheduler;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
@@ -39,15 +39,7 @@ import org.apache.lucene.index.MergeScheduler;
 import org.apache.lucene.index.SerialMergeScheduler;
 import org.apache.lucene.index.SnapshotDeletionPolicy;
 import org.apache.lucene.misc.store.DirectIODirectory;
-import org.apache.lucene.queries.mlt.MoreLikeThis;
-import org.apache.lucene.search.BooleanClause.Occur;
-import org.apache.lucene.search.BooleanQuery;
-import org.apache.lucene.search.ConstantScoreQuery;
-import org.apache.lucene.search.MatchAllDocsQuery;
-import org.apache.lucene.search.MatchNoDocsQuery;
-import org.apache.lucene.search.Query;
 import org.apache.lucene.search.similarities.Similarity;
-import org.apache.lucene.search.similarities.TFIDFSimilarity;
 import org.apache.lucene.store.ByteBuffersDirectory;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FSDirectory;
@@ -73,22 +65,14 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 	 * There is only a single thread globally to not overwhelm the disk with
 	 * concurrent commits or concurrent refreshes.
 	 */
-	private static final Scheduler luceneHeavyTasksScheduler = Schedulers.newBoundedElastic(1,
-			Schedulers.DEFAULT_BOUNDED_ELASTIC_QUEUESIZE,
-			"lucene",
-			Integer.MAX_VALUE,
-			true
-	);
-	// Scheduler used to get callback values of LuceneStreamSearcher without creating deadlocks
-	protected final Scheduler luceneSearcherScheduler = LuceneUtils.newLuceneSearcherScheduler(false);
-	// Scheduler used to get callback values of LuceneStreamSearcher without creating deadlocks
-	private static final Scheduler luceneWriterScheduler = Schedulers.boundedElastic();
+	private static final Scheduler luceneHeavyTasksScheduler = Schedulers.single(Schedulers.boundedElastic());
 
 	private final String luceneIndexName;
 	private final IndexWriter indexWriter;
 	private final SnapshotsManager snapshotsManager;
-	private final CachedIndexSearcherManager searcherManager;
-	private final Similarity similarity;
+	private final IndexSearcherManager searcherManager;
+	private final PerFieldAnalyzerWrapper luceneAnalyzer;
+	private final Similarity luceneSimilarity;
 	private final Directory directory;
 	private final boolean lowMemory;
 
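`Schedulers.newBoundedElastic(1, ...)` is collapsed into `Schedulers.single(Schedulers.boundedElastic())`, which still serializes the heavy Lucene commits and refreshes onto one worker but leases it from the shared bounded-elastic pool instead of keeping a dedicated "lucene" thread alive for the whole process. A minimal sketch contrasting the two, assuming the Reactor 3.4-era API used in this diff:

```java
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Scheduler;
import reactor.core.scheduler.Schedulers;

final class SingleWorkerSketch {

	// Before: a dedicated one-thread pool that lives for the whole process.
	static final Scheduler dedicated = Schedulers.newBoundedElastic(1,
			Schedulers.DEFAULT_BOUNDED_ELASTIC_QUEUESIZE, "lucene", Integer.MAX_VALUE, true);

	// After: one worker borrowed from the shared bounded-elastic pool;
	// tasks still run one at a time, but the thread itself is pooled.
	static final Scheduler pooled = Schedulers.single(Schedulers.boundedElastic());

	public static void main(String[] args) {
		Mono.fromRunnable(() -> System.out.println("commit on " + Thread.currentThread().getName()))
				.subscribeOn(pooled)
				.block();
	}
}
```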
@@ -166,7 +150,8 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 
 	if (luceneOptions.nrtCachingOptions().isPresent()) {
 		NRTCachingOptions nrtCachingOptions = luceneOptions.nrtCachingOptions().get();
-		directory = new NRTCachingDirectory(directory, nrtCachingOptions.maxMergeSizeMB(), nrtCachingOptions.maxCachedMB());
+		directory = new NRTCachingDirectory(directory, nrtCachingOptions.maxMergeSizeMB(),
+				nrtCachingOptions.maxCachedMB());
 	}
 
 	this.directory = directory;
@@ -175,9 +160,10 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 	this.luceneIndexName = name;
 	var snapshotter = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
 	this.lowMemory = lowMemory;
-	this.similarity = LuceneUtils.toPerFieldSimilarityWrapper(indicizerSimilarities);
+	this.luceneAnalyzer = LuceneUtils.toPerFieldAnalyzerWrapper(indicizerAnalyzers);
+	this.luceneSimilarity = LuceneUtils.toPerFieldSimilarityWrapper(indicizerSimilarities);
 
-	IndexWriterConfig indexWriterConfig = new IndexWriterConfig(LuceneUtils.toPerFieldAnalyzerWrapper(indicizerAnalyzers));
+	var indexWriterConfig = new IndexWriterConfig(luceneAnalyzer);
 	indexWriterConfig.setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND);
 	indexWriterConfig.setIndexDeletionPolicy(snapshotter);
 	indexWriterConfig.setCommitOnClose(true);
@@ -197,15 +183,16 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 		writerSchedulerMaxThreadCount = concurrentMergeScheduler.getMaxThreadCount();
 		mergeScheduler = concurrentMergeScheduler;
 	}
+	logger.trace("WriterSchedulerMaxThreadCount: {}", writerSchedulerMaxThreadCount);
 	indexWriterConfig.setMergeScheduler(mergeScheduler);
 	indexWriterConfig.setRAMBufferSizeMB(luceneOptions.indexWriterBufferSize() / 1024D / 1024D);
 	indexWriterConfig.setReaderPooling(false);
-	indexWriterConfig.setSimilarity(getSimilarity());
+	indexWriterConfig.setSimilarity(getLuceneSimilarity());
 	this.indexWriter = new IndexWriter(directory, indexWriterConfig);
 	this.snapshotsManager = new SnapshotsManager(indexWriter, snapshotter);
 	this.searcherManager = new CachedIndexSearcherManager(indexWriter,
 			snapshotsManager,
-			getSimilarity(),
+			getLuceneSimilarity(),
 			luceneOptions.applyAllDeletes(),
 			luceneOptions.writeAllDeletes(),
 			luceneOptions.queryRefreshDebounceTime()
@@ -217,8 +204,8 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 			TimeUnit.MILLISECONDS);
 	}
 
-	private Similarity getSimilarity() {
-		return similarity;
+	private Similarity getLuceneSimilarity() {
+		return luceneSimilarity;
 	}
 
 	@Override
@@ -241,13 +228,12 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 	return Mono.<Void>fromCallable(() -> {
 		activeTasks.register();
 		try {
-			//noinspection BlockingMethodInNonBlockingContext
 			indexWriter.addDocument(LLUtils.toDocument(doc));
 			return null;
 		} finally {
 			activeTasks.arriveAndDeregister();
 		}
-	}).subscribeOn(luceneWriterScheduler);
+	}).subscribeOn(Schedulers.boundedElastic());
 }
 
 @Override
@@ -258,13 +244,12 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 		.<Void>fromCallable(() -> {
 			activeTasks.register();
 			try {
-				//noinspection BlockingMethodInNonBlockingContext
 				indexWriter.addDocuments(LLUtils.toDocumentsFromEntries(documentsList));
 				return null;
 			} finally {
 				activeTasks.arriveAndDeregister();
 			}
-		}).subscribeOn(luceneWriterScheduler)
+		}).subscribeOn(Schedulers.boundedElastic())
 	);
 }
 
@@ -274,13 +259,12 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 	return Mono.<Void>fromCallable(() -> {
 		activeTasks.register();
 		try {
-			//noinspection BlockingMethodInNonBlockingContext
 			indexWriter.deleteDocuments(LLUtils.toTerm(id));
 			return null;
 		} finally {
 			activeTasks.arriveAndDeregister();
 		}
-	}).subscribeOn(luceneWriterScheduler);
+	}).subscribeOn(Schedulers.boundedElastic());
 }
 
 @Override
@@ -288,13 +272,12 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 	return Mono.<Void>fromCallable(() -> {
 		activeTasks.register();
 		try {
-			//noinspection BlockingMethodInNonBlockingContext
 			indexWriter.updateDocument(LLUtils.toTerm(id), LLUtils.toDocument(document));
 		} finally {
 			activeTasks.arriveAndDeregister();
 		}
 		return null;
-	}).subscribeOn(luceneWriterScheduler);
+	}).subscribeOn(Schedulers.boundedElastic());
 }
 
 @Override
@@ -310,7 +293,6 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 	for (Entry<LLTerm, LLDocument> entry : documentsMap.entrySet()) {
 		LLTerm key = entry.getKey();
 		LLDocument value = entry.getValue();
-		//noinspection BlockingMethodInNonBlockingContext
 		indexWriter.updateDocument(LLUtils.toTerm(key), LLUtils.toDocument(value));
 	}
 	return null;
@@ -318,7 +300,7 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 		activeTasks.arriveAndDeregister();
 	}
 })
-	.subscribeOn(luceneWriterScheduler);
+	.subscribeOn(Schedulers.boundedElastic());
 }
 
 @Override
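All the writer methods switch from the removed `luceneWriterScheduler` field to `Schedulers.boundedElastic()` directly: the blocking `IndexWriter` call is wrapped in `Mono.fromCallable` and shifted off the reactive threads, while a `Phaser` tracks in-flight writes. A minimal sketch of that pattern; `blockingWrite` stands in for the `indexWriter.*` calls:

```java
import java.util.concurrent.Phaser;
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Schedulers;

final class BlockingWriteSketch {

	private final Phaser activeTasks = new Phaser(1);

	Mono<Void> addDocument(String doc) {
		return Mono.<Void>fromCallable(() -> {
			activeTasks.register();                 // track in-flight writes for shutdown
			try {
				blockingWrite(doc);                 // stand-in for indexWriter.addDocument(...)
				return null;                        // null from fromCallable completes empty
			} finally {
				activeTasks.arriveAndDeregister();
			}
		}).subscribeOn(Schedulers.boundedElastic()); // keep blocking I/O off reactive threads
	}

	private void blockingWrite(String doc) {
		// pretend this performs a blocking disk write
	}
}
```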
@@ -340,124 +322,36 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 }
 
 @Override
-public Mono<LLSearchResultShard> moreLikeThis(@Nullable LLSnapshot snapshot,
+public Mono<Send<LLSearchResultShard>> moreLikeThis(@Nullable LLSnapshot snapshot,
 		QueryParams queryParams,
 		String keyFieldName,
 		Flux<Tuple2<String, Set<String>>> mltDocumentFieldsFlux) {
-	return getMoreLikeThisQuery(snapshot, LuceneUtils.toLocalQueryParams(queryParams), mltDocumentFieldsFlux)
-			.flatMap(modifiedLocalQuery -> searcherManager.captureIndexSearcher(snapshot)
-					.flatMap(indexSearcher -> {
-						Mono<Void> releaseMono = searcherManager.releaseUsedIndexSearcher(indexSearcher);
-						return localSearcher
-								.collect(indexSearcher.getIndexSearcher(), releaseMono, modifiedLocalQuery, keyFieldName, luceneSearcherScheduler)
-								.map(result -> new LLSearchResultShard(result.results(), result.totalHitsCount(), result.release()))
-								.onErrorResume(ex -> releaseMono.then(Mono.error(ex)));
-					})
-			);
-}
+	LocalQueryParams localQueryParams = LuceneUtils.toLocalQueryParams(queryParams);
+	var searcher = this.searcherManager.retrieveSearcher(snapshot);
+	var transformer = new MoreLikeThisTransformer(mltDocumentFieldsFlux);
 
-public Mono<Void> distributedMoreLikeThis(@Nullable LLSnapshot snapshot,
-		QueryParams queryParams,
-		Flux<Tuple2<String, Set<String>>> mltDocumentFieldsFlux,
-		LuceneShardSearcher shardSearcher) {
-	return getMoreLikeThisQuery(snapshot, LuceneUtils.toLocalQueryParams(queryParams), mltDocumentFieldsFlux)
-			.flatMap(modifiedLocalQuery -> searcherManager.captureIndexSearcher(snapshot)
-					.flatMap(indexSearcher -> {
-						Mono<Void> releaseMono = searcherManager.releaseUsedIndexSearcher(indexSearcher);
-						return shardSearcher
-								.searchOn(indexSearcher.getIndexSearcher(), releaseMono, modifiedLocalQuery, luceneSearcherScheduler)
-								.onErrorResume(ex -> releaseMono.then(Mono.error(ex)));
-					})
-			);
-}
-
-public Mono<LocalQueryParams> getMoreLikeThisQuery(@Nullable LLSnapshot snapshot,
-		LocalQueryParams localQueryParams,
-		Flux<Tuple2<String, Set<String>>> mltDocumentFieldsFlux) {
-	Query luceneAdditionalQuery;
-	try {
-		luceneAdditionalQuery = localQueryParams.query();
-	} catch (Exception e) {
-		return Mono.error(e);
-	}
-	return mltDocumentFieldsFlux
-			.collectMap(Tuple2::getT1, Tuple2::getT2, HashMap::new)
-			.flatMap(mltDocumentFields -> {
-				mltDocumentFields.entrySet().removeIf(entry -> entry.getValue().isEmpty());
-				if (mltDocumentFields.isEmpty()) {
-					return Mono.just(new LocalQueryParams(new MatchNoDocsQuery(),
-							localQueryParams.offset(),
-							localQueryParams.limit(),
-							localQueryParams.minCompetitiveScore(),
-							localQueryParams.sort(),
-							localQueryParams.scoreMode()
-					));
-				}
-				return this.searcherManager.search(snapshot, indexSearcher -> Mono.fromCallable(() -> {
-					var mlt = new MoreLikeThis(indexSearcher.getIndexReader());
-					mlt.setAnalyzer(indexWriter.getAnalyzer());
-					mlt.setFieldNames(mltDocumentFields.keySet().toArray(String[]::new));
-					mlt.setMinTermFreq(1);
-					mlt.setMinDocFreq(3);
-					mlt.setMaxDocFreqPct(20);
-					mlt.setBoost(localQueryParams.scoreMode().needsScores());
-					mlt.setStopWords(EnglishItalianStopFilter.getStopWordsString());
-					var similarity = getSimilarity();
-					if (similarity instanceof TFIDFSimilarity) {
-						mlt.setSimilarity((TFIDFSimilarity) similarity);
-					} else {
-						logger.trace(MARKER_ROCKSDB, "Using an unsupported similarity algorithm for MoreLikeThis:"
-								+ " {}. You must use a similarity instance based on TFIDFSimilarity!", similarity);
-					}
-
-					// Get the reference docId and apply it to MoreLikeThis, to generate the query
-					@SuppressWarnings({"unchecked", "rawtypes"})
-					var mltQuery = mlt.like((Map) mltDocumentFields);
-					Query luceneQuery;
-					if (!(luceneAdditionalQuery instanceof MatchAllDocsQuery)) {
-						luceneQuery = new BooleanQuery.Builder()
-								.add(mltQuery, Occur.MUST)
-								.add(new ConstantScoreQuery(luceneAdditionalQuery), Occur.MUST)
-								.build();
-					} else {
-						luceneQuery = mltQuery;
-					}
-
-					return luceneQuery;
-				})
-				.subscribeOn(Schedulers.boundedElastic())
-				.map(luceneQuery -> new LocalQueryParams(luceneQuery,
-						localQueryParams.offset(),
-						localQueryParams.limit(),
-						localQueryParams.minCompetitiveScore(),
-						localQueryParams.sort(),
-						localQueryParams.scoreMode()
-				)));
-			});
+	return localSearcher.collect(searcher, localQueryParams, keyFieldName, transformer).map(resultToReceive -> {
+		var result = resultToReceive.receive();
+		return new LLSearchResultShard(result.results(), result.totalHitsCount(), d -> result.close()).send();
+	}).doOnDiscard(Send.class, Send::close);
 }
 
 @Override
-public Mono<LLSearchResultShard> search(@Nullable LLSnapshot snapshot, QueryParams queryParams, String keyFieldName) {
+public Mono<Send<LLSearchResultShard>> search(@Nullable LLSnapshot snapshot, QueryParams queryParams,
+		String keyFieldName) {
 	LocalQueryParams localQueryParams = LuceneUtils.toLocalQueryParams(queryParams);
-	return searcherManager.captureIndexSearcher(snapshot).flatMap(indexSearcher -> {
-		Mono<Void> releaseMono = searcherManager.releaseUsedIndexSearcher(indexSearcher);
-		return localSearcher
-				.collect(indexSearcher.getIndexSearcher(), releaseMono, localQueryParams, keyFieldName, luceneSearcherScheduler)
-				.map(result -> new LLSearchResultShard(result.results(), result.totalHitsCount(), result.release()))
-				.onErrorResume(ex -> releaseMono.then(Mono.error(ex)));
-	});
+	var searcher = searcherManager.retrieveSearcher(snapshot);
+
+	return localSearcher.collect(searcher, localQueryParams, keyFieldName, NO_TRANSFORMATION).map(resultToReceive -> {
+		var result = resultToReceive.receive();
+		return new LLSearchResultShard(result.results(), result.totalHitsCount(), d -> result.close()).send();
+	}).doOnDiscard(Send.class, Send::close);
 }
 
-public Mono<Void> distributedSearch(@Nullable LLSnapshot snapshot,
-		QueryParams queryParams,
-		LuceneShardSearcher shardSearcher) {
-	LocalQueryParams localQueryParams = LuceneUtils.toLocalQueryParams(queryParams);
-	return searcherManager.captureIndexSearcher(snapshot)
-			.flatMap(indexSearcher -> {
-				Mono<Void> releaseMono = searcherManager.releaseUsedIndexSearcher(indexSearcher);
-				return shardSearcher.searchOn(indexSearcher.getIndexSearcher(), releaseMono, localQueryParams, luceneSearcherScheduler)
-						.onErrorResume(ex -> releaseMono.then(Mono.error(ex)));
-			});
+public Mono<Send<LLIndexSearcher>> retrieveSearcher(@Nullable LLSnapshot snapshot) {
+	return searcherManager
+			.retrieveSearcher(snapshot)
+			.doOnDiscard(Send.class, Send::close);
 }
 
 @Override
@@ -535,4 +429,19 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 public boolean isLowMemoryMode() {
 	return lowMemory;
 }
+
+private class MoreLikeThisTransformer implements LLSearchTransformer {
+
+	private final Flux<Tuple2<String, Set<String>>> mltDocumentFieldsFlux;
+
+	public MoreLikeThisTransformer(Flux<Tuple2<String, Set<String>>> mltDocumentFieldsFlux) {
+		this.mltDocumentFieldsFlux = mltDocumentFieldsFlux;
+	}
+
+	@Override
+	public Mono<LocalQueryParams> transform(Mono<TransformerInput> inputMono) {
+		return inputMono.flatMap(input -> LuceneUtils.getMoreLikeThisQuery(input.indexSearchers(), input.queryParams(),
+				luceneAnalyzer, luceneSimilarity, mltDocumentFieldsFlux));
+	}
+}
 }
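The inline MoreLikeThis query construction moves behind the new `LLSearchTransformer` hook: the searcher pipeline asks the transformer for the final `LocalQueryParams` instead of special-casing MLT. A hedged sketch of the shape of that contract; the interface and record names follow the diff, but the simplified types and bodies below are illustrative:

```java
import reactor.core.publisher.Mono;

final class TransformerSketch {

	// Simplified stand-ins for the types named in the diff.
	record QueryParams(String query) {}
	record TransformerInput(Object indexSearchers, QueryParams queryParams) {}

	interface LLSearchTransformer {
		// The no-op transformer just passes the query through unchanged.
		LLSearchTransformer NO_TRANSFORMATION =
				inputMono -> inputMono.map(TransformerInput::queryParams);

		Mono<QueryParams> transform(Mono<TransformerInput> inputMono);
	}

	// An MLT-style transformer only rewrites the query; the collector stays
	// generic and never needs to know about MoreLikeThis.
	static final LLSearchTransformer EXAMPLE = inputMono ->
			inputMono.map(input -> new QueryParams("mlt:" + input.queryParams().query()));
}
```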
@@ -1,29 +1,20 @@
 package it.cavallium.dbengine.database.disk;
 
-import com.google.common.cache.Cache;
-import com.google.common.cache.CacheBuilder;
-import com.google.common.cache.CacheLoader.InvalidCacheLoadException;
+import io.net5.buffer.api.Send;
 import it.cavallium.dbengine.client.IndicizerAnalyzers;
 import it.cavallium.dbengine.client.IndicizerSimilarities;
-import it.cavallium.dbengine.client.LuceneIndex;
 import it.cavallium.dbengine.client.LuceneOptions;
 import it.cavallium.dbengine.client.query.current.data.QueryParams;
 import it.cavallium.dbengine.database.LLDocument;
 import it.cavallium.dbengine.database.LLLuceneIndex;
-import it.cavallium.dbengine.database.LLSearchResult;
 import it.cavallium.dbengine.database.LLSearchResultShard;
 import it.cavallium.dbengine.database.LLSnapshot;
 import it.cavallium.dbengine.database.LLTerm;
 import it.cavallium.dbengine.lucene.LuceneUtils;
-import it.cavallium.dbengine.lucene.analyzer.TextFieldsAnalyzer;
-import it.cavallium.dbengine.lucene.analyzer.TextFieldsSimilarity;
 import it.cavallium.dbengine.lucene.searcher.AdaptiveLuceneMultiSearcher;
+import it.cavallium.dbengine.lucene.searcher.LLSearchTransformer;
 import it.cavallium.dbengine.lucene.searcher.LocalQueryParams;
 import it.cavallium.dbengine.lucene.searcher.LuceneMultiSearcher;
-import it.cavallium.dbengine.lucene.searcher.LuceneShardSearcher;
-import it.unimi.dsi.fastutil.longs.Long2ObjectMap;
-import it.unimi.dsi.fastutil.longs.Long2ObjectOpenHashMap;
-import it.unimi.dsi.fastutil.objects.Object2ObjectArrayMap;
 import java.io.IOException;
 import java.nio.file.Path;
 import java.time.Duration;
@@ -33,38 +24,24 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
-import java.util.Objects;
 import java.util.Optional;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.CopyOnWriteArrayList;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
-import org.apache.lucene.search.CollectionStatistics;
-import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.analysis.miscellaneous.PerFieldAnalyzerWrapper;
+import org.apache.lucene.search.similarities.PerFieldSimilarityWrapper;
 import org.jetbrains.annotations.Nullable;
-import org.warp.commonutils.batch.ParallelUtils;
-import org.warp.commonutils.functional.IOBiConsumer;
 import reactor.core.publisher.Flux;
-import reactor.core.publisher.GroupedFlux;
 import reactor.core.publisher.Mono;
-import reactor.core.scheduler.Scheduler;
-import reactor.core.scheduler.Schedulers;
 import reactor.util.function.Tuple2;
-import reactor.util.function.Tuples;
 
 public class LLLocalMultiLuceneIndex implements LLLuceneIndex {
 
-	// Scheduler used to get callback values of LuceneStreamSearcher without creating deadlocks
-	protected final Scheduler luceneSearcherScheduler = LuceneUtils.newLuceneSearcherScheduler(true);
-
 	private final ConcurrentHashMap<Long, LLSnapshot[]> registeredSnapshots = new ConcurrentHashMap<>();
 	private final AtomicLong nextSnapshotNumber = new AtomicLong(1);
 	private final LLLocalLuceneIndex[] luceneIndices;
+	private final PerFieldAnalyzerWrapper luceneAnalyzer;
+	private final PerFieldSimilarityWrapper luceneSimilarity;
 
 	private final LuceneMultiSearcher multiSearcher = new AdaptiveLuceneMultiSearcher();
 
@@ -95,6 +72,8 @@ public class LLLocalMultiLuceneIndex implements LLLuceneIndex {
 		);
 	}
 	this.luceneIndices = luceneIndices;
+	this.luceneAnalyzer = LuceneUtils.toPerFieldAnalyzerWrapper(indicizerAnalyzers);
+	this.luceneSimilarity = LuceneUtils.toPerFieldSimilarityWrapper(indicizerSimilarities);
 }
 
 private LLLocalLuceneIndex getLuceneIndex(LLTerm id) {
@@ -110,6 +89,19 @@ public class LLLocalMultiLuceneIndex implements LLLuceneIndex {
 	return luceneIndices[0].getLuceneIndexName();
 }
 
+private Mono<Send<LLIndexSearchers>> getIndexSearchers(LLSnapshot snapshot) {
+	return Flux
+			.fromArray(luceneIndices)
+			.index()
+			// Resolve the snapshot of each shard
+			.flatMap(tuple -> Mono
+					.fromCallable(() -> resolveSnapshotOptional(snapshot, (int) (long) tuple.getT1()))
+					.flatMap(luceneSnapshot -> tuple.getT2().retrieveSearcher(luceneSnapshot.orElse(null)))
+			)
+			.collectList()
+			.map(searchers -> LLIndexSearchers.of(searchers).send());
+}
+
 @Override
 public Mono<Void> addDocument(LLTerm id, LLDocument doc) {
 	return getLuceneIndex(id).addDocument(id, doc);
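`getIndexSearchers` fans out over the shard array, resolves each shard's snapshot by position, and gathers the per-shard searchers into one list. A minimal sketch of the same `Flux.index()`/`collectList()` gathering, with shard and searcher types simplified to strings (`resolveSnapshot` is an illustrative stand-in):

```java
import java.util.List;
import java.util.Optional;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

final class GatherShardsSketch {

	static Mono<List<String>> gather(String[] shards) {
		return Flux
				.fromArray(shards)
				.index() // pairs each shard with its position, like the snapshot resolution above
				.flatMap(tuple -> Mono
						.fromCallable(() -> resolveSnapshot((int) (long) tuple.getT1()))
						.map(snapshot -> tuple.getT2() + "@" + snapshot.orElse("current")))
				.collectList(); // one Mono carrying all per-shard searchers
	}

	// Stand-in for resolveSnapshotOptional: even shards have a snapshot here.
	private static Optional<String> resolveSnapshot(int shardIndex) {
		return shardIndex % 2 == 0 ? Optional.of("snap-" + shardIndex) : Optional.empty();
	}
}
```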
@@ -200,60 +192,43 @@ public class LLLocalMultiLuceneIndex implements LLLuceneIndex {
 }
 
 @Override
-public Mono<LLSearchResultShard> moreLikeThis(@Nullable LLSnapshot snapshot,
+public Mono<Send<LLSearchResultShard>> moreLikeThis(@Nullable LLSnapshot snapshot,
 		QueryParams queryParams,
 		String keyFieldName,
 		Flux<Tuple2<String, Set<String>>> mltDocumentFields) {
 	LocalQueryParams localQueryParams = LuceneUtils.toLocalQueryParams(queryParams);
-	record LuceneIndexWithSnapshot(LLLocalLuceneIndex luceneIndex, Optional<LLSnapshot> snapshot) {}
+	var searchers = this.getIndexSearchers(snapshot);
+	var transformer = new MultiMoreLikeThisTransformer(mltDocumentFields);
 
+	// Collect all the shards results into a single global result
 	return multiSearcher
-			// Create shard searcher
-			.createShardSearcher(localQueryParams)
-			.flatMap(shardSearcher -> Flux
-					// Iterate the indexed shards
-					.fromArray(luceneIndices).index()
-					// Resolve the snapshot of each shard
-					.flatMap(tuple -> Mono
-							.fromCallable(() -> resolveSnapshotOptional(snapshot, (int) (long) tuple.getT1()))
-							.map(luceneSnapshot -> new LuceneIndexWithSnapshot(tuple.getT2(), luceneSnapshot))
-					)
-					// Execute the query and collect it using the shard searcher
-					.flatMap(luceneIndexWithSnapshot -> luceneIndexWithSnapshot.luceneIndex()
-							.distributedMoreLikeThis(luceneIndexWithSnapshot.snapshot.orElse(null), queryParams, mltDocumentFields, shardSearcher))
-					// Collect all the shards results into a single global result
-					.then(shardSearcher.collect(localQueryParams, keyFieldName, luceneSearcherScheduler))
-			)
-			// Fix the result type
-			.map(result -> new LLSearchResultShard(result.results(), result.totalHitsCount(), result.release()));
+			.collectMulti(searchers, localQueryParams, keyFieldName, transformer)
+			// Transform the result type
+			.map(resultToReceive -> {
+				var result = resultToReceive.receive();
+				return new LLSearchResultShard(result.results(), result.totalHitsCount(),
+						d -> result.close()).send();
+			})
+			.doOnDiscard(Send.class, Send::close);
 }
 
 @Override
-public Mono<LLSearchResultShard> search(@Nullable LLSnapshot snapshot,
+public Mono<Send<LLSearchResultShard>> search(@Nullable LLSnapshot snapshot,
 		QueryParams queryParams,
 		String keyFieldName) {
 	LocalQueryParams localQueryParams = LuceneUtils.toLocalQueryParams(queryParams);
-	record LuceneIndexWithSnapshot(LLLocalLuceneIndex luceneIndex, Optional<LLSnapshot> snapshot) {}
+	var searchers = getIndexSearchers(snapshot);
 
+	// Collect all the shards results into a single global result
 	return multiSearcher
-			// Create shard searcher
-			.createShardSearcher(localQueryParams)
-			.flatMap(shardSearcher -> Flux
-					// Iterate the indexed shards
-					.fromArray(luceneIndices).index()
-					// Resolve the snapshot of each shard
-					.flatMap(tuple -> Mono
-							.fromCallable(() -> resolveSnapshotOptional(snapshot, (int) (long) tuple.getT1()))
-							.map(luceneSnapshot -> new LuceneIndexWithSnapshot(tuple.getT2(), luceneSnapshot))
-					)
-					// Execute the query and collect it using the shard searcher
-					.flatMap(luceneIndexWithSnapshot -> luceneIndexWithSnapshot.luceneIndex()
-							.distributedSearch(luceneIndexWithSnapshot.snapshot.orElse(null), queryParams, shardSearcher))
-					// Collect all the shards results into a single global result
-					.then(shardSearcher.collect(localQueryParams, keyFieldName, luceneSearcherScheduler))
-			)
-			// Fix the result type
-			.map(result -> new LLSearchResultShard(result.results(), result.totalHitsCount(), result.release()));
+			.collectMulti(searchers, localQueryParams, keyFieldName, LLSearchTransformer.NO_TRANSFORMATION)
+			// Transform the result type
+			.map(resultToReceive -> {
+				var result = resultToReceive.receive();
+				return new LLSearchResultShard(result.results(), result.totalHitsCount(),
+						d -> result.close()).send();
+			})
+			.doOnDiscard(Send.class, Send::close);
 }
 
 @Override
@@ -313,4 +288,19 @@ public class LLLocalMultiLuceneIndex implements LLLuceneIndex {
 public boolean isLowMemoryMode() {
 	return luceneIndices[0].isLowMemoryMode();
 }
+
+private class MultiMoreLikeThisTransformer implements LLSearchTransformer {
+
+	private final Flux<Tuple2<String, Set<String>>> mltDocumentFields;
+
+	public MultiMoreLikeThisTransformer(Flux<Tuple2<String, Set<String>>> mltDocumentFields) {
+		this.mltDocumentFields = mltDocumentFields;
+	}
+
+	@Override
+	public Mono<LocalQueryParams> transform(Mono<TransformerInput> inputMono) {
+		return inputMono.flatMap(input -> LuceneUtils.getMoreLikeThisQuery(input.indexSearchers(), input.queryParams(),
+				luceneAnalyzer, luceneSimilarity, mltDocumentFields));
+	}
+}
 }
@@ -1,5 +1,6 @@
 package it.cavallium.dbengine.database.disk;
 
+import static it.cavallium.dbengine.database.LLUtils.MARKER_ROCKSDB;
 import static it.cavallium.dbengine.database.disk.LLLocalDictionary.getRocksIterator;
 
 import io.net5.buffer.api.Buffer;
@@ -9,14 +10,18 @@ import io.net5.util.IllegalReferenceCountException;
 import it.cavallium.dbengine.database.LLRange;
 import it.cavallium.dbengine.database.LLUtils;
 import java.util.concurrent.atomic.AtomicBoolean;
+import org.jetbrains.annotations.Nullable;
 import org.rocksdb.ColumnFamilyHandle;
 import org.rocksdb.ReadOptions;
 import org.rocksdb.RocksDB;
 import org.rocksdb.RocksDBException;
+import org.warp.commonutils.log.Logger;
+import org.warp.commonutils.log.LoggerFactory;
 import reactor.core.publisher.Flux;
 
 public abstract class LLLocalReactiveRocksIterator<T> {
 
+	protected static final Logger logger = LoggerFactory.getLogger(LLLocalReactiveRocksIterator.class);
 	private final AtomicBoolean released = new AtomicBoolean(false);
 	private final RocksDB db;
 	private final BufferAllocator alloc;
@@ -25,7 +30,6 @@ public abstract class LLLocalReactiveRocksIterator<T> {
 	private final boolean allowNettyDirect;
 	private final ReadOptions readOptions;
 	private final boolean readValues;
-	private final String debugName;
 
 	public LLLocalReactiveRocksIterator(RocksDB db,
 			BufferAllocator alloc,
@@ -33,8 +37,7 @@ public abstract class LLLocalReactiveRocksIterator<T> {
 			Send<LLRange> range,
 			boolean allowNettyDirect,
 			ReadOptions readOptions,
-			boolean readValues,
-			String debugName) {
+			boolean readValues) {
 		this.db = db;
 		this.alloc = alloc;
 		this.cfh = cfh;
@@ -42,7 +45,6 @@ public abstract class LLLocalReactiveRocksIterator<T> {
 		this.allowNettyDirect = allowNettyDirect;
 		this.readOptions = readOptions;
 		this.readValues = readValues;
-		this.debugName = debugName;
 	}
 
 	public Flux<T> flux() {
@@ -53,6 +55,9 @@ public abstract class LLLocalReactiveRocksIterator<T> {
 			readOptions.setReadaheadSize(32 * 1024); // 32KiB
 			readOptions.setFillCache(false);
 		}
+		if (logger.isTraceEnabled()) {
+			logger.trace(MARKER_ROCKSDB, "Range {} started", LLUtils.toStringSafe(range));
+		}
 		return getRocksIterator(alloc, allowNettyDirect, readOptions, range.copy().send(), db, cfh);
 	}, (tuple, sink) -> {
 		try {
@@ -61,7 +66,7 @@ public abstract class LLLocalReactiveRocksIterator<T> {
 		if (rocksIterator.isValid()) {
 			Buffer key;
 			if (allowNettyDirect) {
-				key = LLUtils.readDirectNioBuffer(alloc, rocksIterator::key).receive();
+				key = LLUtils.readDirectNioBuffer(alloc, rocksIterator::key);
 			} else {
 				key = LLUtils.fromByteArray(alloc, rocksIterator.key());
 			}
@@ -69,25 +74,43 @@ public abstract class LLLocalReactiveRocksIterator<T> {
 			Buffer value;
 			if (readValues) {
 				if (allowNettyDirect) {
-					value = LLUtils.readDirectNioBuffer(alloc, rocksIterator::value).receive();
+					value = LLUtils.readDirectNioBuffer(alloc, rocksIterator::value);
 				} else {
 					value = LLUtils.fromByteArray(alloc, rocksIterator.value());
 				}
 			} else {
-				value = alloc.allocate(0);
+				value = null;
 			}
+
+			if (logger.isTraceEnabled()) {
+				logger.trace(MARKER_ROCKSDB,
+						"Range {} is reading {}: {}",
+						LLUtils.toStringSafe(range),
+						LLUtils.toStringSafe(key),
+						LLUtils.toStringSafe(value)
+				);
+			}
+
 			try {
 				rocksIterator.next();
 				rocksIterator.status();
-				sink.next(getEntry(key.send(), value.send()));
+				sink.next(getEntry(key.send(), value == null ? null : value.send()));
 			} finally {
-				value.close();
+				if (value != null) {
+					value.close();
+				}
 			}
 		}
 	} else {
+		if (logger.isTraceEnabled()) {
+			logger.trace(MARKER_ROCKSDB, "Range {} ended", LLUtils.toStringSafe(range));
+		}
 		sink.complete();
 	}
 } catch (RocksDBException ex) {
+	if (logger.isTraceEnabled()) {
+		logger.trace(MARKER_ROCKSDB, "Range {} failed", LLUtils.toStringSafe(range));
+	}
 	sink.error(ex);
 }
 return tuple;
@@ -100,7 +123,7 @@ public abstract class LLLocalReactiveRocksIterator<T> {
 	});
 }
 
-public abstract T getEntry(Send<Buffer> key, Send<Buffer> value);
+public abstract T getEntry(@Nullable Send<Buffer> key, @Nullable Send<Buffer> value);
 
 public void release() {
 	if (released.compareAndSet(false, true)) {
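`release()` stays guarded by an `AtomicBoolean`, so a double release is detected instead of closing `range` twice. A minimal sketch of that idempotence guard (the diff imports `IllegalReferenceCountException` for its failure path; a plain `IllegalStateException` stands in for it here):

```java
import java.util.concurrent.atomic.AtomicBoolean;

final class ReleaseOnceSketch implements AutoCloseable {

	private final AtomicBoolean released = new AtomicBoolean(false);

	@Override
	public void close() {
		// compareAndSet lets exactly one caller win; later callers see 'true'.
		if (released.compareAndSet(false, true)) {
			// free the underlying resources here (range.close() in the diff)
		} else {
			throw new IllegalStateException("Already released");
		}
	}
}
```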
@@ -17,32 +17,29 @@ public class MemorySegmentUtils {
 	private static final Object NATIVE;
 
 	static {
-		Lookup lookup = MethodHandles.publicLookup();
+		Lookup lookup = MethodHandles.lookup();
 
 		Object nativeVal = null;
 
-		MethodHandle ofNativeRestricted;
-		try {
-			ofNativeRestricted = lookup.findStatic(Class.forName("jdk.incubator.foreign.MemorySegment"),
-					"ofNativeRestricted",
-					MethodType.methodType(Class.forName("jdk.incubator.foreign.MemorySegment"))
-			);
+		var ofNativeRestricted = getJava16NativeRestricted(lookup);
+		if (ofNativeRestricted == null) {
+			cause = null;
+			ofNativeRestricted = getJava17NativeRestricted(lookup);
+		}
+		if (ofNativeRestricted != null) {
 			try {
 				nativeVal = ofNativeRestricted.invoke();
 			} catch (Throwable e) {
 				cause = e;
 			}
-		} catch (NoSuchMethodException | IllegalAccessException | ClassNotFoundException e) {
-			ofNativeRestricted = null;
-			cause = e;
 		}
 		OF_NATIVE_RESTRICTED = ofNativeRestricted;
 
 		MethodHandle asSlice;
 		try {
-			asSlice = lookup.findVirtual(Class.forName("jdk.incubator.foreign.MemorySegment"),
+			asSlice = lookup.findVirtual(lookup.findClass("jdk.incubator.foreign.MemorySegment"),
 					"asSlice",
-					MethodType.methodType(Class.forName("jdk.incubator.foreign.MemorySegment"), long.class, long.class)
+					MethodType.methodType(lookup.findClass("jdk.incubator.foreign.MemorySegment"), long.class, long.class)
 			);
 		} catch (NoSuchMethodException | IllegalAccessException | ClassNotFoundException e) {
 			asSlice = null;
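The static initializer now probes for the Java 16 entry point (`MemorySegment.ofNativeRestricted`) first and falls back to the Java 17 one (`globalNativeSegment`), resolving the incubator class through `lookup.findClass` so it is loaded in the caller's module context. A minimal self-contained sketch of that probe-and-fallback order, run here against `java.lang.System` with a made-up older method name rather than the incubator API:

```java
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MethodType;

final class HandleProbeSketch {

	// Probe two candidate static methods and keep the first that resolves,
	// mirroring the Java 16 -> Java 17 fallback in MemorySegmentUtils.
	static MethodHandle resolveClock() {
		MethodHandles.Lookup lookup = MethodHandles.lookup();
		MethodHandle handle = find(lookup, "currentTimeNanos"); // hypothetical older name: fails
		if (handle == null) {
			handle = find(lookup, "nanoTime");                  // current name: succeeds
		}
		return handle;
	}

	private static MethodHandle find(MethodHandles.Lookup lookup, String name) {
		try {
			return lookup.findStatic(lookup.findClass("java.lang.System"), name,
					MethodType.methodType(long.class));
		} catch (NoSuchMethodException | IllegalAccessException | ClassNotFoundException e) {
			return null; // the real code also records 'e' as the unsupported cause
		}
	}

	public static void main(String[] args) throws Throwable {
		System.out.println((long) resolveClock().invoke());
	}
}
```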
@@ -52,7 +49,7 @@ public class MemorySegmentUtils {
 
 		MethodHandle asByteBuffer;
 		try {
-			asByteBuffer = lookup.findVirtual(Class.forName("jdk.incubator.foreign.MemorySegment"),
+			asByteBuffer = lookup.findVirtual(lookup.findClass("jdk.incubator.foreign.MemorySegment"),
 					"asByteBuffer", MethodType.methodType(ByteBuffer.class));
 		} catch (NoSuchMethodException | IllegalAccessException | ClassNotFoundException e) {
 			asByteBuffer = null;
@ -63,6 +60,36 @@ public class MemorySegmentUtils {
|
|||||||
NATIVE = nativeVal;
|
NATIVE = nativeVal;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@SuppressWarnings("JavaLangInvokeHandleSignature")
|
||||||
|
private static MethodHandle getJava16NativeRestricted(Lookup lookup) {
|
||||||
|
MethodHandle ofNativeRestricted;
|
||||||
|
try {
|
||||||
|
ofNativeRestricted = lookup.findStatic(lookup.findClass("jdk.incubator.foreign.MemorySegment"),
|
||||||
|
"ofNativeRestricted",
|
||||||
|
MethodType.methodType(lookup.findClass("jdk.incubator.foreign.MemorySegment"))
|
||||||
|
);
|
||||||
|
} catch (NoSuchMethodException | IllegalAccessException | ClassNotFoundException e) {
|
||||||
|
ofNativeRestricted = null;
|
||||||
|
cause = e;
|
||||||
|
}
|
||||||
|
return ofNativeRestricted;
|
||||||
|
}
|
||||||
|
|
||||||
|
@SuppressWarnings("JavaLangInvokeHandleSignature")
|
||||||
|
private static MethodHandle getJava17NativeRestricted(Lookup lookup) {
|
||||||
|
MethodHandle ofNativeRestricted;
|
||||||
|
try {
|
||||||
|
ofNativeRestricted = lookup.findStatic(lookup.findClass("jdk.incubator.foreign.MemorySegment"),
|
||||||
|
"globalNativeSegment",
|
||||||
|
MethodType.methodType(lookup.findClass("jdk.incubator.foreign.MemorySegment"))
|
||||||
|
);
|
||||||
|
} catch (NoSuchMethodException | IllegalAccessException | ClassNotFoundException e) {
|
||||||
|
ofNativeRestricted = null;
|
||||||
|
cause = e;
|
||||||
|
}
|
||||||
|
return ofNativeRestricted;
|
||||||
|
}
|
||||||
|
|
||||||
public static ByteBuffer directBuffer(long address, long size) {
|
public static ByteBuffer directBuffer(long address, long size) {
|
||||||
if (address <= 0) {
|
if (address <= 0) {
|
||||||
throw new IllegalArgumentException("Address is " + address);
|
throw new IllegalArgumentException("Address is " + address);
|
||||||
@ -76,13 +103,15 @@ public class MemorySegmentUtils {
|
|||||||
return PlatformDependent.directBuffer(address, (int) size);
|
return PlatformDependent.directBuffer(address, (int) size);
|
||||||
}
|
}
|
||||||
throw new UnsupportedOperationException("Foreign Memory Access API is disabled!"
|
throw new UnsupportedOperationException("Foreign Memory Access API is disabled!"
|
||||||
+ " Please set \"--enable-preview --add-modules jdk.incubator.foreign -Dforeign.restricted=permit\"");
|
+ " Please set \"" + MemorySegmentUtils.getSuggestedArgs() + "\"",
|
||||||
|
getUnsupportedCause()
|
||||||
|
);
|
||||||
}
|
}
|
||||||
var memorySegment = AS_SLICE.invoke(NATIVE, address, size);
|
var memorySegment = AS_SLICE.invoke(NATIVE, address, size);
|
||||||
return (ByteBuffer) AS_BYTE_BUFFER.invoke(memorySegment);
|
return (ByteBuffer) AS_BYTE_BUFFER.invoke(memorySegment);
|
||||||
} catch (Throwable e) {
|
} catch (Throwable e) {
|
||||||
throw new UnsupportedOperationException("Foreign Memory Access API is disabled!"
|
throw new UnsupportedOperationException("Foreign Memory Access API is disabled!"
|
||||||
+ " Please set \"--enable-preview --add-modules jdk.incubator.foreign -Dforeign.restricted=permit\"", e);
|
+ " Please set \"" + MemorySegmentUtils.getSuggestedArgs() + "\"", e);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -93,4 +122,8 @@ public class MemorySegmentUtils {
|
|||||||
public static Throwable getUnsupportedCause() {
|
public static Throwable getUnsupportedCause() {
|
||||||
return cause;
|
return cause;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public static String getSuggestedArgs() {
|
||||||
|
return "--enable-preview --add-modules jdk.incubator.foreign -Dforeign.restricted=permit --enable-native-access=ALL-UNNAMED";
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
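The pattern above resolves an optional JDK API reflectively, so the class still loads when the jdk.incubator.foreign module is absent, and it now probes the JDK 16 factory name before the JDK 17 one. A standalone sketch of the same lookup technique (the class and helper names here are mine, not part of the commit):

    import java.lang.invoke.MethodHandle;
    import java.lang.invoke.MethodHandles;
    import java.lang.invoke.MethodType;

    final class OptionalHandleExample {

        // Null when jdk.incubator.foreign is not on the module path.
        static final MethodHandle GLOBAL_SEGMENT = find();

        private static MethodHandle find() {
            var lookup = MethodHandles.lookup();
            try {
                // lookup.findClass() resolves relative to the caller's module,
                // which is why the commit switches away from publicLookup().
                Class<?> segment = lookup.findClass("jdk.incubator.foreign.MemorySegment");
                // JDK 17 name; the JDK 16 equivalent was "ofNativeRestricted".
                return lookup.findStatic(segment, "globalNativeSegment", MethodType.methodType(segment));
            } catch (NoSuchMethodException | IllegalAccessException | ClassNotFoundException e) {
                return null; // caller falls back gracefully
            }
        }
    }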
@@ -6,15 +6,17 @@ import it.cavallium.dbengine.database.SafeCloseable;
 import java.io.DataInput;
 import java.nio.charset.StandardCharsets;
 import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;

 public class BufferDataInput implements DataInput, SafeCloseable {

+    @Nullable
     private final Buffer buf;
     private final int initialReaderOffset;

-    public BufferDataInput(Send<Buffer> bufferSend) {
-        this.buf = bufferSend.receive().makeReadOnly();
-        this.initialReaderOffset = buf.readerOffset();
+    public BufferDataInput(@Nullable Send<Buffer> bufferSend) {
+        this.buf = bufferSend == null ? null : bufferSend.receive().makeReadOnly();
+        this.initialReaderOffset = buf == null ? 0 : buf.readerOffset();
     }

     @Override
@@ -24,75 +26,100 @@ public class BufferDataInput implements DataInput, SafeCloseable {

     @Override
     public void readFully(byte @NotNull [] b, int off, int len) {
-        buf.copyInto(buf.readerOffset(), b, off, len);
-        buf.readerOffset(buf.readerOffset() + len);
+        if (buf == null) {
+            if (len != 0) {
+                throw new IndexOutOfBoundsException();
+            }
+        } else {
+            buf.copyInto(buf.readerOffset(), b, off, len);
+            buf.readerOffset(buf.readerOffset() + len);
+        }
     }

     @Override
     public int skipBytes(int n) {
-        n = Math.min(n, buf.readerOffset() - buf.writerOffset());
-        buf.readerOffset(buf.readerOffset() + n);
-        return n;
+        if (buf == null) {
+            if (n != 0) {
+                throw new IndexOutOfBoundsException();
+            }
+            return 0;
+        } else {
+            n = Math.min(n, buf.readerOffset() - buf.writerOffset());
+            buf.readerOffset(buf.readerOffset() + n);
+            return n;
+        }
     }

     @Override
     public boolean readBoolean() {
+        if (buf == null) throw new IndexOutOfBoundsException();
         return buf.readUnsignedByte() != 0;
     }

     @Override
     public byte readByte() {
+        if (buf == null) throw new IndexOutOfBoundsException();
         return buf.readByte();
     }

     @Override
     public int readUnsignedByte() {
+        if (buf == null) throw new IndexOutOfBoundsException();
         return buf.readUnsignedByte();
     }

     @Override
     public short readShort() {
+        if (buf == null) throw new IndexOutOfBoundsException();
         return buf.readShort();
     }

     @Override
     public int readUnsignedShort() {
+        if (buf == null) throw new IndexOutOfBoundsException();
         return buf.readUnsignedShort();
     }

     @Override
     public char readChar() {
+        if (buf == null) throw new IndexOutOfBoundsException();
         return buf.readChar();
     }

     @Override
     public int readInt() {
+        if (buf == null) throw new IndexOutOfBoundsException();
         return buf.readInt();
     }

     @Override
     public long readLong() {
+        if (buf == null) throw new IndexOutOfBoundsException();
         return buf.readLong();
     }

     @Override
     public float readFloat() {
+        if (buf == null) throw new IndexOutOfBoundsException();
         return buf.readFloat();
     }

     @Override
     public double readDouble() {
+        if (buf == null) throw new IndexOutOfBoundsException();
         return buf.readDouble();
     }

     @Override
     public String readLine() {
+        if (buf == null) throw new IndexOutOfBoundsException();
         throw new UnsupportedOperationException();
     }

     @NotNull
     @Override
     public String readUTF() {
+        if (buf == null) throw new IndexOutOfBoundsException();
         var len = buf.readUnsignedShort();
         byte[] bytes = new byte[len];
         buf.copyInto(buf.readerOffset(), bytes, 0, len);
@@ -102,10 +129,16 @@ public class BufferDataInput implements DataInput, SafeCloseable {

     @Override
     public void close() {
-        buf.close();
+        if (buf != null) {
+            buf.close();
+        }
     }

     public int getReadBytesCount() {
-        return buf.readerOffset() - initialReaderOffset;
+        if (buf == null) {
+            return 0;
+        } else {
+            return buf.readerOffset() - initialReaderOffset;
+        }
     }
 }
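A short usage sketch of the new null-tolerant contract: a null Send<Buffer> now models "no value" instead of failing in the constructor. Whether the thrown IndexOutOfBoundsException is java.lang's or the project's own org.warp.commonutils variant is not visible in this hunk, so the catch below is deliberately broad:

    static void demoMissingValue() {
        try (var in = new BufferDataInput(null)) {
            assert in.skipBytes(0) == 0;        // zero-length operations are no-ops
            assert in.getReadBytesCount() == 0; // nothing was read
            try {
                in.readInt();                   // any real read fails fast
            } catch (RuntimeException expected) {
                // expected: the backing buffer is absent
            }
        }
    }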
@@ -6,6 +6,7 @@ import io.net5.buffer.api.Send;
 import java.io.IOError;
 import java.io.IOException;
 import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
 import org.warp.commonutils.error.IndexOutOfBoundsException;

 public class CodecSerializer<A> implements Serializer<A> {
@@ -37,7 +38,7 @@ public class CodecSerializer<A> implements Serializer<A> {
     }

     @Override
-    public @NotNull DeserializationResult<A> deserialize(@NotNull Send<Buffer> serializedToReceive) {
+    public @NotNull DeserializationResult<A> deserialize(@Nullable Send<Buffer> serializedToReceive) {
         try (var is = new BufferDataInput(serializedToReceive)) {
             int codecId;
             if (microCodecs) {
@@ -4,8 +4,11 @@ import io.net5.buffer.api.Buffer;
 import io.net5.buffer.api.BufferAllocator;
 import io.net5.buffer.api.Send;
 import it.cavallium.dbengine.database.LLUtils;
+import it.cavallium.dbengine.netty.NullableBuffer;
 import java.nio.charset.StandardCharsets;
+import java.util.Objects;
 import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;

 public interface Serializer<A> {

@@ -18,7 +21,7 @@ public interface Serializer<A> {
     Serializer<Send<Buffer>> NOOP_SERIALIZER = new Serializer<>() {
         @Override
         public @NotNull DeserializationResult<Send<Buffer>> deserialize(@NotNull Send<Buffer> serialized) {
             try (var serializedBuf = serialized.receive()) {
                 var readableBytes = serializedBuf.readableBytes();
                 return new DeserializationResult<>(serializedBuf.send(), readableBytes);
             }
@@ -37,7 +40,8 @@ public interface Serializer<A> {
     static Serializer<String> utf8(BufferAllocator allocator) {
         return new Serializer<>() {
             @Override
-            public @NotNull DeserializationResult<String> deserialize(@NotNull Send<Buffer> serializedToReceive) {
+            public @NotNull DeserializationResult<String> deserialize(@Nullable Send<Buffer> serializedToReceive) {
+                Objects.requireNonNull(serializedToReceive);
                 try (Buffer serialized = serializedToReceive.receive()) {
                     assert serialized.isAccessible();
                     int length = serialized.readInt();
@@ -5,7 +5,9 @@ import io.net5.buffer.api.BufferAllocator;
 import io.net5.buffer.api.Send;
 import it.cavallium.dbengine.database.LLUtils;
 import java.nio.charset.StandardCharsets;
+import java.util.Objects;
 import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;

 @SuppressWarnings("unused")
 public interface SerializerFixedBinaryLength<A> extends Serializer<A> {
@@ -16,6 +18,7 @@ public interface SerializerFixedBinaryLength<A> extends Serializer<A> {
     return new SerializerFixedBinaryLength<>() {
         @Override
         public @NotNull DeserializationResult<Send<Buffer>> deserialize(@NotNull Send<Buffer> serialized) {
+            Objects.requireNonNull(serialized);
             try (var buf = serialized.receive()) {
                 if (buf.readableBytes() != getSerializedBinaryLength()) {
                     throw new IllegalArgumentException(
@@ -68,7 +71,9 @@ public interface SerializerFixedBinaryLength<A> extends Serializer<A> {
             // UTF-8 uses max. 3 bytes per char, so calculate the worst case.
             try (Buffer buf = allocator.allocate(LLUtils.utf8MaxBytes(deserialized))) {
                 assert buf.isAccessible();
-                buf.writeBytes(deserialized.getBytes(StandardCharsets.UTF_8));
+                var bytes = deserialized.getBytes(StandardCharsets.UTF_8);
+                buf.ensureWritable(bytes.length);
+                buf.writeBytes(bytes);
                 if (buf.readableBytes() != getSerializedBinaryLength()) {
                     throw new SerializationException("Fixed serializer with " + getSerializedBinaryLength()
                         + " bytes has tried to serialize an element with "
@@ -90,6 +95,7 @@ public interface SerializerFixedBinaryLength<A> extends Serializer<A> {
     return new SerializerFixedBinaryLength<>() {
         @Override
         public @NotNull DeserializationResult<Integer> deserialize(@NotNull Send<Buffer> serializedToReceive) {
+            Objects.requireNonNull(serializedToReceive);
             try (var serialized = serializedToReceive.receive()) {
                 if (serialized.readableBytes() != getSerializedBinaryLength()) {
                     throw new IllegalArgumentException(
@@ -118,6 +124,7 @@ public interface SerializerFixedBinaryLength<A> extends Serializer<A> {
     return new SerializerFixedBinaryLength<>() {
         @Override
         public @NotNull DeserializationResult<Long> deserialize(@NotNull Send<Buffer> serializedToReceive) {
+            Objects.requireNonNull(serializedToReceive);
             try (var serialized = serializedToReceive.receive()) {
                 if (serialized.readableBytes() != getSerializedBinaryLength()) {
                     throw new IllegalArgumentException(
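For context, the serialize-side change above sizes the buffer explicitly before writing instead of relying on the initial allocation alone. A minimal sketch of the same pattern (the allocator parameter stands in for whatever io.net5 BufferAllocator is in scope):

    import io.net5.buffer.api.Buffer;
    import io.net5.buffer.api.BufferAllocator;
    import java.nio.charset.StandardCharsets;

    static void writeFixed(BufferAllocator allocator, String deserialized) {
        byte[] bytes = deserialized.getBytes(StandardCharsets.UTF_8);
        try (Buffer buf = allocator.allocate(bytes.length)) {
            buf.ensureWritable(bytes.length); // grows the buffer if the allocation was too small
            buf.writeBytes(bytes);
            assert buf.readableBytes() == bytes.length;
        }
    }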
@@ -3,37 +3,39 @@ package it.cavallium.dbengine.lucene;
 import it.cavallium.dbengine.client.CompositeSnapshot;
 import it.cavallium.dbengine.client.IndicizerAnalyzers;
 import it.cavallium.dbengine.client.IndicizerSimilarities;
-import it.cavallium.dbengine.client.query.BasicType;
 import it.cavallium.dbengine.client.query.QueryParser;
 import it.cavallium.dbengine.client.query.current.data.QueryParams;
 import it.cavallium.dbengine.client.query.current.data.TotalHitsCount;
+import it.cavallium.dbengine.database.EnglishItalianStopFilter;
 import it.cavallium.dbengine.database.LLKeyScore;
-import it.cavallium.dbengine.database.LLScoreMode;
+import it.cavallium.dbengine.database.LLSnapshot;
+import it.cavallium.dbengine.database.LLUtils;
 import it.cavallium.dbengine.database.collections.DatabaseMapDictionary;
 import it.cavallium.dbengine.database.collections.DatabaseMapDictionaryDeep;
 import it.cavallium.dbengine.database.collections.ValueGetter;
+import it.cavallium.dbengine.database.disk.LLIndexSearcher;
+import it.cavallium.dbengine.database.disk.LLIndexSearchers;
 import it.cavallium.dbengine.lucene.analyzer.NCharGramAnalyzer;
 import it.cavallium.dbengine.lucene.analyzer.NCharGramEdgeAnalyzer;
 import it.cavallium.dbengine.lucene.analyzer.TextFieldsAnalyzer;
 import it.cavallium.dbengine.lucene.analyzer.TextFieldsSimilarity;
 import it.cavallium.dbengine.lucene.analyzer.WordAnalyzer;
-import it.cavallium.dbengine.lucene.searcher.IndexSearchers;
+import it.cavallium.dbengine.lucene.mlt.MultiMoreLikeThis;
+import it.cavallium.dbengine.lucene.searcher.ExponentialPageLimits;
 import it.cavallium.dbengine.lucene.searcher.LocalQueryParams;
-import it.cavallium.dbengine.lucene.searcher.LuceneMultiSearcher;
+import it.cavallium.dbengine.lucene.searcher.PageLimits;
 import it.cavallium.dbengine.lucene.similarity.NGramSimilarity;
 import java.io.EOFException;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.nio.channels.FileChannel;
-import java.util.Arrays;
 import java.util.Comparator;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.NoSuchElementException;
-import java.util.Objects;
 import java.util.Set;
-import java.util.function.Function;
 import java.util.stream.Collectors;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.LowerCaseFilter;
@@ -46,7 +48,14 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexableField;
+import org.apache.lucene.search.BooleanClause.Occur;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.ConstantScoreQuery;
 import org.apache.lucene.search.FieldDoc;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.MatchNoDocsQuery;
+import org.apache.lucene.search.Query;
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.TopDocs;
@@ -56,6 +65,7 @@ import org.apache.lucene.search.similarities.BooleanSimilarity;
 import org.apache.lucene.search.similarities.ClassicSimilarity;
 import org.apache.lucene.search.similarities.PerFieldSimilarityWrapper;
 import org.apache.lucene.search.similarities.Similarity;
+import org.apache.lucene.search.similarities.TFIDFSimilarity;
 import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
 import org.novasearch.lucene.search.similarities.BM25Similarity;
@@ -63,13 +73,14 @@ import org.novasearch.lucene.search.similarities.BM25Similarity.BM25Model;
 import org.novasearch.lucene.search.similarities.LdpSimilarity;
 import org.novasearch.lucene.search.similarities.LtcSimilarity;
 import org.novasearch.lucene.search.similarities.RobertsonSimilarity;
-import org.reactivestreams.Publisher;
 import org.warp.commonutils.log.Logger;
 import org.warp.commonutils.log.LoggerFactory;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
 import reactor.core.scheduler.Scheduler;
 import reactor.core.scheduler.Schedulers;
+import reactor.util.concurrent.Queues;
+import reactor.util.function.Tuple2;

 public class LuceneUtils {

@@ -106,6 +117,8 @@ public class LuceneUtils {
     private static final Similarity luceneLDPNoLengthSimilarityInstance = new LdpSimilarity(0, 0.5f);
     private static final Similarity luceneBooleanSimilarityInstance = new BooleanSimilarity();
     private static final Similarity luceneRobertsonSimilarityInstance = new RobertsonSimilarity();
+    // TODO: remove this default page limits and make the limits configurable into QueryParams
+    private static final PageLimits DEFAULT_PAGE_LIMITS = new ExponentialPageLimits();

     @SuppressWarnings("DuplicatedCode")
     public static Analyzer getAnalyzer(TextFieldsAnalyzer analyzer) {
@@ -178,7 +191,6 @@ public class LuceneUtils {
      *
      * @return false if the result is not relevant
      */
-    @Nullable
     public static boolean filterTopDoc(float score, Float minCompetitiveScore) {
         return minCompetitiveScore == null || score >= minCompetitiveScore;
     }
@@ -220,9 +232,8 @@ public class LuceneUtils {
     public static <T, U, V> ValueGetter<Entry<T, U>, V> getAsyncDbValueGetterDeep(
             CompositeSnapshot snapshot,
             DatabaseMapDictionaryDeep<T, Map<U, V>, DatabaseMapDictionary<U, V>> dictionaryDeep) {
-        return entry -> dictionaryDeep
-                .at(snapshot, entry.getKey())
-                .flatMap(sub -> sub.getValue(snapshot, entry.getValue()).doAfterTerminate(sub::release));
+        return entry -> LLUtils.usingResource(dictionaryDeep
+                .at(snapshot, entry.getKey()), sub -> sub.getValue(snapshot, entry.getValue()), true);
     }

     public static PerFieldAnalyzerWrapper toPerFieldAnalyzerWrapper(IndicizerAnalyzers indicizerAnalyzers) {
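The getAsyncDbValueGetterDeep change swaps a manual doAfterTerminate(sub::release) for LLUtils.usingResource. The real signature of that helper is not shown in this diff; what follows is only a hedged sketch of the guarantee such a helper has to provide, built on Reactor's usingWhen: the resource is released on completion, on error, and on cancellation alike, whereas doAfterTerminate alone does not fire on cancellation.

    import java.util.function.Function;
    import reactor.core.publisher.Mono;

    final class UsingResourceSketch {

        // Stand-in for the project's releasable resources.
        interface Releasable { void release(); }

        static <T, R extends Releasable> Mono<T> usingResource(Mono<R> resourceMono,
                Function<R, Mono<T>> use) {
            return Mono.usingWhen(
                    resourceMono,
                    use,
                    r -> Mono.fromRunnable(r::release) // complete, error and cancel paths
            );
        }
    }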
@@ -315,7 +326,7 @@ public class LuceneUtils {

             assert i > 0 : "FileChannel.read with non zero-length bb.remaining() must always read at least one byte (FileChannel is in blocking mode, see spec of ReadableByteChannel)";

-            pos += (long)i;
+            pos += i;
         }

         assert readLength == 0;
@@ -357,34 +368,40 @@ public class LuceneUtils {
         return new LocalQueryParams(QueryParser.toQuery(queryParams.query()),
                 safeLongToInt(queryParams.offset()),
                 safeLongToInt(queryParams.limit()),
+                DEFAULT_PAGE_LIMITS,
                 queryParams.minCompetitiveScore().getNullable(),
                 QueryParser.toSort(queryParams.sort()),
                 QueryParser.toScoreMode(queryParams.scoreMode())
         );
     }

-    public static Flux<LLKeyScore> convertHits(Flux<ScoreDoc> hits,
-            IndexSearchers indexSearchers,
+    public static Flux<LLKeyScore> convertHits(Flux<ScoreDoc> hitsFlux,
+            List<IndexSearcher> indexSearchers,
             String keyFieldName,
-            Scheduler scheduler,
             boolean preserveOrder) {
-        return hits.transform(hitsFlux -> {
-            if (preserveOrder) {
-                return hitsFlux
-                        .publishOn(scheduler)
-                        .mapNotNull(hit -> mapHitBlocking(hit, indexSearchers, keyFieldName));
-            } else {
-                return hitsFlux
-                        .publishOn(scheduler)
-                        .mapNotNull(hit -> mapHitBlocking(hit, indexSearchers, keyFieldName));
-            }
-        });
+        if (preserveOrder) {
+            return hitsFlux
+                    .publishOn(Schedulers.boundedElastic())
+                    .mapNotNull(hit -> mapHitBlocking(hit, indexSearchers, keyFieldName));
+        } else {
+            // Compute parallelism
+            var availableProcessors = Runtime.getRuntime().availableProcessors();
+            var min = Queues.XS_BUFFER_SIZE;
+            var maxParallelGroups = Math.max(availableProcessors, min);
+
+            return hitsFlux
+                    .groupBy(hit -> hit.shardIndex % maxParallelGroups) // Max n groups
+                    .flatMap(shardHits -> shardHits
+                            .publishOn(Schedulers.boundedElastic())
+                            .mapNotNull(hit -> mapHitBlocking(hit, indexSearchers, keyFieldName)),
+                            maxParallelGroups // Max n concurrency. Concurrency must be >= total groups count
+                    );
+        }
     }

     @Nullable
     private static LLKeyScore mapHitBlocking(ScoreDoc hit,
-            IndexSearchers indexSearchers,
+            List<IndexSearcher> indexSearchers,
             String keyFieldName) {
         if (Schedulers.isInNonBlockingThread()) {
             throw new UnsupportedOperationException("Called mapHitBlocking in a nonblocking thread");
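The rewritten convertHits fans hits out by shard using groupBy with a bounded number of groups. A standalone sketch of that Reactor pattern (the string mapping below is a stand-in for mapHitBlocking); the comment in the hunk is the crux: the concurrency passed to flatMap must be at least the number of possible groups, otherwise groupBy can stall waiting for a group that nothing subscribes to.

    import reactor.core.publisher.Flux;
    import reactor.core.scheduler.Schedulers;
    import reactor.util.concurrent.Queues;

    public class GroupedMappingSketch {

        public static Flux<String> mapInParallel(Flux<Integer> shardHits) {
            int groups = Math.max(Runtime.getRuntime().availableProcessors(), Queues.XS_BUFFER_SIZE);
            return shardHits
                    .groupBy(hit -> hit % groups)                 // at most `groups` distinct keys
                    .flatMap(group -> group
                            .publishOn(Schedulers.boundedElastic()) // blocking work off the event loop
                            .map(hit -> "hit-" + hit),
                            groups                                 // concurrency >= group count
                    );
        }
    }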
@@ -392,7 +409,10 @@ public class LuceneUtils {
         int shardDocId = hit.doc;
         int shardIndex = hit.shardIndex;
         float score = hit.score;
-        var indexSearcher = indexSearchers.shard(shardIndex);
+        if (shardIndex == -1 && indexSearchers.size() == 1) {
+            shardIndex = 0;
+        }
+        var indexSearcher = indexSearchers.get(shardIndex);
         try {
             String collectedDoc = keyOfTopDoc(shardDocId, indexSearcher.getIndexReader(), keyFieldName);
             return new LLKeyScore(shardDocId, score, collectedDoc);
@@ -508,4 +528,77 @@ public class LuceneUtils {
                 true
         );
     }
+
+    public static Mono<LocalQueryParams> getMoreLikeThisQuery(
+            List<LLIndexSearcher> indexSearchers,
+            LocalQueryParams localQueryParams,
+            Analyzer analyzer,
+            Similarity similarity,
+            Flux<Tuple2<String, Set<String>>> mltDocumentFieldsFlux) {
+        Query luceneAdditionalQuery;
+        try {
+            luceneAdditionalQuery = localQueryParams.query();
+        } catch (Exception e) {
+            return Mono.error(e);
+        }
+        return mltDocumentFieldsFlux
+                .collectMap(Tuple2::getT1, Tuple2::getT2, HashMap::new)
+                .flatMap(mltDocumentFields -> Mono.fromCallable(() -> {
+                    mltDocumentFields.entrySet().removeIf(entry -> entry.getValue().isEmpty());
+                    if (mltDocumentFields.isEmpty()) {
+                        return new LocalQueryParams(new MatchNoDocsQuery(),
+                                localQueryParams.offset(),
+                                localQueryParams.limit(),
+                                DEFAULT_PAGE_LIMITS,
+                                localQueryParams.minCompetitiveScore(),
+                                localQueryParams.sort(),
+                                localQueryParams.scoreMode()
+                        );
+                    }
+                    MultiMoreLikeThis mlt;
+                    if (indexSearchers.size() == 1) {
+                        mlt = new MultiMoreLikeThis(indexSearchers.get(0).getIndexReader(), null);
+                    } else {
+                        IndexReader[] indexReaders = new IndexReader[indexSearchers.size()];
+                        for (int i = 0, size = indexSearchers.size(); i < size; i++) {
+                            indexReaders[i] = indexSearchers.get(i).getIndexReader();
+                        }
+                        mlt = new MultiMoreLikeThis(indexReaders, null);
+                    }
+                    mlt.setAnalyzer(analyzer);
+                    mlt.setFieldNames(mltDocumentFields.keySet().toArray(String[]::new));
+                    mlt.setMinTermFreq(1);
+                    mlt.setMinDocFreq(3);
+                    mlt.setMaxDocFreqPct(20);
+                    mlt.setBoost(localQueryParams.scoreMode().needsScores());
+                    mlt.setStopWords(EnglishItalianStopFilter.getStopWordsString());
+                    if (similarity instanceof TFIDFSimilarity tfidfSimilarity) {
+                        mlt.setSimilarity(tfidfSimilarity);
+                    } else {
+                        mlt.setSimilarity(new ClassicSimilarity());
+                    }
+
+                    // Get the reference docId and apply it to MoreLikeThis, to generate the query
+                    @SuppressWarnings({"unchecked", "rawtypes"})
+                    var mltQuery = mlt.like((Map) mltDocumentFields);
+                    Query luceneQuery;
+                    if (!(luceneAdditionalQuery instanceof MatchAllDocsQuery)) {
+                        luceneQuery = new BooleanQuery.Builder()
+                                .add(mltQuery, Occur.MUST)
+                                .add(new ConstantScoreQuery(luceneAdditionalQuery), Occur.MUST)
+                                .build();
+                    } else {
+                        luceneQuery = mltQuery;
+                    }
+
+                    return new LocalQueryParams(luceneQuery,
+                            localQueryParams.offset(),
+                            localQueryParams.limit(),
+                            DEFAULT_PAGE_LIMITS,
+                            localQueryParams.minCompetitiveScore(),
+                            localQueryParams.sort(),
+                            localQueryParams.scoreMode()
+                    );
+                }).subscribeOn(Schedulers.boundedElastic()));
+    }
 }
[File diff suppressed because it is too large]
@@ -1,40 +1,25 @@
 package it.cavallium.dbengine.lucene.searcher;

-import org.apache.lucene.search.IndexSearcher;
+import io.net5.buffer.api.Send;
+import it.cavallium.dbengine.database.disk.LLIndexSearcher;
+import it.cavallium.dbengine.database.disk.LLIndexSearchers;
 import reactor.core.publisher.Mono;
-import reactor.core.scheduler.Scheduler;
-import reactor.core.scheduler.Schedulers;

 public class AdaptiveLuceneLocalSearcher implements LuceneLocalSearcher {

     private static final LuceneLocalSearcher localSearcher = new SimpleLuceneLocalSearcher();

-    private static final LuceneLocalSearcher unscoredPagedLuceneLocalSearcher = new LocalLuceneWrapper(new UnscoredUnsortedContinuousLuceneMultiSearcher());
-
     private static final LuceneLocalSearcher countSearcher = new CountLuceneLocalSearcher();

     @Override
-    public Mono<LuceneSearchResult> collect(IndexSearcher indexSearcher,
-            Mono<Void> releaseIndexSearcher,
+    public Mono<Send<LuceneSearchResult>> collect(Mono<Send<LLIndexSearcher>> indexSearcher,
             LocalQueryParams queryParams,
             String keyFieldName,
-            Scheduler scheduler) {
-        if (Schedulers.isInNonBlockingThread()) {
-            return releaseIndexSearcher
-                    .then(Mono.error(() -> new UnsupportedOperationException("Called collect in a nonblocking thread")));
-        }
+            LLSearchTransformer transformer) {
         if (queryParams.limit() == 0) {
-            return countSearcher.collect(indexSearcher, releaseIndexSearcher, queryParams, keyFieldName, scheduler);
-        } else if (!queryParams.isScored() && queryParams.offset() == 0 && queryParams.limit() >= 2147483630
-                && !queryParams.isSorted()) {
-            return unscoredPagedLuceneLocalSearcher.collect(indexSearcher,
-                    releaseIndexSearcher,
-                    queryParams,
-                    keyFieldName,
-                    scheduler
-            );
+            return countSearcher.collect(indexSearcher, queryParams, keyFieldName, transformer);
         } else {
-            return localSearcher.collect(indexSearcher, releaseIndexSearcher, queryParams, keyFieldName, scheduler);
+            return localSearcher.collect(indexSearcher, queryParams, keyFieldName, transformer);
         }
     }
 }
@@ -1,27 +1,33 @@
 package it.cavallium.dbengine.lucene.searcher;

+import io.net5.buffer.api.Send;
+import it.cavallium.dbengine.database.disk.LLIndexSearcher;
+import it.cavallium.dbengine.database.disk.LLIndexSearchers;
+import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;

 public class AdaptiveLuceneMultiSearcher implements LuceneMultiSearcher {

-    private static final LuceneMultiSearcher scoredLuceneMultiSearcher = new ScoredLuceneMultiSearcher();
+    private static final LuceneMultiSearcher countLuceneMultiSearcher
+            = new SimpleUnsortedUnscoredLuceneMultiSearcher(new CountLuceneLocalSearcher());

-    private static final LuceneMultiSearcher unscoredPagedLuceneMultiSearcher = new UnscoredPagedLuceneMultiSearcher();
+    private static final LuceneMultiSearcher scoredSimpleLuceneShardSearcher
+            = new ScoredSimpleLuceneShardSearcher();

-    private static final LuceneMultiSearcher unscoredIterableLuceneMultiSearcher = new UnscoredUnsortedContinuousLuceneMultiSearcher();
-    private static final LuceneMultiSearcher countLuceneMultiSearcher = new CountLuceneMultiSearcher();
+    private static final LuceneMultiSearcher unscoredPagedLuceneMultiSearcher
+            = new SimpleUnsortedUnscoredLuceneMultiSearcher(new SimpleLuceneLocalSearcher());

     @Override
-    public Mono<LuceneShardSearcher> createShardSearcher(LocalQueryParams queryParams) {
-        if (queryParams.limit() <= 0) {
-            return countLuceneMultiSearcher.createShardSearcher(queryParams);
-        } else if (queryParams.isScored()) {
-            return scoredLuceneMultiSearcher.createShardSearcher(queryParams);
-        } else if (queryParams.offset() == 0 && queryParams.limit() >= 2147483630 && !queryParams.isSorted()) {
-            return unscoredIterableLuceneMultiSearcher.createShardSearcher(queryParams);
+    public Mono<Send<LuceneSearchResult>> collectMulti(Mono<Send<LLIndexSearchers>> indexSearchersMono,
+            LocalQueryParams queryParams,
+            String keyFieldName,
+            LLSearchTransformer transformer) {
+        if (queryParams.limit() == 0) {
+            return countLuceneMultiSearcher.collectMulti(indexSearchersMono, queryParams, keyFieldName, transformer);
+        } else if (queryParams.isSorted() || queryParams.isScored()) {
+            return scoredSimpleLuceneShardSearcher.collectMulti(indexSearchersMono, queryParams, keyFieldName, transformer);
         } else {
-            return unscoredPagedLuceneMultiSearcher.createShardSearcher(queryParams);
+            return unscoredPagedLuceneMultiSearcher.collectMulti(indexSearchersMono, queryParams, keyFieldName, transformer);
         }
     }
 }
@@ -1,28 +1,32 @@
 package it.cavallium.dbengine.lucene.searcher;

-import it.cavallium.dbengine.client.query.QueryParser;
-import it.cavallium.dbengine.client.query.current.data.QueryParams;
+import io.net5.buffer.api.Send;
 import it.cavallium.dbengine.client.query.current.data.TotalHitsCount;
-import org.apache.lucene.search.IndexSearcher;
+import it.cavallium.dbengine.database.LLUtils;
+import it.cavallium.dbengine.database.disk.LLIndexSearcher;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
-import reactor.core.scheduler.Scheduler;
 import reactor.core.scheduler.Schedulers;

 public class CountLuceneLocalSearcher implements LuceneLocalSearcher {

     @Override
-    public Mono<LuceneSearchResult> collect(IndexSearcher indexSearcher, Mono<Void> releaseIndexSearcher,
-            LocalQueryParams queryParams, String keyFieldName, Scheduler scheduler) {
-        return Mono.fromCallable(() -> {
-            if (Schedulers.isInNonBlockingThread()) {
-                throw new UnsupportedOperationException("Called collect in a nonblocking thread");
-            }
-            //noinspection BlockingMethodInNonBlockingContext
-            return new LuceneSearchResult(TotalHitsCount.of(indexSearcher.count(queryParams.query()), true),
-                    Flux.empty(),
-                    releaseIndexSearcher
-            );
-        }).subscribeOn(scheduler);
+    public Mono<Send<LuceneSearchResult>> collect(Mono<Send<LLIndexSearcher>> indexSearcherMono,
+            LocalQueryParams queryParams,
+            String keyFieldName,
+            LLSearchTransformer transformer) {
+        return Mono
+                .usingWhen(
+                        indexSearcherMono,
+                        indexSearcher -> Mono.fromCallable(() -> {
+                            try (var is = indexSearcher.receive()) {
+                                LLUtils.ensureBlocking();
+                                return is.getIndexSearcher().count(queryParams.query());
+                            }
+                        }).subscribeOn(Schedulers.boundedElastic()),
+                        is -> Mono.empty()
+                )
+                .map(count -> new LuceneSearchResult(TotalHitsCount.of(count, true), Flux.empty(), drop -> {}).send())
+                .doOnDiscard(Send.class, Send::close);
     }
 }
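CountLuceneLocalSearcher now receives a Mono<Send<LLIndexSearcher>> rather than a raw searcher plus a release Mono. A reduced sketch of the ownership handoff it performs (query is an arbitrary Lucene Query; error handling is elided): the Send transfers ownership across threads, receive() claims it exactly once on the worker thread, and try-with-resources closes the claimed searcher even if counting fails.

    import io.net5.buffer.api.Send;
    import org.apache.lucene.search.Query;
    import reactor.core.publisher.Mono;
    import reactor.core.scheduler.Schedulers;

    static Mono<Integer> countOn(Mono<Send<LLIndexSearcher>> indexSearcherMono, Query query) {
        return Mono.usingWhen(
                indexSearcherMono,
                send -> Mono.fromCallable(() -> {
                    try (var searcher = send.receive()) { // claim ownership on the worker thread
                        return searcher.getIndexSearcher().count(query);
                    }
                }).subscribeOn(Schedulers.boundedElastic()), // count() is blocking I/O
                send -> Mono.empty()                         // already closed inside the callable
        );
    }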
@@ -1,55 +0,0 @@
-package it.cavallium.dbengine.lucene.searcher;
-
-import it.cavallium.dbengine.client.query.QueryParser;
-import it.cavallium.dbengine.client.query.current.data.QueryParams;
-import it.cavallium.dbengine.client.query.current.data.TotalHitsCount;
-import java.util.concurrent.ConcurrentLinkedQueue;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicLong;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.Query;
-import reactor.core.publisher.Flux;
-import reactor.core.publisher.Mono;
-import reactor.core.scheduler.Scheduler;
-import reactor.core.scheduler.Schedulers;
-
-public class CountLuceneMultiSearcher implements LuceneMultiSearcher {
-
-    @Override
-    public Mono<LuceneShardSearcher> createShardSearcher(LocalQueryParams queryParams) {
-        return Mono
-                .fromCallable(() -> {
-                    AtomicLong totalHits = new AtomicLong(0);
-                    ConcurrentLinkedQueue<Mono<Void>> release = new ConcurrentLinkedQueue<>();
-                    return new LuceneShardSearcher() {
-                        @Override
-                        public Mono<Void> searchOn(IndexSearcher indexSearcher,
-                                Mono<Void> releaseIndexSearcher,
-                                LocalQueryParams queryParams,
-                                Scheduler scheduler) {
-                            return Mono
-                                    .<Void>fromCallable(() -> {
-                                        //noinspection BlockingMethodInNonBlockingContext
-                                        totalHits.addAndGet(indexSearcher.count(queryParams.query()));
-                                        release.add(releaseIndexSearcher);
-                                        return null;
-                                    })
-                                    .subscribeOn(scheduler);
-                        }
-
-                        @Override
-                        public Mono<LuceneSearchResult> collect(LocalQueryParams queryParams, String keyFieldName, Scheduler scheduler) {
-                            return Mono.fromCallable(() -> {
-                                if (Schedulers.isInNonBlockingThread()) {
-                                    throw new UnsupportedOperationException("Called collect in a nonblocking thread");
-                                }
-                                return new LuceneSearchResult(TotalHitsCount.of(totalHits.get(), true),
-                                        Flux.empty(),
-                                        Mono.when(release)
-                                );
-                            }).subscribeOn(scheduler);
-                        }
-                    };
-                });
-    }
-}
@@ -8,17 +8,6 @@ import org.jetbrains.annotations.Nullable;

 record CurrentPageInfo(@Nullable ScoreDoc last, long remainingLimit, int pageIndex) {

-    private static final int MAX_ITEMS_PER_PAGE = 500;
-
     public static final Comparator<ScoreDoc> TIE_BREAKER = Comparator.comparingInt((d) -> d.shardIndex);
     public static final CurrentPageInfo EMPTY_STATUS = new CurrentPageInfo(null, 0, 0);

-    int currentPageLimit() {
-        if (pageIndex >= 10) { // safety
-            return MAX_ITEMS_PER_PAGE;
-        }
-        var min = Math.min(MAX_ITEMS_PER_PAGE, LuceneUtils.safeLongToInt(pageIndex * (0b1L << pageIndex)));
-        assert min > 0;
-        return min;
-    }
 }
@@ -0,0 +1,47 @@
+package it.cavallium.dbengine.lucene.searcher;
+
+import it.cavallium.dbengine.lucene.LuceneUtils;
+
+/**
+ * <pre>y = 2 ^ (x + pageIndexOffset) + firstPageLimit</pre>
+ */
+public class ExponentialPageLimits implements PageLimits {
+
+    private static final int DEFAULT_PAGE_INDEX_OFFSET = 0;
+
+    private final int pageIndexOffset;
+    private final int firstPageLimit;
+    private final int maxItemsPerPage;
+
+    public ExponentialPageLimits() {
+        this(DEFAULT_PAGE_INDEX_OFFSET);
+    }
+
+    public ExponentialPageLimits(int pageIndexOffset) {
+        this(pageIndexOffset, DEFAULT_MIN_ITEMS_PER_PAGE);
+    }
+
+    public ExponentialPageLimits(int pageIndexOffset, int firstPageLimit) {
+        this(pageIndexOffset, firstPageLimit, DEFAULT_MAX_ITEMS_PER_PAGE);
+    }
+
+    public ExponentialPageLimits(int pageIndexOffset, int firstPageLimit, int maxItemsPerPage) {
+        this.pageIndexOffset = pageIndexOffset;
+        this.firstPageLimit = firstPageLimit;
+        this.maxItemsPerPage = maxItemsPerPage;
+    }
+
+    @Override
+    public int getPageLimit(int pageIndex) {
+        var offsetedIndex = pageIndex + pageIndexOffset;
+        var power = 0b1L << offsetedIndex;
+
+        if (offsetedIndex >= 30) { // safety
+            return maxItemsPerPage;
+        }
+
+        var min = Math.max(firstPageLimit, Math.min(maxItemsPerPage, firstPageLimit + power));
+        assert min > 0;
+        return LuceneUtils.safeLongToInt(min);
+    }
+}
@@ -0,0 +1,8 @@
+package it.cavallium.dbengine.lucene.searcher;
+
+import it.cavallium.dbengine.client.query.current.data.TotalHitsCount;
+import it.cavallium.dbengine.database.LLKeyScore;
+import reactor.core.publisher.Flux;
+
+record FirstPageResults(TotalHitsCount totalHitsCount, Flux<LLKeyScore> firstPageHitsFlux,
+        CurrentPageInfo nextPageInfo) {}
@@ -1,27 +0,0 @@
-package it.cavallium.dbengine.lucene.searcher;
-
-import java.util.List;
-import org.apache.lucene.search.IndexSearcher;
-
-public interface IndexSearchers {
-
-    static IndexSearchers of(List<IndexSearcher> indexSearchers) {
-        return shardIndex -> {
-            if (shardIndex < 0) {
-                throw new IndexOutOfBoundsException("Shard index " + shardIndex + " is invalid");
-            }
-            return indexSearchers.get(shardIndex);
-        };
-    }
-
-    static IndexSearchers unsharded(IndexSearcher indexSearcher) {
-        return shardIndex -> {
-            if (shardIndex != -1) {
-                throw new IndexOutOfBoundsException("Shard index " + shardIndex + " is invalid, this is a unsharded index");
-            }
-            return indexSearcher;
-        };
-    }
-
-    IndexSearcher shard(int shardIndex);
-}
@@ -0,0 +1,17 @@
+package it.cavallium.dbengine.lucene.searcher;
+
+import it.cavallium.dbengine.database.disk.LLIndexSearcher;
+import java.util.List;
+import org.apache.lucene.index.IndexReader;
+import reactor.core.publisher.Mono;
+
+public interface LLSearchTransformer {
+
+    LLSearchTransformer NO_TRANSFORMATION = queryParamsMono -> queryParamsMono
+            .map(TransformerInput::queryParams);
+
+    record TransformerInput(List<LLIndexSearcher> indexSearchers,
+            LocalQueryParams queryParams) {}
+
+    Mono<LocalQueryParams> transform(Mono<TransformerInput> inputMono);
+}
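Since transform is the interface's only abstract method, a custom transformer can be written as a lambda, just like NO_TRANSFORMATION above. A hedged sketch (the cap value and the variable name are hypothetical): it rewrites the query parameters before the search runs, which is the hook the searchers use for things like more-like-this expansion.

    LLSearchTransformer limitCap = inputMono -> inputMono
            .map(input -> {
                var params = input.queryParams();
                int cappedLimit = Math.min(params.limit(), 1000); // hypothetical cap
                // Rebuild the record with every component unchanged except the limit.
                return new LocalQueryParams(params.query(),
                        params.offset(),
                        cappedLimit,
                        params.pageLimits(),
                        params.minCompetitiveScore(),
                        params.sort(),
                        params.scoreMode());
            });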
@@ -0,0 +1,40 @@
+package it.cavallium.dbengine.lucene.searcher;
+
+import it.cavallium.dbengine.lucene.LuceneUtils;
+
+/**
+ * <pre>y = (x * factor) + firstPageLimit</pre>
+ */
+public class LinearPageLimits implements PageLimits {
+
+    private static final double DEFAULT_FACTOR = 0.5d;
+
+    private final double factor;
+    private final double firstPageLimit;
+    private final double maxItemsPerPage;
+
+    public LinearPageLimits() {
+        this(DEFAULT_FACTOR, DEFAULT_MIN_ITEMS_PER_PAGE);
+    }
+
+    public LinearPageLimits(double factor) {
+        this(factor, DEFAULT_MIN_ITEMS_PER_PAGE);
+    }
+
+    public LinearPageLimits(double factor, int firstPageLimit) {
+        this(factor, firstPageLimit, DEFAULT_MAX_ITEMS_PER_PAGE);
+    }
+
+    public LinearPageLimits(double factor, int firstPageLimit, int maxItemsPerPage) {
+        this.factor = factor;
+        this.firstPageLimit = firstPageLimit;
+        this.maxItemsPerPage = maxItemsPerPage;
+    }
+
+    @Override
+    public int getPageLimit(int pageIndex) {
+        double min = Math.min(maxItemsPerPage, firstPageLimit + (pageIndex * factor));
+        assert min > 0d;
+        return (int) min;
+    }
+}
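Under the same hypothetical bounds as before, the linear curve grows far more slowly than the exponential one, which is why ExponentialPageLimits is the default elsewhere in this commit:

    PageLimits linear = new LinearPageLimits(0.5d, 10, 500);
    // page 0 -> 10, page 2 -> 11, page 4 -> 12, page 16 -> 18,
    // page 980 -> 500 (only then does it reach the cap)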
@@ -1,28 +0,0 @@
-package it.cavallium.dbengine.lucene.searcher;
-
-import org.apache.lucene.search.IndexSearcher;
-import reactor.core.publisher.Mono;
-import reactor.core.scheduler.Scheduler;
-
-public class LocalLuceneWrapper implements LuceneLocalSearcher {
-
-    private final LuceneMultiSearcher luceneMultiSearcher;
-
-    public LocalLuceneWrapper(LuceneMultiSearcher luceneMultiSearcher) {
-        this.luceneMultiSearcher = luceneMultiSearcher;
-    }
-
-    @Override
-    public Mono<LuceneSearchResult> collect(IndexSearcher indexSearcher,
-            Mono<Void> releaseIndexSearcher,
-            LocalQueryParams queryParams,
-            String keyFieldName,
-            Scheduler scheduler) {
-        var shardSearcher = luceneMultiSearcher.createShardSearcher(queryParams);
-        return shardSearcher
-                .flatMap(luceneShardSearcher -> luceneShardSearcher
-                        .searchOn(indexSearcher, releaseIndexSearcher, queryParams, scheduler)
-                        .then(luceneShardSearcher.collect(queryParams, keyFieldName, scheduler))
-                );
-    }
-}
@@ -6,7 +6,7 @@ import org.apache.lucene.search.Sort;
 import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;

-public record LocalQueryParams(@NotNull Query query, int offset, int limit,
+public record LocalQueryParams(@NotNull Query query, int offset, int limit, @NotNull PageLimits pageLimits,
         @Nullable Float minCompetitiveScore, @Nullable Sort sort,
         @NotNull ScoreMode scoreMode) {
@@ -1,21 +1,19 @@
 package it.cavallium.dbengine.lucene.searcher;

-import it.cavallium.dbengine.client.query.current.data.QueryParams;
-import org.apache.lucene.search.IndexSearcher;
+import io.net5.buffer.api.Send;
+import it.cavallium.dbengine.database.disk.LLIndexSearcher;
 import reactor.core.publisher.Mono;
-import reactor.core.scheduler.Scheduler;

 public interface LuceneLocalSearcher {

     /**
-     * @param indexSearcher Lucene index searcher
+     * @param indexSearcherMono Lucene index searcher
      * @param queryParams the query parameters
      * @param keyFieldName the name of the key field
-     * @param scheduler a blocking scheduler
+     * @param transformer the search query transformer
      */
-    Mono<LuceneSearchResult> collect(IndexSearcher indexSearcher,
-            Mono<Void> releaseIndexSearcher,
+    Mono<Send<LuceneSearchResult>> collect(Mono<Send<LLIndexSearcher>> indexSearcherMono,
             LocalQueryParams queryParams,
             String keyFieldName,
-            Scheduler scheduler);
+            LLSearchTransformer transformer);
 }
@ -1,28 +1,36 @@
 package it.cavallium.dbengine.lucene.searcher;
 
-import it.cavallium.dbengine.client.query.current.data.QueryParams;
-import it.cavallium.dbengine.database.LLKeyScore;
-import it.cavallium.dbengine.lucene.LuceneUtils;
-import java.util.ArrayList;
-import java.util.List;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.search.ScoreDoc;
-import org.apache.lucene.search.ScoreMode;
-import org.apache.lucene.search.Sort;
-import org.jetbrains.annotations.Nullable;
-import org.warp.commonutils.log.Logger;
-import org.warp.commonutils.log.LoggerFactory;
+import io.net5.buffer.api.Send;
+import it.cavallium.dbengine.database.disk.LLIndexSearcher;
+import it.cavallium.dbengine.database.disk.LLIndexSearchers;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
-import reactor.core.scheduler.Scheduler;
 
-public interface LuceneMultiSearcher {
+public interface LuceneMultiSearcher extends LuceneLocalSearcher {
 
 	/**
-	 * Do a lucene query, receiving the single results using a consumer
+	 * @param indexSearchersMono Lucene index searcher
 	 * @param queryParams the query parameters
+	 * @param keyFieldName the name of the key field
+	 * @param transformer the search query transformer
 	 */
-	Mono<LuceneShardSearcher> createShardSearcher(LocalQueryParams queryParams);
+	Mono<Send<LuceneSearchResult>> collectMulti(Mono<Send<LLIndexSearchers>> indexSearchersMono,
+			LocalQueryParams queryParams,
+			String keyFieldName,
+			LLSearchTransformer transformer);
+
+	/**
+	 * @param indexSearcherMono Lucene index searcher
+	 * @param queryParams the query parameters
+	 * @param keyFieldName the name of the key field
+	 * @param transformer the search query transformer
+	 */
+	@Override
+	default Mono<Send<LuceneSearchResult>> collect(Mono<Send<LLIndexSearcher>> indexSearcherMono,
+			LocalQueryParams queryParams,
+			String keyFieldName,
+			LLSearchTransformer transformer) {
+		var searchers = indexSearcherMono.map(a -> LLIndexSearchers.unsharded(a).send());
+		return this.collectMulti(searchers, queryParams, keyFieldName, transformer);
+	}
 }
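Note: the default collect above makes every LuceneMultiSearcher usable as a LuceneLocalSearcher by wrapping the single LLIndexSearcher into an unsharded LLIndexSearchers and delegating to collectMulti. A hedged call-site sketch (the variables indexSearcherMono, queryParams and transformer are hypothetical, supplied by the caller's context):

	// Hypothetical call site: a multi searcher standing in for a local one.
	LuceneLocalSearcher searcher = new ScoredSimpleLuceneShardSearcher();
	Mono<Send<LuceneSearchResult>> result =
			searcher.collect(indexSearcherMono, queryParams, "id", transformer);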
@ -1,7 +1,12 @@
 package it.cavallium.dbengine.lucene.searcher;
 
+import io.net5.buffer.api.Drop;
+import io.net5.buffer.api.Owned;
+import io.net5.buffer.api.internal.ResourceSupport;
 import it.cavallium.dbengine.client.query.current.data.TotalHitsCount;
 import it.cavallium.dbengine.database.LLKeyScore;
+import it.cavallium.dbengine.database.LLSearchResultShard;
+import it.cavallium.dbengine.database.LiveResourceSupport;
 import it.cavallium.dbengine.database.disk.LLLocalKeyValueDatabase;
 import java.io.IOException;
 import java.util.Objects;
@ -10,39 +15,33 @@ import org.warp.commonutils.log.LoggerFactory;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
 
-public final class LuceneSearchResult {
+public final class LuceneSearchResult extends LiveResourceSupport<LuceneSearchResult, LuceneSearchResult> {
 
 	private static final Logger logger = LoggerFactory.getLogger(LuceneSearchResult.class);
 
-	private volatile boolean releaseCalled;
-
-	private final TotalHitsCount totalHitsCount;
-	private final Flux<LLKeyScore> results;
-	private final Mono<Void> release;
+	private TotalHitsCount totalHitsCount;
+	private Flux<LLKeyScore> results;
 
-	public LuceneSearchResult(TotalHitsCount totalHitsCount, Flux<LLKeyScore> results, Mono<Void> release) {
+	public LuceneSearchResult(TotalHitsCount totalHitsCount, Flux<LLKeyScore> results, Drop<LuceneSearchResult> drop) {
+		super(drop);
 		this.totalHitsCount = totalHitsCount;
 		this.results = results;
-		this.release = Mono.fromRunnable(() -> {
-			if (releaseCalled) {
-				logger.warn(this.getClass().getName() + "::release has been called twice!");
-			}
-			releaseCalled = true;
-		}).then(release);
 	}
 
 	public TotalHitsCount totalHitsCount() {
+		if (!isOwned()) {
+			throw attachTrace(new IllegalStateException("LuceneSearchResult must be owned to be used"));
+		}
 		return totalHitsCount;
 	}
 
 	public Flux<LLKeyScore> results() {
+		if (!isOwned()) {
+			throw attachTrace(new IllegalStateException("LuceneSearchResult must be owned to be used"));
+		}
 		return results;
 	}
 
-	public Mono<Void> release() {
-		return release;
-	}
-
 	@Override
 	public boolean equals(Object obj) {
 		if (obj == this)
@ -63,13 +62,21 @@ public final class LuceneSearchResult {
 		return "LuceneSearchResult[" + "totalHitsCount=" + totalHitsCount + ", " + "results=" + results + ']';
 	}
 
-	@SuppressWarnings("deprecation")
 	@Override
-	protected void finalize() throws Throwable {
-		if (!releaseCalled) {
-			logger.warn(this.getClass().getName() + "::release has not been called before class finalization!");
-		}
-		super.finalize();
+	protected RuntimeException createResourceClosedException() {
+		return new IllegalStateException("Closed");
+	}
+
+	@Override
+	protected Owned<LuceneSearchResult> prepareSend() {
+		var totalHitsCount = this.totalHitsCount;
+		var results = this.results;
+		return drop -> new LuceneSearchResult(totalHitsCount, results, drop);
+	}
+
+	protected void makeInaccessible() {
+		this.totalHitsCount = null;
+		this.results = null;
 	}
 
 }
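Note: LuceneSearchResult now follows the owned-resource pattern of the io.net5 buffer API instead of a manual release() Mono: accessors refuse to work unless the instance is owned, prepareSend snapshots the fields for transfer, and the Drop callback replaces the old release logic. A minimal lifecycle sketch, assuming the usual Send/receive semantics of that API:

	// Minimal lifecycle sketch; assumes Send.receive() transfers ownership and
	// that closing the result triggers the Drop callback passed at construction.
	Send<LuceneSearchResult> sent = resultMono.block();
	try (LuceneSearchResult result = sent.receive()) {
		TotalHitsCount hits = result.totalHitsCount(); // allowed: this instance is owned
		Flux<LLKeyScore> hitsFlux = result.results();
	} // close() runs the drop, releasing the underlying index searchers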
@ -1,26 +0,0 @@
-package it.cavallium.dbengine.lucene.searcher;
-
-import it.cavallium.dbengine.client.query.current.data.QueryParams;
-import org.apache.lucene.search.IndexSearcher;
-import reactor.core.publisher.Mono;
-import reactor.core.scheduler.Scheduler;
-
-public interface LuceneShardSearcher {
-
-	/**
-	 * @param indexSearcher the index searcher, which contains all the lucene data
-	 * @param queryParams the query parameters
-	 * @param scheduler a blocking scheduler
-	 */
-	Mono<Void> searchOn(IndexSearcher indexSearcher,
-			Mono<Void> indexSearcherRelease,
-			LocalQueryParams queryParams,
-			Scheduler scheduler);
-
-	/**
-	 * @param queryParams the query parameters
-	 * @param keyFieldName the name of the key field
-	 * @param collectorScheduler a blocking scheduler
-	 */
-	Mono<LuceneSearchResult> collect(LocalQueryParams queryParams, String keyFieldName, Scheduler collectorScheduler);
-}
@ -0,0 +1,5 @@
+package it.cavallium.dbengine.lucene.searcher;
+
+import org.apache.lucene.search.TopDocs;
+
+record PageData(TopDocs topDocs, CurrentPageInfo nextPageInfo) {}
@ -0,0 +1,11 @@
+package it.cavallium.dbengine.lucene.searcher;
+
+import it.cavallium.dbengine.lucene.LuceneUtils;
+
+public interface PageLimits {
+
+	int DEFAULT_MIN_ITEMS_PER_PAGE = 10;
+	int DEFAULT_MAX_ITEMS_PER_PAGE = 250;
+
+	int getPageLimit(int pageIndex);
+}
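Note: PageLimits decouples the page-size policy from the searchers; an implementation only has to map a page index to a budget. A hypothetical alternative policy, for illustration only (DoublingPageLimits is not part of this commit):

	// Hypothetical example: double the budget each page, within the shared bounds.
	class DoublingPageLimits implements PageLimits {

		@Override
		public int getPageLimit(int pageIndex) {
			// 10, 20, 40, ... capped at DEFAULT_MAX_ITEMS_PER_PAGE; shift clamped to avoid overflow
			long limit = (long) DEFAULT_MIN_ITEMS_PER_PAGE << Math.min(pageIndex, 30);
			return (int) Math.min(limit, DEFAULT_MAX_ITEMS_PER_PAGE);
		}
	}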
@ -3,7 +3,7 @@ package it.cavallium.dbengine.lucene.searcher;
 import java.util.Comparator;
 import org.apache.lucene.search.ScoreDoc;
 
-public record PaginationInfo(long totalLimit, long firstPageOffset, long firstPageLimit, boolean forceSinglePage) {
+public record PaginationInfo(long totalLimit, long firstPageOffset, PageLimits pageLimits, boolean forceSinglePage) {
 
 	public static final int MAX_SINGLE_SEARCH_LIMIT = 256;
 	public static final int FIRST_PAGE_LIMIT = 10;
@ -1,38 +0,0 @@
-package it.cavallium.dbengine.lucene.searcher;
-
-import static it.cavallium.dbengine.lucene.searcher.PaginationInfo.FIRST_PAGE_LIMIT;
-import static it.cavallium.dbengine.lucene.searcher.PaginationInfo.MAX_SINGLE_SEARCH_LIMIT;
-
-import it.cavallium.dbengine.lucene.LuceneUtils;
-import org.apache.lucene.search.CollectorManager;
-import org.apache.lucene.search.Sort;
-import org.apache.lucene.search.TopDocs;
-import org.apache.lucene.search.TopFieldCollector;
-import org.apache.lucene.search.TopFieldDocs;
-import reactor.core.publisher.Mono;
-
-public class ScoredLuceneMultiSearcher implements LuceneMultiSearcher {
-
-	@Override
-	public Mono<LuceneShardSearcher> createShardSearcher(LocalQueryParams queryParams) {
-		return Mono
-				.fromCallable(() -> {
-					Sort luceneSort = queryParams.sort();
-					if (luceneSort == null) {
-						luceneSort = Sort.RELEVANCE;
-					}
-					PaginationInfo paginationInfo;
-					if (queryParams.limit() <= MAX_SINGLE_SEARCH_LIMIT) {
-						paginationInfo = new PaginationInfo(queryParams.limit(), queryParams.offset(), queryParams.limit(), true);
-					} else {
-						paginationInfo = new PaginationInfo(queryParams.limit(), queryParams.offset(), FIRST_PAGE_LIMIT, false);
-					}
-					CollectorManager<TopFieldCollector, TopDocs> sharedManager = new ScoringShardsCollectorManager(luceneSort,
-							LuceneUtils.safeLongToInt(paginationInfo.firstPageOffset() + paginationInfo.firstPageLimit()),
-							null, LuceneUtils.totalHitsThreshold(), LuceneUtils.safeLongToInt(paginationInfo.firstPageOffset()),
-							LuceneUtils.safeLongToInt(paginationInfo.firstPageLimit()));
-					return new ScoredSimpleLuceneShardSearcher(sharedManager, queryParams.query(), paginationInfo);
-				});
-	}
-
-}
@ -1,168 +1,200 @@
 package it.cavallium.dbengine.lucene.searcher;
 
-import static it.cavallium.dbengine.lucene.searcher.CurrentPageInfo.EMPTY_STATUS;
-import static it.cavallium.dbengine.lucene.searcher.CurrentPageInfo.TIE_BREAKER;
+import static it.cavallium.dbengine.lucene.searcher.PaginationInfo.FIRST_PAGE_LIMIT;
+import static it.cavallium.dbengine.lucene.searcher.PaginationInfo.MAX_SINGLE_SEARCH_LIMIT;
 
+import io.net5.buffer.api.Send;
 import it.cavallium.dbengine.database.LLKeyScore;
+import it.cavallium.dbengine.database.LLUtils;
+import it.cavallium.dbengine.database.disk.LLIndexSearcher;
+import it.cavallium.dbengine.database.disk.LLIndexSearchers;
+import it.cavallium.dbengine.database.disk.LLLocalGroupedReactiveRocksIterator;
 import it.cavallium.dbengine.lucene.LuceneUtils;
-import it.unimi.dsi.fastutil.objects.ObjectArrayList;
-import java.io.IOException;
-import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Objects;
-import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicReference;
-import java.util.stream.Collectors;
-import org.apache.lucene.search.CollectorManager;
 import org.apache.lucene.search.FieldDoc;
 import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.Sort;
-import org.apache.lucene.search.TopDocs;
-import org.apache.lucene.search.TopFieldCollector;
-import org.apache.lucene.search.TopFieldDocs;
+import org.warp.commonutils.log.Logger;
+import org.warp.commonutils.log.LoggerFactory;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
-import reactor.core.publisher.Sinks;
-import reactor.core.publisher.Sinks.Empty;
-import reactor.core.scheduler.Scheduler;
 import reactor.core.scheduler.Schedulers;
 
-class ScoredSimpleLuceneShardSearcher implements LuceneShardSearcher {
-
-	private final Object lock = new Object();
-	private final List<IndexSearcher> indexSearchersArray = new ArrayList<>();
-	private final List<Mono<Void>> indexSearcherReleasersArray = new ArrayList<>();
-	private final List<TopFieldCollector> collectors = new ArrayList<>();
-	private final CollectorManager<TopFieldCollector, TopDocs> firstPageSharedManager;
-	private final Query luceneQuery;
-	private final PaginationInfo paginationInfo;
-
-	public ScoredSimpleLuceneShardSearcher(CollectorManager<TopFieldCollector, TopDocs> firstPageSharedManager,
-			Query luceneQuery, PaginationInfo paginationInfo) {
-		this.firstPageSharedManager = firstPageSharedManager;
-		this.luceneQuery = luceneQuery;
-		this.paginationInfo = paginationInfo;
-	}
-
-	@Override
-	public Mono<Void> searchOn(IndexSearcher indexSearcher,
-			Mono<Void> releaseIndexSearcher,
-			LocalQueryParams queryParams,
-			Scheduler scheduler) {
-		return Mono.<Void>fromCallable(() -> {
-			if (Schedulers.isInNonBlockingThread()) {
-				throw new UnsupportedOperationException("Called searchOn in a nonblocking thread");
-			}
-			TopFieldCollector collector;
-			synchronized (lock) {
-				//noinspection BlockingMethodInNonBlockingContext
-				collector = firstPageSharedManager.newCollector();
-				indexSearchersArray.add(indexSearcher);
-				indexSearcherReleasersArray.add(releaseIndexSearcher);
-				collectors.add(collector);
-			}
-			//noinspection BlockingMethodInNonBlockingContext
-			indexSearcher.search(luceneQuery, collector);
-			return null;
-		}).subscribeOn(scheduler);
-	}
-
-	@Override
-	public Mono<LuceneSearchResult> collect(LocalQueryParams queryParams, String keyFieldName, Scheduler collectorScheduler) {
-		if (Schedulers.isInNonBlockingThread()) {
-			return Mono.error(() -> new UnsupportedOperationException("Called collect in a nonblocking thread"));
-		}
-		if (!queryParams.isScored()) {
-			return Mono.error(() -> new UnsupportedOperationException("Can't execute an unscored query"
-					+ " with a scored lucene shard searcher"));
-		}
-		return Mono
-				.fromCallable(() -> {
-					TopDocs result;
-					Mono<Void> release;
-					synchronized (lock) {
-						//noinspection BlockingMethodInNonBlockingContext
-						result = firstPageSharedManager.reduce(collectors);
-						release = Mono.when(indexSearcherReleasersArray);
-					}
-					IndexSearchers indexSearchers;
-					synchronized (lock) {
-						indexSearchers = IndexSearchers.of(indexSearchersArray);
-					}
-					Flux<LLKeyScore> firstPageHits = LuceneUtils
-							.convertHits(Flux.fromArray(result.scoreDocs), indexSearchers, keyFieldName, collectorScheduler, true);
-
-					Flux<LLKeyScore> nextHits;
-					nextHits = Flux
-							.<TopDocs, CurrentPageInfo>generate(
-									() -> new CurrentPageInfo(LuceneUtils.getLastFieldDoc(result.scoreDocs),
-											paginationInfo.totalLimit() - paginationInfo.firstPageLimit(), 1),
-									(s, emitter) -> {
-										if (Schedulers.isInNonBlockingThread()) {
-											throw new UnsupportedOperationException("Called collect in a nonblocking thread");
-										}
-
-										if (s.last() != null && s.remainingLimit() > 0) {
-											Sort luceneSort = queryParams.sort();
-											if (luceneSort == null) {
-												luceneSort = Sort.RELEVANCE;
-											}
-											CollectorManager<TopFieldCollector, TopDocs> sharedManager
-													= new ScoringShardsCollectorManager(luceneSort, s.currentPageLimit(),
-													(FieldDoc) s.last(), LuceneUtils.totalHitsThreshold(), 0, s.currentPageLimit());
-
-											try {
-												var collectors = new ObjectArrayList<TopFieldCollector>(indexSearchersArray.size());
-												for (IndexSearcher indexSearcher : indexSearchersArray) {
-													//noinspection BlockingMethodInNonBlockingContext
-													TopFieldCollector collector = sharedManager.newCollector();
-													//noinspection BlockingMethodInNonBlockingContext
-													indexSearcher.search(luceneQuery, collector);
-
-													collectors.add(collector);
-												}
-
-												//noinspection BlockingMethodInNonBlockingContext
-												var pageTopDocs = sharedManager.reduce(collectors);
-												var pageLastDoc = LuceneUtils.getLastFieldDoc(pageTopDocs.scoreDocs);
-												emitter.next(pageTopDocs);
-
-												s = new CurrentPageInfo(pageLastDoc, s.remainingLimit() - s.currentPageLimit(),
-														s.pageIndex() + 1);
-											} catch (IOException ex) {
-												emitter.error(ex);
-												s = EMPTY_STATUS;
-											}
-										} else {
-											emitter.complete();
-											s = EMPTY_STATUS;
-										}
-										return s;
-									})
-							.subscribeOn(collectorScheduler)
-							.transform(flux -> {
-								if (paginationInfo.forceSinglePage()
-										|| paginationInfo.totalLimit() - paginationInfo.firstPageLimit() <= 0) {
-									return Flux.empty();
-								} else {
-									return flux;
-								}
-							})
-							.flatMapIterable(topFieldDoc -> Arrays.asList(topFieldDoc.scoreDocs))
-							.transform(scoreDocs -> LuceneUtils.convertHits(scoreDocs,
-									indexSearchers, keyFieldName, collectorScheduler, true));
-
-					return new LuceneSearchResult(LuceneUtils.convertTotalHitsCount(result.totalHits),
-							firstPageHits
-									.concatWith(nextHits),
-							//.transform(flux -> LuceneUtils.filterTopDoc(flux, queryParams)),
-							release
-					);
-				})
-				.subscribeOn(collectorScheduler);
-	}
-}
+public class ScoredSimpleLuceneShardSearcher implements LuceneMultiSearcher {
+
+	protected static final Logger logger = LoggerFactory.getLogger(ScoredSimpleLuceneShardSearcher.class);
+
+	public ScoredSimpleLuceneShardSearcher() {
+	}
+
+	@Override
+	public Mono<Send<LuceneSearchResult>> collectMulti(Mono<Send<LLIndexSearchers>> indexSearchersMono,
+			LocalQueryParams queryParams,
+			String keyFieldName,
+			LLSearchTransformer transformer) {
+		Objects.requireNonNull(queryParams.scoreMode(), "ScoreMode must not be null");
+		PaginationInfo paginationInfo = getPaginationInfo(queryParams);
+
+		return LLUtils.usingSendResource(indexSearchersMono, indexSearchers -> this
+				// Search first page results
+				.searchFirstPage(indexSearchers.shards(), queryParams, paginationInfo)
+				// Compute the results of the first page
+				.transform(firstPageTopDocsMono -> this.computeFirstPageResults(firstPageTopDocsMono, indexSearchers,
+						keyFieldName, queryParams))
+				// Compute other results
+				.map(firstResult -> this.computeOtherResults(firstResult, indexSearchers.shards(), queryParams, keyFieldName, indexSearchers::close))
+				// Ensure that one LuceneSearchResult is always returned
+				.single(),
+				false);
+	}
+
+	private Sort getSort(LocalQueryParams queryParams) {
+		Sort luceneSort = queryParams.sort();
+		if (luceneSort == null) {
+			luceneSort = Sort.RELEVANCE;
+		}
+		return luceneSort;
+	}
+
+	/**
+	 * Get the pagination info
+	 */
+	private PaginationInfo getPaginationInfo(LocalQueryParams queryParams) {
+		if (queryParams.limit() <= MAX_SINGLE_SEARCH_LIMIT) {
+			return new PaginationInfo(queryParams.limit(), queryParams.offset(), queryParams.pageLimits(), true);
+		} else {
+			return new PaginationInfo(queryParams.limit(), queryParams.offset(), queryParams.pageLimits(), false);
+		}
+	}
+
+	/**
+	 * Search effectively the raw results of the first page
+	 */
+	private Mono<PageData> searchFirstPage(Iterable<IndexSearcher> indexSearchers,
+			LocalQueryParams queryParams,
+			PaginationInfo paginationInfo) {
+		var limit = paginationInfo.totalLimit();
+		var pageLimits = paginationInfo.pageLimits();
+		var pagination = !paginationInfo.forceSinglePage();
+		var resultsOffset = LuceneUtils.safeLongToInt(paginationInfo.firstPageOffset());
+		return Mono
+				.fromSupplier(() -> new CurrentPageInfo(null, limit, 0))
+				.flatMap(s -> this.searchPage(queryParams, indexSearchers, pagination, pageLimits, resultsOffset, s));
+	}
+
+	/**
+	 * Compute the results of the first page, extracting useful data
+	 */
+	private Mono<FirstPageResults> computeFirstPageResults(Mono<PageData> firstPageDataMono,
+			LLIndexSearchers indexSearchers,
+			String keyFieldName,
+			LocalQueryParams queryParams) {
+		return firstPageDataMono.map(firstPageData -> {
+			var totalHitsCount = LuceneUtils.convertTotalHitsCount(firstPageData.topDocs().totalHits);
+			var scoreDocs = firstPageData.topDocs().scoreDocs;
+			assert LLUtils.isSet(scoreDocs);
+
+			Flux<LLKeyScore> firstPageHitsFlux = LuceneUtils.convertHits(Flux.fromArray(scoreDocs),
+					indexSearchers.shards(), keyFieldName, true)
+					.take(queryParams.limit(), true);
+
+			CurrentPageInfo nextPageInfo = firstPageData.nextPageInfo();
+
+			return new FirstPageResults(totalHitsCount, firstPageHitsFlux, nextPageInfo);
+		});
+	}
+
+	private Send<LuceneSearchResult> computeOtherResults(FirstPageResults firstResult,
+			List<IndexSearcher> indexSearchers,
+			LocalQueryParams queryParams,
+			String keyFieldName,
+			Runnable drop) {
+		var totalHitsCount = firstResult.totalHitsCount();
+		var firstPageHitsFlux = firstResult.firstPageHitsFlux();
+		var secondPageInfo = firstResult.nextPageInfo();
+
+		Flux<LLKeyScore> nextHitsFlux = searchOtherPages(indexSearchers, queryParams, keyFieldName, secondPageInfo);
+
+		Flux<LLKeyScore> combinedFlux = firstPageHitsFlux.concatWith(nextHitsFlux);
+		return new LuceneSearchResult(totalHitsCount, combinedFlux, d -> drop.run()).send();
+	}
+
+	/**
+	 * Search effectively the merged raw results of the next pages
+	 */
+	private Flux<LLKeyScore> searchOtherPages(List<IndexSearcher> indexSearchers,
+			LocalQueryParams queryParams, String keyFieldName, CurrentPageInfo secondPageInfo) {
+		return Flux
+				.defer(() -> {
+					AtomicReference<CurrentPageInfo> currentPageInfoRef = new AtomicReference<>(secondPageInfo);
+					return Mono
+							.fromSupplier(currentPageInfoRef::get)
+							.doOnNext(s -> logger.debug("Current page info: {}", s))
+							.flatMap(currentPageInfo -> this.searchPage(queryParams, indexSearchers, true,
+									queryParams.pageLimits(), 0, currentPageInfo))
+							.doOnNext(s -> logger.debug("Next page info: {}", s.nextPageInfo()))
+							.doOnNext(s -> currentPageInfoRef.set(s.nextPageInfo()))
+							.repeatWhen(s -> s.takeWhile(n -> n > 0));
+				})
+				.subscribeOn(Schedulers.boundedElastic())
+				.map(PageData::topDocs)
+				.flatMapIterable(topDocs -> Arrays.asList(topDocs.scoreDocs))
+				.transform(topFieldDocFlux -> LuceneUtils.convertHits(topFieldDocFlux, indexSearchers,
+						keyFieldName, true));
+	}
+
+	/**
+	 *
+	 * @param resultsOffset offset of the resulting topDocs. Useful if you want to
+	 *                      skip the first n results in the first page
+	 */
+	private Mono<PageData> searchPage(LocalQueryParams queryParams,
+			Iterable<IndexSearcher> indexSearchers,
+			boolean allowPagination,
+			PageLimits pageLimits,
+			int resultsOffset,
+			CurrentPageInfo s) {
+		return Mono
+				.fromCallable(() -> {
+					LLUtils.ensureBlocking();
+					if (resultsOffset < 0) {
+						throw new IndexOutOfBoundsException(resultsOffset);
+					}
+					if ((s.pageIndex() == 0 || s.last() != null) && s.remainingLimit() > 0) {
+						var sort = getSort(queryParams);
+						var pageLimit = pageLimits.getPageLimit(s.pageIndex());
+						var after = (FieldDoc) s.last();
+						var totalHitsThreshold = LuceneUtils.totalHitsThreshold();
+						return new ScoringShardsCollectorManager(sort, pageLimit, after, totalHitsThreshold,
+								resultsOffset);
+					} else {
+						return null;
+					}
+				})
+				.flatMap(sharedManager -> Flux
+						.fromIterable(indexSearchers)
+						.flatMap(shard -> Mono.fromCallable(() -> {
+							var collector = sharedManager.newCollector();
+							shard.search(queryParams.query(), collector);
+							return collector;
+						}))
+						.collectList()
+						.flatMap(collectors -> Mono.fromCallable(() -> {
+							var pageTopDocs = sharedManager.reduce(collectors);
+							var pageLastDoc = LuceneUtils.getLastScoreDoc(pageTopDocs.scoreDocs);
+							long nextRemainingLimit;
+							if (allowPagination) {
+								nextRemainingLimit = s.remainingLimit() - pageLimits.getPageLimit(s.pageIndex());
+							} else {
+								nextRemainingLimit = 0L;
+							}
+							var nextPageIndex = s.pageIndex() + 1;
+							var nextPageInfo = new CurrentPageInfo(pageLastDoc, nextRemainingLimit, nextPageIndex);
+							return new PageData(pageTopDocs, nextPageInfo);
+						}))
+				);
+	}
+}
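Note: the rewrite above replaces the stateful shard-collecting searcher with a stateless pipeline. Paging is now driven by repeatWhen instead of Flux.generate: the cursor lives in an AtomicReference and the inner Mono is re-subscribed until an iteration emits nothing. Roughly, the driver reduces to this skeleton (names are taken from the diff, wiring simplified):

	// Hedged skeleton of the searchOtherPages driver above.
	AtomicReference<CurrentPageInfo> cursor = new AtomicReference<>(secondPageInfo);
	Flux<PageData> pages = Mono
			.fromSupplier(cursor::get)
			.flatMap(info -> searchPage(queryParams, indexSearchers, true, queryParams.pageLimits(), 0, info))
			.doOnNext(page -> cursor.set(page.nextPageInfo()))
			.repeatWhen(emitted -> emitted.takeWhile(n -> n > 0)); // stop once a cycle emits nothing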
@ -2,6 +2,7 @@ package it.cavallium.dbengine.lucene.searcher;
 
 import static it.cavallium.dbengine.lucene.searcher.CurrentPageInfo.TIE_BREAKER;
 
+import it.cavallium.dbengine.database.LLUtils;
 import it.cavallium.dbengine.lucene.LuceneUtils;
 import java.io.IOException;
 import java.util.Collection;
@ -34,6 +35,14 @@ public class ScoringShardsCollectorManager implements CollectorManager<TopFieldC
 		this(sort, numHits, after, totalHitsThreshold, (Integer) startN, (Integer) topN);
 	}
 
+	public ScoringShardsCollectorManager(final Sort sort,
+			final int numHits,
+			final FieldDoc after,
+			final int totalHitsThreshold,
+			int startN) {
+		this(sort, numHits, after, totalHitsThreshold, (Integer) startN, (Integer) 2147483630);
+	}
+
 	public ScoringShardsCollectorManager(final Sort sort,
 			final int numHits,
 			final FieldDoc after,
@ -52,7 +61,13 @@ public class ScoringShardsCollectorManager implements CollectorManager<TopFieldC
 		this.after = after;
 		this.totalHitsThreshold = totalHitsThreshold;
 		this.startN = startN;
-		this.topN = topN;
+		if (topN != null && startN != null && (long) topN + (long) startN > 2147483630) {
+			this.topN = 2147483630 - startN;
+		} else if (topN != null && topN > 2147483630) {
+			this.topN = 2147483630;
+		} else {
+			this.topN = topN;
+		}
 		this.sharedCollectorManager = TopFieldCollector.createSharedManager(sort, numHits, after, totalHitsThreshold);
 	}
 
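Note: the new constructor clamp prevents int overflow in downstream startN + topN arithmetic; 2147483630 is 17 below Integer.MAX_VALUE (2147483647). Worked example: with startN = 100 and topN = 2147483600, (long) topN + (long) startN = 2147483700 exceeds 2147483630, so topN is clamped to 2147483630 - 100 = 2147483530. The new five-argument overload simply defaults topN to the 2147483630 ceiling.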
@ -4,10 +4,16 @@ import static it.cavallium.dbengine.lucene.searcher.CurrentPageInfo.EMPTY_STATUS
 import static it.cavallium.dbengine.lucene.searcher.PaginationInfo.FIRST_PAGE_LIMIT;
 import static it.cavallium.dbengine.lucene.searcher.PaginationInfo.MAX_SINGLE_SEARCH_LIMIT;
 
+import io.net5.buffer.api.Send;
 import it.cavallium.dbengine.database.LLKeyScore;
+import it.cavallium.dbengine.database.LLUtils;
+import it.cavallium.dbengine.database.disk.LLIndexSearcher;
+import it.cavallium.dbengine.database.disk.LLIndexSearchers;
+import it.cavallium.dbengine.database.disk.LLIndexSearchers.UnshardedIndexSearchers;
 import it.cavallium.dbengine.lucene.LuceneUtils;
 import java.io.IOException;
 import java.util.Arrays;
+import java.util.List;
 import java.util.Objects;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.ScoreDoc;
@ -15,105 +21,164 @@ import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.search.TopDocsCollector;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
-import reactor.core.scheduler.Scheduler;
+import reactor.core.publisher.SynchronousSink;
 import reactor.core.scheduler.Schedulers;
 
 public class SimpleLuceneLocalSearcher implements LuceneLocalSearcher {
 
 	@Override
-	public Mono<LuceneSearchResult> collect(IndexSearcher indexSearcher,
-			Mono<Void> releaseIndexSearcher,
-			LocalQueryParams queryParams,
-			String keyFieldName,
-			Scheduler scheduler) {
-		return Mono
-				.fromCallable(() -> {
-					if (Schedulers.isInNonBlockingThread()) {
-						throw new UnsupportedOperationException("Called collect in a nonblocking thread");
-					}
-					Objects.requireNonNull(queryParams.scoreMode(), "ScoreMode must not be null");
-					PaginationInfo paginationInfo;
-					if (queryParams.limit() <= MAX_SINGLE_SEARCH_LIMIT) {
-						paginationInfo = new PaginationInfo(queryParams.limit(), queryParams.offset(), queryParams.limit(), true);
-					} else {
-						paginationInfo = new PaginationInfo(queryParams.limit(), queryParams.offset(), FIRST_PAGE_LIMIT, false);
-					}
-					TopDocs firstPageTopDocs;
-					{
-						TopDocsCollector<ScoreDoc> firstPageCollector = TopDocsSearcher.getTopDocsCollector(
-								queryParams.sort(),
-								LuceneUtils.safeLongToInt(paginationInfo.firstPageOffset() + paginationInfo.firstPageLimit()),
-								null,
-								LuceneUtils.totalHitsThreshold(),
-								!paginationInfo.forceSinglePage(),
-								queryParams.isScored());
-						//noinspection BlockingMethodInNonBlockingContext
-						indexSearcher.search(queryParams.query(), firstPageCollector);
-						firstPageTopDocs = firstPageCollector.topDocs(LuceneUtils.safeLongToInt(paginationInfo.firstPageOffset()),
-								LuceneUtils.safeLongToInt(paginationInfo.firstPageLimit())
-						);
-					}
-					Flux<LLKeyScore> firstPageMono = LuceneUtils
-							.convertHits(Flux.fromArray(firstPageTopDocs.scoreDocs), IndexSearchers.unsharded(indexSearcher),
-									keyFieldName, scheduler, true)
-							.take(queryParams.limit(), true);
-
-					Flux<LLKeyScore> nextHits;
-					if (paginationInfo.forceSinglePage() || paginationInfo.totalLimit() - paginationInfo.firstPageLimit() <= 0) {
-						nextHits = null;
-					} else {
-						nextHits = Flux.defer(() -> Flux
-								.<TopDocs, CurrentPageInfo>generate(
-										() -> new CurrentPageInfo(LuceneUtils.getLastScoreDoc(firstPageTopDocs.scoreDocs), paginationInfo.totalLimit() - paginationInfo.firstPageLimit(), 1),
-										(s, sink) -> {
-											if (Schedulers.isInNonBlockingThread()) {
-												throw new UnsupportedOperationException("Called collect in a nonblocking thread");
-											}
-											if (s.last() != null && s.remainingLimit() > 0) {
-												TopDocs pageTopDocs;
-												try {
-													TopDocsCollector<ScoreDoc> collector = TopDocsSearcher.getTopDocsCollector(queryParams.sort(),
-															s.currentPageLimit(), s.last(), LuceneUtils.totalHitsThreshold(), true,
-															queryParams.isScored());
-													//noinspection BlockingMethodInNonBlockingContext
-													indexSearcher.search(queryParams.query(), collector);
-													pageTopDocs = collector.topDocs();
-												} catch (IOException e) {
-													sink.error(e);
-													return EMPTY_STATUS;
-												}
-												var pageLastDoc = LuceneUtils.getLastScoreDoc(pageTopDocs.scoreDocs);
-												sink.next(pageTopDocs);
-												return new CurrentPageInfo(pageLastDoc, s.remainingLimit() - s.currentPageLimit(), s.pageIndex() + 1);
-											} else {
-												sink.complete();
-												return EMPTY_STATUS;
-											}
-										},
-										s -> {}
-								)
-								.subscribeOn(scheduler)
-								.flatMapIterable(topDocs -> Arrays.asList(topDocs.scoreDocs))
-								.transform(topFieldDocFlux -> LuceneUtils.convertHits(topFieldDocFlux,
-										IndexSearchers.unsharded(indexSearcher), keyFieldName, scheduler, true))
-						);
-					}
-
-					Flux<LLKeyScore> combinedFlux;
-					if (nextHits != null) {
-						combinedFlux = firstPageMono
-								.concatWith(nextHits);
-					} else {
-						combinedFlux = firstPageMono;
-					}
-
-					return new LuceneSearchResult(LuceneUtils.convertTotalHitsCount(firstPageTopDocs.totalHits), combinedFlux,
-							//.transform(flux -> LuceneUtils.filterTopDoc(flux, queryParams)),
-							releaseIndexSearcher
-					);
-				})
-				.subscribeOn(scheduler);
+	public Mono<Send<LuceneSearchResult>> collect(Mono<Send<LLIndexSearcher>> indexSearcherMono,
+			LocalQueryParams queryParams,
+			String keyFieldName,
+			LLSearchTransformer transformer) {
+
+		Objects.requireNonNull(queryParams.scoreMode(), "ScoreMode must not be null");
+		PaginationInfo paginationInfo = getPaginationInfo(queryParams);
+
+		var indexSearchersMono = indexSearcherMono.map(LLIndexSearchers::unsharded);
+
+		return LLUtils.usingResource(indexSearchersMono, indexSearchers -> this
+				// Search first page results
+				.searchFirstPage(indexSearchers.shards(), queryParams, paginationInfo)
+				// Compute the results of the first page
+				.transform(firstPageTopDocsMono -> this.computeFirstPageResults(firstPageTopDocsMono, indexSearchers.shards(),
+						keyFieldName, queryParams))
+				// Compute other results
+				.transform(firstResult -> this.computeOtherResults(firstResult, indexSearchers.shards(), queryParams,
+						keyFieldName, indexSearchers::close))
+				// Ensure that one LuceneSearchResult is always returned
+				.single(),
+				false);
+	}
+
+	/**
+	 * Get the pagination info
+	 */
+	private PaginationInfo getPaginationInfo(LocalQueryParams queryParams) {
+		if (queryParams.limit() <= MAX_SINGLE_SEARCH_LIMIT) {
+			return new PaginationInfo(queryParams.limit(), queryParams.offset(), queryParams.pageLimits(), true);
+		} else {
+			return new PaginationInfo(queryParams.limit(), queryParams.offset(), queryParams.pageLimits(), false);
+		}
+	}
+
+	/**
+	 * Search effectively the raw results of the first page
+	 */
+	private Mono<PageData> searchFirstPage(List<IndexSearcher> indexSearchers,
+			LocalQueryParams queryParams,
+			PaginationInfo paginationInfo) {
+		var limit = paginationInfo.totalLimit();
+		var pagination = !paginationInfo.forceSinglePage();
+		var resultsOffset = LuceneUtils.safeLongToInt(paginationInfo.firstPageOffset());
+		return Mono
+				.fromSupplier(() -> new CurrentPageInfo(null, limit, 0))
+				.handle((s, sink) -> this.searchPageSync(queryParams, indexSearchers, pagination, resultsOffset, s, sink));
+	}
+
+	/**
+	 * Compute the results of the first page, extracting useful data
+	 */
+	private Mono<FirstPageResults> computeFirstPageResults(Mono<PageData> firstPageDataMono,
+			List<IndexSearcher> indexSearchers,
+			String keyFieldName,
+			LocalQueryParams queryParams) {
+		return firstPageDataMono.map(firstPageData -> {
+			var totalHitsCount = LuceneUtils.convertTotalHitsCount(firstPageData.topDocs().totalHits);
+			var scoreDocs = firstPageData.topDocs().scoreDocs;
+			assert LLUtils.isSet(scoreDocs);
+
+			Flux<LLKeyScore> firstPageHitsFlux = LuceneUtils.convertHits(Flux.fromArray(scoreDocs),
+					indexSearchers, keyFieldName, true)
+					.take(queryParams.limit(), true);
+
+			CurrentPageInfo nextPageInfo = firstPageData.nextPageInfo();
+
+			return new FirstPageResults(totalHitsCount, firstPageHitsFlux, nextPageInfo);
+		});
+	}
+
+	private Mono<Send<LuceneSearchResult>> computeOtherResults(Mono<FirstPageResults> firstResultMono,
+			List<IndexSearcher> indexSearchers,
+			LocalQueryParams queryParams,
+			String keyFieldName,
+			Runnable drop) {
+		return firstResultMono.map(firstResult -> {
+			var totalHitsCount = firstResult.totalHitsCount();
+			var firstPageHitsFlux = firstResult.firstPageHitsFlux();
+			var secondPageInfo = firstResult.nextPageInfo();
+
+			Flux<LLKeyScore> nextHitsFlux = searchOtherPages(indexSearchers, queryParams, keyFieldName, secondPageInfo);
+
+			Flux<LLKeyScore> combinedFlux = firstPageHitsFlux.concatWith(nextHitsFlux);
+			return new LuceneSearchResult(totalHitsCount, combinedFlux, d -> drop.run()).send();
+		});
+	}
+
+	/**
+	 * Search effectively the merged raw results of the next pages
+	 */
+	private Flux<LLKeyScore> searchOtherPages(List<IndexSearcher> indexSearchers,
+			LocalQueryParams queryParams, String keyFieldName, CurrentPageInfo secondPageInfo) {
+		return Flux
+				.<PageData, CurrentPageInfo>generate(
+						() -> secondPageInfo,
+						(s, sink) -> searchPageSync(queryParams, indexSearchers, true, 0, s, sink),
+						s -> {}
+				)
+				.subscribeOn(Schedulers.boundedElastic())
+				.map(PageData::topDocs)
+				.flatMapIterable(topDocs -> Arrays.asList(topDocs.scoreDocs))
+				.transform(topFieldDocFlux -> LuceneUtils.convertHits(topFieldDocFlux, indexSearchers,
+						keyFieldName, true));
+	}
+
+	/**
+	 *
+	 * @param resultsOffset offset of the resulting topDocs. Useful if you want to
+	 *                      skip the first n results in the first page
+	 */
+	private CurrentPageInfo searchPageSync(LocalQueryParams queryParams,
+			List<IndexSearcher> indexSearchers,
+			boolean allowPagination,
+			int resultsOffset,
+			CurrentPageInfo s,
+			SynchronousSink<PageData> sink) {
+		LLUtils.ensureBlocking();
+		if (resultsOffset < 0) {
+			throw new IndexOutOfBoundsException(resultsOffset);
+		}
+		var currentPageLimit = queryParams.pageLimits().getPageLimit(s.pageIndex());
+		if ((s.pageIndex() == 0 || s.last() != null) && s.remainingLimit() > 0) {
+			TopDocs pageTopDocs;
+			try {
+				TopDocsCollector<ScoreDoc> collector = TopDocsSearcher.getTopDocsCollector(queryParams.sort(),
+						currentPageLimit, s.last(), LuceneUtils.totalHitsThreshold(),
+						allowPagination, queryParams.isScored());
+				indexSearchers.get(0).search(queryParams.query(), collector);
+				if (resultsOffset > 0) {
+					pageTopDocs = collector.topDocs(resultsOffset, currentPageLimit);
+				} else {
+					pageTopDocs = collector.topDocs();
+				}
+			} catch (IOException e) {
+				sink.error(e);
+				return EMPTY_STATUS;
+			}
+			var pageLastDoc = LuceneUtils.getLastScoreDoc(pageTopDocs.scoreDocs);
+			long nextRemainingLimit;
+			if (allowPagination) {
+				nextRemainingLimit = s.remainingLimit() - currentPageLimit;
+			} else {
+				nextRemainingLimit = 0L;
+			}
+			var nextPageIndex = s.pageIndex() + 1;
+			var nextPageInfo = new CurrentPageInfo(pageLastDoc, nextRemainingLimit, nextPageIndex);
+			sink.next(new PageData(pageTopDocs, nextPageInfo));
+			return nextPageInfo;
+		} else {
+			sink.complete();
+			return EMPTY_STATUS;
+		}
 	}
 }
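Note: searchPageSync is shaped so the same blocking routine can back both the Mono.handle call for the first page and Flux.generate for the following pages: it emits at most one PageData through the SynchronousSink and returns the next cursor state (only shard 0 is consulted, since this is the single-index searcher). A sketch of the Flux.generate contract it relies on:

	// Sketch of the Flux.generate usage above: one sink emission per state step.
	Flux<PageData> pages = Flux.generate(
			() -> secondPageInfo,                                   // initial cursor
			(state, sink) -> searchPageSync(queryParams, searchers, // emits 0..1 items,
					true, 0, state, sink),                          // returns the next cursor
			state -> {});                                           // no cleanup needed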
@ -0,0 +1,90 @@
+package it.cavallium.dbengine.lucene.searcher;
+
+import io.net5.buffer.api.Send;
+import it.cavallium.dbengine.client.query.current.data.TotalHitsCount;
+import it.cavallium.dbengine.database.LLKeyScore;
+import it.cavallium.dbengine.database.LLUtils;
+import it.cavallium.dbengine.database.disk.LLIndexSearcher;
+import it.cavallium.dbengine.database.disk.LLIndexSearchers;
+import java.util.ArrayList;
+import java.util.List;
+import reactor.core.publisher.Flux;
+import reactor.core.publisher.Mono;
+
+public class SimpleUnsortedUnscoredLuceneMultiSearcher implements LuceneMultiSearcher {
+
+	private final LuceneLocalSearcher localSearcher;
+
+	public SimpleUnsortedUnscoredLuceneMultiSearcher(LuceneLocalSearcher localSearcher) {
+		this.localSearcher = localSearcher;
+	}
+
+	@Override
+	public Mono<Send<LuceneSearchResult>> collectMulti(Mono<Send<LLIndexSearchers>> indexSearchersMono,
+			LocalQueryParams queryParams,
+			String keyFieldName,
+			LLSearchTransformer transformer) {
+		var indexSearchersSendResource = Mono
+				.fromRunnable(() -> {
+					LLUtils.ensureBlocking();
+					if (queryParams.isSorted() && queryParams.limit() > 0) {
+						throw new UnsupportedOperationException("Sorted queries are not supported"
+								+ " by SimpleUnsortedUnscoredLuceneMultiSearcher");
+					}
+					if (queryParams.isScored() && queryParams.limit() > 0) {
+						throw new UnsupportedOperationException("Scored queries are not supported"
+								+ " by SimpleUnsortedUnscoredLuceneMultiSearcher");
+					}
+				})
+				.then(indexSearchersMono);
+		var localQueryParams = getLocalQueryParams(queryParams);
+
+		return LLUtils.usingSendResource(indexSearchersSendResource,
+				indexSearchers -> Flux
+						.fromIterable(indexSearchers.shards())
+						.flatMap(searcher -> {
+							var llSearcher = Mono.fromCallable(() -> new LLIndexSearcher(searcher, d -> {}).send());
+							return localSearcher.collect(llSearcher, localQueryParams, keyFieldName, transformer);
+						})
+						.collectList()
+						.map(results -> {
+							List<LuceneSearchResult> resultsToDrop = new ArrayList<>(results.size());
+							List<Flux<LLKeyScore>> resultsFluxes = new ArrayList<>(results.size());
+							boolean exactTotalHitsCount = true;
+							long totalHitsCountValue = 0;
+							for (Send<LuceneSearchResult> resultToReceive : results) {
+								LuceneSearchResult result = resultToReceive.receive();
+								resultsToDrop.add(result);
+								resultsFluxes.add(result.results());
+								exactTotalHitsCount &= result.totalHitsCount().exact();
+								totalHitsCountValue += result.totalHitsCount().value();
+							}
+
+							var totalHitsCount = new TotalHitsCount(totalHitsCountValue, exactTotalHitsCount);
+							Flux<LLKeyScore> mergedFluxes = Flux
+									.merge(resultsFluxes)
+									.skip(queryParams.offset())
+									.take(queryParams.limit(), true);
+
+							return new LuceneSearchResult(totalHitsCount, mergedFluxes, d -> {
+								for (LuceneSearchResult luceneSearchResult : resultsToDrop) {
+									luceneSearchResult.close();
+								}
+								indexSearchers.close();
+							}).send();
+						}),
+				false
+		);
+	}
+
+	private LocalQueryParams getLocalQueryParams(LocalQueryParams queryParams) {
+		return new LocalQueryParams(queryParams.query(),
+				0,
+				queryParams.limit(),
+				queryParams.pageLimits(),
+				queryParams.minCompetitiveScore(),
+				queryParams.sort(),
+				queryParams.scoreMode()
+		);
+	}
+}
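Note: when SimpleUnsortedUnscoredLuceneMultiSearcher merges per-shard results, the merged TotalHitsCount is the sum of the shard values and is exact only if every shard was exact; e.g. shards reporting (1000, exact) and (2500, inexact) merge to TotalHitsCount(3500, false). The merged flux then re-applies offset and limit globally, which is why each shard is queried with offset 0 via getLocalQueryParams.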
@ -0,0 +1,23 @@
+package it.cavallium.dbengine.lucene.searcher;
+
+public class SinglePageLimits implements PageLimits {
+
+	private final int firstPageLimit;
+
+	public SinglePageLimits() {
+		this(DEFAULT_MIN_ITEMS_PER_PAGE);
+	}
+
+	public SinglePageLimits(int firstPageLimit) {
+		this.firstPageLimit = firstPageLimit;
+	}
+
+	@Override
+	public int getPageLimit(int pageIndex) {
+		if (pageIndex == 0) {
+			return firstPageLimit;
+		} else {
+			return 0;
+		}
+	}
+}
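Note: SinglePageLimits effectively disables pagination: only page 0 gets a nonzero budget, so any paging loop driven by getPageLimit stops after the first page; e.g. new SinglePageLimits(128).getPageLimit(0) == 128 while getPageLimit(1) == 0.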
@ -1,34 +0,0 @@
-package it.cavallium.dbengine.lucene.searcher;
-
-import static it.cavallium.dbengine.lucene.searcher.PaginationInfo.FIRST_PAGE_LIMIT;
-import static it.cavallium.dbengine.lucene.searcher.PaginationInfo.MAX_SINGLE_SEARCH_LIMIT;
-
-import it.cavallium.dbengine.lucene.LuceneUtils;
-import reactor.core.publisher.Mono;
-
-public class UnscoredPagedLuceneMultiSearcher implements LuceneMultiSearcher {
-
-	@Override
-	public Mono<LuceneShardSearcher> createShardSearcher(LocalQueryParams queryParams) {
-		return Mono
-				.fromCallable(() -> {
-					if (queryParams.isScored()) {
-						throw new UnsupportedOperationException("Can't use the unscored searcher to do a scored or sorted query");
-					}
-					PaginationInfo paginationInfo;
-					if (queryParams.limit() <= MAX_SINGLE_SEARCH_LIMIT) {
-						paginationInfo = new PaginationInfo(queryParams.limit(), queryParams.offset(), queryParams.limit(), true);
-					} else {
-						paginationInfo = new PaginationInfo(queryParams.limit(), queryParams.offset(), FIRST_PAGE_LIMIT, false);
-					}
-					UnscoredTopDocsCollectorManager unsortedCollectorManager = new UnscoredTopDocsCollectorManager(() -> TopDocsSearcher.getTopDocsCollector(queryParams.sort(),
-							LuceneUtils.safeLongToInt(paginationInfo.firstPageOffset() + paginationInfo.firstPageLimit()),
-							null,
-							LuceneUtils.totalHitsThreshold(),
-							!paginationInfo.forceSinglePage(),
-							queryParams.isScored()
-					), queryParams.offset(), queryParams.limit(), queryParams.sort());
-					return new UnscoredPagedLuceneShardSearcher(unsortedCollectorManager, queryParams.query(), paginationInfo);
-				});
-	}
-}
@ -1,152 +0,0 @@
|
|||||||
package it.cavallium.dbengine.lucene.searcher;
|
|
||||||
|
|
||||||
import static it.cavallium.dbengine.lucene.searcher.CurrentPageInfo.EMPTY_STATUS;
|
|
||||||
|
|
||||||
import it.cavallium.dbengine.database.LLKeyScore;
|
|
||||||
import it.cavallium.dbengine.lucene.LuceneUtils;
|
|
||||||
import it.unimi.dsi.fastutil.objects.ObjectArrayList;
|
|
||||||
import java.io.IOException;
|
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.Arrays;
|
|
||||||
import java.util.List;
|
|
||||||
import java.util.Objects;
|
|
||||||
import java.util.stream.Collectors;
|
|
||||||
import org.apache.lucene.search.CollectorManager;
|
|
||||||
import org.apache.lucene.search.IndexSearcher;
|
|
||||||
import org.apache.lucene.search.Query;
|
|
||||||
import org.apache.lucene.search.ScoreDoc;
|
|
||||||
import org.apache.lucene.search.TopDocs;
|
|
||||||
import org.apache.lucene.search.TopDocsCollector;
|
|
||||||
import reactor.core.publisher.Flux;
|
|
||||||
import reactor.core.publisher.Mono;
|
|
||||||
import reactor.core.scheduler.Scheduler;
|
|
||||||
import reactor.core.scheduler.Schedulers;
|
|
||||||
|
|
||||||
class UnscoredPagedLuceneShardSearcher implements LuceneShardSearcher {
|
|
||||||
|
|
||||||
private final Object lock = new Object();
|
|
||||||
private final List<IndexSearcher> indexSearchersArray = new ArrayList<>();
|
|
||||||
private final List<Mono<Void>> indexSearcherReleasersArray = new ArrayList<>();
|
|
||||||
private final List<TopDocsCollector<ScoreDoc>> collectors = new ArrayList<>();
|
|
||||||
private final CollectorManager<TopDocsCollector<ScoreDoc>, TopDocs> firstPageUnsortedCollectorManager;
|
|
||||||
private final Query luceneQuery;
|
|
||||||
private final PaginationInfo paginationInfo;
|
|
||||||
|
|
||||||
public UnscoredPagedLuceneShardSearcher(
|
|
||||||
CollectorManager<TopDocsCollector<ScoreDoc>, TopDocs> firstPagensortedCollectorManager,
|
|
||||||
Query luceneQuery,
|
|
||||||
PaginationInfo paginationInfo) {
|
|
||||||
		this.firstPageUnsortedCollectorManager = firstPageUnsortedCollectorManager;
		this.luceneQuery = luceneQuery;
		this.paginationInfo = paginationInfo;
	}

	@Override
	public Mono<Void> searchOn(IndexSearcher indexSearcher,
			Mono<Void> releaseIndexSearcher,
			LocalQueryParams queryParams,
			Scheduler scheduler) {
		return Mono.<Void>fromCallable(() -> {
			if (Schedulers.isInNonBlockingThread()) {
				throw new UnsupportedOperationException("Called searchOn in a nonblocking thread");
			}
			TopDocsCollector<ScoreDoc> collector;
			synchronized (lock) {
				//noinspection BlockingMethodInNonBlockingContext
				collector = firstPageUnsortedCollectorManager.newCollector();
				indexSearchersArray.add(indexSearcher);
				indexSearcherReleasersArray.add(releaseIndexSearcher);
				collectors.add(collector);
			}
			//noinspection BlockingMethodInNonBlockingContext
			indexSearcher.search(luceneQuery, collector);
			return null;
		}).subscribeOn(scheduler);
	}

	@Override
	public Mono<LuceneSearchResult> collect(LocalQueryParams queryParams, String keyFieldName, Scheduler scheduler) {
		return Mono
				.fromCallable(() -> {
					if (Schedulers.isInNonBlockingThread()) {
						throw new UnsupportedOperationException("Called collect in a nonblocking thread");
					}
					TopDocs result;
					Mono<Void> release;
					synchronized (lock) {
						//noinspection BlockingMethodInNonBlockingContext
						result = firstPageUnsortedCollectorManager.reduce(collectors);
						release = Mono.when(indexSearcherReleasersArray);
					}
					IndexSearchers indexSearchers;
					synchronized (lock) {
						indexSearchers = IndexSearchers.of(indexSearchersArray);
					}
					Flux<LLKeyScore> firstPageHits = LuceneUtils
							.convertHits(Flux.fromArray(result.scoreDocs), indexSearchers, keyFieldName, scheduler, false);

					Flux<LLKeyScore> nextHits = Flux
							.<TopDocs, CurrentPageInfo>generate(
									() -> new CurrentPageInfo(LuceneUtils.getLastScoreDoc(result.scoreDocs),
											paginationInfo.totalLimit() - paginationInfo.firstPageLimit(), 1),
									(s, sink) -> {
										if (s.last() != null && s.remainingLimit() > 0 && s.currentPageLimit() > 0) {
											Objects.requireNonNull(queryParams.scoreMode(), "ScoreMode must not be null");
											Query luceneQuery = queryParams.query();
											int perShardCollectorLimit = s.currentPageLimit() / indexSearchersArray.size();
											UnscoredTopDocsCollectorManager currentPageUnsortedCollectorManager
													= new UnscoredTopDocsCollectorManager(
															() -> TopDocsSearcher.getTopDocsCollector(queryParams.sort(), perShardCollectorLimit,
																	s.last(), LuceneUtils.totalHitsThreshold(), true, queryParams.isScored()),
															0, s.currentPageLimit(), queryParams.sort());

											try {
												var collectors = new ObjectArrayList<TopDocsCollector<ScoreDoc>>(indexSearchersArray.size());
												for (IndexSearcher indexSearcher : indexSearchersArray) {
													//noinspection BlockingMethodInNonBlockingContext
													var collector = currentPageUnsortedCollectorManager.newCollector();
													//noinspection BlockingMethodInNonBlockingContext
													indexSearcher.search(luceneQuery, collector);

													collectors.add(collector);
												}
												//noinspection BlockingMethodInNonBlockingContext
												TopDocs pageTopDocs = currentPageUnsortedCollectorManager.reduce(collectors);
												var pageLastDoc = LuceneUtils.getLastScoreDoc(pageTopDocs.scoreDocs);

												sink.next(pageTopDocs);
												return new CurrentPageInfo(pageLastDoc, s.remainingLimit() - s.currentPageLimit(),
														s.pageIndex() + 1);
											} catch (IOException ex) {
												sink.error(ex);
												return EMPTY_STATUS;
											}
										} else {
											sink.complete();
											return EMPTY_STATUS;
										}
									}
							)
							.subscribeOn(scheduler)
							.flatMapIterable(topDocs -> Arrays.asList(topDocs.scoreDocs))
							.transform(scoreDocsFlux -> LuceneUtils.convertHits(scoreDocsFlux,
									indexSearchers, keyFieldName, scheduler, false))
							.transform(flux -> {
								if (paginationInfo.forceSinglePage()
										|| paginationInfo.totalLimit() - paginationInfo.firstPageLimit() <= 0) {
									return Flux.empty();
								} else {
									return flux;
								}
							});

					return new LuceneSearchResult(LuceneUtils.convertTotalHitsCount(result.totalHits), firstPageHits
							.concatWith(nextHits),
							//.transform(flux -> LuceneUtils.filterTopDoc(flux, queryParams)),
							release
					);
				})
				.subscribeOn(scheduler);
	}

}
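The pager above drives Lucene page fetches through Reactor's Flux.generate, carrying a CurrentPageInfo state object from one page to the next. Below is a minimal sketch of the same state-machine pattern, reduced to plain integers; all names in it are illustrative, not part of this codebase.

	import java.util.List;
	import reactor.core.publisher.Flux;
	import reactor.core.publisher.SynchronousSink;

	class GeneratePagingSketch {
		public static void main(String[] args) {
			// The returned value becomes the next state; sink.next emits at most
			// one element per round; sink.complete stops the stream.
			Flux<List<Integer>> pages = Flux.generate(
					() -> 0, // initial state: first page index
					(Integer page, SynchronousSink<List<Integer>> sink) -> {
						if (page >= 3) {
							sink.complete(); // no pages left
							return page;
						}
						sink.next(List.of(page * 2, page * 2 + 1)); // fetch one page synchronously
						return page + 1; // advance the state for the next round
					});
			pages.subscribe(System.out::println);
		}
	}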
@@ -1,70 +0,0 @@
package it.cavallium.dbengine.lucene.searcher;

import static it.cavallium.dbengine.lucene.searcher.CurrentPageInfo.TIE_BREAKER;
import static it.cavallium.dbengine.lucene.searcher.PaginationInfo.ALLOW_UNSCORED_PAGINATION_MODE;

import it.cavallium.dbengine.lucene.LuceneUtils;
import java.io.IOException;
import java.util.Collection;
import java.util.function.Supplier;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.CollectorManager;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopDocsCollector;
import org.apache.lucene.search.TopFieldDocs;
import org.jetbrains.annotations.Nullable;
import reactor.core.scheduler.Schedulers;

public class UnscoredTopDocsCollectorManager implements
		CollectorManager<TopDocsCollector<ScoreDoc>, TopDocs> {

	private final Supplier<TopDocsCollector<ScoreDoc>> collectorSupplier;
	private final long offset;
	private final long limit;
	private final Sort sort;

	public UnscoredTopDocsCollectorManager(Supplier<TopDocsCollector<ScoreDoc>> collectorSupplier,
			long offset,
			long limit,
			@Nullable Sort sort) {
		this.collectorSupplier = collectorSupplier;
		this.offset = offset;
		this.limit = limit;
		this.sort = sort;
	}

	@Override
	public TopDocsCollector<ScoreDoc> newCollector() throws IOException {
		return collectorSupplier.get();
	}

	@Override
	public TopDocs reduce(Collection<TopDocsCollector<ScoreDoc>> collection) throws IOException {
		if (Schedulers.isInNonBlockingThread()) {
			throw new UnsupportedOperationException("Called reduce in a nonblocking thread");
		}
		int i = 0;
		TopDocs[] topDocsArray;
		if (sort != null) {
			topDocsArray = new TopFieldDocs[collection.size()];
		} else {
			topDocsArray = new TopDocs[collection.size()];
		}
		for (TopDocsCollector<? extends ScoreDoc> topDocsCollector : collection) {
			var topDocs = topDocsCollector.topDocs();
			for (ScoreDoc scoreDoc : topDocs.scoreDocs) {
				scoreDoc.shardIndex = i;
			}
			topDocsArray[i] = topDocs;
			i++;
		}
		return LuceneUtils.mergeTopDocs(sort,
				LuceneUtils.safeLongToInt(offset),
				LuceneUtils.safeLongToInt(limit),
				topDocsArray,
				TIE_BREAKER
		);
	}
}
@@ -1,183 +0,0 @@
package it.cavallium.dbengine.lucene.searcher;

import it.cavallium.dbengine.client.query.current.data.TotalHitsCount;
import it.cavallium.dbengine.lucene.LuceneUtils;
import it.unimi.dsi.fastutil.objects.ObjectArrayList;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.LockSupport;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.CollectorManager;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.SimpleCollector;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.publisher.Sinks;
import reactor.core.publisher.Sinks.EmitResult;
import reactor.core.publisher.Sinks.Many;
import reactor.core.scheduler.Scheduler;
import reactor.core.scheduler.Schedulers;
import reactor.util.concurrent.Queues;

public class UnscoredUnsortedContinuousLuceneMultiSearcher implements LuceneMultiSearcher {

	private static final Scheduler UNSCORED_UNSORTED_EXECUTOR = Schedulers.newBoundedElastic(Runtime
			.getRuntime()
			.availableProcessors(), Schedulers.DEFAULT_BOUNDED_ELASTIC_QUEUESIZE, "UnscoredUnsortedExecutor");

	@Override
	public Mono<LuceneShardSearcher> createShardSearcher(LocalQueryParams queryParams) {
		return Mono
				.fromCallable(() -> {
					AtomicBoolean alreadySubscribed = new AtomicBoolean(false);
					Many<ScoreDoc> scoreDocsSink = Sinks.many().unicast().onBackpressureBuffer();
					// 1 is the collect phase
					AtomicInteger remainingCollectors = new AtomicInteger(1);

					if (queryParams.isScored()) {
						throw new UnsupportedOperationException("Can't use the unscored searcher to do a scored or sorted query");
					}

					var cm = new CollectorManager<Collector, Void>() {

						class IterableCollector extends SimpleCollector {

							private int shardIndex;
							private LeafReaderContext context;

							@Override
							public void collect(int i) {
								if (Schedulers.isInNonBlockingThread()) {
									throw new UnsupportedOperationException("Called collect in a nonblocking thread");
								}
								var scoreDoc = new ScoreDoc(context.docBase + i, 0, shardIndex);
								synchronized (scoreDocsSink) {
									while (scoreDocsSink.tryEmitNext(scoreDoc) == EmitResult.FAIL_OVERFLOW) {
										LockSupport.parkNanos(10);
									}
								}
							}

							@Override
							protected void doSetNextReader(LeafReaderContext context) {
								this.context = context;
							}

							@Override
							public ScoreMode scoreMode() {
								return ScoreMode.COMPLETE_NO_SCORES;
							}

							public void setShardIndex(int shardIndex) {
								this.shardIndex = shardIndex;
							}
						}

						@Override
						public IterableCollector newCollector() {
							return new IterableCollector();
						}

						@Override
						public Void reduce(Collection<Collector> collection) {
							throw new UnsupportedOperationException();
						}
					};

					return new LuceneShardSearcher() {
						private final Object lock = new Object();
						private final List<IndexSearcher> indexSearchersArray = new ArrayList<>();
						private final List<Mono<Void>> indexSearcherReleasersArray = new ArrayList<>();

						@Override
						public Mono<Void> searchOn(IndexSearcher indexSearcher,
								Mono<Void> releaseIndexSearcher,
								LocalQueryParams queryParams,
								Scheduler scheduler) {
							return Mono
									.<Void>fromCallable(() -> {
										if (Schedulers.isInNonBlockingThread()) {
											throw new UnsupportedOperationException("Called searchOn in a nonblocking thread");
										}
										//noinspection BlockingMethodInNonBlockingContext
										var collector = cm.newCollector();
										int collectorShardIndex;
										synchronized (lock) {
											collectorShardIndex = indexSearchersArray.size();
											indexSearchersArray.add(indexSearcher);
											indexSearcherReleasersArray.add(releaseIndexSearcher);
										}
										collector.setShardIndex(collectorShardIndex);
										remainingCollectors.incrementAndGet();
										UNSCORED_UNSORTED_EXECUTOR.schedule(() -> {
											try {
												indexSearcher.search(queryParams.query(), collector);

												synchronized (scoreDocsSink) {
													decrementRemainingCollectors(scoreDocsSink, remainingCollectors);
												}
											} catch (IOException e) {
												scoreDocsSink.tryEmitError(e);
											}
										});
										return null;
									})
									.subscribeOn(scheduler);
						}

						@Override
						public Mono<LuceneSearchResult> collect(LocalQueryParams queryParams,
								String keyFieldName,
								Scheduler scheduler) {
							return Mono
									.fromCallable(() -> {
										if (Schedulers.isInNonBlockingThread()) {
											throw new UnsupportedOperationException("Called collect in a nonblocking thread");
										}
										synchronized (scoreDocsSink) {
											decrementRemainingCollectors(scoreDocsSink, remainingCollectors);
										}

										if (!alreadySubscribed.compareAndSet(false, true)) {
											throw new UnsupportedOperationException("Already subscribed!");
										}

										IndexSearchers indexSearchers;
										Mono<Void> release;
										synchronized (lock) {
											indexSearchers = IndexSearchers.of(indexSearchersArray);
											release = Mono.when(indexSearcherReleasersArray);
										}

										AtomicBoolean resultsAlreadySubscribed = new AtomicBoolean(false);

										var scoreDocsFlux = Mono.<Void>fromCallable(() -> {
											if (!resultsAlreadySubscribed.compareAndSet(false, true)) {
												throw new UnsupportedOperationException("Already subscribed!");
											}
											return null;
										}).thenMany(scoreDocsSink.asFlux());
										var resultsFlux = LuceneUtils
												.convertHits(scoreDocsFlux, indexSearchers, keyFieldName, scheduler, false);

										return new LuceneSearchResult(TotalHitsCount.of(0, false), resultsFlux, release);
									})
									.subscribeOn(scheduler);
						}
					};
				});
	}

	private static void decrementRemainingCollectors(Many<ScoreDoc> scoreDocsSink, AtomicInteger remainingCollectors) {
		if (remainingCollectors.decrementAndGet() <= 0) {
			scoreDocsSink.tryEmitComplete();
		}
	}
}
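The removed searcher above streamed hits through a Reactor sink instead of collecting them, and its emit loop parked briefly on overflow rather than dropping documents. A minimal sketch of that emit-or-park pattern, assuming a bounded unicast buffer (the queue size here is illustrative):

	import java.util.concurrent.locks.LockSupport;
	import reactor.core.publisher.Sinks;
	import reactor.util.concurrent.Queues;

	class EmitOrParkSketch {
		public static void main(String[] args) {
			// A unicast sink with a bounded buffer can report FAIL_OVERFLOW;
			// parking and retrying applies backpressure to the producing thread.
			Sinks.Many<Integer> sink = Sinks.many().unicast()
					.onBackpressureBuffer(Queues.<Integer>get(16).get());
			sink.asFlux().subscribe(System.out::println);
			for (int i = 0; i < 100; i++) {
				while (sink.tryEmitNext(i) == Sinks.EmitResult.FAIL_OVERFLOW) {
					LockSupport.parkNanos(10); // buffer full: back off briefly and retry
				}
			}
			sink.tryEmitComplete();
		}
	}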
@@ -0,0 +1,70 @@
package it.cavallium.dbengine.netty;

import io.net5.buffer.api.Buffer;
import io.net5.buffer.api.Drop;
import io.net5.buffer.api.Owned;
import io.net5.buffer.api.Send;
import io.net5.buffer.api.internal.ResourceSupport;
import it.cavallium.dbengine.client.SearchResult;
import it.cavallium.dbengine.database.LiveResourceSupport;
import org.jetbrains.annotations.Nullable;

public class NullableBuffer extends LiveResourceSupport<NullableBuffer, NullableBuffer> {

	@Nullable
	private Buffer buffer;

	public NullableBuffer(@Nullable Buffer buffer, Drop<NullableBuffer> drop) {
		super(new CloseOnDrop(drop));
		this.buffer = buffer == null ? null : buffer.send().receive();
	}

	public NullableBuffer(@Nullable Send<Buffer> buffer, Drop<NullableBuffer> drop) {
		super(new CloseOnDrop(drop));
		this.buffer = buffer == null ? null : buffer.receive();
	}

	@Nullable
	public Buffer buf() {
		return buffer;
	}

	@Nullable
	public Send<Buffer> sendBuf() {
		return buffer == null ? null : buffer.send();
	}

	@Override
	protected RuntimeException createResourceClosedException() {
		return new IllegalStateException("Closed");
	}

	@Override
	protected Owned<NullableBuffer> prepareSend() {
		var buffer = this.buffer == null ? null : this.buffer.send();
		return drop -> new NullableBuffer(buffer, drop);
	}

	protected void makeInaccessible() {
		this.buffer = null;
	}

	private static class CloseOnDrop implements Drop<NullableBuffer> {

		private final Drop<NullableBuffer> delegate;

		public CloseOnDrop(Drop<NullableBuffer> drop) {
			this.delegate = drop;
		}

		@Override
		public void drop(NullableBuffer obj) {
			if (obj.buffer != null) {
				if (obj.buffer.isAccessible()) {
					obj.buffer.close();
				}
			}
			delegate.drop(obj);
		}
	}
}
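The new NullableBuffer wraps an optional Netty 5 Buffer in the send/receive ownership protocol, so the wrapper, not the caller, owns the inner buffer. A hedged usage sketch; the allocator factory and the no-op drop callback below are assumptions for illustration, not code from this commit:

	import io.net5.buffer.api.Buffer;
	import io.net5.buffer.api.BufferAllocator;

	class NullableBufferSketch {
		static void demo() {
			try (BufferAllocator alloc = BufferAllocator.onHeapUnpooled()) {
				Buffer raw = alloc.allocate(16).writeInt(42);
				// Wrapping takes ownership: the constructor moves the buffer
				// through send()/receive(), so `raw` must not be used afterwards.
				var wrapped = new NullableBuffer(raw, d -> {});
				Buffer inner = wrapped.buf(); // may be null for the empty case
				if (inner != null) {
					System.out.println(inner.readInt());
				}
				wrapped.close(); // the drop closes the inner buffer if still accessible
			}
		}
	}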
@@ -105,13 +105,13 @@ public class CappedWriteBatch extends WriteBatch {
 		var value = valueToReceive.receive();
 		if (USE_FAST_DIRECT_BUFFERS && isDirect(key) && isDirect(value)) {
 			buffersToRelease.add(value);
-			var keyNioBuffer = LLUtils.convertToDirect(alloc, key.send());
+			var keyNioBuffer = LLUtils.convertToReadableDirect(alloc, key.send());
 			key = keyNioBuffer.buffer().receive();
 			buffersToRelease.add(key);
 			byteBuffersToRelease.add(keyNioBuffer.byteBuffer());
 			assert keyNioBuffer.byteBuffer().isDirect();
 
-			var valueNioBuffer = LLUtils.convertToDirect(alloc, value.send());
+			var valueNioBuffer = LLUtils.convertToReadableDirect(alloc, value.send());
 			value = valueNioBuffer.buffer().receive();
 			buffersToRelease.add(value);
 			byteBuffersToRelease.add(valueNioBuffer.byteBuffer());
@@ -172,7 +172,7 @@ public class CappedWriteBatch extends WriteBatch {
 	public synchronized void delete(ColumnFamilyHandle columnFamilyHandle, Send<Buffer> keyToReceive) throws RocksDBException {
 		var key = keyToReceive.receive();
 		if (USE_FAST_DIRECT_BUFFERS) {
-			var keyNioBuffer = LLUtils.convertToDirect(alloc, key.send());
+			var keyNioBuffer = LLUtils.convertToReadableDirect(alloc, key.send());
 			key = keyNioBuffer.buffer().receive();
 			buffersToRelease.add(key);
 			byteBuffersToRelease.add(keyNioBuffer.byteBuffer());
@@ -25,11 +25,14 @@ import it.cavallium.dbengine.database.serialization.Serializer;
 import it.cavallium.dbengine.database.serialization.SerializerFixedBinaryLength;
 import java.nio.file.Path;
 import java.util.Map;
+import java.util.Objects;
 import java.util.function.Function;
 import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
 import org.reactivestreams.Publisher;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
+import reactor.core.scheduler.Schedulers;
 
 public class DbTestUtils {
 
@@ -40,6 +43,38 @@ public class DbTestUtils {
 		return "0123456789".repeat(1024);
 	}
 
+	public static void run(Flux<?> publisher) {
+		publisher.subscribeOn(Schedulers.immediate()).blockLast();
+	}
+
+	public static void runVoid(Mono<Void> publisher) {
+		publisher.then().subscribeOn(Schedulers.immediate()).block();
+	}
+
+	public static <T> T run(Mono<T> publisher) {
+		return publisher.subscribeOn(Schedulers.immediate()).block();
+	}
+
+	public static <T> T run(boolean shouldFail, Mono<T> publisher) {
+		return publisher.subscribeOn(Schedulers.immediate()).transform(mono -> {
+			if (shouldFail) {
+				return mono.onErrorResume(ex -> Mono.empty());
+			} else {
+				return mono;
+			}
+		}).block();
+	}
+
+	public static void runVoid(boolean shouldFail, Mono<Void> publisher) {
+		publisher.then().subscribeOn(Schedulers.immediate()).transform(mono -> {
+			if (shouldFail) {
+				return mono.onErrorResume(ex -> Mono.empty());
+			} else {
+				return mono;
+			}
+		}).block();
+	}
+
 	public static record TestAllocator(PooledBufferAllocator allocator) {}
 
 	public static TestAllocator newAllocator() {
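These helpers trade StepVerifier pipelines for plain blocking calls in tests: `run` blocks for a value, `runVoid` blocks for completion, and the `shouldFail` variants swallow the expected error so the assertion can check the fallback state instead. A sketch of how a test body reads with them; `gen`, `allocator`, `map`, `key`, `value`, and `shouldFail` are assumed to come from the surrounding test fixtures, as in the hunks below:

	// Inside a test method (sketch, not code from this commit):
	var db = run(gen.openTempDb(allocator));          // block until the Mono yields the test DB
	runVoid(shouldFail, map.putValue(key, value));    // complete, or swallow the expected error
	var size = run(map.leavesCount(null, false));     // block and return the computed value
	Assertions.assertEquals(shouldFail ? 0 : 1, size);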
@@ -98,7 +133,7 @@ public class DbTestUtils {
 		if (!MemorySegmentUtils.isSupported()) {
 			System.err.println("Warning! Foreign Memory Access API is not available!"
 					+ " Netty direct buffers will not be used in tests!"
-					+ " Please set \"--enable-preview --add-modules jdk.incubator.foreign -Dforeign.restricted=permit\"");
+					+ " Please set \"" + MemorySegmentUtils.getSuggestedArgs() + "\"");
 			if (MemorySegmentUtils.getUnsupportedCause() != null) {
 				System.err.println("\tCause: " + MemorySegmentUtils.getUnsupportedCause().getClass().getName()
 						+ ":" + MemorySegmentUtils.getUnsupportedCause().getLocalizedMessage());
@@ -144,7 +179,8 @@ public class DbTestUtils {
 		if (mapType == MapType.MAP) {
 			return DatabaseMapDictionary.simple(dictionary,
 					SerializerFixedBinaryLength.utf8(dictionary.getAllocator(), keyBytes),
-					Serializer.utf8(dictionary.getAllocator())
+					Serializer.utf8(dictionary.getAllocator()),
+					d -> {}
 			);
 		} else {
 			return DatabaseMapDictionaryHashed.simple(dictionary,
@@ -158,7 +194,8 @@ public class DbTestUtils {
 					}
 
 					@Override
-					public @NotNull DeserializationResult<Short> deserialize(@NotNull Send<Buffer> serializedToReceive) {
+					public @NotNull DeserializationResult<Short> deserialize(@Nullable Send<Buffer> serializedToReceive) {
+						Objects.requireNonNull(serializedToReceive);
 						try (var serialized = serializedToReceive.receive()) {
 							var val = serialized.readShort();
 							return new DeserializationResult<>(val, Short.BYTES);
@@ -173,7 +210,8 @@ public class DbTestUtils {
 							return out.send();
 						}
 					}
-				}
+				},
+				d -> {}
 		);
 	}
 }
@@ -188,7 +226,8 @@ public class DbTestUtils {
 				key2Bytes,
 				new SubStageGetterMap<>(SerializerFixedBinaryLength.utf8(dictionary.getAllocator(), key2Bytes),
 						Serializer.utf8(dictionary.getAllocator())
-				)
+				),
+				d -> {}
 		);
 	}
 
@@ -203,7 +242,8 @@ public class DbTestUtils {
 				Serializer.utf8(dictionary.getAllocator()),
 				String::hashCode,
 				SerializerFixedBinaryLength.intSerializer(dictionary.getAllocator())
-		)
+		),
+		d -> {}
 		);
 	}
 
@@ -213,7 +253,8 @@ public class DbTestUtils {
 				Serializer.utf8(dictionary.getAllocator()),
 				Serializer.utf8(dictionary.getAllocator()),
 				String::hashCode,
-				SerializerFixedBinaryLength.intSerializer(dictionary.getAllocator())
+				SerializerFixedBinaryLength.intSerializer(dictionary.getAllocator()),
+				d -> {}
 		);
 	}
 }
@@ -19,15 +19,20 @@ import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.Arguments;
 import org.junit.jupiter.params.provider.MethodSource;
+import org.warp.commonutils.log.Logger;
+import org.warp.commonutils.log.LoggerFactory;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
 import reactor.test.StepVerifier;
 import reactor.test.StepVerifier.Step;
+import reactor.test.util.TestLogger;
+import reactor.util.Loggers;
 import reactor.util.function.Tuple2;
 import reactor.util.function.Tuples;
 
 public abstract class TestDictionaryMap {
 
+	private static final Logger log = LoggerFactory.getLogger(TestDictionaryMap.class);
 	private TestAllocator allocator;
 	private boolean checkLeaks = true;
 
@@ -97,21 +102,24 @@ public abstract class TestDictionaryMap {
 	@ParameterizedTest
 	@MethodSource("provideArgumentsPut")
 	public void testPut(MapType mapType, UpdateMode updateMode, String key, String value, boolean shouldFail) {
-		var stpVer = StepVerifier
-				.create(tempDb(getTempDbGenerator(), allocator, db -> tempDictionary(db, updateMode)
-						.map(dict -> tempDatabaseMapDictionaryMap(dict, mapType, 5))
-						.flatMap(map -> map
-								.putValue(key, value)
-								.then(map.getValue(null, key))
-								.doAfterTerminate(map::release)
-						)
-				));
-		if (shouldFail) {
-			this.checkLeaks = false;
-			stpVer.verifyError();
-		} else {
-			stpVer.expectNext(value).verifyComplete();
-		}
+		var gen = getTempDbGenerator();
+		var db = run(gen.openTempDb(allocator));
+		var dict = run(tempDictionary(db.db(), updateMode));
+		var map = tempDatabaseMapDictionaryMap(dict, mapType, 5);
+
+		runVoid(shouldFail, map.putValue(key, value));
+
+		var resultingMapSize = run(map.leavesCount(null, false));
+		Assertions.assertEquals(shouldFail ? 0 : 1, resultingMapSize);
+
+		var resultingMap = run(map.get(null));
+		Assertions.assertEquals(shouldFail ? null : Map.of(key, value), resultingMap);
+
+		map.close();
+
+		//if (shouldFail) this.checkLeaks = false;
+
+		gen.closeTempDb(db);
 	}
 
 	@ParameterizedTest
@@ -120,10 +128,10 @@ public abstract class TestDictionaryMap {
 		var stpVer = StepVerifier
 				.create(tempDb(getTempDbGenerator(), allocator, db -> tempDictionary(db, updateMode)
 						.map(dict -> tempDatabaseMapDictionaryMap(dict, mapType, 5))
-						.flatMap(map -> map
-								.at(null, key).flatMap(v -> v.set(value).doAfterTerminate(v::release))
-								.then(map.at(null, key).flatMap(v -> v.get(null).doAfterTerminate(v::release)))
-								.doAfterTerminate(map::release)
+						.flatMap(map -> LLUtils
+								.usingResource(map.at(null, key), v -> v.set(value), true)
+								.then(LLUtils.usingResource(map.at(null, key), v -> v.get(null), true))
+								.doFinally(s -> map.close())
 						)
 				));
 		if (shouldFail) {
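This hunk and the ones that follow replace manual release() bookkeeping with close()-based resource handling; Reactor's `Mono.using` (and the repo's `LLUtils.usingResource` wrapper, whose exact signature is assumed here) ties the release to every termination path, including error and cancel. A minimal sketch with a plain AutoCloseable:

	import reactor.core.publisher.Mono;

	class UsingSketch {
		record Res(String v) implements AutoCloseable {
			public void close() {
				System.out.println("closed"); // runs on complete, error, and cancel
			}
		}

		public static void main(String[] args) {
			Mono<String> value = Mono.using(
					() -> new Res("hello"),    // acquire the resource
					res -> Mono.just(res.v()), // derive the publisher that uses it
					Res::close);               // release when the Mono terminates
			System.out.println(value.block());
		}
	}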
@@ -146,7 +154,7 @@ public abstract class TestDictionaryMap {
 								map.putValueAndGetPrevious(key, value),
 								map.putValueAndGetPrevious(key, value)
 						)
-						.doAfterTerminate(map::release)
+						.doFinally(s -> map.close())
 				)
 		));
 		if (shouldFail) {
@@ -169,7 +177,7 @@ public abstract class TestDictionaryMap {
 								map.putValue(key, value).then(map.removeAndGetPrevious(key)),
 								map.removeAndGetPrevious(key)
 						)
-						.doAfterTerminate(map::release)
+						.doFinally(s -> map.close())
 				)
 		));
 		if (shouldFail) {
@@ -192,7 +200,7 @@ public abstract class TestDictionaryMap {
 								map.putValue(key, value).then(map.removeAndGetStatus(key)),
 								map.removeAndGetStatus(key)
 						)
-						.doAfterTerminate(map::release)
+						.doFinally(s -> map.close())
 				)
 		));
 		if (shouldFail) {
@@ -235,7 +243,7 @@ public abstract class TestDictionaryMap {
 									return value;
 								})
 						)
-						.doAfterTerminate(map::release)
+						.doFinally(s -> map.close())
 				)
 				.transform(LLUtils::handleDiscard)
 		));
@@ -257,28 +265,52 @@ public abstract class TestDictionaryMap {
 						.map(dict -> tempDatabaseMapDictionaryMap(dict, mapType, 5))
 						.flatMapMany(map -> Flux
 								.concat(
-										map.updateValue(key, old -> {
-											assert old == null;
-											return "error?";
-										}).then(map.getValue(null, key)),
-										map.updateValue(key, false, old -> {
-											assert Objects.equals(old, "error?");
-											return "error?";
-										}).then(map.getValue(null, key)),
-										map.updateValue(key, true, old -> {
-											assert Objects.equals(old, "error?");
-											return "error?";
-										}).then(map.getValue(null, key)),
-										map.updateValue(key, true, old -> {
-											assert Objects.equals(old, "error?");
-											return value;
-										}).then(map.getValue(null, key)),
-										map.updateValue(key, true, old -> {
-											assert Objects.equals(old, value);
-											return value;
-										}).then(map.getValue(null, key))
+										Mono
+												.fromRunnable(() -> log.debug("1. Updating value: {}", key))
+												.then(map.updateValue(key, old -> {
+													assert old == null;
+													return "error?";
+												}))
+												.doOnSuccess(s -> log.debug("1. Getting value: {}", key))
+												.then(map.getValue(null, key)),
+
+										Mono
+												.fromRunnable(() -> log.debug("2. Updating value: {}", key))
+												.then(map.updateValue(key, false, old -> {
+													assert Objects.equals(old, "error?");
+													return "error?";
+												}))
+												.doOnSuccess(s -> log.debug("2. Getting value: {}", key))
+												.then(map.getValue(null, key)),
+
+										Mono
+												.fromRunnable(() -> log.debug("3. Updating value: {}", key))
+												.then(map.updateValue(key, true, old -> {
+													assert Objects.equals(old, "error?");
+													return "error?";
+												}))
+												.doOnSuccess(s -> log.debug("3. Getting value: {}", key))
+												.then(map.getValue(null, key)),
+
+										Mono
+												.fromRunnable(() -> log.debug("4. Updating value: {}", key))
+												.then(map.updateValue(key, true, old -> {
+													assert Objects.equals(old, "error?");
+													return value;
+												}))
+												.doOnSuccess(s -> log.debug("4. Getting value: {}", key))
+												.then(map.getValue(null, key)),
+
+										Mono
+												.fromRunnable(() -> log.debug("5. Updating value: {}", key))
+												.then(map.updateValue(key, true, old -> {
+													assert Objects.equals(old, value);
+													return value;
+												}))
+												.doOnSuccess(s -> log.debug("5. Getting value: {}", key))
+												.then(map.getValue(null, key))
 								)
-								.doAfterTerminate(map::release)
+								.doFinally(s -> map.close())
 						)
 						.transform(LLUtils::handleDiscard)
 		));
@@ -303,7 +335,7 @@ public abstract class TestDictionaryMap {
 								map.remove(key),
 								map.putValueAndGetChanged(key, "error?").single()
 						)
-						.doAfterTerminate(map::release)
+						.doFinally(s -> map.close())
 				)
 		));
 		if (shouldFail) {
@@ -362,7 +394,7 @@ public abstract class TestDictionaryMap {
 								map.putMulti(Flux.fromIterable(entries.entrySet())).then(Mono.empty()),
 								map.getMulti(null, Flux.fromIterable(entries.keySet()))
 						)
-						.doAfterTerminate(map::release)
+						.doFinally(s -> map.close())
 				)
 				.filter(k -> k.getValue().isPresent())
 				.map(k -> Map.entry(k.getKey(), k.getValue().orElseThrow()))
@@ -390,7 +422,7 @@ public abstract class TestDictionaryMap {
 				.flatMapMany(map -> map
 						.setAllValues(Flux.fromIterable(entries.entrySet()))
 						.thenMany(map.getMulti(null, Flux.fromIterable(entries.keySet())))
-						.doAfterTerminate(map::release)
+						.doFinally(s -> map.close())
 				)
 				.filter(k -> k.getValue().isPresent())
 				.map(k -> Map.entry(k.getKey(), k.getValue().orElseThrow()))
@@ -420,7 +452,7 @@ public abstract class TestDictionaryMap {
 								map.setAllValuesAndGetPrevious(Flux.fromIterable(entries.entrySet())),
 								map.setAllValuesAndGetPrevious(Flux.fromIterable(entries.entrySet()))
 						)
-						.doAfterTerminate(map::release)
+						.doFinally(s -> map.close())
 				)
 				.transform(LLUtils::handleDiscard)
 		));
@@ -448,7 +480,7 @@ public abstract class TestDictionaryMap {
 								map.set(entries).then(Mono.empty()),
 								map.getMulti(null, Flux.fromIterable(entries.keySet()))
 						)
-						.doAfterTerminate(map::release)
+						.doFinally(s -> map.close())
 				)
 				.filter(k -> k.getValue().isPresent())
 				.map(k -> Map.entry(k.getKey(), k.getValue().orElseThrow()))
@@ -489,7 +521,7 @@ public abstract class TestDictionaryMap {
 									removalMono.then(Mono.empty()),
 									map.setAndGetChanged(entries).single()
 							)
-							.doAfterTerminate(map::release);
+							.doFinally(s -> map.close());
 				})
 		));
 		if (shouldFail) {
@@ -511,7 +543,7 @@ public abstract class TestDictionaryMap {
 						.concat(map.setAndGetPrevious(entries), map.setAndGetPrevious(entries))
 						.map(Map::entrySet)
 						.concatMapIterable(list -> list)
-						.doAfterTerminate(map::release)
+						.doFinally(s -> map.close())
 				)
 		));
 		if (shouldFail) {
@@ -537,7 +569,7 @@ public abstract class TestDictionaryMap {
 						.concat(map.set(entries).then(Mono.empty()), map.clearAndGetPrevious(), map.get(null))
 						.map(Map::entrySet)
 						.concatMapIterable(list -> list)
-						.doAfterTerminate(map::release)
+						.doFinally(s -> map.close())
 				)
 		));
 		if (shouldFail) {
@@ -564,7 +596,7 @@ public abstract class TestDictionaryMap {
 								map.putMulti(Flux.fromIterable(entries.entrySet())).then(Mono.empty()),
 								map.getAllValues(null)
 						)
-						.doAfterTerminate(map::release)
+						.doFinally(s -> map.close())
 				)
 				.transform(LLUtils::handleDiscard)
 		));
@@ -594,7 +626,7 @@ public abstract class TestDictionaryMap {
 								.map(Map::entrySet)
 								.flatMapIterable(list -> list)
 						)
-						.doAfterTerminate(map::release)
+						.doFinally(s -> map.close())
 				)
 				.transform(LLUtils::handleDiscard)
 		));
@@ -626,10 +658,10 @@ public abstract class TestDictionaryMap {
 										.getValue()
 										.get(null)
 										.map(val -> Map.entry(stage.getKey(), val))
-										.doAfterTerminate(() -> stage.getValue().release())
+										.doFinally(s -> stage.getValue().close())
 								)
 						)
-						.doAfterTerminate(map::release)
+						.doFinally(s -> map.close())
 				)
 				.transform(LLUtils::handleDiscard)
 		));
@@ -658,7 +690,7 @@ public abstract class TestDictionaryMap {
 								map.putMulti(Flux.fromIterable(entries.entrySet())).then(Mono.empty()),
 								map.isEmpty(null)
 						)
-						.doAfterTerminate(map::release)
+						.doFinally(s -> map.close())
 				)
 				.flatMap(val -> shouldFail ? Mono.empty() : Mono.just(val))
 				.transform(LLUtils::handleDiscard)
@@ -685,7 +717,7 @@ public abstract class TestDictionaryMap {
 								map.clear().then(Mono.empty()),
 								map.isEmpty(null)
 						)
-						.doAfterTerminate(map::release)
+						.doFinally(s -> map.close())
 				)
 				.flatMap(val -> shouldFail ? Mono.empty() : Mono.just(val))
 				.transform(LLUtils::handleDiscard)
@@ -5,10 +5,14 @@ import static it.cavallium.dbengine.DbTestUtils.ensureNoLeaks;
 import static it.cavallium.dbengine.DbTestUtils.isCIMode;
 import static it.cavallium.dbengine.DbTestUtils.newAllocator;
 import static it.cavallium.dbengine.DbTestUtils.destroyAllocator;
+import static it.cavallium.dbengine.DbTestUtils.run;
+import static it.cavallium.dbengine.DbTestUtils.runVoid;
 import static it.cavallium.dbengine.DbTestUtils.tempDatabaseMapDictionaryDeepMap;
+import static it.cavallium.dbengine.DbTestUtils.tempDatabaseMapDictionaryMap;
 import static it.cavallium.dbengine.DbTestUtils.tempDb;
 import static it.cavallium.dbengine.DbTestUtils.tempDictionary;
 
+import io.net5.buffer.api.internal.ResourceSupport;
 import it.cavallium.dbengine.DbTestUtils.TestAllocator;
 import it.cavallium.dbengine.database.LLUtils;
 import it.cavallium.dbengine.database.UpdateMode;
@@ -23,12 +27,15 @@ import java.util.concurrent.ConcurrentHashMap;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.MethodOrderer;
 import org.junit.jupiter.api.TestMethodOrder;
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.Arguments;
 import org.junit.jupiter.params.provider.MethodSource;
+import org.warp.commonutils.log.Logger;
+import org.warp.commonutils.log.LoggerFactory;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
 import reactor.test.StepVerifier;
@@ -42,6 +49,7 @@ import reactor.util.function.Tuples;
 @TestMethodOrder(MethodOrderer.MethodName.class)
 public abstract class TestDictionaryMapDeep {
 
+	private final Logger log = LoggerFactory.getLogger(this.getClass());
 	private TestAllocator allocator;
 	private boolean checkLeaks = true;
 
@@ -174,22 +182,49 @@ public abstract class TestDictionaryMapDeep {
 
 	@ParameterizedTest
 	@MethodSource("provideArgumentsSet")
-	public void testSetValueGetValue(UpdateMode updateMode, String key, Map<String, String> value, boolean shouldFail) {
-		var stpVer = StepVerifier
-				.create(tempDb(getTempDbGenerator(), allocator, db -> tempDictionary(db, updateMode)
-						.map(dict -> tempDatabaseMapDictionaryDeepMap(dict, 5, 6))
-						.flatMap(map -> map
-								.putValue(key, value)
-								.then(map.getValue(null, key))
-								.doAfterTerminate(map::release)
-						)
-				));
-		if (shouldFail) {
-			this.checkLeaks = false;
-			stpVer.verifyError();
-		} else {
-			stpVer.expectNext(value).verifyComplete();
-		}
+	public void testPutValue(UpdateMode updateMode, String key, Map<String, String> value, boolean shouldFail) {
+		var gen = getTempDbGenerator();
+		var db = run(gen.openTempDb(allocator));
+		var dict = run(tempDictionary(db.db(), updateMode));
+		var map = tempDatabaseMapDictionaryDeepMap(dict, 5, 6);
+
+		log.debug("Put \"{}\" = \"{}\"", key, value);
+		runVoid(shouldFail, map.putValue(key, value));
+
+		var resultingMapSize = run(map.leavesCount(null, false));
+		Assertions.assertEquals(shouldFail ? 0 : value.size(), resultingMapSize);
+
+		var resultingMap = run(map.get(null));
+		Assertions.assertEquals(shouldFail ? null : Map.of(key, value), resultingMap);
+
+		map.close();
+
+		//if (shouldFail) this.checkLeaks = false;
+
+		gen.closeTempDb(db);
+	}
+
+	@ParameterizedTest
+	@MethodSource("provideArgumentsSet")
+	public void testGetValue(UpdateMode updateMode, String key, Map<String, String> value, boolean shouldFail) {
+		var gen = getTempDbGenerator();
+		var db = run(gen.openTempDb(allocator));
+		var dict = run(tempDictionary(db.db(), updateMode));
+		var map = tempDatabaseMapDictionaryDeepMap(dict, 5, 6);
+
+		log.debug("Put \"{}\" = \"{}\"", key, value);
+		runVoid(shouldFail, map.putValue(key, value));
+
+		log.debug("Get \"{}\"", key);
+		var returnedValue = run(shouldFail, map.getValue(null, key));
+
+		Assertions.assertEquals(shouldFail ? null : value, returnedValue);
+
+		map.close();
+
+		//if (shouldFail) this.checkLeaks = false;
+
+		gen.closeTempDb(db);
 	}
 
 	@ParameterizedTest
@@ -204,7 +239,7 @@ public abstract class TestDictionaryMapDeep {
 				.flatMapMany(map -> map
 						.putValue(key, value)
 						.thenMany(map.getAllValues(null))
-						.doAfterTerminate(map::release)
+						.doFinally(s -> map.close())
 				)
 		));
 		if (shouldFail) {
@@ -229,14 +264,14 @@ public abstract class TestDictionaryMapDeep {
 						.flatMap(v_ -> Mono.using(
 								() -> v_,
 								v -> v.set(value),
-								DatabaseMapDictionaryDeep::release
+								ResourceSupport::close
 						))
 						.then(map
 								.at(null, "capra")
 								.flatMap(v_ -> Mono.using(
 										() -> v_,
 										v -> v.set(Map.of("normal", "123", "ormaln", "456")),
-										DatabaseMapDictionaryDeep::release
+										ResourceSupport::close
 								))
 						)
 						.thenMany(map
@@ -244,10 +279,10 @@ public abstract class TestDictionaryMapDeep {
 								.flatMap(v -> v.getValue()
 										.getAllValues(null)
 										.map(result -> Tuples.of(v.getKey(), result.getKey(), result.getValue()))
-										.doAfterTerminate(() -> v.getValue().release())
+										.doFinally(s -> v.getValue().close())
 								)
 						),
-						DatabaseMapDictionaryDeep::release
+						ResourceSupport::close
 				))
 		));
 		if (shouldFail) {
@@ -272,9 +307,9 @@ public abstract class TestDictionaryMapDeep {
 				.create(tempDb(getTempDbGenerator(), allocator, db -> tempDictionary(db, updateMode)
 						.map(dict -> tempDatabaseMapDictionaryDeepMap(dict, 5, 6))
 						.flatMap(map -> map
-								.at(null, key1).flatMap(v -> v.putValue(key2, value).doAfterTerminate(v::release))
-								.then(map.at(null, key1).flatMap(v -> v.getValue(null, key2).doAfterTerminate(v::release)))
-								.doAfterTerminate(map::release)
+								.at(null, key1).flatMap(v -> v.putValue(key2, value).doFinally(s -> v.close()))
+								.then(map.at(null, key1).flatMap(v -> v.getValue(null, key2).doFinally(s -> v.close())))
+								.doFinally(s -> map.close())
 						)
 				));
 		if (shouldFail) {
@@ -299,7 +334,7 @@ public abstract class TestDictionaryMapDeep {
 								map.putValueAndGetPrevious(key, value),
 								map.putValueAndGetPrevious(key, value)
 						)
-						.doAfterTerminate(map::release)
+						.doFinally(s -> map.close())
 				)
 		));
 		if (shouldFail) {
@@ -322,22 +357,22 @@ public abstract class TestDictionaryMapDeep {
 								.at(null, key1)
 								.flatMap(v -> v
 										.putValueAndGetPrevious(key2, "error?")
-										.doAfterTerminate(v::release)
+										.doFinally(s -> v.close())
 								),
 						map
 								.at(null, key1)
 								.flatMap(v -> v
 										.putValueAndGetPrevious(key2, value)
-										.doAfterTerminate(v::release)
+										.doFinally(s -> v.close())
 								),
 						map
 								.at(null, key1)
 								.flatMap(v -> v
 										.putValueAndGetPrevious(key2, value)
-										.doAfterTerminate(v::release)
+										.doFinally(s -> v.close())
 								)
 						)
-						.doAfterTerminate(map::release)
+						.doFinally(s -> map.close())
 				)
 		));
 		if (shouldFail) {
@@ -360,7 +395,7 @@ public abstract class TestDictionaryMapDeep {
 								map.putValue(key, value).then(map.removeAndGetPrevious(key)),
 								map.removeAndGetPrevious(key)
 						)
-						.doAfterTerminate(map::release)
+						.doFinally(s -> map.close())
 				)
 		));
 		if (shouldFail) {
@@ -384,22 +419,22 @@ public abstract class TestDictionaryMapDeep {
 								.flatMap(v -> v
 										.putValue(key2, "error?")
 										.then(v.removeAndGetPrevious(key2))
-										.doAfterTerminate(v::release)
+										.doFinally(s -> v.close())
 								),
 						map
 								.at(null, key1)
 								.flatMap(v -> v
 										.putValue(key2, value)
 										.then(v.removeAndGetPrevious(key2))
-										.doAfterTerminate(v::release)
+										.doFinally(s -> v.close())
 								),
 						map
 								.at(null, key1)
 								.flatMap(v -> v.removeAndGetPrevious(key2)
-										.doAfterTerminate(v::release)
+										.doFinally(s -> v.close())
 								)
 						)
-						.doAfterTerminate(map::release)
+						.doFinally(s -> map.close())
 				)
 		));
 		if (shouldFail) {
@@ -422,7 +457,7 @@ public abstract class TestDictionaryMapDeep {
 								map.putValue(key, value).then(map.removeAndGetStatus(key)),
 								map.removeAndGetStatus(key)
 						)
-						.doAfterTerminate(map::release)
+						.doFinally(s -> map.close())
 				)
 		));
 		if (shouldFail) {
@@ -446,22 +481,22 @@ public abstract class TestDictionaryMapDeep {
 								.flatMap(v -> v
 										.putValue(key2, "error?")
 										.then(v.removeAndGetStatus(key2))
-										.doAfterTerminate(v::release)
+										.doFinally(s -> v.close())
 								),
 						map
 								.at(null, key1)
 								.flatMap(v -> v
 										.putValue(key2, value)
 										.then(v.removeAndGetStatus(key2))
-										.doAfterTerminate(v::release)
+										.doFinally(s -> v.close())
 								),
 						map
 								.at(null, key1)
 								.flatMap(v -> v.removeAndGetStatus(key2)
-										.doAfterTerminate(v::release)
+										.doFinally(s -> v.close())
 								)
 						)
-						.doAfterTerminate(map::release)
+						.doFinally(s -> map.close())
 				)
 		));
 		if (shouldFail) {
@@ -504,7 +539,7 @@ public abstract class TestDictionaryMapDeep {
 									return value;
 								})
 						)
-						.doAfterTerminate(map::release)
+						.doFinally(s -> map.close())
 				)
 		));
 		if (updateMode != UpdateMode.ALLOW_UNSAFE || shouldFail) {
@@ -529,28 +564,28 @@ public abstract class TestDictionaryMapDeep {
 								.at(null, key1)
 								.flatMap(v -> v
 										.updateValue(key2, prev -> prev)
-										.doAfterTerminate(v::release)
+										.doFinally(s -> v.close())
 								),
 						map
 								.at(null, key1)
 								.flatMap(v -> v
 										.updateValue(key2, prev -> value)
-										.doAfterTerminate(v::release)
+										.doFinally(s -> v.close())
 								),
 						map
 								.at(null, key1)
 								.flatMap(v -> v
 										.updateValue(key2, prev -> value)
-										.doAfterTerminate(v::release)
+										.doFinally(s -> v.close())
 								),
 						map
 								.at(null, key1)
 								.flatMap(v -> v
 										.updateValue(key2, prev -> null)
-										.doAfterTerminate(v::release)
+										.doFinally(s -> v.close())
 								)
 						)
-						.doAfterTerminate(map::release)
+						.doFinally(s -> map.close())
 						.transform(LLUtils::handleDiscard)
 				)
 		));
@@ -590,7 +625,7 @@ public abstract class TestDictionaryMapDeep {
 							assert Objects.equals(old, value);
 							return value;
 						}).then(map.getValue(null, key))
-				).doAfterTerminate(map::release))
+				).doFinally(s -> map.close()))
 		));
 		if (updateMode != UpdateMode.ALLOW_UNSAFE || shouldFail) {
 			stpVer.verifyError();
@@ -616,7 +651,7 @@ public abstract class TestDictionaryMapDeep {
 										.updateValue(key2, prev -> prev)
 										.then(v.getValue(null, key2))
 										.defaultIfEmpty("empty")
-										.doAfterTerminate(v::release)
+										.doFinally(s -> v.close())
 								),
 						map
 								.at(null, key1)
@@ -624,7 +659,7 @@ public abstract class TestDictionaryMapDeep {
 										.updateValue(key2, prev -> value)
 										.then(v.getValue(null, key2))
 										.defaultIfEmpty("empty")
-										.doAfterTerminate(v::release)
+										.doFinally(s -> v.close())
 								),
 						map
 								.at(null, key1)
@@ -632,7 +667,7 @@ public abstract class TestDictionaryMapDeep {
 										.updateValue(key2, prev -> value)
 										.then(v.getValue(null, key2))
 										.defaultIfEmpty("empty")
-										.doAfterTerminate(v::release)
+										.doFinally(s -> v.close())
 								),
 						map
 								.at(null, key1)
@@ -640,10 +675,10 @@ public abstract class TestDictionaryMapDeep {
 										.updateValue(key2, prev -> null)
 										.then(v.getValue(null, key2))
 										.defaultIfEmpty("empty")
-										.doAfterTerminate(v::release)
+										.doFinally(s -> v.close())
 								)
 						)
-						.doAfterTerminate(map::release)
+						.doFinally(s -> map.close())
 						.transform(LLUtils::handleDiscard)
 				)
 		));
@@ -668,7 +703,7 @@ public abstract class TestDictionaryMapDeep {
 								map.remove(key),
 								map.putValueAndGetChanged(key, Map.of("error?", "error.")).single()
 						)
-						.doAfterTerminate(map::release)
+						.doFinally(s -> map.close())
 				)
 		));
 		if (shouldFail) {
@ -722,7 +757,7 @@ public abstract class TestDictionaryMapDeep {
|
|||||||
map.putMulti(Flux.fromIterable(entries.entrySet())).then(Mono.empty()),
|
map.putMulti(Flux.fromIterable(entries.entrySet())).then(Mono.empty()),
|
||||||
map.getMulti(null, Flux.fromIterable(entries.keySet()))
|
map.getMulti(null, Flux.fromIterable(entries.keySet()))
|
||||||
)
|
)
|
||||||
.doAfterTerminate(map::release)
|
.doFinally(s -> map.close())
|
||||||
)
|
)
|
||||||
.filter(k -> k.getValue().isPresent())
|
.filter(k -> k.getValue().isPresent())
|
||||||
.map(k -> Map.entry(k.getKey(), k.getValue().orElseThrow()))
|
.map(k -> Map.entry(k.getKey(), k.getValue().orElseThrow()))
|
||||||
@ -750,7 +785,7 @@ public abstract class TestDictionaryMapDeep {
|
|||||||
.flatMapMany(map -> map
|
.flatMapMany(map -> map
|
||||||
.setAllValues(Flux.fromIterable(entries.entrySet()))
|
.setAllValues(Flux.fromIterable(entries.entrySet()))
|
||||||
.thenMany(map.getMulti(null, Flux.fromIterable(entries.keySet())))
|
.thenMany(map.getMulti(null, Flux.fromIterable(entries.keySet())))
|
||||||
.doAfterTerminate(map::release)
|
.doFinally(s -> map.close())
|
||||||
)
|
)
|
||||||
.filter(k -> k.getValue().isPresent())
|
.filter(k -> k.getValue().isPresent())
|
||||||
.map(k -> Map.entry(k.getKey(), k.getValue().orElseThrow()))
|
.map(k -> Map.entry(k.getKey(), k.getValue().orElseThrow()))
|
||||||
@ -779,7 +814,7 @@ public abstract class TestDictionaryMapDeep {
|
|||||||
map.setAllValuesAndGetPrevious(Flux.fromIterable(entries.entrySet())),
|
map.setAllValuesAndGetPrevious(Flux.fromIterable(entries.entrySet())),
|
||||||
map.setAllValuesAndGetPrevious(Flux.fromIterable(entries.entrySet()))
|
map.setAllValuesAndGetPrevious(Flux.fromIterable(entries.entrySet()))
|
||||||
)
|
)
|
||||||
.doAfterTerminate(map::release)
|
.doFinally(s -> map.close())
|
||||||
.transform(LLUtils::handleDiscard)
|
.transform(LLUtils::handleDiscard)
|
||||||
)
|
)
|
||||||
));
|
));
|
||||||
@ -807,7 +842,7 @@ public abstract class TestDictionaryMapDeep {
|
|||||||
map.set(entries).then(Mono.empty()),
|
map.set(entries).then(Mono.empty()),
|
||||||
map.getMulti(null, Flux.fromIterable(entries.keySet()))
|
map.getMulti(null, Flux.fromIterable(entries.keySet()))
|
||||||
)
|
)
|
||||||
.doAfterTerminate(map::release)
|
.doFinally(s -> map.close())
|
||||||
)
|
)
|
||||||
.filter(k -> k.getValue().isPresent())
|
.filter(k -> k.getValue().isPresent())
|
||||||
.map(k -> Map.entry(k.getKey(), k.getValue().orElseThrow()))
|
.map(k -> Map.entry(k.getKey(), k.getValue().orElseThrow()))
|
||||||
@ -845,7 +880,7 @@ public abstract class TestDictionaryMapDeep {
|
|||||||
removalMono.then(Mono.empty()),
|
removalMono.then(Mono.empty()),
|
||||||
map.setAndGetChanged(entries).single()
|
map.setAndGetChanged(entries).single()
|
||||||
)
|
)
|
||||||
.doAfterTerminate(map::release);
|
.doFinally(s -> map.close());
|
||||||
})
|
})
|
||||||
.transform(LLUtils::handleDiscard)
|
.transform(LLUtils::handleDiscard)
|
||||||
));
|
));
|
||||||
@ -871,7 +906,7 @@ public abstract class TestDictionaryMapDeep {
|
|||||||
)
|
)
|
||||||
.map(Map::entrySet)
|
.map(Map::entrySet)
|
||||||
.concatMapIterable(list -> list)
|
.concatMapIterable(list -> list)
|
||||||
.doAfterTerminate(map::release)
|
.doFinally(s -> map.close())
|
||||||
)
|
)
|
||||||
));
|
));
|
||||||
if (shouldFail) {
|
if (shouldFail) {
|
||||||
@ -897,7 +932,7 @@ public abstract class TestDictionaryMapDeep {
|
|||||||
.concat(map.set(entries).then(Mono.empty()), map.clearAndGetPrevious(), map.get(null))
|
.concat(map.set(entries).then(Mono.empty()), map.clearAndGetPrevious(), map.get(null))
|
||||||
.map(Map::entrySet)
|
.map(Map::entrySet)
|
||||||
.concatMapIterable(list -> list)
|
.concatMapIterable(list -> list)
|
||||||
.doAfterTerminate(map::release)
|
.doFinally(s -> map.close())
|
||||||
)
|
)
|
||||||
.transform(LLUtils::handleDiscard)
|
.transform(LLUtils::handleDiscard)
|
||||||
));
|
));
|
||||||
@ -925,7 +960,7 @@ public abstract class TestDictionaryMapDeep {
|
|||||||
map.putMulti(Flux.fromIterable(entries.entrySet())).then(Mono.empty()),
|
map.putMulti(Flux.fromIterable(entries.entrySet())).then(Mono.empty()),
|
||||||
map.getAllValues(null)
|
map.getAllValues(null)
|
||||||
)
|
)
|
||||||
.doAfterTerminate(map::release)
|
.doFinally(s -> map.close())
|
||||||
)
|
)
|
||||||
));
|
));
|
||||||
if (shouldFail) {
|
if (shouldFail) {
|
||||||
@ -954,7 +989,7 @@ public abstract class TestDictionaryMapDeep {
|
|||||||
.map(Map::entrySet)
|
.map(Map::entrySet)
|
||||||
.flatMapIterable(list -> list)
|
.flatMapIterable(list -> list)
|
||||||
)
|
)
|
||||||
.doAfterTerminate(map::release)
|
.doFinally(s -> map.close())
|
||||||
)
|
)
|
||||||
));
|
));
|
||||||
if (shouldFail) {
|
if (shouldFail) {
|
||||||
@ -985,10 +1020,10 @@ public abstract class TestDictionaryMapDeep {
|
|||||||
.getValue()
|
.getValue()
|
||||||
.get(null)
|
.get(null)
|
||||||
.map(val -> Map.entry(stage.getKey(), val))
|
.map(val -> Map.entry(stage.getKey(), val))
|
||||||
.doAfterTerminate(() -> stage.getValue().release())
|
.doFinally(s -> stage.getValue().close())
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
.doAfterTerminate(map::release)
|
.doFinally(s -> map.close())
|
||||||
)
|
)
|
||||||
));
|
));
|
||||||
if (shouldFail) {
|
if (shouldFail) {
|
||||||
@ -1015,7 +1050,7 @@ public abstract class TestDictionaryMapDeep {
|
|||||||
map.putMulti(Flux.fromIterable(entries.entrySet())).then(Mono.empty()),
|
map.putMulti(Flux.fromIterable(entries.entrySet())).then(Mono.empty()),
|
||||||
map.isEmpty(null)
|
map.isEmpty(null)
|
||||||
)
|
)
|
||||||
.doAfterTerminate(map::release)
|
.doFinally(s -> map.close())
|
||||||
)
|
)
|
||||||
.transform(LLUtils::handleDiscard)
|
.transform(LLUtils::handleDiscard)
|
||||||
));
|
));
|
||||||
@ -1041,7 +1076,7 @@ public abstract class TestDictionaryMapDeep {
|
|||||||
map.clear().then(Mono.empty()),
|
map.clear().then(Mono.empty()),
|
||||||
map.isEmpty(null)
|
map.isEmpty(null)
|
||||||
)
|
)
|
||||||
.doAfterTerminate(map::release)
|
.doFinally(s -> map.close())
|
||||||
)
|
)
|
||||||
));
|
));
|
||||||
if (shouldFail) {
|
if (shouldFail) {
|
||||||
|
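The recurring change above swaps doAfterTerminate(x::release) for doFinally(s -> x.close()). doAfterTerminate only runs after onComplete or onError, so a cancelled subscription used to skip the cleanup entirely; doFinally also fires on cancellation, and close() is the Netty 5 buffer-lifecycle call that replaces reference-counted release(). The .transform(LLUtils::handleDiscard) lines appear to serve the companion purpose of closing values Reactor discards mid-pipeline. A minimal, self-contained Reactor sketch of the difference (the Stage record is a hypothetical stand-in for the test's closeable map/v stages, not a type from this repository):

import reactor.core.publisher.Mono;

public class CleanupIdiom {

  // Hypothetical stand-in for a closeable stage such as the test's map or v.
  record Stage(String name) implements AutoCloseable {
    @Override
    public void close() {
      System.out.println("closed " + name);
    }
  }

  public static void main(String[] args) {
    Stage stage = new Stage("map");
    // Old idiom: doAfterTerminate skips the cleanup when the subscriber cancels.
    // Mono.just(1).doAfterTerminate(stage::close).block();
    // New idiom: doFinally receives a SignalType and also runs on cancel.
    Mono.just(1).doFinally(signal -> stage.close()).block();
  }
}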
src/test/java/it/cavallium/dbengine/TestDictionaryMapDeepHashMap.java
@ -121,14 +121,14 @@ public abstract class TestDictionaryMapDeepHashMap {
 .create(tempDb(getTempDbGenerator(), allocator, db -> tempDictionary(db, updateMode)
 .map(dict -> tempDatabaseMapDictionaryDeepMapHashMap(dict, 5))
 .flatMapMany(map -> map
-.at(null, key1).flatMap(v -> v.putValue(key2, value).doAfterTerminate(v::release))
+.at(null, key1).flatMap(v -> v.putValue(key2, value).doFinally(s -> v.close()))
 .thenMany(map
 .getAllValues(null)
 .map(Entry::getValue)
 .flatMap(maps -> Flux.fromIterable(maps.entrySet()))
 .map(Entry::getValue)
 )
-.doAfterTerminate(map::release)
+.doFinally(s -> map.close())
 )
 ));
 if (shouldFail) {
src/test/java/it/cavallium/dbengine/TestLLDictionary.java (new file, 311 lines)
@ -0,0 +1,311 @@
package it.cavallium.dbengine;

import static it.cavallium.dbengine.DbTestUtils.destroyAllocator;
import static it.cavallium.dbengine.DbTestUtils.ensureNoLeaks;
import static it.cavallium.dbengine.DbTestUtils.newAllocator;
import static org.junit.jupiter.api.Assertions.assertEquals;

import io.net5.buffer.api.Buffer;
import io.net5.buffer.api.Send;
import it.cavallium.dbengine.DbTestUtils.TempDb;
import it.cavallium.dbengine.DbTestUtils.TestAllocator;
import it.cavallium.dbengine.database.LLDictionary;
import it.cavallium.dbengine.database.LLDictionaryResultType;
import it.cavallium.dbengine.database.LLKeyValueDatabase;
import it.cavallium.dbengine.database.LLRange;
import it.cavallium.dbengine.database.LLUtils;
import it.cavallium.dbengine.database.UpdateMode;
import it.cavallium.dbengine.database.UpdateReturnMode;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Objects;
import java.util.stream.Stream;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import org.warp.commonutils.log.Logger;
import org.warp.commonutils.log.LoggerFactory;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Schedulers;

public abstract class TestLLDictionary {

  private final Logger log = LoggerFactory.getLogger(this.getClass());
  private static final Mono<Send<LLRange>> RANGE_ALL = Mono.fromCallable(() -> LLRange.all().send());
  private TestAllocator allocator;
  private TempDb tempDb;
  private LLKeyValueDatabase db;

  protected abstract TemporaryDbGenerator getTempDbGenerator();

  @BeforeEach
  public void beforeEach() {
    this.allocator = newAllocator();
    ensureNoLeaks(allocator.allocator(), false, false);
    tempDb = Objects.requireNonNull(getTempDbGenerator().openTempDb(allocator).block(), "TempDB");
    db = tempDb.db();
  }

  @AfterEach
  public void afterEach() {
    getTempDbGenerator().closeTempDb(tempDb).block();
    ensureNoLeaks(allocator.allocator(), true, false);
    destroyAllocator(allocator);
  }

  public static Stream<Arguments> provideArguments() {
    return Arrays.stream(UpdateMode.values()).map(Arguments::of);
  }

  public static Stream<Arguments> providePutArguments() {
    var updateModes = Arrays.stream(UpdateMode.values());
    return updateModes.flatMap(updateMode -> {
      var resultTypes = Arrays.stream(LLDictionaryResultType.values());
      return resultTypes.map(resultType -> Arguments.of(updateMode, resultType));
    });
  }

  public static Stream<Arguments> provideUpdateArguments() {
    var updateModes = Arrays.stream(UpdateMode.values());
    return updateModes.flatMap(updateMode -> {
      var resultTypes = Arrays.stream(UpdateReturnMode.values());
      return resultTypes.map(resultType -> Arguments.of(updateMode, resultType));
    });
  }

  private LLDictionary getDict(UpdateMode updateMode) {
    var dict = DbTestUtils.tempDictionary(db, updateMode).blockOptional().orElseThrow();
    var key1 = Mono.fromCallable(() -> fromString("test-key-1"));
    var key2 = Mono.fromCallable(() -> fromString("test-key-2"));
    var key3 = Mono.fromCallable(() -> fromString("test-key-3"));
    var key4 = Mono.fromCallable(() -> fromString("test-key-4"));
    var value = Mono.fromCallable(() -> fromString("test-value"));
    dict.put(key1, value, LLDictionaryResultType.VOID).block();
    dict.put(key2, value, LLDictionaryResultType.VOID).block();
    dict.put(key3, value, LLDictionaryResultType.VOID).block();
    dict.put(key4, value, LLDictionaryResultType.VOID).block();
    return dict;
  }

  private Send<Buffer> fromString(String s) {
    var sb = s.getBytes(StandardCharsets.UTF_8);
    try (var b = db.getAllocator().allocate(sb.length + 3 + 13)) {
      assert b.writerOffset() == 0;
      assert b.readerOffset() == 0;
      b.writerOffset(3).writeBytes(sb);
      b.readerOffset(3);
      assert b.readableBytes() == sb.length;

      var part1 = b.split();

      return LLUtils.compositeBuffer(db.getAllocator(), part1.send(), b.send()).send();
    }
  }

  private String toString(Send<Buffer> b) {
    try (var bb = b.receive()) {
      byte[] data = new byte[bb.readableBytes()];
      bb.copyInto(bb.readerOffset(), data, 0, data.length);
      return new String(data, StandardCharsets.UTF_8);
    }
  }

  private void run(Flux<?> publisher) {
    publisher.subscribeOn(Schedulers.immediate()).blockLast();
  }

  private void runVoid(Mono<Void> publisher) {
    publisher.then().subscribeOn(Schedulers.immediate()).block();
  }

  private <T> T run(Mono<T> publisher) {
    return publisher.subscribeOn(Schedulers.immediate()).block();
  }

  private <T> T run(boolean shouldFail, Mono<T> publisher) {
    return publisher.subscribeOn(Schedulers.immediate()).transform(mono -> {
      if (shouldFail) {
        return mono.onErrorResume(ex -> Mono.empty());
      } else {
        return mono;
      }
    }).block();
  }

  private void runVoid(boolean shouldFail, Mono<Void> publisher) {
    publisher.then().subscribeOn(Schedulers.immediate()).transform(mono -> {
      if (shouldFail) {
        return mono.onErrorResume(ex -> Mono.empty());
      } else {
        return mono;
      }
    }).block();
  }

  @Test
  public void testNoOp() {
  }

  @Test
  public void testNoOpAllocation() {
    for (int i = 0; i < 10; i++) {
      var a = allocator.allocator().allocate(i * 512);
      a.send().receive().close();
    }
  }

  @ParameterizedTest
  @MethodSource("provideArguments")
  public void testGetDict(UpdateMode updateMode) {
    var dict = getDict(updateMode);
    Assertions.assertNotNull(dict);
  }

  @ParameterizedTest
  @MethodSource("provideArguments")
  public void testGetColumnName(UpdateMode updateMode) {
    var dict = getDict(updateMode);
    Assertions.assertEquals("hash_map_testmap", dict.getColumnName());
  }

  @ParameterizedTest
  @MethodSource("provideArguments")
  public void testGetAllocator(UpdateMode updateMode) {
    var dict = getDict(updateMode);
    var alloc = dict.getAllocator();
    Assertions.assertEquals(alloc, alloc);
  }

  @ParameterizedTest
  @MethodSource("provideArguments")
  public void testGet(UpdateMode updateMode) {
    var dict = getDict(updateMode);
    var keyEx = Mono.fromCallable(() -> fromString("test-key-1"));
    var keyNonEx = Mono.fromCallable(() -> fromString("test-nonexistent"));
    Assertions.assertEquals("test-value", run(dict.get(null, keyEx).map(this::toString).transform(LLUtils::handleDiscard)));
    Assertions.assertEquals("test-value", run(dict.get(null, keyEx, true).map(this::toString).transform(LLUtils::handleDiscard)));
    Assertions.assertEquals("test-value", run(dict.get(null, keyEx, false).map(this::toString).transform(LLUtils::handleDiscard)));
    Assertions.assertEquals((String) null, run(dict.get(null, keyNonEx).map(this::toString).transform(LLUtils::handleDiscard)));
    Assertions.assertEquals((String) null, run(dict.get(null, keyNonEx, true).map(this::toString).transform(LLUtils::handleDiscard)));
    Assertions.assertEquals((String) null, run(dict.get(null, keyNonEx, false).map(this::toString).transform(LLUtils::handleDiscard)));
  }

  @ParameterizedTest
  @MethodSource("providePutArguments")
  public void testPutExisting(UpdateMode updateMode, LLDictionaryResultType resultType) {
    var dict = getDict(updateMode);
    var keyEx = Mono.fromCallable(() -> fromString("test-key-1"));
    var value = Mono.fromCallable(() -> fromString("test-value"));

    var beforeSize = run(dict.sizeRange(null, RANGE_ALL, false));

    runVoid(dict.put(keyEx, value, resultType).then().doOnDiscard(Send.class, Send::close));

    var afterSize = run(dict.sizeRange(null, RANGE_ALL, false));
    Assertions.assertEquals(0, afterSize - beforeSize);
  }

  @ParameterizedTest
  @MethodSource("providePutArguments")
  public void testPutNew(UpdateMode updateMode, LLDictionaryResultType resultType) {
    var dict = getDict(updateMode);
    var keyNonEx = Mono.fromCallable(() -> fromString("test-nonexistent"));
    var value = Mono.fromCallable(() -> fromString("test-value"));

    var beforeSize = run(dict.sizeRange(null, RANGE_ALL, false));

    runVoid(dict.put(keyNonEx, value, resultType).then().doOnDiscard(Send.class, Send::close));

    var afterSize = run(dict.sizeRange(null, Mono.fromCallable(() -> LLRange.all().send()), false));
    Assertions.assertEquals(1, afterSize - beforeSize);

    Assertions.assertTrue(run(dict.getRangeKeys(null, RANGE_ALL).map(this::toString).collectList()).contains("test-nonexistent"));
  }

  @ParameterizedTest
  @MethodSource("provideArguments")
  public void testGetUpdateMode(UpdateMode updateMode) {
    var dict = getDict(updateMode);
    assertEquals(updateMode, run(dict.getUpdateMode()));
  }

  @ParameterizedTest
  @MethodSource("provideUpdateArguments")
  public void testUpdateExisting(UpdateMode updateMode, UpdateReturnMode updateReturnMode) {
    var dict = getDict(updateMode);
    var keyEx = Mono.fromCallable(() -> fromString("test-key-1"));
    var beforeSize = run(dict.sizeRange(null, RANGE_ALL, false));
    long afterSize;
    runVoid(updateMode == UpdateMode.DISALLOW,
        dict.update(keyEx, old -> fromString("test-value"), updateReturnMode, true).then().transform(LLUtils::handleDiscard)
    );
    afterSize = run(dict.sizeRange(null, RANGE_ALL, false));
    assertEquals(0, afterSize - beforeSize);
    runVoid(updateMode == UpdateMode.DISALLOW,
        dict.update(keyEx, old -> fromString("test-value"), updateReturnMode, false).then().transform(LLUtils::handleDiscard)
    );
    afterSize = run(dict.sizeRange(null, RANGE_ALL, false));
    assertEquals(0, afterSize - beforeSize);
    runVoid(updateMode == UpdateMode.DISALLOW,
        dict.update(keyEx, old -> fromString("test-value"), updateReturnMode).then().transform(LLUtils::handleDiscard)
    );
    afterSize = run(dict.sizeRange(null, RANGE_ALL, false));
    assertEquals(0, afterSize - beforeSize);
  }

  @ParameterizedTest
  @MethodSource("provideUpdateArguments")
  public void testUpdateNew(UpdateMode updateMode, UpdateReturnMode updateReturnMode) {
    int expected = updateMode == UpdateMode.DISALLOW ? 0 : 1;
    var dict = getDict(updateMode);
    var keyNonEx = Mono.fromCallable(() -> fromString("test-nonexistent"));
    var beforeSize = run(dict.sizeRange(null, RANGE_ALL, false));
    long afterSize;
    runVoid(updateMode == UpdateMode.DISALLOW,
        dict.update(keyNonEx, old -> fromString("test-value"), updateReturnMode, true).then().transform(LLUtils::handleDiscard)
    );
    afterSize = run(dict.sizeRange(null, RANGE_ALL, false));
    assertEquals(expected, afterSize - beforeSize);
    runVoid(updateMode == UpdateMode.DISALLOW,
        dict.update(keyNonEx, old -> fromString("test-value"), updateReturnMode, false).then().transform(LLUtils::handleDiscard)
    );
    afterSize = run(dict.sizeRange(null, RANGE_ALL, false));
    assertEquals(expected, afterSize - beforeSize);
    runVoid(updateMode == UpdateMode.DISALLOW,
        dict.update(keyNonEx, old -> fromString("test-value"), updateReturnMode).then().transform(LLUtils::handleDiscard)
    );
    afterSize = run(dict.sizeRange(null, RANGE_ALL, false));
    assertEquals(expected, afterSize - beforeSize);

    if (updateMode != UpdateMode.DISALLOW) {
      Assertions.assertTrue(run(dict.getRangeKeys(null, RANGE_ALL).map(this::toString).collectList()).contains(
          "test-nonexistent"));
    }
  }

  @ParameterizedTest
  @MethodSource("provideArguments")
  public void testUpdateAndGetDelta(UpdateMode updateMode) {
    log.warn("Test not implemented");
    //todo: implement
  }

  @ParameterizedTest
  @MethodSource("provideArguments")
  public void testClear(UpdateMode updateMode) {
    log.warn("Test not implemented");
    //todo: implement
  }

  @ParameterizedTest
  @MethodSource("providePutArguments")
  public void testRemove(UpdateMode updateMode, LLDictionaryResultType resultType) {
    log.warn("Test not implemented");
    //todo: implement
  }
}
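The run/runVoid(shouldFail, ...) overloads above are how the suite copes with UpdateMode.DISALLOW: when a failure is expected, the error is swallowed and the caller asserts on the absence of a result instead of on an exception type. A stripped-down, runnable version (reactor-core only; the helper body matches the one in this file, the main method is illustrative):

import reactor.core.publisher.Mono;
import reactor.core.scheduler.Schedulers;

public class RunHelpers {

  // Same logic as the helper above: expected failures become empty results.
  static <T> T run(boolean shouldFail, Mono<T> publisher) {
    return publisher
        .subscribeOn(Schedulers.immediate())
        .transform(mono -> shouldFail ? mono.onErrorResume(ex -> Mono.empty()) : mono)
        .block();
  }

  public static void main(String[] args) {
    System.out.println(run(false, Mono.just(42)));                          // prints 42
    System.out.println(run(true, Mono.error(new IllegalStateException()))); // prints null
  }
}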
src/test/java/it/cavallium/dbengine/TestLLDictionaryLeaks.java
@ -26,9 +26,7 @@ import org.junit.jupiter.api.Test;
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.Arguments;
 import org.junit.jupiter.params.provider.MethodSource;
-import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
-import reactor.core.scheduler.Schedulers;
 
 public abstract class TestLLDictionaryLeaks {
 
@ -90,43 +88,12 @@ public abstract class TestLLDictionaryLeaks {
 private Send<Buffer> fromString(String s) {
 var sb = s.getBytes(StandardCharsets.UTF_8);
 try (var b = db.getAllocator().allocate(sb.length)) {
-b.writeBytes(b);
+b.writeBytes(sb);
+assert b.readableBytes() == sb.length;
 return b.send();
 }
 }
 
-private void run(Flux<?> publisher) {
-publisher.subscribeOn(Schedulers.immediate()).blockLast();
-}
-
-private void runVoid(Mono<Void> publisher) {
-publisher.then().subscribeOn(Schedulers.immediate()).block();
-}
-
-private <T> T run(Mono<T> publisher) {
-return publisher.subscribeOn(Schedulers.immediate()).block();
-}
-
-private <T> T run(boolean shouldFail, Mono<T> publisher) {
-return publisher.subscribeOn(Schedulers.immediate()).transform(mono -> {
-if (shouldFail) {
-return mono.onErrorResume(ex -> Mono.empty());
-} else {
-return mono;
-}
-}).block();
-}
-
-private void runVoid(boolean shouldFail, Mono<Void> publisher) {
-publisher.then().subscribeOn(Schedulers.immediate()).transform(mono -> {
-if (shouldFail) {
-return mono.onErrorResume(ex -> Mono.empty());
-} else {
-return mono;
-}
-}).block();
-}
-
 @Test
 public void testNoOp() {
 }
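The one-character change in fromString above is a genuine bug fix, not part of the helper cleanup: b.writeBytes(b) asked the still-empty buffer to copy itself into itself, a no-op, so every key this helper produced was zero-length. The new assert catches that class of mistake immediately. The corrected helper in isolation (the BufferAllocator parameter is added here for self-containment and assumes the io.net5.buffer.api allocator type behind the allocate() calls in this diff; the test obtains it from db.getAllocator()):

import io.net5.buffer.api.Buffer;
import io.net5.buffer.api.BufferAllocator;
import io.net5.buffer.api.Send;
import java.nio.charset.StandardCharsets;

final class Keys {

  static Send<Buffer> fromString(BufferAllocator alloc, String s) {
    var sb = s.getBytes(StandardCharsets.UTF_8);
    try (var b = alloc.allocate(sb.length)) {
      b.writeBytes(sb);                      // was: b.writeBytes(b), a self-copy of zero readable bytes
      assert b.readableBytes() == sb.length; // guards against empty keys
      return b.send();
    }
  }
}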
@ -164,9 +131,9 @@ public abstract class TestLLDictionaryLeaks {
 public void testGet(UpdateMode updateMode) {
 var dict = getDict(updateMode);
 var key = Mono.fromCallable(() -> fromString("test"));
-runVoid(dict.get(null, key).then().transform(LLUtils::handleDiscard));
-runVoid(dict.get(null, key, true).then().transform(LLUtils::handleDiscard));
-runVoid(dict.get(null, key, false).then().transform(LLUtils::handleDiscard));
+DbTestUtils.runVoid(dict.get(null, key).then().transform(LLUtils::handleDiscard));
+DbTestUtils.runVoid(dict.get(null, key, true).then().transform(LLUtils::handleDiscard));
+DbTestUtils.runVoid(dict.get(null, key, false).then().transform(LLUtils::handleDiscard));
 }
 
 @ParameterizedTest
@ -175,14 +142,14 @@ public abstract class TestLLDictionaryLeaks {
 var dict = getDict(updateMode);
 var key = Mono.fromCallable(() -> fromString("test-key"));
 var value = Mono.fromCallable(() -> fromString("test-value"));
-runVoid(dict.put(key, value, resultType).then().doOnDiscard(Send.class, Send::close));
+DbTestUtils.runVoid(dict.put(key, value, resultType).then().doOnDiscard(Send.class, Send::close));
 }
 
 @ParameterizedTest
 @MethodSource("provideArguments")
 public void testGetUpdateMode(UpdateMode updateMode) {
 var dict = getDict(updateMode);
-assertEquals(updateMode, run(dict.getUpdateMode()));
+assertEquals(updateMode, DbTestUtils.run(dict.getUpdateMode()));
 }
 
 @ParameterizedTest
@ -190,13 +157,13 @@ public abstract class TestLLDictionaryLeaks {
 public void testUpdate(UpdateMode updateMode, UpdateReturnMode updateReturnMode) {
 var dict = getDict(updateMode);
 var key = Mono.fromCallable(() -> fromString("test-key"));
-runVoid(updateMode == UpdateMode.DISALLOW,
+DbTestUtils.runVoid(updateMode == UpdateMode.DISALLOW,
 dict.update(key, old -> old, updateReturnMode, true).then().transform(LLUtils::handleDiscard)
 );
-runVoid(updateMode == UpdateMode.DISALLOW,
+DbTestUtils.runVoid(updateMode == UpdateMode.DISALLOW,
 dict.update(key, old -> old, updateReturnMode, false).then().transform(LLUtils::handleDiscard)
 );
-runVoid(updateMode == UpdateMode.DISALLOW,
+DbTestUtils.runVoid(updateMode == UpdateMode.DISALLOW,
 dict.update(key, old -> old, updateReturnMode).then().transform(LLUtils::handleDiscard)
 );
 }
@ -206,13 +173,13 @@ public abstract class TestLLDictionaryLeaks {
 public void testUpdateAndGetDelta(UpdateMode updateMode) {
 var dict = getDict(updateMode);
 var key = Mono.fromCallable(() -> fromString("test-key"));
-runVoid(updateMode == UpdateMode.DISALLOW,
+DbTestUtils.runVoid(updateMode == UpdateMode.DISALLOW,
 dict.updateAndGetDelta(key, old -> old, true).then().transform(LLUtils::handleDiscard)
 );
-runVoid(updateMode == UpdateMode.DISALLOW,
+DbTestUtils.runVoid(updateMode == UpdateMode.DISALLOW,
 dict.updateAndGetDelta(key, old -> old, false).then().transform(LLUtils::handleDiscard)
 );
-runVoid(updateMode == UpdateMode.DISALLOW,
+DbTestUtils.runVoid(updateMode == UpdateMode.DISALLOW,
 dict.updateAndGetDelta(key, old -> old).then().transform(LLUtils::handleDiscard)
 );
 }
@ -221,7 +188,7 @@ public abstract class TestLLDictionaryLeaks {
 @MethodSource("provideArguments")
 public void testClear(UpdateMode updateMode) {
 var dict = getDict(updateMode);
-runVoid(dict.clear());
+DbTestUtils.runVoid(dict.clear());
 }
 
 @ParameterizedTest
@ -229,6 +196,6 @@ public abstract class TestLLDictionaryLeaks {
 public void testRemove(UpdateMode updateMode, LLDictionaryResultType resultType) {
 var dict = getDict(updateMode);
 var key = Mono.fromCallable(() -> fromString("test-key"));
-runVoid(dict.remove(key, resultType).then().doOnDiscard(Send.class, Send::close));
+DbTestUtils.runVoid(dict.remove(key, resultType).then().doOnDiscard(Send.class, Send::close));
 }
 }
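Every runVoid/run edit in this file points at the same refactor: the private blocking helpers were deleted above and re-homed as statics, so TestLLDictionaryLeaks and the new TestLLDictionary suites share one implementation. A sketch of the shared holder (the method bodies are the ones removed from this file; DbTestUtils.run and DbTestUtils.runVoid are called with exactly these signatures in this diff, though the real DbTestUtils has further members not shown here):

import reactor.core.publisher.Mono;
import reactor.core.scheduler.Schedulers;

public final class DbTestUtils {

  private DbTestUtils() {
  }

  public static void runVoid(Mono<Void> publisher) {
    publisher.then().subscribeOn(Schedulers.immediate()).block();
  }

  public static void runVoid(boolean shouldFail, Mono<Void> publisher) {
    publisher.then().subscribeOn(Schedulers.immediate())
        .transform(mono -> shouldFail ? mono.onErrorResume(ex -> Mono.empty()) : mono)
        .block();
  }

  public static <T> T run(Mono<T> publisher) {
    return publisher.subscribeOn(Schedulers.immediate()).block();
  }
}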
src/test/java/it/cavallium/dbengine/TestLocalLLDictionary.java (new file)
@ -0,0 +1,11 @@
package it.cavallium.dbengine;

public class TestLocalLLDictionary extends TestLLDictionary {

  private static final TemporaryDbGenerator GENERATOR = new LocalTemporaryDbGenerator();

  @Override
  protected TemporaryDbGenerator getTempDbGenerator() {
    return GENERATOR;
  }
}
src/test/java/it/cavallium/dbengine/TestMemoryLLDictionary.java (new file)
@ -0,0 +1,11 @@
package it.cavallium.dbengine;

public class TestMemoryLLDictionary extends TestLLDictionary {

  private static final TemporaryDbGenerator GENERATOR = new MemoryTemporaryDbGenerator();

  @Override
  protected TemporaryDbGenerator getTempDbGenerator() {
    return GENERATOR;
  }
}
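The two 11-line subclasses above are the payoff of making TestLLDictionary abstract: JUnit instantiates each concrete class and runs every inherited @Test and @ParameterizedTest against its backend, so the on-disk and in-memory implementations must pass an identical suite. Wiring up any further backend would take the same three members (SomeOtherTemporaryDbGenerator is hypothetical, not a class in this repository):

public class TestSomeOtherLLDictionary extends TestLLDictionary {

  private static final TemporaryDbGenerator GENERATOR = new SomeOtherTemporaryDbGenerator();

  @Override
  protected TemporaryDbGenerator getTempDbGenerator() {
    return GENERATOR;
  }
}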
src/test/java/it/cavallium/dbengine/TestRanges.java
@ -51,7 +51,7 @@ public class TestRanges {
 byte[] firstRangeKey;
 try (var firstRangeKeyBuf = DatabaseMapDictionaryDeep.firstRangeKey(alloc,
 alloc.allocate(prefixKey.length).writeBytes(prefixKey).send(),
-prefixKey.length, 7, 3).receive()) {
+prefixKey.length, 7, 3)) {
 firstRangeKey = LLUtils.toArray(firstRangeKeyBuf);
 }
 byte[] nextRangeKey;
@ -60,7 +60,7 @@ public class TestRanges {
 prefixKey.length,
 7,
 3
-).receive()) {
+)) {
 nextRangeKey = LLUtils.toArray(nextRangeKeyBuf);
 }
 
@ -114,7 +114,7 @@ public class TestRanges {
 prefixKey.length,
 3,
 7
-).receive()) {
+)) {
 firstRangeKey = LLUtils.toArray(firstRangeKeyBuf);
 }
 try (var nextRangeKeyBuf = DatabaseMapDictionaryDeep.nextRangeKey(alloc,
@ -123,7 +123,7 @@ public class TestRanges {
 prefixKey.length,
 3,
 7
-).receive()) {
+)) {
 byte[] nextRangeKey = LLUtils.toArray(nextRangeKeyBuf);
 
 if (Arrays.equals(prefixKey, new byte[] {(byte) 0xFF, (byte) 0xFF, (byte) 0xFF}) && Arrays.equals(suffixKey, new byte[] {(byte) 0xFF, (byte) 0xFF, (byte) 0xFF})) {
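The four identical edits in this file track an API change rather than a test change: DatabaseMapDictionaryDeep.firstRangeKey and nextRangeKey now return an owned Buffer instead of a Send<Buffer> transfer handle, so the .receive() unwrap disappears and the result can sit directly in try-with-resources. The calling convention before and after, using the same arguments as the first hunk (a sketch, not additional test code):

// Before: the helper returned Send<Buffer>; receive() unwrapped it into an owned Buffer.
// try (var buf = DatabaseMapDictionaryDeep.firstRangeKey(...).receive()) { ... }

// After: the helper returns the Buffer itself, already owned by the caller.
try (var firstRangeKeyBuf = DatabaseMapDictionaryDeep.firstRangeKey(alloc,
    alloc.allocate(prefixKey.length).writeBytes(prefixKey).send(),
    prefixKey.length, 7, 3)) {
  byte[] firstRangeKey = LLUtils.toArray(firstRangeKeyBuf);
}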
src/test/resources/log4j2.xml (new file, 20 lines)
@ -0,0 +1,20 @@
<?xml version="1.0" encoding="UTF-8"?>
<Configuration strict="true"
               xmlns="http://logging.apache.org/log4j/2.0/config"
               xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
               xsi:schemaLocation="http://logging.apache.org/log4j/2.0/config
               https://raw.githubusercontent.com/apache/logging-log4j2/log4j-2.14.1/log4j-core/src/main/resources/Log4j-config.xsd"
               status="ALL">
  <Appenders>
    <Console name="Console" target="SYSTEM_OUT">
      <PatternLayout
          pattern="%d{${LOG_DATEFORMAT_PATTERN:-yyyy-MM-dd HH:mm:ss.SSS}} %highlight{${LOG_LEVEL_PATTERN:-%5p}}{FATAL=red blink, ERROR=red, WARN=yellow bold, INFO=green, DEBUG=green bold, TRACE=blue} %style{%processId}{magenta} %style{%-20.20c{1}}{cyan} : %m%n%ex"/>
    </Console>

  </Appenders>
  <Loggers>
    <Root level="ALL">
      <AppenderRef ref="Console"/>
    </Root>
  </Loggers>
</Configuration>
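The new test-only logging config is deliberately maximal: status="ALL" makes Log4j report its own initialization, and the Root logger at level ALL forwards every event to the console appender, which suits the leak-hunting tests added in this commit. A minimal check that the file is picked up from the test classpath (Logger/LoggerFactory as imported by TestLLDictionary; the class name is illustrative):

import org.warp.commonutils.log.Logger;
import org.warp.commonutils.log.LoggerFactory;

public class LoggingSmokeTest {

  private static final Logger log = LoggerFactory.getLogger(LoggingSmokeTest.class);

  public static void main(String[] args) {
    // With Root at ALL, this reaches the console through the %highlight pattern.
    log.warn("log4j2.xml from src/test/resources is active");
  }
}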