Code cleanup
parent a412246212
commit df4f4b253c

pom.xml (2 changes)
pom.xml
@@ -7,7 +7,7 @@
 	<artifactId>common-utils</artifactId>
 	<groupId>org.warp</groupId>
-	<version>1.1.5</version>
+	<version>1.1.6</version>

 	<properties>
 		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
Deleted: org/warp/commonutils/batch/ParallelUtils.java
@@ -1,763 +0,0 @@
package org.warp.commonutils.batch;

import java.io.IOException;
import java.util.concurrent.CompletionException;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.Supplier;
import org.warp.commonutils.concurrency.executor.BlockingOnFullQueueExecutorServiceDecorator;
import org.warp.commonutils.concurrency.executor.BoundedExecutorService;
import org.warp.commonutils.functional.CancellableBiConsumer;
import org.warp.commonutils.functional.CancellableConsumer;
import org.warp.commonutils.functional.CancellableTriConsumer;
import org.warp.commonutils.functional.ConsumerResult;
import org.warp.commonutils.functional.IOBiConsumer;
import org.warp.commonutils.functional.IOConsumer;
import org.warp.commonutils.functional.IOTriConsumer;
import org.warp.commonutils.functional.TriConsumer;
import org.warp.commonutils.type.IntWrapper;
import org.warp.commonutils.type.ShortNamedThreadFactory;
import org.warp.commonutils.type.VariableWrapper;

public class ParallelUtils {

	public static <V> void parallelizeIO(IOConsumer<IOConsumer<V>> iterator,
			int maxQueueSize,
			int parallelism,
			int groupSize,
			IOConsumer<V> consumer) throws IOException {
		Consumer<Consumer<V>> action = (cons) -> {
			try {
				iterator.consume(cons::accept);
			} catch (IOException e) {
				throw new CompletionException(e);
			}
		};

		try {
			parallelize(action, maxQueueSize, parallelism, groupSize, (v) -> {
				try {
					consumer.consume(v);
				} catch (IOException ex) {
					throw new CompletionException(ex);
				}
			});
		} catch (CompletionException ex) {
			if (ex.getCause() instanceof CompletionException && ex.getCause().getCause() instanceof IOException) {
				throw (IOException) ex.getCause().getCause();
			} else if (ex.getCause() instanceof IOException) {
				throw (IOException) ex.getCause();
			} else {
				throw new IOException(ex);
			}
		}
	}

	public static <V> void parallelize(Consumer<Consumer<V>> iterator,
			int maxQueueSize,
			int parallelism,
			int groupSize, Consumer<V> consumer) throws CompletionException {
		var parallelExecutor = BoundedExecutorService.create(maxQueueSize,
				parallelism,
				0,
				TimeUnit.MILLISECONDS,
				new ShortNamedThreadFactory("ForEachParallel"),
				(a, b) -> {}
		);
		final int CHUNK_SIZE = groupSize;
		IntWrapper count = new IntWrapper(CHUNK_SIZE);
		VariableWrapper<Object[]> values = new VariableWrapper<>(new Object[CHUNK_SIZE]);
		AtomicReference<CompletionException> firstExceptionReference = new AtomicReference<>(null);
		final Object arraysAccessLock = new Object();
		iterator.accept((value) -> {
			synchronized (arraysAccessLock) {
				var firstException = firstExceptionReference.get();
				if (firstException != null) {
					throw firstException;
				}

				values.var[CHUNK_SIZE - count.var] = value;
				count.var--;
				if (count.var == 0) {
					sendChunkItems(values, CHUNK_SIZE, count, consumer, parallelExecutor, firstExceptionReference);
				}
			}
		});
		parallelExecutor.shutdown();
		try {
			parallelExecutor.awaitTermination(Integer.MAX_VALUE, TimeUnit.DAYS);
		} catch (InterruptedException e) {
			throw new RuntimeException("Parallel forEach interrupted", e);
		}
		synchronized (arraysAccessLock) {
			if (count.var > 0) {
				sendChunkItems(values, CHUNK_SIZE, count, consumer, null, firstExceptionReference);
			}
		}

		var firstException = firstExceptionReference.get();
		if (firstException != null) {
			throw firstException;
		}
	}

	private static <V> void sendChunkItems(VariableWrapper<Object[]> values,
			int CHUNK_SIZE,
			IntWrapper count,
			Consumer<V> consumer,
			BlockingOnFullQueueExecutorServiceDecorator parallelExecutor,
			AtomicReference<CompletionException> firstExceptionReference) {
		var itemsCount = CHUNK_SIZE - count.var;
		count.var = CHUNK_SIZE;
		Object[] valuesCopy = values.var;
		values.var = new Object[itemsCount];
		try {
			Runnable action = () -> {
				for (int i = 0; i < itemsCount; i++) {
					try {
						//noinspection unchecked
						consumer.accept((V) valuesCopy[i]);
					} catch (Exception ex) {
						firstExceptionReference.compareAndSet(null, new CompletionException(ex));
					}
				}
			};
			if (parallelExecutor != null) {
				parallelExecutor.execute(action);
			} else {
				action.run();
			}
		} catch (RejectedExecutionException e) {
			throw new CompletionException(e);
		}
	}

	public static <K> ConsumerResult parallelize(Consumer<CancellableConsumer<K>> iterator,
			int maxQueueSize,
			int parallelism,
			int groupSize,
			CancellableConsumer<K> consumer) throws CompletionException {
		if (parallelism <= 1) {
			iterator.accept(consumer);
			return ConsumerResult.result();
		} else {
			var parallelExecutor = BoundedExecutorService.create(maxQueueSize,
					parallelism,
					0,
					TimeUnit.MILLISECONDS,
					new ShortNamedThreadFactory("ForEachParallel"),
					(a, b) -> {}
			);
			final int CHUNK_SIZE = groupSize;
			IntWrapper count = new IntWrapper(CHUNK_SIZE);
			VariableWrapper<Object[]> keys = new VariableWrapper<>(new Object[CHUNK_SIZE]);
			AtomicReference<CompletionException> firstExceptionReference = new AtomicReference<>(null);
			AtomicBoolean cancelled = new AtomicBoolean(false);
			final Object arraysAccessLock = new Object();
			iterator.accept((key) -> {
				synchronized (arraysAccessLock) {
					var firstException = firstExceptionReference.get();
					if (firstException != null) {
						throw firstException;
					}
					var cancelledVal = cancelled.get();
					if (cancelledVal) {
						return ConsumerResult.cancelNext();
					}

					keys.var[CHUNK_SIZE - count.var] = key;
					count.var--;

					if (count.var == 0) {
						return sendChunkItems(keys,
								CHUNK_SIZE,
								count,
								consumer,
								parallelExecutor,
								firstExceptionReference,
								cancelled
						);
					} else {
						return ConsumerResult.result();
					}
				}
			});
			parallelExecutor.shutdown();
			try {
				parallelExecutor.awaitTermination(Integer.MAX_VALUE, TimeUnit.DAYS);
			} catch (InterruptedException e) {
				throw new RuntimeException("Parallel forEach interrupted", e);
			}
			synchronized (arraysAccessLock) {
				if (count.var > 0) {
					var sendChunkItemsResult = sendChunkItems(keys,
							CHUNK_SIZE,
							count,
							consumer,
							null,
							firstExceptionReference,
							cancelled
					);
					cancelled.compareAndSet(false, sendChunkItemsResult.isCancelled());
				}
			}

			var firstException = firstExceptionReference.get();
			if (firstException != null) {
				throw firstException;
			}
			if (cancelled.get()) {
				return ConsumerResult.cancelNext();
			} else {
				return ConsumerResult.result();
			}
		}
	}

	private static <K> ConsumerResult sendChunkItems(VariableWrapper<Object[]> keys,
			final int CHUNK_SIZE,
			IntWrapper count,
			CancellableConsumer<K> consumer,
			BlockingOnFullQueueExecutorServiceDecorator parallelExecutor,
			AtomicReference<CompletionException> firstExceptionReference,
			AtomicBoolean cancelled) {
		int itemsCount = CHUNK_SIZE - count.var;
		count.var = CHUNK_SIZE;
		Object[] keysCopy = keys.var;
		keys.var = new Object[itemsCount];
		try {
			Supplier<ConsumerResult> action = () -> {
				for (int i = 0; i < itemsCount; i++) {
					try {
						//noinspection unchecked
						if (consumer.acceptCancellable((K) keysCopy[i]).isCancelled()) {
							cancelled.set(true);
							return ConsumerResult.cancelNext();
						}
					} catch (Exception ex) {
						firstExceptionReference.compareAndSet(null, new CompletionException(ex));
						return ConsumerResult.cancelNext();
					}
				}
				return ConsumerResult.result();
			};
			if (parallelExecutor != null) {
				parallelExecutor.execute(action::get);
				return ConsumerResult.result();
			} else {
				return action.get();
			}
		} catch (RejectedExecutionException e) {
			throw new CompletionException(e);
		}
	}

	public static <K, V> void parallelizeIO(IOConsumer<IOBiConsumer<K, V>> iterator,
			int maxQueueSize,
			int parallelism,
			int groupSize,
			IOBiConsumer<K, V> consumer) throws IOException {
		Consumer<BiConsumer<K, V>> action = (cons) -> {
			try {
				iterator.consume(cons::accept);
			} catch (IOException e) {
				throw new CompletionException(e);
			}
		};

		try {
			parallelize(action, maxQueueSize, parallelism, groupSize, (k, v) -> {
				try {
					consumer.consume(k, v);
				} catch (IOException ex) {
					throw new CompletionException(ex);
				}
			});
		} catch (CompletionException ex) {
			if (ex.getCause() instanceof CompletionException && ex.getCause().getCause() instanceof IOException) {
				throw (IOException) ex.getCause().getCause();
			} else if (ex.getCause() instanceof IOException) {
				throw (IOException) ex.getCause();
			} else {
				throw new IOException(ex);
			}
		}
	}

	public static <K, V> void parallelize(Consumer<BiConsumer<K, V>> iterator,
			int maxQueueSize,
			int parallelism,
			int groupSize, BiConsumer<K, V> consumer) throws CompletionException {
		if (parallelism <= 1) {
			iterator.accept(consumer);
		} else {
			var parallelExecutor = BoundedExecutorService.create(maxQueueSize,
					parallelism,
					0,
					TimeUnit.MILLISECONDS,
					new ShortNamedThreadFactory("ForEachParallel"),
					(a, b) -> {}
			);
			final int CHUNK_SIZE = groupSize;
			IntWrapper count = new IntWrapper(CHUNK_SIZE);
			VariableWrapper<Object[]> keys = new VariableWrapper<>(new Object[CHUNK_SIZE]);
			VariableWrapper<Object[]> values = new VariableWrapper<>(new Object[CHUNK_SIZE]);
			AtomicReference<CompletionException> firstExceptionReference = new AtomicReference<>(null);
			final Object arraysAccessLock = new Object();
			iterator.accept((key, value) -> {
				synchronized (arraysAccessLock) {
					var firstException = firstExceptionReference.get();
					if (firstException != null) {
						throw firstException;
					}

					keys.var[CHUNK_SIZE - count.var] = key;
					values.var[CHUNK_SIZE - count.var] = value;
					count.var--;

					if (count.var == 0) {
						sendChunkItems(keys, values, CHUNK_SIZE, count, consumer, parallelExecutor, firstExceptionReference);
					}
				}
			});
			parallelExecutor.shutdown();
			try {
				parallelExecutor.awaitTermination(Integer.MAX_VALUE, TimeUnit.DAYS);
			} catch (InterruptedException e) {
				throw new RuntimeException("Parallel forEach interrupted", e);
			}
			synchronized (arraysAccessLock) {
				if (count.var > 0) {
					sendChunkItems(keys, values, CHUNK_SIZE, count, consumer, null, firstExceptionReference);
				}
			}

			var firstException = firstExceptionReference.get();
			if (firstException != null) {
				throw firstException;
			}
		}
	}

	private static <K, V> void sendChunkItems(VariableWrapper<Object[]> keys,
			VariableWrapper<Object[]> values,
			final int CHUNK_SIZE,
			IntWrapper count,
			BiConsumer<K, V> consumer,
			BlockingOnFullQueueExecutorServiceDecorator parallelExecutor,
			AtomicReference<CompletionException> firstExceptionReference) {
		int itemsCount = CHUNK_SIZE - count.var;
		count.var = CHUNK_SIZE;
		Object[] keysCopy = keys.var;
		Object[] valuesCopy = values.var;
		keys.var = new Object[itemsCount];
		values.var = new Object[itemsCount];
		try {
			Runnable action = () -> {
				for (int i = 0; i < itemsCount; i++) {
					try {
						//noinspection unchecked
						consumer.accept((K) keysCopy[i], (V) valuesCopy[i]);
					} catch (Exception ex) {
						firstExceptionReference.compareAndSet(null, new CompletionException(ex));
						break;
					}
				}
			};
			if (parallelExecutor != null) {
				parallelExecutor.execute(action);
			} else {
				action.run();
			}
		} catch (RejectedExecutionException e) {
			throw new CompletionException(e);
		}
	}

	public static <K, V> ConsumerResult parallelize(Consumer<CancellableBiConsumer<K, V>> iterator,
			int maxQueueSize,
			int parallelism,
			int groupSize,
			CancellableBiConsumer<K, V> consumer) throws CompletionException {
		if (parallelism <= 1) {
			iterator.accept(consumer);
			return ConsumerResult.result();
		} else {
			var parallelExecutor = BoundedExecutorService.create(maxQueueSize,
					parallelism,
					0,
					TimeUnit.MILLISECONDS,
					new ShortNamedThreadFactory("ForEachParallel"),
					(a, b) -> {}
			);
			final int CHUNK_SIZE = groupSize;
			IntWrapper count = new IntWrapper(CHUNK_SIZE);
			VariableWrapper<Object[]> keys = new VariableWrapper<>(new Object[CHUNK_SIZE]);
			VariableWrapper<Object[]> values = new VariableWrapper<>(new Object[CHUNK_SIZE]);
			AtomicReference<CompletionException> firstExceptionReference = new AtomicReference<>(null);
			AtomicBoolean cancelled = new AtomicBoolean(false);
			final Object arraysAccessLock = new Object();
			iterator.accept((key, value) -> {
				synchronized (arraysAccessLock) {
					var firstException = firstExceptionReference.get();
					if (firstException != null) {
						throw firstException;
					}
					var cancelledVal = cancelled.get();
					if (cancelledVal) {
						return ConsumerResult.cancelNext();
					}

					keys.var[CHUNK_SIZE - count.var] = key;
					values.var[CHUNK_SIZE - count.var] = value;
					count.var--;

					if (count.var == 0) {
						return sendChunkItems(keys,
								values,
								CHUNK_SIZE,
								count,
								consumer,
								parallelExecutor,
								firstExceptionReference,
								cancelled
						);
					} else {
						return ConsumerResult.result();
					}
				}
			});
			parallelExecutor.shutdown();
			try {
				parallelExecutor.awaitTermination(Integer.MAX_VALUE, TimeUnit.DAYS);
			} catch (InterruptedException e) {
				throw new RuntimeException("Parallel forEach interrupted", e);
			}
			synchronized (arraysAccessLock) {
				if (count.var > 0) {
					var sendChunkItemsResult = sendChunkItems(keys,
							values,
							CHUNK_SIZE,
							count,
							consumer,
							null,
							firstExceptionReference,
							cancelled
					);
					cancelled.compareAndSet(false, sendChunkItemsResult.isCancelled());
				}
			}

			var firstException = firstExceptionReference.get();
			if (firstException != null) {
				throw firstException;
			}
			if (cancelled.get()) {
				return ConsumerResult.cancelNext();
			} else {
				return ConsumerResult.result();
			}
		}
	}

	private static <K, V> ConsumerResult sendChunkItems(VariableWrapper<Object[]> keys,
			VariableWrapper<Object[]> values,
			final int CHUNK_SIZE,
			IntWrapper count,
			CancellableBiConsumer<K, V> consumer,
			BlockingOnFullQueueExecutorServiceDecorator parallelExecutor,
			AtomicReference<CompletionException> firstExceptionReference,
			AtomicBoolean cancelled) {
		int itemsCount = CHUNK_SIZE - count.var;
		count.var = CHUNK_SIZE;
		Object[] keysCopy = keys.var;
		Object[] valuesCopy = values.var;
		keys.var = new Object[itemsCount];
		values.var = new Object[itemsCount];
		try {
			Supplier<ConsumerResult> action = () -> {
				for (int i = 0; i < itemsCount; i++) {
					try {
						//noinspection unchecked
						if (consumer.acceptCancellable((K) keysCopy[i], (V) valuesCopy[i]).isCancelled()) {
							cancelled.set(true);
							return ConsumerResult.cancelNext();
						}
					} catch (Exception ex) {
						firstExceptionReference.compareAndSet(null, new CompletionException(ex));
						return ConsumerResult.cancelNext();
					}
				}
				return ConsumerResult.result();
			};
			if (parallelExecutor != null) {
				parallelExecutor.execute(action::get);
				return ConsumerResult.result();
			} else {
				return action.get();
			}
		} catch (RejectedExecutionException e) {
			throw new CompletionException(e);
		}
	}

	public static <K1, K2, V> void parallelizeIO(IOConsumer<IOTriConsumer<K1, K2, V>> iterator,
			int maxQueueSize,
			int parallelism,
			int groupSize,
			IOTriConsumer<K1, K2, V> consumer) throws IOException {
		Consumer<TriConsumer<K1, K2, V>> action = (cons) -> {
			try {
				iterator.consume(cons::accept);
			} catch (IOException e) {
				throw new CompletionException(e);
			}
		};

		try {
			parallelize(action, maxQueueSize, parallelism, groupSize, (k1, k2, v) -> {
				try {
					consumer.accept(k1, k2, v);
				} catch (IOException ex) {
					throw new CompletionException(ex);
				}
			});
		} catch (CompletionException ex) {
			if (ex.getCause() instanceof CompletionException && ex.getCause().getCause() instanceof IOException) {
				throw (IOException) ex.getCause().getCause();
			} else if (ex.getCause() instanceof IOException) {
				throw (IOException) ex.getCause();
			} else {
				throw new IOException(ex);
			}
		}
	}

	public static <K1, K2, V> void parallelize(Consumer<TriConsumer<K1, K2, V>> iterator,
			int maxQueueSize,
			int parallelism,
			int groupSize,
			TriConsumer<K1, K2, V> consumer) throws CompletionException {
		var parallelExecutor = BoundedExecutorService.create(maxQueueSize,
				parallelism,
				0,
				TimeUnit.MILLISECONDS,
				new ShortNamedThreadFactory("ForEachParallel"),
				(a, b) -> {}
		);
		final int CHUNK_SIZE = groupSize;
		IntWrapper count = new IntWrapper(CHUNK_SIZE);
		VariableWrapper<Object[]> keys1 = new VariableWrapper<>(new Object[CHUNK_SIZE]);
		VariableWrapper<Object[]> keys2 = new VariableWrapper<>(new Object[CHUNK_SIZE]);
		VariableWrapper<Object[]> values = new VariableWrapper<>(new Object[CHUNK_SIZE]);
		AtomicReference<CompletionException> firstExceptionReference = new AtomicReference<>(null);
		final Object arraysAccessLock = new Object();
		iterator.accept((key1, key2, value) -> {
			synchronized (arraysAccessLock) {
				var firstException = firstExceptionReference.get();
				if (firstException != null) {
					throw firstException;
				}

				keys1.var[CHUNK_SIZE - count.var] = key1;
				keys2.var[CHUNK_SIZE - count.var] = key2;
				values.var[CHUNK_SIZE - count.var] = value;
				count.var--;
				if (count.var == 0) {
					sendChunkItems(keys1, keys2, values, CHUNK_SIZE, count, consumer, parallelExecutor, firstExceptionReference);
				}
			}
		});
		parallelExecutor.shutdown();
		try {
			parallelExecutor.awaitTermination(Integer.MAX_VALUE, TimeUnit.DAYS);
		} catch (InterruptedException e) {
			throw new RuntimeException("Parallel forEach interrupted", e);
		}
		synchronized (arraysAccessLock) {
			if (count.var > 0) {
				sendChunkItems(keys1, keys2, values, CHUNK_SIZE, count, consumer, null, firstExceptionReference);
			}
		}

		var firstException = firstExceptionReference.get();
		if (firstException != null) {
			throw firstException;
		}
	}

	private static <K1, K2, V> void sendChunkItems(VariableWrapper<Object[]> keys1,
			VariableWrapper<Object[]> keys2,
			VariableWrapper<Object[]> values,
			int CHUNK_SIZE,
			IntWrapper count,
			TriConsumer<K1, K2, V> consumer,
			BlockingOnFullQueueExecutorServiceDecorator parallelExecutor,
			AtomicReference<CompletionException> firstExceptionReference) {
		int itemsCount = CHUNK_SIZE - count.var;
		count.var = CHUNK_SIZE;
		Object[] keys1Copy = keys1.var;
		Object[] keys2Copy = keys2.var;
		Object[] valuesCopy = values.var;
		keys1.var = new Object[itemsCount];
		keys2.var = new Object[itemsCount];
		values.var = new Object[itemsCount];
		try {
			Runnable action = () -> {
				for (int i = 0; i < itemsCount; i++) {
					try {
						//noinspection unchecked
						consumer.accept((K1) keys1Copy[i], (K2) keys2Copy[i], (V) valuesCopy[i]);
					} catch (Exception ex) {
						firstExceptionReference.compareAndSet(null, new CompletionException(ex));
					}
				}
			};
			if (parallelExecutor != null) {
				parallelExecutor.execute(action);
			} else {
				action.run();
			}
		} catch (RejectedExecutionException e) {
			throw new CompletionException(e);
		}
	}

	public static <K1, K2, V> ConsumerResult parallelize(Consumer<CancellableTriConsumer<K1, K2, V>> iterator,
			int maxQueueSize,
			int parallelism,
			int groupSize,
			CancellableTriConsumer<K1, K2, V> consumer) throws CompletionException {
		if (parallelism <= 1) {
			iterator.accept(consumer);
			return ConsumerResult.result();
		} else {
			var parallelExecutor = BoundedExecutorService.create(maxQueueSize,
					parallelism,
					0,
					TimeUnit.MILLISECONDS,
					new ShortNamedThreadFactory("ForEachParallel"),
					(a, b) -> {}
			);
			final int CHUNK_SIZE = groupSize;
			IntWrapper count = new IntWrapper(CHUNK_SIZE);
			VariableWrapper<Object[]> keys1 = new VariableWrapper<>(new Object[CHUNK_SIZE]);
			VariableWrapper<Object[]> keys2 = new VariableWrapper<>(new Object[CHUNK_SIZE]);
			VariableWrapper<Object[]> values = new VariableWrapper<>(new Object[CHUNK_SIZE]);
			AtomicReference<CompletionException> firstExceptionReference = new AtomicReference<>(null);
			AtomicBoolean cancelled = new AtomicBoolean(false);
			final Object arraysAccessLock = new Object();
			iterator.accept((key1, key2, value) -> {
				synchronized (arraysAccessLock) {
					var firstException = firstExceptionReference.get();
					if (firstException != null) {
						throw firstException;
					}
					var cancelledVal = cancelled.get();
					if (cancelledVal) {
						return ConsumerResult.cancelNext();
					}

					keys1.var[CHUNK_SIZE - count.var] = key1;
					keys2.var[CHUNK_SIZE - count.var] = key2;
					values.var[CHUNK_SIZE - count.var] = value;
					count.var--;

					if (count.var == 0) {
						return sendChunkItems(keys1,
								keys2,
								values,
								CHUNK_SIZE,
								count,
								consumer,
								parallelExecutor,
								firstExceptionReference,
								cancelled
						);
					} else {
						return ConsumerResult.result();
					}
				}
			});
			parallelExecutor.shutdown();
			try {
				parallelExecutor.awaitTermination(Integer.MAX_VALUE, TimeUnit.DAYS);
			} catch (InterruptedException e) {
				throw new RuntimeException("Parallel forEach interrupted", e);
			}
			synchronized (arraysAccessLock) {
				if (count.var > 0) {
					var sendChunkItemsResult = sendChunkItems(keys1,
							keys2,
							values,
							CHUNK_SIZE,
							count,
							consumer,
							null,
							firstExceptionReference,
							cancelled
					);
					cancelled.compareAndSet(false, sendChunkItemsResult.isCancelled());
				}
			}

			var firstException = firstExceptionReference.get();
			if (firstException != null) {
				throw firstException;
			}
			if (cancelled.get()) {
				return ConsumerResult.cancelNext();
			} else {
				return ConsumerResult.result();
			}
		}
	}

	private static <K1, K2, V> ConsumerResult sendChunkItems(VariableWrapper<Object[]> keys1,
			VariableWrapper<Object[]> keys2,
			VariableWrapper<Object[]> values,
			final int CHUNK_SIZE,
			IntWrapper count,
			CancellableTriConsumer<K1, K2, V> consumer,
			BlockingOnFullQueueExecutorServiceDecorator parallelExecutor,
			AtomicReference<CompletionException> firstExceptionReference,
			AtomicBoolean cancelled) {
		int itemsCount = CHUNK_SIZE - count.var;
		count.var = CHUNK_SIZE;
		Object[] keys1Copy = keys1.var;
		Object[] keys2Copy = keys2.var;
		Object[] valuesCopy = values.var;
		keys1.var = new Object[itemsCount];
		keys2.var = new Object[itemsCount];
		values.var = new Object[itemsCount];
		try {
			Supplier<ConsumerResult> action = () -> {
				for (int i = 0; i < itemsCount; i++) {
					try {
						//noinspection unchecked
						if (consumer.acceptCancellable((K1) keys1Copy[i], (K2) keys2Copy[i], (V) valuesCopy[i]).isCancelled()) {
							cancelled.set(true);
							return ConsumerResult.cancelNext();
						}
					} catch (Exception ex) {
						firstExceptionReference.compareAndSet(null, new CompletionException(ex));
						return ConsumerResult.cancelNext();
					}
				}
				return ConsumerResult.result();
			};
			if (parallelExecutor != null) {
				parallelExecutor.execute(action::get);
				return ConsumerResult.result();
			} else {
				return action.get();
			}
		} catch (RejectedExecutionException e) {
			throw new CompletionException(e);
		}
	}
}
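Note: a minimal usage sketch of the removed parallelize API. The data source and consumer body here are hypothetical; the ParallelUtils signatures match the deleted source above.

import java.util.List;
import java.util.function.Consumer;
import org.warp.commonutils.batch.ParallelUtils;

public class ParallelUtilsExample {
	public static void main(String[] args) {
		// Hypothetical data set; any source that can feed a Consumer<V> works.
		List<String> lines = List.of("a", "b", "c", "d");

		// The first argument adapts the source into a "consumer of consumers":
		// ParallelUtils drives it once, buffers items into groups of groupSize,
		// and dispatches each full group as one task on the bounded executor.
		ParallelUtils.parallelize(
				(Consumer<String> sink) -> lines.forEach(sink),
				50, // maxQueueSize: pending tasks before callers block
				4,  // parallelism: worker threads
				2,  // groupSize: items batched per task
				value -> System.out.println(Thread.currentThread().getName() + " -> " + value)
		);
	}
}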
Deleted: org/warp/commonutils/concurrency/atomicity/Atomic.java
@@ -1,13 +0,0 @@
package org.warp.commonutils.concurrency.atomicity;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * This element can be considered atomic
 */
@Retention(RetentionPolicy.SOURCE)
@Target({ElementType.FIELD, ElementType.METHOD, ElementType.TYPE})
public @interface Atomic {}
Deleted: org/warp/commonutils/concurrency/atomicity/NotAtomic.java
@@ -1,13 +0,0 @@
package org.warp.commonutils.concurrency.atomicity;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * This element cannot be considered atomic
 */
@Retention(RetentionPolicy.SOURCE)
@Target({ElementType.FIELD, ElementType.METHOD, ElementType.TYPE})
public @interface NotAtomic {}
Deleted: org/warp/commonutils/concurrency/executor/BlockingOnFullQueueExecutorServiceDecorator.java
@@ -1,155 +0,0 @@
package org.warp.commonutils.concurrency.executor;

import java.time.Duration;
import java.util.Collection;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.function.BiConsumer;
import java.util.function.Supplier;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

public class BlockingOnFullQueueExecutorServiceDecorator extends ExecutorServiceDecorator {

	private volatile boolean ignoreTaskLimit;

	@NotNull
	private final Duration timeout;

	private final int maximumTaskNumber;

	@NotNull
	private final Supplier<Integer> queueSizeSupplier;

	private final @Nullable BiConsumer<Boolean, Integer> queueSizeStatus;

	public BlockingOnFullQueueExecutorServiceDecorator(@NotNull final ExecutorService executor,
			final int maximumTaskNumber,
			@NotNull final Duration maximumTimeout,
			@NotNull Supplier<Integer> queueSizeSupplier,
			@Nullable BiConsumer<Boolean, Integer> queueSizeStatus) {
		super(executor);
		if (maximumTaskNumber < 0) {
			throw new IllegalArgumentException(String.format("At least zero tasks must be permitted, not '%d'", maximumTaskNumber));
		} else if (maximumTaskNumber == 0) {
			ignoreTaskLimit = true;
		}
		this.timeout = Objects.requireNonNull(maximumTimeout, "'maximumTimeout' must not be null");
		if (this.timeout.isNegative()) {
			throw new IllegalArgumentException("'maximumTimeout' must not be negative");
		}
		this.maximumTaskNumber = maximumTaskNumber;
		this.queueSizeSupplier = queueSizeSupplier;
		this.queueSizeStatus = queueSizeStatus;
	}

	public BlockingOnFullQueueExecutorServiceDecorator(@NotNull final ExecutorService executor,
			final int maximumTaskNumber,
			@NotNull final Duration maximumTimeout,
			@NotNull Supplier<Integer> queueSizeSupplier) {
		this(executor, maximumTaskNumber, maximumTimeout, queueSizeSupplier, null);
	}

	private void preExecute(Object command) {
		Objects.requireNonNull(command, "'command' must not be null");
	}

	@Override
	public final void execute(final @NotNull Runnable command) {
		preExecute(command);

		super.execute(new PermitReleasingRunnableDecorator(command, this::updateQueue));
	}

	@NotNull
	@Override
	public <T> Future<T> submit(@NotNull Callable<T> task) {
		preExecute(task);

		return super.submit(new PermitReleasingCallableDecorator<>(task, this::updateQueue));
	}

	@NotNull
	@Override
	public <T> Future<T> submit(@NotNull Runnable task, T result) {
		preExecute(task);

		return super.submit(new PermitReleasingRunnableDecorator(task, this::updateQueue), result);
	}

	@NotNull
	@Override
	public Future<?> submit(@NotNull Runnable task) {
		preExecute(task);

		return super.submit(new PermitReleasingRunnableDecorator(task, this::updateQueue));
	}

	private void updateQueue(boolean beforeRunning) {
		var queueSize = queueSizeSupplier.get() + (beforeRunning ? 1 : 0);
		var full = !ignoreTaskLimit && queueSize >= maximumTaskNumber;
		if (queueSizeStatus != null) queueSizeStatus.accept(full, queueSize);
	}

	@Override
	public void shutdown() {
		this.ignoreTaskLimit = true;
		super.shutdown();
	}

	void testShutdown() {
		super.shutdown();
	}

	@NotNull
	@Override
	public List<Runnable> shutdownNow() {
		this.ignoreTaskLimit = true;
		return super.shutdownNow();
	}

	@Override
	public boolean isShutdown() {
		return super.isShutdown();
	}

	@Override
	public boolean isTerminated() {
		return super.isTerminated();
	}

	@Override
	public boolean awaitTermination(long timeout, @NotNull TimeUnit unit) throws InterruptedException {
		return super.awaitTermination(timeout, unit);
	}

	@NotNull
	@Override
	public <T> List<Future<T>> invokeAll(@NotNull Collection<? extends Callable<T>> tasks) {
		throw new UnsupportedOperationException("invokeAll(tasks) is not supported");
	}

	@NotNull
	@Override
	public <T> List<Future<T>> invokeAll(@NotNull Collection<? extends Callable<T>> tasks,
			long timeout,
			@NotNull TimeUnit unit) {
		throw new UnsupportedOperationException("invokeAll(tasks, timeout, unit) is not supported");
	}

	@NotNull
	@Override
	public <T> T invokeAny(@NotNull Collection<? extends Callable<T>> tasks) {
		throw new UnsupportedOperationException("invokeAny(tasks) is not supported");
	}

	@Override
	public <T> T invokeAny(@NotNull Collection<? extends Callable<T>> tasks, long timeout, @NotNull TimeUnit unit) {
		throw new UnsupportedOperationException("invokeAny(tasks, timeout, unit) is not supported");
	}

	@Override
	public final String toString() {
		return String.format("%s[timeout='%s',delegate='%s']", getClass().getSimpleName(),
				this.timeout, super.toString());
	}
}
Deleted: org/warp/commonutils/concurrency/executor/BoundedExecutorService.java
@@ -1,96 +0,0 @@
package org.warp.commonutils.concurrency.executor;

import java.time.Duration;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.function.BiConsumer;
import org.jetbrains.annotations.Nullable;

public class BoundedExecutorService {

	private static final int MAX_BLOCKING_QUEUE_SIZE = 50000;

	private BoundedExecutorService() {

	}

	@Deprecated
	public static BlockingOnFullQueueExecutorServiceDecorator createUnbounded(
			int maxPoolSize,
			long keepAliveTime,
			TimeUnit unit,
			@Nullable BiConsumer<Boolean, Integer> queueSizeStatus) {
		return create(0, maxPoolSize, keepAliveTime, unit, Executors.defaultThreadFactory(), queueSizeStatus);
	}

	public static BlockingOnFullQueueExecutorServiceDecorator createUnbounded(
			int maxPoolSize,
			long keepAliveTime,
			TimeUnit unit,
			ThreadFactory threadFactory,
			@Nullable BiConsumer<Boolean, Integer> queueSizeStatus) {
		return createCustom(0, maxPoolSize, keepAliveTime, unit, threadFactory, Duration.ofDays(100000), queueSizeStatus, new LinkedBlockingQueue<>());
	}

	public static BlockingOnFullQueueExecutorServiceDecorator createUnbounded(
			int maxPoolSize,
			long keepAliveTime,
			TimeUnit unit,
			ThreadFactory threadFactory,
			@Nullable BiConsumer<Boolean, Integer> queueSizeStatus,
			BlockingQueue<Runnable> queue) {
		return createCustom(0, maxPoolSize, keepAliveTime, unit, threadFactory, Duration.ofDays(100000), queueSizeStatus, queue);
	}

	@Deprecated
	public static BlockingOnFullQueueExecutorServiceDecorator create(
			int maxQueueSize,
			int maxPoolSize,
			long keepAliveTime,
			TimeUnit unit,
			@Nullable BiConsumer<Boolean, Integer> queueSizeStatus) {
		return create(maxQueueSize, maxPoolSize, keepAliveTime, unit, Executors.defaultThreadFactory(), queueSizeStatus);
	}

	public static BlockingOnFullQueueExecutorServiceDecorator create(
			int maxQueueSize,
			int maxPoolSize,
			long keepAliveTime,
			TimeUnit unit,
			ThreadFactory threadFactory,
			@Nullable BiConsumer<Boolean, Integer> queueSizeStatus) {
		return createCustom(maxQueueSize, maxPoolSize, keepAliveTime, unit, threadFactory, Duration.ofDays(100000), queueSizeStatus, new LinkedBlockingQueue<>(maxQueueSize));
	}

	public static BlockingOnFullQueueExecutorServiceDecorator createCustom(
			int maxQueueSize,
			int maxPoolSize,
			long keepAliveTime,
			TimeUnit unit,
			ThreadFactory threadFactory,
			Duration queueItemTtl,
			@Nullable BiConsumer<Boolean, Integer> queueSizeStatus,
			BlockingQueue<Runnable> queue) {
		ThreadPoolExecutor threadPoolExecutor = new ThreadPoolExecutor(maxPoolSize,
				maxPoolSize,
				keepAliveTime,
				unit,
				queue,
				threadFactory
		);
		if (keepAliveTime > 0) {
			threadPoolExecutor.allowCoreThreadTimeOut(true);
		}
		threadPoolExecutor.setRejectedExecutionHandler(new ThreadPoolExecutor.CallerRunsPolicy());
		return new BlockingOnFullQueueExecutorServiceDecorator(threadPoolExecutor,
				maxQueueSize,
				queueItemTtl,
				queue::size,
				queueSizeStatus
		);
	}
}
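Note: a minimal sketch of how the removed factory could be used. The pool sizes and task body are hypothetical; create(...) and the (isFull, queueSize) callback match the deleted source above.

import java.util.concurrent.TimeUnit;
import org.warp.commonutils.concurrency.executor.BoundedExecutorService;
import org.warp.commonutils.type.ShortNamedThreadFactory;

public class BoundedExecutorExample {
	public static void main(String[] args) throws InterruptedException {
		// At most 4 threads and 100 queued tasks; the callback observes
		// (isFull, queueSize) each time a wrapped task starts or finishes.
		var executor = BoundedExecutorService.create(100,
				4,
				0,
				TimeUnit.MILLISECONDS,
				new ShortNamedThreadFactory("Example"),
				(full, size) -> { if (full) System.err.println("queue full: " + size); }
		);
		for (int i = 0; i < 1000; i++) {
			final int n = i;
			executor.execute(() -> System.out.println("task " + n));
		}
		executor.shutdown();
		executor.awaitTermination(1, TimeUnit.MINUTES);
	}
}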
Deleted: org/warp/commonutils/concurrency/executor/CallableDecorator.java
@@ -1,18 +0,0 @@
package org.warp.commonutils.concurrency.executor;

import java.util.Objects;
import java.util.concurrent.Callable;

public abstract class CallableDecorator<T> implements Callable<T> {

	private final Callable<T> callable;

	public CallableDecorator(Callable<T> callable) {
		this.callable = Objects.requireNonNull(callable);
	}

	@Override
	public T call() throws Exception {
		return callable.call();
	}
}
Deleted: org/warp/commonutils/concurrency/executor/ConcurrencySegment.java
@@ -1,40 +0,0 @@
package org.warp.commonutils.concurrency.executor;

import java.util.HashMap;
import java.util.Map;
import java.util.function.Supplier;

final class ConcurrencySegment<K, V> {

	private final Map<K, Entry> store = new HashMap<K, Entry>();
	private final Supplier<V> valuesSupplier;

	ConcurrencySegment(Supplier<V> valuesSupplier) {
		this.valuesSupplier = valuesSupplier;
	}

	synchronized V getValue(K key) {
		Entry current = store.get(key);
		if (current == null) {
			current = new Entry();
			store.put(key, current);
		} else {
			current.users++;
		}
		return current.value;
	}

	synchronized void releaseKey(K key) {
		Entry current = store.get(key);
		if (current.users == 1) {
			store.remove(key);
		} else {
			current.users--;
		}
	}

	private class Entry {

		private int users = 1;
		private V value = valuesSupplier.get();
	}
}
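Note: the segment reference-counts one entry per key, so every getValue must be paired with a releaseKey. A sketch of the pattern the per-key executors above follow (the caller code and key are hypothetical, and must live in the same package since the class is package-private):

// Borrow a per-key value, always releasing in finally so the entry
// is removed from the map once the last user releases it.
ConcurrencySegment<String, Object> segment = new ConcurrencySegment<>(Object::new);
Object lock = segment.getValue("some-key"); // user count becomes >= 1
try {
	synchronized (lock) {
		// critical section for "some-key"
	}
} finally {
	segment.releaseKey("some-key"); // last release removes the entry
}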
Deleted: org/warp/commonutils/concurrency/executor/ExecutorDecorator.java
@@ -1,18 +0,0 @@
package org.warp.commonutils.concurrency.executor;

import java.util.Objects;
import java.util.concurrent.Executor;
import org.jetbrains.annotations.NotNull;

public abstract class ExecutorDecorator implements Executor {

	private final Executor executor;

	public ExecutorDecorator(Executor executor) {
		this.executor = Objects.requireNonNull(executor);
	}

	@Override
	public void execute(@NotNull Runnable runnable) {
		executor.execute(runnable);
	}
}
Deleted: org/warp/commonutils/concurrency/executor/ExecutorServiceDecorator.java
@@ -1,97 +0,0 @@
package org.warp.commonutils.concurrency.executor;

import java.util.Collection;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.jetbrains.annotations.NotNull;

public abstract class ExecutorServiceDecorator implements ExecutorService {

	private ExecutorService executorService;

	public ExecutorServiceDecorator(ExecutorService executorService) {
		this.executorService = Objects.requireNonNull(executorService);
	}

	@Override
	public void shutdown() {
		executorService.shutdown();
	}

	@NotNull
	@Override
	public List<Runnable> shutdownNow() {
		return executorService.shutdownNow();
	}

	@Override
	public boolean isShutdown() {
		return executorService.isShutdown();
	}

	@Override
	public boolean isTerminated() {
		return executorService.isTerminated();
	}

	@Override
	public boolean awaitTermination(long l, @NotNull TimeUnit timeUnit) throws InterruptedException {
		return executorService.awaitTermination(l, timeUnit);
	}

	@NotNull
	@Override
	public <T> Future<T> submit(@NotNull Callable<T> callable) {
		return executorService.submit(callable);
	}

	@NotNull
	@Override
	public <T> Future<T> submit(@NotNull Runnable runnable, T t) {
		return executorService.submit(runnable, t);
	}

	@NotNull
	@Override
	public Future<?> submit(@NotNull Runnable runnable) {
		return executorService.submit(runnable);
	}

	@NotNull
	@Override
	public <T> List<Future<T>> invokeAll(@NotNull Collection<? extends Callable<T>> collection)
			throws InterruptedException {
		return executorService.invokeAll(collection);
	}

	@NotNull
	@Override
	public <T> List<Future<T>> invokeAll(@NotNull Collection<? extends Callable<T>> collection,
			long l,
			@NotNull TimeUnit timeUnit) throws InterruptedException {
		return executorService.invokeAll(collection, l, timeUnit);
	}

	@NotNull
	@Override
	public <T> T invokeAny(@NotNull Collection<? extends Callable<T>> collection)
			throws InterruptedException, ExecutionException {
		return executorService.invokeAny(collection);
	}

	@Override
	public <T> T invokeAny(@NotNull Collection<? extends Callable<T>> collection, long l, @NotNull TimeUnit timeUnit)
			throws InterruptedException, ExecutionException, TimeoutException {
		return executorService.invokeAny(collection, l, timeUnit);
	}

	@Override
	public void execute(@NotNull Runnable runnable) {
		executorService.execute(runnable);
	}
}
Deleted: org/warp/commonutils/concurrency/executor/PerKeyReadWriteExecutor.java
@@ -1,108 +0,0 @@
package org.warp.commonutils.concurrency.executor;

import java.io.Closeable;
import java.io.IOException;
import java.util.concurrent.locks.Lock;
import java.util.function.Supplier;
import org.warp.commonutils.functional.IORunnable;
import org.warp.commonutils.functional.IOSupplier;
import org.warp.commonutils.random.HashUtil;

/**
 * An Executor which executes tasks on the caller thread.
 * The tasks will be executed synchronously on a <b>per-key basis</b>.
 * By saying <b>per-key</b>, we mean that thread safety is guaranteed for threads calling it with equal keys.
 * When two threads call the executor with equal keys, the executions will never overlap each other.
 * On the other hand, the executor is implemented so that calls from different threads, with keys that are not equal, will be executed concurrently with minimal contention between the calls.
 * Calling threads might be suspended.
 * Calling execute from different threads with equal keys has the same memory semantics as locking and releasing a java.util.concurrent.locks.{@link Lock}.
 */
public final class PerKeyReadWriteExecutor<KEY_TYPE> extends ReadWriteExecutor implements Closeable {

	private static final int BASE_CONCURRENCY_LEVEL = 32;

	private final int concurrencyLevel;

	private final ConcurrencySegment<KEY_TYPE, ReadWriteExecutor>[] segments;

	private boolean closed = false;

	public PerKeyReadWriteExecutor() {
		this(BASE_CONCURRENCY_LEVEL);
	}

	@SuppressWarnings({"unchecked"})
	public PerKeyReadWriteExecutor(int concurrencyLevel) {
		super();
		this.concurrencyLevel = concurrencyLevel;
		segments = (ConcurrencySegment<KEY_TYPE, ReadWriteExecutor>[]) new ConcurrencySegment[concurrencyLevel];
		for (int i = 0; i < concurrencyLevel; i++) {
			segments[i] = new ConcurrencySegment<>(ReadWriteExecutor::new);
		}
	}

	public void execute(KEY_TYPE key, ReadWriteExecutor.LockMode lockMode, Runnable task) {
		super.execute(LockMode.READ, () -> {
			if (closed) throw new IllegalStateException(PerKeyReadWriteExecutor.class.getSimpleName() + " is closed");
			int segmentIndex = HashUtil.boundedHash(key, concurrencyLevel);
			ConcurrencySegment<KEY_TYPE, ReadWriteExecutor> s = segments[segmentIndex];
			ReadWriteExecutor executor = s.getValue(key);
			try {
				executor.execute(lockMode, task);
			} finally {
				s.releaseKey(key);
			}
		});
	}

	public void executeIO(KEY_TYPE key, ReadWriteExecutor.LockMode lockMode, IORunnable task) throws IOException {
		super.executeIO(LockMode.READ, () -> {
			if (closed) throw new IllegalStateException(PerKeyReadWriteExecutor.class.getSimpleName() + " is closed");
			int segmentIndex = HashUtil.boundedHash(key, concurrencyLevel);
			ConcurrencySegment<KEY_TYPE, ReadWriteExecutor> s = segments[segmentIndex];
			ReadWriteExecutor executor = s.getValue(key);
			try {
				executor.executeIO(lockMode, task);
			} finally {
				s.releaseKey(key);
			}
		});
	}

	public <R> R execute(KEY_TYPE key, ReadWriteExecutor.LockMode lockMode, Supplier<R> task) {
		return super.execute(LockMode.READ, () -> {
			if (closed) throw new IllegalStateException(PerKeyReadWriteExecutor.class.getSimpleName() + " is closed");
			int segmentIndex = HashUtil.boundedHash(key, concurrencyLevel);
			ConcurrencySegment<KEY_TYPE, ReadWriteExecutor> s = segments[segmentIndex];
			ReadWriteExecutor executor = s.getValue(key);
			try {
				return executor.execute(lockMode, task);
			} finally {
				s.releaseKey(key);
			}
		});
	}

	public <R> R executeIO(KEY_TYPE key, ReadWriteExecutor.LockMode lockMode, IOSupplier<R> task) throws IOException {
		return super.executeIO(LockMode.READ, () -> {
			if (closed) throw new IllegalStateException(PerKeyReadWriteExecutor.class.getSimpleName() + " is closed");
			int segmentIndex = HashUtil.boundedHash(key, concurrencyLevel);
			ConcurrencySegment<KEY_TYPE, ReadWriteExecutor> s = segments[segmentIndex];
			ReadWriteExecutor executor = s.getValue(key);
			try {
				return executor.executeIO(lockMode, task);
			} finally {
				s.releaseKey(key);
			}
		});
	}

	@Override
	public void close() {
		super.execute(LockMode.WRITE, () -> {
			closed = true;
		});
	}
}
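Note: a minimal usage sketch of the removed per-key executor. The keys and task bodies are hypothetical; the constructor and execute(key, lockMode, task) signature match the deleted source above, and the READ/WRITE semantics are inferred from the ReadWriteExecutor naming.

PerKeyReadWriteExecutor<String> executor = new PerKeyReadWriteExecutor<>();

// Tasks for the same key serialize according to the lock mode: READ tasks
// may run together, while a WRITE task for that key runs exclusively.
executor.execute("user:42", ReadWriteExecutor.LockMode.READ,
		() -> System.out.println("reading user 42"));
executor.execute("user:42", ReadWriteExecutor.LockMode.WRITE,
		() -> System.out.println("updating user 42"));

// Tasks for unequal keys run concurrently, contending only on
// segment bookkeeping.
executor.execute("user:43", ReadWriteExecutor.LockMode.WRITE,
		() -> System.out.println("updating user 43"));

executor.close();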
@ -1,106 +0,0 @@
|
|||||||
|
|
||||||
package org.warp.commonutils.concurrency.executor;
|
|
||||||
|
|
||||||
import java.io.Closeable;
|
|
||||||
import java.io.IOException;
|
|
||||||
import java.util.concurrent.locks.Lock;
|
|
||||||
import java.util.function.Supplier;
|
|
||||||
import org.warp.commonutils.functional.IORunnable;
|
|
||||||
import org.warp.commonutils.functional.IOSupplier;
|
|
||||||
import org.warp.commonutils.random.HashUtil;
|
|
||||||
|
|
||||||
/**
 * An Executor which executes tasks on the caller thread.
 * The tasks will be executed synchronously on a <b>per-key basis</b>.
 * By saying <b>per-key</b>, we mean that thread safety is guaranteed for threads calling it with equal keys.
 * When two threads call the executor with equal keys, the executions will never overlap.
 * On the other hand, the executor is implemented so that calls from different threads, with keys that are not equal, are executed concurrently with minimal contention between the calls.
 * Calling threads might be suspended.
 * Calling execute from different threads with equal keys has the same memory semantics as locking and releasing a {@link java.util.concurrent.locks.Lock}.
 */
public final class PerKeySynchronizedExecutor<KEY_TYPE> extends ReadWriteExecutor implements Closeable {

	private static final int BASE_CONCURRENCY_LEVEL = 32;

	private final int concurrencyLevel;

	private final ConcurrencySegment<KEY_TYPE, SynchronizedExecutor>[] segments;

	private boolean closed = false;

	public PerKeySynchronizedExecutor() {
		this(BASE_CONCURRENCY_LEVEL);
	}

	@SuppressWarnings({"unchecked"})
	public PerKeySynchronizedExecutor(int concurrencyLevel) {
		this.concurrencyLevel = concurrencyLevel;
		segments = (ConcurrencySegment<KEY_TYPE, SynchronizedExecutor>[]) new ConcurrencySegment[concurrencyLevel];
		for (int i = 0; i < concurrencyLevel; i++) {
			segments[i] = new ConcurrencySegment<>(SynchronizedExecutor::new);
		}
	}

	public void execute(KEY_TYPE key, Runnable task) {
		super.execute(LockMode.READ, () -> {
			if (closed) throw new IllegalStateException(PerKeySynchronizedExecutor.class.getSimpleName() + " is closed");
			int segmentIndex = HashUtil.boundedHash(key, concurrencyLevel);
			ConcurrencySegment<KEY_TYPE, SynchronizedExecutor> s = segments[segmentIndex];
			SynchronizedExecutor executor = s.getValue(key);
			try {
				executor.execute(task);
			} finally {
				s.releaseKey(key);
			}
		});
	}

	public void executeIO(KEY_TYPE key, IORunnable task) throws IOException {
		super.executeIO(LockMode.READ, () -> {
			if (closed) throw new IllegalStateException(PerKeySynchronizedExecutor.class.getSimpleName() + " is closed");
			int segmentIndex = HashUtil.boundedHash(key, concurrencyLevel);
			ConcurrencySegment<KEY_TYPE, SynchronizedExecutor> s = segments[segmentIndex];
			SynchronizedExecutor executor = s.getValue(key);
			try {
				executor.executeIO(task);
			} finally {
				s.releaseKey(key);
			}
		});
	}

	public <R> R execute(KEY_TYPE key, Supplier<R> task) {
		return super.execute(LockMode.READ, () -> {
			if (closed) throw new IllegalStateException(PerKeySynchronizedExecutor.class.getSimpleName() + " is closed");
			int segmentIndex = HashUtil.boundedHash(key, concurrencyLevel);
			ConcurrencySegment<KEY_TYPE, SynchronizedExecutor> s = segments[segmentIndex];
			SynchronizedExecutor executor = s.getValue(key);
			try {
				return executor.execute(task);
			} finally {
				s.releaseKey(key);
			}
		});
	}

	public <R> R executeIO(KEY_TYPE key, IOSupplier<R> task) throws IOException {
		return super.executeIO(LockMode.READ, () -> {
			if (closed) throw new IllegalStateException(PerKeySynchronizedExecutor.class.getSimpleName() + " is closed");
			int segmentIndex = HashUtil.boundedHash(key, concurrencyLevel);
			ConcurrencySegment<KEY_TYPE, SynchronizedExecutor> s = segments[segmentIndex];
			SynchronizedExecutor executor = s.getValue(key);
			try {
				return executor.executeIO(task);
			} finally {
				s.releaseKey(key);
			}
		});
	}

	@Override
	public void close() {
		super.execute(LockMode.WRITE, () -> {
			closed = true;
		});
	}
}
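// Usage sketch (hypothetical caller; the key values and task bodies are
// illustrative assumptions, not part of this library):
class PerKeySynchronizedExecutorExample {
	public static void main(String[] args) {
		PerKeySynchronizedExecutor<String> executor = new PerKeySynchronizedExecutor<>();
		// Tasks submitted with equal keys never overlap:
		executor.execute("user-1", () -> System.out.println("first task for user-1"));
		executor.execute("user-1", () -> System.out.println("runs strictly after the first"));
		// Tasks with different keys may run concurrently:
		executor.execute("user-2", () -> System.out.println("independent of user-1"));
		executor.close();
	}
}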
@ -1,34 +0,0 @@
package org.warp.commonutils.concurrency.executor;

import java.util.concurrent.Callable;
import org.jetbrains.annotations.NotNull;

public final class PermitReleasingCallableDecorator<T> extends CallableDecorator<T> {

	@NotNull
	private final QueueSizeUpdater queueSizeUpdater;

	PermitReleasingCallableDecorator(@NotNull final Callable<T> task,
			@NotNull final QueueSizeUpdater queueSizeUpdater) {
		super(task);
		this.queueSizeUpdater = queueSizeUpdater;
	}

	@Override
	public T call() throws Exception {
		try {
			queueSizeUpdater.update(true);
		} finally {
			try {
				return super.call();
			} finally {
				queueSizeUpdater.update(false);
			}
		}
	}

	@Override
	public final String toString() {
		return String.format("%s[delegate='%s']", getClass().getSimpleName(), super.toString());
	}
}
@ -1,32 +0,0 @@
package org.warp.commonutils.concurrency.executor;

import org.jetbrains.annotations.NotNull;

public final class PermitReleasingRunnableDecorator extends RunnableDecorator {

	@NotNull
	private final QueueSizeUpdater queueSizeUpdater;

	PermitReleasingRunnableDecorator(@NotNull final Runnable task,
			@NotNull final QueueSizeUpdater queueSizeUpdater) {
		super(task);
		this.queueSizeUpdater = queueSizeUpdater;
	}

	@Override
	public void run() {
		try {
			queueSizeUpdater.update(true);
		} finally {
			try {
				super.run();
			} finally {
				queueSizeUpdater.update(false);
			}
		}
	}

	@Override
	public final String toString() {
		return String.format("%s[delegate='%s']", getClass().getSimpleName(), super.toString());
	}
}
@ -1,5 +0,0 @@
package org.warp.commonutils.concurrency.executor;

public interface QueueSizeUpdater {

	void update(boolean isBeforeRunning);
}
@ -1,45 +0,0 @@
package org.warp.commonutils.concurrency.executor;

import java.io.IOException;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.function.Supplier;
import org.warp.commonutils.functional.IORunnable;
import org.warp.commonutils.functional.IOSupplier;
import org.warp.commonutils.locks.LockUtils;

/**
 * An Executor which executes tasks on the caller thread.
 * Tasks executed in {@code READ} mode may run concurrently with each other; tasks executed in {@code WRITE} mode run exclusively, never overlapping any other task.
 * Calling threads might be suspended.
 * Executing a task has the same memory semantics as locking and releasing a {@link Lock}.
 */
public class ReadWriteExecutor {

	private final ReentrantReadWriteLock lock;

	public ReadWriteExecutor() {
		this.lock = new ReentrantReadWriteLock();
	}

	public void execute(LockMode lockMode, Runnable task) {
		LockUtils.lock(lockMode == LockMode.READ ? lock.readLock() : lock.writeLock(), task);
	}

	public void executeIO(LockMode lockMode, IORunnable task) throws IOException {
		LockUtils.lockIO(lockMode == LockMode.READ ? lock.readLock() : lock.writeLock(), task);
	}

	public <R> R execute(LockMode lockMode, Supplier<R> task) {
		return LockUtils.lock(lockMode == LockMode.READ ? lock.readLock() : lock.writeLock(), task);
	}

	public <R> R executeIO(LockMode lockMode, IOSupplier<R> task) throws IOException {
		return LockUtils.lockIO(lockMode == LockMode.READ ? lock.readLock() : lock.writeLock(), task);
	}

	public enum LockMode {
		READ,
		WRITE
	}
}
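// Usage sketch (hypothetical caller; task bodies are illustrative assumptions):
class ReadWriteExecutorExample {
	public static void main(String[] args) {
		ReadWriteExecutor executor = new ReadWriteExecutor();
		// READ-mode tasks may run concurrently with each other:
		executor.execute(ReadWriteExecutor.LockMode.READ, () -> System.out.println("shared read"));
		// WRITE-mode tasks run exclusively, blocking until all readers finish:
		executor.execute(ReadWriteExecutor.LockMode.WRITE, () -> System.out.println("exclusive write"));
	}
}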
@ -1,17 +0,0 @@
package org.warp.commonutils.concurrency.executor;

import java.util.Objects;

public abstract class RunnableDecorator implements Runnable {

	private final Runnable runnable;

	public RunnableDecorator(Runnable runnable) {
		this.runnable = Objects.requireNonNull(runnable);
	}

	@Override
	public void run() {
		runnable.run();
	}
}
@ -1,51 +0,0 @@
package org.warp.commonutils.concurrency.executor;

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.locks.StampedLock;
import org.warp.commonutils.concurrency.atomicity.Atomic;

@Atomic
public class ScheduledTaskLifecycle {

	private final StampedLock lock;
	private final ConcurrentHashMap<ScheduledFuture<?>, Object> tasks = new ConcurrentHashMap<>();

	public ScheduledTaskLifecycle() {
		this.lock = new StampedLock();
	}

	/**
	 * Register a scheduled task
	 */
	public void registerScheduledTask(ScheduledFuture<?> task) {
		this.tasks.put(task, new Object());
	}

	/**
	 * Mark this task as running.
	 * After calling this method, please call {@link #endScheduledTask()} inside a finally block!
	 */
	public void startScheduledTask() {
		this.lock.readLock();
	}

	/**
	 * Mark this task as ended. Must be called after {@link #startScheduledTask()}
	 */
	public void endScheduledTask() {
		this.lock.tryUnlockRead();
	}

	/**
	 * Cancel all scheduled tasks and wait for all running tasks to finish
	 */
	public void cancelAndWait() {
		tasks.forEach((task, obj) -> {
			task.cancel(false);
		});

		// Acquire and release the write lock: it can only be taken once every
		// running task has released its read lock, so this waits for all tasks to end
		lock.unlockWrite(lock.writeLock());
	}
}
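// Usage sketch (hypothetical caller; the scheduler, period and task body are assumptions):
class ScheduledTaskLifecycleExample {
	public static void main(String[] args) {
		var scheduler = java.util.concurrent.Executors.newSingleThreadScheduledExecutor();
		ScheduledTaskLifecycle lifecycle = new ScheduledTaskLifecycle();
		var future = scheduler.scheduleAtFixedRate(() -> {
			lifecycle.startScheduledTask();
			try {
				System.out.println("periodic work");
			} finally {
				lifecycle.endScheduledTask(); // always paired with startScheduledTask()
			}
		}, 0, 1, java.util.concurrent.TimeUnit.SECONDS);
		lifecycle.registerScheduledTask(future);
		lifecycle.cancelAndWait(); // cancels the schedule, then waits for a running iteration
		scheduler.shutdown();
	}
}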
@ -1,101 +0,0 @@
package org.warp.commonutils.concurrency.executor;

import java.util.Collection;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.function.Function;
import org.jetbrains.annotations.NotNull;

public abstract class SimplerExecutorServiceDecorator extends ExecutorServiceDecorator {

	private final Executor executorDecorator;

	public SimplerExecutorServiceDecorator(ExecutorService executorService,
			Function<Executor, Executor> executorDecoratorInitializer) {
		super(executorService);
		this.executorDecorator = executorDecoratorInitializer.apply(executorService);
	}

	@Override
	public void shutdown() {
		super.shutdown();
	}

	@NotNull
	@Override
	public List<Runnable> shutdownNow() {
		return super.shutdownNow();
	}

	@Override
	public boolean isShutdown() {
		return super.isShutdown();
	}

	@Override
	public boolean isTerminated() {
		return super.isTerminated();
	}

	@Override
	public boolean awaitTermination(long l, @NotNull TimeUnit timeUnit) throws InterruptedException {
		return super.awaitTermination(l, timeUnit);
	}

	@NotNull
	@Override
	public <T> Future<T> submit(@NotNull Callable<T> callable) {
		return super.submit(callable);
	}

	@NotNull
	@Override
	public <T> Future<T> submit(@NotNull Runnable runnable, T t) {
		return super.submit(runnable, t);
	}

	@NotNull
	@Override
	public Future<?> submit(@NotNull Runnable runnable) {
		return super.submit(runnable);
	}

	@NotNull
	@Override
	public <T> List<Future<T>> invokeAll(@NotNull Collection<? extends Callable<T>> collection)
			throws InterruptedException {
		return super.invokeAll(collection);
	}

	@NotNull
	@Override
	public <T> List<Future<T>> invokeAll(@NotNull Collection<? extends Callable<T>> collection,
			long l,
			@NotNull TimeUnit timeUnit) throws InterruptedException {
		return super.invokeAll(collection, l, timeUnit);
	}

	@NotNull
	@Override
	public <T> T invokeAny(@NotNull Collection<? extends Callable<T>> collection)
			throws InterruptedException, ExecutionException {
		return super.invokeAny(collection);
	}

	@Override
	public <T> T invokeAny(@NotNull Collection<? extends Callable<T>> collection, long l, @NotNull TimeUnit timeUnit)
			throws InterruptedException, ExecutionException, TimeoutException {
		return super.invokeAny(collection, l, timeUnit);
	}

	@Override
	public void execute(@NotNull Runnable runnable) {
		executorDecorator.execute(runnable);
	}
}
@ -1,63 +0,0 @@
package org.warp.commonutils.concurrency.executor;

import java.io.IOException;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.Supplier;
import org.warp.commonutils.functional.IORunnable;
import org.warp.commonutils.functional.IOSupplier;

/**
 * An Executor which executes tasks on the caller thread.
 * The tasks will be executed synchronously, so no overlapping between two tasks running on different threads will ever occur.
 * Calling threads might be suspended.
 * Executing a task has the same memory semantics as locking and releasing a {@link Lock}.
 */
public final class SynchronizedExecutor {

	private final Lock lock;

	public SynchronizedExecutor() {
		this.lock = new ReentrantLock();
	}

	SynchronizedExecutor(Lock lock) {
		this.lock = lock;
	}

	public void execute(Runnable task) {
		lock.lock();
		try {
			task.run();
		} finally {
			lock.unlock();
		}
	}

	public void executeIO(IORunnable task) throws IOException {
		lock.lock();
		try {
			task.run();
		} finally {
			lock.unlock();
		}
	}

	public <R> R execute(Supplier<R> task) {
		lock.lock();
		try {
			return task.get();
		} finally {
			lock.unlock();
		}
	}

	public <R> R executeIO(IOSupplier<R> task) throws IOException {
		lock.lock();
		try {
			return task.get();
		} finally {
			lock.unlock();
		}
	}
}
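// Usage sketch (hypothetical caller; values are illustrative assumptions):
class SynchronizedExecutorExample {
	public static void main(String[] args) {
		SynchronizedExecutor executor = new SynchronizedExecutor();
		executor.execute(() -> System.out.println("runs under the internal lock"));
		int value = executor.execute(() -> 42); // the Supplier overload returns a value
		System.out.println(value);
	}
}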
@ -1,487 +0,0 @@
package org.warp.commonutils.concurrency.future;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Optional;
import java.util.PriorityQueue;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionStage;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import org.warp.commonutils.functional.BiCompletableFunction;
import org.warp.commonutils.functional.CompletableFunction;
import org.warp.commonutils.functional.IOCompletableFunction;
import org.warp.commonutils.functional.IOSupplier;
import org.warp.commonutils.functional.TriCompletableFunction;
import org.warp.commonutils.type.FloatPriorityQueue;
import org.warp.commonutils.type.ScoredValue;

public class CompletableFutureUtils {

	/**
	 * Safely get a CompletableFuture asynchronously
	 */
	public static <T> CompletableFuture<T> getCompletableFutureAsync(Supplier<CompletableFuture<T>> completableFutureSupplier, Executor executor) {
		CompletableFuture<T> cf = new CompletableFuture<>();
		executor.execute(() -> {
			try {
				var cf2 = completableFutureSupplier.get();
				cf2.whenComplete((result, error) -> {
					if (error == null) {
						cf.complete(result);
					} else {
						cf.completeExceptionally(error);
					}
				});
			} catch (Exception ex) {
				cf.completeExceptionally(ex);
			}
		});
		return cf;
	}

	/**
	 * Safely get a CompletableFuture or a FailedFuture
	 */
	public static <T> CompletableFuture<T> getCompletableFuture(Supplier<CompletableFuture<T>> completableFutureSupplier) {
		CompletableFuture<T> cf;
		try {
			cf = completableFutureSupplier.get();
		} catch (Exception ex) {
			cf = CompletableFuture.failedFuture(ex);
		}
		return cf;
	}

	/**
	 * Safely get a CompletableFuture or a FailedFuture
	 */
	public static <F, T> CompletableFuture<T> getCompletableFuture(CompletableFunction<F, T> completableFutureFunction, F value) {
		return getCompletableFuture(() -> completableFutureFunction.apply(value));
	}

	/**
	 * Safely get a CompletableFuture or a FailedFuture
	 */
	public static <F, T> CompletableFuture<T> getCompletableFutureSupply(CompletableFunction<F, T> completableFutureFunction, Supplier<F> valueSupplier) {
		return getCompletableFuture(() -> completableFutureFunction.apply(valueSupplier.get()));
	}

	/**
	 * Safely get a CompletableFuture or a FailedFuture
	 */
	public static <F1, F2, T> CompletableFuture<T> getCompletableFuture(BiCompletableFunction<F1, F2, T> completableFutureFunction, F1 value1, F2 value2) {
		return getCompletableFuture(() -> completableFutureFunction.apply(value1, value2));
	}

	/**
	 * Safely get a CompletableFuture or a FailedFuture
	 */
	public static <F1, F2, T> CompletableFuture<T> getCompletableFutureSupply(BiCompletableFunction<F1, F2, T> completableFutureFunction, Supplier<F1> value1Supplier, Supplier<F2> value2Supplier) {
		return getCompletableFuture(() -> completableFutureFunction.apply(value1Supplier.get(), value2Supplier.get()));
	}

	/**
	 * Safely get a CompletableFuture or a FailedFuture
	 */
	public static <F1, F2, F3, T> CompletableFuture<T> getCompletableFuture(TriCompletableFunction<F1, F2, F3, T> completableFutureFunction, F1 value1, F2 value2, F3 value3) {
		return getCompletableFuture(() -> completableFutureFunction.apply(value1, value2, value3));
	}

	/**
	 * Safely get a CompletableFuture or a FailedFuture
	 */
	public static <F1, F2, F3, T> CompletableFuture<T> getCompletableFutureSupply(TriCompletableFunction<F1, F2, F3, T> completableFutureFunction, Supplier<F1> value1Supplier, Supplier<F2> value2Supplier, Supplier<F3> value3Supplier) {
		return getCompletableFuture(() -> completableFutureFunction.apply(value1Supplier.get(), value2Supplier.get(), value3Supplier.get()));
	}

	////

	/**
	 * Safely get a CompletableFuture or a FailedFuture
	 */
	public static <T> CompletableFuture<T> getCompletableFutureIO(IOSupplier<CompletableFuture<T>> completableFutureSupplier) {
		CompletableFuture<T> cf;
		try {
			cf = completableFutureSupplier.get();
		} catch (Exception ex) {
			cf = CompletableFuture.failedFuture(ex);
		}
		return cf;
	}

	/**
	 * Safely get a CompletableFuture or a FailedFuture
	 */
	public static <F, T> CompletableFuture<T> getCompletableFutureIO(IOCompletableFunction<F, T> completableFutureFunction, F value) {
		return getCompletableFutureIO(() -> completableFutureFunction.apply(value));
	}

	/**
	 * Safely get a CompletableFuture or a FailedFuture
	 */
	public static <F, T> CompletableFuture<T> getCompletableFutureIOSupply(IOCompletableFunction<F, T> completableFutureFunction, IOSupplier<F> valueSupplier) {
		return getCompletableFutureIO(() -> completableFutureFunction.apply(valueSupplier.get()));
	}

	/**
	 * Aggregate multiple {@link CompletableFuture} lists into a single {@link CompletableFuture} list
	 *
	 * @param futureLists A collection of {@link CompletableFuture} lists.
	 * @param <T> List elements type
	 * @return {@link CompletableFuture} list
	 */
	public static <T> CompletableFuture<List<T>> aggregateList(Collection<CompletableFuture<List<T>>> futureLists) {
		final CompletableFuture<List<T>> identityAggregatedResult = CompletableFuture.completedFuture(new ArrayList<T>());

		return futureLists.parallelStream().reduce(identityAggregatedResult, (currentAggregatedResult, futureList) -> {
			return currentAggregatedResult.thenApplyAsync((aggregatedList) -> {
				aggregatedList.addAll(futureList.join());
				return aggregatedList;
			});
		});
	}

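	// Usage sketch (hypothetical values; assumes the imports at the top of this file):
	//
	//   Collection<CompletableFuture<List<Integer>>> parts = List.of(
	//       CompletableFuture.completedFuture(List.of(1, 2)),
	//       CompletableFuture.completedFuture(List.of(3)));
	//   List<Integer> all = aggregateList(parts).join(); // [1, 2, 3], order may vary
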
	/**
	 * Creates a new empty collection of disaggregated future results future lists
	 */
	public static <T> Collection<CompletableFuture<List<CompletableFuture<T>>>> createDisaggregatedResultsList() {
		return new ArrayList<>(10);
	}

	/**
	 * Add a disaggregated list of future results to the collection
	 *
	 * @param disaggregatedResults the collection to add to
	 * @param result the future list of results to add
	 * @param <T> List elements type
	 */
	public static <T> void addDisaggregatedList(
			Collection<CompletableFuture<List<CompletableFuture<T>>>> disaggregatedResults,
			CompletableFuture<List<CompletableFuture<T>>> result) {
		disaggregatedResults.add(result);
	}

	/**
	 * Add a result
	 */
	public static <T, U extends T> void addDisaggregatedListCast(
			Collection<CompletableFuture<List<CompletableFuture<T>>>> disaggregatedResults,
			CompletableFuture<List<CompletableFuture<U>>> result) {
		addDisaggregatedListCastForced(disaggregatedResults, result);
	}

	public static <T, U> void addDisaggregatedListCastForced(
			Collection<CompletableFuture<List<CompletableFuture<T>>>> disaggregatedResults,
			CompletableFuture<List<CompletableFuture<U>>> result) {
		disaggregatedResults.add(result.thenApply((originalList) -> {
			List<CompletableFuture<T>> resultList = new ArrayList<>();
			for (CompletableFuture<U> originalFuture : originalList) {
				resultList.add(originalFuture.thenApply((originalValue) -> {
					//noinspection unchecked
					return (T) originalValue;
				}));
			}
			return resultList;
		}));
	}

	/**
	 * Aggregate multiple {@link CompletableFuture} priority queues into a single {@link CompletableFuture} priority queue
	 *
	 * @param futureFloatPriorityQueues A collection of {@link CompletableFuture} priority queues.
	 * @param <T> Queue elements type
	 * @return {@link CompletableFuture} priority queue
	 */
	public static <T> CompletableFuture<FloatPriorityQueue<T>> aggregatePq(Collection<CompletableFuture<FloatPriorityQueue<T>>> futureFloatPriorityQueues) {
		final CompletableFuture<FloatPriorityQueue<T>> identityAggregatedResult = CompletableFuture.completedFuture(new FloatPriorityQueue<>());

		return futureFloatPriorityQueues.stream().reduce(identityAggregatedResult, (currentAggregatedResult, futureFloatPriorityQueue) -> {
			return currentAggregatedResult.thenApply((aggregatedFloatPriorityQueue) -> {
				var futureFloatPriorityQueueValues = futureFloatPriorityQueue.join();
				if (futureFloatPriorityQueueValues == aggregatedFloatPriorityQueue) {
					return aggregatedFloatPriorityQueue;
				}
				futureFloatPriorityQueueValues.forEachItem(aggregatedFloatPriorityQueue::offer);
				return aggregatedFloatPriorityQueue;
			});
		});
	}

	/**
	 * Creates a new empty collection of disaggregated future results future priority queues
	 */
	public static <T> Collection<CompletableFuture<FloatPriorityQueue<CompletableFuture<T>>>> createDisaggregatedFutureResultsPq() {
		return FloatPriorityQueue.synchronizedPq(10);
	}

	/**
	 * Creates a new empty collection of disaggregated results future priority queues
	 */
	public static <T> Collection<CompletableFuture<FloatPriorityQueue<T>>> createDisaggregatedResultsPq() {
		return FloatPriorityQueue.synchronizedPq(10);
	}

	/**
	 * Add a disaggregated priority queue of future results to the collection
	 *
	 * @param disaggregatedResults the collection to add to
	 * @param result the future priority queue of results to add
	 * @param <T> Queue elements type
	 */
	public static <T> void addDisaggregatedPq(
			Collection<CompletableFuture<FloatPriorityQueue<CompletableFuture<T>>>> disaggregatedResults,
			CompletableFuture<FloatPriorityQueue<CompletableFuture<T>>> result) {
		disaggregatedResults.add(result);
	}

	/**
	 * Add a result
	 */
	public static <T, U extends T> void addDisaggregatedPqCast(
			Collection<CompletableFuture<FloatPriorityQueue<CompletableFuture<T>>>> disaggregatedResults,
			CompletableFuture<FloatPriorityQueue<CompletableFuture<U>>> result) {
		addDisaggregatedPqCastForced(disaggregatedResults, result);
	}

	public static <T, U> void addDisaggregatedFuturePqCastForced(
			Collection<CompletableFuture<FloatPriorityQueue<CompletableFuture<T>>>> disaggregatedResults,
			CompletableFuture<FloatPriorityQueue<CompletableFuture<U>>> result) {
		disaggregatedResults.add(result.thenApply((originalFloatPriorityQueue) -> {
			FloatPriorityQueue<CompletableFuture<T>> resultFloatPriorityQueue = new FloatPriorityQueue<>();
			originalFloatPriorityQueue.forEachItem((originalFuture) -> {
				resultFloatPriorityQueue.offer(ScoredValue.of(originalFuture.getScore(),
						originalFuture.getValue().thenApply((originalValue) -> {
							//noinspection unchecked
							return (T) originalValue;
						})
				));
			});
			return resultFloatPriorityQueue;
		}));
	}

	public static <T, U> void addDisaggregatedPqCastForced(
			Collection<CompletableFuture<FloatPriorityQueue<T>>> disaggregatedResults,
			CompletableFuture<FloatPriorityQueue<U>> result) {
		disaggregatedResults.add(result.thenApply((originalFloatPriorityQueue) -> {
			FloatPriorityQueue<T> resultFloatPriorityQueue = new FloatPriorityQueue<>();
			originalFloatPriorityQueue.forEachItem((originalFuture) -> {
				//noinspection unchecked
				resultFloatPriorityQueue.offer(ScoredValue.of(originalFuture.getScore(), (T) originalFuture.getValue()));
			});
			return resultFloatPriorityQueue;
		}));
	}

	public static <T> Set<T> collectToSet(CompletableFuture<? extends Collection<CompletableFuture<T>>> futureList) {
		return futureList.join().parallelStream().map(CompletableFuture::join).collect(Collectors.toSet());
	}

	public static <T> Set<T> collectToSet(CompletableFuture<? extends Collection<CompletableFuture<T>>> futureList, int limit) {
		return futureList.join().parallelStream().map(CompletableFuture::join).limit(limit).collect(Collectors.toSet());
	}

	public static <T> List<T> collectToList(CompletableFuture<? extends Collection<CompletableFuture<T>>> futureList) {
		return futureList.join().stream().map(CompletableFuture::join).collect(Collectors.toList());
	}

	public static <T> List<T> collectToList(CompletableFuture<? extends Collection<CompletableFuture<T>>> futureList, int limit) {
		return futureList.join().stream().map(CompletableFuture::join).limit(limit).collect(Collectors.toList());
	}

	public static <T> LinkedHashSet<T> collectToLinkedSetFuture(CompletableFuture<? extends Collection<CompletableFuture<T>>> futureList) {
		return futureList.join().stream().map(CompletableFuture::join).collect(Collectors.toCollection(LinkedHashSet::new));
	}

	public static <T> LinkedHashSet<T> collectToLinkedSetFuture(CompletableFuture<? extends Collection<CompletableFuture<T>>> futureList,
			int limit) {
		return futureList.join().stream().map(CompletableFuture::join).limit(limit)
				.collect(Collectors.toCollection(LinkedHashSet::new));
	}

	public static <T> LinkedHashSet<T> collectToLinkedSet(CompletableFuture<? extends Collection<T>> futureList) {
		return new LinkedHashSet<>(futureList.join());
	}

	public static <T> LinkedHashSet<T> collectToLinkedSet(CompletableFuture<? extends Collection<T>> futureList,
			int limit) {
		return futureList.join().stream().limit(limit)
				.collect(Collectors.toCollection(LinkedHashSet::new));
	}

	public static <T> FloatPriorityQueue<T> collectToPq(CompletableFuture<? extends FloatPriorityQueue<CompletableFuture<T>>> futureList) {
		var internalPq = futureList.join().streamItems().map(t -> {
			if (t.getValue() != null) {
				return ScoredValue.of(t.getScore(), t.getValue().join());
			} else {
				return ScoredValue.of(t.getScore(), (T) null);
			}
		}).collect(Collectors.toCollection(PriorityQueue::new));
		return new FloatPriorityQueue<>(internalPq);
	}

	public static <T> FloatPriorityQueue<T> collectToPq(CompletableFuture<? extends FloatPriorityQueue<CompletableFuture<T>>> futureList,
			int limit) {
		var internalPq = futureList.join().streamItems().map(t -> {
			if (t.getValue() != null) {
				return ScoredValue.of(t.getScore(), t.getValue().join());
			} else {
				return ScoredValue.of(t.getScore(), (T) null);
			}
		}).limit(limit).collect(Collectors.toCollection(PriorityQueue::new));
		return new FloatPriorityQueue<>(internalPq);
	}

	public static <T> TreeSet<T> collectToTreeSetFuture(CompletableFuture<? extends Collection<CompletableFuture<T>>> futureList) {
		return futureList.join().stream().map(CompletableFuture::join).collect(Collectors.toCollection(TreeSet::new));
	}

	public static <T> TreeSet<T> collectToTreeSetFuture(CompletableFuture<? extends Collection<CompletableFuture<T>>> futureList, int limit) {
		return futureList.join().stream().map(CompletableFuture::join).limit(limit)
				.collect(Collectors.toCollection(TreeSet::new));
	}

	public static <T> TreeSet<T> collectToTreeSetFuture(CompletableFuture<? extends Collection<CompletableFuture<T>>> futureList, Comparator<T> comparator) {
		return futureList.join().stream().map(CompletableFuture::join).collect(Collectors.toCollection(() -> new TreeSet<>(comparator)));
	}

	public static <T> TreeSet<T> collectToTreeSetFuture(CompletableFuture<? extends Collection<CompletableFuture<T>>> futureList, Comparator<T> comparator, int limit) {
		return futureList.join().stream().map(CompletableFuture::join).limit(limit)
				.collect(Collectors.toCollection(() -> new TreeSet<>(comparator)));
	}

	public static <T> TreeSet<T> collectToTreeSet(CompletableFuture<? extends Collection<T>> futureList) {
		return new TreeSet<>(futureList.join());
	}

	public static <T> TreeSet<T> collectToTreeSet(CompletableFuture<? extends Collection<T>> futureList, int limit) {
		return futureList.join().stream().limit(limit)
				.collect(Collectors.toCollection(TreeSet::new));
	}

	public static <T> TreeSet<T> collectToTreeSet(CompletableFuture<? extends Collection<T>> futureList, Comparator<T> comparator) {
		return futureList.join().stream().collect(Collectors.toCollection(() -> new TreeSet<>(comparator)));
	}

	public static <T> TreeSet<T> collectToTreeSet(CompletableFuture<? extends Collection<T>> futureList, Comparator<T> comparator, int limit) {
		return futureList.join().stream().limit(limit)
				.collect(Collectors.toCollection(() -> new TreeSet<>(comparator)));
	}

	public static <T> Optional<T> anyOrNull(CompletableFuture<? extends Collection<CompletableFuture<T>>> futureList) {
		return futureList.join().parallelStream().map(CompletableFuture::join).findAny();
	}

	public static <T> Optional<T> firstOrNullFuture(CompletableFuture<? extends Collection<CompletableFuture<T>>> futureList) {
		return futureList.join().stream().map(CompletableFuture::join).findFirst();
	}

	public static <T> Optional<T> firstOrNull(CompletableFuture<? extends Collection<T>> futureList) {
		return futureList.join().stream().findFirst();
	}

	public static <T> void forEachOrdered(CompletableFuture<? extends Collection<CompletableFuture<T>>> futureList,
			Consumer<T> consumer) {
		var futures = futureList.join();
		futures.stream().map(CompletableFuture::join).forEachOrdered(consumer);
	}

	public static <T> void forEachOrdered(CompletableFuture<List<CompletableFuture<T>>> futureList,
			Consumer<T> consumer, boolean reverse) {
		var futures = futureList.join();
		if (reverse) {
			Collections.reverse(futures);
		}
		futures.stream().map(CompletableFuture::join).forEachOrdered(consumer);
	}

	public static <T> void forEach(CompletableFuture<? extends Collection<CompletableFuture<T>>> futureList, Consumer<T> consumer) {
		futureList.join().parallelStream().map(CompletableFuture::join).forEach(consumer);
	}

	/**
	 * Use CompletableFutureUtils.getCompletableFuture(supplier);
	 */
	@Deprecated
	public static <T> CompletableFuture<T> catchUncheckedExceptions(Supplier<CompletableFuture<T>> supplier) {
		return getCompletableFuture(supplier);
	}

	public static CompletableFuture<Void> runSequence(Collection<CompletableFuture<?>> collection) {
		if (collection.isEmpty()) {
			return CompletableFuture.completedFuture(null);
		} else {
			// Start from an already-completed future: chaining onto a future that is
			// never completed would make the returned future hang forever
			CompletableFuture<Void> result = CompletableFuture.completedFuture(null);
			for (CompletableFuture<?> completableFuture : collection) {
				result = result.thenCompose(x -> completableFuture.thenRun(() -> {}));
			}
			return result;
		}
	}

	public static CompletableFuture<Void> runSequenceAsync(Collection<CompletableFuture<?>> collection, ExecutorService executorService) {
		var result = CompletableFuture.<Void>completedFuture(null);
		for (CompletableFuture<?> completableFuture : collection) {
			result = result.thenComposeAsync(x -> completableFuture.thenRun(() -> {}), executorService);
		}
		return result;
	}

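	// Usage sketch (hypothetical futures): chain completion of already-running
	// futures so the returned future completes only after every step completes.
	//
	//   var steps = List.<CompletableFuture<?>>of(
	//       CompletableFuture.runAsync(() -> System.out.println("step 1")),
	//       CompletableFuture.runAsync(() -> System.out.println("step 2")));
	//   runSequence(steps).join();
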
	/**
	 * Accept values synchronously from an async sequence
	 */
	public static <T> CompletableFuture<?> acceptSequenceAsync(Collection<CompletableFuture<T>> collection,
			Function<T, CompletionStage<?>> runner,
			ExecutorService executorService) {
		CompletableFuture<?> result = CompletableFuture.completedFuture(null);
		for (CompletableFuture<T> completableFuture : collection) {
			result = result.thenComposeAsync(x -> completableFuture.thenComposeAsync(runner::apply, executorService),
					executorService
			);
		}
		return result;
	}

	/**
	 * Accept values synchronously from an async sequence
	 */
	public static <T> CompletableFuture<?> acceptSequenceAsync(Collection<CompletableFuture<T>> collection,
			Consumer<T> runner,
			ExecutorService executorService) {
		CompletableFuture<?> result = CompletableFuture.completedFuture(null);
		for (CompletableFuture<T> completableFuture : collection) {
			result = result.thenComposeAsync(x -> completableFuture.thenAcceptAsync(runner, executorService), executorService);
		}
		return result;
	}

	public static <T> CompletableFuture<T> applySequenceAsync(T initialValue, Collection<Function<T, CompletableFuture<T>>> collection, ExecutorService executorService) {
		var result = CompletableFuture.completedFuture(initialValue);
		for (Function<T, CompletableFuture<T>> item : collection) {
			result = result.thenComposeAsync(item, executorService);
		}
		return result;
	}

	public static <U> CompletableFuture<U> composeAsync(
			Supplier<? extends CompletionStage<U>> supp,
			Executor executor) {
		return CompletableFuture.completedFuture(null).thenComposeAsync((_x) -> supp.get(), executor);
	}

	public static <U> CompletableFuture<U> composeAsyncIO(
			IOSupplier<CompletableFuture<U>> supp,
			Executor executor) {
		return CompletableFuture.completedFuture(null).thenComposeAsync((_x) -> getCompletableFutureIO(supp), executor);
	}
}
@ -1,67 +0,0 @@
package org.warp.commonutils.concurrency.future;

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.locks.StampedLock;
import java.util.function.Supplier;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.warp.commonutils.functional.IOSupplier;

public class FutureLockUtils {

	public static <T> CompletableFuture<T> readLock(@Nullable StampedLock lock, @NotNull Supplier<CompletableFuture<T>> r) {
		long lockValue;
		if (lock != null) {
			lockValue = lock.readLock();
		} else {
			lockValue = 0;
		}
		return CompletableFutureUtils.getCompletableFuture(r).whenComplete((result, err) -> {
			if (lock != null) {
				lock.unlockRead(lockValue);
			}
		});
	}

	public static <T> CompletableFuture<T> writeLock(@Nullable StampedLock lock, @NotNull Supplier<CompletableFuture<T>> r) {
		long lockValue;
		if (lock != null) {
			lockValue = lock.writeLock();
		} else {
			lockValue = 0;
		}
		return CompletableFutureUtils.getCompletableFuture(r).whenComplete((result, err) -> {
			if (lock != null) {
				lock.unlockWrite(lockValue);
			}
		});
	}

	public static <T> CompletableFuture<T> readLockIO(@Nullable StampedLock lock, @NotNull IOSupplier<CompletableFuture<T>> r) {
		long lockValue;
		if (lock != null) {
			lockValue = lock.readLock();
		} else {
			lockValue = 0;
		}
		return CompletableFutureUtils.getCompletableFutureIO(r).whenComplete((result, err) -> {
			if (lock != null) {
				lock.unlockRead(lockValue);
			}
		});
	}

	public static <T> CompletableFuture<T> writeLockIO(@Nullable StampedLock lock, @NotNull IOSupplier<CompletableFuture<T>> r) {
		long lockValue;
		if (lock != null) {
			lockValue = lock.writeLock();
		} else {
			lockValue = 0;
		}
		return CompletableFutureUtils.getCompletableFutureIO(r).whenComplete((result, err) -> {
			if (lock != null) {
				lock.unlockWrite(lockValue);
			}
		});
	}
}
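// Usage sketch (hypothetical caller; the guarded computation is an assumption,
// and it uses the imports above):
class FutureLockUtilsExample {
	public static void main(String[] args) {
		StampedLock lock = new StampedLock();
		CompletableFuture<String> result = FutureLockUtils.readLock(lock,
				() -> CompletableFuture.completedFuture("read under the lock"));
		// The read stamp is released when the future completes, even exceptionally:
		System.out.println(result.join());
	}
}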
@ -1,23 +0,0 @@
package org.warp.commonutils.concurrency.future;

import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;

public class FutureUtils {

	/**
	 * Waits for *all* futures to complete and returns a list of results. If *any* future completes exceptionally then the
	 * resulting future will also complete exceptionally.
	 *
	 * @param futures the futures to wait for
	 * @param <T> type of the future results
	 * @return a future that completes with the list of all results
	 */
	public static <T> CompletableFuture<List<T>> all(List<CompletableFuture<T>> futures) {
		CompletableFuture<?>[] cfs = futures.toArray(CompletableFuture[]::new);

		return CompletableFuture.allOf(cfs)
				.thenApply(ignored -> futures.stream().map(CompletableFuture::join).collect(Collectors.toList()));
	}
}
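// Usage sketch (hypothetical futures; uses the imports above):
class FutureUtilsExample {
	public static void main(String[] args) {
		List<CompletableFuture<Integer>> futures = List.of(
				CompletableFuture.completedFuture(1),
				CompletableFuture.completedFuture(2));
		System.out.println(FutureUtils.all(futures).join()); // [1, 2]
	}
}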
@ -1,50 +0,0 @@
package org.warp.commonutils.concurrency.future;

import java.util.List;
import java.util.Objects;
import java.util.concurrent.CompletableFuture;

public class SizedFutureList<T> {

	private final CompletableFuture<List<CompletableFuture<T>>> data;
	private final CompletableFuture<Integer> size;

	public SizedFutureList(CompletableFuture<List<CompletableFuture<T>>> data, CompletableFuture<Integer> size) {
		this.data = data;
		this.size = size;
	}

	public static <T> SizedFutureList<T> empty() {
		return new SizedFutureList<>(CompletableFuture.completedFuture(List.of()), CompletableFuture.completedFuture(0));
	}

	public CompletableFuture<List<CompletableFuture<T>>> getData() {
		return data;
	}

	public CompletableFuture<Integer> getSize() {
		return size;
	}

	@Override
	public boolean equals(Object o) {
		if (this == o) {
			return true;
		}
		if (o == null || getClass() != o.getClass()) {
			return false;
		}
		SizedFutureList<?> that = (SizedFutureList<?>) o;
		return Objects.equals(data, that.data) && Objects.equals(size, that.size);
	}

	@Override
	public int hashCode() {
		return Objects.hash(data, size);
	}

	@Override
	public String toString() {
		return "SizedFutureList{" + "data=" + data + ", size=" + size + '}';
	}
}
@ -1,65 +0,0 @@
package org.warp.commonutils.concurrency.future;

import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.CompletableFuture;

public class SizedFutureSet<T> {

	private final CompletableFuture<List<CompletableFuture<T>>> data;
	private final CompletableFuture<Integer> size;

	public SizedFutureSet(CompletableFuture<List<CompletableFuture<T>>> data, CompletableFuture<Integer> size) {
		this.data = data;
		this.size = size;
	}

	public static <T> SizedFutureSet<T> empty() {
		return new SizedFutureSet<>(CompletableFuture.completedFuture(List.of()), CompletableFuture.completedFuture(0));
	}

	public CompletableFuture<LinkedHashSet<CompletableFuture<T>>> getFutureDataOrdered() {
		return data.thenApply(LinkedHashSet::new);
	}

	public CompletableFuture<Set<CompletableFuture<T>>> getFutureDataUnordered() {
		return data.thenApply(HashSet::new);
	}

	public LinkedHashSet<T> getDataOrdered() {
		return CompletableFutureUtils.collectToLinkedSetFuture(data);
	}

	public Set<T> getDataUnordered() {
		return CompletableFutureUtils.collectToSet(data);
	}

	public CompletableFuture<Integer> getSize() {
		return size;
	}

	@Override
	public boolean equals(Object o) {
		if (this == o) {
			return true;
		}
		if (o == null || getClass() != o.getClass()) {
			return false;
		}
		SizedFutureSet<?> that = (SizedFutureSet<?>) o;
		return Objects.equals(data, that.data) && Objects.equals(size, that.size);
	}

	@Override
	public int hashCode() {
		return Objects.hash(data, size);
	}

	@Override
	public String toString() {
		return "SizedFutureSet{" + "data=" + data + ", size=" + size + '}';
	}
}
@ -1,21 +0,0 @@
package org.warp.commonutils.error;

import java.io.IOException;

public class InitializationException extends IOException {

	public InitializationException() {
		super();
	}

	public InitializationException(String text) {
		super(text);
	}

	public InitializationException(String message, Throwable cause) {
		super(message, cause);
	}

	public InitializationException(Throwable cause) {
		super(cause);
	}
}
@ -1,7 +0,0 @@
package org.warp.commonutils.functional;

import java.util.concurrent.CompletableFuture;

public interface BiCompletableFunction<T1, T2, U> {

	CompletableFuture<U> apply(T1 value1, T2 value2);
}
@ -1,13 +0,0 @@
package org.warp.commonutils.functional;

public interface CancellableBiConsumer<T, U> { //extends BiConsumer<T, U> {

	/**
	 * @return {@link ConsumerResult#cancelNext()} to cancel the iteration
	 */
	ConsumerResult acceptCancellable(T t, U u);

	/*default void accept(T t, U u) {
		acceptCancellable(t, u);
	}*/
}
@ -1,16 +0,0 @@
package org.warp.commonutils.functional;

public interface CancellableBiFunction<T, U, V> { //extends BiFunction<T, U, V> {

	OperationResult<V> applyCancellable(T t, U u);

	/* default V apply(T t, U u) {
		var result = applyCancellable(t, u);
		if (result == OperationResult.CANCEL) {
			throw new UnsupportedOperationException("Can't cancel this operation");
		}
		//noinspection unchecked
		return (V) result;
	}
	*/
}
@ -1,13 +0,0 @@
package org.warp.commonutils.functional;

public interface CancellableConsumer<T> { //extends Consumer<T> {

	/**
	 * @return {@link ConsumerResult#cancelNext()} to cancel the iteration
	 */
	ConsumerResult acceptCancellable(T t);

	/*default void accept(T t) {
		acceptCancellable(t);
	}*/
}
@ -1,15 +0,0 @@
package org.warp.commonutils.functional;

public interface CancellableFunction<T, U> { //extends Function<T, U> {

	OperationResult<U> applyCancellable(T t);

	/*default U apply(T t) {
		var result = applyCancellable(t);
		if (result == OperationResult.CANCEL) {
			throw new UnsupportedOperationException("Can't cancel this operation");
		}
		//noinspection unchecked
		return (U) result;
	}*/
}
@ -1,13 +0,0 @@
package org.warp.commonutils.functional;

public interface CancellableTriConsumer<T, U, V> { //extends TriConsumer<T, U, V> {

	/**
	 * @return {@link ConsumerResult#cancelNext()} to cancel the iteration
	 */
	ConsumerResult acceptCancellable(T t, U u, V v);

	/*default void accept(T t, U u, V v) {
		acceptCancellable(t, u, v);
	}*/
}
@ -1,16 +0,0 @@
package org.warp.commonutils.functional;

public interface CancellableTriFunction<T, U, V, W> { //extends TriFunction<T, U, V, W> {

	OperationResult<W> applyCancellable(T t, U u, V v);

	/* default W apply(T t, U u, V v) {
		var result = applyCancellable(t, u, v);
		if (result == OperationResult.CANCEL) {
			throw new UnsupportedOperationException("Can't cancel this operation");
		}
		//noinspection unchecked
		return (W) result;
	}
	*/
}
@ -1,7 +0,0 @@
package org.warp.commonutils.functional;

import java.util.concurrent.CompletableFuture;

public interface CompletableFunction<T, U> {

	CompletableFuture<U> apply(T value);
}
@ -1,62 +0,0 @@
package org.warp.commonutils.functional;

import java.util.StringJoiner;
import java.util.concurrent.CancellationException;

public final class ConsumerResult {

	private final boolean cancel;

	private ConsumerResult(boolean cancel) {
		this.cancel = cancel;
	}

	public static ConsumerResult cancelNext() {
		return new ConsumerResult(true);
	}

	public static ConsumerResult result() {
		return new ConsumerResult(false);
	}

	public boolean isCancelled() {
		return cancel;
	}

	public void throwIfCancelled() {
		if (cancel) {
			throw new CancellationException("Operation cancelled");
		}
	}

	@Override
	public boolean equals(Object o) {
		if (this == o) {
			return true;
		}
		if (o == null || getClass() != o.getClass()) {
			return false;
		}

		ConsumerResult that = (ConsumerResult) o;

		return cancel == that.cancel;
	}

	@Override
	public int hashCode() {
		return (cancel ? 1 : 0);
	}

	@Override
	public String toString() {
		return new StringJoiner(", ", ConsumerResult.class.getSimpleName() + "[", "]").add("cancel=" + cancel).toString();
	}

	public ConsumerResult or(ConsumerResult otherResult) {
		if (otherResult.cancel) {
			return otherResult;
		}
		return this;
	}
}
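// Usage sketch (hypothetical consumer): how a cancellable iteration is expected
// to interpret the result.
class ConsumerResultExample {
	static ConsumerResult printPositive(int value) {
		if (value < 0) {
			return ConsumerResult.cancelNext(); // tells the caller to stop iterating
		}
		System.out.println(value);
		return ConsumerResult.result();
	}

	public static void main(String[] args) {
		for (int value : new int[] {1, 2, -1, 3}) {
			if (printPositive(value).isCancelled()) {
				break; // prints 1 and 2, then stops at -1
			}
		}
	}
}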
@ -1,19 +0,0 @@
package org.warp.commonutils.functional;

import java.util.function.Consumer;
import java.util.function.Function;
import org.warp.commonutils.functional.Unchecked.UncheckedConsumer;

public class Generic {

	public static <T, U> Function<T, U> function(Function<Object, U> fnc) {
		return (Function<T, U>) fnc;
	}

	public static <T> Consumer<T> consumer(Consumer<Object> fnc) {
		return (Consumer<T>) fnc;
	}

	public static <T> UncheckedConsumer<T> consumerExc(UncheckedConsumer<Object> fnc) {
		return (UncheckedConsumer<T>) fnc;
	}
}
@ -1,43 +0,0 @@
package org.warp.commonutils.functional;

import java.util.Iterator;
import java.util.function.Consumer;
import java.util.function.Function;
import org.jetbrains.annotations.Nullable;

public class MappedIterator<A, B> implements Iterator<B> {

	private final Iterator<A> iterator;
	private final Function<A, B> mappingFunction;

	private MappedIterator(Iterator<A> iterator, Function<A, B> mappingFunction) {
		this.iterator = iterator;
		this.mappingFunction = mappingFunction;
	}

	public static <T, U> Iterator<U> of(Iterator<T> originalIterator, Function<@Nullable T, @Nullable U> mappingFunction) {
		return new MappedIterator<>(originalIterator, mappingFunction);
	}

	@Override
	public boolean hasNext() {
		return iterator.hasNext();
	}

	@Override
	public B next() {
		return mappingFunction.apply(iterator.next());
	}

	@Override
	public void remove() {
		iterator.remove();
	}

	@Override
	public void forEachRemaining(Consumer<? super B> action) {
		iterator.forEachRemaining((item) -> {
			action.accept(mappingFunction.apply(item));
		});
	}
}
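// Usage sketch (hypothetical values; uses the imports above):
class MappedIteratorExample {
	public static void main(String[] args) {
		Iterator<Integer> numbers = java.util.List.of(1, 2, 3).iterator();
		Iterator<String> labels = MappedIterator.of(numbers, n -> "#" + n);
		labels.forEachRemaining(System.out::println); // #1 #2 #3
	}
}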
@ -1,75 +0,0 @@
package org.warp.commonutils.functional;

import java.util.Objects;
import java.util.StringJoiner;

public final class OperationResult<T> {

	private final boolean cancel;
	private final T value;

	private OperationResult(boolean cancel, T value) {
		this.cancel = cancel;
		this.value = value;
	}

	public static <T> OperationResult<T> cancelNext(T value) {
		return new OperationResult<>(true, value);
	}

	public static <T> OperationResult<T> result(T value) {
		return new OperationResult<>(false, value);
	}

	public static <T> OperationResult<T> of(boolean cancel, T value) {
		return new OperationResult<>(cancel, value);
	}

	public boolean isCancelled() {
		return cancel;
	}

	public T getValue() {
		return value;
	}

	@Override
	public boolean equals(Object o) {
		if (this == o) {
			return true;
		}
		if (o == null || getClass() != o.getClass()) {
			return false;
		}

		OperationResult<?> that = (OperationResult<?>) o;

		if (cancel != that.cancel) {
			return false;
		}
		return Objects.equals(value, that.value);
	}

	@Override
	public int hashCode() {
		int result = (cancel ? 1 : 0);
		result = 31 * result + (value != null ? value.hashCode() : 0);
		return result;
	}

	@Override
	public String toString() {
		return new StringJoiner(", ", OperationResult.class.getSimpleName() + "[", "]")
				.add("cancel=" + cancel)
				.add("value=" + value)
				.toString();
	}

	public <X> OperationResult<X> copyStatusWith(X newResults) {
		if (cancel) {
			return OperationResult.cancelNext(newResults);
		} else {
			return OperationResult.result(newResults);
		}
	}
}
@ -1,7 +0,0 @@
package org.warp.commonutils.functional;

import java.util.concurrent.CompletableFuture;

public interface TriCompletableFunction<T1, T2, T3, U> {

	CompletableFuture<U> apply(T1 value1, T2 value2, T3 value3);
}
@ -1,30 +0,0 @@
package org.warp.commonutils.functional;

import java.util.function.Function;

public class Unchecked<T> implements Function<T, UncheckedResult> {

	private final UncheckedConsumer<T> uncheckedConsumer;

	public Unchecked(UncheckedConsumer<T> uncheckedConsumer) {
		this.uncheckedConsumer = uncheckedConsumer;
	}

	public static <T> Unchecked<T> wrap(UncheckedConsumer<T> uncheckedConsumer) {
		return new Unchecked<>(uncheckedConsumer);
	}

	@Override
	public UncheckedResult apply(T t) {
		try {
			uncheckedConsumer.consume(t);
			return new UncheckedResult();
		} catch (Exception e) {
			return new UncheckedResult(e);
		}
	}

	public interface UncheckedConsumer<T> {

		void consume(T value) throws Exception;
	}
}
@@ -1,33 +0,0 @@
package org.warp.commonutils.functional;

import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

public class UncheckedResult {

	@Nullable
	private final Exception e;

	public UncheckedResult(@NotNull Exception e) {
		this.e = e;
	}

	public UncheckedResult() {
		this.e = null;
	}

	public <T extends Exception> UncheckedResult throwException(@NotNull Class<T> exceptionClass) throws T {
		if (e != null) {
			if (exceptionClass.isInstance(e)) {
				throw exceptionClass.cast(e);
			}
		}
		return this;
	}

	public void done() {
		if (e != null) {
			throw new RuntimeException(e);
		}
	}
}
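Taken together, the two classes above let a throwing operation run inside a java.util.stream pipeline; a minimal sketch (file names are illustrative):

	import java.nio.file.Files;
	import java.nio.file.Path;
	import java.util.List;

	List.of("a.tmp", "b.tmp").stream()
			.map(Unchecked.wrap((String name) -> Files.deleteIfExists(Path.of(name))))
			.forEach(UncheckedResult::done); // rethrows any captured failure as RuntimeException

Calling throwException(IOException.class) before done() would rethrow the original checked type instead of wrapping it.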
@@ -1,52 +0,0 @@
package org.warp.commonutils.functional;

import java.io.IOError;
import java.io.IOException;
import org.warp.commonutils.functional.IOBooleanSupplier;
import org.warp.commonutils.functional.IOIntegerSupplier;
import org.warp.commonutils.functional.IOLongSupplier;
import org.warp.commonutils.functional.IORunnable;
import org.warp.commonutils.functional.IOSupplier;

public final class UnsafeIOUtils {

	public static <T> T unsafe(IOSupplier<T> expression) {
		try {
			return expression.get();
		} catch (IOException e) {
			throw new IOError(e);
		}
	}

	public static int unsafe(IOIntegerSupplier expression) {
		try {
			return expression.get();
		} catch (IOException e) {
			throw new IOError(e);
		}
	}

	public static boolean unsafe(IOBooleanSupplier expression) {
		try {
			return expression.get();
		} catch (IOException e) {
			throw new IOError(e);
		}
	}

	public static long unsafe(IOLongSupplier expression) {
		try {
			return expression.get();
		} catch (IOException e) {
			throw new IOError(e);
		}
	}

	public static void unsafe(IORunnable expression) {
		try {
			expression.run();
		} catch (IOException e) {
			throw new IOError(e);
		}
	}
}
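A sketch of the pattern above (the file name is illustrative). Binding the lambda to an explicit functional-interface variable avoids ambiguity between the overloads:

	IOSupplier<String> readConfig = () -> java.nio.file.Files.readString(java.nio.file.Path.of("config.txt"));
	String config = UnsafeIOUtils.unsafe(readConfig); // any IOException surfaces as java.io.IOError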
@@ -1,28 +0,0 @@
package org.warp.commonutils.locks;

import java.util.concurrent.Phaser;

public class FlexibleCountDownLatch {

	private final Phaser phaser;

	public FlexibleCountDownLatch(int initialSize) {
		this.phaser = new Phaser(initialSize + 1);
	}

	public void await() {
		phaser.arriveAndAwaitAdvance();
	}

	public void grow() {
		phaser.register();
	}

	public void grow(int n) {
		phaser.bulkRegister(n);
	}

	public void countDown() {
		phaser.arriveAndDeregister();
	}
}
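Unlike java.util.concurrent.CountDownLatch, the Phaser-backed latch above can grow after construction. A usage sketch (the tasks here are placeholders):

	var executor = java.util.concurrent.Executors.newFixedThreadPool(3);
	var latch = new FlexibleCountDownLatch(2);
	executor.submit(latch::countDown);
	executor.submit(latch::countDown);
	latch.grow(); // a third unit of work was discovered after construction
	executor.submit(latch::countDown);
	latch.await(); // returns once all three tasks have arrived
	executor.shutdown();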
@@ -1,522 +0,0 @@
/*
 * Copyright (C) 2011 The Guava Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */

package org.warp.commonutils.locks;

import com.google.common.annotations.Beta;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.MoreObjects;
import com.google.common.base.Preconditions;
import com.google.common.base.Supplier;
import com.google.common.collect.MapMaker;
import com.google.common.math.IntMath;
import com.google.common.primitives.Ints;
import it.cavallium.concurrentlocks.ReadWriteUpdateLock;
import it.cavallium.concurrentlocks.ReentrantReadWriteUpdateLock;
import it.unimi.dsi.fastutil.ints.IntAVLTreeSet;
import it.unimi.dsi.fastutil.objects.ObjectLinkedOpenHashSet;
import java.lang.ref.Reference;
import java.lang.ref.ReferenceQueue;
import java.lang.ref.WeakReference;
import java.math.RoundingMode;
import java.util.Collection;
import java.util.Collections;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.Semaphore;
import java.util.concurrent.atomic.AtomicReferenceArray;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.concurrent.locks.StampedLock;

/**
 * A striped {@code Lock/Semaphore/ReadWriteLock}. This offers the underlying lock striping similar to that of {@code
 * ConcurrentHashMap} in a reusable form, and extends it for semaphores and read-write locks. Conceptually, lock
 * striping is the technique of dividing a lock into many
 * <i>stripes</i>, increasing the granularity of a single lock and allowing independent operations
 * to lock different stripes and proceed concurrently, instead of creating contention for a single lock.
 *
 * <p>The guarantee provided by this class is that equal keys lead to the same lock (or semaphore),
 * i.e. if {@code key1.equals(key2)}, then {@code striped.get(key1) == striped.get(key2)} (assuming {@link
 * Object#hashCode()} is correctly implemented for the keys). Note that if {@code key1} is
 * <strong>not</strong> equal to {@code key2}, it is <strong>not</strong> guaranteed that
 * {@code striped.get(key1) != striped.get(key2)}; the elements might nevertheless be mapped to the same lock. The lower
 * the number of stripes, the higher the probability of this happening.
 *
 * <p>There are three flavors of this class: {@code Striped<Lock>}, {@code Striped<Semaphore>}, and
 * {@code Striped<ReadWriteLock>}. For each type, two implementations are offered: {@linkplain #lock(int) strong} and
 * {@linkplain #lazyWeakLock(int) weak} {@code Striped<Lock>}, {@linkplain #semaphore(int, int) strong} and {@linkplain
 * #lazyWeakSemaphore(int, int) weak} {@code Striped<Semaphore>}, and {@linkplain #readWriteLock(int) strong} and
 * {@linkplain #lazyWeakReadWriteLock(int) weak} {@code Striped<ReadWriteLock>}. <i>Strong</i> means that all stripes
 * (locks/semaphores) are initialized eagerly, and are not reclaimed unless {@code Striped} itself is reclaimable.
 * <i>Weak</i> means that locks/semaphores are created lazily, and they are allowed to be reclaimed if nobody is
 * holding on to them. This is useful, for example, if one wants to create a {@code Striped<Lock>} of many locks, but
 * worries that in most cases only a small portion of these would be in use.
 *
 * <p>Prior to this class, one might be tempted to use {@code Map<K, Lock>}, where {@code K}
 * represents the task. This maximizes concurrency by having each unique key mapped to a unique lock, but also maximizes
 * memory footprint. On the other extreme, one could use a single lock for all tasks, which minimizes memory footprint
 * but also minimizes concurrency. Instead of choosing either of these extremes, {@code Striped} allows the user to
 * trade between required concurrency and memory footprint. For example, if a set of tasks are CPU-bound, one could
 * easily create a very compact {@code Striped<Lock>} of {@code availableProcessors() * 4} stripes, instead of possibly
 * thousands of locks which could be created in a {@code Map<K, Lock>} structure.
 *
 * @author Dimitris Andreou
 * @since 13.0
 */
@Beta
@GwtIncompatible
public abstract class Striped<L> {

	/**
	 * If there are at least this many stripes, we assume the memory usage of a ConcurrentMap will be smaller than a large
	 * array. (This assumes that in the lazy case, most stripes are unused. As always, if many stripes are in use, a
	 * non-lazy striped makes more sense.)
	 */
	private static final int LARGE_LAZY_CUTOFF = 1024;

	private Striped() {
	}

	/**
	 * Returns the stripe that corresponds to the passed key. It is always guaranteed that if {@code key1.equals(key2)},
	 * then {@code get(key1) == get(key2)}.
	 *
	 * @param key an arbitrary, non-null key
	 * @return the stripe that the passed key corresponds to
	 */
	public abstract L get(Object key);

	/**
	 * Returns the stripe at the specified index. Valid indexes are 0, inclusively, to {@code size()}, exclusively.
	 *
	 * @param index the index of the stripe to return; must be in {@code [0...size())}
	 * @return the stripe at the specified index
	 */
	public abstract L getAt(int index);

	/**
	 * Returns the index to which the given key is mapped, so that getAt(indexFor(key)) == get(key).
	 */
	abstract int indexFor(Object key);

	/**
	 * Returns the total number of stripes in this instance.
	 */
	public abstract int size();

	/**
	 * Returns the stripes that correspond to the passed objects, in ascending (as per {@link #getAt(int)}) order. Thus,
	 * threads that use the stripes in the order returned by this method are guaranteed to not deadlock each other.
	 *
	 * <p>It should be noted that using a {@code Striped<L>} with relatively few stripes, and
	 * {@code bulkGet(keys)} with a relatively large number of keys can cause an excessive number of shared stripes (much
	 * like the birthday paradox, where much fewer than anticipated birthdays are needed for a pair of them to match).
	 * Please consider carefully the implications of the number of stripes, the intended concurrency level, and the
	 * typical number of keys used in a {@code bulkGet(keys)} operation. See <a href="http://www.mathpages.com/home/kmath199.htm">Balls
	 * in Bins model</a> for mathematical formulas that can be used to estimate the probability of collisions.
	 *
	 * @param keys arbitrary non-null keys
	 * @return the stripes corresponding to the objects (one per each object, derived by delegating to {@link
	 * #get(Object)}; may contain duplicates), in an increasing index order.
	 */
	public Iterable<L> bulkGet(Iterable<?> keys) {
		return Collections.unmodifiableCollection(bulkGet_(keys));
	}

	private Collection<L> bulkGet_(Iterable<?> keys) {
		var stripes = new IntAVLTreeSet(Integer::compare);
		for (Object key : keys) {
			stripes.add(indexFor(key));
		}
		var locks = new ObjectLinkedOpenHashSet<L>();
		stripes.forEach((int stripe) -> locks.add(getAt(stripe)));
		return locks;
	}

	public Iterable<L> bulkGetAt(Iterable<Integer> keys) {
		return Collections.unmodifiableCollection(bulkGetAt_(keys));
	}

	private Collection<L> bulkGetAt_(Iterable<Integer> keys) {
		var stripes = new IntAVLTreeSet(Integer::compare);
		for (Integer key : keys) {
			stripes.add((int) key);
		}
		var locks = new ObjectLinkedOpenHashSet<L>();
		for (Integer stripe : stripes) {
			locks.add(getAt(stripe));
		}
		return locks;
	}

	// Static factories

	/**
	 * Creates a {@code Striped<Lock>} with eagerly initialized, strongly referenced locks. Every lock is reentrant.
	 *
	 * @param stripes the minimum number of stripes (locks) required
	 * @return a new {@code Striped<Lock>}
	 */
	public static Striped<Lock> lock(int stripes) {
		return new CompactStriped<Lock>(stripes, new Supplier<Lock>() {
			@Override
			public Lock get() {
				return new PaddedLock();
			}
		});
	}

	/**
	 * Creates a {@code Striped<Lock>} with lazily initialized, weakly referenced locks. Every lock is reentrant.
	 *
	 * @param stripes the minimum number of stripes (locks) required
	 * @return a new {@code Striped<Lock>}
	 */
	public static Striped<Lock> lazyWeakLock(int stripes) {
		return lazy(stripes, new Supplier<Lock>() {
			@Override
			public Lock get() {
				return new ReentrantLock(false);
			}
		});
	}

	private static <L> Striped<L> lazy(int stripes, Supplier<L> supplier) {
		return stripes < LARGE_LAZY_CUTOFF ? new SmallLazyStriped<L>(stripes, supplier)
				: new LargeLazyStriped<L>(stripes, supplier);
	}

	/**
	 * Creates a {@code Striped<Semaphore>} with eagerly initialized, strongly referenced semaphores, with the specified
	 * number of permits.
	 *
	 * @param stripes the minimum number of stripes (semaphores) required
	 * @param permits the number of permits in each semaphore
	 * @return a new {@code Striped<Semaphore>}
	 */
	public static Striped<Semaphore> semaphore(int stripes, final int permits) {
		return new CompactStriped<Semaphore>(stripes, new Supplier<Semaphore>() {
			@Override
			public Semaphore get() {
				return new PaddedSemaphore(permits);
			}
		});
	}

	/**
	 * Creates a {@code Striped<Semaphore>} with lazily initialized, weakly referenced semaphores, with the specified
	 * number of permits.
	 *
	 * @param stripes the minimum number of stripes (semaphores) required
	 * @param permits the number of permits in each semaphore
	 * @return a new {@code Striped<Semaphore>}
	 */
	public static Striped<Semaphore> lazyWeakSemaphore(int stripes, final int permits) {
		return lazy(stripes, new Supplier<Semaphore>() {
			@Override
			public Semaphore get() {
				return new Semaphore(permits, false);
			}
		});
	}

	/**
	 * Creates a {@code Striped<ReadWriteLock>} with eagerly initialized, strongly referenced read-write locks. Every lock
	 * is reentrant.
	 *
	 * @param stripes the minimum number of stripes (locks) required
	 * @return a new {@code Striped<ReadWriteLock>}
	 */
	public static Striped<ReadWriteLock> readWriteLock(int stripes) {
		return new CompactStriped<ReadWriteLock>(stripes, READ_WRITE_LOCK_SUPPLIER);
	}

	/**
	 * Creates a {@code Striped<StampedLock>} with eagerly initialized, strongly referenced stamped locks. Locks are not
	 * reentrant (StampedLock does not support reentrancy).
	 *
	 * @param stripes the minimum number of stripes (locks) required
	 * @return a new {@code Striped<StampedLock>}
	 */
	public static Striped<StampedLock> readWriteStampedLock(int stripes) {
		return new CompactStriped<StampedLock>(stripes, STAMPED_LOCK_SUPPLIER);
	}

	/**
	 * Creates a {@code Striped<ReadWriteLock>} with eagerly initialized, strongly referenced read-write-update locks.
	 * Every lock is reentrant.
	 *
	 * @param stripes the minimum number of stripes (locks) required
	 * @return a new {@code Striped<ReadWriteUpdateLock>}
	 */
	public static Striped<ReadWriteUpdateLock> readWriteUpdateLock(int stripes) {
		return new CompactStriped<ReadWriteUpdateLock>(stripes, READ_WRITE_UPDATE_LOCK_SUPPLIER);
	}

	/**
	 * Creates a {@code Striped<ReadWriteLock>} with lazily initialized, weakly referenced read-write locks. Every lock is
	 * reentrant.
	 *
	 * @param stripes the minimum number of stripes (locks) required
	 * @return a new {@code Striped<ReadWriteLock>}
	 */
	public static Striped<ReadWriteLock> lazyWeakReadWriteLock(int stripes) {
		return lazy(stripes, READ_WRITE_LOCK_SUPPLIER);
	}

	// ReentrantReadWriteLock is large enough to make padding probably unnecessary
	private static final Supplier<ReadWriteLock> READ_WRITE_LOCK_SUPPLIER = new Supplier<ReadWriteLock>() {
		@Override
		public ReadWriteLock get() {
			return new ReentrantReadWriteLock();
		}
	};

	// StampedLock is large enough to make padding probably unnecessary
	private static final Supplier<StampedLock> STAMPED_LOCK_SUPPLIER = new Supplier<StampedLock>() {
		@Override
		public StampedLock get() {
			return new StampedLock();
		}
	};

	// ReentrantReadWriteUpdateLock is large enough to make padding probably unnecessary
	private static final Supplier<ReadWriteUpdateLock> READ_WRITE_UPDATE_LOCK_SUPPLIER = new Supplier<ReadWriteUpdateLock>() {
		@Override
		public ReadWriteUpdateLock get() {
			return new ReentrantReadWriteUpdateLock();
		}
	};

	private abstract static class PowerOfTwoStriped<L> extends Striped<L> {

		/**
		 * Capacity (power of two) minus one, for fast mod evaluation
		 */
		final int mask;

		PowerOfTwoStriped(int stripes) {
			Preconditions.checkArgument(stripes > 0, "Stripes must be positive");
			this.mask = stripes > Ints.MAX_POWER_OF_TWO ? ALL_SET : ceilToPowerOfTwo(stripes) - 1;
		}

		@Override
		final int indexFor(Object key) {
			int hash = smear(key.hashCode());
			return hash & mask;
		}

		@Override
		public final L get(Object key) {
			return getAt(indexFor(key));
		}
	}

	/**
	 * Implementation of Striped where 2^k stripes are represented as an array of the same length, eagerly initialized.
	 */
	private static class CompactStriped<L> extends PowerOfTwoStriped<L> {

		/**
		 * Size is a power of two.
		 */
		private final Object[] array;

		private CompactStriped(int stripes, Supplier<L> supplier) {
			super(stripes);
			Preconditions.checkArgument(stripes <= Ints.MAX_POWER_OF_TWO, "Stripes must be <= 2^30");

			this.array = new Object[mask + 1];
			for (int i = 0; i < array.length; i++) {
				array[i] = supplier.get();
			}
		}

		@SuppressWarnings("unchecked") // we only put L's in the array
		@Override
		public L getAt(int index) {
			return (L) array[index];
		}

		@Override
		public int size() {
			return array.length;
		}
	}

	/**
	 * Implementation of Striped where up to 2^k stripes can be represented, using an AtomicReferenceArray of size 2^k. To
	 * map a user key into a stripe, we take a k-bit slice of the user key's (smeared) hashCode(). The stripes are lazily
	 * initialized and are weakly referenced.
	 */
	@VisibleForTesting
	static class SmallLazyStriped<L> extends PowerOfTwoStriped<L> {

		final AtomicReferenceArray<ArrayReference<? extends L>> locks;
		final Supplier<L> supplier;
		final int size;
		final ReferenceQueue<L> queue = new ReferenceQueue<L>();

		SmallLazyStriped(int stripes, Supplier<L> supplier) {
			super(stripes);
			this.size = (mask == ALL_SET) ? Integer.MAX_VALUE : mask + 1;
			this.locks = new AtomicReferenceArray<ArrayReference<? extends L>>(size);
			this.supplier = supplier;
		}

		@Override
		public L getAt(int index) {
			if (size != Integer.MAX_VALUE) {
				Preconditions.checkElementIndex(index, size());
			} // else no check necessary, all index values are valid
			ArrayReference<? extends L> existingRef = locks.get(index);
			L existing = existingRef == null ? null : existingRef.get();
			if (existing != null) {
				return existing;
			}
			L created = supplier.get();
			ArrayReference<L> newRef = new ArrayReference<L>(created, index, queue);
			while (!locks.compareAndSet(index, existingRef, newRef)) {
				// we raced, we need to re-read and try again
				existingRef = locks.get(index);
				existing = existingRef == null ? null : existingRef.get();
				if (existing != null) {
					return existing;
				}
			}
			drainQueue();
			return created;
		}

		// N.B. Draining the queue is only necessary to ensure that we don't accumulate empty references
		// in the array. We could skip this if we decide we don't care about holding on to Reference
		// objects indefinitely.
		private void drainQueue() {
			Reference<? extends L> ref;
			while ((ref = queue.poll()) != null) {
				// We only ever register ArrayReferences with the queue so this is always safe.
				ArrayReference<? extends L> arrayRef = (ArrayReference<? extends L>) ref;
				// Try to clear out the array slot, n.b. if we fail that is fine, in either case the
				// arrayRef will be out of the array after this step.
				locks.compareAndSet(arrayRef.index, arrayRef, null);
			}
		}

		@Override
		public int size() {
			return size;
		}

		private static final class ArrayReference<L> extends WeakReference<L> {

			final int index;

			ArrayReference(L referent, int index, ReferenceQueue<L> queue) {
				super(referent, queue);
				this.index = index;
			}
		}
	}

	/**
	 * Implementation of Striped where up to 2^k stripes can be represented, using a ConcurrentMap where the key domain is
	 * [0..2^k). To map a user key into a stripe, we take a k-bit slice of the user key's (smeared) hashCode(). The
	 * stripes are lazily initialized and are weakly referenced.
	 */
	@VisibleForTesting
	static class LargeLazyStriped<L> extends PowerOfTwoStriped<L> {

		final ConcurrentMap<Integer, L> locks;
		final Supplier<L> supplier;
		final int size;

		LargeLazyStriped(int stripes, Supplier<L> supplier) {
			super(stripes);
			this.size = (mask == ALL_SET) ? Integer.MAX_VALUE : mask + 1;
			this.supplier = supplier;
			this.locks = new MapMaker().weakValues().makeMap();
		}

		@Override
		public L getAt(int index) {
			if (size != Integer.MAX_VALUE) {
				Preconditions.checkElementIndex(index, size());
			} // else no check necessary, all index values are valid
			L existing = locks.get(index);
			if (existing != null) {
				return existing;
			}
			L created = supplier.get();
			existing = locks.putIfAbsent(index, created);
			return MoreObjects.firstNonNull(existing, created);
		}

		@Override
		public int size() {
			return size;
		}
	}

	/**
	 * A bit mask where all bits are set.
	 */
	private static final int ALL_SET = ~0;

	private static int ceilToPowerOfTwo(int x) {
		return 1 << IntMath.log2(x, RoundingMode.CEILING);
	}

	/*
	 * This method was written by Doug Lea with assistance from members of JCP JSR-166 Expert Group
	 * and released to the public domain, as explained at
	 * http://creativecommons.org/licenses/publicdomain
	 *
	 * As of 2010/06/11, this method is identical to the (package private) hash method in OpenJDK 7's
	 * java.util.HashMap class.
	 */
	// Copied from java/com/google/common/collect/Hashing.java
	private static int smear(int hashCode) {
		hashCode ^= (hashCode >>> 20) ^ (hashCode >>> 12);
		return hashCode ^ (hashCode >>> 7) ^ (hashCode >>> 4);
	}

	private static class PaddedLock extends ReentrantLock {

		/*
		 * Padding from 40 into 64 bytes, same size as cache line. Might be beneficial to add a fourth
		 * long here, to minimize chance of interference between consecutive locks, but I couldn't
		 * observe any benefit from that.
		 */
		long unused1;
		long unused2;
		long unused3;

		PaddedLock() {
			super(false);
		}
	}

	private static class PaddedSemaphore extends Semaphore {

		// See PaddedLock comment
		long unused1;
		long unused2;
		long unused3;

		PaddedSemaphore(int permits) {
			super(permits, false);
		}
	}
}
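A usage sketch for the class above (key and stripe count are illustrative): equal keys always map to the same stripe, so per-key critical sections need only a bounded pool of locks.

	import java.util.List;
	import java.util.concurrent.locks.Lock;

	Striped<Lock> userLocks = Striped.lazyWeakLock(64);

	void updateUser(String userId, Runnable update) {
		Lock lock = userLocks.get(userId); // same key -> same stripe
		lock.lock();
		try {
			update.run();
		} finally {
			lock.unlock();
		}
	}

	// For multi-key operations, bulkGet returns stripes in ascending index order,
	// so acquiring them in that order cannot deadlock:
	for (Lock l : userLocks.bulkGet(List.of("alice", "bob"))) {
		l.lock();
	}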
@@ -1,107 +0,0 @@
package org.warp.commonutils.metrics;

import it.unimi.dsi.fastutil.objects.Object2ObjectMap;
import it.unimi.dsi.fastutil.objects.Object2ObjectOpenHashMap;
import it.unimi.dsi.fastutil.objects.ObjectOpenHashSet;
import java.util.Collections;
import java.util.HashMap;
import java.util.Set;

public class AtomicDetailedTimeAbsoluteSamples<T> implements AtomicDetailedTimeAbsoluteSamplesSnapshot<T> {

	private final boolean isSnapshot;
	private final int sampleTime;
	private final int samplesCount;
	private Object2ObjectMap<T, AtomicTimeAbsoluteSamples> detailedAtomicTimeSamples = new Object2ObjectOpenHashMap<>();

	/**
	 * @param sampleTime sample length, in milliseconds
	 * @param samplesCount number of samples to keep in the sliding window
	 */
	public AtomicDetailedTimeAbsoluteSamples(int sampleTime, int samplesCount) {
		this.sampleTime = sampleTime;
		this.samplesCount = samplesCount;
		this.isSnapshot = false;
	}

	public AtomicDetailedTimeAbsoluteSamples(int sampleTime, int samplesCount,
			HashMap<T, AtomicTimeAbsoluteSamplesSnapshot> detailedAtomicTimeSamples, boolean isSnapshot) {
		this.sampleTime = sampleTime;
		this.samplesCount = samplesCount;
		this.detailedAtomicTimeSamples = new Object2ObjectOpenHashMap<>();
		detailedAtomicTimeSamples.forEach((detail, sample) -> this.detailedAtomicTimeSamples.put(detail, (AtomicTimeAbsoluteSamples) sample));
		this.isSnapshot = isSnapshot;
	}

	private synchronized void updateSamples() {

	}

	private synchronized AtomicTimeAbsoluteSamples getDetailed(T detail) {
		AtomicTimeAbsoluteSamples detailed = detailedAtomicTimeSamples.get(detail);
		if (detailed == null) {
			detailed = new AtomicTimeAbsoluteSamples(sampleTime, samplesCount);
			detailedAtomicTimeSamples.put(detail, detailed);
		}
		return detailed;
	}

	public synchronized void set(T detail, long count) {
		updateSamples();
		getDetailed(detail).set(count);
	}

	@Override
	public synchronized Set<T> getDetails() {
		return Collections.unmodifiableSet(new ObjectOpenHashSet<>(detailedAtomicTimeSamples.keySet()));
	}

	@Override
	public synchronized double getAveragePerSecond(T detail, long timeRange) {
		updateSamples();
		return getDetailed(detail).getAveragePerSecond(timeRange);
	}

	@Override
	public synchronized double getAveragePerSecond(long timeRange) {
		updateSamples();
		return detailedAtomicTimeSamples.values().stream().mapToDouble((detail) -> detail.getAveragePerSecond(timeRange)).sum();
	}

	@Override
	public synchronized long getCurrentCount(T detail) {
		updateSamples();
		return getDetailed(detail).getCurrentCount();
	}

	@Override
	public synchronized long getCurrentCount() {
		updateSamples();
		return detailedAtomicTimeSamples.values().stream().mapToLong(AtomicTimeAbsoluteSamples::getCurrentCount).sum();
	}

	@Override
	public synchronized double getTotalAveragePerSecond() {
		updateSamples();
		return detailedAtomicTimeSamples.values().stream().mapToDouble(AtomicTimeAbsoluteSamples::getTotalAveragePerSecond).sum();
	}

	@Override
	public synchronized double getTotalAveragePerSecond(T detail) {
		updateSamples();
		return getDetailed(detail).getTotalAveragePerSecond();
	}

	public synchronized AtomicTimeAbsoluteSamplesSnapshot snapshot(T detail) {
		return getDetailed(detail).snapshot();
	}

	public synchronized AtomicDetailedTimeAbsoluteSamples<T> snapshot() {
		if (isSnapshot) {
			return this;
		}
		var clonedDetailedAtomicTimeSamples = new HashMap<T, AtomicTimeAbsoluteSamplesSnapshot>(detailedAtomicTimeSamples);
		clonedDetailedAtomicTimeSamples.replaceAll((key, value) -> ((AtomicTimeAbsoluteSamples) value).snapshot());
		return new AtomicDetailedTimeAbsoluteSamples<>(sampleTime,
				samplesCount, clonedDetailedAtomicTimeSamples, true);
	}
}
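A sketch of the per-detail gauge above (queue names and numbers are illustrative): each detail key gets its own sliding window, and the undetailed getters aggregate across all keys.

	var queueSizes = new AtomicDetailedTimeAbsoluteSamples<String>(1000, 60); // 60 one-second samples
	queueSizes.set("inbox", 12);
	queueSizes.set("outbox", 3);
	long total = queueSizes.getCurrentCount();                          // 15, summed across details
	double inboxAvg = queueSizes.getAveragePerSecond("inbox", 10_000);  // 10-second average for one detail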
@@ -1,14 +0,0 @@
package org.warp.commonutils.metrics;

import java.util.Set;

public interface AtomicDetailedTimeAbsoluteSamplesSnapshot<T> extends AtomicTimeAbsoluteSamplesSnapshot {

	Set<T> getDetails();

	double getAveragePerSecond(T detail, long timeRange);

	long getCurrentCount(T detail);

	double getTotalAveragePerSecond(T detail);
}
@@ -1,94 +0,0 @@
package org.warp.commonutils.metrics;

import it.unimi.dsi.fastutil.objects.Object2ObjectMap;
import it.unimi.dsi.fastutil.objects.Object2ObjectOpenHashMap;
import it.unimi.dsi.fastutil.objects.ObjectOpenHashSet;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Set;

public class AtomicDetailedTimeIncrementalSamples<T> extends AtomicTimeIncrementalSamples implements
		AtomicDetailedTimeIncrementalSamplesSnapshot<T> {

	private Object2ObjectMap<T, AtomicTimeIncrementalSamples> detailedAtomicTimeSamples = new Object2ObjectOpenHashMap<>();

	/**
	 * @param sampleTime sample length, in milliseconds
	 * @param samplesCount number of samples to keep in the sliding window
	 */
	public AtomicDetailedTimeIncrementalSamples(int sampleTime, int samplesCount) {
		super(sampleTime, samplesCount);
	}

	public AtomicDetailedTimeIncrementalSamples(long startTime, long[] samples, int sampleTime, long currentSampleStartTime, long totalEvents,
			HashMap<T, AtomicTimeIncrementalSamplesSnapshot> detailedAtomicTimeSamples, boolean isSnapshot) {
		super(startTime, samples, sampleTime, currentSampleStartTime, totalEvents, isSnapshot);
		this.detailedAtomicTimeSamples = new Object2ObjectOpenHashMap<>();
		detailedAtomicTimeSamples.forEach((detail, sample) -> this.detailedAtomicTimeSamples.put(detail, (AtomicTimeIncrementalSamples) sample));
	}

	private synchronized AtomicTimeIncrementalSamples getDetailed(T detail) {
		AtomicTimeIncrementalSamples detailed = detailedAtomicTimeSamples.get(detail);
		if (detailed == null) {
			detailed = new AtomicTimeIncrementalSamples(sampleTime, samples.length);
			detailedAtomicTimeSamples.put(detail, detailed);
		}
		return detailed;
	}

	public synchronized void increment(T detail, long count) {
		updateSamples();
		getDetailed(detail).increment(count);
		increment(count);
	}

	@Override
	public synchronized Set<T> getDetails() {
		return Collections.unmodifiableSet(new ObjectOpenHashSet<>(detailedAtomicTimeSamples.keySet()));
	}

	@Override
	public synchronized double getAveragePerSecond(T detail, long timeRange) {
		updateSamples();
		return getDetailed(detail).getAveragePerSecond(timeRange);
	}

	@Override
	public synchronized long getApproximateCount(T detail, long timeRange) {
		updateSamples();
		return getDetailed(detail).getApproximateCount(timeRange);
	}

	@Override
	public synchronized long getTotalCount(T detail) {
		updateSamples();
		return getDetailed(detail).getTotalCount();
	}

	@Override
	public synchronized double getTotalAverage(T detail) {
		updateSamples();
		return getDetailed(detail).getTotalAveragePerSecond();
	}

	public synchronized AtomicTimeIncrementalSamplesSnapshot snapshot(T detail) {
		return getDetailed(detail).snapshot();
	}

	@Override
	protected synchronized void shiftSamples(int shiftCount) {
		//detailedAtomicTimeSamples.values().forEach(AtomicTimeSamples::shiftSamples);
		super.shiftSamples(shiftCount);
	}

	public synchronized AtomicDetailedTimeIncrementalSamples<T> snapshot() {
		if (isSnapshot) {
			return this;
		}
		var clonedDetailedAtomicTimeSamples = new HashMap<T, AtomicTimeIncrementalSamplesSnapshot>(detailedAtomicTimeSamples);
		clonedDetailedAtomicTimeSamples.replaceAll((key, value) -> ((AtomicTimeIncrementalSamples) value).snapshot());
		return new AtomicDetailedTimeIncrementalSamples<>(startTime, Arrays.copyOf(this.samples, this.samples.length), sampleTime,
				currentSampleStartTime, totalEvents, clonedDetailedAtomicTimeSamples, true);
	}
}
@@ -1,16 +0,0 @@
package org.warp.commonutils.metrics;

import java.util.Set;

public interface AtomicDetailedTimeIncrementalSamplesSnapshot<T> extends AtomicTimeIncrementalSamplesSnapshot {

	Set<T> getDetails();

	double getAveragePerSecond(T detail, long timeRange);

	long getApproximateCount(T detail, long timeRange);

	long getTotalCount(T detail);

	double getTotalAverage(T detail);
}
@@ -1,123 +0,0 @@
package org.warp.commonutils.metrics;

import java.util.Arrays;

public class AtomicTimeAbsoluteSamples implements AtomicTimeAbsoluteSamplesSnapshot {

	protected final boolean isSnapshot;
	protected long startTime;
	protected final long[] samples;
	protected final int sampleTime;
	protected long currentSampleStartTime;
	protected long totalSamplesSum = 0;
	protected long totalSamplesCount = 1;

	/**
	 * @param sampleTime sample length, in milliseconds
	 * @param samplesCount number of samples to keep in the sliding window
	 */
	public AtomicTimeAbsoluteSamples(int sampleTime, int samplesCount) {
		if (samplesCount < 1) throw new IndexOutOfBoundsException();
		if (sampleTime < 1) throw new IndexOutOfBoundsException();
		this.samples = new long[samplesCount];
		this.sampleTime = sampleTime;
		startTime = -1;
		this.isSnapshot = false;
	}

	public AtomicTimeAbsoluteSamples(long startTime, long[] samples, int sampleTime, long currentSampleStartTime, long totalSamplesSum, long totalSamplesCount, boolean isSnapshot) {
		this.startTime = startTime;
		this.samples = samples;
		this.sampleTime = sampleTime;
		this.currentSampleStartTime = currentSampleStartTime;
		this.totalSamplesSum = totalSamplesSum;
		this.totalSamplesCount = totalSamplesCount;
		this.isSnapshot = isSnapshot;
	}

	protected synchronized void updateSamples() {
		checkStarted();

		if (isSnapshot) {
			return;
		}

		long currentTime = System.nanoTime() / 1000000L;
		long timeDiff = currentTime - currentSampleStartTime;
		long timeToShift = timeDiff - (timeDiff % sampleTime);
		int shiftCount = (int) (timeToShift / sampleTime);
		if (currentTime - (currentSampleStartTime + timeToShift) > sampleTime) {
			throw new IndexOutOfBoundsException("Time sample bigger than " + sampleTime + "! It's " + (currentTime - (currentSampleStartTime + timeToShift)));
		}
		if (shiftCount > 0) {
			shiftSamples(shiftCount);
			currentSampleStartTime += timeToShift;
			totalSamplesCount += shiftCount;
			long lastSample = samples[0];
			totalSamplesSum += lastSample * shiftCount;
		}
	}

	protected synchronized void checkStarted() {
		if (startTime == -1) {
			this.startTime = System.nanoTime() / 1000000L;
			this.currentSampleStartTime = startTime;
		}
	}

	protected void shiftSamples(int shiftCount) {
		checkStarted();
		long lastSample = samples[0];
		if (samples.length - shiftCount > 0) {
			System.arraycopy(samples, 0, samples, shiftCount, samples.length - shiftCount);
			Arrays.fill(samples, 0, shiftCount, lastSample);
		} else {
			Arrays.fill(samples, lastSample);
		}
	}

	public synchronized void set(long count) {
		updateSamples();
		long oldValue = samples[0];
		samples[0] = count;
		totalSamplesSum += count - oldValue;
	}

	@Override
	public synchronized double getAveragePerSecond(long timeRange) {
		updateSamples();

		double preciseTimeRange = timeRange;
		// Fix if the time range is bigger than the collected data since start
		if (currentSampleStartTime - preciseTimeRange < startTime) {
			preciseTimeRange = currentSampleStartTime - startTime;
		}

		double samplesCount = Math.min(Math.max(preciseTimeRange / sampleTime, 1d), samples.length - 1);
		if (samplesCount < 0) {
			return 0;
		}
		double value = 0;
		for (int i = 1; i <= samplesCount; i++) {
			value += samples[i];
		}
		return value / samplesCount;
	}

	@Override
	public synchronized long getCurrentCount() {
		updateSamples();
		return samples[0];
	}

	@Override
	public synchronized double getTotalAveragePerSecond() {
		updateSamples();
		return (double) totalSamplesSum / (double) totalSamplesCount;
	}

	public synchronized AtomicTimeAbsoluteSamplesSnapshot snapshot() {
		return new AtomicTimeAbsoluteSamples(startTime, Arrays.copyOf(this.samples, this.samples.length), sampleTime, currentSampleStartTime, totalSamplesSum, totalSamplesCount, true);
	}
}
@@ -1,10 +0,0 @@
package org.warp.commonutils.metrics;

public interface AtomicTimeAbsoluteSamplesSnapshot {

	double getAveragePerSecond(long timeRange);

	long getCurrentCount();

	double getTotalAveragePerSecond();
}
@@ -1,135 +0,0 @@
package org.warp.commonutils.metrics;

import java.util.Arrays;

public class AtomicTimeIncrementalSamples implements AtomicTimeIncrementalSamplesSnapshot {

	protected final boolean isSnapshot;
	protected long startTime;
	protected final long[] samples;
	protected final int sampleTime;
	protected long currentSampleStartTime;
	protected long totalEvents;

	/**
	 * @param sampleTime sample length, in milliseconds
	 * @param samplesCount number of samples to keep in the sliding window
	 */
	public AtomicTimeIncrementalSamples(int sampleTime, int samplesCount) {
		if (samplesCount < 1) throw new IndexOutOfBoundsException();
		if (sampleTime < 1) throw new IndexOutOfBoundsException();
		this.samples = new long[samplesCount];
		this.sampleTime = sampleTime;
		startTime = -1;
		this.isSnapshot = false;
	}

	public AtomicTimeIncrementalSamples(long startTime, long[] samples, int sampleTime, long currentSampleStartTime, long totalEvents, boolean isSnapshot) {
		this.startTime = startTime;
		this.samples = samples;
		this.sampleTime = sampleTime;
		this.currentSampleStartTime = currentSampleStartTime;
		this.totalEvents = totalEvents;
		this.isSnapshot = isSnapshot;
	}

	protected synchronized void updateSamples() {
		checkStarted();

		if (isSnapshot) {
			return;
		}

		long currentTime = System.nanoTime() / 1000000L;
		long timeDiff = currentTime - currentSampleStartTime;
		long timeToShift = timeDiff - (timeDiff % sampleTime);
		int shiftCount = (int) (timeToShift / sampleTime);
		if (currentTime - (currentSampleStartTime + timeToShift) > sampleTime) {
			throw new IndexOutOfBoundsException("Time sample bigger than " + sampleTime + "! It's " + (currentTime - (currentSampleStartTime + timeToShift)));
		}
		if (shiftCount > 0) {
			shiftSamples(shiftCount);
			currentSampleStartTime += timeToShift;
		}
	}

	protected synchronized void checkStarted() {
		if (startTime == -1) {
			this.startTime = System.nanoTime() / 1000000L;
			this.currentSampleStartTime = startTime;
		}
	}

	protected synchronized void shiftSamples(int shiftCount) {
		checkStarted();
		if (samples.length - shiftCount > 0) {
			System.arraycopy(samples, 0, samples, shiftCount, samples.length - shiftCount);
			Arrays.fill(samples, 0, shiftCount, 0);
		} else {
			Arrays.fill(samples, 0);
		}
	}

	public synchronized void increment(long count) {
		updateSamples();
		samples[0] += count;
		totalEvents += count;
	}

	@Override
	public synchronized double getAveragePerSecond(long timeRange) {
		updateSamples();

		double preciseTimeRange = timeRange;
		// Fix if the time range is bigger than the collected data since start
		if (currentSampleStartTime - preciseTimeRange < startTime) {
			preciseTimeRange = currentSampleStartTime - startTime;
		}

		double samplesCount = Math.min(Math.max(preciseTimeRange / sampleTime, 1d), samples.length - 1);
		if (samplesCount < 0) {
			return 0;
		}
		double roundedTimeRange = samplesCount * sampleTime;
		double value = 0;
		for (int i = 1; i <= samplesCount; i++) {
			value += samples[i];
		}
		return (value / roundedTimeRange) * 1000d;
	}

	@Override
	public synchronized long getApproximateCount(long timeRange) {
		updateSamples();
		long samplesCount = Math.min(Math.max(timeRange / sampleTime, 1L), samples.length);
		long value = 0;
		for (int i = 0; i < samplesCount; i++) {
			value += samples[i];
		}
		return value;
	}

	@Override
	public synchronized long getTotalCount() {
		updateSamples();
		return totalEvents;
	}

	@Override
	public synchronized double getTotalAveragePerSecond() {
		updateSamples();
		if (currentSampleStartTime == startTime) {
			return 0;
		}
		return ((double) totalEvents) / (double) ((currentSampleStartTime - startTime) / 1000D);
	}

	public synchronized AtomicTimeIncrementalSamplesSnapshot snapshot() {
		if (isSnapshot) {
			return this;
		}
		return new AtomicTimeIncrementalSamples(startTime, Arrays.copyOf(this.samples, this.samples.length), sampleTime, currentSampleStartTime, totalEvents, true);
	}
}
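In contrast to the absolute (gauge-style) variant earlier, the class above accumulates events into time buckets; a sketch with illustrative numbers:

	var requests = new AtomicTimeIncrementalSamples(500, 20); // 10-second window of 500 ms buckets
	requests.increment(1);                                    // record one event
	double perSecond = requests.getAveragePerSecond(5000);    // rate over the last ~5 s
	long recent = requests.getApproximateCount(2000);         // events in the last ~2 s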
@@ -1,12 +0,0 @@
package org.warp.commonutils.metrics;

public interface AtomicTimeIncrementalSamplesSnapshot {

	double getAveragePerSecond(long timeRange);

	long getApproximateCount(long timeRange);

	long getTotalCount();

	double getTotalAveragePerSecond();
}
@@ -1,24 +0,0 @@
package org.warp.commonutils.random;

public final class HashUtil {

	private HashUtil() {
	}

	public static int boundedHash(Object o, int upperBoundExclusive) {
		int h = o.hashCode();

		// Protection against poor hash functions.
		// Used by java.util.concurrent.ConcurrentHashMap
		// Spread bits to regularize both segment and index locations,
		// using variant of single-word Wang/Jenkins hash.
		h += (h << 15) ^ 0xffffcd7d;
		h ^= (h >>> 10);
		h += (h << 3);
		h ^= (h >>> 6);
		h += (h << 2) + (h << 14);
		h ^= (h >>> 16);

		return Math.abs(h % upperBoundExclusive);
	}
}
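A one-line sketch of the helper above (key and bound are illustrative): the extra bit-mixing spreads clustered hashCode() values before reducing them into the bound.

	int shard = HashUtil.boundedHash("user:42", 16); // always in [0, 16)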
@@ -6,10 +6,10 @@ import java.util.Collection;
 public class ArrayStack<T> extends FastUtilStackWrapper<T> {
 
 	public ArrayStack() {
-		super(new ObjectArrayList<T>());
+		super(new ObjectArrayList<>());
 	}
 
 	public ArrayStack(Collection<T> stack) {
-		super(new ObjectArrayList<T>(stack));
+		super(new ObjectArrayList<>(stack));
 	}
 }
@@ -1,124 +0,0 @@
package org.warp.commonutils.type;

import it.unimi.dsi.fastutil.objects.ObjectSet;
import java.util.Optional;
import java.util.Set;

/**
 * One to many relationship (not overlapping)
 *
 * o ------- o
 *      +--- o
 * o ---+--- o
 *      +--- o
 * o ------- o
 * o ------- o
 * o ---+--- o
 *      +--- o
 *
 * @param <T> Source type
 * @param <U> Destination type
 */
public interface Association<T, U> {

	/**
	 * Link source to dest
	 * @param src Source
	 * @param dest Destination
	 * @return true if linked, false if it was already linked with that destination
	 */
	boolean link(T src, U dest);

	/**
	 * Unlink only if src is linked with dest
	 * @param src Source
	 * @param dest Destination
	 * @return true if unlinked, false if not present
	 */
	boolean unlink(T src, U dest);

	/**
	 * Unlink
	 * @param src Source
	 * @return previous linked destinations
	 */
	Set<U> unlink(T src);

	/**
	 * Unlink
	 * @param dest Destination
	 * @return previous linked source
	 */
	Optional<T> unlinkFromSource(U dest);

	/**
	 * Check if link exists
	 * @param src Source
	 * @return true if the source is linked with at least one destination
	 */
	default boolean hasAnyLink(T src) {
		return !getLinks(src).isEmpty();
	}

	/**
	 * Check if link exists
	 * @param dest Destination
	 * @return true if destination is linked with a source
	 */
	default boolean hasLinkSource(U dest) {
		return getLinkSource(dest).isPresent();
	}

	/**
	 * Check if link exists
	 * @param src Source
	 * @param dest Destination
	 * @return true if source and destination are linked together
	 */
	boolean hasLink(T src, U dest);

	/**
	 * Get a link destination
	 * @param src Source
	 * @return Existing linked destinations
	 */
	Set<U> getLinks(T src);

	/**
	 * Get a link source
	 * @param dest Destination
	 * @return Source if the link exists
	 */
	Optional<T> getLinkSource(U dest);

	/**
	 * Delete all links
	 */
	void clear();

	/**
	 * Get the count of existing links
	 * @return size
	 */
	int size();

	/**
	 * Get all the sources
	 * @return Set of sources
	 */
	ObjectSet<T> getSources();

	/**
	 * Get all the destinations
	 * @return Set of destinations
	 */
	ObjectSet<U> getDestinations();

	static <T, U> Association<T, U> synchronize(Association<T, U> association) {
		return new SynchronizedAssociation<>(association);
	}

	static <T, U> Association<T, U> synchronize(Association<T, U> association, Object lock) {
		return new SynchronizedAssociation<>(association, lock);
	}
}
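A sketch against the contract above; no concrete implementation appears in this diff, so the association is taken as a parameter:

	static void moveWorker(Association<String, Integer> pools, String newPool, int workerId) {
		pools.unlinkFromSource(workerId); // detach from its previous pool, if any (relations don't overlap)
		pools.link(newPool, workerId);    // a pool may own many workers, a worker has one pool
	}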
@ -1,123 +0,0 @@
package org.warp.commonutils.type;

import it.unimi.dsi.fastutil.objects.ObjectSet;
import java.util.Optional;

/**
 * One to one relationship
 *
 * o ------- o
 * o ------- o
 * o ------- o
 * o ------- o
 * o ------- o
 * o ------- o
 * o ------- o
 * o ------- o
 *
 * @param <T> Source type
 * @param <U> Destination type
 */
public interface BiAssociation<T, U> {

	/**
	 * Link source to dest
	 * @param src Source
	 * @param dest Destination
	 * @return previous value if it was already linked
	 */
	Optional<U> link(T src, U dest);

	/**
	 * Unlink only if src is linked with dest
	 * @param src Source
	 * @param dest Destination
	 * @return true if unlinked, false if not present
	 */
	boolean unlink(T src, U dest);

	/**
	 * Unlink
	 * @param src Source
	 * @return previous linked destination
	 */
	Optional<U> unlink(T src);

	/**
	 * Unlink
	 * @param dest Destination
	 * @return previous linked source
	 */
	Optional<T> unlinkFromSource(U dest);

	/**
	 * Check if link exists
	 * @param src Source
	 * @return true if the source is linked with a destination
	 */
	default boolean hasLink(T src) {
		return getLink(src).isPresent();
	}

	/**
	 * Check if link exists
	 * @param dest Destination
	 * @return true if the destination is linked with a source
	 */
	default boolean hasLinkSource(U dest) {
		return getLinkSource(dest).isPresent();
	}

	/**
	 * Check if link exists
	 * @param src Source
	 * @param dest Destination
	 * @return true if source and destination are linked together
	 */
	boolean hasLink(T src, U dest);

	/**
	 * Get a link destination
	 * @param src Source
	 * @return Destination if the link exists
	 */
	Optional<U> getLink(T src);

	/**
	 * Get a link source
	 * @param dest Destination
	 * @return Source if the link exists
	 */
	Optional<T> getLinkSource(U dest);

	/**
	 * Delete all links
	 */
	void clear();

	/**
	 * Get the count of existing links
	 * @return size
	 */
	int size();

	/**
	 * Get all the sources
	 * @return Set of sources
	 */
	ObjectSet<T> getSources();

	/**
	 * Get all the destinations
	 * @return Set of destinations
	 */
	ObjectSet<U> getDestinations();

	static <T, U> BiAssociation<T, U> synchronize(BiAssociation<T, U> association) {
		return new SynchronizedBiAssociation<>(association);
	}

	static <T, U> BiAssociation<T, U> synchronize(BiAssociation<T, U> association, Object lock) {
		return new SynchronizedBiAssociation<>(association, lock);
	}
}

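Usage sketch (illustrative, not part of this commit) of the one-to-one contract, using the HashBiAssociation implementation deleted later in this diff:

BiAssociation<String, Integer> bi = new HashBiAssociation<>();
assert bi.link("a", 1).isEmpty();         // no previous destination for "a"
assert bi.getLink("a").orElseThrow() == 1;
assert bi.getLinkSource(1).orElseThrow().equals("a");
assert bi.unlink("a").orElseThrow() == 1; // returns the destination it was linked to
assert !bi.hasLink("a");
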
@ -23,18 +23,6 @@ public class Bytes {
		return newMap;
	}

	public static UnmodifiableMap<? extends Bytes, ? extends Bytes> ofMap(UnmodifiableIterableMap<byte[], byte[]> oldMap) {
		Bytes[] keys = new Bytes[oldMap.size()];
		Bytes[] values = new Bytes[oldMap.size()];
		IntWrapper i = new IntWrapper(0);
		oldMap.forEach((key, value) -> {
			keys[i.var] = new Bytes(key);
			values[i.var] = new Bytes(value);
			i.var++;
		});
		return UnmodifiableMap.of(keys, values);
	}

	public static List<? extends Bytes> ofList(List<byte[]> oldList) {
		var newList = new ArrayList<Bytes>(oldList.size());
		oldList.forEach((item) -> newList.add(new Bytes(item)));

@ -47,18 +35,6 @@ public class Bytes {
		return newSet;
	}

	public static UnmodifiableIterableSet<byte[]> toIterableSet(UnmodifiableSet<Bytes> set) {
		byte[][] resultItems = new byte[set.size()][];
		var it = set.iterator();
		int i = 0;
		while (it.hasNext()) {
			var item = it.next();
			resultItems[i] = item.data;
			i++;
		}
		return UnmodifiableIterableSet.of(resultItems);
	}

	public static byte[][] toByteArray(Collection<Bytes> value) {
		Bytes[] valueBytesArray = value.toArray(Bytes[]::new);
		byte[][] convertedResult = new byte[valueBytesArray.length][];

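Background note (illustrative, not part of this commit): the Bytes wrapper appears to exist because Java arrays use identity equality, so raw byte[] keys misbehave in hash-based collections. A JDK-only sketch of the underlying problem:

byte[] a = {1, 2, 3};
byte[] b = {1, 2, 3};
assert !a.equals(b);                  // arrays compare by identity, not content
assert java.util.Arrays.equals(a, b); // content comparison needs Arrays.equals
// Hence the conversions above wrap each byte[] into Bytes before using it as a key.
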
@ -1,142 +0,0 @@
package org.warp.commonutils.type;

import it.unimi.dsi.fastutil.objects.ObjectLinkedOpenHashSet;
import java.util.Collection;
import org.jetbrains.annotations.NotNull;
import org.warp.commonutils.error.IndexOutOfBoundsException;

public class FastUtilStackSetWrapper<T> implements StackSet<T>, Collection<T> {

	protected final AddStrategy addStrategy;
	protected final ObjectLinkedOpenHashSet<T> linkedHashSet;

	/**
	 * The first element will be the head
	 */
	public FastUtilStackSetWrapper(ObjectLinkedOpenHashSet<T> linkedHashSet) {
		this(linkedHashSet, AddStrategy.getDefault());
	}

	/**
	 * The first element will be the head
	 */
	public FastUtilStackSetWrapper(ObjectLinkedOpenHashSet<T> linkedHashSet, AddStrategy addStrategy) {
		this.addStrategy = addStrategy;
		this.linkedHashSet = linkedHashSet;
	}

	@Override
	public boolean push(T o) {
		switch (addStrategy) {
			case KEEP_POSITION:
				if (linkedHashSet.contains(o)) {
					return false;
				}
				return linkedHashSet.addAndMoveToFirst(o);
			case OVERWRITE_POSITION:
				return linkedHashSet.addAndMoveToFirst(o);
			default:
				throw new UnsupportedOperationException("Unsupported strategy type: " + addStrategy);
		}
	}

	@NotNull
	@Override
	public java.util.Iterator<T> iterator() {
		return linkedHashSet.iterator();
	}

	@NotNull
	@Override
	public Object @NotNull [] toArray() {
		return linkedHashSet.toArray();
	}

	@NotNull
	@Override
	public <T1> T1 @NotNull [] toArray(@NotNull T1 @NotNull [] a) {
		//noinspection SuspiciousToArrayCall
		return linkedHashSet.toArray(a);
	}

	@Override
	public boolean add(T t) {
		return linkedHashSet.add(t);
	}

	@Override
	public boolean remove(Object o) {
		return linkedHashSet.remove(o);
	}

	@Override
	public boolean containsAll(@NotNull Collection<?> c) {
		return linkedHashSet.containsAll(c);
	}

	@Override
	public boolean addAll(@NotNull Collection<? extends T> c) {
		return linkedHashSet.addAll(c);
	}

	@Override
	public boolean removeAll(@NotNull Collection<?> c) {
		return linkedHashSet.removeAll(c);
	}

	@Override
	public boolean retainAll(@NotNull Collection<?> c) {
		return linkedHashSet.retainAll(c);
	}

	@Override
	public void clear() {
		linkedHashSet.clear();
	}

	@Override
	public T pop() {
		return linkedHashSet.removeFirst();
	}

	@Override
	public int size() {
		return linkedHashSet.size();
	}

	@Override
	public boolean isEmpty() {
		return linkedHashSet.isEmpty();
	}

	@Override
	public boolean contains(Object o) {
		return linkedHashSet.contains(o);
	}

	@Override
	public T top() {
		return linkedHashSet.first();
	}

	@Override
	public T peek(int i) {
		var size = linkedHashSet.size();
		if (i < 0 || i >= size) {
			throw new IndexOutOfBoundsException(i, 0, size);
		}

		var it = linkedHashSet.iterator();
		// Skip middle elements
		if (i > 0) {
			it.skip(i);
		}
		return it.next();
	}

	@SuppressWarnings("MethodDoesntCallSuperMethod")
	@Override
	public FastUtilStackSetWrapper<T> clone() {
		return new FastUtilStackSetWrapper<>(linkedHashSet.clone(), addStrategy);
	}
}

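Usage sketch (illustrative, not part of this commit) of how the two AddStrategy modes in push differ when a duplicate is pushed; imports are elided:

var keep = new FastUtilStackSetWrapper<>(new ObjectLinkedOpenHashSet<String>(), AddStrategy.KEEP_POSITION);
keep.push("a");
keep.push("b");
assert !keep.push("a");          // duplicate rejected; "b" stays on top
assert "b".equals(keep.top());

var overwrite = new FastUtilStackSetWrapper<>(new ObjectLinkedOpenHashSet<String>(), AddStrategy.OVERWRITE_POSITION);
overwrite.push("a");
overwrite.push("b");
overwrite.push("a");             // duplicate is moved back to the top
assert "a".equals(overwrite.top());
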
@ -1,443 +0,0 @@
package org.warp.commonutils.type;

import it.unimi.dsi.fastutil.objects.Object2FloatMap;
import it.unimi.dsi.fastutil.objects.Object2FloatOpenHashMap;
import java.lang.reflect.Array;
import java.util.Collection;
import java.util.Iterator;
import java.util.PriorityQueue;
import java.util.Queue;
import java.util.function.Consumer;
import java.util.function.IntFunction;
import java.util.stream.Stream;
import org.jetbrains.annotations.NotNull;

public class FloatPriorityQueue<T> implements Queue<T> {

	private final Object2FloatMap<T> contentValues;
	private final Queue<ScoredValue<T>> internalQueue;

	public static <T> FloatPriorityQueue<T> of() {
		return new FloatPriorityQueue<>(0);
	}

	public static <T> FloatPriorityQueue<T> of(T value, float score) {
		var pq = new FloatPriorityQueue<T>(1);
		// Fixed: the score parameter was previously dropped (pq.offer(value))
		pq.offer(value, score);
		return pq;
	}

	public static <T> FloatPriorityQueue<T> of(ScoredValue<T> value) {
		var pq = new FloatPriorityQueue<T>(1);
		pq.offer(value);
		return pq;
	}

	@SafeVarargs
	public static <T> FloatPriorityQueue<T> of(ScoredValue<T>... values) {
		var pq = new FloatPriorityQueue<T>(values.length);
		for (ScoredValue<T> value : values) {
			pq.offer(value);
		}
		return pq;
	}

	public FloatPriorityQueue(PriorityQueue<ScoredValue<T>> internalQueue) {
		this(internalQueue.size());
		for (ScoredValue<T> tScoredValue : internalQueue) {
			add(tScoredValue.getValue(), tScoredValue.getScore());
		}
	}

	private FloatPriorityQueue(Object2FloatMap<T> contentValues, Queue<ScoredValue<T>> internalQueue) {
		this.contentValues = contentValues;
		this.internalQueue = internalQueue;
	}

	public FloatPriorityQueue() {
		this.contentValues = new Object2FloatOpenHashMap<>();
		internalQueue = new PriorityQueue<>();
	}

	public FloatPriorityQueue(int initialCapacity) {
		this.contentValues = new Object2FloatOpenHashMap<>(initialCapacity);
		internalQueue = new PriorityQueue<>(Math.max(1, initialCapacity));
	}

	public static <T> FloatPriorityQueue<T> synchronize(FloatPriorityQueue<T> queue) {
		return new SynchronizedFloatPriorityQueue<>(queue.contentValues, queue.internalQueue);
	}

	public static <T> FloatPriorityQueue<T> synchronizedPq(int initialCapacity) {
		var pq = new FloatPriorityQueue<T>(initialCapacity);
		return new SynchronizedFloatPriorityQueue<>(pq.contentValues, pq.internalQueue);
	}

	@Override
	public int size() {
		assert contentValues.size() == internalQueue.size();
		return internalQueue.size();
	}

	@Override
	public boolean isEmpty() {
		assert contentValues.size() == internalQueue.size();
		return internalQueue.isEmpty();
	}

	@Override
	public boolean contains(Object o) {
		assert contentValues.size() == internalQueue.size();
		// ScoredValue equality ignores the score, so 0 is a harmless placeholder
		return internalQueue.contains(ScoredValue.of(0, o));
	}

	@NotNull
	@Override
	public Iterator<T> iterator() {
		assert contentValues.size() == internalQueue.size();
		var it = internalQueue.iterator();
		return new Iterator<>() {
			private ScoredValue<T> current;

			@Override
			public boolean hasNext() {
				assert contentValues.size() == internalQueue.size();
				return it.hasNext();
			}

			@Override
			public T next() {
				assert contentValues.size() == internalQueue.size();
				current = it.next();
				return getValueOrNull(current);
			}

			@Override
			public void remove() {
				// Added: keeps the score map in sync, so the inherited removeIf
				// (used by retainAll) cannot desynchronize the two structures
				it.remove();
				if (current != null) {
					contentValues.removeFloat(current.getValue());
				}
			}
		};
	}

	private static <T> T getValueOrNull(ScoredValue<T> scoredValue) {
		if (scoredValue == null) {
			return null;
		} else {
			return scoredValue.getValue();
		}
	}

	@NotNull
	@Override
	public Object @NotNull [] toArray() {
		assert contentValues.size() == internalQueue.size();
		return internalQueue.stream().map(FloatPriorityQueue::getValueOrNull).toArray(Object[]::new);
	}

	@SuppressWarnings({"SuspiciousToArrayCall", "unchecked"})
	@NotNull
	@Override
	public <T1> T1 @NotNull [] toArray(@NotNull T1 @NotNull [] a) {
		assert contentValues.size() == internalQueue.size();
		return internalQueue
				.stream()
				.map(FloatPriorityQueue::getValueOrNull)
				.toArray(i -> (T1[]) Array.newInstance(a.getClass().getComponentType(), i));
	}

	@Deprecated
	@Override
	public boolean add(T t) {
		assert contentValues.size() == internalQueue.size();
		return offer(t, 0);
	}

	public boolean addTop(T t) {
		assert contentValues.size() == internalQueue.size();
		// (float) Integer.MAX_VALUE acts as the "always on top" sentinel score
		if (contentValues.getFloat(t) == Integer.MAX_VALUE) {
			return false;
		} else {
			if (contentValues.containsKey(t)) {
				internalQueue.remove(ScoredValue.of(0, t));
			}
			contentValues.put(t, Integer.MAX_VALUE);
			return internalQueue.add(ScoredValue.of(Integer.MAX_VALUE, t));
		}
	}

	public boolean add(T t, float score) {
		assert contentValues.size() == internalQueue.size();
		return offer(t, score);
	}

	@Override
	public boolean remove(Object o) {
		assert contentValues.size() == internalQueue.size();
		contentValues.removeFloat(o);
		return internalQueue.remove(ScoredValue.of(0, o));
	}

	@Override
	public boolean containsAll(@NotNull Collection<?> c) {
		assert contentValues.size() == internalQueue.size();
		for (Object o : c) {
			//noinspection SuspiciousMethodCalls
			if (!contentValues.containsKey(o)) {
				return false;
			}
		}
		return true;
	}

	@Override
	public boolean addAll(@NotNull Collection<? extends T> c) {
		assert contentValues.size() == internalQueue.size();
		boolean added = false;
		for (T t : c) {
			added |= add(t);
		}
		return added;
	}

	@Override
	public boolean removeAll(@NotNull Collection<?> c) {
		assert contentValues.size() == internalQueue.size();
		boolean removed = false;
		for (Object o : c) {
			removed |= remove(o);
		}
		return removed;
	}

	@Override
	public boolean retainAll(@NotNull Collection<?> c) {
		assert contentValues.size() == internalQueue.size();
		return removeIf(item -> !c.contains(item));
	}

	@Override
	public void clear() {
		assert contentValues.size() == internalQueue.size();
		contentValues.clear();
		internalQueue.clear();
	}

	@Override
	public boolean offer(T t) {
		assert contentValues.size() == internalQueue.size();
		return offer(ScoredValue.of(0, t));
	}

	public boolean offer(T t, float score) {
		assert contentValues.size() == internalQueue.size();
		return offer(ScoredValue.of(score, t));
	}

	public boolean offer(ScoredValue<T> value) {
		int contentValuesSize = contentValues.size();
		int internalQueueSize = internalQueue.size();
		assert contentValuesSize == internalQueueSize;

		boolean added = true;
		float oldValue;
		if (contentValues.containsKey(value.getValue())) {
			internalQueue.remove(value);
			oldValue = contentValues.getFloat(value.getValue());
			added = false;
		} else {
			oldValue = 0f;
		}
		float newScore = oldValue + value.getScore();
		contentValues.put(value.getValue(), newScore);
		// Fixed: re-enqueue with the accumulated score, so the queue ordering
		// agrees with the score recorded in contentValues
		internalQueue.add(ScoredValue.of(newScore, value.getValue()));
		return added;
	}

	@Override
	public T remove() {
		assert contentValues.size() == internalQueue.size();
		var val = internalQueue.remove();
		if (val != null) {
			contentValues.removeFloat(val.getValue());
		}
		return getValueOrNull(val);
	}

	@Override
	public T poll() {
		assert contentValues.size() == internalQueue.size();
		var val = internalQueue.poll();

		if (val != null) {
			contentValues.removeFloat(val.getValue());
		}
		return getValueOrNull(val);
	}

	@Override
	public T element() {
		assert contentValues.size() == internalQueue.size();
		return getValueOrNull(internalQueue.element());
	}

	@Override
	public T peek() {
		assert contentValues.size() == internalQueue.size();
		return getValueOrNull(internalQueue.peek());
	}

	public void forEachItem(Consumer<ScoredValue<T>> action) {
		assert contentValues.size() == internalQueue.size();
		internalQueue.forEach(action);
	}

	public Stream<ScoredValue<T>> streamItems() {
		assert contentValues.size() == internalQueue.size();
		return internalQueue.stream();
	}

	public <U extends T> FloatPriorityQueueView<U> view() {
		return new FloatPriorityQueueView<>(this);
	}

	private static class SynchronizedFloatPriorityQueue<T> extends FloatPriorityQueue<T> {

		public SynchronizedFloatPriorityQueue(Object2FloatMap<T> contentValues, Queue<ScoredValue<T>> internalQueue) {
			super(contentValues, internalQueue);
		}

		@Override
		public synchronized int size() {
			return super.size();
		}

		@Override
		public synchronized boolean isEmpty() {
			return super.isEmpty();
		}

		@Override
		public synchronized boolean contains(Object o) {
			return super.contains(o);
		}

		@Override
		public synchronized @NotNull Iterator<T> iterator() {
			var it = super.iterator();
			return new Iterator<>() {
				@Override
				public boolean hasNext() {
					synchronized (SynchronizedFloatPriorityQueue.this) {
						return it.hasNext();
					}
				}

				@Override
				public T next() {
					synchronized (SynchronizedFloatPriorityQueue.this) {
						return it.next();
					}
				}

				@Override
				public void remove() {
					synchronized (SynchronizedFloatPriorityQueue.this) {
						it.remove();
					}
				}
			};
		}

		@Override
		public synchronized @NotNull Object @NotNull [] toArray() {
			return super.toArray();
		}

		@SuppressWarnings("SuspiciousToArrayCall")
		@Override
		public synchronized <T1> @NotNull T1 @NotNull [] toArray(@NotNull T1 @NotNull [] a) {
			return super.toArray(a);
		}

		@Override
		public synchronized <T1> T1[] toArray(IntFunction<T1[]> generator) {
			//noinspection SuspiciousToArrayCall
			return super.toArray(generator);
		}

		@Override
		public synchronized boolean add(T t, float score) {
			return super.add(t, score);
		}

		@Override
		public synchronized boolean addTop(T t) {
			return super.addTop(t);
		}

		@Deprecated
		@Override
		public synchronized boolean add(T t) {
			return super.add(t);
		}

		@Override
		public synchronized boolean addAll(@NotNull Collection<? extends T> c) {
			return super.addAll(c);
		}

		@Override
		public synchronized boolean remove(Object o) {
			return super.remove(o);
		}

		@Override
		public synchronized boolean removeAll(@NotNull Collection<?> c) {
			return super.removeAll(c);
		}

		@Override
		public synchronized boolean containsAll(@NotNull Collection<?> c) {
			return super.containsAll(c);
		}

		@Override
		public synchronized boolean retainAll(@NotNull Collection<?> c) {
			return super.retainAll(c);
		}

		@Override
		public synchronized void clear() {
			super.clear();
		}

		@Override
		public synchronized boolean offer(T t) {
			return super.offer(t);
		}

		@Override
		public synchronized boolean offer(T t, float score) {
			return super.offer(t, score);
		}

		@Override
		public synchronized boolean offer(ScoredValue<T> value) {
			return super.offer(value);
		}

		@Override
		public synchronized T remove() {
			return super.remove();
		}

		@Override
		public synchronized T poll() {
			return super.poll();
		}

		@Override
		public synchronized T element() {
			return super.element();
		}

		@Override
		public synchronized T peek() {
			return super.peek();
		}

		@Override
		public synchronized void forEachItem(Consumer<ScoredValue<T>> action) {
			super.forEachItem(action);
		}

		@Override
		public synchronized Stream<ScoredValue<T>> streamItems() {
			return super.streamItems();
		}
	}
}

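Usage sketch (illustrative, not part of this commit): because ScoredValue's compareTo is reversed, the queue hands out the highest-scored element first.

var queue = new FloatPriorityQueue<String>();
queue.offer("low", 1.0f);
queue.offer("high", 5.0f);
queue.offer("mid", 3.0f);
assert "high".equals(queue.poll()); // highest score wins
assert "mid".equals(queue.poll());
assert "low".equals(queue.poll());
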
@ -1,150 +0,0 @@
package org.warp.commonutils.type;

import java.util.Collection;
import java.util.Iterator;
import java.util.Queue;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.stream.Stream;
import org.jetbrains.annotations.NotNull;

@SuppressWarnings("unused")
public class FloatPriorityQueueView<T> implements Queue<T> {

	private final FloatPriorityQueue<? super T> queue;

	public FloatPriorityQueueView(FloatPriorityQueue<? super T> queue) {
		this.queue = queue;
	}

	public static <T> BiFunction<FloatPriorityQueueView<T>, FloatPriorityQueueView<T>, FloatPriorityQueueView<T>> aggregator() {
		return (a, b) -> {
			// Fixed: the diamond here used to infer FloatPriorityQueue<Object>,
			// which does not satisfy Consumer<ScoredValue<T>> below
			var fpq = new FloatPriorityQueue<T>();
			a.forEachItem(fpq::offer);
			b.forEachItem(fpq::offer);
			return new FloatPriorityQueueView<>(fpq);
		};
	}

	public static <T> FloatPriorityQueueView<T> of() {
		return new FloatPriorityQueueView<>(FloatPriorityQueue.of());
	}

	@Override
	public int size() {
		return queue.size();
	}

	@Override
	public boolean isEmpty() {
		return queue.isEmpty();
	}

	@Override
	public boolean contains(Object o) {
		return queue.contains(o);
	}

	@NotNull
	@Override
	public Iterator<T> iterator() {
		var it = queue.iterator();
		return new Iterator<>() {
			@Override
			public boolean hasNext() {
				return it.hasNext();
			}

			@SuppressWarnings("unchecked")
			@Override
			public T next() {
				return (T) it.next();
			}
		};
	}

	@NotNull
	@Override
	public Object @NotNull [] toArray() {
		return queue.toArray();
	}

	@NotNull
	@Override
	public <T1> T1 @NotNull [] toArray(@NotNull T1 @NotNull [] a) {
		//noinspection SuspiciousToArrayCall
		return queue.toArray(a);
	}

	@Override
	public boolean containsAll(@NotNull Collection<?> c) {
		return queue.containsAll(c);
	}

	@Override
	public boolean addAll(@NotNull Collection<? extends T> c) {
		throw new UnsupportedOperationException("Read-only");
	}

	@Override
	public boolean removeAll(@NotNull Collection<?> c) {
		throw new UnsupportedOperationException("Read-only");
	}

	@Override
	public boolean retainAll(@NotNull Collection<?> c) {
		throw new UnsupportedOperationException("Read-only");
	}

	@Override
	public void clear() {
		throw new UnsupportedOperationException("Read-only");
	}

	@Override
	public boolean add(T t) {
		throw new UnsupportedOperationException("Read-only");
	}

	@Override
	public boolean remove(Object o) {
		throw new UnsupportedOperationException("Read-only");
	}

	@Override
	public boolean offer(T t) {
		throw new UnsupportedOperationException("Read-only");
	}

	@Override
	public T remove() {
		throw new UnsupportedOperationException("Read-only");
	}

	@Override
	public T poll() {
		throw new UnsupportedOperationException("Read-only");
	}

	@SuppressWarnings("unchecked")
	@Override
	public T element() {
		return (T) queue.element();
	}

	@SuppressWarnings("unchecked")
	@Override
	public T peek() {
		return (T) queue.peek();
	}

	@SuppressWarnings("unchecked")
	public void forEachItem(Consumer<ScoredValue<T>> action) {
		queue.forEachItem((v) -> action.accept((ScoredValue<T>) v));
	}

	@SuppressWarnings("unchecked")
	public Stream<ScoredValue<T>> streamItems() {
		return queue.streamItems().map(t -> (ScoredValue<T>) t);
	}
}

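Sketch (illustrative, not part of this commit; assumes the aggregator fix noted above): views can be merged, but they refuse mutation.

var a = FloatPriorityQueue.of(ScoredValue.of(1.0f, "x"));
var b = FloatPriorityQueue.of(ScoredValue.of(2.0f, "y"));
var merged = FloatPriorityQueueView.<String>aggregator().apply(a.view(), b.view());
assert merged.size() == 2;
try {
	merged.offer("z");
	assert false; // unreachable: the view is read-only
} catch (UnsupportedOperationException expected) {
	// Mutators always throw on a view
}
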
@ -1,179 +0,0 @@
package org.warp.commonutils.type;

import it.unimi.dsi.fastutil.objects.Object2ObjectOpenHashMap;
import it.unimi.dsi.fastutil.objects.ObjectOpenHashSet;
import it.unimi.dsi.fastutil.objects.ObjectSet;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;

public class HashAssociation<T, U> implements Association<T, U> {

	private final Object2ObjectOpenHashMap<T, ObjectOpenHashSet<U>> associations;
	private final Object2ObjectOpenHashMap<U, T> inverseAssociations;

	public HashAssociation() {
		this.associations = new Object2ObjectOpenHashMap<>();
		this.inverseAssociations = new Object2ObjectOpenHashMap<>();
	}

	private HashAssociation(Object2ObjectOpenHashMap<T, ObjectOpenHashSet<U>> associations,
			Object2ObjectOpenHashMap<U, T> inverseAssociations) {
		this.associations = associations;
		this.inverseAssociations = inverseAssociations;
	}

	@Override
	public boolean link(T src, U dest) {
		Objects.requireNonNull(src);
		Objects.requireNonNull(dest);
		// Find the source currently associated with this destination, if any
		var previousSrc = inverseAssociations.get(dest);

		// Return if the association already exists
		if (src.equals(previousSrc)) {
			return false;
		}

		// Remove the previous association if present
		if (previousSrc != null) {
			inverseAssociations.remove(dest);
			var destinations = associations.get(previousSrc);
			destinations.remove(dest);
			if (destinations.isEmpty()) {
				associations.remove(previousSrc);
			}
		}

		if (!associations.computeIfAbsent(src, s -> new ObjectOpenHashSet<>(1)).add(dest)) {
			throw new IllegalStateException("Association was partially linked");
		}
		// Fixed: record the new link in the inverse index too; otherwise
		// getLinkSource, unlink and size() never see it
		inverseAssociations.put(dest, src);
		return true;
	}

	@Override
	public boolean unlink(T src, U dest) {
		Objects.requireNonNull(src);
		Objects.requireNonNull(dest);

		var linkedSrc = inverseAssociations.get(dest);

		// Check if the link is different
		if (!Objects.equals(src, linkedSrc)) {
			return false;
		}

		inverseAssociations.remove(dest);
		var destinations = associations.get(src);
		if (!destinations.remove(dest)) {
			throw new IllegalStateException("Association was partially linked");
		}
		if (destinations.isEmpty()) {
			associations.remove(src);
		}

		return true;
	}

	@Override
	public Set<U> unlink(T src) {
		Objects.requireNonNull(src);

		var destinations = associations.remove(src);

		if (destinations == null) {
			return Set.of();
		}

		for (U destination : destinations) {
			if (!Objects.equals(src, inverseAssociations.remove(destination))) {
				throw new IllegalStateException("Association was partially linked");
			}
		}

		return destinations;
	}

	@Override
	public Optional<T> unlinkFromSource(U dest) {
		Objects.requireNonNull(dest);

		var previousSrc = inverseAssociations.remove(dest);

		if (previousSrc == null) {
			return Optional.empty();
		}

		var destinations = associations.get(previousSrc);
		destinations.remove(dest);
		if (destinations.isEmpty()) {
			associations.remove(previousSrc);
		}

		return Optional.of(previousSrc);
	}

	@Override
	public boolean hasAnyLink(T src) {
		Objects.requireNonNull(src);
		var destinations = associations.get(src);
		return destinations != null && !destinations.isEmpty();
	}

	@Override
	public boolean hasLinkSource(U dest) {
		Objects.requireNonNull(dest);
		return inverseAssociations.containsKey(dest);
	}

	@Override
	public boolean hasLink(T src, U dest) {
		Objects.requireNonNull(src);
		Objects.requireNonNull(dest);
		return Objects.equals(src, inverseAssociations.get(dest));
	}

	@Override
	public Set<U> getLinks(T src) {
		Objects.requireNonNull(src);
		var dests = associations.get(src);
		if (dests == null) return Set.of();
		return dests.clone();
	}

	@Override
	public Optional<T> getLinkSource(U dest) {
		Objects.requireNonNull(dest);

		return Optional.ofNullable(inverseAssociations.get(dest));
	}

	@Override
	public void clear() {
		associations.clear();
		inverseAssociations.clear();
	}

	@Override
	public int size() {
		return inverseAssociations.size();
	}

	@Override
	public ObjectSet<T> getSources() {
		return associations.clone().keySet();
	}

	@Override
	public ObjectSet<U> getDestinations() {
		return inverseAssociations.clone().keySet();
	}

	@SuppressWarnings("MethodDoesntCallSuperMethod")
	@Override
	public HashAssociation<T, U> clone() {
		var associationsClone = associations.clone();
		associationsClone.replaceAll((item, destinations) -> destinations.clone());
		return new HashAssociation<>(associationsClone, inverseAssociations.clone());
	}
}

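Sketch of the many-to-one behavior (illustrative, not part of this commit; assumes the inverse-index fix noted in link above): linking an already-claimed destination silently re-points it to the new source.

var assoc = new HashAssociation<String, Integer>();
assoc.link("a", 1);
assoc.link("a", 2);
assoc.link("b", 1);                      // destination 1 is re-pointed from "a" to "b"
assert assoc.getLinks("a").equals(java.util.Set.of(2));
assert assoc.getLinkSource(1).orElseThrow().equals("b");
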
@ -1,153 +0,0 @@
package org.warp.commonutils.type;

import it.unimi.dsi.fastutil.objects.Object2ObjectOpenHashMap;
import it.unimi.dsi.fastutil.objects.ObjectSet;
import java.util.Objects;
import java.util.Optional;

public class HashBiAssociation<T, U> implements BiAssociation<T, U> {

	private final Object2ObjectOpenHashMap<T, U> associations;
	private final Object2ObjectOpenHashMap<U, T> inverseAssociations;

	public HashBiAssociation() {
		this.associations = new Object2ObjectOpenHashMap<>();
		this.inverseAssociations = new Object2ObjectOpenHashMap<>();
	}

	private HashBiAssociation(Object2ObjectOpenHashMap<T, U> associations,
			Object2ObjectOpenHashMap<U, T> inverseAssociations) {
		this.associations = associations;
		this.inverseAssociations = inverseAssociations;
	}

	@Override
	public Optional<U> link(T src, U dest) {
		Objects.requireNonNull(src);
		Objects.requireNonNull(dest);

		var previousSrc = inverseAssociations.put(dest, src);

		// Return immediately if the link already exists
		if (Objects.equals(src, previousSrc)) {
			return Optional.of(dest);
		}

		// Remove the previous association
		if (previousSrc != null) {
			associations.remove(previousSrc);
		}

		var previousDest = associations.put(src, dest);

		// Remove the previous association
		if (previousDest != null) {
			inverseAssociations.remove(previousDest);
		}

		return Optional.ofNullable(previousDest);
	}

	@Override
	public boolean unlink(T src, U dest) {
		Objects.requireNonNull(src);
		Objects.requireNonNull(dest);

		if (!Objects.equals(dest, associations.get(src))) {
			return false;
		}

		associations.remove(src);
		inverseAssociations.remove(dest);
		return true;
	}

	@Override
	public Optional<U> unlink(T src) {
		Objects.requireNonNull(src);

		var dest = associations.remove(src);

		if (dest != null) {
			inverseAssociations.remove(dest);
		}

		return Optional.ofNullable(dest);
	}

	@Override
	public Optional<T> unlinkFromSource(U dest) {
		Objects.requireNonNull(dest);

		var src = inverseAssociations.remove(dest);

		if (src != null) {
			associations.remove(src);
		}

		return Optional.ofNullable(src);
	}

	@Override
	public boolean hasLink(T src) {
		Objects.requireNonNull(src);

		return associations.containsKey(src);
	}

	@Override
	public boolean hasLinkSource(U dest) {
		Objects.requireNonNull(dest);

		return inverseAssociations.containsKey(dest);
	}

	@Override
	public boolean hasLink(T src, U dest) {
		Objects.requireNonNull(src);
		Objects.requireNonNull(dest);

		return Objects.equals(dest, associations.get(src));
	}

	@Override
	public Optional<U> getLink(T src) {
		Objects.requireNonNull(src);

		return Optional.ofNullable(associations.get(src));
	}

	@Override
	public Optional<T> getLinkSource(U dest) {
		Objects.requireNonNull(dest);

		return Optional.ofNullable(inverseAssociations.get(dest));
	}

	@Override
	public void clear() {
		associations.clear();
		inverseAssociations.clear();
	}

	@Override
	public int size() {
		return inverseAssociations.size();
	}

	@Override
	public ObjectSet<T> getSources() {
		return associations.clone().keySet();
	}

	@Override
	public ObjectSet<U> getDestinations() {
		return inverseAssociations.clone().keySet();
	}

	@SuppressWarnings("MethodDoesntCallSuperMethod")
	@Override
	public HashBiAssociation<T, U> clone() {
		return new HashBiAssociation<>(associations.clone(), inverseAssociations.clone());
	}
}

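Sketch (illustrative, not part of this commit) of the strict one-to-one behavior: re-linking a taken destination detaches its old source.

var bi = new HashBiAssociation<String, Integer>();
bi.link("a", 1);
bi.link("b", 1);                          // "b" steals destination 1
assert bi.getLink("a").isEmpty();         // "a" lost its only destination
assert bi.getLinkSource(1).orElseThrow().equals("b");
assert bi.size() == 1;
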
@ -1,185 +0,0 @@
package org.warp.commonutils.type;

import it.unimi.dsi.fastutil.objects.Object2ObjectOpenHashMap;
import it.unimi.dsi.fastutil.objects.ObjectOpenHashSet;
import it.unimi.dsi.fastutil.objects.ObjectSet;
import java.util.Objects;
import java.util.Set;
import org.jetbrains.annotations.NotNull;

public class HashMultiAssociation<T, U> implements MultiAssociation<T, U> {

	private final Object2ObjectOpenHashMap<T, ObjectOpenHashSet<U>> associations;
	private final Object2ObjectOpenHashMap<U, ObjectOpenHashSet<T>> inverseAssociations;

	public HashMultiAssociation() {
		this.associations = new Object2ObjectOpenHashMap<>();
		this.inverseAssociations = new Object2ObjectOpenHashMap<>();
	}

	private HashMultiAssociation(Object2ObjectOpenHashMap<T, ObjectOpenHashSet<U>> associations,
			Object2ObjectOpenHashMap<U, ObjectOpenHashSet<T>> inverseAssociations) {
		this.associations = associations;
		this.inverseAssociations = inverseAssociations;
	}

	/**
	 * Remove the source from this destination
	 */
	private boolean removeSourceFromDest(@NotNull U dest, @NotNull T src) {
		var sources = inverseAssociations.get(dest);
		if (sources != null) {
			// Fixed: report whether the source was actually present,
			// instead of returning true whenever the set existed
			boolean removed = sources.remove(src);

			// Keep the map clean
			if (sources.isEmpty()) {
				inverseAssociations.remove(dest);
			}

			return removed;
		}
		return false;
	}

	/**
	 * Remove the destination from this source
	 */
	private boolean removeDestFromSource(@NotNull T src, @NotNull U dest) {
		var dests = associations.get(src);
		if (dests != null) {
			// Fixed: same as removeSourceFromDest above
			boolean removed = dests.remove(dest);

			// Keep the map clean
			if (dests.isEmpty()) {
				associations.remove(src);
			}

			return removed;
		}
		return false;
	}

	@Override
	public boolean link(T src, U dest) {
		var added = associations.computeIfAbsent(src, s -> new ObjectOpenHashSet<>(1)).add(dest);
		if (added) {
			// Fixed: keep the inverse index in sync with the forward index;
			// the original only updated associations
			inverseAssociations.computeIfAbsent(dest, d -> new ObjectOpenHashSet<>(1)).add(src);
		}
		return added;
	}

	@Override
	public boolean unlink(T src, U dest) {
		var removed = removeDestFromSource(src, dest);

		if (removed) {
			removeSourceFromDest(dest, src);
			return true;
		}

		return false;
	}

	@Override
	public Set<U> unlink(T src) {
		var dests = associations.remove(src);

		if (dests == null) {
			return Set.of();
		}

		for (U dest : dests) {
			removeSourceFromDest(dest, src);
		}
		return dests;
	}

	@Override
	public Set<T> unlinkFromSource(U dest) {
		var sources = inverseAssociations.remove(dest);

		if (sources == null) {
			return Set.of();
		}

		for (T source : sources) {
			removeDestFromSource(source, dest);
		}
		return sources;
	}

	@Override
	public boolean hasAnyLink(T src) {
		var dests = associations.get(src);
		if (dests == null) {
			return false;
		}
		return !dests.isEmpty();
	}

	@Override
	public boolean hasAnyLinkSource(U dest) {
		var sources = inverseAssociations.get(dest);
		if (sources == null) {
			return false;
		}
		return !sources.isEmpty();
	}

	@Override
	public boolean hasLink(T src, U dest) {
		var dests = associations.get(src);
		if (dests == null) {
			return false;
		}
		return dests.contains(dest);
	}

	@Override
	public Set<U> getLinks(T src) {
		Objects.requireNonNull(src);
		var dests = associations.get(src);
		if (dests == null) return Set.of();
		return dests.clone();
	}

	@Override
	public Set<T> getLinkSources(U dest) {
		Objects.requireNonNull(dest);
		var sources = inverseAssociations.get(dest);
		if (sources == null) return Set.of();
		return sources.clone();
	}

	@Override
	public void clear() {
		associations.clear();
		inverseAssociations.clear();
	}

	@Override
	public int size() {
		return Math.max(associations.size(), inverseAssociations.size());
	}

	@Override
	public ObjectSet<T> getSources() {
		return associations.clone().keySet();
	}

	@Override
	public ObjectSet<U> getDestinations() {
		return inverseAssociations.clone().keySet();
	}

	@SuppressWarnings("MethodDoesntCallSuperMethod")
	@Override
	public HashMultiAssociation<T, U> clone() {
		var associationsClone = associations.clone();
		associationsClone.replaceAll((item, destinations) -> destinations.clone());

		var inverseAssociationsClone = inverseAssociations.clone();
		inverseAssociationsClone.replaceAll((item, sources) -> sources.clone());

		return new HashMultiAssociation<>(associationsClone, inverseAssociationsClone);
	}
}

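Sketch of many-to-many usage (illustrative, not part of this commit; assumes the link/unlink consistency fixes flagged above):

var multi = new HashMultiAssociation<String, Integer>();
multi.link("a", 1);
multi.link("a", 2);
multi.link("b", 1);
assert multi.getLinks("a").equals(java.util.Set.of(1, 2));
assert multi.getLinkSources(1).equals(java.util.Set.of("a", "b"));
assert multi.unlink("a", 1);
assert multi.getLinkSources(1).equals(java.util.Set.of("b"));
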
@ -1,28 +0,0 @@
package org.warp.commonutils.type;

import it.unimi.dsi.fastutil.objects.ObjectLinkedOpenHashSet;
import java.util.Collection;

public class HashStackSet<T> extends FastUtilStackSetWrapper<T> {

	public HashStackSet() {
		super(new ObjectLinkedOpenHashSet<>());
	}

	public HashStackSet(Collection<T> collection) {
		super(new ObjectLinkedOpenHashSet<>(collection));
	}

	public HashStackSet(AddStrategy addStrategy) {
		super(new ObjectLinkedOpenHashSet<>(), addStrategy);
	}

	private HashStackSet(ObjectLinkedOpenHashSet<T> linkedHashSet, AddStrategy addStrategy) {
		super(linkedHashSet, addStrategy);
	}

	@Override
	public HashStackSet<T> clone() {
		return new HashStackSet<>(super.linkedHashSet.clone(), super.addStrategy);
	}
}

@ -1,10 +0,0 @@
package org.warp.commonutils.type;

public class IntWrapper {

	public int var;

	public IntWrapper(int value) {
		this.var = value;
	}
}

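The point of this tiny class is to give lambdas a mutable int slot, since Java closures can only capture effectively-final locals. A JDK-only sketch (illustrative, not part of this commit):

var counter = new IntWrapper(0);
java.util.List.of("a", "b", "c").forEach(s -> counter.var++);
assert counter.var == 3; // a plain local int could not be mutated inside the lambda
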
@ -1,76 +0,0 @@
package org.warp.commonutils.type;

import java.util.LinkedHashSet;
import java.util.NoSuchElementException;
import org.warp.commonutils.error.IndexOutOfBoundsException;

public class JavaStackSetWrapper<T> implements StackSet<T> {

	private final LinkedHashSet<T> linkedHashSet;

	/**
	 * The last element will be the head
	 */
	public JavaStackSetWrapper(LinkedHashSet<T> linkedHashSet) {
		this.linkedHashSet = linkedHashSet;
	}

	@Override
	public boolean push(T o) {
		return linkedHashSet.add(o);
	}

	@Override
	public T pop() {
		var it = linkedHashSet.iterator();
		if (!it.hasNext()) {
			throw new NoSuchElementException();
		}
		// Go to the last element
		T lastValue;
		do {
			lastValue = it.next();
		} while (it.hasNext());
		// Remove the last element
		it.remove();
		return lastValue;
	}

	@Override
	public boolean isEmpty() {
		return linkedHashSet.isEmpty();
	}

	@Override
	public T top() {
		if (linkedHashSet.isEmpty()) {
			throw new NoSuchElementException();
		}

		var it = linkedHashSet.iterator();
		T lastValue;
		do {
			lastValue = it.next();
		} while (it.hasNext());
		return lastValue;
	}

	@Override
	public T peek(int i) {
		var size = linkedHashSet.size();
		int positionFromBottom = size - 1 - i;

		if (positionFromBottom < 0 || positionFromBottom >= size) {
			throw new IndexOutOfBoundsException(positionFromBottom, 0, size);
		}

		var it = linkedHashSet.iterator();
		// Skip middle elements
		if (positionFromBottom > 0) {
			for (int j = 0; j < positionFromBottom; j++) {
				it.next();
			}
		}
		return it.next();
	}
}

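Sketch (illustrative, not part of this commit) of the head-at-the-end convention this wrapper documents, as opposed to FastUtilStackSetWrapper's head-at-the-front:

var ordered = new java.util.LinkedHashSet<>(java.util.List.of("bottom", "middle", "top"));
StackSet<String> stack = StackSet.wrap(ordered);
assert "top".equals(stack.top());   // insertion order: the last element is the head
assert "top".equals(stack.pop());
assert "middle".equals(stack.top());
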
@ -113,11 +113,4 @@ public interface MultiAssociation<T, U> {
	 */
	ObjectSet<U> getDestinations();

	static <T, U> MultiAssociation<T, U> synchronize(MultiAssociation<T, U> association) {
		return new SynchronizedMultiAssociation<>(association);
	}

	static <T, U> MultiAssociation<T, U> synchronize(MultiAssociation<T, U> association, Object lock) {
		return new SynchronizedMultiAssociation<>(association, lock);
	}
}

@ -1,53 +0,0 @@
package org.warp.commonutils.type;

import java.util.Objects;
import org.jetbrains.annotations.NotNull;

public final class ScoredValue<T> implements Comparable<ScoredValue<T>> {

	private final float score;
	private final T value;

	private ScoredValue(float score, T value) {
		this.score = score;
		this.value = value;
	}

	public static <T> ScoredValue<T> of(float score, T value) {
		return new ScoredValue<>(score, value);
	}

	@Override
	public int compareTo(@NotNull ScoredValue<T> o) {
		// This is reversed: higher scores sort first
		return Float.compare(o.score, this.score);
	}

	public float getScore() {
		return this.score;
	}

	public T getValue() {
		return this.value;
	}

	@Override
	public boolean equals(Object o) {
		if (this == o) {
			return true;
		}
		if (o == null || getClass() != o.getClass()) {
			return false;
		}
		ScoredValue<?> that = (ScoredValue<?>) o;
		// The score is intentionally excluded: equality is value-only
		return Objects.equals(value, that.value);
	}

	@Override
	public int hashCode() {
		return Objects.hash(value);
	}

	@Override
	public String toString() {
		return "ScoredValue(score=" + this.getScore() + ", value=" + this.getValue() + ")";
	}
}

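Worth noting (sketch, not part of this commit): equals and hashCode ignore the score, which is what lets FloatPriorityQueue remove an entry with a placeholder such as ScoredValue.of(0, o).

assert ScoredValue.of(1.0f, "x").equals(ScoredValue.of(99.0f, "x"));        // score is ignored
assert ScoredValue.of(2.0f, "a").compareTo(ScoredValue.of(1.0f, "b")) < 0;  // higher score sorts first
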
@ -1,102 +0,0 @@
package org.warp.commonutils.type;

import it.unimi.dsi.fastutil.objects.ObjectLinkedOpenHashSet;
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.NoSuchElementException;
import org.warp.commonutils.error.IndexOutOfBoundsException;

/**
 * A stack that also behaves like a linked hash set: each element can be present at most once
 *
 * <p>A stack must provide the classical {@link #push(Object)} and
 * {@link #pop()} operations, but may be also <em>peekable</em> to some extent: it may provide just the {@link #top()}
 * function, or even a more powerful {@link #peek(int)} method that provides access to all elements on the stack
 * (indexed from the top, which has index 0).
 */
public interface StackSet<K> {

	/**
	 * Pushes the given object on the stack.
	 *
	 * @param o the object that will become the new top of the stack.
	 * @return true if added, false if already present
	 */
	boolean push(K o);

	/**
	 * Pop multiple times
	 * @param count the number of times to pop
	 * @return list of popped elements
	 */
	default List<K> pop(int count) {
		if (count < 0) {
			throw new IndexOutOfBoundsException(count);
		}
		var items = new ArrayList<K>(count);
		for (int i = 0; i < count; i++) {
			items.add(this.pop());
		}
		return items;
	}

	/**
	 * Pops the top off the stack.
	 *
	 * @return the top of the stack.
	 * @throws NoSuchElementException if the stack is empty.
	 */
	K pop();

	/**
	 * Checks whether the stack is empty.
	 *
	 * @return true if the stack is empty.
	 */
	boolean isEmpty();

	/**
	 * Peeks at the top of the stack (optional operation).
	 *
	 * <p>This default implementation returns {@link #peek(int) peek(0)}.
	 *
	 * @return the top of the stack.
	 * @throws NoSuchElementException if the stack is empty.
	 */
	default K top() {
		return peek(0);
	}

	/**
	 * Peeks at an element on the stack (optional operation).
	 *
	 * <p>This default implementation just throws an {@link UnsupportedOperationException}.
	 *
	 * @param i an index from the top of the stack (0 represents the top).
	 * @return the {@code i}-th element on the stack.
	 * @throws IndexOutOfBoundsException if the designated element does not exist.
	 */
	default K peek(int i) {
		throw new UnsupportedOperationException();
	}

	static <T> StackSet<T> create() {
		return new HashStackSet<>();
	}

	static <T> StackSet<T> wrap(LinkedHashSet<T> linkedHashSet) {
		return new JavaStackSetWrapper<>(linkedHashSet);
	}

	static <T> StackSet<T> wrap(ObjectLinkedOpenHashSet<T> linkedHashSet) {
		return new FastUtilStackSetWrapper<>(linkedHashSet);
	}

	static <T> StackSet<T> wrap(ObjectLinkedOpenHashSet<T> linkedHashSet, AddStrategy addStrategy) {
		return new FastUtilStackSetWrapper<>(linkedHashSet, addStrategy);
	}
}

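A short sketch (illustrative, not part of this commit) of the interface in action via the default create() factory, which returns a head-first HashStackSet:

StackSet<Integer> s = StackSet.create();
s.push(1);
s.push(2);
s.push(3);
assert s.top() == 3;                           // the last push is on top
assert s.pop(2).equals(java.util.List.of(3, 2));
assert s.peek(0) == 1;                         // only the bottom element remains
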
@@ -1,112 +0,0 @@
package org.warp.commonutils.type;

import it.unimi.dsi.fastutil.objects.ObjectSet;
import java.util.Optional;
import java.util.Set;

public final class SynchronizedAssociation<T, U> implements Association<T, U> {

	private final Association<T, U> association;
	private final Object lock;

	SynchronizedAssociation(Association<T, U> association) {
		this.association = association;
		this.lock = new Object();
	}

	SynchronizedAssociation(Association<T, U> association, Object lock) {
		this.association = association;
		this.lock = lock;
	}

	@Override
	public boolean link(T src, U dest) {
		synchronized (lock) {
			return association.link(src, dest);
		}
	}

	@Override
	public boolean unlink(T src, U dest) {
		synchronized (lock) {
			return association.unlink(src, dest);
		}
	}

	@Override
	public Set<U> unlink(T src) {
		synchronized (lock) {
			return association.unlink(src);
		}
	}

	@Override
	public Optional<T> unlinkFromSource(U dest) {
		synchronized (lock) {
			return association.unlinkFromSource(dest);
		}
	}

	@Override
	public boolean hasAnyLink(T src) {
		synchronized (lock) {
			return association.hasAnyLink(src);
		}
	}

	@Override
	public boolean hasLinkSource(U dest) {
		synchronized (lock) {
			return association.hasLinkSource(dest);
		}
	}

	@Override
	public boolean hasLink(T src, U dest) {
		synchronized (lock) {
			return association.hasLink(src, dest);
		}
	}

	@Override
	public Set<U> getLinks(T src) {
		synchronized (lock) {
			return association.getLinks(src);
		}
	}

	@Override
	public Optional<T> getLinkSource(U dest) {
		synchronized (lock) {
			return association.getLinkSource(dest);
		}
	}

	@Override
	public void clear() {
		synchronized (lock) {
			association.clear();
		}
	}

	@Override
	public int size() {
		synchronized (lock) {
			return association.size();
		}
	}

	@Override
	public ObjectSet<T> getSources() {
		synchronized (lock) {
			return association.getSources();
		}
	}

	@Override
	public ObjectSet<U> getDestinations() {
		synchronized (lock) {
			return association.getDestinations();
		}
	}
}
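A note on the pattern above: every method acquires the same monitor before delegating, and the second constructor lets the caller supply that monitor, which is what makes compound operations atomic from the outside. A hedged sketch follows; rawAssociation stands in for any non-thread-safe Association implementation, and since the constructor is package-private this is illustrative only.

Object sharedLock = new Object();
Association<String, Integer> assoc = new SynchronizedAssociation<>(rawAssociation, sharedLock);
// A check-then-act sequence is atomic only while holding the same monitor:
synchronized (sharedLock) {
	if (!assoc.hasLink("key", 1)) {
		assoc.link("key", 1);
	}
}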
@@ -1,111 +0,0 @@
package org.warp.commonutils.type;

import it.unimi.dsi.fastutil.objects.ObjectSet;
import java.util.Optional;

public final class SynchronizedBiAssociation<T, U> implements BiAssociation<T, U> {

	private final BiAssociation<T, U> association;
	private final Object lock;

	SynchronizedBiAssociation(BiAssociation<T, U> association) {
		this.association = association;
		this.lock = new Object();
	}

	SynchronizedBiAssociation(BiAssociation<T, U> association, Object lock) {
		this.association = association;
		this.lock = lock;
	}

	@Override
	public Optional<U> link(T src, U dest) {
		synchronized (lock) {
			return association.link(src, dest);
		}
	}

	@Override
	public boolean unlink(T src, U dest) {
		synchronized (lock) {
			return association.unlink(src, dest);
		}
	}

	@Override
	public Optional<U> unlink(T src) {
		synchronized (lock) {
			return association.unlink(src);
		}
	}

	@Override
	public Optional<T> unlinkFromSource(U dest) {
		synchronized (lock) {
			return association.unlinkFromSource(dest);
		}
	}

	@Override
	public boolean hasLink(T src) {
		synchronized (lock) {
			return association.hasLink(src);
		}
	}

	@Override
	public boolean hasLinkSource(U dest) {
		synchronized (lock) {
			return association.hasLinkSource(dest);
		}
	}

	@Override
	public boolean hasLink(T src, U dest) {
		synchronized (lock) {
			return association.hasLink(src, dest);
		}
	}

	@Override
	public Optional<U> getLink(T src) {
		synchronized (lock) {
			return association.getLink(src);
		}
	}

	@Override
	public Optional<T> getLinkSource(U dest) {
		synchronized (lock) {
			return association.getLinkSource(dest);
		}
	}

	@Override
	public void clear() {
		synchronized (lock) {
			association.clear();
		}
	}

	@Override
	public int size() {
		synchronized (lock) {
			return association.size();
		}
	}

	@Override
	public ObjectSet<T> getSources() {
		synchronized (lock) {
			return association.getSources();
		}
	}

	@Override
	public ObjectSet<U> getDestinations() {
		synchronized (lock) {
			return association.getDestinations();
		}
	}
}
@@ -1,111 +0,0 @@
package org.warp.commonutils.type;

import it.unimi.dsi.fastutil.objects.ObjectSet;
import java.util.Set;

public class SynchronizedMultiAssociation<T, U> implements MultiAssociation<T, U> {

	private final MultiAssociation<T, U> association;
	private final Object lock;

	SynchronizedMultiAssociation(MultiAssociation<T, U> association) {
		this.association = association;
		this.lock = new Object();
	}

	SynchronizedMultiAssociation(MultiAssociation<T, U> association, Object lock) {
		this.association = association;
		this.lock = lock;
	}

	@Override
	public boolean link(T src, U dest) {
		synchronized (lock) {
			return association.link(src, dest);
		}
	}

	@Override
	public boolean unlink(T src, U dest) {
		synchronized (lock) {
			return association.unlink(src, dest);
		}
	}

	@Override
	public Set<U> unlink(T src) {
		synchronized (lock) {
			return association.unlink(src);
		}
	}

	@Override
	public Set<T> unlinkFromSource(U dest) {
		synchronized (lock) {
			return association.unlinkFromSource(dest);
		}
	}

	@Override
	public boolean hasAnyLink(T src) {
		synchronized (lock) {
			return association.hasAnyLink(src);
		}
	}

	@Override
	public boolean hasAnyLinkSource(U dest) {
		synchronized (lock) {
			return association.hasAnyLinkSource(dest);
		}
	}

	@Override
	public boolean hasLink(T src, U dest) {
		synchronized (lock) {
			return association.hasLink(src, dest);
		}
	}

	@Override
	public Set<U> getLinks(T src) {
		synchronized (lock) {
			return association.getLinks(src);
		}
	}

	@Override
	public Set<T> getLinkSources(U dest) {
		synchronized (lock) {
			return association.getLinkSources(dest);
		}
	}

	@Override
	public void clear() {
		synchronized (lock) {
			association.clear();
		}
	}

	@Override
	public int size() {
		synchronized (lock) {
			return association.size();
		}
	}

	@Override
	public ObjectSet<T> getSources() {
		synchronized (lock) {
			return association.getSources();
		}
	}

	@Override
	public ObjectSet<U> getDestinations() {
		synchronized (lock) {
			return association.getDestinations();
		}
	}
}
@@ -1,202 +0,0 @@
package org.warp.commonutils.type;

import com.google.common.collect.Streams;
import it.unimi.dsi.fastutil.objects.Object2ObjectMaps;
import it.unimi.dsi.fastutil.objects.Object2ObjectOpenHashMap;
import java.lang.reflect.Array;
import java.util.ConcurrentModificationException;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NoSuchElementException;
import java.util.function.BiConsumer;
import java.util.function.IntFunction;
import java.util.stream.Stream;
import org.jetbrains.annotations.NotNull;

public interface UnmodifiableIterableMap<K, V> extends Iterable<Entry<K, V>> {

	/**
	 * Returns the number of key-value mappings in this map. If the
	 * map contains more than {@code Integer.MAX_VALUE} elements, returns
	 * {@code Integer.MAX_VALUE}.
	 *
	 * @return the number of key-value mappings in this map
	 */
	int size();

	/**
	 * Returns {@code true} if this map contains no key-value mappings.
	 *
	 * @return {@code true} if this map contains no key-value mappings
	 */
	boolean isEmpty();

	/**
	 * Performs the given action for each entry in this map until all entries
	 * have been processed or the action throws an exception. Unless
	 * otherwise specified by the implementing class, actions are performed in
	 * the order of entry set iteration (if an iteration order is specified).
	 * Exceptions thrown by the action are relayed to the caller.
	 *
	 * @implSpec
	 * The default implementation is equivalent to, for this {@code map}:
	 * <pre> {@code
	 * for (Map.Entry<K, V> entry : map.entrySet())
	 *     action.accept(entry.getKey(), entry.getValue());
	 * }</pre>
	 *
	 * The default implementation makes no guarantees about synchronization
	 * or atomicity properties of this method. Any implementation providing
	 * atomicity guarantees must override this method and document its
	 * concurrency properties.
	 *
	 * @param action The action to be performed for each entry
	 * @throws NullPointerException if the specified action is null
	 * @throws ConcurrentModificationException if an entry is found to be
	 * removed during iteration
	 * @since 1.8
	 */
	void forEach(BiConsumer<? super K, ? super V> action);

	Map<K, V> toUnmodifiableMap();

	Stream<Entry<K, V>> stream();

	UnmodifiableIterableSet<K> toUnmodifiableIterableKeysSet(IntFunction<K[]> generator);

	@SuppressWarnings("SuspiciousSystemArraycopy")
	static <K, V> UnmodifiableIterableMap<K, V> ofObjects(Object[] keys, Object[] values) {
		if (keys == null || values == null || (keys.length == 0 && values.length == 0)) {
			return UnmodifiableIterableMap.of(null, null);
		} else if (keys.length == values.length) {
			//noinspection unchecked
			K[] keysArray = (K[]) Array.newInstance(keys[0].getClass(), keys.length);
			System.arraycopy(keys, 0, keysArray, 0, keys.length);
			//noinspection unchecked
			V[] valuesArray = (V[]) Array.newInstance(values[0].getClass(), keys.length);
			System.arraycopy(values, 0, valuesArray, 0, values.length);
			return UnmodifiableIterableMap.of(keysArray, valuesArray);
		} else {
			throw new IllegalArgumentException("The number of keys doesn't match the number of values.");
		}
	}

	static <K, V> UnmodifiableIterableMap<K, V> of(K[] keys, V[] values) {
		int keysSize = (keys != null) ? keys.length : 0;
		int valuesSize = (values != null) ? values.length : 0;

		if (keysSize == 0 && valuesSize == 0) {
			// return an empty map
			return new EmptyUnmodifiableIterableMap<>();
		}

		if (keysSize != valuesSize) {
			throw new IllegalArgumentException("The number of keys doesn't match the number of values.");
		}

		return new ArrayUnmodifiableIterableMap<>(keys, values, keysSize);
	}

	class EmptyUnmodifiableIterableMap<K, V> implements UnmodifiableIterableMap<K, V> {

		private EmptyUnmodifiableIterableMap() {}

		@NotNull
		@Override
		public Iterator<Entry<K, V>> iterator() {
			return new Iterator<>() {
				@Override
				public boolean hasNext() {
					return false;
				}

				@Override
				public Entry<K, V> next() {
					throw new NoSuchElementException();
				}
			};
		}

		@Override
		public int size() {
			return 0;
		}

		@Override
		public boolean isEmpty() {
			return true;
		}

		@Override
		public void forEach(BiConsumer<? super K, ? super V> action) {}

		@Override
		public Map<K, V> toUnmodifiableMap() {
			//noinspection unchecked
			return Object2ObjectMaps.EMPTY_MAP;
		}

		@Override
		public Stream<Entry<K, V>> stream() {
			return Stream.empty();
		}

		@Override
		public UnmodifiableIterableSet<K> toUnmodifiableIterableKeysSet(IntFunction<K[]> generator) {
			return UnmodifiableIterableSet.of(null);
		}
	}

	class ArrayUnmodifiableIterableMap<K, V> implements UnmodifiableIterableMap<K, V> {

		private final K[] keys;
		private final V[] values;
		private final int keysSize;

		private ArrayUnmodifiableIterableMap(K[] keys, V[] values, int keysSize) {
			this.keys = keys;
			this.values = values;
			this.keysSize = keysSize;
		}

		@NotNull
		@Override
		public Iterator<Entry<K, V>> iterator() {
			return new Object2ObjectOpenHashMap<K, V>(keys, values, 1.0f).entrySet().iterator();
		}

		@Override
		public int size() {
			return keysSize;
		}

		@Override
		public boolean isEmpty() {
			return false;
		}

		@Override
		public void forEach(BiConsumer<? super K, ? super V> action) {
			for (int i = 0; i < keys.length; i++) {
				action.accept(keys[i], values[i]);
			}
		}

		@Override
		public Map<K, V> toUnmodifiableMap() {
			return Object2ObjectMaps.unmodifiable(new Object2ObjectOpenHashMap<>(keys, values, 1.0f));
		}

		@Override
		public Stream<Entry<K, V>> stream() {
			//noinspection UnstableApiUsage
			return Streams.zip(Stream.of(keys), Stream.of(values), Map::entry);
		}

		@Override
		public UnmodifiableIterableSet<K> toUnmodifiableIterableKeysSet(IntFunction<K[]> generator) {
			return UnmodifiableIterableSet.of(keys);
		}
	}
}
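A short usage sketch for the of factory above (not part of the commit):

// Hedged sketch: build a read-only iterable view over parallel arrays.
String[] keys = {"a", "b"};
Integer[] values = {1, 2};
UnmodifiableIterableMap<String, Integer> map = UnmodifiableIterableMap.of(keys, values);
map.forEach((k, v) -> System.out.println(k + " -> " + v));
// Note: the array-backed implementation rebuilds a hash map on every
// iterator() call, so prefer toUnmodifiableMap() for repeated lookups.

This reflects a design choice visible in the code: forEach walks the arrays directly with no allocation, while iterator() pays for a fresh Object2ObjectOpenHashMap each time it is called.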
@@ -1,215 +0,0 @@
package org.warp.commonutils.type;

import it.unimi.dsi.fastutil.objects.ObjectOpenHashSet;
import it.unimi.dsi.fastutil.objects.ObjectSets;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.function.Consumer;
import java.util.function.IntFunction;
import java.util.stream.Stream;
import org.jetbrains.annotations.NotNull;

public interface UnmodifiableIterableSet<K> extends Iterable<K> {

	int size();

	boolean isEmpty();

	void forEach(Consumer<? super K> action);

	Set<K> toUnmodifiableSet();

	Stream<K> stream();

	<V> UnmodifiableIterableMap<K, V> toUnmodifiableIterableMapSetValues(V[] values);

	<K2> UnmodifiableIterableMap<K2, K> toUnmodifiableIterableMapSetKeys(K2[] keys);

	<V> UnmodifiableMap<K, V> toUnmodifiableMapSetValues(V[] values);

	<K2> UnmodifiableMap<K2, K> toUnmodifiableMapSetKeys(K2[] keys);

	static <K> UnmodifiableIterableSet<K> of(K[] items) {
		int keysSize = (items != null) ? items.length : 0;

		if (keysSize == 0) {
			// return an empty set
			return new UnmodifiableIterableSet<K>() {
				@NotNull
				@Override
				public Iterator<K> iterator() {
					return new Iterator<>() {
						@Override
						public boolean hasNext() {
							return false;
						}

						@Override
						public K next() {
							throw new NoSuchElementException();
						}
					};
				}

				@Override
				public int size() {
					return 0;
				}

				@Override
				public boolean isEmpty() {
					return true;
				}

				@Override
				public void forEach(Consumer<? super K> action) {}

				@Override
				public Set<K> toUnmodifiableSet() {
					return new HashSet<>(0);
				}

				@Override
				public Stream<K> stream() {
					return Stream.empty();
				}

				@Override
				public <V> UnmodifiableIterableMap<K, V> toUnmodifiableIterableMapSetValues(V[] values) {
					return UnmodifiableIterableMap.of(null, values);
				}

				@Override
				public <K2> UnmodifiableIterableMap<K2, K> toUnmodifiableIterableMapSetKeys(K2[] keys) {
					return UnmodifiableIterableMap.of(keys, null);
				}

				@Override
				public <V> UnmodifiableMap<K, V> toUnmodifiableMapSetValues(V[] values) {
					return UnmodifiableMap.of(null, values);
				}

				@Override
				public <K2> UnmodifiableMap<K2, K> toUnmodifiableMapSetKeys(K2[] keys) {
					return UnmodifiableMap.of(keys, null);
				}
			};
		}

		return new UnmodifiableIterableSet<K>() {
			@Override
			public int size() {
				return keysSize;
			}

			@Override
			public boolean isEmpty() {
				return false;
			}

			@Override
			public void forEach(Consumer<? super K> action) {
				for (int i = 0; i < items.length; i++) {
					action.accept(items[i]);
				}
			}

			@Override
			public Set<K> toUnmodifiableSet() {
				return new HashSet<>(Arrays.asList(items));
			}

			@Override
			public Stream<K> stream() {
				return Arrays.stream(items);
			}

			@Override
			public <V> UnmodifiableIterableMap<K, V> toUnmodifiableIterableMapSetValues(V[] values) {
				return UnmodifiableIterableMap.of(items, values);
			}

			@Override
			public <K2> UnmodifiableIterableMap<K2, K> toUnmodifiableIterableMapSetKeys(K2[] keys) {
				return UnmodifiableIterableMap.of(keys, items);
			}

			@Override
			public <V> UnmodifiableMap<K, V> toUnmodifiableMapSetValues(V[] values) {
				return UnmodifiableMap.of(items, values);
			}

			@Override
			public <K2> UnmodifiableMap<K2, K> toUnmodifiableMapSetKeys(K2[] keys) {
				return UnmodifiableMap.of(keys, items);
			}

			@NotNull
			@Override
			public Iterator<K> iterator() {
				return new ObjectOpenHashSet<K>(items, 1.0f).iterator();
			}
		};
	}

	static <K> UnmodifiableIterableSet<K> of(Set<K> items, IntFunction<K[]> generator) {

		return new UnmodifiableIterableSet<K>() {
			@Override
			public int size() {
				return items.size();
			}

			@Override
			public boolean isEmpty() {
				return items.isEmpty();
			}

			@Override
			public void forEach(Consumer<? super K> action) {
				items.forEach(action);
			}

			@Override
			public Set<K> toUnmodifiableSet() {
				return Collections.unmodifiableSet(items);
			}

			@Override
			public Stream<K> stream() {
				return items.stream();
			}

			@Override
			public <V> UnmodifiableIterableMap<K, V> toUnmodifiableIterableMapSetValues(V[] values) {
				return UnmodifiableIterableMap.of(items.toArray(generator), values);
			}

			@Override
			public <K2> UnmodifiableIterableMap<K2, K> toUnmodifiableIterableMapSetKeys(K2[] keys) {
				return UnmodifiableIterableMap.of(keys, items.toArray(generator));
			}

			@Override
			public <V> UnmodifiableMap<K, V> toUnmodifiableMapSetValues(V[] values) {
				return UnmodifiableMap.of(items.toArray(generator), values);
			}

			@Override
			public <K2> UnmodifiableMap<K2, K> toUnmodifiableMapSetKeys(K2[] keys) {
				return UnmodifiableMap.of(keys, items.toArray(generator));
			}

			@NotNull
			@Override
			public Iterator<K> iterator() {
				return items.iterator();
			}
		};
	}
}
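A sketch of the set-backed factory above; the IntFunction generator is only consulted when the set must be flattened to an array for the map-building conversions. Not part of the commit, and a LinkedHashSet is used here because pairing keys with values positionally depends on iteration order:

// Hedged sketch: view an existing Set through the read-only interface.
Set<String> names = new LinkedHashSet<>(List.of("a", "b", "c"));
UnmodifiableIterableSet<String> view = UnmodifiableIterableSet.of(names, String[]::new);
Integer[] ids = {1, 2, 3};
// The generator converts the set to an array so it can be zipped with values:
UnmodifiableIterableMap<String, Integer> byName = view.toUnmodifiableIterableMapSetValues(ids);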
@@ -1,335 +0,0 @@
package org.warp.commonutils.type;

import it.unimi.dsi.fastutil.objects.Object2ObjectMap;
import it.unimi.dsi.fastutil.objects.Object2ObjectMaps;
import it.unimi.dsi.fastutil.objects.Object2ObjectOpenHashMap;
import it.unimi.dsi.fastutil.objects.ObjectIterator;
import java.lang.reflect.Array;
import java.util.Collections;
import java.util.ConcurrentModificationException;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NoSuchElementException;
import java.util.function.BiConsumer;
import java.util.function.IntFunction;
import java.util.stream.Stream;
import org.jetbrains.annotations.NotNull;

public interface UnmodifiableMap<K, V> extends UnmodifiableIterableMap<K, V> {

	/**
	 * Returns {@code true} if this map contains a mapping for the specified
	 * key. More formally, returns {@code true} if and only if
	 * this map contains a mapping for a key {@code k} such that
	 * {@code Objects.equals(key, k)}. (There can be
	 * at most one such mapping.)
	 *
	 * @param key key whose presence in this map is to be tested
	 * @return {@code true} if this map contains a mapping for the specified
	 * key
	 * @throws ClassCastException if the key is of an inappropriate type for
	 * this map
	 * (<a href="{@docRoot}/java.base/java/util/Collection.html#optional-restrictions">optional</a>)
	 * @throws NullPointerException if the specified key is null and this map
	 * does not permit null keys
	 * (<a href="{@docRoot}/java.base/java/util/Collection.html#optional-restrictions">optional</a>)
	 */
	boolean containsKey(Object key);

	/**
	 * Returns the value to which the specified key is mapped,
	 * or {@code null} if this map contains no mapping for the key.
	 *
	 * <p>More formally, if this map contains a mapping from a key
	 * {@code k} to a value {@code v} such that
	 * {@code Objects.equals(key, k)},
	 * then this method returns {@code v}; otherwise
	 * it returns {@code null}. (There can be at most one such mapping.)
	 *
	 * <p>If this map permits null values, then a return value of
	 * {@code null} does not <i>necessarily</i> indicate that the map
	 * contains no mapping for the key; it's also possible that the map
	 * explicitly maps the key to {@code null}. The {@link #containsKey
	 * containsKey} operation may be used to distinguish these two cases.
	 *
	 * @param key the key whose associated value is to be returned
	 * @return the value to which the specified key is mapped, or
	 * {@code null} if this map contains no mapping for the key
	 * @throws ClassCastException if the key is of an inappropriate type for
	 * this map
	 * (<a href="{@docRoot}/java.base/java/util/Collection.html#optional-restrictions">optional</a>)
	 * @throws NullPointerException if the specified key is null and this map
	 * does not permit null keys
	 * (<a href="{@docRoot}/java.base/java/util/Collection.html#optional-restrictions">optional</a>)
	 */
	V get(Object key);

	/**
	 * Returns the value to which the specified key is mapped, or
	 * {@code defaultValue} if this map contains no mapping for the key.
	 *
	 * @implSpec
	 * The default implementation makes no guarantees about synchronization
	 * or atomicity properties of this method. Any implementation providing
	 * atomicity guarantees must override this method and document its
	 * concurrency properties.
	 *
	 * @param key the key whose associated value is to be returned
	 * @param defaultValue the default mapping of the key
	 * @return the value to which the specified key is mapped, or
	 * {@code defaultValue} if this map contains no mapping for the key
	 * @throws ClassCastException if the key is of an inappropriate type for
	 * this map
	 * (<a href="{@docRoot}/java.base/java/util/Collection.html#optional-restrictions">optional</a>)
	 * @throws NullPointerException if the specified key is null and this map
	 * does not permit null keys
	 * (<a href="{@docRoot}/java.base/java/util/Collection.html#optional-restrictions">optional</a>)
	 * @since 1.8
	 */
	default V getOrDefault(Object key, V defaultValue) {
		V v;
		return (((v = get(key)) != null) || containsKey(key))
				? v
				: defaultValue;
	}

	@NotNull
	ObjectIterator<Object2ObjectMap.Entry<K, V>> fastIterator();

	/**
	 * Performs the given action for each entry in this map until all entries
	 * have been processed or the action throws an exception. Unless
	 * otherwise specified by the implementing class, actions are performed in
	 * the order of entry set iteration (if an iteration order is specified).
	 * Exceptions thrown by the action are relayed to the caller.
	 *
	 * @implSpec
	 * The default implementation is equivalent to, for this {@code map}:
	 * <pre> {@code
	 * for (Map.Entry<K, V> entry : map.entrySet())
	 *     action.accept(entry.getKey(), entry.getValue());
	 * }</pre>
	 *
	 * The default implementation makes no guarantees about synchronization
	 * or atomicity properties of this method. Any implementation providing
	 * atomicity guarantees must override this method and document its
	 * concurrency properties.
	 *
	 * @param action The action to be performed for each entry
	 * @throws NullPointerException if the specified action is null
	 * @throws ConcurrentModificationException if an entry is found to be
	 * removed during iteration
	 * @since 1.8
	 */
	void forEach(BiConsumer<? super K, ? super V> action);

	static <K, V> UnmodifiableMap<K, V> of(K[] keys, V[] values) {
		int keysSize = (keys != null) ? keys.length : 0;
		int valuesSize = (values != null) ? values.length : 0;

		if (keysSize == 0 && valuesSize == 0) {
			// return an empty map
			return new EmptyUnmodifiableMap<>();
		}

		return new MappedUnmodifiableMap<>(new Object2ObjectOpenHashMap<>(keys, values, 1.0f));
	}

	static <K, V> UnmodifiableMap<K, V> of(Map<K, V> map) {
		return new MappedUnmodifiableMap<K, V>(map);
	}

	@SuppressWarnings("SuspiciousSystemArraycopy")
	static <K, V> UnmodifiableMap<K, V> ofObjects(Object[] keys, Object[] values) {
		if (keys == null || values == null || (keys.length == 0 && values.length == 0)) {
			return UnmodifiableMap.of(null, null);
		} else if (keys.length == values.length) {
			//noinspection unchecked
			K[] keysArray = (K[]) Array.newInstance(keys[0].getClass(), keys.length);
			System.arraycopy(keys, 0, keysArray, 0, keys.length);
			//noinspection unchecked
			V[] valuesArray = (V[]) Array.newInstance(values[0].getClass(), keys.length);
			System.arraycopy(values, 0, valuesArray, 0, values.length);
			return UnmodifiableMap.of(keysArray, valuesArray);
		} else {
			throw new IllegalArgumentException("The number of keys doesn't match the number of values.");
		}
	}

	class EmptyUnmodifiableMap<K, V> implements UnmodifiableMap<K, V> {

		private EmptyUnmodifiableMap() {}

		@Override
		public int size() {
			return 0;
		}

		@Override
		public boolean isEmpty() {
			return true;
		}

		@Override
		public boolean containsKey(Object key) {
			return false;
		}

		@Override
		public V get(Object key) {
			return null;
		}

		@Override
		public void forEach(BiConsumer<? super K, ? super V> action) {

		}

		@NotNull
		@Override
		public Iterator<Entry<K, V>> iterator() {
			return new Iterator<>() {
				@Override
				public boolean hasNext() {
					return false;
				}

				@Override
				public Entry<K, V> next() {
					throw new NoSuchElementException();
				}
			};
		}

		@NotNull
		@Override
		public ObjectIterator<Object2ObjectMap.Entry<K, V>> fastIterator() {
			return new ObjectIterator<>() {
				@Override
				public boolean hasNext() {
					return false;
				}

				@Override
				public Object2ObjectMap.Entry<K, V> next() {
					throw new NoSuchElementException();
				}
			};
		}

		@Override
		public Map<K, V> toUnmodifiableMap() {
			//noinspection unchecked
			return Object2ObjectMaps.EMPTY_MAP;
		}

		@Override
		public Stream<Entry<K, V>> stream() {
			return Stream.empty();
		}

		@Override
		public UnmodifiableIterableSet<K> toUnmodifiableIterableKeysSet(IntFunction<K[]> generator) {
			return UnmodifiableIterableSet.of(null);
		}
	}

	class MappedUnmodifiableMap<K, V> implements UnmodifiableMap<K, V> {

		private final Map<K, V> map;

		private MappedUnmodifiableMap(@NotNull Map<K, V> map) {
			this.map = map;
		}

		@Override
		public int size() {
			return map.size();
		}

		@Override
		public boolean isEmpty() {
			return map.isEmpty();
		}

		@Override
		public boolean containsKey(Object key) {
			return map.containsKey(key);
		}

		@Override
		public V get(Object key) {
			return map.get(key);
		}

		@Override
		public void forEach(BiConsumer<? super K, ? super V> action) {
			map.forEach(action);
		}

		@NotNull
		@Override
		public Iterator<Entry<K, V>> iterator() {
			return map.entrySet().iterator();
		}

		@NotNull
		@Override
		public ObjectIterator<Object2ObjectMap.Entry<K, V>> fastIterator() {
			if (map instanceof Object2ObjectMap) {
				return Object2ObjectMaps.fastIterator((Object2ObjectMap<K, V>) map);
			} else {
				var iterator = map.entrySet().iterator();
				var reusableEntry = new Object2ObjectMap.Entry<K, V>() {
					private K key;
					private V val;

					@Override
					public K getKey() {
						return key;
					}

					@Override
					public V getValue() {
						return val;
					}

					@Override
					public V setValue(V value) {
						throw new UnsupportedOperationException();
					}
				};
				return new ObjectIterator<>() {

					@Override
					public boolean hasNext() {
						return iterator.hasNext();
					}

					@Override
					public Object2ObjectMap.Entry<K, V> next() {
						var next = iterator.next();
						reusableEntry.key = next.getKey();
						reusableEntry.val = next.getValue();
						return reusableEntry;
					}
				};
			}
		}

		@Override
		public Map<K, V> toUnmodifiableMap() {
			return Collections.unmodifiableMap(map);
		}

		@Override
		public Stream<Entry<K, V>> stream() {
			return map.entrySet().stream();
		}

		@Override
		public UnmodifiableIterableSet<K> toUnmodifiableIterableKeysSet(IntFunction<K[]> generator) {
			return UnmodifiableIterableSet.of(map.keySet().toArray(generator));
		}
	}
}
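The fastIterator bridge above is the interesting part of this file: when the backing map is not a fastutil map, it recycles a single mutable entry across next() calls to avoid per-entry allocation. A hedged usage sketch (not part of the commit); the consequence is that callers must not retain entries across iterations:

UnmodifiableMap<String, Integer> map = UnmodifiableMap.of(new String[] {"a"}, new Integer[] {1});
var it = map.fastIterator();
while (it.hasNext()) {
	var entry = it.next();
	// Consume the entry immediately: the non-fastutil path reuses one
	// entry object, so storing it for later would see overwritten state.
	System.out.println(entry.getKey() + "=" + entry.getValue());
}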
@@ -1,204 +0,0 @@
package org.warp.commonutils.type;

import it.unimi.dsi.fastutil.objects.ObjectIterator;
import it.unimi.dsi.fastutil.objects.ObjectIterators;
import it.unimi.dsi.fastutil.objects.ObjectOpenHashSet;
import it.unimi.dsi.fastutil.objects.ObjectSet;
import it.unimi.dsi.fastutil.objects.ObjectSets;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.function.Consumer;
import java.util.stream.Stream;
import org.jetbrains.annotations.NotNull;

public interface UnmodifiableSet<K> extends UnmodifiableIterableSet<K> {

	boolean contains(Object value);

	@NotNull
	ObjectIterator<K> fastIterator();

	void forEach(Consumer<? super K> action);

	static <K> UnmodifiableSet<K> of(K[] values) {
		int valuesSize = (values != null) ? values.length : 0;

		if (valuesSize == 0) {
			// return an empty set
			return new EmptyUnmodifiableSet<>();
		}

		return new MappedUnmodifiableSet<>(new ObjectOpenHashSet<>(values, 1.0f));
	}

	static <K> UnmodifiableSet<K> of(Set<K> set) {
		return new MappedUnmodifiableSet<K>(set);
	}

	class EmptyUnmodifiableSet<K> implements UnmodifiableSet<K> {

		private EmptyUnmodifiableSet() {}

		@Override
		public int size() {
			return 0;
		}

		@Override
		public boolean isEmpty() {
			return true;
		}

		@Override
		public boolean contains(Object value) {
			return false;
		}

		@Override
		public void forEach(Consumer<? super K> action) {

		}

		@NotNull
		@Override
		public Iterator<K> iterator() {
			return new Iterator<>() {
				@Override
				public boolean hasNext() {
					return false;
				}

				@Override
				public K next() {
					throw new NoSuchElementException();
				}
			};
		}

		@NotNull
		@Override
		public ObjectIterator<K> fastIterator() {
			return new ObjectIterator<>() {
				@Override
				public boolean hasNext() {
					return false;
				}

				@Override
				public K next() {
					throw new NoSuchElementException();
				}
			};
		}

		@Override
		public Set<K> toUnmodifiableSet() {
			return new HashSet<>();
		}

		@Override
		public Stream<K> stream() {
			return Stream.empty();
		}

		@Override
		public <V> UnmodifiableIterableMap<K, V> toUnmodifiableIterableMapSetValues(V[] values) {
			return UnmodifiableIterableMap.of(null, values);
		}

		@Override
		public <K2> UnmodifiableIterableMap<K2, K> toUnmodifiableIterableMapSetKeys(K2[] keys) {
			return UnmodifiableIterableMap.of(keys, null);
		}

		@Override
		public <V> UnmodifiableMap<K, V> toUnmodifiableMapSetValues(V[] values) {
			return UnmodifiableMap.of(null, values);
		}

		@Override
		public <K2> UnmodifiableMap<K2, K> toUnmodifiableMapSetKeys(K2[] keys) {
			return UnmodifiableMap.of(keys, null);
		}
	}

	class MappedUnmodifiableSet<K> implements UnmodifiableSet<K> {

		private final Set<K> set;

		private MappedUnmodifiableSet(@NotNull Set<K> set) {
			this.set = set;
		}

		@Override
		public int size() {
			return set.size();
		}

		@Override
		public boolean isEmpty() {
			return set.isEmpty();
		}

		@Override
		public boolean contains(Object key) {
			//noinspection SuspiciousMethodCalls
			return set.contains(key);
		}

		@Override
		public void forEach(Consumer<? super K> action) {
			set.forEach(action);
		}

		@Override
		public Set<K> toUnmodifiableSet() {
			return Collections.unmodifiableSet(set);
		}

		@NotNull
		@Override
		public Iterator<K> iterator() {
			return set.iterator();
		}

		@NotNull
		@Override
		public ObjectIterator<K> fastIterator() {
			if (set instanceof ObjectSet) {
				return ((ObjectSet<K>) set).iterator();
			} else {
				return ObjectIterators.asObjectIterator(set.iterator());
			}
		}

		@Override
		public Stream<K> stream() {
			return set.stream();
		}

		@Override
		public <V> UnmodifiableIterableMap<K, V> toUnmodifiableIterableMapSetValues(V[] values) {
			return UnmodifiableIterableMap.ofObjects(set.toArray(), values);
		}

		@Override
		public <K2> UnmodifiableIterableMap<K2, K> toUnmodifiableIterableMapSetKeys(K2[] keys) {
			return UnmodifiableIterableMap.ofObjects(keys, set.toArray());
		}

		@Override
		public <V> UnmodifiableMap<K, V> toUnmodifiableMapSetValues(V[] values) {
			return UnmodifiableMap.ofObjects(set.toArray(), values);
		}

		@Override
		public <K2> UnmodifiableMap<K2, K> toUnmodifiableMapSetKeys(K2[] keys) {
			return UnmodifiableMap.ofObjects(keys, set.toArray());
		}
	}
}
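As with the map above, fastIterator here avoids wrapper allocation when the backing set is already a fastutil ObjectSet, and falls back to ObjectIterators.asObjectIterator otherwise. A brief sketch (not part of the commit):

UnmodifiableSet<String> set = UnmodifiableSet.of(new String[] {"x", "y"});
// Backed by an ObjectOpenHashSet, so fastIterator() is the set's own iterator:
set.fastIterator().forEachRemaining(System.out::println);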
@@ -1,124 +0,0 @@
package org.warp.commonutils.concurrency.executor;

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.opentest4j.AssertionFailedError;
import org.warp.commonutils.type.ShortNamedThreadFactory;

public class BoundedQueueTest {

	@Test
	public void testBoundedQueue() throws InterruptedException, ExecutionException {
		testBoundedQueue(1, 1);
		testBoundedQueue(1, 10);
		testBoundedQueue(4, 10);
		testBoundedQueue(0, 10);
	}

	public void testBoundedQueue(int corePoolSize, int maxPoolSize) throws InterruptedException, ExecutionException {
		int maxQueueSize = 2;
		AtomicInteger queueSize = new AtomicInteger();
		AtomicReference<AssertionFailedError> failedError = new AtomicReference<>();
		AtomicInteger maxRecordedCurrentQueueSize = new AtomicInteger(0);
		var executor = BoundedExecutorService.create(maxQueueSize,
				maxPoolSize,
				0L,
				TimeUnit.MILLISECONDS,
				new ShortNamedThreadFactory("test"),
				(isQueueFull, currentQueueSize) -> {
					try {
						if (currentQueueSize >= maxQueueSize) {
							Assertions.assertTrue(isQueueFull);
						} else {
							Assertions.assertFalse(isQueueFull);
						}
					} catch (AssertionFailedError ex) {
						if (failedError.get() == null) {
							failedError.set(ex);
						}
						ex.printStackTrace();
					}
				}
		);

		for (int i = 0; i < 10000; i++) {
			queueSize.incrementAndGet();
			executor.execute(queueSize::decrementAndGet);
		}

		executor.testShutdown();
		if (!executor.awaitTermination(10, TimeUnit.SECONDS)) {
			Assertions.fail("Not terminated");
		}

		Assertions.assertNull(failedError.get());
	}

	@Test
	public void testBoundedQueueMaxPoolSize1_1() throws InterruptedException, ExecutionException {
		testBoundedQueueMaxPoolSize(1, 1);
	}

	@Test
	public void testBoundedQueueMaxPoolSize10_10() throws InterruptedException, ExecutionException {
		testBoundedQueueMaxPoolSize(10, 10);
	}

	@Test
	public void testBoundedQueueMaxPoolSize10_1() throws InterruptedException, ExecutionException {
		testBoundedQueueMaxPoolSize(10, 1);
	}

	@Test
	public void testBoundedQueueMaxPoolSize1_10() throws InterruptedException, ExecutionException {
		testBoundedQueueMaxPoolSize(1, 10);
	}

	@Test
	public void testBoundedQueueMaxPoolSize4_10() throws InterruptedException, ExecutionException {
		testBoundedQueueMaxPoolSize(4, 10);
	}

	public void testBoundedQueueMaxPoolSize(int maxPoolSize, int maxQueueSize) throws InterruptedException, ExecutionException {
		CountDownLatch allFilled = new CountDownLatch(maxPoolSize);
		var executor = BoundedExecutorService.create(maxQueueSize,
				maxPoolSize,
				0L,
				TimeUnit.MILLISECONDS,
				new ShortNamedThreadFactory("test"),
				(isQueueFull, currentQueueSize) -> {

				}
		);

		AtomicReference<InterruptedException> failedError = new AtomicReference<>();
		for (int i = 0; i < maxPoolSize; i++) {
			executor.execute(() -> {
				allFilled.countDown();
				try {
					allFilled.await();
				} catch (InterruptedException ex) {
					if (failedError.get() == null) {
						failedError.set(ex);
					}
				}
			});
		}

		if (!allFilled.await(10, TimeUnit.SECONDS)) {
			Assertions.fail("Not reached max pool size");
		}

		executor.testShutdown();
		if (!executor.awaitTermination(10, TimeUnit.SECONDS)) {
			Assertions.fail("Not terminated");
		}

		Assertions.assertNull(failedError.get());
	}
}
@@ -1,42 +0,0 @@
package org.warp.commonutils.concurrency.executor;

import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;

public class TestScheduledTaskLifecycle {

	@Test
	public void testScheduledTaskLifecycle() throws InterruptedException {
		var scheduler = Executors.newScheduledThreadPool(100);

		var lifecycle = new ScheduledTaskLifecycle();

		AtomicInteger runningTasks = new AtomicInteger();

		for (int i = 0; i < 49; i++) {
			lifecycle.registerScheduledTask(scheduler.scheduleAtFixedRate(() -> {
				lifecycle.startScheduledTask();
				runningTasks.incrementAndGet();
				try {
					Thread.sleep(33);
				} catch (InterruptedException e) {
					e.printStackTrace();
				} finally {
					runningTasks.decrementAndGet();
					lifecycle.endScheduledTask();
				}
			}, 0, 1, TimeUnit.MICROSECONDS));
		}

		Thread.sleep(96);

		lifecycle.cancelAndWait();

		System.out.println("stopped");

		Assertions.assertEquals(0, runningTasks.get());
	}
}
@@ -1,40 +0,0 @@
package org.warp.commonutils.functional;

import java.io.IOException;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.warp.commonutils.functional.Unchecked.UncheckedConsumer;

public class TestGenericExceptions {

	@Test
	public void testGenericExceptions() {
		testFunction((number) -> {
			Assertions.assertEquals(number, 1);
		}).done();

		boolean thrown = false;
		try {
			testFunction((number) -> {
				throw new IOException("Test");
			}).throwException(IOException.class);
		} catch (IOException e) {
			thrown = true;
		}
		Assertions.assertEquals(true, thrown, "IOException not thrown");

		boolean thrown2 = false;
		try {
			testFunction((number) -> {
				throw new IOException("Test");
			}).throwException(Exception.class);
		} catch (Exception e) {
			thrown2 = true;
		}
		Assertions.assertEquals(true, thrown2, "Exception not thrown");
	}

	private UncheckedResult testFunction(UncheckedConsumer<Integer> uncheckedConsumer) {
		return Unchecked.wrap(uncheckedConsumer).apply(1);
	}
}
@@ -1,160 +0,0 @@
package org.warp.commonutils.type;

import it.unimi.dsi.fastutil.objects.ObjectLinkedOpenHashSet;
import java.util.LinkedHashSet;
import java.util.NoSuchElementException;
import java.util.Set;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.warp.commonutils.error.IndexOutOfBoundsException;

public class TestStackSet {

	@Test
	public void testStackSetEmptyTop() {
		for (StackSet<String> implementation : getImplementations()) {
			Assertions.assertThrows(NoSuchElementException.class, implementation::top);
		}
	}

	@Test
	public void testStackSetTop() {
		for (StackSet<String> implementation : getImplementations()) {
			implementation.push("testBottom");
			implementation.push("testMiddle");
			implementation.push("testTop");
			Assertions.assertEquals("testTop", implementation.top());
		}
	}

	@Test
	public void testStackSetItemPeekBottom() {
		for (StackSet<String> implementation : getImplementations()) {
			implementation.push("testBottom");
			implementation.push("testMiddle");
			implementation.push("testTop");
			Assertions.assertEquals("testBottom", implementation.peek(2));
		}
	}

	@Test
	public void testStackSetItemPeekMiddle() {
		for (StackSet<String> implementation : getImplementations()) {
			implementation.push("testBottom");
			implementation.push("testMiddle");
			implementation.push("testTop");
			Assertions.assertEquals("testMiddle", implementation.peek(1));
		}
	}

	@Test
	public void testStackSetItemPeekTop() {
		for (StackSet<String> implementation : getImplementations()) {
			implementation.push("testBottom");
			implementation.push("testMiddle");
			implementation.push("testTop");
			Assertions.assertEquals("testTop", implementation.peek(0));
		}
	}

	@Test
	public void testStackSetItemPeekTopSingle() {
		for (StackSet<String> implementation : getImplementations()) {
			implementation.push("testTop");
			Assertions.assertEquals("testTop", implementation.peek(0));
		}
	}

	@Test
	public void testStackSetEmptyIsEmpty() {
		for (StackSet<String> implementation : getImplementations()) {
			Assertions.assertTrue(implementation.isEmpty());
		}
	}

	@Test
	public void testStackSetFullIsEmpty() {
		for (StackSet<String> implementation : getImplementations()) {
			implementation.push("testTop");
			Assertions.assertFalse(implementation.isEmpty());
		}
	}

	@Test
	public void testStackSetEmptyPeekTop() {
		for (StackSet<String> implementation : getImplementations()) {
			Assertions.assertThrows(IndexOutOfBoundsException.class, () -> implementation.peek(0));
		}
	}

	@Test
	public void testStackSetPeekOverRange() {
		for (StackSet<String> implementation : getImplementations()) {
			implementation.push("testTop");
			Assertions.assertThrows(IndexOutOfBoundsException.class, () -> implementation.peek(10));
		}
	}

	@Test
	public void testStackSetPeekUnderRange() {
		for (StackSet<String> implementation : getImplementations()) {
			implementation.push("testTop");
			Assertions.assertThrows(IndexOutOfBoundsException.class, () -> implementation.peek(-10));
		}
	}

	@Test
	public void testStackSetItemPop() {
		for (StackSet<String> implementation : getImplementations()) {
			implementation.push("testBottom");
			implementation.push("testMiddle");
			implementation.push("testTop");
			implementation.push("testExtra");
			Assertions.assertEquals("testTop", implementation.peek(1));
			implementation.pop();
			Assertions.assertEquals("testTop", implementation.peek(0));
			Assertions.assertEquals("testTop", implementation.top());
		}
	}

	@Test
	public void testStackSetCopyOrder() {
		for (StackSet<String> implementation : getImplementations()) {
			implementation.push("testBottom");
			implementation.push("testMiddle");
			implementation.push("testTop");
			if (implementation instanceof FastUtilStackSetWrapper) {
				var copy = new HashStackSet<>((FastUtilStackSetWrapper<String>) implementation);
				Assertions.assertEquals("testTop", copy.pop());
				Assertions.assertEquals("testMiddle", copy.pop());
				Assertions.assertEquals("testBottom", copy.pop());
				Assertions.assertTrue(copy.isEmpty());
			}
		}
	}

	@Test
	public void testStackSetOneItemOnePop() {
		for (StackSet<String> implementation : getImplementations()) {
			implementation.push("testExtra");
			implementation.pop();
			Assertions.assertThrows(IndexOutOfBoundsException.class, () -> implementation.peek(0));
			Assertions.assertThrows(NoSuchElementException.class, implementation::top);
			Assertions.assertTrue(implementation.isEmpty());
		}
	}

	@Test
	public void testStackSetItemEmptyPop() {
		for (StackSet<String> implementation : getImplementations()) {
			Assertions.assertThrows(NoSuchElementException.class, implementation::pop);
		}
	}

	private Set<StackSet<String>> getImplementations() {
		return Set.of(new HashStackSet<>(),
				new JavaStackSetWrapper<>(new LinkedHashSet<>()),
				new FastUtilStackSetWrapper<>(new ObjectLinkedOpenHashSet<>())
		);
	}
}