strangedb-core/src/main/java/it/cavallium/strangedb/database/references/DatabaseReferencesMetadataC...

package it.cavallium.strangedb.database.references;

import it.unimi.dsi.fastutil.longs.*;
import it.unimi.dsi.fastutil.objects.ObjectIterator;

import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.*;
import java.util.concurrent.locks.ReentrantReadWriteLock;

import static it.cavallium.strangedb.database.IBlocksMetadata.ERROR_BLOCK_ID;
import static it.cavallium.strangedb.database.blocks.DatabaseBlocksMetadata.BLOCK_META_READS_AT_EVERY_READ;
import static it.cavallium.strangedb.database.references.DatabaseReferencesMetadata.*;
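
/**
 * Write-back cache for reference metadata: maps each reference to its block id
 * and cleaner id, and flushes the oldest entries asynchronously through a
 * {@link DatabaseReferencesMetadataCacheFlusher} once the cache grows past
 * {@code FLUSH_CACHE_SIZE}.
 *
 * <p>Illustrative use (the flusher construction is a hypothetical stand-in,
 * not something this class defines):
 * <pre>{@code
 * DatabaseReferencesMetadataCacheFlusher flusher = ...; // e.g. writes entries to the metadata file
 * DatabaseReferencesMetadataCache cache = new DatabaseReferencesMetadataCache(flusher);
 * cache.put(reference, cleanerId, blockId);
 * long blockId = cache.getBlock(reference); // ERROR_BLOCK_ID if absent
 * cache.close(); // synchronously flushes everything still cached
 * }</pre>
 */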
public class DatabaseReferencesMetadataCache {

    // Read quantity used to scale the cache thresholds below
    private static final int BASE_QUANTITY = Math.min(REF_META_READS_AT_EVERY_READ, 500);
    // Target size the cache is drained down to by a flush
    private static final int GOOD_CACHE_SIZE = 140 * BASE_QUANTITY;
    // Size at which an asynchronous flush is triggered
    private static final int FLUSH_CACHE_SIZE = 300 * BLOCK_META_READS_AT_EVERY_READ;
    // Used as the initial capacity of the backing maps
    private static final int MAX_CACHE_SIZE = 400 * BASE_QUANTITY;

    // Both maps are linked hash maps filled in lockstep, so they share one
    // insertion order; flush() and close() rely on that when iterating both.
    private final Long2LongMap references2Blocks = new Long2LongLinkedOpenHashMap(MAX_CACHE_SIZE, 0.5f);
    private final Long2ByteMap referencesCleaners = new Long2ByteLinkedOpenHashMap(MAX_CACHE_SIZE, 0.5f);
    private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock(false);
    private final DatabaseReferencesMetadataCacheFlusher flusher;
    private volatile boolean closed;
    private final ExecutorService flushService = Executors.newSingleThreadExecutor();
    private volatile boolean flushing;

    public DatabaseReferencesMetadataCache(DatabaseReferencesMetadataCacheFlusher flusher) {
        this.flusher = flusher;
    }
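
    /**
     * Returns the block id cached for {@code reference}, or
     * {@code ERROR_BLOCK_ID} if the reference is not in the cache.
     */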
    public long getBlock(long reference) throws IOException {
        if (closed) throw new IOException("Cache already closed!");
        lock.readLock().lock();
        try {
            return references2Blocks.getOrDefault(reference, ERROR_BLOCK_ID);
        } finally {
            lock.readLock().unlock();
        }
    }
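
    /**
     * Returns the cached cleaner id and block id of {@code reference}, or
     * {@code NONEXISTENT_REFERENCE_INFO} if the reference is not in the cache.
     */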
    public ReferenceInfo get(long reference) throws IOException {
        if (closed) throw new IOException("Cache already closed!");
        lock.readLock().lock();
        try {
            long blockId = references2Blocks.getOrDefault(reference, ERROR_BLOCK_ID);
            if (blockId == ERROR_BLOCK_ID) {
                return NONEXISTENT_REFERENCE_INFO;
            }
            byte cleanerId = referencesCleaners.get(reference);
            return new ReferenceInfo(cleanerId, blockId);
        } finally {
            lock.readLock().unlock();
        }
    }
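
    /**
     * Caches the cleaner id and block id of a single reference. A cleaner id
     * of {@code 0} is treated as "no cleaner" and rejected.
     */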
    public void put(long reference, byte cleanerId, long blockId) throws IOException {
        if (closed) throw new IOException("Cache already closed!");
        lock.writeLock().lock();
        try {
            if (cleanerId == 0) {
                throw new IOException("Null cleaner id");
            }
            references2Blocks.put(reference, blockId);
            referencesCleaners.put(reference, cleanerId);
        } finally {
            lock.writeLock().unlock();
            // Trigger an asynchronous flush if the cache has grown too large
            flushAsync();
        }
    }
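
    /**
     * Caches many references at once. The three arrays are parallel: element
     * {@code i} of each array describes one reference, and all three must have
     * the same length.
     */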
    public void putAll(long[] references, byte[] cleanerIds, long[] blockIds) throws IOException {
        if (closed) throw new IOException("Cache already closed!");
        lock.writeLock().lock();
        try {
            for (int i = 0; i < references.length; i++) {
                if (cleanerIds[i] == 0) {
                    throw new IOException("Null cleaner id");
                }
            }
            Long2LongMap referencesBlocksToAdd = new Long2LongLinkedOpenHashMap(references, blockIds, 0.5f);
            Long2ByteMap referencesCleanersToAdd = new Long2ByteLinkedOpenHashMap(references, cleanerIds, 0.5f);
            references2Blocks.putAll(referencesBlocksToAdd);
            referencesCleaners.putAll(referencesCleanersToAdd);
        } finally {
            lock.writeLock().unlock();
            flushAsync();
        }
    }
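
    /**
     * Schedules an asynchronous flush on the single-threaded flush executor
     * once the cache has reached {@code FLUSH_CACHE_SIZE}. The {@code flushing}
     * flag keeps at most one flush queued at a time.
     */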
    private void flushAsync() {
        if (references2Blocks.size() >= FLUSH_CACHE_SIZE && !flushing) {
            flushing = true;
            flushService.execute(() -> {
                try {
                    flush();
                } catch (IOException e) {
                    // Unchecked wrapper: propagates to the flush thread's
                    // uncaught-exception handler, since execute() offers no
                    // other channel for a checked exception.
                    throw new RejectedExecutionException(e);
                }
            });
        }
    }
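
    /**
     * Flushes the oldest entries (in insertion order) through the flusher
     * until the cache shrinks back to {@code GOOD_CACHE_SIZE}, waiting for the
     * pending writes in batches of 1000.
     */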
    private void flush() throws IOException {
        if (closed) {
            flushing = false;
            return;
        }
        int references2BlocksSize = references2Blocks.size();
        if (references2BlocksSize >= FLUSH_CACHE_SIZE) {
            lock.writeLock().lock();
            try {
                // Both maps were filled in lockstep, so the two iterators walk
                // the same insertion order.
                ObjectIterator<Long2LongMap.Entry> entriesIterator = references2Blocks.long2LongEntrySet().iterator();
                ObjectIterator<Long2ByteMap.Entry> cleanersIterator = referencesCleaners.long2ByteEntrySet().iterator();
                List<Future<?>> entriesToFlush = new ArrayList<>();
                while (references2BlocksSize >= GOOD_CACHE_SIZE) {
                    Long2LongMap.Entry entry = entriesIterator.next();
                    Long2ByteMap.Entry cleaner = cleanersIterator.next();
                    entriesToFlush.add(flusher.flush(entry.getLongKey(), cleaner.getByteValue(), entry.getLongValue()));
                    entriesIterator.remove();
                    cleanersIterator.remove();
                    if (entriesToFlush.size() >= 1000) {
                        awaitFlushWriteEnd(entriesToFlush);
                    }
                    references2BlocksSize--;
                }
                awaitFlushWriteEnd(entriesToFlush);
            } finally {
                flushing = false;
                lock.writeLock().unlock();
            }
        } else {
            flushing = false;
        }
    }
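
    /**
     * Closes the cache: stops the flush executor, then synchronously flushes
     * every remaining entry. After this, the other methods fail with an
     * {@link IOException}.
     */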
    public void close() throws IOException {
        if (!closed) {
            closed = true;
            stopFlushService();
            lock.writeLock().lock();
            try {
                int references2BlocksSize = references2Blocks.size();
                ObjectIterator<Long2LongMap.Entry> entriesIterator = references2Blocks.long2LongEntrySet().iterator();
                ObjectIterator<Long2ByteMap.Entry> cleanersIterator = referencesCleaners.long2ByteEntrySet().iterator();
                List<Future<?>> entriesToFlush = new LinkedList<>();
                while (references2BlocksSize > 0) {
                    Long2LongMap.Entry entry = entriesIterator.next();
                    Long2ByteMap.Entry cleaner = cleanersIterator.next();
                    entriesToFlush.add(flusher.flush(entry.getLongKey(), cleaner.getByteValue(), entry.getLongValue()));
                    entriesIterator.remove();
                    cleanersIterator.remove();
                    if (entriesToFlush.size() >= 1000) {
                        awaitFlushWriteEnd(entriesToFlush);
                    }
                    references2BlocksSize--;
                }
                awaitFlushWriteEnd(entriesToFlush);
            } finally {
                lock.writeLock().unlock();
            }
        }
    }
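
    /**
     * Shuts down the flush executor, giving queued flushes up to 30 seconds to
     * finish before forcing termination.
     */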
    private void stopFlushService() {
        flushService.shutdown();
        try {
            flushService.awaitTermination(30, TimeUnit.SECONDS);
        } catch (InterruptedException e) {
            // Restore the interrupt status instead of silently swallowing it
            Thread.currentThread().interrupt();
        }
        if (!flushService.isTerminated()) {
            flushService.shutdownNow();
        }
    }
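
    /**
     * Waits for every pending flush future, rethrowing the first failure as an
     * {@link IOException}, and clears the list.
     */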
    private void awaitFlushWriteEnd(List<Future<?>> entriesToFlush) throws IOException {
        try {
            entriesToFlush.parallelStream().forEach((entry) -> {
                try {
                    entry.get();
                } catch (InterruptedException e) {
                    throw new CompletionException(e);
                } catch (ExecutionException e) {
                    throw new CompletionException(e.getCause());
                }
            });
        } catch (CompletionException e) {
            throw new IOException(e.getCause());
        } finally {
            entriesToFlush.clear();
        }
    }
}