strangedb-core/src/main/java/it/cavallium/strangedb/database/references/DatabaseReferencesMetadataC...

package it.cavallium.strangedb.database.references;

import it.cavallium.strangedb.VariableWrapper;
import it.unimi.dsi.fastutil.longs.*;
import it.unimi.dsi.fastutil.objects.ObjectArrayList;
import it.unimi.dsi.fastutil.objects.ObjectIterator;
import it.unimi.dsi.fastutil.objects.ObjectIterators;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.*;
import java.util.concurrent.locks.ReentrantReadWriteLock;

import static it.cavallium.strangedb.database.IBlocksMetadata.ERROR_BLOCK_ID;
import static it.cavallium.strangedb.database.blocks.DatabaseBlocksMetadata.BLOCK_META_READS_AT_EVERY_READ;
import static it.cavallium.strangedb.database.references.DatabaseReferencesMetadata.NONEXISTENT_REFERENCE_INFO;
import static it.cavallium.strangedb.database.references.DatabaseReferencesMetadata.REF_META_READS_AT_EVERY_READ;
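
/**
 * In-memory cache of reference metadata: maps each reference id to its block id and cleaner id.
 * Writes go to the cache first and are flushed in bulk to a {@link DatabaseReferencesMetadataCacheFlusher},
 * in insertion order, once the cache grows past a threshold, or entirely on {@link #close()}.
 */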
public class DatabaseReferencesMetadataCache {
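
	// Cache sizing: once the cache reaches FLUSH_CACHE_SIZE, the oldest entries are flushed
	// until GOOD_CACHE_SIZE remain; MAX_CACHE_SIZE is the expected size passed to the backing maps.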
	private static final int BASE_QUANTITY = (REF_META_READS_AT_EVERY_READ < 500 ? REF_META_READS_AT_EVERY_READ : 500);
	private static final int GOOD_CACHE_SIZE = 140 * BASE_QUANTITY;
	private static final int FLUSH_CACHE_SIZE = 300 * BLOCK_META_READS_AT_EVERY_READ;
	private static final int MAX_CACHE_SIZE = 400 * BASE_QUANTITY;

	private final Long2LongMap references2Blocks = Long2LongMaps.synchronize(new Long2LongLinkedOpenHashMap(MAX_CACHE_SIZE, 0.5f));
	private final Long2ByteMap referencesCleaners = Long2ByteMaps.synchronize(new Long2ByteLinkedOpenHashMap(MAX_CACHE_SIZE, 0.5f));
	private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock(false);
	private final DatabaseReferencesMetadataCacheFlusher flusher;
	private volatile boolean closed;
	ExecutorService flushExecutorService = Executors.newFixedThreadPool(ForkJoinPool.getCommonPoolParallelism(), (r) -> new Thread(r, "References Flush Thread"));

	public DatabaseReferencesMetadataCache(DatabaseReferencesMetadataCacheFlusher flusher) {
		this.flusher = flusher;
	}
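
	/**
	 * Returns the cached block id for the given reference, or ERROR_BLOCK_ID if the reference is not cached.
	 */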
	public long getBlock(long reference) throws IOException {
		if (closed) throw new IOException("Cache already closed!");
		lock.readLock().lock();
		try {
			return references2Blocks.getOrDefault(reference, ERROR_BLOCK_ID);
		} finally {
			lock.readLock().unlock();
		}
	}
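
	/**
	 * Returns the cached ReferenceInfo (cleaner id and block id) for the given reference,
	 * or NONEXISTENT_REFERENCE_INFO if the reference is not cached.
	 */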
	public ReferenceInfo get(long reference) throws IOException {
		if (closed) throw new IOException("Cache already closed!");
		lock.readLock().lock();
		try {
			long blockId = references2Blocks.getOrDefault(reference, ERROR_BLOCK_ID);
			if (blockId == ERROR_BLOCK_ID) {
				return NONEXISTENT_REFERENCE_INFO;
			}
			byte cleanerId = referencesCleaners.get(reference);
			return new ReferenceInfo(cleanerId, blockId);
		} finally {
			lock.readLock().unlock();
		}
	}
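
	/**
	 * Caches the cleaner id and block id of a single reference, then flushes the oldest
	 * entries if the cache has grown past FLUSH_CACHE_SIZE.
	 */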
	public void put(long reference, byte cleanerId, long blockId) throws IOException {
		if (closed) throw new IOException("Cache already closed!");
		lock.writeLock().lock();
		try {
			if (cleanerId == 0) {
				throw new IOException("Null cleaner id");
			}
			references2Blocks.put(reference, blockId);
			referencesCleaners.put(reference, cleanerId);
			flush();
		} finally {
			lock.writeLock().unlock();
		}
	}
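
	/**
	 * Caches cleaner ids and block ids for multiple references at once, skipping entries whose
	 * cleaner id is 0, then flushes the oldest entries if the cache has grown past FLUSH_CACHE_SIZE.
	 */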
	public void putAll(long[] references, byte[] cleanerIds, long[] blockIds) throws IOException {
		if (closed) throw new IOException("Cache already closed!");
		lock.writeLock().lock();
		try {
			Long2LongMap referencesBlocksToAdd = new Long2LongLinkedOpenHashMap(references, blockIds, 0.5f);
			Long2ByteMap referencesCleanersToAdd = new Long2ByteLinkedOpenHashMap(references, cleanerIds, 0.5f);
			for (int i = 0; i < references.length; i++) {
				if (cleanerIds[i] == 0) {
					referencesBlocksToAdd.remove(references[i]);
					referencesCleanersToAdd.remove(references[i]);
				}
			}
			references2Blocks.putAll(referencesBlocksToAdd);
			referencesCleaners.putAll(referencesCleanersToAdd);
			flush();
		} finally {
			lock.writeLock().unlock();
		}
	}
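
	/**
	 * If the cache has reached FLUSH_CACHE_SIZE, removes the oldest entries until GOOD_CACHE_SIZE
	 * remain and writes them out in parallel through the flusher.
	 */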
	private void flush() throws IOException {
		if (closed) return;
		int references2BlocksSize = references2Blocks.size();
		if (references2BlocksSize >= FLUSH_CACHE_SIZE) {
			ObjectIterator<Long2LongMap.Entry> entriesIterator = references2Blocks.long2LongEntrySet().iterator();
			ObjectIterator<Long2ByteMap.Entry> cleanersIterator = referencesCleaners.long2ByteEntrySet().iterator();
			@SuppressWarnings("unchecked")
			ObjectArrayList<Callable<Void>> tasks = ObjectArrayList.wrap(new Callable[references2BlocksSize - GOOD_CACHE_SIZE], references2BlocksSize - GOOD_CACHE_SIZE);
			for (int i = 0; i < references2BlocksSize - GOOD_CACHE_SIZE; i++) {
				Long2LongMap.Entry entry = entriesIterator.next();
				Long2ByteMap.Entry cleaner = cleanersIterator.next();
				long refId = entry.getLongKey();
				byte cleanerId = cleaner.getByteValue();
				long blockId = entry.getLongValue();
				entriesIterator.remove();
				cleanersIterator.remove();
				tasks.set(i, () -> {
					try {
						flusher.flush(refId, cleanerId, blockId);
					} catch (IOException e) {
						throw new CompletionException(e);
					}
					return null;
				});
			}
			try {
				flushExecutorService.invokeAll(tasks);
			} catch (InterruptedException e) {
				throw new IOException(e);
			}
		}
	}
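
	/**
	 * Flushes every cached entry to the flusher and shuts down the flush executor.
	 * After this call the cache rejects further operations.
	 */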
	public void close() throws IOException {
		if (!closed) {
			closed = true;
			lock.writeLock().lock();
			try {
				int references2BlocksSize = references2Blocks.size();
				ObjectIterator<Long2LongMap.Entry> entriesIterator = references2Blocks.long2LongEntrySet().iterator();
				ObjectIterator<Long2ByteMap.Entry> cleanersIterator = referencesCleaners.long2ByteEntrySet().iterator();
				@SuppressWarnings("unchecked")
				ObjectArrayList<Callable<Void>> tasks = ObjectArrayList.wrap(new Callable[references2BlocksSize], references2BlocksSize);
				for (int i = 0; i < references2BlocksSize; i++) {
					Long2LongMap.Entry entry = entriesIterator.next();
					Long2ByteMap.Entry cleaner = cleanersIterator.next();
					long refId = entry.getLongKey();
					byte cleanerId = cleaner.getByteValue();
					long blockId = entry.getLongValue();
					entriesIterator.remove();
					cleanersIterator.remove();
					tasks.set(i, () -> {
						try {
							flusher.flush(refId, cleanerId, blockId);
						} catch (IOException e) {
							throw new CompletionException(e);
						}
						return null;
					});
				}
				try {
					flushExecutorService.invokeAll(tasks);
				} catch (InterruptedException e) {
					throw new IOException(e);
				}
				flushExecutorService.shutdown();
				try {
					if (!flushExecutorService.awaitTermination(Long.MAX_VALUE, TimeUnit.DAYS))
						flushExecutorService.shutdownNow();
				} catch (InterruptedException e) {
					throw new IOException(e);
				}
			} finally {
				lock.writeLock().unlock();
			}
		}
	}
}