Fixed BigList and Cleaner

Andrea Cavalli 2019-01-06 00:31:52 +01:00
parent a4a981ad1c
commit 71a8148bdc
11 changed files with 237 additions and 156 deletions

View File

@ -0,0 +1,5 @@
package org.warp.jcwdb;
public interface AdvancedSaveable extends Saveable {
public void save(boolean isEditFinished);
}
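
Not part of the commit: a minimal sketch of an AdvancedSaveable implementor, assuming Saveable declares a plain no-argument save(). The flag distinguishes an intermediate save from the save that closes an edit session.

package org.warp.jcwdb;

// Hypothetical value type, for illustration only.
public class ExampleValue implements AdvancedSaveable {
    private boolean dirty = true;

    @Override
    public void save() {
        // Plain Saveable contract: an intermediate save, the edit goes on.
        this.save(false);
    }

    @Override
    public void save(boolean isEditFinished) {
        if (dirty) {
            // ... persist this value's pending changes here ...
            dirty = false;
        }
        // When isEditFinished is true, the owning EntryReference also
        // re-enables flushing for the underlying index (see EntryReference below).
    }
}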

View File

@ -45,6 +45,11 @@ public class CacheIndexManager implements IndexManager {
return null;
}
@Override
public void setFlushingAllowed(long index, boolean isFlushingAllowed) {
// TODO: implement
}
@Override
public void delete(long index) {
// TODO: implement

View File

@ -8,6 +8,8 @@ import it.unimi.dsi.fastutil.longs.Long2ObjectMap.Entry;
public class Cleaner {
public static final boolean DISABLE_CLEANER = false;
public static final boolean ENABLE_CLEANER_LOGGING = false;
private static final double MAXIMUM_SLEEP_INTERVAL = 8d * 1000d; // 8 seconds
private static final double MINIMUM_SLEEP_INTERVAL = 1d * 1000d; // 1 second
private static final double NORMAL_REMOVED_ITEMS = 2500d;
@ -26,7 +28,9 @@ public class Cleaner {
}
public void start() {
//this.cleanerThread.start();
if (!DISABLE_CLEANER) {
this.cleanerThread.start();
}
}
/**
@ -38,7 +42,7 @@ public class Cleaner {
for (Cleanable cleanable : objectsToClean) {
cleanedItems += cleanable.clean();
}
System.gc();
//System.gc();
return cleanedItems;
}
@ -61,38 +65,38 @@ public class Cleaner {
public void run() {
while(!stopRequest) {
try {
System.out.println("[CLEANER] Waiting " + sleepInterval + "ms.");
if (ENABLE_CLEANER_LOGGING) System.out.println("[CLEANER] Waiting " + sleepInterval + "ms.");
sleepFor(sleepInterval);
final long time1 = System.currentTimeMillis();
final double removedItems = clean();
final long time2 = System.currentTimeMillis();
System.out.println("[CLEANER] CLEAN_TIME " + (time2 - time1));
if (ENABLE_CLEANER_LOGGING) System.out.println("[CLEANER] CLEAN_TIME " + (time2 - time1));
double suggestedExecutionTimeByItemsCalculations = (sleepInterval + MAXIMUM_SLEEP_INTERVAL) / 2;
System.out.println("[CLEANER] REMOVED_ITEMS: " + removedItems);
if (ENABLE_CLEANER_LOGGING) System.out.println("[CLEANER] REMOVED_ITEMS: " + removedItems);
if (removedItems > 0) {
final double removedItemsRatio = removedItems / NORMAL_REMOVED_ITEMS;
System.out.println("[CLEANER] REMOVED_ITEMS_RATIO: " + removedItemsRatio);
if (ENABLE_CLEANER_LOGGING) System.out.println("[CLEANER] REMOVED_ITEMS_RATIO: " + removedItemsRatio);
if (removedItemsRatio < 1d / REMOVED_ITEMS_RATIO || removedItemsRatio >= REMOVED_ITEMS_RATIO) {
suggestedExecutionTimeByItemsCalculations = sleepInterval / removedItemsRatio;
}
}
System.out.println("[CLEANER] Items: SUGGESTING SLEEP_INTERVAL FROM " + sleepInterval + "ms TO " + suggestedExecutionTimeByItemsCalculations + "ms");
if (ENABLE_CLEANER_LOGGING) System.out.println("[CLEANER] Items: SUGGESTING SLEEP_INTERVAL FROM " + sleepInterval + "ms TO " + suggestedExecutionTimeByItemsCalculations + "ms");
double newSleepInterval = suggestedExecutionTimeByItemsCalculations;
System.out.println("[CLEANER] Total: SUGGESTING SLEEP_INTERVAL FROM " + sleepInterval + "ms TO " + newSleepInterval + "ms");
if (ENABLE_CLEANER_LOGGING) System.out.println("[CLEANER] Total: SUGGESTING SLEEP_INTERVAL FROM " + sleepInterval + "ms TO " + newSleepInterval + "ms");
if (newSleepInterval > MAXIMUM_SLEEP_INTERVAL) {
sleepInterval = (int) MAXIMUM_SLEEP_INTERVAL;
} else if (newSleepInterval < MINIMUM_SLEEP_INTERVAL) {
sleepInterval = (int) MINIMUM_SLEEP_INTERVAL;
} else {
System.out.println("[CLEANER] CHANGED SLEEP_INTERVAL FROM " + sleepInterval + "ms TO " + newSleepInterval + "ms");
if (ENABLE_CLEANER_LOGGING) System.out.println("[CLEANER] CHANGED SLEEP_INTERVAL FROM " + sleepInterval + "ms TO " + newSleepInterval + "ms");
sleepInterval = (int) newSleepInterval;
}
System.out.println("[CLEANER] Cleaned " + removedItems + " items.");
if (ENABLE_CLEANER_LOGGING) System.out.println("[CLEANER] Cleaned " + removedItems + " items.");
} catch (InterruptedException e) {
}
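
The loop above tunes its own sleep interval from the number of cleaned items. Below is a self-contained sketch of that calculation with the constants inlined; the value of REMOVED_ITEMS_RATIO is not visible in this diff and is assumed here.

// Returns the next sleep interval in milliseconds, mirroring the logic in run().
static int nextSleepInterval(int sleepInterval, double removedItems) {
    final double MAXIMUM_SLEEP_INTERVAL = 8d * 1000d;
    final double MINIMUM_SLEEP_INTERVAL = 1d * 1000d;
    final double NORMAL_REMOVED_ITEMS = 2500d;
    final double REMOVED_ITEMS_RATIO = 3d; // assumed value, not shown in this diff

    // Default suggestion: drift halfway towards the maximum interval.
    double suggested = (sleepInterval + MAXIMUM_SLEEP_INTERVAL) / 2;
    if (removedItems > 0) {
        double removedItemsRatio = removedItems / NORMAL_REMOVED_ITEMS;
        // React only when the workload is clearly above or below "normal".
        if (removedItemsRatio < 1d / REMOVED_ITEMS_RATIO || removedItemsRatio >= REMOVED_ITEMS_RATIO) {
            suggested = sleepInterval / removedItemsRatio;
        }
    }
    // Clamp to the configured bounds.
    if (suggested > MAXIMUM_SLEEP_INTERVAL) return (int) MAXIMUM_SLEEP_INTERVAL;
    if (suggested < MINIMUM_SLEEP_INTERVAL) return (int) MINIMUM_SLEEP_INTERVAL;
    return (int) suggested;
}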

View File

@ -10,7 +10,7 @@ import java.util.function.Function;
* You must hold at most one reference to each index
* @param <T>
*/
public class EntryReference<T> implements Castable, Saveable {
public class EntryReference<T> implements Castable, AdvancedSaveable {
private final JCWDatabase.EntryReferenceTools db;
private final long entryIndex;
private final DBTypeParser<T> parser;
@ -19,6 +19,7 @@ public class EntryReference<T> implements Castable, Saveable {
private volatile boolean isHashCached;
private volatile boolean loaded;
private volatile boolean closed;
private volatile boolean isFlushingAllowed;
private final Object hashCacheLock = new Object();
private final Object accessLock = new Object();
private final Object closeLock = new Object();
@ -66,17 +67,29 @@ public class EntryReference<T> implements Castable, Saveable {
* Note that this method won't be called when closing without saving
*/
public void save() {
this.save(false);
}
public void save(boolean isEditFinished) {
synchronized(accessLock) {
if (loaded && !closed) {
try {
if (value instanceof Saveable) {
if (value instanceof AdvancedSaveable) {
((AdvancedSaveable)value).save(isEditFinished);
} else if (value instanceof Saveable) {
((Saveable)value).save();
}
IndexDetails returnedDetails = db.write(entryIndex, parser.getWriter(value));
IndexDetails returnedDetails = this.db.write(entryIndex, parser.getWriter(value));
synchronized(hashCacheLock) {
this.cachedHash = returnedDetails.getHash();
this.isHashCached = true;
}
if (isEditFinished) {
if (!isFlushingAllowed) {
this.db.setFlushingAllowed(entryIndex, true);
this.isFlushingAllowed = true;
}
}
} catch (IOException e) {
e.printStackTrace();
}
@ -93,7 +106,7 @@ public class EntryReference<T> implements Castable, Saveable {
synchronized(accessLock) {
load();
this.value = editFunction.apply(this.value, this);
this.save();
this.save(true);
}
}
@ -106,7 +119,7 @@ public class EntryReference<T> implements Castable, Saveable {
synchronized(accessLock) {
load();
this.value = editFunction.apply(this.value);
this.save();
this.save(true);
}
}
@ -119,7 +132,7 @@ public class EntryReference<T> implements Castable, Saveable {
synchronized(accessLock) {
load();
editFunction.accept(this.value, this);
this.save();
this.save(true);
}
}
@ -132,7 +145,7 @@ public class EntryReference<T> implements Castable, Saveable {
synchronized(accessLock) {
load();
editFunction.accept(this.value);
this.save();
this.save(true);
}
}
@ -148,7 +161,7 @@ public class EntryReference<T> implements Castable, Saveable {
synchronized(hashCacheLock) {
this.isHashCached = false;
}
this.save();
this.save(true);
}
}
@ -177,6 +190,10 @@ public class EntryReference<T> implements Castable, Saveable {
synchronized(accessLock) {
if (!loaded) {
try {
if (this.isFlushingAllowed) {
this.db.setFlushingAllowed(entryIndex, false);
this.isFlushingAllowed = false;
}
this.value = db.read(entryIndex, parser.getReader());
this.loaded = true;
} catch (IOException e) {
@ -201,7 +218,7 @@ public class EntryReference<T> implements Castable, Saveable {
return;
}
save();
save(true);
closed = true;
}
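
Taken together, the EntryReference changes mean that load() disables flushing for the entry's index and every edit path now ends with save(true), which re-enables it. A hypothetical usage sketch (the database instance, the index and the value type are illustrative; exception handling omitted):

EntryReference<LightArrayList<String>> ref = db.get(entryIndex); // db is a JCWDatabase
ref.editValue(chunk -> {
    // ... mutate the loaded value here ...
});
// Roughly: load() calls setFlushingAllowed(entryIndex, false), the callback
// runs, then save(true) writes the value, refreshes the cached hash and
// calls setFlushingAllowed(entryIndex, true) again.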

View File

@ -12,11 +12,10 @@ import java.nio.channels.SeekableByteChannel;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
public class FileIndexManager implements IndexManager {
private final SeekableByteChannel dataFileChannel, metadataFileChannel;
private volatile long metadataFileChannelSize;
private final FileAllocator fileAllocator;
private final ByteBuffer metadataByteBuffer = ByteBuffer.allocateDirect(IndexDetails.TOTAL_BYTES);
private final ByteBuffer maskByteBuffer = ByteBuffer.allocateDirect(Integer.BYTES);
@ -35,13 +34,21 @@ public class FileIndexManager implements IndexManager {
/**
* Edit this using editIndex()
*/
private final LongSet dirtyLoadedIndices, removedIndices;
private final LongSet dirtyLoadedIndices, flushingAllowedIndices, removedIndices;
private long firstAllocableIndex;
public FileIndexManager(Path dataFile, Path metadataFile) throws IOException {
if (Cleaner.DISABLE_CLEANER) {
loadedIndices = new Long2ObjectOpenHashMap<>();
dirtyLoadedIndices = new LongOpenHashSet();
flushingAllowedIndices = new LongOpenHashSet();
removedIndices = new LongOpenHashSet();
} else {
loadedIndices = new Long2ObjectLinkedOpenHashMap<>();
dirtyLoadedIndices = new LongLinkedOpenHashSet();
flushingAllowedIndices = new LongLinkedOpenHashSet();
removedIndices = new LongLinkedOpenHashSet();
}
if (Files.notExists(dataFile)) {
Files.createFile(dataFile);
}
@ -51,17 +58,22 @@ public class FileIndexManager implements IndexManager {
dataFileChannel = Files.newByteChannel(dataFile, StandardOpenOption.READ, StandardOpenOption.WRITE);
metadataFileChannel = Files.newByteChannel(metadataFile, StandardOpenOption.READ, StandardOpenOption.WRITE);
fileAllocator = createFileAllocator(dataFileChannel, metadataFileChannel.position(0));
firstAllocableIndex = metadataFileChannel.size() / (long) IndexDetails.TOTAL_BYTES;
metadataFileChannelSize = metadataFileChannel.size();
firstAllocableIndex = getMetadataFileChannelSize() / (long) IndexDetails.TOTAL_BYTES;
if (firstAllocableIndex == 0) {
firstAllocableIndex = 1;
}
}
private long getMetadataFileChannelSize() throws IOException {
return metadataFileChannelSize;
}
private FileAllocator createFileAllocator(final SeekableByteChannel dataFileChannel, final SeekableByteChannel metadataFileChannel) throws IOException {
Long2IntMap freeBytes = new Long2IntRBTreeMap();
Long2IntMap usedBytes = new Long2IntRBTreeMap();
long firstOffset = 0;
while (metadataFileChannel.position() + IndexDetails.TOTAL_BYTES <= metadataFileChannel.size()) {
while (metadataFileChannel.position() + IndexDetails.TOTAL_BYTES <= getMetadataFileChannelSize()) {
IndexDetails indexDetails = readIndexDetailsAt(metadataFileChannel);
if (indexDetails != null) {
long offset = indexDetails.getOffset();
@ -117,7 +129,7 @@ public class FileIndexManager implements IndexManager {
public <T> IndexDetails set(long index, DBDataOutput<T> data) throws IOException {
checkClosed();
final int dataSize = data.getSize();
final IndexDetails indexDetails = getIndexMetadataUnsafe(index);
IndexDetails indexDetails = getIndexMetadataUnsafe(index);
if (indexDetails == null || indexDetails.getSize() < dataSize) {
// Allocate new space
IndexDetails newDetails = allocateAndWrite(index, data);
@ -133,7 +145,7 @@ public class FileIndexManager implements IndexManager {
fileAllocator.markFree(indexDetails.getOffset() + dataSize, dataSize);
}
// Update index details
editIndex(index, indexDetails, indexDetails.getOffset(), dataSize, indexDetails.getType(), data.calculateHash());
indexDetails = editIndex(index, indexDetails, indexDetails.getOffset(), dataSize, indexDetails.getType(), data.calculateHash());
// Write data
writeExact(indexDetails, data);
// Before returning, return IndexDetails
@ -141,6 +153,16 @@ public class FileIndexManager implements IndexManager {
}
}
@Override
public void setFlushingAllowed(long index, boolean isFlushingAllowed) {
checkClosed();
if (isFlushingAllowed) {
flushingAllowedIndices.add(index);
} else {
flushingAllowedIndices.remove(index);
}
}
@Override
public <T> long add(DBDataOutput<T> data) throws IOException {
checkClosed();
@ -206,6 +228,7 @@ public class FileIndexManager implements IndexManager {
}
synchronized (indicesMapsAccessLock) {
dirtyLoadedIndices.remove(index);
flushingAllowedIndices.remove(index);
loadedIndices.remove(index);
removedIndices.add(index);
}
@ -216,6 +239,7 @@ public class FileIndexManager implements IndexManager {
synchronized (indicesMapsAccessLock) {
removedIndices.remove(index);
dirtyLoadedIndices.remove(index);
flushingAllowedIndices.remove(index);
loadedIndices.remove(index);
}
// Update indices metadata
@ -228,11 +252,14 @@ public class FileIndexManager implements IndexManager {
if (dirtyLoadedIndices.contains(index)) {
indexDetails = loadedIndices.get(index);
dirtyLoadedIndices.remove(index);
flushingAllowedIndices.remove(index);
}
}
if (isDirty) {
// Update indices metadata
SeekableByteChannel metadata = metadataFileChannel.position(index * IndexDetails.TOTAL_BYTES);
long position = index * IndexDetails.TOTAL_BYTES;
resizeMetadataFileChannel(position);
SeekableByteChannel metadata = metadataFileChannel.position(position);
writeIndexDetails(metadata, indexDetails);
}
synchronized (indicesMapsAccessLock) {
@ -293,6 +320,7 @@ public class FileIndexManager implements IndexManager {
synchronized (indicesMapsAccessLock) {
loadedIndices.put(index, details);
dirtyLoadedIndices.add(index);
flushingAllowedIndices.remove(index);
}
}
@ -301,6 +329,7 @@ public class FileIndexManager implements IndexManager {
long newIndex = firstAllocableIndex++;
loadedIndices.put(newIndex, indexDetails);
dirtyLoadedIndices.add(newIndex);
flushingAllowedIndices.remove(newIndex);
removedIndices.remove(newIndex);
return newIndex;
}
@ -316,7 +345,7 @@ public class FileIndexManager implements IndexManager {
// Try to load the details from file
final long metadataPosition = index * IndexDetails.TOTAL_BYTES;
if (metadataPosition + IndexDetails.TOTAL_BYTES > metadataFileChannel.size()) {
if (metadataPosition + IndexDetails.TOTAL_BYTES > getMetadataFileChannelSize()) {
// Avoid underflow exception
return null;
}
@ -374,7 +403,7 @@ public class FileIndexManager implements IndexManager {
}
// Update indices metadata
flushAllIndices();
flushAllFlushableIndices();
// Remove removed indices
removeRemovedIndices();
@ -417,39 +446,56 @@ public class FileIndexManager implements IndexManager {
@Override
public long clean() {
long cleaned = 0;
long tim1 = System.currentTimeMillis();
try {
cleaned += flushAllIndices();
cleaned += flushAllFlushableIndices();
} catch (IOException ex) {
ex.printStackTrace();
}
long tim2 = System.currentTimeMillis();
try {
cleaned += removeRemovedIndices();
} catch (IOException ex) {
ex.printStackTrace();
}
long tim3 = System.currentTimeMillis();
cleaned += cleanExtraIndices();
long tim4 = System.currentTimeMillis();
if (Cleaner.ENABLE_CLEANER_LOGGING) System.out.println("[CLEANER] FileIndexManager CLEAN_TIME: " + (tim2-tim1) + "," + (tim3-tim2) + "," + (tim4-tim3));
return cleaned;
}
private long flushAllIndices() throws IOException {
private long flushAllFlushableIndices() throws IOException {
long flushedIndices = 0;
SeekableByteChannel metadata = metadataFileChannel;
long lastIndex = -2;
synchronized (indicesMapsAccessLock) {
for (long index : dirtyLoadedIndices) {
if (!flushingAllowedIndices.contains(index)) {
IndexDetails indexDetails = loadedIndices.get(index);
long position = index * IndexDetails.TOTAL_BYTES;
resizeMetadataFileChannel(position);
if (index - lastIndex != 1) {
metadata = metadata.position(index * IndexDetails.TOTAL_BYTES);
metadata = metadata.position(position);
}
writeIndexDetails(metadata, indexDetails);
lastIndex = index;
flushedIndices++;
}
}
dirtyLoadedIndices.clear();
dirtyLoadedIndices.addAll(flushingAllowedIndices);
flushingAllowedIndices.clear();
}
return flushedIndices;
}
private void resizeMetadataFileChannel(long position) {
if (position + IndexDetails.TOTAL_BYTES > metadataFileChannelSize) {
metadataFileChannelSize = position + IndexDetails.TOTAL_BYTES;
}
}
private long removeRemovedIndices() throws IOException {
SeekableByteChannel metadata = metadataFileChannel;
synchronized (indicesMapsAccessLock) {
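
flushAllFlushableIndices() above avoids a position() call per record: it remembers the last flushed index and only seeks when the next index is not adjacent, relying on sequential writes to advance the channel. A self-contained sketch of that pattern (hypothetical class; a single sample record is reused for brevity):

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.SeekableByteChannel;

final class SequentialRecordWriter {
    // Writes a fixed-size record at index * recordSize for each index,
    // seeking only when there is a gap between consecutive indices.
    static void writeRecords(SeekableByteChannel channel, long[] sortedIndices,
                             ByteBuffer record, int recordSize) throws IOException {
        long lastIndex = -2; // never adjacent to a real index
        for (long index : sortedIndices) {
            if (index - lastIndex != 1) {
                channel.position(index * recordSize); // gap: seek explicitly
            }
            record.rewind();
            channel.write(record); // a sequential write advances the position
            lastIndex = index;
        }
    }
}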

View File

@ -12,6 +12,7 @@ public interface IndexManager extends Cleanable {
<T> long add(DBDataOutput<T> writer) throws IOException;
<T> FullIndexDetails addAndGetDetails(DBDataOutput<T> writer) throws IOException;
<T> IndexDetails set(long index, DBDataOutput<T> writer) throws IOException;
void setFlushingAllowed(long index, boolean isFlushingAllowed);
void delete(long index) throws IOException;
boolean has(long index);
void close() throws IOException;

View File

@ -1,7 +1,5 @@
package org.warp.jcwdb;
import it.unimi.dsi.fastutil.longs.LongArrayList;
import java.io.IOException;
import java.nio.file.Path;
@ -154,6 +152,10 @@ public class JCWDatabase implements AutoCloseable, Cleanable {
public <T> IndexDetails write(long index, DBDataOutput<T> writer) throws IOException {
return indices.set(index, writer);
}
public void setFlushingAllowed(long index, boolean isFlushingAllowed) {
indices.setFlushingAllowed(index, isFlushingAllowed);
}
}
@SuppressWarnings("unchecked")

View File

@ -3,6 +3,7 @@ package org.warp.jcwdb;
import it.unimi.dsi.fastutil.longs.LongArrayList;
import it.unimi.dsi.fastutil.objects.ObjectArrayList;
import java.io.IOError;
import java.io.IOException;
import java.util.*;
import java.util.function.Consumer;
@ -112,7 +113,7 @@ public class LightArrayList<T> implements LightList<T> {
try {
action.accept(db.get(index));
} catch (IOException e) {
throw (RuntimeException) new RuntimeException().initCause(e);
throw new IOError(e);
}
}
}
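
The hunk above replaces the RuntimeException-with-cause wrapper with java.io.IOError, the unchecked error meant for serious I/O failures, so the original IOException stays available as the cause. A hypothetical caller-side sketch (list construction omitted):

try {
    list.forEachReference(ref -> System.out.println(ref.getValueReadOnly()));
} catch (java.io.IOError err) {
    Throwable cause = err.getCause(); // the original IOException
    cause.printStackTrace();
}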

View File

@ -4,6 +4,7 @@ import it.unimi.dsi.fastutil.ints.IntArrayList;
import it.unimi.dsi.fastutil.longs.LongArrayList;
import it.unimi.dsi.fastutil.objects.ObjectArrayList;
import java.io.IOError;
import java.io.IOException;
import java.util.*;
import java.util.function.Consumer;
@ -16,8 +17,8 @@ public class LightBigList<T> implements LightList<T>, Saveable {
public final LongArrayList chunks;
public final IntArrayList chunkSizes;
private final JCWDatabase db;
private LightList<T> cachedChunk;
private EntryReference<LightList<T>> cachedChunkRef;
private LightArrayList<T> cachedChunk;
private EntryReference<LightArrayList<T>> cachedChunkRef;
private long cachedChunkIndex = -1;
private int cachedChunkNumber = -1;
private final Object cachedChunkLock = new Object();
@ -132,8 +133,8 @@ public class LightBigList<T> implements LightList<T>, Saveable {
if (o != null) {
for (long chunkIndex : chunks) {
try {
EntryReference<LightList<T>> chunkRef = db.get(chunkIndex);
LightList<T> chunk = chunkRef.getValueReadOnly();
EntryReference<LightArrayList<T>> chunkRef = db.get(chunkIndex);
LightArrayList<T> chunk = chunkRef.getValueReadOnly();
if (chunk.contains(o)) {
return true;
}
@ -175,13 +176,13 @@ public class LightBigList<T> implements LightList<T>, Saveable {
@Override
public void forEachReference(Consumer<? super EntryReference<T>> action) {
Objects.requireNonNull(action);
for (long chunkIndex : this.chunks) {
try {
EntryReference<LightList<T>> chunkRef = db.get(chunkIndex);
LightList<T> chunk = chunkRef.getValueReadOnly();
chunk.forEachReference(action);
} catch (IOException ex) {
throw (NullPointerException) new NullPointerException().initCause(ex);
// Iterate through all chunks
for (int i = 0; i < chunks.size(); i++) {
synchronized (cachedChunkLock) {
if (cachedChunkNumber != i) {
prepareAccessToChunk(i);
}
cachedChunk.forEachReference(action);
}
}
}
@ -194,6 +195,9 @@ public class LightBigList<T> implements LightList<T>, Saveable {
@Deprecated
@Override
public T[] toArray() {
if (true) {
throw new RuntimeException("toArray() isn't implemented!");
}
T[] result = (T[]) new Object[this.size()];
long currentOffset = 0;
@ -206,8 +210,8 @@ public class LightBigList<T> implements LightList<T>, Saveable {
final long chunkIndex = chunks.getLong(i);
try {
EntryReference<LightList<T>> chunkRef = db.get(chunkIndex);
LightList<T> chunk = chunkRef.getValueReadOnly();
EntryReference<LightArrayList<T>> chunkRef = db.get(chunkIndex);
LightArrayList<T> chunk = chunkRef.getValueReadOnly();
for (int i1 = 0; i1 < chunk.size(); i1++) {
result[(int)(chunkStartOffset + i1)] = chunk.get(i1);
}
@ -276,7 +280,7 @@ public class LightBigList<T> implements LightList<T>, Saveable {
}
try {
EntryReference<LightList<T>> chunkRef = db.get(chunkIndex);
EntryReference<LightArrayList<T>> chunkRef = db.get(chunkIndex);
chunkRef.editValue((chunk) -> {
result.var = chunk.remove(relativeOffset);
});
@ -414,7 +418,7 @@ public class LightBigList<T> implements LightList<T>, Saveable {
}
try {
EntryReference<LightList<T>> chunkRef = db.get(chunkIndex);
EntryReference<LightArrayList<T>> chunkRef = db.get(chunkIndex);
chunkRef.editValue((chunk) -> {
chunk.set(relativeOffset, element);
wrapper.var = element;
@ -458,7 +462,7 @@ public class LightBigList<T> implements LightList<T>, Saveable {
// Get chunk index
final long chunkIndex = chunks.getLong(i);
EntryReference<LightList<T>> chunkRef = db.get(chunkIndex);
EntryReference<LightArrayList<T>> chunkRef = db.get(chunkIndex);
final int foundIndex = chunkRef.getValueReadOnly().indexOfEntry(ref);
if (foundIndex >= 0) {
return currentOffset + foundIndex;
@ -487,7 +491,7 @@ public class LightBigList<T> implements LightList<T>, Saveable {
// Get chunk index
final long chunkIndex = chunks.getLong(i);
EntryReference<LightList<T>> chunkRef = db.get(chunkIndex);
EntryReference<LightArrayList<T>> chunkRef = db.get(chunkIndex);
final int foundIndex = chunkRef.getValueReadOnly().lastIndexOfEntry(ref);
if (foundIndex >= 0) {
return currentOffset + foundIndex;
@ -543,26 +547,20 @@ public class LightBigList<T> implements LightList<T>, Saveable {
@Override
public boolean removeIf(Predicate<? super T> filter) {
Objects.requireNonNull(filter);
final VariableWrapper<Boolean> result = new VariableWrapper(false);
boolean result = false;
// Iterate through all chunks
for (int i = 0; i < chunks.size(); i++) {
try {
final int chunkOffset = i;
// Get chunk index
final long chunkIndex = chunks.getLong(i);
EntryReference<LightList<T>> chunkRef = db.get(chunkIndex);
chunkRef.editValue((chunk) -> {
boolean removed = chunk.removeIf(filter);
if (removed) {
result.var = true;
chunkSizes.set(chunkOffset, chunk.size());
synchronized (cachedChunkLock) {
if (cachedChunkNumber != i) {
prepareAccessToChunk(i);
}
});
} catch (IOException ex) {
throw (NullPointerException) new NullPointerException().initCause(ex);
if (cachedChunk.removeIf(filter)) {
result = true;
chunkSizes.set(cachedChunkNumber, cachedChunk.size());
}
}
return result.var;
}
return result;
}
@Override
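
Both forEachReference and removeIf now reuse the cached chunk guarded by cachedChunkLock instead of fetching every chunk through db.get(). prepareAccessToChunk(i) is not shown in this diff; it is assumed to load chunk i into cachedChunk, cachedChunkRef and cachedChunkNumber. A hypothetical helper expressing the pattern:

// Not part of the commit: runs an action against chunk i through the shared cache.
private void withChunk(int i, java.util.function.Consumer<LightArrayList<T>> action) {
    synchronized (cachedChunkLock) {
        if (cachedChunkNumber != i) {
            prepareAccessToChunk(i); // assumed to populate the cachedChunk fields
        }
        action.accept(cachedChunk);
    }
}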

View File

@ -1,12 +1,7 @@
package org.warp.jcwdb;
import it.unimi.dsi.fastutil.longs.Long2LongLinkedOpenHashMap;
import it.unimi.dsi.fastutil.longs.Long2LongMap;
import java.io.IOException;
import java.nio.file.Path;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
public class MixedIndexDatabase implements IndexManager {
private final FileIndexManager fileIndices;
@ -62,6 +57,14 @@ public class MixedIndexDatabase implements IndexManager {
return fileIndices.set(index, writer);
}
}
@Override
public void setFlushingAllowed(long index, boolean isFlushingAllowed) {
if (cacheIndices.has(index)) {
cacheIndices.setFlushingAllowed(index, isFlushingAllowed);
} else {
fileIndices.setFlushingAllowed(index, isFlushingAllowed);
}
}
@Override
public void delete(long index) throws IOException {

View File

@ -7,14 +7,14 @@ import org.warp.jcwdb.LightList;
import it.unimi.dsi.fastutil.objects.ObjectArrayList;
import it.unimi.dsi.fastutil.objects.ObjectList;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.function.Predicate;
public class App {
static long time3;
public static void main(String[] args) {
try {
public static void main(String[] args) throws IOException {
if (args.length > 2 && Boolean.parseBoolean(args[2])) {
Files.delete(Paths.get(args[0]));
Files.delete(Paths.get(args[1]));
@ -33,17 +33,17 @@ public class App {
System.out.println("Time elapsed: " + (time1 - time01));
System.out.println("Root size: " + root.size());
System.out.println("Root:");
// for (int i = 0; i < root.size(); i++) {
// System.out.println(" - " + root.get(i));
// }
// for (int i = 0; i < root.size(); i++) {
// System.out.println(" - " + root.get(i));
// }
long prectime = System.currentTimeMillis();
for (int i = 0; i < 20000000/* 2000000 */; i++) {
for (int i = 0; i < 2000000; i++) {
Animal animal = new StrangeAnimal(i % 40);
root.add(animal);
root.addEntry(animal);
if (i > 0 && i % 200000 == 0) {
long precprectime = prectime;
prectime = System.currentTimeMillis();
System.out.println("Element " + i + " (" + (prectime - precprectime) + "ms)");
System.out.println("Element " + i + " (" + (prectime - precprectime) + "ms)" + " Total Time: " + (prectime - time1));
}
}
long time2 = System.currentTimeMillis();
@ -54,11 +54,14 @@ public class App {
+ ((Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1024 / 1024) + "MB");
long time2_0 = System.currentTimeMillis();
System.out.println("Filtering strings...");
//root.removeIf(Animal::hasFourLegs);
long oldSize = root.size();
root.removeIf(Animal::hasFourLegs);
long time2_1 = System.currentTimeMillis();
System.out.println("RemoveIf(x) removed items: " + (oldSize - root.size()));
System.out.println("Time elapsed: " + (time2_1 - time2_0));
ObjectList<Animal> results = new ObjectArrayList<>();
System.out.println("Retrieving items...");
root.forEachReference((valueReference) -> {
Animal value = valueReference.getValueReadOnly();
if (Animal.hasFourLegs(value)) {
@ -73,9 +76,8 @@ public class App {
+ ((Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1024 / 1024) + "MB");
System.out.println("Used memory: " + ((Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1024 / 1024) + "MB");
System.out.println("Cleaning database (to reduce the amount of used memory and detect memory leaks)...");
long removedItems = 0;//db.clean();
db.clean();
time3 = System.currentTimeMillis();
System.out.println("Removed items: " + removedItems);
System.out.println("Used memory: " + ((Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1024 / 1024) + "MB");
System.out.println("Time elapsed: " + (time3 - time2_2));
System.out.println("Saving database...");
@ -91,9 +93,6 @@ public class App {
db.close();
}
}
} catch (Exception ex) {
ex.printStackTrace();
}
}
public static <T> Predicate<T> not(Predicate<T> t) {