Reimplemented database using Reflection

This commit is contained in:
Andrea Cavalli 2019-01-10 23:24:48 +01:00
parent 4414917432
commit 7123228027
39 changed files with 642 additions and 2348 deletions

View File

@ -40,6 +40,12 @@
<artifactId>zero-allocation-hashing</artifactId>
<version>0.8</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.commons/commons-lang3 -->
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>3.8.1</version>
</dependency>
</dependencies>
<build>

View File

@ -1,73 +0,0 @@
package org.warp.jcwdb;
import java.io.IOException;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
/**
 * {@link IndexManager} implementation intended to serve entries from an
 * in-memory cache.
 * <p>
 * Every method is still an unimplemented stub: readers return
 * {@code null}/{@code 0}/{@code false} and mutators are no-ops.
 * NOTE(review): presumably meant to wrap another IndexManager as a caching
 * layer — confirm whether this class is still needed before relying on it.
 */
public class CacheIndexManager implements IndexManager {

    public CacheIndexManager() {
    }

    /** Stub: never returns a value. */
    @Override
    public <T> T get(long index, DBReader<T> reader) {
        // TODO: implement
        return null;
    }

    /** Stub: always reports type 0. */
    @Override
    public int getType(long index) {
        // TODO: implement
        return 0;
    }

    /** Stub: always reports hash 0. */
    @Override
    public long getHash(long index) {
        // TODO: implement
        return 0;
    }

    /** Stub: pretends the entry was added at index 0. */
    @Override
    public <T> long add(DBDataOutput<T> writer) {
        // TODO: implement
        return 0;
    }

    /** Stub: never returns details. */
    @Override
    public <T> FullIndexDetails addAndGetDetails(DBDataOutput<T> writer) {
        // TODO: implement
        return null;
    }

    /** Stub: never returns details. */
    @Override
    public <T> IndexDetails set(long index, DBDataOutput<T> writer) {
        // TODO: implement
        return null;
    }

    /** Stub: flush-permission changes are ignored. */
    @Override
    public void setFlushingAllowed(long index, boolean isUnloadingAllowed) {
        // TODO: implement
    }

    /** Stub: deletions are ignored. */
    @Override
    public void delete(long index) {
        // TODO: implement
    }

    /** Stub: always reports the index as absent. */
    @Override
    public boolean has(long index) {
        // TODO: implement
        return false;
    }

    /** Stub: closing is a no-op. */
    @Override
    public void close() {
        // TODO: implement
    }

    /** Stub: nothing to clean, always 0. */
    @Override
    public long clean() {
        return 0;
    }
}

View File

@ -1,34 +0,0 @@
package org.warp.jcwdb;
/**
 * Describes a serialized value that is ready to be written to the database:
 * its size in bytes, its numeric type id, its content hash and the writer
 * that emits the actual bytes.
 *
 * @param <T> type of the value being written
 */
public interface DBDataOutput<T> {

    /** @return serialized size in bytes */
    int getSize();

    /** @return numeric type id of the value (see DBStandardTypes) */
    int getType();

    /** @return content hash of the serialized value */
    long calculateHash();

    /** @return writer that emits the serialized bytes */
    DBWriter<T> getWriter();

    /**
     * Creates a DBDataOutput whose accessors simply return the supplied,
     * already-computed values.
     * Note: calculateHash() returns the precomputed hash and never
     * re-hashes the data.
     */
    static <T> DBDataOutput<T> create(DBWriter<T> writer, int type, int size, long hash) {
        return new DBDataOutput<T>() {
            @Override
            public int getSize() {
                return size;
            }

            @Override
            public int getType() {
                return type;
            }

            @Override
            public long calculateHash() {
                return hash;
            }

            @Override
            public DBWriter<T> getWriter() {
                return writer;
            }
        };
    }
}

View File

@ -1,58 +0,0 @@
package org.warp.jcwdb;
import java.io.ByteArrayOutputStream;
import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.io.Output;
import net.openhft.hashing.LongHashFunction;
/**
 * Fallback parser that (de)serializes arbitrary objects with Kryo and hashes
 * the serialized bytes with xxHash.
 * <p>
 * NOTE(review): the shared static Kryo instance is used without
 * synchronization; Kryo instances are not thread-safe — confirm all callers
 * are single-threaded or add locking/pooling.
 */
public class DBGenericObjectParser extends DBTypeParserImpl<Object> implements DBTypedObjectParser<Object> {
    private static final LongHashFunction hashFunction = net.openhft.hashing.LongHashFunction.xx();
    private static final Kryo kryo = new Kryo();
    static {
        // Allow serializing classes that were never explicitly registered.
        kryo.setRegistrationRequired(false);
    }

    /** Reader that lets Kryo recover both the class and the object. */
    private static final DBReader<Object> defaultReader = (i, size) -> {
        return kryo.readClassAndObject(i);
    };

    public DBReader<Object> getReader() {
        return defaultReader;
    }

    /**
     * Serializes value with Kryo and returns the raw bytes.
     * Extracted so getWriter and calculateHash share one implementation
     * (previously duplicated).
     */
    private static byte[] serialize(final Object value) {
        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        final Output tmpO = new Output(baos);
        try {
            kryo.writeClassAndObject(tmpO, value);
            tmpO.flush();
        } finally {
            // Always release the Output, even if Kryo throws.
            tmpO.close();
        }
        return baos.toByteArray();
    }

    /**
     * Serializes value once and builds a DBDataOutput carrying the bytes,
     * their length and their xxHash.
     */
    public DBDataOutput<Object> getWriter(final Object value) {
        final byte[] bytes = serialize(value);
        final long hash = hashFunction.hashBytes(bytes);
        return DBDataOutput.create((o) -> {
            o.write(bytes);
        }, DBStandardTypes.GENERIC_OBJECT, bytes.length, hash);
    }

    /** @return xxHash of the Kryo-serialized form of value */
    @Override
    public long calculateHash(Object value) {
        return hashFunction.hashBytes(serialize(value));
    }

    /**
     * Registers clazz with Kryo under id + 100 (ids 0-99 are reserved by
     * Kryo/this parser); rejects ids that would overflow the offset.
     */
    @Override
    public <U> void registerClass(Class<U> clazz, int id) {
        if (id >= Integer.MAX_VALUE - 100) {
            throw new IndexOutOfBoundsException();
        }
        kryo.register(clazz, id + 100);
    }
}

View File

@ -1,45 +0,0 @@
package org.warp.jcwdb;
import it.unimi.dsi.fastutil.longs.LongArrayList;
/**
 * Parser that persists a LightArrayList as a flat sequence of element
 * pointers (one long per element).
 *
 * @param <T> element type of the list
 */
public class DBLightArrayListParser<T> extends DBTypeParserImpl<LightArrayList<T>> {
    private final JCWDatabase db;

    public DBLightArrayListParser(JCWDatabase db) {
        this.db = db;
    }

    /** Decodes size / Long.BYTES element pointers into a LightArrayList. */
    public DBReader<LightArrayList<T>> getReader() {
        return (input, size) -> {
            final long pointerCount = size / Long.BYTES;
            final LongArrayList pointers = new LongArrayList();
            for (int n = 0; n < pointerCount; n++) {
                pointers.add(input.readLong());
            }
            return new LightArrayList<T>(db, pointers);
        };
    }

    /** Encodes every element pointer as a long; size is count * Long.BYTES. */
    public DBDataOutput<LightArrayList<T>> getWriter(final LightArrayList<T> value) {
        final int pointerCount = value.size();
        return DBDataOutput.create((out) -> {
            final LongArrayList pointers = value.internalList;
            for (int n = 0; n < pointerCount; n++) {
                out.writeLong(pointers.getLong(n));
            }
        }, DBStandardTypes.LIGHT_LIST_ARRAY, pointerCount * Long.BYTES, calculateHash(value));
    }

    /** Hash is simply the backing LongArrayList's hashCode. */
    @Override
    public long calculateHash(LightArrayList<T> value) {
        return value.internalList.hashCode();
    }

    @Override
    public String toString() {
        return "DBLightArrayListParser{" + "db=" + db + '}';
    }
}

View File

@ -1,44 +0,0 @@
package org.warp.jcwdb;
import it.unimi.dsi.fastutil.ints.IntArrayList;
import it.unimi.dsi.fastutil.longs.LongArrayList;
/**
 * Parser that persists a LightBigList as a flat sequence of
 * (chunk pointer, chunk size) pairs.
 *
 * @param <T> element type of the list
 */
public class DBLightBigListParser<T> extends DBTypeParserImpl<LightBigList<T>> {
    private final JCWDatabase db;

    public DBLightBigListParser(JCWDatabase db) {
        this.db = db;
    }

    /** Decodes size / (Long.BYTES + Integer.BYTES) pairs into a list. */
    public DBReader<LightBigList<T>> getReader() {
        return (input, size) -> {
            final long pairCount = size / (Long.BYTES + Integer.BYTES);
            final LongArrayList chunkPointers = new LongArrayList();
            final IntArrayList sizes = new IntArrayList();
            for (int n = 0; n < pairCount; n++) {
                chunkPointers.add(input.readLong());
                sizes.add(input.readInt());
            }
            return new LightBigList<>(db, chunkPointers, sizes);
        };
    }

    /** Encodes each chunk as its pointer followed by its size. */
    public DBDataOutput<LightBigList<T>> getWriter(final LightBigList<T> value) {
        final int pairCount = value.chunksCount();
        return DBDataOutput.create((out) -> {
            final LongArrayList chunkPointers = value.chunks;
            final IntArrayList sizes = value.chunkSizes;
            for (int n = 0; n < pairCount; n++) {
                out.writeLong(chunkPointers.getLong(n));
                out.writeInt(sizes.getInt(n));
            }
        }, DBStandardTypes.LIGHT_LIST_BIG, pairCount * (Long.BYTES + Integer.BYTES), calculateHash(value));
    }

    /** Packs the two backing lists' hash codes into one 64-bit value. */
    @Override
    public long calculateHash(LightBigList<T> value) {
        final long high = ((long) value.chunks.hashCode()) << 32;
        final long low = value.chunkSizes.hashCode() & 0xffffffffL;
        return high | low;
    }
}

View File

@ -1,24 +0,0 @@
package org.warp.jcwdb;
/**
 * Registry of the built-in type ids. Every standard id lives in the
 * reserved 0xFFFFF000 namespace.
 */
public class DBStandardTypes {
    /** Marker prefix shared by every standard type id. */
    private static final int STD = 0xFFFFF000;

    public static final int BOOLEAN          = STD | 0x000;
    public static final int BYTE             = STD | 0x001;
    public static final int SHORT            = STD | 0x002;
    public static final int CHAR             = STD | 0x003;
    public static final int INTEGER          = STD | 0x004;
    public static final int FLOAT            = STD | 0x005;
    public static final int DOUBLE           = STD | 0x006;
    public static final int STRING           = STD | 0x007;
    public static final int BYTE_ARRAY       = STD | 0x008;
    public static final int LIGHT_LIST_ARRAY = STD | 0x009;
    public static final int LIGHT_LIST_BIG   = STD | 0x00A;
    public static final int GENERIC_OBJECT   = STD | 0x00B;

    /**
     * Registers the parsers for the standard container/string types, plus
     * the generic Kryo-based fallback for everything else.
     */
    public static void registerStandardTypes(JCWDatabase db, TypesManager typesManager) {
        typesManager.registerType(String.class, STRING, new DBStringParser());
        typesManager.registerType(LightArrayList.class, LIGHT_LIST_ARRAY, new DBLightArrayListParser(db));
        typesManager.registerType(LightBigList.class, LIGHT_LIST_BIG, new DBLightBigListParser(db));
        typesManager.registerTypeFallback(new DBGenericObjectParser());
    }
}

View File

@ -1,27 +0,0 @@
package org.warp.jcwdb;
import java.nio.charset.StandardCharsets;
import net.openhft.hashing.LongHashFunction;
/**
 * Parser that stores strings as UTF-16LE bytes and hashes them with xxHash.
 */
public class DBStringParser extends DBTypeParserImpl<String> {
    private static final LongHashFunction hashFunction = net.openhft.hashing.LongHashFunction.xx();

    /** Reader: decode exactly size bytes as UTF-16LE. */
    private static final DBReader<String> defaultReader = (i, size) -> {
        return new String(i.readBytes(size), StandardCharsets.UTF_16LE);
    };

    public DBReader<String> getReader() {
        return defaultReader;
    }

    /**
     * Encodes the string once and reuses the bytes for the payload, the
     * size and the hash (the previous version encoded the string three
     * times: once for the size via length()*2, once in the writer lambda
     * and once in calculateHash).
     */
    public DBDataOutput<String> getWriter(final String value) {
        final byte[] bytes = value.getBytes(StandardCharsets.UTF_16LE);
        return DBDataOutput.create((o) -> {
            o.write(bytes);
        }, DBStandardTypes.STRING, bytes.length, hashFunction.hashBytes(bytes));
    }

    /** @return xxHash of the UTF-16LE encoding of value */
    @Override
    public long calculateHash(String value) {
        return hashFunction.hashBytes(value.getBytes(StandardCharsets.UTF_16LE));
    }
}

View File

@ -1,7 +0,0 @@
package org.warp.jcwdb;
/**
 * Converts values of type T to and from their on-disk representation.
 *
 * @param <T> parsed value type
 */
public interface DBTypeParser<T> extends Castable {
    /** @return reader that deserializes a value of type T */
    DBReader<T> getReader();

    /** @return serialized form (writer, type id, size and hash) of value */
    DBDataOutput<T> getWriter(final T value);

    /** @return content hash of value's serialized form */
    long calculateHash(final T value);
}

View File

@ -1,9 +0,0 @@
package org.warp.jcwdb;
/**
 * Base class for DBTypeParser implementations, supplying the Castable
 * behavior.
 *
 * @param <T> parsed value type
 */
public abstract class DBTypeParserImpl<T> implements DBTypeParser<T> {
    // NOTE(review): this casts the parser itself to T (the parsed value
    // type), which only type-checks because of erasure — presumably callers
    // use Castable to sidestep generics; confirm intended usage.
    @SuppressWarnings("unchecked")
    @Override
    public T cast() {
        return (T) this;
    }
}

View File

@ -1,5 +0,0 @@
package org.warp.jcwdb;
/**
 * A DBTypeParser that can additionally map user classes to numeric type ids
 * for its underlying serializer.
 *
 * @param <T> parsed value type
 */
public interface DBTypedObjectParser<T> extends DBTypeParser<T> {
    /**
     * Registers clazz under the given numeric id.
     *
     * @param clazz class to register
     * @param type numeric id to associate with the class
     */
    public <U> void registerClass(Class<U> clazz, int type);
}

View File

@ -2,6 +2,6 @@ package org.warp.jcwdb;
import com.esotericsoftware.kryo.io.Output;
public interface DBWriter<T> {
public interface DBWriter {
void write(Output o);
}

View File

@ -1,60 +0,0 @@
package org.warp.jcwdb;
import java.io.IOException;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.function.Function;
/**
 * A container whose value of type T can be loaded, viewed, edited and
 * replaced.
 *
 * @param <T> type of the contained value
 */
public interface Editable<T> {
    /**
     * Recommended way to edit the value.
     * The function receives the current value and this container (as a
     * Saveable) and returns the new value.
     *
     * @param editFunction function computing the new value
     */
    void editValue(BiFunction<T, Saveable, T> editFunction);

    /**
     * Recommended way to edit the value.
     *
     * @param editFunction function computing the new value from the old one
     */
    void editValue(Function<T, T> editFunction);

    /**
     * Recommended way to edit the value in place.
     *
     * @param editFunction consumer receiving the value and this container
     */
    void editValue(BiConsumer<T, Saveable> editFunction);

    /**
     * Recommended way to edit the value in place.
     *
     * @param editFunction consumer receiving the value
     */
    void editValue(Consumer<T> editFunction);

    /**
     * Recommended way to view the value without modifying it.
     *
     * @param viewFunction consumer receiving the value
     */
    void viewValue(Consumer<T> viewFunction);

    /**
     * Substitute the old value with a new one
     *
     * @param val the new value
     */
    void setValue(T val);

    /**
     * DO NOT ATTEMPT TO MODIFY THE VALUE RETURNED
     *
     * @return the current value, to be treated as read-only
     */
    T getValueReadOnlyUnsafe();
}

View File

@ -1,266 +0,0 @@
package org.warp.jcwdb;
import java.io.IOException;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.function.Function;
/**
* You must have only a maximum of 1 reference for each index
* @param <T>
*/
/**
 * Reference to a single database entry that lazily loads and caches its
 * value and hash, and writes edits back through
 * {@link JCWDatabase.EntryReferenceTools}.
 * You must have only a maximum of 1 reference for each index.
 *
 * @param <T> type of the referenced value
 */
public class EntryReference<T> implements Editable<T>, Saveable, Castable {
    private final JCWDatabase.EntryReferenceTools db;
    private final long entryIndex;
    private final DBTypeParser<T> parser;
    // Cached value; only meaningful while loaded == true. Guarded by accessLock.
    private T value;
    // Cached hash; only meaningful while isHashCached == true. Guarded by hashCacheLock.
    private long cachedHash;
    private volatile boolean isHashCached;
    private volatile boolean loaded;
    private volatile boolean closed;
    private volatile boolean isFlushingAllowed;
    private final Object hashCacheLock = new Object();
    private final Object accessLock = new Object();
    private final Object closeLock = new Object();

    /**
     * Creates an unloaded reference; the value is read from the database on
     * first access.
     * NOTE(review): the hash parameter is ignored here and the hash is not
     * cached — confirm whether that is intentional.
     */
    public EntryReference(JCWDatabase.EntryReferenceTools db, long entryId, long hash, DBTypeParser<T> parser) {
        this.loaded = false;
        this.isHashCached = false;
        this.db = db;
        this.entryIndex = entryId;
        this.parser = parser;
        this.value = null;
    }

    /**
     * Creates a reference whose value is already known; both the value and
     * the hash are cached immediately.
     */
    public EntryReference(JCWDatabase.EntryReferenceTools db, long entryId, long hash, DBTypeParser<T> parser, T value) {
        this.loaded = true;
        this.isHashCached = true;
        this.db = db;
        this.entryIndex = entryId;
        this.parser = parser;
        this.cachedHash = hash;
        this.value = value;
    }

    public DBTypeParser<T> getParser() {
        return parser;
    }

    public long getIndex() {
        return entryIndex;
    }

    /**
     * Returns the cached hash when available, otherwise recomputes it from
     * the (lazily loaded) value. The recomputed hash is not stored back
     * into the cache.
     */
    public long calculateHash() {
        synchronized(accessLock) {
            load();
            synchronized(hashCacheLock) {
                if (isHashCached) {
                    return cachedHash;
                }
            }
            return parser.calculateHash(this.value);
        }
    }

    public boolean isClosed() {
        return closed;
    }

    /**
     * Note that this method won't be called when closing without saving
     */
    public void save() {
        this.save(false);
    }

    public void saveAndFlush() {
        this.save(true);
    }

    /**
     * Writes the value back to the database if it is loaded and the
     * reference is still open, then refreshes the cached hash from the
     * write result. With flush == true the entry is also re-allowed to be
     * flushed/unloaded by the index manager.
     * NOTE(review): IOExceptions are only printed, not propagated — the
     * caller cannot detect a failed save.
     */
    private void save(boolean flush) {
        synchronized(accessLock) {
            if (loaded && !closed) {
                try {
                    // Let a Saveable value persist its own children first.
                    if (value instanceof Saveable) {
                        if (flush) {
                            ((Saveable)value).saveAndFlush();
                        } else {
                            ((Saveable)value).save();
                        }
                    }
                    IndexDetails returnedDetails = this.db.write(entryIndex, parser.getWriter(value));
                    synchronized(hashCacheLock) {
                        this.cachedHash = returnedDetails.getHash();
                        this.isHashCached = true;
                    }
                    if (flush) {
                        if (!isFlushingAllowed) {
                            this.db.setFlushingAllowed(entryIndex, true);
                            this.isFlushingAllowed = true;
                        }
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
    }

    /**
     * Recommended way to edit the value: the function receives the current
     * value and this reference, and returns the replacement; the result is
     * saved and flushed immediately.
     *
     * @param editFunction function computing the new value
     */
    public void editValue(BiFunction<T, Saveable, T> editFunction) {
        synchronized(accessLock) {
            load();
            this.value = editFunction.apply(this.value, this);
            this.save(true);
        }
    }

    /**
     * Recommended way to edit the value; the result is saved and flushed
     * immediately.
     *
     * @param editFunction function computing the new value from the old one
     */
    public void editValue(Function<T, T> editFunction) {
        synchronized(accessLock) {
            load();
            this.value = editFunction.apply(this.value);
            this.save(true);
        }
    }

    /**
     * Recommended way to edit the value in place; the value is saved and
     * flushed after the consumer returns.
     *
     * @param editFunction consumer receiving the value and this reference
     */
    public void editValue(BiConsumer<T, Saveable> editFunction) {
        synchronized(accessLock) {
            load();
            editFunction.accept(this.value, this);
            this.save(true);
        }
    }

    /**
     * Recommended way to edit the value in place; the value is saved and
     * flushed after the consumer returns.
     *
     * @param editFunction consumer receiving the value
     */
    public void editValue(Consumer<T> editFunction) {
        synchronized(accessLock) {
            load();
            editFunction.accept(this.value);
            this.save(true);
        }
    }

    /**
     * Recommended way to view the value.
     * DO NOT EDIT THE VALUE
     *
     * @param viewFunction consumer receiving the value
     */
    public void viewValue(Consumer<T> viewFunction) {
        synchronized(accessLock) {
            load();
            viewFunction.accept(this.value);
        }
    }

    /**
     * Substitute the old value with a new one; the hash cache is
     * invalidated and the new value is saved and flushed immediately.
     *
     * @param val the new value
     */
    public void setValue(T val) {
        synchronized(accessLock) {
            this.loaded = true;
            this.value = val;
            synchronized(hashCacheLock) {
                this.isHashCached = false;
            }
            this.save(true);
        }
    }

    /**
     * Use editValue instead. READ ONLY!!
     *
     * @return the current value, read-only
     */
    @Deprecated()
    public T getValue() {
        return getValueReadOnlyUnsafe();
    }

    /**
     * DO NOT ATTEMPT TO MODIFY THE VALUE RETURNED
     *
     * @return the current (lazily loaded) value
     */
    public T getValueReadOnlyUnsafe() {
        synchronized(accessLock) {
            load();
            return this.value;
        }
    }

    /**
     * Loads the value from the database if not already loaded, first
     * disabling flushing for this entry so it cannot be unloaded mid-read.
     * NOTE(review): an IOException is rethrown wrapped in a
     * NullPointerException — an unusual choice; confirm callers expect it.
     */
    private void load() {
        synchronized(accessLock) {
            if (!loaded) {
                try {
                    if (this.isFlushingAllowed) {
                        this.db.setFlushingAllowed(entryIndex, false);
                        this.isFlushingAllowed = false;
                    }
                    this.value = db.read(entryIndex, parser.getReader());
                    this.loaded = true;
                } catch (IOException e) {
                    throw (NullPointerException) new NullPointerException(e.getLocalizedMessage()).initCause(e);
                }
            }
        }
    }

    @SuppressWarnings("unchecked")
    @Override
    public <U> U cast() {
        return (U) this;
    }

    /**
     * Saves (with flush) and marks the reference closed.
     * Uses a double-checked pattern on closeLock so concurrent closers run
     * the save at most once.
     */
    protected void close() throws IOException {
        if (closed) {
            return;
        }
        synchronized (closeLock) {
            if (closed) {
                return;
            }
            save(true);
            closed = true;
        }
    }

    /** Marks the reference closed without persisting the current value. */
    public void closeWithoutSaving() {
        if (closed) {
            return;
        }
        synchronized (closeLock) {
            if (closed) {
                return;
            }
            closed = true;
        }
    }

    /** @return the monitor guarding value access, for external coordination */
    public Object getAccessLock() {
        return accessLock;
    }
}

View File

@ -4,6 +4,7 @@ import com.esotericsoftware.kryo.io.Input;
import com.esotericsoftware.kryo.io.Output;
import it.unimi.dsi.fastutil.longs.*;
import it.unimi.dsi.fastutil.objects.ObjectIterator;
import org.warp.jcwdb.ann.Database;
import java.io.IOException;
import java.nio.ByteBuffer;
@ -12,6 +13,7 @@ import java.nio.channels.SeekableByteChannel;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.function.Consumer;
public class FileIndexManager implements IndexManager {
private final SeekableByteChannel dataFileChannel, metadataFileChannel;
@ -116,23 +118,12 @@ public class FileIndexManager implements IndexManager {
}
@Override
public int getType(long index) throws IOException {
return getIndexMetadata(index).getType();
}
@Override
public long getHash(long index) throws IOException {
return getIndexMetadata(index).getHash();
}
@Override
public <T> IndexDetails set(long index, DBDataOutput<T> data) throws IOException {
public IndexDetails set(long index, int size, DBWriter data) throws IOException {
checkClosed();
final int dataSize = data.getSize();
IndexDetails indexDetails = getIndexMetadataUnsafe(index);
if (indexDetails == null || indexDetails.getSize() < dataSize) {
if (indexDetails == null || indexDetails.getSize() < size) {
// Allocate new space
IndexDetails newDetails = allocateAndWrite(index, data);
IndexDetails newDetails = allocateAndWrite(index, size, data);
if (indexDetails != null) {
// Mark free the old bytes
fileAllocator.markFree(indexDetails.getOffset(), indexDetails.getSize());
@ -140,14 +131,14 @@ public class FileIndexManager implements IndexManager {
return newDetails;
} else {
// Check if size changed
if (dataSize < indexDetails.getSize()) {
if (size < indexDetails.getSize()) {
// Mark free the unused bytes
fileAllocator.markFree(indexDetails.getOffset() + dataSize, dataSize);
fileAllocator.markFree(indexDetails.getOffset() + size, size);
}
// Update index details
indexDetails = editIndex(index, indexDetails, indexDetails.getOffset(), dataSize, indexDetails.getType(), data.calculateHash());
indexDetails = editIndex(index, indexDetails, indexDetails.getOffset(), size);
// Write data
writeExact(indexDetails, data);
writeExact(indexDetails, size, data);
// Before returning, return IndexDetails
return indexDetails;
}
@ -164,58 +155,53 @@ public class FileIndexManager implements IndexManager {
}
@Override
public <T> long add(DBDataOutput<T> data) throws IOException {
public long add(int size) {
checkClosed();
final int size = data.getSize();
final long offset = fileAllocator.allocate(size);
final int type = data.getType();
final long hash = data.calculateHash();
final IndexDetails indexDetails = new IndexDetails(offset, size, type, hash);
final IndexDetails indexDetails = new IndexDetails(offset, size);
final long index = createIndexMetadata(indexDetails);
writeExact(indexDetails, data);
return index;
}
@Override
public <T> FullIndexDetails addAndGetDetails(DBDataOutput<T> data) throws IOException {
public long add(int size, DBWriter data) throws IOException {
checkClosed();
final int size = data.getSize();
final long offset = fileAllocator.allocate(size);
final int type = data.getType();
final long hash = data.calculateHash();
final IndexDetails indexDetails = new IndexDetails(offset, size, type, hash);
final IndexDetails indexDetails = new IndexDetails(offset, size);
final long index = createIndexMetadata(indexDetails);
writeExact(indexDetails, data);
writeExact(indexDetails, size, data);
return index;
}
@Override
public FullIndexDetails addAndGetDetails(int size, DBWriter data) throws IOException {
checkClosed();
final long offset = fileAllocator.allocate(size);
final IndexDetails indexDetails = new IndexDetails(offset, size);
final long index = createIndexMetadata(indexDetails);
writeExact(indexDetails, size, data);
return new FullIndexDetails(index, indexDetails);
}
/**
* Write the data at index.
* The input size must be equal to the index size!
*
* @param indexDetails
* @param data
* @throws IOException
*/
private void writeExact(final IndexDetails indexDetails, DBDataOutput<?> data) throws IOException {
final int dataSize = data.getSize();
if (indexDetails.getSize() != dataSize) {
throw new IOException("Unable to write " + dataSize + " in a space of " + indexDetails.getSize());
private void writeExact(final IndexDetails indexDetails, int size, DBWriter data) throws IOException {
if (indexDetails.getSize() != size) {
throw new IOException("Unable to write " + size + " in a space of " + indexDetails.getSize());
}
final long offset = indexDetails.getOffset();
final Output o = new Output(Channels.newOutputStream(dataFileChannel.position(offset)), dataSize);
data.getWriter().write(o);
final Output o = new Output(Channels.newOutputStream(dataFileChannel.position(offset)), size);
data.write(o);
o.flush();
}
private IndexDetails allocateAndWrite(final long index, DBDataOutput<?> w) throws IOException {
final int size = w.getSize();
final int type = w.getType();
final long hash = w.calculateHash();
private IndexDetails allocateAndWrite(final long index, int size, DBWriter w) throws IOException {
final long offset = fileAllocator.allocate(size);
IndexDetails details = editIndex(index, offset, size, type, hash);
writeExact(details, w);
IndexDetails details = editIndex(index, offset, size);
writeExact(details, size, w);
return details;
}
@ -284,13 +270,11 @@ public class FileIndexManager implements IndexManager {
* @param oldData Old index data to check
* @param offset offset
* @param size size
* @param type type
* @param hash hash
* @return
*/
private IndexDetails editIndex(long index, IndexDetails oldData, long offset, int size, int type, long hash) {
if (oldData.getOffset() != offset || oldData.getSize() != size || oldData.getType() != type || oldData.getHash() != hash) {
return editIndex(index, offset, size, type, hash);
private IndexDetails editIndex(long index, IndexDetails oldData, long offset, int size) {
if (oldData.getOffset() != offset || oldData.getSize() != size) {
return editIndex(index, offset, size);
} else {
return oldData;
}
@ -301,12 +285,10 @@ public class FileIndexManager implements IndexManager {
* @param index
* @param offset
* @param size
* @param type
* @param hash
* @return
*/
private IndexDetails editIndex(long index, long offset, int size, int type, long hash) {
IndexDetails indexDetails = new IndexDetails(offset, size, type, hash);
private IndexDetails editIndex(long index, long offset, int size) {
IndexDetails indexDetails = new IndexDetails(offset, size);
editIndex(index, indexDetails);
return indexDetails;
}
@ -368,15 +350,10 @@ public class FileIndexManager implements IndexManager {
currentMetadataFileChannel.read(metadataByteBuffer);
metadataByteBuffer.rewind();
// If it's not deleted continue
if ((metadataByteBuffer.getInt() & IndexDetails.MASK_DELETED) == 0) {
final long offset = metadataByteBuffer.getLong();
// final long sizeAndType = metadataByteBuffer.getLong();
// final int size = (int)(sizeAndType >> 32);
// final int type = (int)sizeAndType;
if (offset >= 0) { // If it's < 0 it means that the index has been deleted
final int size = metadataByteBuffer.getInt();
final int type = metadataByteBuffer.getInt();
final long hash = metadataByteBuffer.getLong();
indexDetails = new IndexDetails(offset, size, type, hash);
indexDetails = new IndexDetails(offset, size);
}
}
return indexDetails;
@ -411,18 +388,12 @@ public class FileIndexManager implements IndexManager {
}
private void writeIndexDetails(SeekableByteChannel position, IndexDetails indexDetails) throws IOException {
synchronized (metadataByteBufferLock) {// FIXXXX cleaner3
synchronized (metadataByteBufferLock) {
final int size = indexDetails.getSize();
final int type = indexDetails.getType();
final long offset = indexDetails.getOffset();
final long hash = indexDetails.getHash();
metadataByteBuffer.rewind();
metadataByteBuffer.putInt(0);
metadataByteBuffer.putLong(offset);
metadataByteBuffer.putInt(size);
metadataByteBuffer.putInt(type);
//metadataByteBuffer.putLong((long)size << 32 | type & 0xFFFFFFFFL);
metadataByteBuffer.putLong(hash);
metadataByteBuffer.rewind();
position.write(metadataByteBuffer);
}
@ -431,7 +402,7 @@ public class FileIndexManager implements IndexManager {
private void eraseIndexDetails(SeekableByteChannel position) throws IOException {
synchronized (maskByteBufferLock) {
maskByteBuffer.rewind();
maskByteBuffer.putInt(IndexDetails.MASK_DELETED);
maskByteBuffer.putLong(-1); // -1 = deleted
maskByteBuffer.rewind();
position.write(maskByteBuffer);
}
@ -513,12 +484,12 @@ public class FileIndexManager implements IndexManager {
long removedIndices = 0;
LongArrayList toUnload = new LongArrayList();
synchronized (indicesMapsAccessLock) {
if (loadedIndices.size() > JCWDatabase.MAX_LOADED_INDICES) {
if (loadedIndices.size() > Database.MAX_LOADED_INDICES) {
long count = loadedIndices.size();
LongIterator it = loadedIndices.keySet().iterator();
while (it.hasNext()) {
long loadedIndex = it.nextLong();
if (count < JCWDatabase.MAX_LOADED_INDICES * 3l / 2l) {
if (count < Database.MAX_LOADED_INDICES * 3l / 2l) {
break;
}
toUnload.add(loadedIndex);

View File

@ -6,30 +6,20 @@ public class IndexDetails {
/**
* The bitmask is used to determine if an index has been deleted
*/
public static final int BITMASK_SIZE = Integer.BYTES;
public static final int OFFSET_BYTES = Long.BYTES;
public static final int DATA_SIZE_BYTES = Integer.BYTES;
public static final int TYPE_BYTES = Integer.BYTES;
public static final int HASH_BYTES = Long.BYTES;
public static final int TOTAL_BYTES = BITMASK_SIZE + OFFSET_BYTES + DATA_SIZE_BYTES + TYPE_BYTES + HASH_BYTES;
public static final int MASK_DELETED = 0b00000001;
public static final int TOTAL_BYTES = OFFSET_BYTES + DATA_SIZE_BYTES;
private final long offset;
private final int size;
private final int type;
private final long hash;
public IndexDetails(long offset, int size, int type, long hash) {
public IndexDetails(long offset, int size) {
this.offset = offset;
this.size = size;
this.type = type;
this.hash = hash;
}
public IndexDetails(IndexDetails indexDetails) {
this.offset = indexDetails.offset;
this.size = indexDetails.size;
this.type = indexDetails.type;
this.hash = indexDetails.hash;
}
public long getOffset() {
@ -40,22 +30,12 @@ public class IndexDetails {
return size;
}
public int getType() {
return type;
}
public long getHash() {
return hash;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + (int) (hash ^ (hash >>> 32));
result = prime * result + (int) (offset ^ (offset >>> 32));
result = prime * result + size;
result = prime * result + type;
return result;
}
@ -68,20 +48,16 @@ public class IndexDetails {
if (getClass() != obj.getClass())
return false;
IndexDetails other = (IndexDetails) obj;
if (hash != other.hash)
return false;
if (offset != other.offset)
return false;
if (size != other.size)
return false;
if (type != other.type)
return false;
return true;
}
@Override
public String toString() {
return "IndexDetails [offset=" + offset + ", size=" + size + ", type=" + type + ", hash=" + hash + "]";
return "IndexDetails [offset=" + offset + ", size=" + size + "]";
}

View File

@ -1,5 +1,7 @@
package org.warp.jcwdb;
import com.esotericsoftware.kryo.io.Output;
import java.io.IOException;
import java.util.function.BiConsumer;
import java.util.function.BiPredicate;
@ -7,11 +9,10 @@ import java.util.function.Consumer;
public interface IndexManager extends Cleanable {
<T> T get(long index, DBReader<T> reader) throws IOException;
int getType(long index) throws IOException;
long getHash(long index) throws IOException;
<T> long add(DBDataOutput<T> writer) throws IOException;
<T> FullIndexDetails addAndGetDetails(DBDataOutput<T> writer) throws IOException;
<T> IndexDetails set(long index, DBDataOutput<T> writer) throws IOException;
long add(int size);
long add(int size, DBWriter writer) throws IOException;
FullIndexDetails addAndGetDetails(int size, DBWriter writer) throws IOException;
IndexDetails set(long index, int size, DBWriter writer) throws IOException;
void setFlushingAllowed(long index, boolean isUnloadingAllowed);
void delete(long index) throws IOException;
boolean has(long index);

View File

@ -1,197 +0,0 @@
package org.warp.jcwdb;
import java.io.IOError;
import java.io.IOException;
import java.nio.file.Path;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.function.Supplier;
public class JCWDatabase implements AutoCloseable, Cleanable {
public final static long MAX_LOADED_INDICES = 1000;
private final TypesManager typesManager;
private final MixedIndexDatabase indices;
private final Cleaner databaseCleaner;
private final EntryReferenceTools entryReferenceTools = new EntryReferenceTools();
private volatile boolean closed;
private final Object closeLock = new Object();
private final Object indicesAccessLock = new Object();
private final LinkedList<EntryReference<?>> usedReferences = new LinkedList<>();
public JCWDatabase(Path dataFile, Path metadataFile) throws IOException {
this.typesManager = new TypesManager(this);
this.indices = new MixedIndexDatabase(dataFile, metadataFile);
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
try {
JCWDatabase.this.close();
} catch (Exception e) {
e.printStackTrace();
}
}));
this.databaseCleaner = new Cleaner(this);
this.databaseCleaner.start();
}
public <T> EntryReference<LightList<T>> getRoot() {
try {
checkClosed();
if (exists(0)) {
return get(0);
} else {
LightList<T> newRoot = new LightBigList<>(this);
return set(0, newRoot);
}
} catch (IOException e) {
throw new IOError(e);
}
}
@SuppressWarnings("unchecked")
public <T> EntryReference<T> getRootItem(int index) {
return ((LightList<T>) getRoot().getValueReadOnlyUnsafe()).getReference(index);
}
@SuppressWarnings("unchecked")
public <T> EntryReference<T> getRootItem(int index, Supplier<T> defaultValue) {
return ((LightList<T>) getRoot().getValueReadOnlyUnsafe()).getReferenceOrInitialize(index, defaultValue);
}
public <T> EntryReference<LightList<T>> getRoot(Class<T> clazz) {
return getRoot().cast();
}
public <T> EntryReference<T> get(long index) throws IOException {
checkClosed();
int type;
long hash;
synchronized (indicesAccessLock) {
type = this.indices.getType(index);
hash = this.indices.getHash(index);
}
DBTypeParser<T> typeParser = this.typesManager.get(type);
return new EntryReference<>(entryReferenceTools, index, hash, typeParser);
}
protected <T> EntryReference<T> add(T value) throws IOException {
checkClosed();
DBTypeParser<T> typeParser = this.typesManager.get((Class<T>) value.getClass());
long index;
long hash;
synchronized (indicesAccessLock) {
FullIndexDetails fullIndexDetails = indices.addAndGetDetails(typeParser.getWriter(value));
index = fullIndexDetails.getIndex();
hash = fullIndexDetails.getHash();
}
return new EntryReference<>(entryReferenceTools, index, hash, typeParser, value);
}
protected boolean exists(long index) {
checkClosed();
synchronized (indicesAccessLock) {
return this.indices.has(index);
}
}
protected <T> EntryReference<T> set(long index, T value) throws IOException {
checkClosed();
EntryReference<T> ref;
if (exists(index)) {
ref = get(index);
ref.setValue(value);
return ref;
} else {
@SuppressWarnings("unchecked")
DBTypeParser<T> typeParser = this.typesManager.get((Class<T>) value.getClass());
long hash;
synchronized (indicesAccessLock) {
IndexDetails returnedDetails = indices.set(index, typeParser.getWriter(value));
hash = returnedDetails.getHash();
}
return new EntryReference<>(entryReferenceTools, index, hash, typeParser);
}
}
public <U> void registerType(Class<U> clazz, short type, DBTypeParser<U> parser) {
final int addition = 0xEFFF8000;
int extendedType = addition | (type & 0x7FFF);
typesManager.registerType(clazz, extendedType, parser);
}
public <U> void registerClass(Class<U> clazz, int type) {
typesManager.registerGenericClass(clazz, type);
}
public boolean isOpen() {
return !closed;
}
@Override
public void close() throws IOException {
if (closed) {
return;
}
synchronized (closeLock) {
if (closed) {
return;
}
closed = true;
}
this.databaseCleaner.stop();
synchronized (indicesAccessLock) {
this.indices.close();
}
System.out.println("Database closed.");
}
private void checkClosed() {
if (closed) {
throw new RuntimeException("Index Manager is closed.");
}
}
@Override
public long clean() {
long removedItems = indices.clean();
Iterator<EntryReference<?>> usedReferencesIterator = usedReferences.iterator();
while(usedReferencesIterator.hasNext()) {
EntryReference<?> entryReference = usedReferencesIterator.next();
if (entryReference.isClosed()) {
usedReferencesIterator.remove();
removedItems += 1;
}
}
return removedItems;
}
/**
 * Restricted facade handed to {@link EntryReference} instances so they can
 * access their backing index entry without exposing the whole database.
 */
public class EntryReferenceTools {
	private EntryReferenceTools() {
	}

	/** Reads the entry at {@code index} with the given deserializer. */
	public <T> T read(long index, DBReader<T> reader) throws IOException {
		return indices.get(index, reader);
	}

	/** Writes the entry at {@code index}; returns the resulting index details. */
	public <T> IndexDetails write(long index, DBDataOutput<T> writer) throws IOException {
		return indices.set(index, writer);
	}

	/** Enables/disables flushing of the entry at {@code index}. */
	public void setFlushingAllowed(long index, boolean isFlushingAllowed) {
		indices.setFlushingAllowed(index, isFlushingAllowed);
	}

	/** Tracks {@code ref} so clean() can drop it once it has been closed. */
	public <T> void setUsed(EntryReference<T> ref) {
		usedReferences.add(ref);
	}
}
/** Computes the hash of {@code o} via the parser registered for its runtime class. */
@SuppressWarnings("unchecked")
protected <T> long calculateHash(T o) {
	return ((DBTypeParser<T>) typesManager.get(o.getClass())).calculateHash(o);
}
}

View File

@ -1,455 +0,0 @@
package org.warp.jcwdb;
import it.unimi.dsi.fastutil.longs.LongArrayList;
import it.unimi.dsi.fastutil.objects.ObjectArrayList;
import java.io.IOError;
import java.io.IOException;
import java.util.*;
import java.util.function.Consumer;
import java.util.function.Predicate;
/**
 * A {@link LightList} backed by the database: only the entry indices are kept
 * in memory ({@link #internalList}); element values are fetched on demand
 * from the associated {@link JCWDatabase}.
 */
public class LightArrayList<T> implements LightList<T> {

	/** Database indices of the stored elements, in list order. */
	public final LongArrayList internalList;
	private final transient JCWDatabase db;

	/**
	 * @param db Database reference
	 */
	public LightArrayList(JCWDatabase db) {
		this.db = db;
		this.internalList = new LongArrayList();
	}

	/**
	 * @param db Database reference
	 * @param elements Database indices of the elements to add
	 */
	public LightArrayList(JCWDatabase db, LongArrayList elements) {
		this.db = db;
		this.internalList = new LongArrayList(elements);
	}

	@Override
	public int size() {
		return internalList.size();
	}

	@Override
	public boolean isEmpty() {
		return internalList.isEmpty();
	}

	@Override
	public boolean contains(Object o) {
		if (o != null) {
			for (long element : internalList) {
				try {
					EntryReference<T> ref = db.get(element);
					if (o.equals(ref.getValueReadOnlyUnsafe())) {
						return true;
					}
				} catch (IOException e) {
					e.printStackTrace();
				}
			}
		}
		return false;
	}

	/**
	 * Use iteratorReferences()
	 */
	@Deprecated
	@SuppressWarnings("unchecked")
	@Override
	public Iterator<T> iterator() {
		System.out.println("WARNING! YOU ARE USING iterator()! PLEASE USE ITERATORREFERENCES TO AVOID OUTOFMEMORY!");
		// Materializes every value: dangerous for big lists, hence deprecated.
		final ArrayList<T> elements = new ArrayList<>();
		for (long element : internalList) {
			try {
				elements.add((T) db.get(element).getValueReadOnlyUnsafe());
			} catch (IOException e) {
				e.printStackTrace();
			}
		}
		return elements.iterator();
	}

	@SuppressWarnings("unchecked")
	@Override
	public Iterator<EntryReference<T>> iteratorReferences() {
		final ArrayList<EntryReference<T>> elements = new ArrayList<>();
		for (long element : internalList) {
			try {
				elements.add(db.get(element));
			} catch (IOException e) {
				e.printStackTrace();
			}
		}
		return elements.iterator();
	}

	/**
	 * USE forEachReference INSTEAD, TO AVOID OUTOFMEMORY
	 *
	 * @param action
	 */
	@Deprecated
	@Override
	public void forEach(Consumer<? super T> action) {
		Objects.requireNonNull(action);
		for (T t : this) {
			action.accept(t);
		}
	}

	@Override
	public void forEachReference(Consumer<? super EntryReference<T>> action) {
		Objects.requireNonNull(action);
		for (long index : this.internalList) {
			try {
				action.accept(db.get(index));
			} catch (IOException e) {
				throw new IOError(e);
			}
		}
	}

	@SuppressWarnings("unchecked")
	@Override
	public T[] toArray() {
		final T[] elements = (T[]) new Object[internalList.size()];
		for (int i = 0; i < elements.length; i++) {
			try {
				elements[i] = (T) db.get(internalList.getLong(i)).getValueReadOnlyUnsafe();
			} catch (IOException e) {
				e.printStackTrace();
			}
		}
		return elements;
	}

	@SuppressWarnings("unchecked")
	@Override
	public <T1> T1[] toArray(T1[] a) {
		// FIX: was "(T1[]) new Objects[...]" — an array of java.util.Objects —
		// which made every element store fail with ArrayStoreException.
		final T1[] elements = (T1[]) new Object[internalList.size()];
		for (int i = 0; i < elements.length; i++) {
			try {
				elements[i] = (T1) db.get(internalList.getLong(i)).getValueReadOnlyUnsafe();
			} catch (IOException e) {
				e.printStackTrace();
			}
		}
		return elements;
	}

	@Override
	public boolean add(T o) {
		EntryReference<T> ref = addEntry(o);
		return ref != null;
	}

	@Override
	public EntryReference<T> addEntry(T o) {
		EntryReference<T> ref = addToDatabase(o);
		if (internalList.add(ref.getIndex())) {
			return ref;
		} else {
			return null;
		}
	}

	@Override
	public boolean remove(Object o) {
		int removeIndex = indexOf(o);
		if (removeIndex >= 0) {
			internalList.removeLong(removeIndex);
			return true;
		}
		return false;
	}

	@Override
	public boolean remove(EntryReference<T> ref) {
		int removeIndex = indexOfEntry(ref);
		if (removeIndex >= 0) {
			internalList.removeLong(removeIndex);
			return true;
		}
		return false;
	}

	@Override
	public boolean containsAll(Collection<?> c) {
		for (Object o : c) {
			int objIndex = indexOf(o);
			if (objIndex < 0) {
				return false;
			}
		}
		return true;
	}

	@SuppressWarnings("unchecked")
	@Override
	public boolean addAll(Collection<? extends T> c) {
		boolean result = false;
		for (Object o : c) {
			result |= add((T) o);
		}
		return result;
	}

	@SuppressWarnings("unchecked")
	@Override
	public boolean addAll(int index, Collection<? extends T> c) {
		boolean result = false;
		int delta = 0;
		for (Object o : c) {
			add(index + delta, (T) o);
			result = true;
			delta++;
		}
		return result;
	}

	@SuppressWarnings("unchecked")
	@Override
	public boolean removeAll(Collection<?> c) {
		boolean result = false;
		for (Object o : c) {
			result |= remove((T) o);
		}
		return result;
	}

	@Override
	public boolean retainAll(Collection<?> c) {
		// FIX: the previous implementation compared database indices against
		// hashes and then indexed the collection at position -1, so it could
		// never work. Follow the standard List contract instead: keep only
		// the elements contained in c.
		return removeIf(element -> !c.contains(element));
	}

	@Override
	public void clear() {
		internalList.clear();
	}

	/**
	 * Use getReference or getReadOnlyValue
	 */
	@Deprecated
	@Override
	public T get(int index) {
		return getReadOnlyValue(index);
	}

	@SuppressWarnings("unchecked")
	public T getReadOnlyValue(int index) {
		try {
			return (T) db.get(internalList.getLong(index)).getValueReadOnlyUnsafe();
		} catch (IOException e) {
			e.printStackTrace();
			return null;
		}
	}

	@Override
	public EntryReference<T> getReference(int index) {
		try {
			return db.get(internalList.getLong(index)).cast();
		} catch (IOException e) {
			e.printStackTrace();
			return null;
		}
	}

	@SuppressWarnings("unchecked")
	@Override
	public T set(int index, T element) {
		EntryReference<T> ref = addToDatabase(element);
		long oldIndex = internalList.set(index, ref.getIndex());
		try {
			ref.close();
			// List contract: return the value previously at this position.
			return ((EntryReference<T>) (db.get(oldIndex))).getValueReadOnlyUnsafe();
		} catch (IOException e) {
			throw (NullPointerException) new NullPointerException().initCause(e);
		}
	}

	@Override
	public void add(int index, T element) {
		EntryReference<T> ref = addToDatabase(element);
		internalList.add(index, ref.getIndex());
	}

	@SuppressWarnings("unchecked")
	@Override
	public T remove(int index) {
		long oldIndex = internalList.removeLong(index);
		try {
			return ((EntryReference<T>) (db.get(oldIndex))).getValueReadOnlyUnsafe();
		} catch (IOException e) {
			throw (NullPointerException) new NullPointerException().initCause(e);
		}
	}

	@Override
	public int indexOf(Object o) {
		// FIX: the previous implementation iterated over an always-empty local
		// hash list and therefore always returned -1. Compare hashes (cheap)
		// first, then values, against the entries actually stored in this list.
		EntryReference<?> ref = addToDatabase(o);
		long hashToFind = ref.calculateHash();
		for (int i = 0; i < internalList.size(); i++) {
			try {
				EntryReference<?> candidate = db.get(internalList.getLong(i));
				if (hashToFind == candidate.calculateHash()
						&& Objects.equals(ref.getValueReadOnlyUnsafe(), candidate.getValueReadOnlyUnsafe())) {
					return i;
				}
			} catch (IOException e) {
				throw (NullPointerException) new NullPointerException().initCause(e);
			}
		}
		return -1;
	}

	@Override
	public int indexOfEntry(EntryReference<T> ref) {
		for (int i = 0; i < internalList.size(); i++) {
			long index = internalList.getLong(i);
			try {
				EntryReference<?> ref2 = db.get(index);
				if (ref.getValueReadOnlyUnsafe().equals(ref2.getValueReadOnlyUnsafe())) {
					return i;
				}
			} catch (IOException e) {
				throw (NullPointerException) new NullPointerException().initCause(e);
			}
		}
		return -1;
	}

	@Override
	public void appendIndex(long elementIndex) {
		internalList.add(elementIndex);
	}

	@Override
	public int lastIndexOf(Object o) {
		EntryReference<T> ref = addToDatabase(o).cast();
		return lastIndexOfEntry(ref);
	}

	@Override
	public int lastIndexOfEntry(EntryReference<T> ref) {
		long objToRemoveHash = ref.calculateHash();
		int lastValue = -1;
		for (int i = 0; i < internalList.size(); i++) {
			long index2 = internalList.getLong(i);
			try {
				EntryReference<?> ref2 = db.get(index2);
				// Cheap hash pre-check before the (potentially expensive) equals.
				if (objToRemoveHash == ref2.calculateHash()) {
					if (ref.getValueReadOnlyUnsafe().equals(ref2.getValueReadOnlyUnsafe())) {
						lastValue = i;
					}
				}
			} catch (IOException e) {
				throw (NullPointerException) new NullPointerException().initCause(e);
			}
		}
		return lastValue;
	}

	@Deprecated
	@Override
	public ListIterator<T> listIterator() {
		// TODO: implement
		throw new RuntimeException("Not implemented!");
	}

	@Deprecated
	@Override
	public ListIterator<T> listIterator(int index) {
		// TODO: implement
		throw new RuntimeException("Not implemented!");
	}

	@Deprecated
	@Override
	public List<T> subList(int fromIndex, int toIndex) {
		// TODO: implement
		throw new RuntimeException("Not implemented!");
	}

	// Stores obj in the database and returns its new reference.
	private <U> EntryReference<U> addToDatabase(U obj) {
		EntryReference<U> ref;
		try {
			ref = db.add(obj);
		} catch (IOException e) {
			throw (NullPointerException) new NullPointerException().initCause(e);
		}
		return ref;
	}

	@Deprecated
	@Override
	public int hashCode() {
		final int prime = 31;
		int result = 1;
		result = prime * result + ((internalList == null) ? 0 : internalList.hashCode());
		return result;
	}

	@SuppressWarnings("unchecked")
	@Override
	public boolean removeIf(Predicate<? super T> filter) {
		Objects.requireNonNull(filter);
		boolean removed = false;
		// The index only advances when nothing was removed at this position.
		for (int i = 0; i < internalList.size(); ) {
			T obj;
			try {
				obj = ((EntryReference<T>) (db.get(internalList.getLong(i)).cast())).getValueReadOnlyUnsafe();
			} catch (IOException e) {
				throw (NullPointerException) new NullPointerException().initCause(e);
			}
			if (filter.test(obj)) {
				internalList.removeLong(i);
				removed = true;
			} else {
				i++;
			}
		}
		return removed;
	}

	@Override
	public String toString() {
		return "LightArrayList{" +
				"internalList=" + internalList +
				", db=" + db +
				'}';
	}
}

View File

@ -1,607 +0,0 @@
package org.warp.jcwdb;
import it.unimi.dsi.fastutil.ints.IntArrayList;
import it.unimi.dsi.fastutil.longs.LongArrayList;
import it.unimi.dsi.fastutil.objects.ObjectArrayList;
import java.io.IOException;
import java.util.*;
import java.util.function.Consumer;
import java.util.function.Predicate;
import java.util.function.Supplier;
/**
 * A {@link LightList} that splits its contents into fixed-capacity chunks
 * (each chunk is a {@link LightArrayList} stored in the database) so very
 * large lists never have to be fully resident in memory. A single chunk is
 * kept cached at a time, guarded by {@link #cachedChunkLock}.
 */
public class LightBigList<T> implements LightList<T>, Saveable {

	public static final int MAX_ELEMENTS_PER_CHUNK = 200000;

	/** Database indices of the chunks, in order. */
	public final LongArrayList chunks;
	/** Element count of each chunk, parallel to {@link #chunks}. */
	public final IntArrayList chunkSizes;
	private final JCWDatabase db;

	// Single-slot chunk cache; all accesses go through cachedChunkLock.
	private LightArrayList<T> cachedChunk;
	private EntryReference<LightArrayList<T>> cachedChunkRef;
	private long cachedChunkIndex = -1;
	private int cachedChunkNumber = -1;
	private final Object cachedChunkLock = new Object();

	/**
	 * @param db Database reference
	 */
	public LightBigList(JCWDatabase db) {
		this.db = db;
		this.chunks = new LongArrayList();
		this.chunkSizes = new IntArrayList();
	}

	/**
	 * @param db Database reference
	 * @param elements Database indices of the elements to add
	 */
	public LightBigList(JCWDatabase db, LongArrayList elements) {
		this.db = db;
		this.chunks = new LongArrayList();
		this.chunkSizes = new IntArrayList();
		elements.forEach((long element) -> {
			this.appendIndex(element);
		});
	}

	public LightBigList(JCWDatabase db, LongArrayList chunks, IntArrayList chunkSizes) {
		this.db = db;
		this.chunks = chunks;
		this.chunkSizes = chunkSizes;
		if (this.chunks.size() > 0) {
			prepareAccessToChunk(0);
		}
	}

	// Saves the currently cached chunk (if any) and loads chunk chunkNumber
	// into the cache. Callers must hold cachedChunkLock.
	private void prepareAccessToChunk(int chunkNumber) {
		if (this.cachedChunkRef != null) {
			this.cachedChunkRef.save();
		}
		this.cachedChunkNumber = chunkNumber;
		this.cachedChunkIndex = this.chunks.getLong(chunkNumber);
		try {
			this.cachedChunkRef = db.get(this.cachedChunkIndex);
		} catch (IOException ex) {
			throw (NullPointerException) new NullPointerException().initCause(ex);
		}
		this.cachedChunk = this.cachedChunkRef.getValueReadOnlyUnsafe();
	}

	/**
	 * Append an index to the first free chunk
	 * @param elementIndex
	 */
	public void appendIndex(long elementIndex) {
		for (int i = 0; i < chunks.size(); i++) {
			final int chunkNumber = i;
			if (MAX_ELEMENTS_PER_CHUNK - chunkSizes.getInt(i) > 0) {
				synchronized (cachedChunkLock) {
					if (cachedChunkNumber != i) {
						prepareAccessToChunk(i);
					}
					cachedChunk.appendIndex(elementIndex);
					chunkSizes.set(chunkNumber, cachedChunk.size());
					return;
				}
			}
		}
		// Every chunk is full (or there is none yet): start a new one.
		LightList<T> newChunk = new LightArrayList<>(db);
		newChunk.appendIndex(elementIndex);
		long newChunkIndex;
		try {
			newChunkIndex = db.add(newChunk).getIndex();
		} catch (IOException ex) {
			throw (NullPointerException) new NullPointerException().initCause(ex);
		}
		chunks.add(newChunkIndex);
		chunkSizes.add(1);
	}

	/**
	 * Get the elements count
	 * @return the size of the list
	 */
	@Override
	public int size() {
		int size = 0;
		for (int chunkSize : this.chunkSizes) {
			size += chunkSize;
		}
		return size;
	}

	/**
	 * @return the count of chunks
	 */
	public int chunksCount() {
		return this.chunkSizes.size();
	}

	/**
	 * Check if the list is empty
	 * @return true if the list is empty
	 */
	@Override
	public boolean isEmpty() {
		return this.size() <= 0;
	}

	@Override
	public boolean contains(Object o) {
		if (o != null) {
			for (long chunkIndex : chunks) {
				try {
					EntryReference<LightArrayList<T>> chunkRef = db.get(chunkIndex);
					LightArrayList<T> chunk = chunkRef.getValueReadOnlyUnsafe();
					if (chunk.contains(o)) {
						return true;
					}
				} catch (IOException ex) {
					throw (NullPointerException) new NullPointerException().initCause(ex);
				}
			}
		}
		return false;
	}

	/**
	 * Use iteratorReferences()
	 */
	@Deprecated
	@Override
	public Iterator<T> iterator() {
		throw new RuntimeException("iterator() isn't implemented!");
	}

	@Deprecated
	@Override
	public Iterator<EntryReference<T>> iteratorReferences() {
		throw new RuntimeException("iteratorReferences() isn't implemented!");
	}

	/**
	 * USE forEachReference INSTEAD, TO AVOID OUTOFMEMORY
	 *
	 * @param action
	 */
	@Deprecated
	@Override
	public void forEach(Consumer<? super T> action) {
		throw new RuntimeException("forEach() isn't implemented! Use forEachReferences() instead");
	}

	@Override
	public void forEachReference(Consumer<? super EntryReference<T>> action) {
		Objects.requireNonNull(action);
		// Iterate through all chunks
		for (int i = 0; i < chunks.size(); i++) {
			synchronized (cachedChunkLock) {
				if (cachedChunkNumber != i) {
					prepareAccessToChunk(i);
				}
				cachedChunk.forEachReference(action);
			}
		}
	}

	/**
	 * toArray() isn't implemented! DO NOT USE IT.
	 * @return
	 */
	@SuppressWarnings("unchecked")
	@Deprecated
	@Override
	public T[] toArray() {
		// The method has always thrown unconditionally; the dead fallback that
		// followed also copied the wrong element (chunk.get(i) instead of
		// chunk.get(i1)), so it has been removed rather than kept unreachable.
		throw new RuntimeException("toArray() isn't implemented!");
	}

	@SuppressWarnings("unchecked")
	@Override
	public <T1> T1[] toArray(T1[] a) {
		throw new RuntimeException("toArray() isn't implemented!");
	}

	/**
	 * Use addEntry(o)
	 * @param o
	 * @return
	 */
	@Deprecated
	@Override
	public boolean add(T o) {
		EntryReference<T> ref = addEntry(o);
		return ref != null;
	}

	@Override
	public EntryReference<T> addEntry(T o) {
		EntryReference<T> ref = addToDatabase(o);
		appendIndex(ref.getIndex());
		return ref;
	}

	@Override
	public boolean remove(Object o) {
		final int removeOffset = indexOf(o);
		return removeAt(removeOffset) != null;
	}

	@Override
	public boolean remove(EntryReference<T> ref) {
		final int removeOffset = indexOfEntry(ref);
		return removeAt(removeOffset) != null;
	}

	// Removes the element at the global offset removeOffset, returning the
	// removed value, or null when the offset is negative / out of range.
	private T removeAt(int removeOffset) {
		final VariableWrapper<T> result = new VariableWrapper<>(null);
		long currentOffset = 0;
		if (removeOffset >= 0) {
			// Iterate through all chunks
			for (int i = 0; i < chunks.size(); i++) {
				final int currentChunkSize = chunkSizes.getInt(i);
				final long chunkStartOffset = currentOffset;
				currentOffset += currentChunkSize;
				// If the offset to remove is in the current chunk
				if (currentOffset > removeOffset) {
					// Get chunk index
					final long chunkIndex = chunks.getLong(i);
					// Get the offset relative to the current chunk
					final int relativeOffset = (int) (removeOffset - chunkStartOffset);
					if (relativeOffset < 0) {
						return null;
					}
					try {
						EntryReference<LightArrayList<T>> chunkRef = db.get(chunkIndex);
						chunkRef.editValue((chunk) -> {
							result.var = chunk.remove(relativeOffset);
						});
					} catch (IOException ex) {
						throw (NullPointerException) new NullPointerException().initCause(ex);
					}
					// FIX: update the size of chunk i. The previous code indexed
					// chunkSizes with the global element offset (removeOffset),
					// corrupting the size table for any offset >= chunksCount().
					chunkSizes.set(i, currentChunkSize - 1);
					break;
				}
			}
			return result.var;
		}
		return null;
	}

	@Override
	public boolean containsAll(Collection<?> c) {
		for (Object o : c) {
			int objIndex = indexOf(o);
			if (objIndex < 0) {
				return false;
			}
		}
		return true;
	}

	@SuppressWarnings("unchecked")
	@Override
	public boolean addAll(Collection<? extends T> c) {
		boolean result = false;
		for (Object o : c) {
			result |= add((T) o);
		}
		return result;
	}

	@SuppressWarnings("unchecked")
	@Override
	public boolean addAll(int index, Collection<? extends T> c) {
		boolean result = false;
		int delta = 0;
		for (Object o : c) {
			add(index + delta, (T) o);
			result = true;
			delta++;
		}
		return result;
	}

	@SuppressWarnings("unchecked")
	@Override
	public boolean removeAll(Collection<?> c) {
		boolean result = false;
		for (Object o : c) {
			result |= remove((T) o);
		}
		return result;
	}

	@Override
	public boolean retainAll(Collection<?> c) {
		// FIX: the previous implementation treated chunk UIDs as element hashes
		// and then indexed the collection at position -1; it could never work.
		// Follow the standard List contract: keep only elements contained in c.
		return removeIf(element -> !c.contains(element));
	}

	@Override
	public void clear() {
		chunks.clear();
		chunkSizes.clear();
	}

	/**
	 * Use getReference or getReadOnlyValue
	 */
	@Deprecated
	@Override
	public T get(int index) {
		return getReadOnlyValue(index);
	}

	@SuppressWarnings("unchecked")
	public T getReadOnlyValue(int index) {
		// NOTE(review): this indexes `chunks` (chunk UIDs) with what the other
		// accessors (set/removeAt/indexOfEntry) treat as a global element
		// offset; it only behaves as expected while each chunk holds a single
		// element — confirm the intended semantics before relying on it.
		try {
			return (T) db.get(chunks.getLong(index)).getValueReadOnlyUnsafe();
		} catch (IOException e) {
			e.printStackTrace();
			return null;
		}
	}

	@Override
	public EntryReference<T> getReference(int index) {
		return getReferenceUnsafe(index, true);
	}

	@Override
	public EntryReference<T> getReferenceOrInitialize(int index, Supplier<T> initializer) {
		EntryReference<T> value = getReferenceUnsafe(index, false);
		if (value != null) {
			return value;
		} else {
			T initializedData = initializer.get();
			EntryReference<T> initializedDataRef = addToDatabase(initializedData);
			return set(index, initializedDataRef);
		}
	}

	// NOTE(review): same chunk-vs-element offset concern as getReadOnlyValue.
	private EntryReference<T> getReferenceUnsafe(int index, boolean throwError) {
		try {
			return db.get(chunks.getLong(index)).cast();
		} catch (IOException e) {
			if (throwError) e.printStackTrace();
			return null;
		}
	}

	@SuppressWarnings("unchecked")
	@Override
	public T set(int setOffset, final T element) {
		return set(setOffset, addToDatabase(element)).getValueReadOnlyUnsafe();
	}

	@SuppressWarnings("unchecked")
	@Override
	public EntryReference<T> set(int setOffset, final EntryReference<T> element) {
		long nextChunkOffset = 0;
		VariableWrapper<EntryReference<T>> wrapper = new VariableWrapper<>(null);
		if (setOffset >= 0) {
			// Iterate through all chunks
			for (int i = 0; i < chunks.size(); i++) {
				final int currentChunkSize = chunkSizes.getInt(i);
				final long chunkStartOffset = nextChunkOffset;
				nextChunkOffset += currentChunkSize;
				// If the offset to set is in the current chunk
				if (nextChunkOffset > setOffset) {
					// Get chunk index
					final long chunkIndex = chunks.getLong(i);
					// Get the offset relative to the current chunk
					final int relativeOffset = (int) (setOffset - chunkStartOffset);
					if (relativeOffset < 0) {
						throw new NullPointerException("Relative Offset < 0");
					}
					try {
						EntryReference<LightArrayList<T>> chunkRef = db.get(chunkIndex);
						chunkRef.editValue((chunk) -> {
							wrapper.var = chunk.set(relativeOffset, element);
						});
					} catch (IOException ex) {
						throw (NullPointerException) new NullPointerException().initCause(ex);
					}
					break;
				}
			}
			return wrapper.var;
		}
		return null;
	}

	@Override
	public void add(int index, T element) {
		throw new RuntimeException("add() isn't implemented!");
	}

	@SuppressWarnings("unchecked")
	@Override
	public T remove(int index) {
		return this.removeAt(index);
	}

	@Override
	public int indexOf(Object o) {
		EntryReference<T> ref = addToDatabase(o).cast();
		return indexOfEntry(ref);
	}

	@Override
	public int indexOfEntry(EntryReference<T> ref) {
		int currentOffset = 0;
		// Iterate through all chunks
		for (int i = 0; i < chunks.size(); i++) {
			try {
				final int currentChunkSize = chunkSizes.getInt(i);
				// Delegate the search to the chunk; a hit is translated back
				// into a global offset.
				final long chunkIndex = chunks.getLong(i);
				EntryReference<LightArrayList<T>> chunkRef = db.get(chunkIndex);
				final int foundIndex = chunkRef.getValueReadOnlyUnsafe().indexOfEntry(ref);
				if (foundIndex >= 0) {
					return currentOffset + foundIndex;
				}
				currentOffset += currentChunkSize;
			} catch (IOException ex) {
				throw (NullPointerException) new NullPointerException().initCause(ex);
			}
		}
		return -1;
	}

	@Override
	public int lastIndexOf(Object o) {
		return lastIndexOfEntry(addToDatabase(o).cast());
	}

	@Override
	public int lastIndexOfEntry(EntryReference<T> ref) {
		int currentOffset = 0;
		// Iterate through all chunks, last chunk first
		for (int i = chunks.size() - 1; i >= 0; i--) {
			try {
				final int currentChunkSize = chunkSizes.getInt(i);
				final long chunkIndex = chunks.getLong(i);
				EntryReference<LightArrayList<T>> chunkRef = db.get(chunkIndex);
				final int foundIndex = chunkRef.getValueReadOnlyUnsafe().lastIndexOfEntry(ref);
				if (foundIndex >= 0) {
					return currentOffset + foundIndex;
				}
				currentOffset += currentChunkSize;
			} catch (IOException ex) {
				throw (NullPointerException) new NullPointerException().initCause(ex);
			}
		}
		return -1;
	}

	@Deprecated
	@Override
	public ListIterator<T> listIterator() {
		// TODO: implement
		throw new RuntimeException("Not implemented!");
	}

	@Deprecated
	@Override
	public ListIterator<T> listIterator(int index) {
		// TODO: implement
		throw new RuntimeException("Not implemented!");
	}

	@Deprecated
	@Override
	public List<T> subList(int fromIndex, int toIndex) {
		// TODO: implement
		throw new RuntimeException("Not implemented!");
	}

	// Stores obj in the database and returns its new reference.
	private <U> EntryReference<U> addToDatabase(U obj) {
		EntryReference<U> ref;
		try {
			ref = db.add(obj);
		} catch (IOException e) {
			throw (NullPointerException) new NullPointerException().initCause(e);
		}
		return ref;
	}

	@Override
	public int hashCode() {
		final int prime = 31;
		int result = 1;
		result = prime * result + ((chunks == null) ? 0 : chunks.hashCode());
		return result;
	}

	@SuppressWarnings("unchecked")
	@Override
	public boolean removeIf(Predicate<? super T> filter) {
		Objects.requireNonNull(filter);
		boolean result = false;
		// Iterate through all chunks
		for (int i = 0; i < chunks.size(); i++) {
			synchronized (cachedChunkLock) {
				if (cachedChunkNumber != i) {
					prepareAccessToChunk(i);
				}
				if (cachedChunk.removeIf(filter)) {
					result = true;
					chunkSizes.set(cachedChunkNumber, cachedChunk.size());
				}
			}
		}
		return result;
	}

	@Override
	public String toString() {
		return "LightBigList{" +
				"chunks=" + chunks +
				", chunkSizes=" + chunkSizes +
				", db=" + db +
				'}';
	}

	@Override
	public void save() {
		if (this.cachedChunkRef != null) {
			this.cachedChunkRef.save();
		}
	}

	@Override
	public void saveAndFlush() {
		save();
	}
}

View File

@ -1,29 +0,0 @@
package org.warp.jcwdb;
import java.util.Iterator;
import java.util.List;
import java.util.function.Consumer;
import java.util.function.Supplier;
/**
 * A {@link List} whose elements live in the database; implementations keep
 * only entry indices in memory. Adds reference-based accessors that avoid
 * materializing every element at once.
 */
public interface LightList<T> extends List<T> {
	/** Iterates over the elements as database references instead of values. */
	Iterator<EntryReference<T>> iteratorReferences();

	/** Visits every element as a database reference. */
	void forEachReference(Consumer<? super EntryReference<T>> action);

	/** Adds {@code o} to the database and to this list, returning its reference. */
	EntryReference<T> addEntry(T o);

	/** Replaces the element at {@code setOffset} with an existing reference. */
	EntryReference<T> set(int setOffset, final EntryReference<T> element);

	/** Removes the element identified by {@code ref}; true if it was present. */
	boolean remove(EntryReference<T> ref);

	/** @return a reference to the element at {@code index}. */
	EntryReference<T> getReference(int index);

	/**
	 * @return the reference at {@code index}; if absent, stores
	 * {@code initializer}'s value there first.
	 */
	EntryReference<T> getReferenceOrInitialize(int index, Supplier<T> initializer);

	/** @return the position of {@code ref}, or -1 if absent. */
	int indexOfEntry(EntryReference<T> ref);

	/** @return the last position of {@code ref}, or -1 if absent. */
	int lastIndexOfEntry(EntryReference<T> ref);

	/** Appends an already-stored database index to this list. */
	void appendIndex(long elementIndex);
}

View File

@ -1,92 +0,0 @@
package org.warp.jcwdb;
import java.io.IOException;
import java.nio.file.Path;
/**
 * Index manager that checks an in-memory cache first and falls back to the
 * on-disk file index; new entries are always created in the file index.
 */
public class MixedIndexDatabase implements IndexManager {
	private final FileIndexManager fileIndices;
	private final CacheIndexManager cacheIndices;

	public MixedIndexDatabase(Path dataFile, Path metadataFile) throws IOException {
		this.fileIndices = new FileIndexManager(dataFile, metadataFile);
		this.cacheIndices = new CacheIndexManager();
	}

	@Override
	public <T> T get(long index, DBReader<T> reader) throws IOException {
		return cacheIndices.has(index) ? cacheIndices.get(index, reader) : fileIndices.get(index, reader);
	}

	@Override
	public int getType(long index) throws IOException {
		return cacheIndices.has(index) ? cacheIndices.getType(index) : fileIndices.getType(index);
	}

	@Override
	public long getHash(long index) throws IOException {
		return cacheIndices.has(index) ? cacheIndices.getHash(index) : fileIndices.getHash(index);
	}

	@Override
	public <T> long add(DBDataOutput<T> writer) throws IOException {
		return fileIndices.add(writer);
	}

	@Override
	public <T> FullIndexDetails addAndGetDetails(DBDataOutput<T> writer) throws IOException {
		return fileIndices.addAndGetDetails(writer);
	}

	@Override
	public <T> IndexDetails set(long index, DBDataOutput<T> writer) throws IOException {
		return cacheIndices.has(index) ? cacheIndices.set(index, writer) : fileIndices.set(index, writer);
	}

	@Override
	public void setFlushingAllowed(long index, boolean isFlushingAllowed) {
		if (cacheIndices.has(index)) {
			cacheIndices.setFlushingAllowed(index, isFlushingAllowed);
		} else {
			fileIndices.setFlushingAllowed(index, isFlushingAllowed);
		}
	}

	@Override
	public void delete(long index) throws IOException {
		// Remove the entry from both layers.
		cacheIndices.delete(index);
		fileIndices.delete(index);
	}

	@Override
	public boolean has(long index) {
		return cacheIndices.has(index) || fileIndices.has(index);
	}

	@Override
	public void close() throws IOException {
		// TODO: move all cached indices to filesIndices before closing.
		this.cacheIndices.close();
		this.fileIndices.close();
	}

	@Override
	public long clean() {
		return fileIndices.clean() + cacheIndices.clean();
	}
}

View File

@ -1,59 +0,0 @@
package org.warp.jcwdb;
import it.unimi.dsi.fastutil.ints.Int2ObjectMap;
import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap;
import it.unimi.dsi.fastutil.objects.Object2ObjectMap;
import it.unimi.dsi.fastutil.objects.Object2ObjectOpenHashMap;
/**
 * Registry mapping numeric type ids and Java classes to their
 * {@link DBTypeParser}s, with an optional fallback parser used for classes
 * that have no dedicated parser.
 */
public class TypesManager {

	private final Int2ObjectMap<DBTypeParser<?>> types;
	private final Object2ObjectMap<Class<?>, DBTypeParser<?>> typesByClass;
	private DBTypedObjectParser<?> fallbackParser;

	public TypesManager(JCWDatabase db) {
		types = new Int2ObjectOpenHashMap<>();
		typesByClass = new Object2ObjectOpenHashMap<>();
		DBStandardTypes.registerStandardTypes(db, this);
	}

	/** Registers {@code parser} for both the numeric id and the class. */
	public <T> void registerType(Class<T> clazz, int type, DBTypeParser<T> parser) {
		this.types.put(type, parser);
		this.typesByClass.put(clazz, parser);
	}

	/**
	 * Use this method with the most used classes to save disk space.
	 * @param clazz class to register with the fallback parser
	 * @param id compact numeric id assigned to the class
	 * @throws IllegalStateException if no fallback parser has been registered
	 */
	public <T> void registerGenericClass(Class<T> clazz, int id) {
		// FIX: fail with a clear message instead of a bare NullPointerException
		// when registerTypeFallback(...) has not been called yet.
		if (this.fallbackParser == null) {
			throw new IllegalStateException("No fallback parser registered: call registerTypeFallback(...) first.");
		}
		this.fallbackParser.registerClass(clazz, id);
	}

	public void registerTypeFallback(DBTypedObjectParser<?> parser) {
		this.fallbackParser = parser;
	}

	/**
	 * @return the parser for the given numeric type id, or the fallback parser.
	 * @throws NoParserFoundException if neither is available
	 */
	public <T> DBTypeParser<T> get(int type) {
		if (!types.containsKey(type)) {
			if (fallbackParser == null) {
				throw new NoParserFoundException("The type " + type + " can't be parsed.");
			}
			return fallbackParser.cast();
		}
		return types.get(type).cast();
	}

	/**
	 * @return the parser for the given class, or the fallback parser.
	 * @throws NoParserFoundException if neither is available
	 */
	public <T> DBTypeParser<T> get(Class<T> type) {
		if (!typesByClass.containsKey(type)) {
			if (fallbackParser == null) {
				throw new NoParserFoundException("The class " + type.getSimpleName() + " can't be parsed.");
			}
			return fallbackParser.cast();
		}
		return typesByClass.get(type).cast();
	}
}

View File

@ -1,7 +1,5 @@
package org.warp.jcwdb;
import java.nio.channels.SeekableByteChannel;
public class VariableWrapper<T> {
public T var;

View File

@ -0,0 +1,9 @@
package org.warp.jcwdb.ann;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
 * Assigns a stable numeric type id to a database-storable class.
 */
@Retention(RetentionPolicy.RUNTIME)
public @interface DBClass {
	/** Stable numeric id of the annotated class. */
	int classTypeId();
}

View File

@ -0,0 +1,7 @@
package org.warp.jcwdb.ann;
/**
 * Storage category of a persisted field or property.
 * NOTE(review): semantics inferred from the names — presumably
 * DATABASE_OBJECT is a nested DBObject stored by UID, OBJECT an arbitrary
 * serialized value, INTEGER a raw integer; confirm against the loaders.
 */
public enum DBDataType {
	DATABASE_OBJECT,
	OBJECT,
	INTEGER
}

View File

@ -0,0 +1,13 @@
package org.warp.jcwdb.ann;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks a field as persisted and assigns it a slot id within its owner object.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.FIELD})
public @interface DBField {
	/** Slot id of the field inside its owner object. */
	int id();

	/** Storage category of the field; defaults to a generic object. */
	DBDataType type() default DBDataType.OBJECT;
}

View File

@ -0,0 +1,106 @@
package org.warp.jcwdb.ann;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
public abstract class DBObject {
private final Database database;
private final long uid;
private Field[] fields;
private DBDataType[] fieldTypes;
private long[] fieldUIDs;
private Method[] propertyGetters;
private Method[] propertySetters;
private DBDataType[] propertyTypes;
private long[] propertyUIDs;
private boolean[] loadedProperties;
private Object[] loadedPropertyValues;
public DBObject(Database database) {
this.database = database;
this.uid = database.newDBObject(this);
database.preloadDBObject(this);
}
public DBObject(Database database, long uid) {
this.database = database;
this.uid = uid;
database.preloadDBObject(this);
}
public <T> T getProperty() {
StackWalker walker = StackWalker.getInstance(StackWalker.Option.RETAIN_CLASS_REFERENCE);
StackWalker.StackFrame stackFrame = walker.walk(f -> f.skip(1).findFirst().orElse(null));
try {
int propertyId = stackFrame.getDeclaringClass().getDeclaredMethod(stackFrame.getMethodName()).getAnnotation(DBPropertyGetter.class).id();
return getProperty(propertyId);
} catch (NoSuchMethodException e) {
throw new RuntimeException(e);
}
}
<T> void setLoadedProperty(int propertyId, T value) {
loadedPropertyValues[propertyId] = value;
}
@SuppressWarnings("unchecked")
private <T> T getProperty(int propertyId) {
if (!loadedProperties[propertyId]) {
try {
database.loadProperty(this, propertyId, propertyGetters[propertyId], propertyTypes[propertyId], propertyUIDs[propertyId]);
} catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException | InstantiationException e) {
throw new RuntimeException(e);
}
}
return (T) loadedPropertyValues[propertyId];
}
public <T> void setProperty(T value) {
StackWalker walker = StackWalker.getInstance(StackWalker.Option.RETAIN_CLASS_REFERENCE);
StackWalker.StackFrame stackFrame = walker.walk(f -> f.skip(1).findFirst().orElse(null));
try {
int propertyId = stackFrame.getDeclaringClass().getDeclaredMethod(stackFrame.getMethodName(), value.getClass()).getAnnotation(DBPropertySetter.class).id();
setProperty(propertyId, value);
} catch (NoSuchMethodException e) {
throw new RuntimeException(e);
}
}
public <T> void setProperty(int propertyId, T value) {
loadedPropertyValues[propertyId] = value;
loadedProperties[propertyId] = true;
}
public void close() {
database.saveObject(this);
}
public final void setFields(Field[] fields, DBDataType[] fieldTypes, long[] fieldUIDs) {
this.fields = fields;
this.fieldTypes = fieldTypes;
this.fieldUIDs = fieldUIDs;
}
/**
 * Installs the property metadata tables, all indexed by property id, and
 * resets the lazy-loading state (no property is considered loaded yet).
 * Called by the database while preloading this object.
 *
 * @param propertyGetters reflective handles of the annotated getters
 * @param propertySetters reflective handles of the annotated setters
 * @param propertyTypes storage type of each property
 * @param propertyUIDs database UID backing each property
 */
public final void setProperties(Method[] propertyGetters, Method[] propertySetters, DBDataType[] propertyTypes, long[] propertyUIDs) {
    this.propertyGetters = propertyGetters;
    this.propertySetters = propertySetters;
    this.propertyTypes = propertyTypes;
    this.propertyUIDs = propertyUIDs;
    this.loadedProperties = new boolean[this.propertyUIDs.length];
    this.loadedPropertyValues = new Object[this.propertyUIDs.length];
}
/**
 * @return this object's unique identifier in the database
 */
public final long getUID() {
    return uid;
}

/**
 * @return the database UIDs backing each field, indexed by field id.
 * NOTE(review): returns the internal array (no defensive copy) — callers
 * must not mutate it.
 */
public long[] getAllFieldUIDs() {
    return fieldUIDs;
}

/**
 * @return the database UIDs backing each property, indexed by property id.
 * NOTE(review): returns the internal array (no defensive copy) — callers
 * must not mutate it.
 */
public long[] getAllPropertyUIDs() {
    return propertyUIDs;
}
}

View File

@ -0,0 +1,74 @@
package org.warp.jcwdb.ann;
import org.warp.jcwdb.FileIndexManager;
import java.io.IOError;
import java.io.IOException;
/**
 * Persists and reads the per-object index tables: for every object UID, the
 * list of field UIDs and the list of property UIDs.
 *
 * <p>On-disk layout: {@code int fieldCount, int propertyCount,
 * long[fieldCount] fields, long[propertyCount] properties}.
 */
public class DBObjectIndicesManager {
    private final FileIndexManager indices;

    public DBObjectIndicesManager(FileIndexManager indices) {
        this.indices = indices;
    }

    /**
     * Reserves an index entry big enough for an object with the given table sizes.
     *
     * @return the UID of the newly allocated entry
     */
    public long allocate(int fieldsCount, int propertiesCount) {
        return indices.add(calculateObjectSize(fieldsCount, propertiesCount));
    }

    /**
     * Writes the field and property UID tables of the object identified by uid.
     *
     * @throws IOException if the underlying index manager fails
     */
    public void set(long uid, long[] fields, long[] properties) throws IOException {
        indices.set(uid, calculateObjectSize(fields, properties), (w) -> {
            w.writeInt(fields.length);
            w.writeInt(properties.length);
            for (int i = 0; i < fields.length; i++) {
                w.writeLong(fields[i]);
            }
            // BUGFIX: this loop previously iterated over fields.length while
            // writing properties[i] — when the two arrays differed in length it
            // either truncated the property table or threw
            // ArrayIndexOutOfBoundsException.
            for (int i = 0; i < properties.length; i++) {
                w.writeLong(properties[i]);
            }
        });
    }

    /**
     * Reads the field and property UID tables of the object identified by uid.
     *
     * @throws IOException if the underlying index manager fails
     * @throws IOError if the stored entry size does not match its declared counts
     */
    public DBObjectInfo get(long uid) throws IOException {
        return indices.get(uid, (i, size) -> {
            long[] indices = new long[i.readInt()];
            long[] properties = new long[i.readInt()];
            // Sanity check: the entry size must match the counts just read.
            if (size != calculateObjectSize(indices, properties)) {
                throw new IOError(new IOException("The size of the object is different!"));
            }
            for (int indicesI = 0; indicesI < indices.length; indicesI++) {
                indices[indicesI] = i.readLong();
            }
            for (int propertiesI = 0; propertiesI < properties.length; propertiesI++) {
                properties[propertiesI] = i.readLong();
            }
            return new DBObjectInfo(indices, properties);
        });
    }

    private int calculateObjectSize(long[] fields, long[] properties) {
        return calculateObjectSize(fields.length, properties.length);
    }

    // Two int counts plus one long per field and per property.
    private int calculateObjectSize(int fieldsCount, int propertiesCount) {
        return Integer.BYTES * 2 + (fieldsCount + propertiesCount) * Long.BYTES;
    }

    /**
     * Immutable pair of (field UIDs, property UIDs) read for one object.
     * Declared static: it never uses the enclosing manager's state.
     */
    public static class DBObjectInfo {
        private final long[] fields;
        private final long[] properties;

        public DBObjectInfo(long[] fields, long[] properties) {
            this.fields = fields;
            this.properties = properties;
        }

        public long[] getFields() {
            return fields;
        }

        public long[] getProperties() {
            return properties;
        }
    }
}

View File

@ -0,0 +1,13 @@
package org.warp.jcwdb.ann;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks a method as the getter of a persisted property.
 * The annotated method is expected to delegate to {@code DBObject.getProperty()},
 * which resolves the property id reflectively from this annotation.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface DBPropertyGetter {
    /** Property id; must match the id used by the corresponding setter. */
    int id();

    /** How the property value is stored in the database. */
    DBDataType type() default DBDataType.OBJECT;
}

View File

@ -0,0 +1,13 @@
package org.warp.jcwdb.ann;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks a method as the setter of a persisted property.
 * The annotated method is expected to delegate to {@code DBObject.setProperty(value)},
 * which resolves the property id reflectively from this annotation.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface DBPropertySetter {
    /** Property id; must match the id used by the corresponding getter. */
    int id();

    /** How the property value is stored in the database. */
    DBDataType type() default DBDataType.OBJECT;
}

View File

@ -0,0 +1,275 @@
package org.warp.jcwdb.ann;
import com.esotericsoftware.kryo.Kryo;
import org.apache.commons.lang3.reflect.FieldUtils;
import org.apache.commons.lang3.reflect.MethodUtils;
import org.warp.jcwdb.FileIndexManager;
import java.io.IOError;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.nio.file.Path;
import java.util.LinkedList;
/**
 * Reflection-based object database. Objects extend {@link DBObject}; their
 * annotated fields ({@code @DBField}) and properties
 * ({@code @DBPropertyGetter}/{@code @DBPropertySetter}) are persisted through
 * a {@link FileIndexManager}, one index entry per field/property.
 */
public class Database {
    public static final long MAX_LOADED_INDICES = 100000;

    private final DBObjectIndicesManager objectIndicesManager;
    private final FileIndexManager indices;
    // Objects registered via watchObject; weak so they can still be collected.
    private final LinkedList<WeakReference<DBObject>> loadedObjects = new LinkedList<>();
    // NOTE(review): Kryo instances are not thread-safe; this shared static
    // instance assumes single-threaded access — confirm before concurrent use.
    private static final Kryo kryo = new Kryo();

    /**
     * Opens (or creates) a database backed by the given files and registers a
     * shutdown hook that saves loaded objects and closes the index files.
     *
     * @param dataFile file holding the serialized data
     * @param metadataFile file holding the index metadata
     * @throws IOException if the underlying index manager cannot be opened
     */
    public Database(Path dataFile, Path metadataFile) throws IOException {
        this.indices = new FileIndexManager(dataFile, metadataFile);
        this.objectIndicesManager = new DBObjectIndicesManager(this.indices);
        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            try {
                Database.this.close();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }));
    }

    /**
     * Saves every still-reachable loaded object, then closes the indices.
     * BUGFIX: objects are saved BEFORE {@code indices.close()}. The previous
     * order closed the index files first, so each {@code DBObject.close()}
     * (which writes through {@link #saveObject}) ran against closed indices.
     *
     * @throws IOException if closing the index manager fails
     */
    private void close() throws IOException {
        for (WeakReference<DBObject> loadedObjectReference : loadedObjects) {
            DBObject loadedObject = loadedObjectReference.get();
            if (loadedObject != null) {
                loadedObject.close();
            }
        }
        indices.close();
    }

    /**
     * Loads the stored field values and the property metadata of an existing
     * object into it. Property VALUES are loaded lazily on first access.
     */
    public void preloadDBObject(DBObject obj) {
        DBObjectIndicesManager.DBObjectInfo UIDs = readUIDs(obj.getUID());
        preloadDBObjectFields(obj, UIDs.getFields());
        preloadDBObjectProperties(obj, UIDs.getProperties());
    }

    // Loads every @DBField-annotated field of obj from its backing UID and
    // records the per-id field metadata on the object.
    private void preloadDBObjectFields(DBObject obj, long[] fieldUIDs) {
        Field[] unorderedFields = getFields(obj);
        // Metadata arrays are indexed by field id, so size them by the biggest id.
        int biggestFieldId = getBiggestFieldId(unorderedFields);
        Field[] fields = new Field[biggestFieldId + 1];
        DBDataType[] orderedFieldTypes = new DBDataType[biggestFieldId + 1];
        for (Field field : unorderedFields) {
            DBField fieldAnnotation = field.getAnnotation(DBField.class);
            int fieldId = fieldAnnotation.id();
            DBDataType fieldType = fieldAnnotation.type();
            try {
                loadField(obj, field, fieldType, fieldUIDs[fieldId]);
            } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException | InstantiationException e) {
                throw new RuntimeException(e);
            }
            fields[fieldId] = field;
            orderedFieldTypes[fieldId] = fieldType;
        }
        obj.setFields(fields, orderedFieldTypes, fieldUIDs);
    }

    // Collects getter/setter metadata for every annotated property and records
    // it on the object. Values themselves are loaded lazily by DBObject.
    private void preloadDBObjectProperties(DBObject obj, long[] propertyUIDs) {
        Method[] unorderedPropertyGetters = getPropertyGetters(obj);
        Method[] unorderedPropertySetters = getPropertySetters(obj);
        // Metadata arrays are indexed by property id, so size them by the
        // biggest id found among both getters and setters. (The original code
        // re-scanned the setters a second time here, redundantly.)
        int biggestPropertyId = Math.max(
                getBiggestPropertyGetterId(unorderedPropertyGetters),
                getBiggestPropertySetterId(unorderedPropertySetters));
        DBDataType[] propertyTypes = new DBDataType[biggestPropertyId + 1];
        Method[] propertyGetters = new Method[biggestPropertyId + 1];
        Method[] propertySetters = new Method[biggestPropertyId + 1];
        for (Method property : unorderedPropertyGetters) {
            DBPropertyGetter propertyAnnotation = property.getAnnotation(DBPropertyGetter.class);
            int propertyId = propertyAnnotation.id();
            propertyTypes[propertyId] = propertyAnnotation.type();
            propertyGetters[propertyId] = property;
        }
        for (Method property : unorderedPropertySetters) {
            DBPropertySetter propertyAnnotation = property.getAnnotation(DBPropertySetter.class);
            int propertyId = propertyAnnotation.id();
            propertyTypes[propertyId] = propertyAnnotation.type();
            propertySetters[propertyId] = property;
        }
        obj.setProperties(propertyGetters, propertySetters, propertyTypes, propertyUIDs);
    }

    // Reflection helpers: discover annotated members of the object's class.
    private Method[] getPropertyGetters(DBObject obj) {
        return MethodUtils.getMethodsWithAnnotation(obj.getClass(), DBPropertyGetter.class);
    }

    private Method[] getPropertySetters(DBObject obj) {
        return MethodUtils.getMethodsWithAnnotation(obj.getClass(), DBPropertySetter.class);
    }

    private Field[] getFields(DBObject obj) {
        return FieldUtils.getFieldsWithAnnotation(obj.getClass(), DBField.class);
    }

    // Biggest id among @DBPropertyGetter methods, or -1 if there are none.
    private int getBiggestPropertyGetterId(Method[] unorderedPropertyGetters) {
        int biggestPropertyId = -1;
        for (Method property : unorderedPropertyGetters) {
            int propertyId = property.getAnnotation(DBPropertyGetter.class).id();
            if (propertyId > biggestPropertyId) {
                biggestPropertyId = propertyId;
            }
        }
        return biggestPropertyId;
    }

    // Biggest id among @DBPropertySetter methods, or -1 if there are none.
    private int getBiggestPropertySetterId(Method[] unorderedPropertySetters) {
        int biggestPropertyId = -1;
        for (Method property : unorderedPropertySetters) {
            int propertyId = property.getAnnotation(DBPropertySetter.class).id();
            if (propertyId > biggestPropertyId) {
                biggestPropertyId = propertyId;
            }
        }
        return biggestPropertyId;
    }

    // Biggest id among @DBField fields, or -1 if there are none.
    private int getBiggestFieldId(Field[] unorderedFields) {
        int biggestFieldId = -1;
        for (Field field : unorderedFields) {
            int fieldId = field.getAnnotation(DBField.class).id();
            if (fieldId > biggestFieldId) {
                biggestFieldId = fieldId;
            }
        }
        return biggestFieldId;
    }

    /**
     * Loads one property value from the database into the object's cache.
     *
     * @param obj the target object
     * @param propertyId index into the object's property tables
     * @param property the property's GETTER method (its return type is the stored class)
     * @param propertyType how the value is stored
     * @param propertyUID the UID of the backing index entry
     * @throws NoSuchMethodException if a DATABASE_OBJECT class lacks a (Database, long) constructor
     */
    public void loadProperty(DBObject obj, int propertyId, Method property, DBDataType propertyType, long propertyUID) throws NoSuchMethodException, IllegalAccessException, InvocationTargetException, InstantiationException {
        switch (propertyType) {
            case DATABASE_OBJECT:
                // BUGFIX: instantiate the getter's RETURN type, not the class
                // declaring the getter — they only coincide when a class
                // references itself (as in the bundled example).
                DBObject fieldDBObjectValue = (DBObject) property.getReturnType().getConstructor(Database.class, long.class).newInstance(this, propertyUID);
                obj.setLoadedProperty(propertyId, fieldDBObjectValue);
                break;
            case OBJECT:
                Object fieldObjectValue = loadObject(propertyUID);
                obj.setLoadedProperty(propertyId, fieldObjectValue);
                break;
            case INTEGER:
                int fieldIntValue = loadInt(propertyUID);
                obj.setLoadedProperty(propertyId, fieldIntValue);
                break;
            default:
                // An unhandled enum constant is a programming error, not a null.
                throw new IllegalArgumentException("Unknown Field Type");
        }
    }

    /**
     * Loads one field value from the database and injects it into the object.
     *
     * @param obj the target object
     * @param field the field to populate (its declared type is the stored class)
     * @param fieldType how the value is stored
     * @param fieldUID the UID of the backing index entry
     * @throws NoSuchMethodException if a DATABASE_OBJECT class lacks a (Database, long) constructor
     */
    public void loadField(DBObject obj, Field field, DBDataType fieldType, long fieldUID) throws NoSuchMethodException, IllegalAccessException, InvocationTargetException, InstantiationException {
        switch (fieldType) {
            case DATABASE_OBJECT:
                // BUGFIX: instantiate the field's declared TYPE, not the class
                // declaring the field (see loadProperty above).
                DBObject fieldDBObjectValue = (DBObject) field.getType().getConstructor(Database.class, long.class).newInstance(this, fieldUID);
                field.set(obj, fieldDBObjectValue);
                break;
            case OBJECT:
                Object fieldObjectValue = loadObject(fieldUID);
                field.set(obj, fieldObjectValue);
                break;
            case INTEGER:
                int fieldIntValue = loadInt(fieldUID);
                field.setInt(obj, fieldIntValue);
                break;
            default:
                // An unhandled enum constant is a programming error, not a null.
                throw new IllegalArgumentException("Unknown Field Type");
        }
    }

    /**
     * Reads and deserializes an arbitrary object; a zero-size entry reads as null.
     */
    @SuppressWarnings("unchecked")
    public <T> T loadObject(long uid) {
        try {
            return (T) indices.get(uid, (i, size) -> size == 0 ? null : kryo.readClassAndObject(i));
        } catch (IOException ex) {
            throw new IOError(ex);
        }
    }

    /**
     * Reads a primitive int; a zero-size (never-written) entry reads as 0.
     */
    public int loadInt(long uid) {
        try {
            return indices.get(uid, (i, size) -> size == 0 ? 0 : i.readInt());
        } catch (IOException ex) {
            throw new IOError(ex);
        }
    }

    /**
     * Registers an object so it gets saved when this database closes.
     */
    public void watchObject(DBObject obj) {
        loadedObjects.add(new WeakReference<>(obj));
    }

    /**
     * Reads the stored field/property UID tables of an existing object.
     *
     * @param uid the object's unique identifier
     * @return the object's field and property UIDs
     * @throws IOError wrapping any IOException from the index manager
     */
    private DBObjectIndicesManager.DBObjectInfo readUIDs(long uid) {
        try {
            return objectIndicesManager.get(uid);
        } catch (IOException e) {
            throw new IOError(e);
        }
    }

    /**
     * Persists the object's field/property UID tables.
     * The println is debug output kept from the original implementation.
     */
    public void saveObject(DBObject obj) {
        System.out.println("Saving object " + obj.getUID());
        try {
            objectIndicesManager.set(obj.getUID(), obj.getAllFieldUIDs(), obj.getAllPropertyUIDs());
        } catch (IOException e) {
            throw new IOError(e);
        }
    }

    /**
     * Allocates storage for a brand-new object: one header entry plus one
     * empty index entry per field and per property.
     *
     * @param obj the new object (only its class's annotations are inspected)
     * @return the UID of the newly allocated object
     */
    public long newDBObject(DBObject obj) {
        int fieldsCount = getBiggestFieldId(getFields(obj)) + 1;
        int propertiesCount = Math.max(
                getBiggestPropertyGetterId(getPropertyGetters(obj)),
                getBiggestPropertySetterId(getPropertySetters(obj))) + 1;
        long uid = objectIndicesManager.allocate(fieldsCount, propertiesCount);
        long[] fields = new long[fieldsCount];
        for (int i = 0; i < fieldsCount; i++) {
            fields[i] = indices.add(0);
        }
        long[] properties = new long[propertiesCount];
        for (int i = 0; i < propertiesCount; i++) {
            properties[i] = indices.add(0);
        }
        try {
            objectIndicesManager.set(uid, fields, properties);
        } catch (IOException e) {
            throw new IOError(e);
        }
        return uid;
    }
}

View File

@ -0,0 +1,40 @@
package org.warp.jcwdb.ann.exampleimpl;
import org.warp.jcwdb.ann.*;
@DBClass(classTypeId = 0)
public class Class1 extends DBObject {
public Class1(Database database) {
super(database);
}
public Class1(Database database, long uid) {
super(database, uid);
}
@DBField(id = 0, type = DBDataType.OBJECT)
public String value1;
@DBField(id = 1, type = DBDataType.INTEGER)
public int value2;
@DBPropertyGetter(id = 0, type = DBDataType.OBJECT)
public String getValue3() {
return getProperty();
}
@DBPropertySetter(id = 0, type = DBDataType.OBJECT)
public void setValue3(String value) {
setProperty(value);
}
@DBPropertyGetter(id = 1, type = DBDataType.DATABASE_OBJECT)
public Class1 getValue4() {
return getProperty();
}
@DBPropertySetter(id = 1, type = DBDataType.DATABASE_OBJECT)
public void setValue4(Class1 value) {
setProperty(value);
}
}

View File

@ -0,0 +1,32 @@
package org.warp.jcwdb.ann.exampleimpl;
import org.warp.jcwdb.ann.Database;
import java.io.IOException;
import java.nio.file.Paths;
/**
 * Smoke-test entry point for the reflection-based database: writes fields and
 * properties on an object stored at UID 0, links a nested object, saves both,
 * and reads the nested property back.
 */
public class Main {
    public static void main(String[] args) throws IOException {
        Database db = new Database(Paths.get("database_temp.db"), Paths.get("database_temp.idx"));
        // Wrap the object stored at UID 0 (assumes it was allocated on a previous run
        // or that the file layout tolerates a fresh UID 0).
        Class1 class1 = new Class1(db, 0);
        class1.value1 = "ciao";
        class1.value2 = 3;
        System.out.println("value3="+class1.getValue3());
        class1.setValue3("Ciao 3");
        System.out.println("value3="+class1.getValue3());
        Class1 nested = new Class1(db);
        class1.setValue4(nested);
        nested.setValue3("Ciao nested 3");
        try {
            class1.close();
            nested.close();
            System.out.println(class1.getValue4().getValue3());
        } catch (Exception ex) {
            // FIX: the original had a second `catch (Throwable)` clause after this
            // one; it could only ever catch Errors, and swallowing Errors hides
            // JVM-level failures that should propagate. A single Exception handler
            // keeps the demo resilient without masking fatal conditions.
            ex.printStackTrace();
        }
    }
}

View File

@ -1,9 +0,0 @@
package org.warp.jcwdb.exampleimpl;
/**
 * Base type for the example animals; stores only the number of legs.
 */
public abstract class Animal {
    // Number of legs; subclasses assign this in their constructors.
    protected int legsCount;

    /**
     * Tells whether the given animal walks on exactly four legs.
     *
     * @param a the animal to inspect
     * @return true if it has exactly four legs
     */
    public static boolean hasFourLegs(Animal a) {
        boolean fourLegged = (a.legsCount == 4);
        return fourLegged;
    }
}

View File

@ -1,101 +0,0 @@
package org.warp.jcwdb.exampleimpl;
import org.warp.jcwdb.EntryReference;
import org.warp.jcwdb.JCWDatabase;
import org.warp.jcwdb.LightList;
import it.unimi.dsi.fastutil.objects.ObjectArrayList;
import it.unimi.dsi.fastutil.objects.ObjectList;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.function.Predicate;
/**
 * Benchmark/demo for the list-based JCWDatabase API: bulk-inserts two million
 * StrangeAnimals, filters them, re-scans them, cleans the database and prints
 * timing and memory figures at each stage.
 *
 * Usage: args[0] = data file, args[1] = index file,
 *        args[2] (optional) = "true" to delete both files first.
 */
public class App {
    // Timestamp taken after db.clean(); written inside the editValue lambda
    // and read after db.close() in main (single-threaded, hence a static field).
    static long time3;
    public static void main(String[] args) throws IOException {
        if (args.length > 2 && Boolean.parseBoolean(args[2])) {
            Files.delete(Paths.get(args[0]));
            Files.delete(Paths.get(args[1]));
        }
        System.out.println("Loading database...");
        long time0 = System.currentTimeMillis();
        JCWDatabase db = new JCWDatabase(Paths.get(args[0]), Paths.get(args[1]));
        db.registerClass(StrangeAnimal.class, 0);
        try {
            long time01 = System.currentTimeMillis();
            System.out.println("Time elapsed: " + (time01 - time0));
            System.out.println("Loading root...");
            EntryReference<LightList<Animal>> rootRef = db.getRoot(Animal.class);
            rootRef.editValue((root, saver) -> {
                long time1 = System.currentTimeMillis();
                System.out.println("Time elapsed: " + (time1 - time01));
                System.out.println("Root size: " + root.size());
                System.out.println("Root:");
                // for (int i = 0; i < root.size(); i++) {
                // System.out.println(" - " + root.get(i));
                // }
                long prectime = System.currentTimeMillis();
                // Insert 2M animals, logging throughput every 200k entries.
                for (int i = 0; i < 2000000/* 2000000 */; i++) {
                    Animal animal = new StrangeAnimal(i % 40);
                    root.addEntry(animal);
                    if (i > 0 && i % 200000 == 0) {
                        long precprectime = prectime;
                        prectime = System.currentTimeMillis();
                        System.out.println("Element " + i + " (" + (prectime - precprectime) + "ms)" + " Total Time: " + (prectime - time1));
                    }
                }
                long time2 = System.currentTimeMillis();
                saver.save();
                System.out.println("Root size: " + root.size());
                System.out.println("Time elapsed: " + (time2 - time1));
                System.out.println("Used memory: "
                        + ((Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1024 / 1024) + "MB");
                long time2_0 = System.currentTimeMillis();
                System.out.println("Filtering strings...");
                long oldSize = root.size();
                // Remove every four-legged animal from the persisted list.
                root.removeIf(Animal::hasFourLegs);
                long time2_1 = System.currentTimeMillis();
                System.out.println("RemoveIf(x) removed items: " + (oldSize - root.size()));
                System.out.println("Time elapsed: " + (time2_1 - time2_0));
                ObjectList<Animal> results = new ObjectArrayList<>();
                System.out.println("Retrieving items...");
                // Full scan; after the removeIf above no four-legged animals
                // should remain, so this measures read throughput.
                root.forEachReference((valueReference) -> {
                    Animal value = valueReference.getValueReadOnlyUnsafe();
                    if (Animal.hasFourLegs(value)) {
                        results.add(value);
                    }
                    //System.out.println("val:" + value);
                });
                long time2_2 = System.currentTimeMillis();
                System.out.println("Matches: " + results.size());
                System.out.println("Time elapsed: " + (time2_2 - time2_1));
                System.out.println("Used memory: "
                        + ((Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1024 / 1024) + "MB");
                System.out.println("Used memory: " + ((Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1024 / 1024) + "MB");
                System.out.println("Cleaning database (to reduce the amount of used memory and detect memory leaks)...");
                db.clean();
                time3 = System.currentTimeMillis();
                System.out.println("Used memory: " + ((Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1024 / 1024) + "MB");
                System.out.println("Time elapsed: " + (time3 - time2_2));
                System.out.println("Saving database...");
                System.out.println("Root size: " + root.size());
            });
            db.close();
            long time4 = System.currentTimeMillis();
            System.out.println("Time elapsed: " + (time4 - time3));
        } catch (Exception ex) {
            ex.printStackTrace();
        } finally {
            // Close only if the try-body's db.close() was not reached.
            if (db.isOpen()) {
                db.close();
            }
        }
    }

    /** Convenience negation helper for predicates. */
    public static <T> Predicate<T> not(Predicate<T> t) {
        return t.negate();
    }
}

View File

@ -1,12 +0,0 @@
package org.warp.jcwdb.exampleimpl;
/**
 * Example animal with twelve legs.
 */
public class Cat extends Animal {
    public Cat() {
        legsCount = 12;
    }

    /** @return a human-readable dump of this cat's state */
    @Override
    public String toString() {
        StringBuilder text = new StringBuilder("Cat [legsCount=");
        text.append(legsCount).append("]");
        return text.toString();
    }
}

View File

@ -1,12 +0,0 @@
package org.warp.jcwdb.exampleimpl;
/**
 * Example animal with four legs.
 */
public class Dog extends Animal {
    public Dog() {
        legsCount = 4;
    }

    /** @return a human-readable dump of this dog's state */
    @Override
    public String toString() {
        StringBuilder text = new StringBuilder("Dog [legsCount=");
        text.append(legsCount).append("]");
        return text.toString();
    }
}

View File

@ -1,15 +0,0 @@
package org.warp.jcwdb.exampleimpl;
/**
 * Example animal with a configurable number of legs.
 * The no-arg constructor (zero legs) is kept for serialization frameworks.
 */
public class StrangeAnimal extends Animal {
    public StrangeAnimal() {
        this(0);
    }

    /** @param legs number of legs this animal has */
    public StrangeAnimal(int legs) {
        super();
        legsCount = legs;
    }

    /** @return a human-readable dump of this animal's state */
    @Override
    public String toString() {
        StringBuilder text = new StringBuilder("StrangeAnimal [legsCount=");
        text.append(legsCount).append("]");
        return text.toString();
    }
}