package it.cavallium.dbengine.lucene;

import io.netty5.buffer.api.Buffer;
import java.util.ArrayList;
import java.util.function.Function;

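/**
 * {@code HugePqCodec} implementation that stores an {@code LLFieldDoc} in a single {@code Buffer}.
 * Layout, as implied by the get/set helpers below (offsets in bytes): score at 0 (lex-encoded
 * float), doc at 4 (lex-encoded int), shardIndex at 8 (lex-encoded int), fields count at 12
 * (char), followed by one type byte (the {@code FieldType} ordinal) and the encoded value of
 * each sort field.
 */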
public class LLFieldDocCodec implements HugePqCodec<LLFieldDoc> {

	private enum FieldType {
		FLOAT,
		DOUBLE,
		INT,
		LONG;

		public byte ordinalByte() {
			return (byte) ordinal();
		}
	}

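	// Serialization: the fixed-size header is written with the absolute set* helpers below, then
	// one type byte (the FieldType ordinal) and the encoded value are appended for each field.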
	@Override
	public Buffer serialize(Function<Integer, Buffer> allocator, LLFieldDoc data) {
		int fieldsDataSize = 0;
		byte[] fieldTypes = new byte[data.fields().size()];
		int fieldId = 0;
		for (Object field : data.fields()) {
			assert field != null;
			if (field instanceof Float) {
				fieldsDataSize += Float.BYTES;
				fieldTypes[fieldId] = FieldType.FLOAT.ordinalByte();
			} else if (field instanceof Double) {
				fieldsDataSize += Double.BYTES;
				fieldTypes[fieldId] = FieldType.DOUBLE.ordinalByte();
			} else if (field instanceof Integer) {
				fieldsDataSize += Integer.BYTES;
				fieldTypes[fieldId] = FieldType.INT.ordinalByte();
			} else if (field instanceof Long) {
				fieldsDataSize += Long.BYTES;
				fieldTypes[fieldId] = FieldType.LONG.ordinalByte();
			} else {
				throw new UnsupportedOperationException("Unsupported field type " + field.getClass());
			}
			fieldId++;
		}
		// Fixed header plus one type byte per field plus the encoded field values.
		int size = Float.BYTES + Integer.BYTES + Integer.BYTES + Character.BYTES
				+ (data.fields().size() * Byte.BYTES) + fieldsDataSize;
		var buf = allocator.apply(size);
		setScore(buf, data.score());
		setDoc(buf, data.doc());
		setShardIndex(buf, data.shardIndex());
		setFieldsCount(buf, data.fields().size());
		// The header was written with absolute setters, so move the writer just past it before
		// appending the per-field data; deserialize() reads the fields starting at this offset.
		buf.writerOffset(Float.BYTES + Integer.BYTES + Integer.BYTES + Character.BYTES);

		fieldId = 0;
		for (Object field : data.fields()) {
			assert field != null;
			buf.writeByte(fieldTypes[fieldId]);
			if (field instanceof Float val) {
				buf.writeFloat(val);
			} else if (field instanceof Double val) {
				buf.writeDouble(val);
			} else if (field instanceof Integer val) {
				buf.writeInt(val);
			} else if (field instanceof Long val) {
				buf.writeLong(val);
			} else {
				throw new UnsupportedOperationException("Unsupported field type " + field.getClass());
			}
			fieldId++;
		}
		assert buf.writableBytes() == 0;
		return buf;
	}
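
	// Deserialization mirrors serialize(): skip the fixed header (read afterwards via the
	// absolute get* helpers), then read one (type byte, value) pair per field.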
	@Override
	public LLFieldDoc deserialize(Buffer buf) {
		var fieldsCount = getFieldsCount(buf);
		ArrayList<Object> fields = new ArrayList<>(fieldsCount);
		buf.readerOffset(Float.BYTES + Integer.BYTES + Integer.BYTES + Character.BYTES);
		for (char i = 0; i < fieldsCount; i++) {
			fields.add(switch (FieldType.values()[buf.readByte()]) {
				case FLOAT -> buf.readFloat();
				case DOUBLE -> buf.readDouble();
				case INT -> buf.readInt();
				case LONG -> buf.readLong();
			});
		}
		assert buf.readableBytes() == 0;
		return new LLFieldDoc(getDoc(buf), getScore(buf), getShardIndex(buf), fields);
	}
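
	// Absolute-offset accessors for the fixed header (score, doc, shardIndex, fields count).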
	private static float getScore(Buffer hit) {
		return HugePqCodec.getLexFloat(hit, 0, false);
	}

	private static int getDoc(Buffer hit) {
		return HugePqCodec.getLexInt(hit, Float.BYTES, true);
	}

	private static int getShardIndex(Buffer hit) {
		return HugePqCodec.getLexInt(hit, Float.BYTES + Integer.BYTES, false);
	}

	private char getFieldsCount(Buffer hit) {
		return hit.getChar(Float.BYTES + Integer.BYTES + Integer.BYTES);
	}

	private static void setScore(Buffer hit, float score) {
		HugePqCodec.setLexFloat(hit, 0, false, score);
	}

	private static void setDoc(Buffer hit, int doc) {
		HugePqCodec.setLexInt(hit, Float.BYTES, true, doc);
	}

	private static void setShardIndex(Buffer hit, int shardIndex) {
		HugePqCodec.setLexInt(hit, Float.BYTES + Integer.BYTES, false, shardIndex);
	}

	private void setFieldsCount(Buffer hit, int size) {
		hit.setChar(Float.BYTES + Integer.BYTES + Integer.BYTES, (char) size);
	}
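
	// Note: the copy is shallow; the original fields list is reused.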
	@Override
	public LLFieldDoc clone(LLFieldDoc obj) {
		return new LLFieldDoc(obj.doc(), obj.score(), obj.shardIndex(), obj.fields());
	}
}