Use markers

Andrea Cavalli 2021-09-10 12:13:52 +02:00
parent 0a378bc0f1
commit 7feeb4a9ce
4 changed files with 31 additions and 19 deletions

it/cavallium/dbengine/database/LLUtils.java

@@ -43,6 +43,8 @@ import org.jetbrains.annotations.Nullable;
 import org.rocksdb.RocksDB;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.slf4j.Marker;
+import org.slf4j.MarkerFactory;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
 import reactor.util.function.Tuple2;
@@ -52,6 +54,9 @@ import reactor.util.function.Tuple3;
 public class LLUtils {
 private static final Logger logger = LoggerFactory.getLogger(LLUtils.class);
+public static final Marker MARKER_DB_BUFFER = MarkerFactory.getMarker("DB_BUFFER");
+public static final Marker MARKER_ROCKSDB = MarkerFactory.getMarker("ROCKSDB");
+public static final Marker MARKER_LUCENE = MarkerFactory.getMarker("LUCENE");
 private static final ByteBuffer EMPTY_BYTE_BUFFER = ByteBuffer.allocateDirect(0);
 private static final byte[] RESPONSE_TRUE = new byte[]{1};
@@ -687,17 +692,17 @@ public class LLUtils {
 }
 private static void discardLLEntry(LLEntry entry) {
-logger.trace("Releasing discarded Buffer");
+logger.trace(MARKER_ROCKSDB, "Releasing discarded Buffer");
 entry.close();
 }
 private static void discardLLRange(LLRange range) {
-logger.trace("Releasing discarded Buffer");
+logger.trace(MARKER_ROCKSDB, "Releasing discarded LLRange");
 range.close();
 }
 private static void discardLLDelta(LLDelta delta) {
-logger.trace("Releasing discarded LLDelta");
+logger.trace(MARKER_ROCKSDB, "Releasing discarded LLDelta");
 delta.close();
 }
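
The markers introduced above are plain SLF4J markers: MarkerFactory.getMarker(name) returns a process-wide instance keyed by name, and every Logger method has an overload that takes a Marker as its first argument. A minimal stand-alone sketch of the pattern this commit adopts (class name and log messages here are illustrative, not taken from the repository):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.Marker;
import org.slf4j.MarkerFactory;

// Illustrative example only; not part of the commit.
public class MarkerUsageExample {

    private static final Logger logger = LoggerFactory.getLogger(MarkerUsageExample.class);
    // MarkerFactory caches markers by name, so every lookup of "ROCKSDB" returns the same instance.
    private static final Marker MARKER_ROCKSDB = MarkerFactory.getMarker("ROCKSDB");

    public static void main(String[] args) {
        // Marker-aware guard: a backend filter can veto this category without changing the logger level.
        if (logger.isTraceEnabled(MARKER_ROCKSDB)) {
            logger.trace(MARKER_ROCKSDB, "Reading {}", "example-key");
        }
        // The same marker can tag error events, replacing printStackTrace().
        logger.error(MARKER_ROCKSDB, "Failed to read value", new RuntimeException("example failure"));
    }
}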

it/cavallium/dbengine/database/disk/LLLocalDictionary.java

@@ -1,6 +1,7 @@
 package it.cavallium.dbengine.database.disk;
 import static io.netty5.buffer.Unpooled.wrappedBuffer;
+import static it.cavallium.dbengine.database.LLUtils.MARKER_ROCKSDB;
 import static it.cavallium.dbengine.database.LLUtils.fromByteArray;
 import static java.util.Objects.requireNonNull;
@@ -266,8 +267,8 @@ public class LLLocalDictionary implements LLDictionary {
 stamp = 0;
 }
 try {
-if (logger.isTraceEnabled()) {
-logger.trace("Reading {}", LLUtils.toStringSafe(key));
+if (logger.isTraceEnabled(MARKER_ROCKSDB)) {
+logger.trace(MARKER_ROCKSDB, "Reading {}", LLUtils.toStringSafe(key));
 }
 return dbGet(cfh, resolveSnapshot(snapshot), key.send(), existsAlmostCertainly);
 } finally {
@@ -603,7 +604,7 @@ public class LLLocalDictionary implements LLDictionary {
 }
 try {
 if (logger.isTraceEnabled()) {
-logger.trace("Writing {}: {}",
+logger.trace(MARKER_ROCKSDB, "Writing {}: {}",
 LLUtils.toStringSafe(key), LLUtils.toStringSafe(value));
 }
 dbPut(cfh, null, key.send(), value.send());
@@ -656,7 +657,7 @@ public class LLLocalDictionary implements LLDictionary {
 }
 try {
 if (logger.isTraceEnabled()) {
-logger.trace("Reading {}", LLUtils.toStringSafe(key));
+logger.trace(MARKER_ROCKSDB, "Reading {}", LLUtils.toStringSafe(key));
 }
 while (true) {
 @Nullable Buffer prevData;
@@ -708,7 +709,7 @@ public class LLLocalDictionary implements LLDictionary {
 }
 }
 if (logger.isTraceEnabled()) {
-logger.trace("Deleting {}", LLUtils.toStringSafe(key));
+logger.trace(MARKER_ROCKSDB, "Deleting {}", LLUtils.toStringSafe(key));
 }
 dbDelete(cfh, null, key.send());
 } else if (newData != null
@@ -726,7 +727,7 @@ public class LLLocalDictionary implements LLDictionary {
 }
 }
 if (logger.isTraceEnabled()) {
-logger.trace("Writing {}: {}", LLUtils.toStringSafe(key), LLUtils.toStringSafe(newData));
+logger.trace(MARKER_ROCKSDB, "Writing {}: {}", LLUtils.toStringSafe(key), LLUtils.toStringSafe(newData));
 }
 Buffer dataToPut;
 if (updateReturnMode == UpdateReturnMode.GET_NEW_VALUE) {
@@ -798,7 +799,7 @@ public class LLLocalDictionary implements LLDictionary {
 }
 try {
 if (logger.isTraceEnabled()) {
-logger.trace("Reading {}", LLUtils.toStringSafe(key));
+logger.trace(MARKER_ROCKSDB, "Reading {}", LLUtils.toStringSafe(key));
 }
 while (true) {
 @Nullable Buffer prevData;
@@ -850,7 +851,7 @@ public class LLLocalDictionary implements LLDictionary {
 }
 }
 if (logger.isTraceEnabled()) {
-logger.trace("Deleting {}", LLUtils.toStringSafe(key));
+logger.trace(MARKER_ROCKSDB, "Deleting {}", LLUtils.toStringSafe(key));
 }
 dbDelete(cfh, null, key.send());
 } else if (newData != null
@@ -868,7 +869,7 @@ public class LLLocalDictionary implements LLDictionary {
 }
 }
 if (logger.isTraceEnabled()) {
-logger.trace("Writing {}: {}",
+logger.trace(MARKER_ROCKSDB, "Writing {}: {}",
 LLUtils.toStringSafe(key), LLUtils.toStringSafe(newData));
 }
 assert key.isAccessible();
@@ -942,7 +943,7 @@ public class LLLocalDictionary implements LLDictionary {
 }
 try {
 if (logger.isTraceEnabled()) {
-logger.trace("Deleting {}", LLUtils.toStringSafe(key));
+logger.trace(MARKER_ROCKSDB, "Deleting {}", LLUtils.toStringSafe(key));
 }
 dbDelete(cfh, null, key.send());
 return null;
@@ -986,7 +987,7 @@ public class LLLocalDictionary implements LLDictionary {
 }
 try {
 if (logger.isTraceEnabled()) {
-logger.trace("Reading {}", LLUtils.toArray(key));
+logger.trace(MARKER_ROCKSDB, "Reading {}", LLUtils.toArray(key));
 }
 var data = new Holder<byte[]>();
 if (db.keyMayExist(cfh, LLUtils.toArray(key), data)) {
@@ -2211,7 +2212,7 @@ public class LLLocalDictionary implements LLDictionary {
 try {
 return db.getLongProperty(cfh, "rocksdb.estimate-num-keys");
 } catch (RocksDBException e) {
-e.printStackTrace();
+logger.error(MARKER_ROCKSDB, "Failed to get RocksDB estimated keys count property", e);
 return 0;
 }
 } else if (PARALLEL_EXACT_SIZE) {
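
Passing the marker explicitly, and guarding with logger.isTraceEnabled(MARKER_ROCKSDB) as in the first hunk above, lets the logging backend switch a whole category of messages on or off independently of logger levels. Below is a sketch of such a filter, assuming Logback Classic is the SLF4J backend (the commit does not say which backend the project uses); silencing DB_BUFFER traces here is only an illustration:

import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.Logger;
import ch.qos.logback.classic.turbo.TurboFilter;
import ch.qos.logback.core.spi.FilterReply;
import org.slf4j.Marker;

// Hypothetical Logback turbo filter; it would be registered in logback.xml or on the LoggerContext.
public class DenyDbBufferTraceFilter extends TurboFilter {

    @Override
    public FilterReply decide(Marker marker, Logger logger, Level level,
            String format, Object[] params, Throwable t) {
        // Drop TRACE events tagged with DB_BUFFER; isTraceEnabled(marker) then also returns false,
        // so guarded call sites skip building the message entirely.
        if (marker != null && marker.contains("DB_BUFFER") && Level.TRACE.equals(level)) {
            return FilterReply.DENY;
        }
        // Let every other event be decided by the normal logger levels.
        return FilterReply.NEUTRAL;
    }
}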

it/cavallium/dbengine/database/disk/LLLocalKeyValueDatabase.java

@@ -1,5 +1,7 @@
 package it.cavallium.dbengine.database.disk;
+import static it.cavallium.dbengine.database.LLUtils.MARKER_ROCKSDB;
 import io.netty5.buffer.api.BufferAllocator;
 import io.netty5.util.internal.PlatformDependent;
 import it.cavallium.dbengine.database.Column;
@@ -639,11 +641,11 @@ public class LLLocalKeyValueDatabase implements LLKeyValueDatabase {
 Files.deleteIfExists(path);
 System.out.println("Deleted log file \"" + path + "\"");
 } catch (IOException e) {
-e.printStackTrace();
+logger.error(MARKER_ROCKSDB, "Failed to delete log file \"" + path + "\"", e);
 }
 });
 } catch (IOException ex) {
-ex.printStackTrace();
+logger.error(MARKER_ROCKSDB, "Failed to delete unused log files", ex);
 }
 }
 }
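
The printStackTrace() calls above become marker-tagged logger.error(...) calls with the exception passed last, which attaches the stack trace to the log event. SLF4J also accepts a parameterized message in the same position; a small sketch showing both forms (class and method names are illustrative):

import java.io.IOException;
import java.nio.file.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.Marker;
import org.slf4j.MarkerFactory;

// Stand-alone sketch, not part of the commit.
public class ErrorLoggingExample {

    private static final Logger logger = LoggerFactory.getLogger(ErrorLoggingExample.class);
    private static final Marker MARKER_ROCKSDB = MarkerFactory.getMarker("ROCKSDB");

    static void report(Path path, IOException e) {
        // Concatenated message, as in the hunk above.
        logger.error(MARKER_ROCKSDB, "Failed to delete log file \"" + path + "\"", e);
        // Equivalent parameterized form: SLF4J fills {} with the path and still treats the
        // trailing Throwable as the attached exception.
        logger.error(MARKER_ROCKSDB, "Failed to delete log file \"{}\"", path, e);
    }
}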

it/cavallium/dbengine/database/disk/LLLocalLuceneIndex.java

@@ -1,5 +1,8 @@
 package it.cavallium.dbengine.database.disk;
+import static it.cavallium.dbengine.database.LLUtils.MARKER_LUCENE;
+import static it.cavallium.dbengine.database.LLUtils.MARKER_ROCKSDB;
 import it.cavallium.dbengine.client.DirectIOOptions;
 import it.cavallium.dbengine.client.IndicizerAnalyzers;
 import it.cavallium.dbengine.client.IndicizerSimilarities;
@@ -255,6 +258,7 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 .<Void>fromCallable(() -> {
 activeTasks.register();
 try {
+//noinspection BlockingMethodInNonBlockingContext
 indexWriter.addDocuments(LLUtils.toDocumentsFromEntries(documentsList));
 return null;
 } finally {
@@ -402,7 +406,7 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 if (similarity instanceof TFIDFSimilarity) {
 mlt.setSimilarity((TFIDFSimilarity) similarity);
 } else {
-logger.trace("Using an unsupported similarity algorithm for MoreLikeThis:"
+logger.trace(MARKER_ROCKSDB, "Using an unsupported similarity algorithm for MoreLikeThis:"
 + " {}. You must use a similarity instance based on TFIDFSimilarity!", similarity);
 }
@@ -523,7 +527,7 @@ public class LLLocalLuceneIndex implements LLLuceneIndex {
 indexWriter.commit();
 }
 } catch (IOException ex) {
-ex.printStackTrace();
+logger.error(MARKER_LUCENE, "Failed to execute a scheduled commit", ex);
 }
 }
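
The last hunk replaces printStackTrace() in what appears to be a periodic Lucene commit task, tagging the failure with MARKER_LUCENE. A hypothetical sketch of that pattern, assuming an already-configured IndexWriter and an externally supplied scheduler; the one-minute interval, the class name, and the hasUncommittedChanges() guard (which stands in for the condition elided from the hunk) are illustrative:

import java.io.IOException;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import org.apache.lucene.index.IndexWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.Marker;
import org.slf4j.MarkerFactory;

// Hypothetical sketch, not the project's actual scheduling code.
public class ScheduledCommitExample {

    private static final Logger logger = LoggerFactory.getLogger(ScheduledCommitExample.class);
    private static final Marker MARKER_LUCENE = MarkerFactory.getMarker("LUCENE");

    public static void scheduleCommits(IndexWriter indexWriter, ScheduledExecutorService scheduler) {
        scheduler.scheduleAtFixedRate(() -> {
            try {
                // Only pay the commit cost when there is something to flush.
                if (indexWriter.hasUncommittedChanges()) {
                    indexWriter.commit();
                }
            } catch (IOException ex) {
                // Logging through the marker keeps Lucene maintenance noise separable from DB logs.
                logger.error(MARKER_LUCENE, "Failed to execute a scheduled commit", ex);
            }
        }, 1, 1, TimeUnit.MINUTES);
    }
}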