diff --git a/tools/db_bench_tool.cc b/tools/db_bench_tool.cc
index cffd2897f..c91814663 100644
--- a/tools/db_bench_tool.cc
+++ b/tools/db_bench_tool.cc
@@ -96,7 +96,12 @@ using GFLAGS_NAMESPACE::ParseCommandLineFlags;
 using GFLAGS_NAMESPACE::RegisterFlagValidator;
 using GFLAGS_NAMESPACE::SetUsageMessage;
 
-#ifndef ROCKSDB_LITE
+#ifdef ROCKSDB_LITE
+#define IF_ROCKSDB_LITE(Then, Else) Then
+#else
+#define IF_ROCKSDB_LITE(Then, Else) Else
+#endif
+
 DEFINE_string(
     benchmarks,
     "fillseq,"
@@ -116,9 +121,11 @@ DEFINE_string(
     "compact,"
     "compactall,"
     "flush,"
+IF_ROCKSDB_LITE("",
     "compact0,"
     "compact1,"
     "waitforcompaction,"
+)
     "multireadrandom,"
     "mixgraph,"
     "readseq,"
@@ -208,9 +215,11 @@ DEFINE_string(
     "Meta operations:\n"
     "\tcompact -- Compact the entire DB; If multiple, randomly choose one\n"
     "\tcompactall -- Compact the entire DB\n"
+IF_ROCKSDB_LITE("",
     "\tcompact0 -- compact L0 into L1\n"
     "\tcompact1 -- compact L1 into L2\n"
     "\twaitforcompaction - pause until compaction is (probably) done\n"
+)
     "\tflush - flush the memtable\n"
     "\tstats -- Print DB stats\n"
     "\tresetstats -- Reset DB stats\n"
@@ -225,130 +234,6 @@ DEFINE_string(
     "by doing a Get followed by binary searching in the large sorted list vs "
     "doing a GetMergeOperands and binary searching in the operands which are"
     "sorted sub-lists. The MergeOperator used is sortlist.h\n");
-#else
-DEFINE_string(
-    benchmarks,
-    "fillseq,"
-    "fillseqdeterministic,"
-    "fillsync,"
-    "fillrandom,"
-    "filluniquerandomdeterministic,"
-    "overwrite,"
-    "readrandom,"
-    "newiterator,"
-    "newiteratorwhilewriting,"
-    "seekrandom,"
-    "seekrandomwhilewriting,"
-    "seekrandomwhilemerging,"
-    "readseq,"
-    "readreverse,"
-    "compact,"
-    "compactall,"
-    "flush,"
-    "multireadrandom,"
-    "mixgraph,"
-    "readseq,"
-    "readtorowcache,"
-    "readtocache,"
-    "readreverse,"
-    "readwhilewriting,"
-    "readwhilemerging,"
-    "readwhilescanning,"
-    "readrandomwriterandom,"
-    "updaterandom,"
-    "xorupdaterandom,"
-    "approximatesizerandom,"
-    "randomwithverify,"
-    "fill100K,"
-    "crc32c,"
-    "xxhash,"
-    "compress,"
-    "uncompress,"
-    "acquireload,"
-    "fillseekseq,"
-    "randomtransaction,"
-    "randomreplacekeys,"
-    "timeseries,"
-    "getmergeoperands",
-
-    "Comma-separated list of operations to run in the specified"
-    " order. Available benchmarks:\n"
-    "\tfillseq -- write N values in sequential key"
-    " order in async mode\n"
-    "\tfillseqdeterministic -- write N values in the specified"
-    " key order and keep the shape of the LSM tree\n"
-    "\tfillrandom -- write N values in random key order in async"
-    " mode\n"
-    "\tfilluniquerandomdeterministic -- write N values in a random"
-    " key order and keep the shape of the LSM tree\n"
-    "\toverwrite -- overwrite N values in random key order in"
-    " async mode\n"
-    "\tfillsync -- write N/1000 values in random key order in "
-    "sync mode\n"
-    "\tfill100K -- write N/1000 100K values in random order in"
-    " async mode\n"
-    "\tdeleteseq -- delete N keys in sequential order\n"
-    "\tdeleterandom -- delete N keys in random order\n"
-    "\treadseq -- read N times sequentially\n"
-    "\treadtocache -- 1 thread reading database sequentially\n"
-    "\treadreverse -- read N times in reverse order\n"
-    "\treadrandom -- read N times in random order\n"
-    "\treadmissing -- read N missing keys in random order\n"
-    "\treadwhilewriting -- 1 writer, N threads doing random "
-    "reads\n"
-    "\treadwhilemerging -- 1 merger, N threads doing random "
-    "reads\n"
-    "\treadwhilescanning -- 1 thread doing full table scan, "
-    "N threads doing random reads\n"
-    "\treadrandomwriterandom -- N threads doing random-read, "
-    "random-write\n"
-    "\tupdaterandom -- N threads doing read-modify-write for random "
-    "keys\n"
-    "\txorupdaterandom -- N threads doing read-XOR-write for "
-    "random keys\n"
-    "\tappendrandom -- N threads doing read-modify-write with "
-    "growing values\n"
-    "\tmergerandom -- same as updaterandom/appendrandom using merge"
-    " operator. "
-    "Must be used with merge_operator\n"
-    "\treadrandommergerandom -- perform N random read-or-merge "
-    "operations. Must be used with merge_operator\n"
-    "\tnewiterator -- repeated iterator creation\n"
-    "\tseekrandom -- N random seeks, call Next seek_nexts times "
-    "per seek\n"
-    "\tseekrandomwhilewriting -- seekrandom and 1 thread doing "
-    "overwrite\n"
-    "\tseekrandomwhilemerging -- seekrandom and 1 thread doing "
-    "merge\n"
-    "\tcrc32c -- repeated crc32c of 4K of data\n"
-    "\txxhash -- repeated xxHash of 4K of data\n"
-    "\tacquireload -- load N*1000 times\n"
-    "\tfillseekseq -- write N values in sequential key, then read "
-    "them by seeking to each key\n"
-    "\trandomtransaction -- execute N random transactions and "
-    "verify correctness\n"
-    "\trandomreplacekeys -- randomly replaces N keys by deleting "
-    "the old version and putting the new version\n\n"
-    "\ttimeseries -- 1 writer generates time series data "
-    "and multiple readers doing random reads on id\n\n"
-    "Meta operations:\n"
-    "\tcompact -- Compact the entire DB; If multiple, randomly choose one\n"
-    "\tcompactall -- Compact the entire DB\n"
-    "\tflush - flush the memtable\n"
-    "\tstats -- Print DB stats\n"
-    "\tresetstats -- Reset DB stats\n"
-    "\tlevelstats -- Print the number of files and bytes per level\n"
-    "\tmemstats -- Print memtable stats\n"
-    "\tsstables -- Print sstable info\n"
-    "\theapprofile -- Dump a heap profile (if supported by this port)\n"
-    "\treplay -- replay the trace file specified with trace_file\n"
-    "\tgetmergeoperands -- Insert lots of merge records which are a list of "
-    "sorted ints for a key and then compare performance of lookup for another "
-    "key "
-    "by doing a Get followed by binary searching in the large sorted list vs "
-    "doing a GetMergeOperands and binary searching in the operands which are"
-    "sorted sub-lists. The MergeOperator used is sortlist.h\n");
-#endif
 
 DEFINE_int64(num, 1000000, "Number of key/values to place in database");
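
For reference, here is a minimal standalone sketch (not db_bench code; the kBenchmarks constant and main below are illustrative stand-ins) of how the IF_ROCKSDB_LITE(Then, Else) macro introduced in this diff lets one DEFINE_string initializer serve both builds: the macro expands to either the empty string or the extra benchmark names, and the compiler concatenates the adjacent string literals, so the previously duplicated LITE copy of the list can be removed.

// Illustrative sketch only -- not part of db_bench. Demonstrates the
// IF_ROCKSDB_LITE(Then, Else) selection between an empty string (LITE
// builds) and the extra benchmark names (non-LITE builds), relying on
// adjacent string-literal concatenation to assemble a single list.
#include <cstdio>

#ifdef ROCKSDB_LITE
#define IF_ROCKSDB_LITE(Then, Else) Then
#else
#define IF_ROCKSDB_LITE(Then, Else) Else
#endif

// Hypothetical stand-in for the DEFINE_string(benchmarks, ...) initializer.
static const char kBenchmarks[] =
    "compact,"
    "compactall,"
    "flush,"
    IF_ROCKSDB_LITE("",
    "compact0,"
    "compact1,"
    "waitforcompaction,"
    )
    "multireadrandom,";

int main() {
  // Without -DROCKSDB_LITE: compact,compactall,flush,compact0,compact1,
  //                         waitforcompaction,multireadrandom,
  // With -DROCKSDB_LITE:    compact,compactall,flush,multireadrandom,
  std::printf("%s\n", kBenchmarks);
  return 0;
}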