#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.

from __future__ import absolute_import, division, print_function, unicode_literals

import glob
import os
import os.path
import re
import shutil
import subprocess
import tempfile
import time
import unittest
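
# Note: all of the tests below shell out to an "ldb" binary invoked as
# "./ldb ...", so this script is expected to be run from a directory that
# contains a built ldb binary.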


def my_check_output(*popenargs, **kwargs):
    """
    If we had Python 2.7, we could simply use subprocess.check_output.
    This is a stop-gap solution for Python 2.6.
    """
    if 'stdout' in kwargs:
        raise ValueError('stdout argument not allowed, it will be overridden.')
    process = subprocess.Popen(stderr=subprocess.PIPE, stdout=subprocess.PIPE,
                               *popenargs, **kwargs)
    output, unused_err = process.communicate()
    retcode = process.poll()
    if retcode:
        cmd = kwargs.get("args")
        if cmd is None:
            cmd = popenargs[0]
        raise Exception("Exit code is not 0. It is %d. Command: %s" %
                        (retcode, cmd))
    return output.decode('utf-8')


def run_err_null(cmd):
    # Run cmd through the shell with stderr silenced; returns the raw
    # os.system() status (0 on success).
    return os.system(cmd + " 2>/dev/null ")
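

# The test cases below exercise ldb subcommands end to end: put/get/scan,
# batchput, dump/load, TTL and hex modes, column family management,
# manifest/SST/WAL dumps, and external SST ingestion. Each test runs against
# a database created under a fresh temporary directory (see setUp/tearDown).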


class LDBTestCase(unittest.TestCase):
    def setUp(self):
        self.TMP_DIR = tempfile.mkdtemp(prefix="ldb_test_")
        self.DB_NAME = "testdb"

    def tearDown(self):
        assert(self.TMP_DIR.strip() != "/"
               and self.TMP_DIR.strip() != "/tmp"
               and self.TMP_DIR.strip() != "/tmp/")  # Just some paranoia

        shutil.rmtree(self.TMP_DIR)

    def dbParam(self, dbName):
        return "--db=%s" % os.path.join(self.TMP_DIR, dbName)

    def assertRunOKFull(self, params, expectedOutput, unexpected=False,
                        isPattern=False):
        """
        All command-line params must be specified.
        Allows full flexibility in testing; for example: missing db param.
        """
        # "Created bg thread" lines that ldb may print are filtered out so
        # they do not disturb the expected-output comparison.
        output = my_check_output("./ldb %s |grep -v \"Created bg thread\"" %
                                 params, shell=True)
        if not unexpected:
            if isPattern:
                self.assertNotEqual(expectedOutput.search(output.strip()),
                                    None)
            else:
                self.assertEqual(output.strip(), expectedOutput.strip())
        else:
            if isPattern:
                self.assertEqual(expectedOutput.search(output.strip()), None)
            else:
                self.assertNotEqual(output.strip(), expectedOutput.strip())

    def assertRunFAILFull(self, params):
        """
        All command-line params must be specified.
        Allows full flexibility in testing; for example: missing db param.
        """
        try:
            my_check_output("./ldb %s >/dev/null 2>&1 |grep -v \"Created bg "
                            "thread\"" % params, shell=True)
        except Exception:
            return
        self.fail(
            "Exception should have been raised for command with params: %s" %
            params)

    def assertRunOK(self, params, expectedOutput, unexpected=False):
        """
        Uses the default test db.
        """
        self.assertRunOKFull("%s %s" % (self.dbParam(self.DB_NAME), params),
                             expectedOutput, unexpected)
    def assertRunFAIL(self, params):
        """
        Uses the default test db.
        """
        self.assertRunFAILFull("%s %s" % (self.dbParam(self.DB_NAME), params))

    def testSimpleStringPutGet(self):
        print("Running testSimpleStringPutGet...")
        self.assertRunFAIL("put x1 y1")
        self.assertRunOK("put --create_if_missing x1 y1", "OK")
        self.assertRunOK("get x1", "y1")
        self.assertRunFAIL("get x2")

        self.assertRunOK("put x2 y2", "OK")
        self.assertRunOK("get x1", "y1")
        self.assertRunOK("get x2", "y2")
        self.assertRunFAIL("get x3")

        self.assertRunOK("scan --from=x1 --to=z", "x1 : y1\nx2 : y2")
        self.assertRunOK("put x3 y3", "OK")

        self.assertRunOK("scan --from=x1 --to=z", "x1 : y1\nx2 : y2\nx3 : y3")
        self.assertRunOK("scan", "x1 : y1\nx2 : y2\nx3 : y3")
        self.assertRunOK("scan --from=x", "x1 : y1\nx2 : y2\nx3 : y3")

        self.assertRunOK("scan --to=x2", "x1 : y1")
        self.assertRunOK("scan --from=x1 --to=z --max_keys=1", "x1 : y1")
        self.assertRunOK("scan --from=x1 --to=z --max_keys=2",
                         "x1 : y1\nx2 : y2")

        self.assertRunOK("scan --from=x1 --to=z --max_keys=3",
                         "x1 : y1\nx2 : y2\nx3 : y3")
        self.assertRunOK("scan --from=x1 --to=z --max_keys=4",
                         "x1 : y1\nx2 : y2\nx3 : y3")
        self.assertRunOK("scan --from=x1 --to=x2", "x1 : y1")
        self.assertRunOK("scan --from=x2 --to=x4", "x2 : y2\nx3 : y3")
        self.assertRunFAIL("scan --from=x4 --to=z")  # No results => FAIL
        self.assertRunFAIL("scan --from=x1 --to=z --max_keys=foo")

        self.assertRunOK("scan", "x1 : y1\nx2 : y2\nx3 : y3")

        self.assertRunOK("delete x1", "OK")
        self.assertRunOK("scan", "x2 : y2\nx3 : y3")

        self.assertRunOK("delete NonExistentKey", "OK")
        # It is weird that GET and SCAN raise an exception for a
        # non-existent key, while DELETE does not.

        self.assertRunOK("checkconsistency", "OK")

    def dumpDb(self, params, dumpFile):
        return 0 == run_err_null("./ldb dump %s > %s" % (params, dumpFile))

    def loadDb(self, params, dumpFile):
        return 0 == run_err_null("cat %s | ./ldb load %s" % (dumpFile, params))

    def writeExternSst(self, params, inputDumpFile, outputSst):
        return 0 == run_err_null("cat %s | ./ldb write_extern_sst %s %s"
                                 % (inputDumpFile, outputSst, params))

    def ingestExternSst(self, params, inputSst):
        return 0 == run_err_null("./ldb ingest_extern_sst %s %s"
                                 % (inputSst, params))

    def testStringBatchPut(self):
        print("Running testStringBatchPut...")
        self.assertRunOK("batchput x1 y1 --create_if_missing", "OK")
        self.assertRunOK("scan", "x1 : y1")
        self.assertRunOK("batchput x2 y2 x3 y3 \"x4 abc\" \"y4 xyz\"", "OK")
        self.assertRunOK("scan", "x1 : y1\nx2 : y2\nx3 : y3\nx4 abc : y4 xyz")
        self.assertRunFAIL("batchput")
        self.assertRunFAIL("batchput k1")
        self.assertRunFAIL("batchput k1 v1 k2")

    def testCountDelimDump(self):
        print("Running testCountDelimDump...")
        self.assertRunOK("batchput x.1 x1 --create_if_missing", "OK")
        self.assertRunOK("batchput y.abc abc y.2 2 z.13c pqr", "OK")
        self.assertRunOK("dump --count_delim",
                         "x => count:1\tsize:5\ny => count:2\tsize:12\n"
                         "z => count:1\tsize:8")
        self.assertRunOK("dump --count_delim=\".\"",
                         "x => count:1\tsize:5\ny => count:2\tsize:12\n"
                         "z => count:1\tsize:8")
        self.assertRunOK("batchput x,2 x2 x,abc xabc", "OK")
        self.assertRunOK("dump --count_delim=\",\"",
                         "x => count:2\tsize:14\nx.1 => count:1\tsize:5\n"
                         "y.2 => count:1\tsize:4\ny.abc => count:1\tsize:8\n"
                         "z.13c => count:1\tsize:8")

    def testCountDelimIDump(self):
        print("Running testCountDelimIDump...")
        self.assertRunOK("batchput x.1 x1 --create_if_missing", "OK")
        self.assertRunOK("batchput y.abc abc y.2 2 z.13c pqr", "OK")
        self.assertRunOK("idump --count_delim",
                         "x => count:1\tsize:5\ny => count:2\tsize:12\n"
                         "z => count:1\tsize:8")
        self.assertRunOK("idump --count_delim=\".\"",
                         "x => count:1\tsize:5\ny => count:2\tsize:12\n"
                         "z => count:1\tsize:8")
        self.assertRunOK("batchput x,2 x2 x,abc xabc", "OK")
        self.assertRunOK("idump --count_delim=\",\"",
                         "x => count:2\tsize:14\nx.1 => count:1\tsize:5\n"
                         "y.2 => count:1\tsize:4\ny.abc => count:1\tsize:8\n"
                         "z.13c => count:1\tsize:8")

    def testInvalidCmdLines(self):
        print("Running testInvalidCmdLines...")
        # db not specified
        self.assertRunFAILFull("put 0x6133 0x6233 --hex --create_if_missing")
        # No param called he
        self.assertRunFAIL("put 0x6133 0x6233 --he --create_if_missing")
        # max_keys is not applicable for put
        self.assertRunFAIL("put 0x6133 0x6233 --max_keys=1 --create_if_missing")
        # hex has invalid boolean value
        self.assertRunFAIL("put 0x6133 0x6233 --hex=Boo --create_if_missing")

    def testHexPutGet(self):
        print("Running testHexPutGet...")
        self.assertRunOK("put a1 b1 --create_if_missing", "OK")
        self.assertRunOK("scan", "a1 : b1")
        self.assertRunOK("scan --hex", "0x6131 : 0x6231")
        self.assertRunFAIL("put --hex 6132 6232")
        self.assertRunOK("put --hex 0x6132 0x6232", "OK")
        self.assertRunOK("scan --hex", "0x6131 : 0x6231\n0x6132 : 0x6232")
        self.assertRunOK("scan", "a1 : b1\na2 : b2")
        self.assertRunOK("get a1", "b1")
        self.assertRunOK("get --hex 0x6131", "0x6231")
        self.assertRunOK("get a2", "b2")
        self.assertRunOK("get --hex 0x6132", "0x6232")
        self.assertRunOK("get --key_hex 0x6132", "b2")
        self.assertRunOK("get --key_hex --value_hex 0x6132", "0x6232")
        self.assertRunOK("get --value_hex a2", "0x6232")
        self.assertRunOK("scan --key_hex --value_hex",
                         "0x6131 : 0x6231\n0x6132 : 0x6232")
        self.assertRunOK("scan --hex --from=0x6131 --to=0x6133",
                         "0x6131 : 0x6231\n0x6132 : 0x6232")
        self.assertRunOK("scan --hex --from=0x6131 --to=0x6132",
                         "0x6131 : 0x6231")
        self.assertRunOK("scan --key_hex", "0x6131 : b1\n0x6132 : b2")
        self.assertRunOK("scan --value_hex", "a1 : 0x6231\na2 : 0x6232")
        self.assertRunOK("batchput --hex 0x6133 0x6233 0x6134 0x6234", "OK")
        self.assertRunOK("scan", "a1 : b1\na2 : b2\na3 : b3\na4 : b4")
        self.assertRunOK("delete --hex 0x6133", "OK")
        self.assertRunOK("scan", "a1 : b1\na2 : b2\na4 : b4")
        self.assertRunOK("checkconsistency", "OK")

    def testTtlPutGet(self):
        print("Running testTtlPutGet...")
        self.assertRunOK("put a1 b1 --ttl --create_if_missing", "OK")
        self.assertRunOK("scan --hex", "0x6131 : 0x6231", True)
        self.assertRunOK("dump --ttl ", "a1 ==> b1", True)
        self.assertRunOK("dump --hex --ttl ",
                         "0x6131 ==> 0x6231\nKeys in range: 1")
        self.assertRunOK("scan --hex --ttl", "0x6131 : 0x6231")
        self.assertRunOK("get --value_hex a1", "0x6231", True)
        self.assertRunOK("get --ttl a1", "b1")
        self.assertRunOK("put a3 b3 --create_if_missing", "OK")
        # fails because the timestamp's length is greater than the value's
        self.assertRunFAIL("get --ttl a3")
        self.assertRunOK("checkconsistency", "OK")

    def testInvalidCmdLines(self):  # noqa: F811 T25377293 Grandfathered in
        print("Running testInvalidCmdLines...")
        # db not specified
        self.assertRunFAILFull("put 0x6133 0x6233 --hex --create_if_missing")
        # No param called he
        self.assertRunFAIL("put 0x6133 0x6233 --he --create_if_missing")
        # max_keys is not applicable for put
        self.assertRunFAIL("put 0x6133 0x6233 --max_keys=1 --create_if_missing")
        # hex has invalid boolean value
        self.assertRunFAIL("put 0x6133 0x6233 --hex=Boo --create_if_missing")

    def testDumpLoad(self):
        print("Running testDumpLoad...")
        self.assertRunOK("batchput --create_if_missing x1 y1 x2 y2 x3 y3 x4 y4",
                         "OK")
        self.assertRunOK("scan", "x1 : y1\nx2 : y2\nx3 : y3\nx4 : y4")
        origDbPath = os.path.join(self.TMP_DIR, self.DB_NAME)

        # Dump and load without any additional params specified
        dumpFilePath = os.path.join(self.TMP_DIR, "dump1")
        loadedDbPath = os.path.join(self.TMP_DIR, "loaded_from_dump1")
        self.assertTrue(self.dumpDb("--db=%s" % origDbPath, dumpFilePath))
        self.assertTrue(self.loadDb(
            "--db=%s --create_if_missing" % loadedDbPath, dumpFilePath))
        self.assertRunOKFull("scan --db=%s" % loadedDbPath,
                             "x1 : y1\nx2 : y2\nx3 : y3\nx4 : y4")

        # Dump and load in hex
        dumpFilePath = os.path.join(self.TMP_DIR, "dump2")
        loadedDbPath = os.path.join(self.TMP_DIR, "loaded_from_dump2")
        self.assertTrue(self.dumpDb("--db=%s --hex" % origDbPath, dumpFilePath))
        self.assertTrue(self.loadDb(
            "--db=%s --hex --create_if_missing" % loadedDbPath, dumpFilePath))
        self.assertRunOKFull("scan --db=%s" % loadedDbPath,
                             "x1 : y1\nx2 : y2\nx3 : y3\nx4 : y4")

        # Dump only a portion of the key range
        dumpFilePath = os.path.join(self.TMP_DIR, "dump3")
        loadedDbPath = os.path.join(self.TMP_DIR, "loaded_from_dump3")
        self.assertTrue(self.dumpDb(
            "--db=%s --from=x1 --to=x3" % origDbPath, dumpFilePath))
        self.assertTrue(self.loadDb(
            "--db=%s --create_if_missing" % loadedDbPath, dumpFilePath))
        self.assertRunOKFull("scan --db=%s" % loadedDbPath, "x1 : y1\nx2 : y2")

        # Dump up to max_keys rows
        dumpFilePath = os.path.join(self.TMP_DIR, "dump4")
        loadedDbPath = os.path.join(self.TMP_DIR, "loaded_from_dump4")
        self.assertTrue(self.dumpDb(
            "--db=%s --max_keys=3" % origDbPath, dumpFilePath))
        self.assertTrue(self.loadDb(
            "--db=%s --create_if_missing" % loadedDbPath, dumpFilePath))
        self.assertRunOKFull("scan --db=%s" % loadedDbPath,
                             "x1 : y1\nx2 : y2\nx3 : y3")

        # Load into an existing db, create_if_missing is not specified
        self.assertTrue(self.dumpDb("--db=%s" % origDbPath, dumpFilePath))
        self.assertTrue(self.loadDb("--db=%s" % loadedDbPath, dumpFilePath))
        self.assertRunOKFull("scan --db=%s" % loadedDbPath,
                             "x1 : y1\nx2 : y2\nx3 : y3\nx4 : y4")

        # Dump and load with WAL disabled
        dumpFilePath = os.path.join(self.TMP_DIR, "dump5")
        loadedDbPath = os.path.join(self.TMP_DIR, "loaded_from_dump5")
        self.assertTrue(self.dumpDb("--db=%s" % origDbPath, dumpFilePath))
        self.assertTrue(self.loadDb(
            "--db=%s --disable_wal --create_if_missing" % loadedDbPath,
            dumpFilePath))
        self.assertRunOKFull("scan --db=%s" % loadedDbPath,
                             "x1 : y1\nx2 : y2\nx3 : y3\nx4 : y4")

        # Dump and load with lots of extra params specified
        extraParams = " ".join(["--bloom_bits=14", "--block_size=1024",
                                "--auto_compaction=true",
                                "--write_buffer_size=4194304",
                                "--file_size=2097152"])
        dumpFilePath = os.path.join(self.TMP_DIR, "dump6")
        loadedDbPath = os.path.join(self.TMP_DIR, "loaded_from_dump6")
        self.assertTrue(self.dumpDb(
            "--db=%s %s" % (origDbPath, extraParams), dumpFilePath))
        self.assertTrue(self.loadDb(
            "--db=%s %s --create_if_missing" % (loadedDbPath, extraParams),
            dumpFilePath))
        self.assertRunOKFull("scan --db=%s" % loadedDbPath,
                             "x1 : y1\nx2 : y2\nx3 : y3\nx4 : y4")

        # Dump with count_only
        dumpFilePath = os.path.join(self.TMP_DIR, "dump7")
        loadedDbPath = os.path.join(self.TMP_DIR, "loaded_from_dump7")
        self.assertTrue(self.dumpDb(
            "--db=%s --count_only" % origDbPath, dumpFilePath))
        self.assertTrue(self.loadDb(
            "--db=%s --create_if_missing" % loadedDbPath, dumpFilePath))
        # The DB should have at least one value for scan to work
        self.assertRunOKFull("put --db=%s k1 v1" % loadedDbPath, "OK")
        self.assertRunOKFull("scan --db=%s" % loadedDbPath, "k1 : v1")

        # Dump command fails because of typo in params
        dumpFilePath = os.path.join(self.TMP_DIR, "dump8")
        self.assertFalse(self.dumpDb(
            "--db=%s --create_if_missing" % origDbPath, dumpFilePath))

    def testIDumpBasics(self):
        print("Running testIDumpBasics...")
        self.assertRunOK("put a val --create_if_missing", "OK")
        self.assertRunOK("put b val", "OK")
        self.assertRunOK(
            "idump", "'a' seq:1, type:1 => val\n"
            "'b' seq:2, type:1 => val\nInternal keys in range: 2")
        self.assertRunOK(
            "idump --input_key_hex --from=%s --to=%s" % (hex(ord('a')),
                                                         hex(ord('b'))),
            "'a' seq:1, type:1 => val\nInternal keys in range: 1")

    def testMiscAdminTask(self):
        print("Running testMiscAdminTask...")
        # These tests need to be improved; for example with asserts about
        # whether compaction or level reduction actually took place.
        self.assertRunOK("batchput --create_if_missing x1 y1 x2 y2 x3 y3 x4 y4",
                         "OK")
        self.assertRunOK("scan", "x1 : y1\nx2 : y2\nx3 : y3\nx4 : y4")
        origDbPath = os.path.join(self.TMP_DIR, self.DB_NAME)

        self.assertTrue(0 == run_err_null(
            "./ldb compact --db=%s" % origDbPath))
        self.assertRunOK("scan", "x1 : y1\nx2 : y2\nx3 : y3\nx4 : y4")

        self.assertTrue(0 == run_err_null(
            "./ldb reduce_levels --db=%s --new_levels=2" % origDbPath))
        self.assertRunOK("scan", "x1 : y1\nx2 : y2\nx3 : y3\nx4 : y4")

        self.assertTrue(0 == run_err_null(
            "./ldb reduce_levels --db=%s --new_levels=3" % origDbPath))
        self.assertRunOK("scan", "x1 : y1\nx2 : y2\nx3 : y3\nx4 : y4")

        self.assertTrue(0 == run_err_null(
            "./ldb compact --db=%s --from=x1 --to=x3" % origDbPath))
        self.assertRunOK("scan", "x1 : y1\nx2 : y2\nx3 : y3\nx4 : y4")

        self.assertTrue(0 == run_err_null(
            "./ldb compact --db=%s --hex --from=0x6131 --to=0x6134"
            % origDbPath))
        self.assertRunOK("scan", "x1 : y1\nx2 : y2\nx3 : y3\nx4 : y4")

        # TODO(dilip): Not sure what should be passed to WAL. Currently corrupted.
        self.assertTrue(0 == run_err_null(
            "./ldb dump_wal --db=%s --walfile=%s --header" % (
                origDbPath, os.path.join(origDbPath, "LOG"))))
        self.assertRunOK("scan", "x1 : y1\nx2 : y2\nx3 : y3\nx4 : y4")

    def testCheckConsistency(self):
        print("Running testCheckConsistency...")

        dbPath = os.path.join(self.TMP_DIR, self.DB_NAME)
        self.assertRunOK("put x1 y1 --create_if_missing", "OK")
        self.assertRunOK("put x2 y2", "OK")
        self.assertRunOK("get x1", "y1")
        self.assertRunOK("checkconsistency", "OK")

        sstFilePath = my_check_output("ls %s" % os.path.join(dbPath, "*.sst"),
                                      shell=True)

        # Modify the file
        my_check_output("echo 'evil' > %s" % sstFilePath, shell=True)
        self.assertRunFAIL("checkconsistency")

        # Delete the file
        my_check_output("rm -f %s" % sstFilePath, shell=True)
        self.assertRunFAIL("checkconsistency")

    def dumpLiveFiles(self, params, dumpFile):
        return 0 == run_err_null("./ldb dump_live_files %s > %s" % (
            params, dumpFile))

    def testDumpLiveFiles(self):
        print("Running testDumpLiveFiles...")

        dbPath = os.path.join(self.TMP_DIR, self.DB_NAME)
        self.assertRunOK("put x1 y1 --create_if_missing", "OK")
        self.assertRunOK("put x2 y2", "OK")
        dumpFilePath = os.path.join(self.TMP_DIR, "dump1")
        self.assertTrue(self.dumpLiveFiles("--db=%s" % dbPath, dumpFilePath))
        self.assertRunOK("delete x1", "OK")
        self.assertRunOK("put x3 y3", "OK")
        dumpFilePath = os.path.join(self.TMP_DIR, "dump2")
        self.assertTrue(self.dumpLiveFiles("--db=%s" % dbPath, dumpFilePath))

    def getManifests(self, directory):
        return glob.glob(directory + "/MANIFEST-*")

    def getSSTFiles(self, directory):
        return glob.glob(directory + "/*.sst")

    def getWALFiles(self, directory):
        return glob.glob(directory + "/*.log")

    def copyManifests(self, src, dest):
        return 0 == run_err_null("cp " + src + " " + dest)

    def testManifestDump(self):
        print("Running testManifestDump...")
        dbPath = os.path.join(self.TMP_DIR, self.DB_NAME)
        self.assertRunOK("put 1 1 --create_if_missing", "OK")
        self.assertRunOK("put 2 2", "OK")
        self.assertRunOK("put 3 3", "OK")
        # Pattern to expect from manifest_dump.
        num = "[0-9]+"
        st = ".*"
        subpat = st + " seq:" + num + ", type:" + num
        regex = num + ":" + num + "\[" + subpat + ".." + subpat + "\]"
        expected_pattern = re.compile(regex)
        cmd = "manifest_dump --db=%s"
        manifest_files = self.getManifests(dbPath)
        self.assertTrue(len(manifest_files) == 1)
        # Test with the default manifest file in dbPath.
        self.assertRunOKFull(cmd % dbPath, expected_pattern,
                             unexpected=False, isPattern=True)
        self.copyManifests(manifest_files[0], manifest_files[0] + "1")
        manifest_files = self.getManifests(dbPath)
        self.assertTrue(len(manifest_files) == 2)
        # Test with multiple manifest files in dbPath.
        self.assertRunFAILFull(cmd % dbPath)
        # Running it with the copy we just created should pass.
        self.assertRunOKFull((cmd + " --path=%s")
                             % (dbPath, manifest_files[1]),
                             expected_pattern, unexpected=False,
                             isPattern=True)
        # Make sure that using the dump with --path will result in identical
        # output as just using manifest_dump.
        cmd = "dump --path=%s"
        self.assertRunOKFull((cmd)
                             % (manifest_files[1]),
                             expected_pattern, unexpected=False,
                             isPattern=True)

        # Check that null characters don't interfere with the output format.
        self.assertRunOK("put a1 b1", "OK")
        self.assertRunOK("put a2 b2", "OK")
        self.assertRunOK("put --hex 0x12000DA0 0x80C0000B", "OK")
        self.assertRunOK("put --hex 0x7200004f 0x80000004", "OK")
        self.assertRunOK("put --hex 0xa000000a 0xf000000f", "OK")
        self.assertRunOK("put a3 b3", "OK")
        self.assertRunOK("put a4 b4", "OK")

        # Verifies that all "levels" are printed out.
        # There should be 66 mentions of levels.
        expected_verbose_output = re.compile("matched")
        # Test manifest_dump verbose and verify that key 0x7200004f
        # is present. Note that we are forced to use grep here because
        # an output with a non-terminating null character in it isn't piped
        # correctly through the Python subprocess object.
        # Also note that 0x72=r and 0x4f=O, hence the regex \'r.{2}O\'
        # (we cannot use null character in the subprocess input either,
        # so we have to use '.{2}')
        cmd_verbose = "manifest_dump --verbose --db=%s | grep -aq $'\'r.{2}O\'' && echo 'matched' || echo 'not matched'" % dbPath

        self.assertRunOKFull(cmd_verbose, expected_verbose_output,
                             unexpected=False, isPattern=True)

    def testGetProperty(self):
        print("Running testGetProperty...")
        dbPath = os.path.join(self.TMP_DIR, self.DB_NAME)
        self.assertRunOK("put 1 1 --create_if_missing", "OK")
        self.assertRunOK("put 2 2", "OK")
        # A "string" property
        cmd = "--db=%s get_property rocksdb.estimate-num-keys"
        self.assertRunOKFull(cmd % dbPath,
                             "rocksdb.estimate-num-keys: 2")
        # A "map" property
        # FIXME: why doesn't this pick up two entries?
        cmd = "--db=%s get_property rocksdb.aggregated-table-properties"
        part = "rocksdb.aggregated-table-properties.num_entries: "
        expected_pattern = re.compile(part)
        self.assertRunOKFull(cmd % dbPath,
                             expected_pattern, unexpected=False,
                             isPattern=True)
        # An invalid property
        cmd = "--db=%s get_property rocksdb.this-property-does-not-exist"
        self.assertRunFAILFull(cmd % dbPath)

    def testSSTDump(self):
        print("Running testSSTDump...")

        dbPath = os.path.join(self.TMP_DIR, self.DB_NAME)
        self.assertRunOK("put sst1 sst1_val --create_if_missing", "OK")
        self.assertRunOK("put sst2 sst2_val", "OK")
        self.assertRunOK("get sst1", "sst1_val")

        # Pattern to expect from SST dump.
        regex = ".*Sst file format:.*"
        expected_pattern = re.compile(regex)

        sst_files = self.getSSTFiles(dbPath)
        self.assertTrue(len(sst_files) >= 1)
        cmd = "dump --path=%s"
        self.assertRunOKFull((cmd)
                             % (sst_files[0]),
                             expected_pattern, unexpected=False,
                             isPattern=True)

    def testWALDump(self):
        print("Running testWALDump...")

        dbPath = os.path.join(self.TMP_DIR, self.DB_NAME)
        self.assertRunOK("put wal1 wal1_val --create_if_missing", "OK")
        self.assertRunOK("put wal2 wal2_val", "OK")
        self.assertRunOK("get wal1", "wal1_val")

        # Pattern to expect from WAL dump.
        regex = "^Sequence,Count,ByteSize,Physical Offset,Key\(s\).*"
        expected_pattern = re.compile(regex)

        wal_files = self.getWALFiles(dbPath)
        self.assertTrue(len(wal_files) >= 1)
        cmd = "dump --path=%s"
        self.assertRunOKFull((cmd)
                             % (wal_files[0]),
                             expected_pattern, unexpected=False,
                             isPattern=True)

    def testListColumnFamilies(self):
        print("Running testListColumnFamilies...")
        self.assertRunOK("put x1 y1 --create_if_missing", "OK")
        cmd = "list_column_families | grep -v \"Column families\""
        # Test on valid dbPath.
        self.assertRunOK(cmd, "{default}")
        # Test on empty path.
        self.assertRunFAIL(cmd)

    def testColumnFamilies(self):
        print("Running testColumnFamilies...")
        dbPath = os.path.join(self.TMP_DIR, self.DB_NAME)  # noqa: F841 T25377293 Grandfathered in
        self.assertRunOK("put cf1_1 1 --create_if_missing", "OK")
        self.assertRunOK("put cf1_2 2 --create_if_missing", "OK")
        self.assertRunOK("put cf1_3 3 --try_load_options", "OK")
        # Given non-default column family to single CF DB.
        self.assertRunFAIL("get cf1_1 --column_family=two")
        self.assertRunOK("create_column_family two", "OK")
        self.assertRunOK("put cf2_1 1 --create_if_missing --column_family=two",
                         "OK")
        self.assertRunOK("put cf2_2 2 --create_if_missing --column_family=two",
                         "OK")
        self.assertRunOK("delete cf1_2", "OK")
        self.assertRunOK("create_column_family three", "OK")
        self.assertRunOK("delete cf2_2 --column_family=two", "OK")
        self.assertRunOK(
            "put cf3_1 3 --create_if_missing --column_family=three",
            "OK")
        self.assertRunOK("get cf1_1 --column_family=default", "1")
        self.assertRunOK("dump --column_family=two",
                         "cf2_1 ==> 1\nKeys in range: 1")
        self.assertRunOK("dump --column_family=two --try_load_options",
                         "cf2_1 ==> 1\nKeys in range: 1")
        self.assertRunOK("dump",
                         "cf1_1 ==> 1\ncf1_3 ==> 3\nKeys in range: 2")
        self.assertRunOK("get cf2_1 --column_family=two",
                         "1")
        self.assertRunOK("get cf3_1 --column_family=three",
                         "3")
        self.assertRunOK("drop_column_family three", "OK")
        # non-existing column family.
        self.assertRunFAIL("get cf3_1 --column_family=four")
        self.assertRunFAIL("drop_column_family four")

    def testIngestExternalSst(self):
        print("Running testIngestExternalSst...")

        # Dump, load, write external sst and ingest it in another db
        dbPath = os.path.join(self.TMP_DIR, "db1")
        self.assertRunOK(
            "batchput --db=%s --create_if_missing x1 y1 x2 y2 x3 y3 x4 y4"
            % dbPath,
            "OK")
        self.assertRunOK("scan --db=%s" % dbPath,
                         "x1 : y1\nx2 : y2\nx3 : y3\nx4 : y4")
        dumpFilePath = os.path.join(self.TMP_DIR, "dump1")
        with open(dumpFilePath, 'w') as f:
            f.write("x1 ==> y10\nx2 ==> y20\nx3 ==> y30\nx4 ==> y40")
        externSstPath = os.path.join(self.TMP_DIR, "extern_data1.sst")
        self.assertTrue(self.writeExternSst("--create_if_missing --db=%s"
                                            % dbPath,
                                            dumpFilePath,
                                            externSstPath))
        # cannot ingest if allow_global_seqno is false
        self.assertFalse(
            self.ingestExternSst(
                "--create_if_missing --allow_global_seqno=false --db=%s"
                % dbPath,
                externSstPath))
        self.assertTrue(
            self.ingestExternSst(
                "--create_if_missing --allow_global_seqno --db=%s"
                % dbPath,
                externSstPath))
        self.assertRunOKFull("scan --db=%s" % dbPath,
                             "x1 : y10\nx2 : y20\nx3 : y30\nx4 : y40")


if __name__ == "__main__":
    unittest.main()
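
# A single test case can be run by naming it on the command line, e.g.
# (assuming this file is saved as ldb_test.py):
#   python3 ldb_test.py LDBTestCase.testSimpleStringPutGet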