2019-04-18 19:51:19 +02:00
|
|
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
|
2017-04-05 01:09:31 +02:00
|
|
|
from __future__ import absolute_import
|
|
|
|
from __future__ import division
|
|
|
|
from __future__ import print_function
|
|
|
|
from __future__ import unicode_literals
|
2019-10-23 22:51:03 +02:00
|
|
|
try:
|
|
|
|
from builtins import str
|
|
|
|
except ImportError:
|
|
|
|
from __builtin__ import str
|
2017-04-05 01:09:31 +02:00
|
|
|
from targets_builder import TARGETSBuilder
|
2019-08-02 19:40:32 +02:00
|
|
|
import json
|
2017-04-05 01:09:31 +02:00
|
|
|
import os
|
|
|
|
import fnmatch
|
|
|
|
import sys
|
|
|
|
|
|
|
|
from util import ColorString
|
|
|
|
|
2019-08-02 19:40:32 +02:00
|
|
|
# This script generates TARGETS file for Buck.
|
|
|
|
# Buck is a build tool specifying dependencies among different build targets.
|
|
|
|
# User can pass extra dependencies as a JSON object via command line, and this
|
|
|
|
# script can include these dependencies in the generated TARGETS file.
|
|
|
|
# Usage:
|
2020-05-29 20:24:19 +02:00
|
|
|
# $python3 buckifier/buckify_rocksdb.py
|
2019-08-02 19:40:32 +02:00
|
|
|
# (This generates a TARGETS file without user-specified dependencies for unit
|
|
|
|
# tests.)
|
2020-05-29 20:24:19 +02:00
|
|
|
# $python3 buckifier/buckify_rocksdb.py \
|
2021-01-28 01:19:43 +01:00
|
|
|
# '{"fake": {
|
|
|
|
# "extra_deps": [":test_dep", "//fakes/module:mock1"],
|
|
|
|
# "extra_compiler_flags": ["-DROCKSDB_LITE", "-Os"]
|
|
|
|
# }
|
2019-08-02 19:40:32 +02:00
|
|
|
# }'
|
|
|
|
# (Generated TARGETS file has test_dep and mock1 as dependencies for RocksDB
|
|
|
|
# unit tests, and will use the extra_compiler_flags to compile the unit test
|
|
|
|
# source.)
|
|
|
|
|
2017-04-05 01:09:31 +02:00
|
|
|
# Tests to also export as libraries for inclusion in other projects
# (see the test-registration loop in generate_targets()).
_EXPORTED_TEST_LIBS = ["env_basic_test"]
|
|
|
|
|
|
|
|
# Parse src.mk files as a Dictionary of
# VAR_NAME => list of files
def parse_src_mk(repo_path):
    """Parse <repo_path>/src.mk into a dict VAR_NAME -> list of source paths.

    A line containing '=' starts a new variable; subsequent lines containing
    ".c" are treated as source entries (a trailing "\\" continuation marker
    and surrounding whitespace are stripped). Blank lines and '#' comments
    are ignored.
    """
    src_mk = repo_path + "/src.mk"
    src_files = {}
    # Use a context manager so the file handle is always closed
    # (previously the handle was left for the garbage collector).
    with open(src_mk) as makefile:
        for line in makefile:
            line = line.strip()
            # skip blanks and comments
            if len(line) == 0 or line[0] == '#':
                continue
            if '=' in line:
                # start of a new variable definition
                current_src = line.split('=')[0].strip()
                src_files[current_src] = []
            elif '.c' in line:
                # source entry; drop any trailing "\" continuation
                src_path = line.split('\\')[0].strip()
                src_files[current_src].append(src_path)
    return src_files
|
|
|
|
|
|
|
|
|
|
|
|
# get all .cc / .c files
def get_cc_files(repo_path):
    """Return every .cc / .c file under repo_path, as paths relative to it."""
    found = []
    prefix_len = len(repo_path) + 1
    for dirpath, _dirnames, names in os.walk(repo_path):  # noqa: B007 T25377293 Grandfathered in
        # make the directory relative to the repo root
        rel_dir = dirpath[prefix_len:]
        # Skip java
        if "java" in rel_dir:
            continue
        # .cc files first, then .c files, per directory
        for pattern in ('*.cc', '*.c'):
            found.extend(os.path.join(rel_dir, name)
                         for name in fnmatch.filter(names, pattern))
    return found
|
|
|
|
|
|
|
|
|
2021-04-05 05:09:05 +02:00
|
|
|
# Get non_parallel tests from Makefile
def get_non_parallel_tests(repo_path):
    """Return the set of test names listed after "NON_PARALLEL_TEST =" in
    <repo_path>/Makefile.

    Only continuation lines ending in a backslash are collected; the first
    subsequent line without a trailing backslash ends the scan and is NOT
    added (this matches the Makefile layout where every listed test line
    ends with "\\").
    """
    makefile_path = repo_path + "/Makefile"
    tests = set()
    found_non_parallel_tests = False
    # Context manager ensures the handle is closed
    # (previously the handle was left for the garbage collector).
    with open(makefile_path) as makefile:
        for line in makefile:
            line = line.strip()
            if line.startswith("NON_PARALLEL_TEST ="):
                found_non_parallel_tests = True
            elif found_non_parallel_tests:
                if line.endswith("\\"):
                    # remove the trailing \ and surrounding whitespace
                    tests.add(line[:-1].strip())
                else:
                    # we consumed all the non_parallel tests
                    break
    return tests
|
2017-04-05 01:09:31 +02:00
|
|
|
|
2019-08-02 19:40:32 +02:00
|
|
|
# Parse extra dependencies passed by user from command line
def get_dependencies():
    """Build the target-alias -> dependency-spec map.

    The result always contains the default alias '' with empty
    'extra_deps' and 'extra_compiler_flags'.  When a JSON object is
    passed as the first command-line argument, each of its entries is
    merged on top of the default map.
    """
    deps_map = {'': {'extra_deps': [], 'extra_compiler_flags': []}}
    if len(sys.argv) < 2:
        return deps_map

    def encode_dict(data):
        # Rebuild each decoded JSON object as a plain dict, recursing into
        # nested dicts (kept for parity with the Python 2-era handling).
        return {k: encode_dict(v) if isinstance(v, dict) else v
                for k, v in data.items()}

    deps_map.update(json.loads(sys.argv[1], object_hook=encode_dict))
    return deps_map
|
|
|
|
|
|
|
|
|
2017-04-05 01:09:31 +02:00
|
|
|
# Prepare TARGETS file for buck
def generate_targets(repo_path, deps_map):
    """Write <repo_path>/TARGETS for Buck.

    Args:
        repo_path: root directory of the RocksDB checkout.
        deps_map: mapping of target alias -> {'extra_deps': [...],
            'extra_compiler_flags': [...]} as produced by get_dependencies().

    Returns:
        True on success; False when any input parse returned None or an
        unexpected C test file is found.
    """
    print(ColorString.info("Generating TARGETS"))
    # parsed src.mk file
    src_mk = parse_src_mk(repo_path)
    # get all .cc files
    cc_files = get_cc_files(repo_path)
    # get non_parallel tests from Makefile
    non_parallel_tests = get_non_parallel_tests(repo_path)

    # NOTE(review): cc_files and non_parallel_tests are only used for this
    # sanity check; they are not referenced again in this function.
    if src_mk is None or cc_files is None or non_parallel_tests is None:
        return False

    extra_argv = ""
    if len(sys.argv) >= 2:
        # Heuristically quote and canonicalize whitespace for inclusion
        # in how the file was generated.
        extra_argv = " '{0}'".format(" ".join(sys.argv[1].split()))

    TARGETS = TARGETSBuilder("%s/TARGETS" % repo_path, extra_argv)

    # rocksdb_lib
    TARGETS.add_library(
        "rocksdb_lib",
        src_mk["LIB_SOURCES"] +
        # always add range_tree, it's only excluded on ppc64, which we don't use internally
        src_mk["RANGE_TREE_SOURCES"] +
        src_mk["TOOL_LIB_SOURCES"])
    # rocksdb_whole_archive_lib
    TARGETS.add_library(
        "rocksdb_whole_archive_lib",
        src_mk["LIB_SOURCES"] +
        # always add range_tree, it's only excluded on ppc64, which we don't use internally
        src_mk["RANGE_TREE_SOURCES"] +
        src_mk["TOOL_LIB_SOURCES"],
        deps=None,
        headers=None,
        extra_external_deps="",
        link_whole=True)
    # rocksdb_test_lib
    TARGETS.add_library(
        "rocksdb_test_lib",
        src_mk.get("MOCK_LIB_SOURCES", []) +
        src_mk.get("TEST_LIB_SOURCES", []) +
        src_mk.get("EXP_LIB_SOURCES", []) +
        src_mk.get("ANALYZER_LIB_SOURCES", []),
        [":rocksdb_lib"],
        extra_test_libs=True
    )
    # rocksdb_tools_lib
    TARGETS.add_library(
        "rocksdb_tools_lib",
        src_mk.get("BENCH_LIB_SOURCES", []) +
        src_mk.get("ANALYZER_LIB_SOURCES", []) +
        ["test_util/testutil.cc"],
        [":rocksdb_lib"])
    # rocksdb_cache_bench_tools_lib
    TARGETS.add_library(
        "rocksdb_cache_bench_tools_lib",
        src_mk.get("CACHE_BENCH_LIB_SOURCES", []),
        [":rocksdb_lib"])
    # rocksdb_stress_lib
    TARGETS.add_rocksdb_library(
        "rocksdb_stress_lib",
        src_mk.get("ANALYZER_LIB_SOURCES", [])
        + src_mk.get('STRESS_LIB_SOURCES', [])
        + ["test_util/testutil.cc"])
    # bench binaries
    for src in src_mk.get("MICROBENCH_SOURCES", []):
        # binary name = source file basename without its extension
        name = src.rsplit('/',1)[1].split('.')[0] if '/' in src else src.split('.')[0]
        TARGETS.add_binary(
            name,
            [src],
            [],
            extra_bench_libs=True
        )
    print("Extra dependencies:\n{0}".format(json.dumps(deps_map)))

    # Dictionary test executable name -> relative source file path
    test_source_map = {}

    # c_test.c is added through TARGETS.add_c_test(). If there
    # are more than one .c test file, we need to extend
    # TARGETS.add_c_test() to include other C tests too.
    for test_src in src_mk.get("TEST_MAIN_SOURCES_C", []):
        if test_src != 'db/c_test.c':
            print("Don't know how to deal with " + test_src)
            return False
    TARGETS.add_c_test()

    try:
        with open(f"{repo_path}/buckifier/bench.json") as json_file:
            fast_fancy_bench_config_list = json.load(json_file)
            for config_dict in fast_fancy_bench_config_list:
                clean_benchmarks = {}
                benchmarks = config_dict['benchmarks']
                # keep only plain (non-dict) metric entries per benchmark
                for binary, benchmark_dict in benchmarks.items():
                    clean_benchmarks[binary] = {}
                    for benchmark, overloaded_metric_list in benchmark_dict.items():
                        clean_benchmarks[binary][benchmark] = []
                        for metric in overloaded_metric_list:
                            if not isinstance(metric, dict):
                                clean_benchmarks[binary][benchmark].append(metric)
                TARGETS.add_fancy_bench_config(config_dict['name'], clean_benchmarks, False, config_dict['expected_runtime_one_iter'], config_dict['sl_iterations'], config_dict['regression_threshold'])

        with open(f"{repo_path}/buckifier/bench-slow.json") as json_file:
            slow_fancy_bench_config_list = json.load(json_file)
            for config_dict in slow_fancy_bench_config_list:
                clean_benchmarks = {}
                benchmarks = config_dict['benchmarks']
                for binary, benchmark_dict in benchmarks.items():
                    clean_benchmarks[binary] = {}
                    for benchmark, overloaded_metric_list in benchmark_dict.items():
                        clean_benchmarks[binary][benchmark] = []
                        for metric in overloaded_metric_list:
                            if not isinstance(metric, dict):
                                clean_benchmarks[binary][benchmark].append(metric)
            # NOTE(review): this second loop registers every slow config with
            # the clean_benchmarks left over from the LAST iteration of the
            # loop above — confirm that is intended.
            for config_dict in slow_fancy_bench_config_list:
                TARGETS.add_fancy_bench_config(config_dict['name']+"_slow", clean_benchmarks, True, config_dict['expected_runtime_one_iter'], config_dict['sl_iterations'], config_dict['regression_threshold'])
    # it is better servicelab experiments break
    # than rocksdb github ci
    except Exception:
        pass

    TARGETS.add_test_header()

    for test_src in src_mk.get("TEST_MAIN_SOURCES", []):
        # test name = basename of the source without its .c/.cc extension
        test = test_src.split('.c')[0].strip().split('/')[-1].strip()
        test_source_map[test] = test_src
        print("" + test + " " + test_src)

    for target_alias, deps in deps_map.items():
        for test, test_src in sorted(test_source_map.items()):
            if len(test) == 0:
                print(ColorString.warning("Failed to get test name for %s" % test_src))
                continue

            # suffix the alias so each deps_map entry yields its own target
            test_target_name = \
                test if not target_alias else test + "_" + target_alias

            if test in _EXPORTED_TEST_LIBS:
                # also export this test as a library for other projects
                test_library = "%s_lib" % test_target_name
                TARGETS.add_library(test_library, [test_src], deps=[":rocksdb_test_lib"], extra_test_libs=True)
                TARGETS.register_test(
                    test_target_name,
                    test_src,
                    deps = json.dumps(deps['extra_deps'] + [':'+test_library]),
                    extra_compiler_flags = json.dumps(deps['extra_compiler_flags']))
            else:
                TARGETS.register_test(
                    test_target_name,
                    test_src,
                    deps = json.dumps(deps['extra_deps'] + [":rocksdb_test_lib"] ),
                    extra_compiler_flags = json.dumps(deps['extra_compiler_flags']))

    print(ColorString.info("Generated TARGETS Summary:"))
    print(ColorString.info("- %d libs" % TARGETS.total_lib))
    print(ColorString.info("- %d binarys" % TARGETS.total_bin))
    print(ColorString.info("- %d tests" % TARGETS.total_test))
    return True
|
|
|
|
|
|
|
|
|
|
|
|
def get_rocksdb_path():
    """Return the absolute path of the RocksDB checkout.

    The buckifier scripts live one level below the repo root, so the
    root is the parent of the directory containing the running script.
    """
    # rocksdb = {script_dir}/..
    script_dir = os.path.abspath(os.path.dirname(sys.argv[0]))
    return os.path.abspath(os.path.join(script_dir, "../"))
|
|
|
|
|
2020-05-20 20:35:28 +02:00
|
|
|
|
2017-04-05 01:09:31 +02:00
|
|
|
def exit_with_error(msg):
    """Print *msg* as a colored error and terminate with exit status 1."""
    print(ColorString.error(msg))
    sys.exit(1)
|
|
|
|
|
|
|
|
|
|
|
|
def main():
    """Generate the TARGETS file for the RocksDB checkout containing this script."""
    # Merge any user-supplied extra dependencies with the defaults.
    deps_map = get_dependencies()
    # Generate TARGETS file for buck; abort loudly on failure.
    if not generate_targets(get_rocksdb_path(), deps_map):
        exit_with_error("Failed to generate TARGETS files")
|
|
|
|
|
|
|
|
# Script entry point: only run the generator when invoked directly, not on import.
if __name__ == "__main__":
    main()
|