rocksdb/util/arena_test.cc
Mike Kolupaev e85f2c64cb Prevent empty memtables from using a lot of memory
Summary:
This fixes OOMs that we (logdevice) are currently having in production.

SkipListRep constructor does a couple of small allocations from ConcurrentArena (see InlineSkipList constructor). ConcurrentArena would sometimes allocate an entire block for that, which is a few megabytes (we use Options::arena_block_size = 4 MB). So an empty memtable can take 4 MB of memory. We have ~40k column families (spread across 15 DB instances), so 4 MB per empty memtable easily OOMs a machine for us.

This PR makes ConcurrentArena always allocate from Arena's inline block when possible. So as long as InlineSkipList's initial allocations are below 2 KB there would be no blocks allocated for empty memtables.
Closes https://github.com/facebook/rocksdb/pull/2569

Differential Revision: D5404029

Pulled By: al13n321

fbshipit-source-id: 568ec22a3fd1a485c06123f6b2dfc5e9ef67cd23
2017-07-28 15:58:43 -07:00

204 lines
6.5 KiB
C++

// Copyright (c) 2011-present, Facebook, Inc. All rights reserved.
// This source code is licensed under both the GPLv2 (found in the
// COPYING file in the root directory) and Apache 2.0 License
// (found in the LICENSE.Apache file in the root directory).
//
// Copyright (c) 2011 The LevelDB Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file. See the AUTHORS file for names of contributors.
#include "util/arena.h"
#include "util/random.h"
#include "util/testharness.h"
namespace rocksdb {
namespace {
// 2 MB, passed as the huge_page_size argument to Arena in the tests below
// to exercise the huge-page-backed allocation path.
const size_t kHugePageSize = 2 * 1024 * 1024;
}  // namespace
// Test fixture for Arena; holds no shared state — each test builds its own
// Arena instance.
class ArenaTest : public testing::Test {};
// Constructing and destroying an Arena with no allocations must be safe.
TEST_F(ArenaTest, Empty) { Arena arena0; }
namespace {
// Returns true iff `allocated` covers `expected` without exceeding it by
// more than 10%. Arena::MemoryAllocatedBytes() may legitimately report
// slightly more than was requested; anything beyond the 10% slack is
// treated as critical overallocation.
bool CheckMemoryAllocated(size_t allocated, size_t expected) {
  const size_t upper_bound = expected + expected / 10;
  if (allocated < expected) {
    return false;
  }
  return allocated <= upper_bound;
}
// Exercises Arena::MemoryAllocatedBytes() across the three allocation
// regimes: mid-sized requests (> quarter block, allocated separately),
// small requests (served out of the current block), and oversized requests
// (again allocated separately). huge_page_size == 0 disables huge pages.
void MemoryAllocatedBytesTest(size_t huge_page_size) {
  const int N = 17;
  size_t req_sz;  // requested size
  size_t bsz = 32 * 1024;  // block size
  size_t expected_memory_allocated;
  Arena arena(bsz, nullptr, huge_page_size);
  // requested size > quarter of a block:
  // allocate requested size separately
  req_sz = 12 * 1024;
  for (int i = 0; i < N; i++) {
    arena.Allocate(req_sz);
  }
  // The inline block counts toward the allocated total from construction.
  expected_memory_allocated = req_sz * N + Arena::kInlineSize;
  ASSERT_PRED2(CheckMemoryAllocated, arena.MemoryAllocatedBytes(),
               expected_memory_allocated);
  // Use up (almost all of) the inline block's capacity so the first small
  // request below cannot be served inline and forces a fresh block.
  arena.Allocate(Arena::kInlineSize - 1);
  // requested size < quarter of a block:
  // allocate a block with the default size, then try to use unused part
  // of the block. So one new block will be allocated for the first
  // Allocate(99) call. All the remaining calls won't lead to new allocation.
  req_sz = 99;
  for (int i = 0; i < N; i++) {
    arena.Allocate(req_sz);
  }
  if (huge_page_size) {
    // With huge pages enabled the new block may be either a regular block
    // (bsz) or a huge page (huge_page_size); accept either outcome.
    ASSERT_TRUE(
        CheckMemoryAllocated(arena.MemoryAllocatedBytes(),
                             expected_memory_allocated + bsz) ||
        CheckMemoryAllocated(arena.MemoryAllocatedBytes(),
                             expected_memory_allocated + huge_page_size));
  } else {
    expected_memory_allocated += bsz;
    ASSERT_PRED2(CheckMemoryAllocated, arena.MemoryAllocatedBytes(),
                 expected_memory_allocated);
  }
  // requested size > size of a block:
  // allocate requested size separately
  expected_memory_allocated = arena.MemoryAllocatedBytes();
  req_sz = 8 * 1024 * 1024;
  for (int i = 0; i < N; i++) {
    arena.Allocate(req_sz);
  }
  expected_memory_allocated += req_sz * N;
  ASSERT_PRED2(CheckMemoryAllocated, arena.MemoryAllocatedBytes(),
               expected_memory_allocated);
}
// Make sure we didn't count the allocate but not used memory space in
// Arena::ApproximateMemoryUsage()
static void ApproximateMemoryUsageTest(size_t huge_page_size) {
  const size_t kBlockSize = 4096;
  const size_t kEntrySize = kBlockSize / 8;
  const size_t kZero = 0;
  Arena arena(kBlockSize, nullptr, huge_page_size);
  // A fresh arena reports zero usage even though the inline block exists.
  ASSERT_EQ(kZero, arena.ApproximateMemoryUsage());
  // allocate inline bytes
  EXPECT_TRUE(arena.IsInInlineBlock());
  arena.AllocateAligned(8);
  EXPECT_TRUE(arena.IsInInlineBlock());
  arena.AllocateAligned(Arena::kInlineSize / 2 - 16);
  EXPECT_TRUE(arena.IsInInlineBlock());
  arena.AllocateAligned(Arena::kInlineSize / 2);
  EXPECT_TRUE(arena.IsInInlineBlock());
  // 8 + (kInlineSize/2 - 16) + kInlineSize/2 == kInlineSize - 8 bytes used,
  // all carved out of the inline block — no separate block allocated yet.
  ASSERT_EQ(arena.ApproximateMemoryUsage(), Arena::kInlineSize - 8);
  ASSERT_PRED2(CheckMemoryAllocated, arena.MemoryAllocatedBytes(),
               Arena::kInlineSize);
  auto num_blocks = kBlockSize / kEntrySize;
  // first allocation
  // This entry no longer fits inline, so the arena leaves the inline block.
  arena.AllocateAligned(kEntrySize);
  EXPECT_FALSE(arena.IsInInlineBlock());
  auto mem_usage = arena.MemoryAllocatedBytes();
  if (huge_page_size) {
    // The new block may be a regular block or a huge page; accept either.
    ASSERT_TRUE(
        CheckMemoryAllocated(mem_usage, kBlockSize + Arena::kInlineSize) ||
        CheckMemoryAllocated(mem_usage, huge_page_size + Arena::kInlineSize));
  } else {
    ASSERT_PRED2(CheckMemoryAllocated, mem_usage,
                 kBlockSize + Arena::kInlineSize);
  }
  auto usage = arena.ApproximateMemoryUsage();
  ASSERT_LT(usage, mem_usage);
  // Fill the rest of the block: usage grows by exactly kEntrySize per call
  // while the total allocated byte count stays flat (no new blocks needed).
  for (size_t i = 1; i < num_blocks; ++i) {
    arena.AllocateAligned(kEntrySize);
    ASSERT_EQ(mem_usage, arena.MemoryAllocatedBytes());
    ASSERT_EQ(arena.ApproximateMemoryUsage(), usage + kEntrySize);
    EXPECT_FALSE(arena.IsInInlineBlock());
    usage = arena.ApproximateMemoryUsage();
  }
  if (huge_page_size) {
    ASSERT_TRUE(usage > mem_usage ||
                usage + huge_page_size - kBlockSize == mem_usage);
  } else {
    ASSERT_GT(usage, mem_usage);
  }
}
// Stress test: performs N allocations of pseudo-random sizes (mixing
// Allocate and AllocateAligned), fills each returned buffer with a known
// byte pattern, then verifies both the memory-usage accounting and that no
// earlier allocation was clobbered by a later one.
static void SimpleTest(size_t huge_page_size) {
  std::vector<std::pair<size_t, char*>> allocated;
  Arena arena(Arena::kMinBlockSize, nullptr, huge_page_size);
  const int N = 100000;
  size_t bytes = 0;
  Random rnd(301);  // fixed seed keeps the test deterministic
  for (int i = 0; i < N; i++) {
    size_t s;
    if (i % (N / 10) == 0) {
      // Every N/10 iterations use a deterministic, growing size so the
      // test also covers large requests.
      s = i;
    } else {
      // Mostly tiny requests, with occasional medium and rare large ones.
      s = rnd.OneIn(4000)
              ? rnd.Uniform(6000)
              : (rnd.OneIn(10) ? rnd.Uniform(100) : rnd.Uniform(20));
    }
    if (s == 0) {
      // Our arena disallows size 0 allocations.
      s = 1;
    }
    char* r;
    if (rnd.OneIn(10)) {
      r = arena.AllocateAligned(s);
    } else {
      r = arena.Allocate(s);
    }
    for (unsigned int b = 0; b < s; b++) {
      // Fill the "i"th allocation with a known bit pattern
      r[b] = i % 256;
    }
    bytes += s;
    allocated.push_back(std::make_pair(s, r));
    ASSERT_GE(arena.ApproximateMemoryUsage(), bytes);
    if (i > N / 10) {
      // After a warm-up period, the accounting overhead must stay under 10%.
      ASSERT_LE(arena.ApproximateMemoryUsage(), bytes * 1.10);
    }
  }
  for (unsigned int i = 0; i < allocated.size(); i++) {
    size_t num_bytes = allocated[i].first;
    const char* p = allocated[i].second;
    for (unsigned int b = 0; b < num_bytes; b++) {
      // Check the "i"th allocation for the known bit pattern
      ASSERT_EQ(int(p[b]) & 0xff, (int)(i % 256));
    }
  }
}
} // namespace
// Run the MemoryAllocatedBytes checks both without and with huge pages.
TEST_F(ArenaTest, MemoryAllocatedBytes) {
  MemoryAllocatedBytesTest(0);
  MemoryAllocatedBytesTest(kHugePageSize);
}
// Run the ApproximateMemoryUsage checks both without and with huge pages.
TEST_F(ArenaTest, ApproximateMemoryUsage) {
  ApproximateMemoryUsageTest(0);
  ApproximateMemoryUsageTest(kHugePageSize);
}
// Run the randomized stress test both without and with huge pages.
TEST_F(ArenaTest, Simple) {
  SimpleTest(0);
  SimpleTest(kHugePageSize);
}
} // namespace rocksdb
// Google Test entry point: initializes the framework and runs every
// ArenaTest case registered above.
int main(int argc, char** argv) {
  ::testing::InitGoogleTest(&argc, argv);
  const int rc = RUN_ALL_TESTS();
  return rc;
}