Skip to content

Commit f08e805

Browse files
AlnisM authored and meta-codesync[bot] committed
LockGroupIterator object cache wrapper
Summary: Add object cache wrapper for LockGroupIterator. Reviewed By: pbhandar2 Differential Revision: D103213966 fbshipit-source-id: b41b094ff7b505456bb87c9e527ebc67d9194d0f
1 parent 11e4b87 commit f08e805

2 files changed

Lines changed: 191 additions & 21 deletions

File tree

cachelib/object_cache/ObjectCache.h

Lines changed: 61 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -151,6 +151,9 @@ class ObjectCache : public ObjectCacheBase<AllocatorT> {
151151
using Restorer = Restorer<ObjectCache<AllocatorT>>;
152152
using EvictionIterator = typename AllocatorT::EvictionIterator;
153153
using RawAccessIterator = typename AllocatorT::AccessIterator;
154+
using RawLockGroupAccessIterator =
155+
typename AllocatorT::LockGroupAccessIterator;
156+
using LockGroupFilterFn = typename AllocatorT::LockGroupFilterFn;
154157
using NvmCache = typename AllocatorT::NvmCacheT;
155158
using NvmCacheConfig = typename AllocatorT::NvmCacheT::Config;
156159
using WriteHandle = typename AllocatorT::WriteHandle;
@@ -176,31 +179,31 @@ class ObjectCache : public ObjectCacheBase<AllocatorT> {
176179
__builtin_align_up(memory, kValueAlignment));
177180
}
178181

179-
// Wrapper iterator that provides aligned access to ObjectCacheItem. Does not
182+
// Wrapper iterator that provides aligned access to ObjectCacheItem. Does not
180183
// expose the underlying CacheItem to avoid using an unaligned pointer.
181-
class AccessIterator {
184+
// Templated on the underlying allocator iterator type (per-bucket
185+
// AccessIterator or LockGroupIterator).
186+
template <typename InnerItr>
187+
class IterWrapper {
182188
public:
183-
explicit AccessIterator(RawAccessIterator&& itr) : inner_(std::move(itr)) {}
184-
AccessIterator(AccessIterator&&) = default;
185-
AccessIterator& operator=(AccessIterator&&) = default;
186-
AccessIterator(const AccessIterator&) = delete;
187-
AccessIterator& operator=(const AccessIterator&) = delete;
188-
189-
AccessIterator& operator++() {
189+
explicit IterWrapper(InnerItr&& itr) : inner_(std::move(itr)) {}
190+
IterWrapper(IterWrapper&&) = default;
191+
IterWrapper& operator=(IterWrapper&&) = default;
192+
IterWrapper(const IterWrapper&) = delete;
193+
IterWrapper& operator=(const IterWrapper&) = delete;
194+
~IterWrapper() = default;
195+
196+
IterWrapper& operator++() {
190197
++inner_;
191198
return *this;
192199
}
193-
// AccessIterator exposes the cache item getters so "dereference" returns
194-
// this iterator. Note: this is required for range-based for loops.
195-
AccessIterator& operator*() { return *this; }
196-
const AccessIterator& operator*() const { return *this; }
200+
// Dereference returns this wrapper so range-based for loops see the
201+
// ObjectCache item accessors rather than the unaligned CacheItem.
202+
IterWrapper& operator*() { return *this; }
203+
const IterWrapper& operator*() const { return *this; }
197204

198-
bool operator==(const AccessIterator& o) const {
199-
return inner_ == o.inner_;
200-
}
201-
bool operator!=(const AccessIterator& o) const {
202-
return inner_ != o.inner_;
203-
}
205+
bool operator==(const IterWrapper& o) const { return inner_ == o.inner_; }
206+
bool operator!=(const IterWrapper& o) const { return inner_ != o.inner_; }
204207
FOLLY_ALWAYS_INLINE explicit operator bool() {
205208
return inner_.asHandle().get() != nullptr;
206209
}
@@ -238,8 +241,19 @@ class ObjectCache : public ObjectCacheBase<AllocatorT> {
238241
return getAlignedItemPtr(inner_->getMemory())->objectSize;
239242
}
240243

241-
private:
242-
RawAccessIterator inner_;
244+
protected:
245+
InnerItr inner_;
246+
};
247+
248+
using AccessIterator = IterWrapper<RawAccessIterator>;
249+
250+
// Adds the lock-group iterator's scan statistics on top of the shared
251+
// wrapper.
252+
class LockGroupAccessIterator
253+
: public IterWrapper<RawLockGroupAccessIterator> {
254+
public:
255+
using IterWrapper<RawLockGroupAccessIterator>::IterWrapper;
256+
const auto& getStats() const { return this->inner_.getStats(); }
243257
};
244258

245259
enum class AllocStatus { kSuccess, kAllocError, kKeyAlreadyExists };
@@ -378,6 +392,25 @@ class ObjectCache : public ObjectCacheBase<AllocatorT> {
378392

379393
AccessIterator end() { return AccessIterator{this->l1Cache_->end()}; }
380394

395+
// Lock-group iteration variant. See CacheAllocator::beginLockGroup for the
396+
// trade-offs vs. the per-bucket AccessIterator. The optional filter is
397+
// applied to each key under the hash table lock; only matching items have
398+
// handles created.
399+
LockGroupAccessIterator beginLockGroup(LockGroupFilterFn filter = {}) {
400+
return LockGroupAccessIterator{
401+
this->l1Cache_->beginLockGroup(std::move(filter))};
402+
}
403+
404+
LockGroupAccessIterator beginLockGroup(util::Throttler::Config config,
405+
LockGroupFilterFn filter = {}) {
406+
return LockGroupAccessIterator{
407+
this->l1Cache_->beginLockGroup(config, std::move(filter))};
408+
}
409+
410+
LockGroupAccessIterator endLockGroup() {
411+
return LockGroupAccessIterator{this->l1Cache_->endLockGroup()};
412+
}
413+
381414
// Get the default l1 allocation size in bytes.
382415
static uint32_t getL1AllocSize(uint32_t maxKeySizeBytes);
383416

@@ -509,6 +542,13 @@ class ObjectCache : public ObjectCacheBase<AllocatorT> {
509542
return itr.getObjectSize();
510543
}
511544

545+
size_t getObjectSize(LockGroupAccessIterator& itr) const {
546+
if (!itr) {
547+
return 0;
548+
}
549+
return itr.getObjectSize();
550+
}
551+
512552
// Update the object size without updating the object itself.
513553
// This is useful when object has been changed, although it's not changed
514554
// by mutateObject.

cachelib/object_cache/tests/ObjectCacheTest.cpp

Lines changed: 130 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1837,6 +1837,118 @@ class ObjectCacheTest : public ::testing::Test {
18371837
ts[i].join();
18381838
}
18391839
}
1840+
1841+
std::unique_ptr<ObjectCache> makeFooCache() {
1842+
ObjectCacheConfig config;
1843+
config.setCacheName("test").setCacheCapacity(10'000);
1844+
config.setItemDestructor(
1845+
[&](ObjectCacheDestructorData data) { data.deleteObject<Foo>(); });
1846+
return ObjectCache::create(config);
1847+
}
1848+
1849+
std::map<std::string, Foo> populateFooCache(ObjectCache& objcache, int n) {
1850+
std::map<std::string, Foo> expected;
1851+
for (int i = 0; i < n; i++) {
1852+
auto key = folly::sformat("Foo_{}", i);
1853+
auto foo = std::make_unique<Foo>();
1854+
foo->a = i;
1855+
foo->b = i * 2;
1856+
foo->c = i * 3;
1857+
expected[key] = *foo;
1858+
auto [allocRes, _, __] = objcache.insertOrReplace(key, std::move(foo));
1859+
EXPECT_EQ(ObjectCache::AllocStatus::kSuccess, allocRes);
1860+
}
1861+
return expected;
1862+
}
1863+
1864+
template <typename Iter>
1865+
void verifyVisitsAllFoos(Iter begin,
1866+
const Iter& end,
1867+
const std::map<std::string, Foo>& expected) {
1868+
std::map<std::string, Foo> visited;
1869+
for (auto it = std::move(begin); it != end; ++it) {
1870+
ASSERT_TRUE(static_cast<bool>(it));
1871+
visited[it.getKey().str()] = *it.template getObjectPtrAs<Foo>();
1872+
}
1873+
EXPECT_EQ(expected.size(), visited.size());
1874+
for (const auto& [k, v] : expected) {
1875+
auto vIt = visited.find(k);
1876+
ASSERT_NE(visited.end(), vIt);
1877+
EXPECT_EQ(v.a, vIt->second.a);
1878+
EXPECT_EQ(v.b, vIt->second.b);
1879+
EXPECT_EQ(v.c, vIt->second.c);
1880+
}
1881+
}
1882+
1883+
void testAccessIteratorVisitsAll() {
1884+
auto objcache = makeFooCache();
1885+
auto expected = populateFooCache(*objcache, 50);
1886+
verifyVisitsAllFoos(objcache->begin(), objcache->end(), expected);
1887+
}
1888+
1889+
void testAccessIteratorEmpty() {
1890+
auto objcache = makeFooCache();
1891+
EXPECT_EQ(objcache->begin(), objcache->end());
1892+
}
1893+
1894+
void testLockGroupAccessIteratorVisitsAll() {
1895+
auto objcache = makeFooCache();
1896+
auto expected = populateFooCache(*objcache, 50);
1897+
verifyVisitsAllFoos(objcache->beginLockGroup(), objcache->endLockGroup(),
1898+
expected);
1899+
}
1900+
1901+
void testLockGroupAccessIteratorEmpty() {
1902+
auto objcache = makeFooCache();
1903+
EXPECT_EQ(objcache->beginLockGroup(), objcache->endLockGroup());
1904+
}
1905+
1906+
void testLockGroupAccessIteratorFilter() {
1907+
auto objcache = makeFooCache();
1908+
populateFooCache(*objcache, 50);
1909+
1910+
auto filter = [](folly::StringPiece key) {
1911+
return key == "Foo_0" || key == "Foo_1" || key == "Foo_2";
1912+
};
1913+
std::set<std::string> filteredVisited;
1914+
for (auto it = objcache->beginLockGroup(filter);
1915+
it != objcache->endLockGroup();
1916+
++it) {
1917+
filteredVisited.insert(it.getKey().str());
1918+
}
1919+
const std::set<std::string> filteredExpected{"Foo_0", "Foo_1", "Foo_2"};
1920+
EXPECT_EQ(filteredExpected, filteredVisited);
1921+
}
1922+
1923+
void testLockGroupAccessIteratorPrefixScan() {
1924+
auto objcache = makeFooCache();
1925+
std::set<std::string> expectedAlpha;
1926+
std::set<std::string> expectedBeta;
1927+
for (int i = 0; i < 500; i++) {
1928+
auto alpha = folly::sformat("alpha_{}", i);
1929+
auto beta = folly::sformat("beta_{}", i);
1930+
expectedAlpha.insert(alpha);
1931+
expectedBeta.insert(beta);
1932+
auto [r1, _1, __1] =
1933+
objcache->insertOrReplace(alpha, std::make_unique<Foo>());
1934+
auto [r2, _2, __2] =
1935+
objcache->insertOrReplace(beta, std::make_unique<Foo>());
1936+
ASSERT_EQ(ObjectCache::AllocStatus::kSuccess, r1);
1937+
ASSERT_EQ(ObjectCache::AllocStatus::kSuccess, r2);
1938+
}
1939+
1940+
auto prefixFilter =
1941+
[prefix = folly::StringPiece{"alpha_"}](folly::StringPiece key) {
1942+
return key.startsWith(prefix);
1943+
};
1944+
std::set<std::string> visited;
1945+
for (auto it = objcache->beginLockGroup(prefixFilter);
1946+
it != objcache->endLockGroup();
1947+
++it) {
1948+
visited.insert(it.getKey().str());
1949+
}
1950+
EXPECT_EQ(expectedAlpha, visited);
1951+
}
18401952
};
18411953

18421954
using AllocatorTypes = ::testing::Types<LruAllocator,
@@ -1853,6 +1965,24 @@ TYPED_TEST(ObjectCacheTest, SetEvictionPolicyConfig) {
18531965
}
18541966
}
18551967
TYPED_TEST(ObjectCacheTest, Simple) { this->testSimple(); }
1968+
TYPED_TEST(ObjectCacheTest, AccessIteratorVisitsAll) {
1969+
this->testAccessIteratorVisitsAll();
1970+
}
1971+
TYPED_TEST(ObjectCacheTest, AccessIteratorEmpty) {
1972+
this->testAccessIteratorEmpty();
1973+
}
1974+
TYPED_TEST(ObjectCacheTest, LockGroupAccessIteratorVisitsAll) {
1975+
this->testLockGroupAccessIteratorVisitsAll();
1976+
}
1977+
TYPED_TEST(ObjectCacheTest, LockGroupAccessIteratorEmpty) {
1978+
this->testLockGroupAccessIteratorEmpty();
1979+
}
1980+
TYPED_TEST(ObjectCacheTest, LockGroupAccessIteratorFilter) {
1981+
this->testLockGroupAccessIteratorFilter();
1982+
}
1983+
TYPED_TEST(ObjectCacheTest, LockGroupAccessIteratorPrefixScan) {
1984+
this->testLockGroupAccessIteratorPrefixScan();
1985+
}
18561986
TYPED_TEST(ObjectCacheTest, MultiType) { this->testMultiType(); }
18571987
TYPED_TEST(ObjectCacheTest, testMultiTypePolymorphism) {
18581988
this->testMultiTypePolymorphism();

0 commit comments

Comments (0)