diff --git a/source/common/stats/thread_local_store.cc b/source/common/stats/thread_local_store.cc
index 09e235e6ac4c..175d7d473e3c 100644
--- a/source/common/stats/thread_local_store.cc
+++ b/source/common/stats/thread_local_store.cc
@@ -53,10 +53,12 @@ ThreadLocalStoreImpl::~ThreadLocalStoreImpl() {
 }
 
 void ThreadLocalStoreImpl::setHistogramSettings(HistogramSettingsConstPtr&& histogram_settings) {
-  iterateScopes([](const ScopeImplSharedPtr& scope) -> bool {
-    ASSERT(scope->centralCacheLockHeld()->histograms_.empty());
-    return true;
-  });
+  iterateScopes([this](const ScopeImplSharedPtr& scope)
+                    ABSL_EXCLUSIVE_LOCKS_REQUIRED(lock_) -> bool {
+                      assertLocked(*scope);
+                      ASSERT(scope->centralCacheLockHeld()->histograms_.empty());
+                      return true;
+                    });
   histogram_settings_ = std::move(histogram_settings);
 }
 
@@ -74,6 +76,7 @@ void ThreadLocalStoreImpl::setStatsMatcher(StatsMatcherPtr&& stats_matcher) {
   const uint32_t first_histogram_index = deleted_histograms_.size();
   iterateScopesLockHeld([this](const ScopeImplSharedPtr& scope) ABSL_EXCLUSIVE_LOCKS_REQUIRED(
                             lock_) -> bool {
+    assertLocked(*scope);
     const CentralCacheEntrySharedPtr& central_cache = scope->centralCacheLockHeld();
     removeRejectedStats(central_cache->counters_,
                         [this](const CounterSharedPtr& counter) mutable {
@@ -293,6 +296,7 @@ void ThreadLocalStoreImpl::releaseScopeCrossThread(ScopeImpl* scope) {
     // VirtualHosts.
     bool need_post = scopes_to_cleanup_.empty();
     scopes_to_cleanup_.push_back(scope->scope_id_);
+    assertLocked(*scope);
     central_cache_entries_to_cleanup_.push_back(scope->centralCacheLockHeld());
 
     lock.release();
diff --git a/source/common/stats/thread_local_store.h b/source/common/stats/thread_local_store.h
index f8f27558b400..b2d7f54990ea 100644
--- a/source/common/stats/thread_local_store.h
+++ b/source/common/stats/thread_local_store.h
@@ -339,16 +339,20 @@ class ThreadLocalStoreImpl : Logger::Loggable<Logger::Id::stats>, public StoreRo
       return iterateLockHeld(fn);
     }
-    bool iterateLockHeld(const IterateFn<Counter>& fn) const {
+    bool iterateLockHeld(const IterateFn<Counter>& fn) const
+        ABSL_EXCLUSIVE_LOCKS_REQUIRED(parent_.lock_) {
      return iterHelper(fn, centralCacheLockHeld()->counters_);
     }
-    bool iterateLockHeld(const IterateFn<Gauge>& fn) const {
+    bool iterateLockHeld(const IterateFn<Gauge>& fn) const
+        ABSL_EXCLUSIVE_LOCKS_REQUIRED(parent_.lock_) {
       return iterHelper(fn, centralCacheLockHeld()->gauges_);
     }
-    bool iterateLockHeld(const IterateFn<Histogram>& fn) const {
+    bool iterateLockHeld(const IterateFn<Histogram>& fn) const
+        ABSL_EXCLUSIVE_LOCKS_REQUIRED(parent_.lock_) {
       return iterHelper(fn, centralCacheLockHeld()->histograms_);
     }
-    bool iterateLockHeld(const IterateFn<TextReadout>& fn) const {
+    bool iterateLockHeld(const IterateFn<TextReadout>& fn) const
+        ABSL_EXCLUSIVE_LOCKS_REQUIRED(parent_.lock_) {
       return iterHelper(fn, centralCacheLockHeld()->text_readouts_);
     }
     ThreadLocalStoreImpl& store() override { return parent_; }
@@ -421,7 +425,7 @@ class ThreadLocalStoreImpl : Logger::Loggable<Logger::Id::stats>, public StoreRo
     // scope->central_cache_, the analysis system cannot understand that the
     // scope's parent_.lock_ is held, so we assert that here.
     const CentralCacheEntrySharedPtr& centralCacheLockHeld() const
-        ABSL_ASSERT_EXCLUSIVE_LOCK(parent_.lock_) {
+        ABSL_EXCLUSIVE_LOCKS_REQUIRED(parent_.lock_) {
       return central_cache_;
     }
@@ -463,6 +467,19 @@ class ThreadLocalStoreImpl : Logger::Loggable<Logger::Id::stats>, public StoreRo
 
   using ScopeImplSharedPtr = std::shared_ptr<ScopeImpl>;
 
+  /**
+   * assertLocked exists to help the compiler figure out that lock_ and scope.parent_.lock_ are
+   * actually the same lock, known under two different names. This function requires lock_ to be
+   * held when it is called, and at the same time it is annotated as if it checks at runtime
+   * that scope.parent_.lock_ is held. It does not actually perform any runtime checks; none are
+   * needed, since we know that scope.parent_ refers to this ThreadLocalStoreImpl and therefore
+   * scope.parent_.lock_ is the same lock as lock_.
+   */
+  void assertLocked(const ScopeImpl& scope) const ABSL_EXCLUSIVE_LOCKS_REQUIRED(lock_)
+      ABSL_ASSERT_EXCLUSIVE_LOCK(scope.parent_.lock_) {
+    UNREFERENCED_PARAMETER(scope);
+  }
+
   /**
    * Calls fn_lock_held for every scope, with lock_ held. This avoids iterate/destruct
    * races for scopes.
@@ -481,8 +498,11 @@ class ThreadLocalStoreImpl : Logger::Loggable<Logger::Id::stats>, public StoreRo
 
   // The Store versions of iterate cover all the scopes in the store.
   template <class StatFn> bool iterHelper(StatFn fn) const {
-    return iterateScopes(
-        [fn](const ScopeImplSharedPtr& scope) -> bool { return scope->iterateLockHeld(fn); });
+    return iterateScopes([this, fn](const ScopeImplSharedPtr& scope)
+                             ABSL_EXCLUSIVE_LOCKS_REQUIRED(lock_) -> bool {
+                               assertLocked(*scope);
+                               return scope->iterateLockHeld(fn);
+                             });
   }
 
   std::string getTagsForName(const std::string& name, TagVector& tags) const;
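
Note: the following is a minimal, self-contained sketch of the lock-aliasing idiom that the new assertLocked() helper applies. It is not part of this patch; the Store/Scope names and members are invented for illustration, and it assumes a Clang build with -Wthread-safety plus abseil's thread-annotation macros.

// Sketch only: Store/Scope and their members are invented, not Envoy code.
#include "absl/base/thread_annotations.h"
#include "absl/synchronization/mutex.h"

class Store {
public:
  struct Scope {
    explicit Scope(Store& parent) : parent_(parent) {}

    // Callable only when the parent's lock is held, as the analysis sees it.
    int dataLockHeld() const ABSL_EXCLUSIVE_LOCKS_REQUIRED(parent_.lock_) { return data_; }

    Store& parent_;
    int data_ ABSL_GUARDED_BY(parent_.lock_) = 0;
  };

  // No-op helper: callers must hold lock_, and ABSL_ASSERT_EXCLUSIVE_LOCK makes
  // the analysis treat scope.parent_.lock_ as held afterwards. Both expressions
  // name the same mutex, but the analysis tracks locks purely by expression and
  // cannot prove the aliasing on its own.
  void assertLocked(const Scope& scope) const ABSL_EXCLUSIVE_LOCKS_REQUIRED(lock_)
      ABSL_ASSERT_EXCLUSIVE_LOCK(scope.parent_.lock_) {
    (void)scope; // The parameter exists only so the annotation can name it.
  }

  int read(const Scope& scope) const {
    absl::MutexLock guard(&lock_);
    assertLocked(scope); // Without this call, -Wthread-safety flags the next line.
    return scope.dataLockHeld();
  }

  mutable absl::Mutex lock_;
};

The design point mirrored by the patch is that the helper carries ABSL_ASSERT_EXCLUSIVE_LOCK rather than a second ABSL_EXCLUSIVE_LOCKS_REQUIRED: an assert annotation adds the named lock to the analysis's held set at the call site, whereas a requires annotation would demand a fact the analysis cannot establish.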