Commit 0d4683df authored by Joe Downing, committed by Commit Bot

Revert "Make DestructorAtExit LazyInstances leak."

This reverts commit 1b97e93c.

Reason for revert: Speculative revert due to Linux TSAN unittest errors:
https://uberchromegw.corp.google.com/i/chromium.memory/builders/Linux%20TSan%20Tests/builds/17057

Original change's description:
> Make DestructorAtExit LazyInstances leak.
> 
> This means that tests may no longer rely on LazyInstance<T> to clean
> up global state between tests. Tests that depend on globals but need
> a clean test environment should use explicit test hooks to reset this
> state.
> 
> Only one test depends on DestructorAtExit for correct functionality.
> cast_audio_backend_unittests creates several mocks that are owned by a
> base::LazyInstance. Since Gmock verifies expectations in destructors,
> switching all LazyInstances to leaky ones breaks this verification.
> The solution is to manually verify and clear the expectations at the
> end of the test.
> 
> Note that while the mock objects are annotated as leaked for Gmock,
> that does not mean the cast_audio_backend_unittests will leak
> arbitrary amounts of memory. The test fixture already configures
> StreamMixer for each test run; this setup will clear up any leftover
> objects from the previous test.
> 
> DestructorAtExit will be removed in a followup.
> 
> Bug: 698982
> Cq-Include-Trybots: master.tryserver.chromium.android:android_optional_gpu_tests_rel;master.tryserver.chromium.linux:linux_optional_gpu_tests_rel;master.tryserver.chromium.mac:ios-simulator-cronet;master.tryserver.chromium.mac:ios-simulator-full-configs;master.tryserver.chromium.mac:mac_optional_gpu_tests_rel;master.tryserver.chromium.win:win_optional_gpu_tests_rel
> Change-Id: I822ce507dbb98067a788466e7c8fcc96c3a64ef9
> Reviewed-on: https://chromium-review.googlesource.com/874994
> Commit-Queue: Daniel Cheng <dcheng@chromium.org>
> Reviewed-by: Kenneth MacKay <kmackay@chromium.org>
> Reviewed-by: Gabriel Charette <gab@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#531385}
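
The "explicit test hooks" the reverted change asks tests to use would look
roughly like the sketch below. ConnectionCache, Clear(), and
ResetConnectionCacheForTesting() are illustrative names, not code from this
CL; only the LazyInstance plumbing mirrors the real base/ API.

    #include "base/lazy_instance.h"

    // Hypothetical global state kept behind a leaky LazyInstance.
    class ConnectionCache {
     public:
      void Clear() {}  // Drop any cached state.
    };

    base::LazyInstance<ConnectionCache>::Leaky g_connection_cache =
        LAZY_INSTANCE_INITIALIZER;

    // Explicit hook for tests: a leaky instance is never destroyed between
    // tests, so global state has to be reset by hand.
    void ResetConnectionCacheForTesting() {
      g_connection_cache.Get().Clear();
    }

A test that needs a clean slate would call ResetConnectionCacheForTesting()
from its SetUp() or TearDown() instead of relying on an AtExitManager to
destroy the instance between tests.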

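The manual Gmock verification the description mentions is the pattern this
revert removes from StreamMixerTest further down in the diff. A minimal sketch
of that teardown follows; MockBackend and g_mock_backend are hypothetical
stand-ins for the mocks cast_audio_backend_unittests keeps behind a
base::LazyInstance.

    #include "base/lazy_instance.h"
    #include "testing/gmock/include/gmock/gmock.h"
    #include "testing/gtest/include/gtest/gtest.h"

    class MockBackend {
     public:
      MOCK_METHOD0(Start, void());
    };

    base::LazyInstance<MockBackend>::Leaky g_mock_backend =
        LAZY_INSTANCE_INITIALIZER;

    class FixtureWithLeakyMock : public testing::Test {
     protected:
      ~FixtureWithLeakyMock() override {
        // The mock lives in a leaky static and is never destroyed, so Gmock's
        // verify-on-destruction never runs. Verify and clear the expectations
        // explicitly, then mark the object as intentionally leaked so the
        // Gmock leak checker stays quiet.
        testing::Mock::VerifyAndClear(g_mock_backend.Pointer());
        testing::Mock::AllowLeak(g_mock_backend.Pointer());
      }
    };
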
TBR=dcheng@chromium.org,gab@chromium.org,kmackay@chromium.org

Change-Id: Ief1f9169caa0ff81e2c095df0a0520a71d7640a1
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: 698982
Cq-Include-Trybots: master.tryserver.chromium.android:android_optional_gpu_tests_rel;master.tryserver.chromium.linux:linux_optional_gpu_tests_rel;master.tryserver.chromium.mac:ios-simulator-cronet;master.tryserver.chromium.mac:ios-simulator-full-configs;master.tryserver.chromium.mac:mac_optional_gpu_tests_rel;master.tryserver.chromium.win:win_optional_gpu_tests_rel
Reviewed-on: https://chromium-review.googlesource.com/882506
Reviewed-by: Joe Downing <joedow@chromium.org>
Commit-Queue: Joe Downing <joedow@chromium.org>
Cr-Commit-Position: refs/heads/master@{#531398}
parent cf60b272
......@@ -81,6 +81,25 @@ struct LazyInstanceTraitsBase {
// can implement the more complicated pieces out of line in the .cc file.
namespace internal {
// This traits class causes destruction of the contained Type at process exit
// via AtExitManager. This is generally not what you want. Instead, prefer
// Leaky below.
template <typename Type>
struct DestructorAtExitLazyInstanceTraits {
static const bool kRegisterOnExit = true;
#if DCHECK_IS_ON()
static const bool kAllowedToAccessOnNonjoinableThread = false;
#endif
static Type* New(void* instance) {
return LazyInstanceTraitsBase<Type>::New(instance);
}
static void Delete(Type* instance) {
LazyInstanceTraitsBase<Type>::CallDestructor(instance);
}
};
// Use LazyInstance<T>::Leaky for a less-verbose call-site typedef; e.g.:
// base::LazyInstance<T>::Leaky my_leaky_lazy_instance;
// instead of:
......@@ -104,11 +123,6 @@ struct LeakyLazyInstanceTraits {
}
};
// TODO(dcheng): DestructorAtExit will be completely removed in followups if
// there are no issues discovered.
template <typename Type>
using DestructorAtExitLazyInstanceTraits = LeakyLazyInstanceTraits<Type>;
template <typename Type>
struct ErrorMustSelectLazyOrDestructorAtExitForLazyInstance {};
......@@ -133,10 +147,9 @@ class LazyInstance {
// Convenience typedef to avoid having to repeat Type for leaky lazy
// instances.
using Leaky = LazyInstance<Type, internal::LeakyLazyInstanceTraits<Type>>;
// TODO(dcheng): DestructorAtExit will be completely removed in followups if
// there are no issues discovered.
using DestructorAtExit = Leaky;
typedef LazyInstance<Type, internal::LeakyLazyInstanceTraits<Type>> Leaky;
typedef LazyInstance<Type, internal::DestructorAtExitLazyInstanceTraits<Type>>
DestructorAtExit;
Type& Get() {
return *Pointer();
......
......@@ -8,11 +8,11 @@
#include <vector>
#include "base/at_exit.h"
#include "base/atomic_sequence_num.h"
#include "base/atomicops.h"
#include "base/barrier_closure.h"
#include "base/bind.h"
#include "base/lazy_instance.h"
#include "base/logging.h"
#include "base/synchronization/atomic_flag.h"
#include "base/sys_info.h"
#include "base/threading/platform_thread.h"
#include "base/threading/simple_thread.h"
......@@ -20,111 +20,48 @@
#include "build/build_config.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace base {
namespace {
base::AtomicSequenceNumber constructed_seq_;
base::AtomicSequenceNumber destructed_seq_;
class ConstructAndDestructLogger {
public:
ConstructAndDestructLogger() {
CHECK(!constructed_->IsSet());
constructed_->Set();
constructed_seq_.GetNext();
}
~ConstructAndDestructLogger() {
CHECK(false) << "This should be leaked and never reached!";
}
class ScopedSetup {
public:
ScopedSetup() {
CHECK_EQ(nullptr, ConstructAndDestructLogger::constructed_);
ConstructAndDestructLogger::constructed_ = &constructed_;
destructed_seq_.GetNext();
}
~ScopedSetup() {
CHECK_EQ(&constructed_, ConstructAndDestructLogger::constructed_);
ConstructAndDestructLogger::constructed_ = nullptr;
}
AtomicFlag& constructed() { return constructed_; }
private:
AtomicFlag constructed_;
};
private:
static AtomicFlag* constructed_;
DISALLOW_COPY_AND_ASSIGN(ConstructAndDestructLogger);
};
AtomicFlag* ConstructAndDestructLogger::constructed_ = nullptr;
LazyInstance<ConstructAndDestructLogger>::Leaky lazy_logger =
LAZY_INSTANCE_INITIALIZER;
} // namespace
TEST(LazyInstanceTest, Basic) {
ConstructAndDestructLogger::ScopedSetup test_scope;
EXPECT_FALSE(lazy_logger.IsCreated());
EXPECT_FALSE(test_scope.constructed().IsSet());
lazy_logger.Get();
EXPECT_TRUE(lazy_logger.IsCreated());
EXPECT_TRUE(test_scope.constructed().IsSet());
lazy_logger.Pointer();
EXPECT_TRUE(lazy_logger.IsCreated());
EXPECT_TRUE(test_scope.constructed().IsSet());
}
namespace {
class SlowConstructor {
public:
SlowConstructor() : some_int_(0) {
// Sleep for 1 second to try to cause a race.
PlatformThread::Sleep(TimeDelta::FromSeconds(1));
CHECK(!*constructed);
*constructed = true;
base::PlatformThread::Sleep(base::TimeDelta::FromSeconds(1));
++constructed;
some_int_ = 12;
}
int some_int() const { return some_int_; }
class ScopedSetup {
public:
ScopedSetup() {
CHECK_EQ(nullptr, SlowConstructor::constructed);
SlowConstructor::constructed = &constructed_;
}
~ScopedSetup() {
CHECK_EQ(&constructed_, SlowConstructor::constructed);
SlowConstructor::constructed = nullptr;
}
bool constructed() const { return constructed_; }
static int constructed;
private:
bool constructed_ = false;
};
private:
static bool* constructed;
int some_int_;
DISALLOW_COPY_AND_ASSIGN(SlowConstructor);
};
// static
bool* SlowConstructor::constructed = nullptr;
int SlowConstructor::constructed = 0;
class SlowDelegate : public DelegateSimpleThread::Delegate {
class SlowDelegate : public base::DelegateSimpleThread::Delegate {
public:
explicit SlowDelegate(LazyInstance<SlowConstructor>::DestructorAtExit* lazy)
explicit SlowDelegate(
base::LazyInstance<SlowConstructor>::DestructorAtExit* lazy)
: lazy_(lazy) {}
void Run() override {
......@@ -133,29 +70,57 @@ class SlowDelegate : public DelegateSimpleThread::Delegate {
}
private:
LazyInstance<SlowConstructor>::DestructorAtExit* lazy_;
base::LazyInstance<SlowConstructor>::DestructorAtExit* lazy_;
DISALLOW_COPY_AND_ASSIGN(SlowDelegate);
};
LazyInstance<SlowConstructor>::DestructorAtExit lazy_slow =
} // namespace
base::LazyInstance<ConstructAndDestructLogger>::DestructorAtExit lazy_logger =
LAZY_INSTANCE_INITIALIZER;
} // namespace
TEST(LazyInstanceTest, Basic) {
{
base::ShadowingAtExitManager shadow;
EXPECT_FALSE(lazy_logger.IsCreated());
EXPECT_EQ(0, constructed_seq_.GetNext());
EXPECT_EQ(0, destructed_seq_.GetNext());
lazy_logger.Get();
EXPECT_TRUE(lazy_logger.IsCreated());
EXPECT_EQ(2, constructed_seq_.GetNext());
EXPECT_EQ(1, destructed_seq_.GetNext());
lazy_logger.Pointer();
EXPECT_TRUE(lazy_logger.IsCreated());
EXPECT_EQ(3, constructed_seq_.GetNext());
EXPECT_EQ(2, destructed_seq_.GetNext());
}
EXPECT_FALSE(lazy_logger.IsCreated());
EXPECT_EQ(4, constructed_seq_.GetNext());
EXPECT_EQ(4, destructed_seq_.GetNext());
}
base::LazyInstance<SlowConstructor>::DestructorAtExit lazy_slow =
LAZY_INSTANCE_INITIALIZER;
TEST(LazyInstanceTest, ConstructorThreadSafety) {
SlowConstructor::ScopedSetup test_scope;
{
base::ShadowingAtExitManager shadow;
SlowDelegate delegate(&lazy_slow);
EXPECT_FALSE(test_scope.constructed());
EXPECT_EQ(0, SlowConstructor::constructed);
DelegateSimpleThreadPool pool("lazy_instance_cons", 5);
base::DelegateSimpleThreadPool pool("lazy_instance_cons", 5);
pool.AddWork(&delegate, 20);
EXPECT_FALSE(test_scope.constructed());
EXPECT_EQ(0, SlowConstructor::constructed);
pool.Start();
pool.JoinAll();
EXPECT_TRUE(test_scope.constructed());
EXPECT_EQ(1, SlowConstructor::constructed);
}
}
namespace {
......@@ -178,23 +143,24 @@ class DeleteLogger {
} // anonymous namespace
TEST(LazyInstanceTest, LeakyLazyInstance) {
// Check that using a *DestructorAtExit* LazyInstance makes the dtor not run
// when the AtExitManager finishes, since it is secretly the same thing.
// Check that using a plain LazyInstance causes the dtor to run
// when the AtExitManager finishes.
bool deleted1 = false;
{
ShadowingAtExitManager shadow;
static LazyInstance<DeleteLogger>::DestructorAtExit test =
base::ShadowingAtExitManager shadow;
static base::LazyInstance<DeleteLogger>::DestructorAtExit test =
LAZY_INSTANCE_INITIALIZER;
test.Get().SetDeletedPtr(&deleted1);
}
EXPECT_FALSE(deleted1);
EXPECT_TRUE(deleted1);
// Check that using a *leaky* LazyInstance makes the dtor not run
// when the AtExitManager finishes.
bool deleted2 = false;
{
ShadowingAtExitManager shadow;
static LazyInstance<DeleteLogger>::Leaky test = LAZY_INSTANCE_INITIALIZER;
base::ShadowingAtExitManager shadow;
static base::LazyInstance<DeleteLogger>::Leaky
test = LAZY_INSTANCE_INITIALIZER;
test.Get().SetDeletedPtr(&deleted2);
}
EXPECT_FALSE(deleted2);
......@@ -216,6 +182,8 @@ class AlignedData {
EXPECT_EQ(0u, reinterpret_cast<uintptr_t>(ptr) & (align - 1))
TEST(LazyInstanceTest, Alignment) {
using base::LazyInstance;
// Create some static instances with increasing sizes and alignment
// requirements. By ordering this way, the linker will need to do some work to
// ensure proper alignment of the static data.
......@@ -239,46 +207,35 @@ class BlockingConstructor {
public:
BlockingConstructor() {
EXPECT_FALSE(WasConstructorCalled());
constructor_called_->Set();
base::subtle::NoBarrier_Store(&constructor_called_, 1);
EXPECT_TRUE(WasConstructorCalled());
while (!complete_construction_->IsSet())
PlatformThread::YieldCurrentThread();
while (!base::subtle::NoBarrier_Load(&complete_construction_))
base::PlatformThread::YieldCurrentThread();
done_construction_ = true;
}
// Returns true if BlockingConstructor() was entered.
static bool WasConstructorCalled() { return constructor_called_->IsSet(); }
// Instructs BlockingConstructor() that it may now unblock its construction.
static void CompleteConstructionNow() { complete_construction_->Set(); }
bool done_construction() { return done_construction_; }
~BlockingConstructor() {
// Restore static state for the next test.
base::subtle::NoBarrier_Store(&constructor_called_, 0);
base::subtle::NoBarrier_Store(&complete_construction_, 0);
}
class ScopedSetup {
public:
ScopedSetup() {
CHECK_EQ(nullptr, BlockingConstructor::constructor_called_);
CHECK_EQ(nullptr, BlockingConstructor::complete_construction_);
BlockingConstructor::constructor_called_ = &constructor_called_;
BlockingConstructor::complete_construction_ = &complete_construction_;
// Returns true if BlockingConstructor() was entered.
static bool WasConstructorCalled() {
return base::subtle::NoBarrier_Load(&constructor_called_);
}
~ScopedSetup() {
CHECK_EQ(&constructor_called_, BlockingConstructor::constructor_called_);
CHECK_EQ(&complete_construction_,
BlockingConstructor::complete_construction_);
BlockingConstructor::constructor_called_ = nullptr;
BlockingConstructor::complete_construction_ = nullptr;
// Instructs BlockingConstructor() that it may now unblock its construction.
static void CompleteConstructionNow() {
base::subtle::NoBarrier_Store(&complete_construction_, 1);
}
private:
AtomicFlag constructor_called_;
AtomicFlag complete_construction_;
};
bool done_construction() { return done_construction_; }
private:
static AtomicFlag* constructor_called_;
static AtomicFlag* complete_construction_;
// Use Atomic32 instead of AtomicFlag for them to be trivially initialized.
static base::subtle::Atomic32 constructor_called_;
static base::subtle::Atomic32 complete_construction_;
bool done_construction_ = false;
......@@ -287,12 +244,12 @@ class BlockingConstructor {
// A SimpleThread running at |thread_priority| which invokes |before_get|
// (optional) and then invokes Get() on the LazyInstance it's assigned.
class BlockingConstructorThread : public SimpleThread {
class BlockingConstructorThread : public base::SimpleThread {
public:
BlockingConstructorThread(
ThreadPriority thread_priority,
LazyInstance<BlockingConstructor>::DestructorAtExit* lazy,
OnceClosure before_get)
base::ThreadPriority thread_priority,
base::LazyInstance<BlockingConstructor>::DestructorAtExit* lazy,
base::OnceClosure before_get)
: SimpleThread("BlockingConstructorThread", Options(thread_priority)),
lazy_(lazy),
before_get_(std::move(before_get)) {}
......@@ -304,18 +261,18 @@ class BlockingConstructorThread : public SimpleThread {
}
private:
LazyInstance<BlockingConstructor>::DestructorAtExit* lazy_;
OnceClosure before_get_;
base::LazyInstance<BlockingConstructor>::DestructorAtExit* lazy_;
base::OnceClosure before_get_;
DISALLOW_COPY_AND_ASSIGN(BlockingConstructorThread);
};
// static
AtomicFlag* BlockingConstructor::constructor_called_ = nullptr;
base::subtle::Atomic32 BlockingConstructor::constructor_called_ = 0;
// static
AtomicFlag* BlockingConstructor::complete_construction_ = nullptr;
base::subtle::Atomic32 BlockingConstructor::complete_construction_ = 0;
LazyInstance<BlockingConstructor>::DestructorAtExit lazy_blocking =
base::LazyInstance<BlockingConstructor>::DestructorAtExit lazy_blocking =
LAZY_INSTANCE_INITIALIZER;
} // namespace
......@@ -325,29 +282,28 @@ LazyInstance<BlockingConstructor>::DestructorAtExit lazy_blocking =
// to eventually complete construction.
// This is a regression test for https://crbug.com/797129.
TEST(LazyInstanceTest, PriorityInversionAtInitializationResolves) {
BlockingConstructor::ScopedSetup test_scope;
TimeTicks test_begin = TimeTicks::Now();
base::TimeTicks test_begin = base::TimeTicks::Now();
// Construct BlockingConstructor from a background thread.
BlockingConstructorThread background_getter(ThreadPriority::BACKGROUND,
&lazy_blocking, OnceClosure());
BlockingConstructorThread background_getter(
base::ThreadPriority::BACKGROUND, &lazy_blocking, base::OnceClosure());
background_getter.Start();
while (!BlockingConstructor::WasConstructorCalled())
PlatformThread::Sleep(TimeDelta::FromMilliseconds(1));
base::PlatformThread::Sleep(base::TimeDelta::FromMilliseconds(1));
// Spin up 4 foreground threads per core contending to get the LazyInstance
// that is already under construction. When they are all running and poking at
// it, allow the background thread to complete its work.
const int kNumForegroundThreads = 4 * SysInfo::NumberOfProcessors();
std::vector<std::unique_ptr<SimpleThread>> foreground_threads;
RepeatingClosure foreground_thread_ready_callback =
BarrierClosure(kNumForegroundThreads,
BindOnce(&BlockingConstructor::CompleteConstructionNow));
const int kNumForegroundThreads = 4 * base::SysInfo::NumberOfProcessors();
std::vector<std::unique_ptr<base::SimpleThread>> foreground_threads;
base::RepeatingClosure foreground_thread_ready_callback =
base::BarrierClosure(
kNumForegroundThreads,
base::BindOnce(&BlockingConstructor::CompleteConstructionNow));
for (int i = 0; i < kNumForegroundThreads; ++i) {
foreground_threads.push_back(std::make_unique<BlockingConstructorThread>(
ThreadPriority::NORMAL, &lazy_blocking,
base::ThreadPriority::NORMAL, &lazy_blocking,
foreground_thread_ready_callback));
foreground_threads.back()->Start();
}
......@@ -361,7 +317,8 @@ TEST(LazyInstanceTest, PriorityInversionAtInitializationResolves) {
// Fail if this test takes more than 5 seconds (it takes 5-10 seconds on a
// Z840 without r527445 but is expected to be fast (~30ms) with the fix).
EXPECT_LT(TimeTicks::Now() - test_begin, TimeDelta::FromSeconds(5));
EXPECT_LT(base::TimeTicks::Now() - test_begin,
base::TimeDelta::FromSeconds(5));
}
TEST(LazyInstanceTest, ConstExprConstructible) {
......@@ -369,12 +326,11 @@ TEST(LazyInstanceTest, ConstExprConstructible) {
// much... Declaring the variable constexpr forces the compiler to verify that
// this is possible but renders the LazyInstance unusable as none of its
// methods are const.
static constexpr LazyInstance<SlowConstructor> kTestConstExprConstructible;
static constexpr LazyInstance<SlowConstructor>
static constexpr base::LazyInstance<SlowConstructor>
kTestConstExprConstructible;
static constexpr base::LazyInstance<SlowConstructor>
kTestConstExprConstructibleWithExplicitInitializer =
LAZY_INSTANCE_INITIALIZER;
ALLOW_UNUSED_LOCAL(kTestConstExprConstructible);
ALLOW_UNUSED_LOCAL(kTestConstExprConstructibleWithExplicitInitializer);
}
} // namespace base
......@@ -472,22 +472,7 @@ class StreamMixerTest : public testing::Test {
StreamMixer::Get()->SetMixerOutputStreamForTest(std::move(output));
}
~StreamMixerTest() override {
// The remaining MockPostProcessor objects are still indirectly owned by a
// leaky static. Manually verify and clear them here and mark them as
// leaked so Gmock doesn't complain.
for (auto& itr : *MockPostProcessor::instances()) {
testing::Mock::VerifyAndClear(itr.second);
testing::Mock::AllowLeak(itr.second);
}
MockPostProcessor::instances()->clear();
// Similarly, mock_alsa_ is also indirectly owned by a leaky static.
testing::Mock::VerifyAndClear(mock_alsa_);
testing::Mock::AllowLeak(mock_alsa_);
StreamMixer::Get()->ClearInputsForTest();
}
~StreamMixerTest() override { StreamMixer::Get()->ClearInputsForTest(); }
MockAlsaWrapper* mock_alsa() { return mock_alsa_; }
......