/*
 * Copyright 2017 Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// @author: Andrei Alexandrescu (aalexandre)

// Test bed for folly/Synchronized.h

#include <folly/LockTraitsBoost.h>
#include <folly/Portability.h>
#include <folly/RWSpinLock.h>
#include <folly/SharedMutex.h>
#include <folly/SpinLock.h>
#include <folly/Synchronized.h>
#include <folly/test/SynchronizedTestLib.h>
#include <folly/portability/GTest.h>

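// The typed tests below run the shared test bodies from
// folly/test/SynchronizedTestLib.h against every mutex type listed in the
// testing::Types<> lists. As a rough illustrative sketch (not part of the
// compiled tests), the API under test is used like this with the default
// folly::SharedMutex:
//
//   folly::Synchronized<std::vector<int>> vec;
//   vec.wlock()->push_back(42);              // exclusive access
//   size_t n = vec.rlock()->size();          // shared access
//   vec.withWLock([](std::vector<int>& v) { v.clear(); });
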
using namespace folly::sync_tests;

template <class Mutex>
class SynchronizedTest : public testing::Test {};

using SynchronizedTestTypes = testing::Types<
    folly::SharedMutexReadPriority,
    folly::SharedMutexWritePriority,
    std::mutex,
    std::recursive_mutex,
#if FOLLY_LOCK_TRAITS_HAVE_TIMED_MUTEXES
    std::timed_mutex,
    std::recursive_timed_mutex,
#endif
    boost::mutex,
    boost::recursive_mutex,
#if FOLLY_LOCK_TRAITS_HAVE_TIMED_MUTEXES
    boost::timed_mutex,
    boost::recursive_timed_mutex,
#endif
#ifdef RW_SPINLOCK_USE_X86_INTRINSIC_
    folly::RWTicketSpinLock32,
    folly::RWTicketSpinLock64,
#endif
    boost::shared_mutex,
    folly::SpinLock>;
TYPED_TEST_CASE(SynchronizedTest, SynchronizedTestTypes);

TYPED_TEST(SynchronizedTest, Basic) {
  testBasic<TypeParam>();
}

TYPED_TEST(SynchronizedTest, WithLock) {
  testWithLock<TypeParam>();
}

TYPED_TEST(SynchronizedTest, Unlock) {
  testUnlock<TypeParam>();
}

TYPED_TEST(SynchronizedTest, Deprecated) {
  testDeprecated<TypeParam>();
}

TYPED_TEST(SynchronizedTest, Concurrency) {
  testConcurrency<TypeParam>();
}

TYPED_TEST(SynchronizedTest, AcquireLocked) {
  testAcquireLocked<TypeParam>();
}

TYPED_TEST(SynchronizedTest, AcquireLockedWithConst) {
  testAcquireLockedWithConst<TypeParam>();
}

TYPED_TEST(SynchronizedTest, DualLocking) {
  testDualLocking<TypeParam>();
}

TYPED_TEST(SynchronizedTest, DualLockingWithConst) {
  testDualLockingWithConst<TypeParam>();
}

TYPED_TEST(SynchronizedTest, ConstCopy) {
  testConstCopy<TypeParam>();
}

template <class Mutex>
class SynchronizedTimedTest : public testing::Test {};

using SynchronizedTimedTestTypes = testing::Types<
#if FOLLY_LOCK_TRAITS_HAVE_TIMED_MUTEXES
    std::timed_mutex,
    std::recursive_timed_mutex,
    boost::timed_mutex,
    boost::recursive_timed_mutex,
    boost::shared_mutex,
#endif
#ifdef RW_SPINLOCK_USE_X86_INTRINSIC_
    folly::RWTicketSpinLock32,
    folly::RWTicketSpinLock64,
#endif
    folly::SharedMutexReadPriority,
    folly::SharedMutexWritePriority>;
TYPED_TEST_CASE(SynchronizedTimedTest, SynchronizedTimedTestTypes);

TYPED_TEST(SynchronizedTimedTest, Timed) {
  testTimed<TypeParam>();
}

TYPED_TEST(SynchronizedTimedTest, TimedSynchronized) {
  testTimedSynchronized<TypeParam>();
}

template <class Mutex>
class SynchronizedTimedWithConstTest : public testing::Test {};

using SynchronizedTimedWithConstTestTypes = testing::Types<
#if FOLLY_LOCK_TRAITS_HAVE_TIMED_MUTEXES
    boost::shared_mutex,
#endif
#ifdef RW_SPINLOCK_USE_X86_INTRINSIC_
    folly::RWTicketSpinLock32,
    folly::RWTicketSpinLock64,
#endif
    folly::SharedMutexReadPriority,
    folly::SharedMutexWritePriority>;
TYPED_TEST_CASE(
    SynchronizedTimedWithConstTest, SynchronizedTimedWithConstTestTypes);

TYPED_TEST(SynchronizedTimedWithConstTest, TimedShared) {
  testTimedShared<TypeParam>();
}

TYPED_TEST(SynchronizedTimedWithConstTest, TimedSynchronizeWithConst) {
  testTimedSynchronizedWithConst<TypeParam>();
}

TYPED_TEST(SynchronizedTest, InPlaceConstruction) {
  testInPlaceConstruction<TypeParam>();
}

using CountPair = std::pair<int, int>;
// This class is only meant to be used in SynchronizedLockTest
class FakeMutex {
 public:
  void lock() {
    ++lockCount_;
  }

  void unlock() {
    ++unlockCount_;
  }

  static CountPair getLockUnlockCount() {
    return CountPair{lockCount_, unlockCount_};
  }

  static void resetLockUnlockCount() {
    lockCount_ = 0;
    unlockCount_ = 0;
  }
 private:
  // Keep these two static for test access
  // Keep them thread_local in case tests are run in parallel within one
  // process
  static FOLLY_TLS int lockCount_;
  static FOLLY_TLS int unlockCount_;
};
FOLLY_TLS int FakeMutex::lockCount_{0};
FOLLY_TLS int FakeMutex::unlockCount_{0};

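// Rough usage sketch (illustrative only, not part of the compiled tests):
// pairing FakeMutex with folly::Synchronized lets a test count how many times
// the underlying mutex was locked and unlocked, e.g.:
//
//   folly::Synchronized<std::vector<int>, FakeMutex> obj;
//   obj.contextualLock()->push_back(1);
//   EXPECT_EQ((CountPair{1, 1}), FakeMutex::getLockUnlockCount());
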
// SynchronizedLockTest verifies that locks are acquired and released exactly
// as designed
class SynchronizedLockTest : public testing::Test {
 public:
  void SetUp() override {
    FakeMutex::resetLockUnlockCount();
  }
};

/**
 * Test mutex to help automate assertions, taken from LockTraitsTest.cpp
 *
 * Each operation checks that the mutex is in the expected state before
 * transitioning to the new one, so incorrect lock usage fails an EXPECT.
 */
class FakeAllPowerfulAssertingMutexInternal {
 public:
  enum class CurrentLockState { UNLOCKED, SHARED, UPGRADE, UNIQUE };

  void lock() {
    EXPECT_EQ(this->lock_state, CurrentLockState::UNLOCKED);
    this->lock_state = CurrentLockState::UNIQUE;
  }
  void unlock() {
    EXPECT_EQ(this->lock_state, CurrentLockState::UNIQUE);
    this->lock_state = CurrentLockState::UNLOCKED;
  }
  void lock_shared() {
    EXPECT_EQ(this->lock_state, CurrentLockState::UNLOCKED);
    this->lock_state = CurrentLockState::SHARED;
  }
  void unlock_shared() {
    EXPECT_EQ(this->lock_state, CurrentLockState::SHARED);
    this->lock_state = CurrentLockState::UNLOCKED;
  }
  void lock_upgrade() {
    EXPECT_EQ(this->lock_state, CurrentLockState::UNLOCKED);
    this->lock_state = CurrentLockState::UPGRADE;
  }
  void unlock_upgrade() {
    EXPECT_EQ(this->lock_state, CurrentLockState::UPGRADE);
    this->lock_state = CurrentLockState::UNLOCKED;
  }

  void unlock_upgrade_and_lock() {
    EXPECT_EQ(this->lock_state, CurrentLockState::UPGRADE);
    this->lock_state = CurrentLockState::UNIQUE;
  }
  void unlock_and_lock_upgrade() {
    EXPECT_EQ(this->lock_state, CurrentLockState::UNIQUE);
    this->lock_state = CurrentLockState::UPGRADE;
  }
  void unlock_and_lock_shared() {
    EXPECT_EQ(this->lock_state, CurrentLockState::UNIQUE);
    this->lock_state = CurrentLockState::SHARED;
  }
  void unlock_upgrade_and_lock_shared() {
    EXPECT_EQ(this->lock_state, CurrentLockState::UPGRADE);
    this->lock_state = CurrentLockState::SHARED;
  }

  template <class Rep, class Period>
  bool try_lock_for(const std::chrono::duration<Rep, Period>&) {
    EXPECT_EQ(this->lock_state, CurrentLockState::UNLOCKED);
    this->lock_state = CurrentLockState::UNIQUE;
    return true;
  }

  template <class Rep, class Period>
  bool try_lock_upgrade_for(const std::chrono::duration<Rep, Period>&) {
    EXPECT_EQ(this->lock_state, CurrentLockState::UNLOCKED);
    this->lock_state = CurrentLockState::UPGRADE;
    return true;
  }

  template <class Rep, class Period>
  bool try_unlock_upgrade_and_lock_for(
      const std::chrono::duration<Rep, Period>&) {
    EXPECT_EQ(this->lock_state, CurrentLockState::UPGRADE);
    this->lock_state = CurrentLockState::UNIQUE;
    return true;
  }

  /*
   * Initialize the mutex in an unlocked state
   */
  CurrentLockState lock_state{CurrentLockState::UNLOCKED};
};

/**
 * The following works around the internal mutex of folly::Synchronized being
 * private.
 *
 * This is horridly thread unsafe.
 */
static FakeAllPowerfulAssertingMutexInternal globalAllPowerfulAssertingMutex;

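// FakeAllPowerfulAssertingMutex forwards every operation to the global
// instance above, so the tests below can inspect
// globalAllPowerfulAssertingMutex.lock_state after each folly::Synchronized
// call.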
class FakeAllPowerfulAssertingMutex {
 public:
  void lock() {
    globalAllPowerfulAssertingMutex.lock();
  }
  void unlock() {
    globalAllPowerfulAssertingMutex.unlock();
  }
  void lock_shared() {
    globalAllPowerfulAssertingMutex.lock_shared();
  }
  void unlock_shared() {
    globalAllPowerfulAssertingMutex.unlock_shared();
  }
  void lock_upgrade() {
    globalAllPowerfulAssertingMutex.lock_upgrade();
  }
  void unlock_upgrade() {
    globalAllPowerfulAssertingMutex.unlock_upgrade();
  }

  void unlock_upgrade_and_lock() {
    globalAllPowerfulAssertingMutex.unlock_upgrade_and_lock();
  }
  void unlock_and_lock_upgrade() {
    globalAllPowerfulAssertingMutex.unlock_and_lock_upgrade();
  }
  void unlock_and_lock_shared() {
    globalAllPowerfulAssertingMutex.unlock_and_lock_shared();
  }
  void unlock_upgrade_and_lock_shared() {
    globalAllPowerfulAssertingMutex.unlock_upgrade_and_lock_shared();
  }

  template <class Rep, class Period>
  bool try_lock_for(const std::chrono::duration<Rep, Period>& arg) {
    return globalAllPowerfulAssertingMutex.try_lock_for(arg);
  }

  template <class Rep, class Period>
  bool try_lock_upgrade_for(const std::chrono::duration<Rep, Period>& arg) {
    return globalAllPowerfulAssertingMutex.try_lock_upgrade_for(arg);
  }

  template <class Rep, class Period>
  bool try_unlock_upgrade_and_lock_for(
      const std::chrono::duration<Rep, Period>& arg) {
    return globalAllPowerfulAssertingMutex.try_unlock_upgrade_and_lock_for(arg);
  }

  // reset state on destruction
  ~FakeAllPowerfulAssertingMutex() {
    globalAllPowerfulAssertingMutex = FakeAllPowerfulAssertingMutexInternal{};
  }
};

TEST_F(SynchronizedLockTest, TestCopyConstructibleValues) {
  struct NonCopyConstructible {
    NonCopyConstructible(const NonCopyConstructible&) = delete;
    NonCopyConstructible& operator=(const NonCopyConstructible&) = delete;
  };
  struct CopyConstructible {};
  EXPECT_FALSE(std::is_copy_constructible<
               folly::Synchronized<NonCopyConstructible>>::value);
  EXPECT_FALSE(std::is_copy_assignable<
               folly::Synchronized<NonCopyConstructible>>::value);
  EXPECT_TRUE(std::is_copy_constructible<
              folly::Synchronized<CopyConstructible>>::value);
  EXPECT_TRUE(
      std::is_copy_assignable<folly::Synchronized<CopyConstructible>>::value);
}

TEST_F(SynchronizedLockTest, UpgradableLocking) {
  folly::Synchronized<int, FakeAllPowerfulAssertingMutex> sync;

  // sanity assert
  static_assert(
      std::is_same<std::decay<decltype(*sync.ulock())>::type, int>::value,
      "The ulock function was not well configured, blame aary@instagram.com");

  {
    auto ulock = sync.ulock();
    EXPECT_EQ(
        globalAllPowerfulAssertingMutex.lock_state,
        FakeAllPowerfulAssertingMutexInternal::CurrentLockState::UPGRADE);
  }

  // should be unlocked here
  EXPECT_EQ(
      globalAllPowerfulAssertingMutex.lock_state,
      FakeAllPowerfulAssertingMutexInternal::CurrentLockState::UNLOCKED);

  // test going from upgrade to exclusive
  {
    auto ulock = sync.ulock();
    auto wlock = ulock.moveFromUpgradeToWrite();
    EXPECT_EQ(static_cast<bool>(ulock), false);
    EXPECT_EQ(
        globalAllPowerfulAssertingMutex.lock_state,
        FakeAllPowerfulAssertingMutexInternal::CurrentLockState::UNIQUE);
  }

  // should be unlocked here
  EXPECT_EQ(
      globalAllPowerfulAssertingMutex.lock_state,
      FakeAllPowerfulAssertingMutexInternal::CurrentLockState::UNLOCKED);

  // test going from upgrade to shared
  {
    auto ulock = sync.ulock();
    auto slock = ulock.moveFromUpgradeToRead();
    EXPECT_EQ(static_cast<bool>(ulock), false);
    EXPECT_EQ(
        globalAllPowerfulAssertingMutex.lock_state,
        FakeAllPowerfulAssertingMutexInternal::CurrentLockState::SHARED);
  }

  // should be unlocked here
  EXPECT_EQ(
      globalAllPowerfulAssertingMutex.lock_state,
      FakeAllPowerfulAssertingMutexInternal::CurrentLockState::UNLOCKED);

  // test going from exclusive to upgrade
  {
    auto wlock = sync.wlock();
    auto ulock = wlock.moveFromWriteToUpgrade();
    EXPECT_EQ(static_cast<bool>(wlock), false);
    EXPECT_EQ(
        globalAllPowerfulAssertingMutex.lock_state,
        FakeAllPowerfulAssertingMutexInternal::CurrentLockState::UPGRADE);
  }

  // should be unlocked here
  EXPECT_EQ(
      globalAllPowerfulAssertingMutex.lock_state,
      FakeAllPowerfulAssertingMutexInternal::CurrentLockState::UNLOCKED);

  // test going from exclusive to shared
  {
    auto wlock = sync.wlock();
    auto slock = wlock.moveFromWriteToRead();
    EXPECT_EQ(static_cast<bool>(wlock), false);
    EXPECT_EQ(
        globalAllPowerfulAssertingMutex.lock_state,
        FakeAllPowerfulAssertingMutexInternal::CurrentLockState::SHARED);
  }

  // should be unlocked here
  EXPECT_EQ(
      globalAllPowerfulAssertingMutex.lock_state,
      FakeAllPowerfulAssertingMutexInternal::CurrentLockState::UNLOCKED);
}

TEST_F(SynchronizedLockTest, UpgradableLockingWithULock) {
  folly::Synchronized<int, FakeAllPowerfulAssertingMutex> sync;

  // sanity assert
  static_assert(
      std::is_same<std::decay<decltype(*sync.ulock())>::type, int>::value,
      "The ulock function was not well configured, blame aary@instagram.com");

  // test from upgrade to write
  sync.withULockPtr([](auto ulock) {
    EXPECT_EQ(static_cast<bool>(ulock), true);
    EXPECT_EQ(
        globalAllPowerfulAssertingMutex.lock_state,
        FakeAllPowerfulAssertingMutexInternal::CurrentLockState::UPGRADE);

    auto wlock = ulock.moveFromUpgradeToWrite();
    EXPECT_EQ(static_cast<bool>(ulock), false);
    EXPECT_EQ(
        globalAllPowerfulAssertingMutex.lock_state,
        FakeAllPowerfulAssertingMutexInternal::CurrentLockState::UNIQUE);
  });

  // should be unlocked here
  EXPECT_EQ(
      globalAllPowerfulAssertingMutex.lock_state,
      FakeAllPowerfulAssertingMutexInternal::CurrentLockState::UNLOCKED);

  // test from write to upgrade
  sync.withWLockPtr([](auto wlock) {
    EXPECT_EQ(static_cast<bool>(wlock), true);
    EXPECT_EQ(
        globalAllPowerfulAssertingMutex.lock_state,
        FakeAllPowerfulAssertingMutexInternal::CurrentLockState::UNIQUE);

    auto ulock = wlock.moveFromWriteToUpgrade();
    EXPECT_EQ(static_cast<bool>(wlock), false);
    EXPECT_EQ(
        globalAllPowerfulAssertingMutex.lock_state,
        FakeAllPowerfulAssertingMutexInternal::CurrentLockState::UPGRADE);
  });

  // should be unlocked here
  EXPECT_EQ(
      globalAllPowerfulAssertingMutex.lock_state,
      FakeAllPowerfulAssertingMutexInternal::CurrentLockState::UNLOCKED);

  // test from upgrade to shared
  sync.withULockPtr([](auto ulock) {
    EXPECT_EQ(static_cast<bool>(ulock), true);
    EXPECT_EQ(
        globalAllPowerfulAssertingMutex.lock_state,
        FakeAllPowerfulAssertingMutexInternal::CurrentLockState::UPGRADE);

    auto slock = ulock.moveFromUpgradeToRead();
    EXPECT_EQ(static_cast<bool>(ulock), false);
    EXPECT_EQ(
        globalAllPowerfulAssertingMutex.lock_state,
        FakeAllPowerfulAssertingMutexInternal::CurrentLockState::SHARED);
  });

  // should be unlocked here
  EXPECT_EQ(
      globalAllPowerfulAssertingMutex.lock_state,
      FakeAllPowerfulAssertingMutexInternal::CurrentLockState::UNLOCKED);

  // test from write to shared
  sync.withWLockPtr([](auto wlock) {
    EXPECT_EQ(static_cast<bool>(wlock), true);
    EXPECT_EQ(
        globalAllPowerfulAssertingMutex.lock_state,
        FakeAllPowerfulAssertingMutexInternal::CurrentLockState::UNIQUE);

    auto slock = wlock.moveFromWriteToRead();
    EXPECT_EQ(static_cast<bool>(wlock), false);
    EXPECT_EQ(
        globalAllPowerfulAssertingMutex.lock_state,
        FakeAllPowerfulAssertingMutexInternal::CurrentLockState::SHARED);
  });

  // should be unlocked here
  EXPECT_EQ(
      globalAllPowerfulAssertingMutex.lock_state,
      FakeAllPowerfulAssertingMutexInternal::CurrentLockState::UNLOCKED);
}