#include "spinlock.hpp"
◆ value_type
◆ _halfT
◆ ordered_spinlockbase() [1/3]
363 {
365
366 }
#define QUICKCPPLIB_ANNOTATE_RWLOCK_CREATE(p)
Definition config.hpp:102
atomic< value_type > _v
Definition spinlock.hpp:338
◆ ordered_spinlockbase() [2/3]
◆ ordered_spinlockbase() [3/3]
Atomically move constructs.
◆ ~ordered_spinlockbase()
377 {
378#ifdef QUICKCPPLIB_ENABLE_VALGRIND
379 _internals i = {_v.load(memory_order_relaxed)};
380 if(i.entry != i.exit)
381 {
383 }
384#endif
386 }
#define QUICKCPPLIB_ANNOTATE_RWLOCK_DESTROY(p)
Definition config.hpp:103
#define QUICKCPPLIB_ANNOTATE_RWLOCK_RELEASED(p, s)
Definition config.hpp:105
◆ operator=() [1/2]
◆ operator=() [2/2]
◆ load()
Returns the raw atomic.
390{ return _v.load(o); }
◆ store()
◆ try_lock()
Tries to lock the spinlock, returning true if successful.
396 {
397 _internals i = {_v.load(memory_order_relaxed)}, o = i;
398
399 if(i.entry != i.exit)
400 return false;
401 o.entry++;
402 if(_v.compare_exchange_weak(i.uint, o.uint, memory_order_acquire, memory_order_relaxed))
403 {
405 return true;
406 }
407 return false;
408 }
#define QUICKCPPLIB_ANNOTATE_RWLOCK_ACQUIRED(p, s)
Definition config.hpp:104
◆ _begin_try_lock()
411{ return _v.load(memory_order_relaxed); }
◆ _try_lock()
413 {
414 _internals i = {state}, o;
415 o = i;
416 o.entry++;
417 if(_v.compare_exchange_weak(i.uint, o.uint, memory_order_acquire, memory_order_relaxed))
418 {
420 return true;
421 }
422 state = i.uint;
423 return false;
424 }
◆ unlock()
Releases the lock.
429 {
431 _internals i = {_v.load(memory_order_relaxed)}, o;
432 for(;;)
433 {
434 o = i;
435 o.exit++;
436 if(_v.compare_exchange_weak(i.uint, o.uint, memory_order_release, memory_order_relaxed))
437 return;
438 }
439 }
◆ int_yield()
◆ _v
The documentation for this struct was generated from the following file: