-//$$CDS-header$$
-
-#ifndef __CDS_INTRUSIVE_TSIGAS_CYCLE_QUEUE_H
-#define __CDS_INTRUSIVE_TSIGAS_CYCLE_QUEUE_H
+/*
+ This file is a part of libcds - Concurrent Data Structures library
+
+ (C) Copyright Maxim Khizhinsky (libcds.dev@gmail.com) 2006-2016
+
+ Source code repo: http://github.com/khizmax/libcds/
+ Download: http://sourceforge.net/projects/libcds/files/
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+ * Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+ FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#ifndef CDSLIB_INTRUSIVE_TSIGAS_CYCLE_QUEUE_H
+#define CDSLIB_INTRUSIVE_TSIGAS_CYCLE_QUEUE_H
#include <cds/intrusive/details/base.h>
-#include <cds/cxx11_atomic.h>
+#include <cds/algo/atomic.h>
#include <cds/details/bounded_container.h>
#include <cds/opt/buffer.h>
typedef atomicity::empty_item_counter item_counter;
/// C++ memory ordering model
- /**
+ /**
Can be \p opt::v::relaxed_ordering (relaxed memory model, the default)
     or \p opt::v::sequential_consistent (sequentially consistent memory model).
*/
    Example: declare \p %TsigasCycleQueue with item counting and static internal buffer of size 1024:
\code
- typedef cds::intrusive::TsigasCycleQueue< Foo,
+ typedef cds::intrusive::TsigasCycleQueue< Foo,
typename cds::intrusive::tsigas_queue::make_traits<
cds::opt::buffer< cds::opt::v::static_buffer< void *, 1024 >,
                cds::opt::item_counter< cds::atomicity::item_counter >
typedef cds::intrusive::TsigasCycleQueue< Foo, myTraits > myQueue;
// Equivalent make_traits example:
- typedef cds::intrusive::TsigasCycleQueue< Foo,
- typename cds::intrusive::tsigas_queue::make_traits<
+ typedef cds::intrusive::TsigasCycleQueue< Foo,
+ typename cds::intrusive::tsigas_queue::make_traits<
cds::opt::item_counter< cds::atomicity::item_counter >
>::type
> myQueue;
static bool is_free( const value_type * p ) CDS_NOEXCEPT
{
- return p == reinterpret_cast<value_type *>(free0) || p == reinterpret_cast<value_type *>(free1);
+ return (reinterpret_cast<intptr_t>(p) & ~intptr_t(1)) == 0;
}
size_t CDS_CONSTEXPR buffer_capacity() const CDS_NOEXCEPT
bool enqueue( value_type& data )
{
value_type * pNewNode = &data;
- assert( (reinterpret_cast<ptr_atomic_t>( pNewNode ) & 1) == 0 );
+ assert( (reinterpret_cast<uintptr_t>(pNewNode) & 1) == 0 );
back_off bkoff;
const index_type nModulo = modulo();
temp = (temp + 1) & nModulo;
}
- if ( te != m_nTail.load(memory_model::memory_order_relaxed) )
+ if ( te != m_nTail.load(memory_model::memory_order_acquire) )
continue;
// Check whether queue is full
ate = ( temp + 1 ) & nModulo;
tt = m_buffer[ ate ].load(memory_model::memory_order_relaxed);
if ( !is_free( tt ) ) {
- return false ; // Queue is full
+ return false; // Queue is full
}
// help the dequeue to update head
if ( tt == reinterpret_cast<value_type *>(free1) )
pNewNode = reinterpret_cast<value_type *>(reinterpret_cast<intptr_t>( pNewNode ) | 1);
- if ( te != m_nTail.load(memory_model::memory_order_relaxed) )
+ if ( te != m_nTail.load(memory_model::memory_order_acquire) )
continue;
// get actual tail and try to enqueue new node
if ( th != m_nHead.load(memory_model::memory_order_relaxed) )
goto TryAgain;
- // two consecutive nullptr means queue empty
+ // two consecutive nullptr means the queue is empty
if ( temp == m_nTail.load(memory_model::memory_order_acquire) )
return nullptr;
// check whether the queue is empty
if ( temp == m_nTail.load(memory_model::memory_order_acquire) ) {
// help the enqueue to update end
- m_nTail.compare_exchange_strong( temp, (temp + 1) & nModulo, memory_model::memory_order_release, atomics::memory_order_relaxed );
+ m_nTail.compare_exchange_weak( temp, (temp + 1) & nModulo, memory_model::memory_order_release, atomics::memory_order_relaxed );
continue;
}
- pNull = reinterpret_cast<value_type *>((reinterpret_cast<ptr_atomic_t>(tt)& 1) ? free0 : free1);
+ pNull = reinterpret_cast<value_type *>((reinterpret_cast<uintptr_t>(tt) & 1) ? free0 : free1);
if ( th != m_nHead.load(memory_model::memory_order_relaxed) )
continue;
// Get the actual head, null means empty
- if ( m_buffer[temp].compare_exchange_strong( tt, pNull, memory_model::memory_order_release, atomics::memory_order_relaxed )) {
+ if ( m_buffer[temp].compare_exchange_weak( tt, pNull, memory_model::memory_order_acquire, atomics::memory_order_relaxed )) {
if ( temp % 2 == 0 )
- m_nHead.compare_exchange_strong( th, temp, memory_model::memory_order_release, atomics::memory_order_relaxed );
+ m_nHead.compare_exchange_weak( th, temp, memory_model::memory_order_release, atomics::memory_order_relaxed );
--m_ItemCounter;
return reinterpret_cast<value_type *>(reinterpret_cast<intptr_t>( tt ) & ~intptr_t(1));
}
/// Returns queue's item count
/**
- The value returned depends on \p tsigas_queue::traits::item_counter.
+ The value returned depends on \p tsigas_queue::traits::item_counter.
For \p atomicity::empty_item_counter, the function always returns 0.
*/
size_t size() const CDS_NOEXCEPT
}} // namespace cds::intrusive
-#endif // #ifndef __CDS_INTRUSIVE_TSIGAS_CYCLE_QUEUE_H
+#endif // #ifndef CDSLIB_INTRUSIVE_TSIGAS_CYCLE_QUEUE_H