typedef T mapped_type;
typedef Traits original_traits;
- struct internal_mapped_type : public bronson_avltree::value
- {
- mapped_type m_val;
-
- template <typename... Args>
- internal_mapped_type( Args&&... args )
- : m_val( std::forward<Args>(args)...)
- {}
- };
-
- typedef cds::details::Allocator< internal_mapped_type, typename original_traits::allocator > cxx_allocator;
-
- struct cast_mapped_type
- {
- mapped_type * operator()( internal_mapped_type * p ) const
- {
- return &(p->m_val);
- }
- };
+ typedef cds::details::Allocator< mapped_type, typename original_traits::allocator > cxx_allocator;
struct traits : public original_traits
{
struct disposer {
- void operator()( internal_mapped_type * p ) const
+ void operator()( mapped_type * p ) const
{
cxx_allocator().Delete( p );
}
};
// Metafunction result
- typedef BronsonAVLTreeMap< RCU, Key, internal_mapped_type *, traits > type;
+ typedef BronsonAVLTreeMap< RCU, Key, mapped_type *, traits > type;
};
} // namespace details
//@endcond
static bool const c_bRelaxedInsert = traits::relaxed_insert;
typedef typename base_class::rcu_lock rcu_lock; ///< RCU scoped lock
+ /// Returned pointer to \p mapped_type of extracted node
+ typedef typename base_class::exempt_ptr exempt_ptr;
+
protected:
//@cond
typedef typename base_class::node_type node_type;
- typedef typename maker::internal_mapped_type internal_mapped_type;
typedef typename base_class::node_scoped_lock node_scoped_lock;
typedef typename maker::cxx_allocator cxx_allocator;
typedef typename base_class::update_flags update_flags;
//@endcond
- public:
-# ifdef CDSDOXYGEN_INVOKED
- /// Returned pointer to \p mapped_type of extracted node
- typedef cds::urcu::exempt_ptr< gc, implementation_defined, mapped_type, implementation_defined, implementation_defined > exempt_ptr;
-# else
- typedef cds::urcu::exempt_ptr< gc,
- internal_mapped_type,
- mapped_type,
- typename maker::traits::disposer,
- typename maker::cast_mapped_type
- > exempt_ptr;
-# endif
-
-
public:
/// Creates empty map
BronsonAVLTreeMap()
bool insert( K const& key )
{
return base_class::do_update(key, key_comparator(),
- []( node_type * pNode ) -> internal_mapped_type*
+ []( node_type * pNode ) -> mapped_type*
{
assert( pNode->m_pValue.load( memory_model::memory_order_relaxed ) == nullptr );
CDS_UNUSED( pNode );
bool insert( K const& key, V const& val )
{
return base_class::do_update( key, key_comparator(),
- [&val]( node_type * pNode ) -> internal_mapped_type*
+ [&val]( node_type * pNode ) -> mapped_type*
{
assert( pNode->m_pValue.load( memory_model::memory_order_relaxed ) == nullptr );
CDS_UNUSED( pNode );
bool insert_with( K const& key, Func func )
{
return base_class::do_update( key, key_comparator(),
- [&func]( node_type * pNode ) -> internal_mapped_type*
+ [&func]( node_type * pNode ) -> mapped_type*
{
assert( pNode->m_pValue.load( memory_model::memory_order_relaxed ) == nullptr );
- internal_mapped_type * pVal = cxx_allocator().New();
- func( pNode->m_key, pVal->m_val );
+ mapped_type * pVal = cxx_allocator().New();
+ func( pNode->m_key, *pVal );
return pVal;
},
update_flags::allow_insert
bool emplace( K&& key, Args&&... args )
{
return base_class::do_update( key, key_comparator(),
- [&]( node_type * pNode ) -> internal_mapped_type*
+ [&]( node_type * pNode ) -> mapped_type*
{
assert( pNode->m_pValue.load( memory_model::memory_order_relaxed ) == nullptr );
CDS_UNUSED( pNode );
std::pair<bool, bool> update( K const& key, Func func )
{
int result = base_class::do_update( key, key_comparator(),
- [&func]( node_type * pNode ) -> internal_mapped_type*
+ [&func]( node_type * pNode ) -> mapped_type*
{
- internal_mapped_type * pVal = pNode->m_pValue.load( memory_model::memory_order_relaxed );
+ mapped_type * pVal = pNode->m_pValue.load( memory_model::memory_order_relaxed );
if ( !pVal ) {
pVal = cxx_allocator().New();
- func( true, pNode->m_key, pVal->m_val );
+ func( true, pNode->m_key, *pVal );
}
else
- func( false, pNode->m_key, pVal->m_val );
+ func( false, pNode->m_key, *pVal );
return pVal;
},
update_flags::allow_insert | update_flags::allow_update
template <typename K, typename Func>
bool erase( K const& key, Func f )
{
- return base_class::erase( key, [&f]( internal_mapped_type& v) { f( v.m_val ); });
+ return base_class::erase( key, f );
}
/// Deletes the item from the map using \p pred predicate for searching
template <typename K, typename Less, typename Func>
bool erase_with( K const& key, Less pred, Func f )
{
- return base_class::erase_with( key, pred, [&f]( internal_mapped_type& v) { f( v.m_val ); } );
+ return base_class::erase_with( key, pred, f );
}
- /// Extracts an item with minimal key from the map
+ /// Extracts a value with minimal key from the map
/**
Returns \p exempt_ptr pointer to the leftmost item.
If the set is empty, returns empty \p exempt_ptr.
+ Note that the function returns only the value for minimal key.
+ To retrieve its key use \p extract_min( Func ) member function.
+
@note Due the concurrent nature of the map, the function extracts <i>nearly</i> minimum key.
It means that the function gets leftmost leaf of the tree and tries to unlink it.
During unlinking, a concurrent thread may insert an item with key less than leftmost item's key.
*/
exempt_ptr extract_min()
{
- return exempt_ptr( base_class::do_extract_min());
+ return base_class::extract_min();
+ }
+
+ /// Extracts the minimal key and corresponding value
+ /**
+ Returns \p exempt_ptr to the leftmost item.
+ If the tree is empty, returns empty \p exempt_ptr.
+
+ \p Func functor is used to store minimal key.
+ \p Func has the following signature:
+ \code
+ struct functor {
+ void operator()( key_type const& key );
+ };
+ \endcode
+ If the tree is empty, \p f is not called.
+ Otherwise, it is called with the minimal key, and the pointer to the corresponding value is returned
+ as \p exempt_ptr.
+
+ @note Due the concurrent nature of the map, the function extracts <i>nearly</i> minimum key.
+ It means that the function gets leftmost leaf of the tree and tries to unlink it.
+ During unlinking, a concurrent thread may insert an item with key less than leftmost item's key.
+ So, the function returns the item with minimum key at the moment of tree traversing.
+
+ RCU \p synchronize method can be called. RCU should NOT be locked.
+ The function does not free the item.
+ The deallocator will be implicitly invoked when the returned object is destroyed or when
+ its \p release() member function is called.
+ */
+ template <typename Func>
+ exempt_ptr extract_min( Func f )
+ {
+ return base_class::extract_min( f );
+ }
+
+ /// Extracts the minimal key and corresponding value
+ /**
+ This function is a shortcut for the following call:
+ \code
+ key_type key;
+ exempt_ptr xp = theTree.extract_min( [&key]( key_type const& k ) { key = k; } );
+ \endcode
+ \p key_type should be copy-assignable. The copy of minimal key
+ is returned in \p min_key argument.
+ */
+ typename std::enable_if< std::is_copy_assignable<key_type>::value, exempt_ptr >::type
+ extract_min_key( key_type& min_key )
+ {
+ return base_class::extract_min_key( min_key );
}
/// Extracts an item with maximal key from the map
Returns \p exempt_ptr pointer to the rightmost item.
If the set is empty, returns empty \p exempt_ptr.
+ Note that the function returns only the value for maximal key.
+ To retrieve its key use \p extract_max( Func ) member function.
+
@note Due the concurrent nature of the map, the function extracts <i>nearly</i> maximal key.
It means that the function gets rightmost leaf of the tree and tries to unlink it.
During unlinking, a concurrent thread may insert an item with key great than leftmost item's key.
*/
exempt_ptr extract_max()
{
- return exempt_ptr( base_class::do_extract_min());
+ return base_class::extract_max();
+ }
+
+ /// Extracts the maximal key and corresponding value
+ /**
+ Returns \p exempt_ptr pointer to the rightmost item.
+ If the set is empty, returns empty \p exempt_ptr.
+
+ \p Func functor is used to store maximal key.
+ \p Func has the following signature:
+ \code
+ struct functor {
+ void operator()( key_type const& key );
+ };
+ \endcode
+ If the tree is empty, \p f is not called.
+ Otherwise, it is called with the maximal key, and the pointer to the corresponding value is returned
+ as \p exempt_ptr.
+
+ @note Due the concurrent nature of the map, the function extracts <i>nearly</i> maximal key.
+ It means that the function gets rightmost leaf of the tree and tries to unlink it.
+ During unlinking, a concurrent thread may insert an item with key greater than the rightmost item's key.
+ So, the function returns the item with maximum key at the moment of tree traversing.
+
+ RCU \p synchronize method can be called. RCU should NOT be locked.
+ The function does not free the item.
+ The deallocator will be implicitly invoked when the returned object is destroyed or when
+ its \p release() is called.
+ */
+ template <typename Func>
+ exempt_ptr extract_max( Func f )
+ {
+ return base_class::extract_max( f );
+ }
+
+ /// Extracts the maximal key and corresponding value
+ /**
+ This function is a shortcut for the following call:
+ \code
+ key_type key;
+ exempt_ptr xp = theTree.extract_max( [&key]( key_type const& k ) { key = k; } );
+ \endcode
+ \p key_type should be copy-assignable. The copy of maximal key
+ is returned in \p max_key argument.
+ */
+ typename std::enable_if< std::is_copy_assignable<key_type>::value, exempt_ptr >::type
+ extract_max_key( key_type& max_key )
+ {
+ return base_class::extract_max_key( max_key );
}
/// Extracts an item from the map
template <typename Q>
exempt_ptr extract( Q const& key )
{
- return exempt_ptr( base_class::do_extract( key ));
+ return base_class::extract( key );
}
/// Extracts an item from the map using \p pred for searching
template <typename Q, typename Less>
exempt_ptr extract_with( Q const& key, Less pred )
{
- return exempt_ptr( base_class::do_extract_with( key, pred ));
+ return base_class::extract_with( key, pred );
}
/// Find the key \p key
template <typename K, typename Func>
bool find( K const& key, Func f )
{
- return base_class::find( key, [&f]( key_type const& key, internal_mapped_type& v) { f( key, v.m_val ); });
+ return base_class::find( key, f );
}
/// Finds the key \p val using \p pred predicate for searching
template <typename K, typename Less, typename Func>
bool find_with( K const& key, Less pred, Func f )
{
- return base_class::find_with( key, pred, [&f]( key_type const& key, internal_mapped_type& v) { f( key, v.m_val ); } );
+ return base_class::find_with( key, pred, f );
}
/// Find the key \p key
//@endcond
};
- /// Base value type for BronsonAVLTreeMap
- /**
- If value type for \p BronsonAVLTreeMap is based on this struct,
- the map will support some additional methods like \p BronsonAVLTreeMap::get().
- Moreover, the disposer behaviour is different for ordinal values and
- values based on \p %bronson_avltree::value:
- - for ordinal value, its disposer is called immediately after removing
- the node from the tree. It this case it is not possible to support
- map's methods that return raw pointer to the tree's value.
- - if the value type is based on \p %bronson_avltree::value,
- i.e. <tt>std::is_base_of( bronson_avltree::value, value_type )::value</tt> is \p true,
- the disposer is called via full RCU cycle. It means that under
- RCU lock the methods returning raw pointer can be implemented.
- */
- struct value
- {
- //@cond
- value * m_pNextRetired;
-
- value()
- : m_pNextRetired(nullptr)
- {}
- //@endcond
- };
-
/// BronsonAVLTreeMap internal statistics
template <typename Counter = cds::atomicity::event_counter>
struct stat {
event_counter m_nUpdateRetry; ///< Count of update retries via concurrent operations
event_counter m_nUpdateRootWaitShrinking; ///< Count of waiting until root shrinking completed duting \p update() call
event_counter m_nUpdateSuccess; ///< Count of updating data node
- event_counter m_nUpdateUnlinked; ///< Count of updating of unlinked node attempts
+ event_counter m_nUpdateUnlinked; ///< Count of attempts to update unlinked node
event_counter m_nDisposedNode; ///< Count of disposed node
event_counter m_nDisposedValue; ///< Count of disposed value
event_counter m_nExtractedValue; ///< Count of extracted value
void onExtractValue() { ++m_nExtractedValue; }
void onRemoveRetry() { ++m_nRemoveRetry; }
void onRemoveWaitShrinking() { ++m_nRemoveWaitShrinking; }
+ void onRemoveRootWaitShrinking() { ++m_nRemoveRootWaitShrinking; }
void onRotateRight() { ++m_nRightRotation; }
void onRotateLeft() { ++m_nLeftRotation; }
void onRotateRightOverLeft() { ++m_nRightLeftRotation; }
- void onRotateLeftOverRight() { ++m_nLeftRghtRotation; }
+ void onRotateLeftOverRight() { ++m_nLeftRightRotation; }
//@endcond
};
typedef cds::details::Allocator< node_type, node_allocator_type > cxx_allocator;
typedef cds::urcu::details::check_deadlock_policy< gc, rcu_check_deadlock > check_deadlock_policy;
-
- static CDS_CONSTEXPR bool const c_bRetiringValue = std::is_base_of< bronson_avltree::value, typename std::remove_pointer<mapped_type>::type>::value;
enum class find_result
{
return pNode->m_pParent.load( order );
}
-
- template <bool Enabled>
- class rcu_value_disposer;
-
- template <>
- class rcu_value_disposer<true>
- {
- bronson_avltree::value * m_pValueRetiredList; ///< head of retired value list
- public:
- rcu_value_disposer()
- : m_pValueRetiredList(nullptr)
- {}
-
- ~rcu_value_disposer()
- {
- clean();
- }
-
- void dispose( mapped_type pVal )
- {
- assert( pVal );
- static_cast< bronson_avltree::value *>(pVal)->m_pNextRetired = m_pValueRetiredList;
- m_pValueRetiredList = static_cast< bronson_avltree::value *>(pVal);
- }
-
- private:
- struct internal_disposer
- {
- void operator()( bronson_avltree::value * p ) const
- {
- free_value( static_cast<mapped_type>( p ));
- }
- };
-
- void clean()
- {
- // TODO: use RCU::batch_retire
- for ( auto p = m_pValueRetiredList; p; ) {
- auto pNext = p->m_pNextRetired;
- gc::template retire_ptr<internal_disposer>( p );
- p = pNext;
- }
- }
- };
-
- template <>
- class rcu_value_disposer<false>
- {
- public:
- void dispose( mapped_type pVal )
- {
- free_value( pVal );
- }
- };
-
// RCU safe disposer
class rcu_disposer
{
- node_type * m_pRetiredList; ///< head of retired node list
- rcu_value_disposer< c_bRetiringValue > m_RetiredValueList;
+ node_type * m_pRetiredList; ///< head of retired node list
+ mapped_type m_pRetiredValue; ///< value retired
public:
rcu_disposer()
: m_pRetiredList( nullptr )
+ , m_pRetiredValue( nullptr )
{}
~rcu_disposer()
void dispose_value( mapped_type pVal )
{
- m_RetiredValueList.dispose( pVal );
+ assert( m_pRetiredValue == nullptr );
+ m_pRetiredValue = pVal;
}
private:
gc::template retire_ptr<internal_disposer>( p );
p = pNext;
}
+
+ // Dispose value
+ if ( m_pRetiredValue )
+ gc::template retire_ptr<disposer>( m_pRetiredValue );
}
};
return do_remove(
key,
key_comparator(),
- []( mapped_type pVal, rcu_disposer& disp ) -> bool { disp.dispose_value( pVal ); return true; }
+ []( key_type const&, mapped_type pVal, rcu_disposer& disp ) -> bool { disp.dispose_value( pVal ); return true; }
);
}
return do_remove(
key,
cds::opt::details::make_comparator_from_less<Less>(),
- []( mapped_type pVal, rcu_disposer& disp ) -> bool { disp.dispose_value( pVal ); return true; }
+ []( key_type const&, mapped_type pVal, rcu_disposer& disp ) -> bool { disp.dispose_value( pVal ); return true; }
);
}
return do_remove(
key,
key_comparator(),
- [&f]( mapped_type pVal, rcu_disposer& disp ) -> bool {
+ [&f]( key_type const&, mapped_type pVal, rcu_disposer& disp ) -> bool {
assert( pVal );
f( *pVal );
disp.dispose_value(pVal);
return do_remove(
key,
cds::opt::details::make_comparator_from_less<Less>(),
- [&f]( mapped_type pVal, rcu_disposer& disp ) -> bool {
+ [&f]( key_type const&, mapped_type pVal, rcu_disposer& disp ) -> bool {
assert( pVal );
f( *pVal );
disp.dispose_value(pVal);
);
}
- /// Extracts an item with minimal key from the map
+ /// Extracts a value with minimal key from the map
/**
Returns \p exempt_ptr to the leftmost item.
- If the set is empty, returns empty \p exempt_ptr.
+ If the tree is empty, returns empty \p exempt_ptr.
+
+ Note that the function returns only the value for minimal key.
+ To retrieve its key use \p extract_min( Func ) member function.
@note Due the concurrent nature of the map, the function extracts <i>nearly</i> minimum key.
It means that the function gets leftmost leaf of the tree and tries to unlink it.
*/
exempt_ptr extract_min()
{
- return exempt_ptr(do_extract_min());
+ return exempt_ptr(do_extract_min( []( key_type const& ) {}));
}
- /// Extracts an item with maximal key from the map
+ /// Extracts the minimal key and corresponding value
+ /**
+ Returns \p exempt_ptr to the leftmost item.
+ If the tree is empty, returns empty \p exempt_ptr.
+
+ \p Func functor is used to store minimal key.
+ \p Func has the following signature:
+ \code
+ struct functor {
+ void operator()( key_type const& key );
+ };
+ \endcode
+ If the tree is empty, \p f is not called.
+ Otherwise, it is called with the minimal key, and the pointer to the corresponding value is returned
+ as \p exempt_ptr.
+
+ @note Due the concurrent nature of the map, the function extracts <i>nearly</i> minimum key.
+ It means that the function gets leftmost leaf of the tree and tries to unlink it.
+ During unlinking, a concurrent thread may insert an item with key less than leftmost item's key.
+ So, the function returns the item with minimum key at the moment of tree traversing.
+
+ RCU \p synchronize method can be called. RCU should NOT be locked.
+ The function does not free the item.
+ The deallocator will be implicitly invoked when the returned object is destroyed or when
+ its \p release() member function is called.
+ */
+ template <typename Func>
+ exempt_ptr extract_min( Func f )
+ {
+ return exempt_ptr(do_extract_min( [&f]( key_type const& key ) { f(key); }));
+ }
+
+ /// Extracts the minimal key and corresponding value
+ /**
+ This function is a shortcut for the following call:
+ \code
+ key_type key;
+ exempt_ptr xp = theTree.extract_min( [&key]( key_type const& k ) { key = k; } );
+ \endcode
+ \p key_type should be copy-assignable. The copy of minimal key
+ is returned in \p min_key argument.
+ */
+ typename std::enable_if< std::is_copy_assignable<key_type>::value, exempt_ptr >::type
+ extract_min_key( key_type& min_key )
+ {
+ return exempt_ptr(do_extract_min( [&min_key]( key_type const& key ) { min_key = key; }));
+ }
+
+ /// Extracts a value with maximal key from the tree
/**
Returns \p exempt_ptr pointer to the rightmost item.
If the set is empty, returns empty \p exempt_ptr.
+ Note that the function returns only the value for maximal key.
+ To retrieve its key use \p extract_max( Func ) member function.
+
@note Due the concurrent nature of the map, the function extracts <i>nearly</i> maximal key.
It means that the function gets rightmost leaf of the tree and tries to unlink it.
During unlinking, a concurrent thread may insert an item with key great than leftmost item's key.
*/
exempt_ptr extract_max()
{
- return exempt_ptr(do_extract_max());
+ return exempt_ptr(do_extract_max( []( key_type const& ) {}));
+ }
+
+ /// Extracts the maximal key and corresponding value
+ /**
+ Returns \p exempt_ptr pointer to the rightmost item.
+ If the set is empty, returns empty \p exempt_ptr.
+
+ \p Func functor is used to store maximal key.
+ \p Func has the following signature:
+ \code
+ struct functor {
+ void operator()( key_type const& key );
+ };
+ \endcode
+ If the tree is empty, \p f is not called.
+ Otherwise, it is called with the maximal key, and the pointer to the corresponding value is returned
+ as \p exempt_ptr.
+
+ @note Due the concurrent nature of the map, the function extracts <i>nearly</i> maximal key.
+ It means that the function gets rightmost leaf of the tree and tries to unlink it.
+ During unlinking, a concurrent thread may insert an item with key greater than the rightmost item's key.
+ So, the function returns the item with maximum key at the moment of tree traversing.
+
+ RCU \p synchronize method can be called. RCU should NOT be locked.
+ The function does not free the item.
+ The deallocator will be implicitly invoked when the returned object is destroyed or when
+ its \p release() is called.
+ */
+ template <typename Func>
+ exempt_ptr extract_max( Func f )
+ {
+ return exempt_ptr(do_extract_max( [&f]( key_type const& key ) { f(key); }));
+ }
+
+ /// Extracts the maximal key and corresponding value
+ /**
+ This function is a shortcut for the following call:
+ \code
+ key_type key;
+ exempt_ptr xp = theTree.extract_max( [&key]( key_type const& k ) { key = k; } );
+ \endcode
+ \p key_type should be copy-assignable. The copy of maximal key
+ is returned in \p max_key argument.
+ */
+ typename std::enable_if< std::is_copy_assignable<key_type>::value, exempt_ptr >::type
+ extract_max_key( key_type& max_key )
+ {
+ return exempt_ptr(do_extract_max( [&max_key]( key_type const& key ) { max_key = key; }));
}
/// Extracts an item from the map
return exempt_ptr(do_extract( key ));
}
+
/// Extracts an item from the map using \p pred for searching
/**
The function is an analog of \p extract(Q const&)
}
}
- mapped_type do_extract_min()
+ template <typename Func>
+ mapped_type do_extract_min( Func f )
{
mapped_type pExtracted = nullptr;
do_extract_minmax(
left_child,
- [&pExtracted]( mapped_type pVal, rcu_disposer& ) -> bool { pExtracted = pVal; return false; }
+ [&pExtracted, &f]( key_type const& key, mapped_type pVal, rcu_disposer& ) -> bool { f( key ); pExtracted = pVal; return false; }
);
return pExtracted;
}
- mapped_type do_extract_max()
+ template <typename Func>
+ mapped_type do_extract_max( Func f )
{
mapped_type pExtracted = nullptr;
do_extract_minmax(
right_child,
- [&pExtracted]( mapped_type pVal, rcu_disposer& ) -> bool { pExtracted = pVal; return false; }
+ [&pExtracted, &f]( key_type const& key, mapped_type pVal, rcu_disposer& ) -> bool { f( key ); pExtracted = pVal; return false; }
);
return pExtracted;
}
do_remove(
key,
key_comparator(),
- [&pExtracted]( mapped_type pVal, rcu_disposer& ) -> bool { pExtracted = pVal; return false; }
+ [&pExtracted]( key_type const&, mapped_type pVal, rcu_disposer& ) -> bool { pExtracted = pVal; return false; }
);
return pExtracted;
}
do_remove(
key,
cds::opt::details::make_comparator_from_less<Less>(),
- [&pExtracted]( mapped_type pVal, rcu_disposer& ) -> bool { pExtracted = pVal; return false; }
+ [&pExtracted]( key_type const&, mapped_type pVal, rcu_disposer& ) -> bool { pExtracted = pVal; return false; }
);
return pExtracted;
}
}
--m_ItemCounter;
- if ( func( pOld, disp )) // calls pOld disposer inside
+ if ( func( pNode->m_key, pOld, disp )) // calls pOld disposer inside
m_stat.onDisposeValue();
else
m_stat.onExtractValue();
if ( result == update_flags::result_removed ) {
--m_ItemCounter;
- if ( func( pOld, disp )) // calls pOld disposer inside
+ if ( func( pNode->m_key, pOld, disp )) // calls pOld disposer inside
m_stat.onDisposeValue();
else
m_stat.onExtractValue();
</ItemGroup>\r
<ItemGroup>\r
<ClCompile Include="..\..\..\tests\test-hdr\tree\hdr_bronson_avltree_map_rcu_gpb.cpp" />\r
+ <ClCompile Include="..\..\..\tests\test-hdr\tree\hdr_bronson_avltree_map_rcu_gpb_pool_monitor.cpp" />\r
<ClCompile Include="..\..\..\tests\test-hdr\tree\hdr_ellenbintree_map_dhp.cpp" />\r
<ClCompile Include="..\..\..\tests\test-hdr\tree\hdr_ellenbintree_map_hp.cpp" />\r
<ClCompile Include="..\..\..\tests\test-hdr\tree\hdr_ellenbintree_map_rcu_gpb.cpp" />\r
<ClCompile Include="..\..\..\tests\test-hdr\tree\hdr_bronson_avltree_map_rcu_gpb.cpp">\r
<Filter>container</Filter>\r
</ClCompile>\r
+ <ClCompile Include="..\..\..\tests\test-hdr\tree\hdr_bronson_avltree_map_rcu_gpb_pool_monitor.cpp">\r
+ <Filter>container</Filter>\r
+ </ClCompile>\r
</ItemGroup>\r
</Project>
\ No newline at end of file
{}
};
+ struct compare {
+ int operator()( key_type k1, key_type k2 )
+ {
+ return k1 < k2 ? -1 : k1 > k2 ? 1 : 0;
+ }
+ };
+
struct wrapped_int {
int nKey;
}
};
- struct extract_functor
- {
- int nKey;
-
- void operator()( value_type& v )
- {
- nKey = v.nVal;
- }
- };
-
protected:
template <class Set>
void test_with( Set& s )
{
value_type itm;
int key;
+ typedef typename Set::exempt_ptr exempt_ptr;
// insert/find test
CPPUNIT_ASSERT( !s.find( 10 ) );
s.clear();
CPPUNIT_ASSERT( s.empty() );
CPPUNIT_ASSERT( check_size( s, 0 ) );
- }
- template <typename Set>
- void fill_set( Set& s, data_array& a )
- {
- CPPUNIT_ASSERT( s.empty() );
- for ( size_t i = 0; i < c_nItemCount; ++i ) {
- CPPUNIT_ASSERT( s.insert( a[i] ) );
+ const int c_nStep = 10;
+ int keys[1000];
+ for ( key_type i = 0; i < sizeof(keys) / sizeof(keys[0]); ++i )
+ keys[i] = i;
+ std::random_shuffle( keys, keys + sizeof(keys) / sizeof(keys[0]));
+
+ size_t nCount = 1;
+ int nPrev;
+ key_type keyPrev;
+ exempt_ptr xp;
+
+ // extract_min
+ for ( int i = 0; i < sizeof(keys) / sizeof(keys[0]); ++i )
+ CPPUNIT_ASSERT( s.emplace( keys[i], keys[i] * c_nStep ));
+
+ xp = s.extract_min();
+ CPPUNIT_ASSERT( xp );
+ nPrev = xp->nVal;
+ CPPUNIT_CHECK_EX( nPrev == 0, "Expected=0 real=" << nPrev );
+ while ( !s.empty() ) {
+ xp = s.extract_min();
+ CPPUNIT_ASSERT( xp );
+ CPPUNIT_CHECK_EX( nPrev + c_nStep == xp->nVal, "Expected=" << nPrev + c_nStep << " real=" << xp->nVal );
+ nPrev = xp->nVal;
+ ++nCount;
}
- CPPUNIT_ASSERT( !s.empty() );
- CPPUNIT_ASSERT( check_size( s, c_nItemCount ) );
+ CPPUNIT_CHECK( nCount == sizeof(keys) / sizeof(keys[0]));
+
+ // extract_min<Func>
+ for ( int i = 0; i < sizeof(keys) / sizeof(keys[0]); ++i )
+ CPPUNIT_ASSERT( s.insert( keys[i], keys[i] * c_nStep ));
+
+ nCount = 1;
+ xp = s.extract_min( [&keyPrev]( key_type k ){ keyPrev = k; });
+ CPPUNIT_ASSERT( xp );
+ nPrev = xp->nVal;
+ CPPUNIT_CHECK_EX( keyPrev == 0, "Expected=0 real=" << keyPrev );
+ CPPUNIT_CHECK_EX( nPrev == 0, "Expected=0 real=" << nPrev );
+ while ( !s.empty() ) {
+ xp = s.extract_min( [&key](key_type k){ key = k; } );
+ CPPUNIT_ASSERT( xp );
+ CPPUNIT_CHECK_EX( key == keyPrev + 1, "Expected=" << keyPrev + 1 << " real=" << key );
+ CPPUNIT_CHECK_EX( nPrev + c_nStep == xp->nVal, "Expected=" << nPrev + c_nStep << " real=" << xp->nVal );
+ nPrev = xp->nVal;
+ ++keyPrev;
+ ++nCount;
+ }
+ CPPUNIT_CHECK( nCount == sizeof(keys) / sizeof(keys[0]));
+
+ // extract_min_key
+ for ( int i = 0; i < sizeof(keys) / sizeof(keys[0]); ++i )
+ CPPUNIT_ASSERT( s.insert( keys[i], keys[i] * c_nStep ));
+
+ nCount = 1;
+ xp = s.extract_min_key( keyPrev );
+ CPPUNIT_ASSERT( xp );
+ nPrev = xp->nVal;
+ CPPUNIT_CHECK_EX( keyPrev == 0, "Expected=0 real=" << keyPrev );
+ CPPUNIT_CHECK_EX( nPrev == 0, "Expected=0 real=" << nPrev );
+ while ( !s.empty() ) {
+ xp = s.extract_min_key( key );
+ CPPUNIT_ASSERT( xp );
+ CPPUNIT_CHECK_EX( key == keyPrev + 1, "Expected=" << keyPrev + 1 << " real=" << key );
+ CPPUNIT_CHECK_EX( nPrev + c_nStep == xp->nVal, "Expected=" << nPrev + c_nStep << " real=" << xp->nVal );
+ nPrev = xp->nVal;
+ ++keyPrev;
+ ++nCount;
+ }
+ CPPUNIT_CHECK( nCount == sizeof(keys) / sizeof(keys[0]));
+
+ // extract_max
+ for ( int i = 0; i < sizeof(keys) / sizeof(keys[0]); ++i )
+ CPPUNIT_ASSERT( s.emplace( keys[i], keys[i] * c_nStep ));
+
+ nCount = 1;
+ xp = s.extract_max();
+ CPPUNIT_ASSERT( xp );
+ nPrev = xp->nVal;
+ CPPUNIT_CHECK_EX( nPrev == c_nStep * (sizeof(keys) / sizeof(keys[0]) - 1),
+ "Expected=" << c_nStep * (sizeof(keys) / sizeof(keys[0]) - 1) << " real=" << nPrev );
+ while ( !s.empty() ) {
+ xp = s.extract_max();
+ CPPUNIT_ASSERT( xp );
+ CPPUNIT_CHECK_EX( nPrev - c_nStep == xp->nVal, "Expected=" << nPrev - c_nStep << " real=" << xp->nVal );
+ nPrev = xp->nVal;
+ ++nCount;
+ }
+ CPPUNIT_CHECK( nCount == sizeof(keys) / sizeof(keys[0]));
+
+ // extract_max<Func>
+ for ( int i = 0; i < sizeof(keys) / sizeof(keys[0]); ++i )
+ CPPUNIT_ASSERT( s.emplace( keys[i], keys[i] * c_nStep ));
+
+ nCount = 1;
+ xp = s.extract_max( [&keyPrev]( key_type k ){ keyPrev = k; });
+ CPPUNIT_ASSERT( xp );
+ nPrev = xp->nVal;
+ CPPUNIT_CHECK_EX( keyPrev == sizeof(keys) / sizeof(keys[0]) - 1,
+ "Expected=" << sizeof(keys) / sizeof(keys[0]) - 1 << " real=" << keyPrev );
+ CPPUNIT_CHECK_EX( nPrev == c_nStep * (sizeof(keys) / sizeof(keys[0]) - 1),
+ "Expected=" << c_nStep * (sizeof(keys) / sizeof(keys[0]) - 1) << " real=" << nPrev );
+ while ( !s.empty() ) {
+ xp = s.extract_max( [&key](key_type k){ key = k; });
+ CPPUNIT_ASSERT( xp );
+ CPPUNIT_CHECK_EX( key == keyPrev - 1, "Expected=" << keyPrev - 1 << " real=" << key );
+ CPPUNIT_CHECK_EX( nPrev - c_nStep == xp->nVal, "Expected=" << nPrev - c_nStep << " real=" << xp->nVal );
+ nPrev = xp->nVal;
+ --keyPrev;
+ ++nCount;
+ }
+ CPPUNIT_CHECK( nCount == sizeof(keys) / sizeof(keys[0]));
+
+ // extract_max_key
+ for ( int i = 0; i < sizeof(keys) / sizeof(keys[0]); ++i )
+ CPPUNIT_ASSERT( s.emplace( keys[i], keys[i] * c_nStep ));
+
+ nCount = 1;
+ xp = s.extract_max_key( keyPrev );
+ CPPUNIT_ASSERT( xp );
+ nPrev = xp->nVal;
+ CPPUNIT_CHECK_EX( keyPrev == sizeof(keys) / sizeof(keys[0]) - 1,
+ "Expected=" << sizeof(keys) / sizeof(keys[0]) - 1 << " real=" << keyPrev );
+ CPPUNIT_CHECK_EX( nPrev == c_nStep * (sizeof(keys) / sizeof(keys[0]) - 1),
+ "Expected=" << c_nStep * (sizeof(keys) / sizeof(keys[0]) - 1) << " real=" << nPrev );
+ while ( !s.empty() ) {
+ xp = s.extract_max_key( key );
+ CPPUNIT_ASSERT( xp );
+ CPPUNIT_CHECK_EX( key == keyPrev - 1, "Expected=" << keyPrev - 1 << " real=" << key );
+ CPPUNIT_CHECK_EX( nPrev - c_nStep == xp->nVal, "Expected=" << nPrev - c_nStep << " real=" << xp->nVal );
+ nPrev = xp->nVal;
+ --keyPrev;
+ ++nCount;
+ }
+ CPPUNIT_CHECK( nCount == sizeof(keys) / sizeof(keys[0]));
+
+ // extract
+ for ( int i = 0; i < sizeof(keys) / sizeof(keys[0]); ++i )
+ CPPUNIT_ASSERT( s.emplace( keys[i], keys[i] * c_nStep ));
+
+ for ( int i = 0; i < sizeof( keys ) / sizeof( keys[0] ); ++i ) {
+ xp = s.extract(keys[i]);
+ CPPUNIT_CHECK_EX( xp->nVal == keys[i] * c_nStep, "Expected value=" << keys[i] * c_nStep << " real=" << xp->nVal );
+ }
+ CPPUNIT_ASSERT(s.empty());
+
+
+ // extract_with
+ for ( int i = 0; i < sizeof(keys) / sizeof(keys[0]); ++i )
+ CPPUNIT_ASSERT( s.emplace( keys[i], keys[i] * c_nStep ));
+
+ for ( int i = 0; i < sizeof( keys ) / sizeof( keys[0] ); ++i ) {
+ xp = s.extract_with( wrapped_int(keys[i]), wrapped_less());
+ CPPUNIT_CHECK_EX( xp->nVal == keys[i] * c_nStep, "Expected value=" << keys[i] * c_nStep << " real=" << xp->nVal );
+ }
+ CPPUNIT_ASSERT(s.empty());
}
template <class Set, class PrintStat>
void BronsonAVLTree_rcu_gpb_less();
+ void BronsonAVLTree_rcu_gpb_less_stat();
void BronsonAVLTree_rcu_gpb_cmp();
+ void BronsonAVLTree_rcu_gpb_cmp_stat();
void BronsonAVLTree_rcu_gpb_cmpless();
void BronsonAVLTree_rcu_gpb_less_ic();
void BronsonAVLTree_rcu_gpb_cmp_ic();
- void BronsonAVLTree_rcu_gpb_less_stat();
void BronsonAVLTree_rcu_gpb_cmp_ic_stat();
void BronsonAVLTree_rcu_gpb_cmp_ic_stat_yield();
+ void BronsonAVLTree_rcu_gpb_less_relaxed_insert();
+ void BronsonAVLTree_rcu_gpb_less_relaxed_insert_stat();
+ void BronsonAVLTree_rcu_gpb_pool_monitor_less();
+ void BronsonAVLTree_rcu_gpb_pool_monitor_less_stat();
+ void BronsonAVLTree_rcu_gpb_pool_monitor_cmp_ic_stat();
+ void BronsonAVLTree_rcu_gpb_pool_monitor_cmp_ic_stat_yield();
+ void BronsonAVLTree_rcu_gpb_pool_monitor_less_relaxed_insert();
+ void BronsonAVLTree_rcu_gpb_pool_monitor_less_relaxed_insert_stat();
CPPUNIT_TEST_SUITE( BronsonAVLTreeHdrTest )
CPPUNIT_TEST( BronsonAVLTree_rcu_gpb_less )
- //CPPUNIT_TEST( BronsonAVLTree_rcu_gpb_cmp )
- //CPPUNIT_TEST( BronsonAVLTree_rcu_gpb_cmpless )
- //CPPUNIT_TEST( BronsonAVLTree_rcu_gpb_less_ic )
- //CPPUNIT_TEST( BronsonAVLTree_rcu_gpb_cmp_ic )
- //CPPUNIT_TEST( BronsonAVLTree_rcu_gpb_less_stat )
- //CPPUNIT_TEST( BronsonAVLTree_rcu_gpb_cmp_ic_stat )
- //CPPUNIT_TEST( BronsonAVLTree_rcu_gpb_cmp_ic_stat_yield )
+ CPPUNIT_TEST( BronsonAVLTree_rcu_gpb_less_stat )
+ CPPUNIT_TEST( BronsonAVLTree_rcu_gpb_cmp )
+ CPPUNIT_TEST( BronsonAVLTree_rcu_gpb_cmp_stat )
+ CPPUNIT_TEST( BronsonAVLTree_rcu_gpb_cmpless )
+ CPPUNIT_TEST( BronsonAVLTree_rcu_gpb_less_ic )
+ CPPUNIT_TEST( BronsonAVLTree_rcu_gpb_cmp_ic )
+ CPPUNIT_TEST( BronsonAVLTree_rcu_gpb_cmp_ic_stat )
+ CPPUNIT_TEST( BronsonAVLTree_rcu_gpb_cmp_ic_stat_yield )
+ CPPUNIT_TEST( BronsonAVLTree_rcu_gpb_less_relaxed_insert )
+ CPPUNIT_TEST( BronsonAVLTree_rcu_gpb_less_relaxed_insert_stat )
+ CPPUNIT_TEST( BronsonAVLTree_rcu_gpb_pool_monitor_less )
+ CPPUNIT_TEST( BronsonAVLTree_rcu_gpb_pool_monitor_less_stat )
+ CPPUNIT_TEST( BronsonAVLTree_rcu_gpb_pool_monitor_cmp_ic_stat )
+ CPPUNIT_TEST( BronsonAVLTree_rcu_gpb_pool_monitor_cmp_ic_stat_yield )
+ CPPUNIT_TEST( BronsonAVLTree_rcu_gpb_pool_monitor_less_relaxed_insert )
+ CPPUNIT_TEST( BronsonAVLTree_rcu_gpb_pool_monitor_less_relaxed_insert_stat )
CPPUNIT_TEST_SUITE_END()
};
} // namespace tree
struct traits: public
cc::bronson_avltree::make_traits<
co::less< std::less<key_type> >
+ ,cc::bronson_avltree::relaxed_insert< false >
+ >::type
+ {};
+ typedef cc::BronsonAVLTreeMap< rcu_type, key_type, value_type, traits > map_type;
+ test<map_type, print_stat>();
+ }
+
+ // Traits: ordering via std::less, internal statistics enabled,
+ // relaxed insert explicitly off.
+ void BronsonAVLTreeHdrTest::BronsonAVLTree_rcu_gpb_less_stat()
+ {
+ struct traits: public
+ cc::bronson_avltree::make_traits<
+ co::less< std::less<key_type> >
+ ,co::stat< cc::bronson_avltree::stat<> >
+ ,cc::bronson_avltree::relaxed_insert< false >
+ >::type
+ {};
+ typedef cc::BronsonAVLTreeMap< rcu_type, key_type, value_type, traits > map_type;
+ test<map_type, print_stat>();
+ }
+
+ // Traits: ordering via the user-provided comparator only.
+ void BronsonAVLTreeHdrTest::BronsonAVLTree_rcu_gpb_cmp()
+ {
+ struct traits: public
+ cc::bronson_avltree::make_traits<
+ co::compare< compare >
+ >::type
+ {};
+ typedef cc::BronsonAVLTreeMap< rcu_type, key_type, value_type, traits > map_type;
+ test<map_type, print_stat>();
+ }
+
+ // Traits: comparator ordering with internal statistics enabled.
+ void BronsonAVLTreeHdrTest::BronsonAVLTree_rcu_gpb_cmp_stat()
+ {
+ struct traits: public
+ cc::bronson_avltree::make_traits<
+ co::compare< compare >
+ ,co::stat< cc::bronson_avltree::stat<> >
+ >::type
+ {};
+ typedef cc::BronsonAVLTreeMap< rcu_type, key_type, value_type, traits > map_type;
+ test<map_type, print_stat>();
+ }
+
+ // Traits: both compare and less supplied — checks the metafunction
+ // accepts both (compare is expected to take precedence; confirm against
+ // bronson_avltree::make_traits docs).
+ void BronsonAVLTreeHdrTest::BronsonAVLTree_rcu_gpb_cmpless()
+ {
+ struct traits: public
+ cc::bronson_avltree::make_traits<
+ co::compare< compare >
+ ,co::less< std::less<key_type> >
+ >::type
+ {};
+ typedef cc::BronsonAVLTreeMap< rcu_type, key_type, value_type, traits > map_type;
+ test<map_type, print_stat>();
+ }
+
+ // Traits: std::less ordering with a real (atomic) item counter.
+ void BronsonAVLTreeHdrTest::BronsonAVLTree_rcu_gpb_less_ic()
+ {
+ struct traits: public
+ cc::bronson_avltree::make_traits<
+ co::less< std::less<key_type> >
+ ,co::item_counter< cds::atomicity::item_counter >
+ >::type
+ {};
+ typedef cc::BronsonAVLTreeMap< rcu_type, key_type, value_type, traits > map_type;
+ test<map_type, print_stat>();
+ }
+
+ // Traits: comparator ordering with a real (atomic) item counter.
+ void BronsonAVLTreeHdrTest::BronsonAVLTree_rcu_gpb_cmp_ic()
+ {
+ struct traits: public
+ cc::bronson_avltree::make_traits<
+ co::compare< compare >
+ ,co::item_counter< cds::atomicity::item_counter >
+ >::type
+ {};
+ typedef cc::BronsonAVLTreeMap< rcu_type, key_type, value_type, traits > map_type;
+ test<map_type, print_stat>();
+ }
+
+ // Traits: comparator ordering + item counter + statistics.
+ void BronsonAVLTreeHdrTest::BronsonAVLTree_rcu_gpb_cmp_ic_stat()
+ {
+ struct traits: public
+ cc::bronson_avltree::make_traits<
+ co::compare< compare >
+ ,co::item_counter< cds::atomicity::item_counter >
+ ,co::stat< cc::bronson_avltree::stat<> >
+ >::type
+ {};
+ typedef cc::BronsonAVLTreeMap< rcu_type, key_type, value_type, traits > map_type;
+ test<map_type, print_stat>();
+ }
+
+ // Traits: comparator + item counter + statistics, with a yielding
+ // back-off strategy under contention.
+ void BronsonAVLTreeHdrTest::BronsonAVLTree_rcu_gpb_cmp_ic_stat_yield()
+ {
+ struct traits: public
+ cc::bronson_avltree::make_traits<
+ co::compare< compare >
+ ,co::item_counter< cds::atomicity::item_counter >
+ ,co::stat< cc::bronson_avltree::stat<> >
+ ,co::back_off< cds::backoff::yield >
+ >::type
+ {};
+ typedef cc::BronsonAVLTreeMap< rcu_type, key_type, value_type, traits > map_type;
+ test<map_type, print_stat>();
+ }
+
+ // Traits: std::less ordering with relaxed insert enabled.
+ void BronsonAVLTreeHdrTest::BronsonAVLTree_rcu_gpb_less_relaxed_insert()
+ {
+ struct traits: public
+ cc::bronson_avltree::make_traits<
+ co::less< std::less<key_type> >
+ ,cc::bronson_avltree::relaxed_insert< true >
+ >::type
+ {};
+ typedef cc::BronsonAVLTreeMap< rcu_type, key_type, value_type, traits > map_type;
+ test<map_type, print_stat>();
+ }
+
+ void BronsonAVLTreeHdrTest::BronsonAVLTree_rcu_gpb_less_relaxed_insert_stat()
+ {
+ struct traits: public
+ cc::bronson_avltree::make_traits<
+ co::less< std::less<key_type> >
+ ,co::stat< cc::bronson_avltree::stat<> >
+ ,cc::bronson_avltree::relaxed_insert< true >
>::type
{};
typedef cc::BronsonAVLTreeMap< rcu_type, key_type, value_type, traits > map_type;
--- /dev/null
+//$$CDS-header$$
+
+#include "tree/hdr_bronson_avltree_map.h"
+
+#include <iostream>
+#include <mutex>
+
+#include <cds/urcu/general_buffered.h>
+#include <cds/container/bronson_avltree_map_rcu.h>
+#include <cds/sync/pool_monitor.h>
+#include <cds/memory/vyukov_queue_pool.h>
+
+#include "unit/print_bronsonavltree_stat.h"
+
+namespace tree {
+ namespace cc = cds::container;
+ namespace co = cds::opt;
+ namespace {
+ typedef cds::urcu::gc< cds::urcu::general_buffered<> > rcu_type;
+
+ // Functor that dumps the tree's internal statistics to stdout;
+ // passed to test<>() as the post-run reporter.
+ struct print_stat {
+ template <typename Tree>
+ void operator()( Tree const& t )
+ {
+ std::cout << t.statistics();
+ }
+ };
+
+ // Mutex pools backing cds::sync::pool_monitor in the tests below:
+ // plain, lazily-initialized, and bounded Vyukov queue pools of std::mutex.
+ typedef cds::memory::vyukov_queue_pool< std::mutex > simple_pool;
+ typedef cds::memory::lazy_vyukov_queue_pool< std::mutex > lazy_pool;
+ typedef cds::memory::bounded_vyukov_queue_pool< std::mutex > bounded_pool;
+ } // namespace
+
+ // Traits: std::less ordering, node locking via pool_monitor over the
+ // simple (unbounded) mutex pool.
+ void BronsonAVLTreeHdrTest::BronsonAVLTree_rcu_gpb_pool_monitor_less()
+ {
+ struct traits: public
+ cc::bronson_avltree::make_traits<
+ co::less< std::less<key_type> >
+ ,co::sync_monitor< cds::sync::pool_monitor<simple_pool> >
+ >::type
+ {};
+ typedef cc::BronsonAVLTreeMap< rcu_type, key_type, value_type, traits > map_type;
+ test<map_type, print_stat>();
+ }
+
+ // Traits: std::less ordering + statistics, pool_monitor over the lazy
+ // mutex pool, relaxed insert explicitly off.
+ void BronsonAVLTreeHdrTest::BronsonAVLTree_rcu_gpb_pool_monitor_less_stat()
+ {
+ struct traits: public
+ cc::bronson_avltree::make_traits<
+ co::less< std::less<key_type> >
+ ,co::stat< cc::bronson_avltree::stat<> >
+ ,co::sync_monitor< cds::sync::pool_monitor<lazy_pool> >
+ ,cc::bronson_avltree::relaxed_insert< false >
+ >::type
+ {};
+ typedef cc::BronsonAVLTreeMap< rcu_type, key_type, value_type, traits > map_type;
+ test<map_type, print_stat>();
+ }
+
+ // Traits: comparator + item counter + statistics, pool_monitor over the
+ // bounded (fixed-capacity) mutex pool.
+ void BronsonAVLTreeHdrTest::BronsonAVLTree_rcu_gpb_pool_monitor_cmp_ic_stat()
+ {
+ struct traits: public
+ cc::bronson_avltree::make_traits<
+ co::compare< compare >
+ ,co::item_counter< cds::atomicity::item_counter >
+ ,co::stat< cc::bronson_avltree::stat<> >
+ ,co::sync_monitor< cds::sync::pool_monitor<bounded_pool> >
+ >::type
+ {};
+ typedef cc::BronsonAVLTreeMap< rcu_type, key_type, value_type, traits > map_type;
+ test<map_type, print_stat>();
+ }
+
+ // Traits: comparator + item counter + statistics + yielding back-off,
+ // pool_monitor over the lazy mutex pool.
+ void BronsonAVLTreeHdrTest::BronsonAVLTree_rcu_gpb_pool_monitor_cmp_ic_stat_yield()
+ {
+ struct traits: public
+ cc::bronson_avltree::make_traits<
+ co::compare< compare >
+ ,co::item_counter< cds::atomicity::item_counter >
+ ,co::stat< cc::bronson_avltree::stat<> >
+ ,co::back_off< cds::backoff::yield >
+ ,co::sync_monitor< cds::sync::pool_monitor<lazy_pool> >
+ >::type
+ {};
+ typedef cc::BronsonAVLTreeMap< rcu_type, key_type, value_type, traits > map_type;
+ test<map_type, print_stat>();
+ }
+
+ // Traits: std::less ordering with relaxed insert enabled,
+ // pool_monitor over the lazy mutex pool.
+ void BronsonAVLTreeHdrTest::BronsonAVLTree_rcu_gpb_pool_monitor_less_relaxed_insert()
+ {
+ struct traits: public
+ cc::bronson_avltree::make_traits<
+ co::less< std::less<key_type> >
+ ,cc::bronson_avltree::relaxed_insert< true >
+ ,co::sync_monitor< cds::sync::pool_monitor<lazy_pool> >
+ >::type
+ {};
+ typedef cc::BronsonAVLTreeMap< rcu_type, key_type, value_type, traits > map_type;
+ test<map_type, print_stat>();
+ }
+
+ // Traits: std::less ordering + statistics with relaxed insert enabled,
+ // pool_monitor over the simple (unbounded) mutex pool.
+ void BronsonAVLTreeHdrTest::BronsonAVLTree_rcu_gpb_pool_monitor_less_relaxed_insert_stat()
+ {
+ struct traits: public
+ cc::bronson_avltree::make_traits<
+ co::less< std::less<key_type> >
+ ,co::stat< cc::bronson_avltree::stat<> >
+ ,cc::bronson_avltree::relaxed_insert< true >
+ ,co::sync_monitor< cds::sync::pool_monitor<simple_pool> >
+ >::type
+ {};
+ typedef cc::BronsonAVLTreeMap< rcu_type, key_type, value_type, traits > map_type;
+ test<map_type, print_stat>();
+ }
+
+} // namespace tree