3 // Copyright (C) 2007, 2008 Free Software Foundation, Inc.
5 // This file is part of the GNU ISO C++ Library. This library is free
6 // software; you can redistribute it and/or modify it under the terms
7 // of the GNU General Public License as published by the Free Software
8 // Foundation; either version 2, or (at your option) any later version.
11 // This library is distributed in the hope that it will be useful, but
12 // WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 // General Public License for more details.
16 // You should have received a copy of the GNU General Public License
17 // along with this library; see the file COPYING. If not, write to
18 // the Free Software Foundation, 59 Temple Place - Suite 330, Boston,
19 // MA 02111-1307, USA.
21 // As a special exception, you may use this file as part of a free
22 // software library without restriction. Specifically, if other files
23 // instantiate templates or use macros or inline functions from this
24 // file, or you compile this file and link it with other files to
25 // produce an executable, this file does not by itself cause the
26 // resulting executable to be covered by the GNU General Public
27 // License. This exception does not however invalidate any other
28 // reasons why the executable file might be covered by the GNU General Public License.
31 /** @file parallel/tree.h
32 * @brief Parallel red-black tree operations.
34 * This implementation is described in
36 * Leonor Frias, Johannes Singler.
37 * Parallelization of Bulk Operations for STL Dictionaries.
38 * Workshop on Highly Parallel Processing on a Chip (HPPC) 2007.
40 * This file is a GNU parallel extension to the Standard C++ Library.
43 // Written by Leonor Frias Moya, Johannes Singler.
45 #ifndef _GLIBCXX_PARALLEL_TREE_H
46 #define _GLIBCXX_PARALLEL_TREE_H 1
48 #include <parallel/parallel.h>
55 //#include <ext/malloc_allocator.h>
56 #include <bits/stl_tree.h>
58 #include <parallel/list_partition.h>
// XXX Declaration should go to stl_tree.h.
namespace std
{
  // Rotation primitives of the sequential red-black tree
  // (implemented in the library); redeclared here because
  // stl_tree.h does not expose them.
  void
  _Rb_tree_rotate_left(_Rb_tree_node_base* const __x,
                       _Rb_tree_node_base*& __root);

  void
  _Rb_tree_rotate_right(_Rb_tree_node_base* const __x,
                        _Rb_tree_node_base*& __root);
}
73 namespace __gnu_parallel
// XXX move into parallel/type_traits.h if <type_traits> doesn't work.
/** @brief Helper class: remove the const modifier from the first
 *  component, if present.  Set kind component (the element itself is
 *  the key, so there is nothing to unconst).
 *  @param T Simple type, nothing to unconst */
template<typename T>
  struct unconst_first_component
  {
    /** @brief New type after removing the const */
    typedef T type;
  };

/** @brief Helper class: remove the const modifier from the first
 *  component, if present.  Map kind component (a @c std::pair whose
 *  first member is const-qualified).
 *  @param Key First component, from which to remove the const modifier
 *  @param Load Second component
 *  @sa unconst_first_component */
template<typename Key, typename Load>
  struct unconst_first_component<std::pair<const Key, Load> >
  {
    /** @brief New type after removing the const */
    typedef std::pair<Key, Load> type;
  };
/** @brief Helper class: set the appropriate comparator to deal with
 *  repetitions. Comparator for unique dictionaries.
 *
 *  StrictlyLess and LessEqual are part of a mechanism to deal with
 *  repetitions transparently whatever the actual policy is.
 *  @param _Key Keys to compare
 *  @param _Compare Comparator equal to conceptual < */
template<typename _Key, typename _Compare>
  struct StrictlyLess : public std::binary_function<_Key, _Key, bool>
  {
    /** @brief Comparator equal to conceptual < */
    _Compare c;

    /** @brief Constructor given a Comparator */
    StrictlyLess(const _Compare& _c) : c(_c) { }

    /** @brief Copy constructor */
    StrictlyLess(const StrictlyLess<_Key, _Compare>& strictly_less)
    : c(strictly_less.c) { }

    /** @brief Operator(): conceptual <, so equal keys compare false
     *  (duplicates are rejected for unique dictionaries).
     *  @param k1 First key to compare
     *  @param k2 Second key to compare */
    bool
    operator()(const _Key& k1, const _Key& k2) const
    { return c(k1, k2); }
  };
/** @brief Helper class: set the appropriate comparator to deal with
 *  repetitions. Comparator for non-unique dictionaries.
 *
 *  StrictlyLess and LessEqual are part of a mechanism to deal with
 *  repetitions transparently whatever the actual policy is.
 *  @param _Key Keys to compare
 *  @param _Compare Comparator equal to conceptual <= */
template<typename _Key, typename _Compare>
  struct LessEqual : public std::binary_function<_Key, _Key, bool>
  {
    /** @brief Comparator equal to conceptual < */
    _Compare c;

    /** @brief Constructor given a Comparator */
    LessEqual(const _Compare& _c) : c(_c) { }

    /** @brief Copy constructor */
    LessEqual(const LessEqual<_Key, _Compare>& less_equal)
    : c(less_equal.c) { }

    /** @brief Operator(): conceptual <=, derived from the strict
     *  comparator by negation of the reversed arguments, so equal
     *  keys compare true (duplicates are accepted).
     *  @param k1 First key to compare
     *  @param k2 Second key to compare */
    bool
    operator()(const _Key& k1, const _Key& k2) const
    { return !c(k2, k1); }
  };
// NOTE(review): the region below is a corrupted numbered listing —
// each line still carries its original line number and several source
// lines were dropped (e.g. the "class _Rb_tree" head at original line
// 162, braces, access specifiers).  Kept byte-identical; restore from
// upstream libstdc++ parallel/tree.h before attempting to compile.
151 /** @brief Parallel red-black tree.
153 * Extension of the sequential red-black tree. Specifically,
154 * parallel bulk insertion operations are provided.
155 * @param _Key Keys to compare
156 * @param _Val Elements to store in the tree
157 * @param _KeyOfValue Obtains the key from an element <
158 * @param _Compare Comparator equal to conceptual <
159 * @param _Alloc Allocator for the elements */
160 template<typename _Key, typename _Val, typename _KeyOfValue,
161 typename _Compare, typename _Alloc = std::allocator<_Val> >
// NOTE(review): the "class _Rb_tree" declarator line is missing here;
// only the base-clause survived.
163 : public std::_Rb_tree<_Key, _Val, _KeyOfValue, _Compare, _Alloc>
166 /** @brief Sequential tree */
167 typedef std::_Rb_tree<_Key, _Val, _KeyOfValue, _Compare, _Alloc> base_type;
169 /** @brief Renaming of base node type */
170 typedef typename std::_Rb_tree_node<_Val> _Rb_tree_node;
172 /** @brief Renaming of libstdc++ node type */
173 typedef typename std::_Rb_tree_node_base _Rb_tree_node_base;
175 /** @brief Renaming of base key_type */
176 typedef typename base_type::key_type key_type;
178 /** @brief Renaming of base value_type */
179 typedef typename base_type::value_type value_type;
181 /** @brief Helper class to unconst the first component of
182 * value_type if exists.
184 * This helper class is needed for map, but may discard qualifiers
185 * for set; however, a set with a const element type is not useful
186 * and should fail in some other place anyway.
188 typedef typename unconst_first_component<value_type>::type nc_value_type;
190 /** @brief Pointer to a node */
191 typedef _Rb_tree_node* _Rb_tree_node_ptr;
193 /** @brief Wrapper comparator class to deal with repetitions
194 transparently according to dictionary type with key _Key and
195 comparator _Compare. Unique dictionaries object
197 StrictlyLess<_Key, _Compare> strictly_less;
199 /** @brief Wrapper comparator class to deal with repetitions
200 transparently according to dictionary type with key _Key and
201 comparator _Compare. Non-unique dictionaries object
203 LessEqual<_Key, _Compare> less_equal;
206 /** @brief Renaming of base size_type */
207 typedef typename base_type::size_type size_type;
209 /** @brief Constructor with a given comparator and allocator.
211 * Delegates the basic initialization to the sequential class and
212 * initializes the helper comparators of the parallel class
213 * @param c Comparator object with which to initialize the class
214 * comparator and the helper comparators
215 * @param a Allocator object with which to initialize the class comparator
217 _Rb_tree(const _Compare& c, const _Alloc& a)
218 : base_type(c, a), strictly_less(base_type::_M_impl._M_key_compare),
219 less_equal(base_type::_M_impl._M_key_compare)
// NOTE(review): corrupted numbered listing — the copy constructor and
// the two bulk-insertion entry points below are missing their bodies'
// braces, the "void" return-type lines, and the comparator argument
// continuation lines.  Kept byte-identical; restore from upstream
// libstdc++ parallel/tree.h.
222 /** @brief Copy constructor.
224 * Delegates the basic initialization to the sequential class and
225 * initializes the helper comparators of the parallel class
226 * @param __x Parallel red-black instance to copy
228 _Rb_tree(const _Rb_tree<_Key, _Val, _KeyOfValue, _Compare, _Alloc>& __x)
229 : base_type(__x), strictly_less(base_type::_M_impl._M_key_compare),
230 less_equal(base_type::_M_impl._M_key_compare)
233 /** @brief Parallel replacement of the sequential
234 * std::_Rb_tree::_M_insert_unique()
236 * Parallel bulk insertion and construction. If the container is
237 * empty, bulk construction is performed. Otherwise, bulk
238 * insertion is performed
239 * @param __first First element of the input
240 * @param __last Last element of the input
242 template<typename _InputIterator>
244 _M_insert_unique(_InputIterator __first, _InputIterator __last)
246 if (__first == __last)
249 if (_GLIBCXX_PARALLEL_CONDITION(true))
250 if (base_type::_M_impl._M_node_count == 0)
// Empty container: bulk construction; non-empty: bulk insertion.
// The dropped fourth-argument continuation presumably passes the
// strictly_less comparator — TODO confirm against upstream.
252 _M_bulk_insertion_construction(__first, __last, true,
254 _GLIBCXX_PARALLEL_ASSERT(rb_verify());
258 _M_bulk_insertion_construction(__first, __last, false,
260 _GLIBCXX_PARALLEL_ASSERT(rb_verify());
// Sequential fallback when the parallel condition does not hold.
263 base_type::_M_insert_unique(__first, __last);
266 /** @brief Parallel replacement of the sequential
267 * std::_Rb_tree::_M_insert_equal()
269 * Parallel bulk insertion and construction. If the container is
270 * empty, bulk construction is performed. Otherwise, bulk
271 * insertion is performed
272 * @param __first First element of the input
273 * @param __last Last element of the input */
274 template<typename _InputIterator>
276 _M_insert_equal(_InputIterator __first, _InputIterator __last)
278 if (__first == __last)
281 if (_GLIBCXX_PARALLEL_CONDITION(true))
282 if (base_type::_M_impl._M_node_count == 0)
283 _M_bulk_insertion_construction(__first, __last, true, less_equal);
285 _M_bulk_insertion_construction(__first, __last, false, less_equal);
287 base_type::_M_insert_equal(__first, __last);
288 _GLIBCXX_PARALLEL_ASSERT(rb_verify());
// NOTE(review): corrupted numbered listing — this helper class links
// an array of pre-sorted nodes into an almost-complete red-black tree
// using rank arithmetic.  Many lines (member declarations such as
// "size_type n;", "size_type rank_root;", "int height;", braces,
// return types) were dropped.  The bit logic is order-sensitive; kept
// byte-identical.  Restore from upstream libstdc++ parallel/tree.h.
293 /** @brief Helper class of _Rb_tree: node linking.
295 * Nodes linking forming an almost complete tree. The last level
296 * is coloured red, the rest are black
297 * @param ranker Calculates the position of a node in an array of nodes
299 template<typename ranker>
300 class nodes_initializer
302 /** @brief Renaming of tree size_type */
304 typedef _Rb_tree<_Key, _Val, _KeyOfValue, _Compare, _Alloc> tree_type;
305 typedef typename tree_type::size_type size_type;
308 /** @brief mask[%i]= 0..01..1, where the number of 1s is %i+1 */
309 size_type mask[sizeof(size_type)*8];
311 /** @brief Array of nodes (initial address) */
312 const _Rb_tree_node_ptr* r_init;
314 /** @brief Total number of (used) nodes */
317 /** @brief Rank of the last tree node that can be calculated
318 taking into account a complete tree
320 size_type splitting_point;
322 /** @brief Rank of the tree root */
325 /** @brief Height of the tree */
328 /** @brief Number of threads into which divide the work */
329 const thread_index_t num_threads;
331 /** @brief Helper object to mind potential gaps in r_init */
334 /** @brief Constructor
335 * @param r Array of nodes
336 * @param _n Total number of (used) nodes
337 * @param _num_threads Number of threads into which divide the work
338 * @param _rank Helper object to mind potential gaps in @c r_init */
339 nodes_initializer(const _Rb_tree_node_ptr* r, const size_type _n,
340 const thread_index_t _num_threads,
342 : r_init(r), n(_n), num_threads(_num_threads), rank(_rank)
345 splitting_point = 2 * (n - ((1 << height) - 1)) -1;
348 size_type max = 1 << (height + 1);
349 rank_root= (max-2) >> 1;
350 if (rank_root > splitting_point)
351 rank_root = complete_to_original(rank_root);
// Build the mask table: mask[i] has its low i+1 bits set.
// (The dropped line presumably initializes mask[0] = 1 — TODO confirm.)
354 for (unsigned int i = 1; i < sizeof(size_type)*8; ++i)
355 mask[i] = (mask[i-1] << 1) + 1;
358 /** @brief Query for tree height
359 * @return Tree height */
364 /** @brief Query for the splitting point
365 * @return Splitting point */
367 get_shifted_splitting_point() const
368 { return rank.get_shifted_rank(splitting_point, 0); }
370 /** @brief Query for the tree root node
371 * @return Tree root node */
374 { return r_init[rank.get_shifted_rank(rank_root,num_threads/2)]; }
376 /** @brief Calculation of the parent position in the array of nodes
377 * @hideinitializer */
378 #define CALCULATE_PARENT \
379 if (p_s> splitting_point) \
380 p_s = complete_to_original(p_s); \
381 int s_r = rank.get_shifted_rank(p_s,iam); \
382 r->_M_parent = r_init[s_r]; \
384 /** @brief Link a node with its parent and children taking into
385 account that its rank (without gaps) is different to that in
387 * @param r Pointer to the node
388 * @param iam Partition of the array in which the node is, where
389 * iam is in [0..num_threads)
390 * @sa link_complete */
392 link_incomplete(const _Rb_tree_node_ptr& r, const int iam) const
394 size_type real_pos = rank.get_real_rank(&r-r_init, iam);
395 size_type l_s, r_s, p_s;
396 int mod_pos= original_to_complete(real_pos);
397 int zero= first_0_right(mod_pos);
399 // 1. Convert n to n', where n' will be its rank if the tree
401 // 2. Calculate neighbours for n'
402 // 3. Convert the neighbors n1', n2' and n3' to their
403 // appropriate values n1, n2, n3. Note that it must be
404 // checked that these neighbors actually exist.
405 calculate_shifts_pos_level(mod_pos, zero, l_s, r_s, p_s);
406 if (l_s > splitting_point)
408 _GLIBCXX_PARALLEL_ASSERT(r_s > splitting_point);
417 r_init[rank.get_shifted_rank(complete_to_original(l_s),
420 r_init[rank.get_shifted_rank(complete_to_original(r_s),
426 r->_M_left= r_init[rank.get_shifted_rank(l_s,iam)];
429 = r_init[rank.get_shifted_rank(complete_to_original(r_s),
434 r->_M_color = std::_S_black;
438 /** @brief Link a node with its parent and children taking into
439 account that its rank (without gaps) is the same as that in
441 * @param r Pointer to the node
442 * @param iam Partition of the array in which the node is, where
443 * iam is in [0..@c num_threads)
444 * @sa link_incomplete
447 link_complete(const _Rb_tree_node_ptr& r, const int iam) const
449 size_type real_pos = rank.get_real_rank(&r-r_init, iam);
452 // Test if it is a leaf on the last not necessarily full level
453 if ((real_pos & mask[0]) == 0)
455 if ((real_pos & 0x2) == 0)
459 r->_M_color = std::_S_red;
466 int zero = first_0_right(real_pos);
467 calculate_shifts_pos_level(real_pos, zero, l_s, r_s, p_s);
468 r->_M_color = std::_S_black;
470 r->_M_left = r_init[rank.get_shifted_rank(l_s,iam)];
471 if (r_s > splitting_point)
472 r_s = complete_to_original(r_s);
473 r->_M_right = r_init[rank.get_shifted_rank(r_s,iam)];
478 #undef CALCULATE_PARENT
481 /** @brief Change of "base": Convert the rank in the actual tree
482 into the corresponding rank if the tree was complete
483 * @param pos Rank in the actual incomplete tree
484 * @return Rank in the corresponding complete tree
485 * @sa complete_to_original */
487 original_to_complete(const int pos) const
488 { return (pos << 1) - splitting_point; }
490 /** @brief Change of "base": Convert the rank if the tree was
491 complete into the corresponding rank in the actual tree
492 * @param pos Rank in the complete tree
493 * @return Rank in the actual incomplete tree
494 * @sa original_to_complete */
496 complete_to_original(const int pos) const
497 { return (pos + splitting_point) >> 1; }
500 /** @brief Calculate the rank in the complete tree of the parent
501 and children of a node
502 * @param pos Rank in the complete tree of the node whose parent
503 * and children rank must be calculated
504 * @param level Tree level in which the node at pos is in
505 * (starting to count at leaves). @pre @c level > 1
506 * @param left_shift Rank in the complete tree of the left child
507 * of pos (out parameter)
508 * @param right_shift Rank in the complete tree of the right
509 * child of pos (out parameter)
510 * @param parent_shift Rank in the complete tree of the parent
511 * of pos (out parameter)
514 calculate_shifts_pos_level(const size_type pos, const int level,
515 size_type& left_shift,
516 size_type& right_shift,
517 size_type& parent_shift) const
519 int stride = 1 << (level -1);
520 left_shift = pos - stride;
521 right_shift = pos + stride;
522 if (((pos >> (level + 1)) & 0x1) == 0)
523 parent_shift = pos + 2*stride;
525 parent_shift = pos - 2*stride;
528 /** @brief Search for the first 0 bit (growing the weight)
529 * @param x Binary number (corresponding to a rank in the tree)
530 * whose first 0 bit must be calculated
531 * @return Position of the first 0 bit in @c x (starting to
535 first_0_right(const size_type x) const
540 return first_0_right_bs(x);
543 /** @brief Search for the first 0 bit (growing the weight) using
546 * Binary search can be used instead of a naive loop using the
547 * masks in mask array
548 * @param x Binary number (corresponding to a rank in the tree)
549 * whose first 0 bit must be calculated
550 * @param k_beg Position in which to start searching. By default is 2.
551 * @return Position of the first 0 bit in x (starting to count with 1) */
553 first_0_right_bs(const size_type x, int k_beg=2) const
555 int k_end = sizeof(size_type)*8;
556 size_type not_x = x ^ mask[k_end-1];
557 while ((k_end-k_beg) > 1)
559 int k = k_beg + (k_end-k_beg)/2;
560 if ((not_x & mask[k-1]) != 0)
// NOTE(review): corrupted numbered listing — ranker_gaps translates
// node ranks between a packed view (valid nodes only) and a view with
// per-partition gaps.  Class head, braces and several return-type
// lines were dropped; kept byte-identical.  Restore from upstream
// libstdc++ parallel/tree.h.
569 /***** Dealing with repetitions (EFFICIENCY ISSUE) *****/
570 /** @brief Helper class of nodes_initializer: mind the gaps of an
573 * Get absolute positions in an array of nodes taking into account
574 * the gaps in it @sa ranker_no_gaps
578 /** @brief Renaming of tree's size_type */
579 typedef _Rb_tree<_Key, _Val, _KeyOfValue, _Compare, _Alloc> tree_type;
580 typedef typename tree_type::size_type size_type;
582 /** @brief Array containing the beginning ranks of all the
583 num_threads partitions just considering the valid nodes, not
585 size_type* beg_partition;
587 /** @brief Array containing the beginning ranks of all the
588 num_threads partitions considering the valid nodes and the
590 const size_type* beg_shift_partition;
592 /** @brief Array containing the number of accumulated gaps at
593 the beginning of each partition */
594 const size_type* rank_shift;
596 /** @brief Number of partitions (and threads that work on it) */
597 const thread_index_t num_threads;
600 /** @brief Constructor
601 * @param size_p Pointer to the array containing the beginning
602 * ranks of all the @c _num_threads partitions considering the
603 * valid nodes and the gaps
604 * @param shift_r Array containing the number of accumulated
605 * gaps at the beginning of each partition
606 * @param _num_threads Number of partitions (and threads that
608 ranker_gaps(const size_type* size_p, const size_type* shift_r,
609 const thread_index_t _num_threads)
610 : beg_shift_partition(size_p), rank_shift(shift_r),
611 num_threads(_num_threads)
// beg_partition[i] = packed (gap-free) rank at which partition i starts.
613 beg_partition = new size_type[num_threads+1];
614 beg_partition[0] = 0;
615 for (int i = 1; i <= num_threads; ++i)
616 beg_partition[i] = (beg_partition[i-1]
617 + (beg_shift_partition[i]
618 - beg_shift_partition[i-1])
619 - (rank_shift[i] - rank_shift[i-1]));
621 // Ghost element, strictly larger than any index requested.
622 ++beg_partition[num_threads];
625 /** @brief Destructor
626 * Needs to be defined to deallocate the dynamic memory that has
627 * been allocated for beg_partition array
630 { delete[] beg_partition; }
632 /** @brief Convert a rank in the array of nodes considering
633 valid nodes and gaps, to the corresponding considering only
635 * @param pos Rank in the array of nodes considering valid nodes and gaps
636 * @param index Partition which the rank belongs to
637 * @return Rank in the array of nodes considering only the valid nodes
638 * @sa get_shifted_rank
641 get_real_rank(const size_type pos, const int index) const
642 { return pos - rank_shift[index]; }
644 /** @brief Inverse of get_real_rank: Convert a rank in the array
645 of nodes considering only valid nodes, to the corresponding
646 considering valid nodes and gaps
647 * @param pos Rank in the array of nodes considering only valid nodes
648 * @param index Partition which the rank is most likely to
649 * belong to (i. e. the corresponding if there were no gaps)
650 * @pre 0 <= @c pos <= number_of_distinct_elements
651 * @return Rank in the array of nodes considering valid nodes and gaps
652 * @post 0 <= @c return <= number_of_elements
653 * @sa get_real_rank()
656 get_shifted_rank(const size_type pos, const int index) const
// Fast path: the guessed partition is correct.
659 if (beg_partition[index] <= pos and pos < beg_partition[index+1])
660 return pos + rank_shift[index]
662 // Called rarely, do not hinder inlining.
663 return get_shifted_rank_loop(pos,index);
666 /** @brief Helper method of get_shifted_rank: in case the given
667 index in get_shifted_rank is not correct, look for it and
668 then calculate the rank
669 * @param pos Rank in the array of nodes considering only valid nodes
670 * @param index Partition which the rank should have belong to
671 * if there were no gaps
672 * @return Rank in the array of nodes considering valid nodes and gaps
675 get_shifted_rank_loop(const size_type pos, int index) const
677 while (pos >= beg_partition[index+1])
679 while (pos < beg_partition[index])
681 _GLIBCXX_PARALLEL_ASSERT(0 <= index && index < num_threads);
682 return pos + rank_shift[index];
686 /** @brief Helper class of nodes_initializer: access an array of
689 * Get absolute positions in an array of nodes taking into account
690 * that there are no gaps in it. @sa ranker_gaps */
693 /** @brief Renaming of tree's size_type */
694 typedef _Rb_tree<_Key, _Val, _KeyOfValue, _Compare, _Alloc> tree_type;
695 typedef typename tree_type::size_type size_type;
698 /** @brief Convert a rank in the array of nodes considering
699 * valid nodes and gaps, to the corresponding considering only
702 * As there are no gaps in this case, get_shifted_rank() and
703 * get_real_rank() are synonyms and make no change on pos
704 * @param pos Rank in the array of nodes considering valid nodes and gaps
705 * @param index Partition which the rank belongs to, unused here
706 * @return Rank in the array of nodes considering only the valid nodes */
708 get_real_rank(const size_type pos, const int index) const
711 /** @brief Inverse of get_real_rank: Convert a rank in the array
712 * of nodes considering only valid nodes, to the corresponding
713 * considering valid nodes and gaps
715 * As there are no gaps in this case, get_shifted_rank() and
716 * get_real_rank() are synonyms and make no change on pos
717 * @param pos Rank in the array of nodes considering only valid nodes
718 * @param index Partition which the rank belongs to, unused here
719 * @return Rank in the array of nodes considering valid nodes and gaps
722 get_shifted_rank(const size_type pos, const int index) const
727 /** @brief Helper comparator class: Invert a binary comparator
728 * @param _Comp Comparator to invert
729 * @param _Iterator Iterator to the elements to compare */
730 template<typename _Comp, typename _Iterator>
733 /** @brief Renaming value_type of _Iterator */
734 typedef typename std::iterator_traits<_Iterator>::value_type
737 /** @brief Comparator to be inverted */
741 /** @brief Constructor
742 * @param c Comparator */
743 gr_or_eq(const _Comp& c) : comp(c) { }
745 /** @brief Operator()
746 * @param a First value to compare
747 * @param b Second value to compare */
749 operator()(const value_type& a, const value_type& b) const
751 if (not (comp(_KeyOfValue()(a), _KeyOfValue()(b))))
757 /** @brief Helper comparator class: Passed as a parameter of
758 list_partition to check that a sequence is sorted
759 * @param _InputIterator Iterator to the elements to compare
760 * @param _CompIsSorted Comparator to check for sortednesss */
761 template<typename _InputIterator, typename _CompIsSorted>
762 class is_sorted_functor
764 /** @brief Element to compare with (first parameter of comp) */
767 /** @brief Comparator to check for sortednesss */
768 const _CompIsSorted comp;
770 /** @brief Sum up the history of the operator() of this
771 * comparator class Its value is true if all calls to comp from
772 * this class have returned true. It is false otherwise */
776 /** @brief Constructor
778 * Sorted is set to true
779 * @param first Element to compare with the first time the
780 * operator() is called
781 * @param c Comparator to check for sortedness */
782 is_sorted_functor(const _InputIterator first, const _CompIsSorted c)
783 : prev(first), comp(c), sorted(true) { }
785 /** @brief Operator() with only one explicit parameter. Updates
786 the class member @c prev and sorted.
787 * @param it Iterator to the element which must be compared to
788 * the element pointed by the the class member @c prev */
790 operator()(const _InputIterator it)
792 if (sorted and it != prev and comp(_KeyOfValue()(*it),
793 _KeyOfValue()(*prev)))
798 /** @brief Query method for sorted
799 * @return Current value of sorted */
805 /** @brief Helper functor: sort the input based upon elements
807 * @param KeyComparator Comparator for the key of values */
808 template<typename KeyComparator>
810 : public std::binary_function<value_type, value_type, bool>
812 /** @brief Comparator for the key of values */
813 const KeyComparator comp;
816 /** @brief Constructor
817 * @param c Comparator for the key of values */
818 ValueCompare(const KeyComparator& c): comp(c) { }
820 /** @brief Operator(): Analogous to comp but for values and not keys
821 * @param v1 First value to compare
822 * @param v2 Second value to compare
823 * @return Result of the comparison */
825 operator()(const value_type& v1, const value_type& v2) const
826 { return comp(_KeyOfValue()(v1),_KeyOfValue()(v2)); }
829 /** @brief Helper comparator: compare a key with the key in a node
830 * @param _Comparator Comparator for keys */
831 template<typename _Comparator>
832 struct compare_node_key
834 /** @brief Comparator for keys */
835 const _Comparator& c;
837 /** @brief Constructor
838 * @param _c Comparator for keys */
839 compare_node_key(const _Comparator& _c) : c(_c) { }
841 /** @brief Operator() with the first parameter being a node
842 * @param r Node whose key is to be compared
843 * @param k Key to be compared
844 * @return Result of the comparison */
846 operator()(const _Rb_tree_node_ptr r, const key_type& k) const
847 { return c(base_type::_S_key(r),k); }
849 /** @brief Operator() with the second parameter being a node
850 * @param k Key to be compared
851 * @param r Node whose key is to be compared
852 * @return Result of the comparison */
854 operator()(const key_type& k, const _Rb_tree_node_ptr r) const
855 { return c(k, base_type::_S_key(r)); }
858 /** @brief Helper comparator: compare a key with the key of a
859 value pointed by an iterator
860 * @param _Comparator Comparator for keys
862 template<typename _Iterator, typename _Comparator>
863 struct compare_value_key
865 /** @brief Comparator for keys */
866 const _Comparator& c;
868 /** @brief Constructor
869 * @param _c Comparator for keys */
870 compare_value_key(const _Comparator& _c) : c(_c){ }
872 /** @brief Operator() with the first parameter being an iterator
873 * @param v Iterator to the value whose key is to be compared
874 * @param k Key to be compared
875 * @return Result of the comparison */
877 operator()(const _Iterator& v, const key_type& k) const
878 { return c(_KeyOfValue()(*v),k); }
880 /** @brief Operator() with the second parameter being an iterator
881 * @param k Key to be compared
882 * @param v Iterator to the value whose key is to be compared
883 * @return Result of the comparison */
885 operator()(const key_type& k, const _Iterator& v) const
886 { return c(k, _KeyOfValue()(*v)); }
889 /** @brief Helper class of _Rb_tree to avoid some symmetric code
890 in tree operations */
893 /** @brief Obtain the conceptual left child of a node
894 * @param parent Node whose child must be obtained
895 * @return Reference to the child node */
896 static _Rb_tree_node_base*&
897 left(_Rb_tree_node_base* parent)
898 { return parent->_M_left; }
900 /** @brief Obtain the conceptual right child of a node
901 * @param parent Node whose child must be obtained
902 * @return Reference to the child node */
903 static _Rb_tree_node_base*&
904 right(_Rb_tree_node_base* parent)
905 { return parent->_M_right; }
908 /** @brief Helper class of _Rb_tree to avoid some symmetric code
909 in tree operations: inverse the symmetry
910 * @param S Symmetry to inverse
915 /** @brief Obtain the conceptual left child of a node, inverting
917 * @param parent Node whose child must be obtained
918 * @return Reference to the child node */
919 static _Rb_tree_node_base*&
920 left(_Rb_tree_node_base* parent)
921 { return S::right(parent);}
923 /** @brief Obtain the conceptual right child of a node,
924 inverting the symmetry
925 * @param parent Node whose child must be obtained
926 * @return Reference to the child node */
927 static _Rb_tree_node_base*&
928 right(_Rb_tree_node_base* parent)
929 { return S::left(parent);}
932 /** @brief Inverse symmetry of LeftRight */
933 typedef Opposite<LeftRight> RightLeft;
/** @brief Helper comparator to compare value pointers, so that
 *  the pointed-to values are compared, not the pointers themselves
 *  @param Comparator Comparator for values
 *  @param _ValuePtr Pointer to values */
template<typename Comparator, typename _ValuePtr>
  struct PtrComparator
  : public std::binary_function<_ValuePtr, _ValuePtr, bool>
  {
    /** @brief Comparator for values */
    Comparator comp;

    /** @brief Constructor
     *  @param comp Comparator for values */
    PtrComparator(Comparator comp) : comp(comp) { }

    /** @brief Operator(): compare the values instead of the pointers
     *  @param v1 Pointer to the first element to compare
     *  @param v2 Pointer to the second element to compare */
    bool
    operator()(const _ValuePtr& v1, const _ValuePtr& v2) const
    { return comp(*v1,*v2); }
  };
// NOTE(review): corrupted numbered listing — a random-access iterator
// over an array of value pointers.  Roughly half the member function
// signatures (operator*, prefix ++/--, operator+= body, return types,
// the "value_type** ptr;" member, braces) were dropped; kept
// byte-identical.  Restore from upstream libstdc++ parallel/tree.h.
959 /** @brief Iterator whose elements are pointers
960 * @param value_type Type pointed by the pointers */
961 template<typename _ValueTp>
965 /** @brief The iterator category is random access iterator */
966 typedef typename std::random_access_iterator_tag iterator_category;
967 typedef _ValueTp value_type;
968 typedef size_t difference_type;
969 typedef value_type* ValuePtr;
970 typedef ValuePtr& reference;
971 typedef value_type** pointer;
973 /** @brief Element accessed by the iterator */
976 /** @brief Trivial constructor */
979 /** @brief Constructor from an element */
980 PtrIterator(const ValuePtr& __i) : ptr(&__i) { }
982 /** @brief Constructor from a pointer */
983 PtrIterator(const pointer& __i) : ptr(__i) { }
985 /** @brief Copy constructor */
986 PtrIterator(const PtrIterator<value_type>& __i) : ptr(__i.ptr) { }
996 /** @brief Bidirectional iterator requirement */
1004 /** @brief Bidirectional iterator requirement */
1007 { return PtrIterator(ptr++); }
1009 /** @brief Bidirectional iterator requirement */
1017 /** @brief Bidirectional iterator requirement */
1020 { return PtrIterator(ptr--); }
1022 /** @brief Random access iterator requirement */
1024 operator[](const difference_type& __n) const
1025 { return *ptr[__n]; }
1027 /** @brief Random access iterator requirement */
1029 operator+=(const difference_type& __n)
1035 /** @brief Random access iterator requirement */
1037 operator+(const difference_type& __n) const
1038 { return PtrIterator(ptr + __n); }
1040 /** @brief Random access iterator requirement */
1042 operator-=(const difference_type& __n)
1048 /** @brief Random access iterator requirement */
1050 operator-(const difference_type& __n) const
1051 { return PtrIterator(ptr - __n); }
1053 /** @brief Random access iterator requirement */
1055 operator-(const PtrIterator<value_type>& iter) const
1056 { return ptr - iter.ptr; }
1058 /** @brief Random access iterator requirement */
1060 operator+(const PtrIterator<value_type>& iter) const
1061 { return ptr + iter.ptr; }
1063 /** @brief Allow assignment of an element ValuePtr to the iterator */
1064 PtrIterator<value_type>&
1065 operator=(const ValuePtr sptr)
1071 PtrIterator<value_type>&
1072 operator=(const PtrIterator<value_type>& piter)
1079 operator==(const PtrIterator<value_type>& piter)
1080 { return ptr == piter.ptr; }
1083 operator!=(const PtrIterator<value_type>& piter)
1084 { return ptr != piter.ptr; }
// NOTE(review): POD-style task descriptor for the bottom-up parallel
// concatenation phase of bulk insertion. Forms a binary tree of problems
// (left/right/parent); a node becomes solvable when both children signal
// READY_YES. Declarations of the `black_h` and `is_ready` members fall in
// extraction gaps below; code kept byte-identical.
1089 /** @brief Bulk insertion helper: synchronization and construction
1090 of the tree bottom up */
1091 struct concat_problem
1093 /** @brief Root of a tree.
1095 * Input: Middle node to concatenate two subtrees. Out: Root of
1096 * the resulting concatenated tree. */
1097 _Rb_tree_node_ptr t;
1099 /** @brief Black height of @c t */
1102 /** @brief Synchronization variable.
1104 * \li READY_YES: the root of the tree can be concatenated with
1105 * the result of the children concatenation problems (both of
1106 * them have finished).
1107 * \li READY_NOT: at least one of the children
1108 * concatenation_problem have not finished */
1111 /** @brief Parent concatenation problem to solve when @c
1112 is_ready = READY_YES */
1113 concat_problem* par_problem;
1115 /** @brief Left concatenation problem */
1116 concat_problem* left_problem;
1118 /** @brief Right concatenation problem */
1119 concat_problem* right_problem;
1121 /** @brief Value NO for the synchronization variable. */
1122 static const int READY_NO = 0;
1124 /** @brief Value YES for the synchronization variable. */
1125 static const int READY_YES = 1;
1127 /** @brief Trivial constructor.
1129 * Initialize the synchronization variable to not ready. */
1130 concat_problem(): is_ready(READY_NO) { }
1132 /** @brief Constructor.
1134 * Initialize the synchronization variable to not ready.
1135 * @param _t Root of a tree.
1136 * @param _black_h Black height of @c _t
1137 * @param _par_problem Parent concatenation problem to solve
1138 * when @c is_ready = READY_YES
1140 concat_problem(const _Rb_tree_node_ptr _t, const int _black_h,
1141 concat_problem* _par_problem)
1142 : t(_t), black_h(_black_h), is_ready(READY_NO), par_problem(_par_problem)
// Recolor a red root to black; the black height adjustment that normally
// accompanies this recolor falls in an extraction gap — confirm in full source.
1144 // The root of an insertion problem must be black.
1145 if (t != NULL and t->_M_color == std::_S_red)
1147 t->_M_color = std::_S_black;
// NOTE(review): work item for the parallel insertion phase — a half-open
// range [pos_beg, pos_end) of the node array to insert into subtree `t`,
// plus the concat_problem to solve once done. Declarations of `pos_beg`
// and `pos_end` fall in extraction gaps; code kept byte-identical.
1154 /** @brief Bulk insertion helper: insertion of a sequence of
1155 elements in a subtree
1156 @invariant t, pos_beg and pos_end will not change after initialization
1158 struct insertion_problem
1160 /** @brief Renaming of _Rb_tree @c size_type */
1161 typedef _Rb_tree<_Key, _Val, _KeyOfValue, _Compare, _Alloc> tree_type;
1162 typedef typename tree_type::size_type size_type;
1164 /** @brief Root of the tree where the elements are to be inserted */
1165 _Rb_tree_node_ptr t;
1167 /** @brief Position of the first node in the array of nodes to
1168 be inserted into @c t */
1171 /** @brief Position of the first node in the array of nodes
1172 that won't be inserted into @c t */
1175 /** @brief Partition in the array of nodes of @c pos_beg and @c
1176 pos_end (must be the same for both, and so gaps are
1178 int array_partition;
1180 /** @brief Concatenation problem to solve once the insertion
1181 problem is finished */
1182 concat_problem* conc;
1184 /** @brief Trivial constructor. */
1188 /** @brief Constructor.
1189 * @param b Position of the first node in the array of nodes to
1190 * be inserted into @c _conc->t
1191 * @param e Position of the first node in the array of nodes
1192 * that won't be inserted into @c _conc->t
1193 * @param array_p Partition in the array of nodes of @c b and @c e
1194 * @param _conc Concatenation problem to solve once the
1195 * insertion problem is finished
1197 insertion_problem(const size_type b, const size_type e,
1198 const int array_p, concat_problem* _conc)
1199 : t(_conc->t), pos_beg(b), pos_end(e), array_partition(array_p),
// Invariant checks: the range is well-formed and the target root is black.
1202 _GLIBCXX_PARALLEL_ASSERT(pos_beg <= pos_end);
1204 //The root of an insertion problem must be black!!
1205 _GLIBCXX_PARALLEL_ASSERT(t == NULL or t->_M_color != std::_S_red);
// NOTE(review): top-level dispatcher. Checks the input for sortedness while
// computing per-thread accessors; sorted input goes straight to the sorted
// bulk routines, otherwise to the copy-and-sort path. Interior lines (e.g.
// `n`'s declaration, the sorted/unsorted branch, loop braces) fall in
// extraction gaps; code kept byte-identical.
1210 /** @brief Main bulk construction and insertion helper method
1211 * @param __first First element in a sequence to be added into the tree
1212 * @param __last End of the sequence of elements to be added into the tree
1213 * @param is_construction If true, the tree was empty and so, this
1214 * is constructed. Otherwise, the elements are added to an
1216 * @param strictly_less_or_less_equal Comparator to deal
1217 * transparently with repetitions with respect to the uniqueness
1218 * of the wrapping container
1219 * The input sequence is preprocessed so that the bulk
1220 * construction or insertion can be performed
1221 * efficiently. Essentially, the sequence is checked for
1222 * sortednesss and iterators to the middle of the structure are
1223 * saved so that afterwards the sequence can be processed
1224 * effectively in parallel. */
1225 template<typename _InputIterator, typename StrictlyLessOrLessEqual>
1227 _M_bulk_insertion_construction(const _InputIterator __first,
1228 const _InputIterator __last,
1229 const bool is_construction,
1230 StrictlyLessOrLessEqual
1231 strictly_less_or_less_equal)
1233 thread_index_t num_threads = get_max_threads();
// VLAs sized by num_threads+1: a GNU extension, used throughout this file.
1235 size_type beg_partition[num_threads+1];
1236 _InputIterator access[num_threads+1];
1237 beg_partition[0] = 0;
1239 is_sorted_distance_accessors(__first, __last, access,
1240 beg_partition, n, num_threads,
1241 std::__iterator_category(__first));
// Unsorted path: copy + sort, then re-dispatch (see overloads below).
1244 _M_not_sorted_bulk_insertion_construction(
1245 access, beg_partition, n, num_threads,
1246 is_construction, strictly_less_or_less_equal);
// Fewer elements than threads: compact away empty ranges so every
// remaining range holds at least one element.
1249 // The vector must be moved... all ranges must have at least
1250 // one element, or make just sequential???
1251 if (static_cast<size_type>(num_threads) > n)
1254 for (int i = 1; i <= num_threads; ++i)
1256 if (beg_partition[j-1] != beg_partition[i])
1258 beg_partition[j] = beg_partition[i];
1259 access[j] = access[i];
1263 num_threads = static_cast<thread_index_t>(n);
1266 if (is_construction)
1267 _M_sorted_bulk_construction(access, beg_partition, n,
1269 strictly_less_or_less_equal);
1271 _M_sorted_bulk_insertion(access, beg_partition, n, num_threads,
1272 strictly_less_or_less_equal);
// NOTE(review): unsorted-input front end. Copies either whole values
// (_GLIBCXX_TREE_FULL_COPY) or pointers to values into a scratch array,
// then forwards to the sorting overload with the matching iterator and
// comparator types. Allocation sizes and the #else/#endif lines fall in
// extraction gaps; code kept byte-identical.
1276 /** @brief Bulk construction and insertion helper method on an
1277 * input sequence which is not sorted
1279 * The elements are copied, according to the copy policy, in order
1280 * to be sorted. Then the
1281 * _M_not_sorted_bulk_insertion_construction() method is called
1283 * @param access Array of iterators of size @c num_threads +
1284 * 1. Each position contains the first element in each subsequence
1285 * to be added into the tree.
1286 * @param beg_partition Array of positions of size @c num_threads
1287 * + 1. Each position contains the rank of the first element in
1288 * each subsequence to be added into the tree.
1289 * @param n Size of the sequence to be inserted
1290 * @param num_threads Number of threads and corresponding
1291 * subsequences in which the insertion work is going to be shared
1292 * @param is_construction If true, the tree was empty and so, this
1293 * is constructed. Otherwise, the elements are added to an
1295 * @param strictly_less_or_less_equal Comparator to deal
1296 * transparently with repetitions with respect to the uniqueness
1297 * of the wrapping container
1299 template<typename _InputIterator, typename StrictlyLessOrLessEqual>
1301 _M_not_sorted_bulk_insertion_construction(_InputIterator* access,
1302 size_type* beg_partition,
1304 const thread_index_t
1306 const bool is_construction,
1307 StrictlyLessOrLessEqual
1308 strictly_less_or_less_equal)
1310 // Copy entire elements. In the case of a map, we would be
1311 // copying the pair. Therefore, the copy should be reconsidered
1312 // when objects are big. Essentially two cases:
1313 // - The key is small: make that the pair, is a pointer to data
1314 // instead of a copy to it
1315 // - The key is big: we simply have a pointer to the iterator
1316 #if _GLIBCXX_TREE_FULL_COPY
// Full-copy policy: raw buffer of nc_value_type, filled from the accessors.
1318 static_cast<nc_value_type*>(::operator new(sizeof(nc_value_type)
1321 uninitialized_copy_from_accessors(access, beg_partition,
1324 _M_not_sorted_bulk_insertion_construction<nc_value_type,
1325 nc_value_type*, ValueCompare<_Compare> >
1326 (beg_partition, v, ValueCompare<_Compare>(base_type::
1327 _M_impl._M_key_compare),
1328 n, num_threads, is_construction, strictly_less_or_less_equal);
// Pointer-copy policy: sort an array of pointers (cheap swaps), iterate
// with PtrIterator and compare through PtrComparator.
1330 // For sorting, we cannot use the new PtrIterator because we
1331 // want the pointers to be exchanged and not the elements.
1332 typedef PtrComparator<ValueCompare<_Compare>, nc_value_type*>
1333 this_ptr_comparator;
1335 static_cast<nc_value_type**>(::operator new(sizeof(nc_value_type*)
1338 uninitialized_ptr_copy_from_accessors(access, beg_partition,
1341 _M_not_sorted_bulk_insertion_construction<nc_value_type*,
1342 PtrIterator<nc_value_type>, this_ptr_comparator>
1343 (beg_partition, v, this_ptr_comparator(
1344 ValueCompare<_Compare>(base_type::_M_impl._M_key_compare)),
1345 n, num_threads, is_construction, strictly_less_or_less_equal);
// NOTE(review): sorting overload — stable-sorts the copied elements, builds
// per-thread accessors over the sorted array, dispatches to the sorted bulk
// routines, then frees the scratch buffer with ::operator delete (matching
// the ::operator new in the caller). Code kept byte-identical.
1349 /** @brief Bulk construction and insertion helper method on an
1350 * input sequence which is not sorted
1352 * The elements are sorted and its accessors calculated. Then,
1353 * _M_sorted_bulk_construction() or _M_sorted_bulk_insertion() is
1355 * @param beg_partition Array of positions of size @c num_threads
1356 * + 1. Each position contains the rank of the first element in
1357 * each subsequence to be added into the tree.
1358 * @param v Array of elements to be sorted (copy of the original sequence).
1359 * @param comp Comparator to be used for sorting the elements
1360 * @param n Size of the sequence to be inserted
1361 * @param num_threads Number of threads and corresponding
1362 * subsequences in which the insertion work is going to be shared
1363 * @param is_construction If true, _M_sorted_bulk_construction()
1364 * is called. Otherwise, _M_sorted_bulk_insertion() is called.
1365 * @param strictly_less_or_less_equal Comparator to deal
1366 * transparently with repetitions with respect to the uniqueness
1367 * of the wrapping container
1369 template<typename ElementsToSort, typename IteratorSortedElements,
1370 typename Comparator, typename StrictlyLessOrLessEqual>
1372 _M_not_sorted_bulk_insertion_construction(size_type* beg_partition,
1376 thread_index_t num_threads,
1377 const bool is_construction,
1378 StrictlyLessOrLessEqual
1379 strictly_less_or_less_equal)
1381 // The accessors have been calculated for the non sorted.
// Clamp thread count to the element count.
1383 static_cast<thread_index_t>(std::min<size_type>(num_threads, n));
// stable_sort keeps equal elements in input order — relevant for the
// later duplicate-filtering, which keeps the first occurrence.
1385 std::stable_sort(v, v+n, comp);
1387 IteratorSortedElements sorted_access[num_threads+1];
1388 range_accessors(IteratorSortedElements(v),
1389 IteratorSortedElements(v + n),
1390 sorted_access, beg_partition, n, num_threads,
1391 std::__iterator_category(v));
1393 // Partial template specialization not available.
1394 if (is_construction)
1395 _M_sorted_bulk_construction(sorted_access, beg_partition, n,
1397 strictly_less_or_less_equal);
1399 _M_sorted_bulk_insertion(sorted_access, beg_partition, n,
1400 num_threads, strictly_less_or_less_equal);
// NOTE(review): raw-buffer delete only releases storage; element
// destruction is presumably handled elsewhere — confirm in full source.
1401 ::operator delete(v);
// NOTE(review): sequential fallback — builds a red-black tree from
// r_array[pos_beg, pos_end) using the parallel linking machinery with a
// single partition. Returns the root; black height comes back via black_h.
// The empty-range return and some brace lines fall in gaps; code kept
// byte-identical.
1404 /** @brief Construct a tree sequentially using the parallel routine
1405 * @param r_array Array of nodes from which to take the nodes to
1407 * @param pos_beg Position of the first node in the array of nodes
1408 * to be part of the tree
1409 * @param pos_end Position of the first node in the array of nodes
1410 * that will not be part of the tree
1411 * @param black_h Black height of the resulting tree (out)
1413 static _Rb_tree_node_ptr
1414 simple_tree_construct(_Rb_tree_node_ptr* r_array, const size_type pos_beg,
1415 const size_type pos_end, int& black_h)
1417 if (pos_beg == pos_end)
1422 if (pos_beg+1 == pos_end)
// Single node: turn it into a proper (black) leaf rather than the red
// bottom level the bulk linker would produce.
1424 // It is needed, not only for efficiency but because the
1425 // last level in our tree construction is red.
1426 make_leaf(r_array[pos_beg], black_h);
1427 return r_array[pos_beg];
// General case: run the nodes_initializer with one partition and a
// gap-free ranker.
1433 b_p[1] = pos_end - pos_beg;
1434 _Rb_tree_node_ptr* r= r_array + pos_beg;
1435 size_type length = pos_end - pos_beg;
1437 ranker_no_gaps rank;
1438 nodes_initializer<ranker_no_gaps> nodes_init(r, length, 1, rank);
1440 black_h = nodes_init.get_height();
// Nodes before `split` are linked as complete levels; the rest form the
// partially-filled last level.
1442 size_type split = nodes_init.get_shifted_splitting_point();
1443 for (size_type i = 0; i < split; ++i)
1444 nodes_init.link_complete(r[i],0);
1446 for (size_type i = split; i < length; ++i)
1447 nodes_init.link_incomplete(r[i],0);
1449 _Rb_tree_node_ptr t = nodes_init.get_root();
1450 _GLIBCXX_PARALLEL_ASSERT(rb_verify_tree(t));
1451 _GLIBCXX_PARALLEL_ASSERT(t->_M_color == std::_S_black);
// NOTE(review): parallel node allocation for possibly-unsorted input — no
// uniqueness filtering, each thread creates nodes for its [access[iam],
// access[iam+1]) range at positions starting from beg_partition[iam].
// The return statement and loop braces fall in gaps; code kept byte-identical.
1456 /** @brief Allocation of an array of nodes and initialization of
1457 their value fields from an input sequence. Done in parallel.
1458 * @param access Array of iterators of size @c num_threads +
1459 * 1. Each position contains the first value in the subsequence to
1460 * be copied into the corresponding tree node.
1461 * @param beg_partition Array of positions of size @c num_threads
1462 * + 1. Each position contains the rank of the first element in
1463 * the subsequence from which to copy the data to initialize the
1465 * @param n Size of the sequence and the array of nodes to be allocated.
1466 * @param num_threads Number of threads and corresponding
1467 * subsequences in which the allocation and initialization work is
1468 * going to be shared
1470 template<typename _Iterator>
1472 _M_unsorted_bulk_allocation_and_initialization(const _Iterator* access,
1476 const thread_index_t
// n+1 slots: one extra ghost-node pointer, consistent with the sorted
// variant below.
1479 _Rb_tree_node_ptr* r = static_cast<_Rb_tree_node_ptr*>(
1480 ::operator new (sizeof(_Rb_tree_node_ptr) * (n + 1)));
1482 // Allocate and initialize the nodes (don't check for uniqueness
1483 // because the sequence is not necessarily sorted.
1484 #pragma omp parallel num_threads(num_threads)
1487 PAPI_register_thread();
1490 int iam = omp_get_thread_num();
1491 _Iterator it = access[iam];
1492 size_type i = beg_partition[iam];
1493 while (it!= access[iam+1])
1495 r[i] = base_type::_M_create_node(*it);
// NOTE(review): parallel node allocation for sorted input with duplicate
// filtering. Repetitions leave "gaps" in each partition; rank_shift[i+1]
// records the gap count of partition i. A fix-up pass then merges empty
// partitions into their predecessor. Several brace/closing lines fall in
// extraction gaps; code kept byte-identical.
1504 /** @brief Allocation of an array of nodes and initialization of
1505 * their value fields from an input sequence. Done in
1506 * parallel. Besides, the sequence is checked for uniqueness while
1507 * copying the elements, and if there are repetitions, gaps within
1508 * the partitions are created.
1510 * An extra ghost node pointer is reserved in the array to ease
1511 * comparisons later while linking the nodes
1512 * @pre The sequence is sorted.
1513 * @param access Array of iterators of size @c num_threads +
1514 * 1. Each position contains the first value in the subsequence to
1515 * be copied into the corresponding tree node.
1516 * @param beg_partition Array of positions of size @c num_threads
1517 * + 1. Each position contains the rank of the first element in
1518 * the subsequence from which to copy the data to initialize the
1520 * @param rank_shift Array of size @c num_threads + 1 containing
1521 * the number of accumulated gaps at the beginning of each
1523 * @param n Size of the sequence and the array of nodes (-1) to be
1525 * @param num_threads Number of threads and corresponding
1526 * subsequences in which the allocation and initialization work is
1527 * going to be shared
1528 * @param strictly_less_or_less_equal Comparator to deal
1529 * transparently with repetitions with respect to the uniqueness
1530 * of the wrapping container
1532 template<typename _Iterator, typename StrictlyLessOrLessEqual>
1534 _M_sorted_bulk_allocation_and_initialization(_Iterator* access,
1535 size_type* beg_partition,
1536 size_type* rank_shift,
1538 thread_index_t& num_threads,
1539 StrictlyLessOrLessEqual
1540 strictly_less_or_less_equal)
1542 // Ghost node at the end to avoid extra comparisons
1543 // in nodes_initializer.
1544 _Rb_tree_node_ptr* r = static_cast<_Rb_tree_node_ptr*>(
1545 ::operator new(sizeof(_Rb_tree_node_ptr) * (n + 1)));
// Snapshot the accessors: the parallel section reads neighbours'
// boundaries while the fix-up below rewrites access[] — hence the copy.
1548 // Dealing with repetitions (EFFICIENCY ISSUE).
1549 _Iterator access_copy[num_threads+1];
1550 for (int i = 0; i <= num_threads; ++i)
1551 access_copy[i] = access[i];
1552 // Allocate and initialize the nodes
1553 #pragma omp parallel num_threads(num_threads)
1556 PAPI_register_thread();
1558 thread_index_t iam = omp_get_thread_num();
1559 _Iterator prev = access[iam];
1560 size_type i = beg_partition[iam];
1561 _Iterator it = prev;
// Skip leading elements equal to the last element of the previous
// partition (for unique containers they must not be re-created).
1565 // Dealing with repetitions (CORRECTNESS ISSUE).
1566 while (it!= access_copy[iam+1]
1567 and not strictly_less_or_less_equal(_KeyOfValue()(*prev),
1568 _KeyOfValue()(*it)))
1570 _GLIBCXX_PARALLEL_ASSERT(not base_type::
1571 _M_impl._M_key_compare
1572 (_KeyOfValue()(*it),
1573 _KeyOfValue()(*prev)));
1577 if (it != access_copy[iam+1]){
1578 r[i] = base_type::_M_create_node(*it);
1587 r[i] = base_type::_M_create_node(*prev);
// Main copy loop: create a node only when strictly greater than the
// previously kept element; equal elements are dropped (gaps).
1591 while (it != access_copy[iam+1])
1593 /***** Dealing with repetitions (CORRECTNESS ISSUE) *****/
1594 if (strictly_less_or_less_equal(_KeyOfValue()(*prev),
1595 _KeyOfValue()(*it)))
1597 r[i] = base_type::_M_create_node(*it)
1602 _GLIBCXX_PARALLEL_ASSERT(not base_type::
1603 _M_impl._M_key_compare
1604 (_KeyOfValue()(*it),
1605 _KeyOfValue()(*prev)));
1608 /***** Dealing with repetitions (EFFICIENCY ISSUE) *****/
1609 rank_shift[iam+1] = beg_partition[iam+1] - i;
1611 /***** Dealing with repetitions (EFFICIENCY ISSUE) *****/
1613 /* Guarantee that there are no empty intervals.
1614 - If an empty interval is found, is joined with the previous one
1615 (the rank_shift of the previous is augmented with all the new
// Sequential fix-up: accumulate shifts while partitions are non-empty...
1618 thread_index_t i = 1;
1619 while (i <= num_threads
1620 and rank_shift[i] != (beg_partition[i] - beg_partition[i-1]))
1622 rank_shift[i] += rank_shift[i-1];
// ...then fold every run of fully-empty partitions into the previous one.
1625 if (i <= num_threads)
1627 thread_index_t j = i - 1;
1632 rank_shift[j] += rank_shift[i];
1635 while (i <= num_threads
1636 and rank_shift[i] == (beg_partition[i]
1637 - beg_partition[i-1]));
1639 beg_partition[j] = beg_partition[i-1];
1640 access[j] = access[i-1];
1641 if (i > num_threads) break;
1644 // Initialize with the previous.
1645 rank_shift[j] = rank_shift[j-1];
// NOTE(review): gap-free variant — first pass marks unique elements in a
// `sums` array, a prefix sum yields compacted target positions, and a
// second parallel pass allocates nodes densely. Needs O(n) extra memory
// but produces no gaps. Many interior lines (sums writes, increments,
// braces) fall in extraction gaps; code kept byte-identical.
1652 /** @brief Allocation of an array of nodes and initialization of
1653 * their value fields from an input sequence.
1655 * The allocation and initialization is done in parallel. Besides,
1656 * the sequence is checked for uniqueness while copying the
1657 * elements. However, in contrast to
1658 * _M_sorted_bulk_allocation_and_initialization(), if there are
1659 * repetitions, no gaps within the partitions are created. To do
1660 * so efficiently, some extra memory is needed to compute a prefix
1662 * @pre The sequence is sorted.
1663 * @param access Array of iterators of size @c num_threads +
1664 * 1. Each position contains the first value in the subsequence to
1665 * be copied into the corresponding tree node.
1666 * @param beg_partition Array of positions of size @c num_threads
1667 * + 1. Each position contains the rank of the first element in
1668 * the subsequence from which to copy the data to initialize the
1670 * @param n Size of the sequence and the array of nodes (-1) to be
1672 * @param num_threads Number of threads and corresponding
1673 * subsequences in which the allocation and initialization work is
1674 * going to be shared
1675 * @param strictly_less_or_less_equal Comparator to deal
1676 * transparently with repetitions with respect to the uniqueness
1677 * of the wrapping container
1679 template<typename _Iterator, typename StrictlyLessOrLessEqual>
1681 _M_sorted_no_gapped_bulk_allocation_and_initialization
1682 (_Iterator* access, size_type* beg_partition, size_type& n,
1683 const thread_index_t num_threads,
1684 StrictlyLessOrLessEqual strictly_less_or_less_equal)
// Scratch array for the uniqueness flags / prefix sum.
1687 static_cast<size_type*>(::operator new (sizeof(size_type) * n));
1688 // Allocate and initialize the nodes
1691 #pragma omp parallel num_threads(num_threads)
1694 PAPI_register_thread();
1696 int iam = omp_get_thread_num();
1697 _Iterator prev = access[iam];
1698 size_type i = beg_partition[iam];
1699 _Iterator it = prev;
1704 // First iteration here, to update accessor in case was
1705 // equal to the last element of the previous range
1707 // Dealing with repetitions (CORRECTNESS ISSUE).
1708 if (strictly_less_or_less_equal(_KeyOfValue()(*prev),
1709 _KeyOfValue()(*it)))
1725 while (it!= access[iam+1])
1727 // Dealing with repetitions (CORRECTNESS ISSUE).
1728 if (strictly_less_or_less_equal(_KeyOfValue()(*prev),
1729 _KeyOfValue()(*it)))
// Sequential prefix sum over the flags (acknowledged TODO below).
1740 // Should be done in parallel.
1741 partial_sum(sums,sums + n, sums);
1744 _Rb_tree_node_ptr* r = static_cast<_Rb_tree_node_ptr*>(
1745 ::operator new(sizeof(_Rb_tree_node_ptr) * (n + 1)));
// Second pass: skip duplicates (where the running sum doesn't advance)
// and create nodes at compacted positions j.
1748 #pragma omp parallel num_threads(num_threads)
1751 PAPI_register_thread();
1753 int iam = omp_get_thread_num();
1754 _Iterator it = access[iam];
1755 size_type i = beg_partition[iam];
1757 size_type before = 0;
1763 beg_partition[iam] = j;
1764 while (it!= access[iam+1])
1766 while (it!= access[iam+1] and sums[i]!=before)
1772 if (it!= access[iam+1])
1774 r[j] = base_type::_M_create_node(*it);
1782 beg_partition[num_threads] = n;
1784 // Update beginning of partitions.
1785 ::operator delete(sums);
// NOTE(review): bulk construction proper — allocates nodes (with gap
// bookkeeping for duplicates), links them level-by-level in parallel via
// nodes_initializer, then wires header._M_left/_M_right/_M_parent and the
// node count. Branch conditions around the three linking loops fall in
// extraction gaps; code kept byte-identical.
1789 /** @brief Main bulk construction method: perform the actual
1790 initialization, allocation and finally node linking once the
1791 input sequence has already been preprocessed.
1792 * @param access Array of iterators of size @c num_threads +
1793 * 1. Each position contains the first value in the subsequence to
1794 * be copied into the corresponding tree node.
1795 * @param beg_partition Array of positions of size @c num_threads
1796 * + 1. Each position contains the rank of the first element in
1797 * the subsequence from which to copy the data to initialize the
1799 * @param n Size of the sequence and the array of nodes (-1) to be
1801 * @param num_threads Number of threads and corresponding
1802 * subsequences in which the work is going to be shared
1803 * @param strictly_less_or_less_equal Comparator to deal
1804 * transparently with repetitions with respect to the uniqueness
1805 * of the wrapping container
1807 template<typename _Iterator, typename StrictlyLessOrLessEqual>
1809 _M_sorted_bulk_construction(_Iterator* access, size_type* beg_partition,
1811 thread_index_t num_threads,
1812 StrictlyLessOrLessEqual
1813 strictly_less_or_less_equal)
1815 // Dealing with repetitions (EFFICIENCY ISSUE).
1816 size_type rank_shift[num_threads+1];
1818 _Rb_tree_node_ptr* r = _M_sorted_bulk_allocation_and_initialization
1819 (access, beg_partition, rank_shift, n, num_threads,
1820 strictly_less_or_less_equal);
1822 // Link the tree appropriately.
1823 // Dealing with repetitions (EFFICIENCY ISSUE).
1824 ranker_gaps rank(beg_partition, rank_shift, num_threads);
// Effective node count excludes the duplicates recorded in rank_shift.
1825 nodes_initializer<ranker_gaps>
1826 nodes_init(r, n - rank_shift[num_threads], num_threads, rank);
1827 size_type split = nodes_init.get_shifted_splitting_point();
1829 #pragma omp parallel num_threads(num_threads)
1832 PAPI_register_thread();
1834 int iam = omp_get_thread_num();
1835 size_type beg = beg_partition[iam];
1836 // Dealing with repetitions (EFFICIENCY ISSUE).
1837 size_type end = (beg_partition[iam+1]
1838 - (rank_shift[iam+1] - rank_shift[iam]));
// Per-thread linking: entirely before the split, entirely after it, or
// straddling it (the missing branch conditions fell in gaps).
1841 for (size_type i = beg; i < end; ++i)
1842 nodes_init.link_complete(r[i],iam);
1848 for (size_type i = beg; i < end; ++i)
1849 nodes_init.link_incomplete(r[i],iam);
1853 for (size_type i = beg; i < split; ++i)
1854 nodes_init.link_complete(r[i],iam);
1855 for (size_type i = split; i < end; ++i)
1856 nodes_init.link_incomplete(r[i],iam);
1860 // If the execution reaches this point, there has been no
1861 // exception, and so the structure can be initialized.
1863 // Join the tree laid on the array of ptrs with the header node.
1864 // Dealing with repetitions (EFFICIENCY ISSUE).
1865 base_type::_M_impl._M_node_count = n - rank_shift[num_threads];
1866 base_type::_M_impl._M_header._M_left = r[0];
// Rightmost node: walk back over trailing partitions that became all-gaps.
1867 thread_index_t with_element = num_threads;
1868 while ((beg_partition[with_element]
1869 - beg_partition[with_element-1])
1870 == (rank_shift[with_element] - rank_shift[with_element-1]))
1873 base_type::_M_impl._M_header._M_right =
1874 r[beg_partition[with_element]
1875 - (rank_shift[with_element] - rank_shift[with_element-1]) - 1];
1876 base_type::_M_impl._M_header._M_parent = nodes_init.get_root();
1877 nodes_init.get_root()->_M_parent= &base_type::_M_impl._M_header;
// Only the pointer array is freed here; the nodes now belong to the tree.
1879 ::operator delete(r);
// NOTE(review): bulk insertion proper — splits the existing tree by the
// partition pivots into per-thread subtrees (concat_problem tree), solves
// per-thread insertion problems with work stealing, then concatenates
// bottom-up and fixes header pointers and counts. This is the largest and
// most gap-damaged block of the extraction (many conditions, braces and
// increments are missing); code kept byte-identical throughout.
1883 /** @brief Main bulk insertion method: perform the actual
1884 initialization, allocation and finally insertion once the
1885 input sequence has already been preprocessed.
1886 * @param access Array of iterators of size @c num_threads +
1887 * 1. Each position contains the first value in the subsequence to
1888 * be copied into the corresponding tree node.
1889 * @param beg_partition Array of positions of size @c num_threads
1890 * + 1. Each position contains the rank of the first element in
1891 * the subsequence from which to copy the data to initialize the
1893 * @param k Size of the sequence to be inserted (including the
1894 * possible repeated elements among the sequence itself and
1895 * against those elements already in the tree)
1896 * @param num_threads Number of threads and corresponding
1897 * subsequences in which the work is going to be shared
1898 * @param strictly_less_or_less_equal Comparator to deal
1899 * transparently with repetitions with respect to the uniqueness
1900 * of the wrapping container
1902 template<typename _Iterator, typename StrictlyLessOrLessEqual>
1904 _M_sorted_bulk_insertion(_Iterator* access, size_type* beg_partition,
1905 size_type k, thread_index_t num_threads,
1906 StrictlyLessOrLessEqual
1907 strictly_less_or_less_equal)
1909 _GLIBCXX_PARALLEL_ASSERT((size_type)num_threads <= k);
1910 // num_thr-1 problems in the upper part of the tree
1911 // num_thr problems to further parallelize
// Per-thread count of elements found already present (subtracted from
// the node count at the end).
1912 std::vector<size_type> existing(num_threads,0);
1913 #if _GLIBCXX_TREE_INITIAL_SPLITTING
1914 /***** Dealing with repetitions (EFFICIENCY ISSUE) *****/
1915 size_type rank_shift[num_threads+1];
1917 // Need to create them dynamically because they are so erased
1918 concat_problem* conc[2*num_threads-1];
1920 _Rb_tree_node_ptr* r;
// Comparing the root's key with itself distinguishes unique containers
// (strict less: self-comparison false) from multi containers.
1921 /***** Dealing with repetitions (EFFICIENCY ISSUE) *****/
1922 if (not strictly_less_or_less_equal
1923 (base_type::_S_key(base_type::_M_root()),
1924 base_type::_S_key(base_type::_M_root()) ))
1927 // Set 1 and 2 could be done in parallel ...
1928 // 1. Construct the nodes with their corresponding data
1929 #if _GLIBCXX_TREE_INITIAL_SPLITTING
1930 r = _M_sorted_bulk_allocation_and_initialization
1931 (access, beg_partition, rank_shift, k, num_threads,
1932 strictly_less_or_less_equal);
1934 r = _M_sorted_no_gapped_bulk_allocation_and_initialization
1935 (access, beg_partition, k, num_threads,
1936 strictly_less_or_less_equal);
1941 // Not unique container.
1942 r = _M_unsorted_bulk_allocation_and_initialization
1943 (access, beg_partition, k, num_threads);
1944 #if _GLIBCXX_TREE_INITIAL_SPLITTING
1945 // Trivial initialization of rank_shift.
1946 for (int i=0; i <= num_threads; ++i)
1950 #if _GLIBCXX_TREE_INITIAL_SPLITTING
1951 // Calculate position of last element to be inserted: must be
1952 // done now, or otherwise becomes messy.
1955 repetitions (EFFICIENCY ISSUE) *****/
1956 size_type last = (beg_partition[num_threads]
1957 - (rank_shift[num_threads]
1958 - rank_shift[num_threads - 1]));
1960 //2. Split the tree according to access in num_threads parts
1961 //Initialize upper concat_problems
1962 //Allocate them dynamically because they are afterwards so erased
1963 for (int i=0; i < (2*num_threads-1); ++i)
1964 conc[i] = new concat_problem ();
1965 concat_problem* root_problem =
1966 _M_bulk_insertion_initialize_upper_problems(conc, 0,
1969 // The first position of access and the last are ignored, so we
1970 // have exactly num_threads subtrees.
// Nested parallelism is needed by the recursive tree split; the previous
// setting is restored right after.
1971 bool before = omp_get_nested();
1972 omp_set_nested(true);
1974 _M_bulk_insertion_split_tree_by_pivot(
1975 static_cast<_Rb_tree_node_ptr>(base_type::_M_root()), r,
1976 access, beg_partition, rank_shift, 0, num_threads - 1, conc,
1977 num_threads, strictly_less_or_less_equal);
1979 omp_set_nested(before);
1981 // Construct upper tree with the first elements of ranges if
1982 // they are NULL We cannot do this by default because they could
1983 // be repeated and would not be checked.
1985 for (int pos = 1; pos < num_threads; ++pos)
// Sanity checks: max(left subtree) < separator < min(right subtree).
1987 _GLIBCXX_PARALLEL_ASSERT(
1988 conc[(pos-1)*2]->t == NULL or conc[pos*2-1]->t == NULL
1989 or strictly_less_or_less_equal
1990 (base_type::_S_key(base_type::_S_maximum(conc[(pos-1)*2]->t)),
1991 base_type::_S_key(conc[pos*2-1]->t)));
1992 _GLIBCXX_PARALLEL_ASSERT(
1993 conc[pos*2]->t == NULL or conc[pos*2-1]->t == NULL
1994 or strictly_less_or_less_equal(
1995 base_type::_S_key(conc[pos*2-1]->t),
1996 base_type::_S_key(base_type::_S_minimum(conc[pos*2]->t))));
1997 /***** Dealing with repetitions (CORRECTNESS ISSUE) *****/
1999 // The first element of the range is the root.
2000 if (conc[pos*2-1]->t == NULL
2001 or (not(strictly_less_or_less_equal(
2002 base_type::_S_key(static_cast<_Rb_tree_node_ptr>
2003 (conc[pos*2-1]->t)),
2004 _KeyOfValue()(*access[pos])))))
2006 // There was not a candidate element
2008 // Exists an initialized position in the array which
2009 // corresponds to conc[pos*2-1]->t */
// Missing separator: promote the range's first node to be the
// separator of the upper tree, or destroy it if it duplicates one.
2010 if (conc[pos*2-1]->t == NULL)
2012 size_t np = beg_partition[pos];
2013 _GLIBCXX_PARALLEL_ASSERT(
2014 conc[(pos-1)*2]->t == NULL
2015 or strictly_less_or_less_equal
2016 (base_type::_S_key(base_type::
2017 _S_maximum(conc[(pos-1)*2]->t)),
2018 base_type::_S_key(r[np])));
2019 _GLIBCXX_PARALLEL_ASSERT(
2020 conc[pos*2]->t == NULL
2021 or strictly_less_or_less_equal(
2022 base_type::_S_key(r[np]),
2023 base_type::_S_key(base_type::
2024 _S_minimum(conc[pos*2]->t))));
2025 conc[pos*2-1]->t = r[np];
2026 r[np]->_M_color = std::_S_black;
2027 ++base_type::_M_impl._M_node_count;
2030 base_type::_M_destroy_node(r[beg_partition[pos]]);
2032 ++(beg_partition[pos]);
2035 _GLIBCXX_PARALLEL_ASSERT(
2036 conc[(pos-1)*2]->t == NULL
2037 or conc[(pos-1)*2]->t->_M_color == std::_S_black);
2038 /***** Dealing with repetitions (EFFICIENCY ISSUE) *****/
2039 rank_shift[pos] += r_s;
2041 /***** Dealing with repetitions (EFFICIENCY ISSUE) *****/
2042 rank_shift[num_threads] += r_s;
// Non-splitting build: a single root problem on the stack instead of
// the dynamically allocated problem tree.
2044 concat_problem root_problem_on_stack(
2045 static_cast<_Rb_tree_node_ptr>(base_type::_M_root()),
2046 black_height(static_cast<_Rb_tree_node_ptr>(base_type::_M_root())),
2048 concat_problem * root_problem = &root_problem_on_stack;
2052 // 3. Split the range according to tree and create
2053 // 3. insertion/concatenation problems to be solved in parallel
// Dynamic balancing bounds problem size so stealable work exists; the
// fallback makes min_problem so large nothing is further split.
2054 #if _GLIBCXX_TREE_DYNAMIC_BALANCING
2055 size_type min_problem = (k/num_threads) / (log2(k/num_threads + 1)+1);
2057 size_type min_problem = base_type::size() + k;
2060 RestrictedBoundedConcurrentQueue<insertion_problem>*
2061 ins_problems[num_threads];
2063 #pragma omp parallel num_threads(num_threads)
2065 int num_thread = omp_get_thread_num();
2066 ins_problems[num_thread] =
2067 new RestrictedBoundedConcurrentQueue<insertion_problem>
2068 (2 * (log2(base_type::size()) + 1));
2069 #if _GLIBCXX_TREE_INITIAL_SPLITTING
2070 /***** Dealing with repetitions (EFFICIENCY ISSUE) *****/
2071 size_type end_k_thread = (beg_partition[num_thread+1]
2072 - (rank_shift[num_thread+1]
2073 - rank_shift[num_thread]));
2074 ins_problems[num_thread]->push_front(
2075 insertion_problem(beg_partition[num_thread], end_k_thread,
2076 num_thread, conc[num_thread*2]));
2078 // size_type end_k_thread = beg_partition[num_thread+1];
2080 insertion_problem ip_to_solve;
2083 #if _GLIBCXX_TREE_INITIAL_SPLITTING
2087 ins_problems[num_thread]->push_front(insertion_problem(
// Work loop: drain own deque from the front...
2094 // First do own work.
2095 while (ins_problems[num_thread]->pop_front(ip_to_solve))
2097 _GLIBCXX_PARALLEL_ASSERT(ip_to_solve.pos_beg
2098 <= ip_to_solve.pos_end);
2099 _M_bulk_insertion_split_sequence(
2100 r, ins_problems[num_thread], ip_to_solve,
2101 existing[num_thread], min_problem,
2102 strictly_less_or_less_equal);
// ...then steal from the back of other threads' deques.
2108 //Then, try to steal from others (and become own).
2109 for (int i=1; i<num_threads; ++i)
2111 if (ins_problems[(num_thread + i)
2112 % num_threads]->pop_back(ip_to_solve))
2115 _M_bulk_insertion_split_sequence(
2116 r, ins_problems[num_thread], ip_to_solve,
2117 existing[num_thread], min_problem,
2118 strictly_less_or_less_equal);
2125 // Update root and sizes.
2126 base_type::_M_root() = root_problem->t;
2127 root_problem->t->_M_parent = &(base_type::_M_impl._M_header);
2128 /***** Dealing with repetitions (EFFICIENCY ISSUE) *****/
2130 // Add the k elements that wanted to be inserted, minus the ones
2131 // that were repeated.
2132 #if _GLIBCXX_TREE_INITIAL_SPLITTING
2133 base_type::_M_impl._M_node_count += (k - (rank_shift[num_threads]));
2135 base_type::_M_impl._M_node_count += k;
2137 // Also then, take out the ones that were already existing in the tree.
2138 for (int i = 0; i< num_threads; ++i)
2139 base_type::_M_impl._M_node_count -= existing[i];
// Leftmost/rightmost maintenance: unique containers can index into r[]
// directly; multi containers must re-scan via _S_minimum/_S_maximum.
2140 // Update leftmost and rightmost.
2141 /***** Dealing with repetitions (EFFICIENCY ISSUE) *****/
2142 if (not strictly_less_or_less_equal(
2143 base_type::_S_key(base_type::_M_root()),
2144 base_type::_S_key(base_type::_M_root())))
2146 // Unique container.
2147 if (base_type::_M_impl._M_key_compare(
2148 _KeyOfValue()(*(access[0])),
2149 base_type::_S_key(base_type::_M_leftmost())))
2150 base_type::_M_leftmost() = r[0];
2151 if (base_type::_M_impl._M_key_compare(
2152 base_type::_S_key(base_type::_M_rightmost()),
2153 _KeyOfValue()(*(--access[num_threads]))))
2154 base_type::_M_rightmost() = r[last - 1];
2158 if (strictly_less_or_less_equal(_KeyOfValue()(*(access[0])),
2159 base_type::_S_key(base_type::
2161 base_type::_M_leftmost() =
2162 base_type::_S_minimum(base_type::_M_root());
2163 if (strictly_less_or_less_equal(
2164 base_type::_S_key(base_type::_M_rightmost()),
2165 _KeyOfValue()(*(--access[num_threads]))))
2166 base_type::_M_rightmost() =
2167 base_type::_S_maximum(base_type::_M_root());
// Cleanup: root problem (splitting path only), per-thread deques, and
// the node-pointer scratch array.
2170 #if _GLIBCXX_TREE_INITIAL_SPLITTING
2171 // Delete root problem
2172 delete root_problem;
2176 for (int pos = 0; pos < num_threads; ++pos)
2177 delete ins_problems[pos];
2179 // Delete array of pointers
2180 ::operator delete(r);
2184 /** @brief Divide a tree according to the splitter elements of a
2187 * The tree of the initial recursive call is divided in exactly
2188 * num_threads partitions, some of which may be empty. Besides,
2189 * some nodes may be extracted from it to afterwards concatenate
2190 * the subtrees resulting from inserting the elements into it.
2191 * This is done sequentially. It could be done in parallel but the
2192 * performance is much worse.
2193 * @param t Root of the tree to be split
2194 * @param r Array of nodes to be inserted into the tree (here only
2195 * used to look up its elements)
2196 * @param access Array of iterators of size @c num_threads +
2197 * 1. Each position contains the first value in the subsequence
2198 * that has been copied into the corresponding tree node.
2199 * @param beg_partition Array of positions of size @c num_threads
2200 * + 1. Each position contains the rank of the first element in
2201 * the array of nodes to be inserted.
2202 * @param rank_shift Array of size @c num_threads + 1 containing
2203 * the number of accumulated gaps at the beginning of each
2205 * @param pos_beg First position in the access array to be
2206 * considered to split @c t
2207 * @param pos_end Last position (included) in the access array to
2208 * be considered to split @c t
2209 * @param conc Array of concatenation problems to be initialized
2210 * @param num_threads Number of threads and corresponding
2211 * subsequences in which the original sequence has been
2213 * @param strictly_less_or_less_equal Comparator to deal
2214 * transparently with repetitions with respect to the uniqueness
2215 * of the wrapping container
// NOTE(review): this listing is line-sampled (embedded original line numbers
// are not contiguous); statements between sampled lines are missing, so the
// code below must not be edited without consulting the full original file.
2217 template<typename _Iterator, typename StrictlyLessOrLessEqual>
2219 _M_bulk_insertion_split_tree_by_pivot(_Rb_tree_node_ptr t,
2220 _Rb_tree_node_ptr* r,
2222 size_type* beg_partition,
2223 size_type* rank_shift,
2224 const size_type pos_beg,
2225 const size_type pos_end,
2226 concat_problem** conc,
2227 const thread_index_t num_threads,
2228 StrictlyLessOrLessEqual
2229 strictly_less_or_less_equal)
// Base case: only one partition remains, so the whole subtree t becomes
// the result of that partition's concatenation problem.
2231 if (pos_beg == pos_end)
2233 //Elements are in [pos_beg, pos_end]
2234 conc[pos_beg*2]->t = t;
2235 conc[pos_beg*2]->black_h = black_height(t);
2236 force_black_root (conc[pos_beg*2]->t, conc[pos_beg*2]->black_h);
// Empty subtree: clear every concatenation slot in [pos_beg, pos_end]
// (both the per-partition slots and the intermediate-node slots).
2241 for (size_type i = pos_beg; i < pos_end; ++i)
2243 conc[i*2]->t = NULL;
2244 conc[i*2]->black_h = 0;
2245 conc[i*2+1]->t = NULL;
2247 conc[pos_end*2]->t = NULL;
2248 conc[pos_end*2]->black_h = 0;
2252 // Return the last pos, in which key >= (pos-1).
2253 // Search in the range [pos_beg, pos_end]
// Binary-search the splitter (access) array for the pivot position of
// the root key of t.
2254 size_type pos = (std::upper_bound(access + pos_beg,
2255 access + pos_end + 1,
2256 base_type::_S_key(t),
2257 compare_value_key<_Iterator,
2258 _Compare>(base_type::
2259 _M_impl._M_key_compare))
2264 _GLIBCXX_PARALLEL_ASSERT(
2266 or not base_type::_M_impl._M_key_compare(
2267 base_type::_S_key(t), _KeyOfValue()(*access[pos])));
// Results of splitting the left (ll/lr) and right (rl/rr) subtrees of t,
// with their black heights.
2270 _Rb_tree_node_ptr ll, lr;
2271 int black_h_ll, black_h_lr;
2272 _Rb_tree_node_ptr rl, rr;
2273 int black_h_rl, black_h_rr;
// prev is the last node of the preceding partition; rank_shift accounts
// for the accumulated gaps documented for this routine above.
2277 _Rb_tree_node_ptr prev = (r[beg_partition[pos] - 1
2279 - rank_shift[pos - 1])]);
2281 _GLIBCXX_PARALLEL_ASSERT(
2282 strictly_less_or_less_equal(base_type::_S_key(prev),
2283 _KeyOfValue()(*access[pos])));
// Split t's left subtree around access[pos]; the node (if any) falling
// strictly between prev and access[pos] becomes conc[pos*2-1]->t.
2285 split(static_cast<_Rb_tree_node_ptr>(t->_M_left),
2286 static_cast<const key_type&>(_KeyOfValue()(*access[pos])),
2287 static_cast<const key_type&>(base_type::_S_key(prev)),
2288 conc[pos*2-1]->t, ll, lr, black_h_ll, black_h_lr,
2289 strictly_less_or_less_equal);
// Recurse on the left part for partitions [pos_beg, pos-1].
2291 _M_bulk_insertion_split_tree_by_pivot(
2292 ll, r, access, beg_partition, rank_shift, pos_beg, pos-1,
2293 conc, num_threads, strictly_less_or_less_equal);
2297 lr = static_cast<_Rb_tree_node_ptr>(t->_M_left);
2298 black_h_lr = black_height (lr);
2299 force_black_root (lr, black_h_lr);
2304 _Rb_tree_node_ptr prev = (r[beg_partition[pos+1] - 1
2305 - (rank_shift[pos+1]
2306 - rank_shift[pos])]);
2308 _GLIBCXX_PARALLEL_ASSERT(
2309 not base_type::_M_impl._M_key_compare(
2310 _KeyOfValue()(*access[pos+1]), base_type::_S_key(prev)));
2311 _GLIBCXX_PARALLEL_ASSERT(
2312 strictly_less_or_less_equal(base_type::_S_key(prev),
2313 _KeyOfValue()(*access[pos+1])));
// Symmetric split of t's right subtree around access[pos+1].
2315 split(static_cast<_Rb_tree_node_ptr>(t->_M_right),
2316 static_cast<const key_type&>(_KeyOfValue()(*access[pos+1])),
2317 static_cast<const key_type&>(base_type::_S_key(prev)),
2318 conc[pos*2+1]->t, rl, rr, black_h_rl, black_h_rr,
2319 strictly_less_or_less_equal);
// Recurse on the right part for partitions [pos+1, pos_end].
2321 _M_bulk_insertion_split_tree_by_pivot(
2322 rr, r, access, beg_partition, rank_shift, pos+1, pos_end,
2323 conc,num_threads, strictly_less_or_less_equal);
2327 rl = static_cast<_Rb_tree_node_ptr>(t->_M_right);
2328 black_h_rl = black_height (rl);
2329 force_black_root (rl, black_h_rl);
2332 // When key(t) is equal to key(access[pos]) and no other key in
2333 // the left tree satisfies the criteria to be conc[pos*2-1]->t,
2334 // key(t) must be assigned to it to avoid repetitions.
2335 // Therefore, we do not have a root parameter for the
2336 // concatenate function and a new concatenate function must be
2338 if (pos != pos_beg and conc[pos*2-1]->t == NULL
2339 and not strictly_less_or_less_equal(_KeyOfValue()(*access[pos]),
2340 base_type::_S_key(t)))
2342 conc[pos*2-1]->t = t;
// Join the two middle pieces (lr, rl) into partition pos's slot.
2345 concatenate(t, lr, rl, black_h_lr, black_h_rl,
2346 conc[pos*2]->t, conc[pos*2]->black_h);
2349 /** @brief Divide the insertion problem until a leaf is reached or
2350 * the problem is small.
2352 * During the recursion, the right subproblem is queued, so that
2353 * it can be handled by any thread. The left subproblem is
2354 * divided recursively, and finally, solved right away
2356 * @param r Array of nodes containing the nodes to added into the tree
2357 * @param ins_problems Pointer to a queue of insertion
2358 * problems. The calling thread owns this queue, i. e. it is the
2359 * only one to push elements, but other threads could pop elements
2360 * from it in other methods.
2361 * @param ip Current insertion problem to be solved
2362 * @param existing Number of existing elements found when solving
2363 * the insertion problem (out)
2364 * @param min_problem Threshold size on the size of the insertion
2365 * problem in which to stop recursion
2366 * @param strictly_less_or_less_equal Comparator to deal
2367 * transparently with repetitions with respect to the uniqueness
2368 * of the wrapping container
// NOTE(review): line-sampled listing — intervening original lines (braces,
// else-arms) are missing; do not edit without the full original file.
2370 template<typename StrictlyLessOrLessEqual>
2372 _M_bulk_insertion_split_sequence(
2373 _Rb_tree_node_ptr* r,
2374 RestrictedBoundedConcurrentQueue<insertion_problem>* ins_problems,
2375 insertion_problem& ip, size_type& existing,
2376 const size_type min_problem,
2377 StrictlyLessOrLessEqual strictly_less_or_less_equal)
2379 _GLIBCXX_PARALLEL_ASSERT(ip.t == ip.conc->t);
// Base case: a leaf (NULL subtree) or a problem below the recursion
// threshold is solved sequentially.
2380 if (ip.t == NULL or (ip.pos_end- ip.pos_beg) <= min_problem)
2382 // SOLVE PROBLEM SEQUENTIALLY
2383 // Start solving the problem.
2384 _GLIBCXX_PARALLEL_ASSERT(ip.pos_beg <= ip.pos_end);
2385 _M_bulk_insertion_merge_concatenate(r, ip, existing,
2386 strictly_less_or_less_equal);
// Partition the node array around the key of the current root; for
// unique containers a duplicate of the pivot key is dropped (counted
// in `existing` by divide()).
2390 size_type pos_beg_right;
2391 size_type pos_end_left = divide(r, ip.pos_beg, ip.pos_end,
2392 base_type::_S_key(ip.t), pos_beg_right,
2393 existing, strictly_less_or_less_equal);
// Children of a black root are one black level lower; children of a
// red root keep the same black height.
2395 int black_h_l, black_h_r;
2396 if (ip.t->_M_color == std::_S_black)
2397 black_h_l = black_h_r = ip.conc->black_h - 1;
2399 black_h_l = black_h_r = ip.conc->black_h;
2401 // Right problem into the queue.
2402 ip.conc->right_problem = new concat_problem(
2403 static_cast<_Rb_tree_node_ptr>(ip.t->_M_right), black_h_r, ip.conc);
2404 ip.conc->left_problem = new concat_problem(
2405 static_cast<_Rb_tree_node_ptr>(ip.t->_M_left), black_h_l, ip.conc);
// Publish the right subproblem so any (possibly stealing) thread can
// take it, per this queue's ownership contract documented above.
2407 ins_problems->push_front(insertion_problem(pos_beg_right, ip.pos_end,
2409 ip.conc->right_problem));
2411 // Solve left problem.
2412 insertion_problem ip_left(ip.pos_beg, pos_end_left, ip.array_partition,
2413 ip.conc->left_problem);
2414 _M_bulk_insertion_split_sequence(r, ins_problems, ip_left, existing,
2416 strictly_less_or_less_equal);
2420 /** @brief Insert a sequence of elements into a tree using a
2421 * divide-and-conquer scheme.
2423 * The problem is solved recursively and sequentially dividing the
2424 * sequence to be inserted according to the root of the tree. This
2425 * is done until a leaf is reached or the proportion of elements
2426 * to be inserted is small. Finally, the two resulting trees are
2428 * @param r_array Array of nodes containing the nodes to be added
2429 * into the tree (among others)
2430 * @param t Root of the tree
2431 * @param pos_beg Position of the first node in the array of
2432 * nodes to be inserted into the tree
2433 * @param pos_end Position of the first node in the array of
2434 * nodes that will not be inserted into the tree
2435 * @param existing Number of existing elements found while
2436 * inserting the range [@c pos_beg, @c pos_end) (out)
2437 * @param black_h Height of the tree @c t and of the resulting
2438 * tree after the recursive calls (in and out)
2439 * @param strictly_less_or_less_equal Comparator to deal
2440 * transparently with repetitions with respect to the uniqueness
2441 * of the wrapping container
2442 * @return Resulting tree after the elements have been inserted
// NOTE(review): line-sampled listing — some statements/braces between the
// numbered lines are missing from this view.
2444 template<typename StrictlyLessOrLessEqual>
2446 _M_bulk_insertion_merge(_Rb_tree_node_ptr* r_array, _Rb_tree_node_ptr t,
2447 const size_type pos_beg,
2448 const size_type pos_end,
2449 size_type& existing, int& black_h,
2450 StrictlyLessOrLessEqual
2451 strictly_less_or_less_equal)
2456 _GLIBCXX_PARALLEL_ASSERT(pos_beg<=pos_end);
2458 // Leaf: a tree with the range must be constructed. Returns its
2459 // height in black nodes and its root (in ip.t) If there is
2460 // nothing to insert, we still need the height for balancing.
2463 if (pos_end == pos_beg)
2465 t = simple_tree_construct(r_array,pos_beg, pos_end, black_h);
2466 _GLIBCXX_PARALLEL_ASSERT(rb_verify_tree(t,count));
2469 if (pos_end == pos_beg)
// Few elements relative to tree height: plain one-by-one insertion is
// cheaper than splitting the tree.
2471 if ((pos_end - pos_beg) <= (size_type)(black_h))
2473 // Exponential size tree with respect the number of elements
2475 for (size_type p = pos_beg; p < pos_end; ++p)
2476 t = _M_insert_local(t, r_array[p], existing, black_h,
2477 strictly_less_or_less_equal);
2478 _GLIBCXX_PARALLEL_ASSERT(rb_verify_tree(t,count));
// Recursive case: partition the node range by the root key and recurse
// into both subtrees, then concatenate.
2482 size_type pos_beg_right;
2483 size_type pos_end_left = divide(r_array, pos_beg, pos_end,
2484 base_type::_S_key(t), pos_beg_right,
2485 existing, strictly_less_or_less_equal);
// Black heights of the children (see the same pattern in
// _M_bulk_insertion_split_sequence): one less under a black root.
2487 int black_h_l, black_h_r;
2488 if (t->_M_color == std::_S_black)
2489 black_h_l = black_h_r = black_h - 1;
2491 black_h_l = black_h_r = black_h;
2493 force_black_root(t->_M_left, black_h_l);
2495 _Rb_tree_node_ptr l = _M_bulk_insertion_merge(
2496 r_array, static_cast<_Rb_tree_node_ptr>(t->_M_left), pos_beg,
2497 pos_end_left, existing, black_h_l, strictly_less_or_less_equal);
2499 force_black_root(t->_M_right, black_h_r);
2501 _Rb_tree_node_ptr r = _M_bulk_insertion_merge(
2502 r_array, static_cast<_Rb_tree_node_ptr>(t->_M_right), pos_beg_right,
2503 pos_end, existing, black_h_r, strictly_less_or_less_equal);
// Re-join the two enlarged subtrees around the old root t; result and
// its black height are returned via t / black_h.
2505 concatenate(t, l, r, black_h_l, black_h_r, t, black_h);
2510 /** @brief Solve a given insertion problem and all the parent
2511 * concatenation problem that are ready to be solved.
2513 * First, solve an insertion problem.
2515 * Then, check if it is possible to solve the parent
2516 * concatenation problem. If this is the case, solve it and go
2517 * up recursively, as far as possible. Quit otherwise.
2519 * @param r Array of nodes containing the nodes to be added into
2520 * the tree (among others)
2521 * @param ip Insertion problem to solve initially.
2522 * @param existing Number of existing elements found while
2523 * inserting the range defined by the insertion problem (out)
2524 * @param strictly_less_or_less_equal Comparator to deal
2525 * transparently with repetitions with respect to the uniqueness
2526 * of the wrapping container
// NOTE(review): line-sampled listing — braces/else-arms between numbered
// lines are missing from this view.
2528 template<typename StrictlyLessOrLessEqual>
2530 _M_bulk_insertion_merge_concatenate(_Rb_tree_node_ptr* r,
2531 insertion_problem& ip,
2532 size_type& existing,
2533 StrictlyLessOrLessEqual
2534 strictly_less_or_less_equal)
2536 concat_problem* conc = ip.conc;
2537 _GLIBCXX_PARALLEL_ASSERT(ip.pos_beg <= ip.pos_end);
// Step 1: solve the insertion problem itself; the resulting subtree and
// black height are stored into the owning concatenation problem.
2539 conc->t = _M_bulk_insertion_merge(r, ip.t, ip.pos_beg, ip.pos_end,
2540 existing, conc->black_h,
2541 strictly_less_or_less_equal);
2542 _GLIBCXX_PARALLEL_ASSERT(conc->t == NULL
2543 or conc->t->_M_color == std::_S_black);
// Step 2: climb the concatenation-problem tree. The CAS ensures that of
// the two children finishing a parent, exactly one thread performs the
// parent's concatenation (the second to arrive wins the CAS race
// ordering encoded in READY_NO/READY_YES — see concat_problem).
2545 bool is_ready = true;
2546 while (conc->par_problem != NULL and is_ready)
2548 // Pre: exists left and right problem, so there is not a deadlock
2549 if (compare_and_swap(&conc->par_problem->is_ready,
2550 concat_problem::READY_NO,
2551 concat_problem::READY_YES))
2556 conc = conc->par_problem;
2557 _GLIBCXX_PARALLEL_ASSERT(conc->left_problem!=NULL
2558 and conc->right_problem!=NULL);
2559 _GLIBCXX_PARALLEL_ASSERT(conc->left_problem->black_h >=0
2560 and conc->right_problem->black_h>=0);
2561 // Finished working with the problems.
2562 concatenate(conc->t, conc->left_problem->t,
2563 conc->right_problem->t,
2564 conc->left_problem->black_h,
2565 conc->right_problem->black_h,
2566 conc->t, conc->black_h);
// The solved children are no longer needed once merged into the parent.
2568 delete conc->left_problem;
2569 delete conc->right_problem;
2574 // Begin of sorting, searching and related comparison-based helper methods.
2576 /** @brief Check whether a random-access sequence is sorted, and
2577 * calculate its size.
2579 * @param __first Begin iterator of sequence.
2580 * @param __last End iterator of sequence.
2581 * @param dist Size of the sequence (out)
2582 * @return sequence is sorted. */
// Random-access overload: size is O(1) via iterator subtraction; sortedness
// is checked by comparing each element with its predecessor (geq wraps the
// tree's key comparator). NOTE(review): listing is line-sampled.
2583 template<typename _RandomAccessIterator>
2585 is_sorted_distance(const _RandomAccessIterator __first,
2586 const _RandomAccessIterator __last,
2588 std::random_access_iterator_tag) const
2590 gr_or_eq<_Compare, _RandomAccessIterator>
2591 geq(base_type::_M_impl._M_key_compare);
2592 dist = __last - __first;
// equal(first+1, last, first, geq): true iff no element is strictly
// smaller than its predecessor.
2595 return equal(__first + 1, __last, __first, geq);
2598 /** @brief Check whether an input sequence is sorted, and
2599 * calculate its size.
2601 * The list partitioning tool is used so that all the work is
2602 * done in only one traversal.
2603 * @param __first Begin iterator of sequence.
2604 * @param __last End iterator of sequence.
2605 * @param dist Size of the sequence (out)
2606 * @return sequence is sorted. */
// Input-iterator overload: single forward traversal computes the distance
// and detects the first out-of-order pair. NOTE(review): the listing is
// line-sampled; the loop bodies and the tail of this function (second
// while-loop body, return) are missing from this view.
2607 template<typename _InputIterator>
2609 is_sorted_distance(const _InputIterator __first,
2610 const _InputIterator __last, size_type& dist,
2611 std::input_iterator_tag) const
2614 bool is_sorted = true;
2615 _InputIterator it = __first;
2616 _InputIterator prev = it++;
2617 while (it != __last)
// *it < *prev means the sequence is not sorted at this position.
2620 if (base_type::_M_impl._M_key_compare(_KeyOfValue()(*it),
2621 _KeyOfValue()(*prev)))
// Presumably this second loop just finishes counting dist once
// sortedness is decided — TODO confirm against the full source.
2630 while (it != __last)
2638 /** @brief Check whether a random-access sequence is sorted,
2639 * calculate its size, and obtain intermediate accessors to the
2640 * sequence to ease parallelization.
2642 * @param __first Begin iterator of sequence.
2643 * @param __last End iterator of sequence.
2644 * @param access Array of size @c num_pieces + 1 that defines @c
2645 * num_pieces subsequences of the original sequence (out). Each
2646 * position @c i will contain an iterator to the first element in
2647 * the subsequence @c i.
2648 * @param beg_partition Array of size @c num_pieces + 1 that
2649 * defines @c num_pieces subsequences of the original sequence
2650 * (out). Each position @c i will contain the rank of the first
2651 * element in the subsequence @c i.
2652 * @param dist Size of the sequence (out)
2653 * @param num_pieces Number of pieces to generate.
2654 * @return Sequence is sorted. */
// Random-access overload: delegates the sorted/size check, then builds the
// accessor and rank arrays with range_accessors. NOTE(review): listing is
// line-sampled; the branch taken when dist < num_pieces is incomplete here.
2655 template<typename _RandomAccessIterator>
2657 is_sorted_distance_accessors(const _RandomAccessIterator __first,
2658 const _RandomAccessIterator __last,
2659 _RandomAccessIterator* access,
2660 size_type* beg_partition, size_type& dist,
2661 thread_index_t& num_pieces,
2662 std::random_access_iterator_tag) const
2664 bool is_sorted = is_sorted_distance(__first, __last, dist,
2665 std::__iterator_category(__first));
// Fewer elements than pieces: presumably num_pieces is reduced here —
// the adjustment lines are missing from this sampled view.
2666 if (dist < (unsigned int) num_pieces)
2669 // Do it opposite way to use accessors in equal function???
2670 range_accessors(__first,__last, access, beg_partition, dist,
2671 num_pieces, std::__iterator_category(__first));
2675 /** @brief Check whether an input sequence is sorted, calculate
2676 * its size, and obtain intermediate accessors to the sequence to
2677 * ease parallelization.
2679 * The list partitioning tool is used so that all the work is
2680 * done in only one traversal.
2681 * @param __first Begin iterator of sequence.
2682 * @param __last End iterator of sequence.
2683 * @param access Array of size @c num_pieces + 1 that defines @c
2684 * num_pieces subsequences of the original sequence (out). Each
2685 * position @c i will contain an iterator to the first element in
2686 * the subsequence @c i.
2687 * @param beg_partition Array of size @c num_pieces + 1 that
2688 * defines @c num_pieces subsequences of the original sequence
2689 * (out). Each position @c i will contain the rank of the first
2690 * element in the subsequence @c i.
2691 * @param dist Size of the sequence (out)
2692 * @param num_pieces Number of pieces to generate.
2693 * @return Sequence is sorted. */
// Input-iterator overload: list_partition does the splitting, counting and
// sortedness check in one traversal (sorted functor accumulates the check).
// NOTE(review): listing is line-sampled.
2694 template<typename _InputIterator>
2696 is_sorted_distance_accessors(const _InputIterator __first,
2697 const _InputIterator __last,
2698 _InputIterator* access,
2699 size_type* beg_partition, size_type& dist,
2700 thread_index_t& num_pieces,
2701 std::input_iterator_tag) const
2703 is_sorted_functor<_InputIterator, _Compare>
2704 sorted(__first, base_type::_M_impl._M_key_compare);
// beg_partition[1..] initially receives per-piece sizes (hence the +1).
2705 dist = list_partition(__first, __last, access, (beg_partition+1),
2706 num_pieces, sorted, 0);
2708 // Calculate the rank of the beginning each partition from the
2709 // sequence sizes (what is stored at this point in beg_partition
// Prefix-sum the sizes in place to turn them into start ranks.
2711 beg_partition[0] = 0;
2712 for (int i = 0; i < num_pieces; ++i)
2713 beg_partition[i+1] += beg_partition[i];
2715 return sorted.is_sorted();
2718 /** @brief Make a full copy of the elements of a sequence
2720 * The uninitialized_copy method from the STL is called in parallel
2721 * using the access array to point to the beginning of each
2723 * @param access Array of size @c num_threads + 1 that defines @c
2724 * num_threads subsequences. Each position @c i contains an
2725 * iterator to the first element in the subsequence @c i.
2726 * @param beg_partition Array of size @c num_threads + 1 that
2727 * defines @c num_threads subsequences. Each position @c i
2728 * contains the rank of the first element in the subsequence @c
2730 * @param out Begin iterator of output sequence.
2731 * @param num_threads Number of threads to use. */
// Parallel full copy: each OpenMP thread copies its subsequence
// [access[iam], access[iam+1]) into out + beg_partition[iam].
// NOTE(review): listing is line-sampled.
2732 template<typename _InputIterator, typename _OutputIterator>
2734 uninitialized_copy_from_accessors(_InputIterator* access,
2735 size_type* beg_partition,
2736 _OutputIterator out,
2737 const thread_index_t num_threads)
2739 #pragma omp parallel num_threads(num_threads)
2741 int iam = omp_get_thread_num();
2742 uninitialized_copy(access[iam], access[iam + 1],
2743 out + beg_partition[iam]);
2747 /** @brief Make a copy of the pointers of the elements of a sequence
2748 * @param access Array of size @c num_threads + 1 that defines @c
2749 * num_threads subsequences. Each position @c i contains an
2750 * iterator to the first element in the subsequence @c i.
2751 * @param beg_partition Array of size @c num_threads + 1 that
2752 * defines @c num_threads subsequences. Each position @c i
2753 * contains the rank of the first element in the subsequence @c
2755 * @param out Begin iterator of output sequence.
2756 * @param num_threads Number of threads to use. */
// Parallel pointer copy: like uninitialized_copy_from_accessors but copies
// the element pointers rather than the elements. NOTE(review): listing is
// line-sampled; the loop body (the actual assignment) is missing here.
2757 template<typename _InputIterator, typename _OutputIterator>
2759 uninitialized_ptr_copy_from_accessors(_InputIterator* access,
2760 size_type* beg_partition,
2761 _OutputIterator out,
2762 const thread_index_t num_threads)
2764 #pragma omp parallel num_threads(num_threads)
2766 int iam = omp_get_thread_num();
2767 _OutputIterator itout = out + beg_partition[iam];
2768 for (_InputIterator it = access[iam]; it != access[iam+1]; ++it)
2776 /** @brief Split a sorted node array in two parts according to a key.
2778 * For unique containers, if the splitting key is in the array of
2779 * nodes, the corresponding node is erased.
2780 * @param r Array of nodes containing the nodes to split (among others)
2781 * @param pos_beg Position of the first node in the array of
2782 * nodes to be considered
2783 * @param pos_end Position of the first node in the array of
2784 * nodes to be not considered
2785 * @param key Splitting key
2786 * @param pos_beg_right Position of the first node in the
2787 * resulting right partition (out)
2788 * @param existing Number of existing elements before dividing
2789 * (in) and after (out). Specifically, the counter is
2790 * incremented by one for unique containers if the splitting key
2791 * was already in the array of nodes.
2792 * @param strictly_less_or_less_equal Comparator to deal
2793 * transparently with repetitions with respect to the uniqueness
2794 * of the wrapping container
2795 * @return Position of the last node (not included) in the
2796 * resulting left partition (out)
// NOTE(review): line-sampled listing — some lines between the numbered
// statements are missing from this view.
2798 template<typename StrictlyLessOrLessEqual>
2800 divide(_Rb_tree_node_ptr* r, const size_type pos_beg,
2801 const size_type pos_end, const key_type& key,
2802 size_type& pos_beg_right, size_type& existing,
2803 StrictlyLessOrLessEqual strictly_less_or_less_equal)
// Binary search: first node whose key is not less than the pivot key.
2805 pos_beg_right = std::lower_bound(
2806 r + pos_beg, r + pos_end, key, compare_node_key<_Compare>(
2807 base_type::_M_impl._M_key_compare)) - r;
2809 //Check if the element exists.
2810 size_type pos_end_left = pos_beg_right;
2812 // If r[pos_beg_right] is equal to key, must be erased
2813 /***** Dealing with repetitions (CORRECTNESS ISSUE) *****/
2814 _GLIBCXX_PARALLEL_ASSERT(
2815 (pos_beg_right == pos_end)
2816 or not base_type::_M_impl._M_key_compare(
2817 base_type::_S_key(r[pos_beg_right]),key));
2818 _GLIBCXX_PARALLEL_ASSERT(
2819 (pos_beg_right + 1 >= pos_end)
2820 or strictly_less_or_less_equal(
2821 key, base_type::_S_key(r[pos_beg_right + 1])));
// Unique container and node equal to the pivot key: destroy it and leave
// a NULL gap (counted via `existing` per the doc above — the increment
// line is not visible in this sampled view).
2823 if (pos_beg_right != pos_end
2824 and not strictly_less_or_less_equal(
2825 key, base_type::_S_key(r[pos_beg_right])))
2827 _M_destroy_node(r[pos_beg_right]);
2828 r[pos_beg_right] = NULL;
2832 _GLIBCXX_PARALLEL_ASSERT(pos_end_left <= pos_beg_right
2833 and pos_beg_right <= pos_end
2834 and pos_end_left >= pos_beg);
2835 return pos_end_left;
2839 /** @brief Parallelization helper method: Given a random-access
2840 sequence of known size, divide it into pieces of almost the
2842 * @param __first Begin iterator of sequence.
2843 * @param __last End iterator of sequence.
2844 * @param access Array of size @c num_pieces + 1 that defines @c
2845 * num_pieces subsequences. Each position @c i contains an
2846 * iterator to the first element in the subsequence @c i.
2847 * @param beg_partition Array of size @c num_pieces + 1 that
2848 * defines @c num_pieces subsequences. Each position @c i
2849 * contains the rank of the first element in the subsequence @c
2851 * @param n Sequence size
2852 * @param num_pieces Number of pieces. */
// Random-access overload: slice [first, last) into num_pieces chunks of
// size (last-first)/num_pieces, the remainder going to the last piece.
// NOTE(review): listing is line-sampled (beg_partition[0] init not visible).
2853 template<typename _RandomAccessIterator>
2855 range_accessors(const _RandomAccessIterator __first,
2856 const _RandomAccessIterator __last,
2857 _RandomAccessIterator* access,
2858 size_type* beg_partition,
2860 const thread_index_t num_pieces,
2861 std::random_access_iterator_tag)
2863 access[0] = __first;
2864 for (int i = 1; i< num_pieces; ++i)
2866 access[i] = access[i-1] + (__last-__first)/num_pieces;
2867 beg_partition[i] = (beg_partition[i - 1]
2868 + (__last - __first) / num_pieces);
// The last piece absorbs the division remainder.
2870 beg_partition[num_pieces] = (__last - access[num_pieces - 1]
2871 + beg_partition[num_pieces - 1]);
2872 access[num_pieces]= __last;
2875 /** @brief Parallelization helper method: Given an input-access
2876 sequence of known size, divide it into pieces of almost the
2878 * @param __first Begin iterator of sequence.
2879 * @param __last End iterator of sequence.
2880 * @param access Array of size @c num_pieces + 1 that defines @c
2881 * num_pieces subsequences. Each position @c i contains an
2882 * iterator to the first element in the subsequence @c i.
2883 * @param beg_partition Array of size @c num_pieces + 1 that
2884 * defines @c num_pieces subsequences. Each position @c i
2885 * contains the rank of the first element in the subsequence @c
2887 * @param n Sequence size
2888 * @param num_pieces Number of pieces. */
// Input-iterator overload: advance an iterator n/num_pieces steps per
// piece to record the accessors. NOTE(review): listing is line-sampled —
// the inner-loop body (the ++it) and the access[i] assignment are missing
// from this view.
2889 template<typename _InputIterator>
2891 range_accessors(const _InputIterator __first,
2892 const _InputIterator __last, _InputIterator* access,
2893 size_type* beg_partition, const size_type n,
2894 const thread_index_t num_pieces, std::input_iterator_tag)
2896 access[0] = __first;
2897 _InputIterator it= __first;
2898 for (int i = 1; i < num_pieces; ++i)
2900 for (int j=0; j< n/num_pieces; ++j)
2903 beg_partition[i]= n / num_pieces + beg_partition[i - 1];
2905 access[num_pieces] = __last;
// Last rank: remainder elements plus the previous rank (the factor line
// between n and beg_partition is missing from this sampled view).
2906 beg_partition[num_pieces] = (n - (num_pieces - 1)
2908 + beg_partition[num_pieces - 1]);
2911 /** @brief Initialize an array of concatenation problems for bulk
2912 insertion. They are linked as a tree with (end - beg) leaves.
2913 * @param conc Array of concatenation problems pointers to initialize.
2914 * @param beg Rank of the first leave to initialize
2915 * @param end Rank of the last (not included) leave to initialize
2916 * @param parent Pointer to the parent concatenation problem.
// Recursively link the concat_problem array into a binary tree over the
// leaves [beg, end): odd slots (2*mid-1) are internal problems, even slots
// are leaves. Returns the root of the linked subtree. NOTE(review): the
// listing is line-sampled (base-case return and recursion arguments are
// partly missing).
2918 static concat_problem*
2919 _M_bulk_insertion_initialize_upper_problems(concat_problem** conc,
2920 const int beg, const int end,
2921 concat_problem* parent)
// Base case: a single leaf; hook it to its parent.
2925 conc[2*beg]->par_problem = parent;
2929 int size = end - beg;
2930 int mid = beg + size/2;
// conc[2*mid-1] becomes the internal problem for this range.
2931 conc[2*mid-1]->par_problem = parent;
2932 conc[2*mid-1]->left_problem =
2933 _M_bulk_insertion_initialize_upper_problems(conc, beg, mid,
2935 conc[2*mid-1]->right_problem =
2936 _M_bulk_insertion_initialize_upper_problems(conc, mid, end,
2938 return conc[2*mid-1];
2942 /** @brief Determine black height of a node recursively.
2944 * @return Black height of the node. */
// Recursive black-height: count black nodes along the left spine.
// NOTE(review): the base case and return statement are missing from this
// line-sampled view.
2946 black_height(const _Rb_tree_node_ptr t)
2950 int bh = black_height(static_cast<const _Rb_tree_node_ptr>(t->_M_left));
2951 if (t->_M_color == std::_S_black)
2956 /** @brief Color a leaf black
2957 * @param t Leaf pointer.
2958 * @param black_h Black height of @c t (out) */
// Color a leaf black (asserts it really is a leaf). NOTE(review): the
// black_h updates are missing from this line-sampled view.
2960 make_black_leaf(const _Rb_tree_node_ptr t, int& black_h)
2965 _GLIBCXX_PARALLEL_ASSERT(t->_M_left == NULL and t->_M_right == NULL);
2967 t->_M_color = std::_S_black;
2971 /** @brief Color a node black.
2972 * @param t Node to color black.
2973 * @param black_h Black height of @c t (out) */
// Color a (non-NULL) node black. NOTE(review): the black_h update and any
// child-pointer resets are missing from this line-sampled view.
2975 make_leaf(const _Rb_tree_node_ptr t, int& black_h)
2977 _GLIBCXX_PARALLEL_ASSERT(t != NULL);
2979 t->_M_color = std::_S_black;
2984 /** @brief Construct a tree from a root, a left subtree and a
2986 * @param root Root of constructed tree.
2987 * @param l Root of left subtree.
2988 * @param r Root of right subtree.
2989 * @pre @c l, @c r are black.
// Build a tree from root + two black subtrees (per the @pre above); the
// new root is colored red. NOTE(review): the child-pointer assignments and
// return are missing from this line-sampled view.
2991 template<typename S>
2992 static _Rb_tree_node_ptr
2993 plant(const _Rb_tree_node_ptr root, const _Rb_tree_node_ptr l,
2994 const _Rb_tree_node_ptr r)
2999 l->_M_parent = root;
3001 r->_M_parent = root;
3002 root->_M_color = std::_S_red;
3006 /** @brief Concatenate two red-black subtrees using and an
3007 intermediate node, which might be NULL
3008 * @param root Intermediate node.
3009 * @param l Left subtree.
3010 * @param r Right subtree.
3011 * @param black_h_l Black height of left subtree.
3012 * @param black_h_r Black height of right subtree.
3013 * @param t Tree resulting of the concatenation
3014 * @param black_h Black height of the resulting tree
3015 * @pre Left tree is higher than left tree
3016 * @post @c t is correct red-black tree with height @c black_h.
// Dispatcher: joins l and r through an optional intermediate node `root`.
// Picks the LeftRight or RightLeft symmetric worker depending on which
// side is higher; when `root` is NULL an extremal node is extracted from
// the higher side to serve as the join node. NOTE(review): this listing is
// line-sampled — several if/else braces and NULL-root branches are missing.
3019 concatenate(_Rb_tree_node_ptr root, _Rb_tree_node_ptr l,
3020 _Rb_tree_node_ptr r, int black_h_l, int black_h_r,
3021 _Rb_tree_node_ptr& t, int& black_h) const
3024 int count = 0, count1 = 0, count2 = 0;
3026 _GLIBCXX_PARALLEL_ASSERT(rb_verify_tree(l, count1));
3027 _GLIBCXX_PARALLEL_ASSERT(rb_verify_tree(r, count2));
// Inputs must be valid black-rooted subtrees with consistent heights.
3029 _GLIBCXX_PARALLEL_ASSERT(l != NULL ? l->_M_color != std::_S_red
3030 and black_h_l > 0 : black_h_l == 0);
3031 _GLIBCXX_PARALLEL_ASSERT(r != NULL ? r->_M_color != std::_S_red
3032 and black_h_r > 0 : black_h_r == 0);
// Left side higher: descend the left tree (LeftRight symmetry).
3034 if (black_h_l > black_h_r)
3036 concatenate<LeftRight>(root, l, r, black_h_l, black_h_r, t, black_h);
3042 black_h = black_h_l;
3046 // XXX SHOULD BE the same as extract_min but slower.
3047 /* root = static_cast<_Rb_tree_node_ptr>(
3048 _Rb_tree_node_base::_S_minimum(r));
3050 split(r, _S_key(_Rb_tree_increment(root)),
3051 _S_key(root), root, t, r, black_h, black_h_r);
// No intermediate node: take the minimum of r as the join node.
3053 extract_min(r, root, r, black_h_r);
3054 _GLIBCXX_PARALLEL_ASSERT(root != NULL);
3055 concatenate<LeftRight>(root, l, r, black_h_l,
3056 black_h_r, t, black_h);
// Right side higher (or equal): mirror case via RightLeft symmetry.
3061 concatenate<RightLeft>(root, r, l, black_h_r, black_h_l, t, black_h);
3067 black_h = black_h_r;
3071 // XXX SHOULD BE the same as extract_max but slower
3073 root = static_cast<_Rb_tree_node_ptr>(
3074 _Rb_tree_node_base::_S_maximum(l));
3076 split(l, _S_key(root), _S_key(_Rb_tree_decrement(root)),
3077 root, l, t, black_h_l, black_h);
// No intermediate node: take the maximum of l as the join node.
3079 extract_max(l, root, l, black_h_l);
3080 _GLIBCXX_PARALLEL_ASSERT(root != NULL);
3081 concatenate<RightLeft>(root, r, l, black_h_r,
3082 black_h_l, t, black_h);
// Post-checks: result is black-rooted and node counts add up (the join
// node, when used, accounts for the +1 on count1).
3086 if (root!=NULL) ++count1;
3087 _GLIBCXX_PARALLEL_ASSERT(t == NULL or t->_M_color == std::_S_black);
3088 bool b = rb_verify_tree(t, count);
3090 _GLIBCXX_PARALLEL_ASSERT(false);
3092 _GLIBCXX_PARALLEL_ASSERT(count1+count2 == count);
3096 /** @brief Concatenate two red-black subtrees using a non-NULL
3097 * intermediate node.
3099 * @c S is the symmetry parameter.
3100 * @param rt Intermediate node.
3101 * @param l Left subtree.
3102 * @param r Right subtree.
3103 * @param black_h_l Black height of left subtree.
3104 * @param black_h_r Black height of right subtree.
3105 * @param t Tree resulting of the concatenation
3106 * @param black_h Black height of the resulting tree
3107 * @pre Left tree is higher than right tree. @c rt != NULL
3108 * @post @c t is correct red-black tree with height @c black_h.
3110 template<typename S>
3112 concatenate(const _Rb_tree_node_ptr rt, _Rb_tree_node_ptr l,
3113 _Rb_tree_node_ptr r, int black_h_l, int black_h_r,
3114 _Rb_tree_node_ptr& t, int& black_h)
// Walk down the S-side spine of the taller tree l until its black
// height matches black_h_r; rt is then planted at that point.
3116 _Rb_tree_node_base* root = l;
3117 _Rb_tree_node_ptr parent = NULL;
3118 black_h = black_h_l;
3119 _GLIBCXX_PARALLEL_ASSERT(black_h_l >= black_h_r);
3120 while (black_h_l != black_h_r)
// Only black nodes on the spine decrement the remaining height gap.
3122 if (l->_M_color == std::_S_black)
3125 l = static_cast<_Rb_tree_node_ptr>(S::right(l));
3126 _GLIBCXX_PARALLEL_ASSERT(
3127 (black_h_l == 0 and (l == NULL or l->_M_color == std::_S_red))
3128 or (black_h_l != 0 and l != NULL))
3129 _GLIBCXX_PARALLEL_ASSERT(
3130 (black_h_r == 0 and (r == NULL or r->_M_color == std::_S_red))
3131 or (black_h_r != 0 and r != NULL));
// Skip over a trailing red node: the plant point must be black.
3133 if (l != NULL and l->_M_color == std::_S_red)
3135 //the root needs to be black
3137 l = static_cast<_Rb_tree_node_ptr>(S::right(l));
3140 _GLIBCXX_PARALLEL_ASSERT(
3141 l != NULL ? l->_M_color == std::_S_black : true);
3142 _GLIBCXX_PARALLEL_ASSERT(
3143 r != NULL ? r->_M_color == std::_S_black : true);
// Attach rt with children l and r, then restore the red-black
// invariants upward from the plant point.
3145 t = plant<S>(rt, l, r);
3146 t->_M_parent = parent;
3149 S::right(parent) = t;
3150 black_h += _Rb_tree_rebalance(t, root);
3151 t = static_cast<_Rb_tree_node_ptr> (root);
3156 t->_M_color = std::_S_black;
3158 _GLIBCXX_PARALLEL_ASSERT(t->_M_color == std::_S_black);
3161 /** @brief Split a tree according to key in three parts: a left
3162 * child, a right child and an intermediate node.
3164 * Trees are concatenated once the recursive call returns. That
3165 * is, from bottom to top (i. e. smaller to larger), so the cost
3166 * bounds for split hold.
3167 * @param t Root of the tree to split.
3168 * @param key Key to split according to.
3169 * @param prev_k Key to split the intermediate node
3170 * @param root Out parameter. If a node exists whose key is
3171 * smaller or equal than @c key, but strictly larger than @c
3172 * prev_k, this is returned. Otherwise, it is null.
3173 * @param l Root of left subtree returned, nodes less than @c key.
3174 * @param r Root of right subtree returned, nodes greater or
3175 * equal than @c key.
3176 * @param black_h_l Black height of the left subtree.
3177 * @param black_h_r Black height of the right subtree.
3178 * @param strictly_less_or_less_equal Comparator to deal
3179 * transparently with repetitions with respect to the uniqueness
3180 * of the wrapping container
3181 * @return Black height of t */
3182 template<typename StrictlyLessOrEqual>
3184 split(_Rb_tree_node_ptr t, const key_type& key, const key_type& prev_k,
3185 _Rb_tree_node_ptr& root, _Rb_tree_node_ptr& l,
3186 _Rb_tree_node_ptr& r, int& black_h_l, int& black_h_r,
3187 StrictlyLessOrEqual strictly_less_or_less_equal)
3191 // Must be initialized, in case we never go left!!!
// Delegate the actual work; this wrapper only adds debug-mode
// postcondition checks on the returned partition.
3193 int h = split_not_null(t, key, prev_k, root, l, r, black_h_l,
3194 black_h_r, strictly_less_or_less_equal);
// All keys in l are strictly less than key; all keys in r are
// greater or equal.
3196 _GLIBCXX_PARALLEL_ASSERT(
3197 l == NULL or base_type::_M_impl._M_key_compare(
3198 base_type::_S_key(base_type::_S_maximum(l)),key));
3199 _GLIBCXX_PARALLEL_ASSERT(
3200 r == NULL or not base_type::_M_impl._M_key_compare(
3201 base_type::_S_key(base_type::_S_minimum(r)),key));
3203 _GLIBCXX_PARALLEL_ASSERT(rb_verify_tree(l, count1));
3204 _GLIBCXX_PARALLEL_ASSERT(rb_verify_tree(r, count2));
// The intermediate node's key, when present, lies in (prev_k, key].
3205 _GLIBCXX_PARALLEL_ASSERT(
3206 root == NULL or base_type::_M_impl._M_key_compare(
3207 prev_k, base_type::_S_key(root))
3208 and not base_type::_M_impl._M_key_compare(
3209 key, base_type::_S_key(root)));
3210 _GLIBCXX_PARALLEL_ASSERT(
3211 root != NULL or l==NULL
3212 or not base_type::_M_impl._M_key_compare(
3213 prev_k, base_type::_S_key(base_type::_S_maximum(l))));
3226 /** @brief Split a tree according to key in three parts: a left
3227 * child, a right child and an intermediate node.
3229 * @param t Root of the tree to split.
3230 * @param key Key to split according to.
3231 * @param prev_k Key to split the intermediate node
3232 * @param root Out parameter. If a node exists whose key is
3233 * smaller or equal than @c key, but strictly larger than @c
3234 * prev_k, this is returned. Otherwise, it is null.
3235 * @param l Root of left subtree returned, nodes less than @c key.
3236 * @param r Root of right subtree returned, nodes greater or
3237 * equal than @c key.
3238 * @param black_h_l Black height of the left subtree.
3239 * @param black_h_r Black height of the right subtree.
3240 * @param strictly_less_or_equal Comparator to deal transparently
3241 * with repetitions with respect to the uniqueness of the
3242 * wrapping container
3244 * @return Black height of t */
3245 template<typename StrictlyLessOrEqual>
3247 split_not_null(const _Rb_tree_node_ptr t, const key_type& key,
3248 const key_type& prev_k, _Rb_tree_node_ptr& root,
3249 _Rb_tree_node_ptr& l, _Rb_tree_node_ptr& r,
3250 int& black_h_l, int& black_h_r,
3251 StrictlyLessOrEqual strictly_less_or_equal)
3253 _GLIBCXX_PARALLEL_ASSERT (t != NULL);
// Track whether this level contributes to the black height.
3256 if (t->_M_color == std::_S_black)
// key <= t: the split point is in the left subtree.
3258 if (strictly_less_or_equal(key, base_type::_S_key(t)))
3260 if (t->_M_left != NULL )
3262 // t->M_right is at most one node
// Recurse left, then fold t and its right subtree into the
// right half on the way back up (bottom-to-top concatenation).
3264 b_h = black_h = split_not_null(
3265 static_cast<_Rb_tree_node_ptr>(t->_M_left), key, prev_k,
3266 root, l, r, black_h_l, black_h_r,
3267 strictly_less_or_equal);
3268 // Join root and right subtree to already existing right
3269 // half, leave left subtree.
3270 force_black_root(t->_M_right, b_h);
3271 concatenate(t, r, static_cast<_Rb_tree_node_ptr>(t->_M_right),
3272 black_h_r, b_h, r, black_h_r);
3276 // t->M_right is at most one node
3278 black_h_r = black_node;
3279 force_black_root(r, black_h_r);
3285 _GLIBCXX_PARALLEL_ASSERT(
3286 l == NULL or base_type::_M_impl._M_key_compare(
3287 base_type::_S_key(base_type::_S_maximum(l)),key));
3288 _GLIBCXX_PARALLEL_ASSERT(
3289 r == NULL or not base_type::_M_impl._M_key_compare(
3290 base_type::_S_key(base_type::_S_minimum(r)),key));
// t < key: the split point is in the right subtree.
3294 if (t->_M_right != NULL )
// prev_k < t as well, so t cannot be the intermediate node here.
3297 if (strictly_less_or_equal(prev_k, base_type::_S_key(t)))
3299 b_h = black_h = split_not_null(
3300 static_cast<_Rb_tree_node_ptr>(t->_M_right), key, prev_k,
3301 root, l, r, black_h_l, black_h_r, strictly_less_or_equal);
3302 // Join root and left subtree to already existing left
3303 // half, leave right subtree.
3304 force_black_root(t->_M_left, b_h);
3307 // There was another point where we went right.
3308 concatenate(t, static_cast<_Rb_tree_node_ptr>(
3309 t->_M_left), l, b_h, black_h_l,
3314 l = static_cast<_Rb_tree_node_ptr>(t->_M_left);
3317 _GLIBCXX_PARALLEL_ASSERT(
3318 l == NULL or base_type::_M_impl._M_key_compare(
3319 base_type::_S_key(base_type::_S_maximum(l)),key));
3320 _GLIBCXX_PARALLEL_ASSERT(
3321 r == NULL or not base_type::_M_impl._M_key_compare(
3322 base_type::_S_key(base_type::_S_minimum(r)),key));
// Leaf-level cases: t itself may become the intermediate node,
// or its (at most one-node) children terminate the recursion.
3326 if (strictly_less_or_equal(prev_k, base_type::_S_key(t)))
3329 l= static_cast<_Rb_tree_node_ptr>(t->_M_left);
3330 make_black_leaf(l, black_h_l);
3331 _GLIBCXX_PARALLEL_ASSERT(
3332 l == NULL or base_type::_M_impl._M_key_compare(
3333 base_type::_S_key(base_type::_S_maximum(l)),key));
3338 black_h_l = black_node;
3339 force_black_root(l, black_h_l);
3340 _GLIBCXX_PARALLEL_ASSERT(
3341 l == NULL or base_type::_M_impl._M_key_compare(
3342 base_type::_S_key(base_type::_S_maximum(l)),key));
3350 return black_h + black_node;
3353 /** @brief Color the root black and update the black height accordingly.
3355 * @param t Root of the tree.
3356 * @param black_h Black height of the tree @c t (out) */
3358 force_black_root(_Rb_tree_node_base* t, int& black_h)
// Recoloring a red root to black raises the black height by one.
3360 if (t != NULL and t->_M_color == std::_S_red)
3362 t->_M_color = std::_S_black;
3367 /** @brief Split the tree in two parts: the minimum element from a
3368 tree (i. e. leftmost) and the rest (right subtree)
3369 * @param t Root of the tree
3370 * @param root Minimum element (out)
3371 * @param r Right subtree: @c t - {@c root}
3372 * @param black_h_r Black height of the right subtree.
3373 * @return Black height of the original tree */
3375 extract_min(const _Rb_tree_node_ptr t, _Rb_tree_node_ptr& root,
3376 _Rb_tree_node_ptr& r, int& black_h_r)
3378 _GLIBCXX_PARALLEL_ASSERT (t != NULL);
// Black nodes on the leftmost path contribute to the returned height.
3381 if (t->_M_color == std::_S_black)
3384 if (t->_M_left != NULL )
3386 // t->M_right is at most one node
// Recurse down the leftmost path; rebuild the remainder by
// concatenating t and its right subtree on the way back up.
3388 b_h = black_h = extract_min(
3389 static_cast<_Rb_tree_node_ptr>(t->_M_left), root, r, black_h_r);
3391 // Join root and right subtree to already existing right
3392 // half, leave left subtree
3393 force_black_root(t->_M_right, b_h);
3394 concatenate(t, r, static_cast<_Rb_tree_node_ptr>(t->_M_right),
3395 black_h_r, b_h, r, black_h_r);
3399 // t->M_right is at most one node
// Base case: t is the minimum; its right child (if any) becomes
// the remainder tree with a black root.
3401 if (t->_M_right == NULL)
3408 r = static_cast<_Rb_tree_node_ptr>(t->_M_right);
3410 r->_M_color = std::_S_black;
3414 return black_h + black_node;
3418 /** @brief Split the tree in two parts: the greatest element from
3419 a tree (i. e. rightmost) and the rest (left subtree)
3420 * @param t Root of the tree
3421 * @param root Maximum element (out)
3422 * @param l Left subtree: @c t - {@c root}
3423 * @param black_h_l Black height of the left subtree.
3424 * @return Black height of the original tree */
3426 extract_max(const _Rb_tree_node_ptr t, _Rb_tree_node_ptr& root,
3427 _Rb_tree_node_ptr& l, int& black_h_l) const
3429 _GLIBCXX_PARALLEL_ASSERT (t != NULL);
// Mirror image of extract_min: walk the rightmost path.
3432 if (t->_M_color == std::_S_black)
3435 if (t->_M_right != NULL )
3437 b_h = black_h = extract_max(
3438 static_cast<_Rb_tree_node_ptr>(t->_M_right), root, l, black_h_l);
3440 // Join root and left subtree to already existing left half,
3441 // leave right subtree.
3442 force_black_root(t->_M_left, b_h);
3444 concatenate(t, static_cast<_Rb_tree_node_ptr>(
3445 t->_M_left), l, b_h, black_h_l, l, black_h_l);
// Base case: t is the maximum; its left child (if any) becomes
// the remainder tree with a black root.
3450 if (t->_M_left == NULL)
3457 l = static_cast<_Rb_tree_node_ptr>(t->_M_left);
3459 l->_M_color = std::_S_black;
3463 return black_h + black_node;
3466 /** @brief Split tree according to key in two parts: a left tree
3467 * and a right subtree
3469 * Trees are concatenated once the recursive call returns. That
3470 * is, from bottom to top (i. e. smaller to larger), so the cost
3471 * bounds for split hold.
3472 * @param t Root of the tree to split.
3473 * @param key Key to split according to.
3474 * @param l Root of left subtree returned, nodes less than @c key.
3475 * @param r Root of right subtree returned, nodes greater than @c key.
3476 * @param black_h_l Black height of the left subtree.
3477 * @param black_h_r Black height of the right subtree.
3478 * @return Black height of the original tree */
3480 split(const _Rb_tree_node_ptr t, const key_type& key,
3481 _Rb_tree_node_ptr& l, _Rb_tree_node_ptr& r, int& black_h_l,
3482 int& black_h_r) const
// Simple two-way variant: no intermediate node is extracted.
3488 if (t->_M_color == std::_S_black)
// not (t < key), i.e. key <= t: descend left.
3490 if (not (base_type::_M_impl._M_key_compare(base_type::_S_key(t),
3494 b_h = black_h = split(
3495 static_cast<_Rb_tree_node_ptr>(t->_M_left), key, l, r,
3496 black_h_l, black_h_r);
3498 // Join root and right subtree to already existing right
3499 // half, leave left subtree.
3500 force_black_root(t->_M_right, b_h);
3501 concatenate(t, r, static_cast<_Rb_tree_node_ptr>(
3502 t->_M_right), black_h_r, b_h, r, black_h_r);
// t < key: descend right, folding t and its left subtree into
// the left half afterwards.
3507 b_h = black_h = split(static_cast<_Rb_tree_node_ptr>(
3508 t->_M_right), key, l, r,
3509 black_h_l, black_h_r);
3511 // Join root and left subtree to already existing left
3512 // half, leave right subtree.
3513 force_black_root(t->_M_left, b_h);
3514 concatenate(t, static_cast<_Rb_tree_node_ptr>(
3515 t->_M_left), l, b_h, black_h_l, l, black_h_l);
3517 return black_h + black_node;
3529 /** @brief Insert an existing node in tree and rebalance it, if
3532 * The keyword "local" is used because no attributes of the
3533 * red-black tree are changed, so this insertion is not yet seen
3534 * by the global data structure.
3535 * @param t Root of tree to insert into.
3536 * @param new_t Existing node to insert.
3537 * @param existing Number of existing elements before insertion
3538 * (in) and after (out). Specifically, the counter is incremented
3539 * by one for unique containers if the key of new_t was already present.
3541 * @param black_h Black height of the resulting tree (out)
3542 * @param strictly_less_or_less_equal Comparator to deal
3543 * transparently with repetitions with respect to the uniqueness
3544 * of the wrapping container
3545 * @return Resulting tree after insertion */
3546 template<typename StrictlyLessOrLessEqual>
3548 _M_insert_local(_Rb_tree_node_base* t, const _Rb_tree_node_ptr new_t,
3549 size_type& existing, int& black_h,
3550 StrictlyLessOrLessEqual strictly_less_or_less_equal)
3552 _GLIBCXX_PARALLEL_ASSERT(t != NULL);
// Top-down unbalanced insertion; on success rebalance from new_t.
3553 if (_M_insert_local_top_down(t, new_t, NULL, NULL,
3554 true, strictly_less_or_less_equal))
3556 t->_M_parent = NULL;
3557 black_h += _Rb_tree_rebalance(new_t, t);
3558 _GLIBCXX_PARALLEL_ASSERT(t->_M_color == std::_S_black);
3559 return static_cast<_Rb_tree_node_ptr>(t);
// Insertion refused (duplicate key in a unique container):
// the pre-allocated node is destroyed, tree left intact.
3563 base_type::_M_destroy_node(new_t);
3565 force_black_root(t, black_h);
3566 return static_cast<_Rb_tree_node_ptr>(t);
3570 /***** Dealing with repetitions (CORRECTNESS ISSUE) *****/
3571 /** @brief Insert an existing node in tree, do no rebalancing.
3572 * @param t Root of tree to insert into.
3573 * @param new_t Existing node to insert.
3574 * @param eq_t Node candidate to be equal to new_t, only
3575 * relevant for unique containers
3576 * @param parent Parent node of @c t
3577 * @param is_left True if @c t is a left child of @c
3578 * parent. False otherwise.
3579 * @param strictly_less_or_less_equal Comparator to deal
3580 * transparently with repetitions with respect to the uniqueness
3581 * of the wrapping container
3583 * @return Success of the insertion
3585 template<typename StrictlyLessOrLessEqual>
3587 _M_insert_local_top_down(_Rb_tree_node_base* t,
3588 const _Rb_tree_node_ptr new_t,
3589 _Rb_tree_node_base* eq_t,
3590 _Rb_tree_node_base* parent, const bool is_left,
3591 StrictlyLessOrLessEqual
3592 strictly_less_or_less_equal) const
// Standard BST descent; eq_t remembers the last node we went
// right at, i.e. the only possible equal-key candidate.
3596 if (strictly_less_or_less_equal(
3597 _S_key(new_t), _S_key(static_cast<_Rb_tree_node_ptr>(t))))
3598 return _M_insert_local_top_down(t->_M_left, new_t, eq_t, t, true,
3599 strictly_less_or_less_equal);
3601 return _M_insert_local_top_down(t->_M_right, new_t, t, t, false,
3602 strictly_less_or_less_equal);
3605 _GLIBCXX_PARALLEL_ASSERT(parent != NULL);
// Reached a NULL child: insert unless eq_t holds an equal key
// (for unique containers the comparator then returns false).
3608 if (eq_t == NULL or strictly_less_or_less_equal(
3609 _S_key(static_cast<_Rb_tree_node_ptr>(eq_t)), _S_key(new_t)))
3611 // The element to be inserted did not exist yet.
3613 parent->_M_left = new_t;
3615 parent->_M_right = new_t;
3617 new_t->_M_parent = parent;
3618 new_t->_M_left = NULL;
3619 new_t->_M_right = NULL;
// New nodes are inserted red, as in classic red-black insertion.
3620 new_t->_M_color = std::_S_red;
3628 /** @brief Rebalance a tree locally.
3630 * Essentially, it is the same function as insert_erase from the
3631 * base class, but without the insertion and without using any
3633 * @param __x Root of the current subtree to rebalance.
3634 * @param __root Root of tree where @c __x is in (rebalancing
3635 * stops when root is reached)
3636 * @return Increment in the black height after rebalancing
3639 _Rb_tree_rebalance(_Rb_tree_node_base* __x, _Rb_tree_node_base*& __root)
3641 _GLIBCXX_PARALLEL_ASSERT(__root->_M_color == std::_S_black);
// Classic insert-fixup loop: repair red-red violations upward
// until the (local) root is reached or the parent is black.
3643 while (__x != __root and __x->_M_parent != __root and
3644 __x->_M_parent->_M_color == std::_S_red)
3646 _Rb_tree_node_base* const __xpp = __x->_M_parent->_M_parent;
3648 if (__x->_M_parent == __xpp->_M_left)
// Parent is a left child; __y is the uncle.
3650 _Rb_tree_node_base* const __y = __xpp->_M_right;
3651 if (__y && __y->_M_color == std::_S_red)
// Case 1: red uncle — recolor and continue from grandparent.
3653 __x->_M_parent->_M_color = std::_S_black;
3654 __y->_M_color = std::_S_black;
3655 __xpp->_M_color = std::_S_red;
// Cases 2/3: black uncle — rotate into shape, then rotate
// the grandparent and recolor.
3660 if (__x == __x->_M_parent->_M_right)
3662 __x = __x->_M_parent;
3663 std::_Rb_tree_rotate_left(__x, __root);
3665 __x->_M_parent->_M_color = std::_S_black;
3666 __xpp->_M_color = std::_S_red;
3667 std::_Rb_tree_rotate_right(__xpp, __root);
// Mirror image: parent is a right child.
3672 _Rb_tree_node_base* const __y = __xpp->_M_left;
3673 if (__y && __y->_M_color == std::_S_red)
3675 __x->_M_parent->_M_color = std::_S_black;
3676 __y->_M_color = std::_S_black;
3677 __xpp->_M_color = std::_S_red;
3682 if (__x == __x->_M_parent->_M_left)
3684 __x = __x->_M_parent;
3685 std::_Rb_tree_rotate_right(__x, __root);
3687 __x->_M_parent->_M_color = std::_S_black;
3688 __xpp->_M_color = std::_S_red;
3689 std::_Rb_tree_rotate_left(__xpp, __root);
// A red root is recolored black, raising the black height (the
// returned increment reflects this).
3693 if (__root->_M_color == std::_S_red)
3695 __root->_M_color = std::_S_black;
3696 _GLIBCXX_PARALLEL_ASSERT(
3697 rb_verify_tree(static_cast<typename base_type::
3698 _Const_Link_Type>(__root)))
3701 _GLIBCXX_PARALLEL_ASSERT(
3702 rb_verify_tree(static_cast<typename base_type::
3703 _Const_Link_type>(__root)));
3707 /** @brief Analogous to class method rb_verify() but only for a subtree.
3708 * @param __x Pointer to root of subtree to check.
3709 * @param count Returned number of nodes.
3710 * @return Tree correct.
3713 rb_verify_tree(const typename base_type::_Const_Link_type __x,
// Checks node-local (key-order) properties first, then the
// structural red-black properties with node counting.
3717 return rb_verify_tree_node(__x) and rb_verify_tree(__x, count, bh);
3720 /** @brief Verify that a subtree is binary search tree (verifies
3722 * @param __x Pointer to root of subtree to check.
3723 * @return Tree correct.
3726 rb_verify_tree_node(const typename base_type::_Const_Link_type __x) const
// Recursively verify every node's local invariants (NULL subtree
// is trivially correct).
3732 return rb_verify_node(__x)
3733 and rb_verify_tree_node(base_type::_S_left(__x))
3734 and rb_verify_tree_node( base_type::_S_right(__x));
3738 /** @brief Verify all the properties of a red-black tree except
3739 for the key ordering
3740 * @param __x Pointer to (subtree) root node.
3741 * @return Tree correct.
3744 rb_verify_tree(const typename base_type::_Const_Link_type __x)
// Convenience overload: discards the count and black height.
3747 return rb_verify_tree(__x, count, bh);
3750 /** @brief Verify all the properties of a red-black tree except
3751 for the key ordering
3752 * @param __x Pointer to (subtree) root node.
3753 * @param count Number of nodes of @c __x (out).
3754 * @param black_h Black height of @c __x (out).
3755 * @return Tree correct.
3758 rb_verify_tree(const typename base_type::_Const_Link_type __x, int& count,
// Verify both children recursively, then check that their black
// heights agree (the defining red-black balance property).
3767 typename base_type::_Const_Link_type __L = base_type::_S_left(__x);
3768 typename base_type::_Const_Link_type __R = base_type::_S_right(__x);
3769 int countL, countR = 0, bhL, bhR;
3770 bool ret = rb_verify_tree(__L, countL, bhL);
3771 ret = ret and rb_verify_tree(__R, countR, bhR);
3772 count = 1 + countL + countR;
3773 ret = ret and bhL == bhR;
// Only a black node adds to the black height of this subtree.
3774 black_h = bhL + ((__x->_M_color == std::_S_red)? 0 : 1);
3778 /** @brief Verify red-black properties (including key based) for a node
3779 * @param __x Pointer to node.
3780 * @return Node correct.
3783 rb_verify_node(const typename base_type::_Const_Link_type __x) const
3785 typename base_type::_Const_Link_type __L = base_type::_S_left(__x);
3786 typename base_type::_Const_Link_type __R = base_type::_S_right(__x);
// No red node may have a red child.
3787 if (__x->_M_color == std::_S_red)
3788 if ((__L && __L->_M_color == std::_S_red)
3789 || (__R && __R->_M_color == std::_S_red))
// The maximum of the left subtree must not exceed __x's key.
3794 __L = static_cast<typename base_type::_Const_Link_type>(
3795 base_type::_S_maximum(__L));
3796 if (base_type::_M_impl._M_key_compare(base_type::_S_key(__x),
3797 base_type::_S_key(__L)))
// The minimum of the right subtree must not be below __x's key.
3803 __R = static_cast<typename base_type::_Const_Link_type>(
3804 base_type::_S_minimum(__R));
3805 if (base_type::_M_impl._M_key_compare(base_type::_S_key(__R),
3806 base_type::_S_key(__x)))
3813 /** @brief Print all the information of the root.
3814 * @param t Root of the tree.
3817 print_root(_Rb_tree_node_base* t)
// Debugging aid: print the root's key, or "NULL" for an empty tree.
3821 std::cout<< base_type::_S_key(t) << std::endl;
3823 std::cout<< "NULL" << std::endl;
3827 /** @brief Print all the information of the tree.
3828 * @param t Root of the tree.
3831 print_tree(_Rb_tree_node_base* t)
// Debugging aid: in-order traversal, one key per line.
3836 print_tree(t->_M_left);
3837 std::cout<< base_type::_S_key(t) << std::endl;
3838 print_tree(t->_M_right);
3843 /** @brief Print blanks.
3844 * @param b Number of blanks to print.
3845 * @return A string with @c b blanks */
3846 inline static std::string
// Builds the indentation string used by draw_tree.
3851 for (int i=0; i < b; ++i)
3857 /** @brief Print all the information of the tree.
3858 * @param t Root of the tree.
3859 * @param c Width of a printed key.
3861 template<typename Pointer>
3863 draw_tree(Pointer t, const int c)
3868 std::cout << blanks(c) << "NULL" << std::endl;
// Reverse in-order traversal so the tree reads left-to-right when
// the output is rotated 90 degrees; indent grows with depth.
3871 draw_tree(static_cast<Pointer>(t->_M_right), c + 8);
3872 std::cout << blanks(c) << "" << base_type::_S_key(t) << " ";
// B = black node, R = red node.
3873 if (t->_M_color == std::_S_black)
3874 std::cout << "B" << std::endl;
3876 std::cout << "R" << std::endl;
3877 draw_tree(static_cast<Pointer>(t->_M_left), c + 8);
3882 /** @brief Verify that all the red-black tree properties hold for
3883 the stored tree, as well as the additional properties that the
3884 STL implementation imposes.
3889 if (base_type::_M_impl._M_node_count == 0
3890 || base_type::begin() == base_type::end())
3892 bool res = base_type::_M_impl._M_node_count == 0
3893 && base_type::begin() == base_type::end()
3894 && base_type::_M_impl._M_header._M_left ==base_type::_M_end()
3895 && base_type::_M_impl._M_header._M_right == base_type::_M_end();
3896 _GLIBCXX_PARALLEL_ASSERT(res);
3900 unsigned int __len = _Rb_tree_black_count(base_type::_M_leftmost(),
3901 base_type::_M_root());
3902 for (typename base_type::const_iterator __it =base_type::begin();
3903 __it != base_type::end(); ++__it)
3905 typename base_type::_Const_Link_type __x =
3906 static_cast<typename base_type::_Const_Link_type>(__it._M_node);
3907 if (not rb_verify_node(__x)) return false;
3908 if (!base_type::_S_left(__x)&& !base_type::_S_right(__x)
3909 && _Rb_tree_black_count(__x,base_type::_M_root()) != __len)
3911 _GLIBCXX_PARALLEL_ASSERT(false);
3917 if (i != base_type::_M_impl._M_node_count)
3918 printf("%ld != %ld\n", i, base_type::_M_impl._M_node_count);
3920 if (base_type::_M_leftmost()
3921 != std::_Rb_tree_node_base::_S_minimum(base_type::_M_root()))
3923 _GLIBCXX_PARALLEL_ASSERT(false);
3926 if (base_type::_M_rightmost()
3927 != std::_Rb_tree_node_base::_S_maximum(base_type::_M_root()))
3929 _GLIBCXX_PARALLEL_ASSERT(false);
3932 _GLIBCXX_PARALLEL_ASSERT(i == base_type::_M_impl._M_node_count);