unordered_multiset_test.cpp
来自「Boost provides free peer-reviewed portable C++ source libraries」· C++ 代码 · 共 774 行 · 第 1/3 页
CPP
774 行
typename unordered_multiset_type::bucket_type single_bucket[1]; for(unsigned int i = 0, max = random_size; i != max; ++i){ std::vector<typename ValueTraits::value_type> data (random_size); for (unsigned int j = 0; j < random_size; ++j) data[j].value_ = random_init[j]; unordered_multiset_type testset_new(bucket_traits(single_bucket, 1)); testset_new.insert(&data[0], &data[0]+max); testset_new.erase(testset_new.iterator_to(data[i])); BOOST_TEST (testset_new.size() == (max -1)); } } { typename unordered_multiset_type::bucket_type buckets [BucketSize]; const unsigned int NumBucketSize = BucketSize; const unsigned int LoadFactor = 3; const unsigned int NumIterations = NumBucketSize*LoadFactor; std::vector<value_type> random_init(NumIterations);//Preserve memory std::vector<value_type> set_tester; set_tester.reserve(NumIterations); //Initialize values for (unsigned int i = 0; i < NumIterations; ++i){ random_init[i].value_ = i*2;//(i/LoadFactor)*LoadFactor; } for(unsigned int initial_pos = 0; initial_pos != (NumIterations+1); ++initial_pos){ for(unsigned int final_pos = initial_pos; final_pos != (NumIterations+1); ++final_pos){ //Create intrusive container inserting values unordered_multiset_type testset ( &random_init[0] , &random_init[0] + random_init.size() , bucket_traits(buckets, NumBucketSize)); BOOST_TEST (testset.size() == random_init.size()); //Obtain the iterator range to erase iterator it_beg_pos = testset.begin(); for(unsigned int it_beg_pos_num = 0; it_beg_pos_num != initial_pos; ++it_beg_pos_num){ ++it_beg_pos; } iterator it_end_pos(it_beg_pos); for(unsigned int it_end_pos_num = 0; it_end_pos_num != (final_pos - initial_pos); ++it_end_pos_num){ ++it_end_pos; } //Erase the same values in both the intrusive and original vector std::size_t erased_cnt = std::distance(it_beg_pos, it_end_pos); //Erase values from the intrusive container testset.erase(it_beg_pos, it_end_pos); BOOST_TEST (testset.size() == (random_init.size()-(final_pos - initial_pos))); //Now 
test... BOOST_TEST ((random_init.size() - erased_cnt) == testset.size()); //Create an ordered copy of the intrusive container set_tester.insert(set_tester.end(), testset.begin(), testset.end()); std::sort(set_tester.begin(), set_tester.end()); { typename std::vector<value_type>::iterator it = set_tester.begin(), itend = set_tester.end(); typename std::vector<value_type>::iterator random_init_it(random_init.begin()); for( ; it != itend; ++it){ while(!random_init_it->is_linked()) ++random_init_it; BOOST_TEST(*it == *random_init_it); ++random_init_it; } } set_tester.clear(); } } }}//test: insert (seq-version), swap, erase (seq-version), size:template<class ValueTraits, bool CacheBegin, bool CompareHash, bool Incremental>void test_unordered_multiset<ValueTraits, CacheBegin, CompareHash, Incremental>:: test_swap(std::vector<typename ValueTraits::value_type>& values){ typedef typename ValueTraits::value_type value_type; typedef unordered_multiset <value_type , value_traits<ValueTraits> , constant_time_size<value_type::constant_time_size> , cache_begin<CacheBegin> , compare_hash<CompareHash> , incremental<Incremental> > unordered_multiset_type; typedef typename unordered_multiset_type::bucket_traits bucket_traits; typename unordered_multiset_type::bucket_type buckets [BucketSize]; typename unordered_multiset_type::bucket_type buckets2 [BucketSize]; unordered_multiset_type testset1(&values[0], &values[0] + 2, bucket_traits(buckets, BucketSize)); unordered_multiset_type testset2(bucket_traits(buckets2, BucketSize)); testset2.insert (&values[0] + 2, &values[0] + 6); testset1.swap (testset2); if(Incremental){ { int init_values [] = { 4, 5, 1, 2 }; TEST_INTRUSIVE_SEQUENCE( init_values, testset1.begin() ); } { int init_values [] = { 2, 3 }; TEST_INTRUSIVE_SEQUENCE( init_values, testset2.begin() ); } testset1.erase (testset1.iterator_to(values[4]), testset1.end()); BOOST_TEST (testset1.size() == 1); // BOOST_TEST (&testset1.front() == &values[3]); BOOST_TEST (&*testset1.begin() 
== &values[2]); } else{ { int init_values [] = { 1, 2, 4, 5 }; TEST_INTRUSIVE_SEQUENCE( init_values, testset1.begin() ); } { int init_values [] = { 2, 3 }; TEST_INTRUSIVE_SEQUENCE( init_values, testset2.begin() ); } testset1.erase (testset1.iterator_to(values[5]), testset1.end()); BOOST_TEST (testset1.size() == 1); // BOOST_TEST (&testset1.front() == &values[3]); BOOST_TEST (&*testset1.begin() == &values[3]); }} //test: rehash:template<class ValueTraits, bool CacheBegin, bool CompareHash, bool Incremental>void test_unordered_multiset<ValueTraits, CacheBegin, CompareHash, Incremental> ::test_rehash(std::vector<typename ValueTraits::value_type>& values, detail::true_){ typedef typename ValueTraits::value_type value_type; typedef unordered_multiset <value_type , value_traits<ValueTraits> , constant_time_size<value_type::constant_time_size> , cache_begin<CacheBegin> , compare_hash<CompareHash> , incremental<Incremental> > unordered_multiset_type; typedef typename unordered_multiset_type::bucket_traits bucket_traits; //Build a uset typename unordered_multiset_type::bucket_type buckets1 [BucketSize]; typename unordered_multiset_type::bucket_type buckets2 [BucketSize*2]; unordered_multiset_type testset1(&values[0], &values[0] + values.size(), bucket_traits(buckets1, BucketSize)); //Test current state BOOST_TEST(testset1.split_count() == BucketSize/2); { int init_values [] = { 4, 5, 1, 2, 2, 3 }; TEST_INTRUSIVE_SEQUENCE( init_values, testset1.begin() ); } //Incremental rehash step BOOST_TEST (testset1.incremental_rehash() == true); BOOST_TEST(testset1.split_count() == (BucketSize/2+1)); { int init_values [] = { 5, 1, 2, 2, 3, 4 }; TEST_INTRUSIVE_SEQUENCE( init_values, testset1.begin() ); } //Rest of incremental rehashes should lead to the same sequence for(std::size_t split_bucket = testset1.split_count(); split_bucket != BucketSize; ++split_bucket){ BOOST_TEST (testset1.incremental_rehash() == true); BOOST_TEST(testset1.split_count() == (split_bucket+1)); { int 
init_values [] = { 1, 2, 2, 3, 4, 5 }; TEST_INTRUSIVE_SEQUENCE( init_values, testset1.begin() ); } } //This incremental rehash should fail because we've reached the end of the bucket array BOOST_TEST (testset1.incremental_rehash() == false); BOOST_TEST(testset1.split_count() == BucketSize); { int init_values [] = { 1, 2, 2, 3, 4, 5 }; TEST_INTRUSIVE_SEQUENCE( init_values, testset1.begin() ); } // //Try incremental hashing specifying a new bucket traits pointing to the same array // //This incremental rehash should fail because the new size is not twice the original BOOST_TEST(testset1.incremental_rehash(bucket_traits(buckets1, BucketSize)) == false); BOOST_TEST(testset1.split_count() == BucketSize); { int init_values [] = { 1, 2, 2, 3, 4, 5 }; TEST_INTRUSIVE_SEQUENCE( init_values, testset1.begin() ); } //This incremental rehash should success because the new size is twice the original //and split_count is the same as the old bucket count BOOST_TEST(testset1.incremental_rehash(bucket_traits(buckets1, BucketSize*2)) == true); BOOST_TEST(testset1.split_count() == BucketSize); { int init_values [] = { 1, 2, 2, 3, 4, 5 }; TEST_INTRUSIVE_SEQUENCE( init_values, testset1.begin() ); } //This incremental rehash should also success because the new size is half the original //and split_count is the same as the new bucket count BOOST_TEST(testset1.incremental_rehash(bucket_traits(buckets1, BucketSize)) == true); BOOST_TEST(testset1.split_count() == BucketSize); { int init_values [] = { 1, 2, 2, 3, 4, 5 }; TEST_INTRUSIVE_SEQUENCE( init_values, testset1.begin() ); } // //Try incremental hashing specifying a new bucket traits pointing to the same array // //This incremental rehash should fail because the new size is not twice the original BOOST_TEST(testset1.incremental_rehash(bucket_traits(buckets2, BucketSize)) == false); BOOST_TEST(testset1.split_count() == BucketSize); { int init_values [] = { 1, 2, 2, 3, 4, 5 }; TEST_INTRUSIVE_SEQUENCE( init_values, testset1.begin() ); } 
//This incremental rehash should success because the new size is twice the original //and split_count is the same as the old bucket count BOOST_TEST(testset1.incremental_rehash(bucket_traits(buckets2, BucketSize*2)) == true); BOOST_TEST(testset1.split_count() == BucketSize); { int init_values [] = { 1, 2, 2, 3, 4, 5 }; TEST_INTRUSIVE_SEQUENCE( init_values, testset1.begin() ); } //This incremental rehash should also success because the new size is half the original //and split_count is the same as the new bucket count BOOST_TEST(testset1.incremental_rehash(bucket_traits(buckets1, BucketSize)) == true); BOOST_TEST(testset1.split_count() == BucketSize); { int init_values [] = { 1, 2, 2, 3, 4, 5 }; TEST_INTRUSIVE_SEQUENCE( init_values, testset1.begin() ); } //Full shrink rehash testset1.rehash(bucket_traits(buckets1, 4)); BOOST_TEST (testset1.size() == values.size()); BOOST_TEST (testset1.incremental_rehash() == false); { int init_values [] = { 4, 5, 1, 2, 2, 3 }; TEST_INTRUSIVE_SEQUENCE( init_values, testset1.begin() ); } //Full shrink rehash again testset1.rehash(bucket_traits(buckets1, 2)); BOOST_TEST (testset1.size() == values.size()); BOOST_TEST (testset1.incremental_rehash() == false); { int init_values [] = { 2, 2, 4, 3, 5, 1 }; TEST_INTRUSIVE_SEQUENCE( init_values, testset1.begin() ); } //Full growing rehash testset1.rehash(bucket_traits(buckets1, BucketSize)); BOOST_TEST (testset1.size() == values.size()); BOOST_TEST (testset1.incremental_rehash() == false); { int init_values [] = { 1, 2, 2, 3, 4, 5 }; TEST_INTRUSIVE_SEQUENCE( init_values, testset1.begin() ); } //Incremental rehash shrinking //First incremental rehashes should lead to the same sequence for(std::size_t split_bucket = testset1.split_count(); split_bucket > 6; --split_bucket){ BOOST_TEST (testset1.incremental_rehash(false) == true); BOOST_TEST(testset1.split_count() == (split_bucket-1)); { int init_values [] = { 1, 2, 2, 3, 4, 5 }; TEST_INTRUSIVE_SEQUENCE( init_values, testset1.begin() ); } } 
//Incremental rehash step BOOST_TEST (testset1.incremental_rehash(false) == true); BOOST_TEST(testset1.split_count() == (BucketSize/2+1)); { int init_values [] = { 5, 1, 2, 2, 3, 4 }; TEST_INTRUSIVE_SEQUENCE( init_values, testset1.begin() ); } //Incremental rehash step 2 BOOST_TEST (testset1.incremental_rehash(false) == true); BOOST_TEST(testset1.split_count() == (BucketSize/2)); { int init_values [] = { 4, 5, 1, 2, 2, 3 }; TEST_INTRUSIVE_SEQUENCE( init_values, testset1.begin() ); } //This incremental rehash should fail because we've reached the half of the bucket array BOOST_TEST(testset1.incremental_rehash(false) == false); BOOST_TEST(testset1.split_count() == BucketSize/2); { int init_values [] = { 4, 5, 1, 2, 2, 3 }; TEST_INTRUSIVE_SEQUENCE( init_values, testset1.begin() ); }}template<class ValueTraits, bool CacheBegin, bool CompareHash, bool Incremental>void test_unordered_multiset<ValueTraits, CacheBegin, CompareHash, Incremental>
⌨️ 快捷键说明
复制代码：Ctrl + C
搜索代码：Ctrl + F
全屏模式：F11
增大字号：Ctrl + =
减小字号：Ctrl + -
显示快捷键：?