#ifndef __TBB_enumerable_thread_specific_H
#define __TBB_enumerable_thread_specific_H

#include "concurrent_vector.h"
#include "tbb_thread.h"
#include "tbb_allocator.h"
#include "cache_aligned_allocator.h"
#include "aligned_space.h"
#include <string.h>

#if _WIN32||_WIN64
#include "machine/windows_api.h"
#else
#include <pthread.h>
#endif

namespace tbb {

//! Selects whether an enumerable_thread_specific instance allocates its own native
//! TLS key (ets_key_per_instance) or relies solely on the internal hash table
//! keyed by thread id (ets_no_key).
enum ets_key_usage_type { ets_key_per_instance, ets_no_key };

namespace interface6 {

// Implementation details; not part of the public interface.
namespace internal {

//! Base class for the thread-local storage implementations.
/** Maintains a linked list of open-addressed hash-table arrays keyed by thread id.
    New, larger arrays are prepended to the list as the number of threads grows;
    older arrays are kept so that previously returned slots remain valid. */
template<ets_key_usage_type ETS_key_type>
class ets_base: tbb::internal::no_copy {
protected:
#if _WIN32||_WIN64
    typedef DWORD key_type;
#else
    typedef pthread_t key_type;
#endif
#if __TBB_PROTECTED_NESTED_CLASS_BROKEN
public:
#endif
    struct slot;

    //! Header of one hash-table segment; the slots follow it in memory.
    struct array {
        array* next;
        size_t lg_size;
        slot& at( size_t k ) {
            return ((slot*)(void*)(this+1))[k];
        }
        size_t size() const {return (size_t)1<<lg_size;}
        size_t mask() const {return size()-1;}
        size_t start( size_t h ) const {
            return h>>(8*sizeof(size_t)-lg_size);
        }
    };

    //! One hash-table entry: a thread key and a pointer to that thread's element.
    struct slot {
        key_type key;
        void* ptr;
        bool empty() const {return !key;}
        bool match( key_type k ) const {return key==k;}
        bool claim( key_type k ) {
            __TBB_ASSERT(sizeof(tbb::atomic<key_type>)==sizeof(key_type), NULL);
            return tbb::internal::punned_cast<tbb::atomic<key_type>*>(&key)->compare_and_swap(k,0)==0;
        }
    };
#if __TBB_PROTECTED_NESTED_CLASS_BROKEN
protected:
#endif

    static key_type key_of_current_thread() {
        tbb::tbb_thread::id id = tbb::this_tbb_thread::get_id();
        key_type k;
        memcpy( &k, &id, sizeof(k) );
        return k;
    }

    //! Root of the linked list of hash-table arrays (newest and largest first).
    atomic<array*> my_root;
    atomic<size_t> my_count;
    virtual void* create_local() = 0;
    virtual void* create_array(size_t _size) = 0;
    virtual void free_array(void* ptr, size_t _size) = 0;

    array* allocate( size_t lg_size ) {
        size_t n = 1<<lg_size;
        array* a = static_cast<array*>(create_array( sizeof(array)+n*sizeof(slot) ));
        a->lg_size = lg_size;
        std::memset( a+1, 0, n*sizeof(slot) );
        return a;
    }
    void free(array* a) {
        size_t n = 1<<(a->lg_size);
        free_array( (void *)a, size_t(sizeof(array)+n*sizeof(slot)) );
    }

    //! Multiplicative hash of the thread key (golden-ratio constant).
    static size_t hash( key_type k ) {
        return uintptr_t(k)*tbb::internal::size_t_select(0x9E3779B9,0x9E3779B97F4A7C15ULL);
    }

    ets_base() {my_root=NULL; my_count=0;}
    virtual ~ets_base();
    void* table_lookup( bool& exists );
    void table_clear();
    slot& table_find( key_type k ) {
        size_t h = hash(k);
        array* r = my_root;
        size_t mask = r->mask();
        for(size_t i = r->start(h);;i=(i+1)&mask) {
            slot& s = r->at(i);
            if( s.empty() || s.match(k) )
                return s;
        }
    }
    void table_reserve_for_copy( const ets_base& other ) {
        __TBB_ASSERT(!my_root,NULL);
        __TBB_ASSERT(!my_count,NULL);
        if( other.my_root ) {
            array* a = allocate(other.my_root->lg_size);
            a->next = NULL;
            my_root = a;
            my_count = other.my_count;
        }
    }
};
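
// Note on the probing scheme: start(h) takes the high-order lg_size bits of the
// hash as the initial slot, so, for example, with lg_size==5 on a 64-bit build the
// initial index is h >> 59, a value in [0, 32).  Collisions are resolved by linear
// probing under the mask; the top array is grown (see table_lookup below) once the
// element count exceeds half its size, so an empty slot can always be found.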

template<ets_key_usage_type ETS_key_type>
ets_base<ETS_key_type>::~ets_base() {
    __TBB_ASSERT(!my_root, NULL);
}

//! Frees every hash-table array; the per-thread elements themselves are owned and
//! destroyed by the derived container before this is called.
template<ets_key_usage_type ETS_key_type>
void ets_base<ETS_key_type>::table_clear() {
    while( array* r = my_root ) {
        my_root = r->next;
        free(r);
    }
    my_count = 0;
}

//! Find the calling thread's element, creating it on first access.
/** Probes every array in the list for the thread's key.  If the key is found only
    in an older (smaller) array, or not at all, the slot is (re)inserted into the
    newest array, growing the table when it is more than half full. */
template<ets_key_usage_type ETS_key_type>
void* ets_base<ETS_key_type>::table_lookup( bool& exists ) {
    const key_type k = key_of_current_thread();

    __TBB_ASSERT(k!=0,NULL);
    void* found;
    size_t h = hash(k);
    for( array* r=my_root; r; r=r->next ) {
        size_t mask=r->mask();
        for(size_t i = r->start(h); ;i=(i+1)&mask) {
            slot& s = r->at(i);
            if( s.empty() ) break;
            if( s.match(k) ) {
                if( r==my_root ) {
                    // Success at the top level.
                    exists = true;
                    return s.ptr;
                } else {
                    // Success at a lower level; promote the entry to the top array.
                    exists = true;
                    found = s.ptr;
                    goto insert;
                }
            }
        }
    }
    // Key not found; create a new element for this thread.
    exists = false;
    found = create_local();
    {
        size_t c = ++my_count;
        array* r = my_root;
        if( !r || c>r->size()/2 ) {
            // Top array is absent or more than half full; prepend a bigger one.
            size_t s = r ? r->lg_size : 2;
            while( c>size_t(1)<<(s-1) ) ++s;
            array* a = allocate(s);
            for(;;) {
                a->next = my_root;
                array* new_r = my_root.compare_and_swap(a,r);
                if( new_r==r ) break;
                if( new_r->lg_size>=s ) {
                    // Another thread installed an equal or bigger array, so ours is superfluous.
                    free(a);
                    break;
                }
                r = new_r;
            }
        }
    }
insert:
    // Whether a new element was created or an existing one promoted,
    // insert it into the top-level array.
    array* ir = my_root;
    size_t mask = ir->mask();
    for(size_t i = ir->start(h);;i=(i+1)&mask) {
        slot& s = ir->at(i);
        if( s.empty() ) {
            if( s.claim(k) ) {
                s.ptr = found;
                return found;
            }
        }
    }
}

//! Specialization that exploits native TLS
template <>
class ets_base<ets_key_per_instance>: protected ets_base<ets_no_key> {
    typedef ets_base<ets_no_key> super;
#if _WIN32||_WIN64
    typedef DWORD tls_key_t;
    void create_key() { my_key = TlsAlloc(); }
    void destroy_key() { TlsFree(my_key); }
    void set_tls(void * value) { TlsSetValue(my_key, (LPVOID)value); }
    void* get_tls() { return (void *)TlsGetValue(my_key); }
#else
    typedef pthread_key_t tls_key_t;
    void create_key() { pthread_key_create(&my_key, NULL); }
    void destroy_key() { pthread_key_delete(my_key); }
    void set_tls( void * value ) const { pthread_setspecific(my_key, value); }
    void* get_tls() const { return pthread_getspecific(my_key); }
#endif
    tls_key_t my_key;
    virtual void* create_local() = 0;
    virtual void* create_array(size_t _size) = 0;
    virtual void free_array(void* ptr, size_t _size) = 0;
public:
    ets_base() {create_key();}
    ~ets_base() {destroy_key();}
    void* table_lookup( bool& exists ) {
        // The native TLS slot caches the result of the slower hash-table lookup.
        void* found = get_tls();
        if( found ) {
            exists=true;
        } else {
            found = super::table_lookup(exists);
            set_tls(found);
        }
        return found;
    }
    void table_clear() {
        destroy_key();
        create_key();
        super::table_clear();
    }
};
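
// Usage sketch: selecting ets_key_per_instance as the third template argument of
// enumerable_thread_specific routes local() through this specialization, so a
// repeated lookup costs one native TLS read instead of a hash-table probe (at the
// price of one OS TLS key per container).  The typedef name below is hypothetical.
//
//     typedef tbb::enumerable_thread_specific<
//                 int,
//                 tbb::cache_aligned_allocator<int>,
//                 tbb::ets_key_per_instance > fast_tls_counter_t;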

//! Random access iterator over the elements of an enumerable_thread_specific.
template< typename Container, typename Value >
class enumerable_thread_specific_iterator
#if defined(_WIN64) && defined(_MSC_VER)
    // Derivation from std::iterator is needed only to placate MSVC x64.
    : public std::iterator<std::random_access_iterator_tag,Value>
#endif
{
    Container *my_container;
    typename Container::size_type my_index;
    mutable Value *my_value;

    template<typename C, typename T>
    friend enumerable_thread_specific_iterator<C,T> operator+( ptrdiff_t offset,
                const enumerable_thread_specific_iterator<C,T>& v );

    template<typename C, typename T, typename U>
    friend bool operator==( const enumerable_thread_specific_iterator<C,T>& i,
                const enumerable_thread_specific_iterator<C,U>& j );

    template<typename C, typename T, typename U>
    friend bool operator<( const enumerable_thread_specific_iterator<C,T>& i,
                const enumerable_thread_specific_iterator<C,U>& j );

    template<typename C, typename T, typename U>
    friend ptrdiff_t operator-( const enumerable_thread_specific_iterator<C,T>& i, const enumerable_thread_specific_iterator<C,U>& j );

    template<typename C, typename U>
    friend class enumerable_thread_specific_iterator;

public:

    enumerable_thread_specific_iterator( const Container &container, typename Container::size_type index ) :
        my_container(&const_cast<Container &>(container)), my_index(index), my_value(NULL) {}

    //! Default constructor
    enumerable_thread_specific_iterator() : my_container(NULL), my_index(0), my_value(NULL) {}

    template<typename U>
    enumerable_thread_specific_iterator( const enumerable_thread_specific_iterator<Container, U>& other ) :
        my_container( other.my_container ), my_index( other.my_index ), my_value( const_cast<Value *>(other.my_value) ) {}

    enumerable_thread_specific_iterator operator+( ptrdiff_t offset ) const {
        return enumerable_thread_specific_iterator(*my_container, my_index + offset);
    }

    enumerable_thread_specific_iterator &operator+=( ptrdiff_t offset ) {
        my_index += offset;
        my_value = NULL;
        return *this;
    }

    enumerable_thread_specific_iterator operator-( ptrdiff_t offset ) const {
        return enumerable_thread_specific_iterator( *my_container, my_index-offset );
    }

    enumerable_thread_specific_iterator &operator-=( ptrdiff_t offset ) {
        my_index -= offset;
        my_value = NULL;
        return *this;
    }

    Value& operator*() const {
        Value* value = my_value;
        if( !value ) {
            value = my_value = reinterpret_cast<Value *>(&(*my_container)[my_index].value);
        }
        __TBB_ASSERT( value==reinterpret_cast<Value *>(&(*my_container)[my_index].value), "corrupt cache" );
        return *value;
    }

    Value& operator[]( ptrdiff_t k ) const {
        return (*my_container)[my_index + k].value;
    }

    Value* operator->() const {return &operator*();}

    enumerable_thread_specific_iterator& operator++() {
        ++my_index;
        my_value = NULL;
        return *this;
    }

    enumerable_thread_specific_iterator& operator--() {
        --my_index;
        my_value = NULL;
        return *this;
    }

    //! Post increment
    enumerable_thread_specific_iterator operator++(int) {
        enumerable_thread_specific_iterator result = *this;
        ++my_index;
        my_value = NULL;
        return result;
    }

    //! Post decrement
    enumerable_thread_specific_iterator operator--(int) {
        enumerable_thread_specific_iterator result = *this;
        --my_index;
        my_value = NULL;
        return result;
    }

    // STL support
    typedef ptrdiff_t difference_type;
    typedef Value value_type;
    typedef Value* pointer;
    typedef Value& reference;
    typedef std::random_access_iterator_tag iterator_category;
};

template<typename Container, typename T>
enumerable_thread_specific_iterator<Container,T> operator+( ptrdiff_t offset,
            const enumerable_thread_specific_iterator<Container,T>& v ) {
    return enumerable_thread_specific_iterator<Container,T>( *v.my_container, v.my_index + offset );
}

template<typename Container, typename T, typename U>
bool operator==( const enumerable_thread_specific_iterator<Container,T>& i,
                 const enumerable_thread_specific_iterator<Container,U>& j ) {
    return i.my_index==j.my_index && i.my_container == j.my_container;
}

template<typename Container, typename T, typename U>
bool operator!=( const enumerable_thread_specific_iterator<Container,T>& i,
                 const enumerable_thread_specific_iterator<Container,U>& j ) {
    return !(i==j);
}

template<typename Container, typename T, typename U>
bool operator<( const enumerable_thread_specific_iterator<Container,T>& i,
                const enumerable_thread_specific_iterator<Container,U>& j ) {
    return i.my_index<j.my_index;
}

template<typename Container, typename T, typename U>
bool operator>( const enumerable_thread_specific_iterator<Container,T>& i,
                const enumerable_thread_specific_iterator<Container,U>& j ) {
    return j<i;
}

template<typename Container, typename T, typename U>
bool operator>=( const enumerable_thread_specific_iterator<Container,T>& i,
                 const enumerable_thread_specific_iterator<Container,U>& j ) {
    return !(i<j);
}

template<typename Container, typename T, typename U>
bool operator<=( const enumerable_thread_specific_iterator<Container,T>& i,
                 const enumerable_thread_specific_iterator<Container,U>& j ) {
    return !(j<i);
}

template<typename Container, typename T, typename U>
ptrdiff_t operator-( const enumerable_thread_specific_iterator<Container,T>& i,
                     const enumerable_thread_specific_iterator<Container,U>& j ) {
    return i.my_index-j.my_index;
}

//! Input iterator that flattens iteration over a container of containers.
/** Used by flattened2d to visit every element of every inner container in turn. */
template<typename SegmentedContainer, typename Value >
class segmented_iterator
#if defined(_WIN64) && defined(_MSC_VER)
    // Derivation from std::iterator is needed only to placate MSVC x64.
    : public std::iterator<std::input_iterator_tag, Value>
#endif
{
    template<typename C, typename T, typename U>
    friend bool operator==(const segmented_iterator<C,T>& i, const segmented_iterator<C,U>& j);

    template<typename C, typename T, typename U>
    friend bool operator!=(const segmented_iterator<C,T>& i, const segmented_iterator<C,U>& j);

    template<typename C, typename U>
    friend class segmented_iterator;

public:

    segmented_iterator() {my_segcont = NULL;}

    segmented_iterator( const SegmentedContainer& _segmented_container ) :
        my_segcont(const_cast<SegmentedContainer*>(&_segmented_container)),
        outer_iter(my_segcont->end()) { }

    ~segmented_iterator() {}

    typedef typename SegmentedContainer::iterator outer_iterator;
    typedef typename SegmentedContainer::value_type InnerContainer;
    typedef typename InnerContainer::iterator inner_iterator;

    // STL support
    typedef ptrdiff_t difference_type;
    typedef Value value_type;
    typedef typename SegmentedContainer::size_type size_type;
    typedef Value* pointer;
    typedef Value& reference;
    typedef std::input_iterator_tag iterator_category;

    // Copy constructor
    template<typename U>
    segmented_iterator(const segmented_iterator<SegmentedContainer, U>& other) :
        my_segcont(other.my_segcont),
        outer_iter(other.outer_iter),
        inner_iter(other.inner_iter)
    {}

    // Assignment
    template<typename U>
    segmented_iterator& operator=( const segmented_iterator<SegmentedContainer, U>& other) {
        if(this != &other) {
            my_segcont = other.my_segcont;
            outer_iter = other.outer_iter;
            if(outer_iter != my_segcont->end()) inner_iter = other.inner_iter;
        }
        return *this;
    }

    // Allow assignment of an outer iterator to the segmented iterator.  Once assigned,
    // move forward until a non-empty inner container is found or the end of the
    // outer container is reached.
    segmented_iterator& operator=(const outer_iterator& new_outer_iter) {
        __TBB_ASSERT(my_segcont != NULL, NULL);
        for(outer_iter = new_outer_iter; outer_iter!=my_segcont->end(); ++outer_iter) {
            if( !outer_iter->empty() ) {
                inner_iter = outer_iter->begin();
                break;
            }
        }
        return *this;
    }

    // Pre-increment
    segmented_iterator& operator++() {
        advance_me();
        return *this;
    }

    // Post-increment
    segmented_iterator operator++(int) {
        segmented_iterator tmp = *this;
        operator++();
        return tmp;
    }

    bool operator==(const outer_iterator& other_outer) const {
        __TBB_ASSERT(my_segcont != NULL, NULL);
        return (outer_iter == other_outer &&
                (outer_iter == my_segcont->end() || inner_iter == outer_iter->begin()));
    }

    bool operator!=(const outer_iterator& other_outer) const {
        return !operator==(other_outer);
    }

    // Dereference
    reference operator*() const {
        __TBB_ASSERT(my_segcont != NULL, NULL);
        __TBB_ASSERT(outer_iter != my_segcont->end(), "Dereferencing a pointer at end of container");
        __TBB_ASSERT(inner_iter != outer_iter->end(), NULL);
        return *inner_iter;
    }

    pointer operator->() const { return &operator*();}

private:
    SegmentedContainer* my_segcont;
    outer_iterator outer_iter;
    inner_iterator inner_iter;

    //! Step to the next element, skipping any empty inner containers.
    void advance_me() {
        __TBB_ASSERT(my_segcont != NULL, NULL);
        __TBB_ASSERT(outer_iter != my_segcont->end(), NULL);
        __TBB_ASSERT(inner_iter != outer_iter->end(), NULL);
        ++inner_iter;
        while(inner_iter == outer_iter->end() && ++outer_iter != my_segcont->end()) {
            inner_iter = outer_iter->begin();
        }
    }
};

template<typename SegmentedContainer, typename T, typename U>
bool operator==( const segmented_iterator<SegmentedContainer,T>& i,
                 const segmented_iterator<SegmentedContainer,U>& j ) {
    if(i.my_segcont != j.my_segcont) return false;
    if(i.my_segcont == NULL) return true;
    if(i.outer_iter != j.outer_iter) return false;
    if(i.outer_iter == i.my_segcont->end()) return true;
    return i.inner_iter == j.inner_iter;
}

template<typename SegmentedContainer, typename T, typename U>
bool operator!=( const segmented_iterator<SegmentedContainer,T>& i,
                 const segmented_iterator<SegmentedContainer,U>& j ) {
    return !(i==j);
}

//! Holds storage for a T and destroys (but never constructs) it.
template<typename T>
struct destruct_only: tbb::internal::no_copy {
    tbb::aligned_space<T,1> value;
    ~destruct_only() {value.begin()[0].~T();}
};

//! Constructs T by its default constructor.
template<typename T>
struct construct_by_default: tbb::internal::no_assign {
    void construct(void*where) {new(where) T();}
    construct_by_default( int ) {}
};

//! Constructs T by copying an exemplar.
template<typename T>
struct construct_by_exemplar: tbb::internal::no_assign {
    const T exemplar;
    void construct(void*where) {new(where) T(exemplar);}
    construct_by_exemplar( const T& t ) : exemplar(t) {}
};

//! Constructs T from the result of calling a functor.
template<typename T, typename Finit>
struct construct_by_finit: tbb::internal::no_assign {
    Finit f;
    void construct(void* where) {new(where) T(f());}
    construct_by_finit( const Finit& f_ ) : f(f_) {}
};

//! Abstract interface for cloning, destroying, and invoking the construction callback.
template<typename T>
class callback_base {
public:
    virtual callback_base* clone() = 0;
    virtual void destroy() = 0;
    virtual ~callback_base() { }
    virtual void construct(void* where) = 0;
};

template <typename T, typename Constructor>
class callback_leaf: public callback_base<T>, Constructor {
    template<typename X> callback_leaf( const X& x ) : Constructor(x) {}

    typedef typename tbb::tbb_allocator<callback_leaf> my_allocator_type;

    /*override*/ callback_base<T>* clone() {
        void* where = my_allocator_type().allocate(1);
        return new(where) callback_leaf(*this);
    }

    /*override*/ void destroy() {
        my_allocator_type().destroy(this);
        my_allocator_type().deallocate(this,1);
    }

    /*override*/ void construct(void* where) {
        Constructor::construct(where);
    }
public:
    template<typename X>
    static callback_base<T>* make( const X& x ) {
        void* where = my_allocator_type().allocate(1);
        return new(where) callback_leaf(x);
    }
};

//! Storage for one thread-local element.
/** ModularSize is sizeof(U) modulo the cache-line size; when it is non-zero the
    element is padded out to a whole number of cache lines.  Construction and
    destruction are managed explicitly by the enclosing container. */
template<typename U, size_t ModularSize>
struct ets_element {
    char value[ModularSize==0 ? sizeof(U) : sizeof(U)+(tbb::internal::NFS_MaxLineSize-ModularSize)];
    void unconstruct() {
        tbb::internal::punned_cast<U*>(&value)->~U();
    }
};

} // namespace internal

//! The enumerable_thread_specific container
/** Lazily creates one element of type T per thread that accesses it.  Elements live
    in a concurrent_vector, so their addresses stay valid until clear() or
    destruction; once parallel work is done they can all be visited through the
    iterators, range(), combine() or combine_each(). */
template <typename T,
          typename Allocator=cache_aligned_allocator<T>,
          ets_key_usage_type ETS_key_type=ets_no_key >
class enumerable_thread_specific: internal::ets_base<ETS_key_type> {

    template<typename U, typename A, ets_key_usage_type C> friend class enumerable_thread_specific;

    typedef internal::ets_element<T,sizeof(T)%tbb::internal::NFS_MaxLineSize> padded_element;

    //! A generic range, used to create range objects from the iterators
    template<typename I>
    class generic_range_type: public blocked_range<I> {
    public:
        typedef T value_type;
        typedef T& reference;
        typedef const T& const_reference;
        typedef I iterator;
        typedef ptrdiff_t difference_type;
        generic_range_type( I begin_, I end_, size_t grainsize_ = 1) : blocked_range<I>(begin_,end_,grainsize_) {}
        template<typename U>
        generic_range_type( const generic_range_type<U>& r) : blocked_range<I>(r.begin(),r.end(),r.grainsize()) {}
        generic_range_type( generic_range_type& r, split ) : blocked_range<I>(r,split()) {}
    };

    typedef typename Allocator::template rebind< padded_element >::other padded_allocator_type;
    typedef tbb::concurrent_vector< padded_element, padded_allocator_type > internal_collection_type;

    internal::callback_base<T> *my_construct_callback;

    internal_collection_type my_locals;

    /*override*/ void* create_local() {
#if TBB_DEPRECATED
        void* lref = &my_locals[my_locals.push_back(padded_element())];
#else
        void* lref = &*my_locals.push_back(padded_element());
#endif
        my_construct_callback->construct(lref);
        return lref;
    }

    void unconstruct_locals() {
        for(typename internal_collection_type::iterator cvi = my_locals.begin(); cvi != my_locals.end(); ++cvi) {
            cvi->unconstruct();
        }
    }

    typedef typename Allocator::template rebind< uintptr_t >::other array_allocator_type;

    // _size is in bytes
    /*override*/ void* create_array(size_t _size) {
        size_t nelements = (_size + sizeof(uintptr_t) -1) / sizeof(uintptr_t);
        return array_allocator_type().allocate(nelements);
    }

    /*override*/ void free_array( void* _ptr, size_t _size) {
        size_t nelements = (_size + sizeof(uintptr_t) -1) / sizeof(uintptr_t);
        array_allocator_type().deallocate( reinterpret_cast<uintptr_t *>(_ptr),nelements);
    }

public:

    //! Basic types
    typedef Allocator allocator_type;
    typedef T value_type;
    typedef T& reference;
    typedef const T& const_reference;
    typedef T* pointer;
    typedef const T* const_pointer;
    typedef typename internal_collection_type::size_type size_type;
    typedef typename internal_collection_type::difference_type difference_type;

    // Iterator types
    typedef typename internal::enumerable_thread_specific_iterator< internal_collection_type, value_type > iterator;
    typedef typename internal::enumerable_thread_specific_iterator< internal_collection_type, const value_type > const_iterator;

    // Parallel range types
    typedef generic_range_type< iterator > range_type;
    typedef generic_range_type< const_iterator > const_range_type;

    //! Default constructor.  Each local instance of T is default constructed.
    enumerable_thread_specific() :
        my_construct_callback( internal::callback_leaf<T,internal::construct_by_default<T> >::make(0) )
    {}

    //! Constructor with initializer functor.  Each local instance of T is constructed by T(finit()).
    template <typename Finit>
    enumerable_thread_specific( Finit finit ) :
        my_construct_callback( internal::callback_leaf<T,internal::construct_by_finit<T,Finit> >::make( finit ) )
    {}

    //! Constructor with exemplar.  Each local instance of T is copy-constructed from the exemplar.
    enumerable_thread_specific(const T& exemplar) :
        my_construct_callback( internal::callback_leaf<T,internal::construct_by_exemplar<T> >::make( exemplar ) )
    {}

    //! Destructor
    ~enumerable_thread_specific() {
        my_construct_callback->destroy();
        this->clear();  // deallocate the hash table before the base class destructor runs
    }

    //! Returns reference to calling thread's local copy, creating one if necessary
    reference local() {
        bool exists;
        return local(exists);
    }

    //! Returns reference to calling thread's local copy, creating one if necessary
    reference local(bool& exists) {
        void* ptr = this->table_lookup(exists);
        return *(T*)ptr;
    }

    //! Get the number of local copies
    size_type size() const { return my_locals.size(); }

    //! True if there have been no local copies created
    bool empty() const { return my_locals.empty(); }

    //! Begin iterator
    iterator begin() { return iterator( my_locals, 0 ); }
    //! End iterator
    iterator end() { return iterator(my_locals, my_locals.size() ); }

    //! Begin const iterator
    const_iterator begin() const { return const_iterator(my_locals, 0); }

    //! End const iterator
    const_iterator end() const { return const_iterator(my_locals, my_locals.size()); }

    //! Get range for parallel algorithms
    range_type range( size_t grainsize=1 ) { return range_type( begin(), end(), grainsize ); }

    //! Get const range for parallel algorithms
    const_range_type range( size_t grainsize=1 ) const { return const_range_type( begin(), end(), grainsize ); }

    //! Destroys local copies
    void clear() {
        unconstruct_locals();
        my_locals.clear();
        this->table_clear();
        // callback is not destroyed
        // exemplar is not destroyed
    }

private:

    template<typename U, typename A2, ets_key_usage_type C2>
    void internal_copy( const enumerable_thread_specific<U, A2, C2>& other);

public:

    //! Copy constructor from a container with compatible element, allocator and key-usage types
    template<typename U, typename Alloc, ets_key_usage_type Cachetype>
    enumerable_thread_specific( const enumerable_thread_specific<U, Alloc, Cachetype>& other ) : internal::ets_base<ETS_key_type> ()
    {
        internal_copy(other);
    }

    //! Copy constructor
    enumerable_thread_specific( const enumerable_thread_specific& other ) : internal::ets_base<ETS_key_type> ()
    {
        internal_copy(other);
    }

private:

    template<typename U, typename A2, ets_key_usage_type C2>
    enumerable_thread_specific &
    internal_assign(const enumerable_thread_specific<U, A2, C2>& other) {
        if(static_cast<void *>( this ) != static_cast<const void *>( &other )) {
            this->clear();
            my_construct_callback->destroy();
            my_construct_callback = 0;
            internal_copy( other );
        }
        return *this;
    }

public:

    //! Assignment
    enumerable_thread_specific& operator=(const enumerable_thread_specific& other) {
        return internal_assign(other);
    }

    template<typename U, typename Alloc, ets_key_usage_type Cachetype>
    enumerable_thread_specific& operator=(const enumerable_thread_specific<U, Alloc, Cachetype>& other)
    {
        return internal_assign(other);
    }

    //! Returns the combination of every thread-local element, computed with f_combine.
    /** If no local copies exist, returns a T constructed by the container's construction policy. */
    template <typename combine_func_t>
    T combine(combine_func_t f_combine) {
        if(begin() == end()) {
            internal::destruct_only<T> location;
            my_construct_callback->construct(location.value.begin());
            return *location.value.begin();
        }
        const_iterator ci = begin();
        T my_result = *ci;
        while(++ci != end())
            my_result = f_combine( my_result, *ci );
        return my_result;
    }

    //! Applies f_combine to each thread-local element; no value is accumulated.
    template <typename combine_func_t>
    void combine_each(combine_func_t f_combine) {
        for(const_iterator ci = begin(); ci != end(); ++ci) {
            f_combine( *ci );
        }
    }

}; // class enumerable_thread_specific

template <typename T, typename Allocator, ets_key_usage_type ETS_key_type>
template<typename U, typename A2, ets_key_usage_type C2>
void enumerable_thread_specific<T,Allocator,ETS_key_type>::internal_copy( const enumerable_thread_specific<U, A2, C2>& other) {
    // Copy the construction callback, then copy each existing element,
    // preserving the association between thread keys and elements.
    my_construct_callback = other.my_construct_callback->clone();

    typedef internal::ets_base<ets_no_key> base;
    __TBB_ASSERT(my_locals.size()==0,NULL);
    this->table_reserve_for_copy( other );
    for( base::array* r=other.my_root; r; r=r->next ) {
        for( size_t i=0; i<r->size(); ++i ) {
            base::slot& s1 = r->at(i);
            if( !s1.empty() ) {
                base::slot& s2 = this->table_find(s1.key);
                if( s2.empty() ) {
#if TBB_DEPRECATED
                    void* lref = &my_locals[my_locals.push_back(padded_element())];
#else
                    void* lref = &*my_locals.push_back(padded_element());
#endif
                    s2.ptr = new(lref) T(*(U*)s1.ptr);
                    s2.key = s1.key;
                } else {
                    // Skip the duplicate: an element for this key was already copied
                    // from a newer array earlier in the list.
                }
            }
        }
    }
}
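
// Usage sketch for enumerable_thread_specific: each thread accumulates into its own
// element via local(), and the results are reduced afterwards with combine().  The
// parallel_for call, lambda syntax, and variable names are illustrative assumptions;
// only the enumerable_thread_specific calls themselves come from this header.
//
//     #include "tbb/enumerable_thread_specific.h"
//     #include "tbb/parallel_for.h"
//     #include "tbb/blocked_range.h"
//
//     tbb::enumerable_thread_specific<long> partial_sums(0L);   // exemplar-initialized
//
//     void parallel_count( const int* data, size_t n ) {
//         tbb::parallel_for( tbb::blocked_range<size_t>(0, n),
//             [&]( const tbb::blocked_range<size_t>& r ) {
//                 long& my_sum = partial_sums.local();           // lazily created per thread
//                 for( size_t i = r.begin(); i != r.end(); ++i )
//                     my_sum += data[i];
//             } );
//     }
//
//     long total() {
//         // Serial reduction over every thread's element.
//         return partial_sums.combine( []( long a, long b ) { return a + b; } );
//     }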

//! A view that presents a container of containers as a single flat sequence.
/** Usually obtained via flatten2d(); iteration visits every element of every inner
    container between my_begin and my_end. */
template< typename Container >
class flattened2d {

    typedef typename Container::value_type conval_type;

public:

    //! Basic types
    typedef typename conval_type::size_type size_type;
    typedef typename conval_type::difference_type difference_type;
    typedef typename conval_type::allocator_type allocator_type;
    typedef typename conval_type::value_type value_type;
    typedef typename conval_type::reference reference;
    typedef typename conval_type::const_reference const_reference;
    typedef typename conval_type::pointer pointer;
    typedef typename conval_type::const_pointer const_pointer;

    typedef typename internal::segmented_iterator<Container, value_type> iterator;
    typedef typename internal::segmented_iterator<Container, const value_type> const_iterator;

    flattened2d( const Container &c, typename Container::const_iterator b, typename Container::const_iterator e ) :
        my_container(const_cast<Container*>(&c)), my_begin(b), my_end(e) { }

    flattened2d( const Container &c ) :
        my_container(const_cast<Container*>(&c)), my_begin(c.begin()), my_end(c.end()) { }

    iterator begin() { return iterator(*my_container) = my_begin; }
    iterator end() { return iterator(*my_container) = my_end; }
    const_iterator begin() const { return const_iterator(*my_container) = my_begin; }
    const_iterator end() const { return const_iterator(*my_container) = my_end; }

    //! Total number of elements in all inner containers within [my_begin, my_end).
    size_type size() const {
        size_type tot_size = 0;
        for(typename Container::const_iterator i = my_begin; i != my_end; ++i) {
            tot_size += i->size();
        }
        return tot_size;
    }

private:

    Container *my_container;
    typename Container::const_iterator my_begin;
    typename Container::const_iterator my_end;

};

//! Returns a flattened2d view over the subrange [b, e) of c.
template <typename Container>
flattened2d<Container> flatten2d(const Container &c, const typename Container::const_iterator b, const typename Container::const_iterator e) {
    return flattened2d<Container>(c, b, e);
}

//! Returns a flattened2d view over the whole container c.
template <typename Container>
flattened2d<Container> flatten2d(const Container &c) {
    return flattened2d<Container>(c);
}
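
// Usage sketch for flatten2d: when each thread-local element is itself a container,
// flatten2d presents all inner elements as one sequence.  The container choice and
// names below are illustrative assumptions.
//
//     #include <vector>
//
//     typedef tbb::enumerable_thread_specific< std::vector<int> > ets_vectors_t;
//     ets_vectors_t per_thread_hits;
//
//     // ... each thread appends to per_thread_hits.local() during parallel work ...
//
//     size_t count_all( const ets_vectors_t& v ) {
//         tbb::flattened2d<ets_vectors_t> flat = tbb::flatten2d(v);
//         size_t n = 0;
//         for( tbb::flattened2d<ets_vectors_t>::const_iterator i = flat.begin(); i != flat.end(); ++i )
//             ++n;                       // visits every int in every thread's vector
//         return n;
//     }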

} // namespace interface6

namespace internal {
    using interface6::internal::segmented_iterator;
}

using interface6::enumerable_thread_specific;
using interface6::flattened2d;
using interface6::flatten2d;

} // namespace tbb

#endif /* __TBB_enumerable_thread_specific_H */