Main Page | Modules | Namespace List | Class Hierarchy | Class List | File List | Namespace Members | Class Members | Related Pages

atomic.h

00001 /*
00002     Copyright 2005-2008 Intel Corporation.  All Rights Reserved.
00003 
00004     The source code contained or described herein and all documents related
00005     to the source code ("Material") are owned by Intel Corporation or its
00006     suppliers or licensors.  Title to the Material remains with Intel
00007     Corporation or its suppliers and licensors.  The Material is protected
00008     by worldwide copyright laws and treaty provisions.  No part of the
00009     Material may be used, copied, reproduced, modified, published, uploaded,
00010     posted, transmitted, distributed, or disclosed in any way without
00011     Intel's prior express written permission.
00012 
00013     No license under any patent, copyright, trade secret or other
00014     intellectual property right is granted to or conferred upon you by
00015     disclosure or delivery of the Materials, either expressly, by
00016     implication, inducement, estoppel or otherwise.  Any license under such
00017     intellectual property rights must be express and approved by Intel in
00018     writing.
00019 */
00020 
00021 #ifndef __TBB_atomic_H
00022 #define __TBB_atomic_H
00023 
00024 #include <cstddef>
00025 #include "tbb_stddef.h"
00026 
00027 #if _MSC_VER 
00028 #define __TBB_LONG_LONG __int64
00029 #else
00030 #define __TBB_LONG_LONG long long
00031 #endif /* _MSC_VER */
00032 
00033 #include "tbb_machine.h"
00034 
00035 #if defined(_MSC_VER) && defined(_Wp64)
00036     // Workaround for overzealous compiler warnings in /Wp64 mode
00037     #pragma warning (push)
00038     #pragma warning (disable: 4244 4267)
00039 #endif /* _MSC_VER && _Wp64 */
00040 
00041 namespace tbb {
00042 
//! Specifies memory fencing (ordering) semantics of fenced atomic operations.
enum memory_semantics {
    //! For maximal safety: full fencing on both sides of the operation (the default).
    __TBB_full_fence,
    //! Acquire semantics: subsequent memory accesses cannot move before the operation.
    acquire,
    //! Release semantics: preceding memory accesses cannot move after the operation.
    release
};
00052 
00054 namespace internal {
00055 
//! Maps an operand size and memory semantics to the machine-level primitives.
//! The primary template is intentionally empty: only the specializations
//! generated by the __TBB_DECL_*ATOMIC_PRIMITIVES macros below are usable.
template<size_t Size, memory_semantics M>
struct atomic_traits {       // Primary template
};
00059 
//! Integral type wide enough to hold an atomic operand of the given Size.
//! Defaults to the machine word; specialized below for other sizes.
template<size_t Size>
struct atomic_word {             // Primary template
    typedef intptr word;
};
00064 
//! Holds the underlying value of an atomic<I>.
//! Specialized below for 64-bit types, which need forced 8-byte alignment.
template<typename I>            // Primary template
struct atomic_base {
    I my_value;
};
00069 
// __TBB_DECL_ATOMIC_FIELD(t,f,a) declares a field f of type t forced to
// alignment a, using whichever alignment syntax this compiler supports.
#if __GNUC__ || __SUNPRO_CC
#define __TBB_DECL_ATOMIC_FIELD(t,f,a) t f  __attribute__ ((aligned(a)));
#elif defined(__INTEL_COMPILER)||_MSC_VER >= 1300
#define __TBB_DECL_ATOMIC_FIELD(t,f,a) __declspec(align(a)) t f;
#else 
#error Do not know syntax for forcing alignment.
#endif /* __GNUC__ */
00077 
//! 8-byte operands use int64_t even where the machine word is narrower.
template<>
struct atomic_word<8> {          // Specialization
    typedef int64_t word;
};
00082 
#if _WIN32 && __TBB_x86_64
// ATTENTION: On 64-bit Windows, we currently have to specialize atomic_word
// for every size to avoid type conversion warnings, because the primitives
// there are declared with exact-width operand types rather than intptr.
// See declarations of atomic primitives in machine/windows_em64t.h
template<>
struct atomic_word<1> {          // Specialization
    typedef int8_t word;
};
template<>
struct atomic_word<2> {          // Specialization
    typedef int16_t word;
};
template<>
struct atomic_word<4> {          // Specialization
    typedef int32_t word;
};
#endif
00100 
// 64-bit atomics are forced to 8-byte alignment, presumably required by the
// underlying 8-byte machine primitives on 32-bit platforms — the default
// alignment of int64_t can be only 4 bytes there.
template<>
struct atomic_base<uint64_t> {   // Specialization
    __TBB_DECL_ATOMIC_FIELD(uint64_t,my_value,8)
};

template<>
struct atomic_base<int64_t> {    // Specialization
    __TBB_DECL_ATOMIC_FIELD(int64_t,my_value,8)
};
00110 
// Generates the atomic_traits<S,M> specialization that forwards
// compare_and_swap / fetch_and_add / fetch_and_store to the fenced machine
// primitive __TBB_<Op><S><M> declared by the port in tbb_machine.h.
#define __TBB_DECL_FENCED_ATOMIC_PRIMITIVES(S,M)                         \
    template<> struct atomic_traits<S,M> {                               \
        typedef atomic_word<S>::word word;                               \
        inline static word compare_and_swap( volatile void* location, word new_value, word comparand ) {\
            return __TBB_CompareAndSwap##S##M(location,new_value,comparand);    \
        }                                                                       \
        inline static word fetch_and_add( volatile void* location, word addend ) { \
            return __TBB_FetchAndAdd##S##M(location,addend);                    \
        }                                                                       \
        inline static word fetch_and_store( volatile void* location, word value ) {\
            return __TBB_FetchAndStore##S##M(location,value);                   \
        }                                                                       \
    };
00124 
// Fallback for ports without per-semantics primitives: generates a
// atomic_traits<S,M> valid for every memory_semantics M, forwarding all
// operations to the full-fence machine primitive __TBB_<Op><S>.
#define __TBB_DECL_ATOMIC_PRIMITIVES(S)                                  \
    template<memory_semantics M>                                         \
    struct atomic_traits<S,M> {                                          \
        typedef atomic_word<S>::word word;                               \
        inline static word compare_and_swap( volatile void* location, word new_value, word comparand ) {\
            return __TBB_CompareAndSwap##S(location,new_value,comparand);       \
        }                                                                       \
        inline static word fetch_and_add( volatile void* location, word addend ) { \
            return __TBB_FetchAndAdd##S(location,addend);                       \
        }                                                                       \
        inline static word fetch_and_store( volatile void* location, word value ) {\
            return __TBB_FetchAndStore##S(location,value);                      \
        }                                                                       \
    };
00139 
// Instantiate atomic_traits for operand sizes 1, 2, 4 and 8.  A port that
// supplies per-semantics primitives defines __TBB_DECL_FENCED_ATOMICS and
// gets one specialization per (size, fence) pair; otherwise every
// memory_semantics value shares the full-fence primitives.
#if __TBB_DECL_FENCED_ATOMICS
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(1,__TBB_full_fence)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(2,__TBB_full_fence)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(4,__TBB_full_fence)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(8,__TBB_full_fence)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(1,acquire)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(2,acquire)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(4,acquire)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(8,acquire)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(1,release)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(2,release)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(4,release)
__TBB_DECL_FENCED_ATOMIC_PRIMITIVES(8,release)
#else
__TBB_DECL_ATOMIC_PRIMITIVES(1)
__TBB_DECL_ATOMIC_PRIMITIVES(2)
__TBB_DECL_ATOMIC_PRIMITIVES(4)
__TBB_DECL_ATOMIC_PRIMITIVES(8)
#endif
00159 
//! Additive inverse of 1, computed with binary (not unary) minus so that
//! unsigned instantiations do not trigger "unary minus on unsigned" warnings.
#define __TBB_MINUS_ONE(T) (T(T(0)-T(1)))
00164 
00165 template<typename I, typename D, size_t Step>
00166 struct atomic_impl: private atomic_base<I> {
00167 private:
00168     typedef typename atomic_word<sizeof(I)>::word word;
00169 public:
00170     typedef I value_type;
00171 
00172     template<memory_semantics M>
00173     value_type fetch_and_add( D addend ) {
00174         return value_type(internal::atomic_traits<sizeof(value_type),M>::fetch_and_add( &this->my_value, addend*Step ));
00175     }
00176 
00177     value_type fetch_and_add( D addend ) {
00178         return fetch_and_add<__TBB_full_fence>(addend);
00179     }
00180 
00181     template<memory_semantics M>
00182     value_type fetch_and_increment() {
00183         return fetch_and_add<M>(1);
00184     }
00185 
00186     value_type fetch_and_increment() {
00187         return fetch_and_add(1);
00188     }
00189 
00190     template<memory_semantics M>
00191     value_type fetch_and_decrement() {
00192         return fetch_and_add<M>(__TBB_MINUS_ONE(D));
00193     }
00194 
00195     value_type fetch_and_decrement() {
00196         return fetch_and_add(__TBB_MINUS_ONE(D));
00197     }
00198 
00199     template<memory_semantics M>
00200     value_type fetch_and_store( value_type value ) {
00201         return value_type(internal::atomic_traits<sizeof(value_type),M>::fetch_and_store(&this->my_value,word(value)));
00202     }
00203 
00204     value_type fetch_and_store( value_type value ) {
00205         return fetch_and_store<__TBB_full_fence>(value);
00206     }
00207 
00208     template<memory_semantics M>
00209     value_type compare_and_swap( value_type value, value_type comparand ) {
00210         return value_type(internal::atomic_traits<sizeof(value_type),M>::compare_and_swap(&this->my_value,word(value),word(comparand)));
00211     }
00212 
00213     value_type compare_and_swap( value_type value, value_type comparand ) {
00214         return compare_and_swap<__TBB_full_fence>(value,comparand);
00215     }
00216 
00217     operator value_type() const volatile {                // volatile qualifier here for backwards compatibility 
00218         return __TBB_load_with_acquire( this->my_value );
00219     }
00220 
00221     value_type& _internal_reference() const {
00222         return static_cast<value_type&>(this->my_value);
00223     }
00224 
00225 protected:
00226     value_type store_with_release( value_type rhs ) {
00227         __TBB_store_with_release(this->my_value,rhs);
00228         return rhs;
00229     }
00230 
00231 public:
00232     value_type operator+=( D addend ) {
00233         return fetch_and_add(addend)+addend;
00234     }
00235 
00236     value_type operator-=( D addend ) {
00237         // Additive inverse of addend computed using binary minus,
00238         // instead of unary minus, for sake of avoiding compiler warnings.
00239         return operator+=(D(0)-addend);    
00240     }
00241 
00242     value_type operator++() {
00243         return fetch_and_add(1)+1;
00244     }
00245 
00246     value_type operator--() {
00247         return fetch_and_add(__TBB_MINUS_ONE(D))-1;
00248     }
00249 
00250     value_type operator++(int) {
00251         return fetch_and_add(1);
00252     }
00253 
00254     value_type operator--(int) {
00255         return fetch_and_add(__TBB_MINUS_ONE(D));
00256     }
00257 };
00258 
#if __TBB_WORDSIZE == 4
// Platforms with 32-bit hardware require special effort for 64-bit loads and
// stores: plain loads/stores of 8-byte values are routed through the
// __TBB_Load8/__TBB_Store8 machine primitives instead of ordinary accesses.
#if defined(__INTEL_COMPILER)||!defined(_MSC_VER)||_MSC_VER>=1400

// Acquire-load of a signed 64-bit atomic on a 32-bit platform.
template<>
inline atomic_impl<__TBB_LONG_LONG,__TBB_LONG_LONG,1>::operator atomic_impl<__TBB_LONG_LONG,__TBB_LONG_LONG,1>::value_type() const volatile {
    return __TBB_Load8(&this->my_value);
}

// Acquire-load of an unsigned 64-bit atomic on a 32-bit platform.
template<>
inline atomic_impl<unsigned __TBB_LONG_LONG,unsigned __TBB_LONG_LONG,1>::operator atomic_impl<unsigned __TBB_LONG_LONG,unsigned __TBB_LONG_LONG,1>::value_type() const volatile {
    return __TBB_Load8(&this->my_value);
}

// Release-store of a signed 64-bit atomic on a 32-bit platform.
template<>
inline atomic_impl<__TBB_LONG_LONG,__TBB_LONG_LONG,1>::value_type atomic_impl<__TBB_LONG_LONG,__TBB_LONG_LONG,1>::store_with_release( value_type rhs ) {
    __TBB_Store8(&this->my_value,rhs);
    return rhs;
}

// Release-store of an unsigned 64-bit atomic on a 32-bit platform.
template<>
inline atomic_impl<unsigned __TBB_LONG_LONG,unsigned __TBB_LONG_LONG,1>::value_type atomic_impl<unsigned __TBB_LONG_LONG,unsigned __TBB_LONG_LONG,1>::store_with_release( value_type rhs ) {
    __TBB_Store8(&this->my_value,rhs);
    return rhs;
}

#endif /* defined(__INTEL_COMPILER)||!defined(_MSC_VER)||_MSC_VER>=1400 */
#endif /* __TBB_WORDSIZE==4 */
00287 
00288 } /* Internal */
00290 
00292 
//! Primary template for atomic<T>.
//! Intentionally empty: only the specializations below (integral types,
//! pointer types, void*, and bool) provide atomic operations.
template<typename T>
struct atomic {
};
00297 
// Declares the full specialization atomic<T> for an integral type T: the
// fetch-and-* operations come from atomic_impl, and the two assignment
// operators added here store with release semantics.
#define __TBB_DECL_ATOMIC(T) \
    template<> struct atomic<T>: internal::atomic_impl<T,T,1> {  \
        T operator=( T rhs ) {return store_with_release(rhs);}  \
        atomic<T>& operator=( const atomic<T>& rhs ) {store_with_release(rhs); return *this;}  \
    };
00303 
#if defined(__INTEL_COMPILER)||!defined(_MSC_VER)||_MSC_VER>=1400
__TBB_DECL_ATOMIC(__TBB_LONG_LONG)
__TBB_DECL_ATOMIC(unsigned __TBB_LONG_LONG)
#else
// Some old versions of MSVC cannot correctly compile templates with "long long".
#endif /* defined(__INTEL_COMPILER)||!defined(_MSC_VER)||_MSC_VER>=1400 */
__TBB_DECL_ATOMIC(long)
__TBB_DECL_ATOMIC(unsigned long)
__TBB_DECL_ATOMIC(unsigned int)
__TBB_DECL_ATOMIC(int)
__TBB_DECL_ATOMIC(unsigned short)
__TBB_DECL_ATOMIC(short)
__TBB_DECL_ATOMIC(char)
__TBB_DECL_ATOMIC(signed char)
__TBB_DECL_ATOMIC(unsigned char)

// Skip wchar_t when MSVC treats it as a typedef for unsigned short
// (without /Zc:wchar_t), since atomic<wchar_t> would then duplicate
// the atomic<unsigned short> specialization above.
#if !defined(_MSC_VER)||defined(_NATIVE_WCHAR_T_DEFINED) 
__TBB_DECL_ATOMIC(wchar_t)
#endif /* !defined(_MSC_VER)||defined(_NATIVE_WCHAR_T_DEFINED) */
00323 
00324 template<typename T> struct atomic<T*>: internal::atomic_impl<T*,ptrdiff_t,sizeof(T)> {
00325     T* operator=( T* rhs ) {
00326         // "this" required here in strict ISO C++ because store_with_release is a dependent name
00327         return this->store_with_release(rhs);
00328     }
00329     atomic<T*>& operator=( const atomic<T*> rhs ) {this->store_with_release(rhs); return *this;}
00330     T* operator->() const {
00331         return (*this);
00332     }
00333 };
00334 
//! Specialization for atomic<void*>.
//! void* admits no pointer arithmetic, so this specialization does not derive
//! from atomic_impl; it provides only load, store, compare_and_swap and
//! fetch_and_store, going through internal::intptr for the machine primitives.
template<>
struct atomic<void*> {
private:
    void* my_value;

public:
    typedef void* value_type;

    //! Atomically store value if the current value equals comparand;
    //! returns the previous value either way.
    template<memory_semantics M>
    value_type compare_and_swap( value_type value, value_type comparand ) {
        return value_type(internal::atomic_traits<sizeof(value_type),M>::compare_and_swap(&my_value,internal::intptr(value),internal::intptr(comparand)));
    }

    //! compare_and_swap with the default (full fence) semantics.
    value_type compare_and_swap( value_type value, value_type comparand ) {
        return compare_and_swap<__TBB_full_fence>(value,comparand);
    }

    //! Atomically replace the value; returns the previous value.
    template<memory_semantics M>
    value_type fetch_and_store( value_type value ) {
        return value_type(internal::atomic_traits<sizeof(value_type),M>::fetch_and_store(&my_value,internal::intptr(value)));
    }

    //! fetch_and_store with the default (full fence) semantics.
    value_type fetch_and_store( value_type value ) {
        return fetch_and_store<__TBB_full_fence>(value);
    }

    //! Load the value with acquire semantics.
    operator value_type() const {
        return __TBB_load_with_acquire(my_value);
    }

    //! Store rhs with release semantics; returns rhs.
    value_type operator=( value_type rhs ) {
        __TBB_store_with_release(my_value,rhs);
        return rhs;
    }

    //! Copy assignment: acquire-load rhs (via operator value_type()),
    //! release-store into *this.
    atomic<void*>& operator=( const atomic<void*>& rhs ) {
        __TBB_store_with_release(my_value,rhs);
        return *this;
    }
};
00375 
//! Specialization for atomic<bool>.
//! bool supports no arithmetic, so this specialization does not derive from
//! atomic_impl; machine primitives return a word, which is converted back to
//! bool with "!=0".
template<>
struct atomic<bool> {
private:
    bool my_value;
    // Integral type of the same size as bool used by the machine primitives.
    typedef internal::atomic_word<sizeof(bool)>::word word;
public:
    typedef bool value_type;
    //! Atomically store value if the current value equals comparand;
    //! returns the previous value either way.
    template<memory_semantics M>
    value_type compare_and_swap( value_type value, value_type comparand ) {
        return internal::atomic_traits<sizeof(value_type),M>::compare_and_swap(&my_value,word(value),word(comparand))!=0;
    }

    //! compare_and_swap with the default (full fence) semantics.
    value_type compare_and_swap( value_type value, value_type comparand ) {
        return compare_and_swap<__TBB_full_fence>(value,comparand);
    }

    //! Atomically replace the value; returns the previous value.
    template<memory_semantics M>
    value_type fetch_and_store( value_type value ) {
        return internal::atomic_traits<sizeof(value_type),M>::fetch_and_store(&my_value,word(value))!=0;
    }

    //! fetch_and_store with the default (full fence) semantics.
    value_type fetch_and_store( value_type value ) {
        return fetch_and_store<__TBB_full_fence>(value);
    }

    //! Load the value with acquire semantics.
    operator value_type() const {
        return __TBB_load_with_acquire(my_value);
    }

    //! Store rhs with release semantics; returns rhs.
    value_type operator=( value_type rhs ) {
        __TBB_store_with_release(my_value,rhs);
        return rhs;
    }

    //! Copy assignment: acquire-load rhs (via operator value_type()),
    //! release-store into *this.
    atomic<bool>& operator=( const atomic<bool>& rhs ) {
        __TBB_store_with_release(my_value,rhs);
        return *this;
    }
};
00415 
00416 } // namespace tbb
00417 
00418 #if defined(_MSC_VER) && defined(_Wp64)
00419     // Workaround for overzealous compiler warnings in /Wp64 mode
00420     #pragma warning (pop)
00421 #endif /* _MSC_VER && _Wp64 */
00422 
00423 #endif /* __TBB_atomic_H */

Copyright © 2005-2008 Intel Corporation. All Rights Reserved.

Intel, Pentium, Intel Xeon, Itanium, Intel XScale and VTune are registered trademarks or trademarks of Intel Corporation or its subsidiaries in the United States and other countries.

* Other names and brands may be claimed as the property of others.