Changeset 33e62f1b for doc/theses/thierry_delisle_PhD/code
- Timestamp:
- May 21, 2020, 5:11:46 PM
- Branches:
- ADT, arm-eh, ast-experimental, enum, forall-pointer-decay, jacob/cs343-translation, master, new-ast, new-ast-unique-expr, pthread-emulation, qualifiedEnum
- Children:
- 2802824
- Parents:
- 9da5a50
- Location:
- doc/theses/thierry_delisle_PhD/code
- Files:
- 1 added
- 2 edited
doc/theses/thierry_delisle_PhD/code/relaxed_list.hpp
(r9da5a50 → r33e62f1b)

 #include "assert.hpp"
 #include "utils.hpp"
+#include "snzi.hpp"

 using namespace std;
-
-struct spinlock_t {
-	std::atomic_bool ll = { false };
-
-	inline void lock() {
-		while( __builtin_expect(ll.exchange(true),false) ) {
-			while(ll.load(std::memory_order_relaxed))
-				asm volatile("pause");
-		}
-	}
-
-	inline bool try_lock() {
-		return false == ll.exchange(true);
-	}
-
-	inline void unlock() {
-		ll.store(false, std::memory_order_release);
-	}
-
-	inline explicit operator bool() {
-		return ll.load(std::memory_order_relaxed);
-	}
-};
-
-static inline bool bts(std::atomic_size_t & target, size_t bit ) {
-	//*
-	int result = 0;
-	asm volatile(
-		"LOCK btsq %[bit], %[target]\n\t"
-		:"=@ccc" (result)
-		: [target] "m" (target), [bit] "r" (bit)
-	);
-	return result != 0;
-	/*/
-	size_t mask = 1ul << bit;
-	size_t ret = target.fetch_or(mask, std::memory_order_relaxed);
-	return (ret & mask) != 0;
-	//*/
-}
-
-static inline bool btr(std::atomic_size_t & target, size_t bit ) {
-	//*
-	int result = 0;
-	asm volatile(
-		"LOCK btrq %[bit], %[target]\n\t"
-		:"=@ccc" (result)
-		: [target] "m" (target), [bit] "r" (bit)
-	);
-	return result != 0;
-	/*/
-	size_t mask = 1ul << bit;
-	size_t ret = target.fetch_and(~mask, std::memory_order_relaxed);
-	return (ret & mask) != 0;
-	//*/
-}

 extern bool enable_stats;
…
 		: lists(new intrusive_queue_t[numLists])
 		, numLists(numLists)
+		#if defined(SIMPLE_SNZI)
+			, snzi(4)
+		#endif
 	{
 		assertf(7 * 8 * 8 >= numLists, "List currently only supports 448 sublists");
…
 			if( !lists[i].lock.try_lock() ) continue;

-			__attribute__((unused)) int num = numNonEmpty;
+			#if !defined(SIMPLE_SNZI)
+				__attribute__((unused)) int num = numNonEmpty;
+			#endif

 			// Actually push it
…
 				assert(qword == 0);
 				bts(tls.mask, bit);
+			#elif defined(SIMPLE_SNZI)
+				snzi.arrive(i);
 			#elif !defined(NO_BITMASK)
 				numNonEmpty++;
…
 			#endif
 			}
-			assert(numNonEmpty <= (int)numLists);
+			#if !defined(SIMPLE_SNZI)
+				assert(numNonEmpty <= (int)numLists);
+			#endif

 			// Unlock and return
…
 			#ifndef NO_STATS
 				tls.pick.push.success++;
-				tls.empty.push.value += num;
-				tls.empty.push.count += 1;
+				#if !defined(SIMPLE_SNZI)
+					tls.empty.push.value += num;
+					tls.empty.push.count += 1;
+				#endif
 			#endif
 			return;
…
 				if(auto node = try_pop(i, j)) return node;
 			}
+		#elif defined(SIMPLE_SNZI)
+			while(snzi.query()) {
+				// Pick two lists at random
+				int i = tls.rng.next() % numLists;
+				int j = tls.rng.next() % numLists;
+
+				if(auto node = try_pop(i, j)) return node;
+			}
 		#elif !defined(NO_BITMASK)
 			int nnempty;
…
 		if( !list.lock.try_lock() ) return nullptr;

-		__attribute__((unused)) int num = numNonEmpty;
+		#if !defined(SIMPLE_SNZI)
+			__attribute__((unused)) int num = numNonEmpty;
+		#endif

 		// If list is empty, unlock and retry
…
 			assert(qword == 0);
 			__attribute__((unused)) bool ret = btr(tls.mask, bit);
+		#elif defined(SIMPLE_SNZI)
+			snzi.depart(w);
 		#elif !defined(NO_BITMASK)
 			numNonEmpty--;
…
 		// Unlock and return
 		list.lock.unlock();
-		assert(numNonEmpty >= 0);
+		#if !defined(SIMPLE_SNZI)
+			assert(numNonEmpty >= 0);
+		#endif
 		#ifndef NO_STATS
 			tls.pick.pop.success++;
-			tls.empty.pop.value += num;
-			tls.empty.pop.count += 1;
+			#if !defined(SIMPLE_SNZI)
+				tls.empty.pop.value += num;
+				tls.empty.pop.count += 1;
+			#endif
 		#endif
 		return node;
…
 			size_t push = 0;
 			size_t pop = 0;
-			// size_t value = 0;
-			// size_t count = 0;
 		};

…
 	} tls;

-public:
-	std::atomic_int numNonEmpty = { 0 };   // number of non-empty lists
-	std::atomic_size_t list_mask[7] = { {0}, {0}, {0}, {0}, {0}, {0}, {0} };   // which queues are empty
 private:
 	__attribute__((aligned(64))) std::unique_ptr<intrusive_queue_t []> lists;
 	const unsigned numLists;
+private:
+	#if defined(SIMPLE_SNZI)
+		snzi_t snzi;
+	#else
+		std::atomic_int numNonEmpty = { 0 };   // number of non-empty lists
+		std::atomic_size_t list_mask[7] = { {0}, {0}, {0}, {0}, {0}, {0}, {0} };   // which queues are empty
+	#endif

 public:
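
The new SIMPLE_SNZI path relies on a snzi_t type from the added snzi.hpp, whose contents are not shown in this changeset. From the call sites above, the list only needs a constructor taking a depth (snzi(4)), arrive(i), depart(i), and query(). The sketch below is a hypothetical flat stand-in satisfying that interface, written only to illustrate how the diff uses it; the name flat_snzi_t, the per-list counters, and the ignored depth parameter are assumptions, and the real header is presumably a hierarchical Scalable NonZero Indicator.

#include <atomic>
#include <cstddef>

// Illustrative flat stand-in for the snzi_t interface used above.
// arrive(i)/depart(i) record that sublist i became non-empty/empty;
// query() answers "is any sublist non-empty?". Not the real snzi.hpp.
struct flat_snzi_t {
	explicit flat_snzi_t(unsigned /*depth*/) {}   // depth unused in this flat sketch

	void arrive(std::size_t i) {
		// first arrival on sublist i bumps the shared root counter
		if(counters[i].fetch_add(1, std::memory_order_relaxed) == 0)
			root.fetch_add(1, std::memory_order_release);
	}

	void depart(std::size_t i) {
		// last departure from sublist i drops the shared root counter
		if(counters[i].fetch_sub(1, std::memory_order_relaxed) == 1)
			root.fetch_sub(1, std::memory_order_release);
	}

	bool query() const {
		return root.load(std::memory_order_acquire) > 0;
	}

private:
	static const std::size_t max_lists = 448;     // mirrors the 7 * 8 * 8 assert above
	std::atomic<int> counters[max_lists] = {};
	std::atomic<int> root = { 0 };
};

Compared with the single numNonEmpty counter and the list_mask words it replaces, the intent appears to be that an SNZI spreads the contention of the emptiness check across several counters while still answering the one question pop() needs: is anything in the list at all?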
doc/theses/thierry_delisle_PhD/code/utils.hpp
(r9da5a50 → r33e62f1b)

 	#endif
 }
+
+struct spinlock_t {
+	std::atomic_bool ll = { false };
+
+	inline void lock() {
+		while( __builtin_expect(ll.exchange(true),false) ) {
+			while(ll.load(std::memory_order_relaxed))
+				asm volatile("pause");
+		}
+	}
+
+	inline bool try_lock() {
+		return false == ll.exchange(true);
+	}
+
+	inline void unlock() {
+		ll.store(false, std::memory_order_release);
+	}
+
+	inline explicit operator bool() {
+		return ll.load(std::memory_order_relaxed);
+	}
+};
+
+static inline bool bts(std::atomic_size_t & target, size_t bit ) {
+	//*
+	int result = 0;
+	asm volatile(
+		"LOCK btsq %[bit], %[target]\n\t"
+		:"=@ccc" (result)
+		: [target] "m" (target), [bit] "r" (bit)
+	);
+	return result != 0;
+	/*/
+	size_t mask = 1ul << bit;
+	size_t ret = target.fetch_or(mask, std::memory_order_relaxed);
+	return (ret & mask) != 0;
+	//*/
+}
+
+static inline bool btr(std::atomic_size_t & target, size_t bit ) {
+	//*
+	int result = 0;
+	asm volatile(
+		"LOCK btrq %[bit], %[target]\n\t"
+		:"=@ccc" (result)
+		: [target] "m" (target), [bit] "r" (bit)
+	);
+	return result != 0;
+	/*/
+	size_t mask = 1ul << bit;
+	size_t ret = target.fetch_and(~mask, std::memory_order_relaxed);
+	return (ret & mask) != 0;
+	//*/
+}
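
For context, here is a small usage sketch of the helpers that now live in utils.hpp: spinlock_t as a test-and-test-and-set lock, and bts/btr maintaining a one-bit-per-sublist occupancy word. These helpers are GCC/Clang on x86-64 only, because of the inline btsq/btrq assembly and the "=@ccc" flag-output constraint. The function names and the occupancy variable below are illustrative only, not part of the changeset.

#include <atomic>
#include <cstddef>
#include "utils.hpp"   // spinlock_t, bts, btr after this changeset

static spinlock_t lock;                        // protects the mask below
static std::atomic_size_t occupancy = { 0 };   // one bit per sublist (at most 64 here)

// Hypothetical helpers, for illustration only.
void mark_nonempty(std::size_t idx) {
	lock.lock();
	// set bit idx; the return value reports whether it was already set
	__attribute__((unused)) bool was_set = bts(occupancy, idx);
	lock.unlock();
}

bool try_mark_empty(std::size_t idx) {
	if( !lock.try_lock() ) return false;   // caller may retry later
	btr(occupancy, idx);                   // clear bit idx
	lock.unlock();
	return true;
}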