Changeset 533540a
- Timestamp:
- May 28, 2018, 5:02:07 PM (6 years ago)
- Branches:
- ADT, aaron-thesis, arm-eh, ast-experimental, cleanup-dtors, deferred_resn, demangler, enum, forall-pointer-decay, jacob/cs343-translation, jenkins-sandbox, master, new-ast, new-ast-unique-expr, new-env, no_list, persistent-indexer, pthread-emulation, qualifiedEnum, with_gc
- Children:
- 17238fd, 4ee3b0c1
- Parents:
- 13e8427 (diff), 1f81d61 (diff)
Note: this is a merge changeset; the changes displayed below correspond to the merge itself.
Use the (diff)
links above to see all the changes relative to each parent. - Location:
- src
- Files:
-
- 6 edited
Legend:
- Unmodified
- Added
- Removed
-
src/libcfa/concurrency/preemption.c
r13e8427 r533540a 161 161 void disable_interrupts() { 162 162 with( kernelTLS.preemption_state ) { 163 #if GCC_VERSION > 50000 163 164 static_assert(__atomic_always_lock_free(sizeof(enabled), &enabled), "Must be lock-free"); 165 #endif 164 166 165 167 // Set enabled flag to false … … 190 192 // Check if we need to prempt the thread because an interrupt was missed 191 193 if( prev == 1 ) { 194 #if GCC_VERSION > 50000 192 195 static_assert(__atomic_always_lock_free(sizeof(enabled), &enabled), "Must be lock-free"); 196 #endif 193 197 194 198 // Set enabled flag to true … … 217 221 verifyf( prev != 0u, "Incremented from %u\n", prev ); // If this triggers someone is enabled already enabled interrupts 218 222 if( prev == 1 ) { 223 #if GCC_VERSION > 50000 219 224 static_assert(__atomic_always_lock_free(sizeof(kernelTLS.preemption_state.enabled), &kernelTLS.preemption_state.enabled), "Must be lock-free"); 225 #endif 220 226 // Set enabled flag to true 221 227 // should be atomic to avoid preemption in the middle of the operation. … … 376 382 377 383 // Clear sighandler mask before context switching. 384 #if GCC_VERSION > 50000 378 385 static_assert( sizeof( sigset_t ) == sizeof( cxt->uc_sigmask ), "Expected cxt->uc_sigmask to be of sigset_t" ); 386 #endif 379 387 if ( pthread_sigmask( SIG_SETMASK, (sigset_t *)&(cxt->uc_sigmask), NULL ) == -1 ) { 380 388 abort( "internal error, sigprocmask" ); -
src/prelude/Makefile.am
r13e8427 r533540a 37 37 # create forward declarations for gcc builtins 38 38 gcc-builtins.cf : gcc-builtins.c prototypes.sed 39 ${AM_V_GEN}@BACKEND_CC@ -E -P $< | sed -r -f prototypes.sed > $@39 ${AM_V_GEN}@BACKEND_CC@ @CFA_FLAGS@ -E -P $< | sed -r -f prototypes.sed > $@ 40 40 41 41 gcc-builtins.c : builtins.def prototypes.awk sync-builtins.cf 42 ${AM_V_GEN}@BACKEND_CC@ -E prototypes.c | awk -f prototypes.awk > $@42 ${AM_V_GEN}@BACKEND_CC@ @CFA_FLAGS@ -E prototypes.c | awk -f prototypes.awk > $@ 43 43 44 44 builtins.def : -
src/prelude/Makefile.in
r13e8427 r533540a 506 506 # create forward declarations for gcc builtins 507 507 gcc-builtins.cf : gcc-builtins.c prototypes.sed 508 ${AM_V_GEN}@BACKEND_CC@ -E -P $< | sed -r -f prototypes.sed > $@508 ${AM_V_GEN}@BACKEND_CC@ @CFA_FLAGS@ -E -P $< | sed -r -f prototypes.sed > $@ 509 509 510 510 gcc-builtins.c : builtins.def prototypes.awk sync-builtins.cf 511 ${AM_V_GEN}@BACKEND_CC@ -E prototypes.c | awk -f prototypes.awk > $@511 ${AM_V_GEN}@BACKEND_CC@ @CFA_FLAGS@ -E prototypes.c | awk -f prototypes.awk > $@ 512 512 513 513 builtins.def : -
src/prelude/prelude.cf
r13e8427 r533540a 458 458 signed long long int ?=?( signed long long int &, signed long long int ), ?=?( volatile signed long long int &, signed long long int ); 459 459 unsigned long long int ?=?( unsigned long long int &, unsigned long long int ), ?=?( volatile unsigned long long int &, unsigned long long int ); 460 __int128 ?=?( __int128 &, __int128 ), ?=?( volatile __int128 &, __int128 );461 460 zero_t ?=?( zero_t &, zero_t ); 462 461 one_t ?=?( one_t &, one_t ); -
src/prelude/sync-builtins.cf
r13e8427 r533540a 7 7 long long int __sync_fetch_and_add(volatile long long int *, long long int,...); 8 8 long long int __sync_fetch_and_add_8(volatile long long int *, long long int,...); 9 #if defined(__SIZEOF_INT128__) 9 10 __int128 __sync_fetch_and_add(volatile __int128 *, __int128,...); 10 11 __int128 __sync_fetch_and_add_16(volatile __int128 *, __int128,...); 12 #endif 11 13 12 14 char __sync_fetch_and_sub(volatile char *, char,...); … … 18 20 long long int __sync_fetch_and_sub(volatile long long int *, long long int,...); 19 21 long long int __sync_fetch_and_sub_8(volatile long long int *, long long int,...); 22 #if defined(__SIZEOF_INT128__) 20 23 __int128 __sync_fetch_and_sub(volatile __int128 *, __int128,...); 21 24 __int128 __sync_fetch_and_sub_16(volatile __int128 *, __int128,...); 25 #endif 22 26 23 27 char __sync_fetch_and_or(volatile char *, char,...); … … 29 33 long long int __sync_fetch_and_or(volatile long long int *, long long int,...); 30 34 long long int __sync_fetch_and_or_8(volatile long long int *, long long int,...); 35 #if defined(__SIZEOF_INT128__) 31 36 __int128 __sync_fetch_and_or(volatile __int128 *, __int128,...); 32 37 __int128 __sync_fetch_and_or_16(volatile __int128 *, __int128,...); 38 #endif 33 39 34 40 char __sync_fetch_and_and(volatile char *, char,...); … … 40 46 long long int __sync_fetch_and_and(volatile long long int *, long long int,...); 41 47 long long int __sync_fetch_and_and_8(volatile long long int *, long long int,...); 48 #if defined(__SIZEOF_INT128__) 42 49 __int128 __sync_fetch_and_and(volatile __int128 *, __int128,...); 43 50 __int128 __sync_fetch_and_and_16(volatile __int128 *, __int128,...); 51 #endif 44 52 45 53 char __sync_fetch_and_xor(volatile char *, char,...); … … 51 59 long long int __sync_fetch_and_xor(volatile long long int *, long long int,...); 52 60 long long int __sync_fetch_and_xor_8(volatile long long int *, long long int,...); 61 #if defined(__SIZEOF_INT128__) 53 62 __int128 
__sync_fetch_and_xor(volatile __int128 *, __int128,...); 54 63 __int128 __sync_fetch_and_xor_16(volatile __int128 *, __int128,...); 64 #endif 55 65 56 66 char __sync_fetch_and_nand(volatile char *, char,...); … … 62 72 long long int __sync_fetch_and_nand(volatile long long int *, long long int,...); 63 73 long long int __sync_fetch_and_nand_8(volatile long long int *, long long int,...); 74 #if defined(__SIZEOF_INT128__) 64 75 __int128 __sync_fetch_and_nand(volatile __int128 *, __int128,...); 65 76 __int128 __sync_fetch_and_nand_16(volatile __int128 *, __int128,...); 77 #endif 66 78 67 79 char __sync_add_and_fetch(volatile char *, char,...); … … 73 85 long long int __sync_add_and_fetch(volatile long long int *, long long int,...); 74 86 long long int __sync_add_and_fetch_8(volatile long long int *, long long int,...); 87 #if defined(__SIZEOF_INT128__) 75 88 __int128 __sync_add_and_fetch(volatile __int128 *, __int128,...); 76 89 __int128 __sync_add_and_fetch_16(volatile __int128 *, __int128,...); 90 #endif 77 91 78 92 char __sync_sub_and_fetch(volatile char *, char,...); … … 84 98 long long int __sync_sub_and_fetch(volatile long long int *, long long int,...); 85 99 long long int __sync_sub_and_fetch_8(volatile long long int *, long long int,...); 100 #if defined(__SIZEOF_INT128__) 86 101 __int128 __sync_sub_and_fetch(volatile __int128 *, __int128,...); 87 102 __int128 __sync_sub_and_fetch_16(volatile __int128 *, __int128,...); 103 #endif 88 104 89 105 char __sync_or_and_fetch(volatile char *, char,...); … … 95 111 long long int __sync_or_and_fetch(volatile long long int *, long long int,...); 96 112 long long int __sync_or_and_fetch_8(volatile long long int *, long long int,...); 113 #if defined(__SIZEOF_INT128__) 97 114 __int128 __sync_or_and_fetch(volatile __int128 *, __int128,...); 98 115 __int128 __sync_or_and_fetch_16(volatile __int128 *, __int128,...); 116 #endif 99 117 100 118 char __sync_and_and_fetch(volatile char *, char,...); … … 106 124 long long int 
__sync_and_and_fetch(volatile long long int *, long long int,...); 107 125 long long int __sync_and_and_fetch_8(volatile long long int *, long long int,...); 126 #if defined(__SIZEOF_INT128__) 108 127 __int128 __sync_and_and_fetch(volatile __int128 *, __int128,...); 109 128 __int128 __sync_and_and_fetch_16(volatile __int128 *, __int128,...); 129 #endif 110 130 111 131 char __sync_xor_and_fetch(volatile char *, char,...); … … 117 137 long long int __sync_xor_and_fetch(volatile long long int *, long long int,...); 118 138 long long int __sync_xor_and_fetch_8(volatile long long int *, long long int,...); 139 #if defined(__SIZEOF_INT128__) 119 140 __int128 __sync_xor_and_fetch(volatile __int128 *, __int128,...); 120 141 __int128 __sync_xor_and_fetch_16(volatile __int128 *, __int128,...); 142 #endif 121 143 122 144 char __sync_nand_and_fetch(volatile char *, char,...); … … 128 150 long long int __sync_nand_and_fetch(volatile long long int *, long long int,...); 129 151 long long int __sync_nand_and_fetch_8(volatile long long int *, long long int,...); 152 #if defined(__SIZEOF_INT128__) 130 153 __int128 __sync_nand_and_fetch(volatile __int128 *, __int128,...); 131 154 __int128 __sync_nand_and_fetch_16(volatile __int128 *, __int128,...); 155 #endif 132 156 133 157 _Bool __sync_bool_compare_and_swap(volatile char *, char, char,...); … … 139 163 _Bool __sync_bool_compare_and_swap(volatile long long int *, long long int, long long int,...); 140 164 _Bool __sync_bool_compare_and_swap_8(volatile long long int *, long long int, long long int,...); 165 #if defined(__SIZEOF_INT128__) 141 166 _Bool __sync_bool_compare_and_swap(volatile __int128 *, __int128, __int128,...); 142 167 _Bool __sync_bool_compare_and_swap_16(volatile __int128 *, __int128, __int128,...); 168 #endif 143 169 144 170 char __sync_val_compare_and_swap(volatile char *, char, char,...); … … 150 176 long long int __sync_val_compare_and_swap(volatile long long int *, long long int, long long int,...); 151 177 long 
long int __sync_val_compare_and_swap_8(volatile long long int *, long long int, long long int,...); 178 #if defined(__SIZEOF_INT128__) 152 179 __int128 __sync_val_compare_and_swap(volatile __int128 *, __int128, __int128,...); 153 180 __int128 __sync_val_compare_and_swap_16(volatile __int128 *, __int128, __int128,...); 181 #endif 154 182 155 183 char __sync_lock_test_and_set(volatile char *, char,...); … … 161 189 long long int __sync_lock_test_and_set(volatile long long int *, long long int,...); 162 190 long long int __sync_lock_test_and_set_8(volatile long long int *, long long int,...); 191 #if defined(__SIZEOF_INT128__) 163 192 __int128 __sync_lock_test_and_set(volatile __int128 *, __int128,...); 164 193 __int128 __sync_lock_test_and_set_16(volatile __int128 *, __int128,...); 194 #endif 165 195 166 196 void __sync_lock_release(volatile char *,...); … … 172 202 void __sync_lock_release(volatile long long int *,...); 173 203 void __sync_lock_release_8(volatile long long int *,...); 204 #if defined(__SIZEOF_INT128__) 174 205 void __sync_lock_release(volatile __int128 *,...); 175 206 void __sync_lock_release_16(volatile __int128 *,...); 207 #endif 176 208 177 209 void __sync_synchronize(); … … 185 217 _Bool __atomic_test_and_set(volatile int *, int); 186 218 _Bool __atomic_test_and_set(volatile long long int *, int); 219 #if defined(__SIZEOF_INT128__) 187 220 _Bool __atomic_test_and_set(volatile __int128 *, int); 221 #endif 222 188 223 void __atomic_clear(volatile _Bool *, int); 189 224 void __atomic_clear(volatile char *, int); … … 191 226 void __atomic_clear(volatile int *, int); 192 227 void __atomic_clear(volatile long long int *, int); 228 #if defined(__SIZEOF_INT128__) 193 229 void __atomic_clear(volatile __int128 *, int); 230 #endif 194 231 195 232 char __atomic_exchange_n(volatile char *, volatile char *, int); … … 205 242 long long int __atomic_exchange_8(volatile long long int *, long long int, int); 206 243 void __atomic_exchange(volatile long long int 
*, volatile long long int *, volatile long long int *, int); 244 #if defined(__SIZEOF_INT128__) 207 245 __int128 __atomic_exchange_n(volatile __int128 *, volatile __int128 *, int); 208 246 __int128 __atomic_exchange_16(volatile __int128 *, __int128, int); 209 247 void __atomic_exchange(volatile __int128 *, volatile __int128 *, volatile __int128 *, int); 248 #endif 210 249 211 250 char __atomic_load_n(const volatile char *, int); … … 221 260 long long int __atomic_load_8(const volatile long long int *, int); 222 261 void __atomic_load(const volatile long long int *, volatile long long int *, int); 262 #if defined(__SIZEOF_INT128__) 223 263 __int128 __atomic_load_n(const volatile __int128 *, int); 224 264 __int128 __atomic_load_16(const volatile __int128 *, int); 225 265 void __atomic_load(const volatile __int128 *, volatile __int128 *, int); 266 #endif 226 267 227 268 _Bool __atomic_compare_exchange_n(volatile char *, char *, char, _Bool, int, int); … … 237 278 _Bool __atomic_compare_exchange_8(volatile long long int *, long long int *, long long int, _Bool, int, int); 238 279 _Bool __atomic_compare_exchange (volatile long long int *, long long int *, long long int *, _Bool, int, int); 280 #if defined(__SIZEOF_INT128__) 239 281 _Bool __atomic_compare_exchange_n (volatile __int128 *, __int128 *, __int128, _Bool, int, int); 240 282 _Bool __atomic_compare_exchange_16(volatile __int128 *, __int128 *, __int128, _Bool, int, int); 241 283 _Bool __atomic_compare_exchange (volatile __int128 *, __int128 *, __int128 *, _Bool, int, int); 284 #endif 242 285 243 286 void __atomic_store_n(volatile _Bool *, _Bool, int); … … 256 299 void __atomic_store_8(volatile long long int *, long long int, int); 257 300 void __atomic_store(volatile long long int *, long long int *, int); 301 #if defined(__SIZEOF_INT128__) 258 302 void __atomic_store_n(volatile __int128 *, __int128, int); 259 303 void __atomic_store_16(volatile __int128 *, __int128, int); 260 304 void __atomic_store(volatile 
__int128 *, __int128 *, int); 305 #endif 261 306 262 307 char __atomic_add_fetch (volatile char *, char, int); … … 268 313 long long int __atomic_add_fetch (volatile long long int *, long long int, int); 269 314 long long int __atomic_add_fetch_8(volatile long long int *, long long int, int); 315 #if defined(__SIZEOF_INT128__) 270 316 __int128 __atomic_add_fetch (volatile __int128 *, __int128, int); 271 317 __int128 __atomic_add_fetch_16(volatile __int128 *, __int128, int); 318 #endif 272 319 273 320 char __atomic_sub_fetch (volatile char *, char, int); … … 279 326 long long int __atomic_sub_fetch (volatile long long int *, long long int, int); 280 327 long long int __atomic_sub_fetch_8(volatile long long int *, long long int, int); 328 #if defined(__SIZEOF_INT128__) 281 329 __int128 __atomic_sub_fetch (volatile __int128 *, __int128, int); 282 330 __int128 __atomic_sub_fetch_16(volatile __int128 *, __int128, int); 331 #endif 283 332 284 333 char __atomic_and_fetch (volatile char *, char, int); … … 290 339 long long int __atomic_and_fetch (volatile long long int *, long long int, int); 291 340 long long int __atomic_and_fetch_8(volatile long long int *, long long int, int); 341 #if defined(__SIZEOF_INT128__) 292 342 __int128 __atomic_and_fetch (volatile __int128 *, __int128, int); 293 343 __int128 __atomic_and_fetch_16(volatile __int128 *, __int128, int); 344 #endif 294 345 295 346 char __atomic_nand_fetch (volatile char *, char, int); … … 301 352 long long int __atomic_nand_fetch (volatile long long int *, long long int, int); 302 353 long long int __atomic_nand_fetch_8(volatile long long int *, long long int, int); 354 #if defined(__SIZEOF_INT128__) 303 355 __int128 __atomic_nand_fetch (volatile __int128 *, __int128, int); 304 356 __int128 __atomic_nand_fetch_16(volatile __int128 *, __int128, int); 357 #endif 305 358 306 359 char __atomic_xor_fetch (volatile char *, char, int); … … 312 365 long long int __atomic_xor_fetch (volatile long long int *, long long int, 
int); 313 366 long long int __atomic_xor_fetch_8(volatile long long int *, long long int, int); 367 #if defined(__SIZEOF_INT128__) 314 368 __int128 __atomic_xor_fetch (volatile __int128 *, __int128, int); 315 369 __int128 __atomic_xor_fetch_16(volatile __int128 *, __int128, int); 370 #endif 316 371 317 372 char __atomic_or_fetch (volatile char *, char, int); … … 323 378 long long int __atomic_or_fetch (volatile long long int *, long long int, int); 324 379 long long int __atomic_or_fetch_8(volatile long long int *, long long int, int); 380 #if defined(__SIZEOF_INT128__) 325 381 __int128 __atomic_or_fetch (volatile __int128 *, __int128, int); 326 382 __int128 __atomic_or_fetch_16(volatile __int128 *, __int128, int); 383 #endif 327 384 328 385 char __atomic_fetch_add (volatile char *, char, int); … … 334 391 long long int __atomic_fetch_add (volatile long long int *, long long int, int); 335 392 long long int __atomic_fetch_add_8(volatile long long int *, long long int, int); 393 #if defined(__SIZEOF_INT128__) 336 394 __int128 __atomic_fetch_add (volatile __int128 *, __int128, int); 337 395 __int128 __atomic_fetch_add_16(volatile __int128 *, __int128, int); 396 #endif 338 397 339 398 char __atomic_fetch_sub (volatile char *, char, int); … … 345 404 long long int __atomic_fetch_sub (volatile long long int *, long long int, int); 346 405 long long int __atomic_fetch_sub_8(volatile long long int *, long long int, int); 406 #if defined(__SIZEOF_INT128__) 347 407 __int128 __atomic_fetch_sub (volatile __int128 *, __int128, int); 348 408 __int128 __atomic_fetch_sub_16(volatile __int128 *, __int128, int); 409 #endif 349 410 350 411 char __atomic_fetch_and (volatile char *, char, int); … … 356 417 long long int __atomic_fetch_and (volatile long long int *, long long int, int); 357 418 long long int __atomic_fetch_and_8(volatile long long int *, long long int, int); 419 #if defined(__SIZEOF_INT128__) 358 420 __int128 __atomic_fetch_and (volatile __int128 *, __int128, int); 359 
421 __int128 __atomic_fetch_and_16(volatile __int128 *, __int128, int); 422 #endif 360 423 361 424 char __atomic_fetch_nand (volatile char *, char, int); … … 367 430 long long int __atomic_fetch_nand (volatile long long int *, long long int, int); 368 431 long long int __atomic_fetch_nand_8(volatile long long int *, long long int, int); 432 #if defined(__SIZEOF_INT128__) 369 433 __int128 __atomic_fetch_nand (volatile __int128 *, __int128, int); 370 434 __int128 __atomic_fetch_nand_16(volatile __int128 *, __int128, int); 435 #endif 371 436 372 437 char __atomic_fetch_xor (volatile char *, char, int); … … 378 443 long long int __atomic_fetch_xor (volatile long long int *, long long int, int); 379 444 long long int __atomic_fetch_xor_8(volatile long long int *, long long int, int); 445 #if defined(__SIZEOF_INT128__) 380 446 __int128 __atomic_fetch_xor (volatile __int128 *, __int128, int); 381 447 __int128 __atomic_fetch_xor_16(volatile __int128 *, __int128, int); 448 #endif 382 449 383 450 char __atomic_fetch_or (volatile char *, char, int); … … 389 456 long long int __atomic_fetch_or (volatile long long int *, long long int, int); 390 457 long long int __atomic_fetch_or_8(volatile long long int *, long long int, int); 458 #if defined(__SIZEOF_INT128__) 391 459 __int128 __atomic_fetch_or (volatile __int128 *, __int128, int); 392 460 __int128 __atomic_fetch_or_16(volatile __int128 *, __int128, int); 461 #endif 393 462 394 463 _Bool __atomic_always_lock_free(unsigned long, const volatile void *); -
src/tests/builtins/sync.c
r13e8427 r533540a 8 8 volatile int * vp4 = 0; int * rp4 = 0; int v4 = 0; 9 9 volatile long long int * vp8 = 0; long long int * rp8 = 0; long long int v8 = 0; 10 #if defined(__SIZEOF_INT128__) 10 11 volatile __int128 * vp16 = 0; __int128 * rp16 = 0; __int128 v16 = 0; 12 #endif 11 13 12 14 { char ret; ret = __sync_fetch_and_add(vp1, v1); } … … 18 20 { long long int ret; ret = __sync_fetch_and_add(vp8, v8); } 19 21 { long long int ret; ret = __sync_fetch_and_add_8(vp8, v8); } 22 #if defined(__SIZEOF_INT128__) 20 23 { __int128 ret; ret = __sync_fetch_and_add(vp16, v16); } 21 24 { __int128 ret; ret = __sync_fetch_and_add_16(vp16, v16); } 25 #endif 22 26 23 27 { char ret; ret = __sync_fetch_and_sub(vp1, v1); } … … 29 33 { long long int ret; ret = __sync_fetch_and_sub(vp8, v8); } 30 34 { long long int ret; ret = __sync_fetch_and_sub_8(vp8, v8); } 35 #if defined(__SIZEOF_INT128__) 31 36 { __int128 ret; ret = __sync_fetch_and_sub(vp16, v16); } 32 37 { __int128 ret; ret = __sync_fetch_and_sub_16(vp16, v16); } 38 #endif 33 39 34 40 { char ret; ret = __sync_fetch_and_or(vp1, v1); } … … 40 46 { long long int ret; ret = __sync_fetch_and_or(vp8, v8); } 41 47 { long long int ret; ret = __sync_fetch_and_or_8(vp8, v8); } 48 #if defined(__SIZEOF_INT128__) 42 49 { __int128 ret; ret = __sync_fetch_and_or(vp16, v16); } 43 50 { __int128 ret; ret = __sync_fetch_and_or_16(vp16, v16); } 51 #endif 44 52 45 53 { char ret; ret = __sync_fetch_and_and(vp1, v1); } … … 51 59 { long long int ret; ret = __sync_fetch_and_and(vp8, v8); } 52 60 { long long int ret; ret = __sync_fetch_and_and_8(vp8, v8); } 61 #if defined(__SIZEOF_INT128__) 53 62 { __int128 ret; ret = __sync_fetch_and_and(vp16, v16); } 54 63 { __int128 ret; ret = __sync_fetch_and_and_16(vp16, v16); } 64 #endif 55 65 56 66 { char ret; ret = __sync_fetch_and_xor(vp1, v1); } … … 62 72 { long long int ret; ret = __sync_fetch_and_xor(vp8, v8); } 63 73 { long long int ret; ret = __sync_fetch_and_xor_8(vp8, v8); } 74 #if 
defined(__SIZEOF_INT128__) 64 75 { __int128 ret; ret = __sync_fetch_and_xor(vp16, v16); } 65 76 { __int128 ret; ret = __sync_fetch_and_xor_16(vp16, v16); } 77 #endif 66 78 67 79 { char ret; ret = __sync_fetch_and_nand(vp1, v1); } … … 73 85 { long long int ret; ret = __sync_fetch_and_nand(vp8, v8); } 74 86 { long long int ret; ret = __sync_fetch_and_nand_8(vp8, v8); } 87 #if defined(__SIZEOF_INT128__) 75 88 { __int128 ret; ret = __sync_fetch_and_nand(vp16, v16); } 76 89 { __int128 ret; ret = __sync_fetch_and_nand_16(vp16, v16); } 90 #endif 77 91 78 92 { char ret; ret = __sync_add_and_fetch(vp1, v1); } … … 84 98 { long long int ret; ret = __sync_add_and_fetch(vp8, v8); } 85 99 { long long int ret; ret = __sync_add_and_fetch_8(vp8, v8); } 100 #if defined(__SIZEOF_INT128__) 86 101 { __int128 ret; ret = __sync_add_and_fetch(vp16, v16); } 87 102 { __int128 ret; ret = __sync_add_and_fetch_16(vp16, v16); } 103 #endif 88 104 89 105 { char ret; ret = __sync_sub_and_fetch(vp1, v1); } … … 95 111 { long long int ret; ret = __sync_sub_and_fetch(vp8, v8); } 96 112 { long long int ret; ret = __sync_sub_and_fetch_8(vp8, v8); } 113 #if defined(__SIZEOF_INT128__) 97 114 { __int128 ret; ret = __sync_sub_and_fetch(vp16, v16); } 98 115 { __int128 ret; ret = __sync_sub_and_fetch_16(vp16, v16); } 116 #endif 99 117 100 118 { char ret; ret = __sync_or_and_fetch(vp1, v1); } … … 106 124 { long long int ret; ret = __sync_or_and_fetch(vp8, v8); } 107 125 { long long int ret; ret = __sync_or_and_fetch_8(vp8, v8); } 126 #if defined(__SIZEOF_INT128__) 108 127 { __int128 ret; ret = __sync_or_and_fetch(vp16, v16); } 109 128 { __int128 ret; ret = __sync_or_and_fetch_16(vp16, v16); } 129 #endif 110 130 111 131 { char ret; ret = __sync_and_and_fetch(vp1, v1); } … … 117 137 { long long int ret; ret = __sync_and_and_fetch(vp8, v8); } 118 138 { long long int ret; ret = __sync_and_and_fetch_8(vp8, v8); } 139 #if defined(__SIZEOF_INT128__) 119 140 { __int128 ret; ret = __sync_and_and_fetch(vp16, v16); } 120 
141 { __int128 ret; ret = __sync_and_and_fetch_16(vp16, v16); } 142 #endif 121 143 122 144 { char ret; ret = __sync_xor_and_fetch(vp1, v1); } … … 128 150 { long long int ret; ret = __sync_xor_and_fetch(vp8, v8); } 129 151 { long long int ret; ret = __sync_xor_and_fetch_8(vp8, v8); } 152 #if defined(__SIZEOF_INT128__) 130 153 { __int128 ret; ret = __sync_xor_and_fetch(vp16, v16); } 131 154 { __int128 ret; ret = __sync_xor_and_fetch_16(vp16, v16); } 155 #endif 132 156 133 157 { char ret; ret = __sync_nand_and_fetch(vp1, v1); } … … 139 163 { long long int ret; ret = __sync_nand_and_fetch(vp8, v8); } 140 164 { long long int ret; ret = __sync_nand_and_fetch_8(vp8, v8); } 165 #if defined(__SIZEOF_INT128__) 141 166 { __int128 ret; ret = __sync_nand_and_fetch(vp16, v16); } 142 167 { __int128 ret; ret = __sync_nand_and_fetch_16(vp16, v16); } 168 #endif 143 169 144 170 { _Bool ret; ret = __sync_bool_compare_and_swap(vp1, v1, v1); } … … 150 176 { _Bool ret; ret = __sync_bool_compare_and_swap(vp8, v8, v8); } 151 177 { _Bool ret; ret = __sync_bool_compare_and_swap_8(vp8, v8, v8); } 178 #if defined(__SIZEOF_INT128__) 152 179 { _Bool ret; ret = __sync_bool_compare_and_swap(vp16, v16, v16); } 153 180 { _Bool ret; ret = __sync_bool_compare_and_swap_16(vp16, v16,v16); } 181 #endif 154 182 155 183 { char ret; ret = __sync_val_compare_and_swap(vp1, v1, v1); } … … 161 189 { long long int ret; ret = __sync_val_compare_and_swap(vp8, v8, v8); } 162 190 { long long int ret; ret = __sync_val_compare_and_swap_8(vp8, v8, v8); } 191 #if defined(__SIZEOF_INT128__) 163 192 { __int128 ret; ret = __sync_val_compare_and_swap(vp16, v16, v16); } 164 193 { __int128 ret; ret = __sync_val_compare_and_swap_16(vp16, v16,v16); } 194 #endif 165 195 166 196 { char ret; ret = __sync_lock_test_and_set(vp1, v1); } … … 172 202 { long long int ret; ret = __sync_lock_test_and_set(vp8, v8); } 173 203 { long long int ret; ret = __sync_lock_test_and_set_8(vp8, v8); } 204 #if defined(__SIZEOF_INT128__) 174 205 { 
__int128 ret; ret = __sync_lock_test_and_set(vp16, v16); } 175 206 { __int128 ret; ret = __sync_lock_test_and_set_16(vp16, v16); } 207 #endif 176 208 177 209 { __sync_lock_release(vp1); } … … 183 215 { __sync_lock_release(vp8); } 184 216 { __sync_lock_release_8(vp8); } 217 #if defined(__SIZEOF_INT128__) 185 218 { __sync_lock_release(vp16); } 186 219 { __sync_lock_release_16(vp16); } 220 #endif 187 221 188 222 { __sync_synchronize(); } … … 208 242 { long long int ret; ret = __atomic_exchange_8(vp8, v8, __ATOMIC_SEQ_CST); } 209 243 { long long int ret; __atomic_exchange(vp8, &v8, &ret, __ATOMIC_SEQ_CST); } 244 #if defined(__SIZEOF_INT128__) 210 245 { __int128 ret; ret = __atomic_exchange_n(vp16, &v16, __ATOMIC_SEQ_CST); } 211 246 { __int128 ret; ret = __atomic_exchange_16(vp16, v16, __ATOMIC_SEQ_CST); } 212 247 { __int128 ret; __atomic_exchange(vp16, &v16, &ret, __ATOMIC_SEQ_CST); } 248 #endif 213 249 214 250 { char ret; ret = __atomic_load_n(vp1, __ATOMIC_SEQ_CST); } … … 224 260 { long long int ret; ret = __atomic_load_8(vp8, __ATOMIC_SEQ_CST); } 225 261 { long long int ret; __atomic_load(vp8, &ret, __ATOMIC_SEQ_CST); } 262 #if defined(__SIZEOF_INT128__) 226 263 { __int128 ret; ret = __atomic_load_n(vp16, __ATOMIC_SEQ_CST); } 227 264 { __int128 ret; ret = __atomic_load_16(vp16, __ATOMIC_SEQ_CST); } 228 265 { __int128 ret; __atomic_load(vp16, &ret, __ATOMIC_SEQ_CST); } 266 #endif 229 267 230 268 { _Bool ret; ret = __atomic_compare_exchange_n(vp1, rp1, v1, false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } … … 240 278 { _Bool ret; ret = __atomic_compare_exchange_8(vp8, rp8, v8, false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } 241 279 { _Bool ret; ret = __atomic_compare_exchange(vp8, rp8, &v8, false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } 280 #if defined(__SIZEOF_INT128__) 242 281 { _Bool ret; ret = __atomic_compare_exchange_n(vp16, rp16, v16, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } 243 282 { _Bool ret; ret = __atomic_compare_exchange_16(vp16, rp16, v16, 0, __ATOMIC_SEQ_CST, 
__ATOMIC_SEQ_CST); } 244 283 { _Bool ret; ret = __atomic_compare_exchange(vp16, rp16, &v16, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } 284 #endif 245 285 246 286 { __atomic_store_n(vp1, v1, __ATOMIC_SEQ_CST); } … … 256 296 { __atomic_store_8(vp8, v8, __ATOMIC_SEQ_CST); } 257 297 { __atomic_store(vp8, &v8, __ATOMIC_SEQ_CST); } 298 #if defined(__SIZEOF_INT128__) 258 299 { __atomic_store_n(vp16, v16, __ATOMIC_SEQ_CST); } 259 300 { __atomic_store_16(vp16, v16, __ATOMIC_SEQ_CST); } 260 301 { __atomic_store(vp16, &v16, __ATOMIC_SEQ_CST); } 302 #endif 261 303 262 304 { char ret; ret = __atomic_add_fetch(vp1, v1, __ATOMIC_SEQ_CST); } … … 268 310 { long long int ret; ret = __atomic_add_fetch(vp8, v8, __ATOMIC_SEQ_CST); } 269 311 { long long int ret; ret = __atomic_add_fetch_8(vp8, v8, __ATOMIC_SEQ_CST); } 312 #if defined(__SIZEOF_INT128__) 270 313 { __int128 ret; ret = __atomic_add_fetch(vp16, v16, __ATOMIC_SEQ_CST); } 271 314 { __int128 ret; ret = __atomic_add_fetch_16(vp16, v16, __ATOMIC_SEQ_CST); } 315 #endif 272 316 273 317 { char ret; ret = __atomic_sub_fetch(vp1, v1, __ATOMIC_SEQ_CST); } … … 279 323 { long long int ret; ret = __atomic_sub_fetch(vp8, v8, __ATOMIC_SEQ_CST); } 280 324 { long long int ret; ret = __atomic_sub_fetch_8(vp8, v8, __ATOMIC_SEQ_CST); } 325 #if defined(__SIZEOF_INT128__) 281 326 { __int128 ret; ret = __atomic_sub_fetch(vp16, v16, __ATOMIC_SEQ_CST); } 282 327 { __int128 ret; ret = __atomic_sub_fetch_16(vp16, v16, __ATOMIC_SEQ_CST); } 328 #endif 283 329 284 330 { char ret; ret = __atomic_and_fetch(vp1, v1, __ATOMIC_SEQ_CST); } … … 290 336 { long long int ret; ret = __atomic_and_fetch(vp8, v8, __ATOMIC_SEQ_CST); } 291 337 { long long int ret; ret = __atomic_and_fetch_8(vp8, v8, __ATOMIC_SEQ_CST); } 338 #if defined(__SIZEOF_INT128__) 292 339 { __int128 ret; ret = __atomic_and_fetch(vp16, v16, __ATOMIC_SEQ_CST); } 293 340 { __int128 ret; ret = __atomic_and_fetch_16(vp16, v16, __ATOMIC_SEQ_CST); } 341 #endif 294 342 295 343 { char ret; ret = 
__atomic_nand_fetch(vp1, v1, __ATOMIC_SEQ_CST); } … … 301 349 { long long int ret; ret = __atomic_nand_fetch(vp8, v8, __ATOMIC_SEQ_CST); } 302 350 { long long int ret; ret = __atomic_nand_fetch_8(vp8, v8, __ATOMIC_SEQ_CST); } 351 #if defined(__SIZEOF_INT128__) 303 352 { __int128 ret; ret = __atomic_nand_fetch(vp16, v16, __ATOMIC_SEQ_CST); } 304 353 { __int128 ret; ret = __atomic_nand_fetch_16(vp16, v16, __ATOMIC_SEQ_CST); } 354 #endif 305 355 306 356 { char ret; ret = __atomic_xor_fetch(vp1, v1, __ATOMIC_SEQ_CST); } … … 312 362 { long long int ret; ret = __atomic_xor_fetch(vp8, v8, __ATOMIC_SEQ_CST); } 313 363 { long long int ret; ret = __atomic_xor_fetch_8(vp8, v8, __ATOMIC_SEQ_CST); } 364 #if defined(__SIZEOF_INT128__) 314 365 { __int128 ret; ret = __atomic_xor_fetch(vp16, v16, __ATOMIC_SEQ_CST); } 315 366 { __int128 ret; ret = __atomic_xor_fetch_16(vp16, v16, __ATOMIC_SEQ_CST); } 367 #endif 316 368 317 369 { char ret; ret = __atomic_or_fetch(vp1, v1, __ATOMIC_SEQ_CST); } … … 323 375 { long long int ret; ret = __atomic_or_fetch(vp8, v8, __ATOMIC_SEQ_CST); } 324 376 { long long int ret; ret = __atomic_or_fetch_8(vp8, v8, __ATOMIC_SEQ_CST); } 377 #if defined(__SIZEOF_INT128__) 325 378 { __int128 ret; ret = __atomic_or_fetch(vp16, v16, __ATOMIC_SEQ_CST); } 326 379 { __int128 ret; ret = __atomic_or_fetch_16(vp16, v16, __ATOMIC_SEQ_CST); } 380 #endif 327 381 328 382 { char ret; ret = __atomic_fetch_add(vp1, v1, __ATOMIC_SEQ_CST); } … … 334 388 { long long int ret; ret = __atomic_fetch_add(vp8, v8, __ATOMIC_SEQ_CST); } 335 389 { long long int ret; ret = __atomic_fetch_add_8(vp8, v8, __ATOMIC_SEQ_CST); } 390 #if defined(__SIZEOF_INT128__) 336 391 { __int128 ret; ret = __atomic_fetch_add(vp16, v16, __ATOMIC_SEQ_CST); } 337 392 { __int128 ret; ret = __atomic_fetch_add_16(vp16, v16, __ATOMIC_SEQ_CST); } 393 #endif 338 394 339 395 { char ret; ret = __atomic_fetch_sub(vp1, v1, __ATOMIC_SEQ_CST); } … … 345 401 { long long int ret; ret = __atomic_fetch_sub(vp8, v8, 
__ATOMIC_SEQ_CST); } 346 402 { long long int ret; ret = __atomic_fetch_sub_8(vp8, v8, __ATOMIC_SEQ_CST); } 403 #if defined(__SIZEOF_INT128__) 347 404 { __int128 ret; ret = __atomic_fetch_sub(vp16, v16, __ATOMIC_SEQ_CST); } 348 405 { __int128 ret; ret = __atomic_fetch_sub_16(vp16, v16, __ATOMIC_SEQ_CST); } 406 #endif 349 407 350 408 { char ret; ret = __atomic_fetch_and(vp1, v1, __ATOMIC_SEQ_CST); } … … 356 414 { long long int ret; ret = __atomic_fetch_and(vp8, v8, __ATOMIC_SEQ_CST); } 357 415 { long long int ret; ret = __atomic_fetch_and_8(vp8, v8, __ATOMIC_SEQ_CST); } 416 #if defined(__SIZEOF_INT128__) 358 417 { __int128 ret; ret = __atomic_fetch_and(vp16, v16, __ATOMIC_SEQ_CST); } 359 418 { __int128 ret; ret = __atomic_fetch_and_16(vp16, v16, __ATOMIC_SEQ_CST); } 419 #endif 360 420 361 421 { char ret; ret = __atomic_fetch_nand(vp1, v1, __ATOMIC_SEQ_CST); } … … 367 427 { long long int ret; ret = __atomic_fetch_nand(vp8, v8, __ATOMIC_SEQ_CST); } 368 428 { long long int ret; ret = __atomic_fetch_nand_8(vp8, v8, __ATOMIC_SEQ_CST); } 429 #if defined(__SIZEOF_INT128__) 369 430 { __int128 ret; ret = __atomic_fetch_nand(vp16, v16, __ATOMIC_SEQ_CST); } 370 431 { __int128 ret; ret = __atomic_fetch_nand_16(vp16, v16, __ATOMIC_SEQ_CST); } 432 #endif 371 433 372 434 { char ret; ret = __atomic_fetch_xor(vp1, v1, __ATOMIC_SEQ_CST); } … … 378 440 { long long int ret; ret = __atomic_fetch_xor(vp8, v8, __ATOMIC_SEQ_CST); } 379 441 { long long int ret; ret = __atomic_fetch_xor_8(vp8, v8, __ATOMIC_SEQ_CST); } 442 #if defined(__SIZEOF_INT128__) 380 443 { __int128 ret; ret = __atomic_fetch_xor(vp16, v16, __ATOMIC_SEQ_CST); } 381 444 { __int128 ret; ret = __atomic_fetch_xor_16(vp16, v16, __ATOMIC_SEQ_CST); } 445 #endif 382 446 383 447 { char ret; ret = __atomic_fetch_or(vp1, v1, __ATOMIC_SEQ_CST); } … … 389 453 { long long int ret; ret = __atomic_fetch_or(vp8, v8, __ATOMIC_SEQ_CST); } 390 454 { long long int ret; ret = __atomic_fetch_or_8(vp8, v8, __ATOMIC_SEQ_CST); } 455 #if 
defined(__SIZEOF_INT128__) 391 456 { __int128 ret; ret = __atomic_fetch_or(vp16, v16, __ATOMIC_SEQ_CST); } 392 457 { __int128 ret; ret = __atomic_fetch_or_16(vp16, v16, __ATOMIC_SEQ_CST); } 458 #endif 393 459 394 460 { _Bool ret; ret = __atomic_always_lock_free(sizeof(int), vp4); }
Note: See TracChangeset
for help on using the changeset viewer.