- Timestamp:
- Dec 12, 2019, 10:04:15 AM (6 years ago)
- Branches:
- ADT, arm-eh, ast-experimental, enum, forall-pointer-decay, jacob/cs343-translation, master, new-ast-unique-expr, pthread-emulation, qualifiedEnum
- Children:
- e752e4e
- Parents:
- aca6a54c (diff), 2cd949b (diff)
Note: this is a merge changeset; the changes displayed below correspond to the merge itself.
Use the (diff) links above to see all the changes relative to each parent.
- Location:
- tests
- Files:
- 2 added
- 17 edited
tests/.expect/alloc.txt
raca6a54c → r2fa5bd2

    line 32  - CFA resize array alloc, fill
             + CFA resize array alloc
    line 34  - CFA resize array alloc, fill
             + CFA resize array alloc
    line 37  - 0xdeadbeef ×10  0x1010101 ×10  0xdededede ×10
             + 0xdeadbeef ×10  0x1010101 ×6   0xdededede ×14
tests/.expect/gccExtensions.x64.txt
raca6a54c → r2fa5bd2

    line 14  - asm ( "mov %1, %0\n\t" "add $1, %0" : "=r" ( _X3dsti_2 ), "=r" ( _X3srci_2 ) : [ _X3srci_2] "r" ( _X3dsti_2 ) : "r0" );
             + asm ( "mov %1, %0\n\t" "add $1, %0" : "=r" ( _X3dsti_2 ), "=r" ( _X3srci_2 ) : [ src ] "r" ( _X3dsti_2 ) : "r0" );
tests/.expect/gccExtensions.x86.txt
raca6a54c → r2fa5bd2

    line 14  - asm ( "mov %1, %0\n\t" "add $1, %0" : "=r" ( _X3dsti_2 ), "=r" ( _X3srci_2 ) : [ _X3srci_2] "r" ( _X3dsti_2 ) : "r0" );
             + asm ( "mov %1, %0\n\t" "add $1, %0" : "=r" ( _X3dsti_2 ), "=r" ( _X3srci_2 ) : [ src ] "r" ( _X3dsti_2 ) : "r0" );
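The only change in both expected-output files is the symbolic name used for the asm input operand ([ _X3srci_2] becomes [ src ]). For context, a minimal sketch of GCC extended asm with a named operand; this is plain C, x86-only, and the identifiers are hypothetical rather than taken from the test:

    #include <stdio.h>

    int main( void ) {
        int src = 1, dst;
        // "[ src ]" gives the operand a symbolic name usable as %[src] in the template,
        // instead of referring to it by position (%0, %1, ...).
        __asm__ ( "mov %[src], %[dst]\n\t"
                  "add $1, %[dst]"
                  : [ dst ] "=r" ( dst )
                  : [ src ] "r" ( src ) );
        printf( "%d\n", dst );          // prints 2
        return 0;
    }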
tests/.expect/references.txt
raca6a54c → r2fa5bd2

    line 38  + 4
             (a new expected-output line "4" is added after "3 9 { 1., 7. }, [1, 2, 3]" and before the two "Destructing a Y" lines)
tests/Makefile.am
raca6a54c → r2fa5bd2

    line 48  - CC = $(if $(DISTCC_CFA_PATH),distcc $(DISTCC_CFA_PATH),$(TARGET_CFA) ${DEBUG_FLAGS} ${ARCH_FLAGS})
             + CC = $(if $(DISTCC_CFA_PATH),distcc $(DISTCC_CFA_PATH) ${ARCH_FLAGS},$(TARGET_CFA) ${DEBUG_FLAGS} ${ARCH_FLAGS})
    line 55  - CFACCLOCAL = $(if $(DISTCC_CFA_PATH),$(DISTCC_CFA_PATH),$(TARGET_CFA) ${DEBUG_FLAGS} ${ARCH_FLAGS})
             + CFACCLOCAL = $(if $(DISTCC_CFA_PATH),$(DISTCC_CFA_PATH) ${ARCH_FLAGS},$(TARGET_CFA) ${DEBUG_FLAGS} ${ARCH_FLAGS})
tests/Makefile.in
raca6a54c → r2fa5bd2

    line 216       - CC = $(if $(DISTCC_CFA_PATH),distcc $(DISTCC_CFA_PATH),$(TARGET_CFA) ${DEBUG_FLAGS} ${ARCH_FLAGS})
                   + CC = $(if $(DISTCC_CFA_PATH),distcc $(DISTCC_CFA_PATH) ${ARCH_FLAGS},$(TARGET_CFA) ${DEBUG_FLAGS} ${ARCH_FLAGS})
    lines 360-361  - $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CFAFLAGS) $(CFAFLAGS) \
                   -     $(AM_CFLAGS) $(CFLAGS)
                   + $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CFAFLAGS) $(AM_CFLAGS) $(CFAFLAGS) $(CFLAGS)
                     (continuation lines of the LTCFACOMPILE definition)
    line 407       - CFACCLOCAL = $(if $(DISTCC_CFA_PATH),$(DISTCC_CFA_PATH),$(TARGET_CFA) ${DEBUG_FLAGS} ${ARCH_FLAGS})
                   + CFACCLOCAL = $(if $(DISTCC_CFA_PATH),$(DISTCC_CFA_PATH) ${ARCH_FLAGS},$(TARGET_CFA) ${DEBUG_FLAGS} ${ARCH_FLAGS})
tests/alloc.cfa
raca6a54c → r2fa5bd2

    lines 12-13  - // Last Modified On : Sun Oct 20 21:45:21 2019
                 - // Update Count     : 391
                 + // Last Modified On : Fri Nov 22 15:34:19 2019
                 + // Update Count     : 404
    line 128     - for ( i; dim ~ 2 * dim ) { p[i] = 0x1010101; }
                 + for ( i; dim ~ 2 * dim ) { p[i] = 0x1010101; }	// fill upper part
    line 141     - printf( "CFA resize array alloc, fill\n" );
                 + printf( "CFA resize array alloc\n" );
    line 147     - printf( "CFA resize array alloc, fill\n" );
                 + printf( "CFA resize array alloc\n" );
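The affected test lines resize an array allocation and fill only the newly added region, which is why the expected output above changes. A minimal plain-C analogue of that pattern (realloc plus an explicit fill of the grown tail); this is a sketch only, not the CFA alloc/alloc_set library implementation the test actually calls:

    #include <stdio.h>
    #include <stdlib.h>

    int main( void ) {
        size_t dim = 10;
        unsigned int * p = malloc( dim * sizeof( *p ) );
        for ( size_t i = 0; i < dim; i += 1 ) p[i] = 0xdeadbeef;        // original contents

        p = realloc( p, 2 * dim * sizeof( *p ) );                       // grow the array
        for ( size_t i = dim; i < 2 * dim; i += 1 ) p[i] = 0x1010101;   // fill upper (new) part only

        for ( size_t i = 0; i < 2 * dim; i += 1 ) printf( "%#x ", p[i] );
        printf( "\n" );
        free( p );
        return 0;
    }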
tests/builtins/sync.cfa
raca6a54c → r2fa5bd2

The local variables are renamed from size-based names (vp1/rp1/v1 ... vp16/rp16/v16) to type-based names, a long int case is added, and every size-suffixed builtin call (the _1, _2, _4, _8, _16 variants) is removed so that only the type-generic builtins remain.

    lines 6-11  - volatile char * vp1 = 0; char * rp1 = 0; char v1 = 0;
                - volatile short * vp2 = 0; short * rp2 = 0; short v2 = 0;
                - volatile int * vp4 = 0; int * rp4 = 0; int v4 = 0;
                - volatile long long int * vp8 = 0; long long int * rp8 = 0; long long int v8 = 0;
                - #if defined(__SIZEOF_INT128__)
                - volatile __int128 * vp16 = 0; __int128 * rp16 = 0; __int128 v16 = 0;
                + volatile char * vpc = 0; char * rpc = 0; char vc = 0;
                + volatile short * vps = 0; short * rps = 0; short vs = 0;
                + volatile int * vpi = 0; int * rpi = 0; int vi = 0;
                + volatile long int * vpl = 0; long int * rpl = 0; long int vl = 0;
                + volatile long long int * vpll = 0; long long int * rpll = 0; long long int vll = 0;
                + #if defined(__SIZEOF_INT128__)
                + volatile __int128 * vplll = 0; __int128 * rplll = 0; __int128 vlll = 0;

Every builtin group follows the same pattern; __sync_fetch_and_add is representative:

    old         { char ret; ret = __sync_fetch_and_add(vp1, v1); }
                { char ret; ret = __sync_fetch_and_add_1(vp1, v1); }
                { short ret; ret = __sync_fetch_and_add(vp2, v2); }
                { short ret; ret = __sync_fetch_and_add_2(vp2, v2); }
                { int ret; ret = __sync_fetch_and_add(vp4, v4); }
                { int ret; ret = __sync_fetch_and_add_4(vp4, v4); }
                { long long int ret; ret = __sync_fetch_and_add(vp8, v8); }
                { long long int ret; ret = __sync_fetch_and_add_8(vp8, v8); }
                #if defined(__SIZEOF_INT128__)
                { __int128 ret; ret = __sync_fetch_and_add(vp16, v16); }
                { __int128 ret; ret = __sync_fetch_and_add_16(vp16, v16); }
                #endif

    new         { char ret; ret = __sync_fetch_and_add(vpc, vc); }
                { short ret; ret = __sync_fetch_and_add(vps, vs); }
                { int ret; ret = __sync_fetch_and_add(vpi, vi); }
                { long int ret; ret = __sync_fetch_and_add(vpl, vl); }
                { long long int ret; ret = __sync_fetch_and_add(vpll, vll); }
                #if defined(__SIZEOF_INT128__)
                { __int128 ret; ret = __sync_fetch_and_add(vplll, vlll); }
                #endif

The same rewrite is applied to __sync_fetch_and_{sub,or,and,xor,nand}, __sync_{add,sub,or,and,xor,nand}_and_fetch, __sync_{bool,val}_compare_and_swap, __sync_lock_test_and_set, __sync_lock_release, __atomic_test_and_set, __atomic_clear, __atomic_exchange{_n,}, __atomic_load{_n,}, __atomic_compare_exchange{_n,}, __atomic_store{_n,}, __atomic_{add,sub,and,nand,xor,or}_fetch, __atomic_fetch_{add,sub,and,nand,xor,or}, and __atomic_{always,is}_lock_free (vp4 becomes vpi). The struct type * cases (vpp/rpp/vp) are unchanged. One size-suffixed call remains in the new version: __sync_fetch_and_nand_16(vplll, vlll) inside the __SIZEOF_INT128__ block.
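The point of the rewrite above is that GCC's __sync and __atomic builtins are type-generic: the compiler infers the operand width from the pointer type, so the explicitly sized _1/_2/_4/_8/_16 spellings are redundant. A minimal standalone C sketch, illustrative only:

    #include <stdio.h>

    int main( void ) {
        volatile char c = 0;
        volatile long long int ll = 0;

        // The same generic builtin works for any integer size; no _1/_8 suffix needed.
        char old_c = __sync_fetch_and_add( &c, 1 );                       // returns the previous value
        long long old_ll = __sync_fetch_and_add( &ll, 1 );

        // The __atomic family additionally takes an explicit memory-order argument.
        long long now = __atomic_add_fetch( &ll, 1, __ATOMIC_SEQ_CST );   // returns the updated value

        printf( "%d %lld %lld\n", old_c, old_ll, now );                   // prints 0 0 2
        return 0;
    }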
tests/concurrent/preempt.cfa
raca6a54c → r2fa5bd2

    line 38  - int next = __atomic_add_fetch_4(&counter, 1, __ATOMIC_SEQ_CST);
             + int next = __atomic_add_fetch( &counter, 1, __ATOMIC_SEQ_CST );
tests/concurrent/signal/wait.cfa
raca6a54c → r2fa5bd2

    lines 100, 111, 122, 133 (four identical changes)
             - __sync_fetch_and_sub_4( &waiter_left, 1);
             + __atomic_fetch_sub( &waiter_left, 1, __ATOMIC_SEQ_CST );
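Here the legacy size-suffixed __sync builtin is replaced by the type-generic __atomic form with an explicit memory order. A minimal plain-C sketch of the same countdown pattern; only the waiter_left name is taken from the test, the rest is illustrative:

    #include <stdio.h>

    static volatile int waiter_left = 4;            // one slot per waiting task, as in the test

    static void waiter_done( void ) {
        // Atomically decrement the count of waiters still running.
        // __atomic_fetch_sub returns the value *before* the subtraction.
        int before = __atomic_fetch_sub( &waiter_left, 1, __ATOMIC_SEQ_CST );
        if ( before == 1 ) {
            printf( "last waiter finished\n" );     // the count has just reached zero
        }
    }

    int main( void ) {
        for ( int i = 0; i < 4; i += 1 ) waiter_done();
        return 0;
    }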
tests/concurrent/thread.cfa
raca6a54c → r2fa5bd2

    lines 9-10  - void ?{}( First & this, semaphore & lock ) { ((thread&)this){ "Thread 1"}; this.lock = &lock; }
                - void ?{}( Second & this, semaphore & lock ) { ((thread&)this){ "Thread 2"}; this.lock = &lock; }
                + void ?{}( First & this, semaphore & lock ) { ((thread&)this){ "Thread 1" }; this.lock = &lock; }
                + void ?{}( Second & this, semaphore & lock ) { ((thread&)this){ "Thread 2" }; this.lock = &lock; }
                  (whitespace-only change)
tests/heap.cfa
raca6a54c r2fa5bd2
10 10 // Created On : Tue Nov 6 17:54:56 2018
11 11 // Last Modified By : Peter A. Buhr
12 // Last Modified On : Fri Jul 19 08:22:34 2019
13 // Update Count : 19
12 // Last Modified On : Sun Nov 24 12:34:51 2019
13 // Update Count : 28
14 14 //
15 15
… …
38 38 enum { NoOfAllocs = 5000, NoOfMmaps = 10 };
39 39 char * locns[NoOfAllocs];
40 int i;
40 size_t amount;
41 enum { limit = 64 * 1024 }; // check alignments up to here
41 42
42 43 // check alloc/free
… …
74 75 size_t s = (i + 1) * 20;
75 76 char * area = (char *)malloc( s );
76 if ( area == 0 ) abort( "malloc/free out of memory" );
77 if ( area == 0p ) abort( "malloc/free out of memory" );
77 78 area[0] = '\345'; area[s - 1] = '\345'; // fill first/last
78 79 area[malloc_usable_size( area ) - 1] = '\345'; // fill ultimate byte
… …
83 84 size_t s = i + 1; // +1 to make initialization simpler
84 85 locns[i] = (char *)malloc( s );
85 if ( locns[i] == 0 ) abort( "malloc/free out of memory" );
86 if ( locns[i] == 0p ) abort( "malloc/free out of memory" );
86 87 locns[i][0] = '\345'; locns[i][s - 1] = '\345'; // fill first/last
87 88 locns[i][malloc_usable_size( locns[i] ) - 1] = '\345'; // fill ultimate byte
… …
99 100 size_t s = i + default_mmap_start(); // cross over point
100 101 char * area = (char *)malloc( s );
101 if ( area == 0 ) abort( "malloc/free out of memory" );
102 if ( area == 0p ) abort( "malloc/free out of memory" );
102 103 area[0] = '\345'; area[s - 1] = '\345'; // fill first/last
103 104 area[malloc_usable_size( area ) - 1] = '\345'; // fill ultimate byte
… …
108 109 size_t s = i + default_mmap_start(); // cross over point
109 110 locns[i] = (char *)malloc( s );
110 if ( locns[i] == 0 ) abort( "malloc/free out of memory" );
111 if ( locns[i] == 0p ) abort( "malloc/free out of memory" );
111 112 locns[i][0] = '\345'; locns[i][s - 1] = '\345'; // fill first/last
112 113 locns[i][malloc_usable_size( locns[i] ) - 1] = '\345'; // fill ultimate byte
… …
124 125 size_t s = (i + 1) * 20;
125 126 char * area = (char *)calloc( 5, s );
126 if ( area == 0 ) abort( "calloc/free out of memory" );
127 if ( area == 0p ) abort( "calloc/free out of memory" );
127 128 if ( area[0] != '\0' || area[s - 1] != '\0' ||
128 129 area[malloc_usable_size( area ) - 1] != '\0' ||
… …
136 137 size_t s = i + 1;
137 138 locns[i] = (char *)calloc( 5, s );
138 if ( locns[i] == 0 ) abort( "calloc/free out of memory" );
139 if ( locns[i] == 0p ) abort( "calloc/free out of memory" );
139 140 if ( locns[i][0] != '\0' || locns[i][s - 1] != '\0' ||
140 141 locns[i][malloc_usable_size( locns[i] ) - 1] != '\0' ||
… …
155 156 size_t s = i + default_mmap_start(); // cross over point
156 157 char * area = (char *)calloc( 1, s );
157 if ( area == 0 ) abort( "calloc/free out of memory" );
158 if ( area == 0p ) abort( "calloc/free out of memory" );
158 159 if ( area[0] != '\0' || area[s - 1] != '\0' ) abort( "calloc/free corrupt storage4.1" );
159 160 if ( area[malloc_usable_size( area ) - 1] != '\0' ) abort( "calloc/free corrupt storage4.2" );
… …
167 168 size_t s = i + default_mmap_start(); // cross over point
168 169 locns[i] = (char *)calloc( 1, s );
169 if ( locns[i] == 0 ) abort( "calloc/free out of memory" );
170 if ( locns[i] == 0p ) abort( "calloc/free out of memory" );
170 171 if ( locns[i][0] != '\0' || locns[i][s - 1] != '\0' ||
171 172 locns[i][malloc_usable_size( locns[i] ) - 1] != '\0' ||
… …
183 184 // check memalign/free (sbrk)
184 186
185 enum { limit = 64 * 1024 }; // check alignments up to here
186
187 186 for ( a; libAlign() ~= limit ~ a ) { // generate powers of 2
188 187 //sout | alignments[a];
189 188 for ( s; 1 ~ NoOfAllocs ) { // allocation of size 0 can return null
190 189 char * area = (char *)memalign( a, s );
191 if ( area == 0 ) abort( "memalign/free out of memory" );
192 //sout | i | " " |area;
190 if ( area == 0p ) abort( "memalign/free out of memory" );
191 //sout | i | area;
193 192 if ( (size_t)area % a != 0 || malloc_alignment( area ) != a ) { // check for initial alignment
194 193 abort( "memalign/free bad alignment : memalign(%d,%d) = %p", (int)a, s, area );
195 194 } // if
196 area[0] = '\345'; area[s - 1] = '\345'; // fill first/last byte
195 area[0] = '\345'; area[s - 1] = '\345'; // fill first/last byte
197 196 area[malloc_usable_size( area ) - 1] = '\345'; // fill ultimate byte
198 197 free( area );
… …
207 206 size_t s = i + default_mmap_start(); // cross over point
208 207 char * area = (char *)memalign( a, s );
209 if ( area == 0 ) abort( "memalign/free out of memory" );
210 //sout | i | " " |area;
208 if ( area == 0p ) abort( "memalign/free out of memory" );
209 //sout | i | area;
211 210 if ( (size_t)area % a != 0 || malloc_alignment( area ) != a ) { // check for initial alignment
212 211 abort( "memalign/free bad alignment : memalign(%d,%d) = %p", (int)a, (int)s, area );
… …
223 222 // initial N byte allocation
224 223 char * area = (char *)calloc( 5, i );
225 if ( area == 0 ) abort( "calloc/realloc/free out of memory" );
224 if ( area == 0p ) abort( "calloc/realloc/free out of memory" );
226 225 if ( area[0] != '\0' || area[i - 1] != '\0' ||
227 226 area[malloc_usable_size( area ) - 1] != '\0' ||
… …
231 230 for ( s; i ~ 256 * 1024 ~ 26 ) { // start at initial memory request
232 231 area = (char *)realloc( area, s ); // attempt to reuse storage
233 if ( area == 0 ) abort( "calloc/realloc/free out of memory" );
232 if ( area == 0p ) abort( "calloc/realloc/free out of memory" );
234 233 if ( area[0] != '\0' || area[s - 1] != '\0' ||
235 234 area[malloc_usable_size( area ) - 1] != '\0' ||
… …
245 244 size_t s = i + default_mmap_start(); // cross over point
246 245 char * area = (char *)calloc( 1, s );
247 if ( area == 0 ) abort( "calloc/realloc/free out of memory" );
246 if ( area == 0p ) abort( "calloc/realloc/free out of memory" );
248 247 if ( area[0] != '\0' || area[s - 1] != '\0' ||
249 248 area[malloc_usable_size( area ) - 1] != '\0' ||
… …
253 252 for ( r; i ~ 256 * 1024 ~ 26 ) { // start at initial memory request
254 253 area = (char *)realloc( area, r ); // attempt to reuse storage
255 if ( area == 0 ) abort( "calloc/realloc/free out of memory" );
254 if ( area == 0p ) abort( "calloc/realloc/free out of memory" );
256 255 if ( area[0] != '\0' || area[r - 1] != '\0' ||
257 256 area[malloc_usable_size( area ) - 1] != '\0' ||
… …
263 262 // check memalign/realloc/free
264 263
265 size_t amount = 2;
264 amount = 2;
266 265 for ( a; libAlign() ~= limit ~ a ) { // generate powers of 2
267 266 // initial N byte allocation
268 267 char * area = (char *)memalign( a, amount ); // aligned N-byte allocation
269 if ( area == 0 ) abort( "memalign/realloc/free out of memory" ); // no storage ?
270 //sout | alignments[a] | " " |area;
268 if ( area == 0p ) abort( "memalign/realloc/free out of memory" ); // no storage ?
269 //sout | alignments[a] | area;
271 270 if ( (size_t)area % a != 0 || malloc_alignment( area ) != a ) { // check for initial alignment
272 271 abort( "memalign/realloc/free bad alignment : memalign(%d,%d) = %p", (int)a, (int)amount, area );
… …
278 277 if ( area[0] != '\345' || area[s - 2] != '\345' ) abort( "memalign/realloc/free corrupt storage" );
279 278 area = (char *)realloc( area, s ); // attempt to reuse storage
280 if ( area == 0 ) abort( "memalign/realloc/free out of memory" ); // no storage ?
281 //sout | i | " " |area;
279 if ( area == 0p ) abort( "memalign/realloc/free out of memory" ); // no storage ?
280 //sout | i | area;
282 281 if ( (size_t)area % a != 0 ) { // check for initial alignment
283 282 abort( "memalign/realloc/free bad alignment %p", area );
… …
294 293 for ( s; 1 ~ limit ) { // allocation of size 0 can return null
295 294 char * area = (char *)cmemalign( a, 1, s );
296 if ( area == 0 ) abort( "cmemalign/free out of memory" );
297 //sout | i | " " |area;
295 if ( area == 0p ) abort( "cmemalign/free out of memory" );
296 //sout | i | area;
298 297 if ( (size_t)area % a != 0 || malloc_alignment( area ) != a ) { // check for initial alignment
299 298 abort( "cmemalign/free bad alignment : cmemalign(%d,%d) = %p", (int)a, s, area );
… …
313 312 // initial N byte allocation
314 313 char * area = (char *)cmemalign( a, 1, amount ); // aligned N-byte allocation
315 if ( area == 0 ) abort( "cmemalign/realloc/free out of memory" ); // no storage ?
316 //sout | alignments[a] | " " |area;
314 if ( area == 0p ) abort( "cmemalign/realloc/free out of memory" ); // no storage ?
315 //sout | alignments[a] | area;
317 316 if ( (size_t)area % a != 0 || malloc_alignment( area ) != a ) { // check for initial alignment
318 317 abort( "cmemalign/realloc/free bad alignment : cmemalign(%d,%d) = %p", (int)a, (int)amount, area );
… …
327 326 if ( area[0] != '\345' || area[s - 2] != '\345' ) abort( "cmemalign/realloc/free corrupt storage2" );
328 327 area = (char *)realloc( area, s ); // attempt to reuse storage
329 if ( area == 0 ) abort( "cmemalign/realloc/free out of memory" ); // no storage ?
330 //sout | i | " " |area;
328 if ( area == 0p ) abort( "cmemalign/realloc/free out of memory" ); // no storage ?
329 //sout | i | area;
331 330 if ( (size_t)area % a != 0 || malloc_alignment( area ) != a ) { // check for initial alignment
332 331 abort( "cmemalign/realloc/free bad alignment %p", area );
… …
339 338 free( area );
340 339 } // for
340
341 // check memalign/realloc with align/free
342
343 amount = 2;
344 for ( a; libAlign() ~= limit ~ a ) { // generate powers of 2
345 // initial N byte allocation
346 char * area = (char *)memalign( a, amount ); // aligned N-byte allocation
347 if ( area == 0p ) abort( "memalign/realloc with align/free out of memory" ); // no storage ?
348 //sout | alignments[a] | area | endl;
349 if ( (size_t)area % a != 0 || malloc_alignment( area ) != a ) { // check for initial alignment
350 abort( "memalign/realloc with align/free bad alignment : memalign(%d,%d) = %p", (int)a, (int)amount, area );
351 } // if
352 area[0] = '\345'; area[amount - 2] = '\345'; // fill first/penultimate byte
353
354 // Do not start this loop index at 0 because realloc of 0 bytes frees the storage.
355 for ( s; amount ~ 256 * 1024 ) { // start at initial memory request
356 if ( area[0] != '\345' || area[s - 2] != '\345' ) abort( "memalign/realloc/free corrupt storage" );
357 area = (char *)realloc( area, a * 2, s ); // attempt to reuse storage
358 if ( area == 0p ) abort( "memalign/realloc with align/free out of memory" ); // no storage ?
359 //sout | i | area | endl;
360 if ( (size_t)area % a * 2 != 0 ) { // check for initial alignment
361 abort( "memalign/realloc with align/free bad alignment %p", area );
362 } // if
363 area[s - 1] = '\345'; // fill last byte
364 } // for
365 free( area );
366 } // for
367
368 // check cmemalign/realloc with align/free
369
370 amount = 2;
371 for ( size_t a = libAlign() + libAlign(); a <= limit; a += a ) { // generate powers of 2
372 // initial N byte allocation
373 char *area = (char *)cmemalign( a, 1, amount ); // aligned N-byte allocation
374 if ( area == 0p ) abort( "cmemalign/realloc with align/free out of memory" ); // no storage ?
375 //sout | alignments[a] | area | endl;
376 if ( (size_t)area % a != 0 || malloc_alignment( area ) != a ) { // check for initial alignment
377 abort( "cmemalign/realloc with align/free bad alignment : cmemalign(%d,%d) = %p", (int)a, (int)amount, area );
378 } // if
379 if ( area[0] != '\0' || area[amount - 1] != '\0' ||
380 area[malloc_usable_size( area ) - 1] != '\0' ||
381 ! malloc_zero_fill( area ) ) abort( "cmemalign/realloc with align/free corrupt storage1" );
382 area[0] = '\345'; area[amount - 2] = '\345'; // fill first/penultimate byte
383
384 // Do not start this loop index at 0 because realloc of 0 bytes frees the storage.
385 for ( int s = amount; s < 256 * 1024; s += 1 ) { // start at initial memory request
386 if ( area[0] != '\345' || area[s - 2] != '\345' ) abort( "cmemalign/realloc with align/free corrupt storage2" );
387 area = (char *)realloc( area, a * 2, s ); // attempt to reuse storage
388 if ( area == 0p ) abort( "cmemalign/realloc with align/free out of memory" ); // no storage ?
389 //sout | i | area | endl;
390 if ( (size_t)area % a * 2 != 0 || malloc_alignment( area ) != a * 2 ) { // check for initial alignment
391 abort( "cmemalign/realloc with align/free bad alignment %p %jd %jd", area, malloc_alignment( area ), a * 2 );
392 } // if
393 if ( area[s - 1] != '\0' || area[s - 1] != '\0' ||
394 area[malloc_usable_size( area ) - 1] != '\0' ||
395 ! malloc_zero_fill( area ) ) abort( "cmemalign/realloc/free corrupt storage3" );
396 area[s - 1] = '\345'; // fill last byte
397 } // for
398 free( area );
399 } // for
400
341 401 //sout | "worker" | thisTask() | "successful completion";
342 402 } // Worker main
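The "realloc with align" sections above repeat one discipline after every allocation and resize: verify the returned address against the requested alignment, then write sentinel bytes into the first, last, and last-usable positions so the next iteration can detect corruption. A minimal plain-C sketch of that discipline follows; it is illustrative only, using standard C11 aligned_alloc and glibc's malloc_usable_size with assumed sizes, not the CFA-specific 0p literal, range for-loops, or the CFA realloc that accepts an alignment argument.

// Illustrative C sketch of the heap test's alignment and fill-and-check pattern.
#include <malloc.h>                                    // malloc_usable_size (glibc)
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

int main( void ) {
    size_t align = 64, size = 64;                      // size kept a multiple of align for C11 aligned_alloc
    char * area = aligned_alloc( align, size );        // aligned allocation, standing in for memalign
    if ( area == NULL ) { perror( "aligned_alloc" ); return 1; }
    if ( (uintptr_t)area % align != 0 ) {              // alignment check, as in the test's abort branches
        fprintf( stderr, "bad alignment %p\n", (void *)area );
        return 1;
    }
    area[0] = '\345';                                  // fill first byte
    area[size - 1] = '\345';                           // fill last requested byte
    area[malloc_usable_size( area ) - 1] = '\345';     // fill last usable byte
    free( area );
    return 0;
}
-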
tests/linking/withthreads.cfa
raca6a54c r2fa5bd2
34 34 // Local Variables: //
35 35 // tab-width: 4 //
36 // compile-command: "cfa nothreads.cfa" //
36 // compile-command: "cfa withthreads.cfa" //
37 37 // End: //
-
tests/raii/dtor-early-exit.cfa
raca6a54c r2fa5bd2
217 217 }
218 218
219 void i() {
220 // potential loop
221 for() {
222 if(true) continue;
223 int t = 0;
224 }
225 }
226
219 227 // TODO: implement __label__ and uncomment these lines
220 228 void computedGoto() {
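The added i() gives the translator a loop in which continue always jumps over a following declaration, so the object declared after the continue is never constructed on that path; adding it to a destructor test presumably checks that skipped construction is not paired with a spurious destructor call. The plain-C reduction below is illustrative only: the function name and bounded loop are assumptions, and C has no destructors, so only the control-flow shape is shown.

// Illustrative C reduction of the control flow in the added i() test.
#include <stdbool.h>

void skip_decl( int iterations ) {
    for ( int n = 0; n < iterations; n += 1 ) {
        if ( true ) continue;                          // always taken: jumps over the declaration of t
        int t = 0;                                     // never reached, never initialized
        (void)t;
    }
}
-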
tests/references.cfa
raca6a54c r2fa5bd2
119 119 f( 3, a + b, (S){ 1.0, 7.0 }, (int [3]){ 1, 2, 3 } ); // two rvalue to reference
120 120 }
121
122 {
123 int a = 3;
124 int *p = &a;
125 asm (
126 "incl %[p]\n\t"
127 : [p] "+m" (*p)
128 );
129 printf("%d\n", a);
130 }
121 131 }
122 132
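The block added to references.cfa passes a dereferenced pointer to GCC extended asm through a named, read-write memory operand. The same idiom compiles as plain C under GCC or Clang on x86/x86-64 (incl is x86-specific); the sketch below is illustrative, the operand name is an assumption, and it prints 4 because the asm increments a through the memory operand.

// Illustrative C version of the added asm test (GCC/Clang, x86/x86-64 only).
#include <stdio.h>

int main( void ) {
    int a = 3;
    int * p = &a;
    asm ( "incl %[target]"                             // increment the int through a memory operand
          : [target] "+m" (*p) );                      // "+m" is a read-write memory constraint
    printf( "%d\n", a );                               // prints 4
    return 0;
}
-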
tests/test.py
raca6a54c r2fa5bd2
10 10 import tempfile
11 11 import time
12
13 import os
14 import psutil
15 import signal
12 16
13 17 ################################################################################
… …
221 225 make('clean', output_file=subprocess.DEVNULL, error=subprocess.DEVNULL)
222 226
227 # since python prints stacks by default on a interrupt, redo the interrupt handling to be silent
228 def worker_init():
229 def sig_int(signal_num, frame):
230 pass
231
232 signal.signal(signal.SIGINT, sig_int)
233
223 234 # create the executor for our jobs and handle the signal properly
224 pool = multiprocessing.Pool(jobs)
235 pool = multiprocessing.Pool(jobs, worker_init)
225 236
226 237 failed = False
238
239 def stop(x, y):
240 print("Tests interrupted by user", file=sys.stderr)
241 sys.exit(1)
242 signal.signal(signal.SIGINT, stop)
227 243
228 244 # for each test to run
-
tests/time.cfa
raca6a54c r2fa5bd2
10 10 // Created On : Tue Mar 27 17:24:56 2018
11 11 // Last Modified By : Peter A. Buhr
12 // Last Modified On : Thu Dec 20 23:09:21 2018
13 // Update Count : 23
12 // Last Modified On : Fri Nov 29 23:05:30 2019
13 // Update Count : 24
14 14 //
15 15
… …
20 20 Duration d1 = 3`h, d2 = 2`s, d3 = 3.375`s, d4 = 12`s, d5 = 1`s + 10_000`ns;
21 21 sout | d1 | d2 | d3 | d4 | d5;
22 int i;
23 22 d1 = 0;
24 23 sout | d1 | d2 | d3;