Changeset 30763fd
- Timestamp:
- Nov 26, 2019, 3:20:30 PM (4 years ago)
- Branches:
- ADT, arm-eh, ast-experimental, enum, forall-pointer-decay, jacob/cs343-translation, master, new-ast, new-ast-unique-expr, pthread-emulation, qualifiedEnum
- Children:
- d4f1521
- Parents:
- 7768b8d (diff), 58e280f (diff)
Note: this is a merge changeset, the changes displayed below correspond to the merge itself.
Use the (diff)
links above to see all the changes relative to each parent. - Files:
-
- 33 edited
Legend:
- Unmodified
- Added
- Removed
-
driver/cc1.cc
r7768b8d r30763fd 335 335 #endif // __DEBUG_H__ 336 336 337 enum { 338 Color_Auto = 0, 339 Color_Always = 1, 340 Color_Never = 2, 341 } color_arg = Color_Auto; 342 343 const char * color_names[3] = { "--colors=auto", "--colors=always", "--colors=never" }; 344 337 345 // process all the arguments 338 346 … … 341 349 if ( prefix( arg, "-" ) ) { 342 350 // strip inappropriate flags 351 352 if ( prefix( arg, "-fdiagnostics-color=" ) ) { 353 string choice = arg.substr(20); 354 if(choice == "always") color_arg = Color_Always; 355 else if(choice == "never" ) color_arg = Color_Never; 356 else if(choice == "auto" ) color_arg = Color_Auto; 357 } else if ( arg == "-fno-diagnostics-color" ) { 358 color_arg = Color_Auto; 359 } 343 360 344 361 if ( arg == "-quiet" || arg == "-version" || arg == "-fpreprocessed" || … … 440 457 cargs[ncargs++] = cfa_cpp_out.c_str(); 441 458 } // if 459 460 cargs[ncargs++] = color_names[color_arg]; 461 442 462 cargs[ncargs] = nullptr; // terminate argument list 443 463 -
libcfa/prelude/builtins.c
r7768b8d r30763fd 10 10 // Created On : Fri Jul 21 16:21:03 2017 11 11 // Last Modified By : Peter A. Buhr 12 // Last Modified On : T ue Jun 25 18:06:52201913 // Update Count : 9712 // Last Modified On : Thu Nov 21 16:31:39 2019 13 // Update Count : 101 14 14 // 15 15 … … 69 69 70 70 // universal typed pointer constant 71 // Compiler issue: there is a problem with anonymous types that do not have a size. 72 static inline forall( dtype DT | sized(DT) ) DT * intptr( uintptr_t addr ) { return (DT *)addr; } 71 static inline forall( dtype DT ) DT * intptr( uintptr_t addr ) { return (DT *)addr; } 73 72 74 73 // exponentiation operator implementation -
libcfa/prelude/sync-builtins.cf
r7768b8d r30763fd 1 1 char __sync_fetch_and_add(volatile char *, char,...); 2 char __sync_fetch_and_add_1(volatile char *, char,...);3 2 signed char __sync_fetch_and_add(volatile signed char *, signed char,...); 4 signed char __sync_fetch_and_add_1(volatile signed char *, signed char,...);5 3 unsigned char __sync_fetch_and_add(volatile unsigned char *, unsigned char,...); 6 unsigned char __sync_fetch_and_add_1(volatile unsigned char *, unsigned char,...);7 4 signed short __sync_fetch_and_add(volatile signed short *, signed short,...); 8 signed short __sync_fetch_and_add_2(volatile signed short *, signed short,...);9 5 unsigned short __sync_fetch_and_add(volatile unsigned short *, unsigned short,...); 10 unsigned short __sync_fetch_and_add_2(volatile unsigned short *, unsigned short,...);11 6 signed int __sync_fetch_and_add(volatile signed int *, signed int,...); 12 signed int __sync_fetch_and_add_4(volatile signed int *, signed int,...);13 7 unsigned int __sync_fetch_and_add(volatile unsigned int *, unsigned int,...); 14 unsigned int __sync_fetch_and_add_4(volatile unsigned int *, unsigned int,...); 8 signed long int __sync_fetch_and_add(volatile signed long int *, signed long int,...); 9 unsigned long int __sync_fetch_and_add(volatile unsigned long int *, unsigned long int,...); 15 10 signed long long int __sync_fetch_and_add(volatile signed long long int *, signed long long int,...); 16 signed long long int __sync_fetch_and_add_8(volatile signed long long int *, signed long long int,...);17 11 unsigned long long int __sync_fetch_and_add(volatile unsigned long long int *, unsigned long long int,...); 18 unsigned long long int __sync_fetch_and_add_8(volatile unsigned long long int *, unsigned long long int,...);19 12 #if defined(__SIZEOF_INT128__) 20 13 signed __int128 __sync_fetch_and_add(volatile signed __int128 *, signed __int128,...); 21 signed __int128 __sync_fetch_and_add_16(volatile signed __int128 *, signed __int128,...);22 14 unsigned __int128 
__sync_fetch_and_add(volatile unsigned __int128 *, unsigned __int128,...); 23 unsigned __int128 __sync_fetch_and_add_16(volatile unsigned __int128 *, unsigned __int128,...);24 15 #endif 25 16 26 17 char __sync_fetch_and_sub(volatile char *, char,...); 27 char __sync_fetch_and_sub_1(volatile char *, char,...);28 18 signed char __sync_fetch_and_sub(volatile signed char *, signed char,...); 29 signed char __sync_fetch_and_sub_1(volatile signed char *, signed char,...);30 19 unsigned char __sync_fetch_and_sub(volatile unsigned char *, unsigned char,...); 31 unsigned char __sync_fetch_and_sub_1(volatile unsigned char *, unsigned char,...);32 20 signed short __sync_fetch_and_sub(volatile signed short *, signed short,...); 33 signed short __sync_fetch_and_sub_2(volatile signed short *, signed short,...);34 21 unsigned short __sync_fetch_and_sub(volatile unsigned short *, unsigned short,...); 35 unsigned short __sync_fetch_and_sub_2(volatile unsigned short *, unsigned short,...);36 22 signed int __sync_fetch_and_sub(volatile signed int *, signed int,...); 37 signed int __sync_fetch_and_sub_4(volatile signed int *, signed int,...);38 23 unsigned int __sync_fetch_and_sub(volatile unsigned int *, unsigned int,...); 39 unsigned int __sync_fetch_and_sub_4(volatile unsigned int *, unsigned int,...); 24 signed long int __sync_fetch_and_sub(volatile signed long int *, signed long int,...); 25 unsigned long int __sync_fetch_and_sub(volatile unsigned long int *, unsigned long int,...); 40 26 signed long long int __sync_fetch_and_sub(volatile signed long long int *, signed long long int,...); 41 signed long long int __sync_fetch_and_sub_8(volatile signed long long int *, signed long long int,...);42 27 unsigned long long int __sync_fetch_and_sub(volatile unsigned long long int *, unsigned long long int,...); 43 unsigned long long int __sync_fetch_and_sub_8(volatile unsigned long long int *, unsigned long long int,...);44 28 #if defined(__SIZEOF_INT128__) 45 29 signed __int128 
__sync_fetch_and_sub(volatile signed __int128 *, signed __int128,...); 46 signed __int128 __sync_fetch_and_sub_16(volatile signed __int128 *, signed __int128,...);47 30 unsigned __int128 __sync_fetch_and_sub(volatile unsigned __int128 *, unsigned __int128,...); 48 unsigned __int128 __sync_fetch_and_sub_16(volatile unsigned __int128 *, unsigned __int128,...);49 31 #endif 50 32 51 33 char __sync_fetch_and_or(volatile char *, char,...); 52 char __sync_fetch_and_or_1(volatile char *, char,...);53 34 signed char __sync_fetch_and_or(volatile signed char *, signed char,...); 54 signed char __sync_fetch_and_or_1(volatile signed char *, signed char,...);55 35 unsigned char __sync_fetch_and_or(volatile unsigned char *, unsigned char,...); 56 unsigned char __sync_fetch_and_or_1(volatile unsigned char *, unsigned char,...);57 36 signed short __sync_fetch_and_or(volatile signed short *, signed short,...); 58 signed short __sync_fetch_and_or_2(volatile signed short *, signed short,...);59 37 unsigned short __sync_fetch_and_or(volatile unsigned short *, unsigned short,...); 60 unsigned short __sync_fetch_and_or_2(volatile unsigned short *, unsigned short,...);61 38 signed int __sync_fetch_and_or(volatile signed int *, signed int,...); 62 signed int __sync_fetch_and_or_4(volatile signed int *, signed int,...);63 39 unsigned int __sync_fetch_and_or(volatile unsigned int *, unsigned int,...); 64 unsigned int __sync_fetch_and_or_4(volatile unsigned int *, unsigned int,...); 40 signed long int __sync_fetch_and_or(volatile signed long int *, signed long int,...); 41 unsigned long int __sync_fetch_and_or(volatile unsigned long int *, unsigned long int,...); 65 42 signed long long int __sync_fetch_and_or(volatile signed long long int *, signed long long int,...); 66 signed long long int __sync_fetch_and_or_8(volatile signed long long int *, signed long long int,...);67 43 unsigned long long int __sync_fetch_and_or(volatile unsigned long long int *, unsigned long long int,...); 68 
unsigned long long int __sync_fetch_and_or_8(volatile unsigned long long int *, unsigned long long int,...);69 44 #if defined(__SIZEOF_INT128__) 70 45 signed __int128 __sync_fetch_and_or(volatile signed __int128 *, signed __int128,...); 71 signed __int128 __sync_fetch_and_or_16(volatile signed __int128 *, signed __int128,...);72 46 unsigned __int128 __sync_fetch_and_or(volatile unsigned __int128 *, unsigned __int128,...); 73 unsigned __int128 __sync_fetch_and_or_16(volatile unsigned __int128 *, unsigned __int128,...);74 47 #endif 75 48 76 49 char __sync_fetch_and_and(volatile char *, char,...); 77 char __sync_fetch_and_and_1(volatile char *, char,...);78 50 signed char __sync_fetch_and_and(volatile signed char *, signed char,...); 79 signed char __sync_fetch_and_and_1(volatile signed char *, signed char,...);80 51 unsigned char __sync_fetch_and_and(volatile unsigned char *, unsigned char,...); 81 unsigned char __sync_fetch_and_and_1(volatile unsigned char *, unsigned char,...);82 52 signed short __sync_fetch_and_and(volatile signed short *, signed short,...); 83 signed short __sync_fetch_and_and_2(volatile signed short *, signed short,...);84 53 unsigned short __sync_fetch_and_and(volatile unsigned short *, unsigned short,...); 85 unsigned short __sync_fetch_and_and_2(volatile unsigned short *, unsigned short,...);86 54 signed int __sync_fetch_and_and(volatile signed int *, signed int,...); 87 signed int __sync_fetch_and_and_4(volatile signed int *, signed int,...);88 55 unsigned int __sync_fetch_and_and(volatile unsigned int *, unsigned int,...); 89 unsigned int __sync_fetch_and_and_4(volatile unsigned int *, unsigned int,...); 56 signed long int __sync_fetch_and_and(volatile signed long int *, signed long int,...); 57 unsigned long int __sync_fetch_and_and(volatile unsigned long int *, unsigned long int,...); 90 58 signed long long int __sync_fetch_and_and(volatile signed long long int *, signed long long int,...); 91 signed long long int 
__sync_fetch_and_and_8(volatile signed long long int *, signed long long int,...);92 59 unsigned long long int __sync_fetch_and_and(volatile unsigned long long int *, unsigned long long int,...); 93 unsigned long long int __sync_fetch_and_and_8(volatile unsigned long long int *, unsigned long long int,...);94 60 #if defined(__SIZEOF_INT128__) 95 61 signed __int128 __sync_fetch_and_and(volatile signed __int128 *, signed __int128,...); 96 signed __int128 __sync_fetch_and_and_16(volatile signed __int128 *, signed __int128,...);97 62 unsigned __int128 __sync_fetch_and_and(volatile unsigned __int128 *, unsigned __int128,...); 98 unsigned __int128 __sync_fetch_and_and_16(volatile unsigned __int128 *, unsigned __int128,...);99 63 #endif 100 64 101 65 char __sync_fetch_and_xor(volatile char *, char,...); 102 char __sync_fetch_and_xor_1(volatile char *, char,...);103 66 signed char __sync_fetch_and_xor(volatile signed char *, signed char,...); 104 signed char __sync_fetch_and_xor_1(volatile signed char *, signed char,...);105 67 unsigned char __sync_fetch_and_xor(volatile unsigned char *, unsigned char,...); 106 unsigned char __sync_fetch_and_xor_1(volatile unsigned char *, unsigned char,...);107 68 signed short __sync_fetch_and_xor(volatile signed short *, signed short,...); 108 signed short __sync_fetch_and_xor_2(volatile signed short *, signed short,...);109 69 unsigned short __sync_fetch_and_xor(volatile unsigned short *, unsigned short,...); 110 unsigned short __sync_fetch_and_xor_2(volatile unsigned short *, unsigned short,...);111 70 signed int __sync_fetch_and_xor(volatile signed int *, signed int,...); 112 signed int __sync_fetch_and_xor_4(volatile signed int *, signed int,...);113 71 unsigned int __sync_fetch_and_xor(volatile unsigned int *, unsigned int,...); 114 unsigned int __sync_fetch_and_xor_4(volatile unsigned int *, unsigned int,...); 72 signed long int __sync_fetch_and_xor(volatile signed long int *, signed long int,...); 73 unsigned long int 
__sync_fetch_and_xor(volatile unsigned long int *, unsigned long int,...); 115 74 signed long long int __sync_fetch_and_xor(volatile signed long long int *, signed long long int,...); 116 signed long long int __sync_fetch_and_xor_8(volatile signed long long int *, signed long long int,...);117 75 unsigned long long int __sync_fetch_and_xor(volatile unsigned long long int *, unsigned long long int,...); 118 unsigned long long int __sync_fetch_and_xor_8(volatile unsigned long long int *, unsigned long long int,...);119 76 #if defined(__SIZEOF_INT128__) 120 77 signed __int128 __sync_fetch_and_xor(volatile signed __int128 *, signed __int128,...); 121 signed __int128 __sync_fetch_and_xor_16(volatile signed __int128 *, signed __int128,...);122 78 unsigned __int128 __sync_fetch_and_xor(volatile unsigned __int128 *, unsigned __int128,...); 123 unsigned __int128 __sync_fetch_and_xor_16(volatile unsigned __int128 *, unsigned __int128,...);124 79 #endif 125 80 126 81 char __sync_fetch_and_nand(volatile char *, char,...); 127 char __sync_fetch_and_nand_1(volatile char *, char,...);128 82 signed char __sync_fetch_and_nand(volatile signed char *, signed char,...); 129 signed char __sync_fetch_and_nand_1(volatile signed char *, signed char,...);130 83 unsigned char __sync_fetch_and_nand(volatile unsigned char *, unsigned char,...); 131 unsigned char __sync_fetch_and_nand_1(volatile unsigned char *, unsigned char,...);132 84 signed short __sync_fetch_and_nand(volatile signed short *, signed short,...); 133 signed short __sync_fetch_and_nand_2(volatile signed short *, signed short,...);134 85 unsigned short __sync_fetch_and_nand(volatile unsigned short *, unsigned short,...); 135 unsigned short __sync_fetch_and_nand_2(volatile unsigned short *, unsigned short,...);136 86 signed int __sync_fetch_and_nand(volatile signed int *, signed int,...); 137 signed int __sync_fetch_and_nand_4(volatile signed int *, signed int,...);138 87 unsigned int __sync_fetch_and_nand(volatile unsigned int 
*, unsigned int,...); 139 unsigned int __sync_fetch_and_nand_4(volatile unsigned int *, unsigned int,...); 88 signed long int __sync_fetch_and_nand(volatile signed long int *, signed long int,...); 89 unsigned long int __sync_fetch_and_nand(volatile unsigned long int *, unsigned long int,...); 140 90 signed long long int __sync_fetch_and_nand(volatile signed long long int *, signed long long int,...); 141 signed long long int __sync_fetch_and_nand_8(volatile signed long long int *, signed long long int,...);142 91 unsigned long long int __sync_fetch_and_nand(volatile unsigned long long int *, unsigned long long int,...); 143 unsigned long long int __sync_fetch_and_nand_8(volatile unsigned long long int *, unsigned long long int,...);144 92 #if defined(__SIZEOF_INT128__) 145 93 signed __int128 __sync_fetch_and_nand(volatile signed __int128 *, signed __int128,...); 146 signed __int128 __sync_fetch_and_nand_16(volatile signed __int128 *, signed __int128,...);147 94 unsigned __int128 __sync_fetch_and_nand(volatile unsigned __int128 *, unsigned __int128,...); 148 unsigned __int128 __sync_fetch_and_nand_16(volatile unsigned __int128 *, unsigned __int128,...);149 95 #endif 150 96 151 97 char __sync_add_and_fetch(volatile char *, char,...); 152 char __sync_add_and_fetch_1(volatile char *, char,...);153 98 signed char __sync_add_and_fetch(volatile signed char *, signed char,...); 154 signed char __sync_add_and_fetch_1(volatile signed char *, signed char,...);155 99 unsigned char __sync_add_and_fetch(volatile unsigned char *, unsigned char,...); 156 unsigned char __sync_add_and_fetch_1(volatile unsigned char *, unsigned char,...);157 100 signed short __sync_add_and_fetch(volatile signed short *, signed short,...); 158 signed short __sync_add_and_fetch_2(volatile signed short *, signed short,...);159 101 unsigned short __sync_add_and_fetch(volatile unsigned short *, unsigned short,...); 160 unsigned short __sync_add_and_fetch_2(volatile unsigned short *, unsigned 
short,...);161 102 signed int __sync_add_and_fetch(volatile signed int *, signed int,...); 162 signed int __sync_add_and_fetch_4(volatile signed int *, signed int,...);163 103 signed int __sync_add_and_fetch(volatile signed int *, signed int,...); 164 signed int __sync_add_and_fetch_4(volatile signed int *, signed int,...); 104 signed long int __sync_add_and_fetch(volatile signed long int *, signed long int,...); 105 unsigned long int __sync_add_and_fetch(volatile unsigned long int *, unsigned long int,...); 165 106 signed long long int __sync_add_and_fetch(volatile signed long long int *, signed long long int,...); 166 signed long long int __sync_add_and_fetch_8(volatile signed long long int *, signed long long int,...);167 107 unsigned long long int __sync_add_and_fetch(volatile unsigned long long int *, unsigned long long int,...); 168 unsigned long long int __sync_add_and_fetch_8(volatile unsigned long long int *, unsigned long long int,...);169 108 #if defined(__SIZEOF_INT128__) 170 109 signed __int128 __sync_add_and_fetch(volatile signed __int128 *, signed __int128,...); 171 signed __int128 __sync_add_and_fetch_16(volatile signed __int128 *, signed __int128,...);172 110 unsigned __int128 __sync_add_and_fetch(volatile unsigned __int128 *, unsigned __int128,...); 173 unsigned __int128 __sync_add_and_fetch_16(volatile unsigned __int128 *, unsigned __int128,...);174 111 #endif 175 112 176 113 char __sync_sub_and_fetch(volatile char *, char,...); 177 char __sync_sub_and_fetch_1(volatile char *, char,...);178 114 signed char __sync_sub_and_fetch(volatile signed char *, signed char,...); 179 signed char __sync_sub_and_fetch_1(volatile signed char *, signed char,...);180 115 unsigned char __sync_sub_and_fetch(volatile unsigned char *, unsigned char,...); 181 unsigned char __sync_sub_and_fetch_1(volatile unsigned char *, unsigned char,...);182 116 signed short __sync_sub_and_fetch(volatile signed short *, signed short,...); 183 signed short 
__sync_sub_and_fetch_2(volatile signed short *, signed short,...);184 117 unsigned short __sync_sub_and_fetch(volatile unsigned short *, unsigned short,...); 185 unsigned short __sync_sub_and_fetch_2(volatile unsigned short *, unsigned short,...);186 118 signed int __sync_sub_and_fetch(volatile signed int *, signed int,...); 187 signed int __sync_sub_and_fetch_4(volatile signed int *, signed int,...);188 119 unsigned int __sync_sub_and_fetch(volatile unsigned int *, unsigned int,...); 189 unsigned int __sync_sub_and_fetch_4(volatile unsigned int *, unsigned int,...); 120 signed long int __sync_sub_and_fetch(volatile signed long int *, signed long int,...); 121 unsigned long int __sync_sub_and_fetch(volatile unsigned long int *, unsigned long int,...); 190 122 signed long long int __sync_sub_and_fetch(volatile signed long long int *, signed long long int,...); 191 signed long long int __sync_sub_and_fetch_8(volatile signed long long int *, signed long long int,...);192 123 unsigned long long int __sync_sub_and_fetch(volatile unsigned long long int *, unsigned long long int,...); 193 unsigned long long int __sync_sub_and_fetch_8(volatile unsigned long long int *, unsigned long long int,...);194 124 #if defined(__SIZEOF_INT128__) 195 125 signed __int128 __sync_sub_and_fetch(volatile signed __int128 *, signed __int128,...); 196 signed __int128 __sync_sub_and_fetch_16(volatile signed __int128 *, signed __int128,...);197 126 unsigned __int128 __sync_sub_and_fetch(volatile unsigned __int128 *, unsigned __int128,...); 198 unsigned __int128 __sync_sub_and_fetch_16(volatile unsigned __int128 *, unsigned __int128,...);199 127 #endif 200 128 201 129 char __sync_or_and_fetch(volatile char *, char,...); 202 char __sync_or_and_fetch_1(volatile char *, char,...);203 130 signed char __sync_or_and_fetch(volatile signed char *, signed char,...); 204 signed char __sync_or_and_fetch_1(volatile signed char *, signed char,...);205 131 unsigned char __sync_or_and_fetch(volatile unsigned 
char *, unsigned char,...); 206 unsigned char __sync_or_and_fetch_1(volatile unsigned char *, unsigned char,...);207 132 signed short __sync_or_and_fetch(volatile signed short *, signed short,...); 208 signed short __sync_or_and_fetch_2(volatile signed short *, signed short,...);209 133 unsigned short __sync_or_and_fetch(volatile unsigned short *, unsigned short,...); 210 unsigned short __sync_or_and_fetch_2(volatile unsigned short *, unsigned short,...);211 134 signed int __sync_or_and_fetch(volatile signed int *, signed int,...); 212 signed int __sync_or_and_fetch_4(volatile signed int *, signed int,...);213 135 unsigned int __sync_or_and_fetch(volatile unsigned int *, unsigned int,...); 214 unsigned int __sync_or_and_fetch_4(volatile unsigned int *, unsigned int,...); 136 signed long int __sync_or_and_fetch(volatile signed long int *, signed long int,...); 137 unsigned long int __sync_or_and_fetch(volatile unsigned long int *, unsigned long int,...); 215 138 signed long long int __sync_or_and_fetch(volatile signed long long int *, signed long long int,...); 216 signed long long int __sync_or_and_fetch_8(volatile signed long long int *, signed long long int,...);217 139 unsigned long long int __sync_or_and_fetch(volatile unsigned long long int *, unsigned long long int,...); 218 unsigned long long int __sync_or_and_fetch_8(volatile unsigned long long int *, unsigned long long int,...);219 140 #if defined(__SIZEOF_INT128__) 220 141 signed __int128 __sync_or_and_fetch(volatile signed __int128 *, signed __int128,...); 221 signed __int128 __sync_or_and_fetch_16(volatile signed __int128 *, signed __int128,...);222 142 unsigned __int128 __sync_or_and_fetch(volatile unsigned __int128 *, unsigned __int128,...); 223 unsigned __int128 __sync_or_and_fetch_16(volatile unsigned __int128 *, unsigned __int128,...);224 143 #endif 225 144 226 145 char __sync_and_and_fetch(volatile char *, char,...); 227 char __sync_and_and_fetch_1(volatile char *, char,...);228 146 signed char 
__sync_and_and_fetch(volatile signed char *, signed char,...); 229 signed char __sync_and_and_fetch_1(volatile signed char *, signed char,...);230 147 unsigned char __sync_and_and_fetch(volatile unsigned char *, unsigned char,...); 231 unsigned char __sync_and_and_fetch_1(volatile unsigned char *, unsigned char,...);232 148 signed short __sync_and_and_fetch(volatile signed short *, signed short,...); 233 signed short __sync_and_and_fetch_2(volatile signed short *, signed short,...);234 149 unsigned short __sync_and_and_fetch(volatile unsigned short *, unsigned short,...); 235 unsigned short __sync_and_and_fetch_2(volatile unsigned short *, unsigned short,...);236 150 signed int __sync_and_and_fetch(volatile signed int *, signed int,...); 237 signed int __sync_and_and_fetch_4(volatile signed int *, signed int,...);238 151 unsigned int __sync_and_and_fetch(volatile unsigned int *, unsigned int,...); 239 unsigned int __sync_and_and_fetch_4(volatile unsigned int *, unsigned int,...); 152 signed long int __sync_and_and_fetch(volatile signed long int *, signed long int,...); 153 unsigned long int __sync_and_and_fetch(volatile unsigned long int *, unsigned long int,...); 240 154 signed long long int __sync_and_and_fetch(volatile signed long long int *, signed long long int,...); 241 signed long long int __sync_and_and_fetch_8(volatile signed long long int *, signed long long int,...);242 155 unsigned long long int __sync_and_and_fetch(volatile unsigned long long int *, unsigned long long int,...); 243 unsigned long long int __sync_and_and_fetch_8(volatile unsigned long long int *, unsigned long long int,...);244 156 #if defined(__SIZEOF_INT128__) 245 157 signed __int128 __sync_and_and_fetch(volatile signed __int128 *, signed __int128,...); 246 signed __int128 __sync_and_and_fetch_16(volatile signed __int128 *, signed __int128,...);247 158 unsigned __int128 __sync_and_and_fetch(volatile unsigned __int128 *, unsigned __int128,...); 248 unsigned __int128 
__sync_and_and_fetch_16(volatile unsigned __int128 *, unsigned __int128,...);249 159 #endif 250 160 251 161 char __sync_xor_and_fetch(volatile char *, char,...); 252 char __sync_xor_and_fetch_1(volatile char *, char,...);253 162 signed char __sync_xor_and_fetch(volatile signed char *, signed char,...); 254 signed char __sync_xor_and_fetch_1(volatile signed char *, signed char,...);255 163 unsigned char __sync_xor_and_fetch(volatile unsigned char *, unsigned char,...); 256 unsigned char __sync_xor_and_fetch_1(volatile unsigned char *, unsigned char,...);257 164 signed short __sync_xor_and_fetch(volatile signed short *, signed short,...); 258 signed short __sync_xor_and_fetch_2(volatile signed short *, signed short,...);259 165 unsigned short __sync_xor_and_fetch(volatile unsigned short *, unsigned short,...); 260 unsigned short __sync_xor_and_fetch_2(volatile unsigned short *, unsigned short,...);261 166 signed int __sync_xor_and_fetch(volatile signed int *, signed int,...); 262 signed int __sync_xor_and_fetch_4(volatile signed int *, signed int,...);263 167 unsigned int __sync_xor_and_fetch(volatile unsigned int *, unsigned int,...); 264 unsigned int __sync_xor_and_fetch_4(volatile unsigned int *, unsigned int,...); 168 signed long int __sync_xor_and_fetch(volatile signed long int *, signed long int,...); 169 unsigned long int __sync_xor_and_fetch(volatile unsigned long int *, unsigned long int,...); 265 170 signed long long int __sync_xor_and_fetch(volatile signed long long int *, signed long long int,...); 266 signed long long int __sync_xor_and_fetch_8(volatile signed long long int *, signed long long int,...);267 171 unsigned long long int __sync_xor_and_fetch(volatile unsigned long long int *, unsigned long long int,...); 268 unsigned long long int __sync_xor_and_fetch_8(volatile unsigned long long int *, unsigned long long int,...);269 172 #if defined(__SIZEOF_INT128__) 270 173 signed __int128 __sync_xor_and_fetch(volatile signed __int128 *, signed 
__int128,...); 271 signed __int128 __sync_xor_and_fetch_16(volatile signed __int128 *, signed __int128,...);272 174 unsigned __int128 __sync_xor_and_fetch(volatile unsigned __int128 *, unsigned __int128,...); 273 unsigned __int128 __sync_xor_and_fetch_16(volatile unsigned __int128 *, unsigned __int128,...);274 175 #endif 275 176 276 177 char __sync_nand_and_fetch(volatile char *, char,...); 277 char __sync_nand_and_fetch_1(volatile char *, char,...);278 178 signed char __sync_nand_and_fetch(volatile signed char *, signed char,...); 279 signed char __sync_nand_and_fetch_1(volatile signed char *, signed char,...);280 179 unsigned char __sync_nand_and_fetch(volatile unsigned char *, unsigned char,...); 281 unsigned char __sync_nand_and_fetch_1(volatile unsigned char *, unsigned char,...);282 180 signed short __sync_nand_and_fetch(volatile signed short *, signed short,...); 283 signed short __sync_nand_and_fetch_2(volatile signed short *, signed short,...);284 181 unsigned short __sync_nand_and_fetch(volatile unsigned short *, unsigned short,...); 285 unsigned short __sync_nand_and_fetch_2(volatile unsigned short *, unsigned short,...);286 182 signed int __sync_nand_and_fetch(volatile signed int *, signed int,...); 287 signed int __sync_nand_and_fetch_4(volatile signed int *, signed int,...);288 183 unsigned int __sync_nand_and_fetch(volatile unsigned int *, unsigned int,...); 289 unsigned int __sync_nand_and_fetch_4(volatile unsigned int *, unsigned int,...); 184 signed long int __sync_nand_and_fetch(volatile signed long int *, signed long int,...); 185 unsigned long int __sync_nand_and_fetch(volatile unsigned long int *, unsigned long int,...); 290 186 signed long long int __sync_nand_and_fetch(volatile signed long long int *, signed long long int,...); 291 signed long long int __sync_nand_and_fetch_8(volatile signed long long int *, signed long long int,...);292 187 unsigned long long int __sync_nand_and_fetch(volatile unsigned long long int *, unsigned long long 
int,...); 293 unsigned long long int __sync_nand_and_fetch_8(volatile unsigned long long int *, unsigned long long int,...);294 188 #if defined(__SIZEOF_INT128__) 295 189 signed __int128 __sync_nand_and_fetch(volatile signed __int128 *, signed __int128,...); 296 signed __int128 __sync_nand_and_fetch_16(volatile signed __int128 *, signed __int128,...);297 190 unsigned __int128 __sync_nand_and_fetch(volatile unsigned __int128 *, unsigned __int128,...); 298 unsigned __int128 __sync_nand_and_fetch_16(volatile unsigned __int128 *, unsigned __int128,...);299 191 #endif 300 192 301 193 _Bool __sync_bool_compare_and_swap(volatile char *, char, char,...); 302 _Bool __sync_bool_compare_and_swap_1(volatile char *, char, char,...);303 194 _Bool __sync_bool_compare_and_swap(volatile signed char *, signed char, signed char,...); 304 _Bool __sync_bool_compare_and_swap_1(volatile signed char *, signed char, signed char,...);305 195 _Bool __sync_bool_compare_and_swap(volatile unsigned char *, unsigned char, unsigned char,...); 306 _Bool __sync_bool_compare_and_swap_1(volatile unsigned char *, unsigned char, unsigned char,...);307 196 _Bool __sync_bool_compare_and_swap(volatile short *, signed short, signed short,...); 308 _Bool __sync_bool_compare_and_swap_2(volatile short *, signed short, signed short,...);309 197 _Bool __sync_bool_compare_and_swap(volatile short *, unsigned short, unsigned short,...); 310 _Bool __sync_bool_compare_and_swap_2(volatile short *, unsigned short, unsigned short,...);311 198 _Bool __sync_bool_compare_and_swap(volatile signed int *, signed int, signed int,...); 312 _Bool __sync_bool_compare_and_swap_4(volatile signed int *, signed int, signed int,...);313 199 _Bool __sync_bool_compare_and_swap(volatile unsigned int *, unsigned int, unsigned int,...); 314 _Bool __sync_bool_compare_and_swap_4(volatile unsigned int *, unsigned int, unsigned int,...); 200 _Bool __sync_bool_compare_and_swap(volatile signed long int *, signed long int, signed long int,...); 
201 _Bool __sync_bool_compare_and_swap(volatile unsigned long int *, unsigned long int, unsigned long int,...); 315 202 _Bool __sync_bool_compare_and_swap(volatile signed long long int *, signed long long int, signed long long int,...); 316 _Bool __sync_bool_compare_and_swap_8(volatile signed long long int *, signed long long int, signed long long int,...);317 203 _Bool __sync_bool_compare_and_swap(volatile unsigned long long int *, unsigned long long int, unsigned long long int,...); 318 _Bool __sync_bool_compare_and_swap_8(volatile unsigned long long int *, unsigned long long int, unsigned long long int,...);319 204 #if defined(__SIZEOF_INT128__) 320 205 _Bool __sync_bool_compare_and_swap(volatile signed __int128 *, signed __int128, signed __int128,...); 321 _Bool __sync_bool_compare_and_swap_16(volatile signed __int128 *, signed __int128, signed __int128,...);322 206 _Bool __sync_bool_compare_and_swap(volatile unsigned __int128 *, unsigned __int128, unsigned __int128,...); 323 _Bool __sync_bool_compare_and_swap_16(volatile unsigned __int128 *, unsigned __int128, unsigned __int128,...);324 207 #endif 325 208 forall(dtype T) _Bool __sync_bool_compare_and_swap(T * volatile *, T *, T*, ...); 326 209 327 210 char __sync_val_compare_and_swap(volatile char *, char, char,...); 328 char __sync_val_compare_and_swap_1(volatile char *, char, char,...);329 211 signed char __sync_val_compare_and_swap(volatile signed char *, signed char, signed char,...); 330 signed char __sync_val_compare_and_swap_1(volatile signed char *, signed char, signed char,...);331 212 unsigned char __sync_val_compare_and_swap(volatile unsigned char *, unsigned char, unsigned char,...); 332 unsigned char __sync_val_compare_and_swap_1(volatile unsigned char *, unsigned char, unsigned char,...);333 213 signed short __sync_val_compare_and_swap(volatile signed short *, signed short, signed short,...); 334 signed short __sync_val_compare_and_swap_2(volatile signed short *, signed short, signed 
short,...);335 214 unsigned short __sync_val_compare_and_swap(volatile unsigned short *, unsigned short, unsigned short,...); 336 unsigned short __sync_val_compare_and_swap_2(volatile unsigned short *, unsigned short, unsigned short,...);337 215 signed int __sync_val_compare_and_swap(volatile signed int *, signed int, signed int,...); 338 signed int __sync_val_compare_and_swap_4(volatile signed int *, signed int, signed int,...);339 216 unsigned int __sync_val_compare_and_swap(volatile unsigned int *, unsigned int, unsigned int,...); 340 unsigned int __sync_val_compare_and_swap_4(volatile unsigned int *, unsigned int, unsigned int,...); 217 signed long int __sync_val_compare_and_swap(volatile signed long int *, signed long int, signed long int,...); 218 unsigned long int __sync_val_compare_and_swap(volatile unsigned long int *, unsigned long int, unsigned long int,...); 341 219 signed long long int __sync_val_compare_and_swap(volatile signed long long int *, signed long long int, signed long long int,...); 342 signed long long int __sync_val_compare_and_swap_8(volatile signed long long int *, signed long long int, signed long long int,...);343 220 unsigned long long int __sync_val_compare_and_swap(volatile unsigned long long int *, unsigned long long int, unsigned long long int,...); 344 unsigned long long int __sync_val_compare_and_swap_8(volatile unsigned long long int *, unsigned long long int, unsigned long long int,...);345 221 #if defined(__SIZEOF_INT128__) 346 222 signed __int128 __sync_val_compare_and_swap(volatile signed __int128 *, signed __int128, signed __int128,...); 347 signed __int128 __sync_val_compare_and_swap_16(volatile signed __int128 *, signed __int128, signed __int128,...);348 223 unsigned __int128 __sync_val_compare_and_swap(volatile unsigned __int128 *, unsigned __int128, unsigned __int128,...); 349 unsigned __int128 __sync_val_compare_and_swap_16(volatile unsigned __int128 *, unsigned __int128, unsigned __int128,...);350 224 #endif 351 225 
forall(dtype T) T * __sync_val_compare_and_swap(T * volatile *, T *, T*,...); 352 226 353 227 char __sync_lock_test_and_set(volatile char *, char,...); 354 char __sync_lock_test_and_set_1(volatile char *, char,...);355 228 signed char __sync_lock_test_and_set(volatile signed char *, signed char,...); 356 signed char __sync_lock_test_and_set_1(volatile signed char *, signed char,...);357 229 unsigned char __sync_lock_test_and_set(volatile unsigned char *, unsigned char,...); 358 unsigned char __sync_lock_test_and_set_1(volatile unsigned char *, unsigned char,...);359 230 signed short __sync_lock_test_and_set(volatile signed short *, signed short,...); 360 signed short __sync_lock_test_and_set_2(volatile signed short *, signed short,...);361 231 unsigned short __sync_lock_test_and_set(volatile unsigned short *, unsigned short,...); 362 unsigned short __sync_lock_test_and_set_2(volatile unsigned short *, unsigned short,...);363 232 signed int __sync_lock_test_and_set(volatile signed int *, signed int,...); 364 signed int __sync_lock_test_and_set_4(volatile signed int *, signed int,...);365 233 unsigned int __sync_lock_test_and_set(volatile unsigned int *, unsigned int,...); 366 unsigned int __sync_lock_test_and_set_4(volatile unsigned int *, unsigned int,...); 234 signed long int __sync_lock_test_and_set(volatile signed long int *, signed long int,...); 235 unsigned long int __sync_lock_test_and_set(volatile unsigned long int *, unsigned long int,...); 367 236 signed long long int __sync_lock_test_and_set(volatile signed long long int *, signed long long int,...); 368 signed long long int __sync_lock_test_and_set_8(volatile signed long long int *, signed long long int,...);369 237 unsigned long long int __sync_lock_test_and_set(volatile unsigned long long int *, unsigned long long int,...); 370 unsigned long long int __sync_lock_test_and_set_8(volatile unsigned long long int *, unsigned long long int,...);371 238 #if defined(__SIZEOF_INT128__) 372 239 signed __int128 
__sync_lock_test_and_set(volatile signed __int128 *, signed __int128,...); 373 signed __int128 __sync_lock_test_and_set_16(volatile signed __int128 *, signed __int128,...);374 240 unsigned __int128 __sync_lock_test_and_set(volatile unsigned __int128 *, unsigned __int128,...); 375 unsigned __int128 __sync_lock_test_and_set_16(volatile unsigned __int128 *, unsigned __int128,...);376 241 #endif 377 242 378 243 void __sync_lock_release(volatile char *,...); 379 void __sync_lock_release_1(volatile char *,...);380 244 void __sync_lock_release(volatile signed char *,...); 381 void __sync_lock_release_1(volatile signed char *,...);382 245 void __sync_lock_release(volatile unsigned char *,...); 383 void __sync_lock_release_1(volatile unsigned char *,...);384 246 void __sync_lock_release(volatile signed short *,...); 385 void __sync_lock_release_2(volatile signed short *,...);386 247 void __sync_lock_release(volatile unsigned short *,...); 387 void __sync_lock_release_2(volatile unsigned short *,...);388 248 void __sync_lock_release(volatile signed int *,...); 389 void __sync_lock_release_4(volatile signed int *,...);390 249 void __sync_lock_release(volatile unsigned int *,...); 391 void __sync_lock_release_4(volatile unsigned int *,...); 250 void __sync_lock_release(volatile signed long int *,...); 251 void __sync_lock_release(volatile unsigned long int *,...); 392 252 void __sync_lock_release(volatile signed long long int *,...); 393 void __sync_lock_release_8(volatile signed long long int *,...);394 253 void __sync_lock_release(volatile unsigned long long int *,...); 395 void __sync_lock_release_8(volatile unsigned long long int *,...);396 254 #if defined(__SIZEOF_INT128__) 397 255 void __sync_lock_release(volatile signed __int128 *,...); 398 void __sync_lock_release_16(volatile signed __int128 *,...);399 256 void __sync_lock_release(volatile unsigned __int128 *,...); 400 void __sync_lock_release_16(volatile unsigned __int128 *,...);401 257 #endif 402 258 … … 414 270 
_Bool __atomic_test_and_set(volatile signed int *, int); 415 271 _Bool __atomic_test_and_set(volatile unsigned int *, int); 272 _Bool __atomic_test_and_set(volatile signed long int *, int); 273 _Bool __atomic_test_and_set(volatile unsigned long int *, int); 416 274 _Bool __atomic_test_and_set(volatile signed long long int *, int); 417 275 _Bool __atomic_test_and_set(volatile unsigned long long int *, int); … … 429 287 void __atomic_clear(volatile signed int *, int); 430 288 void __atomic_clear(volatile unsigned int *, int); 289 void __atomic_clear(volatile signed long int *, int); 290 void __atomic_clear(volatile unsigned long int *, int); 431 291 void __atomic_clear(volatile signed long long int *, int); 432 292 void __atomic_clear(volatile unsigned long long int *, int); … … 437 297 438 298 _Bool __atomic_exchange_n(volatile _Bool *, _Bool, int); 439 _Bool __atomic_exchange_1(volatile _Bool *, _Bool, int);440 299 void __atomic_exchange(volatile _Bool *, volatile _Bool *, volatile _Bool *, int); 441 300 char __atomic_exchange_n(volatile char *, char, int); 442 char __atomic_exchange_1(volatile char *, char, int);443 301 void __atomic_exchange(volatile char *, volatile char *, volatile char *, int); 444 302 signed char __atomic_exchange_n(volatile signed char *, signed char, int); 445 signed char __atomic_exchange_1(volatile signed char *, signed char, int);446 303 void __atomic_exchange(volatile signed char *, volatile signed char *, volatile signed char *, int); 447 304 unsigned char __atomic_exchange_n(volatile unsigned char *, unsigned char, int); 448 unsigned char __atomic_exchange_1(volatile unsigned char *, unsigned char, int);449 305 void __atomic_exchange(volatile unsigned char *, volatile unsigned char *, volatile unsigned char *, int); 450 306 signed short __atomic_exchange_n(volatile signed short *, signed short, int); 451 signed short __atomic_exchange_2(volatile signed short *, signed short, int);452 307 void __atomic_exchange(volatile signed short *, 
volatile signed short *, volatile signed short *, int); 453 308 unsigned short __atomic_exchange_n(volatile unsigned short *, unsigned short, int); 454 unsigned short __atomic_exchange_2(volatile unsigned short *, unsigned short, int);455 309 void __atomic_exchange(volatile unsigned short *, volatile unsigned short *, volatile unsigned short *, int); 456 310 signed int __atomic_exchange_n(volatile signed int *, signed int, int); 457 signed int __atomic_exchange_4(volatile signed int *, signed int, int);458 311 void __atomic_exchange(volatile signed int *, volatile signed int *, volatile signed int *, int); 459 312 unsigned int __atomic_exchange_n(volatile unsigned int *, unsigned int, int); 460 unsigned int __atomic_exchange_4(volatile unsigned int *, unsigned int, int);461 313 void __atomic_exchange(volatile unsigned int *, volatile unsigned int *, volatile unsigned int *, int); 314 signed long int __atomic_exchange_n(volatile signed long int *, signed long int, int); 315 void __atomic_exchange(volatile signed long int *, volatile signed long int *, volatile signed long int *, int); 316 unsigned long int __atomic_exchange_n(volatile unsigned long int *, unsigned long int, int); 317 void __atomic_exchange(volatile unsigned long int *, volatile unsigned long int *, volatile unsigned long int *, int); 462 318 signed long long int __atomic_exchange_n(volatile signed long long int *, signed long long int, int); 463 signed long long int __atomic_exchange_8(volatile signed long long int *, signed long long int, int);464 319 void __atomic_exchange(volatile signed long long int *, volatile signed long long int *, volatile signed long long int *, int); 465 320 unsigned long long int __atomic_exchange_n(volatile unsigned long long int *, unsigned long long int, int); 466 unsigned long long int __atomic_exchange_8(volatile unsigned long long int *, unsigned long long int, int);467 321 void __atomic_exchange(volatile unsigned long long int *, volatile unsigned long long int *, 
volatile unsigned long long int *, int); 468 322 #if defined(__SIZEOF_INT128__) 469 323 signed __int128 __atomic_exchange_n(volatile signed __int128 *, signed __int128, int); 470 signed __int128 __atomic_exchange_16(volatile signed __int128 *, signed __int128, int);471 324 void __atomic_exchange(volatile signed __int128 *, volatile signed __int128 *, volatile signed __int128 *, int); 472 325 unsigned __int128 __atomic_exchange_n(volatile unsigned __int128 *, unsigned __int128, int); 473 unsigned __int128 __atomic_exchange_16(volatile unsigned __int128 *, unsigned __int128, int);474 326 void __atomic_exchange(volatile unsigned __int128 *, volatile unsigned __int128 *, volatile unsigned __int128 *, int); 475 327 #endif … … 480 332 void __atomic_load(const volatile _Bool *, volatile _Bool *, int); 481 333 char __atomic_load_n(const volatile char *, int); 482 char __atomic_load_1(const volatile char *, int);483 334 void __atomic_load(const volatile char *, volatile char *, int); 484 335 signed char __atomic_load_n(const volatile signed char *, int); 485 signed char __atomic_load_1(const volatile signed char *, int);486 336 void __atomic_load(const volatile signed char *, volatile signed char *, int); 487 337 unsigned char __atomic_load_n(const volatile unsigned char *, int); 488 unsigned char __atomic_load_1(const volatile unsigned char *, int);489 338 void __atomic_load(const volatile unsigned char *, volatile unsigned char *, int); 490 339 signed short __atomic_load_n(const volatile signed short *, int); 491 signed short __atomic_load_2(const volatile signed short *, int);492 340 void __atomic_load(const volatile signed short *, volatile signed short *, int); 493 341 unsigned short __atomic_load_n(const volatile unsigned short *, int); 494 unsigned short __atomic_load_2(const volatile unsigned short *, int);495 342 void __atomic_load(const volatile unsigned short *, volatile unsigned short *, int); 496 343 signed int __atomic_load_n(const volatile signed int *, int); 
497 signed int __atomic_load_4(const volatile signed int *, int);498 344 void __atomic_load(const volatile signed int *, volatile signed int *, int); 499 345 unsigned int __atomic_load_n(const volatile unsigned int *, int); 500 unsigned int __atomic_load_4(const volatile unsigned int *, int);501 346 void __atomic_load(const volatile unsigned int *, volatile unsigned int *, int); 347 signed long int __atomic_load_n(const volatile signed long int *, int); 348 void __atomic_load(const volatile signed long int *, volatile signed long int *, int); 349 unsigned long int __atomic_load_n(const volatile unsigned long int *, int); 350 void __atomic_load(const volatile unsigned long int *, volatile unsigned long int *, int); 502 351 signed long long int __atomic_load_n(const volatile signed long long int *, int); 503 signed long long int __atomic_load_8(const volatile signed long long int *, int);504 352 void __atomic_load(const volatile signed long long int *, volatile signed long long int *, int); 505 353 unsigned long long int __atomic_load_n(const volatile unsigned long long int *, int); 506 unsigned long long int __atomic_load_8(const volatile unsigned long long int *, int);507 354 void __atomic_load(const volatile unsigned long long int *, volatile unsigned long long int *, int); 508 355 #if defined(__SIZEOF_INT128__) 509 356 signed __int128 __atomic_load_n(const volatile signed __int128 *, int); 510 signed __int128 __atomic_load_16(const volatile signed __int128 *, int);511 357 void __atomic_load(const volatile signed __int128 *, volatile signed __int128 *, int); 512 358 unsigned __int128 __atomic_load_n(const volatile unsigned __int128 *, int); 513 unsigned __int128 __atomic_load_16(const volatile unsigned __int128 *, int);514 359 void __atomic_load(const volatile unsigned __int128 *, volatile unsigned __int128 *, int); 515 360 #endif … … 518 363 519 364 _Bool __atomic_compare_exchange_n(volatile char *, char *, char, _Bool, int, int); 520 _Bool 
__atomic_compare_exchange_1(volatile char *, char *, char, _Bool, int, int);521 365 _Bool __atomic_compare_exchange (volatile char *, char *, char *, _Bool, int, int); 522 366 _Bool __atomic_compare_exchange_n(volatile signed char *, signed char *, signed char, _Bool, int, int); 523 _Bool __atomic_compare_exchange_1(volatile signed char *, signed char *, signed char, _Bool, int, int);524 367 _Bool __atomic_compare_exchange (volatile signed char *, signed char *, signed char *, _Bool, int, int); 525 368 _Bool __atomic_compare_exchange_n(volatile unsigned char *, unsigned char *, unsigned char, _Bool, int, int); 526 _Bool __atomic_compare_exchange_1(volatile unsigned char *, unsigned char *, unsigned char, _Bool, int, int);527 369 _Bool __atomic_compare_exchange (volatile unsigned char *, unsigned char *, unsigned char *, _Bool, int, int); 528 370 _Bool __atomic_compare_exchange_n(volatile signed short *, signed short *, signed short, _Bool, int, int); 529 _Bool __atomic_compare_exchange_2(volatile signed short *, signed short *, signed short, _Bool, int, int);530 371 _Bool __atomic_compare_exchange (volatile signed short *, signed short *, signed short *, _Bool, int, int); 531 372 _Bool __atomic_compare_exchange_n(volatile unsigned short *, unsigned short *, unsigned short, _Bool, int, int); 532 _Bool __atomic_compare_exchange_2(volatile unsigned short *, unsigned short *, unsigned short, _Bool, int, int);533 373 _Bool __atomic_compare_exchange (volatile unsigned short *, unsigned short *, unsigned short *, _Bool, int, int); 534 374 _Bool __atomic_compare_exchange_n(volatile signed int *, signed int *, signed int, _Bool, int, int); 535 _Bool __atomic_compare_exchange_4(volatile signed int *, signed int *, signed int, _Bool, int, int);536 375 _Bool __atomic_compare_exchange (volatile signed int *, signed int *, signed int *, _Bool, int, int); 537 376 _Bool __atomic_compare_exchange_n(volatile unsigned int *, unsigned int *, unsigned int, _Bool, int, int); 538 _Bool 
__atomic_compare_exchange_4(volatile unsigned int *, unsigned int *, unsigned int, _Bool, int, int);539 377 _Bool __atomic_compare_exchange (volatile unsigned int *, unsigned int *, unsigned int *, _Bool, int, int); 378 _Bool __atomic_compare_exchange_n(volatile signed long int *, signed long int *, signed long int, _Bool, int, int); 379 _Bool __atomic_compare_exchange (volatile signed long int *, signed long int *, signed long int *, _Bool, int, int); 380 _Bool __atomic_compare_exchange_n(volatile unsigned long int *, unsigned long int *, unsigned long int, _Bool, int, int); 381 _Bool __atomic_compare_exchange (volatile unsigned long int *, unsigned long int *, unsigned long int *, _Bool, int, int); 540 382 _Bool __atomic_compare_exchange_n(volatile signed long long int *, signed long long int *, signed long long int, _Bool, int, int); 541 _Bool __atomic_compare_exchange_8(volatile signed long long int *, signed long long int *, signed long long int, _Bool, int, int);542 383 _Bool __atomic_compare_exchange (volatile signed long long int *, signed long long int *, signed long long int *, _Bool, int, int); 543 384 _Bool __atomic_compare_exchange_n(volatile unsigned long long int *, unsigned long long int *, unsigned long long int, _Bool, int, int); 544 _Bool __atomic_compare_exchange_8(volatile unsigned long long int *, unsigned long long int *, unsigned long long int, _Bool, int, int);545 385 _Bool __atomic_compare_exchange (volatile unsigned long long int *, unsigned long long int *, unsigned long long int *, _Bool, int, int); 546 386 #if defined(__SIZEOF_INT128__) 547 387 _Bool __atomic_compare_exchange_n (volatile signed __int128 *, signed __int128 *, signed __int128, _Bool, int, int); 548 _Bool __atomic_compare_exchange_16(volatile signed __int128 *, signed __int128 *, signed __int128, _Bool, int, int);549 388 _Bool __atomic_compare_exchange (volatile signed __int128 *, signed __int128 *, signed __int128 *, _Bool, int, int); 550 389 _Bool 
__atomic_compare_exchange_n (volatile unsigned __int128 *, unsigned __int128 *, unsigned __int128, _Bool, int, int); 551 _Bool __atomic_compare_exchange_16(volatile unsigned __int128 *, unsigned __int128 *, unsigned __int128, _Bool, int, int);552 390 _Bool __atomic_compare_exchange (volatile unsigned __int128 *, unsigned __int128 *, unsigned __int128 *, _Bool, int, int); 553 391 #endif … … 558 396 void __atomic_store(volatile _Bool *, _Bool *, int); 559 397 void __atomic_store_n(volatile char *, char, int); 560 void __atomic_store_1(volatile char *, char, int);561 398 void __atomic_store(volatile char *, char *, int); 562 399 void __atomic_store_n(volatile signed char *, signed char, int); 563 void __atomic_store_1(volatile signed char *, signed char, int);564 400 void __atomic_store(volatile signed char *, signed char *, int); 565 401 void __atomic_store_n(volatile unsigned char *, unsigned char, int); 566 void __atomic_store_1(volatile unsigned char *, unsigned char, int);567 402 void __atomic_store(volatile unsigned char *, unsigned char *, int); 568 403 void __atomic_store_n(volatile signed short *, signed short, int); 569 void __atomic_store_2(volatile signed short *, signed short, int);570 404 void __atomic_store(volatile signed short *, signed short *, int); 571 405 void __atomic_store_n(volatile unsigned short *, unsigned short, int); 572 void __atomic_store_2(volatile unsigned short *, unsigned short, int);573 406 void __atomic_store(volatile unsigned short *, unsigned short *, int); 574 407 void __atomic_store_n(volatile signed int *, signed int, int); 575 void __atomic_store_4(volatile signed int *, signed int, int);576 408 void __atomic_store(volatile signed int *, signed int *, int); 577 409 void __atomic_store_n(volatile unsigned int *, unsigned int, int); 578 void __atomic_store_4(volatile unsigned int *, unsigned int, int);579 410 void __atomic_store(volatile unsigned int *, unsigned int *, int); 411 void __atomic_store_n(volatile signed long int *, 
signed long int, int); 412 void __atomic_store(volatile signed long int *, signed long int *, int); 413 void __atomic_store_n(volatile unsigned long int *, unsigned long int, int); 414 void __atomic_store(volatile unsigned long int *, unsigned long int *, int); 580 415 void __atomic_store_n(volatile signed long long int *, signed long long int, int); 581 void __atomic_store_8(volatile signed long long int *, signed long long int, int);582 416 void __atomic_store(volatile signed long long int *, signed long long int *, int); 583 417 void __atomic_store_n(volatile unsigned long long int *, unsigned long long int, int); 584 void __atomic_store_8(volatile unsigned long long int *, unsigned long long int, int);585 418 void __atomic_store(volatile unsigned long long int *, unsigned long long int *, int); 586 419 #if defined(__SIZEOF_INT128__) 587 420 void __atomic_store_n(volatile signed __int128 *, signed __int128, int); 588 void __atomic_store_16(volatile signed __int128 *, signed __int128, int);589 421 void __atomic_store(volatile signed __int128 *, signed __int128 *, int); 590 422 void __atomic_store_n(volatile unsigned __int128 *, unsigned __int128, int); 591 void __atomic_store_16(volatile unsigned __int128 *, unsigned __int128, int);592 423 void __atomic_store(volatile unsigned __int128 *, unsigned __int128 *, int); 593 424 #endif … … 596 427 597 428 char __atomic_add_fetch (volatile char *, char, int); 598 char __atomic_add_fetch_1(volatile char *, char, int);599 429 signed char __atomic_add_fetch (volatile signed char *, signed char, int); 600 signed char __atomic_add_fetch_1(volatile signed char *, signed char, int);601 430 unsigned char __atomic_add_fetch (volatile unsigned char *, unsigned char, int); 602 unsigned char __atomic_add_fetch_1(volatile unsigned char *, unsigned char, int);603 431 signed short __atomic_add_fetch (volatile signed short *, signed short, int); 604 signed short __atomic_add_fetch_2(volatile signed short *, signed short, int);605 432 
unsigned short __atomic_add_fetch (volatile unsigned short *, unsigned short, int); 606 unsigned short __atomic_add_fetch_2(volatile unsigned short *, unsigned short, int);607 433 signed int __atomic_add_fetch (volatile signed int *, signed int, int); 608 signed int __atomic_add_fetch_4(volatile signed int *, signed int, int);609 434 unsigned int __atomic_add_fetch (volatile unsigned int *, unsigned int, int); 610 unsigned int __atomic_add_fetch_4(volatile unsigned int *, unsigned int, int); 435 signed long int __atomic_add_fetch (volatile signed long int *, signed long int, int); 436 unsigned long int __atomic_add_fetch (volatile unsigned long int *, unsigned long int, int); 611 437 signed long long int __atomic_add_fetch (volatile signed long long int *, signed long long int, int); 612 signed long long int __atomic_add_fetch_8(volatile signed long long int *, signed long long int, int);613 438 unsigned long long int __atomic_add_fetch (volatile unsigned long long int *, unsigned long long int, int); 614 unsigned long long int __atomic_add_fetch_8(volatile unsigned long long int *, unsigned long long int, int);615 439 #if defined(__SIZEOF_INT128__) 616 440 signed __int128 __atomic_add_fetch (volatile signed __int128 *, signed __int128, int); 617 signed __int128 __atomic_add_fetch_16(volatile signed __int128 *, signed __int128, int);618 441 unsigned __int128 __atomic_add_fetch (volatile unsigned __int128 *, unsigned __int128, int); 619 unsigned __int128 __atomic_add_fetch_16(volatile unsigned __int128 *, unsigned __int128, int);620 442 #endif 621 443 622 444 char __atomic_sub_fetch (volatile char *, char, int); 623 char __atomic_sub_fetch_1(volatile char *, char, int);624 445 signed char __atomic_sub_fetch (volatile signed char *, signed char, int); 625 signed char __atomic_sub_fetch_1(volatile signed char *, signed char, int);626 446 unsigned char __atomic_sub_fetch (volatile unsigned char *, unsigned char, int); 627 unsigned char __atomic_sub_fetch_1(volatile 
unsigned char *, unsigned char, int);628 447 signed short __atomic_sub_fetch (volatile signed short *, signed short, int); 629 signed short __atomic_sub_fetch_2(volatile signed short *, signed short, int);630 448 unsigned short __atomic_sub_fetch (volatile unsigned short *, unsigned short, int); 631 unsigned short __atomic_sub_fetch_2(volatile unsigned short *, unsigned short, int);632 449 signed int __atomic_sub_fetch (volatile signed int *, signed int, int); 633 signed int __atomic_sub_fetch_4(volatile signed int *, signed int, int);634 450 unsigned int __atomic_sub_fetch (volatile unsigned int *, unsigned int, int); 635 unsigned int __atomic_sub_fetch_4(volatile unsigned int *, unsigned int, int); 451 signed long long int __atomic_sub_fetch (volatile signed long int *, signed long int, int); 452 unsigned long long int __atomic_sub_fetch (volatile unsigned long int *, unsigned long int, int); 636 453 signed long long int __atomic_sub_fetch (volatile signed long long int *, signed long long int, int); 637 signed long long int __atomic_sub_fetch_8(volatile signed long long int *, signed long long int, int);638 454 unsigned long long int __atomic_sub_fetch (volatile unsigned long long int *, unsigned long long int, int); 639 unsigned long long int __atomic_sub_fetch_8(volatile unsigned long long int *, unsigned long long int, int);640 455 #if defined(__SIZEOF_INT128__) 641 456 signed __int128 __atomic_sub_fetch (volatile signed __int128 *, signed __int128, int); 642 signed __int128 __atomic_sub_fetch_16(volatile signed __int128 *, signed __int128, int);643 457 unsigned __int128 __atomic_sub_fetch (volatile unsigned __int128 *, unsigned __int128, int); 644 unsigned __int128 __atomic_sub_fetch_16(volatile unsigned __int128 *, unsigned __int128, int);645 458 #endif 646 459 647 460 char __atomic_and_fetch (volatile char *, char, int); 648 char __atomic_and_fetch_1(volatile char *, char, int);649 461 signed char __atomic_and_fetch (volatile signed char *, signed char, 
int); 650 signed char __atomic_and_fetch_1(volatile signed char *, signed char, int);651 462 unsigned char __atomic_and_fetch (volatile unsigned char *, unsigned char, int); 652 unsigned char __atomic_and_fetch_1(volatile unsigned char *, unsigned char, int);653 463 signed short __atomic_and_fetch (volatile signed short *, signed short, int); 654 signed short __atomic_and_fetch_2(volatile signed short *, signed short, int);655 464 unsigned short __atomic_and_fetch (volatile unsigned short *, unsigned short, int); 656 unsigned short __atomic_and_fetch_2(volatile unsigned short *, unsigned short, int);657 465 signed int __atomic_and_fetch (volatile signed int *, signed int, int); 658 signed int __atomic_and_fetch_4(volatile signed int *, signed int, int);659 466 unsigned int __atomic_and_fetch (volatile unsigned int *, unsigned int, int); 660 unsigned int __atomic_and_fetch_4(volatile unsigned int *, unsigned int, int); 467 signed long int __atomic_and_fetch (volatile signed long int *, signed long int, int); 468 unsigned long int __atomic_and_fetch (volatile unsigned long int *, unsigned long int, int); 661 469 signed long long int __atomic_and_fetch (volatile signed long long int *, signed long long int, int); 662 signed long long int __atomic_and_fetch_8(volatile signed long long int *, signed long long int, int);663 470 unsigned long long int __atomic_and_fetch (volatile unsigned long long int *, unsigned long long int, int); 664 unsigned long long int __atomic_and_fetch_8(volatile unsigned long long int *, unsigned long long int, int);665 471 #if defined(__SIZEOF_INT128__) 666 472 signed __int128 __atomic_and_fetch (volatile signed __int128 *, signed __int128, int); 667 signed __int128 __atomic_and_fetch_16(volatile signed __int128 *, signed __int128, int);668 473 unsigned __int128 __atomic_and_fetch (volatile unsigned __int128 *, unsigned __int128, int); 669 unsigned __int128 __atomic_and_fetch_16(volatile unsigned __int128 *, unsigned __int128, int);670 474 
#endif 671 475 672 476 char __atomic_nand_fetch (volatile char *, char, int); 673 char __atomic_nand_fetch_1(volatile char *, char, int);674 477 signed char __atomic_nand_fetch (volatile signed char *, signed char, int); 675 signed char __atomic_nand_fetch_1(volatile signed char *, signed char, int);676 478 unsigned char __atomic_nand_fetch (volatile unsigned char *, unsigned char, int); 677 unsigned char __atomic_nand_fetch_1(volatile unsigned char *, unsigned char, int);678 479 signed short __atomic_nand_fetch (volatile signed short *, signed short, int); 679 signed short __atomic_nand_fetch_2(volatile signed short *, signed short, int);680 480 unsigned short __atomic_nand_fetch (volatile unsigned short *, unsigned short, int); 681 unsigned short __atomic_nand_fetch_2(volatile unsigned short *, unsigned short, int);682 481 signed int __atomic_nand_fetch (volatile signed int *, signed int, int); 683 signed int __atomic_nand_fetch_4(volatile signed int *, signed int, int);684 482 unsigned int __atomic_nand_fetch (volatile unsigned int *, unsigned int, int); 685 unsigned int __atomic_nand_fetch_4(volatile unsigned int *, unsigned int, int); 483 signed long int __atomic_nand_fetch (volatile signed long int *, signed long int, int); 484 unsigned long int __atomic_nand_fetch (volatile unsigned long int *, unsigned long int, int); 686 485 signed long long int __atomic_nand_fetch (volatile signed long long int *, signed long long int, int); 687 signed long long int __atomic_nand_fetch_8(volatile signed long long int *, signed long long int, int);688 486 unsigned long long int __atomic_nand_fetch (volatile unsigned long long int *, unsigned long long int, int); 689 unsigned long long int __atomic_nand_fetch_8(volatile unsigned long long int *, unsigned long long int, int);690 487 #if defined(__SIZEOF_INT128__) 691 488 signed __int128 __atomic_nand_fetch (volatile signed __int128 *, signed __int128, int); 692 signed __int128 __atomic_nand_fetch_16(volatile signed __int128 
*, signed __int128, int);693 489 unsigned __int128 __atomic_nand_fetch (volatile unsigned __int128 *, unsigned __int128, int); 694 unsigned __int128 __atomic_nand_fetch_16(volatile unsigned __int128 *, unsigned __int128, int);695 490 #endif 696 491 697 492 char __atomic_xor_fetch (volatile char *, char, int); 698 char __atomic_xor_fetch_1(volatile char *, char, int);699 493 signed char __atomic_xor_fetch (volatile signed char *, signed char, int); 700 signed char __atomic_xor_fetch_1(volatile signed char *, signed char, int);701 494 unsigned char __atomic_xor_fetch (volatile unsigned char *, unsigned char, int); 702 unsigned char __atomic_xor_fetch_1(volatile unsigned char *, unsigned char, int);703 495 signed short __atomic_xor_fetch (volatile signed short *, signed short, int); 704 signed short __atomic_xor_fetch_2(volatile signed short *, signed short, int);705 496 unsigned short __atomic_xor_fetch (volatile unsigned short *, unsigned short, int); 706 unsigned short __atomic_xor_fetch_2(volatile unsigned short *, unsigned short, int);707 497 signed int __atomic_xor_fetch (volatile signed int *, signed int, int); 708 signed int __atomic_xor_fetch_4(volatile signed int *, signed int, int);709 498 unsigned int __atomic_xor_fetch (volatile unsigned int *, unsigned int, int); 710 unsigned int __atomic_xor_fetch_4(volatile unsigned int *, unsigned int, int); 499 signed long int __atomic_xor_fetch (volatile signed long int *, signed long int, int); 500 unsigned long int __atomic_xor_fetch (volatile unsigned long int *, unsigned long int, int); 711 501 signed long long int __atomic_xor_fetch (volatile signed long long int *, signed long long int, int); 712 signed long long int __atomic_xor_fetch_8(volatile signed long long int *, signed long long int, int);713 502 unsigned long long int __atomic_xor_fetch (volatile unsigned long long int *, unsigned long long int, int); 714 unsigned long long int __atomic_xor_fetch_8(volatile unsigned long long int *, unsigned long long 
int, int);715 503 #if defined(__SIZEOF_INT128__) 716 504 signed __int128 __atomic_xor_fetch (volatile signed __int128 *, signed __int128, int); 717 signed __int128 __atomic_xor_fetch_16(volatile signed __int128 *, signed __int128, int);718 505 unsigned __int128 __atomic_xor_fetch (volatile unsigned __int128 *, unsigned __int128, int); 719 unsigned __int128 __atomic_xor_fetch_16(volatile unsigned __int128 *, unsigned __int128, int);720 506 #endif 721 507 722 508 char __atomic_or_fetch (volatile char *, char, int); 723 char __atomic_or_fetch_1(volatile char *, char, int);724 509 signed char __atomic_or_fetch (volatile signed char *, signed char, int); 725 signed char __atomic_or_fetch_1(volatile signed char *, signed char, int);726 510 unsigned char __atomic_or_fetch (volatile unsigned char *, unsigned char, int); 727 unsigned char __atomic_or_fetch_1(volatile unsigned char *, unsigned char, int);728 511 signed short __atomic_or_fetch (volatile signed short *, signed short, int); 729 signed short __atomic_or_fetch_2(volatile signed short *, signed short, int);730 512 unsigned short __atomic_or_fetch (volatile unsigned short *, unsigned short, int); 731 unsigned short __atomic_or_fetch_2(volatile unsigned short *, unsigned short, int);732 513 signed int __atomic_or_fetch (volatile signed int *, signed int, int); 733 signed int __atomic_or_fetch_4(volatile signed int *, signed int, int);734 514 unsigned int __atomic_or_fetch (volatile unsigned int *, unsigned int, int); 735 unsigned int __atomic_or_fetch_4(volatile unsigned int *, unsigned int, int); 515 signed long int __atomic_or_fetch (volatile signed long int *, signed long int, int); 516 unsigned long int __atomic_or_fetch (volatile unsigned long int *, unsigned long int, int); 736 517 signed long long int __atomic_or_fetch (volatile signed long long int *, signed long long int, int); 737 signed long long int __atomic_or_fetch_8(volatile signed long long int *, signed long long int, int);738 518 unsigned long long 
int __atomic_or_fetch (volatile unsigned long long int *, unsigned long long int, int); 739 unsigned long long int __atomic_or_fetch_8(volatile unsigned long long int *, unsigned long long int, int);740 519 #if defined(__SIZEOF_INT128__) 741 520 signed __int128 __atomic_or_fetch (volatile signed __int128 *, signed __int128, int); 742 signed __int128 __atomic_or_fetch_16(volatile signed __int128 *, signed __int128, int);743 521 unsigned __int128 __atomic_or_fetch (volatile unsigned __int128 *, unsigned __int128, int); 744 unsigned __int128 __atomic_or_fetch_16(volatile unsigned __int128 *, unsigned __int128, int);745 522 #endif 746 523 747 524 char __atomic_fetch_add (volatile char *, char, int); 748 char __atomic_fetch_add_1(volatile char *, char, int);749 525 signed char __atomic_fetch_add (volatile signed char *, signed char, int); 750 signed char __atomic_fetch_add_1(volatile signed char *, signed char, int);751 526 unsigned char __atomic_fetch_add (volatile unsigned char *, unsigned char, int); 752 unsigned char __atomic_fetch_add_1(volatile unsigned char *, unsigned char, int);753 527 signed short __atomic_fetch_add (volatile signed short *, signed short, int); 754 signed short __atomic_fetch_add_2(volatile signed short *, signed short, int);755 528 unsigned short __atomic_fetch_add (volatile unsigned short *, unsigned short, int); 756 unsigned short __atomic_fetch_add_2(volatile unsigned short *, unsigned short, int);757 529 signed int __atomic_fetch_add (volatile signed int *, signed int, int); 758 signed int __atomic_fetch_add_4(volatile signed int *, signed int, int);759 530 unsigned int __atomic_fetch_add (volatile unsigned int *, unsigned int, int); 760 unsigned int __atomic_fetch_add_4(volatile unsigned int *, unsigned int, int); 531 signed long int __atomic_fetch_add (volatile signed long int *, signed long int, int); 532 unsigned long int __atomic_fetch_add (volatile unsigned long int *, unsigned long int, int); 761 533 signed long long int 
__atomic_fetch_add (volatile signed long long int *, signed long long int, int); 762 signed long long int __atomic_fetch_add_8(volatile signed long long int *, signed long long int, int);763 534 unsigned long long int __atomic_fetch_add (volatile unsigned long long int *, unsigned long long int, int); 764 unsigned long long int __atomic_fetch_add_8(volatile unsigned long long int *, unsigned long long int, int);765 535 #if defined(__SIZEOF_INT128__) 766 536 signed __int128 __atomic_fetch_add (volatile signed __int128 *, signed __int128, int); 767 signed __int128 __atomic_fetch_add_16(volatile signed __int128 *, signed __int128, int);768 537 unsigned __int128 __atomic_fetch_add (volatile unsigned __int128 *, unsigned __int128, int); 769 unsigned __int128 __atomic_fetch_add_16(volatile unsigned __int128 *, unsigned __int128, int);770 538 #endif 771 539 772 540 char __atomic_fetch_sub (volatile char *, char, int); 773 char __atomic_fetch_sub_1(volatile char *, char, int);774 541 signed char __atomic_fetch_sub (volatile signed char *, signed char, int); 775 signed char __atomic_fetch_sub_1(volatile signed char *, signed char, int);776 542 unsigned char __atomic_fetch_sub (volatile unsigned char *, unsigned char, int); 777 unsigned char __atomic_fetch_sub_1(volatile unsigned char *, unsigned char, int);778 543 signed short __atomic_fetch_sub (volatile signed short *, signed short, int); 779 signed short __atomic_fetch_sub_2(volatile signed short *, signed short, int);780 544 unsigned short __atomic_fetch_sub (volatile unsigned short *, unsigned short, int); 781 unsigned short __atomic_fetch_sub_2(volatile unsigned short *, unsigned short, int);782 545 signed int __atomic_fetch_sub (volatile signed int *, signed int, int); 783 signed int __atomic_fetch_sub_4(volatile signed int *, signed int, int);784 546 unsigned int __atomic_fetch_sub (volatile unsigned int *, unsigned int, int); 785 unsigned int __atomic_fetch_sub_4(volatile unsigned int *, unsigned int, int); 547 
signed long int __atomic_fetch_sub (volatile signed long int *, signed long int, int); 548 unsigned long int __atomic_fetch_sub (volatile unsigned long int *, unsigned long int, int); 786 549 signed long long int __atomic_fetch_sub (volatile signed long long int *, signed long long int, int); 787 signed long long int __atomic_fetch_sub_8(volatile signed long long int *, signed long long int, int);788 550 unsigned long long int __atomic_fetch_sub (volatile unsigned long long int *, unsigned long long int, int); 789 unsigned long long int __atomic_fetch_sub_8(volatile unsigned long long int *, unsigned long long int, int);790 551 #if defined(__SIZEOF_INT128__) 791 552 signed __int128 __atomic_fetch_sub (volatile signed __int128 *, signed __int128, int); 792 signed __int128 __atomic_fetch_sub_16(volatile signed __int128 *, signed __int128, int);793 553 unsigned __int128 __atomic_fetch_sub (volatile unsigned __int128 *, unsigned __int128, int); 794 unsigned __int128 __atomic_fetch_sub_16(volatile unsigned __int128 *, unsigned __int128, int);795 554 #endif 796 555 797 556 char __atomic_fetch_and (volatile char *, char, int); 798 char __atomic_fetch_and_1(volatile char *, char, int);799 557 signed char __atomic_fetch_and (volatile signed char *, signed char, int); 800 signed char __atomic_fetch_and_1(volatile signed char *, signed char, int);801 558 unsigned char __atomic_fetch_and (volatile unsigned char *, unsigned char, int); 802 unsigned char __atomic_fetch_and_1(volatile unsigned char *, unsigned char, int);803 559 signed short __atomic_fetch_and (volatile signed short *, signed short, int); 804 signed short __atomic_fetch_and_2(volatile signed short *, signed short, int);805 560 unsigned short __atomic_fetch_and (volatile unsigned short *, unsigned short, int); 806 unsigned short __atomic_fetch_and_2(volatile unsigned short *, unsigned short, int);807 561 signed int __atomic_fetch_and (volatile signed int *, signed int, int); 808 signed int 
__atomic_fetch_and_4(volatile signed int *, signed int, int);809 562 unsigned int __atomic_fetch_and (volatile unsigned int *, unsigned int, int); 810 unsigned int __atomic_fetch_and_4(volatile unsigned int *, unsigned int, int); 563 signed long int __atomic_fetch_and (volatile signed long int *, signed long int, int); 564 unsigned long int __atomic_fetch_and (volatile unsigned long int *, unsigned long int, int); 811 565 signed long long int __atomic_fetch_and (volatile signed long long int *, signed long long int, int); 812 signed long long int __atomic_fetch_and_8(volatile signed long long int *, signed long long int, int);813 566 unsigned long long int __atomic_fetch_and (volatile unsigned long long int *, unsigned long long int, int); 814 unsigned long long int __atomic_fetch_and_8(volatile unsigned long long int *, unsigned long long int, int);815 567 #if defined(__SIZEOF_INT128__) 816 568 signed __int128 __atomic_fetch_and (volatile signed __int128 *, signed __int128, int); 817 signed __int128 __atomic_fetch_and_16(volatile signed __int128 *, signed __int128, int);818 569 unsigned __int128 __atomic_fetch_and (volatile unsigned __int128 *, unsigned __int128, int); 819 unsigned __int128 __atomic_fetch_and_16(volatile unsigned __int128 *, unsigned __int128, int);820 570 #endif 821 571 822 572 char __atomic_fetch_nand (volatile char *, char, int); 823 char __atomic_fetch_nand_1(volatile char *, char, int);824 573 signed char __atomic_fetch_nand (volatile signed char *, signed char, int); 825 signed char __atomic_fetch_nand_1(volatile signed char *, signed char, int);826 574 unsigned char __atomic_fetch_nand (volatile unsigned char *, unsigned char, int); 827 unsigned char __atomic_fetch_nand_1(volatile unsigned char *, unsigned char, int);828 575 signed short __atomic_fetch_nand (volatile signed short *, signed short, int); 829 signed short __atomic_fetch_nand_2(volatile signed short *, signed short, int);830 576 unsigned short __atomic_fetch_nand (volatile 
unsigned short *, unsigned short, int); 831 unsigned short __atomic_fetch_nand_2(volatile unsigned short *, unsigned short, int);832 577 signed int __atomic_fetch_nand (volatile signed int *, signed int, int); 833 signed int __atomic_fetch_nand_4(volatile signed int *, signed int, int);834 578 unsigned int __atomic_fetch_nand (volatile unsigned int *, unsigned int, int); 835 unsigned int __atomic_fetch_nand_4(volatile unsigned int *, unsigned int, int); 579 signed long int __atomic_fetch_nand (volatile signed long int *, signed long int, int); 580 unsigned long int __atomic_fetch_nand (volatile unsigned long int *, unsigned long int, int); 836 581 signed long long int __atomic_fetch_nand (volatile signed long long int *, signed long long int, int); 837 signed long long int __atomic_fetch_nand_8(volatile signed long long int *, signed long long int, int);838 582 unsigned long long int __atomic_fetch_nand (volatile unsigned long long int *, unsigned long long int, int); 839 unsigned long long int __atomic_fetch_nand_8(volatile unsigned long long int *, unsigned long long int, int);840 583 #if defined(__SIZEOF_INT128__) 841 584 signed __int128 __atomic_fetch_nand (volatile signed __int128 *, signed __int128, int); 842 signed __int128 __atomic_fetch_nand_16(volatile signed __int128 *, signed __int128, int);843 585 unsigned __int128 __atomic_fetch_nand (volatile unsigned __int128 *, unsigned __int128, int); 844 unsigned __int128 __atomic_fetch_nand_16(volatile unsigned __int128 *, unsigned __int128, int);845 586 #endif 846 587 847 588 char __atomic_fetch_xor (volatile char *, char, int); 848 char __atomic_fetch_xor_1(volatile char *, char, int);849 589 signed char __atomic_fetch_xor (volatile signed char *, signed char, int); 850 signed char __atomic_fetch_xor_1(volatile signed char *, signed char, int);851 590 unsigned char __atomic_fetch_xor (volatile unsigned char *, unsigned char, int); 852 unsigned char __atomic_fetch_xor_1(volatile unsigned char *, unsigned char, 
int);853 591 signed short __atomic_fetch_xor (volatile signed short *, signed short, int); 854 signed short __atomic_fetch_xor_2(volatile signed short *, signed short, int);855 592 unsigned short __atomic_fetch_xor (volatile unsigned short *, unsigned short, int); 856 unsigned short __atomic_fetch_xor_2(volatile unsigned short *, unsigned short, int);857 593 signed int __atomic_fetch_xor (volatile signed int *, signed int, int); 858 signed int __atomic_fetch_xor_4(volatile signed int *, signed int, int);859 594 unsigned int __atomic_fetch_xor (volatile unsigned int *, unsigned int, int); 860 unsigned int __atomic_fetch_xor_4(volatile unsigned int *, unsigned int, int); 595 signed long int __atomic_fetch_xor (volatile signed long int *, signed long int, int); 596 unsigned long int __atomic_fetch_xor (volatile unsigned long int *, unsigned long int, int); 861 597 signed long long int __atomic_fetch_xor (volatile signed long long int *, signed long long int, int); 862 signed long long int __atomic_fetch_xor_8(volatile signed long long int *, signed long long int, int);863 598 unsigned long long int __atomic_fetch_xor (volatile unsigned long long int *, unsigned long long int, int); 864 unsigned long long int __atomic_fetch_xor_8(volatile unsigned long long int *, unsigned long long int, int);865 599 #if defined(__SIZEOF_INT128__) 866 600 signed __int128 __atomic_fetch_xor (volatile signed __int128 *, signed __int128, int); 867 signed __int128 __atomic_fetch_xor_16(volatile signed __int128 *, signed __int128, int);868 601 unsigned __int128 __atomic_fetch_xor (volatile unsigned __int128 *, unsigned __int128, int); 869 unsigned __int128 __atomic_fetch_xor_16(volatile unsigned __int128 *, unsigned __int128, int);870 602 #endif 871 603 872 604 char __atomic_fetch_or (volatile char *, char, int); 873 char __atomic_fetch_or_1(volatile char *, char, int);874 605 signed char __atomic_fetch_or (volatile signed char *, signed char, int); 875 signed char 
__atomic_fetch_or_1(volatile signed char *, signed char, int);876 606 unsigned char __atomic_fetch_or (volatile unsigned char *, unsigned char, int); 877 unsigned char __atomic_fetch_or_1(volatile unsigned char *, unsigned char, int);878 607 signed short __atomic_fetch_or (volatile signed short *, signed short, int); 879 signed short __atomic_fetch_or_2(volatile signed short *, signed short, int);880 608 unsigned short __atomic_fetch_or (volatile unsigned short *, unsigned short, int); 881 unsigned short __atomic_fetch_or_2(volatile unsigned short *, unsigned short, int);882 609 signed int __atomic_fetch_or (volatile signed int *, signed int, int); 883 signed int __atomic_fetch_or_4(volatile signed int *, signed int, int);884 610 unsigned int __atomic_fetch_or (volatile unsigned int *, unsigned int, int); 885 unsigned int __atomic_fetch_or_4(volatile unsigned int *, unsigned int, int); 611 signed long int __atomic_fetch_or (volatile signed long int *, signed long int, int); 612 unsigned long int __atomic_fetch_or (volatile unsigned long int *, unsigned long int, int); 886 613 signed long long int __atomic_fetch_or (volatile signed long long int *, signed long long int, int); 887 signed long long int __atomic_fetch_or_8(volatile signed long long int *, signed long long int, int);888 614 unsigned long long int __atomic_fetch_or (volatile unsigned long long int *, unsigned long long int, int); 889 unsigned long long int __atomic_fetch_or_8(volatile unsigned long long int *, unsigned long long int, int);890 615 #if defined(__SIZEOF_INT128__) 891 616 signed __int128 __atomic_fetch_or (volatile signed __int128 *, signed __int128, int); 892 signed __int128 __atomic_fetch_or_16(volatile signed __int128 *, signed __int128, int);893 617 unsigned __int128 __atomic_fetch_or (volatile unsigned __int128 *, unsigned __int128, int); 894 unsigned __int128 __atomic_fetch_or_16(volatile unsigned __int128 *, unsigned __int128, int);895 618 #endif 896 619 -
libcfa/src/assert.cfa
r7768b8d r30763fd 10 10 // Created On : Mon Nov 28 12:27:26 2016 11 11 // Last Modified By : Peter A. Buhr 12 // Last Modified On : Thu Jul 20 15:10:26 201713 // Update Count : 212 // Last Modified On : Thu Nov 21 17:09:26 2019 13 // Update Count : 5 14 14 // 15 15 … … 17 17 #include <stdarg.h> // varargs 18 18 #include <stdio.h> // fprintf 19 #include <unistd.h> // STDERR_FILENO 19 20 #include "bits/debug.hfa" 20 21 … … 26 27 // called by macro assert in assert.h 27 28 void __assert_fail( const char *assertion, const char *file, unsigned int line, const char *function ) { 28 __cfaabi_ dbg_bits_print_safe(CFA_ASSERT_FMT ".\n", assertion, __progname, function, line, file );29 __cfaabi_bits_print_safe( STDERR_FILENO, CFA_ASSERT_FMT ".\n", assertion, __progname, function, line, file ); 29 30 abort(); 30 31 } … … 32 33 // called by macro assertf 33 34 void __assert_fail_f( const char *assertion, const char *file, unsigned int line, const char *function, const char *fmt, ... ) { 34 __cfaabi_ dbg_bits_acquire();35 __cfaabi_ dbg_bits_print_nolock(CFA_ASSERT_FMT ": ", assertion, __progname, function, line, file );35 __cfaabi_bits_acquire(); 36 __cfaabi_bits_print_nolock( STDERR_FILENO, CFA_ASSERT_FMT ": ", assertion, __progname, function, line, file ); 36 37 37 38 va_list args; 38 39 va_start( args, fmt ); 39 __cfaabi_ dbg_bits_print_vararg(fmt, args );40 __cfaabi_bits_print_vararg( STDERR_FILENO, fmt, args ); 40 41 va_end( args ); 41 42 42 __cfaabi_ dbg_bits_print_nolock("\n" );43 __cfaabi_ dbg_bits_release();43 __cfaabi_bits_print_nolock( STDERR_FILENO, "\n" ); 44 __cfaabi_bits_release(); 44 45 abort(); 45 46 } -
libcfa/src/bits/align.hfa
r7768b8d r30763fd 10 10 // Created On : Mon Nov 28 12:27:26 2016 11 11 // Last Modified By : Peter A. Buhr 12 // Last Modified On : Fri Jul 21 23:05:35 201713 // Update Count : 212 // Last Modified On : Sat Nov 16 18:58:22 2019 13 // Update Count : 3 14 14 // 15 15 // This library is free software; you can redistribute it and/or modify it … … 33 33 34 34 // Minimum size used to align memory boundaries for memory allocations. 35 #define libAlign() (sizeof(double)) 35 //#define libAlign() (sizeof(double)) 36 // gcc-7 uses xmms instructions, which require 16 byte alignment. 37 #define libAlign() (16) 36 38 37 39 // Check for power of 2 -
libcfa/src/bits/debug.cfa
r7768b8d r30763fd 10 10 // Created On : Thu Mar 30 12:30:01 2017 11 11 // Last Modified By : Peter A. Buhr 12 // Last Modified On : Sun Jul 14 22:17:35201913 // Update Count : 412 // Last Modified On : Thu Nov 21 17:16:30 2019 13 // Update Count : 10 14 14 // 15 15 … … 28 28 extern "C" { 29 29 30 void __cfaabi_ dbg_bits_write(const char *in_buffer, int len ) {30 void __cfaabi_bits_write( int fd, const char *in_buffer, int len ) { 31 31 // ensure all data is written 32 32 for ( int count = 0, retcode; count < len; count += retcode ) { … … 34 34 35 35 for ( ;; ) { 36 retcode = write( STDERR_FILENO, in_buffer, len - count );36 retcode = write( fd, in_buffer, len - count ); 37 37 38 38 // not a timer interrupt ? … … 44 44 } 45 45 46 void __cfaabi_ dbg_bits_acquire() __attribute__((__weak__)) {}47 void __cfaabi_ dbg_bits_release() __attribute__((__weak__)) {}46 void __cfaabi_bits_acquire() __attribute__((__weak__)) {} 47 void __cfaabi_bits_release() __attribute__((__weak__)) {} 48 48 49 void __cfaabi_ dbg_bits_print_safe ( const char fmt[], ... ) __attribute__(( format(printf, 1, 2) )) {49 void __cfaabi_bits_print_safe ( int fd, const char fmt[], ... ) __attribute__(( format(printf, 2, 3) )) { 50 50 va_list args; 51 51 52 52 va_start( args, fmt ); 53 __cfaabi_ dbg_bits_acquire();53 __cfaabi_bits_acquire(); 54 54 55 55 int len = vsnprintf( buffer, buffer_size, fmt, args ); 56 __cfaabi_ dbg_bits_write(buffer, len );56 __cfaabi_bits_write( fd, buffer, len ); 57 57 58 __cfaabi_ dbg_bits_release();58 __cfaabi_bits_release(); 59 59 va_end( args ); 60 60 } 61 61 62 void __cfaabi_ dbg_bits_print_nolock( const char fmt[], ... ) __attribute__(( format(printf, 1, 2) )) {62 void __cfaabi_bits_print_nolock( int fd, const char fmt[], ... 
) __attribute__(( format(printf, 2, 3) )) { 63 63 va_list args; 64 64 … … 66 66 67 67 int len = vsnprintf( buffer, buffer_size, fmt, args ); 68 __cfaabi_ dbg_bits_write(buffer, len );68 __cfaabi_bits_write( fd, buffer, len ); 69 69 70 70 va_end( args ); 71 71 } 72 72 73 void __cfaabi_ dbg_bits_print_vararg(const char fmt[], va_list args ) {73 void __cfaabi_bits_print_vararg( int fd, const char fmt[], va_list args ) { 74 74 int len = vsnprintf( buffer, buffer_size, fmt, args ); 75 __cfaabi_ dbg_bits_write(buffer, len );75 __cfaabi_bits_write( fd, buffer, len ); 76 76 } 77 77 78 void __cfaabi_ dbg_bits_print_buffer( char in_buffer[], int in_buffer_size, const char fmt[], ... ) __attribute__(( format(printf, 3, 4) )) {78 void __cfaabi_bits_print_buffer( int fd, char in_buffer[], int in_buffer_size, const char fmt[], ... ) __attribute__(( format(printf, 4, 5) )) { 79 79 va_list args; 80 80 … … 82 82 83 83 int len = vsnprintf( in_buffer, in_buffer_size, fmt, args ); 84 __cfaabi_ dbg_bits_write(in_buffer, len );84 __cfaabi_bits_write( fd, in_buffer, len ); 85 85 86 86 va_end( args ); -
libcfa/src/bits/debug.hfa
r7768b8d r30763fd 10 10 // Created On : Mon Nov 28 12:27:26 2016 11 11 // Last Modified By : Peter A. Buhr 12 // Last Modified On : Thu Feb 8 12:35:19 201813 // Update Count : 212 // Last Modified On : Thu Nov 21 17:06:58 2019 13 // Update Count : 8 14 14 // 15 15 … … 38 38 #include <stdio.h> 39 39 40 extern void __cfaabi_dbg_bits_write(const char *buffer, int len );41 extern void __cfaabi_dbg_bits_acquire();42 extern void __cfaabi_dbg_bits_release();43 extern void __cfaabi_dbg_bits_print_safe ( const char fmt[], ... ) __attribute__(( format(printf, 1, 2) ));44 extern void __cfaabi_dbg_bits_print_nolock( const char fmt[], ... ) __attribute__(( format(printf, 1, 2) ));45 extern void __cfaabi_dbg_bits_print_vararg(const char fmt[], va_list arg );46 extern void __cfaabi_dbg_bits_print_buffer( char buffer[], int buffer_size, const char fmt[], ... ) __attribute__(( format(printf, 3, 4) ));40 extern void __cfaabi_bits_write( int fd, const char *buffer, int len ); 41 extern void __cfaabi_bits_acquire(); 42 extern void __cfaabi_bits_release(); 43 extern void __cfaabi_bits_print_safe ( int fd, const char fmt[], ... ) __attribute__(( format(printf, 2, 3) )); 44 extern void __cfaabi_bits_print_nolock( int fd, const char fmt[], ... ) __attribute__(( format(printf, 2, 3) )); 45 extern void __cfaabi_bits_print_vararg( int fd, const char fmt[], va_list arg ); 46 extern void __cfaabi_bits_print_buffer( int fd, char buffer[], int buffer_size, const char fmt[], ... ) __attribute__(( format(printf, 4, 5) )); 47 47 #ifdef __cforall 48 48 } … … 50 50 51 51 #ifdef __CFA_DEBUG_PRINT__ 52 #define __cfaabi_dbg_write( buffer, len ) __cfaabi_ dbg_bits_write(buffer, len )53 #define __cfaabi_dbg_acquire() __cfaabi_ dbg_bits_acquire()54 #define __cfaabi_dbg_release() __cfaabi_ dbg_bits_release()55 #define __cfaabi_dbg_print_safe(...) __cfaabi_ dbg_bits_print_safe (__VA_ARGS__)56 #define __cfaabi_dbg_print_nolock(...) 
__cfaabi_ dbg_bits_print_nolock (__VA_ARGS__)57 #define __cfaabi_dbg_print_buffer(...) __cfaabi_ dbg_bits_print_buffer (__VA_ARGS__)58 #define __cfaabi_dbg_print_buffer_decl(...) char __dbg_text[256]; int __dbg_len = snprintf( __dbg_text, 256, __VA_ARGS__ ); __cfaabi_ dbg_bits_write( __dbg_text, __dbg_len );59 #define __cfaabi_dbg_print_buffer_local(...) __dbg_len = snprintf( __dbg_text, 256, __VA_ARGS__ ); __cfaabi_dbg_ bits_write( __dbg_text, __dbg_len );52 #define __cfaabi_dbg_write( buffer, len ) __cfaabi_bits_write( STDERR_FILENO, buffer, len ) 53 #define __cfaabi_dbg_acquire() __cfaabi_bits_acquire() 54 #define __cfaabi_dbg_release() __cfaabi_bits_release() 55 #define __cfaabi_dbg_print_safe(...) __cfaabi_bits_print_safe (__VA_ARGS__) 56 #define __cfaabi_dbg_print_nolock(...) __cfaabi_bits_print_nolock (__VA_ARGS__) 57 #define __cfaabi_dbg_print_buffer(...) __cfaabi_bits_print_buffer (__VA_ARGS__) 58 #define __cfaabi_dbg_print_buffer_decl(...) char __dbg_text[256]; int __dbg_len = snprintf( __dbg_text, 256, __VA_ARGS__ ); __cfaabi_bits_write( __dbg_text, __dbg_len ); 59 #define __cfaabi_dbg_print_buffer_local(...) __dbg_len = snprintf( __dbg_text, 256, __VA_ARGS__ ); __cfaabi_dbg_write( __dbg_text, __dbg_len ); 60 60 #else 61 61 #define __cfaabi_dbg_write(...) ((void)0) -
libcfa/src/concurrency/kernel.cfa
r7768b8d r30763fd 10 10 // Created On : Tue Jan 17 12:27:26 2017 11 11 // Last Modified By : Peter A. Buhr 12 // Last Modified On : Thu Jun 20 17:21:23201913 // Update Count : 2 512 // Last Modified On : Thu Nov 21 16:46:59 2019 13 // Update Count : 27 14 14 // 15 15 … … 841 841 if(thrd) { 842 842 int len = snprintf( abort_text, abort_text_size, "Error occurred while executing thread %.256s (%p)", thrd->self_cor.name, thrd ); 843 __cfaabi_ dbg_bits_write(abort_text, len );843 __cfaabi_bits_write( STDERR_FILENO, abort_text, len ); 844 844 845 845 if ( &thrd->self_cor != thrd->curr_cor ) { 846 846 len = snprintf( abort_text, abort_text_size, " in coroutine %.256s (%p).\n", thrd->curr_cor->name, thrd->curr_cor ); 847 __cfaabi_ dbg_bits_write(abort_text, len );847 __cfaabi_bits_write( STDERR_FILENO, abort_text, len ); 848 848 } 849 849 else { 850 __cfaabi_ dbg_bits_write(".\n", 2 );850 __cfaabi_bits_write( STDERR_FILENO, ".\n", 2 ); 851 851 } 852 852 } 853 853 else { 854 854 int len = snprintf( abort_text, abort_text_size, "Error occurred outside of any thread.\n" ); 855 __cfaabi_ dbg_bits_write(abort_text, len );855 __cfaabi_bits_write( STDERR_FILENO, abort_text, len ); 856 856 } 857 857 } … … 864 864 865 865 extern "C" { 866 void __cfaabi_ dbg_bits_acquire() {866 void __cfaabi_bits_acquire() { 867 867 lock( kernel_debug_lock __cfaabi_dbg_ctx2 ); 868 868 } 869 869 870 void __cfaabi_ dbg_bits_release() {870 void __cfaabi_bits_release() { 871 871 unlock( kernel_debug_lock ); 872 872 } -
libcfa/src/heap.cfa
r7768b8d r30763fd 10 10 // Created On : Tue Dec 19 21:58:35 2017 11 11 // Last Modified By : Peter A. Buhr 12 // Last Modified On : Fri Oct 18 07:42:09201913 // Update Count : 55612 // Last Modified On : Sun Nov 24 17:56:15 2019 13 // Update Count : 638 14 14 // 15 15 … … 18 18 #include <stdio.h> // snprintf, fileno 19 19 #include <errno.h> // errno 20 #include <string.h> // memset, memcpy 20 21 extern "C" { 21 22 #include <sys/mman.h> // mmap, munmap … … 27 28 #include "bits/locks.hfa" // __spinlock_t 28 29 #include "startup.hfa" // STARTUP_PRIORITY_MEMORY 29 #include "stdlib.hfa" // bsearchl30 //#include "stdlib.hfa" // bsearchl 30 31 #include "malloc.h" 31 32 33 #define MIN(x, y) (y > x ? x : y) 32 34 33 35 static bool traceHeap = false; … … 50 52 51 53 52 static bool checkFree = false;53 54 inline bool checkFree() {55 return checkFree;56 } // checkFree57 58 bool checkFreeOn() {59 bool temp = checkFree;60 checkFree = true;54 static bool prtFree = false; 55 56 inline bool prtFree() { 57 return prtFree; 58 } // prtFree 59 60 bool prtFreeOn() { 61 bool temp = prtFree; 62 prtFree = true; 61 63 return temp; 62 } // checkFreeOn63 64 bool checkFreeOff() {65 bool temp = checkFree;66 checkFree = false;64 } // prtFreeOn 65 66 bool prtFreeOff() { 67 bool temp = prtFree; 68 prtFree = false; 67 69 return temp; 68 } // checkFreeOff70 } // prtFreeOff 69 71 70 72 … … 89 91 90 92 enum { 93 // Define the default extension heap amount in units of bytes. When the uC++ supplied heap reaches the brk address, 94 // the brk address is extended by the extension amount. 95 __CFA_DEFAULT_HEAP_EXPANSION__ = (1 * 1024 * 1024), 96 97 // Define the mmap crossover point during allocation. Allocations less than this amount are allocated from buckets; 98 // values greater than or equal to this value are mmap from the operating system. 
91 99 __CFA_DEFAULT_MMAP_START__ = (512 * 1024 + 1), 92 __CFA_DEFAULT_HEAP_EXPANSION__ = (1 * 1024 * 1024),93 100 }; 94 101 … … 105 112 static unsigned int allocFree; // running total of allocations minus frees 106 113 107 static void checkUnfreed() {114 static void prtUnfreed() { 108 115 if ( allocFree != 0 ) { 109 116 // DO NOT USE STREAMS AS THEY MAY BE UNAVAILABLE AT THIS POINT. … … 112 119 // "Possible cause is unfreed storage allocated by the program or system/library routines called from the program.\n", 113 120 // (long int)getpid(), allocFree, allocFree ); // always print the UNIX pid 114 // __cfaabi_dbg_bits_write( helpText, len );115 } // if 116 } // checkUnfreed121 // __cfaabi_dbg_bits_write( STDERR_FILENO, helpText, len ); // print debug/nodebug 122 } // if 123 } // prtUnfreed 117 124 118 125 extern "C" { … … 123 130 void heapAppStop() { // called by __cfaabi_appready_startdown 124 131 fclose( stdin ); fclose( stdout ); 125 checkUnfreed();132 prtUnfreed(); 126 133 } // heapAppStop 127 134 } // extern "C" 128 135 #endif // __CFA_DEBUG__ 136 129 137 130 138 // statically allocated variables => zero filled. … … 134 142 static unsigned int maxBucketsUsed; // maximum number of buckets in use 135 143 136 137 // #comment TD : This defined is significantly different from the __ALIGN__ define from locks.hfa138 #define ALIGN 16139 144 140 145 #define SPINLOCK 0 … … 147 152 // Recursive definitions: HeapManager needs size of bucket array and bucket area needs sizeof HeapManager storage. 148 153 // Break recusion by hardcoding number of buckets and statically checking number is correct after bucket array defined. 
149 enum { NoBucketSizes = 9 3}; // number of buckets sizes154 enum { NoBucketSizes = 91 }; // number of buckets sizes 150 155 151 156 struct HeapManager { … … 194 199 } kind; // Kind 195 200 } header; // Header 196 char pad[ ALIGN- sizeof( Header )];201 char pad[libAlign() - sizeof( Header )]; 197 202 char data[0]; // storage 198 203 }; // Storage 199 204 200 static_assert( ALIGN >= sizeof( Storage ), "ALIGN< sizeof( Storage )" );205 static_assert( libAlign() >= sizeof( Storage ), "libAlign() < sizeof( Storage )" ); 201 206 202 207 struct FreeHeader { … … 228 233 #define __STATISTICS__ 229 234 235 // Bucket size must be multiple of 16. 230 236 // Powers of 2 are common allocation sizes, so make powers of 2 generate the minimum required size. 231 237 static const unsigned int bucketSizes[] @= { // different bucket sizes 232 16, 32, 48, 64, 233 64 + sizeof(HeapManager.Storage), 96, 112, 128, 128 + sizeof(HeapManager.Storage), 160, 192, 224, 234 256 + sizeof(HeapManager.Storage), 320, 384, 448, 512 + sizeof(HeapManager.Storage), 640, 768, 896, 235 1_024 + sizeof(HeapManager.Storage), 1_536, 2_048 + sizeof(HeapManager.Storage), 2_560, 3_072, 3_584, 4_096 + sizeof(HeapManager.Storage), 6_144, 236 8_192 + sizeof(HeapManager.Storage), 9_216, 10_240, 11_264, 12_288, 13_312, 14_336, 15_360, 237 16_384 + sizeof(HeapManager.Storage), 18_432, 20_480, 22_528, 24_576, 26_624, 28_672, 30_720, 238 32_768 + sizeof(HeapManager.Storage), 36_864, 40_960, 45_056, 49_152, 53_248, 57_344, 61_440, 239 65_536 + sizeof(HeapManager.Storage), 73_728, 81_920, 90_112, 98_304, 106_496, 114_688, 122_880, 240 131_072 + sizeof(HeapManager.Storage), 147_456, 163_840, 180_224, 196_608, 212_992, 229_376, 245_760, 241 262_144 + sizeof(HeapManager.Storage), 294_912, 327_680, 360_448, 393_216, 425_984, 458_752, 491_520, 242 524_288 + sizeof(HeapManager.Storage), 655_360, 786_432, 917_504, 1_048_576 + sizeof(HeapManager.Storage), 1_179_648, 1_310_720, 1_441_792, 243 1_572_864, 1_703_936, 1_835_008, 
1_966_080, 2_097_152 + sizeof(HeapManager.Storage), 2_621_440, 3_145_728, 3_670_016, 244 4_194_304 + sizeof(HeapManager.Storage) 238 16, 32, 48, 64 + sizeof(HeapManager.Storage), // 4 239 96, 112, 128 + sizeof(HeapManager.Storage), // 3 240 160, 192, 224, 256 + sizeof(HeapManager.Storage), // 4 241 320, 384, 448, 512 + sizeof(HeapManager.Storage), // 4 242 640, 768, 896, 1_024 + sizeof(HeapManager.Storage), // 4 243 1_536, 2_048 + sizeof(HeapManager.Storage), // 2 244 2_560, 3_072, 3_584, 4_096 + sizeof(HeapManager.Storage), // 4 245 6_144, 8_192 + sizeof(HeapManager.Storage), // 2 246 9_216, 10_240, 11_264, 12_288, 13_312, 14_336, 15_360, 16_384 + sizeof(HeapManager.Storage), // 8 247 18_432, 20_480, 22_528, 24_576, 26_624, 28_672, 30_720, 32_768 + sizeof(HeapManager.Storage), // 8 248 36_864, 40_960, 45_056, 49_152, 53_248, 57_344, 61_440, 65_536 + sizeof(HeapManager.Storage), // 8 249 73_728, 81_920, 90_112, 98_304, 106_496, 114_688, 122_880, 131_072 + sizeof(HeapManager.Storage), // 8 250 147_456, 163_840, 180_224, 196_608, 212_992, 229_376, 245_760, 262_144 + sizeof(HeapManager.Storage), // 8 251 294_912, 327_680, 360_448, 393_216, 425_984, 458_752, 491_520, 524_288 + sizeof(HeapManager.Storage), // 8 252 655_360, 786_432, 917_504, 1_048_576 + sizeof(HeapManager.Storage), // 4 253 1_179_648, 1_310_720, 1_441_792, 1_572_864, 1_703_936, 1_835_008, 1_966_080, 2_097_152 + sizeof(HeapManager.Storage), // 8 254 2_621_440, 3_145_728, 3_670_016, 4_194_304 + sizeof(HeapManager.Storage), // 4 245 255 }; 246 256 … … 251 261 static unsigned char lookup[LookupSizes]; // O(1) lookup for small sizes 252 262 #endif // FASTLOOKUP 263 253 264 static int mmapFd = -1; // fake or actual fd for anonymous file 254 255 256 265 #ifdef __CFA_DEBUG__ 257 266 static bool heapBoot = 0; // detect recursion during boot … … 259 268 static HeapManager heapManager __attribute__(( aligned (128) )) @= {}; // size of cache line to prevent false sharing 260 269 261 // #comment TD : The return type 
of this function should be commented262 static inline bool setMmapStart( size_t value ) {263 if ( value < pageSize || bucketSizes[NoBucketSizes - 1] < value ) return true;264 mmapStart = value; // set global265 266 // find the closest bucket size less than or equal to the mmapStart size267 maxBucketsUsed = bsearchl( (unsigned int)mmapStart, bucketSizes, NoBucketSizes ); // binary search268 assert( maxBucketsUsed < NoBucketSizes ); // subscript failure ?269 assert( mmapStart <= bucketSizes[maxBucketsUsed] ); // search failure ?270 return false;271 } // setMmapStart272 273 274 static void ?{}( HeapManager & manager ) with ( manager ) {275 pageSize = sysconf( _SC_PAGESIZE );276 277 for ( unsigned int i = 0; i < NoBucketSizes; i += 1 ) { // initialize the free lists278 freeLists[i].blockSize = bucketSizes[i];279 } // for280 281 #ifdef FASTLOOKUP282 unsigned int idx = 0;283 for ( unsigned int i = 0; i < LookupSizes; i += 1 ) {284 if ( i > bucketSizes[idx] ) idx += 1;285 lookup[i] = idx;286 } // for287 #endif // FASTLOOKUP288 289 if ( setMmapStart( default_mmap_start() ) ) {290 abort( "HeapManager : internal error, mmap start initialization failure." 
);291 } // if292 heapExpand = default_heap_expansion();293 294 char * End = (char *)sbrk( 0 );295 sbrk( (char *)libCeiling( (long unsigned int)End, libAlign() ) - End ); // move start of heap to multiple of alignment296 heapBegin = heapEnd = sbrk( 0 ); // get new start point297 } // HeapManager298 299 300 static void ^?{}( HeapManager & ) {301 #ifdef __STATISTICS__302 // if ( traceHeapTerm() ) {303 // printStats();304 // if ( checkfree() ) checkFree( heapManager, true );305 // } // if306 #endif // __STATISTICS__307 } // ~HeapManager308 309 310 static void memory_startup( void ) __attribute__(( constructor( STARTUP_PRIORITY_MEMORY ) ));311 void memory_startup( void ) {312 #ifdef __CFA_DEBUG__313 if ( unlikely( heapBoot ) ) { // check for recursion during system boot314 // DO NOT USE STREAMS AS THEY MAY BE UNAVAILABLE AT THIS POINT.315 abort( "boot() : internal error, recursively invoked during system boot." );316 } // if317 heapBoot = true;318 #endif // __CFA_DEBUG__319 320 //assert( heapManager.heapBegin != 0 );321 //heapManager{};322 if ( heapManager.heapBegin == 0 ) heapManager{};323 } // memory_startup324 325 static void memory_shutdown( void ) __attribute__(( destructor( STARTUP_PRIORITY_MEMORY ) ));326 void memory_shutdown( void ) {327 ^heapManager{};328 } // memory_shutdown329 330 270 331 271 #ifdef __STATISTICS__ 332 static unsigned long long int mmap_storage; // heap statistics counters 272 // Heap statistics counters. 273 static unsigned long long int mmap_storage; 333 274 static unsigned int mmap_calls; 334 275 static unsigned long long int munmap_storage; … … 348 289 static unsigned long long int realloc_storage; 349 290 static unsigned int realloc_calls; 350 351 static int statfd; // statistics file descriptor (changed by malloc_stats_fd) 352 291 // Statistics file descriptor (changed by malloc_stats_fd). 292 static int statfd = STDERR_FILENO; // default stderr 353 293 354 294 // Use "write" because streams may be shutdown when calls are made. 
355 295 static void printStats() { 356 296 char helpText[512]; 357 __cfaabi_ dbg_bits_print_buffer(helpText, sizeof(helpText),297 __cfaabi_bits_print_buffer( STDERR_FILENO, helpText, sizeof(helpText), 358 298 "\nHeap statistics:\n" 359 299 " malloc: calls %u / storage %llu\n" … … 405 345 sbrk_calls, sbrk_storage 406 346 ); 407 return write( fileno( stream ), helpText, len ); // -1 => error 347 __cfaabi_bits_write( fileno( stream ), helpText, len ); // ensures all bytes written or exit 348 return len; 408 349 } // printStatsXML 409 350 #endif // __STATISTICS__ 351 410 352 411 353 // #comment TD : Is this the samething as Out-of-Memory? … … 418 360 419 361 static inline void checkAlign( size_t alignment ) { 420 if ( alignment < sizeof(void *) || ! libPow2( alignment ) ) {421 abort( "Alignment %zu for memory allocation is less than sizeof(void *) and/or not a power of 2.", alignment);362 if ( alignment < libAlign() || ! libPow2( alignment ) ) { 363 abort( "Alignment %zu for memory allocation is less than %d and/or not a power of 2.", alignment, libAlign() ); 422 364 } // if 423 365 } // checkAlign … … 431 373 432 374 433 static inline void checkHeader( bool check, const char * name, void * addr ) { 434 if ( unlikely( check ) ) { // bad address ? 435 abort( "Attempt to %s storage %p with address outside the heap.\n" 436 "Possible cause is duplicate free on same block or overwriting of memory.", 437 name, addr ); 438 } // if 439 } // checkHeader 440 441 // #comment TD : function should be commented and/or have a more evocative name 442 // this isn't either a check or a constructor which is what I would expect this function to be 443 static inline void fakeHeader( HeapManager.Storage.Header *& header, size_t & size, size_t & alignment ) { 444 if ( unlikely( (header->kind.fake.alignment & 1) == 1 ) ) { // fake header ? 
445 size_t offset = header->kind.fake.offset; 446 alignment = header->kind.fake.alignment & -2; // remove flag from value 447 #ifdef __CFA_DEBUG__ 448 checkAlign( alignment ); // check alignment 449 #endif // __CFA_DEBUG__ 450 header = (HeapManager.Storage.Header *)((char *)header - offset); 451 } // if 452 } // fakeHeader 453 454 // #comment TD : Why is this a define 455 #define headerAddr( addr ) ((HeapManager.Storage.Header *)( (char *)addr - sizeof(HeapManager.Storage) )) 456 457 static inline bool headers( const char * name, void * addr, HeapManager.Storage.Header *& header, HeapManager.FreeHeader *& freeElem, size_t & size, size_t & alignment ) with ( heapManager ) { 458 header = headerAddr( addr ); 459 460 if ( unlikely( heapEnd < addr ) ) { // mmapped ? 461 fakeHeader( header, size, alignment ); 462 size = header->kind.real.blockSize & -3; // mmap size 463 return true; 464 } // if 465 466 #ifdef __CFA_DEBUG__ 467 checkHeader( addr < heapBegin || header < (HeapManager.Storage.Header *)heapBegin, name, addr ); // bad low address ? 468 #endif // __CFA_DEBUG__ 469 470 // #comment TD : This code looks weird... 471 // It's called as the first statement of both branches of the last if, with the same parameters in all cases 472 473 // header may be safe to dereference 474 fakeHeader( header, size, alignment ); 475 #ifdef __CFA_DEBUG__ 476 checkHeader( header < (HeapManager.Storage.Header *)heapBegin || (HeapManager.Storage.Header *)heapEnd < header, name, addr ); // bad address ? 
(offset could be + or -) 477 #endif // __CFA_DEBUG__ 478 479 freeElem = (HeapManager.FreeHeader *)((size_t)header->kind.real.home & -3); 480 #ifdef __CFA_DEBUG__ 481 if ( freeElem < &freeLists[0] || &freeLists[NoBucketSizes] <= freeElem ) { 482 abort( "Attempt to %s storage %p with corrupted header.\n" 483 "Possible cause is duplicate free on same block or overwriting of header information.", 484 name, addr ); 485 } // if 486 #endif // __CFA_DEBUG__ 487 size = freeElem->blockSize; 488 return false; 489 } // headers 490 491 492 static inline void * extend( size_t size ) with ( heapManager ) { 493 lock( extlock __cfaabi_dbg_ctx2 ); 494 ptrdiff_t rem = heapRemaining - size; 495 if ( rem < 0 ) { 496 // If the size requested is bigger than the current remaining storage, increase the size of the heap. 497 498 size_t increase = libCeiling( size > heapExpand ? size : heapExpand, libAlign() ); 499 if ( sbrk( increase ) == (void *)-1 ) { 500 unlock( extlock ); 501 errno = ENOMEM; 502 return 0; 503 } // if 504 #ifdef __STATISTICS__ 505 sbrk_calls += 1; 506 sbrk_storage += increase; 507 #endif // __STATISTICS__ 508 #ifdef __CFA_DEBUG__ 509 // Set new memory to garbage so subsequent uninitialized usages might fail. 
510 memset( (char *)heapEnd + heapRemaining, '\377', increase ); 511 #endif // __CFA_DEBUG__ 512 rem = heapRemaining + increase - size; 513 } // if 514 515 HeapManager.Storage * block = (HeapManager.Storage *)heapEnd; 516 heapRemaining = rem; 517 heapEnd = (char *)heapEnd + size; 518 unlock( extlock ); 519 return block; 520 } // extend 521 522 375 // thunk problem 523 376 size_t Bsearchl( unsigned int key, const unsigned int * vals, size_t dim ) { 524 377 size_t l = 0, m, h = dim; … … 535 388 536 389 390 static inline bool setMmapStart( size_t value ) { // true => mmapped, false => sbrk 391 if ( value < pageSize || bucketSizes[NoBucketSizes - 1] < value ) return true; 392 mmapStart = value; // set global 393 394 // find the closest bucket size less than or equal to the mmapStart size 395 maxBucketsUsed = Bsearchl( (unsigned int)mmapStart, bucketSizes, NoBucketSizes ); // binary search 396 assert( maxBucketsUsed < NoBucketSizes ); // subscript failure ? 397 assert( mmapStart <= bucketSizes[maxBucketsUsed] ); // search failure ? 398 return false; 399 } // setMmapStart 400 401 402 static inline void checkHeader( bool check, const char * name, void * addr ) { 403 if ( unlikely( check ) ) { // bad address ? 404 abort( "Attempt to %s storage %p with address outside the heap.\n" 405 "Possible cause is duplicate free on same block or overwriting of memory.", 406 name, addr ); 407 } // if 408 } // checkHeader 409 410 411 static inline void fakeHeader( HeapManager.Storage.Header *& header, size_t & alignment ) { 412 if ( unlikely( (header->kind.fake.alignment & 1) == 1 ) ) { // fake header ? 
413 size_t offset = header->kind.fake.offset; 414 alignment = header->kind.fake.alignment & -2; // remove flag from value 415 #ifdef __CFA_DEBUG__ 416 checkAlign( alignment ); // check alignment 417 #endif // __CFA_DEBUG__ 418 header = (HeapManager.Storage.Header *)((char *)header - offset); 419 } // if 420 } // fakeHeader 421 422 423 // <-------+----------------------------------------------------> bsize (bucket size) 424 // |header |addr 425 //================================================================================== 426 // | alignment 427 // <-----------------<------------+-----------------------------> bsize (bucket size) 428 // |fake-header | addr 429 #define headerAddr( addr ) ((HeapManager.Storage.Header *)( (char *)addr - sizeof(HeapManager.Storage) )) 430 431 // <-------<<--------------------- dsize ---------------------->> bsize (bucket size) 432 // |header |addr 433 //================================================================================== 434 // | alignment 435 // <------------------------------<<---------- dsize --------->>> bsize (bucket size) 436 // |fake-header |addr 437 #define dataStorage( bsize, addr, header ) (bsize - ( (char *)addr - (char *)header )) 438 439 440 static inline bool headers( const char * name __attribute__(( unused )), void * addr, HeapManager.Storage.Header *& header, HeapManager.FreeHeader *& freeElem, size_t & size, size_t & alignment ) with ( heapManager ) { 441 header = headerAddr( addr ); 442 443 if ( unlikely( heapEnd < addr ) ) { // mmapped ? 444 fakeHeader( header, alignment ); 445 size = header->kind.real.blockSize & -3; // mmap size 446 return true; 447 } // if 448 449 #ifdef __CFA_DEBUG__ 450 checkHeader( addr < heapBegin || header < (HeapManager.Storage.Header *)heapBegin, name, addr ); // bad low address ? 
451 #endif // __CFA_DEBUG__ 452 453 // header may be safe to dereference 454 fakeHeader( header, alignment ); 455 #ifdef __CFA_DEBUG__ 456 checkHeader( header < (HeapManager.Storage.Header *)heapBegin || (HeapManager.Storage.Header *)heapEnd < header, name, addr ); // bad address ? (offset could be + or -) 457 #endif // __CFA_DEBUG__ 458 459 freeElem = (HeapManager.FreeHeader *)((size_t)header->kind.real.home & -3); 460 #ifdef __CFA_DEBUG__ 461 if ( freeElem < &freeLists[0] || &freeLists[NoBucketSizes] <= freeElem ) { 462 abort( "Attempt to %s storage %p with corrupted header.\n" 463 "Possible cause is duplicate free on same block or overwriting of header information.", 464 name, addr ); 465 } // if 466 #endif // __CFA_DEBUG__ 467 size = freeElem->blockSize; 468 return false; 469 } // headers 470 471 472 static inline void * extend( size_t size ) with ( heapManager ) { 473 lock( extlock __cfaabi_dbg_ctx2 ); 474 ptrdiff_t rem = heapRemaining - size; 475 if ( rem < 0 ) { 476 // If the size requested is bigger than the current remaining storage, increase the size of the heap. 477 478 size_t increase = libCeiling( size > heapExpand ? size : heapExpand, libAlign() ); 479 if ( sbrk( increase ) == (void *)-1 ) { 480 unlock( extlock ); 481 errno = ENOMEM; 482 return 0p; 483 } // if 484 #ifdef __STATISTICS__ 485 sbrk_calls += 1; 486 sbrk_storage += increase; 487 #endif // __STATISTICS__ 488 #ifdef __CFA_DEBUG__ 489 // Set new memory to garbage so subsequent uninitialized usages might fail. 
490 memset( (char *)heapEnd + heapRemaining, '\377', increase ); 491 #endif // __CFA_DEBUG__ 492 rem = heapRemaining + increase - size; 493 } // if 494 495 HeapManager.Storage * block = (HeapManager.Storage *)heapEnd; 496 heapRemaining = rem; 497 heapEnd = (char *)heapEnd + size; 498 unlock( extlock ); 499 return block; 500 } // extend 501 502 537 503 static inline void * doMalloc( size_t size ) with ( heapManager ) { 538 504 HeapManager.Storage * block; // pointer to new block of storage … … 541 507 // along with the block and is a multiple of the alignment size. 542 508 543 if ( unlikely( size > ~0ul - sizeof(HeapManager.Storage) ) ) return 0 ;509 if ( unlikely( size > ~0ul - sizeof(HeapManager.Storage) ) ) return 0p; 544 510 size_t tsize = size + sizeof(HeapManager.Storage); 545 511 if ( likely( tsize < mmapStart ) ) { // small size => sbrk … … 574 540 block = freeElem->freeList.pop(); 575 541 #endif // SPINLOCK 576 if ( unlikely( block == 0 ) ) {// no free block ?542 if ( unlikely( block == 0p ) ) { // no free block ? 
577 543 #if defined( SPINLOCK ) 578 544 unlock( freeElem->lock ); … … 583 549 584 550 block = (HeapManager.Storage *)extend( tsize ); // mutual exclusion on call 585 if ( unlikely( block == 0 ) ) return 0;586 551 if ( unlikely( block == 0p ) ) return 0p; 552 #if defined( SPINLOCK ) 587 553 } else { 588 554 freeElem->freeList = block->header.kind.real.next; 589 555 unlock( freeElem->lock ); 590 556 #endif // SPINLOCK 591 557 } // if 592 558 593 559 block->header.kind.real.home = freeElem; // pointer back to free list of apropriate size 594 560 } else { // large size => mmap 595 if ( unlikely( size > ~0ul - pageSize ) ) return 0 ;561 if ( unlikely( size > ~0ul - pageSize ) ) return 0p; 596 562 tsize = libCeiling( tsize, pageSize ); // must be multiple of page size 597 563 #ifdef __STATISTICS__ … … 611 577 } // if 612 578 613 void * a rea= &(block->data); // adjust off header to user bytes579 void * addr = &(block->data); // adjust off header to user bytes 614 580 615 581 #ifdef __CFA_DEBUG__ 616 assert( ((uintptr_t)a rea& (libAlign() - 1)) == 0 ); // minimum alignment ?582 assert( ((uintptr_t)addr & (libAlign() - 1)) == 0 ); // minimum alignment ? 
617 583 __atomic_add_fetch( &allocFree, tsize, __ATOMIC_SEQ_CST ); 618 584 if ( traceHeap() ) { 619 585 enum { BufferSize = 64 }; 620 586 char helpText[BufferSize]; 621 int len = snprintf( helpText, BufferSize, "%p = Malloc( %zu ) (allocated %zu)\n", a rea, size, tsize );622 // int len = snprintf( helpText, BufferSize, "Malloc %p %zu\n", a rea, size );623 __cfaabi_ dbg_bits_write( helpText, len );587 int len = snprintf( helpText, BufferSize, "%p = Malloc( %zu ) (allocated %zu)\n", addr, size, tsize ); 588 // int len = snprintf( helpText, BufferSize, "Malloc %p %zu\n", addr, size ); 589 __cfaabi_bits_write( STDERR_FILENO, helpText, len ); // print debug/nodebug 624 590 } // if 625 591 #endif // __CFA_DEBUG__ 626 592 627 return a rea;593 return addr; 628 594 } // doMalloc 629 595 … … 631 597 static inline void doFree( void * addr ) with ( heapManager ) { 632 598 #ifdef __CFA_DEBUG__ 633 if ( unlikely( heapManager.heapBegin == 0 ) ) {599 if ( unlikely( heapManager.heapBegin == 0p ) ) { 634 600 abort( "doFree( %p ) : internal error, called before heap is initialized.", addr ); 635 601 } // if … … 677 643 char helpText[BufferSize]; 678 644 int len = snprintf( helpText, sizeof(helpText), "Free( %p ) size:%zu\n", addr, size ); 679 __cfaabi_ dbg_bits_write( helpText, len );645 __cfaabi_bits_write( STDERR_FILENO, helpText, len ); // print debug/nodebug 680 646 } // if 681 647 #endif // __CFA_DEBUG__ … … 683 649 684 650 685 size_t checkFree( HeapManager & manager ) with ( manager ) {651 size_t prtFree( HeapManager & manager ) with ( manager ) { 686 652 size_t total = 0; 687 653 #ifdef __STATISTICS__ 688 __cfaabi_ dbg_bits_acquire();689 __cfaabi_ dbg_bits_print_nolock("\nBin lists (bin size : free blocks on list)\n" );654 __cfaabi_bits_acquire(); 655 __cfaabi_bits_print_nolock( STDERR_FILENO, "\nBin lists (bin size : free blocks on list)\n" ); 690 656 #endif // __STATISTICS__ 691 657 for ( unsigned int i = 0; i < maxBucketsUsed; i += 1 ) { … … 696 662 697 663 #if defined( 
SPINLOCK ) 698 for ( HeapManager.Storage * p = freeLists[i].freeList; p != 0 ; p = p->header.kind.real.next ) {664 for ( HeapManager.Storage * p = freeLists[i].freeList; p != 0p; p = p->header.kind.real.next ) { 699 665 #else 700 for ( HeapManager.Storage * p = freeLists[i].freeList.top(); p != 0 ; p = p->header.kind.real.next.top ) {666 for ( HeapManager.Storage * p = freeLists[i].freeList.top(); p != 0p; p = p->header.kind.real.next.top ) { 701 667 #endif // SPINLOCK 702 668 total += size; … … 707 673 708 674 #ifdef __STATISTICS__ 709 __cfaabi_ dbg_bits_print_nolock("%7zu, %-7u ", size, N );710 if ( (i + 1) % 8 == 0 ) __cfaabi_ dbg_bits_print_nolock("\n" );675 __cfaabi_bits_print_nolock( STDERR_FILENO, "%7zu, %-7u ", size, N ); 676 if ( (i + 1) % 8 == 0 ) __cfaabi_bits_print_nolock( STDERR_FILENO, "\n" ); 711 677 #endif // __STATISTICS__ 712 678 } // for 713 679 #ifdef __STATISTICS__ 714 __cfaabi_ dbg_bits_print_nolock("\ntotal free blocks:%zu\n", total );715 __cfaabi_ dbg_bits_release();680 __cfaabi_bits_print_nolock( STDERR_FILENO, "\ntotal free blocks:%zu\n", total ); 681 __cfaabi_bits_release(); 716 682 #endif // __STATISTICS__ 717 683 return (char *)heapEnd - (char *)heapBegin - total; 718 } // checkFree 684 } // prtFree 685 686 687 static void ?{}( HeapManager & manager ) with ( manager ) { 688 pageSize = sysconf( _SC_PAGESIZE ); 689 690 for ( unsigned int i = 0; i < NoBucketSizes; i += 1 ) { // initialize the free lists 691 freeLists[i].blockSize = bucketSizes[i]; 692 } // for 693 694 #ifdef FASTLOOKUP 695 unsigned int idx = 0; 696 for ( unsigned int i = 0; i < LookupSizes; i += 1 ) { 697 if ( i > bucketSizes[idx] ) idx += 1; 698 lookup[i] = idx; 699 } // for 700 #endif // FASTLOOKUP 701 702 if ( setMmapStart( default_mmap_start() ) ) { 703 abort( "HeapManager : internal error, mmap start initialization failure." 
); 704 } // if 705 heapExpand = default_heap_expansion(); 706 707 char * end = (char *)sbrk( 0 ); 708 sbrk( (char *)libCeiling( (long unsigned int)end, libAlign() ) - end ); // move start of heap to multiple of alignment 709 heapBegin = heapEnd = sbrk( 0 ); // get new start point 710 } // HeapManager 711 712 713 static void ^?{}( HeapManager & ) { 714 #ifdef __STATISTICS__ 715 // if ( traceHeapTerm() ) { 716 // printStats(); 717 // if ( prtfree() ) prtFree( heapManager, true ); 718 // } // if 719 #endif // __STATISTICS__ 720 } // ~HeapManager 721 722 723 static void memory_startup( void ) __attribute__(( constructor( STARTUP_PRIORITY_MEMORY ) )); 724 void memory_startup( void ) { 725 #ifdef __CFA_DEBUG__ 726 if ( unlikely( heapBoot ) ) { // check for recursion during system boot 727 // DO NOT USE STREAMS AS THEY MAY BE UNAVAILABLE AT THIS POINT. 728 abort( "boot() : internal error, recursively invoked during system boot." ); 729 } // if 730 heapBoot = true; 731 #endif // __CFA_DEBUG__ 732 733 //assert( heapManager.heapBegin != 0 ); 734 //heapManager{}; 735 if ( heapManager.heapBegin == 0p ) heapManager{}; 736 } // memory_startup 737 738 static void memory_shutdown( void ) __attribute__(( destructor( STARTUP_PRIORITY_MEMORY ) )); 739 void memory_shutdown( void ) { 740 ^heapManager{}; 741 } // memory_shutdown 719 742 720 743 721 744 static inline void * mallocNoStats( size_t size ) { // necessary for malloc statistics 722 745 //assert( heapManager.heapBegin != 0 ); 723 if ( unlikely( heapManager.heapBegin == 0 ) ) heapManager{}; // called before memory_startup ?724 void * a rea= doMalloc( size );725 if ( unlikely( a rea == 0) ) errno = ENOMEM; // POSIX726 return a rea;746 if ( unlikely( heapManager.heapBegin == 0p ) ) heapManager{}; // called before memory_startup ? 
747 void * addr = doMalloc( size ); 748 if ( unlikely( addr == 0p ) ) errno = ENOMEM; // POSIX 749 return addr; 727 750 } // mallocNoStats 751 752 753 static inline void * callocNoStats( size_t noOfElems, size_t elemSize ) { 754 size_t size = noOfElems * elemSize; 755 char * addr = (char *)mallocNoStats( size ); 756 if ( unlikely( addr == 0p ) ) return 0p; 757 758 HeapManager.Storage.Header * header; 759 HeapManager.FreeHeader * freeElem; 760 size_t bsize, alignment; 761 bool mapped __attribute__(( unused )) = headers( "calloc", addr, header, freeElem, bsize, alignment ); 762 #ifndef __CFA_DEBUG__ 763 // Mapped storage is zero filled, but in debug mode mapped memory is scrubbed in doMalloc, so it has to be reset to zero. 764 if ( ! mapped ) 765 #endif // __CFA_DEBUG__ 766 // Zero entire data space even when > than size => realloc without a new allocation and zero fill works. 767 // <-------00000000000000000000000000000000000000000000000000000> bsize (bucket size) 768 // `-header`-addr `-size 769 memset( addr, '\0', bsize - sizeof(HeapManager.Storage) ); // set to zeros 770 771 header->kind.real.blockSize |= 2; // mark as zero filled 772 return addr; 773 } // callocNoStats 728 774 729 775 … … 745 791 // subtract libAlign() because it is already the minimum alignment 746 792 // add sizeof(Storage) for fake header 747 // #comment TD : this is the only place that calls doMalloc without calling mallocNoStats, why ? 
748 char * area = (char *)doMalloc( size + alignment - libAlign() + sizeof(HeapManager.Storage) ); 749 if ( unlikely( area == 0 ) ) return area; 793 char * addr = (char *)mallocNoStats( size + alignment - libAlign() + sizeof(HeapManager.Storage) ); 794 if ( unlikely( addr == 0p ) ) return addr; 750 795 751 796 // address in the block of the "next" alignment address 752 char * user = (char *)libCeiling( (uintptr_t)(a rea+ sizeof(HeapManager.Storage)), alignment );797 char * user = (char *)libCeiling( (uintptr_t)(addr + sizeof(HeapManager.Storage)), alignment ); 753 798 754 799 // address of header from malloc 755 HeapManager.Storage.Header * realHeader = headerAddr( a rea);800 HeapManager.Storage.Header * realHeader = headerAddr( addr ); 756 801 // address of fake header * before* the alignment location 757 802 HeapManager.Storage.Header * fakeHeader = headerAddr( user ); … … 763 808 return user; 764 809 } // memalignNoStats 810 811 812 static inline void * cmemalignNoStats( size_t alignment, size_t noOfElems, size_t elemSize ) { 813 size_t size = noOfElems * elemSize; 814 char * addr = (char *)memalignNoStats( alignment, size ); 815 if ( unlikely( addr == 0p ) ) return 0p; 816 HeapManager.Storage.Header * header; 817 HeapManager.FreeHeader * freeElem; 818 size_t bsize; 819 bool mapped __attribute__(( unused )) = headers( "cmemalign", addr, header, freeElem, bsize, alignment ); 820 #ifndef __CFA_DEBUG__ 821 // Mapped storage is zero filled, but in debug mode mapped memory is scrubbed in doMalloc, so it has to be reset to zero. 822 if ( ! mapped ) 823 #endif // __CFA_DEBUG__ 824 memset( addr, '\0', dataStorage( bsize, addr, header ) ); // set to zeros 825 header->kind.real.blockSize |= 2; // mark as zero filled 826 827 return addr; 828 } // cmemalignNoStats 765 829 766 830 … … 776 840 extern "C" { 777 841 // The malloc() function allocates size bytes and returns a pointer to the allocated memory. The memory is not 778 // initialized. 
If size is 0, then malloc() returns either NULL, or a unique pointer value that can later be842 // initialized. If size is 0, then malloc() returns either 0p, or a unique pointer value that can later be 779 843 // successfully passed to free(). 780 844 void * malloc( size_t size ) { … … 788 852 789 853 // The calloc() function allocates memory for an array of nmemb elements of size bytes each and returns a pointer to 790 // the allocated memory. The memory is set to zero. If nmemb or size is 0, then calloc() returns either NULL, or a854 // the allocated memory. The memory is set to zero. If nmemb or size is 0, then calloc() returns either 0p, or a 791 855 // unique pointer value that can later be successfully passed to free(). 792 856 void * calloc( size_t noOfElems, size_t elemSize ) { 793 size_t size = noOfElems * elemSize;794 857 #ifdef __STATISTICS__ 795 858 __atomic_add_fetch( &calloc_calls, 1, __ATOMIC_SEQ_CST ); 796 __atomic_add_fetch( &calloc_storage, size, __ATOMIC_SEQ_CST ); 797 #endif // __STATISTICS__ 798 799 char * area = (char *)mallocNoStats( size ); 800 if ( unlikely( area == 0 ) ) return 0; 859 __atomic_add_fetch( &calloc_storage, noOfElems * elemSize, __ATOMIC_SEQ_CST ); 860 #endif // __STATISTICS__ 861 862 return callocNoStats( noOfElems, elemSize ); 863 } // calloc 864 865 // The realloc() function changes the size of the memory block pointed to by ptr to size bytes. The contents will be 866 // unchanged in the range from the start of the region up to the minimum of the old and new sizes. If the new size 867 // is larger than the old size, the added memory will not be initialized. If ptr is 0p, then the call is 868 // equivalent to malloc(size), for all values of size; if size is equal to zero, and ptr is not 0p, then the call 869 // is equivalent to free(ptr). Unless ptr is 0p, it must have been returned by an earlier call to malloc(), 870 // calloc() or realloc(). If the area pointed to was moved, a free(ptr) is done. 
871 void * realloc( void * oaddr, size_t size ) { 872 #ifdef __STATISTICS__ 873 __atomic_add_fetch( &realloc_calls, 1, __ATOMIC_SEQ_CST ); 874 #endif // __STATISTICS__ 875 876 if ( unlikely( size == 0 ) ) { free( oaddr ); return 0p; } // special cases 877 if ( unlikely( oaddr == 0p ) ) return mallocNoStats( size ); 801 878 802 879 HeapManager.Storage.Header * header; 803 880 HeapManager.FreeHeader * freeElem; 804 size_t asize, alignment; 805 bool mapped __attribute__(( unused )) = headers( "calloc", area, header, freeElem, asize, alignment ); 806 #ifndef __CFA_DEBUG__ 807 // Mapped storage is zero filled, but in debug mode mapped memory is scrubbed in doMalloc, so it has to be reset to zero. 808 if ( ! mapped ) 809 #endif // __CFA_DEBUG__ 810 memset( area, '\0', asize - sizeof(HeapManager.Storage) ); // set to zeros 811 812 header->kind.real.blockSize |= 2; // mark as zero filled 813 return area; 814 } // calloc 815 816 // #comment TD : Document this function 817 void * cmemalign( size_t alignment, size_t noOfElems, size_t elemSize ) { 818 size_t size = noOfElems * elemSize; 819 #ifdef __STATISTICS__ 820 __atomic_add_fetch( &cmemalign_calls, 1, __ATOMIC_SEQ_CST ); 821 __atomic_add_fetch( &cmemalign_storage, size, __ATOMIC_SEQ_CST ); 822 #endif // __STATISTICS__ 823 824 char * area = (char *)memalignNoStats( alignment, size ); 825 if ( unlikely( area == 0 ) ) return 0; 826 HeapManager.Storage.Header * header; 827 HeapManager.FreeHeader * freeElem; 828 size_t asize; 829 bool mapped __attribute__(( unused )) = headers( "cmemalign", area, header, freeElem, asize, alignment ); 830 #ifndef __CFA_DEBUG__ 831 // Mapped storage is zero filled, but in debug mode mapped memory is scrubbed in doMalloc, so it has to be reset to zero. 832 if ( ! 
mapped ) 833 #endif // __CFA_DEBUG__ 834 memset( area, '\0', asize - ( (char *)area - (char *)header ) ); // set to zeros 835 header->kind.real.blockSize |= 2; // mark as zero filled 836 837 return area; 838 } // cmemalign 839 840 // The realloc() function changes the size of the memory block pointed to by ptr to size bytes. The contents will be 841 // unchanged in the range from the start of the region up to the minimum of the old and new sizes. If the new size 842 // is larger than the old size, the added memory will not be initialized. If ptr is NULL, then the call is 843 // equivalent to malloc(size), for all values of size; if size is equal to zero, and ptr is not NULL, then the call 844 // is equivalent to free(ptr). Unless ptr is NULL, it must have been returned by an earlier call to malloc(), 845 // calloc() or realloc(). If the area pointed to was moved, a free(ptr) is done. 846 void * realloc( void * addr, size_t size ) { 847 #ifdef __STATISTICS__ 848 __atomic_add_fetch( &realloc_calls, 1, __ATOMIC_SEQ_CST ); 849 #endif // __STATISTICS__ 850 851 if ( unlikely( addr == 0 ) ) return mallocNoStats( size ); // special cases 852 if ( unlikely( size == 0 ) ) { free( addr ); return 0; } 853 854 HeapManager.Storage.Header * header; 855 HeapManager.FreeHeader * freeElem; 856 size_t asize, alignment = 0; 857 headers( "realloc", addr, header, freeElem, asize, alignment ); 858 859 size_t usize = asize - ( (char *)addr - (char *)header ); // compute the amount of user storage in the block 860 if ( usize >= size ) { // already sufficient storage 881 size_t bsize, oalign = 0; 882 headers( "realloc", oaddr, header, freeElem, bsize, oalign ); 883 884 size_t odsize = dataStorage( bsize, oaddr, header ); // data storage available in bucket 885 if ( size <= odsize && odsize <= size * 2 ) { // allow up to 50% wasted storage in smaller size 886 // Do not know size of original allocation => cannot do 0 fill for any additional space because do not know 887 // where to start 
filling, i.e., do not overwrite existing values in space. 888 // 861 889 // This case does not result in a new profiler entry because the previous one still exists and it must match with 862 890 // the free for this memory. Hence, this realloc does not appear in the profiler output. 863 return addr;891 return oaddr; 864 892 } // if 865 893 … … 868 896 #endif // __STATISTICS__ 869 897 870 void * area; 871 if ( unlikely( alignment != 0 ) ) { // previous request memalign? 872 area = memalign( alignment, size ); // create new aligned area 898 // change size and copy old content to new storage 899 900 void * naddr; 901 if ( unlikely( oalign != 0 ) ) { // previous request memalign? 902 if ( unlikely( header->kind.real.blockSize & 2 ) ) { // previous request zero fill 903 naddr = cmemalignNoStats( oalign, 1, size ); // create new aligned area 904 } else { 905 naddr = memalignNoStats( oalign, size ); // create new aligned area 906 } // if 873 907 } else { 874 area = mallocNoStats( size ); // create new area 908 if ( unlikely( header->kind.real.blockSize & 2 ) ) { // previous request zero fill 909 naddr = callocNoStats( 1, size ); // create new area 910 } else { 911 naddr = mallocNoStats( size ); // create new area 912 } // if 875 913 } // if 876 if ( unlikely( area == 0 ) ) return 0; 877 if ( unlikely( header->kind.real.blockSize & 2 ) ) { // previous request zero fill (calloc/cmemalign) ? 878 assert( (header->kind.real.blockSize & 1) == 0 ); 879 bool mapped __attribute__(( unused )) = headers( "realloc", area, header, freeElem, asize, alignment ); 880 #ifndef __CFA_DEBUG__ 881 // Mapped storage is zero filled, but in debug mode mapped memory is scrubbed in doMalloc, so it has to be reset to zero. 882 if ( ! 
mapped ) 883 #endif // __CFA_DEBUG__ 884 memset( (char *)area + usize, '\0', asize - ( (char *)area - (char *)header ) - usize ); // zero-fill back part 885 header->kind.real.blockSize |= 2; // mark new request as zero fill 886 } // if 887 memcpy( area, addr, usize ); // copy bytes 888 free( addr ); 889 return area; 914 if ( unlikely( naddr == 0p ) ) return 0p; 915 916 headers( "realloc", naddr, header, freeElem, bsize, oalign ); 917 size_t ndsize = dataStorage( bsize, naddr, header ); // data storage avilable in bucket 918 // To preserve prior fill, the entire bucket must be copied versus the size. 919 memcpy( naddr, oaddr, MIN( odsize, ndsize ) ); // copy bytes 920 free( oaddr ); 921 return naddr; 890 922 } // realloc 891 923 … … 898 930 #endif // __STATISTICS__ 899 931 900 void * area = memalignNoStats( alignment, size ); 901 902 return area; 932 return memalignNoStats( alignment, size ); 903 933 } // memalign 934 935 936 // The cmemalign() function is the same as calloc() with memory alignment. 937 void * cmemalign( size_t alignment, size_t noOfElems, size_t elemSize ) { 938 #ifdef __STATISTICS__ 939 __atomic_add_fetch( &cmemalign_calls, 1, __ATOMIC_SEQ_CST ); 940 __atomic_add_fetch( &cmemalign_storage, noOfElems * elemSize, __ATOMIC_SEQ_CST ); 941 #endif // __STATISTICS__ 942 943 return cmemalignNoStats( alignment, noOfElems, elemSize ); 944 } // cmemalign 904 945 905 946 // The function aligned_alloc() is the same as memalign(), except for the added restriction that size should be a … … 912 953 // The function posix_memalign() allocates size bytes and places the address of the allocated memory in *memptr. The 913 954 // address of the allocated memory will be a multiple of alignment, which must be a power of two and a multiple of 914 // sizeof(void *). If size is 0, then posix_memalign() returns either NULL, or a unique pointer value that can later955 // sizeof(void *). 
If size is 0, then posix_memalign() returns either 0p, or a unique pointer value that can later 915 956 // be successfully passed to free(3). 916 957 int posix_memalign( void ** memptr, size_t alignment, size_t size ) { 917 958 if ( alignment < sizeof(void *) || ! libPow2( alignment ) ) return EINVAL; // check alignment 918 959 * memptr = memalign( alignment, size ); 919 if ( unlikely( * memptr == 0 ) ) return ENOMEM;960 if ( unlikely( * memptr == 0p ) ) return ENOMEM; 920 961 return 0; 921 962 } // posix_memalign … … 930 971 // The free() function frees the memory space pointed to by ptr, which must have been returned by a previous call to 931 972 // malloc(), calloc() or realloc(). Otherwise, or if free(ptr) has already been called before, undefined behavior 932 // occurs. If ptr is NULL, no operation is performed.973 // occurs. If ptr is 0p, no operation is performed. 933 974 void free( void * addr ) { 934 975 #ifdef __STATISTICS__ … … 936 977 #endif // __STATISTICS__ 937 978 938 // #comment TD : To decrease nesting I would but the special case in the 939 // else instead, plus it reads more naturally to have the 940 // short / normal case instead 941 if ( unlikely( addr == 0 ) ) { // special case 942 #ifdef __CFA_DEBUG__ 943 if ( traceHeap() ) { 944 #define nullmsg "Free( 0x0 ) size:0\n" 945 // Do not debug print free( 0 ), as it can cause recursive entry from sprintf. 946 __cfaabi_dbg_bits_write( nullmsg, sizeof(nullmsg) - 1 ); 947 } // if 948 #endif // __CFA_DEBUG__ 979 if ( unlikely( addr == 0p ) ) { // special case 980 // #ifdef __CFA_DEBUG__ 981 // if ( traceHeap() ) { 982 // #define nullmsg "Free( 0x0 ) size:0\n" 983 // // Do not debug print free( 0p ), as it can cause recursive entry from sprintf. 
984 // __cfaabi_dbg_write( nullmsg, sizeof(nullmsg) - 1 ); 985 // } // if 986 // #endif // __CFA_DEBUG__ 949 987 return; 950 988 } // exit … … 953 991 } // free 954 992 955 // The mallopt() function adjusts parameters that control the behavior of the memory-allocation functions (see 956 // malloc(3)). The param argument specifies the parameter to be modified, and value specifies the new value for that 957 // parameter. 958 int mallopt( int option, int value ) { 959 choose( option ) { 960 case M_TOP_PAD: 961 if ( setHeapExpand( value ) ) fallthru default; 962 case M_MMAP_THRESHOLD: 963 if ( setMmapStart( value ) ) fallthru default; 964 default: 965 // #comment TD : 1 for unsopported feels wrong 966 return 1; // success, or unsupported 967 } // switch 968 return 0; // error 969 } // mallopt 970 971 // The malloc_trim() function attempts to release free memory at the top of the heap (by calling sbrk(2) with a 972 // suitable argument). 973 int malloc_trim( size_t ) { 974 return 0; // => impossible to release memory 975 } // malloc_trim 976 977 // The malloc_usable_size() function returns the number of usable bytes in the block pointed to by ptr, a pointer to 978 // a block of memory allocated by malloc(3) or a related function. 979 size_t malloc_usable_size( void * addr ) { 980 if ( unlikely( addr == 0 ) ) return 0; // null allocation has 0 size 981 982 HeapManager.Storage.Header * header; 983 HeapManager.FreeHeader * freeElem; 984 size_t size, alignment; 985 986 headers( "malloc_usable_size", addr, header, freeElem, size, alignment ); 987 size_t usize = size - ( (char *)addr - (char *)header ); // compute the amount of user storage in the block 988 return usize; 989 } // malloc_usable_size 990 991 992 // The malloc_alignment() function returns the alignment of the allocation. 993 994 // The malloc_alignment() function returns the alignment of the allocation. 
993 995 size_t malloc_alignment( void * addr ) { 994 if ( unlikely( addr == 0 ) ) return libAlign(); // minimum alignment996 if ( unlikely( addr == 0p ) ) return libAlign(); // minimum alignment 995 997 HeapManager.Storage.Header * header = headerAddr( addr ); 996 998 if ( (header->kind.fake.alignment & 1) == 1 ) { // fake header ? … … 1002 1004 1003 1005 1004 1006 // The malloc_zero_fill() function returns true if the allocation is zero filled, i.e., initially allocated by calloc(). 1005 1007 bool malloc_zero_fill( void * addr ) { 1006 if ( unlikely( addr == 0 ) ) return false; // null allocation is not zero fill1008 if ( unlikely( addr == 0p ) ) return false; // null allocation is not zero fill 1007 1009 HeapManager.Storage.Header * header = headerAddr( addr ); 1008 1010 if ( (header->kind.fake.alignment & 1) == 1 ) { // fake header ? … … 1013 1015 1014 1016 1015 // The malloc_stats() function prints (on default standard error) statistics about memory allocated by malloc(3) and 1016 // related functions. 1017 // The malloc_usable_size() function returns the number of usable bytes in the block pointed to by ptr, a pointer to 1018 // a block of memory allocated by malloc(3) or a related function. 1019 size_t malloc_usable_size( void * addr ) { 1020 if ( unlikely( addr == 0p ) ) return 0; // null allocation has 0 size 1021 HeapManager.Storage.Header * header; 1022 HeapManager.FreeHeader * freeElem; 1023 size_t bsize, alignment; 1024 1025 headers( "malloc_usable_size", addr, header, freeElem, bsize, alignment ); 1026 return dataStorage( bsize, addr, header ); // data storage in bucket 1027 } // malloc_usable_size 1028 1029 1030 // The malloc_stats() function prints (on default standard error) statistics about memory allocated by malloc(3) and 1031 // related functions. 
1017 1032 void malloc_stats( void ) { 1018 1033 #ifdef __STATISTICS__ 1019 1034 printStats(); 1020 if ( checkFree() ) checkFree( heapManager );1035 if ( prtFree() ) prtFree( heapManager ); 1021 1036 #endif // __STATISTICS__ 1022 1037 } // malloc_stats 1023 1038 1024 1039 // The malloc_stats_fd() function changes the file descripter where malloc_stats() writes the statistics. 1025 int malloc_stats_fd( int fd ) {1040 int malloc_stats_fd( int fd __attribute__(( unused )) ) { 1026 1041 #ifdef __STATISTICS__ 1027 1042 int temp = statfd; … … 1033 1048 } // malloc_stats_fd 1034 1049 1050 1051 // The mallopt() function adjusts parameters that control the behavior of the memory-allocation functions (see 1052 // malloc(3)). The param argument specifies the parameter to be modified, and value specifies the new value for that 1053 // parameter. 1054 int mallopt( int option, int value ) { 1055 choose( option ) { 1056 case M_TOP_PAD: 1057 if ( setHeapExpand( value ) ) return 1; 1058 case M_MMAP_THRESHOLD: 1059 if ( setMmapStart( value ) ) return 1; 1060 } // switch 1061 return 0; // error, unsupported 1062 } // mallopt 1063 1064 // The malloc_trim() function attempts to release free memory at the top of the heap (by calling sbrk(2) with a 1065 // suitable argument). 1066 int malloc_trim( size_t ) { 1067 return 0; // => impossible to release memory 1068 } // malloc_trim 1069 1070 1035 1071 // The malloc_info() function exports an XML string that describes the current state of the memory-allocation 1036 1072 // implementation in the caller. The string is printed on the file stream stream. The exported string includes 1037 1073 // information about all arenas (see malloc(3)). 1038 1074 int malloc_info( int options, FILE * stream ) { 1075 if ( options != 0 ) { errno = EINVAL; return -1; } 1039 1076 return printStatsXML( stream ); 1040 1077 } // malloc_info … … 1046 1083 // structure is returned as the function result. (It is the caller's responsibility to free(3) this memory.) 
1047 1084 void * malloc_get_state( void ) { 1048 return 0 ; // unsupported1085 return 0p; // unsupported 1049 1086 } // malloc_get_state 1050 1087 … … 1058 1095 1059 1096 1097 // Must have CFA linkage to overload with C linkage realloc. 1098 void * realloc( void * oaddr, size_t nalign, size_t size ) { 1099 #ifdef __STATISTICS__ 1100 __atomic_add_fetch( &realloc_calls, 1, __ATOMIC_SEQ_CST ); 1101 #endif // __STATISTICS__ 1102 1103 if ( unlikely( size == 0 ) ) { free( oaddr ); return 0p; } // special cases 1104 if ( unlikely( oaddr == 0p ) ) return mallocNoStats( size ); 1105 1106 if ( unlikely( nalign == 0 ) ) nalign = libAlign(); // reset alignment to minimum 1107 #ifdef __CFA_DEBUG__ 1108 else 1109 checkAlign( nalign ); // check alignment 1110 #endif // __CFA_DEBUG__ 1111 1112 HeapManager.Storage.Header * header; 1113 HeapManager.FreeHeader * freeElem; 1114 size_t bsize, oalign = 0; 1115 headers( "realloc", oaddr, header, freeElem, bsize, oalign ); 1116 size_t odsize = dataStorage( bsize, oaddr, header ); // data storage available in bucket 1117 1118 if ( oalign != 0 && (uintptr_t)oaddr % nalign == 0 ) { // has alignment and just happens to work out 1119 headerAddr( oaddr )->kind.fake.alignment = nalign | 1; // update alignment (could be the same) 1120 return realloc( oaddr, size ); 1121 } // if 1122 1123 #ifdef __STATISTICS__ 1124 __atomic_add_fetch( &realloc_storage, size, __ATOMIC_SEQ_CST ); 1125 #endif // __STATISTICS__ 1126 1127 // change size and copy old content to new storage 1128 1129 void * naddr; 1130 if ( unlikely( header->kind.real.blockSize & 2 ) ) { // previous request zero fill 1131 naddr = cmemalignNoStats( nalign, 1, size ); // create new aligned area 1132 } else { 1133 naddr = memalignNoStats( nalign, size ); // create new aligned area 1134 } // if 1135 1136 headers( "realloc", naddr, header, freeElem, bsize, oalign ); 1137 size_t ndsize = dataStorage( bsize, naddr, header ); // data storage avilable in bucket 1138 // To preserve prior fill, the 
entire bucket must be copied versus the size. 1139 memcpy( naddr, oaddr, MIN( odsize, ndsize ) ); // copy bytes 1140 free( oaddr ); 1141 return naddr; 1142 } // realloc 1143 1144 1060 1145 // Local Variables: // 1061 1146 // tab-width: 4 // -
libcfa/src/interpose.cfa
r7768b8d r30763fd 10 10 // Created On : Wed Mar 29 16:10:31 2017 11 11 // Last Modified By : Peter A. Buhr 12 // Last Modified On : Sun Jul 14 22:57:16201913 // Update Count : 11 612 // Last Modified On : Thu Nov 21 16:47:02 2019 13 // Update Count : 118 14 14 // 15 15 … … 163 163 abort_lastframe = kernel_abort_lastframe(); 164 164 len = snprintf( abort_text, abort_text_size, "Cforall Runtime error (UNIX pid:%ld) ", (long int)getpid() ); // use UNIX pid (versus getPid) 165 __cfaabi_dbg_ bits_write( abort_text, len );165 __cfaabi_dbg_write( abort_text, len ); 166 166 167 167 if ( fmt ) { … … 171 171 len = vsnprintf( abort_text, abort_text_size, fmt, args ); 172 172 va_end( args ); 173 __cfaabi_dbg_ bits_write( abort_text, len );173 __cfaabi_dbg_write( abort_text, len ); 174 174 175 175 if ( fmt[strlen( fmt ) - 1] != '\n' ) { // add optional newline if missing at the end of the format text 176 __cfaabi_dbg_ bits_write( "\n", 1 );176 __cfaabi_dbg_write( "\n", 1 ); 177 177 } 178 178 } … … 194 194 // find executable name 195 195 *index( messages[0], '(' ) = '\0'; 196 __cfaabi_ dbg_bits_print_nolock("Stack back trace for: %s\n", messages[0]);196 __cfaabi_bits_print_nolock( STDERR_FILENO, "Stack back trace for: %s\n", messages[0]); 197 197 198 198 for ( int i = Start; i < size - abort_lastframe && messages != NULL; i += 1 ) { … … 200 200 201 201 for ( char * p = messages[i]; *p; ++p ) { 202 //__cfaabi_ dbg_bits_print_nolock( "X %s\n", p);202 //__cfaabi_bits_print_nolock( "X %s\n", p); 203 203 // find parantheses and +offset 204 204 if ( *p == '(' ) { … … 220 220 *offset_end++ = '\0'; 221 221 222 __cfaabi_ dbg_bits_print_nolock("(%i) %s : %s + %s %s\n", frameNo, messages[i], name, offset_begin, offset_end);222 __cfaabi_bits_print_nolock( STDERR_FILENO, "(%i) %s : %s + %s %s\n", frameNo, messages[i], name, offset_begin, offset_end); 223 223 } else { // otherwise, print the whole line 224 __cfaabi_ dbg_bits_print_nolock("(%i) %s\n", frameNo, messages[i] );224 
__cfaabi_bits_print_nolock( STDERR_FILENO, "(%i) %s\n", frameNo, messages[i] ); 225 225 } 226 226 } -
libcfa/src/stdlib.cfa
r7768b8d r30763fd 10 10 // Created On : Thu Jan 28 17:10:29 2016 11 11 // Last Modified By : Peter A. Buhr 12 // Last Modified On : Tue Oct 22 08:57:52201913 // Update Count : 4 7812 // Last Modified On : Wed Nov 20 17:22:47 2019 13 // Update Count : 485 14 14 // 15 15 … … 30 30 T * alloc_set( T ptr[], size_t dim, char fill ) { // realloc array with fill 31 31 size_t olen = malloc_usable_size( ptr ); // current allocation 32 char * nptr = (char*)realloc( (void *)ptr, dim * sizeof(T) ); // C realloc32 void * nptr = (void *)realloc( (void *)ptr, dim * sizeof(T) ); // C realloc 33 33 size_t nlen = malloc_usable_size( nptr ); // new allocation 34 34 if ( nlen > olen ) { // larger ? 35 memset( nptr + olen, (int)fill, nlen - olen ); // initialize added storage35 memset( (char *)nptr + olen, (int)fill, nlen - olen ); // initialize added storage 36 36 } // if 37 37 return (T *)nptr; 38 38 } // alloc_set 39 39 40 T * alloc_align( T ptr[], size_t align ) { // aligned realloc array41 char * nptr;42 size_t alignment = malloc_alignment( ptr );43 if ( align != alignment && (uintptr_t)ptr % align != 0 ) {44 size_t olen = malloc_usable_size( ptr ); // current allocation45 nptr = (char *)memalign( align, olen );46 size_t nlen = malloc_usable_size( nptr ); // new allocation47 size_t lnth = olen < nlen ? olen : nlen; // min48 memcpy( nptr, ptr, lnth ); // initialize storage49 free( ptr );50 } else {51 nptr = (char *)ptr;52 } // if53 return (T *)nptr;54 } // alloc_align55 56 T * alloc_align( T ptr[], size_t align, size_t dim ) { // aligned realloc array57 char * nptr;58 size_t alignment = malloc_alignment( ptr );59 if ( align != alignment ) {60 size_t olen = malloc_usable_size( ptr ); // current allocation61 nptr = (char *)memalign( align, dim * sizeof(T) );62 size_t nlen = malloc_usable_size( nptr ); // new allocation63 size_t lnth = olen < nlen ? 
olen : nlen; // min64 memcpy( nptr, ptr, lnth ); // initialize storage65 free( ptr );66 } else {67 nptr = (char *)realloc( (void *)ptr, dim * sizeof(T) ); // C realloc68 } // if69 return (T *)nptr;70 } // alloc_align71 72 40 T * alloc_align_set( T ptr[], size_t align, char fill ) { // aligned realloc with fill 73 41 size_t olen = malloc_usable_size( ptr ); // current allocation 74 char * nptr = alloc_align( ptr, align ); 42 void * nptr = (void *)realloc( (void *)ptr, align, sizeof(T) ); // CFA realloc 43 // char * nptr = alloc_align( ptr, align ); 75 44 size_t nlen = malloc_usable_size( nptr ); // new allocation 76 45 if ( nlen > olen ) { // larger ? 77 memset( nptr + olen, (int)fill, nlen - olen ); // initialize added storage46 memset( (char *)nptr + olen, (int)fill, nlen - olen ); // initialize added storage 78 47 } // if 79 48 return (T *)nptr; -
libcfa/src/stdlib.hfa
r7768b8d r30763fd 10 10 // Created On : Thu Jan 28 17:12:35 2016 11 11 // Last Modified By : Peter A. Buhr 12 // Last Modified On : Sun Oct 20 22:57:33201913 // Update Count : 39 012 // Last Modified On : Fri Nov 22 15:13:14 2019 13 // Update Count : 399 14 14 // 15 15 … … 28 28 } // extern "C" 29 29 30 void * realloc( void * oaddr, size_t nalign, size_t size ); // CFA heap 31 30 32 //--------------------------------------- 31 33 … … 50 52 } // calloc 51 53 52 T * realloc( T * ptr, size_t size ) { 53 if ( unlikely( ptr == 0 ) ) return malloc(); 54 T * realloc( T * ptr, size_t size ) { // CFA realloc, eliminate return-type cast 54 55 return (T *)(void *)realloc( (void *)ptr, size ); // C realloc 55 56 } // realloc … … 59 60 } // memalign 60 61 62 T * cmemalign( size_t align, size_t dim ) { 63 return (T *)cmemalign( align, dim, sizeof(T) ); // CFA cmemalign 64 } // cmemalign 65 61 66 T * aligned_alloc( size_t align ) { 62 67 return (T *)aligned_alloc( align, sizeof(T) ); // C aligned_alloc … … 79 84 80 85 T * alloc( T ptr[], size_t dim ) { // realloc 81 return realloc( ptr, dim * sizeof(T) );86 return (T *)(void *)realloc( (void *)ptr, dim * sizeof(T) ); // C realloc 82 87 } // alloc 83 88 … … 118 123 } // alloc_align 119 124 125 T * alloc_align( T ptr[], size_t align ) { // aligned realloc array 126 return (T *)(void *)realloc( (void *)ptr, align, sizeof(T) ); // CFA realloc 127 } // alloc_align 128 129 T * alloc_align( T ptr[], size_t align, size_t dim ) { // aligned realloc array 130 return (T *)(void *)realloc( (void *)ptr, align, dim * sizeof(T) ); // CFA realloc 131 } // alloc_align 132 120 133 T * alloc_align_set( size_t align, char fill ) { 121 134 return (T *)memset( (T *)alloc_align( align ), (int)fill, sizeof(T) ); // initialize with fill value … … 142 155 143 156 forall( dtype T | sized(T) ) { 144 T * alloc_align( T ptr[], size_t align ); // realign145 T * alloc_align( T ptr[], size_t align, size_t dim ); // aligned realloc array146 157 T * 
alloc_align_set( T ptr[], size_t align, size_t dim, char fill ); // aligned realloc array with fill 147 158 } // distribution -
src/AST/Convert.cpp
r7768b8d r30763fd 887 887 auto expr = visitBaseExpr( node, 888 888 new AsmExpr( 889 get<Expression>().accept1(node->inout),889 new std::string(node->inout), 890 890 get<Expression>().accept1(node->constraint), 891 891 get<Expression>().accept1(node->operand) … … 2258 2258 new ast::AsmExpr( 2259 2259 old->location, 2260 GET_ACCEPT_1(inout, Expr),2260 old->inout, 2261 2261 GET_ACCEPT_1(constraint, Expr), 2262 2262 GET_ACCEPT_1(operand, Expr) -
src/AST/Expr.hpp
r7768b8d r30763fd 556 556 class AsmExpr final : public Expr { 557 557 public: 558 ptr<Expr>inout;558 std::string inout; 559 559 ptr<Expr> constraint; 560 560 ptr<Expr> operand; 561 561 562 AsmExpr( const CodeLocation & loc, const Expr *io, const Expr * con, const Expr * op )562 AsmExpr( const CodeLocation & loc, const std::string & io, const Expr * con, const Expr * op ) 563 563 : Expr( loc ), inout( io ), constraint( con ), operand( op ) {} 564 564 -
src/AST/Pass.impl.hpp
r7768b8d r30763fd 1300 1300 maybe_accept( node, &AsmExpr::result ); 1301 1301 } 1302 maybe_accept( node, &AsmExpr::inout );1303 1302 maybe_accept( node, &AsmExpr::constraint ); 1304 1303 maybe_accept( node, &AsmExpr::operand ); -
src/AST/Print.cpp
r7768b8d r30763fd 1011 1011 os << "Asm Expression:" << endl; 1012 1012 ++indent; 1013 if ( node->inout ) node->inout->accept( *this );1013 if ( !node->inout.empty() ) os << "[" << node->inout << "] "; 1014 1014 if ( node->constraint ) node->constraint->accept( *this ); 1015 1015 if ( node->operand ) node->operand->accept( *this ); -
src/CodeGen/CodeGenerator.cc
r7768b8d r30763fd 786 786 787 787 void CodeGenerator::postvisit( AsmExpr * asmExpr ) { 788 if ( asmExpr->get_inout() ) {788 if ( !asmExpr->inout.empty() ) { 789 789 output << "[ "; 790 asmExpr->get_inout()->accept( *visitor );790 output << asmExpr->inout; 791 791 output << " ] "; 792 792 } // if 793 asmExpr-> get_constraint()->accept( *visitor );793 asmExpr->constraint->accept( *visitor ); 794 794 output << " ( "; 795 asmExpr-> get_operand()->accept( *visitor );795 asmExpr->operand->accept( *visitor ); 796 796 output << " )"; 797 797 } -
src/Common/PassVisitor.impl.h
r7768b8d r30763fd 2452 2452 2453 2453 indexerScopedAccept( node->result , *this ); 2454 maybeAccept_impl ( node->inout , *this );2455 2454 maybeAccept_impl ( node->constraint, *this ); 2456 2455 maybeAccept_impl ( node->operand , *this ); … … 2464 2463 2465 2464 indexerScopedAccept( node->result , *this ); 2466 maybeAccept_impl ( node->inout , *this );2467 2465 maybeAccept_impl ( node->constraint, *this ); 2468 2466 maybeAccept_impl ( node->operand , *this ); … … 2477 2475 indexerScopedMutate( node->env , *this ); 2478 2476 indexerScopedMutate( node->result , *this ); 2479 maybeMutate_impl ( node->inout , *this );2480 2477 maybeMutate_impl ( node->constraint, *this ); 2481 2478 maybeMutate_impl ( node->operand , *this ); -
src/Common/SemanticError.cc
r7768b8d r30763fd 149 149 // Helpers 150 150 namespace ErrorHelpers { 151 Colors colors = Colors::Auto; 152 153 static inline bool with_colors() { 154 return colors == Colors::Auto ? isatty( STDERR_FILENO ) : bool(colors); 155 } 156 151 157 const std::string & error_str() { 152 static std::string str = isatty( STDERR_FILENO) ? "\e[31merror:\e[39m " : "error: ";158 static std::string str = with_colors() ? "\e[31merror:\e[39m " : "error: "; 153 159 return str; 154 160 } 155 161 156 162 const std::string & warning_str() { 157 static std::string str = isatty( STDERR_FILENO) ? "\e[95mwarning:\e[39m " : "warning: ";163 static std::string str = with_colors() ? "\e[95mwarning:\e[39m " : "warning: "; 158 164 return str; 159 165 } 160 166 161 167 const std::string & bold_ttycode() { 162 static std::string str = isatty( STDERR_FILENO) ? "\e[1m" : "";168 static std::string str = with_colors() ? "\e[1m" : ""; 163 169 return str; 164 170 } 165 171 166 172 const std::string & reset_font_ttycode() { 167 static std::string str = isatty( STDERR_FILENO) ? "\e[0m" : "";173 static std::string str = with_colors() ? "\e[0m" : ""; 168 174 return str; 169 175 } -
src/Common/SemanticError.h
r7768b8d r30763fd 97 97 // Helpers 98 98 namespace ErrorHelpers { 99 enum class Colors { 100 Never = false, 101 Always = true, 102 Auto, 103 }; 104 105 extern Colors colors; 106 99 107 const std::string & error_str(); 100 108 const std::string & warning_str(); -
src/Parser/parser.yy
r7768b8d r30763fd 1423 1423 asm_operand: // GCC 1424 1424 string_literal '(' constant_expression ')' 1425 { $$ = new ExpressionNode( new AsmExpr( maybeMoveBuild< Expression >( (ExpressionNode *)nullptr ), $1, maybeMoveBuild< Expression >( $3 ) ) ); }1426 | '[' constant_expression']' string_literal '(' constant_expression ')'1427 { $$ = new ExpressionNode( new AsmExpr( maybeMoveBuild< Expression >( $2 ), $4, maybeMoveBuild< Expression >( $6 ) ) ); }1425 { $$ = new ExpressionNode( new AsmExpr( nullptr, $1, maybeMoveBuild< Expression >( $3 ) ) ); } 1426 | '[' IDENTIFIER ']' string_literal '(' constant_expression ')' 1427 { $$ = new ExpressionNode( new AsmExpr( $2, $4, maybeMoveBuild< Expression >( $6 ) ) ); } 1428 1428 ; 1429 1429 -
src/ResolvExpr/Resolver.cc
r7768b8d r30763fd 485 485 visit_children = false; 486 486 findVoidExpression( asmExpr->operand, indexer ); 487 if ( asmExpr->get_inout() ) {488 findVoidExpression( asmExpr->inout, indexer );489 } // if490 487 } 491 488 … … 1365 1362 asmExpr = ast::mutate_field( 1366 1363 asmExpr, &ast::AsmExpr::operand, findVoidExpression( asmExpr->operand, symtab ) ); 1367 1368 if ( asmExpr->inout ) {1369 asmExpr = ast::mutate_field(1370 asmExpr, &ast::AsmExpr::inout, findVoidExpression( asmExpr->inout, symtab ) );1371 }1372 1364 1373 1365 return asmExpr; -
src/SynTree/Expression.cc
r7768b8d r30763fd 527 527 } 528 528 529 AsmExpr::AsmExpr( const AsmExpr & other ) : Expression( other ), inout( maybeClone( other.inout )), constraint( maybeClone( other.constraint ) ), operand( maybeClone( other.operand ) ) {}529 AsmExpr::AsmExpr( const AsmExpr & other ) : Expression( other ), inout( other.inout ), constraint( maybeClone( other.constraint ) ), operand( maybeClone( other.operand ) ) {} 530 530 531 531 532 532 void AsmExpr::print( std::ostream & os, Indenter indent ) const { 533 533 os << "Asm Expression: " << std::endl; 534 if ( inout ) inout->print( os, indent+1 );534 if ( !inout.empty() ) os << "[" << inout << "] "; 535 535 if ( constraint ) constraint->print( os, indent+1 ); 536 536 if ( operand ) operand->print( os, indent+1 ); -
src/SynTree/Expression.h
r7768b8d r30763fd 575 575 class AsmExpr : public Expression { 576 576 public: 577 Expression *inout;577 std::string inout; 578 578 Expression * constraint; 579 579 Expression * operand; 580 580 581 AsmExpr( Expression * inout, Expression * constraint, Expression * operand ) : inout( inout ), constraint( constraint ), operand( operand ) {}581 AsmExpr( const std::string * _inout, Expression * constraint, Expression * operand ) : inout( _inout ? *_inout : "" ), constraint( constraint ), operand( operand ) { delete _inout; } 582 582 AsmExpr( const AsmExpr & other ); 583 virtual ~AsmExpr() { delete inout; delete constraint; delete operand; }; 584 585 Expression * get_inout() const { return inout; } 586 void set_inout( Expression * newValue ) { inout = newValue; } 587 588 Expression * get_constraint() const { return constraint; } 589 void set_constraint( Expression * newValue ) { constraint = newValue; } 590 591 Expression * get_operand() const { return operand; } 592 void set_operand( Expression * newValue ) { operand = newValue; } 583 virtual ~AsmExpr() { delete constraint; delete operand; }; 593 584 594 585 virtual AsmExpr * clone() const override { return new AsmExpr( * this ); } -
src/main.cc
r7768b8d r30763fd 407 407 408 408 409 static const char optstring[] = ": hlLmNnpP:S:tgwW:D:";409 static const char optstring[] = ":c:ghlLmNnpP:S:twW:D:"; 410 410 411 411 enum { PreludeDir = 128 }; 412 412 static struct option long_opts[] = { 413 { "colors", required_argument, nullptr, 'c' }, 414 { "gdb", no_argument, nullptr, 'g' }, 413 415 { "help", no_argument, nullptr, 'h' }, 414 416 { "libcfa", no_argument, nullptr, 'l' }, … … 422 424 { "statistics", required_argument, nullptr, 'S' }, 423 425 { "tree", no_argument, nullptr, 't' }, 424 { "gdb", no_argument, nullptr, 'g' },425 426 { "", no_argument, nullptr, 0 }, // -w 426 427 { "", no_argument, nullptr, 0 }, // -W … … 430 431 431 432 static const char * description[] = { 432 "print help message", // -h 433 "generate libcfa.c", // -l 434 "generate line marks", // -L 435 "do not replace main", // -m 436 "do not generate line marks", // -N 437 "do not read prelude", // -n 433 "diagnostic color: never, always, or auto.", // -c 434 "wait for gdb to attach", // -g 435 "print help message", // -h 436 "generate libcfa.c", // -l 437 "generate line marks", // -L 438 "do not replace main", // -m 439 "do not generate line marks", // -N 440 "do not read prelude", // -n 438 441 "generate prototypes for prelude functions", // -p 439 "print", 442 "print", // -P 440 443 "<directory> prelude directory for debug/nodebug", // no flag 441 444 "<option-list> enable profiling information:\n counters,heap,time,all,none", // -S 442 "building cfa standard lib", // -t 443 "wait for gdb to attach", // -g 444 "", // -w 445 "", // -W 446 "", // -D 445 "building cfa standard lib", // -t 446 "", // -w 447 "", // -W 448 "", // -D 447 449 }; // description 448 450 … … 512 514 while ( (c = getopt_long( argc, argv, optstring, long_opts, nullptr )) != -1 ) { 513 515 switch ( c ) { 516 case 'c': // diagnostic colors 517 if ( strcmp( optarg, "always" ) == 0 ) { 518 ErrorHelpers::colors = ErrorHelpers::Colors::Always; 519 } else if ( strcmp( optarg, 
"never" ) == 0 ) { 520 ErrorHelpers::colors = ErrorHelpers::Colors::Never; 521 } else if ( strcmp( optarg, "auto" ) == 0 ) { 522 ErrorHelpers::colors = ErrorHelpers::Colors::Auto; 523 } // if 524 break; 514 525 case 'h': // help message 515 526 usage( argv ); // no return -
tests/.expect/alloc.txt
r7768b8d r30763fd 30 30 CFA resize array alloc 31 31 0xdeadbeef 0xdeadbeef 0xdeadbeef 0xdeadbeef 0xdeadbeef 0xdeadbeef 0xdeadbeef 0xdeadbeef 0xdeadbeef 0xdeadbeef 32 CFA resize array alloc , fill32 CFA resize array alloc 33 33 0xdeadbeef 0xdeadbeef 0xdeadbeef 0xdeadbeef 0xdeadbeef 0xdeadbeef 0xdeadbeef 0xdeadbeef 0xdeadbeef 0xdeadbeef 0x1010101 0x1010101 0x1010101 0x1010101 0x1010101 0x1010101 0x1010101 0x1010101 0x1010101 0x1010101 0xdededede 0xdededede 0xdededede 0xdededede 0xdededede 0xdededede 0xdededede 0xdededede 0xdededede 0xdededede 34 CFA resize array alloc , fill34 CFA resize array alloc 35 35 0xdeadbeef 0xdeadbeef 0xdeadbeef 0xdeadbeef 0xdeadbeef 0xdeadbeef 0xdeadbeef 0xdeadbeef 0xdeadbeef 0xdeadbeef 36 36 CFA resize array alloc, fill 37 0xdeadbeef 0xdeadbeef 0xdeadbeef 0xdeadbeef 0xdeadbeef 0xdeadbeef 0xdeadbeef 0xdeadbeef 0xdeadbeef 0xdeadbeef 0x1010101 0x1010101 0x1010101 0x1010101 0x1010101 0x1010101 0x 1010101 0x1010101 0x1010101 0x10101010xdededede 0xdededede 0xdededede 0xdededede 0xdededede 0xdededede 0xdededede 0xdededede 0xdededede 0xdededede37 0xdeadbeef 0xdeadbeef 0xdeadbeef 0xdeadbeef 0xdeadbeef 0xdeadbeef 0xdeadbeef 0xdeadbeef 0xdeadbeef 0xdeadbeef 0x1010101 0x1010101 0x1010101 0x1010101 0x1010101 0x1010101 0xdededede 0xdededede 0xdededede 0xdededede 0xdededede 0xdededede 0xdededede 0xdededede 0xdededede 0xdededede 0xdededede 0xdededede 0xdededede 0xdededede 38 38 39 39 C memalign 42 42.5 -
tests/.expect/gccExtensions.x64.txt
r7768b8d r30763fd 12 12 asm volatile ( "mov %1, %0\n\t" "add $1, %0" : "=" "r" ( _X3dsti_2 ) : : ); 13 13 asm volatile ( "mov %1, %0\n\t" "add $1, %0" : "=r" ( _X3dsti_2 ) : "r" ( _X3srci_2 ) : ); 14 asm ( "mov %1, %0\n\t" "add $1, %0" : "=r" ( _X3dsti_2 ), "=r" ( _X3srci_2 ) : [ _X3srci_2] "r" ( _X3dsti_2 ) : "r0" );14 asm ( "mov %1, %0\n\t" "add $1, %0" : "=r" ( _X3dsti_2 ), "=r" ( _X3srci_2 ) : [ src ] "r" ( _X3dsti_2 ) : "r0" ); 15 15 L2: L1: asm goto ( "frob %%r5, %1; jc %l[L1]; mov (%2), %%r5" : : "r" ( _X3srci_2 ), "r" ( (&_X3dsti_2) ) : "r5", "memory" : L1, L2 ); 16 16 double _Complex _X2c1Cd_2; -
tests/.expect/gccExtensions.x86.txt
r7768b8d r30763fd 12 12 asm volatile ( "mov %1, %0\n\t" "add $1, %0" : "=" "r" ( _X3dsti_2 ) : : ); 13 13 asm volatile ( "mov %1, %0\n\t" "add $1, %0" : "=r" ( _X3dsti_2 ) : "r" ( _X3srci_2 ) : ); 14 asm ( "mov %1, %0\n\t" "add $1, %0" : "=r" ( _X3dsti_2 ), "=r" ( _X3srci_2 ) : [ _X3srci_2] "r" ( _X3dsti_2 ) : "r0" );14 asm ( "mov %1, %0\n\t" "add $1, %0" : "=r" ( _X3dsti_2 ), "=r" ( _X3srci_2 ) : [ src ] "r" ( _X3dsti_2 ) : "r0" ); 15 15 L2: L1: asm goto ( "frob %%r5, %1; jc %l[L1]; mov (%2), %%r5" : : "r" ( _X3srci_2 ), "r" ( (&_X3dsti_2) ) : "r5", "memory" : L1, L2 ); 16 16 double _Complex _X2c1Cd_2; -
tests/alloc.cfa
r7768b8d r30763fd 10 10 // Created On : Wed Feb 3 07:56:22 2016 11 11 // Last Modified By : Peter A. Buhr 12 // Last Modified On : Sun Oct 20 21:45:21201913 // Update Count : 39112 // Last Modified On : Fri Nov 22 15:34:19 2019 13 // Update Count : 404 14 14 // 15 15 … … 126 126 127 127 p = alloc( p, 2 * dim ); // CFA resize array alloc 128 for ( i; dim ~ 2 * dim ) { p[i] = 0x1010101; } 128 for ( i; dim ~ 2 * dim ) { p[i] = 0x1010101; } // fill upper part 129 129 printf( "CFA resize array alloc\n" ); 130 130 for ( i; 2 * dim ) { printf( "%#x ", p[i] ); } … … 139 139 140 140 p = alloc_set( p, 3 * dim, fill ); // CFA resize array alloc, fill 141 printf( "CFA resize array alloc , fill\n" );141 printf( "CFA resize array alloc\n" ); 142 142 for ( i; 3 * dim ) { printf( "%#x ", p[i] ); } 143 143 printf( "\n" ); … … 145 145 146 146 p = alloc_set( p, dim, fill ); // CFA resize array alloc, fill 147 printf( "CFA resize array alloc , fill\n" );147 printf( "CFA resize array alloc\n" ); 148 148 for ( i; dim ) { printf( "%#x ", p[i] ); } 149 149 printf( "\n" ); -
tests/builtins/sync.cfa
r7768b8d r30763fd 4 4 void foo() { 5 5 volatile _Bool * vpB = 0; _Bool vB = 0; 6 volatile char * vp1 = 0; char * rp1 = 0; char v1 = 0; 7 volatile short * vp2 = 0; short * rp2 = 0; short v2 = 0; 8 volatile int * vp4 = 0; int * rp4 = 0; int v4 = 0; 9 volatile long long int * vp8 = 0; long long int * rp8 = 0; long long int v8 = 0; 10 #if defined(__SIZEOF_INT128__) 11 volatile __int128 * vp16 = 0; __int128 * rp16 = 0; __int128 v16 = 0; 6 volatile char * vpc = 0; char * rpc = 0; char vc = 0; 7 volatile short * vps = 0; short * rps = 0; short vs = 0; 8 volatile int * vpi = 0; int * rpi = 0; int vi = 0; 9 volatile long int * vpl = 0; long int * rpl = 0; long int vl = 0; 10 volatile long long int * vpll = 0; long long int * rpll = 0; long long int vll = 0; 11 #if defined(__SIZEOF_INT128__) 12 volatile __int128 * vplll = 0; __int128 * rplll = 0; __int128 vlll = 0; 12 13 #endif 13 14 struct type * volatile * vpp = 0; struct type ** rpp = 0; struct type * vp = 0; 14 15 15 { char ret; ret = __sync_fetch_and_add(vp1, v1); } 16 { char ret; ret = __sync_fetch_and_add_1(vp1, v1); } 17 { short ret; ret = __sync_fetch_and_add(vp2, v2); } 18 { short ret; ret = __sync_fetch_and_add_2(vp2, v2); } 19 { int ret; ret = __sync_fetch_and_add(vp4, v4); } 20 { int ret; ret = __sync_fetch_and_add_4(vp4, v4); } 21 { long long int ret; ret = __sync_fetch_and_add(vp8, v8); } 22 { long long int ret; ret = __sync_fetch_and_add_8(vp8, v8); } 23 #if defined(__SIZEOF_INT128__) 24 { __int128 ret; ret = __sync_fetch_and_add(vp16, v16); } 25 { __int128 ret; ret = __sync_fetch_and_add_16(vp16, v16); } 26 #endif 27 28 { char ret; ret = __sync_fetch_and_sub(vp1, v1); } 29 { char ret; ret = __sync_fetch_and_sub_1(vp1, v1); } 30 { short ret; ret = __sync_fetch_and_sub(vp2, v2); } 31 { short ret; ret = __sync_fetch_and_sub_2(vp2, v2); } 32 { int ret; ret = __sync_fetch_and_sub(vp4, v4); } 33 { int ret; ret = __sync_fetch_and_sub_4(vp4, v4); } 34 { long long int ret; ret = __sync_fetch_and_sub(vp8, v8); } 35 { 
long long int ret; ret = __sync_fetch_and_sub_8(vp8, v8); } 36 #if defined(__SIZEOF_INT128__) 37 { __int128 ret; ret = __sync_fetch_and_sub(vp16, v16); } 38 { __int128 ret; ret = __sync_fetch_and_sub_16(vp16, v16); } 39 #endif 40 41 { char ret; ret = __sync_fetch_and_or(vp1, v1); } 42 { char ret; ret = __sync_fetch_and_or_1(vp1, v1); } 43 { short ret; ret = __sync_fetch_and_or(vp2, v2); } 44 { short ret; ret = __sync_fetch_and_or_2(vp2, v2); } 45 { int ret; ret = __sync_fetch_and_or(vp4, v4); } 46 { int ret; ret = __sync_fetch_and_or_4(vp4, v4); } 47 { long long int ret; ret = __sync_fetch_and_or(vp8, v8); } 48 { long long int ret; ret = __sync_fetch_and_or_8(vp8, v8); } 49 #if defined(__SIZEOF_INT128__) 50 { __int128 ret; ret = __sync_fetch_and_or(vp16, v16); } 51 { __int128 ret; ret = __sync_fetch_and_or_16(vp16, v16); } 52 #endif 53 54 { char ret; ret = __sync_fetch_and_and(vp1, v1); } 55 { char ret; ret = __sync_fetch_and_and_1(vp1, v1); } 56 { short ret; ret = __sync_fetch_and_and(vp2, v2); } 57 { short ret; ret = __sync_fetch_and_and_2(vp2, v2); } 58 { int ret; ret = __sync_fetch_and_and(vp4, v4); } 59 { int ret; ret = __sync_fetch_and_and_4(vp4, v4); } 60 { long long int ret; ret = __sync_fetch_and_and(vp8, v8); } 61 { long long int ret; ret = __sync_fetch_and_and_8(vp8, v8); } 62 #if defined(__SIZEOF_INT128__) 63 { __int128 ret; ret = __sync_fetch_and_and(vp16, v16); } 64 { __int128 ret; ret = __sync_fetch_and_and_16(vp16, v16); } 65 #endif 66 67 { char ret; ret = __sync_fetch_and_xor(vp1, v1); } 68 { char ret; ret = __sync_fetch_and_xor_1(vp1, v1); } 69 { short ret; ret = __sync_fetch_and_xor(vp2, v2); } 70 { short ret; ret = __sync_fetch_and_xor_2(vp2, v2); } 71 { int ret; ret = __sync_fetch_and_xor(vp4, v4); } 72 { int ret; ret = __sync_fetch_and_xor_4(vp4, v4); } 73 { long long int ret; ret = __sync_fetch_and_xor(vp8, v8); } 74 { long long int ret; ret = __sync_fetch_and_xor_8(vp8, v8); } 75 #if defined(__SIZEOF_INT128__) 76 { __int128 ret; ret = 
__sync_fetch_and_xor(vp16, v16); } 77 { __int128 ret; ret = __sync_fetch_and_xor_16(vp16, v16); } 78 #endif 79 80 { char ret; ret = __sync_fetch_and_nand(vp1, v1); } 81 { char ret; ret = __sync_fetch_and_nand_1(vp1, v1); } 82 { short ret; ret = __sync_fetch_and_nand(vp2, v2); } 83 { short ret; ret = __sync_fetch_and_nand_2(vp2, v2); } 84 { int ret; ret = __sync_fetch_and_nand(vp4, v4); } 85 { int ret; ret = __sync_fetch_and_nand_4(vp4, v4); } 86 { long long int ret; ret = __sync_fetch_and_nand(vp8, v8); } 87 { long long int ret; ret = __sync_fetch_and_nand_8(vp8, v8); } 88 #if defined(__SIZEOF_INT128__) 89 { __int128 ret; ret = __sync_fetch_and_nand(vp16, v16); } 90 { __int128 ret; ret = __sync_fetch_and_nand_16(vp16, v16); } 91 #endif 92 93 { char ret; ret = __sync_add_and_fetch(vp1, v1); } 94 { char ret; ret = __sync_add_and_fetch_1(vp1, v1); } 95 { short ret; ret = __sync_add_and_fetch(vp2, v2); } 96 { short ret; ret = __sync_add_and_fetch_2(vp2, v2); } 97 { int ret; ret = __sync_add_and_fetch(vp4, v4); } 98 { int ret; ret = __sync_add_and_fetch_4(vp4, v4); } 99 { long long int ret; ret = __sync_add_and_fetch(vp8, v8); } 100 { long long int ret; ret = __sync_add_and_fetch_8(vp8, v8); } 101 #if defined(__SIZEOF_INT128__) 102 { __int128 ret; ret = __sync_add_and_fetch(vp16, v16); } 103 { __int128 ret; ret = __sync_add_and_fetch_16(vp16, v16); } 104 #endif 105 106 { char ret; ret = __sync_sub_and_fetch(vp1, v1); } 107 { char ret; ret = __sync_sub_and_fetch_1(vp1, v1); } 108 { short ret; ret = __sync_sub_and_fetch(vp2, v2); } 109 { short ret; ret = __sync_sub_and_fetch_2(vp2, v2); } 110 { int ret; ret = __sync_sub_and_fetch(vp4, v4); } 111 { int ret; ret = __sync_sub_and_fetch_4(vp4, v4); } 112 { long long int ret; ret = __sync_sub_and_fetch(vp8, v8); } 113 { long long int ret; ret = __sync_sub_and_fetch_8(vp8, v8); } 114 #if defined(__SIZEOF_INT128__) 115 { __int128 ret; ret = __sync_sub_and_fetch(vp16, v16); } 116 { __int128 ret; ret = 
__sync_sub_and_fetch_16(vp16, v16); } 117 #endif 118 119 { char ret; ret = __sync_or_and_fetch(vp1, v1); } 120 { char ret; ret = __sync_or_and_fetch_1(vp1, v1); } 121 { short ret; ret = __sync_or_and_fetch(vp2, v2); } 122 { short ret; ret = __sync_or_and_fetch_2(vp2, v2); } 123 { int ret; ret = __sync_or_and_fetch(vp4, v4); } 124 { int ret; ret = __sync_or_and_fetch_4(vp4, v4); } 125 { long long int ret; ret = __sync_or_and_fetch(vp8, v8); } 126 { long long int ret; ret = __sync_or_and_fetch_8(vp8, v8); } 127 #if defined(__SIZEOF_INT128__) 128 { __int128 ret; ret = __sync_or_and_fetch(vp16, v16); } 129 { __int128 ret; ret = __sync_or_and_fetch_16(vp16, v16); } 130 #endif 131 132 { char ret; ret = __sync_and_and_fetch(vp1, v1); } 133 { char ret; ret = __sync_and_and_fetch_1(vp1, v1); } 134 { short ret; ret = __sync_and_and_fetch(vp2, v2); } 135 { short ret; ret = __sync_and_and_fetch_2(vp2, v2); } 136 { int ret; ret = __sync_and_and_fetch(vp4, v4); } 137 { int ret; ret = __sync_and_and_fetch_4(vp4, v4); } 138 { long long int ret; ret = __sync_and_and_fetch(vp8, v8); } 139 { long long int ret; ret = __sync_and_and_fetch_8(vp8, v8); } 140 #if defined(__SIZEOF_INT128__) 141 { __int128 ret; ret = __sync_and_and_fetch(vp16, v16); } 142 { __int128 ret; ret = __sync_and_and_fetch_16(vp16, v16); } 143 #endif 144 145 { char ret; ret = __sync_xor_and_fetch(vp1, v1); } 146 { char ret; ret = __sync_xor_and_fetch_1(vp1, v1); } 147 { short ret; ret = __sync_xor_and_fetch(vp2, v2); } 148 { short ret; ret = __sync_xor_and_fetch_2(vp2, v2); } 149 { int ret; ret = __sync_xor_and_fetch(vp4, v4); } 150 { int ret; ret = __sync_xor_and_fetch_4(vp4, v4); } 151 { long long int ret; ret = __sync_xor_and_fetch(vp8, v8); } 152 { long long int ret; ret = __sync_xor_and_fetch_8(vp8, v8); } 153 #if defined(__SIZEOF_INT128__) 154 { __int128 ret; ret = __sync_xor_and_fetch(vp16, v16); } 155 { __int128 ret; ret = __sync_xor_and_fetch_16(vp16, v16); } 156 #endif 157 158 { char ret; ret = 
__sync_nand_and_fetch(vp1, v1); } 159 { char ret; ret = __sync_nand_and_fetch_1(vp1, v1); } 160 { short ret; ret = __sync_nand_and_fetch(vp2, v2); } 161 { short ret; ret = __sync_nand_and_fetch_2(vp2, v2); } 162 { int ret; ret = __sync_nand_and_fetch(vp4, v4); } 163 { int ret; ret = __sync_nand_and_fetch_4(vp4, v4); } 164 { long long int ret; ret = __sync_nand_and_fetch(vp8, v8); } 165 { long long int ret; ret = __sync_nand_and_fetch_8(vp8, v8); } 166 #if defined(__SIZEOF_INT128__) 167 { __int128 ret; ret = __sync_nand_and_fetch(vp16, v16); } 168 { __int128 ret; ret = __sync_nand_and_fetch_16(vp16, v16); } 169 #endif 170 171 { _Bool ret; ret = __sync_bool_compare_and_swap(vp1, v1, v1); } 172 { _Bool ret; ret = __sync_bool_compare_and_swap_1(vp1, v1, v1); } 173 { _Bool ret; ret = __sync_bool_compare_and_swap(vp2, v2, v2); } 174 { _Bool ret; ret = __sync_bool_compare_and_swap_2(vp2, v2, v2); } 175 { _Bool ret; ret = __sync_bool_compare_and_swap(vp4, v4, v4); } 176 { _Bool ret; ret = __sync_bool_compare_and_swap_4(vp4, v4, v4); } 177 { _Bool ret; ret = __sync_bool_compare_and_swap(vp8, v8, v8); } 178 { _Bool ret; ret = __sync_bool_compare_and_swap_8(vp8, v8, v8); } 179 #if defined(__SIZEOF_INT128__) 180 { _Bool ret; ret = __sync_bool_compare_and_swap(vp16, v16, v16); } 181 { _Bool ret; ret = __sync_bool_compare_and_swap_16(vp16, v16,v16); } 16 { char ret; ret = __sync_fetch_and_add(vpc, vc); } 17 { short ret; ret = __sync_fetch_and_add(vps, vs); } 18 { int ret; ret = __sync_fetch_and_add(vpi, vi); } 19 { long int ret; ret = __sync_fetch_and_add(vpl, vl); } 20 { long long int ret; ret = __sync_fetch_and_add(vpll, vll); } 21 #if defined(__SIZEOF_INT128__) 22 { __int128 ret; ret = __sync_fetch_and_add(vplll, vlll); } 23 #endif 24 25 { char ret; ret = __sync_fetch_and_sub(vpc, vc); } 26 { short ret; ret = __sync_fetch_and_sub(vps, vs); } 27 { int ret; ret = __sync_fetch_and_sub(vpi, vi); } 28 { long int ret; ret = __sync_fetch_and_sub(vpl, vl); } 29 { long long int ret; 
ret = __sync_fetch_and_sub(vpll, vll); } 30 #if defined(__SIZEOF_INT128__) 31 { __int128 ret; ret = __sync_fetch_and_sub(vplll, vlll); } 32 #endif 33 34 { char ret; ret = __sync_fetch_and_or(vpc, vc); } 35 { short ret; ret = __sync_fetch_and_or(vps, vs); } 36 { int ret; ret = __sync_fetch_and_or(vpi, vi); } 37 { long int ret; ret = __sync_fetch_and_or(vpl, vl); } 38 { long long int ret; ret = __sync_fetch_and_or(vpll, vll); } 39 #if defined(__SIZEOF_INT128__) 40 { __int128 ret; ret = __sync_fetch_and_or(vplll, vlll); } 41 #endif 42 43 { char ret; ret = __sync_fetch_and_and(vpc, vc); } 44 { short ret; ret = __sync_fetch_and_and(vps, vs); } 45 { int ret; ret = __sync_fetch_and_and(vpi, vi); } 46 { long int ret; ret = __sync_fetch_and_and(vpl, vl); } 47 { long long int ret; ret = __sync_fetch_and_and(vpll, vll); } 48 #if defined(__SIZEOF_INT128__) 49 { __int128 ret; ret = __sync_fetch_and_and(vplll, vlll); } 50 #endif 51 52 { char ret; ret = __sync_fetch_and_xor(vpc, vc); } 53 { short ret; ret = __sync_fetch_and_xor(vps, vs); } 54 { int ret; ret = __sync_fetch_and_xor(vpi, vi); } 55 { long int ret; ret = __sync_fetch_and_xor(vpl, vl); } 56 { long long int ret; ret = __sync_fetch_and_xor(vpll, vll); } 57 #if defined(__SIZEOF_INT128__) 58 { __int128 ret; ret = __sync_fetch_and_xor(vplll, vlll); } 59 #endif 60 61 { char ret; ret = __sync_fetch_and_nand(vpc, vc); } 62 { short ret; ret = __sync_fetch_and_nand(vps, vs); } 63 { int ret; ret = __sync_fetch_and_nand(vpi, vi); } 64 { long int ret; ret = __sync_fetch_and_nand(vpl, vl); } 65 { long long int ret; ret = __sync_fetch_and_nand(vpll, vll); } 66 #if defined(__SIZEOF_INT128__) 67 { __int128 ret; ret = __sync_fetch_and_nand(vplll, vlll); } 68 { __int128 ret; ret = __sync_fetch_and_nand_16(vplll, vlll); } 69 #endif 70 71 { char ret; ret = __sync_add_and_fetch(vpc, vc); } 72 { short ret; ret = __sync_add_and_fetch(vps, vs); } 73 { int ret; ret = __sync_add_and_fetch(vpi, vi); } 74 { long int ret; ret = 
__sync_add_and_fetch(vpl, vl); } 75 { long long int ret; ret = __sync_add_and_fetch(vpll, vll); } 76 #if defined(__SIZEOF_INT128__) 77 { __int128 ret; ret = __sync_add_and_fetch(vplll, vlll); } 78 #endif 79 80 { char ret; ret = __sync_sub_and_fetch(vpc, vc); } 81 { short ret; ret = __sync_sub_and_fetch(vps, vs); } 82 { int ret; ret = __sync_sub_and_fetch(vpi, vi); } 83 { long int ret; ret = __sync_sub_and_fetch(vpl, vl); } 84 { long long int ret; ret = __sync_sub_and_fetch(vpll, vll); } 85 #if defined(__SIZEOF_INT128__) 86 { __int128 ret; ret = __sync_sub_and_fetch(vplll, vlll); } 87 #endif 88 89 { char ret; ret = __sync_or_and_fetch(vpc, vc); } 90 { short ret; ret = __sync_or_and_fetch(vps, vs); } 91 { int ret; ret = __sync_or_and_fetch(vpi, vi); } 92 { long int ret; ret = __sync_or_and_fetch(vpl, vl); } 93 { long long int ret; ret = __sync_or_and_fetch(vpll, vll); } 94 #if defined(__SIZEOF_INT128__) 95 { __int128 ret; ret = __sync_or_and_fetch(vplll, vlll); } 96 #endif 97 98 { char ret; ret = __sync_and_and_fetch(vpc, vc); } 99 { short ret; ret = __sync_and_and_fetch(vps, vs); } 100 { int ret; ret = __sync_and_and_fetch(vpi, vi); } 101 { long int ret; ret = __sync_and_and_fetch(vpl, vl); } 102 { long long int ret; ret = __sync_and_and_fetch(vpll, vll); } 103 #if defined(__SIZEOF_INT128__) 104 { __int128 ret; ret = __sync_and_and_fetch(vplll, vlll); } 105 #endif 106 107 { char ret; ret = __sync_xor_and_fetch(vpc, vc); } 108 { short ret; ret = __sync_xor_and_fetch(vps, vs); } 109 { int ret; ret = __sync_xor_and_fetch(vpi, vi); } 110 { long int ret; ret = __sync_xor_and_fetch(vpl, vl); } 111 { long long int ret; ret = __sync_xor_and_fetch(vpll, vll); } 112 #if defined(__SIZEOF_INT128__) 113 { __int128 ret; ret = __sync_xor_and_fetch(vplll, vlll); } 114 #endif 115 116 { char ret; ret = __sync_nand_and_fetch(vpc, vc); } 117 { short ret; ret = __sync_nand_and_fetch(vps, vs); } 118 { int ret; ret = __sync_nand_and_fetch(vpi, vi); } 119 { long int ret; ret = 
__sync_nand_and_fetch(vpl, vl); } 120 { long long int ret; ret = __sync_nand_and_fetch(vpll, vll); } 121 #if defined(__SIZEOF_INT128__) 122 { __int128 ret; ret = __sync_nand_and_fetch(vplll, vlll); } 123 #endif 124 125 { _Bool ret; ret = __sync_bool_compare_and_swap(vpc, vc, vc); } 126 { _Bool ret; ret = __sync_bool_compare_and_swap(vps, vs, vs); } 127 { _Bool ret; ret = __sync_bool_compare_and_swap(vpi, vi, vi); } 128 { _Bool ret; ret = __sync_bool_compare_and_swap(vpl, vl, vl); } 129 { _Bool ret; ret = __sync_bool_compare_and_swap(vpll, vll, vll); } 130 #if defined(__SIZEOF_INT128__) 131 { _Bool ret; ret = __sync_bool_compare_and_swap(vplll, vlll, vlll); } 182 132 #endif 183 133 { _Bool ret; ret = __sync_bool_compare_and_swap(vpp, vp, vp); } 184 134 185 { char ret; ret = __sync_val_compare_and_swap(vp1, v1, v1); } 186 { char ret; ret = __sync_val_compare_and_swap_1(vp1, v1, v1); } 187 { short ret; ret = __sync_val_compare_and_swap(vp2, v2, v2); } 188 { short ret; ret = __sync_val_compare_and_swap_2(vp2, v2, v2); } 189 { int ret; ret = __sync_val_compare_and_swap(vp4, v4, v4); } 190 { int ret; ret = __sync_val_compare_and_swap_4(vp4, v4, v4); } 191 { long long int ret; ret = __sync_val_compare_and_swap(vp8, v8, v8); } 192 { long long int ret; ret = __sync_val_compare_and_swap_8(vp8, v8, v8); } 193 #if defined(__SIZEOF_INT128__) 194 { __int128 ret; ret = __sync_val_compare_and_swap(vp16, v16, v16); } 195 { __int128 ret; ret = __sync_val_compare_and_swap_16(vp16, v16,v16); } 135 { char ret; ret = __sync_val_compare_and_swap(vpc, vc, vc); } 136 { short ret; ret = __sync_val_compare_and_swap(vps, vs, vs); } 137 { int ret; ret = __sync_val_compare_and_swap(vpi, vi, vi); } 138 { long int ret; ret = __sync_val_compare_and_swap(vpl, vl, vl); } 139 { long long int ret; ret = __sync_val_compare_and_swap(vpll, vll, vll); } 140 #if defined(__SIZEOF_INT128__) 141 { __int128 ret; ret = __sync_val_compare_and_swap(vplll, vlll, vlll); } 196 142 #endif 197 143 { struct type * ret; 
ret = __sync_val_compare_and_swap(vpp, vp, vp); } 198 144 199 145 200 { char ret; ret = __sync_lock_test_and_set(vp1, v1); } 201 { char ret; ret = __sync_lock_test_and_set_1(vp1, v1); } 202 { short ret; ret = __sync_lock_test_and_set(vp2, v2); } 203 { short ret; ret = __sync_lock_test_and_set_2(vp2, v2); } 204 { int ret; ret = __sync_lock_test_and_set(vp4, v4); } 205 { int ret; ret = __sync_lock_test_and_set_4(vp4, v4); } 206 { long long int ret; ret = __sync_lock_test_and_set(vp8, v8); } 207 { long long int ret; ret = __sync_lock_test_and_set_8(vp8, v8); } 208 #if defined(__SIZEOF_INT128__) 209 { __int128 ret; ret = __sync_lock_test_and_set(vp16, v16); } 210 { __int128 ret; ret = __sync_lock_test_and_set_16(vp16, v16); } 211 #endif 212 213 { __sync_lock_release(vp1); } 214 { __sync_lock_release_1(vp1); } 215 { __sync_lock_release(vp2); } 216 { __sync_lock_release_2(vp2); } 217 { __sync_lock_release(vp4); } 218 { __sync_lock_release_4(vp4); } 219 { __sync_lock_release(vp8); } 220 { __sync_lock_release_8(vp8); } 221 #if defined(__SIZEOF_INT128__) 222 { __sync_lock_release(vp16); } 223 { __sync_lock_release_16(vp16); } 146 { char ret; ret = __sync_lock_test_and_set(vpc, vc); } 147 { short ret; ret = __sync_lock_test_and_set(vps, vs); } 148 { int ret; ret = __sync_lock_test_and_set(vpi, vi); } 149 { long int ret; ret = __sync_lock_test_and_set(vpl, vl); } 150 { long long int ret; ret = __sync_lock_test_and_set(vpll, vll); } 151 #if defined(__SIZEOF_INT128__) 152 { __int128 ret; ret = __sync_lock_test_and_set(vplll, vlll); } 153 #endif 154 155 { __sync_lock_release(vpc); } 156 { __sync_lock_release(vps); } 157 { __sync_lock_release(vpi); } 158 { __sync_lock_release(vpl); } 159 { __sync_lock_release(vpll); } 160 #if defined(__SIZEOF_INT128__) 161 { __sync_lock_release(vplll); } 224 162 #endif 225 163 … … 230 168 231 169 { _Bool ret; ret = __atomic_test_and_set(vpB, vB); } 232 { _Bool ret; ret = __atomic_test_and_set(vp 1, v1); }170 { _Bool ret; ret = 
__atomic_test_and_set(vpc, vc); } 233 171 { __atomic_clear(vpB, vB); } 234 { __atomic_clear(vp1, v1); } 235 236 { char ret; ret = __atomic_exchange_n(vp1, v1, __ATOMIC_SEQ_CST); } 237 { char ret; ret = __atomic_exchange_1(vp1, v1, __ATOMIC_SEQ_CST); } 238 { char ret; __atomic_exchange(vp1, &v1, &ret, __ATOMIC_SEQ_CST); } 239 { short ret; ret = __atomic_exchange_n(vp2, v2, __ATOMIC_SEQ_CST); } 240 { short ret; ret = __atomic_exchange_2(vp2, v2, __ATOMIC_SEQ_CST); } 241 { short ret; __atomic_exchange(vp2, &v2, &ret, __ATOMIC_SEQ_CST); } 242 { int ret; ret = __atomic_exchange_n(vp4, v4, __ATOMIC_SEQ_CST); } 243 { int ret; ret = __atomic_exchange_4(vp4, v4, __ATOMIC_SEQ_CST); } 244 { int ret; __atomic_exchange(vp4, &v4, &ret, __ATOMIC_SEQ_CST); } 245 { long long int ret; ret = __atomic_exchange_n(vp8, v8, __ATOMIC_SEQ_CST); } 246 { long long int ret; ret = __atomic_exchange_8(vp8, v8, __ATOMIC_SEQ_CST); } 247 { long long int ret; __atomic_exchange(vp8, &v8, &ret, __ATOMIC_SEQ_CST); } 248 #if defined(__SIZEOF_INT128__) 249 { __int128 ret; ret = __atomic_exchange_n(vp16, v16, __ATOMIC_SEQ_CST); } 250 { __int128 ret; ret = __atomic_exchange_16(vp16, v16, __ATOMIC_SEQ_CST); } 251 { __int128 ret; __atomic_exchange(vp16, &v16, &ret, __ATOMIC_SEQ_CST); } 172 { __atomic_clear(vpc, vc); } 173 174 { char ret; ret = __atomic_exchange_n(vpc, vc, __ATOMIC_SEQ_CST); } 175 { char ret; __atomic_exchange(vpc, &vc, &ret, __ATOMIC_SEQ_CST); } 176 { short ret; ret = __atomic_exchange_n(vps, vs, __ATOMIC_SEQ_CST); } 177 { short ret; __atomic_exchange(vps, &vs, &ret, __ATOMIC_SEQ_CST); } 178 { int ret; ret = __atomic_exchange_n(vpi, vi, __ATOMIC_SEQ_CST); } 179 { int ret; __atomic_exchange(vpi, &vi, &ret, __ATOMIC_SEQ_CST); } 180 { long int ret; ret = __atomic_exchange_n(vpl, vl, __ATOMIC_SEQ_CST); } 181 { long int ret; __atomic_exchange(vpl, &vl, &ret, __ATOMIC_SEQ_CST); } 182 { long long int ret; ret = __atomic_exchange_n(vpll, vll, __ATOMIC_SEQ_CST); } 183 { long long int ret; 
__atomic_exchange(vpll, &vll, &ret, __ATOMIC_SEQ_CST); } 184 #if defined(__SIZEOF_INT128__) 185 { __int128 ret; ret = __atomic_exchange_n(vplll, vlll, __ATOMIC_SEQ_CST); } 186 { __int128 ret; __atomic_exchange(vplll, &vlll, &ret, __ATOMIC_SEQ_CST); } 252 187 #endif 253 188 { struct type * ret; ret = __atomic_exchange_n(vpp, vp, __ATOMIC_SEQ_CST); } 254 189 { struct type * ret; __atomic_exchange(vpp, &vp, &ret, __ATOMIC_SEQ_CST); } 255 190 256 { char ret; ret = __atomic_load_n(vp1, __ATOMIC_SEQ_CST); } 257 { char ret; ret = __atomic_load_1(vp1, __ATOMIC_SEQ_CST); } 258 { char ret; __atomic_load(vp1, &ret, __ATOMIC_SEQ_CST); } 259 { short ret; ret = __atomic_load_n(vp2, __ATOMIC_SEQ_CST); } 260 { short ret; ret = __atomic_load_2(vp2, __ATOMIC_SEQ_CST); } 261 { short ret; __atomic_load(vp2, &ret, __ATOMIC_SEQ_CST); } 262 { int ret; ret = __atomic_load_n(vp4, __ATOMIC_SEQ_CST); } 263 { int ret; ret = __atomic_load_4(vp4, __ATOMIC_SEQ_CST); } 264 { int ret; __atomic_load(vp4, &ret, __ATOMIC_SEQ_CST); } 265 { long long int ret; ret = __atomic_load_n(vp8, __ATOMIC_SEQ_CST); } 266 { long long int ret; ret = __atomic_load_8(vp8, __ATOMIC_SEQ_CST); } 267 { long long int ret; __atomic_load(vp8, &ret, __ATOMIC_SEQ_CST); } 268 #if defined(__SIZEOF_INT128__) 269 { __int128 ret; ret = __atomic_load_n(vp16, __ATOMIC_SEQ_CST); } 270 { __int128 ret; ret = __atomic_load_16(vp16, __ATOMIC_SEQ_CST); } 271 { __int128 ret; __atomic_load(vp16, &ret, __ATOMIC_SEQ_CST); } 191 { char ret; ret = __atomic_load_n(vpc, __ATOMIC_SEQ_CST); } 192 { char ret; __atomic_load(vpc, &ret, __ATOMIC_SEQ_CST); } 193 { short ret; ret = __atomic_load_n(vps, __ATOMIC_SEQ_CST); } 194 { short ret; __atomic_load(vps, &ret, __ATOMIC_SEQ_CST); } 195 { int ret; ret = __atomic_load_n(vpi, __ATOMIC_SEQ_CST); } 196 { int ret; __atomic_load(vpi, &ret, __ATOMIC_SEQ_CST); } 197 { long int ret; ret = __atomic_load_n(vpl, __ATOMIC_SEQ_CST); } 198 { long int ret; __atomic_load(vpl, &ret, __ATOMIC_SEQ_CST); } 199 { long long 
int ret; ret = __atomic_load_n(vpll, __ATOMIC_SEQ_CST); } 200 { long long int ret; __atomic_load(vpll, &ret, __ATOMIC_SEQ_CST); } 201 #if defined(__SIZEOF_INT128__) 202 { __int128 ret; ret = __atomic_load_n(vplll, __ATOMIC_SEQ_CST); } 203 { __int128 ret; __atomic_load(vplll, &ret, __ATOMIC_SEQ_CST); } 272 204 #endif 273 205 { struct type * ret; ret = __atomic_load_n(vpp, __ATOMIC_SEQ_CST); } 274 206 { struct type * ret; __atomic_load(vpp, &ret, __ATOMIC_SEQ_CST); } 275 207 276 { _Bool ret; ret = __atomic_compare_exchange_n(vp1, rp1, v1, false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } 277 { _Bool ret; ret = __atomic_compare_exchange_1(vp1, rp1, v1, false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } 278 { _Bool ret; ret = __atomic_compare_exchange(vp1, rp1, &v1, false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } 279 { _Bool ret; ret = __atomic_compare_exchange_n(vp2, rp2, v2, false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } 280 { _Bool ret; ret = __atomic_compare_exchange_2(vp2, rp2, v2, false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } 281 { _Bool ret; ret = __atomic_compare_exchange(vp2, rp2, &v2, false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } 282 { _Bool ret; ret = __atomic_compare_exchange_n(vp4, rp4, v4, false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } 283 { _Bool ret; ret = __atomic_compare_exchange_4(vp4, rp4, v4, false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } 284 { _Bool ret; ret = __atomic_compare_exchange(vp4, rp4, &v4, false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } 285 { _Bool ret; ret = __atomic_compare_exchange_n(vp8, rp8, v8, false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } 286 { _Bool ret; ret = __atomic_compare_exchange_8(vp8, rp8, v8, false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } 287 { _Bool ret; ret = __atomic_compare_exchange(vp8, rp8, &v8, false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } 288 #if defined(__SIZEOF_INT128__) 289 { _Bool ret; ret = __atomic_compare_exchange_n(vp16, rp16, v16, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } 290 { _Bool ret; ret = 
__atomic_compare_exchange_16(vp16, rp16, v16, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } 291 { _Bool ret; ret = __atomic_compare_exchange(vp16, rp16, &v16, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } 208 { _Bool ret; ret = __atomic_compare_exchange_n(vpc, rpc, vc, false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } 209 { _Bool ret; ret = __atomic_compare_exchange(vpc, rpc, &vc, false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } 210 { _Bool ret; ret = __atomic_compare_exchange_n(vps, rps, vs, false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } 211 { _Bool ret; ret = __atomic_compare_exchange(vps, rps, &vs, false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } 212 { _Bool ret; ret = __atomic_compare_exchange_n(vpi, rpi, vi, false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } 213 { _Bool ret; ret = __atomic_compare_exchange(vpi, rpi, &vi, false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } 214 { _Bool ret; ret = __atomic_compare_exchange_n(vpl, rpl, vl, false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } 215 { _Bool ret; ret = __atomic_compare_exchange(vpl, rpl, &vl, false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } 216 { _Bool ret; ret = __atomic_compare_exchange_n(vpll, rpll, vll, false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } 217 { _Bool ret; ret = __atomic_compare_exchange(vpll, rpll, &vll, false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } 218 #if defined(__SIZEOF_INT128__) 219 { _Bool ret; ret = __atomic_compare_exchange_n(vplll, rplll, vlll, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } 220 { _Bool ret; ret = __atomic_compare_exchange(vplll, rplll, &vlll, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } 292 221 #endif 293 222 { _Bool ret; ret = __atomic_compare_exchange_n(vpp, rpp, vp, false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } 294 223 { _Bool ret; ret = __atomic_compare_exchange(vpp, rpp, &vp, false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } 295 224 296 { __atomic_store_n(vp1, v1, __ATOMIC_SEQ_CST); } 297 { __atomic_store_1(vp1, v1, __ATOMIC_SEQ_CST); } 298 { __atomic_store(vp1, &v1, __ATOMIC_SEQ_CST); } 299 { 
__atomic_store_n(vp2, v2, __ATOMIC_SEQ_CST); } 300 { __atomic_store_2(vp2, v2, __ATOMIC_SEQ_CST); } 301 { __atomic_store(vp2, &v2, __ATOMIC_SEQ_CST); } 302 { __atomic_store_n(vp4, v4, __ATOMIC_SEQ_CST); } 303 { __atomic_store_4(vp4, v4, __ATOMIC_SEQ_CST); } 304 { __atomic_store(vp4, &v4, __ATOMIC_SEQ_CST); } 305 { __atomic_store_n(vp8, v8, __ATOMIC_SEQ_CST); } 306 { __atomic_store_8(vp8, v8, __ATOMIC_SEQ_CST); } 307 { __atomic_store(vp8, &v8, __ATOMIC_SEQ_CST); } 308 #if defined(__SIZEOF_INT128__) 309 { __atomic_store_n(vp16, v16, __ATOMIC_SEQ_CST); } 310 { __atomic_store_16(vp16, v16, __ATOMIC_SEQ_CST); } 311 { __atomic_store(vp16, &v16, __ATOMIC_SEQ_CST); } 225 { __atomic_store_n(vpc, vc, __ATOMIC_SEQ_CST); } 226 { __atomic_store(vpc, &vc, __ATOMIC_SEQ_CST); } 227 { __atomic_store_n(vps, vs, __ATOMIC_SEQ_CST); } 228 { __atomic_store(vps, &vs, __ATOMIC_SEQ_CST); } 229 { __atomic_store_n(vpi, vi, __ATOMIC_SEQ_CST); } 230 { __atomic_store(vpi, &vi, __ATOMIC_SEQ_CST); } 231 { __atomic_store_n(vpl, vl, __ATOMIC_SEQ_CST); } 232 { __atomic_store(vpl, &vl, __ATOMIC_SEQ_CST); } 233 { __atomic_store_n(vpll, vll, __ATOMIC_SEQ_CST); } 234 { __atomic_store(vpll, &vll, __ATOMIC_SEQ_CST); } 235 #if defined(__SIZEOF_INT128__) 236 { __atomic_store_n(vplll, vlll, __ATOMIC_SEQ_CST); } 237 { __atomic_store(vplll, &vlll, __ATOMIC_SEQ_CST); } 312 238 #endif 313 239 { __atomic_store_n(vpp, vp, __ATOMIC_SEQ_CST); } 314 240 { __atomic_store(vpp, &vp, __ATOMIC_SEQ_CST); } 315 241 316 { char ret; ret = __atomic_add_fetch(vp1, v1, __ATOMIC_SEQ_CST); } 317 { char ret; ret = __atomic_add_fetch_1(vp1, v1, __ATOMIC_SEQ_CST); } 318 { short ret; ret = __atomic_add_fetch(vp2, v2, __ATOMIC_SEQ_CST); } 319 { short ret; ret = __atomic_add_fetch_2(vp2, v2, __ATOMIC_SEQ_CST); } 320 { int ret; ret = __atomic_add_fetch(vp4, v4, __ATOMIC_SEQ_CST); } 321 { int ret; ret = __atomic_add_fetch_4(vp4, v4, __ATOMIC_SEQ_CST); } 322 { long long int ret; ret = __atomic_add_fetch(vp8, v8, __ATOMIC_SEQ_CST); } 323 { 
long long int ret; ret = __atomic_add_fetch_8(vp8, v8, __ATOMIC_SEQ_CST); } 324 #if defined(__SIZEOF_INT128__) 325 { __int128 ret; ret = __atomic_add_fetch(vp16, v16, __ATOMIC_SEQ_CST); } 326 { __int128 ret; ret = __atomic_add_fetch_16(vp16, v16, __ATOMIC_SEQ_CST); } 327 #endif 328 329 { char ret; ret = __atomic_sub_fetch(vp1, v1, __ATOMIC_SEQ_CST); } 330 { char ret; ret = __atomic_sub_fetch_1(vp1, v1, __ATOMIC_SEQ_CST); } 331 { short ret; ret = __atomic_sub_fetch(vp2, v2, __ATOMIC_SEQ_CST); } 332 { short ret; ret = __atomic_sub_fetch_2(vp2, v2, __ATOMIC_SEQ_CST); } 333 { int ret; ret = __atomic_sub_fetch(vp4, v4, __ATOMIC_SEQ_CST); } 334 { int ret; ret = __atomic_sub_fetch_4(vp4, v4, __ATOMIC_SEQ_CST); } 335 { long long int ret; ret = __atomic_sub_fetch(vp8, v8, __ATOMIC_SEQ_CST); } 336 { long long int ret; ret = __atomic_sub_fetch_8(vp8, v8, __ATOMIC_SEQ_CST); } 337 #if defined(__SIZEOF_INT128__) 338 { __int128 ret; ret = __atomic_sub_fetch(vp16, v16, __ATOMIC_SEQ_CST); } 339 { __int128 ret; ret = __atomic_sub_fetch_16(vp16, v16, __ATOMIC_SEQ_CST); } 340 #endif 341 342 { char ret; ret = __atomic_and_fetch(vp1, v1, __ATOMIC_SEQ_CST); } 343 { char ret; ret = __atomic_and_fetch_1(vp1, v1, __ATOMIC_SEQ_CST); } 344 { short ret; ret = __atomic_and_fetch(vp2, v2, __ATOMIC_SEQ_CST); } 345 { short ret; ret = __atomic_and_fetch_2(vp2, v2, __ATOMIC_SEQ_CST); } 346 { int ret; ret = __atomic_and_fetch(vp4, v4, __ATOMIC_SEQ_CST); } 347 { int ret; ret = __atomic_and_fetch_4(vp4, v4, __ATOMIC_SEQ_CST); } 348 { long long int ret; ret = __atomic_and_fetch(vp8, v8, __ATOMIC_SEQ_CST); } 349 { long long int ret; ret = __atomic_and_fetch_8(vp8, v8, __ATOMIC_SEQ_CST); } 350 #if defined(__SIZEOF_INT128__) 351 { __int128 ret; ret = __atomic_and_fetch(vp16, v16, __ATOMIC_SEQ_CST); } 352 { __int128 ret; ret = __atomic_and_fetch_16(vp16, v16, __ATOMIC_SEQ_CST); } 353 #endif 354 355 { char ret; ret = __atomic_nand_fetch(vp1, v1, __ATOMIC_SEQ_CST); } 356 { char ret; ret = 
__atomic_nand_fetch_1(vp1, v1, __ATOMIC_SEQ_CST); } 357 { short ret; ret = __atomic_nand_fetch(vp2, v2, __ATOMIC_SEQ_CST); } 358 { short ret; ret = __atomic_nand_fetch_2(vp2, v2, __ATOMIC_SEQ_CST); } 359 { int ret; ret = __atomic_nand_fetch(vp4, v4, __ATOMIC_SEQ_CST); } 360 { int ret; ret = __atomic_nand_fetch_4(vp4, v4, __ATOMIC_SEQ_CST); } 361 { long long int ret; ret = __atomic_nand_fetch(vp8, v8, __ATOMIC_SEQ_CST); } 362 { long long int ret; ret = __atomic_nand_fetch_8(vp8, v8, __ATOMIC_SEQ_CST); } 363 #if defined(__SIZEOF_INT128__) 364 { __int128 ret; ret = __atomic_nand_fetch(vp16, v16, __ATOMIC_SEQ_CST); } 365 { __int128 ret; ret = __atomic_nand_fetch_16(vp16, v16, __ATOMIC_SEQ_CST); } 366 #endif 367 368 { char ret; ret = __atomic_xor_fetch(vp1, v1, __ATOMIC_SEQ_CST); } 369 { char ret; ret = __atomic_xor_fetch_1(vp1, v1, __ATOMIC_SEQ_CST); } 370 { short ret; ret = __atomic_xor_fetch(vp2, v2, __ATOMIC_SEQ_CST); } 371 { short ret; ret = __atomic_xor_fetch_2(vp2, v2, __ATOMIC_SEQ_CST); } 372 { int ret; ret = __atomic_xor_fetch(vp4, v4, __ATOMIC_SEQ_CST); } 373 { int ret; ret = __atomic_xor_fetch_4(vp4, v4, __ATOMIC_SEQ_CST); } 374 { long long int ret; ret = __atomic_xor_fetch(vp8, v8, __ATOMIC_SEQ_CST); } 375 { long long int ret; ret = __atomic_xor_fetch_8(vp8, v8, __ATOMIC_SEQ_CST); } 376 #if defined(__SIZEOF_INT128__) 377 { __int128 ret; ret = __atomic_xor_fetch(vp16, v16, __ATOMIC_SEQ_CST); } 378 { __int128 ret; ret = __atomic_xor_fetch_16(vp16, v16, __ATOMIC_SEQ_CST); } 379 #endif 380 381 { char ret; ret = __atomic_or_fetch(vp1, v1, __ATOMIC_SEQ_CST); } 382 { char ret; ret = __atomic_or_fetch_1(vp1, v1, __ATOMIC_SEQ_CST); } 383 { short ret; ret = __atomic_or_fetch(vp2, v2, __ATOMIC_SEQ_CST); } 384 { short ret; ret = __atomic_or_fetch_2(vp2, v2, __ATOMIC_SEQ_CST); } 385 { int ret; ret = __atomic_or_fetch(vp4, v4, __ATOMIC_SEQ_CST); } 386 { int ret; ret = __atomic_or_fetch_4(vp4, v4, __ATOMIC_SEQ_CST); } 387 { long long int ret; ret = __atomic_or_fetch(vp8, 
v8, __ATOMIC_SEQ_CST); } 388 { long long int ret; ret = __atomic_or_fetch_8(vp8, v8, __ATOMIC_SEQ_CST); } 389 #if defined(__SIZEOF_INT128__) 390 { __int128 ret; ret = __atomic_or_fetch(vp16, v16, __ATOMIC_SEQ_CST); } 391 { __int128 ret; ret = __atomic_or_fetch_16(vp16, v16, __ATOMIC_SEQ_CST); } 392 #endif 393 394 { char ret; ret = __atomic_fetch_add(vp1, v1, __ATOMIC_SEQ_CST); } 395 { char ret; ret = __atomic_fetch_add_1(vp1, v1, __ATOMIC_SEQ_CST); } 396 { short ret; ret = __atomic_fetch_add(vp2, v2, __ATOMIC_SEQ_CST); } 397 { short ret; ret = __atomic_fetch_add_2(vp2, v2, __ATOMIC_SEQ_CST); } 398 { int ret; ret = __atomic_fetch_add(vp4, v4, __ATOMIC_SEQ_CST); } 399 { int ret; ret = __atomic_fetch_add_4(vp4, v4, __ATOMIC_SEQ_CST); } 400 { long long int ret; ret = __atomic_fetch_add(vp8, v8, __ATOMIC_SEQ_CST); } 401 { long long int ret; ret = __atomic_fetch_add_8(vp8, v8, __ATOMIC_SEQ_CST); } 402 #if defined(__SIZEOF_INT128__) 403 { __int128 ret; ret = __atomic_fetch_add(vp16, v16, __ATOMIC_SEQ_CST); } 404 { __int128 ret; ret = __atomic_fetch_add_16(vp16, v16, __ATOMIC_SEQ_CST); } 405 #endif 406 407 { char ret; ret = __atomic_fetch_sub(vp1, v1, __ATOMIC_SEQ_CST); } 408 { char ret; ret = __atomic_fetch_sub_1(vp1, v1, __ATOMIC_SEQ_CST); } 409 { short ret; ret = __atomic_fetch_sub(vp2, v2, __ATOMIC_SEQ_CST); } 410 { short ret; ret = __atomic_fetch_sub_2(vp2, v2, __ATOMIC_SEQ_CST); } 411 { int ret; ret = __atomic_fetch_sub(vp4, v4, __ATOMIC_SEQ_CST); } 412 { int ret; ret = __atomic_fetch_sub_4(vp4, v4, __ATOMIC_SEQ_CST); } 413 { long long int ret; ret = __atomic_fetch_sub(vp8, v8, __ATOMIC_SEQ_CST); } 414 { long long int ret; ret = __atomic_fetch_sub_8(vp8, v8, __ATOMIC_SEQ_CST); } 415 #if defined(__SIZEOF_INT128__) 416 { __int128 ret; ret = __atomic_fetch_sub(vp16, v16, __ATOMIC_SEQ_CST); } 417 { __int128 ret; ret = __atomic_fetch_sub_16(vp16, v16, __ATOMIC_SEQ_CST); } 418 #endif 419 420 { char ret; ret = __atomic_fetch_and(vp1, v1, __ATOMIC_SEQ_CST); } 421 { char ret; 
ret = __atomic_fetch_and_1(vp1, v1, __ATOMIC_SEQ_CST); } 422 { short ret; ret = __atomic_fetch_and(vp2, v2, __ATOMIC_SEQ_CST); } 423 { short ret; ret = __atomic_fetch_and_2(vp2, v2, __ATOMIC_SEQ_CST); } 424 { int ret; ret = __atomic_fetch_and(vp4, v4, __ATOMIC_SEQ_CST); } 425 { int ret; ret = __atomic_fetch_and_4(vp4, v4, __ATOMIC_SEQ_CST); } 426 { long long int ret; ret = __atomic_fetch_and(vp8, v8, __ATOMIC_SEQ_CST); } 427 { long long int ret; ret = __atomic_fetch_and_8(vp8, v8, __ATOMIC_SEQ_CST); } 428 #if defined(__SIZEOF_INT128__) 429 { __int128 ret; ret = __atomic_fetch_and(vp16, v16, __ATOMIC_SEQ_CST); } 430 { __int128 ret; ret = __atomic_fetch_and_16(vp16, v16, __ATOMIC_SEQ_CST); } 431 #endif 432 433 { char ret; ret = __atomic_fetch_nand(vp1, v1, __ATOMIC_SEQ_CST); } 434 { char ret; ret = __atomic_fetch_nand_1(vp1, v1, __ATOMIC_SEQ_CST); } 435 { short ret; ret = __atomic_fetch_nand(vp2, v2, __ATOMIC_SEQ_CST); } 436 { short ret; ret = __atomic_fetch_nand_2(vp2, v2, __ATOMIC_SEQ_CST); } 437 { int ret; ret = __atomic_fetch_nand(vp4, v4, __ATOMIC_SEQ_CST); } 438 { int ret; ret = __atomic_fetch_nand_4(vp4, v4, __ATOMIC_SEQ_CST); } 439 { long long int ret; ret = __atomic_fetch_nand(vp8, v8, __ATOMIC_SEQ_CST); } 440 { long long int ret; ret = __atomic_fetch_nand_8(vp8, v8, __ATOMIC_SEQ_CST); } 441 #if defined(__SIZEOF_INT128__) 442 { __int128 ret; ret = __atomic_fetch_nand(vp16, v16, __ATOMIC_SEQ_CST); } 443 { __int128 ret; ret = __atomic_fetch_nand_16(vp16, v16, __ATOMIC_SEQ_CST); } 444 #endif 445 446 { char ret; ret = __atomic_fetch_xor(vp1, v1, __ATOMIC_SEQ_CST); } 447 { char ret; ret = __atomic_fetch_xor_1(vp1, v1, __ATOMIC_SEQ_CST); } 448 { short ret; ret = __atomic_fetch_xor(vp2, v2, __ATOMIC_SEQ_CST); } 449 { short ret; ret = __atomic_fetch_xor_2(vp2, v2, __ATOMIC_SEQ_CST); } 450 { int ret; ret = __atomic_fetch_xor(vp4, v4, __ATOMIC_SEQ_CST); } 451 { int ret; ret = __atomic_fetch_xor_4(vp4, v4, __ATOMIC_SEQ_CST); } 452 { long long int ret; ret = 
__atomic_fetch_xor(vp8, v8, __ATOMIC_SEQ_CST); } 453 { long long int ret; ret = __atomic_fetch_xor_8(vp8, v8, __ATOMIC_SEQ_CST); } 454 #if defined(__SIZEOF_INT128__) 455 { __int128 ret; ret = __atomic_fetch_xor(vp16, v16, __ATOMIC_SEQ_CST); } 456 { __int128 ret; ret = __atomic_fetch_xor_16(vp16, v16, __ATOMIC_SEQ_CST); } 457 #endif 458 459 { char ret; ret = __atomic_fetch_or(vp1, v1, __ATOMIC_SEQ_CST); } 460 { char ret; ret = __atomic_fetch_or_1(vp1, v1, __ATOMIC_SEQ_CST); } 461 { short ret; ret = __atomic_fetch_or(vp2, v2, __ATOMIC_SEQ_CST); } 462 { short ret; ret = __atomic_fetch_or_2(vp2, v2, __ATOMIC_SEQ_CST); } 463 { int ret; ret = __atomic_fetch_or(vp4, v4, __ATOMIC_SEQ_CST); } 464 { int ret; ret = __atomic_fetch_or_4(vp4, v4, __ATOMIC_SEQ_CST); } 465 { long long int ret; ret = __atomic_fetch_or(vp8, v8, __ATOMIC_SEQ_CST); } 466 { long long int ret; ret = __atomic_fetch_or_8(vp8, v8, __ATOMIC_SEQ_CST); } 467 #if defined(__SIZEOF_INT128__) 468 { __int128 ret; ret = __atomic_fetch_or(vp16, v16, __ATOMIC_SEQ_CST); } 469 { __int128 ret; ret = __atomic_fetch_or_16(vp16, v16, __ATOMIC_SEQ_CST); } 470 #endif 471 472 { _Bool ret; ret = __atomic_always_lock_free(sizeof(int), vp4); } 473 { _Bool ret; ret = __atomic_is_lock_free(sizeof(int), vp4); } 242 { char ret; ret = __atomic_add_fetch(vpc, vc, __ATOMIC_SEQ_CST); } 243 { short ret; ret = __atomic_add_fetch(vps, vs, __ATOMIC_SEQ_CST); } 244 { int ret; ret = __atomic_add_fetch(vpi, vi, __ATOMIC_SEQ_CST); } 245 { long int ret; ret = __atomic_add_fetch(vpl, vl, __ATOMIC_SEQ_CST); } 246 { long long int ret; ret = __atomic_add_fetch(vpll, vll, __ATOMIC_SEQ_CST); } 247 #if defined(__SIZEOF_INT128__) 248 { __int128 ret; ret = __atomic_add_fetch(vplll, vlll, __ATOMIC_SEQ_CST); } 249 #endif 250 251 { char ret; ret = __atomic_sub_fetch(vpc, vc, __ATOMIC_SEQ_CST); } 252 { short ret; ret = __atomic_sub_fetch(vps, vs, __ATOMIC_SEQ_CST); } 253 { int ret; ret = __atomic_sub_fetch(vpi, vi, __ATOMIC_SEQ_CST); } 254 { long int ret; 
ret = __atomic_sub_fetch(vpl, vl, __ATOMIC_SEQ_CST); } 255 { long long int ret; ret = __atomic_sub_fetch(vpll, vll, __ATOMIC_SEQ_CST); } 256 #if defined(__SIZEOF_INT128__) 257 { __int128 ret; ret = __atomic_sub_fetch(vplll, vlll, __ATOMIC_SEQ_CST); } 258 #endif 259 260 { char ret; ret = __atomic_and_fetch(vpc, vc, __ATOMIC_SEQ_CST); } 261 { short ret; ret = __atomic_and_fetch(vps, vs, __ATOMIC_SEQ_CST); } 262 { int ret; ret = __atomic_and_fetch(vpi, vi, __ATOMIC_SEQ_CST); } 263 { long int ret; ret = __atomic_and_fetch(vpl, vl, __ATOMIC_SEQ_CST); } 264 { long long int ret; ret = __atomic_and_fetch(vpll, vll, __ATOMIC_SEQ_CST); } 265 #if defined(__SIZEOF_INT128__) 266 { __int128 ret; ret = __atomic_and_fetch(vplll, vlll, __ATOMIC_SEQ_CST); } 267 #endif 268 269 { char ret; ret = __atomic_nand_fetch(vpc, vc, __ATOMIC_SEQ_CST); } 270 { short ret; ret = __atomic_nand_fetch(vps, vs, __ATOMIC_SEQ_CST); } 271 { int ret; ret = __atomic_nand_fetch(vpi, vi, __ATOMIC_SEQ_CST); } 272 { long int ret; ret = __atomic_nand_fetch(vpl, vl, __ATOMIC_SEQ_CST); } 273 { long long int ret; ret = __atomic_nand_fetch(vpll, vll, __ATOMIC_SEQ_CST); } 274 #if defined(__SIZEOF_INT128__) 275 { __int128 ret; ret = __atomic_nand_fetch(vplll, vlll, __ATOMIC_SEQ_CST); } 276 #endif 277 278 { char ret; ret = __atomic_xor_fetch(vpc, vc, __ATOMIC_SEQ_CST); } 279 { short ret; ret = __atomic_xor_fetch(vps, vs, __ATOMIC_SEQ_CST); } 280 { int ret; ret = __atomic_xor_fetch(vpi, vi, __ATOMIC_SEQ_CST); } 281 { long int ret; ret = __atomic_xor_fetch(vpl, vl, __ATOMIC_SEQ_CST); } 282 { long long int ret; ret = __atomic_xor_fetch(vpll, vll, __ATOMIC_SEQ_CST); } 283 #if defined(__SIZEOF_INT128__) 284 { __int128 ret; ret = __atomic_xor_fetch(vplll, vlll, __ATOMIC_SEQ_CST); } 285 #endif 286 287 { char ret; ret = __atomic_or_fetch(vpc, vc, __ATOMIC_SEQ_CST); } 288 { short ret; ret = __atomic_or_fetch(vps, vs, __ATOMIC_SEQ_CST); } 289 { int ret; ret = __atomic_or_fetch(vpi, vi, __ATOMIC_SEQ_CST); } 290 { long int ret; 
ret = __atomic_or_fetch(vpl, vl, __ATOMIC_SEQ_CST); } 291 { long long int ret; ret = __atomic_or_fetch(vpll, vll, __ATOMIC_SEQ_CST); } 292 #if defined(__SIZEOF_INT128__) 293 { __int128 ret; ret = __atomic_or_fetch(vplll, vlll, __ATOMIC_SEQ_CST); } 294 #endif 295 296 { char ret; ret = __atomic_fetch_add(vpc, vc, __ATOMIC_SEQ_CST); } 297 { short ret; ret = __atomic_fetch_add(vps, vs, __ATOMIC_SEQ_CST); } 298 { int ret; ret = __atomic_fetch_add(vpi, vi, __ATOMIC_SEQ_CST); } 299 { long int ret; ret = __atomic_fetch_add(vpl, vl, __ATOMIC_SEQ_CST); } 300 { long long int ret; ret = __atomic_fetch_add(vpll, vll, __ATOMIC_SEQ_CST); } 301 #if defined(__SIZEOF_INT128__) 302 { __int128 ret; ret = __atomic_fetch_add(vplll, vlll, __ATOMIC_SEQ_CST); } 303 #endif 304 305 { char ret; ret = __atomic_fetch_sub(vpc, vc, __ATOMIC_SEQ_CST); } 306 { short ret; ret = __atomic_fetch_sub(vps, vs, __ATOMIC_SEQ_CST); } 307 { int ret; ret = __atomic_fetch_sub(vpi, vi, __ATOMIC_SEQ_CST); } 308 { long int ret; ret = __atomic_fetch_sub(vpl, vl, __ATOMIC_SEQ_CST); } 309 { long long int ret; ret = __atomic_fetch_sub(vpll, vll, __ATOMIC_SEQ_CST); } 310 #if defined(__SIZEOF_INT128__) 311 { __int128 ret; ret = __atomic_fetch_sub(vplll, vlll, __ATOMIC_SEQ_CST); } 312 #endif 313 314 { char ret; ret = __atomic_fetch_and(vpc, vc, __ATOMIC_SEQ_CST); } 315 { short ret; ret = __atomic_fetch_and(vps, vs, __ATOMIC_SEQ_CST); } 316 { int ret; ret = __atomic_fetch_and(vpi, vi, __ATOMIC_SEQ_CST); } 317 { long int ret; ret = __atomic_fetch_and(vpl, vl, __ATOMIC_SEQ_CST); } 318 { long long int ret; ret = __atomic_fetch_and(vpll, vll, __ATOMIC_SEQ_CST); } 319 #if defined(__SIZEOF_INT128__) 320 { __int128 ret; ret = __atomic_fetch_and(vplll, vlll, __ATOMIC_SEQ_CST); } 321 #endif 322 323 { char ret; ret = __atomic_fetch_nand(vpc, vc, __ATOMIC_SEQ_CST); } 324 { short ret; ret = __atomic_fetch_nand(vps, vs, __ATOMIC_SEQ_CST); } 325 { int ret; ret = __atomic_fetch_nand(vpi, vi, __ATOMIC_SEQ_CST); } 326 { long int ret; ret 
= __atomic_fetch_nand(vpl, vl, __ATOMIC_SEQ_CST); } 327 { long long int ret; ret = __atomic_fetch_nand(vpll, vll, __ATOMIC_SEQ_CST); } 328 #if defined(__SIZEOF_INT128__) 329 { __int128 ret; ret = __atomic_fetch_nand(vplll, vlll, __ATOMIC_SEQ_CST); } 330 #endif 331 332 { char ret; ret = __atomic_fetch_xor(vpc, vc, __ATOMIC_SEQ_CST); } 333 { short ret; ret = __atomic_fetch_xor(vps, vs, __ATOMIC_SEQ_CST); } 334 { int ret; ret = __atomic_fetch_xor(vpi, vi, __ATOMIC_SEQ_CST); } 335 { long int ret; ret = __atomic_fetch_xor(vpl, vl, __ATOMIC_SEQ_CST); } 336 { long long int ret; ret = __atomic_fetch_xor(vpll, vll, __ATOMIC_SEQ_CST); } 337 #if defined(__SIZEOF_INT128__) 338 { __int128 ret; ret = __atomic_fetch_xor(vplll, vlll, __ATOMIC_SEQ_CST); } 339 #endif 340 341 { char ret; ret = __atomic_fetch_or(vpc, vc, __ATOMIC_SEQ_CST); } 342 { short ret; ret = __atomic_fetch_or(vps, vs, __ATOMIC_SEQ_CST); } 343 { int ret; ret = __atomic_fetch_or(vpi, vi, __ATOMIC_SEQ_CST); } 344 { long int ret; ret = __atomic_fetch_or(vpl, vl, __ATOMIC_SEQ_CST); } 345 { long long int ret; ret = __atomic_fetch_or(vpll, vll, __ATOMIC_SEQ_CST); } 346 #if defined(__SIZEOF_INT128__) 347 { __int128 ret; ret = __atomic_fetch_or(vplll, vlll, __ATOMIC_SEQ_CST); } 348 #endif 349 350 { _Bool ret; ret = __atomic_always_lock_free(sizeof(int), vpi); } 351 { _Bool ret; ret = __atomic_is_lock_free(sizeof(int), vpi); } 474 352 { __atomic_thread_fence(__ATOMIC_SEQ_CST); } 475 353 { __atomic_signal_fence(__ATOMIC_SEQ_CST); } -
tests/concurrent/preempt.cfa
r7768b8d r30763fd 36 36 if( (counter % 7) == this.value ) { 37 37 __cfaabi_check_preemption(); 38 int next = __atomic_add_fetch _4(&counter, 1, __ATOMIC_SEQ_CST);38 int next = __atomic_add_fetch( &counter, 1, __ATOMIC_SEQ_CST ); 39 39 __cfaabi_check_preemption(); 40 40 if( (next % 100) == 0 ) printf("%d\n", (int)next); -
tests/concurrent/signal/wait.cfa
r7768b8d r30763fd 98 98 } 99 99 100 __ sync_fetch_and_sub_4( &waiter_left, 1);100 __atomic_fetch_sub( &waiter_left, 1, __ATOMIC_SEQ_CST ); 101 101 } 102 102 … … 109 109 } 110 110 111 __ sync_fetch_and_sub_4( &waiter_left, 1);111 __atomic_fetch_sub( &waiter_left, 1, __ATOMIC_SEQ_CST ); 112 112 } 113 113 … … 120 120 } 121 121 122 __ sync_fetch_and_sub_4( &waiter_left, 1);122 __atomic_fetch_sub( &waiter_left, 1, __ATOMIC_SEQ_CST ); 123 123 } 124 124 … … 131 131 } 132 132 133 __ sync_fetch_and_sub_4( &waiter_left, 1);133 __atomic_fetch_sub( &waiter_left, 1, __ATOMIC_SEQ_CST ); 134 134 } 135 135 -
tests/heap.cfa
r7768b8d r30763fd 10 10 // Created On : Tue Nov 6 17:54:56 2018 11 11 // Last Modified By : Peter A. Buhr 12 // Last Modified On : Fri Jul 19 08:22:34201913 // Update Count : 1912 // Last Modified On : Sun Nov 24 12:34:51 2019 13 // Update Count : 28 14 14 // 15 15 … … 38 38 enum { NoOfAllocs = 5000, NoOfMmaps = 10 }; 39 39 char * locns[NoOfAllocs]; 40 int i; 40 size_t amount; 41 enum { limit = 64 * 1024 }; // check alignments up to here 41 42 42 43 // check alloc/free … … 74 75 size_t s = (i + 1) * 20; 75 76 char * area = (char *)malloc( s ); 76 if ( area == 0 ) abort( "malloc/free out of memory" );77 if ( area == 0p ) abort( "malloc/free out of memory" ); 77 78 area[0] = '\345'; area[s - 1] = '\345'; // fill first/last 78 79 area[malloc_usable_size( area ) - 1] = '\345'; // fill ultimate byte … … 83 84 size_t s = i + 1; // +1 to make initialization simpler 84 85 locns[i] = (char *)malloc( s ); 85 if ( locns[i] == 0 ) abort( "malloc/free out of memory" );86 if ( locns[i] == 0p ) abort( "malloc/free out of memory" ); 86 87 locns[i][0] = '\345'; locns[i][s - 1] = '\345'; // fill first/last 87 88 locns[i][malloc_usable_size( locns[i] ) - 1] = '\345'; // fill ultimate byte … … 99 100 size_t s = i + default_mmap_start(); // cross over point 100 101 char * area = (char *)malloc( s ); 101 if ( area == 0 ) abort( "malloc/free out of memory" );102 if ( area == 0p ) abort( "malloc/free out of memory" ); 102 103 area[0] = '\345'; area[s - 1] = '\345'; // fill first/last 103 104 area[malloc_usable_size( area ) - 1] = '\345'; // fill ultimate byte … … 108 109 size_t s = i + default_mmap_start(); // cross over point 109 110 locns[i] = (char *)malloc( s ); 110 if ( locns[i] == 0 ) abort( "malloc/free out of memory" );111 if ( locns[i] == 0p ) abort( "malloc/free out of memory" ); 111 112 locns[i][0] = '\345'; locns[i][s - 1] = '\345'; // fill first/last 112 113 locns[i][malloc_usable_size( locns[i] ) - 1] = '\345'; // fill ultimate byte … … 124 125 size_t s = (i + 1) * 20; 125 
126 char * area = (char *)calloc( 5, s ); 126 if ( area == 0 ) abort( "calloc/free out of memory" );127 if ( area == 0p ) abort( "calloc/free out of memory" ); 127 128 if ( area[0] != '\0' || area[s - 1] != '\0' || 128 129 area[malloc_usable_size( area ) - 1] != '\0' || … … 136 137 size_t s = i + 1; 137 138 locns[i] = (char *)calloc( 5, s ); 138 if ( locns[i] == 0 ) abort( "calloc/free out of memory" );139 if ( locns[i] == 0p ) abort( "calloc/free out of memory" ); 139 140 if ( locns[i][0] != '\0' || locns[i][s - 1] != '\0' || 140 141 locns[i][malloc_usable_size( locns[i] ) - 1] != '\0' || … … 155 156 size_t s = i + default_mmap_start(); // cross over point 156 157 char * area = (char *)calloc( 1, s ); 157 if ( area == 0 ) abort( "calloc/free out of memory" );158 if ( area == 0p ) abort( "calloc/free out of memory" ); 158 159 if ( area[0] != '\0' || area[s - 1] != '\0' ) abort( "calloc/free corrupt storage4.1" ); 159 160 if ( area[malloc_usable_size( area ) - 1] != '\0' ) abort( "calloc/free corrupt storage4.2" ); … … 167 168 size_t s = i + default_mmap_start(); // cross over point 168 169 locns[i] = (char *)calloc( 1, s ); 169 if ( locns[i] == 0 ) abort( "calloc/free out of memory" );170 if ( locns[i] == 0p ) abort( "calloc/free out of memory" ); 170 171 if ( locns[i][0] != '\0' || locns[i][s - 1] != '\0' || 171 172 locns[i][malloc_usable_size( locns[i] ) - 1] != '\0' || … … 183 184 // check memalign/free (sbrk) 184 185 185 enum { limit = 64 * 1024 }; // check alignments up to here186 187 186 for ( a; libAlign() ~= limit ~ a ) { // generate powers of 2 188 187 //sout | alignments[a]; 189 188 for ( s; 1 ~ NoOfAllocs ) { // allocation of size 0 can return null 190 189 char * area = (char *)memalign( a, s ); 191 if ( area == 0 ) abort( "memalign/free out of memory" );192 //sout | i | " " |area;190 if ( area == 0p ) abort( "memalign/free out of memory" ); 191 //sout | i | area; 193 192 if ( (size_t)area % a != 0 || malloc_alignment( area ) != a ) { // check for 
initial alignment 194 193 abort( "memalign/free bad alignment : memalign(%d,%d) = %p", (int)a, s, area ); 195 194 } // if 196 area[0] = '\345'; area[s - 1] = '\345'; // fill first/last byte195 area[0] = '\345'; area[s - 1] = '\345'; // fill first/last byte 197 196 area[malloc_usable_size( area ) - 1] = '\345'; // fill ultimate byte 198 197 free( area ); … … 207 206 size_t s = i + default_mmap_start(); // cross over point 208 207 char * area = (char *)memalign( a, s ); 209 if ( area == 0 ) abort( "memalign/free out of memory" );210 //sout | i | " " |area;208 if ( area == 0p ) abort( "memalign/free out of memory" ); 209 //sout | i | area; 211 210 if ( (size_t)area % a != 0 || malloc_alignment( area ) != a ) { // check for initial alignment 212 211 abort( "memalign/free bad alignment : memalign(%d,%d) = %p", (int)a, (int)s, area ); … … 223 222 // initial N byte allocation 224 223 char * area = (char *)calloc( 5, i ); 225 if ( area == 0 ) abort( "calloc/realloc/free out of memory" );224 if ( area == 0p ) abort( "calloc/realloc/free out of memory" ); 226 225 if ( area[0] != '\0' || area[i - 1] != '\0' || 227 226 area[malloc_usable_size( area ) - 1] != '\0' || … … 231 230 for ( s; i ~ 256 * 1024 ~ 26 ) { // start at initial memory request 232 231 area = (char *)realloc( area, s ); // attempt to reuse storage 233 if ( area == 0 ) abort( "calloc/realloc/free out of memory" );232 if ( area == 0p ) abort( "calloc/realloc/free out of memory" ); 234 233 if ( area[0] != '\0' || area[s - 1] != '\0' || 235 234 area[malloc_usable_size( area ) - 1] != '\0' || … … 245 244 size_t s = i + default_mmap_start(); // cross over point 246 245 char * area = (char *)calloc( 1, s ); 247 if ( area == 0 ) abort( "calloc/realloc/free out of memory" );246 if ( area == 0p ) abort( "calloc/realloc/free out of memory" ); 248 247 if ( area[0] != '\0' || area[s - 1] != '\0' || 249 248 area[malloc_usable_size( area ) - 1] != '\0' || … … 253 252 for ( r; i ~ 256 * 1024 ~ 26 ) { // start at initial 
memory request 254 253 area = (char *)realloc( area, r ); // attempt to reuse storage 255 if ( area == 0 ) abort( "calloc/realloc/free out of memory" );254 if ( area == 0p ) abort( "calloc/realloc/free out of memory" ); 256 255 if ( area[0] != '\0' || area[r - 1] != '\0' || 257 256 area[malloc_usable_size( area ) - 1] != '\0' || … … 263 262 // check memalign/realloc/free 264 263 265 size_tamount = 2;264 amount = 2; 266 265 for ( a; libAlign() ~= limit ~ a ) { // generate powers of 2 267 266 // initial N byte allocation 268 267 char * area = (char *)memalign( a, amount ); // aligned N-byte allocation 269 if ( area == 0 ) abort( "memalign/realloc/free out of memory" ); // no storage ?270 //sout | alignments[a] | " " |area;268 if ( area == 0p ) abort( "memalign/realloc/free out of memory" ); // no storage ? 269 //sout | alignments[a] | area; 271 270 if ( (size_t)area % a != 0 || malloc_alignment( area ) != a ) { // check for initial alignment 272 271 abort( "memalign/realloc/free bad alignment : memalign(%d,%d) = %p", (int)a, (int)amount, area ); … … 278 277 if ( area[0] != '\345' || area[s - 2] != '\345' ) abort( "memalign/realloc/free corrupt storage" ); 279 278 area = (char *)realloc( area, s ); // attempt to reuse storage 280 if ( area == 0 ) abort( "memalign/realloc/free out of memory" ); // no storage ?281 //sout | i | " " |area;279 if ( area == 0p ) abort( "memalign/realloc/free out of memory" ); // no storage ? 
280 //sout | i | area; 282 281 if ( (size_t)area % a != 0 ) { // check for initial alignment 283 282 abort( "memalign/realloc/free bad alignment %p", area ); … … 294 293 for ( s; 1 ~ limit ) { // allocation of size 0 can return null 295 294 char * area = (char *)cmemalign( a, 1, s ); 296 if ( area == 0 ) abort( "cmemalign/free out of memory" );297 //sout | i | " " |area;295 if ( area == 0p ) abort( "cmemalign/free out of memory" ); 296 //sout | i | area; 298 297 if ( (size_t)area % a != 0 || malloc_alignment( area ) != a ) { // check for initial alignment 299 298 abort( "cmemalign/free bad alignment : cmemalign(%d,%d) = %p", (int)a, s, area ); … … 313 312 // initial N byte allocation 314 313 char * area = (char *)cmemalign( a, 1, amount ); // aligned N-byte allocation 315 if ( area == 0 ) abort( "cmemalign/realloc/free out of memory" ); // no storage ?316 //sout | alignments[a] | " " |area;314 if ( area == 0p ) abort( "cmemalign/realloc/free out of memory" ); // no storage ? 315 //sout | alignments[a] | area; 317 316 if ( (size_t)area % a != 0 || malloc_alignment( area ) != a ) { // check for initial alignment 318 317 abort( "cmemalign/realloc/free bad alignment : cmemalign(%d,%d) = %p", (int)a, (int)amount, area ); … … 327 326 if ( area[0] != '\345' || area[s - 2] != '\345' ) abort( "cmemalign/realloc/free corrupt storage2" ); 328 327 area = (char *)realloc( area, s ); // attempt to reuse storage 329 if ( area == 0 ) abort( "cmemalign/realloc/free out of memory" ); // no storage ?330 //sout | i | " " |area;328 if ( area == 0p ) abort( "cmemalign/realloc/free out of memory" ); // no storage ? 
329 //sout | i | area; 331 330 if ( (size_t)area % a != 0 || malloc_alignment( area ) != a ) { // check for initial alignment 332 331 abort( "cmemalign/realloc/free bad alignment %p", area ); … … 339 338 free( area ); 340 339 } // for 340 341 // check memalign/realloc with align/free 342 343 amount = 2; 344 for ( a; libAlign() ~= limit ~ a ) { // generate powers of 2 345 // initial N byte allocation 346 char * area = (char *)memalign( a, amount ); // aligned N-byte allocation 347 if ( area == 0p ) abort( "memalign/realloc with align/free out of memory" ); // no storage ? 348 //sout | alignments[a] | area | endl; 349 if ( (size_t)area % a != 0 || malloc_alignment( area ) != a ) { // check for initial alignment 350 abort( "memalign/realloc with align/free bad alignment : memalign(%d,%d) = %p", (int)a, (int)amount, area ); 351 } // if 352 area[0] = '\345'; area[amount - 2] = '\345'; // fill first/penultimate byte 353 354 // Do not start this loop index at 0 because realloc of 0 bytes frees the storage. 355 for ( s; amount ~ 256 * 1024 ) { // start at initial memory request 356 if ( area[0] != '\345' || area[s - 2] != '\345' ) abort( "memalign/realloc/free corrupt storage" ); 357 area = (char *)realloc( area, a * 2, s ); // attempt to reuse storage 358 if ( area == 0p ) abort( "memalign/realloc with align/free out of memory" ); // no storage ? 
359 //sout | i | area | endl; 360 if ( (size_t)area % a * 2 != 0 ) { // check for initial alignment 361 abort( "memalign/realloc with align/free bad alignment %p", area ); 362 } // if 363 area[s - 1] = '\345'; // fill last byte 364 } // for 365 free( area ); 366 } // for 367 368 // check cmemalign/realloc with align/free 369 370 amount = 2; 371 for ( size_t a = libAlign() + libAlign(); a <= limit; a += a ) { // generate powers of 2 372 // initial N byte allocation 373 char *area = (char *)cmemalign( a, 1, amount ); // aligned N-byte allocation 374 if ( area == 0p ) abort( "cmemalign/realloc with align/free out of memory" ); // no storage ? 375 //sout | alignments[a] | area | endl; 376 if ( (size_t)area % a != 0 || malloc_alignment( area ) != a ) { // check for initial alignment 377 abort( "cmemalign/realloc with align/free bad alignment : cmemalign(%d,%d) = %p", (int)a, (int)amount, area ); 378 } // if 379 if ( area[0] != '\0' || area[amount - 1] != '\0' || 380 area[malloc_usable_size( area ) - 1] != '\0' || 381 ! malloc_zero_fill( area ) ) abort( "cmemalign/realloc with align/free corrupt storage1" ); 382 area[0] = '\345'; area[amount - 2] = '\345'; // fill first/penultimate byte 383 384 // Do not start this loop index at 0 because realloc of 0 bytes frees the storage. 385 for ( int s = amount; s < 256 * 1024; s += 1 ) { // start at initial memory request 386 if ( area[0] != '\345' || area[s - 2] != '\345' ) abort( "cmemalign/realloc with align/free corrupt storage2" ); 387 area = (char *)realloc( area, a * 2, s ); // attempt to reuse storage 388 if ( area == 0p ) abort( "cmemalign/realloc with align/free out of memory" ); // no storage ? 
389 //sout | i | area | endl; 390 if ( (size_t)area % a * 2 != 0 || malloc_alignment( area ) != a * 2 ) { // check for initial alignment 391 abort( "cmemalign/realloc with align/free bad alignment %p %jd %jd", area, malloc_alignment( area ), a * 2 ); 392 } // if 393 if ( area[s - 1] != '\0' || area[s - 1] != '\0' || 394 area[malloc_usable_size( area ) - 1] != '\0' || 395 ! malloc_zero_fill( area ) ) abort( "cmemalign/realloc/free corrupt storage3" ); 396 area[s - 1] = '\345'; // fill last byte 397 } // for 398 free( area ); 399 } // for 400 341 401 //sout | "worker" | thisTask() | "successful completion"; 342 402 } // Worker main
Note: See TracChangeset
for help on using the changeset viewer.