Changeset 6cbc5a62 for libcfa/src/collections/list.hfa
- Timestamp:
- Mar 24, 2026, 10:50:51 PM (10 days ago)
- Branches:
- master
- Children:
- e426c6f
- Parents:
- 402f249
- File:
-
- 1 edited
-
libcfa/src/collections/list.hfa (modified) (13 diffs)
Legend:
- Unmodified
- Added
- Removed
-
libcfa/src/collections/list.hfa
r402f249 r6cbc5a62 10 10 // Created On : Wed Apr 22 18:00:00 2020 11 11 // Last Modified By : Peter A. Buhr 12 // Last Modified On : Thu Aug 7 10:46:08 202513 // Update Count : 7512 // Last Modified On : Tue Mar 24 11:25:34 2026 13 // Update Count : 98 14 14 // 15 15 … … 72 72 73 73 // The origin is the position encountered at the start of iteration, signifying, "need to advance to the first element," 74 // and at the end of iteration, signifying, "no more elements." Normal consumption of an iterator runs "advance" as 75 // the first step, and uses the return of "advance" as a guard, before dereferencing the iterator. So normal76 // consumption of an iterator does not dereference an iterator in origin position. The value of a pointer (underlying a77 // reference) that is exposed publicly as an iterator, and also a pointer stored internally in a link field, is tagged, to78 // indicate "is the origin" (internally, is the list-head sentinel node), or untagged, to indicate "is a regular node."79 // Intent is to help a user who dereferences an iterator in origin position (which would be an API-use error on their80 // part), by failing fast.74 // and at the end of iteration, signifying, "no more elements." Normal consumption of an iterator runs "advance" as the 75 // first step, and uses the return of "advance" as a guard, before dereferencing the iterator. So normal consumption of 76 // an iterator does not dereference an iterator in origin position. The value of a pointer (underlying a reference) that 77 // is exposed publicly as an iterator, and also a pointer stored internally in a link field, is tagged, to indicate "is 78 // the origin" (internally, is the list-head sentinel node), or untagged, to indicate "is a regular node." Intent is to 79 // help a user who dereferences an iterator in origin position (which would be an API-use error on their part), by 80 // failing fast. 
81 81 82 82 #ifdef __EXPERIMENTAL_DISABLE_OTAG__ // Perf experimention alt mode 83 83 84 // With origin tagging disabled, iteration never reports "no more elements."85 // In this mode, the list API is buggy.86 // This mode is used to quantify the cost of the normal tagging scheme.87 88 #define ORIGIN_TAG_SET(p) (p)89 #define ORIGIN_TAG_CLEAR(p) (p)90 #define ORIGIN_TAG_QUERY(p) 091 #define ORIGIN_TAG_ASGN(p, v) (p)92 #define ORIGIN_TAG_EITHER(p, v) (p)93 #define ORIGIN_TAG_NEQ(v1, v2) 084 // With origin tagging disabled, iteration never reports "no more elements." 85 // In this mode, the list API is buggy. 86 // This mode is used to quantify the cost of the normal tagging scheme. 87 88 #define ORIGIN_TAG_SET(p) (p) 89 #define ORIGIN_TAG_CLEAR(p) (p) 90 #define ORIGIN_TAG_QUERY(p) 0 91 #define ORIGIN_TAG_ASGN(p, v) (p) 92 #define ORIGIN_TAG_EITHER(p, v) (p) 93 #define ORIGIN_TAG_NEQ(v1, v2) 0 94 94 95 95 #else // Normal 96 96 97 #if defined( __x86_64 )98 // Preferred case: tag in the most-significant bit. Dereference99 // has been shown to segfault consistently. Maintenance should100 // list more architectures as "ok" here, to let them use the101 // preferred case, when valid.102 #define ORIGIN_TAG_BITNO ( 8 * sizeof( size_t ) - 1 )103 #else104 // Fallback case: tag in the least-significant bit. Dereference105 // will often give an alignment error, but may not, e.g. if106 // accessing a char-typed member. 32-bit x86 uses the most-107 // significant bit for real room on the heap.108 #define ORIGIN_TAG_BITNO 0109 #endif110 111 #define ORIGIN_TAG_MASK (((size_t)1) << ORIGIN_TAG_BITNO)112 113 #define ORIGIN_TAG_SET(p) ((p) | ORIGIN_TAG_MASK)114 #define ORIGIN_TAG_CLEAR(p) ((p) & ~ORIGIN_TAG_MASK)115 #define ORIGIN_TAG_QUERY(p) ((p) & ORIGIN_TAG_MASK)116 117 #define ORIGIN_TAG_ASGN(p, v) ( \118 verify( ! ORIGIN_TAG_QUERY(p) && "p had no tagbit" ), \119 ORIGIN_TAG_EITHER((p), (v)) \120 )121 122 #define ORIGIN_TAG_EITHER(p, v) ( \123 verify( ! 
ORIGIN_TAG_CLEAR(v) && "v is a pure tagbit" ), \124 ( (p) | (v) ) \125 )126 127 #define ORIGIN_TAG_NEQ(v1, v2) ( \128 verify( ! ORIGIN_TAG_CLEAR(v1) && "v1 is a pure tagbit" ), \129 verify( ! ORIGIN_TAG_CLEAR(v2) && "v2 is a pure tagbit" ), \130 ( (v1) ^ (v2) ) \131 )97 #if defined( __x86_64 ) 98 // Preferred case: tag in the most-significant bit. Dereference 99 // has been shown to segfault consistently. Maintenance should 100 // list more architectures as "ok" here, to let them use the 101 // preferred case, when valid. 102 #define ORIGIN_TAG_BITNO ( 8 * sizeof( size_t ) - 1 ) 103 #else 104 // Fallback case: tag in the least-significant bit. Dereference 105 // will often give an alignment error, but may not, e.g. if 106 // accessing a char-typed member. 32-bit x86 uses the most- 107 // significant bit for real room on the heap. 108 #define ORIGIN_TAG_BITNO 0 109 #endif 110 111 #define ORIGIN_TAG_MASK (((size_t)1) << ORIGIN_TAG_BITNO) 112 113 #define ORIGIN_TAG_SET(p) ((p) | ORIGIN_TAG_MASK) 114 #define ORIGIN_TAG_CLEAR(p) ((p) & ~ORIGIN_TAG_MASK) 115 #define ORIGIN_TAG_QUERY(p) ((p) & ORIGIN_TAG_MASK) 116 117 #define ORIGIN_TAG_ASGN(p, v) ( \ 118 verify( ! ORIGIN_TAG_QUERY(p) && "p had no tagbit" ), \ 119 ORIGIN_TAG_EITHER((p), (v)) \ 120 ) 121 122 #define ORIGIN_TAG_EITHER(p, v) ( \ 123 verify( ! ORIGIN_TAG_CLEAR(v) && "v is a pure tagbit" ), \ 124 ( (p) | (v) ) \ 125 ) 126 127 #define ORIGIN_TAG_NEQ(v1, v2) ( \ 128 verify( ! ORIGIN_TAG_CLEAR(v1) && "v1 is a pure tagbit" ), \ 129 verify( ! ORIGIN_TAG_CLEAR(v2) && "v2 is a pure tagbit" ), \ 130 ( (v1) ^ (v2) ) \ 131 ) 132 132 133 133 #endif 134 134 135 135 136 137 136 #ifdef __EXPERIMENTAL_LOOSE_SINGLES__ // Perf experimention alt mode 138 137 139 // In loose-singles mode, the ability to answer an "is listed" query is disabled, as is "to insert an element,140 // it must not be listed already" checking. 
The user must know separately whether an element is listed.141 // Other than inserting it, any list-api action on an unlisted element is undefined. Notably, list iteration142 // starting from an unlisted element is not defined to respond "no more elements," and may instead continue143 // iterating from a formerly occupied list position. This mode matches LQ usage.144 145 #define NOLOOSE(...)146 #define LOOSEONLY(...) __VA_ARGS__138 // In loose-singles mode, the ability to answer an "is listed" query is disabled, as is "to insert an element, 139 // it must not be listed already" checking. The user must know separately whether an element is listed. 140 // Other than inserting it, any list-api action on an unlisted element is undefined. Notably, list iteration 141 // starting from an unlisted element is not defined to respond "no more elements," and may instead continue 142 // iterating from a formerly occupied list position. This mode matches LQ usage. 143 144 #define NOLOOSE(...) 145 #define LOOSEONLY(...) __VA_ARGS__ 147 146 148 147 #else // Normal 149 148 150 #define NOLOOSE(...) __VA_ARGS__151 #define LOOSEONLY(...)149 #define NOLOOSE(...) __VA_ARGS__ 150 #define LOOSEONLY(...) 152 151 153 152 #endif … … 190 189 static inline forall( tE &, tLinks & | embedded( tE, tLinks, dlink( tE ) ) ) { 191 190 bool isListed( tE & node ) { 192 NOLOOSE(191 NOLOOSE( 193 192 verify( &node != 0p ); 194 193 dlink( tE ) & node_links = node`inner; … … 196 195 ) 197 196 LOOSEONLY( 198 verify(false && "isListed is undefined");197 verify(false && "isListed is undefined"); 199 198 return true; 200 199 ) … … 224 223 225 224 dlink( tE ) & linkToInsert = node`inner; 226 NOLOOSE(225 NOLOOSE( 227 226 verify( linkToInsert.next == 0p ); 228 227 verify( linkToInsert.prev == 0p ); … … 242 241 return node; 243 242 } 243 // FIXME: Change from pointer to reference for node, when tuple type can handle references. 244 forall( List ... 
| { void insert_before( tE & before, List ); } ) 245 void insert_before( tE & before, tE * node, List args ) { 246 insert_before( before, *node ); 247 insert_before( before, args ); 248 } 249 void insert_before( tE & before, tE * node ) { 250 insert_before( before, *node ); 251 } 244 252 245 253 tE & insert_after( tE & after, tE & node ) { … … 248 256 249 257 dlink( tE ) & linkToInsert = node`inner; 250 NOLOOSE(258 NOLOOSE( 251 259 verify( linkToInsert.prev == 0p ); 252 260 verify( linkToInsert.next == 0p ); … … 265 273 asm( "" : : : "memory" ); 266 274 return node; 275 } 276 // FIXME: Change from pointer to reference for node, when tuple type can handle references. 277 forall( List ... | { void insert_after( tE & after, List ); } ) 278 void insert_after( tE & after, tE * node, List args ) { 279 insert_after( after, *node ); 280 insert_after( after, args ); 281 } 282 void insert_after( tE & after, tE * node ) { 283 insert_after( after, *node ); 267 284 } 268 285 … … 281 298 after_links.prev = &before_raw; 282 299 283 NOLOOSE(300 NOLOOSE( 284 301 asm( "" : : : "memory" ); 285 302 list_pos_links.prev = 0p; … … 288 305 ) 289 306 return node; 307 } 308 // FIXME: Change from pointer to reference for node, when tuple type can handle references. 309 forall( List ... | { void remove( List ); } ) 310 void remove( tE * node, List args ) { 311 remove( *node ); 312 remove( args ); 313 } 314 void remove( tE * node ) { 315 remove( *node ); 290 316 } 291 317 … … 309 335 } 310 336 311 bool isFirst( tE & node ) {312 return recede( node );313 }314 315 bool isLast( tE & node ) {316 return advance( node );337 bool isFirst( tE & node ) { 338 return recede( node ); 339 } 340 341 bool isLast( tE & node ) { 342 return advance( node ); 317 343 } 318 344 … … 331 357 return node; 332 358 } 359 // FIXME: Change from pointer to reference for node, when tuple type can handle references. 360 forall( List ... 
| { void insert_first( dlist( tE, tLinks ) & list, List ); } ) 361 void insert_first( dlist( tE, tLinks ) & list, tE * node, List args ) { 362 insert_first( list, *node ); 363 insert_first( list, args ); 364 } 365 void insert_first( dlist( tE, tLinks ) & list, tE * node ) { 366 insert_first( list, *node ); 367 } 333 368 334 369 tE & insert_last( dlist( tE, tLinks ) & list, tE & node ) { … … 336 371 return node; 337 372 } 373 // FIXME: Change from pointer to reference for node, when tuple type can handle references. 374 forall( List ... | { void insert_last( dlist( tE, tLinks ) & list, List ); } ) 375 void insert_last( dlist( tE, tLinks ) & list, tE * node, List args ) { 376 insert_last( list, *node ); 377 insert_last( list, args ); 378 } 379 void insert_last( dlist( tE, tLinks ) & list, tE * node ) { 380 insert_last( list, *node ); 381 } 382 338 383 tE & insert( dlist( tE, tLinks ) & list, tE & node ) { // synonym for insert_last 339 384 insert_last( list, node ); 340 385 return node; 386 } 387 // FIXME: Change from pointer to reference for node, when tuple type can handle references. 388 forall( List ... | { void insert( dlist( tE, tLinks ) & list, List ); } ) 389 void insert( dlist( tE, tLinks ) & list, tE * node, List args ) { 390 insert( list, *node ); 391 insert( list, args ); 392 } 393 void insert( dlist( tE, tLinks ) & list, tE * node ) { 394 insert( list, *node ); 341 395 } 342 396
Note:
See TracChangeset
for help on using the changeset viewer.