source: libcfa/src/collections/list2.hfa@ bb9897c

Last change on this file since bb9897c was e6e250d, checked in by Peter A. Buhr <pabuhr@…>, 12 days ago

3rd attempt at harmonizing isOp functions, e.g., isListed, isFirst, isLast

  • Property mode set to 100644
File size: 26.9 KB
Line 
1// A revision of <collections/list.hfa>.
2// Supports headless (and headed) lists.
3// It's basically all good, and should become the official libcfa one, except:
4// - perf tests compare this one ("thesis's work") to libcfa ("old, before supporting headless")
5// - in libcfa, some uses of list.hfa make assumptions about list's private representation, which need fixing
6
7
8
9
10//
11// Cforall Version 1.0.0 Copyright (C) 2020 University of Waterloo
12//
13// The contents of this file are covered under the licence agreement in the
14// file "LICENCE" distributed with Cforall.
15//
// list -- lets a user-defined struct form intrusive linked lists
17//
18// Author : Michael Brooks
19// Created On : Wed Apr 22 18:00:00 2020
20// Last Modified By : Peter A. Buhr
21// Last Modified On : Sun Mar 29 22:59:31 2026
22// Update Count : 15
23//
24
25#pragma once
26
27#include <assert.h>
28
// A reference to T, decorated at the type level with Decorator, which records
// the plan-9 embedding path by which the T was reached.  Carries no runtime
// state beyond the reference itself.
forall( Decorator &, T & )
struct tytagref {
    inline T &;
};
33
// Trait: tInner is embedded in tOuter, reachable via the intermediate type tMid.
// Satisfied by providing ?`inner, which maps an outer object to a path-decorated
// reference of its embedded inner part.
forall( tOuter &, tMid &, tInner & )
trait embedded {
    tytagref( tMid, tInner ) ?`inner( tOuter & );
};
38
// embedded is reflexive, with no info (void) as the type tag:
// every T trivially contains itself.
forall( T & )
static inline tytagref(void, T) ?`inner ( T & this ) { tytagref( void, T ) ret = {this}; return ret; }
42
43
//
// P9_EMBEDDED: Use on every case of plan-9 inheritance, to make "implements embedded" be a closure of plan-9 inheritance.
//
// struct foo {
//     int a, b, c;
//     inline (bar);
// };
// P9_EMBEDDED( foo, bar )
//

// usual version, for structs that are top-level declarations
#define P9_EMBEDDED( derived, immedBase ) P9_EMBEDDED_DECL_( derived, immedBase, static ) P9_EMBEDDED_BDY_( immedBase )

// special version, for structs that are declared in functions
#define P9_EMBEDDED_INFUNC( derived, immedBase ) P9_EMBEDDED_DECL_( derived, immedBase, ) P9_EMBEDDED_BDY_( immedBase )

// forward declarations of both the above; generally not needed
// may help you control where the P9_EMBEDDED cruft goes, in case "right after the struct" isn't where you want it
#define P9_EMBEDDED_FWD( derived, immedBase ) P9_EMBEDDED_DECL_( derived, immedBase, static ) ;
#define P9_EMBEDDED_FWD_INFUNC( derived, immedBase ) auto P9_EMBEDDED_DECL_( derived, immedBase, ) ;

// private helpers

// Declares "derived reaches Tbase" for every Tbase that immedBase can already
// reach (the TdiscardPath assertion supplies the transitive step).
#define P9_EMBEDDED_DECL_( derived, immedBase, STORAGE ) \
    forall( Tbase &, TdiscardPath & | { tytagref( TdiscardPath, Tbase ) ?`inner( immedBase & ); } ) \
    STORAGE inline tytagref( immedBase, Tbase ) ?`inner( derived & this )

// Body: hop derived -> immedBase (plan-9 conversion), then immedBase -> Tbase
// (recursive ?`inner), re-wrapping with immedBase as the recorded path step.
#define P9_EMBEDDED_BDY_( immedBase ) { \
    immedBase & ib = this; \
    Tbase & b = ib`inner; \
    tytagref( immedBase, Tbase ) result = { b }; \
    return result; \
    }

// Explicitly selects the embedding path MID when navigating from OUTER to INNER.
#define EMBEDDED_VIA( OUTER, MID, INNER ) (struct { tytagref( MID, INNER ) ( * ?`inner ) ( OUTER & ); }){ ?`inner }

// Selects which dlink field (TLINK) of TE to use, for elements that sit on several lists.
#define DLINK_VIA( TE, TLINK ) EMBEDDED_VIA( TE, TLINK, dlink( TE ) )
80
81
// The origin is the position encountered at the start of iteration, signifying, "need to advance to the first element,"
// and at the end of iteration, signifying, "no more elements." Normal consumption of an iterator runs "advance" as the
// first step, and uses the return of "advance" as a guard, before dereferencing the iterator. So normal consumption of
// an iterator does not dereference an iterator in origin position. The value of a pointer (underlying a reference) that
// is exposed publicly as an iterator, and also a pointer stored internally in a link field, is tagged, to indicate "is
// the origin" (internally, is the list-head sentinel node), or untagged, to indicate "is a regular node." Intent is to
// help a user who dereferences an iterator in origin position (which would be an API-use error on their part), by
// failing fast.
90
#ifdef __EXPERIMENTAL_DISABLE_OTAG__ // Perf experimentation alt mode

    // With origin tagging disabled, iteration never reports "no more elements."
    // In this mode, the list API is buggy.
    // This mode is used to quantify the cost of the normal tagging scheme.

    #define ORIGIN_TAG_ENABL(p) (p)
    #define ORIGIN_TAG_CLEAR(p) (p)
    #define ORIGIN_TAG_QUERY(p) 0
    #define ORIGIN_TAG_ASGN(p, v) (p)
    #define ORIGIN_TAG_EITHER(p, v) (p)
    #define ORIGIN_TAG_NEQ(v1, v2) 0

    #define TAGSONLY(...)
    #define NOTAGS(...) __VA_ARGS__

#else // Normal

    #if defined( __x86_64 )
        // Preferred case: tag in the most-significant bit. Dereference
        // has been shown to segfault consistently. Maintenance should
        // list more architectures as "ok" here, to let them use the
        // preferred case, when valid.
        #define ORIGIN_TAG_BITNO ( 8 * sizeof( size_t ) - 1 )
    #else
        // Fallback case: tag in the least-significant bit. Dereference
        // will often give an alignment error, but may not, e.g. if
        // accessing a char-typed member. 32-bit x86 uses the most-
        // significant bit for real room on the heap.
        #define ORIGIN_TAG_BITNO 0
    #endif

    #define ORIGIN_TAG_MASK (((size_t)1) << ORIGIN_TAG_BITNO)

    // set / clear / test the origin-tag bit of an address-sized value
    #define ORIGIN_TAG_ENABL(p) ((p) | ORIGIN_TAG_MASK)
    #define ORIGIN_TAG_CLEAR(p) ((p) & ~ORIGIN_TAG_MASK)
    #define ORIGIN_TAG_QUERY(p) ((p) & ORIGIN_TAG_MASK)

    // combine untagged address p with tag value v (v must be 0 or the pure tag bit)
    #define ORIGIN_TAG_ASGN(p, v) ( \
        verify( ! ORIGIN_TAG_QUERY(p) && "p had no tagbit" ), \
        ORIGIN_TAG_EITHER((p), (v)) \
    )

    // or-in tag value v, without asserting that p is currently untagged
    #define ORIGIN_TAG_EITHER(p, v) ( \
        verify( ! ORIGIN_TAG_CLEAR(v) && "v is a pure tagbit" ), \
        ( (p) | (v) ) \
    )

    // xor of two pure tag values: nonzero iff exactly one is set
    #define ORIGIN_TAG_NEQ(v1, v2) ( \
        verify( ! ORIGIN_TAG_CLEAR(v1) && "v1 is a pure tagbit" ), \
        verify( ! ORIGIN_TAG_CLEAR(v2) && "v2 is a pure tagbit" ), \
        ( (v1) ^ (v2) ) \
    )

    #define TAGSONLY(...) __VA_ARGS__
    #define NOTAGS(...)

#endif
149
150
#ifdef __EXPERIMENTAL_LOOSE_SINGLES__ // Perf experimentation alt mode

    // In loose-singles mode, the ability to answer an "is listed" query is disabled, as is "to insert an element,
    // it must not be listed already" checking. The user must know separately whether an element is listed.
    // Other than inserting it, any list-api action on an unlisted element is undefined. Notably, list iteration
    // starting from an unlisted element is not defined to respond "no more elements," and may instead continue
    // iterating from a formerly occupied list position. This mode matches LQ usage.

    #define NOLOOSE(...)
    #define LOOSEONLY(...) __VA_ARGS__

#else // Normal

    #define NOLOOSE(...) __VA_ARGS__
    #define LOOSEONLY(...)

#endif
168
169
// struct workaround0_t {};

forall( tE & ) {
    struct dlink;

    // do not use; presence of the field declaration unblocks ability to define dlink (#304)
    struct __dlink_selfref_workaround_t {
        dlink(tE) *ref_notSelfRef;
    };

    // Intrusive list links.  Representation invariant (normal mode): in an
    // unlisted element both fields self-point with the origin tag set; in a
    // listed element they point at the neighbouring links, tagged when the
    // neighbour is the origin (list-head sentinel).
    struct dlink {
        dlink(tE) *next; // TODO: rename with $
        dlink(tE) *prev;
    };

    // Construct an unlisted dlink: both links self-point, origin-tagged.
    // In loose-singles mode the links are left uninitialized.
    static inline void ?{}( dlink(tE) & this ) {
        NOLOOSE(
            dlink(tE) * toSelf = & this;
            size_t toSelfNum = (size_t) toSelf;
            size_t toSelfNumTagged = ORIGIN_TAG_ENABL( toSelfNum );
            dlink(tE) * toSelfPtrTagged = (dlink(tE) *) toSelfNumTagged;
            toSelf = toSelfPtrTagged;
            (this.next){ toSelf };
            (this.prev){ toSelf };
        )
    }

    // You can "copy" a dlink. But the result won't be linked.
    // Lets you copy what you inline the dlink into.
    static inline void ?{}( dlink(tE) & this, dlink(tE) ) {
        this{};
    }

    forall( tLinks & = dlink(tE) | embedded(tE, tLinks, dlink(tE)) ) {
        // A headed list: the head is itself a dlink, acting as the sentinel
        // (origin) node sitting between last and first.
        struct dlist {
            inline dlink(tE);
        };

        // Compute the tagged element address that represents the list head as
        // an iterator position: back up from the head's link address by the
        // link-within-element offset, then set the origin tag.
        static inline tE * $get_list_origin_addr( dlist(tE, tLinks) & list ) {
            dlink(tE) & link_from_null = ( * (tE *) 0p )`inner; // offset-of via the null-pointer trick
            ptrdiff_t link_offset = (ptrdiff_t) & link_from_null;
            size_t origin_addr = ((size_t) & list) - link_offset;
            size_t preResult = ORIGIN_TAG_ENABL( origin_addr );
            return (tE *)preResult;
        }

        // Construct an empty list: the head's links self-point, tagged --
        // the same representation as an unlisted element's links.
        static inline void ?{}( dlist(tE, tLinks) & this ) {
            NOLOOSE(
                ( (dlink(tE) &) this ){};
            )
            LOOSEONLY(
                dlink(tE) * listOrigin = (dlink(tE) *) $get_list_origin_addr( this );
                dlink( tE ) & thisl = this;
                (thisl.prev) = listOrigin;
                (thisl.next) = listOrigin;
            )
        }
    }
}
229
230
forall( tE & ) {
#ifdef __EXPERIMENTAL_DISABLE_OTAG__ // Perf experimentation alt mode
    // tagging disabled: nothing is ever reported as the origin
    static inline size_t origin_tag_query_arith$( tE & raw ) {
        return 0;
    }
    static inline tE & nullif$( tE & val, size_t arith_ctrl ) {
        verify( arith_ctrl == 0 ); (void) arith_ctrl;
        return val;
    }
#else // Normal
    // like ORIGIN_TAG_QUERY, but return is arithmetic number 0 or 1 (rather than 0 or non-0)
    static inline size_t origin_tag_query_arith$( tE & raw ) {
        size_t ret = (((size_t) & raw) >> ORIGIN_TAG_BITNO) & 1;
        verify( ORIGIN_TAG_QUERY( (size_t) & raw ) ? ret == 1 : ret == 0 );
        return ret;
    }
    // Requires arith_ctrl being 0 or 1.
    // When 0, passes val through; when 1, returns null reference.
    // Importantly, implemented without jumps or tests.
    static inline tE & nullif$( tE & val, size_t arith_ctrl ) {
        verify( ! ORIGIN_TAG_QUERY( (size_t) & val ) );
        verify( arith_ctrl == 0 || arith_ctrl == 1 );
        size_t mask_ctrl = ~ - arith_ctrl; // 0 -> all-ones mask (keep); 1 -> all-zeros mask (null)
        verify( arith_ctrl == 0 && mask_ctrl == -1 || arith_ctrl == 1 && mask_ctrl ==0 );
        tE & ret = * (tE*) ( ((size_t) & val) & mask_ctrl);
        verify( arith_ctrl == 0 && &ret == &val || arith_ctrl == 1 && &ret == 0p );
        return ret;
    }
#endif
}
261
// Compile-time memory (cmem) barrier
// Prevents the optimizer from reordering instructions across it
// Originally included for correctness, though a broken state is not known to be reproducible.
// Found to have a critical impact on performance:
// - in the positions given by default: generally optimal
// - absent: sometimes much slower, depending on the test harness
// - in positions (that may be influenced by a principle but) that are arbitrary wrt microarchitecture: typically, much slower
#ifdef __EXPERIMENTAL_DISABLE_CMEM_BARRIER__
// upon request, disable cmem barriers
#define MAYBE_CMEM_BARRIER
#else
// by default, enable cmem barriers (compiler-only fence; emits no instructions)
#define MAYBE_CMEM_BARRIER asm( "" : : : "memory" )
#endif

// Insert read (location)
// One of the read operations that occurs during an insert operation was found to be performance-critical under certain harnesses.
// Arguably, the position should not matter if cmem barriers are off. Treating the factors as independent allows for measuring this idea.
#ifdef __EXPERIMENTAL_DELAY_INSERT_READ__
// upon request: do the read late (between the cmem barriers); this location is where the read was originally found when this insert read first became a performance-perturbing hypothesis
#define MAYBE_INSERT_READ_EARLY(...)
#define MAYBE_INSERT_READ_LATE(...) __VA_ARGS__
#else
// by default: do the read early (before the first cmem barrier); better performance has been seen here
#define MAYBE_INSERT_READ_EARLY(...) __VA_ARGS__
#define MAYBE_INSERT_READ_LATE(...)
#endif
289
290forall( tE &, tLinks & | embedded( tE, tLinks, dlink(tE) ) ) {
// Insert to_insert immediately before list_pos.  list_pos may carry the
// origin tag (inserting "before the origin" of a headed list appends at the
// tail).  In normal mode, to_insert must be unlisted: its links self-point,
// origin-tagged.
static inline void insert_before(tE & list_pos, tE &to_insert) {
    size_t list_pos_tag = ORIGIN_TAG_QUERY((size_t) & list_pos); // a request to insert before the origin is fine
    tE & list_pos_real = * (tE *) ORIGIN_TAG_CLEAR((size_t) & list_pos);
    verify (&list_pos_real != 0p);

    verify (&to_insert != 0p);
    NOLOOSE(
        verify(! ORIGIN_TAG_QUERY((size_t) & to_insert));
    )
    dlink(tE) & linkToInsert = to_insert`inner;
    NOLOOSE(
        // unlisted invariant: both links self-point, origin tag set
        TAGSONLY(
            verify(ORIGIN_TAG_QUERY((size_t)linkToInsert.prev));
            verify(ORIGIN_TAG_QUERY((size_t)linkToInsert.next));
        )
        verify(ORIGIN_TAG_CLEAR((size_t)linkToInsert.prev) == (size_t)&linkToInsert);
        verify(ORIGIN_TAG_CLEAR((size_t)linkToInsert.next) == (size_t)&linkToInsert);
    )
    dlink(tE) & list_pos_links = list_pos_real`inner;
    MAYBE_INSERT_READ_EARLY(
        dlink(tE) & beforeLinks = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) list_pos_links.prev );
    )
    MAYBE_CMEM_BARRIER;
    // new element's next points at list_pos, carrying list_pos's origin tag
    size_t list_pos_links_num = (size_t)(& list_pos_links);
    size_t to_insert_next_num = ORIGIN_TAG_ASGN(list_pos_links_num, list_pos_tag);
    dlink(tE) * to_insert_next = (dlink(tE)*)to_insert_next_num;
    linkToInsert.next = to_insert_next;
    linkToInsert.prev = list_pos_links.prev; // inherit the predecessor pointer (and its tag) verbatim
    MAYBE_INSERT_READ_LATE(
        dlink(tE) & beforeLinks = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) list_pos_links.prev );
    )
    // predecessor's next tag: xor of the caller's tag and the tag it already
    // carried (combines the "list_pos was origin" cases; see scheme above)
    size_t beforeLinks_next_tag = ORIGIN_TAG_QUERY((size_t)beforeLinks.next);
    size_t linkToInsert_num = (size_t)(& linkToInsert);
    size_t beforeLinks_next_num = ORIGIN_TAG_ASGN(linkToInsert_num, ORIGIN_TAG_NEQ(list_pos_tag, beforeLinks_next_tag));
    beforeLinks.next = (dlink(tE)*)(beforeLinks_next_num);
    list_pos_links.prev = &linkToInsert; // new element is a regular node: untagged
    MAYBE_CMEM_BARRIER;
}
// FIXME: Change from pointer to reference for node, when tuple type can handle references.
// Variadic convenience: insert each node in args before `before`, left to right.
forall( List ... | { void insert_before( tE & before, List ); } )
void insert_before( tE & before, tE * node, List args ) {
    insert_before( before, *node );
    insert_before( before, args );
}
void insert_before( tE & before, tE * node ) {
    insert_before( before, *node );
}
338
// Insert to_insert immediately after list_pos.  Mirror image of
// insert_before: list_pos may carry the origin tag (inserting "after the
// origin" prepends at the head); to_insert must be unlisted in normal mode.
static inline void insert_after(tE & list_pos, tE &to_insert) {
    size_t list_pos_tag = ORIGIN_TAG_QUERY((size_t) & list_pos); // a request to insert after the origin is fine
    tE & list_pos_real = * (tE *) ORIGIN_TAG_CLEAR((size_t) & list_pos);
    verify (&list_pos_real != 0p);

    verify (&to_insert != 0p);
    NOLOOSE(
        verify(! ORIGIN_TAG_QUERY((size_t) & to_insert));
    )
    dlink(tE) & linkToInsert = to_insert`inner;
    NOLOOSE(
        // unlisted invariant: both links self-point, origin tag set
        TAGSONLY(
            verify(ORIGIN_TAG_QUERY((size_t)linkToInsert.prev));
            verify(ORIGIN_TAG_QUERY((size_t)linkToInsert.next));
        )
        verify(ORIGIN_TAG_CLEAR((size_t)linkToInsert.prev) == (size_t)&linkToInsert);
        verify(ORIGIN_TAG_CLEAR((size_t)linkToInsert.next) == (size_t)&linkToInsert);
    )
    dlink(tE) & list_pos_links = list_pos_real`inner;
    MAYBE_INSERT_READ_EARLY(
        dlink(tE) & afterLinks = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) list_pos_links.next );
    )
    MAYBE_CMEM_BARRIER;
    // new element's prev points at list_pos, carrying list_pos's origin tag
    size_t list_pos_links_num = (size_t)(& list_pos_links);
    size_t to_insert_prev_num = ORIGIN_TAG_ASGN(list_pos_links_num, list_pos_tag);
    dlink(tE) * to_insert_prev = (dlink(tE)*)to_insert_prev_num;
    linkToInsert.prev = to_insert_prev;
    linkToInsert.next = list_pos_links.next; // inherit the successor pointer (and its tag) verbatim
    MAYBE_INSERT_READ_LATE(
        dlink(tE) & afterLinks = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) list_pos_links.next );
    )
    // successor's prev tag: xor of the caller's tag and the tag it already carried
    size_t afterLinks_prev_tag = ORIGIN_TAG_QUERY((size_t)afterLinks.prev);
    size_t linkToInsert_num = (size_t)(& linkToInsert);
    size_t afterLinks_prev_num = ORIGIN_TAG_ASGN(linkToInsert_num, ORIGIN_TAG_NEQ(list_pos_tag, afterLinks_prev_tag));
    afterLinks.prev = (dlink(tE)*)(afterLinks_prev_num);
    list_pos_links.next = &linkToInsert; // new element is a regular node: untagged
    MAYBE_CMEM_BARRIER;
}
// FIXME: Change from pointer to reference for node, when tuple type can handle references.
// Variadic convenience: insert each node in args after `after`, left to right.
forall( List ... | { void insert_after( tE & after, List ); } )
void insert_after( tE & after, tE * node, List args ) {
    insert_after( after, *node );
    insert_after( after, args );
}
void insert_after( tE & after, tE * node ) {
    insert_after( after, *node );
}
386
// Unlink list_pos from its list and return it.  list_pos must be a regular
// (untagged) element.  The neighbours are re-linked around it, each keeping
// whatever origin tag its pointer to list_pos carried.  Afterwards (normal
// mode) list_pos's links are reset to the unlisted state: self, tagged.
static inline tE & remove(tE & list_pos) {
    verify( ! ORIGIN_TAG_QUERY((size_t) & list_pos) );
    verify (&list_pos != 0p);

    dlink(tE) & list_pos_links = list_pos`inner;
    dlink(tE) & before_raw = * list_pos_links.prev; // possibly tagged (predecessor may be origin)
    dlink(tE) & before_links = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) & before_raw );
    size_t before_links_next_tag = ORIGIN_TAG_QUERY( (size_t) (before_links.next) );

    dlink(tE) & after_raw = * list_pos_links.next; // possibly tagged (successor may be origin)
    dlink(tE) & after_links = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) & after_raw );
    size_t after_links_prev_tag = ORIGIN_TAG_QUERY( (size_t) (after_links.prev) );

    // re-link neighbours, preserving the tags their old pointers carried
    size_t before_links_next_rslt = ORIGIN_TAG_EITHER( ((size_t) & after_raw), before_links_next_tag );
    size_t after_links_prev_rslt = ORIGIN_TAG_EITHER( ((size_t) & before_raw), after_links_prev_tag );
    before_links.next = (dlink(tE) *) before_links_next_rslt;
    after_links.prev = (dlink(tE) *) after_links_prev_rslt;

    NOLOOSE(
        MAYBE_CMEM_BARRIER;
        // reset removed element's links to the unlisted (self, tagged) state
        size_t list_pos_links_num = (size_t) &list_pos_links;
        size_t list_pos_links_tagged_num = ORIGIN_TAG_ENABL( list_pos_links_num );
        list_pos_links.next = list_pos_links.prev = (dlink(tE)*) list_pos_links_tagged_num;
        MAYBE_CMEM_BARRIER;
    )
    return list_pos;
}
// FIXME: Change from pointer to reference for node, when tuple type can handle references.
// Variadic convenience: remove each node in args, left to right.
forall( List ... | { void remove( List ); } )
void remove( tE * node, List args ) {
    remove( *node );
    remove( args );
}
void remove( tE * node ) {
    remove( *node );
}
423
// Convert a (path-decorated) link reference back to its containing element,
// by subtracting the link-within-element offset, computed with the
// null-pointer offset-of trick.
static inline tE & downcast$( tytagref( tLinks, dlink(tE) ) ref ) {
    dlink(tE) & lnk = ref;
    dlink(tE) & link_from_null = ( * (tE *) 0p )`inner;
    ptrdiff_t link_offset = (ptrdiff_t) & link_from_null;
    size_t elm_addr = ((size_t) & lnk) - link_offset;
    return * (tE*) elm_addr;
}
431
// First element of list, or a null reference when the list is empty (then the
// head's next carries the origin tag, pointing back at the head itself).
static inline tE & first( dlist(tE, tLinks) & list ) {
    verify (&list != 0p);
    dlink(tE) * firstLnk = list.next;
    if (ORIGIN_TAG_QUERY((size_t)firstLnk)) return * 0p;
    tytagref( tLinks, dlink(tE) ) firstLnkTagged = {*firstLnk};
    return downcast$( firstLnkTagged );
}
// Last element of list, or a null reference when the list is empty.
static inline tE & last ( dlist(tE, tLinks) & list ) {
    verify (&list != 0p);
    dlink(tE) * lastLnk = list.prev;
    if (ORIGIN_TAG_QUERY((size_t)lastLnk)) return * 0p;
    tytagref( tLinks, dlink(tE) ) lastLnkTagged = {*lastLnk};
    return downcast$( lastLnkTagged );
}
446
// True iff the list has no elements.  Also verifies that both ends agree on
// emptiness: first and last are null together or non-null together.
static inline bool empty( dlist(tE, tLinks) & list ) {
    verify (&list != 0p);
    if ( & first(list) == 0p || & last(list) == 0p ) {
        // fixed: the original verified `last` twice, never cross-checking `first`
        verify( & first(list) == 0p && & last(list) == 0p );
        return true;
    }
    return false;
}
455
// True iff e is currently on some list: an unlisted element's links
// self-point, so any non-self link means listed.  Unavailable (verify-fails)
// in loose-singles mode, where unlisted links are not maintained.
static inline bool listed( tE & e ) {
    NOLOOSE(
        verify (&e != 0p);
        verify(! ORIGIN_TAG_QUERY( (size_t) & e ));
        dlink(tE) & e_links = e`inner;
        dlink(tE) * lprev = (dlink(tE)*) ORIGIN_TAG_CLEAR((size_t) e_links.prev);
        dlink(tE) * lnext = (dlink(tE)*) ORIGIN_TAG_CLEAR((size_t) e_links.next);
        return ( lprev != &e_links ) || ( lnext != &e_links );
    )
    LOOSEONLY(
        verify(false && "listed is undefined");
        return true;
    )
}
470
// Obtain an iterator over list, positioned at the origin (before the first
// element and after the last).  Advance before dereferencing.
static inline tE & iter( dlist(tE, tLinks) & list ) {
    return * $get_list_origin_addr( list );
}
475
476
477 // todo: resolve the pun:
478 // tag as in proxy (tytagref)
479 // tag as in bit manipulation on a funny pointer
480
// Move the iterator refx one position forward.  Returns true iff the new
// position is a regular element; false means the origin was reached and
// iteration is over.  refx is a reference-to-reference: the inner reference
// is re-seated to the (possibly tagged) next position.
static inline bool advance( tE && refx ) {
    tE && ref_inner = refx;
    tE & oldReferent = * (tE*) ORIGIN_TAG_CLEAR( (size_t) & ref_inner );
    verify (& oldReferent != 0p);
    dlink(tE) * tgt = oldReferent`inner.next;
    size_t tgt_tags = ORIGIN_TAG_QUERY( (size_t)tgt); // nonzero: next position is the origin
    dlink(tE) * nextl = (dlink(tE) *)ORIGIN_TAG_CLEAR((size_t)tgt);
    tytagref( tLinks, dlink(tE) ) nextLnkTagged = { * nextl };
    tE & nexte = downcast$( nextLnkTagged ); // link address -> element address
    size_t next_te_num = (size_t) & nexte;
    size_t new_ref_inner_num = ORIGIN_TAG_ASGN(next_te_num, tgt_tags); // propagate origin tag to exposed iterator
    tE * new_ref_inner = (tE *) new_ref_inner_num;
    &ref_inner = new_ref_inner; // re-seat the caller's iterator
    return ! tgt_tags;
}
496
// Move the iterator refx one position backward.  Mirror image of advance:
// returns true iff the new position is a regular element; false means the
// origin was reached.  The inner reference is re-seated.
static inline bool recede( tE && refx ) {
    tE && ref_inner = refx;
    tE & oldReferent = * (tE*) ORIGIN_TAG_CLEAR( (size_t) & ref_inner );
    verify (& oldReferent != 0p);
    dlink(tE) * tgt = oldReferent`inner.prev;
    size_t tgt_tags = ORIGIN_TAG_QUERY( (size_t)tgt); // nonzero: previous position is the origin
    dlink(tE) * prevl = (dlink(tE) *)ORIGIN_TAG_CLEAR((size_t)tgt);
    tytagref( tLinks, dlink(tE) ) prevLnkTagged = { * prevl };
    tE & preve = downcast$( prevLnkTagged ); // link address -> element address
    size_t prev_te_num = (size_t) & preve;
    size_t new_ref_inner_num = ORIGIN_TAG_ASGN(prev_te_num, tgt_tags); // propagate origin tag to exposed iterator
    tE * new_ref_inner = (tE *) new_ref_inner_num;
    &ref_inner = new_ref_inner; // re-seat the caller's iterator
    return ! tgt_tags;
}
512
// True iff node is the first element of its list, i.e. it cannot recede.
// Fixes the inversion flagged in the original ("probable bug copied from
// master"): recede returns true when a predecessor exists, so "is first"
// is its negation.  NOTE(review): callers relying on the old inverted
// meaning (reportedly the test suite) must be updated in tandem.
bool first( tE & node ) {
    return ! recede( node );
}
520
// True iff node is the last element of its list, i.e. it cannot advance.
// Fixes the inversion flagged in the original (mirror of first); callers
// relying on the old inverted meaning must be updated in tandem.
bool last( tE & node ) {
    return ! advance( node );
}
525
// Element after e, or a null reference if e is the last element.
// NOTE(review): advance re-seats next's local `e`; the caller's own reference
// appears unaffected (reference passed by value) -- confirm CFA semantics.
static inline tE & next( tE & e ) {
    if( advance(e) ) return e;
    return * 0p;
}
530
// Element before e, or a null reference if e is the first element.
// Same local re-seating caveat as next.
static inline tE & prev( tE & e ) {
    if( recede(e) ) return e;
    return * 0p;
}
535
536 // Next 4 headed operations:
537 // Manual inline of the equivalent headless operation, manually simplified.
538 // Applies knowledge of tag pattern around head (unknown to optimizer) to reduce runtime tag operations.
539
// Insert e as the first element of list.  Equivalent to inserting after the
// origin, with the head's known tag pattern constant-folded: the head address
// is tagged unconditionally (it is the origin), and the successor's pointer
// to e is written untagged (e is a regular node).
static inline void insert_first( dlist(tE, tLinks) &list, tE & e ) {
    dlink(tE) & linkToInsert = e`inner;
    NOLOOSE(
        // e must be unlisted: links self-point, origin-tagged
        TAGSONLY(
            verify(ORIGIN_TAG_QUERY((size_t)linkToInsert.prev));
            verify(ORIGIN_TAG_QUERY((size_t)linkToInsert.next));
        )
        verify(ORIGIN_TAG_CLEAR((size_t)linkToInsert.prev) == (size_t)&linkToInsert);
        verify(ORIGIN_TAG_CLEAR((size_t)linkToInsert.next) == (size_t)&linkToInsert);
    )
    dlink(tE) & list_pos_links = list;
    MAYBE_INSERT_READ_EARLY(
        dlink(tE) & afterLinks = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) list_pos_links.next );
    )
    MAYBE_CMEM_BARRIER;
    size_t list_pos_links_num = (size_t)(& list_pos_links);
    size_t to_insert_prev_num = ORIGIN_TAG_ENABL(list_pos_links_num); // predecessor is the origin: tagged
    dlink(tE) * to_insert_prev = (dlink(tE)*)to_insert_prev_num;
    linkToInsert.prev = to_insert_prev;
    linkToInsert.next = list_pos_links.next; // old first (tagged self-reference when list was empty)
    MAYBE_INSERT_READ_LATE(
        dlink(tE) & afterLinks = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) list_pos_links.next );
    )
    size_t linkToInsert_num = (size_t)(& linkToInsert);
    size_t afterLinks_prev_num = linkToInsert_num; // untagged: e is a regular node
    afterLinks.prev = (dlink(tE)*)(afterLinks_prev_num);
    list_pos_links.next = &linkToInsert;
    MAYBE_CMEM_BARRIER;
}
// FIXME: Change from pointer to reference for node, when tuple type can handle references.
// Variadic convenience: insert each node in args at the front, left to right.
forall( List ... | { void insert_first( dlist( tE, tLinks ) & list, List ); } )
void insert_first( dlist( tE, tLinks ) & list, tE * node, List args ) {
    insert_first( list, *node );
    insert_first( list, args );
}
void insert_first( dlist( tE, tLinks ) & list, tE * node ) {
    insert_first( list, *node );
}
578
// Insert e as the last element of list.  Mirror image of insert_first:
// equivalent to inserting before the origin, with the head's tag pattern
// constant-folded.
static inline void insert_last( dlist(tE, tLinks) &list, tE & e ) {
    dlink(tE) & linkToInsert = e`inner;
    NOLOOSE(
        // e must be unlisted: links self-point, origin-tagged
        TAGSONLY(
            verify(ORIGIN_TAG_QUERY((size_t)linkToInsert.next));
            verify(ORIGIN_TAG_QUERY((size_t)linkToInsert.prev));
        )
        verify(ORIGIN_TAG_CLEAR((size_t)linkToInsert.next) == (size_t)&linkToInsert);
        verify(ORIGIN_TAG_CLEAR((size_t)linkToInsert.prev) == (size_t)&linkToInsert);
    )
    dlink(tE) & list_pos_links = list;
    MAYBE_INSERT_READ_EARLY(
        dlink(tE) & beforeLinks = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) list_pos_links.prev );
    )
    MAYBE_CMEM_BARRIER;
    size_t list_pos_links_num = (size_t)(& list_pos_links);
    size_t to_insert_next_num = ORIGIN_TAG_ENABL(list_pos_links_num); // successor is the origin: tagged
    dlink(tE) * to_insert_next = (dlink(tE)*)to_insert_next_num;
    linkToInsert.next = to_insert_next;
    linkToInsert.prev = list_pos_links.prev; // old last (tagged self-reference when list was empty)
    MAYBE_INSERT_READ_LATE(
        dlink(tE) & beforeLinks = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) list_pos_links.prev );
    )
    size_t linkToInsert_num = (size_t)(& linkToInsert);
    size_t beforeLinks_next_num = linkToInsert_num; // untagged: e is a regular node
    beforeLinks.next = (dlink(tE)*)(beforeLinks_next_num);
    list_pos_links.prev = &linkToInsert;
    MAYBE_CMEM_BARRIER;
}
// FIXME: Change from pointer to reference for node, when tuple type can handle references.
// Variadic convenience: insert each node in args at the back, left to right.
forall( List ... | { void insert_last( dlist( tE, tLinks ) & list, List ); } )
void insert_last( dlist( tE, tLinks ) & list, tE * node, List args ) {
    insert_last( list, *node );
    insert_last( list, args );
}
void insert_last( dlist( tE, tLinks ) & list, tE * node ) {
    insert_last( list, *node );
}
617
// Insert node at the back and return it for chaining.
tE & insert( dlist( tE, tLinks ) & list, tE & node ) { // synonym for insert_last
    insert_last( list, node );
    return node;
}
// FIXME: Change from pointer to reference for node, when tuple type can handle references.
// Variadic convenience: insert each node in args at the back, left to right.
forall( List ... | { void insert( dlist( tE, tLinks ) & list, List ); } )
void insert( dlist( tE, tLinks ) & list, tE * node, List args ) {
    insert( list, *node );
    insert( list, args );
}
void insert( dlist( tE, tLinks ) & list, tE * node ) {
    insert( list, *node );
}
631
// Remove and return the first element; returns a null reference when the
// list is empty.  Branch-free: on an empty list the would-be result is the
// head itself, and nullif$ masks it to null using the sampled origin tag.
static inline tE & remove_first( dlist(tE, tLinks) &list ) {
    verify (&list != 0p);
    dlink(tE) & list_links = list;
    // call is valid on empty list; when so, list_links.next and after_links.prev have otags set

    dlink(tE) & fst_raw = * list_links.next;
    dlink(tE) & fst_links = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) & fst_raw );
    size_t fst_tagval = origin_tag_query_arith$( fst_raw ); // 1 iff the list was empty

    dlink(tE) & after_raw = * fst_links.next;
    dlink(tE) & after_links = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) & after_raw );

    // head -> old second; old second's prev -> head, tagged (head is the origin)
    size_t before_links_next_rslt = ((size_t) & after_raw);
    size_t after_links_prev_rslt = ORIGIN_TAG_ENABL( (size_t) & list_links );
    list_links.next = (dlink(tE) *) before_links_next_rslt;
    after_links.prev = (dlink(tE) *) after_links_prev_rslt;

    MAYBE_CMEM_BARRIER;
    // reset the removed element's links to the unlisted (self, tagged) state
    size_t list_pos_links_num = (size_t) &fst_links;
    size_t list_pos_links_tagged_num = ORIGIN_TAG_ENABL( list_pos_links_num );
    fst_links.next = fst_links.prev = (dlink(tE)*) list_pos_links_tagged_num;
    MAYBE_CMEM_BARRIER;

    tytagref( tLinks, dlink(tE) ) retExt = { fst_links };
    return nullif$( downcast$( retExt ), fst_tagval );
}
658
// Remove and return the last element; returns a null reference when the
// list is empty.  Mirror image of remove_first, same branch-free null-masking.
static inline tE & remove_last( dlist(tE, tLinks) &list ) {
    verify (&list != 0p);
    dlink(tE) & list_links = list;
    // call is valid on empty list; when so, list_links.prev and before_links.next have otags set

    dlink(tE) & last_raw = * list_links.prev;
    dlink(tE) & last_links = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) & last_raw );
    size_t last_tagval = origin_tag_query_arith$( last_raw ); // 1 iff the list was empty

    dlink(tE) & before_raw = * last_links.prev;
    dlink(tE) & before_links = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) & before_raw );

    // head -> old second-to-last; its next -> head, tagged (head is the origin)
    size_t after_links_prev_rslt = ((size_t) & before_raw);
    size_t before_links_next_rslt = ORIGIN_TAG_ENABL( (size_t) & list_links );
    list_links.prev = (dlink(tE) *) after_links_prev_rslt;
    before_links.next = (dlink(tE) *) before_links_next_rslt;

    MAYBE_CMEM_BARRIER;
    // reset the removed element's links to the unlisted (self, tagged) state
    size_t list_pos_links_num = (size_t) &last_links;
    size_t list_pos_links_tagged_num = ORIGIN_TAG_ENABL( list_pos_links_num );
    last_links.prev = last_links.next = (dlink(tE)*) list_pos_links_tagged_num;
    MAYBE_CMEM_BARRIER;

    tytagref( tLinks, dlink(tE) ) lpLnkTagged = { last_links };
    return nullif$( downcast$( lpLnkTagged ), last_tagval );
}
685
// Remove and return the first element, or a null reference when the list is
// empty (in which case nothing is removed).
static inline tE & try_pop_first( dlist(tE, tLinks) &list ) {
    tE & head = first(list);
    if ( &head != 0p ) remove( head ); // TODO: should it use pop_front?
    return head;
}
692
// Remove and return the last element, or a null reference when the list is
// empty (in which case nothing is removed).
static inline tE & try_pop_last( dlist(tE, tLinks) &list ) {
    tE & tail = last(list);
    if ( &tail != 0p ) remove( tail ); // TODO: should it use pop_back?
    return tail;
}
699
700
701 #if !defined(NDEBUG) && (defined(__CFA_DEBUG__) || defined(__CFA_VERIFY__))
// Debug: traverse forward from the origin, checking that the first element
// visited is first(this) and the element visited last is last(this).
static bool $validate_fwd( dlist(tE, tLinks) & this ) {
    tE & lagElem = *0p; // trails one element behind `it`; null until first step

    while ( tE & it = iter(this); advance(it) ) {
        if (& lagElem == 0p && &it != & first(this) ) return false; // first visited must be first(this)
        & lagElem = & it;
    }

    if (& lagElem != & last(this)) return false; // last visited must be last(this)

    // TODO: verify that it is back at iter(this);
    return true;
}
// Debug: traverse backward from the origin, checking that the first element
// visited is last(this) and the element visited last is first(this).
static bool $validate_rev( dlist(tE, tLinks) & this ) {
    tE & lagElem = *0p; // trails one element behind `it`; null until first step

    while ( tE & it = iter(this); recede(it) ) {
        if (& lagElem == 0p && &it != & last(this) ) return false; // first visited must be last(this)
        & lagElem = & it;
    }

    if (& lagElem != & first(this)) return false; // last visited must be first(this)

    // TODO: verify that it is back at iter(this);
    return true;
}
// Debug: full structural validation -- both ends must agree on emptiness,
// and the forward and reverse traversals must each be consistent.
static inline bool validate( dlist(tE, tLinks) & this ) {
    // renamed: the original names (reportsHavingFirst/Last) asserted the
    // opposite of what was computed; these flags are true when the end is ABSENT
    bool lacksFirst = ((& first(this)) == 0p);
    bool lacksLast = ((& last(this)) == 0p);
    if ( lacksFirst != lacksLast ) return false;

    return $validate_fwd(this) && $validate_rev(this);
}
735 #endif
736
737}
Note: See TracBrowser for help on using the repository browser.