Index: libcfa/src/collections/list.hfa
===================================================================
--- libcfa/src/collections/list.hfa	(revision 402f2499a77728ad681fed709aef60b2bf4e17f8)
+++ libcfa/src/collections/list.hfa	(revision 6cbc5a626cf54938256101fcd9c34533b8b3d28e)
@@ -10,6 +10,6 @@
 // Created On       : Wed Apr 22 18:00:00 2020
 // Last Modified By : Peter A. Buhr
-// Last Modified On : Thu Aug  7 10:46:08 2025
-// Update Count     : 75
+// Last Modified On : Tue Mar 24 11:25:34 2026
+// Update Count     : 98
 //
 
@@ -72,82 +72,81 @@
 
 // The origin is the position encountered at the start of iteration, signifying, "need to advance to the first element,"
-// and at the end of iteration, signifying, "no more elements."  Normal comsumption of an iterator runs "advance" as
-// the first step, and uses the return of "advance" as a guard, before dereferencing the iterator.  So normal
-// consumption of an iterator does not dereference an iterator in origin position.  The value of a pointer (underlying a
-// refence) that is exposed publicly as an iteraor, and also a pointer stored internally in a link field, is tagged, to
-// indicate "is the origin" (internally, is the list-head sentinel node), or untagged, to indicate "is a regular node."
-// Intent is to help a user who dereferences an iterator in origin position (which would be an API-use error on their
-// part), by failing fast.
+// and at the end of iteration, signifying, "no more elements."  Normal consumption of an iterator runs "advance" as the
+// first step, and uses the return of "advance" as a guard, before dereferencing the iterator.  So normal consumption of
+// an iterator does not dereference an iterator in origin position.  The value of a pointer (underlying a reference) that
+// is exposed publicly as an iterator, and also a pointer stored internally in a link field, is tagged, to indicate "is
+// the origin" (internally, is the list-head sentinel node), or untagged, to indicate "is a regular node."  Intent is to
+// help a user who dereferences an iterator in origin position (which would be an API-use error on their part), by
+// failing fast.
 
 #ifdef __EXPERIMENTAL_DISABLE_OTAG__ // Perf experimention alt mode
 
-    // With origin tagging disabled, iteration never reports "no more elements."
-    // In this mode, the list API is buggy.
-    // This mode is used to quantify the cost of the normal tagging scheme.
-
-    #define ORIGIN_TAG_SET(p)   (p)
-    #define ORIGIN_TAG_CLEAR(p) (p)
-    #define ORIGIN_TAG_QUERY(p) 0
-    #define ORIGIN_TAG_ASGN(p, v) (p)
-    #define ORIGIN_TAG_EITHER(p, v) (p)
-    #define ORIGIN_TAG_NEQ(v1, v2) 0
+	// With origin tagging disabled, iteration never reports "no more elements."
+	// In this mode, the list API is buggy.
+	// This mode is used to quantify the cost of the normal tagging scheme.
+
+	#define ORIGIN_TAG_SET(p)   (p)
+	#define ORIGIN_TAG_CLEAR(p) (p)
+	#define ORIGIN_TAG_QUERY(p) 0
+	#define ORIGIN_TAG_ASGN(p, v) (p)
+	#define ORIGIN_TAG_EITHER(p, v) (p)
+	#define ORIGIN_TAG_NEQ(v1, v2) 0
 
 #else // Normal
 
-    #if defined( __x86_64 )
-        // Preferred case: tag in the most-significant bit.  Dereference
-        // has been shown to segfault consistently.  Maintenance should
-        // list more architectures as "ok" here, to let them use the
-        // preferred case, when valid.
-        #define ORIGIN_TAG_BITNO ( 8 * sizeof( size_t ) - 1 )
-    #else
-        // Fallback case: tag in the least-significant bit.  Dereference
-        // will often give an alignment error, but may not, e.g. if
-        // accessing a char-typed member.  32-bit x86 uses the most-
-        // significant bit for real room on the heap.
-        #define ORIGIN_TAG_BITNO 0
-    #endif
-
-    #define ORIGIN_TAG_MASK (((size_t)1) << ORIGIN_TAG_BITNO)
-
-    #define ORIGIN_TAG_SET(p) ((p) |  ORIGIN_TAG_MASK)
-    #define ORIGIN_TAG_CLEAR(p) ((p) & ~ORIGIN_TAG_MASK)
-    #define ORIGIN_TAG_QUERY(p) ((p) &  ORIGIN_TAG_MASK)
-
-    #define ORIGIN_TAG_ASGN(p, v) ( \
-        verify( ! ORIGIN_TAG_QUERY(p) && "p had no tagbit" ), \
-        ORIGIN_TAG_EITHER((p), (v)) \
-    )
-
-    #define ORIGIN_TAG_EITHER(p, v) ( \
-        verify( ! ORIGIN_TAG_CLEAR(v) && "v is a pure tagbit" ), \
-        ( (p) | (v) ) \
-    )
-
-    #define ORIGIN_TAG_NEQ(v1, v2) ( \
-        verify( ! ORIGIN_TAG_CLEAR(v1) && "v1 is a pure tagbit" ), \
-        verify( ! ORIGIN_TAG_CLEAR(v2) && "v2 is a pure tagbit" ), \
-        ( (v1) ^ (v2) ) \
-    )
+	#if defined( __x86_64 )
+		// Preferred case: tag in the most-significant bit.  Dereference
+		// has been shown to segfault consistently.  Maintenance should
+		// list more architectures as "ok" here, to let them use the
+		// preferred case, when valid.
+		#define ORIGIN_TAG_BITNO ( 8 * sizeof( size_t ) - 1 )
+	#else
+		// Fallback case: tag in the least-significant bit.  Dereference
+		// will often give an alignment error, but may not, e.g. if
+		// accessing a char-typed member.  32-bit x86 uses the most-
+		// significant bit for real room on the heap.
+		#define ORIGIN_TAG_BITNO 0
+	#endif
+
+	#define ORIGIN_TAG_MASK (((size_t)1) << ORIGIN_TAG_BITNO)
+
+	#define ORIGIN_TAG_SET(p) ((p) |  ORIGIN_TAG_MASK)
+	#define ORIGIN_TAG_CLEAR(p) ((p) & ~ORIGIN_TAG_MASK)
+	#define ORIGIN_TAG_QUERY(p) ((p) &  ORIGIN_TAG_MASK)
+
+	#define ORIGIN_TAG_ASGN(p, v) ( \
+		verify( ! ORIGIN_TAG_QUERY(p) && "p had no tagbit" ), \
+		ORIGIN_TAG_EITHER((p), (v)) \
+	)
+
+	#define ORIGIN_TAG_EITHER(p, v) ( \
+		verify( ! ORIGIN_TAG_CLEAR(v) && "v is a pure tagbit" ), \
+		( (p) | (v) ) \
+	)
+
+	#define ORIGIN_TAG_NEQ(v1, v2) ( \
+		verify( ! ORIGIN_TAG_CLEAR(v1) && "v1 is a pure tagbit" ), \
+		verify( ! ORIGIN_TAG_CLEAR(v2) && "v2 is a pure tagbit" ), \
+		( (v1) ^ (v2) ) \
+	)
 
 #endif
 
 
-
 #ifdef __EXPERIMENTAL_LOOSE_SINGLES__ // Perf experimention alt mode
 
-    // In loose-singles mode, the ability to answer an "is listed" query is disabled, as is "to insert an element,
-    // it must not be listed already" checking.  The user must know separately whether an element is listed.
-    // Other than inserting it, any list-api action on an unlisted element is undefined.  Notably, list iteration
-    // starting from an unlisted element is not defined to respond "no more elements," and may instead continue
-    // iterating from a formerly occupied list position.  This mode matches LQ usage.
-
-    #define NOLOOSE(...)
-    #define LOOSEONLY(...) __VA_ARGS__
+	// In loose-singles mode, the ability to answer an "is listed" query is disabled, as is "to insert an element,
+	// it must not be listed already" checking.  The user must know separately whether an element is listed.
+	// Other than inserting it, any list-api action on an unlisted element is undefined.  Notably, list iteration
+	// starting from an unlisted element is not defined to respond "no more elements," and may instead continue
+	// iterating from a formerly occupied list position.  This mode matches LQ usage.
+
+	#define NOLOOSE(...)
+	#define LOOSEONLY(...) __VA_ARGS__
 
 #else // Normal
 
-    #define NOLOOSE(...) __VA_ARGS__
-    #define LOOSEONLY(...)
+	#define NOLOOSE(...) __VA_ARGS__
+	#define LOOSEONLY(...)
 
 #endif
@@ -190,5 +189,5 @@
 static inline forall( tE &, tLinks & | embedded( tE, tLinks, dlink( tE ) ) ) {
 	bool isListed( tE & node ) {
-      NOLOOSE(
+	  NOLOOSE(
 		verify( &node != 0p );
 		dlink( tE ) & node_links = node`inner;
@@ -196,5 +195,5 @@
 	  )
 	  LOOSEONLY(
-        verify(false && "isListed is undefined");
+		verify(false && "isListed is undefined");
 		return true;
 	  )
@@ -224,5 +223,5 @@
 
 		dlink( tE ) & linkToInsert = node`inner;
-      NOLOOSE(
+	  NOLOOSE(
 		verify( linkToInsert.next == 0p );
 		verify( linkToInsert.prev == 0p );
@@ -242,4 +241,13 @@
 		return node;
 	}
+	// FIXME: Change from pointer to reference for node, when tuple type can handle references.
+	forall( List ... | { void insert_before( tE & before, List ); } )
+	void insert_before( tE & before, tE * node, List args ) {
+		insert_before( before, *node );
+		insert_before( before, args );
+	}
+	void insert_before( tE & before, tE * node ) {
+		insert_before( before, *node );
+	}
 
 	tE & insert_after( tE & after, tE & node ) {
@@ -248,5 +256,5 @@
 
 		dlink( tE ) & linkToInsert = node`inner;
-      NOLOOSE(
+	  NOLOOSE(
 		verify( linkToInsert.prev == 0p );
 		verify( linkToInsert.next == 0p );
@@ -265,4 +273,13 @@
 		asm( "" : : : "memory" );
 		return node;
+	}
+	// FIXME: Change from pointer to reference for node, when tuple type can handle references.
+	forall( List ... | { void insert_after( tE & after, List ); } )
+	void insert_after( tE & after, tE * node, List args ) {
+		insert_after( after, *node );
+		insert_after( after, args );
+	}
+	void insert_after( tE & after, tE * node ) {
+		insert_after( after, *node );
 	}
 
@@ -281,5 +298,5 @@
 		after_links.prev = &before_raw;
 
-      NOLOOSE(
+	  NOLOOSE(
 		asm( "" : : : "memory" );
 		list_pos_links.prev = 0p;
@@ -288,4 +305,13 @@
 	  )
 		return node;
+	}
+	// FIXME: Change from pointer to reference for node, when tuple type can handle references.
+	forall( List ... | { void remove( List ); } )
+	void remove( tE * node, List args ) {
+		remove( *node );
+		remove( args );
+	}
+	void remove( tE * node ) {
+		remove( *node );
 	}
 
@@ -309,10 +335,10 @@
 	}
 
-    bool isFirst( tE & node ) {
-        return recede( node );
-    }
-
-    bool isLast( tE & node ) {
-        return advance( node );
+	bool isFirst( tE & node ) {
+		return recede( node );
+	}
+
+	bool isLast( tE & node ) {
+		return advance( node );
     }
 
@@ -331,4 +357,13 @@
 		return node;
 	}
+	// FIXME: Change from pointer to reference for node, when tuple type can handle references.
+	forall( List ... | { void insert_first( dlist( tE, tLinks ) & list, List ); } )
+	void insert_first( dlist( tE, tLinks ) & list, tE * node, List args ) {
+		insert_first( list, *node );
+		insert_first( list, args );
+	}
+	void insert_first( dlist( tE, tLinks ) & list, tE * node ) {
+		insert_first( list, *node );
+	}
 
 	tE & insert_last( dlist( tE, tLinks ) & list, tE & node ) {
@@ -336,7 +371,26 @@
 		return node;
 	}
+	// FIXME: Change from pointer to reference for node, when tuple type can handle references.
+	forall( List ... | { void insert_last( dlist( tE, tLinks ) & list, List ); } )
+	void insert_last( dlist( tE, tLinks ) & list, tE * node, List args ) {
+		insert_last( list, *node );
+		insert_last( list, args );
+	}
+	void insert_last( dlist( tE, tLinks ) & list, tE * node ) {
+		insert_last( list, *node );
+	}
+
 	tE & insert( dlist( tE, tLinks ) & list, tE & node ) { // synonym for insert_last
 		insert_last( list, node );
 		return node;
+	}
+	// FIXME: Change from pointer to reference for node, when tuple type can handle references.
+	forall( List ... | { void insert( dlist( tE, tLinks ) & list, List ); } )
+	void insert( dlist( tE, tLinks ) & list, tE * node, List args ) {
+		insert( list, *node );
+		insert( list, args );
+	}
+	void insert( dlist( tE, tLinks ) & list, tE * node ) {
+		insert( list, *node );
 	}
 
Index: libcfa/src/collections/list2.hfa
===================================================================
--- libcfa/src/collections/list2.hfa	(revision 402f2499a77728ad681fed709aef60b2bf4e17f8)
+++ libcfa/src/collections/list2.hfa	(revision 6cbc5a626cf54938256101fcd9c34533b8b3d28e)
@@ -19,6 +19,6 @@
 // Created On       : Wed Apr 22 18:00:00 2020
 // Last Modified By : Peter A. Buhr
-// Last Modified On : Thu Feb  2 11:32:26 2023
-// Update Count     : 2
+// Last Modified On : Tue Mar 24 22:20:56 2026
+// Update Count     : 8
 //
 
@@ -29,14 +29,14 @@
 forall( Decorator &, T & )
 struct tytagref {
-    inline T &;
+	inline T &;
 };
 
 forall( tOuter &, tMid &, tInner & )
 trait embedded {
-    tytagref( tMid, tInner ) ?`inner( tOuter & );
+	tytagref( tMid, tInner ) ?`inner( tOuter & );
 };
 
 // embedded is reflexive, with no info (void) as the type tag
-forall (T &)
+forall( T & )
 static inline tytagref(void, T) ?`inner ( T & this ) { tytagref( void, T ) ret = {this}; return ret; }
 
@@ -46,6 +46,6 @@
 //
 // struct foo {
-//    int a, b, c;
-//    inline (bar);
+//	int a, b, c;
+//	inline (bar);
 // };
 // P9_EMBEDDED( foo, bar )
@@ -53,224 +53,208 @@
 
 // usual version, for structs that are top-level declarations
-#define P9_EMBEDDED(        derived, immedBase ) P9_EMBEDDED_DECL_( derived, immedBase, static ) P9_EMBEDDED_BDY_( immedBase )
+#define P9_EMBEDDED( derived, immedBase ) P9_EMBEDDED_DECL_( derived, immedBase, static ) P9_EMBEDDED_BDY_( immedBase )
 
 // special version, for structs that are declared in functions
-#define P9_EMBEDDED_INFUNC( derived, immedBase ) P9_EMBEDDED_DECL_( derived, immedBase,        ) P9_EMBEDDED_BDY_( immedBase )
+#define P9_EMBEDDED_INFUNC( derived, immedBase ) P9_EMBEDDED_DECL_( derived, immedBase, ) P9_EMBEDDED_BDY_( immedBase )
 
 // forward declarations of both the above; generally not needed
 // may help you control where the P9_EMBEEDED cruft goes, in case "right after the stuct" isn't where you want it
-#define P9_EMBEDDED_FWD(        derived, immedBase )      P9_EMBEDDED_DECL_( derived, immedBase, static ) ;
-#define P9_EMBEDDED_FWD_INFUNC( derived, immedBase ) auto P9_EMBEDDED_DECL_( derived, immedBase,        ) ;
+#define P9_EMBEDDED_FWD( derived, immedBase ) P9_EMBEDDED_DECL_( derived, immedBase, static ) ;
+#define P9_EMBEDDED_FWD_INFUNC( derived, immedBase ) auto P9_EMBEDDED_DECL_( derived, immedBase, ) ;
 
 // private helpers
 #define P9_EMBEDDED_DECL_( derived, immedBase, STORAGE ) \
-    forall( Tbase &, TdiscardPath & | { tytagref( TdiscardPath, Tbase ) ?`inner( immedBase & ); } ) \
-    STORAGE inline tytagref(immedBase, Tbase) ?`inner( derived & this )
-    
+	forall( Tbase &, TdiscardPath & | { tytagref( TdiscardPath, Tbase ) ?`inner( immedBase & ); } ) \
+	STORAGE inline tytagref( immedBase, Tbase ) ?`inner( derived & this )
+
 #define P9_EMBEDDED_BDY_( immedBase ) { \
-        immedBase & ib = this; \
-        Tbase & b = ib`inner; \
-        tytagref(immedBase, Tbase) result = { b }; \
-        return result; \
-    }
-
-#define EMBEDDED_VIA( OUTER, MID, INNER ) \
-   (struct { tytagref(MID, INNER) ( * ?`inner ) ( OUTER & ); }){ ?`inner } 
-
-#define DLINK_VIA( TE, TLINK ) \
-   EMBEDDED_VIA( TE, TLINK, dlink(TE) )
-
-// The origin is the position encountered at the start of iteration,
-// signifying, "need to advance to the first element," and at the end
-// of iteration, signifying, "no more elements."  Normal comsumption of
-// an iterator runs advance as the first step, and uses the return
-// of advance as a guard, before dereferencing the iterator.  So
-// normal consumption of an iterator does not dereference an iterator
-// in origin position.  The value of a pointer (underlying a refence)
-// that is exposed publicly as an iteraor, and also a pointer stored
-// internally in a link field, is tagged, to indicate "is the origin"
-// (internally, is the list-head sentinel node), or untagged, to indicate
-// "is a regular node."  Intent is to help a user who dereferences an
-// iterator in origin position (which would be an API-use error on their
-// part), by failing fast.
+		immedBase & ib = this; \
+		Tbase & b = ib`inner; \
+		tytagref( immedBase, Tbase ) result = { b }; \
+		return result; \
+	}
+
+#define EMBEDDED_VIA( OUTER, MID, INNER ) (struct { tytagref( MID, INNER ) ( * ?`inner ) ( OUTER & ); }){ ?`inner }
+
+#define DLINK_VIA( TE, TLINK ) EMBEDDED_VIA( TE, TLINK, dlink( TE ) )
+
+
+// The origin is the position encountered at the start of iteration, signifying, "need to advance to the first element,"
+// and at the end of iteration, signifying, "no more elements."  Normal consumption of an iterator runs "advance" as the
+// first step, and uses the return of "advance" as a guard, before dereferencing the iterator.  So normal consumption of
+// an iterator does not dereference an iterator in origin position.  The value of a pointer (underlying a reference) that
+// is exposed publicly as an iterator, and also a pointer stored internally in a link field, is tagged, to indicate "is
+// the origin" (internally, is the list-head sentinel node), or untagged, to indicate "is a regular node."  Intent is to
+// help a user who dereferences an iterator in origin position (which would be an API-use error on their part), by
+// failing fast.
 
 #ifdef __EXPERIMENTAL_DISABLE_OTAG__ // Perf experimention alt mode
 
-    // With origin tagging disabled, iteration never reports "no more elements."
-    // In this mode, the list API is buggy.
-    // This mode is used to quantify the cost of the normal tagging scheme.
-
-    #define ORIGIN_TAG_ENABL(p)   (p)
-    #define ORIGIN_TAG_CLEAR(p) (p)
-    #define ORIGIN_TAG_QUERY(p) 0
-    #define ORIGIN_TAG_ASGN(p, v) (p)
-    #define ORIGIN_TAG_EITHER(p, v) (p)
-    #define ORIGIN_TAG_NEQ(v1, v2) 0
-
-    #define TAGSONLY(...)
-    #define NOTAGS(...) __VA_ARGS__
+	// With origin tagging disabled, iteration never reports "no more elements."
+	// In this mode, the list API is buggy.
+	// This mode is used to quantify the cost of the normal tagging scheme.
+
+	#define ORIGIN_TAG_ENABL(p)   (p)
+	#define ORIGIN_TAG_CLEAR(p) (p)
+	#define ORIGIN_TAG_QUERY(p) 0
+	#define ORIGIN_TAG_ASGN(p, v) (p)
+	#define ORIGIN_TAG_EITHER(p, v) (p)
+	#define ORIGIN_TAG_NEQ(v1, v2) 0
+
+	#define TAGSONLY(...)
+	#define NOTAGS(...) __VA_ARGS__
 
 #else // Normal
 
-    #if defined( __x86_64 )
-        // Preferred case: tag in the most-significant bit.  Dereference
-        // has been shown to segfault consistently.  Maintenance should
-        // list more architectures as "ok" here, to let them use the
-        // preferred case, when valid.
-        #define ORIGIN_TAG_BITNO ( 8 * sizeof( size_t ) - 1 )
-    #else
-        // Fallback case: tag in the least-significant bit.  Dereference
-        // will often give an alignment error, but may not, e.g. if
-        // accessing a char-typed member.  32-bit x86 uses the most-
-        // significant bit for real room on the heap.
-        #define ORIGIN_TAG_BITNO 0
-    #endif
-
-    #define ORIGIN_TAG_MASK (((size_t)1) << ORIGIN_TAG_BITNO)
-
-    #define ORIGIN_TAG_ENABL(p) ((p) |  ORIGIN_TAG_MASK)
-    #define ORIGIN_TAG_CLEAR(p) ((p) & ~ORIGIN_TAG_MASK)
-    #define ORIGIN_TAG_QUERY(p) ((p) &  ORIGIN_TAG_MASK)
-
-    #define ORIGIN_TAG_ASGN(p, v) ( \
-        verify( ! ORIGIN_TAG_QUERY(p) && "p had no tagbit" ), \
-        ORIGIN_TAG_EITHER((p), (v)) \
-    )
-
-    #define ORIGIN_TAG_EITHER(p, v) ( \
-        verify( ! ORIGIN_TAG_CLEAR(v) && "v is a pure tagbit" ), \
-        ( (p) | (v) ) \
-    )
-
-    #define ORIGIN_TAG_NEQ(v1, v2) ( \
-        verify( ! ORIGIN_TAG_CLEAR(v1) && "v1 is a pure tagbit" ), \
-        verify( ! ORIGIN_TAG_CLEAR(v2) && "v2 is a pure tagbit" ), \
-        ( (v1) ^ (v2) ) \
-    )
-
-    #define TAGSONLY(...) __VA_ARGS__
-    #define NOTAGS(...)
+	#if defined( __x86_64 )
+		// Preferred case: tag in the most-significant bit.  Dereference
+		// has been shown to segfault consistently.  Maintenance should
+		// list more architectures as "ok" here, to let them use the
+		// preferred case, when valid.
+		#define ORIGIN_TAG_BITNO ( 8 * sizeof( size_t ) - 1 )
+	#else
+		// Fallback case: tag in the least-significant bit.  Dereference
+		// will often give an alignment error, but may not, e.g. if
+		// accessing a char-typed member.  32-bit x86 uses the most-
+		// significant bit for real room on the heap.
+		#define ORIGIN_TAG_BITNO 0
+	#endif
+
+	#define ORIGIN_TAG_MASK (((size_t)1) << ORIGIN_TAG_BITNO)
+
+	#define ORIGIN_TAG_ENABL(p) ((p) |  ORIGIN_TAG_MASK)
+	#define ORIGIN_TAG_CLEAR(p) ((p) & ~ORIGIN_TAG_MASK)
+	#define ORIGIN_TAG_QUERY(p) ((p) &  ORIGIN_TAG_MASK)
+
+	#define ORIGIN_TAG_ASGN(p, v) ( \
+		verify( ! ORIGIN_TAG_QUERY(p) && "p had no tagbit" ), \
+		ORIGIN_TAG_EITHER((p), (v)) \
+	)
+
+	#define ORIGIN_TAG_EITHER(p, v) ( \
+		verify( ! ORIGIN_TAG_CLEAR(v) && "v is a pure tagbit" ), \
+		( (p) | (v) ) \
+	)
+
+	#define ORIGIN_TAG_NEQ(v1, v2) ( \
+		verify( ! ORIGIN_TAG_CLEAR(v1) && "v1 is a pure tagbit" ), \
+		verify( ! ORIGIN_TAG_CLEAR(v2) && "v2 is a pure tagbit" ), \
+		( (v1) ^ (v2) ) \
+	)
+
+	#define TAGSONLY(...) __VA_ARGS__
+	#define NOTAGS(...)
 
 #endif
 
 
-
-
-
-
 #ifdef __EXPERIMENTAL_LOOSE_SINGLES__ // Perf experimention alt mode
 
-    // In loose-singles mode, the ability to answer an "is listed" query is disabled, as is "to insert an element,
-    // it must not be listed already" checking.  The user must know separately whether an element is listed.
-    // Other than inserting it, any list-api action on an unlisted element is undefined.  Notably, list iteration
-    // starting from an unlisted element is not defined to respond "no more elements," and may instead continue
-    // iterating from a formerly occupied list position.  This mode matches LQ usage.
-
-    #define NOLOOSE(...)
-    #define LOOSEONLY(...) __VA_ARGS__
+	// In loose-singles mode, the ability to answer an "is listed" query is disabled, as is "to insert an element,
+	// it must not be listed already" checking.  The user must know separately whether an element is listed.
+	// Other than inserting it, any list-api action on an unlisted element is undefined.  Notably, list iteration
+	// starting from an unlisted element is not defined to respond "no more elements," and may instead continue
+	// iterating from a formerly occupied list position.  This mode matches LQ usage.
+
+	#define NOLOOSE(...)
+	#define LOOSEONLY(...) __VA_ARGS__
 
 #else // Normal
 
-    #define NOLOOSE(...) __VA_ARGS__
-    #define LOOSEONLY(...)
+	#define NOLOOSE(...) __VA_ARGS__
+	#define LOOSEONLY(...)
 
 #endif
 
 
-
-
-
-
-
-
 // struct workaround0_t {};
 
 forall( tE & ) {
-
-    struct dlink;
-
-    // do not use; presence of the field declaration unblocks ability to define dlink (#304)
-    struct __dlink_selfref_workaround_t {
-        dlink(tE) *ref_notSelfRef;
-    };
-
-    struct dlink {
-        dlink(tE) *next;  // TODO: rename with $
-        dlink(tE) *prev;
-    };
-
-    static inline void ?{}( dlink(tE) & this ) {
-      NOLOOSE(
-        dlink(tE) * toSelf = & this;
-        size_t toSelfNum = (size_t) toSelf;
-        size_t toSelfNumTagged = ORIGIN_TAG_ENABL( toSelfNum );
-        dlink(tE) * toSelfPtrTagged = (dlink(tE) *) toSelfNumTagged;
-        toSelf = toSelfPtrTagged;
-        (this.next){ toSelf };
-        (this.prev){ toSelf };
-      )
-    }
-
-    // You can "copy" a dlink.  But the result won't be linked.
-    // Lets you copy what you inline the dlink into.
-    static inline void ?{}( dlink(tE) & this, dlink(tE) ) {
-        this{};
-    }
-
-    forall( tLinks & = dlink(tE) | embedded(tE, tLinks, dlink(tE)) ) {
-        struct dlist {
-            inline dlink(tE);
-        };
-
-        static inline tE * $get_list_origin_addr( dlist(tE, tLinks) & lst ) {
-            dlink(tE) & link_from_null = ( * (tE *) 0p )`inner;
-            ptrdiff_t link_offset = (ptrdiff_t) & link_from_null;
-            size_t origin_addr = ((size_t) & lst) - link_offset;
-            size_t preResult = ORIGIN_TAG_ENABL( origin_addr );
-            return (tE *)preResult;
-        }
-
-        static inline void ?{}( dlist(tE, tLinks) & this ) {
-          NOLOOSE(
-            ( (dlink(tE) &) this ){};
-          )
-          LOOSEONLY(
+	struct dlink;
+
+	// do not use; presence of the field declaration unblocks ability to define dlink (#304)
+	struct __dlink_selfref_workaround_t {
+		dlink(tE) *ref_notSelfRef;
+	};
+
+	struct dlink {
+		dlink(tE) *next;  // TODO: rename with $
+		dlink(tE) *prev;
+	};
+
+	static inline void ?{}( dlink(tE) & this ) {
+	  NOLOOSE(
+		dlink(tE) * toSelf = & this;
+		size_t toSelfNum = (size_t) toSelf;
+		size_t toSelfNumTagged = ORIGIN_TAG_ENABL( toSelfNum );
+		dlink(tE) * toSelfPtrTagged = (dlink(tE) *) toSelfNumTagged;
+		toSelf = toSelfPtrTagged;
+		(this.next){ toSelf };
+		(this.prev){ toSelf };
+	  )
+	}
+
+	// You can "copy" a dlink.  But the result won't be linked.
+	// Lets you copy what you inline the dlink into.
+	static inline void ?{}( dlink(tE) & this, dlink(tE) ) {
+		this{};
+	}
+
+	forall( tLinks & = dlink(tE) | embedded(tE, tLinks, dlink(tE)) ) {
+		struct dlist {
+			inline dlink(tE);
+		};
+
+		static inline tE * $get_list_origin_addr( dlist(tE, tLinks) & lst ) {
+			dlink(tE) & link_from_null = ( * (tE *) 0p )`inner;
+			ptrdiff_t link_offset = (ptrdiff_t) & link_from_null;
+			size_t origin_addr = ((size_t) & lst) - link_offset;
+			size_t preResult = ORIGIN_TAG_ENABL( origin_addr );
+			return (tE *)preResult;
+		}
+
+		static inline void ?{}( dlist(tE, tLinks) & this ) {
+		  NOLOOSE(
+			( (dlink(tE) &) this ){};
+		  )
+		  LOOSEONLY(
 			dlink(tE) * listOrigin = (dlink(tE) *) $get_list_origin_addr( this );
-            dlink( tE ) & thisl = this;
+			dlink( tE ) & thisl = this;
 			(thisl.prev) = listOrigin;
-            (thisl.next) = listOrigin;
-          )
-        }
-    }
+			(thisl.next) = listOrigin;
+		  )
+		}
+	}
 }
+
 
 forall( tE & ) {
 #ifdef __EXPERIMENTAL_DISABLE_OTAG__ // Perf experimention alt mode
-    static inline size_t origin_tag_query_arith$( tE & raw ) {
-        return 0;
-    }
-    static inline tE & nullif$( tE & val, size_t arith_ctrl ) {
-        verify( arith_ctrl == 0 );  (void) arith_ctrl;
-        return val;
-    }
+	static inline size_t origin_tag_query_arith$( tE & raw ) {
+		return 0;
+	}
+	static inline tE & nullif$( tE & val, size_t arith_ctrl ) {
+		verify( arith_ctrl == 0 );  (void) arith_ctrl;
+		return val;
+	}
 #else // Normal
-    // like ORIGIN_TAG_QUERY, but return is arithmetic number 0 or 1 (rather than 0 or non-0)
-    static inline size_t origin_tag_query_arith$( tE & raw ) {
-        size_t ret = (((size_t) & raw) >> ORIGIN_TAG_BITNO) & 1;
-        verify( ORIGIN_TAG_QUERY( (size_t) & raw ) ? ret == 1 : ret == 0 );
-        return ret;
-    }
-    // Requires arith_ctrl being 0 or 1.
-    // When 0, passes val through; when 1, returns null reference.
-    // Importantly, implemented without jumps or tests.
-    static inline tE & nullif$( tE & val, size_t arith_ctrl ) {
-        verify( ! ORIGIN_TAG_QUERY( (size_t) & val ) );
-        verify( arith_ctrl == 0 || arith_ctrl == 1 );
-        size_t mask_ctrl = ~ - arith_ctrl;
-        verify( arith_ctrl == 0 && mask_ctrl == -1 || arith_ctrl == 1 && mask_ctrl ==0 );
-        tE & ret = * (tE*) ( ((size_t) & val) & mask_ctrl);
-        verify( arith_ctrl == 0 && &ret == &val || arith_ctrl == 1 && &ret == 0p );
-        return ret;
-    }
+	// like ORIGIN_TAG_QUERY, but return is arithmetic number 0 or 1 (rather than 0 or non-0)
+	static inline size_t origin_tag_query_arith$( tE & raw ) {
+		size_t ret = (((size_t) & raw) >> ORIGIN_TAG_BITNO) & 1;
+		verify( ORIGIN_TAG_QUERY( (size_t) & raw ) ? ret == 1 : ret == 0 );
+		return ret;
+	}
+	// Requires arith_ctrl being 0 or 1.
+	// When 0, passes val through; when 1, returns null reference.
+	// Importantly, implemented without jumps or tests.
+	static inline tE & nullif$( tE & val, size_t arith_ctrl ) {
+		verify( ! ORIGIN_TAG_QUERY( (size_t) & val ) );
+		verify( arith_ctrl == 0 || arith_ctrl == 1 );
+		size_t mask_ctrl = ~ - arith_ctrl;
+		verify( arith_ctrl == 0 && mask_ctrl == -1 || arith_ctrl == 1 && mask_ctrl ==0 );
+		tE & ret = * (tE*) ( ((size_t) & val) & mask_ctrl);
+		verify( arith_ctrl == 0 && &ret == &val || arith_ctrl == 1 && &ret == 0p );
+		return ret;
+	}
 #endif
 }
@@ -305,387 +289,445 @@
 
 forall( tE &, tLinks & | embedded( tE, tLinks, dlink(tE) ) ) {
-
-	static inline void insert_after(tE & list_pos, tE &to_insert) {
-        size_t list_pos_tag = ORIGIN_TAG_QUERY((size_t) & list_pos); // a request to insert after the origin is fine
-        tE & list_pos_real = * (tE *) ORIGIN_TAG_CLEAR((size_t) & list_pos);
+	static inline void insert_before(tE & list_pos, tE &to_insert) {
+		size_t list_pos_tag = ORIGIN_TAG_QUERY((size_t) & list_pos); // a request to insert before the origin is fine
+		tE & list_pos_real = * (tE *) ORIGIN_TAG_CLEAR((size_t) & list_pos);
 		verify (&list_pos_real != 0p);
 
 		verify (&to_insert != 0p);
-      NOLOOSE(
+	  NOLOOSE(
 		verify(! ORIGIN_TAG_QUERY((size_t) & to_insert));
-      )
-        dlink(tE) & linkToInsert = to_insert`inner;
-      NOLOOSE(
-       TAGSONLY(
+	  )
+		dlink(tE) & linkToInsert = to_insert`inner;
+	  NOLOOSE(
+	   TAGSONLY(
 		verify(ORIGIN_TAG_QUERY((size_t)linkToInsert.prev));
 		verify(ORIGIN_TAG_QUERY((size_t)linkToInsert.next));
-       )
+	   )
 		verify(ORIGIN_TAG_CLEAR((size_t)linkToInsert.prev) == (size_t)&linkToInsert);
 		verify(ORIGIN_TAG_CLEAR((size_t)linkToInsert.next) == (size_t)&linkToInsert);
-      )
-        dlink(tE) & list_pos_links = list_pos_real`inner;
-      MAYBE_INSERT_READ_EARLY(
-        dlink(tE) & afterLinks = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) list_pos_links.next );
-      )
-        MAYBE_CMEM_BARRIER;
-        size_t list_pos_links_num = (size_t)(& list_pos_links);
-        size_t to_insert_prev_num = ORIGIN_TAG_ASGN(list_pos_links_num, list_pos_tag);
-        dlink(tE) * to_insert_prev = (dlink(tE)*)to_insert_prev_num;
+	  )
+		dlink(tE) & list_pos_links = list_pos_real`inner;
+	  MAYBE_INSERT_READ_EARLY(
+		dlink(tE) & beforeLinks = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) list_pos_links.prev );
+	  )
+		MAYBE_CMEM_BARRIER;
+		size_t list_pos_links_num = (size_t)(& list_pos_links);
+		size_t to_insert_next_num = ORIGIN_TAG_ASGN(list_pos_links_num, list_pos_tag);
+		dlink(tE) * to_insert_next = (dlink(tE)*)to_insert_next_num;
+		linkToInsert.next = to_insert_next;
+		linkToInsert.prev = list_pos_links.prev;
+	  MAYBE_INSERT_READ_LATE(
+		dlink(tE) & beforeLinks = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) list_pos_links.prev );
+	  )
+		size_t beforeLinks_next_tag = ORIGIN_TAG_QUERY((size_t)beforeLinks.next);
+		size_t linkToInsert_num = (size_t)(& linkToInsert);
+		size_t beforeLinks_next_num = ORIGIN_TAG_ASGN(linkToInsert_num, ORIGIN_TAG_NEQ(list_pos_tag, beforeLinks_next_tag));
+		beforeLinks.next = (dlink(tE)*)(beforeLinks_next_num);
+		list_pos_links.prev = &linkToInsert;
+		MAYBE_CMEM_BARRIER;
+	}
+	// FIXME: Change from pointer to reference for node, when tuple type can handle references.
+	forall( List ... | { void insert_before( tE & before, List ); } )
+	void insert_before( tE & before, tE * node, List args ) {
+		insert_before( before, *node );
+		insert_before( before, args );
+	}
+	void insert_before( tE & before, tE * node ) {
+		insert_before( before, *node );
+	}
+
+	static inline void insert_after(tE & list_pos, tE &to_insert) {
+		size_t list_pos_tag = ORIGIN_TAG_QUERY((size_t) & list_pos); // a request to insert after the origin is fine
+		tE & list_pos_real = * (tE *) ORIGIN_TAG_CLEAR((size_t) & list_pos);
+		verify (&list_pos_real != 0p);
+
+		verify (&to_insert != 0p);
+	  NOLOOSE(
+		verify(! ORIGIN_TAG_QUERY((size_t) & to_insert));
+	  )
+		dlink(tE) & linkToInsert = to_insert`inner;
+	  NOLOOSE(
+	   TAGSONLY(
+		verify(ORIGIN_TAG_QUERY((size_t)linkToInsert.prev));
+		verify(ORIGIN_TAG_QUERY((size_t)linkToInsert.next));
+	   )
+		verify(ORIGIN_TAG_CLEAR((size_t)linkToInsert.prev) == (size_t)&linkToInsert);
+		verify(ORIGIN_TAG_CLEAR((size_t)linkToInsert.next) == (size_t)&linkToInsert);
+	  )
+		dlink(tE) & list_pos_links = list_pos_real`inner;
+	  MAYBE_INSERT_READ_EARLY(
+		dlink(tE) & afterLinks = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) list_pos_links.next );
+	  )
+		MAYBE_CMEM_BARRIER;
+		size_t list_pos_links_num = (size_t)(& list_pos_links);
+		size_t to_insert_prev_num = ORIGIN_TAG_ASGN(list_pos_links_num, list_pos_tag);
+		dlink(tE) * to_insert_prev = (dlink(tE)*)to_insert_prev_num;
 		linkToInsert.prev = to_insert_prev;
 		linkToInsert.next = list_pos_links.next;
-      MAYBE_INSERT_READ_LATE(
-        dlink(tE) & afterLinks = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) list_pos_links.next );
-      )
-        size_t afterLinks_prev_tag = ORIGIN_TAG_QUERY((size_t)afterLinks.prev);
-        size_t linkToInsert_num = (size_t)(& linkToInsert);
-        size_t afterLinks_prev_num = ORIGIN_TAG_ASGN(linkToInsert_num, ORIGIN_TAG_NEQ(list_pos_tag, afterLinks_prev_tag));
-        afterLinks.prev = (dlink(tE)*)(afterLinks_prev_num);
+	  MAYBE_INSERT_READ_LATE(
+		dlink(tE) & afterLinks = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) list_pos_links.next );
+	  )
+		size_t afterLinks_prev_tag = ORIGIN_TAG_QUERY((size_t)afterLinks.prev);
+		size_t linkToInsert_num = (size_t)(& linkToInsert);
+		size_t afterLinks_prev_num = ORIGIN_TAG_ASGN(linkToInsert_num, ORIGIN_TAG_NEQ(list_pos_tag, afterLinks_prev_tag));
+		afterLinks.prev = (dlink(tE)*)(afterLinks_prev_num);
 		list_pos_links.next = &linkToInsert;
-        MAYBE_CMEM_BARRIER;
-	}
-
-	static inline void insert_before(tE & list_pos, tE &to_insert) {
-        size_t list_pos_tag = ORIGIN_TAG_QUERY((size_t) & list_pos); // a request to insert before the origin is fine
-        tE & list_pos_real = * (tE *) ORIGIN_TAG_CLEAR((size_t) & list_pos);
-		verify (&list_pos_real != 0p);
-
-		verify (&to_insert != 0p);
-      NOLOOSE(
-		verify(! ORIGIN_TAG_QUERY((size_t) & to_insert));
-      )
-        dlink(tE) & linkToInsert = to_insert`inner;
-      NOLOOSE(
-       TAGSONLY(
+		MAYBE_CMEM_BARRIER;
+	}
+	// FIXME: Change from pointer to reference for node, when tuple type can handle references.
+	forall( List ... | { void insert_after( tE & after, List ); } )
+	void insert_after( tE & after, tE * node, List args ) {
+		insert_after( after, *node );
+		insert_after( after, args );
+	}
+	void insert_after( tE & after, tE * node ) {
+		insert_after( after, *node );
+	}
+
+	static inline tE & remove(tE & list_pos) {
+		verify( ! ORIGIN_TAG_QUERY((size_t) & list_pos) );
+		verify (&list_pos != 0p);
+
+		dlink(tE) & list_pos_links = list_pos`inner;
+		dlink(tE) & before_raw = * list_pos_links.prev;
+		dlink(tE) & before_links = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) & before_raw );
+		size_t before_links_next_tag = ORIGIN_TAG_QUERY( (size_t) (before_links.next) );
+
+		dlink(tE) & after_raw = * list_pos_links.next;
+		dlink(tE) & after_links = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) & after_raw );
+		size_t after_links_prev_tag = ORIGIN_TAG_QUERY( (size_t) (after_links.prev) );
+
+		size_t before_links_next_rslt = ORIGIN_TAG_EITHER( ((size_t) & after_raw), before_links_next_tag );
+		size_t after_links_prev_rslt = ORIGIN_TAG_EITHER( ((size_t) & before_raw), after_links_prev_tag );
+		before_links.next = (dlink(tE) *) before_links_next_rslt;
+		after_links.prev = (dlink(tE) *) after_links_prev_rslt;
+
+	  NOLOOSE(
+		MAYBE_CMEM_BARRIER;
+		size_t list_pos_links_num = (size_t) &list_pos_links;
+		size_t list_pos_links_tagged_num = ORIGIN_TAG_ENABL( list_pos_links_num );
+		list_pos_links.next = list_pos_links.prev = (dlink(tE)*) list_pos_links_tagged_num;
+		MAYBE_CMEM_BARRIER;
+	  )
+		return list_pos;
+	}
+	// FIXME: Change from pointer to reference for node, when tuple type can handle references.
+	forall( List ... | { void remove( List ); } )
+	void remove( tE * node, List args ) {
+		remove( *node );
+		remove( args );
+	}
+	void remove( tE * node ) {
+		remove( *node );
+	}
+
+	static inline tE & downcast$( tytagref( tLinks, dlink(tE) ) ref ) {
+		dlink(tE) & lnk = ref;
+		dlink(tE) & link_from_null = ( * (tE *) 0p )`inner;
+		ptrdiff_t link_offset = (ptrdiff_t) & link_from_null;
+		size_t elm_addr = ((size_t) & lnk) - link_offset;
+		return * (tE*) elm_addr;
+	}
+
+	static inline tE & first( dlist(tE, tLinks) & lst ) {
+		verify (&lst != 0p);
+		dlink(tE) * firstLnk = lst.next;
+		if (ORIGIN_TAG_QUERY((size_t)firstLnk)) return * 0p;
+		tytagref( tLinks, dlink(tE) ) firstLnkTagged = {*firstLnk};
+		return downcast$( firstLnkTagged );
+	}
+	static inline tE & last ( dlist(tE, tLinks) & lst ) {
+		verify (&lst != 0p);
+		dlink(tE) * lastLnk = lst.prev;
+		if (ORIGIN_TAG_QUERY((size_t)lastLnk)) return * 0p;
+		tytagref( tLinks, dlink(tE) ) lastLnkTagged = {*lastLnk};
+		return downcast$( lastLnkTagged );
+	}
+
+	static inline bool isEmpty( dlist(tE, tLinks) & lst ) {
+		verify (&lst != 0p);
+		if ( & first(lst) == 0p || & last(lst) == 0p ) {
+			verify( & first(lst) == 0p && & last(lst) == 0p ); // both ends must agree list is empty
+			return true;
+		}
+		return false;
+	}
+
+	static inline bool isListed( tE & e ) {
+	  NOLOOSE(
+		verify (&e != 0p);
+		verify(! ORIGIN_TAG_QUERY( (size_t) & e ));
+		dlink(tE) & e_links = e`inner;
+		dlink(tE) * lprev = (dlink(tE)*) ORIGIN_TAG_CLEAR((size_t) e_links.prev);
+		dlink(tE) * lnext = (dlink(tE)*) ORIGIN_TAG_CLEAR((size_t) e_links.next);
+		return ( lprev != &e_links ) || ( lnext != &e_links );
+	  )
+	  LOOSEONLY(
+		verify(false && "isListed is undefined");
+		return true;
+	  )
+	}
+
+	static inline tE & iter( dlist(tE, tLinks) & lst ) {
+		tE * origin = $get_list_origin_addr( lst );
+		return *origin;
+	}
+
+
+	// todo: resolve the pun:
+	//  tag as in proxy (tytagref)
+	//  tag as in bit manipulation on a funny pointer
+
+	static inline bool advance( tE && refx ) {
+		tE && ref_inner = refx;
+		tE & oldReferent = * (tE*) ORIGIN_TAG_CLEAR( (size_t) & ref_inner );
+		verify (& oldReferent != 0p);
+		dlink(tE) * tgt = oldReferent`inner.next;
+		size_t tgt_tags = ORIGIN_TAG_QUERY( (size_t)tgt);
+		dlink(tE) * nextl = (dlink(tE) *)ORIGIN_TAG_CLEAR((size_t)tgt);
+		tytagref( tLinks, dlink(tE) ) nextLnkTagged = { * nextl };
+		tE & nexte = downcast$( nextLnkTagged );
+		size_t next_te_num = (size_t) & nexte;
+		size_t new_ref_inner_num = ORIGIN_TAG_ASGN(next_te_num, tgt_tags);
+		tE * new_ref_inner = (tE *) new_ref_inner_num;
+		&ref_inner = new_ref_inner;
+		return ! tgt_tags;
+	}
+
+	static inline bool recede( tE && refx ) {
+		tE && ref_inner = refx;
+		tE & oldReferent = * (tE*) ORIGIN_TAG_CLEAR( (size_t) & ref_inner );
+		verify (& oldReferent != 0p);
+		dlink(tE) * tgt = oldReferent`inner.prev;
+		size_t tgt_tags = ORIGIN_TAG_QUERY( (size_t)tgt);
+		dlink(tE) * prevl = (dlink(tE) *)ORIGIN_TAG_CLEAR((size_t)tgt);
+		tytagref( tLinks, dlink(tE) ) prevLnkTagged = { * prevl };
+		tE & preve = downcast$( prevLnkTagged );
+		size_t prev_te_num = (size_t) & preve;
+		size_t new_ref_inner_num = ORIGIN_TAG_ASGN(prev_te_num, tgt_tags);
+		tE * new_ref_inner = (tE *) new_ref_inner_num;
+		&ref_inner = new_ref_inner;
+		return ! tgt_tags;
+	}
+
+	bool isFirst( tE & node ) {
+		// True iff node is the first element: its predecessor is the origin
+		// (head sentinel), so recede fails.  recede returns true on success,
+		// hence the negation.  The recede call rebinds only this function's
+		// local parameter reference; the caller's reference is unaffected.
+		return ! recede( node );
+	}
+
+	bool isLast( tE & node ) {
+		// True iff node is the last element: advancing from it reaches the
+		// origin sentinel, so advance fails; negate its success result.
+		return ! advance( node );
+	}
+
+	static inline tE & next( tE & e ) {
+		if( advance(e) ) return e;
+		return * 0p;
+	}
+
+	static inline tE & prev( tE & e ) {
+		if( recede(e) ) return e;
+		return * 0p;
+	}
+
+	// Next 4 headed operations:
+	// Manual inline of the equivalent headless operation, manually simplified.
+	// Applies knowledge of tag pattern around head (unknown to optimizer) to reduce runtime tag operations.
+
+	static inline void insert_first( dlist(tE, tLinks) &lst, tE & e ) {
+		dlink(tE) & linkToInsert = e`inner;
+	  NOLOOSE(
+	   TAGSONLY(
 		verify(ORIGIN_TAG_QUERY((size_t)linkToInsert.prev));
 		verify(ORIGIN_TAG_QUERY((size_t)linkToInsert.next));
-       )
+	   )
 		verify(ORIGIN_TAG_CLEAR((size_t)linkToInsert.prev) == (size_t)&linkToInsert);
 		verify(ORIGIN_TAG_CLEAR((size_t)linkToInsert.next) == (size_t)&linkToInsert);
-      )
-        dlink(tE) & list_pos_links = list_pos_real`inner;
-      MAYBE_INSERT_READ_EARLY(
-        dlink(tE) & beforeLinks = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) list_pos_links.prev );
-      )
-        MAYBE_CMEM_BARRIER;
-        size_t list_pos_links_num = (size_t)(& list_pos_links);
-        size_t to_insert_next_num = ORIGIN_TAG_ASGN(list_pos_links_num, list_pos_tag);
-        dlink(tE) * to_insert_next = (dlink(tE)*)to_insert_next_num;
+	  )
+		dlink(tE) & list_pos_links = lst;
+	  MAYBE_INSERT_READ_EARLY(
+		dlink(tE) & afterLinks = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) list_pos_links.next );
+	  )
+		MAYBE_CMEM_BARRIER;
+		size_t list_pos_links_num = (size_t)(& list_pos_links);
+		size_t to_insert_prev_num = ORIGIN_TAG_ENABL(list_pos_links_num);
+		dlink(tE) * to_insert_prev = (dlink(tE)*)to_insert_prev_num;
+		linkToInsert.prev = to_insert_prev;
+		linkToInsert.next = list_pos_links.next;
+	  MAYBE_INSERT_READ_LATE(
+		dlink(tE) & afterLinks = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) list_pos_links.next );
+	  )
+		size_t linkToInsert_num = (size_t)(& linkToInsert);
+		size_t afterLinks_prev_num = linkToInsert_num;
+		afterLinks.prev = (dlink(tE)*)(afterLinks_prev_num);
+		list_pos_links.next = &linkToInsert;
+		MAYBE_CMEM_BARRIER;
+	}
+	// FIXME: Change from pointer to reference for node, when tuple type can handle references.
+	forall( List ... | { void insert_first( dlist( tE, tLinks ) & list, List ); } )
+	void insert_first( dlist( tE, tLinks ) & list, tE * node, List args ) {
+		insert_first( list, *node );
+		insert_first( list, args );
+	}
+	void insert_first( dlist( tE, tLinks ) & list, tE * node ) {
+		insert_first( list, *node );
+	}
+
+	static inline void insert_last( dlist(tE, tLinks) &lst, tE & e ) {
+		dlink(tE) & linkToInsert = e`inner;
+	  NOLOOSE(
+	   TAGSONLY(
+		verify(ORIGIN_TAG_QUERY((size_t)linkToInsert.next));
+		verify(ORIGIN_TAG_QUERY((size_t)linkToInsert.prev));
+	   )
+		verify(ORIGIN_TAG_CLEAR((size_t)linkToInsert.next) == (size_t)&linkToInsert);
+		verify(ORIGIN_TAG_CLEAR((size_t)linkToInsert.prev) == (size_t)&linkToInsert);
+	  )
+		dlink(tE) & list_pos_links = lst;
+	  MAYBE_INSERT_READ_EARLY(
+		dlink(tE) & beforeLinks = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) list_pos_links.prev );
+	  )
+		MAYBE_CMEM_BARRIER;
+		size_t list_pos_links_num = (size_t)(& list_pos_links);
+		size_t to_insert_next_num = ORIGIN_TAG_ENABL(list_pos_links_num);
+		dlink(tE) * to_insert_next = (dlink(tE)*)to_insert_next_num;
 		linkToInsert.next = to_insert_next;
 		linkToInsert.prev = list_pos_links.prev;
-      MAYBE_INSERT_READ_LATE(
-        dlink(tE) & beforeLinks = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) list_pos_links.prev );
-      )
-        size_t beforeLinks_next_tag = ORIGIN_TAG_QUERY((size_t)beforeLinks.next);
-        size_t linkToInsert_num = (size_t)(& linkToInsert);
-        size_t beforeLinks_next_num = ORIGIN_TAG_ASGN(linkToInsert_num, ORIGIN_TAG_NEQ(list_pos_tag, beforeLinks_next_tag));
-        beforeLinks.next = (dlink(tE)*)(beforeLinks_next_num);
+	  MAYBE_INSERT_READ_LATE(
+		dlink(tE) & beforeLinks = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) list_pos_links.prev );
+	  )
+		size_t linkToInsert_num = (size_t)(& linkToInsert);
+		size_t beforeLinks_next_num = linkToInsert_num;
+		beforeLinks.next = (dlink(tE)*)(beforeLinks_next_num);
 		list_pos_links.prev = &linkToInsert;
-        MAYBE_CMEM_BARRIER;
-	}
-
-	static inline tE & remove(tE & list_pos) {
-        verify( ! ORIGIN_TAG_QUERY((size_t) & list_pos) );
-		verify (&list_pos != 0p);
-
-        dlink(tE) & list_pos_links = list_pos`inner;
-        dlink(tE) & before_raw = * list_pos_links.prev;
-        dlink(tE) & before_links = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) & before_raw );
-        size_t before_links_next_tag = ORIGIN_TAG_QUERY( (size_t) (before_links.next) );
-
-        dlink(tE) & after_raw = * list_pos_links.next;
-        dlink(tE) & after_links = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) & after_raw );
-        size_t after_links_prev_tag = ORIGIN_TAG_QUERY( (size_t) (after_links.prev) );
-
-        size_t before_links_next_rslt = ORIGIN_TAG_EITHER( ((size_t) & after_raw), before_links_next_tag );
-        size_t after_links_prev_rslt = ORIGIN_TAG_EITHER( ((size_t) & before_raw), after_links_prev_tag );
-        before_links.next = (dlink(tE) *) before_links_next_rslt;
-        after_links.prev = (dlink(tE) *) after_links_prev_rslt;
-
-      NOLOOSE(
-        MAYBE_CMEM_BARRIER;
-        size_t list_pos_links_num = (size_t) &list_pos_links;
-        size_t list_pos_links_tagged_num = ORIGIN_TAG_ENABL( list_pos_links_num );
-		list_pos_links.next = list_pos_links.prev = (dlink(tE)*) list_pos_links_tagged_num;
-        MAYBE_CMEM_BARRIER;
-      )
-        return list_pos;
-	}
-
-    static inline tE & downcast$( tytagref( tLinks, dlink(tE) ) ref ) {
-        dlink(tE) & lnk = ref;
-        dlink(tE) & link_from_null = ( * (tE *) 0p )`inner;
-        ptrdiff_t link_offset = (ptrdiff_t) & link_from_null;
-        size_t elm_addr = ((size_t) & lnk) - link_offset;
-        return * (tE*) elm_addr;
-    }
-
-    static inline tE & first( dlist(tE, tLinks) & lst ) {
+		MAYBE_CMEM_BARRIER;
+	}
+	// FIXME: Change from pointer to reference for node, when tuple type can handle references.
+	forall( List ... | { void insert_last( dlist( tE, tLinks ) & list, List ); } )
+	void insert_last( dlist( tE, tLinks ) & list, tE * node, List args ) {
+		insert_last( list, *node );
+		insert_last( list, args );
+	}
+	void insert_last( dlist( tE, tLinks ) & list, tE * node ) {
+		insert_last( list, *node );
+	}
+
+	tE & insert( dlist( tE, tLinks ) & list, tE & node ) { // synonym for insert_last
+		insert_last( list, node );
+		return node;
+	}
+	// FIXME: Change from pointer to reference for node, when tuple type can handle references.
+	forall( List ... | { void insert( dlist( tE, tLinks ) & list, List ); } )
+	void insert( dlist( tE, tLinks ) & list, tE * node, List args ) {
+		insert( list, *node );
+		insert( list, args );
+	}
+	void insert( dlist( tE, tLinks ) & list, tE * node ) {
+		insert( list, *node );
+	}
+
+	static inline tE & remove_first( dlist(tE, tLinks) &lst ) {
 		verify (&lst != 0p);
-        dlink(tE) * firstLnk = lst.next;
-        if (ORIGIN_TAG_QUERY((size_t)firstLnk)) return * 0p;
-        tytagref( tLinks, dlink(tE) ) firstLnkTagged = {*firstLnk};
-        return downcast$( firstLnkTagged );
-    }
-    static inline tE & last ( dlist(tE, tLinks) & lst ) {
+		dlink(tE) & list_links = lst;
+		// call is valid on empty list; when so, list_links.next and after_links.prev have otags set
+
+		dlink(tE) & fst_raw = * list_links.next;
+		dlink(tE) & fst_links = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) & fst_raw );
+		size_t fst_tagval = origin_tag_query_arith$( fst_raw );
+
+		dlink(tE) & after_raw = * fst_links.next;
+		dlink(tE) & after_links = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) & after_raw );
+
+		size_t before_links_next_rslt = ((size_t) & after_raw);
+		size_t after_links_prev_rslt = ORIGIN_TAG_ENABL( (size_t) & list_links );
+		list_links.next = (dlink(tE) *) before_links_next_rslt;
+		after_links.prev = (dlink(tE) *) after_links_prev_rslt;
+
+		MAYBE_CMEM_BARRIER;
+		size_t list_pos_links_num = (size_t) &fst_links;
+		size_t list_pos_links_tagged_num = ORIGIN_TAG_ENABL( list_pos_links_num );
+		fst_links.next = fst_links.prev = (dlink(tE)*) list_pos_links_tagged_num;
+		MAYBE_CMEM_BARRIER;
+
+		tytagref( tLinks, dlink(tE) ) retExt = { fst_links };
+		return nullif$( downcast$( retExt ), fst_tagval );
+	}
+
+	static inline tE & remove_last( dlist(tE, tLinks) &lst ) {
 		verify (&lst != 0p);
-        dlink(tE) * lastLnk = lst.prev;
-        if (ORIGIN_TAG_QUERY((size_t)lastLnk)) return * 0p;
-        tytagref( tLinks, dlink(tE) ) lastLnkTagged = {*lastLnk};
-        return downcast$( lastLnkTagged );
-    }
-
-    static inline bool isEmpty( dlist(tE, tLinks) & lst ) {
-		verify (&lst != 0p);
-        if ( & first(lst) == 0p || & last(lst) == 0p ) {
-            verify( & last(lst) == 0p && & last(lst) == 0p );
-            return true;
-        }
-        return false;
-    }
-
-    static inline bool isListed( tE & e ) {
-      NOLOOSE(
-		verify (&e != 0p);
-		verify(! ORIGIN_TAG_QUERY( (size_t) & e ));
-        dlink(tE) & e_links = e`inner;
-        dlink(tE) * lprev = (dlink(tE)*) ORIGIN_TAG_CLEAR((size_t) e_links.prev);
-        dlink(tE) * lnext = (dlink(tE)*) ORIGIN_TAG_CLEAR((size_t) e_links.next);
-		return ( lprev != &e_links ) || ( lnext != &e_links );
-      )
-      LOOSEONLY(
-        verify(false && "isListed is undefined");
-        return true;
-      )
-    }
-
-    static inline tE & iter( dlist(tE, tLinks) & lst ) {
-        tE * origin = $get_list_origin_addr( lst );
-        return *origin;
-    }
-
-
-    // todo: resolve the pun:
-    //  tag as in proxy (tytagref)
-    //  tag as in bit manipulation on a funny pointer
-
-    static inline bool advance( tE && refx ) {
-        tE && ref_inner = refx;
-        tE & oldReferent = * (tE*) ORIGIN_TAG_CLEAR( (size_t) & ref_inner );
-		verify (& oldReferent != 0p);
-        dlink(tE) * tgt = oldReferent`inner.next;
-        size_t tgt_tags = ORIGIN_TAG_QUERY( (size_t)tgt);
-        dlink(tE) * nextl = (dlink(tE) *)ORIGIN_TAG_CLEAR((size_t)tgt);
-        tytagref( tLinks, dlink(tE) ) nextLnkTagged = { * nextl };
-        tE & nexte = downcast$( nextLnkTagged );
-        size_t next_te_num = (size_t) & nexte;
-        size_t new_ref_inner_num = ORIGIN_TAG_ASGN(next_te_num, tgt_tags);
-        tE * new_ref_inner = (tE *) new_ref_inner_num;
-        &ref_inner = new_ref_inner;
-        return ! tgt_tags;
-    }
-
-    static inline bool recede( tE && refx ) {
-        tE && ref_inner = refx;
-        tE & oldReferent = * (tE*) ORIGIN_TAG_CLEAR( (size_t) & ref_inner );
-		verify (& oldReferent != 0p);
-        dlink(tE) * tgt = oldReferent`inner.prev;
-        size_t tgt_tags = ORIGIN_TAG_QUERY( (size_t)tgt);
-        dlink(tE) * prevl = (dlink(tE) *)ORIGIN_TAG_CLEAR((size_t)tgt);
-        tytagref( tLinks, dlink(tE) ) prevLnkTagged = { * prevl };
-        tE & preve = downcast$( prevLnkTagged );
-        size_t prev_te_num = (size_t) & preve;
-        size_t new_ref_inner_num = ORIGIN_TAG_ASGN(prev_te_num, tgt_tags);
-        tE * new_ref_inner = (tE *) new_ref_inner_num;
-        &ref_inner = new_ref_inner;
-        return ! tgt_tags;
-    }
-
-    bool isFirst( tE & node ) {
-        // Probable bug copied from master
-        // should be `! recede(node)`
-        // correct: "is first iff cannot recede"
-        // used backward in test suite too, probably victim of a grep rename
-        return recede( node );
-    }
-
-    bool isLast( tE & node ) {
-        // ditto, vice versa
-        return advance( node );
-    }
-
-    static inline tE & next( tE & e ) {
-        if( advance(e) ) return e;
-        return * 0p;
-    }
-
-    static inline tE & prev( tE & e ) {
-        if( recede(e) ) return e;
-        return * 0p;
-    }
-
-    // Next 4 headed operations:
-    // Manual inline of the equivalent headless operation, manually simplified.
-    // Applies knowledge of tag pattern around head (unknown to optimizer) to reduce runtime tag operations.
-
-    static inline void insert_first( dlist(tE, tLinks) &lst, tE & e ) {
-        dlink(tE) & linkToInsert = e`inner;
-      NOLOOSE(
-       TAGSONLY(
-		verify(ORIGIN_TAG_QUERY((size_t)linkToInsert.prev));
-		verify(ORIGIN_TAG_QUERY((size_t)linkToInsert.next));
-       )
-		verify(ORIGIN_TAG_CLEAR((size_t)linkToInsert.prev) == (size_t)&linkToInsert);
-		verify(ORIGIN_TAG_CLEAR((size_t)linkToInsert.next) == (size_t)&linkToInsert);
-      )
-        dlink(tE) & list_pos_links = lst;
-      MAYBE_INSERT_READ_EARLY(
-        dlink(tE) & afterLinks = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) list_pos_links.next );
-      )
-        MAYBE_CMEM_BARRIER;
-        size_t list_pos_links_num = (size_t)(& list_pos_links);
-        size_t to_insert_prev_num = ORIGIN_TAG_ENABL(list_pos_links_num);
-        dlink(tE) * to_insert_prev = (dlink(tE)*)to_insert_prev_num;
-		linkToInsert.prev = to_insert_prev;
-		linkToInsert.next = list_pos_links.next;
-      MAYBE_INSERT_READ_LATE(
-        dlink(tE) & afterLinks = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) list_pos_links.next );
-      )
-        size_t linkToInsert_num = (size_t)(& linkToInsert);
-        size_t afterLinks_prev_num = linkToInsert_num;
-        afterLinks.prev = (dlink(tE)*)(afterLinks_prev_num);
-		list_pos_links.next = &linkToInsert;
-        MAYBE_CMEM_BARRIER;
-    }
-
-    static inline void insert_last( dlist(tE, tLinks) &lst, tE & e ) {
-        dlink(tE) & linkToInsert = e`inner;
-      NOLOOSE(
-       TAGSONLY(
-		verify(ORIGIN_TAG_QUERY((size_t)linkToInsert.next));
-		verify(ORIGIN_TAG_QUERY((size_t)linkToInsert.prev));
-       )
-		verify(ORIGIN_TAG_CLEAR((size_t)linkToInsert.next) == (size_t)&linkToInsert);
-		verify(ORIGIN_TAG_CLEAR((size_t)linkToInsert.prev) == (size_t)&linkToInsert);
-      )
-        dlink(tE) & list_pos_links = lst;
-      MAYBE_INSERT_READ_EARLY(
-        dlink(tE) & beforeLinks = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) list_pos_links.prev );
-      )
-        MAYBE_CMEM_BARRIER;
-        size_t list_pos_links_num = (size_t)(& list_pos_links);
-        size_t to_insert_next_num = ORIGIN_TAG_ENABL(list_pos_links_num);
-        dlink(tE) * to_insert_next = (dlink(tE)*)to_insert_next_num;
-		linkToInsert.next = to_insert_next;
-		linkToInsert.prev = list_pos_links.prev;
-      MAYBE_INSERT_READ_LATE(
-        dlink(tE) & beforeLinks = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) list_pos_links.prev );
-      )
-        size_t linkToInsert_num = (size_t)(& linkToInsert);
-        size_t beforeLinks_next_num = linkToInsert_num;
-        beforeLinks.next = (dlink(tE)*)(beforeLinks_next_num);
-		list_pos_links.prev = &linkToInsert;
-        MAYBE_CMEM_BARRIER;
-    }
-
-    static inline tE & remove_first( dlist(tE, tLinks) &lst ) {
-		verify (&lst != 0p);
-        dlink(tE) & list_links = lst;
-        // call is valid on empty list; when so, list_links.next and after_links.prev have otags set
-
-        dlink(tE) & fst_raw = * list_links.next;
-        dlink(tE) & fst_links = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) & fst_raw );
-        size_t fst_tagval = origin_tag_query_arith$( fst_raw );
-
-        dlink(tE) & after_raw = * fst_links.next;
-        dlink(tE) & after_links = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) & after_raw );
-
-        size_t before_links_next_rslt = ((size_t) & after_raw);
-        size_t after_links_prev_rslt = ORIGIN_TAG_ENABL( (size_t) & list_links );
-        list_links.next = (dlink(tE) *) before_links_next_rslt;
-        after_links.prev = (dlink(tE) *) after_links_prev_rslt;
-
-        MAYBE_CMEM_BARRIER;
-        size_t list_pos_links_num = (size_t) &fst_links;
-        size_t list_pos_links_tagged_num = ORIGIN_TAG_ENABL( list_pos_links_num );
-		fst_links.next = fst_links.prev = (dlink(tE)*) list_pos_links_tagged_num;
-        MAYBE_CMEM_BARRIER;
-
-        tytagref( tLinks, dlink(tE) ) retExt = { fst_links };
-        return nullif$( downcast$( retExt ), fst_tagval );
-    }
-
-    static inline tE & remove_last( dlist(tE, tLinks) &lst ) {
-		verify (&lst != 0p);
-        dlink(tE) & list_links = lst;
-        // call is valid on empty list; when so, list_links.prev and before_links.next have otags set
-
-        dlink(tE) & last_raw = * list_links.prev;
-        dlink(tE) & last_links = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) & last_raw );
-        size_t last_tagval = origin_tag_query_arith$( last_raw );
-
-        dlink(tE) & before_raw = * last_links.prev;
-        dlink(tE) & before_links = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) & before_raw );
-
-        size_t after_links_prev_rslt = ((size_t) & before_raw);
-        size_t before_links_next_rslt = ORIGIN_TAG_ENABL( (size_t) & list_links );
-        list_links.prev = (dlink(tE) *) after_links_prev_rslt;
-        before_links.next = (dlink(tE) *) before_links_next_rslt;
-
-        MAYBE_CMEM_BARRIER;
-        size_t list_pos_links_num = (size_t) &last_links;
-        size_t list_pos_links_tagged_num = ORIGIN_TAG_ENABL( list_pos_links_num );
+		dlink(tE) & list_links = lst;
+		// call is valid on empty list; when so, list_links.prev and before_links.next have otags set
+
+		dlink(tE) & last_raw = * list_links.prev;
+		dlink(tE) & last_links = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) & last_raw );
+		size_t last_tagval = origin_tag_query_arith$( last_raw );
+
+		dlink(tE) & before_raw = * last_links.prev;
+		dlink(tE) & before_links = * (dlink(tE) *) ORIGIN_TAG_CLEAR( (size_t) & before_raw );
+
+		size_t after_links_prev_rslt = ((size_t) & before_raw);
+		size_t before_links_next_rslt = ORIGIN_TAG_ENABL( (size_t) & list_links );
+		list_links.prev = (dlink(tE) *) after_links_prev_rslt;
+		before_links.next = (dlink(tE) *) before_links_next_rslt;
+
+		MAYBE_CMEM_BARRIER;
+		size_t list_pos_links_num = (size_t) &last_links;
+		size_t list_pos_links_tagged_num = ORIGIN_TAG_ENABL( list_pos_links_num );
 		last_links.prev = last_links.next = (dlink(tE)*) list_pos_links_tagged_num;
-        MAYBE_CMEM_BARRIER;
-
-        tytagref( tLinks, dlink(tE) ) lpLnkTagged = { last_links };
-        return nullif$( downcast$( lpLnkTagged ), last_tagval );
-    }
-
-    static inline tE & try_pop_first( dlist(tE, tLinks) &lst ) {
-        tE & first_inlist = first(lst);
-        tE & first_item = first_inlist;
-        if (&first_item) remove(first_inlist);  // TODO: should it use pop_front?
-        return first_item;
-    }
-
-    static inline tE & try_pop_last( dlist(tE, tLinks) &lst ) {
-        tE & last_inlist = last(lst);
-        tE & last_item = last_inlist;
-        if (&last_item) remove(last_inlist);  // TODO: should it use pop_back?
-        return last_item;
-    }
+		MAYBE_CMEM_BARRIER;
+
+		tytagref( tLinks, dlink(tE) ) lpLnkTagged = { last_links };
+		return nullif$( downcast$( lpLnkTagged ), last_tagval );
+	}
+
+	static inline tE & try_pop_first( dlist(tE, tLinks) &lst ) {
+		tE & first_inlist = first(lst);
+		tE & first_item = first_inlist;
+		if (&first_item) remove(first_inlist);  // TODO: should it use pop_front?
+		return first_item;
+	}
+
+	static inline tE & try_pop_last( dlist(tE, tLinks) &lst ) {
+		tE & last_inlist = last(lst);
+		tE & last_item = last_inlist;
+		if (&last_item) remove(last_inlist);  // TODO: should it use pop_back?
+		return last_item;
+	}
 
 
   #if !defined(NDEBUG) && (defined(__CFA_DEBUG__) || defined(__CFA_VERIFY__))
 	static bool $validate_fwd( dlist(tE, tLinks) & this ) {
-        tE & lagElem = *0p;
-
-        while ( tE & it = iter(this); advance(it) ) {
-            if (& lagElem == 0p &&  &it != & first(this) ) return false;
-            & lagElem = & it;
-        }
-
-        if (& lagElem != & last(this)) return false;
-
-        // TODO: verify that it is back at iter(this);
-        return true;
+		tE & lagElem = *0p;
+
+		while ( tE & it = iter(this); advance(it) ) {
+			if (& lagElem == 0p &&  &it != & first(this) ) return false;
+			& lagElem = & it;
+		}
+
+		if (& lagElem != & last(this)) return false;
+
+		// TODO: verify that it is back at iter(this);
+		return true;
 	}
 	static bool $validate_rev( dlist(tE, tLinks) & this ) {
-        tE & lagElem = *0p;
-
-        while ( tE & it = iter(this); recede(it) ) {
-            if (& lagElem == 0p &&  &it != & last(this) ) return false;
-            & lagElem = & it;
-        }
-
-        if (& lagElem != & first(this)) return false;
-
-        // TODO: verify that it is back at iter(this);
-        return true;
+		tE & lagElem = *0p;
+
+		while ( tE & it = iter(this); recede(it) ) {
+			if (& lagElem == 0p &&  &it != & last(this) ) return false;
+			& lagElem = & it;
+		}
+
+		if (& lagElem != & first(this)) return false;
+
+		// TODO: verify that it is back at iter(this);
+		return true;
 	}
 	static inline bool validate( dlist(tE, tLinks) & this ) {
-        bool reportsHavingFirst = ((& first(this)) == 0p);
-        bool reportsHavingLast = ((& last(this)) == 0p);
-        if ( reportsHavingFirst != reportsHavingLast ) return false;
+		bool reportsHavingFirst = ((& first(this)) == 0p);
+		bool reportsHavingLast = ((& last(this)) == 0p);
+		if ( reportsHavingFirst != reportsHavingLast ) return false;
 
 		return $validate_fwd(this) && $validate_rev(this);
