Changeset 5b11c25
- Timestamp: Apr 30, 2019, 2:53:41 PM
- Branches: ADT, arm-eh, ast-experimental, cleanup-dtors, enum, forall-pointer-decay, jacob/cs343-translation, jenkins-sandbox, master, new-ast, new-ast-unique-expr, pthread-emulation, qualifiedEnum
- Children: 8278abf
- Parents: 673cd63
- Files: 5 added, 3 edited
Legend:
- Unmodified lines are shown with a leading space
- Added lines with a leading "+"
- Removed lines with a leading "-"
libcfa/src/concurrency/CtxSwitch-x86_64.S
r673cd63 → r5b11c25

 #define FP_OFFSET ( 1 * PTR_BYTE )

+//-----------------------------------------------------------------------------
+// Regular context switch routine which enables switching from one context to another
 .text
 .align 2
…
 .size CtxSwitch, .-CtxSwitch

-.text
+//-----------------------------------------------------------------------------
+// Part of a 2-part context switch routine; use with CtxRet: stores the current context and then makes a function call
+.text
 .align 2
-.globl CtxInvokeStub
+.globl CtxStore
+.type CtxStore, @function
+CtxStore:
+
+	// Save volatile registers on the stack.
+
+	pushq %r15
+	pushq %r14
+	pushq %r13
+	pushq %r12
+	pushq %rbx
+
+	// Save old context in the "from" area.
+
+	movq %rsp,SP_OFFSET(%rdi)
+	movq %rbp,FP_OFFSET(%rdi)
+
+	// Don't load a new context; directly jump to the desired function.
+
+	jmp *%rsi
+.size CtxStore, .-CtxStore
+
+//-----------------------------------------------------------------------------
+// Part of a 2-part context switch routine; use with CtxStore: context switches to the desired target without saving the current context
+.text
+.align 2
+.globl CtxRet
+.type CtxRet, @function
+CtxRet:
+	// Load new context from the "to" area.
+
+	movq SP_OFFSET(%rdi),%rsp
+	movq FP_OFFSET(%rdi),%rbp
+
+	// Load volatile registers from the stack.
+
+	popq %rbx
+	popq %r12
+	popq %r13
+	popq %r14
+	popq %r15
+
+	// Return to thread.
+
+	ret
+.size CtxRet, .-CtxRet
+
+//-----------------------------------------------------------------------------
+// Stub used to create new stacks which are ready to be context switched to
+.text
+.align 2
+.globl CtxInvokeStub
+.type CtxInvokeStub, @function
 CtxInvokeStub:
 	movq %rbx, %rdi
 	jmp *%r12
+.size CtxInvokeStub, .-CtxInvokeStub

 // Local Variables: //
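For orientation, the two new routines are designed to be used as a pair: CtxStore saves the callee-saved registers and the current SP/FP into the "from" area and then jumps (rather than calls) into the supplied routine, while CtxRet restores a previously stored context, pops the saved registers, and returns into it; the routine passed to CtxStore must therefore never return normally. The sketch below is a hypothetical C-level illustration of that pairing, not part of the changeset: the __stack_context_t layout is assumed to match SP_OFFSET/FP_OFFSET above, and caller_ctx, target_ctx, trampoline, and switch_away are invented names.

// Hypothetical illustration of pairing CtxStore with CtxRet (x86-64, GNU C / Cforall).
// Assumes __stack_context_t stores SP at offset 0 and FP at offset 1 * PTR_BYTE,
// matching SP_OFFSET / FP_OFFSET in this file.
struct __stack_context_t {
	void * SP;
	void * FP;
};

extern void CtxStore( struct __stack_context_t * from, void (*callback)(void) ) asm ("CtxStore");
extern void CtxRet  ( struct __stack_context_t * to ) asm ("CtxRet") __attribute__((noreturn));

static struct __stack_context_t caller_ctx;   // filled in by CtxStore below
static struct __stack_context_t target_ctx;   // assumed to already hold a stored context

// Runs on the caller's stack after its context has been stored; it must not
// return, so it ends by switching to another context.
static void trampoline( void ) {
	CtxRet( &target_ctx );
}

static void switch_away( void ) {
	// Store the current context into caller_ctx, then jump into trampoline.
	// Execution resumes at this call site only when some other context
	// switches back to caller_ctx (e.g. via CtxSwitch or CtxRet).
	CtxStore( &caller_ctx, trampoline );
}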
libcfa/src/concurrency/coroutine.hfa
r673cd63 → r5b11c25

 	extern void CtxSwitch( struct __stack_context_t * from, struct __stack_context_t * to ) asm ("CtxSwitch");
-	// void CtxStore ( void * this) asm ("CtxStore");
-	// void CtxRet ( void * dst ) asm ("CtxRet");
+	extern void CtxStore ( struct __stack_context_t * from, __attribute__((noreturn)) void (*__callback)(void) ) asm ("CtxStore");
+	extern void CtxRet ( struct __stack_context_t * to ) asm ("CtxRet") __attribute__ ((__noreturn__));
 }
…
 }

-
-
-// static inline bool suspend_checkpoint(void) {
-// 	// optimization : read TLS once and reuse it
-// 	// Safety note: this is preemption safe since if
-// 	// preemption occurs after this line, the pointer
-// 	// will also migrate which means this value will
-// 	// stay in syn with the TLS
-// 	// set state of current coroutine to inactive
-// 	this->state = Checkpoint;
-
-// 	// context switch to specified coroutine
-// 	assert( src->stack.context );
-
-// 	CtxStore(src->stack.context);
-
-// 	bool ret = this->state == Checkpoint;
-
-// 	// set state of new coroutine to active
-// 	src->state = Active;
-
-// 	enable_interrupts( __cfaabi_dbg_ctx );
-// 	// Safety note : This could cause some false positives due to preemption
-// 	verify( TL_GET( preemption_state.enabled ) || TL_GET( this_processor )->do_terminate );
-
-// 	if( unlikely(src->cancellation != NULL) ) {
-// 		_CtxCoroutine_Unwind(src->cancellation);
-// 	}
-
-// 	return ret;
-// }
+static inline void suspend_then(fptr_t call) {
+	// optimization : read TLS once and reuse it
+	// Safety note: this is preemption safe since if
+	// preemption occurs after this line, the pointer
+	// will also migrate which means this value will
+	// stay in sync with the TLS
+	coroutine_desc * src = TL_GET( this_thread )->curr_cor;
+
+	assertf( src->last != 0,
+		"Attempt to suspend coroutine \"%.256s\" (%p) that has never been resumed.\n"
+		"Possible cause is a suspend executed in a member called by a coroutine user rather than by the coroutine main.",
+		src->name, src );
+	assertf( src->last->state != Halted,
+		"Attempt by coroutine \"%.256s\" (%p) to suspend back to terminated coroutine \"%.256s\" (%p).\n"
+		"Possible cause is terminated coroutine's main routine has already returned.",
+		src->name, src, src->last->name, src->last );
+
+	src->state = PreInactive;
+
+	// context switch to specified coroutine
+	assert( src->context.SP );
+
+	__attribute__((noreturn)) void __suspend_callback(void) {
+		call();
+
+		// set state of current coroutine to inactive
+		src->state = src->state == Halted ? Halted : Inactive;
+
+		TL_GET( this_thread )->curr_cor = src->last;
+
+		// context switch to specified coroutine
+		assert( src->last->context.SP );
+		CtxRet( &src->last->context );
+
+		abort();
+	}
+	CtxStore( &src->context, __suspend_callback );
+	// when CtxStore returns we are back in the src coroutine
+
+	// set state of new coroutine to active
+	src->state = Active;
+
+	if( unlikely(src->cancellation != NULL) ) {
+		_CtxCoroutine_Unwind(src->cancellation, src);
+	}
+
+	return;
+}

 // static inline void suspend_return(void) {
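The new suspend_then behaves like suspend, except that the supplied fptr_t callback runs on the suspending coroutine's stack after its context has been stored (while the state is PreInactive) and before control switches back to the resumer through CtxRet. A hypothetical usage sketch follows, assuming the usual Cforall coroutine interface (coroutine declaration, coroutine main, resume); the Ticker type and the on_suspend callback are invented for illustration and are not part of the changeset.

#include <coroutine.hfa>

coroutine Ticker { int count; };

// Invented callback matching fptr_t: it runs after the Ticker's context has been
// stored, still on the Ticker's stack, just before switching back to the resumer.
void on_suspend( void ) {
	// e.g. release a lock or publish state that the resumer may now inspect
}

void main( Ticker & this ) {
	for ( int i = 0;; i += 1 ) {
		this.count = i;
		suspend_then( on_suspend );   // like suspend(), but runs on_suspend first
	}
}

int main() {
	Ticker t;
	resume( t );   // t.count == 0; on_suspend has run once
	resume( t );   // t.count == 1
	return 0;
}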
libcfa/src/concurrency/invoke.h
r673cd63 → r5b11c25

 };

-enum coroutine_state { Halted, Start, Inactive, Active, Primed };
+enum coroutine_state { Halted, Start, Inactive, Active, Primed, PreInactive };

 struct coroutine_desc {
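PreInactive marks the window inside suspend_then where the coroutine's context has already been stored by CtxStore but the user callback has not yet finished and handed control back to the resumer; only then is the state set to Inactive (or left Halted). Below is a hypothetical helper sketch, assuming the coroutine_desc definition that follows in this header exposes its state field; ready_to_resume is an invented name and not part of the changeset.

// Invented helper: a coroutine in PreInactive is still executing its suspend_then
// callback on its own stack, so it is not yet safe to resume or reclaim it.
static inline bool ready_to_resume( struct coroutine_desc * cor ) {
	enum coroutine_state s = cor->state;
	return s == Start || s == Inactive || s == Primed;
}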