https://gcc.gnu.org/bugzilla/show_bug.cgi?id=65712
            Bug ID: 65712
           Summary: pthread_self prints wrong result when used with ucontext
           Product: gcc
           Version: 4.9.2
            Status: UNCONFIRMED
          Severity: normal
          Priority: P3
         Component: c
          Assignee: unassigned at gcc dot gnu.org
          Reporter: albertnetymk at gmail dot com

When using no optimization, the output is as expected:

gcc test.c -pthread && ./a.out
B is 139778852222720
A is 139778843830016
A 139778843830016
B 139778852222720

However, once I turn on optimization, the output becomes:

gcc -O1 test.c -pthread && ./a.out
B is 140067829094144
A is 140067820701440
A 140067820701440
B 140067820701440

Inspecting the emitted assembly, it seems that the two calls to `pthread_self` are reduced to one.

OS: Linux

Program:

#define _XOPEN_SOURCE 800
#include <ucontext.h>
#include <signal.h>
#include <unistd.h>
#include <stdio.h>
#include <pthread.h>
#include <stdlib.h>
#include <assert.h>

static int flag = 0;

void swap(ucontext_t *old, ucontext_t *new)
{
    int ret = swapcontext(old, new);
    assert(ret == 0);
}

#define SSIZE MINSIGSTKSZ
static char stack[SSIZE];
static ucontext_t a_ctx[2];
static ucontext_t b_ctx[2];

static void f1 (void)
{
    printf("A %lu\n", pthread_self());
    swap(&a_ctx[1], &a_ctx[0]);
    printf("B %lu\n", pthread_self());
}

void *thread_a(void *arg)
{
    printf("A is %lu\n", pthread_self());
    ucontext_t ctx = a_ctx[1];
    getcontext(&ctx);
    ctx.uc_stack.ss_sp = stack;
    ctx.uc_stack.ss_size = sizeof stack;
    makecontext(&ctx, f1, 0);
    swap(&a_ctx[0], &ctx);
    __atomic_store_n(&flag, 1, __ATOMIC_RELAXED);
    sleep(1);
    return NULL;
}

void *thread_b(void *arg)
{
    printf("B is %lu\n", pthread_self());
    while (__atomic_load_n(&flag, __ATOMIC_RELAXED) == 0)
        ;
    swap(&b_ctx[0], &a_ctx[1]);
    return NULL;
}

int main(int argc, char **argv)
{
    pthread_t a, b;
    pthread_create(&b, NULL, &thread_b, NULL);
    pthread_create(&a, NULL, &thread_a, NULL);
    pthread_exit(NULL);
}
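
As an aside not from the original report: a minimal workaround sketch, assuming the fold happens because glibc's <pthread.h> declares pthread_self with __attribute__((const)). Routing the call through a volatile function pointer (the name pthread_self_fresh below is hypothetical) keeps the compiler from reusing the first result, so each printf gets a fresh call.

/* Hypothetical workaround sketch, not part of the original report:
   a volatile function pointer must be reloaded at each use, so the
   compiler cannot fold the two indirect calls into one even though
   pthread_self itself is declared const. */
static pthread_t (*volatile pthread_self_fresh)(void) = pthread_self;

static void f1 (void)
{
    printf("A %lu\n", (unsigned long) pthread_self_fresh());
    swap(&a_ctx[1], &a_ctx[0]);
    printf("B %lu\n", (unsigned long) pthread_self_fresh());
}

With this variant of f1 substituted into the program above, the -O1 build would be expected to print the two different thread IDs again, since each call is actually emitted.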