Diffstat (limited to 'libthreads/i386/cthreads.h')
 libthreads/i386/cthreads.h | 50 +++++++++++++++++++++++++++++++++++++++-----------
 1 file changed, 39 insertions(+), 11 deletions(-)
diff --git a/libthreads/i386/cthreads.h b/libthreads/i386/cthreads.h
index 8ffe4b72..694387b6 100644
--- a/libthreads/i386/cthreads.h
+++ b/libthreads/i386/cthreads.h
@@ -1,6 +1,6 @@
/*
* Mach Operating System
- * Copyright (c) 1991,1990 Carnegie Mellon University
+ * Copyright (c) 1993,1991,1990 Carnegie Mellon University
* All Rights Reserved.
*
* Permission to use, copy, modify and distribute this software and its
@@ -25,7 +25,37 @@
*/
/*
* HISTORY
- * $Log: cthreads.h,v $
+ * $Log: cthreads.h,v $
+ * Revision 1.3 2007/03/03 23:57:37 sthibaul
+ * 2006-03-04 Samuel Thibault <samuel.thibault@ens-lyon.org>
+ *
+ * * libpthread/sysdeps/i386/machine-sp.h (thread_stack_pointer):
+ * Optimize esp read.
+ * * libpthread/i386/cthreads.h (cthread_sp): Likewise.
+ *
+ * Revision 1.2 2002/05/27 02:50:10 roland
+ * 2002-05-26 Roland McGrath <roland@frob.com>
+ *
+ * Changes merged from CMU MK83a version:
+ * * cthreads.h, options.h: Various cleanups.
+ * * call.c, cthread_data.c, sync.c, mig_support.c: Likewise.
+ * * i386/cthreads.h, i386/thread.c, i386/lock.s: Likewise.
+ * * cthread_internals.h: Add decls for internal functions.
+ * (struct cproc): Use vm_offset_t for stack_base and stack_size members.
+ * Use natural_t for context member.
+ * * cprocs.c: Use prototypes for all defns.
+ * * cthreads.c: Likewise.
+ * (cthread_exit): Cast any_t to integer_t before int.
+ *
+ * Revision 2.9 93/01/24 13:24:58 danner
+ * Move ! in spin_try_lock to give the compiler
+ * a fighting chance.
+ * [92/11/19 rvb]
+ *
+ * Revision 2.8 93/01/14 18:05:09 danner
+ * asm -> __asm__
+ * [93/01/10 danner]
+ *
* Revision 2.7 92/01/03 20:36:59 dbg
* Add volatile to spin_lock_t. Change spin_unlock and
* spin_try_lock definitions back to memory operands, but rely on
@@ -66,22 +96,20 @@ typedef volatile int spin_lock_t;
#define spin_unlock(p) \
({ register int _u__ ; \
- asm volatile("xorl %0, %0; \n\
+ __asm__ volatile("xorl %0, %0; \n\
xchgl %0, %1" \
- : "=&r" (_u__), "=m" (*(p)) ); \
+ : "=&r" (_u__), "=m" (*(p)) :: "memory" ); \
0; })
#define spin_try_lock(p)\
- ({ boolean_t _r__; \
- asm volatile("movl $1, %0; \n\
+ (!({ boolean_t _r__; \
+ __asm__ volatile("movl $1, %0; \n\
xchgl %0, %1" \
- : "=&r" (_r__), "=m" (*(p)) ); \
- !_r__; })
+ : "=&r" (_r__), "=m" (*(p)) :: "memory" ); \
+ _r__; }))
#define cthread_sp() \
- ({ int _sp__; \
- asm("movl %%esp, %0" \
- : "=g" (_sp__) ); \
+ ({ register int _sp__ asm("esp"); \
_sp__; })
#endif /* __GNUC__ */
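
For reference, the three macros as they read after this patch, reconstructed here from the added and context lines above (indentation and the split asm strings are filled in by hand, so treat this as a sketch rather than the verbatim file):

/* Release: atomically store 0 into the lock.  The added "memory" clobber
   keeps the compiler from caching or reordering memory accesses across
   the unlock. */
#define spin_unlock(p) \
	({ register int _u__ ; \
	   __asm__ volatile("xorl %0, %0; \n\
			xchgl %0, %1" \
			: "=&r" (_u__), "=m" (*(p)) :: "memory" ); \
	   0; })

/* Try-acquire: exchange 1 into the lock and test the old value.  The !
   now sits outside the statement expression, so the compiler can fold
   the test into the caller's branch. */
#define spin_try_lock(p)\
	(!({ boolean_t _r__; \
	     __asm__ volatile("movl $1, %0; \n\
			xchgl %0, %1" \
			: "=&r" (_r__), "=m" (*(p)) :: "memory" ); \
	     _r__; }))

/* Current stack pointer, read via a register-asm variable bound to %esp
   instead of an explicit movl. */
#define cthread_sp() \
	({ register int _sp__ asm("esp"); \
	   _sp__; })

A minimal usage sketch, assuming the conventional cthreads spin-lock initializer of 0 (unlocked) and that spin_try_lock() yields nonzero when the lock was acquired:

	spin_lock_t lock = 0;		/* unlocked, i.e. SPIN_LOCK_INITIALIZER */
	if (spin_try_lock(&lock)) {	/* nonzero: lock was free and is now held */
		/* ... critical section ... */
		spin_unlock(&lock);
	}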