1 2 3 4 5 | static inline int __raw_spin_is_locked(raw_spinlock_t *lock) { int tmp = *(volatile signed int *)(&(lock)->slock); return (((tmp >> 8) & 0xff) != (tmp & 0xff)); } |
/*
 * Acquire a ticket spinlock (8-bit ticket variant; see
 * __raw_spin_is_locked for the lock-word layout).
 *
 * LOCK_PREFIX is a macro defined elsewhere — presumably expanding to the
 * x86 "lock" prefix (possibly conditional on SMP); verify in the
 * enclosing header.
 *
 * Step 1: "lock xaddw" atomically adds 0x0100 to the 16-bit lock word,
 * returning the old value in `inc`.  This bumps the next-ticket byte
 * (high byte) and leaves our assigned ticket in %h0 with the owner byte
 * of that snapshot in %b0.
 *
 * Step 2: spin until the owner byte equals our ticket:
 *   - cmpb %h0,%b0 / je 2f  — done when we are being served;
 *   - rep; nop              — the "pause" instruction, relaxing the
 *                             CPU inside the spin loop;
 *   - movb %1,%b0           — re-read only the owner (low) byte of the
 *                             lock word, then loop.
 *
 * Constraints: "+Q" keeps `inc` in a register whose high/low bytes are
 * addressable as %h0/%b0 (a/b/c/d); "+m" ties %1 to lock->slock; the
 * "memory" clobber orders the critical section after the acquire.
 */
static inline void __raw_spin_lock(raw_spinlock_t *lock)
{
	short inc = 0x0100;

	__asm__ __volatile__ (
		LOCK_PREFIX "xaddw %w0, %1\n"
		"1:\t"
		"cmpb %h0, %b0\n\t"
		"je 2f\n\t"
		"rep ; nop\n\t"
		"movb %1, %b0\n\t"
		/* don't need lfence here, because loads are in-order */
		"jmp 1b\n"
		"2:"
		:"+Q" (inc), "+m" (lock->slock)
		:
		:"memory", "cc");
}
/*
 * Release a ticket spinlock: increment the owner (low) byte of the lock
 * word, handing the lock to the next waiting ticket holder.
 *
 * UNLOCK_LOCK_PREFIX is a macro defined elsewhere — NOTE(review): on
 * x86 the unlock path typically needs no lock prefix (aligned byte
 * stores are atomic and x86 stores are not reordered with earlier
 * stores), so this presumably expands to empty or to "lock" only on
 * buggy CPUs; confirm against the enclosing header.
 *
 * The "memory" clobber keeps compiler-visible accesses of the critical
 * section from sinking below the release.
 */
static inline void __raw_spin_unlock(raw_spinlock_t *lock)
{
	__asm__ __volatile__(
		UNLOCK_LOCK_PREFIX "incb %0"
		:"+m" (lock->slock)
		:
		:"memory", "cc");
}
欢迎光临 电子技术论坛_中国专业的电子工程师学习交流社区-中电网技术论坛 (http://bbs.eccn.com/) | Powered by Discuz! 7.0.0 |