Searched refs:arch_spin_is_locked (Results 1 – 19 of 19) sorted by relevance
27 #define arch_spin_is_locked(x) ((x)->slock == 0) macro
62 #define arch_spin_is_locked(lock) ((void)(lock), 0) macro
114 #define raw_spin_is_locked(lock) arch_spin_is_locked(&(lock)->raw_lock)
20 #define arch_spin_is_locked(x) (READ_ONCE((x)->lock) != 0) macro
44 if (arch_spin_is_locked(lock)) in arch_spin_lock()
35 static inline bool arch_spin_is_locked(arch_spinlock_t *mutex) in arch_spin_is_locked() function
156 #define arch_spin_is_locked(x) ((x)->lock != 0) macro
17 #define arch_spin_is_locked(x) ((x)->lock != 0) macro
114 #define arch_spin_is_locked(l) queued_spin_is_locked(l) macro
128 return arch_spin_is_locked(&lock->wait_lock); in queued_rwlock_is_contended()
26 #define arch_spin_is_locked(x) ((x)->lock <= 0) macro
18 #define arch_spin_is_locked(x) ((x)->lock <= 0) macro
74 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
10 static inline int arch_spin_is_locked(arch_spinlock_t *x) in arch_spin_is_locked() function
16 #define arch_spin_is_locked(lock) (*((volatile unsigned char *)(lock)) != 0) macro
53 static inline int arch_spin_is_locked(arch_spinlock_t *lp) in arch_spin_is_locked() function
39 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
119 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
101 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
13 #define arch_spin_is_locked(x) ((x)->slock != __ARCH_SPIN_LOCK_UNLOCKED__) macro
812 if (arch_spin_is_locked(&old.lock)) in read_hpet()
844 } while ((new.value == old.value) && arch_spin_is_locked(&new.lock)); in read_hpet()
Completed in 18 milliseconds