Linux Kernel 3.7.1
spinlock.h
/*
 * Copyright 2004-2009 Analog Devices Inc.
 *
 * Licensed under the GPL-2 or later.
 */

#ifndef __BFIN_SPINLOCK_H
#define __BFIN_SPINLOCK_H

#ifndef CONFIG_SMP
# include <asm-generic/spinlock.h>
#else

#include <linux/atomic.h>

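/*
 * The low-level primitives below are implemented in Blackfin assembly
 * elsewhere in the arch tree; each one operates on the raw lock word
 * through a volatile int pointer.
 */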
asmlinkage int __raw_spin_is_locked_asm(volatile int *ptr);
asmlinkage void __raw_spin_lock_asm(volatile int *ptr);
asmlinkage int __raw_spin_trylock_asm(volatile int *ptr);
asmlinkage void __raw_spin_unlock_asm(volatile int *ptr);
asmlinkage void __raw_read_lock_asm(volatile int *ptr);
asmlinkage int __raw_read_trylock_asm(volatile int *ptr);
asmlinkage void __raw_read_unlock_asm(volatile int *ptr);
asmlinkage void __raw_write_lock_asm(volatile int *ptr);
asmlinkage int __raw_write_trylock_asm(volatile int *ptr);
asmlinkage void __raw_write_unlock_asm(volatile int *ptr);

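/*
 * The arch_spin_* hooks expected by the generic spinlock layer simply
 * forward to the assembly helpers declared above.
 */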
static inline int arch_spin_is_locked(arch_spinlock_t *lock)
{
	return __raw_spin_is_locked_asm(&lock->lock);
}

static inline void arch_spin_lock(arch_spinlock_t *lock)
{
	__raw_spin_lock_asm(&lock->lock);
}

#define arch_spin_lock_flags(lock, flags) arch_spin_lock(lock)

static inline int arch_spin_trylock(arch_spinlock_t *lock)
{
	return __raw_spin_trylock_asm(&lock->lock);
}

static inline void arch_spin_unlock(arch_spinlock_t *lock)
{
	__raw_spin_unlock_asm(&lock->lock);
}

static inline void arch_spin_unlock_wait(arch_spinlock_t *lock)
{
	while (arch_spin_is_locked(lock))
		cpu_relax();
}

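/*
 * rwlocks follow the usual bias-counter convention: the lock word is
 * assumed to start at RW_LOCK_BIAS, each reader subtracts one and a
 * writer subtracts the whole bias, so a positive value means no writer
 * holds the lock and a value of RW_LOCK_BIAS means it is completely free.
 */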
static inline int arch_read_can_lock(arch_rwlock_t *rw)
{
	return __raw_uncached_fetch_asm(&rw->lock) > 0;
}

static inline int arch_write_can_lock(arch_rwlock_t *rw)
{
	return __raw_uncached_fetch_asm(&rw->lock) == RW_LOCK_BIAS;
}

static inline void arch_read_lock(arch_rwlock_t *rw)
{
	__raw_read_lock_asm(&rw->lock);
}

#define arch_read_lock_flags(lock, flags) arch_read_lock(lock)

static inline int arch_read_trylock(arch_rwlock_t *rw)
{
	return __raw_read_trylock_asm(&rw->lock);
}

static inline void arch_read_unlock(arch_rwlock_t *rw)
{
	__raw_read_unlock_asm(&rw->lock);
}

static inline void arch_write_lock(arch_rwlock_t *rw)
{
	__raw_write_lock_asm(&rw->lock);
}

#define arch_write_lock_flags(lock, flags) arch_write_lock(lock)

static inline int arch_write_trylock(arch_rwlock_t *rw)
{
	return __raw_write_trylock_asm(&rw->lock);
}

static inline void arch_write_unlock(arch_rwlock_t *rw)
{
	__raw_write_unlock_asm(&rw->lock);
}

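/* Relax hooks for contended locks: nothing smarter than cpu_relax() here. */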
#define arch_spin_relax(lock) cpu_relax()
#define arch_read_relax(lock) cpu_relax()
#define arch_write_relax(lock) cpu_relax()

#endif /* !CONFIG_SMP */

#endif /* !__BFIN_SPINLOCK_H */
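For reference, here is a minimal user-space sketch of the bias-counter convention that the two can_lock tests above imply. Everything in it is hypothetical illustration: the model_* names stand in for the atomic assembly helpers this header declares, the plain += and -= updates ignore the SMP safety those helpers provide, and RW_LOCK_BIAS is assumed to carry its usual kernel value of 0x01000000.

/*
 * Hypothetical, single-threaded model of the bias-counter rwlock scheme.
 * This is NOT the Blackfin assembly implementation; it only illustrates
 * the counter arithmetic behind arch_read_can_lock()/arch_write_can_lock().
 */
#include <assert.h>

#define RW_LOCK_BIAS 0x01000000		/* assumed bias value */

struct model_rwlock {
	int lock;
};

static int model_read_can_lock(struct model_rwlock *rw)
{
	return rw->lock > 0;			/* no writer present */
}

static int model_write_can_lock(struct model_rwlock *rw)
{
	return rw->lock == RW_LOCK_BIAS;	/* no readers and no writer */
}

static void model_read_lock(struct model_rwlock *rw)    { rw->lock -= 1; }
static void model_read_unlock(struct model_rwlock *rw)  { rw->lock += 1; }
static void model_write_lock(struct model_rwlock *rw)   { rw->lock -= RW_LOCK_BIAS; }
static void model_write_unlock(struct model_rwlock *rw) { rw->lock += RW_LOCK_BIAS; }

int main(void)
{
	struct model_rwlock rw = { RW_LOCK_BIAS };

	model_read_lock(&rw);			/* one reader holds the lock */
	assert(model_read_can_lock(&rw));	/* further readers are still allowed */
	assert(!model_write_can_lock(&rw));	/* but a writer would have to wait */
	model_read_unlock(&rw);

	model_write_lock(&rw);			/* writer takes the whole bias */
	assert(!model_read_can_lock(&rw));	/* readers are now excluded */
	model_write_unlock(&rw);
	assert(model_write_can_lock(&rw));	/* lock is completely free again */
	return 0;
}

Built with any C compiler, the asserts pass, which is only a sanity check that this counter arithmetic is consistent with the can_lock tests as written in the header.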