/*
 * Copyright 2004-2009 Analog Devices Inc.
 *
 * Licensed under the GPL-2 or later.
 */

#ifndef __BFIN_SPINLOCK_H
#define __BFIN_SPINLOCK_H

#ifndef CONFIG_SMP
# include <asm-generic/spinlock.h>
#else

#include <linux/atomic.h>
#include <asm/processor.h>
#include <asm/barrier.h>
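
/*
 * The actual lock operations are implemented in Blackfin assembly (hence
 * the asmlinkage prototypes); each routine takes a pointer to the raw
 * lock word.
 */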
asmlinkage int __raw_spin_is_locked_asm(volatile int *ptr);
asmlinkage void __raw_spin_lock_asm(volatile int *ptr);
asmlinkage int __raw_spin_trylock_asm(volatile int *ptr);
asmlinkage void __raw_spin_unlock_asm(volatile int *ptr);
asmlinkage void __raw_read_lock_asm(volatile int *ptr);
asmlinkage int __raw_read_trylock_asm(volatile int *ptr);
asmlinkage void __raw_read_unlock_asm(volatile int *ptr);
asmlinkage void __raw_write_lock_asm(volatile int *ptr);
asmlinkage int __raw_write_trylock_asm(volatile int *ptr);
asmlinkage void __raw_write_unlock_asm(volatile int *ptr);
/* Used by arch_read_can_lock()/arch_write_can_lock() below. */
asmlinkage int __raw_uncached_fetch_asm(const volatile int *ptr);
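
/*
 * Spinlock operations: thin wrappers around the assembly helpers,
 * operating on the lock word inside arch_spinlock_t.  These back the
 * generic spin_lock()/spin_unlock() API.
 */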
static inline int arch_spin_is_locked(arch_spinlock_t *lock)
{
	return __raw_spin_is_locked_asm(&lock->lock);
}

static inline void arch_spin_lock(arch_spinlock_t *lock)
{
	__raw_spin_lock_asm(&lock->lock);
}

#define arch_spin_lock_flags(lock, flags)	arch_spin_lock(lock)

static inline int arch_spin_trylock(arch_spinlock_t *lock)
{
	return __raw_spin_trylock_asm(&lock->lock);
}

static inline void arch_spin_unlock(arch_spinlock_t *lock)
{
	__raw_spin_unlock_asm(&lock->lock);
}
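
/*
 * Wait for the lock to be released: smp_cond_load_acquire() spins until
 * lock->lock reads as zero and then provides acquire ordering.
 */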
static inline void arch_spin_unlock_wait(arch_spinlock_t *lock)
{
	smp_cond_load_acquire(&lock->lock, !VAL);
}
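
/*
 * rwlocks keep a biased count in rw->lock: a free lock holds RW_LOCK_BIAS,
 * so any positive value means a reader may still take it, while the full
 * bias means no readers or writers hold it and a writer may take it.
 */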
static inline int arch_read_can_lock(arch_rwlock_t *rw)
{
	return __raw_uncached_fetch_asm(&rw->lock) > 0;
}

static inline int arch_write_can_lock(arch_rwlock_t *rw)
{
	return __raw_uncached_fetch_asm(&rw->lock) == RW_LOCK_BIAS;
}
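
/*
 * Reader/writer lock, trylock and unlock: as with the spinlocks above,
 * these simply defer to the assembly routines.
 */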
static inline void arch_read_lock(arch_rwlock_t *rw)
{
	__raw_read_lock_asm(&rw->lock);
}

#define arch_read_lock_flags(lock, flags)	arch_read_lock(lock)

static inline int arch_read_trylock(arch_rwlock_t *rw)
{
	return __raw_read_trylock_asm(&rw->lock);
}

static inline void arch_read_unlock(arch_rwlock_t *rw)
{
	__raw_read_unlock_asm(&rw->lock);
}

static inline void arch_write_lock(arch_rwlock_t *rw)
{
	__raw_write_lock_asm(&rw->lock);
}

#define arch_write_lock_flags(lock, flags)	arch_write_lock(lock)

static inline int arch_write_trylock(arch_rwlock_t *rw)
{
	return __raw_write_trylock_asm(&rw->lock);
}

static inline void arch_write_unlock(arch_rwlock_t *rw)
{
	__raw_write_unlock_asm(&rw->lock);
}
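
/* No special backoff on contention: just cpu_relax() between attempts. */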
#define arch_spin_relax(lock)	cpu_relax()
#define arch_read_relax(lock)	cpu_relax()
#define arch_write_relax(lock)	cpu_relax()

#endif /* !CONFIG_SMP */

#endif /* !__BFIN_SPINLOCK_H */