Blackfin: SMP: rename the arch_xxx lock funcs to __raw_xxx
The external functions are named __raw_xxx, not arch_xxx, so rename the
prototypes to match reality.  This fixes some simple build errors in the
bfin_ksyms.c code which exports these helpers to modules.

Signed-off-by: Graf Yang <graf.yang@analog.com>
Signed-off-by: Mike Frysinger <vapier@gentoo.org>
commit 71a66287d9
parent 57afb39935
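For context on the build errors the message refers to: bfin_ksyms.c exports the SMP lock helpers to modules, so its EXPORT_SYMBOL() lines must see prototypes whose names match the actual assembly symbols. Before this change the header declared arch_*_asm names while the assembly and the exports use __raw_*_asm, so the exports failed to build. The following is only a minimal sketch of what such exports look like; the exact list of exported helpers and includes is an assumption, not the literal file contents.

/*
 * Sketch only: exporting the assembly lock helpers, assuming their
 * prototypes are picked up via the (renamed) declarations in the
 * header patched below.  The precise EXPORT_SYMBOL list is illustrative.
 */
#include <linux/module.h>
#include <linux/spinlock.h>

#ifdef CONFIG_SMP
EXPORT_SYMBOL(__raw_spin_lock_asm);
EXPORT_SYMBOL(__raw_spin_trylock_asm);
EXPORT_SYMBOL(__raw_spin_unlock_asm);
EXPORT_SYMBOL(__raw_read_lock_asm);
EXPORT_SYMBOL(__raw_read_trylock_asm);
EXPORT_SYMBOL(__raw_read_unlock_asm);
EXPORT_SYMBOL(__raw_write_lock_asm);
EXPORT_SYMBOL(__raw_write_trylock_asm);
EXPORT_SYMBOL(__raw_write_unlock_asm);
#endif

With the header still declaring arch_read_lock_asm() and friends, export lines like these reference names that were never declared, which is the breakage the rename below resolves.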
@@ -11,6 +11,9 @@
 #include <asm/blackfin.h> /* for SSYNC() */
 #include <asm/sections.h> /* for _ramend */
+#ifdef CONFIG_SMP
+#include <asm/smp.h>
+#endif
 
 extern void blackfin_icache_flush_range(unsigned long start_address, unsigned long end_address);
 extern void blackfin_dcache_flush_range(unsigned long start_address, unsigned long end_address);
@@ -17,12 +17,12 @@ asmlinkage int __raw_spin_is_locked_asm(volatile int *ptr);
 asmlinkage void __raw_spin_lock_asm(volatile int *ptr);
 asmlinkage int __raw_spin_trylock_asm(volatile int *ptr);
 asmlinkage void __raw_spin_unlock_asm(volatile int *ptr);
-asmlinkage void arch_read_lock_asm(volatile int *ptr);
-asmlinkage int arch_read_trylock_asm(volatile int *ptr);
-asmlinkage void arch_read_unlock_asm(volatile int *ptr);
-asmlinkage void arch_write_lock_asm(volatile int *ptr);
-asmlinkage int arch_write_trylock_asm(volatile int *ptr);
-asmlinkage void arch_write_unlock_asm(volatile int *ptr);
+asmlinkage void __raw_read_lock_asm(volatile int *ptr);
+asmlinkage int __raw_read_trylock_asm(volatile int *ptr);
+asmlinkage void __raw_read_unlock_asm(volatile int *ptr);
+asmlinkage void __raw_write_lock_asm(volatile int *ptr);
+asmlinkage int __raw_write_trylock_asm(volatile int *ptr);
+asmlinkage void __raw_write_unlock_asm(volatile int *ptr);
 
 static inline int arch_spin_is_locked(arch_spinlock_t *lock)
 {
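As an aside, all of the asm prototypes above operate on a bare volatile int lock word. A rough C model of the contract they implement, using GCC atomic builtins, is shown below; the model_* helpers are hypothetical and purely illustrative, not the actual Blackfin assembly routines.

/* Illustrative model only: lock word is 0 when free, non-zero when held. */
static inline int model_spin_trylock(volatile int *ptr)
{
	/* non-zero on success, mirroring the usual trylock convention */
	return __sync_bool_compare_and_swap(ptr, 0, 1);
}

static inline void model_spin_lock(volatile int *ptr)
{
	while (!model_spin_trylock(ptr))
		;	/* spin until the word becomes free */
}

static inline void model_spin_unlock(volatile int *ptr)
{
	__sync_lock_release(ptr);	/* store 0 with release semantics */
}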
@@ -64,32 +64,32 @@ static inline int arch_write_can_lock(arch_rwlock_t *rw)
 
 static inline void arch_read_lock(arch_rwlock_t *rw)
 {
-	arch_read_lock_asm(&rw->lock);
+	__raw_read_lock_asm(&rw->lock);
 }
 
 static inline int arch_read_trylock(arch_rwlock_t *rw)
 {
-	return arch_read_trylock_asm(&rw->lock);
+	return __raw_read_trylock_asm(&rw->lock);
 }
 
 static inline void arch_read_unlock(arch_rwlock_t *rw)
 {
-	arch_read_unlock_asm(&rw->lock);
+	__raw_read_unlock_asm(&rw->lock);
 }
 
 static inline void arch_write_lock(arch_rwlock_t *rw)
 {
-	arch_write_lock_asm(&rw->lock);
+	__raw_write_lock_asm(&rw->lock);
 }
 
 static inline int arch_write_trylock(arch_rwlock_t *rw)
 {
-	return arch_write_trylock_asm(&rw->lock);
+	return __raw_write_trylock_asm(&rw->lock);
 }
 
 static inline void arch_write_unlock(arch_rwlock_t *rw)
 {
-	arch_write_unlock_asm(&rw->lock);
+	__raw_write_unlock_asm(&rw->lock);
 }
 
 #define arch_spin_relax(lock) cpu_relax()
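To see the chain end to end: any module that takes a kernel rwlock on Blackfin SMP lands in the arch_* inlines patched above, which call the exported __raw_*_asm routines. A hedged usage sketch follows; the demo_* module is hypothetical and not part of this commit.

/* Hypothetical module illustrating the call chain
 * write_lock() -> arch_write_lock() -> __raw_write_lock_asm(). */
#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/init.h>
#include <linux/spinlock.h>

static DEFINE_RWLOCK(demo_lock);
static int demo_value;

static int __init demo_init(void)
{
	write_lock(&demo_lock);		/* -> __raw_write_lock_asm() on SMP */
	demo_value = 1;
	write_unlock(&demo_lock);	/* -> __raw_write_unlock_asm() */
	return 0;
}

static void __exit demo_exit(void)
{
	read_lock(&demo_lock);		/* -> __raw_read_lock_asm() */
	pr_info("demo_value=%d\n", demo_value);
	read_unlock(&demo_lock);	/* -> __raw_read_unlock_asm() */
}

module_init(demo_init);
module_exit(demo_exit);
MODULE_LICENSE("GPL");

Without the rename, a module like this links fine (the inlines already used the arch_* names), but bfin_ksyms.c itself fails to compile because the __raw_* exports have no matching declarations.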