Xtensa updates for v5.16

Merge tag 'xtensa-20211105' of git://github.com/jcmvbkbc/linux-xtensa

Pull xtensa updates from Max Filippov:

 - add support for xtensa cores without windowed registers option

* tag 'xtensa-20211105' of git://github.com/jcmvbkbc/linux-xtensa:
  xtensa: move section symbols to asm/sections.h
  xtensa: remove unused variable wmask
  xtensa: only build windowed register support code when needed
  xtensa: use register window specific opcodes only when present
  xtensa: implement call0 ABI support in assembly
  xtensa: definitions for call0 ABI
  xtensa: don't use a12 in __xtensa_copy_user in call0 ABI
  xtensa: don't use a12 in strncpy_user
  xtensa: use a14 instead of a15 in inline assembly
  xtensa: move _SimulateUserKernelVectorException out of WindowVectors
commit 00f178e150
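The recurring pattern throughout the series: code that manipulates register windows is fenced behind XCHAL_HAVE_WINDOWED/SUPPORT_WINDOWED, and hard-coded a* registers and call4/callx4 instructions give way to ABI-neutral abi_* aliases from <asm/asmmacro.h>. A minimal usage sketch (hypothetical fragment, not a line from the patch) of how one assembly sequence now serves both kernel ABIs:

	/* Sketch only: with the abi_* aliases the same source assembles
	 * under either kernel ABI.  On windowed kernels the preprocessor
	 * turns this into "mov a6, a1; call4 do_notify_resume"; on call0
	 * kernels it becomes "mov a2, a1; call0 do_notify_resume".
	 */
	#include <asm/asmmacro.h>

		mov	abi_arg0, a1		/* first C argument: struct pt_regs * */
		abi_call	do_notify_resume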
--- a/arch/xtensa/boot/boot-elf/bootstrap.S
+++ b/arch/xtensa/boot/boot-elf/bootstrap.S
@@ -42,12 +42,14 @@ _bootparam:

 	.align	4
 _SetupMMU:
+#if XCHAL_HAVE_WINDOWED
 	movi	a0, 0
 	wsr	a0, windowbase
 	rsync
 	movi	a0, 1
 	wsr	a0, windowstart
 	rsync
+#endif
 	movi	a0, 0x1F
 	wsr	a0, ps
 	rsync
--- a/arch/xtensa/boot/boot-redboot/bootstrap.S
+++ b/arch/xtensa/boot/boot-redboot/bootstrap.S
@@ -3,6 +3,7 @@
 #include <asm/regs.h>
 #include <asm/asmmacro.h>
 #include <asm/cacheasm.h>
+#include <asm/processor.h>
 /*
  * RB-Data: RedBoot data/bss
  * P:       Boot-Parameters
@@ -36,7 +37,7 @@
 	.globl __start
 	/* this must be the first byte of the loader! */
 __start:
-	entry	sp, 32		# we do not intend to return
+	abi_entry(32)		# we do not intend to return
 _call0	_start
 __start_a0:
 	.align 4
@@ -55,17 +56,19 @@ _start:
 	movi	a4, 1
 	wsr	a4, ps
 	rsync
+#if XCHAL_HAVE_WINDOWED
 	rsr	a5, windowbase
 	ssl	a5
 	sll	a4, a4
 	wsr	a4, windowstart
 	rsync
+#endif
-	movi	a4, 0x00040000
+	movi	a4, KERNEL_PS_WOE_MASK
 	wsr	a4, ps
 	rsync

+	KABI_C0	mov	abi_saved0, abi_arg0

 	/* copy the loader to its address
	 * Note: The loader itself is a very small piece, so we assume we
	 * don't partially overlap. We also assume (even more important)
@@ -168,52 +171,52 @@ _reloc:

 	movi	a3, __image_load
 	sub	a4, a3, a4
-	add	a8, a0, a4
+	add	abi_arg2, a0, a4

 	# a1      Stack
 	# a8(a4)  Load address of the image

-	movi	a6, _image_start
-	movi	a10, _image_end
-	movi	a7, 0x1000000
-	sub	a11, a10, a6
-	movi	a9, complen
-	s32i	a11, a9, 0
+	movi	abi_arg0, _image_start
+	movi	abi_arg4, _image_end
+	movi	abi_arg1, 0x1000000
+	sub	abi_tmp0, abi_arg4, abi_arg0
+	movi	abi_arg3, complen
+	s32i	abi_tmp0, abi_arg3, 0

 	movi	a0, 0

-	# a6 destination
-	# a7 maximum size of destination
-	# a8 source
-	# a9 ptr to length
+	# abi_arg0 destination
+	# abi_arg1 maximum size of destination
+	# abi_arg2 source
+	# abi_arg3 ptr to length

 	.extern gunzip
-	movi	a4, gunzip
-	beqz	a4, 1f
+	movi	abi_tmp0, gunzip
+	beqz	abi_tmp0, 1f

-	callx4	a4
+	abi_callx	abi_tmp0

 	j	2f


-	# a6 destination start
-	# a7 maximum size of destination
-	# a8 source start
-	# a9 ptr to length
-	# a10 destination end
+	# abi_arg0 destination start
+	# abi_arg1 maximum size of destination
+	# abi_arg2 source start
+	# abi_arg3 ptr to length
+	# abi_arg4 destination end

 1:
-	l32i	a9, a8, 0
-	l32i	a11, a8, 4
-	s32i	a9, a6, 0
-	s32i	a11, a6, 4
-	l32i	a9, a8, 8
-	l32i	a11, a8, 12
-	s32i	a9, a6, 8
-	s32i	a11, a6, 12
-	addi	a6, a6, 16
-	addi	a8, a8, 16
-	blt	a6, a10, 1b
+	l32i	abi_tmp0, abi_arg2, 0
+	l32i	abi_tmp1, abi_arg2, 4
+	s32i	abi_tmp0, abi_arg0, 0
+	s32i	abi_tmp1, abi_arg0, 4
+	l32i	abi_tmp0, abi_arg2, 8
+	l32i	abi_tmp1, abi_arg2, 12
+	s32i	abi_tmp0, abi_arg0, 8
+	s32i	abi_tmp1, abi_arg0, 12
+	addi	abi_arg0, abi_arg0, 16
+	addi	abi_arg2, abi_arg2, 16
+	blt	abi_arg0, abi_arg4, 1b


 	/* jump to the kernel */
@@ -230,6 +233,7 @@ _reloc:

 	# a2 Boot parameter list

+	KABI_C0	mov	abi_arg0, abi_saved0
 	movi	a0, _image_start
 	jx	a0
--- a/arch/xtensa/include/asm/asmmacro.h
+++ b/arch/xtensa/include/asm/asmmacro.h
@@ -194,6 +194,12 @@
 #define XTENSA_STACK_ALIGNMENT		16

 #if defined(__XTENSA_WINDOWED_ABI__)
+
+/* Assembly instructions for windowed kernel ABI. */
+#define KABI_W
+/* Assembly instructions for call0 kernel ABI (will be ignored). */
+#define KABI_C0 #
+
 #define XTENSA_FRAME_SIZE_RESERVE	16
 #define XTENSA_SPILL_STACK_RESERVE	32

@@ -206,8 +212,34 @@
 #define abi_ret(frame_size)	retw
 #define abi_ret_default		retw

+/* direct call */
+#define abi_call	call4
+/* indirect call */
+#define abi_callx	callx4
+/* outgoing call argument registers */
+#define abi_arg0	a6
+#define abi_arg1	a7
+#define abi_arg2	a8
+#define abi_arg3	a9
+#define abi_arg4	a10
+#define abi_arg5	a11
+/* return value */
+#define abi_rv		a6
+/* registers preserved across call */
+#define abi_saved0	a2
+#define abi_saved1	a3
+
+/* none of the above */
+#define abi_tmp0	a4
+#define abi_tmp1	a5
+
 #elif defined(__XTENSA_CALL0_ABI__)

+/* Assembly instructions for windowed kernel ABI (will be ignored). */
+#define KABI_W #
+/* Assembly instructions for call0 kernel ABI. */
+#define KABI_C0
+
 #define XTENSA_SPILL_STACK_RESERVE	0

 #define abi_entry(frame_size)	__abi_entry (frame_size)
@@ -233,10 +265,43 @@

 #define abi_ret_default	ret

+/* direct call */
+#define abi_call	call0
+/* indirect call */
+#define abi_callx	callx0
+/* outgoing call argument registers */
+#define abi_arg0	a2
+#define abi_arg1	a3
+#define abi_arg2	a4
+#define abi_arg3	a5
+#define abi_arg4	a6
+#define abi_arg5	a7
+/* return value */
+#define abi_rv		a2
+/* registers preserved across call */
+#define abi_saved0	a12
+#define abi_saved1	a13
+
+/* none of the above */
+#define abi_tmp0	a8
+#define abi_tmp1	a9
+
 #else
 #error Unsupported Xtensa ABI
 #endif

+#if defined(USER_SUPPORT_WINDOWED)
+/* Assembly instructions for windowed user ABI. */
+#define UABI_W
+/* Assembly instructions for call0 user ABI (will be ignored). */
+#define UABI_C0 #
+#else
+/* Assembly instructions for windowed user ABI (will be ignored). */
+#define UABI_W #
+/* Assembly instructions for call0 user ABI. */
+#define UABI_C0
+#endif
+
 #define __XTENSA_HANDLER	.section ".exception.text", "ax"

 #endif /* _XTENSA_ASMMACRO_H */
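The one-character KABI_W/KABI_C0 (and, for user-ABI decisions, UABI_W/UABI_C0) definitions above act as per-line filters: on the ABI where a line does not apply, the macro expands to '#', the assembler's comment character, so the rest of the line is discarded at assembly time with no runtime cost. A short sketch (assumed standalone .S fragment, not part of the patch):

	#include <asm/asmmacro.h>

	KABI_W	movi	a0, PS_WOE_MASK		/* assembled only on windowed kernels */
	KABI_C0	mov	abi_saved0, abi_arg0	/* assembled only on call0 kernels */

	/* Preprocessed with -D__XTENSA_CALL0_ABI__, KABI_W expands to '#',
	 * so the first line reaches the assembler as a comment; KABI_C0
	 * expands to nothing, so the second line is assembled as written.
	 */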
--- a/arch/xtensa/include/asm/atomic.h
+++ b/arch/xtensa/include/asm/atomic.h
@@ -25,15 +25,15 @@
  *
  * Locking interrupts looks like this:
  *
- *    rsil a15, TOPLEVEL
+ *    rsil a14, TOPLEVEL
  *    <code>
- *    wsr  a15, PS
+ *    wsr  a14, PS
  *    rsync
  *
- * Note that a15 is used here because the register allocation
+ * Note that a14 is used here because the register allocation
  * done by the compiler is not guaranteed and a window overflow
  * may not occur between the rsil and wsr instructions. By using
- * a15 in the rsil, the machine is guaranteed to be in a state
+ * a14 in the rsil, the machine is guaranteed to be in a state
  * where no register reference will cause an overflow.
  */

@@ -185,15 +185,15 @@ static inline void arch_atomic_##op(int i, atomic_t * v) \
 	unsigned int vval; \
 \
 	__asm__ __volatile__( \
-			"	rsil	a15, "__stringify(TOPLEVEL)"\n" \
+			"	rsil	a14, "__stringify(TOPLEVEL)"\n" \
 			"	l32i	%[result], %[mem]\n" \
 			"	" #op " %[result], %[result], %[i]\n" \
 			"	s32i	%[result], %[mem]\n" \
-			"	wsr	a15, ps\n" \
+			"	wsr	a14, ps\n" \
 			"	rsync\n" \
 			: [result] "=&a" (vval), [mem] "+m" (*v) \
 			: [i] "a" (i) \
-			: "a15", "memory" \
+			: "a14", "memory" \
 			); \
 } \

@@ -203,15 +203,15 @@ static inline int arch_atomic_##op##_return(int i, atomic_t * v) \
 	unsigned int vval; \
 \
 	__asm__ __volatile__( \
-			"	rsil	a15,"__stringify(TOPLEVEL)"\n" \
+			"	rsil	a14,"__stringify(TOPLEVEL)"\n" \
 			"	l32i	%[result], %[mem]\n" \
 			"	" #op " %[result], %[result], %[i]\n" \
 			"	s32i	%[result], %[mem]\n" \
-			"	wsr	a15, ps\n" \
+			"	wsr	a14, ps\n" \
 			"	rsync\n" \
 			: [result] "=&a" (vval), [mem] "+m" (*v) \
 			: [i] "a" (i) \
-			: "a15", "memory" \
+			: "a14", "memory" \
 			); \
 \
 	return vval; \
@@ -223,16 +223,16 @@ static inline int arch_atomic_fetch_##op(int i, atomic_t * v) \
 	unsigned int tmp, vval; \
 \
 	__asm__ __volatile__( \
-			"	rsil	a15,"__stringify(TOPLEVEL)"\n" \
+			"	rsil	a14,"__stringify(TOPLEVEL)"\n" \
 			"	l32i	%[result], %[mem]\n" \
 			"	" #op " %[tmp], %[result], %[i]\n" \
 			"	s32i	%[tmp], %[mem]\n" \
-			"	wsr	a15, ps\n" \
+			"	wsr	a14, ps\n" \
 			"	rsync\n" \
 			: [result] "=&a" (vval), [tmp] "=&a" (tmp), \
 			  [mem] "+m" (*v) \
 			: [i] "a" (i) \
-			: "a15", "memory" \
+			: "a14", "memory" \
 			); \
 \
 	return vval; \
--- a/arch/xtensa/include/asm/cmpxchg.h
+++ b/arch/xtensa/include/asm/cmpxchg.h
@@ -52,16 +52,16 @@ __cmpxchg_u32(volatile int *p, int old, int new)
 	return new;
 #else
 	__asm__ __volatile__(
-			"	rsil	a15, "__stringify(TOPLEVEL)"\n"
+			"	rsil	a14, "__stringify(TOPLEVEL)"\n"
 			"	l32i	%[old], %[mem]\n"
 			"	bne	%[old], %[cmp], 1f\n"
 			"	s32i	%[new], %[mem]\n"
 			"1:\n"
-			"	wsr	a15, ps\n"
+			"	wsr	a14, ps\n"
 			"	rsync\n"
 			: [old] "=&a" (old), [mem] "+m" (*p)
 			: [cmp] "a" (old), [new] "r" (new)
-			: "a15", "memory");
+			: "a14", "memory");
 	return old;
 #endif
 }
@@ -116,10 +116,10 @@ static inline unsigned long __cmpxchg_local(volatile void *ptr,
 /*
  * xchg_u32
  *
- * Note that a15 is used here because the register allocation
+ * Note that a14 is used here because the register allocation
  * done by the compiler is not guaranteed and a window overflow
  * may not occur between the rsil and wsr instructions. By using
- * a15 in the rsil, the machine is guaranteed to be in a state
+ * a14 in the rsil, the machine is guaranteed to be in a state
  * where no register reference will cause an overflow.
  */

@@ -157,14 +157,14 @@ static inline unsigned long xchg_u32(volatile int * m, unsigned long val)
 #else
 	unsigned long tmp;
 	__asm__ __volatile__(
-			"	rsil	a15, "__stringify(TOPLEVEL)"\n"
+			"	rsil	a14, "__stringify(TOPLEVEL)"\n"
 			"	l32i	%[tmp], %[mem]\n"
 			"	s32i	%[val], %[mem]\n"
-			"	wsr	a15, ps\n"
+			"	wsr	a14, ps\n"
 			"	rsync\n"
 			: [tmp] "=&a" (tmp), [mem] "+m" (*m)
 			: [val] "a" (val)
-			: "a15", "memory");
+			: "a14", "memory");
 	return tmp;
 #endif
 }
--- a/arch/xtensa/include/asm/core.h
+++ b/arch/xtensa/include/asm/core.h
@@ -26,4 +26,15 @@
 #define XCHAL_SPANNING_WAY 0
 #endif

+#if XCHAL_HAVE_WINDOWED
+#if defined(CONFIG_USER_ABI_DEFAULT) || defined(CONFIG_USER_ABI_CALL0_PROBE)
+/* Whether windowed ABI is supported in userspace. */
+#define USER_SUPPORT_WINDOWED
+#endif
+#if defined(__XTENSA_WINDOWED_ABI__) || defined(USER_SUPPORT_WINDOWED)
+/* Whether windowed ABI is supported either in userspace or in the kernel. */
+#define SUPPORT_WINDOWED
+#endif
+#endif
+
 #endif
--- a/arch/xtensa/include/asm/processor.h
+++ b/arch/xtensa/include/asm/processor.h
@@ -18,12 +18,6 @@
 #include <asm/types.h>
 #include <asm/regs.h>

-/* Assertions. */
-
-#if (XCHAL_HAVE_WINDOWED != 1)
-# error Linux requires the Xtensa Windowed Registers Option.
-#endif
-
 /* Xtensa ABI requires stack alignment to be at least 16 */

 #define STACK_ALIGN (XCHAL_DATA_WIDTH > 16 ? XCHAL_DATA_WIDTH : 16)
@@ -105,8 +99,18 @@
 #define WSBITS  (XCHAL_NUM_AREGS / 4)      /* width of WINDOWSTART in bits */
 #define WBBITS  (XCHAL_NUM_AREGS_LOG2 - 2) /* width of WINDOWBASE in bits */

+#if defined(__XTENSA_WINDOWED_ABI__)
+#define KERNEL_PS_WOE_MASK PS_WOE_MASK
+#elif defined(__XTENSA_CALL0_ABI__)
+#define KERNEL_PS_WOE_MASK 0
+#else
+#error Unsupported xtensa ABI
+#endif
+
 #ifndef __ASSEMBLY__

+#if defined(__XTENSA_WINDOWED_ABI__)
+
 /* Build a valid return address for the specified call winsize.
  * winsize must be 1 (call4), 2 (call8), or 3 (call12)
  */
@@ -117,6 +121,22 @@
  */
 #define MAKE_PC_FROM_RA(ra,sp)    (((ra) & 0x3fffffff) | ((sp) & 0xc0000000))

+#elif defined(__XTENSA_CALL0_ABI__)
+
+/* Build a valid return address for the specified call winsize.
+ * winsize must be 1 (call4), 2 (call8), or 3 (call12)
+ */
+#define MAKE_RA_FOR_CALL(ra, ws)   (ra)
+
+/* Convert return address to a valid pc
+ * Note: We assume that the stack pointer is in the same 1GB ranges as the ra
+ */
+#define MAKE_PC_FROM_RA(ra, sp)    (ra)
+
+#else
+#error Unsupported Xtensa ABI
+#endif
+
 /* Spill slot location for the register reg in the spill area under the stack
  * pointer sp. reg must be in the range [0..4).
  */
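The two MAKE_PC_FROM_RA definitions differ because a windowed callN instruction stores the caller's window increment in the top two bits of the return-address register, leaving only 30 bits of pc; the missing bits are recovered from the stack pointer, which is assumed to sit in the same 1 GiB region. call0 leaves the full pc in a0, so there the macro is the identity. A worked example with illustrative values (not from the patch):

	/* A call4 returning to pc 0xd0001237 leaves
	 *   ra = (1 << 30) | (0xd0001237 & 0x3fffffff) = 0x50001237.
	 * With sp = 0xd7fff000 in the same 1 GiB region:
	 *   MAKE_PC_FROM_RA(0x50001237, 0xd7fff000)
	 *     = (0x50001237 & 0x3fffffff) | (0xd7fff000 & 0xc0000000)
	 *     = 0x10001237 | 0xc0000000
	 *     = 0xd0001237
	 */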
arch/xtensa/include/asm/sections.h (new file, 41 lines)
--- /dev/null
+++ b/arch/xtensa/include/asm/sections.h
@@ -0,0 +1,41 @@
+/* SPDX-License-Identifier: GPL-2.0 */
+
+#ifndef _XTENSA_SECTIONS_H
+#define _XTENSA_SECTIONS_H
+
+#include <asm-generic/sections.h>
+
+#ifdef CONFIG_VECTORS_ADDR
+extern char _WindowVectors_text_start[];
+extern char _WindowVectors_text_end[];
+extern char _DebugInterruptVector_text_start[];
+extern char _DebugInterruptVector_text_end[];
+extern char _KernelExceptionVector_text_start[];
+extern char _KernelExceptionVector_text_end[];
+extern char _UserExceptionVector_text_start[];
+extern char _UserExceptionVector_text_end[];
+extern char _DoubleExceptionVector_text_start[];
+extern char _DoubleExceptionVector_text_end[];
+extern char _exception_text_start[];
+extern char _exception_text_end[];
+extern char _Level2InterruptVector_text_start[];
+extern char _Level2InterruptVector_text_end[];
+extern char _Level3InterruptVector_text_start[];
+extern char _Level3InterruptVector_text_end[];
+extern char _Level4InterruptVector_text_start[];
+extern char _Level4InterruptVector_text_end[];
+extern char _Level5InterruptVector_text_start[];
+extern char _Level5InterruptVector_text_end[];
+extern char _Level6InterruptVector_text_start[];
+extern char _Level6InterruptVector_text_end[];
+#endif
+#ifdef CONFIG_SMP
+extern char _SecondaryResetVector_text_start[];
+extern char _SecondaryResetVector_text_end[];
+#endif
+#ifdef CONFIG_XIP_KERNEL
+extern char _xip_start[];
+extern char _xip_end[];
+#endif
+
+#endif
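Note that the new header declares the section markers as extern char _sym[] rather than the extern char _sym form removed from setup.c below: a linker-script symbol has an address but no storage, and the array form lets callers use the name directly as that address. A minimal sketch (hypothetical helper, not from the patch):

	extern char _WindowVectors_text_start[];	/* name decays to the address */

	static unsigned long vectors_start(void)
	{
		/* no '&' needed, unlike the old single-char declarations */
		return (unsigned long)_WindowVectors_text_start;
	}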
--- a/arch/xtensa/include/asm/traps.h
+++ b/arch/xtensa/include/asm/traps.h
@@ -56,6 +56,7 @@ void secondary_trap_init(void);

 static inline void spill_registers(void)
 {
+#if defined(__XTENSA_WINDOWED_ABI__)
 #if XCHAL_NUM_AREGS > 16
 	__asm__ __volatile__ (
 		"	call8	1f\n"
@@ -96,6 +97,7 @@ static inline void spill_registers(void)
 		"	mov	a12, a12\n"
 		: : : "memory");
 #endif
+#endif
 }

 struct debug_table {
--- a/arch/xtensa/kernel/align.S
+++ b/arch/xtensa/kernel/align.S
@@ -58,7 +58,9 @@
  *	BE  shift left / mask 0 0 X X
  */

+#if XCHAL_HAVE_WINDOWED
 #define UNALIGNED_USER_EXCEPTION
+#endif

 #if XCHAL_HAVE_BE
--- a/arch/xtensa/kernel/entry.S
+++ b/arch/xtensa/kernel/entry.S
@@ -158,6 +158,7 @@ _user_exception:
 	/* Rotate ws so that the current windowbase is at bit0. */
 	/* Assume ws = xxwww1yyyy. Rotate ws right, so that a2 = yyyyxxwww1 */

+#if defined(USER_SUPPORT_WINDOWED)
 	rsr	a2, windowbase
 	rsr	a3, windowstart
 	ssr	a2
@@ -167,24 +168,33 @@ _user_exception:
 	src	a2, a3, a2
 	srli	a2, a2, 32-WSBITS
 	s32i	a2, a1, PT_WMASK	# needed for restoring registers
+#else
+	movi	a2, 0
+	movi	a3, 1
+	s32i	a2, a1, PT_WINDOWBASE
+	s32i	a3, a1, PT_WINDOWSTART
+	s32i	a3, a1, PT_WMASK
+#endif

 	/* Save only live registers. */

-	_bbsi.l	a2, 1, 1f
+UABI_W	_bbsi.l	a2, 1, 1f
 	s32i	a4, a1, PT_AREG4
 	s32i	a5, a1, PT_AREG5
 	s32i	a6, a1, PT_AREG6
 	s32i	a7, a1, PT_AREG7
-	_bbsi.l	a2, 2, 1f
+UABI_W	_bbsi.l	a2, 2, 1f
 	s32i	a8, a1, PT_AREG8
 	s32i	a9, a1, PT_AREG9
 	s32i	a10, a1, PT_AREG10
 	s32i	a11, a1, PT_AREG11
-	_bbsi.l	a2, 3, 1f
+UABI_W	_bbsi.l	a2, 3, 1f
 	s32i	a12, a1, PT_AREG12
 	s32i	a13, a1, PT_AREG13
 	s32i	a14, a1, PT_AREG14
 	s32i	a15, a1, PT_AREG15

+#if defined(USER_SUPPORT_WINDOWED)
 	_bnei	a2, 1, 1f		# only one valid frame?

 	/* Only one valid frame, skip saving regs. */
@@ -239,7 +249,7 @@ _user_exception:
 	rsync

 	/* We are back to the original stack pointer (a1) */
+#endif
 2:	/* Now, jump to the common exception handler. */

 	j	common_exception
@@ -295,6 +305,7 @@ _kernel_exception:
 	s32i	a3, a1, PT_SAR
 	s32i	a2, a1, PT_ICOUNTLEVEL

+#if defined(__XTENSA_WINDOWED_ABI__)
 	/* Rotate ws so that the current windowbase is at bit0. */
 	/* Assume ws = xxwww1yyyy. Rotate ws right, so that a2 = yyyyxxwww1 */

@@ -305,27 +316,28 @@ _kernel_exception:
 	src	a2, a3, a2
 	srli	a2, a2, 32-WSBITS
 	s32i	a2, a1, PT_WMASK	# needed for kernel_exception_exit
+#endif

 	/* Save only the live window-frame */

-	_bbsi.l	a2, 1, 1f
+KABI_W	_bbsi.l	a2, 1, 1f
 	s32i	a4, a1, PT_AREG4
 	s32i	a5, a1, PT_AREG5
 	s32i	a6, a1, PT_AREG6
 	s32i	a7, a1, PT_AREG7
-	_bbsi.l	a2, 2, 1f
+KABI_W	_bbsi.l	a2, 2, 1f
 	s32i	a8, a1, PT_AREG8
 	s32i	a9, a1, PT_AREG9
 	s32i	a10, a1, PT_AREG10
 	s32i	a11, a1, PT_AREG11
-	_bbsi.l	a2, 3, 1f
+KABI_W	_bbsi.l	a2, 3, 1f
 	s32i	a12, a1, PT_AREG12
 	s32i	a13, a1, PT_AREG13
 	s32i	a14, a1, PT_AREG14
 	s32i	a15, a1, PT_AREG15

+#ifdef __XTENSA_WINDOWED_ABI__
 	_bnei	a2, 1, 1f

 	/* Copy spill slots of a0 and a1 to imitate movsp
	 * in order to keep exception stack continuous
	 */
@@ -333,6 +345,7 @@ _kernel_exception:
 	l32i	a0, a1, PT_SIZE + 4
 	s32e	a3, a1, -16
 	s32e	a0, a1, -12
+#endif
 1:
 	l32i	a0, a1, PT_AREG0	# restore saved a0
 	wsr	a0, depc
@@ -419,16 +432,16 @@ common_exception:
 	movi	a3, LOCKLEVEL

 .Lexception:
-	movi	a0, PS_WOE_MASK
-	or	a3, a3, a0
+KABI_W	movi	a0, PS_WOE_MASK
+KABI_W	or	a3, a3, a0
 #else
 	addi	a2, a2, -EXCCAUSE_LEVEL1_INTERRUPT
 	movi	a0, LOCKLEVEL
 	extui	a3, a3, PS_INTLEVEL_SHIFT, PS_INTLEVEL_WIDTH
 					# a3 = PS.INTLEVEL
 	moveqz	a3, a0, a2		# a3 = LOCKLEVEL iff interrupt
-	movi	a2, PS_WOE_MASK
-	or	a3, a3, a2
+KABI_W	movi	a2, PS_WOE_MASK
+KABI_W	or	a3, a3, a2
 	rsr	a2, exccause
 #endif

@@ -461,14 +474,14 @@ common_exception:
	 */

 	rsr	a4, excsave1
-	mov	a6, a1			# pass stack frame
-	mov	a7, a2			# pass EXCCAUSE
 	addx4	a4, a2, a4
 	l32i	a4, a4, EXC_TABLE_DEFAULT	# load handler
+	mov	abi_arg1, a2		# pass EXCCAUSE
+	mov	abi_arg0, a1		# pass stack frame

 	/* Call the second-level handler */

-	callx4	a4
+	abi_callx	a4

 	/* Jump here for exception exit */
 	.global common_exception_return
@@ -482,15 +495,15 @@ common_exception_return:
 1:
 	irq_save a2, a3
 #ifdef CONFIG_TRACE_IRQFLAGS
-	call4	trace_hardirqs_off
+	abi_call	trace_hardirqs_off
 #endif

 	/* Jump if we are returning from kernel exceptions. */

-	l32i	a3, a1, PT_PS
+	l32i	abi_saved1, a1, PT_PS
 	GET_THREAD_INFO(a2, a1)
 	l32i	a4, a2, TI_FLAGS
-	_bbci.l	a3, PS_UM_BIT, 6f
+	_bbci.l	abi_saved1, PS_UM_BIT, 6f

 	/* Specific to a user exception exit:
	 * We need to check some flags for signal handling and rescheduling,
@@ -509,20 +522,20 @@ common_exception_return:
 	/* Call do_signal() */

 #ifdef CONFIG_TRACE_IRQFLAGS
-	call4	trace_hardirqs_on
+	abi_call	trace_hardirqs_on
 #endif
 	rsil	a2, 0
-	mov	a6, a1
-	call4	do_notify_resume	# int do_notify_resume(struct pt_regs*)
+	mov	abi_arg0, a1
+	abi_call	do_notify_resume	# int do_notify_resume(struct pt_regs*)
 	j	1b

 3:	/* Reschedule */

 #ifdef CONFIG_TRACE_IRQFLAGS
-	call4	trace_hardirqs_on
+	abi_call	trace_hardirqs_on
 #endif
 	rsil	a2, 0
-	call4	schedule	# void schedule (void)
+	abi_call	schedule	# void schedule (void)
 	j	1b

 #ifdef CONFIG_PREEMPTION
@@ -533,33 +546,33 @@ common_exception_return:

 	l32i	a4, a2, TI_PRE_COUNT
 	bnez	a4, 4f
-	call4	preempt_schedule_irq
+	abi_call	preempt_schedule_irq
 	j	4f
 #endif

 #if XTENSA_FAKE_NMI
 .LNMIexit:
-	l32i	a3, a1, PT_PS
-	_bbci.l	a3, PS_UM_BIT, 4f
+	l32i	abi_saved1, a1, PT_PS
+	_bbci.l	abi_saved1, PS_UM_BIT, 4f
 #endif

 5:
 #ifdef CONFIG_HAVE_HW_BREAKPOINT
 	_bbci.l	a4, TIF_DB_DISABLED, 7f
-	call4	restore_dbreak
+	abi_call	restore_dbreak
 7:
 #endif
 #ifdef CONFIG_DEBUG_TLB_SANITY
 	l32i	a4, a1, PT_DEPC
 	bgeui	a4, VALID_DOUBLE_EXCEPTION_ADDRESS, 4f
-	call4	check_tlb_sanity
+	abi_call	check_tlb_sanity
 #endif
 6:
 4:
 #ifdef CONFIG_TRACE_IRQFLAGS
-	extui	a4, a3, PS_INTLEVEL_SHIFT, PS_INTLEVEL_WIDTH
+	extui	a4, abi_saved1, PS_INTLEVEL_SHIFT, PS_INTLEVEL_WIDTH
 	bgei	a4, LOCKLEVEL, 1f
-	call4	trace_hardirqs_on
+	abi_call	trace_hardirqs_on
 1:
 #endif
 	/* Restore optional registers. */
@@ -572,14 +585,15 @@ common_exception_return:
 	l32i	a2, a1, PT_SCOMPARE1
 	wsr	a2, scompare1
 #endif
-	wsr	a3, ps		/* disable interrupts */
+	wsr	abi_saved1, ps		/* disable interrupts */

-	_bbci.l	a3, PS_UM_BIT, kernel_exception_exit
+	_bbci.l	abi_saved1, PS_UM_BIT, kernel_exception_exit

 user_exception_exit:

 	/* Restore the state of the task and return from the exception. */

+#if defined(USER_SUPPORT_WINDOWED)
 	/* Switch to the user thread WINDOWBASE. Save SP temporarily in DEPC */

 	l32i	a2, a1, PT_WINDOWBASE
@@ -634,8 +648,10 @@ user_exception_exit:
	 *	 frame where we had loaded a2), or at least the lower 4 bits
	 *	 (if we have restored WSBITS-1 frames).
	 */

 2:
+#else
+	movi	a2, 1
+#endif
 #if XCHAL_HAVE_THREADPTR
 	l32i	a3, a1, PT_THREADPTR
 	wur	a3, threadptr
@@ -650,6 +666,7 @@ user_exception_exit:

 kernel_exception_exit:

+#if defined(__XTENSA_WINDOWED_ABI__)
 	/* Check if we have to do a movsp.
	 *
	 * We only have to do a movsp if the previous window-frame has
@@ -702,6 +719,9 @@ kernel_exception_exit:
	 *
	 * Note: We expect a2 to hold PT_WMASK
	 */
+#else
+	movi	a2, 1
+#endif

 common_exception_exit:
@@ -920,14 +940,16 @@ unrecoverable_text:

 ENTRY(unrecoverable_exception)

+#if XCHAL_HAVE_WINDOWED
 	movi	a0, 1
 	movi	a1, 0

 	wsr	a0, windowstart
 	wsr	a1, windowbase
 	rsync
+#endif

-	movi	a1, PS_WOE_MASK | LOCKLEVEL
+	movi	a1, KERNEL_PS_WOE_MASK | LOCKLEVEL
 	wsr	a1, ps
 	rsync

@@ -935,8 +957,8 @@ ENTRY(unrecoverable_exception)
 	movi	a0, 0
 	addi	a1, a1, PT_REGS_OFFSET

-	movi	a6, unrecoverable_text
-	call4	panic
+	movi	abi_arg0, unrecoverable_text
+	abi_call	panic

 1:	j	1b

@@ -947,6 +969,7 @@ ENDPROC(unrecoverable_exception)
 	__XTENSA_HANDLER
 	.literal_position

+#ifdef SUPPORT_WINDOWED
 /*
  * Fast-handler for alloca exceptions
  *
@@ -1010,6 +1033,7 @@ ENTRY(fast_alloca)
 8:	j	_WindowUnderflow8
 4:	j	_WindowUnderflow4
 ENDPROC(fast_alloca)
+#endif

 #ifdef CONFIG_USER_ABI_CALL0_PROBE
 /*
@@ -1206,7 +1230,8 @@ ENDPROC(fast_syscall_xtensa)
  * Note: We assume the stack pointer is EXC_TABLE_KSTK in the fixup handler.
  */

-#ifdef CONFIG_FAST_SYSCALL_SPILL_REGISTERS
+#if defined(CONFIG_FAST_SYSCALL_SPILL_REGISTERS) && \
+		defined(USER_SUPPORT_WINDOWED)

 ENTRY(fast_syscall_spill_registers)

@@ -1403,12 +1428,12 @@ ENTRY(fast_syscall_spill_registers)
 	rsr	a3, excsave1
 	l32i	a1, a3, EXC_TABLE_KSTK

-	movi	a4, PS_WOE_MASK | LOCKLEVEL
+	movi	a4, KERNEL_PS_WOE_MASK | LOCKLEVEL
 	wsr	a4, ps
 	rsync

-	movi	a6, SIGSEGV
-	call4	do_exit
+	movi	abi_arg0, SIGSEGV
+	abi_call	do_exit

 	/* shouldn't return, so panic */
@@ -1887,57 +1912,77 @@ ENDPROC(fast_store_prohibited)

 ENTRY(system_call)

+#if defined(__XTENSA_WINDOWED_ABI__)
 	abi_entry_default
+#elif defined(__XTENSA_CALL0_ABI__)
+	abi_entry(12)
+
+	s32i	a0, sp, 0
+	s32i	abi_saved0, sp, 4
+	s32i	abi_saved1, sp, 8
+	mov	abi_saved0, a2
+#else
+#error Unsupported Xtensa ABI
+#endif

 	/* regs->syscall = regs->areg[2] */

-	l32i	a7, a2, PT_AREG2
-	s32i	a7, a2, PT_SYSCALL
+	l32i	a7, abi_saved0, PT_AREG2
+	s32i	a7, abi_saved0, PT_SYSCALL

 	GET_THREAD_INFO(a4, a1)
-	l32i	a3, a4, TI_FLAGS
+	l32i	abi_saved1, a4, TI_FLAGS
 	movi	a4, _TIF_WORK_MASK
-	and	a3, a3, a4
-	beqz	a3, 1f
+	and	abi_saved1, abi_saved1, a4
+	beqz	abi_saved1, 1f

-	mov	a6, a2
-	call4	do_syscall_trace_enter
-	beqz	a6, .Lsyscall_exit
-	l32i	a7, a2, PT_SYSCALL
+	mov	abi_arg0, abi_saved0
+	abi_call	do_syscall_trace_enter
+	beqz	abi_rv, .Lsyscall_exit
+	l32i	a7, abi_saved0, PT_SYSCALL

 1:
 	/* syscall = sys_call_table[syscall_nr] */

 	movi	a4, sys_call_table
 	movi	a5, __NR_syscalls
-	movi	a6, -ENOSYS
+	movi	abi_rv, -ENOSYS
 	bgeu	a7, a5, 1f

 	addx4	a4, a7, a4
-	l32i	a4, a4, 0
+	l32i	abi_tmp0, a4, 0

 	/* Load args: arg0 - arg5 are passed via regs. */

-	l32i	a6, a2, PT_AREG6
-	l32i	a7, a2, PT_AREG3
-	l32i	a8, a2, PT_AREG4
-	l32i	a9, a2, PT_AREG5
-	l32i	a10, a2, PT_AREG8
-	l32i	a11, a2, PT_AREG9
+	l32i	abi_arg0, abi_saved0, PT_AREG6
+	l32i	abi_arg1, abi_saved0, PT_AREG3
+	l32i	abi_arg2, abi_saved0, PT_AREG4
+	l32i	abi_arg3, abi_saved0, PT_AREG5
+	l32i	abi_arg4, abi_saved0, PT_AREG8
+	l32i	abi_arg5, abi_saved0, PT_AREG9

-	callx4	a4
+	abi_callx	abi_tmp0

 1:	/* regs->areg[2] = return_value */

-	s32i	a6, a2, PT_AREG2
-	bnez	a3, 1f
+	s32i	abi_rv, abi_saved0, PT_AREG2
+	bnez	abi_saved1, 1f
 .Lsyscall_exit:
+#if defined(__XTENSA_WINDOWED_ABI__)
 	abi_ret_default
+#elif defined(__XTENSA_CALL0_ABI__)
+	l32i	a0, sp, 0
+	l32i	abi_saved0, sp, 4
+	l32i	abi_saved1, sp, 8
+	abi_ret(12)
+#else
+#error Unsupported Xtensa ABI
+#endif

 1:
-	mov	a6, a2
-	call4	do_syscall_trace_leave
-	abi_ret_default
+	mov	abi_arg0, abi_saved0
+	abi_call	do_syscall_trace_leave
+	j	.Lsyscall_exit

 ENDPROC(system_call)

@@ -1988,8 +2033,18 @@ ENDPROC(system_call)

 ENTRY(_switch_to)

+#if defined(__XTENSA_WINDOWED_ABI__)
 	abi_entry(XTENSA_SPILL_STACK_RESERVE)
+#elif defined(__XTENSA_CALL0_ABI__)
+	abi_entry(16)
+
+	s32i	a12, sp, 0
+	s32i	a13, sp, 4
+	s32i	a14, sp, 8
+	s32i	a15, sp, 12
+#else
+#error Unsupported Xtensa ABI
+#endif
 	mov	a11, a3			# and 'next' (a3)

 	l32i	a4, a2, TASK_THREAD_INFO
@@ -2033,7 +2088,9 @@ ENTRY(_switch_to)

 	/* Flush register file. */

+#if defined(__XTENSA_WINDOWED_ABI__)
 	spill_registers_kernel
+#endif

 	/* Set kernel stack (and leave critical section)
	 * Note: It's save to set it here. The stack will not be overwritten
@@ -2055,34 +2112,43 @@ ENTRY(_switch_to)
 	wsr	a14, ps
 	rsync

+#if defined(__XTENSA_WINDOWED_ABI__)
 	abi_ret(XTENSA_SPILL_STACK_RESERVE)
+#elif defined(__XTENSA_CALL0_ABI__)
+	l32i	a12, sp, 0
+	l32i	a13, sp, 4
+	l32i	a14, sp, 8
+	l32i	a15, sp, 12
+	abi_ret(16)
+#else
+#error Unsupported Xtensa ABI
+#endif

 ENDPROC(_switch_to)

 ENTRY(ret_from_fork)

 	/* void schedule_tail (struct task_struct *prev)
-	 * Note: prev is still in a6 (return value from fake call4 frame)
+	 * Note: prev is still in abi_arg0 (return value from fake call frame)
	 */
-	call4	schedule_tail
+	abi_call	schedule_tail

-	mov	a6, a1
-	call4	do_syscall_trace_leave
-	j	common_exception_return
+	mov	abi_arg0, a1
+	abi_call	do_syscall_trace_leave
+	j	common_exception_return

 ENDPROC(ret_from_fork)

 /*
  * Kernel thread creation helper
- * On entry, set up by copy_thread: a2 = thread_fn, a3 = thread_fn arg
- * left from _switch_to: a6 = prev
+ * On entry, set up by copy_thread: abi_saved0 = thread_fn,
+ * abi_saved1 = thread_fn arg. Left from _switch_to: abi_arg0 = prev
  */
 ENTRY(ret_from_kernel_thread)

-	call4	schedule_tail
-	mov	a6, a3
-	callx4	a2
+	abi_call	schedule_tail
+	mov	abi_arg0, abi_saved1
+	abi_callx	abi_saved0
 	j	common_exception_return

 ENDPROC(ret_from_kernel_thread)
--- a/arch/xtensa/kernel/head.S
+++ b/arch/xtensa/kernel/head.S
@@ -15,6 +15,7 @@
 * Kevin Chea
 */

+#include <asm/asmmacro.h>
 #include <asm/processor.h>
 #include <asm/page.h>
 #include <asm/cacheasm.h>
@@ -66,11 +67,13 @@ _SetupOCD:
	 * xt-gdb to single step via DEBUG exceptions received directly
	 * by ocd.
	 */
+#if XCHAL_HAVE_WINDOWED
 	movi	a1, 1
 	movi	a0, 0
 	wsr	a1, windowstart
 	wsr	a0, windowbase
 	rsync
+#endif

 	movi	a1, LOCKLEVEL
 	wsr	a1, ps
@@ -193,9 +196,10 @@ ENTRY(_startup)
 	movi	a1, start_info
 	l32i	a1, a1, 0

-	movi	a2, PS_WOE_MASK | LOCKLEVEL
-					# WOE=1, INTLEVEL=LOCKLEVEL, UM=0
-	wsr	a2, ps			# (enable reg-windows; progmode stack)
+	/* Disable interrupts. */
+	/* Enable window exceptions if kernel is built with windowed ABI. */
+	movi	a2, KERNEL_PS_WOE_MASK | LOCKLEVEL
+	wsr	a2, ps
 	rsync

 #ifdef CONFIG_SMP
@@ -267,13 +271,13 @@ ENTRY(_startup)
 	l32i	a1, a1, 0
 #endif

-	movi	a6, 0
-	xsr	a6, excsave1
+	movi	abi_arg0, 0
+	xsr	abi_arg0, excsave1

 	/* init_arch kick-starts the linux kernel */

-	call4	init_arch
-	call4	start_kernel
+	abi_call	init_arch
+	abi_call	start_kernel

 should_never_return:
 	j	should_never_return
@@ -297,10 +301,10 @@ should_never_return:
 	s32i	a3, a2, 0
 	memw

-	movi	a6, 0
-	wsr	a6, excsave1
+	movi	abi_arg0, 0
+	wsr	abi_arg0, excsave1

-	call4	secondary_start_kernel
+	abi_call	secondary_start_kernel
 	j	should_never_return

 #endif /* CONFIG_SMP */
--- a/arch/xtensa/kernel/mcount.S
+++ b/arch/xtensa/kernel/mcount.S
@@ -17,11 +17,16 @@
 /*
  * Entry condition:
  *
- *   a2:	a0 of the caller
+ *   a2:	a0 of the caller in windowed ABI
+ *   a10:	a0 of the caller in call0 ABI
+ *
+ * In call0 ABI the function _mcount is called with the special ABI:
+ * its argument is in a10 and all the usual argument registers (a2 - a7)
+ * must be preserved in addition to callee-saved a12 - a15.
  */

 ENTRY(_mcount)
+#if defined(__XTENSA_WINDOWED_ABI__)
 	abi_entry_default

 	movi	a4, ftrace_trace_function
@@ -42,7 +47,36 @@ ENTRY(_mcount)
 	callx4	a4

 	abi_ret_default
+#elif defined(__XTENSA_CALL0_ABI__)
+	abi_entry_default
+
+	movi	a9, ftrace_trace_function
+	l32i	a9, a9, 0
+	movi	a11, ftrace_stub
+	bne	a9, a11, 1f
+	abi_ret_default
+
+1:	abi_entry(28)
+	s32i	a0, sp, 0
+	s32i	a2, sp, 4
+	s32i	a3, sp, 8
+	s32i	a4, sp, 12
+	s32i	a5, sp, 16
+	s32i	a6, sp, 20
+	s32i	a7, sp, 24
+	addi	a2, a10, -MCOUNT_INSN_SIZE
+	callx0	a9
+	l32i	a0, sp, 0
+	l32i	a2, sp, 4
+	l32i	a3, sp, 8
+	l32i	a4, sp, 12
+	l32i	a5, sp, 16
+	l32i	a6, sp, 20
+	l32i	a7, sp, 24
+	abi_ret(28)
+#else
+#error Unsupported Xtensa ABI
+#endif
 ENDPROC(_mcount)

 ENTRY(ftrace_stub)
--- a/arch/xtensa/kernel/process.c
+++ b/arch/xtensa/kernel/process.c
@@ -211,11 +211,18 @@ int copy_thread(unsigned long clone_flags, unsigned long usp_thread_fn,
 	struct thread_info *ti;
 #endif

+#if defined(__XTENSA_WINDOWED_ABI__)
 	/* Create a call4 dummy-frame: a0 = 0, a1 = childregs. */
 	SPILL_SLOT(childregs, 1) = (unsigned long)childregs;
 	SPILL_SLOT(childregs, 0) = 0;

 	p->thread.sp = (unsigned long)childregs;
+#elif defined(__XTENSA_CALL0_ABI__)
+	/* Reserve 16 bytes for the _switch_to stack frame. */
+	p->thread.sp = (unsigned long)childregs - 16;
+#else
+#error Unsupported Xtensa ABI
+#endif

 	if (!(p->flags & (PF_KTHREAD | PF_IO_WORKER))) {
 		struct pt_regs *regs = current_pt_regs();
@@ -272,11 +279,25 @@ int copy_thread(unsigned long clone_flags, unsigned long usp_thread_fn,
 		p->thread.ra = MAKE_RA_FOR_CALL(
 				(unsigned long)ret_from_kernel_thread, 1);

-		/* pass parameters to ret_from_kernel_thread:
-		 * a2 = thread_fn, a3 = thread_fn arg
+		/* pass parameters to ret_from_kernel_thread: */
+#if defined(__XTENSA_WINDOWED_ABI__)
+		/*
+		 * a2 = thread_fn, a3 = thread_fn arg.
+		 * Window underflow will load registers from the
+		 * spill slots on the stack on return from _switch_to.
 		 */
-		SPILL_SLOT(childregs, 3) = thread_fn_arg;
 		SPILL_SLOT(childregs, 2) = usp_thread_fn;
+		SPILL_SLOT(childregs, 3) = thread_fn_arg;
+#elif defined(__XTENSA_CALL0_ABI__)
+		/*
+		 * a12 = thread_fn, a13 = thread_fn arg.
+		 * _switch_to epilogue will load registers from the stack.
+		 */
+		((unsigned long *)p->thread.sp)[0] = usp_thread_fn;
+		((unsigned long *)p->thread.sp)[1] = thread_fn_arg;
+#else
+#error Unsupported Xtensa ABI
+#endif

 		/* Childregs are only used when we're going to userspace
		 * in which case start_thread will set them up.
@ -37,14 +37,15 @@
|
|||||||
#include <asm/bootparam.h>
|
#include <asm/bootparam.h>
|
||||||
#include <asm/kasan.h>
|
#include <asm/kasan.h>
|
||||||
#include <asm/mmu_context.h>
|
#include <asm/mmu_context.h>
|
||||||
#include <asm/processor.h>
|
|
||||||
#include <asm/timex.h>
|
|
||||||
#include <asm/platform.h>
|
|
||||||
#include <asm/page.h>
|
#include <asm/page.h>
|
||||||
#include <asm/setup.h>
|
|
||||||
#include <asm/param.h>
|
#include <asm/param.h>
|
||||||
|
#include <asm/platform.h>
|
||||||
|
#include <asm/processor.h>
|
||||||
|
#include <asm/sections.h>
|
||||||
|
#include <asm/setup.h>
|
||||||
#include <asm/smp.h>
|
#include <asm/smp.h>
|
||||||
#include <asm/sysmem.h>
|
#include <asm/sysmem.h>
|
||||||
|
#include <asm/timex.h>
|
||||||
|
|
||||||
#if defined(CONFIG_VGA_CONSOLE) || defined(CONFIG_DUMMY_CONSOLE)
|
#if defined(CONFIG_VGA_CONSOLE) || defined(CONFIG_DUMMY_CONSOLE)
|
||||||
struct screen_info screen_info = {
|
struct screen_info screen_info = {
|
||||||
@ -271,49 +272,6 @@ void __init init_arch(bp_tag_t *bp_start)
|
|||||||
* Initialize system. Setup memory and reserve regions.
|
* Initialize system. Setup memory and reserve regions.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
extern char _end[];
|
|
||||||
extern char _stext[];
|
|
||||||
extern char _WindowVectors_text_start;
|
|
||||||
extern char _WindowVectors_text_end;
|
|
||||||
extern char _DebugInterruptVector_text_start;
|
|
||||||
extern char _DebugInterruptVector_text_end;
|
|
||||||
extern char _KernelExceptionVector_text_start;
|
|
||||||
extern char _KernelExceptionVector_text_end;
|
|
||||||
extern char _UserExceptionVector_text_start;
|
|
||||||
extern char _UserExceptionVector_text_end;
|
|
||||||
extern char _DoubleExceptionVector_text_start;
|
|
||||||
extern char _DoubleExceptionVector_text_end;
|
|
||||||
extern char _exception_text_start;
|
|
||||||
extern char _exception_text_end;
|
|
||||||
#if XCHAL_EXCM_LEVEL >= 2
|
|
||||||
extern char _Level2InterruptVector_text_start;
|
|
||||||
extern char _Level2InterruptVector_text_end;
|
|
||||||
#endif
|
|
||||||
#if XCHAL_EXCM_LEVEL >= 3
|
|
||||||
extern char _Level3InterruptVector_text_start;
|
|
||||||
extern char _Level3InterruptVector_text_end;
|
|
||||||
#endif
|
|
||||||
#if XCHAL_EXCM_LEVEL >= 4
|
|
||||||
extern char _Level4InterruptVector_text_start;
|
|
||||||
extern char _Level4InterruptVector_text_end;
|
|
||||||
#endif
|
|
||||||
#if XCHAL_EXCM_LEVEL >= 5
|
|
||||||
extern char _Level5InterruptVector_text_start;
|
|
||||||
extern char _Level5InterruptVector_text_end;
|
|
||||||
#endif
|
|
||||||
-#if XCHAL_EXCM_LEVEL >= 6
-extern char _Level6InterruptVector_text_start;
-extern char _Level6InterruptVector_text_end;
-#endif
-#ifdef CONFIG_SMP
-extern char _SecondaryResetVector_text_start;
-extern char _SecondaryResetVector_text_end;
-#endif
-#ifdef CONFIG_XIP_KERNEL
-extern char _xip_start[];
-extern char _xip_end[];
-#endif
-
 static inline int __init_memblock mem_reserve(unsigned long start,
                                               unsigned long end)
 {
@@ -349,49 +307,51 @@ void __init setup_arch(char **cmdline_p)
 #endif

 #ifdef CONFIG_VECTORS_ADDR
-        mem_reserve(__pa(&_WindowVectors_text_start),
-                    __pa(&_WindowVectors_text_end));
+#ifdef SUPPORT_WINDOWED
+        mem_reserve(__pa(_WindowVectors_text_start),
+                    __pa(_WindowVectors_text_end));
+#endif

-        mem_reserve(__pa(&_DebugInterruptVector_text_start),
-                    __pa(&_DebugInterruptVector_text_end));
+        mem_reserve(__pa(_DebugInterruptVector_text_start),
+                    __pa(_DebugInterruptVector_text_end));

-        mem_reserve(__pa(&_KernelExceptionVector_text_start),
-                    __pa(&_KernelExceptionVector_text_end));
+        mem_reserve(__pa(_KernelExceptionVector_text_start),
+                    __pa(_KernelExceptionVector_text_end));

-        mem_reserve(__pa(&_UserExceptionVector_text_start),
-                    __pa(&_UserExceptionVector_text_end));
+        mem_reserve(__pa(_UserExceptionVector_text_start),
+                    __pa(_UserExceptionVector_text_end));

-        mem_reserve(__pa(&_DoubleExceptionVector_text_start),
-                    __pa(&_DoubleExceptionVector_text_end));
+        mem_reserve(__pa(_DoubleExceptionVector_text_start),
+                    __pa(_DoubleExceptionVector_text_end));

-        mem_reserve(__pa(&_exception_text_start),
-                    __pa(&_exception_text_end));
+        mem_reserve(__pa(_exception_text_start),
+                    __pa(_exception_text_end));
 #if XCHAL_EXCM_LEVEL >= 2
-        mem_reserve(__pa(&_Level2InterruptVector_text_start),
-                    __pa(&_Level2InterruptVector_text_end));
+        mem_reserve(__pa(_Level2InterruptVector_text_start),
+                    __pa(_Level2InterruptVector_text_end));
 #endif
 #if XCHAL_EXCM_LEVEL >= 3
-        mem_reserve(__pa(&_Level3InterruptVector_text_start),
-                    __pa(&_Level3InterruptVector_text_end));
+        mem_reserve(__pa(_Level3InterruptVector_text_start),
+                    __pa(_Level3InterruptVector_text_end));
 #endif
 #if XCHAL_EXCM_LEVEL >= 4
-        mem_reserve(__pa(&_Level4InterruptVector_text_start),
-                    __pa(&_Level4InterruptVector_text_end));
+        mem_reserve(__pa(_Level4InterruptVector_text_start),
+                    __pa(_Level4InterruptVector_text_end));
 #endif
 #if XCHAL_EXCM_LEVEL >= 5
-        mem_reserve(__pa(&_Level5InterruptVector_text_start),
-                    __pa(&_Level5InterruptVector_text_end));
+        mem_reserve(__pa(_Level5InterruptVector_text_start),
+                    __pa(_Level5InterruptVector_text_end));
 #endif
 #if XCHAL_EXCM_LEVEL >= 6
-        mem_reserve(__pa(&_Level6InterruptVector_text_start),
-                    __pa(&_Level6InterruptVector_text_end));
+        mem_reserve(__pa(_Level6InterruptVector_text_start),
+                    __pa(_Level6InterruptVector_text_end));
 #endif

 #endif /* CONFIG_VECTORS_ADDR */

 #ifdef CONFIG_SMP
-        mem_reserve(__pa(&_SecondaryResetVector_text_start),
-                    __pa(&_SecondaryResetVector_text_end));
+        mem_reserve(__pa(_SecondaryResetVector_text_start),
+                    __pa(_SecondaryResetVector_text_end));
 #endif
         parse_early_param();
         bootmem_init();
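
Two things change in every mem_reserve() pair above: the window-vector pair
gains a SUPPORT_WINDOWED guard, and the '&' disappears because the section
markers now come from asm/sections.h as arrays instead of per-file char
externs. A minimal sketch of those declarations (illustrative, not the
verbatim header):

/* An array name decays to the symbol's address, so callers can write
 * __pa(_WindowVectors_text_start) with no '&'.
 */
extern char _WindowVectors_text_start[];
extern char _WindowVectors_text_end[];
extern char _DebugInterruptVector_text_start[];
extern char _DebugInterruptVector_text_end[];
/* ...one start[]/end[] pair for each vector reserved above... */
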
@@ -45,12 +45,13 @@ struct rt_sigframe
         unsigned int window[4];
 };

+#if defined(USER_SUPPORT_WINDOWED)
 /*
  * Flush register windows stored in pt_regs to stack.
  * Returns 1 for errors.
  */
-int
+static int
 flush_window_regs_user(struct pt_regs *regs)
 {
         const unsigned long ws = regs->windowstart;
@@ -121,6 +122,13 @@ flush_window_regs_user(struct pt_regs *regs)
 errout:
         return err;
 }
+#else
+static int
+flush_window_regs_user(struct pt_regs *regs)
+{
+        return 0;
+}
+#endif

 /*
  * Note: We don't copy double exception 'regs', we have to finish double exc.
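
The #else branch gives call0 configurations a no-op flush_window_regs_user(),
so the signal-frame code needs no ifdefs of its own: with no register windows
there is simply nothing to spill. An illustrative caller shape (a sketch; the
function name below is hypothetical, not the real call site):

static int save_regs_example(struct pt_regs *regs)
{
        if (flush_window_regs_user(regs))
                return -EFAULT; /* windowed build: spill to user stack failed */
        return 0;               /* call0 build: stub always returns 0 */
}
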
@@ -97,7 +97,9 @@ static dispatch_init_table_t __initdata dispatch_init_table[] = {
 /* EXCCAUSE_INSTRUCTION_FETCH unhandled */
 /* EXCCAUSE_LOAD_STORE_ERROR unhandled*/
 { EXCCAUSE_LEVEL1_INTERRUPT,    0,         do_interrupt },
+#ifdef SUPPORT_WINDOWED
 { EXCCAUSE_ALLOCA,              USER|KRNL, fast_alloca },
+#endif
 /* EXCCAUSE_INTEGER_DIVIDE_BY_ZERO unhandled */
 /* EXCCAUSE_PRIVILEGED unhandled */
 #if XCHAL_UNALIGNED_LOAD_EXCEPTION || XCHAL_UNALIGNED_STORE_EXCEPTION
@@ -462,12 +464,10 @@ void secondary_trap_init(void)

 void show_regs(struct pt_regs * regs)
 {
-        int i, wmask;
+        int i;

         show_regs_print_info(KERN_DEFAULT);

-        wmask = regs->wmask & ~1;
-
         for (i = 0; i < 16; i++) {
                 if ((i % 8) == 0)
                         pr_info("a%02d:", i);
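
Gating the EXCCAUSE_ALLOCA entry follows from what fast_alloca does: it redoes
the register-window spill that a movsp instruction tripped over, an exception
a call0-only core can never raise. For orientation, the shape of each table
entry (a sketch based on my reading of traps.c, not a verbatim quote):

typedef struct {
        int cause;      /* EXCCAUSE_* number this entry handles */
        int fast;       /* 0 for a C handler, USER/KRNL flags for fast paths */
        void *handler;  /* C handler or fast (assembly) handler address */
} dispatch_init_table_t;
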
@@ -226,6 +226,7 @@ ENTRY(_DoubleExceptionVector)

         xsr     a0, depc                # get DEPC, save a0

+#ifdef SUPPORT_WINDOWED
         movi    a2, WINDOW_VECTORS_VADDR
         _bltu   a0, a2, .Lfixup
         addi    a2, a2, WINDOW_VECTORS_SIZE
@@ -275,6 +276,10 @@ _DoubleExceptionVector_WindowUnderflow:
         l32i    a0, a0, EXC_TABLE_FAST_USER
         jx      a0

+#else
+        j       .Lfixup
+#endif
+
 /*
  * We only allow the ITLB miss exception if we are in kernel space.
  * All other exceptions are unexpected and thus unrecoverable!
@@ -343,6 +348,7 @@ _DoubleExceptionVector_WindowUnderflow:
         l32i    a0, a0, EXC_TABLE_FAST_USER
         jx      a0

+#ifdef SUPPORT_WINDOWED
 /*
  * Restart window OVERFLOW exception.
  * Currently:
@@ -475,9 +481,12 @@ _DoubleExceptionVector_handle_exception:
         rsr     a0, depc
         rotw    -3
         j       1b
+#endif

 ENDPROC(_DoubleExceptionVector)

+#ifdef SUPPORT_WINDOWED
+
 /*
  * Fixup handler for TLB miss in double exception handler for window owerflow.
  * We get here with windowbase set to the window that was being spilled and
@@ -590,6 +599,8 @@ ENTRY(window_overflow_restore_a0_fixup)

 ENDPROC(window_overflow_restore_a0_fixup)

+#endif
+
 /*
  * Debug interrupt vector
  *
@@ -650,6 +661,25 @@ ENTRY(_Level\level\()InterruptVector)
 irq_entry_level 5
 irq_entry_level 6

+#if XCHAL_EXCM_LEVEL >= 2
+        /*
+         * Continuation of medium priority interrupt dispatch code.
+         * On entry here, a0 contains PS, and EPC2 contains saved a0:
+         */
+        __XTENSA_HANDLER
+        .align 4
+_SimulateUserKernelVectorException:
+        addi    a0, a0, (1 << PS_EXCM_BIT)
+#if !XTENSA_FAKE_NMI
+        wsr     a0, ps
+#endif
+        bbsi.l  a0, PS_UM_BIT, 1f       # branch if user mode
+        xsr     a0, excsave2            # restore a0
+        j       _KernelExceptionVector  # simulate kernel vector exception
+1:      xsr     a0, excsave2            # restore a0
+        j       _UserExceptionVector    # simulate user vector exception
+#endif
+

 /* Window overflow and underflow handlers.
  * The handlers must be 64 bytes apart, first starting with the underflow
@@ -668,6 +698,8 @@ ENTRY(_Level\level\()InterruptVector)
         .section                .WindowVectors.text, "ax"


+#ifdef SUPPORT_WINDOWED
+
 /* 4-Register Window Overflow Vector (Handler) */

 ENTRY_ALIGN64(_WindowOverflow4)
@@ -680,27 +712,6 @@ ENTRY_ALIGN64(_WindowOverflow4)

 ENDPROC(_WindowOverflow4)

-
-#if XCHAL_EXCM_LEVEL >= 2
-        /* Not a window vector - but a convenient location
-         * (where we know there's space) for continuation of
-         * medium priority interrupt dispatch code.
-         * On entry here, a0 contains PS, and EPC2 contains saved a0:
-         */
-        .align 4
-_SimulateUserKernelVectorException:
-        addi    a0, a0, (1 << PS_EXCM_BIT)
-#if !XTENSA_FAKE_NMI
-        wsr     a0, ps
-#endif
-        bbsi.l  a0, PS_UM_BIT, 1f       # branch if user mode
-        xsr     a0, excsave2            # restore a0
-        j       _KernelExceptionVector  # simulate kernel vector exception
-1:      xsr     a0, excsave2            # restore a0
-        j       _UserExceptionVector    # simulate user vector exception
-#endif
-

 /* 4-Register Window Underflow Vector (Handler) */

 ENTRY_ALIGN64(_WindowUnderflow4)
@@ -789,4 +800,6 @@ ENTRY_ALIGN64(_WindowUnderflow12)

 ENDPROC(_WindowUnderflow12)

+#endif
+
         .text
@@ -94,7 +94,9 @@ SECTIONS
   . = ALIGN(PAGE_SIZE);
   _vecbase = .;

+#ifdef SUPPORT_WINDOWED
   SECTION_VECTOR2 (.WindowVectors.text, WINDOW_VECTORS_VADDR)
+#endif
 #if XCHAL_EXCM_LEVEL >= 2
   SECTION_VECTOR2 (.Level2InterruptVector.text, INTLEVEL2_VECTOR_VADDR)
 #endif
@@ -166,8 +168,10 @@ SECTIONS
     __boot_reloc_table_start = ABSOLUTE(.);

 #if !MERGED_VECTORS
+#ifdef SUPPORT_WINDOWED
     RELOCATE_ENTRY(_WindowVectors_text,
                    .WindowVectors.text);
+#endif
 #if XCHAL_EXCM_LEVEL >= 2
     RELOCATE_ENTRY(_Level2InterruptVector_text,
                    .Level2InterruptVector.text);
@@ -229,14 +233,18 @@ SECTIONS
 #if !MERGED_VECTORS
   /* The vectors are relocated to the real position at startup time */

+#ifdef SUPPORT_WINDOWED
   SECTION_VECTOR4 (_WindowVectors_text,
                    .WindowVectors.text,
                    WINDOW_VECTORS_VADDR,
-                   .dummy)
+                   LAST)
+#undef LAST
+#define LAST     .WindowVectors.text
+#endif
   SECTION_VECTOR4 (_DebugInterruptVector_text,
                    .DebugInterruptVector.text,
                    DEBUG_VECTOR_VADDR,
-                   .WindowVectors.text)
+                   LAST)
 #undef LAST
 #define LAST     .DebugInterruptVector.text
 #if XCHAL_EXCM_LEVEL >= 2
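
The LAST juggling above is a chaining idiom: each vector section is placed
relative to whichever section was emitted last, so an optional section can
drop out of the chain without touching its neighbors. A condensed sketch of
the idiom (assuming LAST starts out as .dummy, which is what the first
SECTION_VECTOR4 passed literally before this change):

#define LAST    .dummy
#ifdef SUPPORT_WINDOWED
  /* place .WindowVectors.text after LAST */
  #undef LAST
  #define LAST  .WindowVectors.text
#endif
  /* place .DebugInterruptVector.text after LAST */
  #undef LAST
  #define LAST  .DebugInterruptVector.text
  /* ...repeat for every remaining vector section... */
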
@@ -45,7 +45,6 @@
 # a9/ tmp
 # a10/ tmp
 # a11/ dst
-# a12/ tmp

         .text
 ENTRY(__strncpy_user)
@@ -61,7 +60,7 @@ ENTRY(__strncpy_user)
         bbsi.l  a3, 0, .Lsrc1mod2 # if only  8-bit aligned
         bbsi.l  a3, 1, .Lsrc2mod4 # if only 16-bit aligned
 .Lsrcaligned:   # return here when src is word-aligned
-        srli    a12, a4, 2      # number of loop iterations with 4B per loop
+        srli    a10, a4, 2      # number of loop iterations with 4B per loop
         movi    a9, 3
         bnone   a11, a9, .Laligned
         j       .Ldstunaligned
@@ -102,11 +101,11 @@ EX(10f) s8i     a9, a11, 0              # store byte 0
         .byte   0               # (0 mod 4 alignment for LBEG)
 .Laligned:
 #if XCHAL_HAVE_LOOPS
-        loopnez a12, .Loop1done
+        loopnez a10, .Loop1done
 #else
-        beqz    a12, .Loop1done
-        slli    a12, a12, 2
-        add     a12, a12, a11   # a12 = end of last 4B chunck
+        beqz    a10, .Loop1done
+        slli    a10, a10, 2
+        add     a10, a10, a11   # a10 = end of last 4B chunck
 #endif
 .Loop1:
 EX(11f) l32i    a9, a3, 0               # get word from src
@@ -118,7 +117,7 @@ EX(10f) s32i    a9, a11, 0              # store word to dst
         bnone   a9, a8, .Lz3            # if byte 3 is zero
         addi    a11, a11, 4             # advance dst pointer
 #if !XCHAL_HAVE_LOOPS
-        blt     a11, a12, .Loop1
+        blt     a11, a10, .Loop1
 #endif

 .Loop1done:
@@ -185,7 +184,7 @@ EX(10f) s8i     a9, a11, 2
         loopnez a4, .Lunalignedend
 #else
         beqz    a4, .Lunalignedend
-        add     a12, a11, a4            # a12 = ending address
+        add     a10, a11, a4            # a10 = ending address
 #endif /* XCHAL_HAVE_LOOPS */
 .Lnextbyte:
 EX(11f) l8ui    a9, a3, 0
@@ -194,7 +193,7 @@ EX(10f) s8i     a9, a11, 0
         beqz    a9, .Lunalignedend
         addi    a11, a11, 1
 #if !XCHAL_HAVE_LOOPS
-        blt     a11, a12, .Lnextbyte
+        blt     a11, a10, .Lnextbyte
 #endif

 .Lunalignedend:
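
The a12 to a10 rename here is functional, not cosmetic: under the call0 ABI
a12..a15 are callee-saved, so a leaf routine that scratched a12 would first
have to spill and restore it, while a10 is a free temporary. A tiny sketch of
the convention being assumed (not an authoritative ABI table):

/* Call0 ABI register roles assumed by the a12 -> a10 switch above:
 * a0 return address, a1 stack pointer, a2..a7 arguments/return value,
 * a8..a11 caller-saved temporaries, a12..a15 callee-saved.
 */
static inline int call0_reg_is_callee_saved(int reg)
{
        return reg >= 12 && reg <= 15;  /* a12..a15 must be preserved */
}
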
@@ -60,7 +60,12 @@
         .text
 ENTRY(__xtensa_copy_user)

-        abi_entry_default
+#if !XCHAL_HAVE_LOOPS && defined(__XTENSA_CALL0_ABI__)
+#define STACK_SIZE 4
+#else
+#define STACK_SIZE 0
+#endif
+        abi_entry(STACK_SIZE)
         # a2/ dst, a3/ src, a4/ len
         mov     a5, a2          # copy dst so that a2 is return value
         mov     a11, a4         # preserve original len for error case
@@ -75,7 +80,7 @@ ENTRY(__xtensa_copy_user)
         __ssa8  a3              # set shift amount from byte offset
         bnez    a4, .Lsrcunaligned
         movi    a2, 0           # return success for len==0
-        abi_ret_default
+        abi_ret(STACK_SIZE)

 /*
  * Destination is unaligned
@@ -127,7 +132,7 @@ EX(10f) s8i     a6, a5, 0
 #endif /* !XCHAL_HAVE_LOOPS */
 .Lbytecopydone:
         movi    a2, 0           # return success for len bytes copied
-        abi_ret_default
+        abi_ret(STACK_SIZE)

 /*
  * Destination and source are word-aligned.
@@ -187,7 +192,7 @@ EX(10f) l8ui    a6, a3, 0
 EX(10f) s8i     a6, a5, 0
 .L5:
         movi    a2, 0           # return success for len bytes copied
-        abi_ret_default
+        abi_ret(STACK_SIZE)

 /*
  * Destination is aligned, Source is unaligned
@@ -205,8 +210,14 @@ EX(10f) l32i    a6, a3, 0       # load first word
         loopnez a7, .Loop2done
 #else /* !XCHAL_HAVE_LOOPS */
         beqz    a7, .Loop2done
+#if defined(__XTENSA_CALL0_ABI__)
+        s32i    a10, a1, 0
+        slli    a10, a7, 4
+        add     a10, a10, a3    # a10 = end of last 16B source chunk
+#else
         slli    a12, a7, 4
         add     a12, a12, a3    # a12 = end of last 16B source chunk
+#endif
 #endif /* !XCHAL_HAVE_LOOPS */
 .Loop2:
 EX(10f) l32i    a7, a3, 4
@@ -224,7 +235,12 @@ EX(10f) s32i    a8, a5, 8
 EX(10f) s32i    a9, a5, 12
         addi    a5, a5, 16
 #if !XCHAL_HAVE_LOOPS
+#if defined(__XTENSA_CALL0_ABI__)
+        blt     a3, a10, .Loop2
+        l32i    a10, a1, 0
+#else
         blt     a3, a12, .Loop2
+#endif
 #endif /* !XCHAL_HAVE_LOOPS */
 .Loop2done:
         bbci.l  a4, 3, .L12
@@ -264,7 +280,7 @@ EX(10f) l8ui    a6, a3, 0
 EX(10f) s8i     a6, a5, 0
 .L15:
         movi    a2, 0           # return success for len bytes copied
-        abi_ret_default
+        abi_ret(STACK_SIZE)

 ENDPROC(__xtensa_copy_user)

@@ -281,4 +297,4 @@ ENDPROC(__xtensa_copy_user)
 10:
         sub     a2, a5, a2      /* a2 <-- bytes copied */
         sub     a2, a11, a2     /* a2 <-- bytes not copied */
-        abi_ret_default
+        abi_ret(STACK_SIZE)
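
The STACK_SIZE frame exists only for the call0, no-hardware-loops combination:
the bound for .Loop2 moves out of callee-saved a12 into a10, and a10's
previous contents are parked in the 4-byte slot that abi_entry(STACK_SIZE)
reserves (the s32i/l32i pair above). The return contract is unchanged; a
hedged sketch of how a caller sees it (the prototype and wrapper below are my
assumption for illustration, not quoted from the tree, and assume
<linux/errno.h>):

extern unsigned long __xtensa_copy_user(void *to, const void *from,
                                        unsigned long n);

/* Illustrative wrapper: the fixup path above leaves "bytes not copied"
 * in a2, so a zero return means complete success.
 */
static int copy_example(void *dst, const void *src, unsigned long len)
{
        return __xtensa_copy_user(dst, src, len) ? -EFAULT : 0;
}
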