Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

[ARM] 4185/2: entry: introduce get_irqnr_preamble and arch_ret_to_user

get_irqnr_preamble allows machines to take some action before entering the
get_irqnr_and_base loop. On iop we enable cp6 access.

arch_ret_to_user is added to the userspace return path to allow individual
architectures to take actions, like disabling coprocessor access, before
the final return to userspace.

Per Nicolas Pitre's note, there is no need to cp_wait on the return to user
as the latency to return is sufficient.

Signed-off-by: Dan Williams <dan.j.williams@intel.com>
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>

Authored by Dan Williams; committed by Russell King.
f80dff9d 588ef769

+218 -25
+1
arch/arm/kernel/entry-armv.S
···
27 27       * Interrupt handling.  Preserves r7, r8, r9
28 28       */
29 29      .macro  irq_handler
   30  +   get_irqnr_preamble r5, lr
30 31  1:  get_irqnr_and_base r0, r6, r5, lr
31 32      movne   r1, sp
32 33      @
+7
arch/arm/kernel/entry-common.S
···
 9  9   */
10 10
11 11  #include <asm/unistd.h>
   12  +  #include <asm/arch/entry-macro.S>
12 13
13 14  #include "entry-header.S"
14 15
···
25 24      ldr     r1, [tsk, #TI_FLAGS]
26 25      tst     r1, #_TIF_WORK_MASK
27 26      bne     fast_work_pending
   27  +
   28  +   /* perform architecture specific actions before user return */
   29  +   arch_ret_to_user r1, lr
28 30
29 31      @ fast_restore_user_regs
30 32      ldr     r1, [sp, #S_OFF + S_PSR]   @ get calling cpsr
···
65 61      tst     r1, #_TIF_WORK_MASK
66 62      bne     work_pending
67 63  no_work_pending:
   64  +   /* perform architecture specific actions before user return */
   65  +   arch_ret_to_user r1, lr
   66  +
68 67      @ slow_restore_user_regs
69 68      ldr     r1, [sp, #S_PSR]           @ get calling cpsr
70 69      ldr     lr, [sp, #S_PC]!           @ get pc
+6
include/asm-arm/arch-aaec2000/entry-macro.S
···
15 15      .macro  disable_fiq
16 16      .endm
17 17
   18  +   .macro  get_irqnr_preamble, base, tmp
   19  +   .endm
   20  +
   21  +   .macro  arch_ret_to_user, tmp1, tmp2
   22  +   .endm
   23  +
18 24      .macro  get_irqnr_and_base, irqnr, irqstat, base, tmp
19 25      mov     r4, #0xf8000000
20 26      add     r4, r4, #0x00000500
+6
include/asm-arm/arch-at91rm9200/entry-macro.S
···
16 16      .macro  disable_fiq
17 17      .endm
18 18
   19  +   .macro  get_irqnr_preamble, base, tmp
   20  +   .endm
   21  +
   22  +   .macro  arch_ret_to_user, tmp1, tmp2
   23  +   .endm
   24  +
19 25      .macro  get_irqnr_and_base, irqnr, irqstat, base, tmp
20 26      ldr     \base, =(AT91_VA_BASE_SYS)      @ base virtual address of SYS peripherals
21 27      ldr     \irqnr, [\base, #AT91_AIC_IVR]  @ read IRQ vector register: de-asserts nIRQ to processor (and clears interrupt)
+5
include/asm-arm/arch-cl7500/entry-macro.S
···
 1  1  #include <asm/hardware.h>
 2  2  #include <asm/hardware/entry-macro-iomd.S>
    3  +   .macro  get_irqnr_preamble, base, tmp
    4  +   .endm
    5  +
    6  +   .macro  arch_ret_to_user, tmp1, tmp2
    7  +   .endm
 3  8
+6
include/asm-arm/arch-clps711x/entry-macro.S
···
13 13      .macro  disable_fiq
14 14      .endm
15 15
   16  +   .macro  get_irqnr_preamble, base, tmp
   17  +   .endm
   18  +
   19  +   .macro  arch_ret_to_user, tmp1, tmp2
   20  +   .endm
   21  +
16 22  #if (INTSR2 - INTSR1) != (INTMR2 - INTMR1)
17 23  #error INTSR stride != INTMR stride
18 24  #endif
+6
include/asm-arm/arch-ebsa110/entry-macro.S
···
15 15      .macro  disable_fiq
16 16      .endm
17 17
   18  +   .macro  get_irqnr_preamble, base, tmp
   19  +   .endm
   20  +
   21  +   .macro  arch_ret_to_user, tmp1, tmp2
   22  +   .endm
   23  +
18 24      .macro  get_irqnr_and_base, irqnr, stat, base, tmp
19 25      mov     \base, #IRQ_STAT
20 26      ldrb    \stat, [\base]                  @ get interrupts
+6
include/asm-arm/arch-ebsa285/entry-macro.S
···
14 14      .macro  disable_fiq
15 15      .endm
16 16
   17  +   .macro  get_irqnr_preamble, base, tmp
   18  +   .endm
   19  +
   20  +   .macro  arch_ret_to_user, tmp1, tmp2
   21  +   .endm
   22  +
17 23      .equ    dc21285_high, ARMCSR_BASE & 0xff000000
18 24      .equ    dc21285_low,  ARMCSR_BASE & 0x00ffffff
19 25
+6
include/asm-arm/arch-ep93xx/entry-macro.S
···
14 14      .macro  disable_fiq
15 15      .endm
16 16
   17  +   .macro  get_irqnr_preamble, base, tmp
   18  +   .endm
   19  +
   20  +   .macro  arch_ret_to_user, tmp1, tmp2
   21  +   .endm
   22  +
17 23      .macro  get_irqnr_and_base, irqnr, irqstat, base, tmp
18 24      ldr     \base, =(EP93XX_AHB_VIRT_BASE)
19 25      orr     \base, \base, #0x000b0000
+6
include/asm-arm/arch-h720x/entry-macro.S
···
11 11      .macro  disable_fiq
12 12      .endm
13 13
   14  +   .macro  get_irqnr_preamble, base, tmp
   15  +   .endm
   16  +
   17  +   .macro  arch_ret_to_user, tmp1, tmp2
   18  +   .endm
   19  +
14 20      .macro  get_irqnr_and_base, irqnr, irqstat, base, tmp
15 21  #if defined (CONFIG_CPU_H7201) || defined (CONFIG_CPU_H7202)
16 22      @ we could use the id register on H7202, but this is not
+7
include/asm-arm/arch-imx/entry-macro.S
···
11 11
12 12      .macro  disable_fiq
13 13      .endm
   14  +
   15  +   .macro  get_irqnr_preamble, base, tmp
   16  +   .endm
   17  +
   18  +   .macro  arch_ret_to_user, tmp1, tmp2
   19  +   .endm
   20  +
14 21  #define AITC_NIVECSR   0x40
15 22      .macro  get_irqnr_and_base, irqnr, irqstat, base, tmp
16 23      ldr     \irqstat, =IO_ADDRESS(IMX_AITC_BASE)
+6
include/asm-arm/arch-integrator/entry-macro.S
···
13 13      .macro  disable_fiq
14 14      .endm
15 15
   16  +   .macro  get_irqnr_preamble, base, tmp
   17  +   .endm
   18  +
   19  +   .macro  arch_ret_to_user, tmp1, tmp2
   20  +   .endm
   21  +
16 22      .macro  get_irqnr_and_base, irqnr, irqstat, base, tmp
17 23  /* FIXME: should not be using soo many LDRs here */
18 24      ldr     \base, =IO_ADDRESS(INTEGRATOR_IC_BASE)
+12 -6
include/asm-arm/arch-iop13xx/entry-macro.S
···
19 19      .macro  disable_fiq
20 20      .endm
21 21
   22  +   .macro  get_irqnr_preamble, base, tmp
   23  +   mrc     p15, 0, \tmp, c15, c1, 0
   24  +   orr     \tmp, \tmp, #(1 << 6)
   25  +   mcr     p15, 0, \tmp, c15, c1, 0    @ Enable cp6 access
   26  +   .endm
   27  +
22 28      /*
23 29       * Note: a 1-cycle window exists where iintvec will return the value
24 30       * of iintbase, so we explicitly check for "bad zeros"
25 31       */
26 32      .macro  get_irqnr_and_base, irqnr, irqstat, base, tmp
27     -   mrc     p15, 0, \tmp, c15, c1, 0
28     -   orr     \tmp, \tmp, #(1 << 6)
29     -   mcr     p15, 0, \tmp, c15, c1, 0    @ Enable cp6 access
30     -
31 33      mrc     p6, 0, \irqnr, c3, c2, 0    @ Read IINTVEC
32 34      cmp     \irqnr, #0
33 35      mrceq   p6, 0, \irqnr, c3, c2, 0    @ Re-read on potentially bad zero
34 36      adds    \irqstat, \irqnr, #1        @ Check for 0xffffffff
35 37      movne   \irqnr, \irqnr, lsr #2      @ Convert to irqnr
   38  +   .endm
36 39
37     -   biceq   \tmp, \tmp, #(1 << 6)
38     -   mcreq   p15, 0, \tmp, c15, c1, 0    @ Disable cp6 access if no more interrupts
   40  +   .macro  arch_ret_to_user, tmp1, tmp2
   41  +   mrc     p15, 0, \tmp1, c15, c1, 0
   42  +   ands    \tmp2, \tmp1, #(1 << 6)
   43  +   bicne   \tmp1, \tmp1, #(1 << 6)
   44  +   mcrne   p15, 0, \tmp1, c15, c1, 0   @ Disable cp6 access
39 45      .endm
+24 -9
include/asm-arm/arch-iop32x/entry-macro.S
···
 9  9   */
10 10  #include <asm/arch/iop32x.h>
11 11
12     -   .macro  disable_fiq
13     -   .endm
   12  +   .macro  disable_fiq
   13  +   .endm
14 14
15     -   .macro  get_irqnr_and_base, irqnr, irqstat, base, tmp
16     -   ldr     \base, =IOP3XX_REG_ADDR(0x07D8)
17     -   ldr     \irqstat, [\base]           @ Read IINTSRC
18     -   cmp     \irqstat, #0
19     -   clzne   \irqnr, \irqstat
20     -   rsbne   \irqnr, \irqnr, #31
21     -   .endm
   15  +   .macro  get_irqnr_preamble, base, tmp
   16  +   mrc     p15, 0, \tmp, c15, c1, 0
   17  +   orr     \tmp, \tmp, #(1 << 6)
   18  +   mcr     p15, 0, \tmp, c15, c1, 0    @ Enable cp6 access
   19  +   mrc     p15, 0, \tmp, c15, c1, 0
   20  +   mov     \tmp, \tmp
   21  +   sub     pc, pc, #4                  @ cp_wait
   22  +   .endm
   23  +
   24  +   .macro  get_irqnr_and_base, irqnr, irqstat, base, tmp
   25  +   mrc     p6, 0, \irqstat, c8, c0, 0  @ Read IINTSRC
   26  +   cmp     \irqstat, #0
   27  +   clzne   \irqnr, \irqstat
   28  +   rsbne   \irqnr, \irqnr, #31
   29  +   .endm
   30  +
   31  +   .macro  arch_ret_to_user, tmp1, tmp2
   32  +   mrc     p15, 0, \tmp1, c15, c1, 0
   33  +   ands    \tmp2, \tmp1, #(1 << 6)
   34  +   bicne   \tmp1, \tmp1, #(1 << 6)
   35  +   mcrne   p15, 0, \tmp1, c15, c1, 0   @ Disable cp6 access
   36  +   .endm
+25 -10
include/asm-arm/arch-iop33x/entry-macro.S
···
 9  9   */
10 10  #include <asm/arch/iop33x.h>
11 11
12     -   .macro  disable_fiq
13     -   .endm
   12  +   .macro  disable_fiq
   13  +   .endm
14 14
15     -   .macro  get_irqnr_and_base, irqnr, irqstat, base, tmp
16     -   ldr     \base, =IOP3XX_REG_ADDR(0x07C8)
17     -   ldr     \irqstat, [\base]           @ Read IINTVEC
18     -   cmp     \irqstat, #0
19     -   ldreq   \irqstat, [\base]           @ erratum 63 workaround
20     -   adds    \irqnr, \irqstat, #1
21     -   movne   \irqnr, \irqstat, lsr #2
22     -   .endm
   15  +   .macro  get_irqnr_preamble, base, tmp
   16  +   mrc     p15, 0, \tmp, c15, c1, 0
   17  +   orr     \tmp, \tmp, #(1 << 6)
   18  +   mcr     p15, 0, \tmp, c15, c1, 0    @ Enable cp6 access
   19  +   mrc     p15, 0, \tmp, c15, c1, 0
   20  +   mov     \tmp, \tmp
   21  +   sub     pc, pc, #4                  @ cp_wait
   22  +   .endm
   23  +
   24  +   .macro  get_irqnr_and_base, irqnr, irqstat, base, tmp
   25  +   mrc     p6, 0, \irqstat, c14, c0, 0 @ Read IINTVEC
   26  +   cmp     \irqstat, #0
   27  +   mrceq   p6, 0, \irqstat, c14, c0, 0 @ erratum 63 workaround
   28  +   adds    \irqnr, \irqstat, #1
   29  +   movne   \irqnr, \irqstat, lsr #2
   30  +   .endm
   31  +
   32  +   .macro  arch_ret_to_user, tmp1, tmp2
   33  +   mrc     p15, 0, \tmp1, c15, c1, 0
   34  +   ands    \tmp2, \tmp1, #(1 << 6)
   35  +   bicne   \tmp1, \tmp1, #(1 << 6)
   36  +   mcrne   p15, 0, \tmp1, c15, c1, 0   @ Disable cp6 access
   37  +   .endm
+6
include/asm-arm/arch-ixp2000/entry-macro.S
···
12 12      .macro  disable_fiq
13 13      .endm
14 14
   15  +   .macro  get_irqnr_preamble, base, tmp
   16  +   .endm
   17  +
   18  +   .macro  arch_ret_to_user, tmp1, tmp2
   19  +   .endm
   20  +
15 21      .macro  get_irqnr_and_base, irqnr, irqstat, base, tmp
16 22
17 23      mov     \irqnr, #0x0                @clear out irqnr as default
+6
include/asm-arm/arch-ixp23xx/entry-macro.S
···
 5  5      .macro  disable_fiq
 6  6      .endm
 7  7
    8  +   .macro  get_irqnr_preamble, base, tmp
    9  +   .endm
   10  +
   11  +   .macro  arch_ret_to_user, tmp1, tmp2
   12  +   .endm
   13  +
 8 14      .macro  get_irqnr_and_base, irqnr, irqstat, base, tmp
 9 15      ldr     \irqnr, =(IXP23XX_INTC_VIRT + IXP23XX_INTR_IRQ_ENC_ST_OFFSET)
10 16      ldr     \irqnr, [\irqnr]            @ get interrupt number
+6
include/asm-arm/arch-ixp4xx/entry-macro.S
···
12 12      .macro  disable_fiq
13 13      .endm
14 14
   15  +   .macro  get_irqnr_preamble, base, tmp
   16  +   .endm
   17  +
   18  +   .macro  arch_ret_to_user, tmp1, tmp2
   19  +   .endm
   20  +
15 21      .macro  get_irqnr_and_base, irqnr, irqstat, base, tmp
16 22      ldr     \irqstat, =(IXP4XX_INTC_BASE_VIRT+IXP4XX_ICIP_OFFSET)
17 23      ldr     \irqstat, [\irqstat]        @ get interrupts
+6
include/asm-arm/arch-l7200/entry-macro.S
···
14 14      .macro  disable_fiq
15 15      .endm
16 16
   17  +   .macro  get_irqnr_preamble, base, tmp
   18  +   .endm
   19  +
   20  +   .macro  arch_ret_to_user, tmp1, tmp2
   21  +   .endm
   22  +
17 23      .macro  get_irqnr_and_base, irqnr, irqstat, base, tmp
18 24      mov     \irqstat, #irq_base_addr        @ Virt addr IRQ regs
19 25      add     \irqstat, \irqstat, #0x00001000 @ Status reg
+6
include/asm-arm/arch-lh7a40x/entry-macro.S
···
26 26      .macro  disable_fiq
27 27      .endm
28 28
   29  +   .macro  get_irqnr_preamble, base, tmp
   30  +   .endm
   31  +
   32  +   .macro  arch_ret_to_user, tmp1, tmp2
   33  +   .endm
   34  +
29 35      .macro  get_irqnr_and_base, irqnr, irqstat, base, tmp
30 36
31 37  branch_irq_lh7a400: b 1000f
+6
include/asm-arm/arch-netx/entry-macro.S
···
23 23      .macro  disable_fiq
24 24      .endm
25 25
   26  +   .macro  get_irqnr_preamble, base, tmp
   27  +   .endm
   28  +
   29  +   .macro  arch_ret_to_user, tmp1, tmp2
   30  +   .endm
   31  +
26 32      .macro  get_irqnr_and_base, irqnr, irqstat, base, tmp
27 33      mov     \base, #io_p2v(0x00100000)
28 34      add     \base, \base, #0x000ff000
+6
include/asm-arm/arch-omap/entry-macro.S
···
29 29      .macro  disable_fiq
30 30      .endm
31 31
   32  +   .macro  get_irqnr_preamble, base, tmp
   33  +   .endm
   34  +
   35  +   .macro  arch_ret_to_user, tmp1, tmp2
   36  +   .endm
   37  +
32 38      .macro  get_irqnr_and_base, irqnr, irqstat, base, tmp
33 39      ldr     \base, =IO_ADDRESS(OMAP_IH1_BASE)
34 40      ldr     \irqnr, [\base, #IRQ_ITR_REG_OFFSET]
+6
include/asm-arm/arch-pnx4008/entry-macro.S
···
28 28      .macro  disable_fiq
29 29      .endm
30 30
   31  +   .macro  get_irqnr_preamble, base, tmp
   32  +   .endm
   33  +
   34  +   .macro  arch_ret_to_user, tmp1, tmp2
   35  +   .endm
   36  +
31 37      .macro  get_irqnr_and_base, irqnr, irqstat, base, tmp
32 38      /* decode the MIC interrupt numbers */
33 39      ldr     \base, =IO_ADDRESS(PNX4008_INTCTRLMIC_BASE)
+6
include/asm-arm/arch-pxa/entry-macro.S
···
13 13      .macro  disable_fiq
14 14      .endm
15 15
   16  +   .macro  get_irqnr_preamble, base, tmp
   17  +   .endm
   18  +
   19  +   .macro  arch_ret_to_user, tmp1, tmp2
   20  +   .endm
   21  +
16 22      .macro  get_irqnr_and_base, irqnr, irqstat, base, tmp
17 23  #ifdef CONFIG_PXA27x
18 24      mrc     p6, 0, \irqstat, c0, c0, 0  @ ICIP
+6
include/asm-arm/arch-realview/entry-macro.S
···
13 13      .macro  disable_fiq
14 14      .endm
15 15
   16  +   .macro  get_irqnr_preamble, base, tmp
   17  +   .endm
   18  +
   19  +   .macro  arch_ret_to_user, tmp1, tmp2
   20  +   .endm
   21  +
16 22  /*
17 23   * The interrupt numbering scheme is defined in the
18 24   * interrupt controller spec.  To wit:
+5
include/asm-arm/arch-rpc/entry-macro.S
···
 1  1  #include <asm/hardware.h>
 2  2  #include <asm/hardware/entry-macro-iomd.S>
    3  +   .macro  get_irqnr_preamble, base, tmp
    4  +   .endm
    5  +
    6  +   .macro  arch_ret_to_user, tmp1, tmp2
    7  +   .endm
 3  8
+6
include/asm-arm/arch-s3c2410/entry-macro.S
···
22 22  #include <asm/hardware.h>
23 23  #include <asm/irq.h>
24 24
   25  +   .macro  get_irqnr_preamble, base, tmp
   26  +   .endm
   27  +
   28  +   .macro  arch_ret_to_user, tmp1, tmp2
   29  +   .endm
   30  +
25 31      .macro  get_irqnr_and_base, irqnr, irqstat, base, tmp
26 32
27 33      mov     \base, #S3C24XX_VA_IRQ
+6
include/asm-arm/arch-sa1100/entry-macro.S
···
11 11      .macro  disable_fiq
12 12      .endm
13 13
   14  +   .macro  get_irqnr_preamble, base, tmp
   15  +   .endm
   16  +
   17  +   .macro  arch_ret_to_user, tmp1, tmp2
   18  +   .endm
   19  +
14 20      .macro  get_irqnr_and_base, irqnr, irqstat, base, tmp
15 21      mov     r4, #0xfa000000             @ ICIP = 0xfa050000
16 22      add     r4, r4, #0x00050000
+6
include/asm-arm/arch-shark/entry-macro.S
···
10 10      .macro  disable_fiq
11 11      .endm
12 12
   13  +   .macro  get_irqnr_preamble, base, tmp
   14  +   .endm
   15  +
   16  +   .macro  arch_ret_to_user, tmp1, tmp2
   17  +   .endm
   18  +
13 19      .macro  get_irqnr_and_base, irqnr, irqstat, base, tmp
14 20      mov     r4, #0xe0000000
15 21
+6
include/asm-arm/arch-versatile/entry-macro.S
···
13 13      .macro  disable_fiq
14 14      .endm
15 15
   16  +   .macro  get_irqnr_preamble, base, tmp
   17  +   .endm
   18  +
   19  +   .macro  arch_ret_to_user, tmp1, tmp2
   20  +   .endm
   21  +
16 22      .macro  get_irqnr_and_base, irqnr, irqstat, base, tmp
17 23      ldr     \base, =IO_ADDRESS(VERSATILE_VIC_BASE)
18 24      ldr     \irqstat, [\base, #VIC_IRQ_STATUS]  @ get masked status