parisc: Fix alignment of pa_tlb_lock in assembly on 32-bit SMP kernel

Qemu for PARISC reported strange failures on a 32-bit SMP parisc kernel:
"Not-handled unaligned insn 0x0e8011d6 and 0x0c2011c9".

Those opcodes decode to the ldcw assembly instruction, which on 32-bit
requires its operand to be aligned to 16 bytes to ensure atomicity.
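
For illustration, here is a minimal user-space C sketch of how the C side
guarantees this: the lock storage is over-sized so that a 16-byte-aligned
word always exists inside it, and the address is rounded up to that word.
The struct and function names here are mine; the rounding expression
mirrors the __ldcw_align() macro from ldcw.h in the diff below.

  #include <stdio.h>

  #define __PA_LDCW_ALIGNMENT 16  /* ldcw needs a 16-byte aligned word */

  /* Over-sized lock storage: among four consecutive words, at least one
   * is 16-byte aligned, whatever the struct's own alignment is. */
  struct model_spinlock {
          volatile unsigned int lock[4];
  };

  /* Mirrors __ldcw_align(): round &lock[0] up to the next 16-byte
   * boundary and use that word as the ldcw operand. */
  static volatile unsigned int *model_ldcw_align(struct model_spinlock *a)
  {
          unsigned long ret = (unsigned long) &a->lock[0];

          ret = (ret + __PA_LDCW_ALIGNMENT - 1) & ~(__PA_LDCW_ALIGNMENT - 1);
          return (volatile unsigned int *) ret;
  }

  int main(void)
  {
          struct model_spinlock s;

          printf("lock[] at %p, ldcw word at %p\n",
                 (void *) s.lock, (void *) model_ldcw_align(&s));
          return 0;
  }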

As it turns out, qemu is correct: our assembly code in entry.S and
pacache.S doesn't take the required alignment into account.

This patch fixes the problem by aligning the lock address in the assembly
code in the same manner as our C code already does.

Signed-off-by: Helge Deller <deller@gmx.de>
Cc: <stable@vger.kernel.org> # v4.0+

---
 arch/parisc/include/asm/ldcw.h |  2 ++
 arch/parisc/kernel/entry.S     | 13 +++++++++++--
 arch/parisc/kernel/pacache.S   |  9 +++++++--
 3 files changed, 20 insertions(+), 4 deletions(-)
diff --git a/arch/parisc/include/asm/ldcw.h b/arch/parisc/include/asm/ldcw.h
--- a/arch/parisc/include/asm/ldcw.h
+++ b/arch/parisc/include/asm/ldcw.h
@@ -12,6 +12,7 @@
    for the semaphore.  */

 #define __PA_LDCW_ALIGNMENT	16
+#define __PA_LDCW_ALIGN_ORDER	4
 #define __ldcw_align(a) ({					\
 	unsigned long __ret = (unsigned long) &(a)->lock[0];	\
 	__ret = (__ret + __PA_LDCW_ALIGNMENT - 1)		\
@@ -29,6 +30,7 @@
    ldcd). */

 #define __PA_LDCW_ALIGNMENT	4
+#define __PA_LDCW_ALIGN_ORDER	2
 #define __ldcw_align(a) (&(a)->slock)
 #define __LDCW	"ldcw,co"

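One detail worth noting in this hunk: __PA_LDCW_ALIGN_ORDER must stay
equal to log2(__PA_LDCW_ALIGNMENT) (4 for 16, 2 for 4), because the
assembly below clears exactly that many low-order bits. A hypothetical
compile-time guard, not part of this patch, could pin that invariant down
if placed in ldcw.h after the definitions:

  /* Hypothetical guard (not in the patch): break the build if the two
   * macros ever fall out of sync. */
  _Static_assert((1 << __PA_LDCW_ALIGN_ORDER) == __PA_LDCW_ALIGNMENT,
                 "__PA_LDCW_ALIGN_ORDER must be log2(__PA_LDCW_ALIGNMENT)");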
diff --git a/arch/parisc/kernel/entry.S b/arch/parisc/kernel/entry.S
--- a/arch/parisc/kernel/entry.S
+++ b/arch/parisc/kernel/entry.S
@@ -35,6 +35,7 @@
 #include <asm/pgtable.h>
 #include <asm/signal.h>
 #include <asm/unistd.h>
+#include <asm/ldcw.h>
 #include <asm/thread_info.h>

 #include <linux/linkage.h>
@@ -46,6 +47,14 @@
 #endif

 	.import		pa_tlb_lock,data
+	.macro	load_pa_tlb_lock reg
+#if __PA_LDCW_ALIGNMENT > 4
+	load32	PA(pa_tlb_lock) + __PA_LDCW_ALIGNMENT-1, \reg
+	depi	0,31,__PA_LDCW_ALIGN_ORDER, \reg
+#else
+	load32	PA(pa_tlb_lock), \reg
+#endif
+	.endm

 	/* space_to_prot macro creates a prot id from a space id */

@@ -457,7 +466,7 @@
 	.macro		tlb_lock	spc,ptp,pte,tmp,tmp1,fault
 #ifdef CONFIG_SMP
 	cmpib,COND(=),n	0,\spc,2f
-	load32		PA(pa_tlb_lock),\tmp
+	load_pa_tlb_lock \tmp
1:	LDCW		0(\tmp),\tmp1
 	cmpib,COND(=)	0,\tmp1,1b
 	nop
@@ -480,7 +489,7 @@
 	/* Release pa_tlb_lock lock. */
 	.macro		tlb_unlock1	spc,tmp
 #ifdef CONFIG_SMP
-	load32		PA(pa_tlb_lock),\tmp
+	load_pa_tlb_lock \tmp
 	tlb_unlock0	\spc,\tmp
 #endif
 	.endm
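On PA-RISC, bits are numbered from the most significant end, so bit 31 is
the least significant bit; depi 0,31,len,reg deposits a len-bit field of
zeros ending at bit 31, i.e. it clears the low len bits. Together with
first adding __PA_LDCW_ALIGNMENT-1, the new load_pa_tlb_lock macro
therefore rounds the lock address up to the next 16-byte boundary. A C
model of the sequence (the function name and standalone form are mine):

  #define __PA_LDCW_ALIGNMENT	16
  #define __PA_LDCW_ALIGN_ORDER	4

  /* C model of load_pa_tlb_lock:
   *   load32  PA(pa_tlb_lock) + __PA_LDCW_ALIGNMENT-1, \reg   add 15
   *   depi    0,31,__PA_LDCW_ALIGN_ORDER, \reg                clear low 4 bits
   * yielding the 16-byte aligned ldcw word inside the lock storage. */
  static unsigned long model_load_pa_tlb_lock(unsigned long pa_tlb_lock)
  {
          unsigned long reg = pa_tlb_lock + __PA_LDCW_ALIGNMENT - 1;

          return reg & ~((1UL << __PA_LDCW_ALIGN_ORDER) - 1);
  }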
diff --git a/arch/parisc/kernel/pacache.S b/arch/parisc/kernel/pacache.S
--- a/arch/parisc/kernel/pacache.S
+++ b/arch/parisc/kernel/pacache.S
@@ -36,6 +36,7 @@
 #include <asm/assembly.h>
 #include <asm/pgtable.h>
 #include <asm/cache.h>
+#include <asm/ldcw.h>
 #include <linux/linkage.h>

 	.text
@@ -333,8 +334,12 @@

 	.macro	tlb_lock	la,flags,tmp
 #ifdef CONFIG_SMP
-	ldil		L%pa_tlb_lock,%r1
-	ldo		R%pa_tlb_lock(%r1),\la
+#if __PA_LDCW_ALIGNMENT > 4
+	load32		pa_tlb_lock + __PA_LDCW_ALIGNMENT-1, \la
+	depi		0,31,__PA_LDCW_ALIGN_ORDER, \la
+#else
+	load32		pa_tlb_lock, \la
+#endif
 	rsm		PSW_SM_I,\flags
1:	LDCW		0(\la),\tmp
 	cmpib,<>,n	0,\tmp,3f