Linux kernel mirror (for testing): git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git

LoongArch: Re-tab the assembly files

Reflow the *.S files for better stylistic consistency, namely hard tabs
after mnemonic position, and vertical alignment of the first operand
with hard tabs. Tab width is obviously 8. Some pre-existing intra-block
vertical alignments are preserved.
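
To make the convention concrete, here is a small hand-made before/after sketch (the instructions are taken from the diffs below, but the pre-patch spacing is only indicative and not an excerpt from the tree): the mnemonic is followed by a hard tab, and further hard tabs are added so that the first operands of neighbouring lines land on the same 8-column tab stop.

	# before: spaces after the mnemonic, first operands drifting
	cfi_st t2, PT_R3
	cfi_rel_offset  sp, PT_R3
	st.d zero, sp, PT_R0

	# after: hard tab after the mnemonic, first operands aligned
	cfi_st		t2, PT_R3
	cfi_rel_offset	sp, PT_R3
	st.d		zero, sp, PT_R0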

Signed-off-by: WANG Xuerui <git@xen0n.name>
Signed-off-by: Huacai Chen <chenhuacai@loongson.cn>

Authored by WANG Xuerui and committed by Huacai Chen
f5c3c22f 1fdb9a92

7 files changed: +163 -163
arch/loongarch/kernel/entry.S (+2 -2)

···
 	addi.d	sp, sp, -PT_SIZE
 	cfi_st	t2, PT_R3
-	cfi_rel_offset	sp, PT_R3
+	cfi_rel_offset	sp, PT_R3
 	st.d	zero, sp, PT_R0
 	csrrd	t2, LOONGARCH_CSR_PRMD
 	st.d	t2, sp, PT_PRMD
···
 	cfi_st	a7, PT_R11
 	csrrd	ra, LOONGARCH_CSR_ERA
 	st.d	ra, sp, PT_ERA
-	cfi_rel_offset	ra, PT_ERA
+	cfi_rel_offset	ra, PT_ERA

 	cfi_st	tp, PT_R2
 	cfi_st	u0, PT_R21
arch/loongarch/kernel/fpu.S (+85 -85)

···
 	.endm

 	.macro	sc_save_fp base
-	EX	fst.d $f0, \base, (0 * FPU_REG_WIDTH)
-	EX	fst.d $f1, \base, (1 * FPU_REG_WIDTH)
-	EX	fst.d $f2, \base, (2 * FPU_REG_WIDTH)
-	EX	fst.d $f3, \base, (3 * FPU_REG_WIDTH)
-	EX	fst.d $f4, \base, (4 * FPU_REG_WIDTH)
-	EX	fst.d $f5, \base, (5 * FPU_REG_WIDTH)
-	EX	fst.d $f6, \base, (6 * FPU_REG_WIDTH)
-	EX	fst.d $f7, \base, (7 * FPU_REG_WIDTH)
-	EX	fst.d $f8, \base, (8 * FPU_REG_WIDTH)
-	EX	fst.d $f9, \base, (9 * FPU_REG_WIDTH)
-	EX	fst.d $f10, \base, (10 * FPU_REG_WIDTH)
-	EX	fst.d $f11, \base, (11 * FPU_REG_WIDTH)
-	EX	fst.d $f12, \base, (12 * FPU_REG_WIDTH)
-	EX	fst.d $f13, \base, (13 * FPU_REG_WIDTH)
-	EX	fst.d $f14, \base, (14 * FPU_REG_WIDTH)
-	EX	fst.d $f15, \base, (15 * FPU_REG_WIDTH)
-	EX	fst.d $f16, \base, (16 * FPU_REG_WIDTH)
-	EX	fst.d $f17, \base, (17 * FPU_REG_WIDTH)
-	EX	fst.d $f18, \base, (18 * FPU_REG_WIDTH)
-	EX	fst.d $f19, \base, (19 * FPU_REG_WIDTH)
-	EX	fst.d $f20, \base, (20 * FPU_REG_WIDTH)
-	EX	fst.d $f21, \base, (21 * FPU_REG_WIDTH)
-	EX	fst.d $f22, \base, (22 * FPU_REG_WIDTH)
-	EX	fst.d $f23, \base, (23 * FPU_REG_WIDTH)
-	EX	fst.d $f24, \base, (24 * FPU_REG_WIDTH)
-	EX	fst.d $f25, \base, (25 * FPU_REG_WIDTH)
-	EX	fst.d $f26, \base, (26 * FPU_REG_WIDTH)
-	EX	fst.d $f27, \base, (27 * FPU_REG_WIDTH)
-	EX	fst.d $f28, \base, (28 * FPU_REG_WIDTH)
-	EX	fst.d $f29, \base, (29 * FPU_REG_WIDTH)
-	EX	fst.d $f30, \base, (30 * FPU_REG_WIDTH)
-	EX	fst.d $f31, \base, (31 * FPU_REG_WIDTH)
+	EX	fst.d $f0, \base, (0 * FPU_REG_WIDTH)
+	EX	fst.d $f1, \base, (1 * FPU_REG_WIDTH)
+	EX	fst.d $f2, \base, (2 * FPU_REG_WIDTH)
+	EX	fst.d $f3, \base, (3 * FPU_REG_WIDTH)
+	EX	fst.d $f4, \base, (4 * FPU_REG_WIDTH)
+	EX	fst.d $f5, \base, (5 * FPU_REG_WIDTH)
+	EX	fst.d $f6, \base, (6 * FPU_REG_WIDTH)
+	EX	fst.d $f7, \base, (7 * FPU_REG_WIDTH)
+	EX	fst.d $f8, \base, (8 * FPU_REG_WIDTH)
+	EX	fst.d $f9, \base, (9 * FPU_REG_WIDTH)
+	EX	fst.d $f10, \base, (10 * FPU_REG_WIDTH)
+	EX	fst.d $f11, \base, (11 * FPU_REG_WIDTH)
+	EX	fst.d $f12, \base, (12 * FPU_REG_WIDTH)
+	EX	fst.d $f13, \base, (13 * FPU_REG_WIDTH)
+	EX	fst.d $f14, \base, (14 * FPU_REG_WIDTH)
+	EX	fst.d $f15, \base, (15 * FPU_REG_WIDTH)
+	EX	fst.d $f16, \base, (16 * FPU_REG_WIDTH)
+	EX	fst.d $f17, \base, (17 * FPU_REG_WIDTH)
+	EX	fst.d $f18, \base, (18 * FPU_REG_WIDTH)
+	EX	fst.d $f19, \base, (19 * FPU_REG_WIDTH)
+	EX	fst.d $f20, \base, (20 * FPU_REG_WIDTH)
+	EX	fst.d $f21, \base, (21 * FPU_REG_WIDTH)
+	EX	fst.d $f22, \base, (22 * FPU_REG_WIDTH)
+	EX	fst.d $f23, \base, (23 * FPU_REG_WIDTH)
+	EX	fst.d $f24, \base, (24 * FPU_REG_WIDTH)
+	EX	fst.d $f25, \base, (25 * FPU_REG_WIDTH)
+	EX	fst.d $f26, \base, (26 * FPU_REG_WIDTH)
+	EX	fst.d $f27, \base, (27 * FPU_REG_WIDTH)
+	EX	fst.d $f28, \base, (28 * FPU_REG_WIDTH)
+	EX	fst.d $f29, \base, (29 * FPU_REG_WIDTH)
+	EX	fst.d $f30, \base, (30 * FPU_REG_WIDTH)
+	EX	fst.d $f31, \base, (31 * FPU_REG_WIDTH)
 	.endm

 	.macro	sc_restore_fp base
-	EX	fld.d $f0, \base, (0 * FPU_REG_WIDTH)
-	EX	fld.d $f1, \base, (1 * FPU_REG_WIDTH)
-	EX	fld.d $f2, \base, (2 * FPU_REG_WIDTH)
-	EX	fld.d $f3, \base, (3 * FPU_REG_WIDTH)
-	EX	fld.d $f4, \base, (4 * FPU_REG_WIDTH)
-	EX	fld.d $f5, \base, (5 * FPU_REG_WIDTH)
-	EX	fld.d $f6, \base, (6 * FPU_REG_WIDTH)
-	EX	fld.d $f7, \base, (7 * FPU_REG_WIDTH)
-	EX	fld.d $f8, \base, (8 * FPU_REG_WIDTH)
-	EX	fld.d $f9, \base, (9 * FPU_REG_WIDTH)
-	EX	fld.d $f10, \base, (10 * FPU_REG_WIDTH)
-	EX	fld.d $f11, \base, (11 * FPU_REG_WIDTH)
-	EX	fld.d $f12, \base, (12 * FPU_REG_WIDTH)
-	EX	fld.d $f13, \base, (13 * FPU_REG_WIDTH)
-	EX	fld.d $f14, \base, (14 * FPU_REG_WIDTH)
-	EX	fld.d $f15, \base, (15 * FPU_REG_WIDTH)
-	EX	fld.d $f16, \base, (16 * FPU_REG_WIDTH)
-	EX	fld.d $f17, \base, (17 * FPU_REG_WIDTH)
-	EX	fld.d $f18, \base, (18 * FPU_REG_WIDTH)
-	EX	fld.d $f19, \base, (19 * FPU_REG_WIDTH)
-	EX	fld.d $f20, \base, (20 * FPU_REG_WIDTH)
-	EX	fld.d $f21, \base, (21 * FPU_REG_WIDTH)
-	EX	fld.d $f22, \base, (22 * FPU_REG_WIDTH)
-	EX	fld.d $f23, \base, (23 * FPU_REG_WIDTH)
-	EX	fld.d $f24, \base, (24 * FPU_REG_WIDTH)
-	EX	fld.d $f25, \base, (25 * FPU_REG_WIDTH)
-	EX	fld.d $f26, \base, (26 * FPU_REG_WIDTH)
-	EX	fld.d $f27, \base, (27 * FPU_REG_WIDTH)
-	EX	fld.d $f28, \base, (28 * FPU_REG_WIDTH)
-	EX	fld.d $f29, \base, (29 * FPU_REG_WIDTH)
-	EX	fld.d $f30, \base, (30 * FPU_REG_WIDTH)
-	EX	fld.d $f31, \base, (31 * FPU_REG_WIDTH)
+	EX	fld.d $f0, \base, (0 * FPU_REG_WIDTH)
+	EX	fld.d $f1, \base, (1 * FPU_REG_WIDTH)
+	EX	fld.d $f2, \base, (2 * FPU_REG_WIDTH)
+	EX	fld.d $f3, \base, (3 * FPU_REG_WIDTH)
+	EX	fld.d $f4, \base, (4 * FPU_REG_WIDTH)
+	EX	fld.d $f5, \base, (5 * FPU_REG_WIDTH)
+	EX	fld.d $f6, \base, (6 * FPU_REG_WIDTH)
+	EX	fld.d $f7, \base, (7 * FPU_REG_WIDTH)
+	EX	fld.d $f8, \base, (8 * FPU_REG_WIDTH)
+	EX	fld.d $f9, \base, (9 * FPU_REG_WIDTH)
+	EX	fld.d $f10, \base, (10 * FPU_REG_WIDTH)
+	EX	fld.d $f11, \base, (11 * FPU_REG_WIDTH)
+	EX	fld.d $f12, \base, (12 * FPU_REG_WIDTH)
+	EX	fld.d $f13, \base, (13 * FPU_REG_WIDTH)
+	EX	fld.d $f14, \base, (14 * FPU_REG_WIDTH)
+	EX	fld.d $f15, \base, (15 * FPU_REG_WIDTH)
+	EX	fld.d $f16, \base, (16 * FPU_REG_WIDTH)
+	EX	fld.d $f17, \base, (17 * FPU_REG_WIDTH)
+	EX	fld.d $f18, \base, (18 * FPU_REG_WIDTH)
+	EX	fld.d $f19, \base, (19 * FPU_REG_WIDTH)
+	EX	fld.d $f20, \base, (20 * FPU_REG_WIDTH)
+	EX	fld.d $f21, \base, (21 * FPU_REG_WIDTH)
+	EX	fld.d $f22, \base, (22 * FPU_REG_WIDTH)
+	EX	fld.d $f23, \base, (23 * FPU_REG_WIDTH)
+	EX	fld.d $f24, \base, (24 * FPU_REG_WIDTH)
+	EX	fld.d $f25, \base, (25 * FPU_REG_WIDTH)
+	EX	fld.d $f26, \base, (26 * FPU_REG_WIDTH)
+	EX	fld.d $f27, \base, (27 * FPU_REG_WIDTH)
+	EX	fld.d $f28, \base, (28 * FPU_REG_WIDTH)
+	EX	fld.d $f29, \base, (29 * FPU_REG_WIDTH)
+	EX	fld.d $f30, \base, (30 * FPU_REG_WIDTH)
+	EX	fld.d $f31, \base, (31 * FPU_REG_WIDTH)
 	.endm

 	.macro	sc_save_fcc base, tmp0, tmp1
 	movcf2gr	\tmp0, $fcc0
-	move	\tmp1, \tmp0
+	move	\tmp1, \tmp0
 	movcf2gr	\tmp0, $fcc1
 	bstrins.d	\tmp1, \tmp0, 15, 8
 	movcf2gr	\tmp0, $fcc2
···
 	bstrins.d	\tmp1, \tmp0, 55, 48
 	movcf2gr	\tmp0, $fcc7
 	bstrins.d	\tmp1, \tmp0, 63, 56
-	EX	st.d \tmp1, \base, 0
+	EX	st.d \tmp1, \base, 0
 	.endm

 	.macro	sc_restore_fcc base, tmp0, tmp1
-	EX	ld.d \tmp0, \base, 0
+	EX	ld.d \tmp0, \base, 0
 	bstrpick.d	\tmp1, \tmp0, 7, 0
 	movgr2cf	$fcc0, \tmp1
 	bstrpick.d	\tmp1, \tmp0, 15, 8
···

 	.macro	sc_save_fcsr base, tmp0
 	movfcsr2gr	\tmp0, fcsr0
-	EX	st.w \tmp0, \base, 0
+	EX	st.w \tmp0, \base, 0
 	.endm

 	.macro	sc_restore_fcsr base, tmp0
-	EX	ld.w \tmp0, \base, 0
+	EX	ld.w \tmp0, \base, 0
 	movgr2fcsr	fcsr0, \tmp0
 	.endm

···
  */
 SYM_FUNC_START(_save_fp)
 	fpu_save_csr	a0 t1
-	fpu_save_double	a0 t1		# clobbers t1
+	fpu_save_double	a0 t1		# clobbers t1
 	fpu_save_cc	a0 t1 t2	# clobbers t1, t2
-	jr	ra
+	jr	ra
 SYM_FUNC_END(_save_fp)
 EXPORT_SYMBOL(_save_fp)

···
  * Restore a thread's fp context.
  */
 SYM_FUNC_START(_restore_fp)
-	fpu_restore_double	a0 t1	# clobbers t1
-	fpu_restore_csr	a0 t1
-	fpu_restore_cc	a0 t1 t2	# clobbers t1, t2
-	jr	ra
+	fpu_restore_double	a0 t1	# clobbers t1
+	fpu_restore_csr	a0 t1
+	fpu_restore_cc	a0 t1 t2	# clobbers t1, t2
+	jr	ra
 SYM_FUNC_END(_restore_fp)

 /*
···
  * a2: fcsr
  */
 SYM_FUNC_START(_save_fp_context)
-	sc_save_fcc	a1 t1 t2
-	sc_save_fcsr	a2 t1
-	sc_save_fp	a0
-	li.w	a0, 0		# success
-	jr	ra
+	sc_save_fcc	a1 t1 t2
+	sc_save_fcsr	a2 t1
+	sc_save_fp	a0
+	li.w	a0, 0		# success
+	jr	ra
 SYM_FUNC_END(_save_fp_context)

 /*
···
  * a2: fcsr
  */
 SYM_FUNC_START(_restore_fp_context)
-	sc_restore_fp	a0
-	sc_restore_fcc	a1 t1 t2
-	sc_restore_fcsr	a2 t1
-	li.w	a0, 0		# success
-	jr	ra
+	sc_restore_fp	a0
+	sc_restore_fcc	a1 t1 t2
+	sc_restore_fcsr	a2 t1
+	li.w	a0, 0		# success
+	jr	ra
 SYM_FUNC_END(_restore_fp_context)

 SYM_FUNC_START(fault)
arch/loongarch/kernel/genex.S (+4 -4)

···
 	BACKUP_T0T1
 	SAVE_ALL
 	la.abs	t1, __arch_cpu_idle
-	LONG_L	t0, sp, PT_ERA
+	LONG_L	t0, sp, PT_ERA
 	/* 32 byte rollback region */
 	ori	t0, t0, 0x1f
 	xori	t0, t0, 0x1f
 	bne	t0, t1, 1f
-	LONG_S	t0, sp, PT_ERA
+	LONG_S	t0, sp, PT_ERA
 1:	move	a0, sp
 	move	a1, sp
 	la.abs	t0, do_vint
-	jirl	ra, t0, 0
+	jirl	ra, t0, 0
 	RESTORE_ALL_AND_RET
 SYM_FUNC_END(handle_vint)

···
 	build_prep_\prep
 	move	a0, sp
 	la.abs	t0, do_\handler
-	jirl	ra, t0, 0
+	jirl	ra, t0, 0
 	RESTORE_ALL_AND_RET
 SYM_FUNC_END(handle_\exception)
 	.endm
arch/loongarch/kernel/head.S (+2 -2)

···
 	ld.d	sp, t0, CPU_BOOT_STACK
 	ld.d	tp, t0, CPU_BOOT_TINFO

-	la.abs	t0, 0f
-	jr	t0
+	la.abs	t0, 0f
+	jr	t0
 0:
 	bl	start_secondary
 SYM_CODE_END(smpboot_entry)
arch/loongarch/kernel/switch.S (+2 -2)

···
 	move	tp, a2
 	cpu_restore_nonscratch a1

-	li.w	t0, _THREAD_SIZE - 32
-	PTR_ADD	t0, t0, tp
+	li.w	t0, _THREAD_SIZE - 32
+	PTR_ADD	t0, t0, tp
 	set_saved_sp	t0, t1, t2

 	ldptr.d	t1, a1, THREAD_CSRPRMD
arch/loongarch/mm/page.S (+59 -59)

···

 	.align 5
 SYM_FUNC_START(clear_page)
-	lu12i.w	t0, 1 << (PAGE_SHIFT - 12)
-	add.d	t0, t0, a0
+	lu12i.w	t0, 1 << (PAGE_SHIFT - 12)
+	add.d	t0, t0, a0
 1:
-	st.d	zero, a0, 0
-	st.d	zero, a0, 8
-	st.d	zero, a0, 16
-	st.d	zero, a0, 24
-	st.d	zero, a0, 32
-	st.d	zero, a0, 40
-	st.d	zero, a0, 48
-	st.d	zero, a0, 56
-	addi.d	a0, a0, 128
-	st.d	zero, a0, -64
-	st.d	zero, a0, -56
-	st.d	zero, a0, -48
-	st.d	zero, a0, -40
-	st.d	zero, a0, -32
-	st.d	zero, a0, -24
-	st.d	zero, a0, -16
-	st.d	zero, a0, -8
-	bne	t0, a0, 1b
+	st.d	zero, a0, 0
+	st.d	zero, a0, 8
+	st.d	zero, a0, 16
+	st.d	zero, a0, 24
+	st.d	zero, a0, 32
+	st.d	zero, a0, 40
+	st.d	zero, a0, 48
+	st.d	zero, a0, 56
+	addi.d	a0, a0, 128
+	st.d	zero, a0, -64
+	st.d	zero, a0, -56
+	st.d	zero, a0, -48
+	st.d	zero, a0, -40
+	st.d	zero, a0, -32
+	st.d	zero, a0, -24
+	st.d	zero, a0, -16
+	st.d	zero, a0, -8
+	bne	t0, a0, 1b

-	jr	ra
+	jr	ra
 SYM_FUNC_END(clear_page)
 EXPORT_SYMBOL(clear_page)

 	.align 5
 SYM_FUNC_START(copy_page)
-	lu12i.w	t8, 1 << (PAGE_SHIFT - 12)
-	add.d	t8, t8, a0
+	lu12i.w	t8, 1 << (PAGE_SHIFT - 12)
+	add.d	t8, t8, a0
 1:
-	ld.d	t0, a1, 0
-	ld.d	t1, a1, 8
-	ld.d	t2, a1, 16
-	ld.d	t3, a1, 24
-	ld.d	t4, a1, 32
-	ld.d	t5, a1, 40
-	ld.d	t6, a1, 48
-	ld.d	t7, a1, 56
+	ld.d	t0, a1, 0
+	ld.d	t1, a1, 8
+	ld.d	t2, a1, 16
+	ld.d	t3, a1, 24
+	ld.d	t4, a1, 32
+	ld.d	t5, a1, 40
+	ld.d	t6, a1, 48
+	ld.d	t7, a1, 56

-	st.d	t0, a0, 0
-	st.d	t1, a0, 8
-	ld.d	t0, a1, 64
-	ld.d	t1, a1, 72
-	st.d	t2, a0, 16
-	st.d	t3, a0, 24
-	ld.d	t2, a1, 80
-	ld.d	t3, a1, 88
-	st.d	t4, a0, 32
-	st.d	t5, a0, 40
-	ld.d	t4, a1, 96
-	ld.d	t5, a1, 104
-	st.d	t6, a0, 48
-	st.d	t7, a0, 56
-	ld.d	t6, a1, 112
-	ld.d	t7, a1, 120
-	addi.d	a0, a0, 128
-	addi.d	a1, a1, 128
+	st.d	t0, a0, 0
+	st.d	t1, a0, 8
+	ld.d	t0, a1, 64
+	ld.d	t1, a1, 72
+	st.d	t2, a0, 16
+	st.d	t3, a0, 24
+	ld.d	t2, a1, 80
+	ld.d	t3, a1, 88
+	st.d	t4, a0, 32
+	st.d	t5, a0, 40
+	ld.d	t4, a1, 96
+	ld.d	t5, a1, 104
+	st.d	t6, a0, 48
+	st.d	t7, a0, 56
+	ld.d	t6, a1, 112
+	ld.d	t7, a1, 120
+	addi.d	a0, a0, 128
+	addi.d	a1, a1, 128

-	st.d	t0, a0, -64
-	st.d	t1, a0, -56
-	st.d	t2, a0, -48
-	st.d	t3, a0, -40
-	st.d	t4, a0, -32
-	st.d	t5, a0, -24
-	st.d	t6, a0, -16
-	st.d	t7, a0, -8
+	st.d	t0, a0, -64
+	st.d	t1, a0, -56
+	st.d	t2, a0, -48
+	st.d	t3, a0, -40
+	st.d	t4, a0, -32
+	st.d	t5, a0, -24
+	st.d	t6, a0, -16
+	st.d	t7, a0, -8

-	bne	t8, a0, 1b
-	jr	ra
+	bne	t8, a0, 1b
+	jr	ra
 SYM_FUNC_END(copy_page)
 EXPORT_SYMBOL(copy_page)
arch/loongarch/mm/tlbex.S (+9 -9)

···
 	REG_S	a2, sp, PT_BVADDR
 	li.w	a1, \write
 	la.abs	t0, do_page_fault
-	jirl	ra, t0, 0
+	jirl	ra, t0, 0
 	RESTORE_ALL_AND_RET
 SYM_FUNC_END(tlb_do_page_fault_\write)
 	.endm
···
 	csrrd	a2, LOONGARCH_CSR_BADV
 	REG_S	a2, sp, PT_BVADDR
 	la.abs	t0, do_page_fault
-	jirl	ra, t0, 0
+	jirl	ra, t0, 0
 	RESTORE_ALL_AND_RET
 SYM_FUNC_END(handle_tlb_protect)

···
 	st.d	t0, t1, 0
 #endif
 	addu16i.d	t1, zero, -(CSR_TLBIDX_EHINV >> 16)
-	addi.d	ra, t1, 0
-	csrxchg	ra, t1, LOONGARCH_CSR_TLBIDX
+	addi.d	ra, t1, 0
+	csrxchg	ra, t1, LOONGARCH_CSR_TLBIDX
 	tlbwr

 	csrxchg	zero, t1, LOONGARCH_CSR_TLBIDX
···
 	st.d	t0, t1, 0
 #endif
 	addu16i.d	t1, zero, -(CSR_TLBIDX_EHINV >> 16)
-	addi.d	ra, t1, 0
-	csrxchg	ra, t1, LOONGARCH_CSR_TLBIDX
+	addi.d	ra, t1, 0
+	csrxchg	ra, t1, LOONGARCH_CSR_TLBIDX
 	tlbwr

 	csrxchg	zero, t1, LOONGARCH_CSR_TLBIDX
···
 	ertn
 #ifdef CONFIG_64BIT
 vmalloc_modify:
-	la.abs	t1, swapper_pg_dir
+	la.abs	t1, swapper_pg_dir
 	b	vmalloc_done_modify
 #endif

···
 	/* Set huge page tlb entry size */
 	addu16i.d	t0, zero, (CSR_TLBIDX_PS >> 16)
 	addu16i.d	t1, zero, (PS_HUGE_SIZE << (CSR_TLBIDX_PS_SHIFT - 16))
-	csrxchg	t1, t0, LOONGARCH_CSR_TLBIDX
+	csrxchg	t1, t0, LOONGARCH_CSR_TLBIDX

 	tlbwr

 	/* Reset default page size */
 	addu16i.d	t0, zero, (CSR_TLBIDX_PS >> 16)
 	addu16i.d	t1, zero, (PS_DEFAULT_SIZE << (CSR_TLBIDX_PS_SHIFT - 16))
-	csrxchg	t1, t0, LOONGARCH_CSR_TLBIDX
+	csrxchg	t1, t0, LOONGARCH_CSR_TLBIDX

 nopage_tlb_modify:
 	dbar	0