Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

LoongArch: Add other common headers

Add some other common headers for basic LoongArch support.

Reviewed-by: WANG Xuerui <git@xen0n.name>
Reviewed-by: Jiaxun Yang <jiaxun.yang@flygoat.com>
Signed-off-by: Huacai Chen <chenhuacai@loongson.cn>

+917
+7
arch/loongarch/include/asm/asm-prototypes.h
··· 1 + /* SPDX-License-Identifier: GPL-2.0 */ 2 + #include <linux/uaccess.h> 3 + #include <asm/fpu.h> 4 + #include <asm/mmu_context.h> 5 + #include <asm/page.h> 6 + #include <asm/ftrace.h> 7 + #include <asm-generic/asm-prototypes.h>
+191
arch/loongarch/include/asm/asm.h
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Some useful macros for LoongArch assembler code
 *
 * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
 *
 * Derived from MIPS:
 * Copyright (C) 1995, 1996, 1997, 1999, 2001 by Ralf Baechle
 * Copyright (C) 1999 by Silicon Graphics, Inc.
 * Copyright (C) 2001 MIPS Technologies, Inc.
 * Copyright (C) 2002 Maciej W. Rozycki
 */
#ifndef __ASM_ASM_H
#define __ASM_ASM_H

/* LoongArch pref instruction. */
#ifdef CONFIG_CPU_HAS_PREFETCH

/*
 * The trailing backslash splices the following blank line into the
 * definition; it is harmless (kept from the MIPS original).
 */
#define PREF(hint, addr, offs)				\
		preld	hint, addr, offs;		\

#define PREFX(hint, addr, index)			\
		preldx	hint, addr, index;		\

#else /* !CONFIG_CPU_HAS_PREFETCH */

/* Prefetch hints compile away entirely when the CPU lacks them. */
#define PREF(hint, addr, offs)
#define PREFX(hint, addr, index)

#endif /* !CONFIG_CPU_HAS_PREFETCH */

/*
 * Stack alignment
 * NOTE(review): this is a mask (low four bits clear), presumably ANDed
 * with sp to align it to 16 bytes -- the users are not visible here;
 * confirm at the call sites.
 */
#define STACK_ALIGN	~(0xf)

/*
 * Macros to handle different pointer/register sizes for 32/64-bit code
 */

/*
 * Size of a register: 4 bytes on 32-bit, 8 bytes on loongarch64.
 */
#ifndef __loongarch64
#define SZREG	4
#else
#define SZREG	8
#endif

/*
 * Use the following macros in assembler code to load/store registers,
 * pointers etc.
 */
#if (SZREG == 4)
#define REG_L		ld.w
#define REG_S		st.w
#define REG_ADD		add.w
#define REG_SUB		sub.w
#else /* SZREG == 8 */
#define REG_L		ld.d
#define REG_S		st.d
#define REG_ADD		add.d
#define REG_SUB		sub.d
#endif

/*
 * How to add/sub/load/store/shift C int variables.
 */
#if (__SIZEOF_INT__ == 4)
#define INT_ADD		add.w
#define INT_ADDI	addi.w
#define INT_SUB		sub.w
#define INT_L		ld.w
#define INT_S		st.w
#define INT_SLL		slli.w
#define INT_SLLV	sll.w
#define INT_SRL		srli.w
#define INT_SRLV	srl.w
#define INT_SRA		srai.w
#define INT_SRAV	sra.w
#endif

#if (__SIZEOF_INT__ == 8)
#define INT_ADD		add.d
#define INT_ADDI	addi.d
#define INT_SUB		sub.d
#define INT_L		ld.d
#define INT_S		st.d
#define INT_SLL		slli.d
#define INT_SLLV	sll.d
#define INT_SRL		srli.d
#define INT_SRLV	srl.d
#define INT_SRA		srai.d
#define INT_SRAV	sra.d
#endif

/*
 * How to add/sub/load/store/shift C long variables.
 */
#if (__SIZEOF_LONG__ == 4)
#define LONG_ADD	add.w
#define LONG_ADDI	addi.w
#define LONG_SUB	sub.w
#define LONG_L		ld.w
#define LONG_S		st.w
#define LONG_SLL	slli.w
#define LONG_SLLV	sll.w
#define LONG_SRL	srli.w
#define LONG_SRLV	srl.w
#define LONG_SRA	srai.w
#define LONG_SRAV	sra.w

/* LONG emits one long-sized data item from assembly sources. */
#ifdef __ASSEMBLY__
#define LONG		.word
#endif
#define LONGSIZE	4
#define LONGMASK	3
#define LONGLOG		2
#endif

#if (__SIZEOF_LONG__ == 8)
#define LONG_ADD	add.d
#define LONG_ADDI	addi.d
#define LONG_SUB	sub.d
#define LONG_L		ld.d
#define LONG_S		st.d
#define LONG_SLL	slli.d
#define LONG_SLLV	sll.d
#define LONG_SRL	srli.d
#define LONG_SRLV	srl.d
#define LONG_SRA	srai.d
#define LONG_SRAV	sra.d

#ifdef __ASSEMBLY__
#define LONG		.dword
#endif
#define LONGSIZE	8
#define LONGMASK	7
#define LONGLOG		3
#endif

/*
 * How to add/sub/load/store/shift pointers.
 */
#if (__SIZEOF_POINTER__ == 4)
#define PTR_ADD		add.w
#define PTR_ADDI	addi.w
#define PTR_SUB		sub.w
#define PTR_L		ld.w
#define PTR_S		st.w
#define PTR_LI		li.w
#define PTR_SLL		slli.w
#define PTR_SLLV	sll.w
#define PTR_SRL		srli.w
#define PTR_SRLV	srl.w
#define PTR_SRA		srai.w
#define PTR_SRAV	sra.w

/* Shift amount converting an index into a byte offset (log2 PTRSIZE). */
#define PTR_SCALESHIFT	2

#ifdef __ASSEMBLY__
#define PTR		.word
#endif
#define PTRSIZE		4
#define PTRLOG		2
#endif

#if (__SIZEOF_POINTER__ == 8)
#define PTR_ADD		add.d
#define PTR_ADDI	addi.d
#define PTR_SUB		sub.d
#define PTR_L		ld.d
#define PTR_S		st.d
#define PTR_LI		li.d
#define PTR_SLL		slli.d
#define PTR_SLLV	sll.d
#define PTR_SRL		srli.d
#define PTR_SRLV	srl.d
#define PTR_SRA		srai.d
#define PTR_SRAV	sra.d

#define PTR_SCALESHIFT	3

#ifdef __ASSEMBLY__
#define PTR		.dword
#endif
#define PTRSIZE		8
#define PTRLOG		3
#endif

#endif /* __ASM_ASM_H */
+289
arch/loongarch/include/asm/asmmacro.h
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Assembler macros for LoongArch assembly sources: register-name parsing,
 * save/restore of the callee-saved GPRs, and save/restore of FPU state
 * (FCSR, condition flags, double-precision registers).
 *
 * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
 */
#ifndef _ASM_ASMMACRO_H
#define _ASM_ASMMACRO_H

#include <asm/asm-offsets.h>
#include <asm/regdef.h>
#include <asm/fpregdef.h>
#include <asm/loongarch.h>

/* Define assembler symbol \var with value \val. */
	.macro	parse_v var val
	\var	= \val
	.endm

/*
 * Set \var to the numeric index (0..31) of the register named \r
 * ($r0..$r31); assembly fails with .error if \r is not recognized.
 */
	.macro	parse_r var r
	\var	= -1
	.ifc	\r, $r0
	\var	= 0
	.endif
	.ifc	\r, $r1
	\var	= 1
	.endif
	.ifc	\r, $r2
	\var	= 2
	.endif
	.ifc	\r, $r3
	\var	= 3
	.endif
	.ifc	\r, $r4
	\var	= 4
	.endif
	.ifc	\r, $r5
	\var	= 5
	.endif
	.ifc	\r, $r6
	\var	= 6
	.endif
	.ifc	\r, $r7
	\var	= 7
	.endif
	.ifc	\r, $r8
	\var	= 8
	.endif
	.ifc	\r, $r9
	\var	= 9
	.endif
	.ifc	\r, $r10
	\var	= 10
	.endif
	.ifc	\r, $r11
	\var	= 11
	.endif
	.ifc	\r, $r12
	\var	= 12
	.endif
	.ifc	\r, $r13
	\var	= 13
	.endif
	.ifc	\r, $r14
	\var	= 14
	.endif
	.ifc	\r, $r15
	\var	= 15
	.endif
	.ifc	\r, $r16
	\var	= 16
	.endif
	.ifc	\r, $r17
	\var	= 17
	.endif
	.ifc	\r, $r18
	\var	= 18
	.endif
	.ifc	\r, $r19
	\var	= 19
	.endif
	.ifc	\r, $r20
	\var	= 20
	.endif
	.ifc	\r, $r21
	\var	= 21
	.endif
	.ifc	\r, $r22
	\var	= 22
	.endif
	.ifc	\r, $r23
	\var	= 23
	.endif
	.ifc	\r, $r24
	\var	= 24
	.endif
	.ifc	\r, $r25
	\var	= 25
	.endif
	.ifc	\r, $r26
	\var	= 26
	.endif
	.ifc	\r, $r27
	\var	= 27
	.endif
	.ifc	\r, $r28
	\var	= 28
	.endif
	.ifc	\r, $r29
	\var	= 29
	.endif
	.ifc	\r, $r30
	\var	= 30
	.endif
	.ifc	\r, $r31
	\var	= 31
	.endif
	.iflt	\var
	.error	"Unable to parse register name \r"
	.endif
	.endm

/*
 * Save the callee-saved ("non-scratch") GPRs s0-s8, sp and fp into the
 * thread structure at \thread.
 * NOTE(review): ra is not saved here although cpu_restore_nonscratch
 * restores it -- presumably the context-switch caller stores ra itself;
 * confirm at the call site.
 */
	.macro	cpu_save_nonscratch thread
	stptr.d	s0, \thread, THREAD_REG23
	stptr.d	s1, \thread, THREAD_REG24
	stptr.d	s2, \thread, THREAD_REG25
	stptr.d	s3, \thread, THREAD_REG26
	stptr.d	s4, \thread, THREAD_REG27
	stptr.d	s5, \thread, THREAD_REG28
	stptr.d	s6, \thread, THREAD_REG29
	stptr.d	s7, \thread, THREAD_REG30
	stptr.d	s8, \thread, THREAD_REG31
	stptr.d	sp, \thread, THREAD_REG03
	stptr.d	fp, \thread, THREAD_REG22
	.endm

/* Restore s0-s8, ra, sp and fp from the thread structure at \thread. */
	.macro	cpu_restore_nonscratch thread
	ldptr.d	s0, \thread, THREAD_REG23
	ldptr.d	s1, \thread, THREAD_REG24
	ldptr.d	s2, \thread, THREAD_REG25
	ldptr.d	s3, \thread, THREAD_REG26
	ldptr.d	s4, \thread, THREAD_REG27
	ldptr.d	s5, \thread, THREAD_REG28
	ldptr.d	s6, \thread, THREAD_REG29
	ldptr.d	s7, \thread, THREAD_REG30
	ldptr.d	s8, \thread, THREAD_REG31
	ldptr.d	ra, \thread, THREAD_REG01
	ldptr.d	sp, \thread, THREAD_REG03
	ldptr.d	fp, \thread, THREAD_REG22
	.endm

/* Save FCSR0 to THREAD_FCSR, clobbering \tmp. */
	.macro	fpu_save_csr thread tmp
	movfcsr2gr	\tmp, fcsr0
	stptr.w		\tmp, \thread, THREAD_FCSR
	.endm

/* Restore FCSR0 from THREAD_FCSR, clobbering \tmp. */
	.macro	fpu_restore_csr thread tmp
	ldptr.w		\tmp, \thread, THREAD_FCSR
	movgr2fcsr	fcsr0, \tmp
	.endm

/*
 * Pack the eight 1-bit FP condition flags $fcc0..$fcc7 into successive
 * bytes of \tmp1 (fcc0 in bits 7..0, ..., fcc7 in bits 63..56) and store
 * the packed word at THREAD_FCC. Clobbers \tmp0 and \tmp1.
 */
	.macro	fpu_save_cc thread tmp0 tmp1
	movcf2gr	\tmp0, $fcc0
	move		\tmp1, \tmp0
	movcf2gr	\tmp0, $fcc1
	bstrins.d	\tmp1, \tmp0, 15, 8
	movcf2gr	\tmp0, $fcc2
	bstrins.d	\tmp1, \tmp0, 23, 16
	movcf2gr	\tmp0, $fcc3
	bstrins.d	\tmp1, \tmp0, 31, 24
	movcf2gr	\tmp0, $fcc4
	bstrins.d	\tmp1, \tmp0, 39, 32
	movcf2gr	\tmp0, $fcc5
	bstrins.d	\tmp1, \tmp0, 47, 40
	movcf2gr	\tmp0, $fcc6
	bstrins.d	\tmp1, \tmp0, 55, 48
	movcf2gr	\tmp0, $fcc7
	bstrins.d	\tmp1, \tmp0, 63, 56
	stptr.d		\tmp1, \thread, THREAD_FCC
	.endm

/*
 * Inverse of fpu_save_cc: unpack one byte per flag from the word at
 * THREAD_FCC back into $fcc0..$fcc7. Clobbers \tmp0 and \tmp1.
 */
	.macro	fpu_restore_cc thread tmp0 tmp1
	ldptr.d		\tmp0, \thread, THREAD_FCC
	bstrpick.d	\tmp1, \tmp0, 7, 0
	movgr2cf	$fcc0, \tmp1
	bstrpick.d	\tmp1, \tmp0, 15, 8
	movgr2cf	$fcc1, \tmp1
	bstrpick.d	\tmp1, \tmp0, 23, 16
	movgr2cf	$fcc2, \tmp1
	bstrpick.d	\tmp1, \tmp0, 31, 24
	movgr2cf	$fcc3, \tmp1
	bstrpick.d	\tmp1, \tmp0, 39, 32
	movgr2cf	$fcc4, \tmp1
	bstrpick.d	\tmp1, \tmp0, 47, 40
	movgr2cf	$fcc5, \tmp1
	bstrpick.d	\tmp1, \tmp0, 55, 48
	movgr2cf	$fcc6, \tmp1
	bstrpick.d	\tmp1, \tmp0, 63, 56
	movgr2cf	$fcc7, \tmp1
	.endm

/*
 * Store all 32 double-precision FP registers into the thread structure.
 * The THREAD_FPR0 base offset is first materialized in \tmp and added to
 * \thread, so each fst.d below only needs the small relative offset
 * THREAD_FPRn - THREAD_FPR0 (presumably because the absolute offset may
 * not fit fst.d's immediate field -- confirm against asm-offsets).
 */
	.macro	fpu_save_double thread tmp
	li.w	\tmp, THREAD_FPR0
	PTR_ADD	\tmp, \tmp, \thread
	fst.d	$f0, \tmp, THREAD_FPR0 - THREAD_FPR0
	fst.d	$f1, \tmp, THREAD_FPR1 - THREAD_FPR0
	fst.d	$f2, \tmp, THREAD_FPR2 - THREAD_FPR0
	fst.d	$f3, \tmp, THREAD_FPR3 - THREAD_FPR0
	fst.d	$f4, \tmp, THREAD_FPR4 - THREAD_FPR0
	fst.d	$f5, \tmp, THREAD_FPR5 - THREAD_FPR0
	fst.d	$f6, \tmp, THREAD_FPR6 - THREAD_FPR0
	fst.d	$f7, \tmp, THREAD_FPR7 - THREAD_FPR0
	fst.d	$f8, \tmp, THREAD_FPR8 - THREAD_FPR0
	fst.d	$f9, \tmp, THREAD_FPR9 - THREAD_FPR0
	fst.d	$f10, \tmp, THREAD_FPR10 - THREAD_FPR0
	fst.d	$f11, \tmp, THREAD_FPR11 - THREAD_FPR0
	fst.d	$f12, \tmp, THREAD_FPR12 - THREAD_FPR0
	fst.d	$f13, \tmp, THREAD_FPR13 - THREAD_FPR0
	fst.d	$f14, \tmp, THREAD_FPR14 - THREAD_FPR0
	fst.d	$f15, \tmp, THREAD_FPR15 - THREAD_FPR0
	fst.d	$f16, \tmp, THREAD_FPR16 - THREAD_FPR0
	fst.d	$f17, \tmp, THREAD_FPR17 - THREAD_FPR0
	fst.d	$f18, \tmp, THREAD_FPR18 - THREAD_FPR0
	fst.d	$f19, \tmp, THREAD_FPR19 - THREAD_FPR0
	fst.d	$f20, \tmp, THREAD_FPR20 - THREAD_FPR0
	fst.d	$f21, \tmp, THREAD_FPR21 - THREAD_FPR0
	fst.d	$f22, \tmp, THREAD_FPR22 - THREAD_FPR0
	fst.d	$f23, \tmp, THREAD_FPR23 - THREAD_FPR0
	fst.d	$f24, \tmp, THREAD_FPR24 - THREAD_FPR0
	fst.d	$f25, \tmp, THREAD_FPR25 - THREAD_FPR0
	fst.d	$f26, \tmp, THREAD_FPR26 - THREAD_FPR0
	fst.d	$f27, \tmp, THREAD_FPR27 - THREAD_FPR0
	fst.d	$f28, \tmp, THREAD_FPR28 - THREAD_FPR0
	fst.d	$f29, \tmp, THREAD_FPR29 - THREAD_FPR0
	fst.d	$f30, \tmp, THREAD_FPR30 - THREAD_FPR0
	fst.d	$f31, \tmp, THREAD_FPR31 - THREAD_FPR0
	.endm

/* Inverse of fpu_save_double: reload $f0..$f31 from the thread structure. */
	.macro	fpu_restore_double thread tmp
	li.w	\tmp, THREAD_FPR0
	PTR_ADD	\tmp, \tmp, \thread
	fld.d	$f0, \tmp, THREAD_FPR0 - THREAD_FPR0
	fld.d	$f1, \tmp, THREAD_FPR1 - THREAD_FPR0
	fld.d	$f2, \tmp, THREAD_FPR2 - THREAD_FPR0
	fld.d	$f3, \tmp, THREAD_FPR3 - THREAD_FPR0
	fld.d	$f4, \tmp, THREAD_FPR4 - THREAD_FPR0
	fld.d	$f5, \tmp, THREAD_FPR5 - THREAD_FPR0
	fld.d	$f6, \tmp, THREAD_FPR6 - THREAD_FPR0
	fld.d	$f7, \tmp, THREAD_FPR7 - THREAD_FPR0
	fld.d	$f8, \tmp, THREAD_FPR8 - THREAD_FPR0
	fld.d	$f9, \tmp, THREAD_FPR9 - THREAD_FPR0
	fld.d	$f10, \tmp, THREAD_FPR10 - THREAD_FPR0
	fld.d	$f11, \tmp, THREAD_FPR11 - THREAD_FPR0
	fld.d	$f12, \tmp, THREAD_FPR12 - THREAD_FPR0
	fld.d	$f13, \tmp, THREAD_FPR13 - THREAD_FPR0
	fld.d	$f14, \tmp, THREAD_FPR14 - THREAD_FPR0
	fld.d	$f15, \tmp, THREAD_FPR15 - THREAD_FPR0
	fld.d	$f16, \tmp, THREAD_FPR16 - THREAD_FPR0
	fld.d	$f17, \tmp, THREAD_FPR17 - THREAD_FPR0
	fld.d	$f18, \tmp, THREAD_FPR18 - THREAD_FPR0
	fld.d	$f19, \tmp, THREAD_FPR19 - THREAD_FPR0
	fld.d	$f20, \tmp, THREAD_FPR20 - THREAD_FPR0
	fld.d	$f21, \tmp, THREAD_FPR21 - THREAD_FPR0
	fld.d	$f22, \tmp, THREAD_FPR22 - THREAD_FPR0
	fld.d	$f23, \tmp, THREAD_FPR23 - THREAD_FPR0
	fld.d	$f24, \tmp, THREAD_FPR24 - THREAD_FPR0
	fld.d	$f25, \tmp, THREAD_FPR25 - THREAD_FPR0
	fld.d	$f26, \tmp, THREAD_FPR26 - THREAD_FPR0
	fld.d	$f27, \tmp, THREAD_FPR27 - THREAD_FPR0
	fld.d	$f28, \tmp, THREAD_FPR28 - THREAD_FPR0
	fld.d	$f29, \tmp, THREAD_FPR29 - THREAD_FPR0
	fld.d	$f30, \tmp, THREAD_FPR30 - THREAD_FPR0
	fld.d	$f31, \tmp, THREAD_FPR31 - THREAD_FPR0
	.endm

/* Bitwise NOT: NOR with the zero register. */
	.macro	not dst src
	nor	\dst, \src, zero
	.endm

/* Branch if \r0 > \r1 (signed), synthesized by swapping blt's operands. */
	.macro	bgt r0 r1 label
	blt	\r1, \r0, \label
	.endm

/* Branch if \r0 < 0 (signed). */
	.macro	bltz r0 label
	blt	\r0, zero, \label
	.endm

/* Branch if \r0 >= 0 (signed). */
	.macro	bgez r0 label
	bge	\r0, zero, \label
	.endm

#endif /* _ASM_ASMMACRO_H */
+12
arch/loongarch/include/asm/clocksource.h
··· 1 + /* SPDX-License-Identifier: GPL-2.0 */ 2 + /* 3 + * Author: Huacai Chen <chenhuacai@loongson.cn> 4 + * Copyright (C) 2020-2022 Loongson Technology Corporation Limited 5 + */ 6 + 7 + #ifndef __ASM_CLOCKSOURCE_H 8 + #define __ASM_CLOCKSOURCE_H 9 + 10 + #include <asm/vdso/clocksource.h> 11 + 12 + #endif /* __ASM_CLOCKSOURCE_H */
+15
arch/loongarch/include/asm/compiler.h
··· 1 + /* SPDX-License-Identifier: GPL-2.0 */ 2 + /* 3 + * Copyright (C) 2020-2022 Loongson Technology Corporation Limited 4 + */ 5 + #ifndef _ASM_COMPILER_H 6 + #define _ASM_COMPILER_H 7 + 8 + #define GCC_OFF_SMALL_ASM() "ZC" 9 + 10 + #define LOONGARCH_ISA_LEVEL "loongarch" 11 + #define LOONGARCH_ISA_ARCH_LEVEL "arch=loongarch" 12 + #define LOONGARCH_ISA_LEVEL_RAW loongarch 13 + #define LOONGARCH_ISA_ARCH_LEVEL_RAW LOONGARCH_ISA_LEVEL_RAW 14 + 15 + #endif /* _ASM_COMPILER_H */
+117
arch/loongarch/include/asm/inst.h
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * LoongArch instruction formats, opcodes and encoder prototypes, used for
 * runtime instruction generation/patching.
 *
 * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
 */
#ifndef _ASM_INST_H
#define _ASM_INST_H

#include <linux/types.h>
#include <asm/asm.h>

/*
 * Which bits of a 64-bit address each address-building instruction
 * carries: lu52i.d takes bits 63..52, lu32i.d bits 51..32, addu16i.d
 * bits 31..16.
 */
#define ADDR_IMMMASK_LU52ID	0xFFF0000000000000
#define ADDR_IMMMASK_LU32ID	0x000FFFFF00000000
#define ADDR_IMMMASK_ADDU16ID	0x00000000FFFF0000

#define ADDR_IMMSHIFT_LU52ID	52
#define ADDR_IMMSHIFT_LU32ID	32
#define ADDR_IMMSHIFT_ADDU16ID	16

/* Extract the immediate field for instruction INSN from addr. */
#define ADDR_IMM(addr, INSN)	((addr & ADDR_IMMMASK_##INSN) >> ADDR_IMMSHIFT_##INSN)

/* Opcode values, grouped by instruction format. */
enum reg1i20_op {
	lu12iw_op	= 0x0a,
	lu32id_op	= 0x0b,
};

enum reg2i12_op {
	lu52id_op	= 0x0c,
};

enum reg2i16_op {
	jirl_op		= 0x13,
};

/*
 * Instruction formats as C bitfields, declared least-significant field
 * first (rd in the low bits, opcode in the high bits).
 * NOTE(review): relies on little-endian LSB-first bitfield allocation,
 * which holds for the compilers the kernel supports on LoongArch.
 */
struct reg0i26_format {
	unsigned int immediate_h : 10;
	unsigned int immediate_l : 16;
	unsigned int opcode : 6;
};

struct reg1i20_format {
	unsigned int rd : 5;
	unsigned int immediate : 20;
	unsigned int opcode : 7;
};

struct reg1i21_format {
	unsigned int immediate_h : 5;
	unsigned int rj : 5;
	unsigned int immediate_l : 16;
	unsigned int opcode : 6;
};

struct reg2i12_format {
	unsigned int rd : 5;
	unsigned int rj : 5;
	unsigned int immediate : 12;
	unsigned int opcode : 10;
};

struct reg2i16_format {
	unsigned int rd : 5;
	unsigned int rj : 5;
	unsigned int immediate : 16;
	unsigned int opcode : 6;
};

/* One 32-bit instruction word, viewable through any of the formats. */
union loongarch_instruction {
	unsigned int word;
	struct reg0i26_format reg0i26_format;
	struct reg1i20_format reg1i20_format;
	struct reg1i21_format reg1i21_format;
	struct reg2i12_format reg2i12_format;
	struct reg2i16_format reg2i16_format;
};

#define LOONGARCH_INSN_SIZE	sizeof(union loongarch_instruction)

/* ABI register numbering; gaps follow the LoongArch psABI assignments. */
enum loongarch_gpr {
	LOONGARCH_GPR_ZERO = 0,
	LOONGARCH_GPR_RA = 1,
	LOONGARCH_GPR_TP = 2,
	LOONGARCH_GPR_SP = 3,
	LOONGARCH_GPR_A0 = 4,	/* Reused as V0 for return value */
	LOONGARCH_GPR_A1,	/* Reused as V1 for return value */
	LOONGARCH_GPR_A2,
	LOONGARCH_GPR_A3,
	LOONGARCH_GPR_A4,
	LOONGARCH_GPR_A5,
	LOONGARCH_GPR_A6,
	LOONGARCH_GPR_A7,
	LOONGARCH_GPR_T0 = 12,
	LOONGARCH_GPR_T1,
	LOONGARCH_GPR_T2,
	LOONGARCH_GPR_T3,
	LOONGARCH_GPR_T4,
	LOONGARCH_GPR_T5,
	LOONGARCH_GPR_T6,
	LOONGARCH_GPR_T7,
	LOONGARCH_GPR_T8,
	LOONGARCH_GPR_FP = 22,
	LOONGARCH_GPR_S0 = 23,
	LOONGARCH_GPR_S1,
	LOONGARCH_GPR_S2,
	LOONGARCH_GPR_S3,
	LOONGARCH_GPR_S4,
	LOONGARCH_GPR_S5,
	LOONGARCH_GPR_S6,
	LOONGARCH_GPR_S7,
	LOONGARCH_GPR_S8,
	LOONGARCH_GPR_MAX
};

/* Instruction encoders, implemented elsewhere in arch code. */
u32 larch_insn_gen_lu32id(enum loongarch_gpr rd, int imm);
u32 larch_insn_gen_lu52id(enum loongarch_gpr rd, enum loongarch_gpr rj, int imm);
u32 larch_insn_gen_jirl(enum loongarch_gpr rd, enum loongarch_gpr rj, unsigned long pc, unsigned long dest);

#endif /* _ASM_INST_H */
+36
arch/loongarch/include/asm/linkage.h
··· 1 + /* SPDX-License-Identifier: GPL-2.0 */ 2 + #ifndef __ASM_LINKAGE_H 3 + #define __ASM_LINKAGE_H 4 + 5 + #define __ALIGN .align 2 6 + #define __ALIGN_STR __stringify(__ALIGN) 7 + 8 + #define SYM_FUNC_START(name) \ 9 + SYM_START(name, SYM_L_GLOBAL, SYM_A_ALIGN) \ 10 + .cfi_startproc; 11 + 12 + #define SYM_FUNC_START_NOALIGN(name) \ 13 + SYM_START(name, SYM_L_GLOBAL, SYM_A_NONE) \ 14 + .cfi_startproc; 15 + 16 + #define SYM_FUNC_START_LOCAL(name) \ 17 + SYM_START(name, SYM_L_LOCAL, SYM_A_ALIGN) \ 18 + .cfi_startproc; 19 + 20 + #define SYM_FUNC_START_LOCAL_NOALIGN(name) \ 21 + SYM_START(name, SYM_L_LOCAL, SYM_A_NONE) \ 22 + .cfi_startproc; 23 + 24 + #define SYM_FUNC_START_WEAK(name) \ 25 + SYM_START(name, SYM_L_WEAK, SYM_A_ALIGN) \ 26 + .cfi_startproc; 27 + 28 + #define SYM_FUNC_START_WEAK_NOALIGN(name) \ 29 + SYM_START(name, SYM_L_WEAK, SYM_A_NONE) \ 30 + .cfi_startproc; 31 + 32 + #define SYM_FUNC_END(name) \ 33 + .cfi_endproc; \ 34 + SYM_END(name, SYM_T_FUNC) 35 + 36 + #endif
+10
arch/loongarch/include/asm/perf_event.h
··· 1 + /* SPDX-License-Identifier: GPL-2.0 */ 2 + /* 3 + * Author: Huacai Chen <chenhuacai@loongson.cn> 4 + * Copyright (C) 2020-2022 Loongson Technology Corporation Limited 5 + */ 6 + 7 + #ifndef __LOONGARCH_PERF_EVENT_H__ 8 + #define __LOONGARCH_PERF_EVENT_H__ 9 + /* Nothing to show here; the file is required by linux/perf_event.h. */ 10 + #endif /* __LOONGARCH_PERF_EVENT_H__ */
+29
arch/loongarch/include/asm/prefetch.h
··· 1 + /* SPDX-License-Identifier: GPL-2.0 */ 2 + /* 3 + * Copyright (C) 2020-2022 Loongson Technology Corporation Limited 4 + */ 5 + #ifndef __ASM_PREFETCH_H 6 + #define __ASM_PREFETCH_H 7 + 8 + #define Pref_Load 0 9 + #define Pref_Store 8 10 + 11 + #ifdef __ASSEMBLY__ 12 + 13 + .macro __pref hint addr 14 + #ifdef CONFIG_CPU_HAS_PREFETCH 15 + preld \hint, \addr, 0 16 + #endif 17 + .endm 18 + 19 + .macro pref_load addr 20 + __pref Pref_Load, \addr 21 + .endm 22 + 23 + .macro pref_store addr 24 + __pref Pref_Store, \addr 25 + .endm 26 + 27 + #endif 28 + 29 + #endif /* __ASM_PREFETCH_H */
+11
arch/loongarch/include/asm/serial.h
··· 1 + /* SPDX-License-Identifier: GPL-2.0 */ 2 + /* 3 + * Copyright (C) 2020-2022 Loongson Technology Corporation Limited 4 + */ 5 + #ifndef __ASM__SERIAL_H 6 + #define __ASM__SERIAL_H 7 + 8 + #define BASE_BAUD 0 9 + #define STD_COM_FLAGS (ASYNC_BOOT_AUTOCONF | ASYNC_SKIP_TEST) 10 + 11 + #endif /* __ASM__SERIAL_H */
+50
arch/loongarch/include/asm/time.h
··· 1 + /* SPDX-License-Identifier: GPL-2.0 */ 2 + /* 3 + * Copyright (C) 2020-2022 Loongson Technology Corporation Limited 4 + */ 5 + #ifndef _ASM_TIME_H 6 + #define _ASM_TIME_H 7 + 8 + #include <linux/clockchips.h> 9 + #include <linux/clocksource.h> 10 + #include <asm/loongarch.h> 11 + 12 + extern u64 cpu_clock_freq; 13 + extern u64 const_clock_freq; 14 + 15 + extern void sync_counter(void); 16 + 17 + static inline unsigned int calc_const_freq(void) 18 + { 19 + unsigned int res; 20 + unsigned int base_freq; 21 + unsigned int cfm, cfd; 22 + 23 + res = read_cpucfg(LOONGARCH_CPUCFG2); 24 + if (!(res & CPUCFG2_LLFTP)) 25 + return 0; 26 + 27 + base_freq = read_cpucfg(LOONGARCH_CPUCFG4); 28 + res = read_cpucfg(LOONGARCH_CPUCFG5); 29 + cfm = res & 0xffff; 30 + cfd = (res >> 16) & 0xffff; 31 + 32 + if (!base_freq || !cfm || !cfd) 33 + return 0; 34 + 35 + return (base_freq * cfm / cfd); 36 + } 37 + 38 + /* 39 + * Initialize the calling CPU's timer interrupt as clockevent device 40 + */ 41 + extern int constant_clockevent_init(void); 42 + extern int constant_clocksource_init(void); 43 + 44 + static inline void clockevent_set_clock(struct clock_event_device *cd, 45 + unsigned int clock) 46 + { 47 + clockevents_calc_mult_shift(cd, clock, 4); 48 + } 49 + 50 + #endif /* _ASM_TIME_H */
+33
arch/loongarch/include/asm/timex.h
··· 1 + /* SPDX-License-Identifier: GPL-2.0 */ 2 + /* 3 + * Copyright (C) 2020-2022 Loongson Technology Corporation Limited 4 + */ 5 + #ifndef _ASM_TIMEX_H 6 + #define _ASM_TIMEX_H 7 + 8 + #ifdef __KERNEL__ 9 + 10 + #include <linux/compiler.h> 11 + 12 + #include <asm/cpu.h> 13 + #include <asm/cpu-features.h> 14 + 15 + /* 16 + * Standard way to access the cycle counter. 17 + * Currently only used on SMP for scheduling. 18 + * 19 + * We know that all SMP capable CPUs have cycle counters. 20 + */ 21 + 22 + typedef unsigned long cycles_t; 23 + 24 + #define get_cycles get_cycles 25 + 26 + static inline cycles_t get_cycles(void) 27 + { 28 + return drdtime(); 29 + } 30 + 31 + #endif /* __KERNEL__ */ 32 + 33 + #endif /* _ASM_TIMEX_H */
+15
arch/loongarch/include/asm/topology.h
··· 1 + /* SPDX-License-Identifier: GPL-2.0 */ 2 + /* 3 + * Copyright (C) 2020-2022 Loongson Technology Corporation Limited 4 + */ 5 + #ifndef __ASM_TOPOLOGY_H 6 + #define __ASM_TOPOLOGY_H 7 + 8 + #include <linux/smp.h> 9 + 10 + #define cpu_logical_map(cpu) 0 11 + 12 + #include <asm-generic/topology.h> 13 + 14 + static inline void arch_fix_phys_package_id(int num, u32 slot) { } 15 + #endif /* __ASM_TOPOLOGY_H */
+19
arch/loongarch/include/asm/types.h
··· 1 + /* SPDX-License-Identifier: GPL-2.0 */ 2 + /* 3 + * Copyright (C) 2020-2022 Loongson Technology Corporation Limited 4 + */ 5 + #ifndef _ASM_TYPES_H 6 + #define _ASM_TYPES_H 7 + 8 + #include <asm-generic/int-ll64.h> 9 + #include <uapi/asm/types.h> 10 + 11 + #ifdef __ASSEMBLY__ 12 + #define _ULCAST_ 13 + #define _U64CAST_ 14 + #else 15 + #define _ULCAST_ (unsigned long) 16 + #define _U64CAST_ (u64) 17 + #endif 18 + 19 + #endif /* _ASM_TYPES_H */
+9
arch/loongarch/include/uapi/asm/bitsperlong.h
··· 1 + /* SPDX-License-Identifier: GPL-2.0 WITH Linux-syscall-note */ 2 + #ifndef __ASM_LOONGARCH_BITSPERLONG_H 3 + #define __ASM_LOONGARCH_BITSPERLONG_H 4 + 5 + #define __BITS_PER_LONG (__SIZEOF_LONG__ * 8) 6 + 7 + #include <asm-generic/bitsperlong.h> 8 + 9 + #endif /* __ASM_LOONGARCH_BITSPERLONG_H */
+13
arch/loongarch/include/uapi/asm/byteorder.h
··· 1 + /* SPDX-License-Identifier: GPL-2.0+ WITH Linux-syscall-note */ 2 + /* 3 + * Author: Hanlu Li <lihanlu@loongson.cn> 4 + * Huacai Chen <chenhuacai@loongson.cn> 5 + * 6 + * Copyright (C) 2020-2022 Loongson Technology Corporation Limited 7 + */ 8 + #ifndef _ASM_BYTEORDER_H 9 + #define _ASM_BYTEORDER_H 10 + 11 + #include <linux/byteorder/little_endian.h> 12 + 13 + #endif /* _ASM_BYTEORDER_H */
+59
arch/loongarch/include/uapi/asm/reg.h
··· 1 + /* SPDX-License-Identifier: GPL-2.0 WITH Linux-syscall-note */ 2 + /* 3 + * Various register offset definitions for debuggers, core file 4 + * examiners and whatnot. 5 + * 6 + * Copyright (C) 2020-2022 Loongson Technology Corporation Limited 7 + */ 8 + 9 + #ifndef __UAPI_ASM_LOONGARCH_REG_H 10 + #define __UAPI_ASM_LOONGARCH_REG_H 11 + 12 + #define LOONGARCH_EF_R0 0 13 + #define LOONGARCH_EF_R1 1 14 + #define LOONGARCH_EF_R2 2 15 + #define LOONGARCH_EF_R3 3 16 + #define LOONGARCH_EF_R4 4 17 + #define LOONGARCH_EF_R5 5 18 + #define LOONGARCH_EF_R6 6 19 + #define LOONGARCH_EF_R7 7 20 + #define LOONGARCH_EF_R8 8 21 + #define LOONGARCH_EF_R9 9 22 + #define LOONGARCH_EF_R10 10 23 + #define LOONGARCH_EF_R11 11 24 + #define LOONGARCH_EF_R12 12 25 + #define LOONGARCH_EF_R13 13 26 + #define LOONGARCH_EF_R14 14 27 + #define LOONGARCH_EF_R15 15 28 + #define LOONGARCH_EF_R16 16 29 + #define LOONGARCH_EF_R17 17 30 + #define LOONGARCH_EF_R18 18 31 + #define LOONGARCH_EF_R19 19 32 + #define LOONGARCH_EF_R20 20 33 + #define LOONGARCH_EF_R21 21 34 + #define LOONGARCH_EF_R22 22 35 + #define LOONGARCH_EF_R23 23 36 + #define LOONGARCH_EF_R24 24 37 + #define LOONGARCH_EF_R25 25 38 + #define LOONGARCH_EF_R26 26 39 + #define LOONGARCH_EF_R27 27 40 + #define LOONGARCH_EF_R28 28 41 + #define LOONGARCH_EF_R29 29 42 + #define LOONGARCH_EF_R30 30 43 + #define LOONGARCH_EF_R31 31 44 + 45 + /* 46 + * Saved special registers 47 + */ 48 + #define LOONGARCH_EF_ORIG_A0 32 49 + #define LOONGARCH_EF_CSR_ERA 33 50 + #define LOONGARCH_EF_CSR_BADV 34 51 + #define LOONGARCH_EF_CSR_CRMD 35 52 + #define LOONGARCH_EF_CSR_PRMD 36 53 + #define LOONGARCH_EF_CSR_EUEN 37 54 + #define LOONGARCH_EF_CSR_ECFG 38 55 + #define LOONGARCH_EF_CSR_ESTAT 39 56 + 57 + #define LOONGARCH_EF_SIZE 320 /* size in bytes */ 58 + 59 + #endif /* __UAPI_ASM_LOONGARCH_REG_H */
+2
tools/include/uapi/asm/bitsperlong.h
··· 17 17 #include "../../../arch/riscv/include/uapi/asm/bitsperlong.h" 18 18 #elif defined(__alpha__) 19 19 #include "../../../arch/alpha/include/uapi/asm/bitsperlong.h" 20 + #elif defined(__loongarch__) 21 + #include "../../../arch/loongarch/include/uapi/asm/bitsperlong.h" 20 22 #else 21 23 #include <asm-generic/bitsperlong.h> 22 24 #endif