arch/arm64/include/asm/archrandom.h, at Linux v5.15:
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_ARCHRANDOM_H
#define _ASM_ARCHRANDOM_H

#ifdef CONFIG_ARCH_RANDOM

#include <linux/arm-smccc.h>
#include <linux/bug.h>
#include <linux/kernel.h>
#include <asm/cpufeature.h>

#define ARM_SMCCC_TRNG_MIN_VERSION	0x10000UL

extern bool smccc_trng_available;

static inline bool __init smccc_probe_trng(void)
{
	struct arm_smccc_res res;

	arm_smccc_1_1_invoke(ARM_SMCCC_TRNG_VERSION, &res);
	if ((s32)res.a0 < 0)
		return false;

	return res.a0 >= ARM_SMCCC_TRNG_MIN_VERSION;
}

static inline bool __arm64_rndr(unsigned long *v)
{
	bool ok;

	/*
	 * Reads of RNDR set PSTATE.NZCV to 0b0000 on success,
	 * and set PSTATE.NZCV to 0b0100 otherwise.
	 */
	asm volatile(
		__mrs_s("%0", SYS_RNDR_EL0) "\n"
	"	cset %w1, ne\n"
	: "=r" (*v), "=r" (ok)
	:
	: "cc");

	return ok;
}

static inline bool __must_check arch_get_random_long(unsigned long *v)
{
	return false;
}

static inline bool __must_check arch_get_random_int(unsigned int *v)
{
	return false;
}

static inline bool __must_check arch_get_random_seed_long(unsigned long *v)
{
	struct arm_smccc_res res;

	/*
	 * We prefer the SMCCC call, since its semantics (return actual
	 * hardware backed entropy) is closer to the idea behind this
	 * function here than what even the RNDRRS register provides
	 * (the output of a pseudo RNG freshly seeded by a TRNG).
	 */
	if (smccc_trng_available) {
		arm_smccc_1_1_invoke(ARM_SMCCC_TRNG_RND64, 64, &res);
		if ((int)res.a0 >= 0) {
			*v = res.a3;
			return true;
		}
	}

	/*
	 * Only support the generic interface after we have detected
	 * the system wide capability, avoiding complexity with the
	 * cpufeature code and with potential scheduling between CPUs
	 * with and without the feature.
	 */
	if (cpus_have_const_cap(ARM64_HAS_RNG) && __arm64_rndr(v))
		return true;

	return false;
}

static inline bool __must_check arch_get_random_seed_int(unsigned int *v)
{
	struct arm_smccc_res res;
	unsigned long val;

	if (smccc_trng_available) {
		arm_smccc_1_1_invoke(ARM_SMCCC_TRNG_RND64, 32, &res);
		if ((int)res.a0 >= 0) {
			*v = res.a3 & GENMASK(31, 0);
			return true;
		}
	}

	if (cpus_have_const_cap(ARM64_HAS_RNG)) {
		if (__arm64_rndr(&val)) {
			*v = val;
			return true;
		}
	}

	return false;
}

static inline bool __init __early_cpu_has_rndr(void)
{
	/* Open code as we run prior to the first call to cpufeature. */
	unsigned long ftr = read_sysreg_s(SYS_ID_AA64ISAR0_EL1);
	return (ftr >> ID_AA64ISAR0_RNDR_SHIFT) & 0xf;
}

static inline bool __init __must_check
arch_get_random_seed_long_early(unsigned long *v)
{
	WARN_ON(system_state != SYSTEM_BOOTING);

	if (smccc_trng_available) {
		struct arm_smccc_res res;

		arm_smccc_1_1_invoke(ARM_SMCCC_TRNG_RND64, 64, &res);
		if ((int)res.a0 >= 0) {
			*v = res.a3;
			return true;
		}
	}

	if (__early_cpu_has_rndr() && __arm64_rndr(v))
		return true;

	return false;
}
#define arch_get_random_seed_long_early arch_get_random_seed_long_early

#else /* !CONFIG_ARCH_RANDOM */

static inline bool __init smccc_probe_trng(void)
{
	return false;
}

#endif /* CONFIG_ARCH_RANDOM */
#endif /* _ASM_ARCHRANDOM_H */
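
For context, here is a minimal caller sketch showing how the seed hooks above are meant to be consumed: a false return means no hardware-backed seed was available and the caller must fall back to its other entropy sources. It assumes CONFIG_ARCH_RANDOM=y; example_collect_seed() is a hypothetical helper for illustration, not code from the generic random core (which is the real consumer of these hooks).

#include <asm/archrandom.h>	/* the header above */
#include <linux/string.h>
#include <linux/types.h>

/* Hypothetical helper, for illustration only. */
static void example_collect_seed(u8 *buf, size_t len)
{
	while (len >= sizeof(unsigned long)) {
		unsigned long word;

		/*
		 * Tries the SMCCC TRNG first, then RNDR; returns false
		 * if neither source is available or the firmware call
		 * failed.
		 */
		if (!arch_get_random_seed_long(&word))
			break;	/* caller needs a software fallback */

		memcpy(buf, &word, sizeof(word));
		buf += sizeof(word);
		len -= sizeof(word);
	}
}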
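
The firmware path follows the conventions already visible in the header: the requested number of entropy bits is passed as the first argument to ARM_SMCCC_TRNG_RND64, a negative a0 signals an error, and for requests of at most 64 bits the entropy lands in the low bits of res.a3. As a sketch only, mirroring arch_get_random_seed_int() above, a 16-bit request could look like this; example_trng_u16() is a hypothetical name, not an existing kernel symbol.

#include <linux/arm-smccc.h>
#include <linux/bits.h>
#include <linux/types.h>

extern bool smccc_trng_available;

/* Hypothetical example, mirroring arch_get_random_seed_int() above. */
static bool example_trng_u16(u16 *out)
{
	struct arm_smccc_res res;

	if (!smccc_trng_available)
		return false;

	/* First argument: number of entropy bits requested. */
	arm_smccc_1_1_invoke(ARM_SMCCC_TRNG_RND64, 16, &res);
	if ((int)res.a0 < 0)	/* negative a0: error or no entropy */
		return false;

	/* Entropy is packed into the low bits of the last register. */
	*out = res.a3 & GENMASK(15, 0);
	return true;
}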