Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

crypto: lib/utils - Move utilities into new header

The utilities have historically resided in algapi.h because they were
first used internally before being exported.  Move them into a new
header file so that external users are not exposed to internal API
details.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

+75 -63
+1 -62
include/crypto/algapi.h
··· 7 7 #ifndef _CRYPTO_ALGAPI_H 8 8 #define _CRYPTO_ALGAPI_H 9 9 10 + #include <crypto/utils.h> 10 11 #include <linux/align.h> 11 12 #include <linux/cache.h> 12 13 #include <linux/crypto.h> 13 - #include <linux/kconfig.h> 14 - #include <linux/list.h> 15 14 #include <linux/types.h> 16 - 17 - #include <asm/unaligned.h> 18 15 19 16 /* 20 17 * Maximum values for blocksize and alignmask, used to allocate ··· 169 172 } 170 173 171 174 void crypto_inc(u8 *a, unsigned int size); 172 - void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int size); 173 - 174 - static inline void crypto_xor(u8 *dst, const u8 *src, unsigned int size) 175 - { 176 - if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && 177 - __builtin_constant_p(size) && 178 - (size % sizeof(unsigned long)) == 0) { 179 - unsigned long *d = (unsigned long *)dst; 180 - unsigned long *s = (unsigned long *)src; 181 - unsigned long l; 182 - 183 - while (size > 0) { 184 - l = get_unaligned(d) ^ get_unaligned(s++); 185 - put_unaligned(l, d++); 186 - size -= sizeof(unsigned long); 187 - } 188 - } else { 189 - __crypto_xor(dst, dst, src, size); 190 - } 191 - } 192 - 193 - static inline void crypto_xor_cpy(u8 *dst, const u8 *src1, const u8 *src2, 194 - unsigned int size) 195 - { 196 - if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && 197 - __builtin_constant_p(size) && 198 - (size % sizeof(unsigned long)) == 0) { 199 - unsigned long *d = (unsigned long *)dst; 200 - unsigned long *s1 = (unsigned long *)src1; 201 - unsigned long *s2 = (unsigned long *)src2; 202 - unsigned long l; 203 - 204 - while (size > 0) { 205 - l = get_unaligned(s1++) ^ get_unaligned(s2++); 206 - put_unaligned(l, d++); 207 - size -= sizeof(unsigned long); 208 - } 209 - } else { 210 - __crypto_xor(dst, src1, src2, size); 211 - } 212 - } 213 175 214 176 static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm) 215 177 { ··· 245 289 static inline u32 crypto_algt_inherited_mask(struct crypto_attr_type *algt) 246 290 { 247 291 
return crypto_requires_off(algt, CRYPTO_ALG_INHERITED_FLAGS); 248 - } 249 - 250 - noinline unsigned long __crypto_memneq(const void *a, const void *b, size_t size); 251 - 252 - /** 253 - * crypto_memneq - Compare two areas of memory without leaking 254 - * timing information. 255 - * 256 - * @a: One area of memory 257 - * @b: Another area of memory 258 - * @size: The size of the area. 259 - * 260 - * Returns 0 when data is equal, 1 otherwise. 261 - */ 262 - static inline int crypto_memneq(const void *a, const void *b, size_t size) 263 - { 264 - return __crypto_memneq(a, b, size) != 0UL ? 1 : 0; 265 292 } 266 293 267 294 int crypto_register_notifier(struct notifier_block *nb);
+73
include/crypto/utils.h
··· 1 + /* SPDX-License-Identifier: GPL-2.0-or-later */ 2 + /* 3 + * Cryptographic utilities 4 + * 5 + * Copyright (c) 2023 Herbert Xu <herbert@gondor.apana.org.au> 6 + */ 7 + #ifndef _CRYPTO_UTILS_H 8 + #define _CRYPTO_UTILS_H 9 + 10 + #include <asm/unaligned.h> 11 + #include <linux/compiler_attributes.h> 12 + #include <linux/types.h> 13 + 14 + void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int size); 15 + 16 + static inline void crypto_xor(u8 *dst, const u8 *src, unsigned int size) 17 + { 18 + if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && 19 + __builtin_constant_p(size) && 20 + (size % sizeof(unsigned long)) == 0) { 21 + unsigned long *d = (unsigned long *)dst; 22 + unsigned long *s = (unsigned long *)src; 23 + unsigned long l; 24 + 25 + while (size > 0) { 26 + l = get_unaligned(d) ^ get_unaligned(s++); 27 + put_unaligned(l, d++); 28 + size -= sizeof(unsigned long); 29 + } 30 + } else { 31 + __crypto_xor(dst, dst, src, size); 32 + } 33 + } 34 + 35 + static inline void crypto_xor_cpy(u8 *dst, const u8 *src1, const u8 *src2, 36 + unsigned int size) 37 + { 38 + if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && 39 + __builtin_constant_p(size) && 40 + (size % sizeof(unsigned long)) == 0) { 41 + unsigned long *d = (unsigned long *)dst; 42 + unsigned long *s1 = (unsigned long *)src1; 43 + unsigned long *s2 = (unsigned long *)src2; 44 + unsigned long l; 45 + 46 + while (size > 0) { 47 + l = get_unaligned(s1++) ^ get_unaligned(s2++); 48 + put_unaligned(l, d++); 49 + size -= sizeof(unsigned long); 50 + } 51 + } else { 52 + __crypto_xor(dst, src1, src2, size); 53 + } 54 + } 55 + 56 + noinline unsigned long __crypto_memneq(const void *a, const void *b, size_t size); 57 + 58 + /** 59 + * crypto_memneq - Compare two areas of memory without leaking 60 + * timing information. 61 + * 62 + * @a: One area of memory 63 + * @b: Another area of memory 64 + * @size: The size of the area. 65 + * 66 + * Returns 0 when data is equal, 1 otherwise. 
67 + */ 68 + static inline int crypto_memneq(const void *a, const void *b, size_t size) 69 + { 70 + return __crypto_memneq(a, b, size) != 0UL ? 1 : 0; 71 + } 72 + 73 + #endif /* _CRYPTO_UTILS_H */
+1 -1
lib/crypto/utils.c
··· 6 6 */ 7 7 8 8 #include <asm/unaligned.h> 9 - #include <crypto/algapi.h> 9 + #include <crypto/utils.h> 10 10 #include <linux/module.h> 11 11 12 12 /*