Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

crypto: remove cipher routines from public crypto API

The cipher routines in the crypto API are mostly intended for templates
implementing skcipher modes generically in software, and shouldn't be
used outside of the crypto subsystem. So move the prototypes and all
related definitions to a new header file under include/crypto/internal.
Also, let's use the new module namespace feature to move the symbol
exports into a new namespace CRYPTO_INTERNAL.

Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
Acked-by: Eric Biggers <ebiggers@google.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

Authored by Ard Biesheuvel; committed by Herbert Xu.
Commit: 0eb76ba2 (parent: a3b01ffd)

+273 -207
+2 -2
Documentation/crypto/api-skcipher.rst
··· 28 28 Single Block Cipher API 29 29 ----------------------- 30 30 31 - .. kernel-doc:: include/linux/crypto.h 31 + .. kernel-doc:: include/crypto/internal/cipher.h 32 32 :doc: Single Block Cipher API 33 33 34 - .. kernel-doc:: include/linux/crypto.h 34 + .. kernel-doc:: include/crypto/internal/cipher.h 35 35 :functions: crypto_alloc_cipher crypto_free_cipher crypto_has_cipher crypto_cipher_blocksize crypto_cipher_setkey crypto_cipher_encrypt_one crypto_cipher_decrypt_one
+3
arch/arm/crypto/aes-neonbs-glue.c
··· 9 9 #include <asm/simd.h> 10 10 #include <crypto/aes.h> 11 11 #include <crypto/ctr.h> 12 + #include <crypto/internal/cipher.h> 12 13 #include <crypto/internal/simd.h> 13 14 #include <crypto/internal/skcipher.h> 14 15 #include <crypto/scatterwalk.h> ··· 23 22 MODULE_ALIAS_CRYPTO("cbc(aes)-all"); 24 23 MODULE_ALIAS_CRYPTO("ctr(aes)"); 25 24 MODULE_ALIAS_CRYPTO("xts(aes)"); 25 + 26 + MODULE_IMPORT_NS(CRYPTO_INTERNAL); 26 27 27 28 asmlinkage void aesbs_convert_key(u8 out[], u32 const rk[], int rounds); 28 29
+2
arch/s390/crypto/aes_s390.c
··· 21 21 #include <crypto/algapi.h> 22 22 #include <crypto/ghash.h> 23 23 #include <crypto/internal/aead.h> 24 + #include <crypto/internal/cipher.h> 24 25 #include <crypto/internal/skcipher.h> 25 26 #include <crypto/scatterwalk.h> 26 27 #include <linux/err.h> ··· 1056 1055 1057 1056 MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm"); 1058 1057 MODULE_LICENSE("GPL"); 1058 + MODULE_IMPORT_NS(CRYPTO_INTERNAL);
+2
crypto/adiantum.c
··· 32 32 33 33 #include <crypto/b128ops.h> 34 34 #include <crypto/chacha.h> 35 + #include <crypto/internal/cipher.h> 35 36 #include <crypto/internal/hash.h> 36 37 #include <crypto/internal/poly1305.h> 37 38 #include <crypto/internal/skcipher.h> ··· 617 616 MODULE_LICENSE("GPL v2"); 618 617 MODULE_AUTHOR("Eric Biggers <ebiggers@google.com>"); 619 618 MODULE_ALIAS_CRYPTO("adiantum"); 619 + MODULE_IMPORT_NS(CRYPTO_INTERNAL);
+2
crypto/ansi_cprng.c
··· 7 7 * (C) Neil Horman <nhorman@tuxdriver.com> 8 8 */ 9 9 10 + #include <crypto/internal/cipher.h> 10 11 #include <crypto/internal/rng.h> 11 12 #include <linux/err.h> 12 13 #include <linux/init.h> ··· 471 470 module_exit(prng_mod_fini); 472 471 MODULE_ALIAS_CRYPTO("stdrng"); 473 472 MODULE_ALIAS_CRYPTO("ansi_cprng"); 473 + MODULE_IMPORT_NS(CRYPTO_INTERNAL);
+1
crypto/cbc.c
··· 6 6 */ 7 7 8 8 #include <crypto/algapi.h> 9 + #include <crypto/internal/cipher.h> 9 10 #include <crypto/internal/skcipher.h> 10 11 #include <linux/err.h> 11 12 #include <linux/init.h>
+2
crypto/ccm.c
··· 6 6 */ 7 7 8 8 #include <crypto/internal/aead.h> 9 + #include <crypto/internal/cipher.h> 9 10 #include <crypto/internal/hash.h> 10 11 #include <crypto/internal/skcipher.h> 11 12 #include <crypto/scatterwalk.h> ··· 955 954 MODULE_ALIAS_CRYPTO("rfc4309"); 956 955 MODULE_ALIAS_CRYPTO("ccm"); 957 956 MODULE_ALIAS_CRYPTO("cbcmac"); 957 + MODULE_IMPORT_NS(CRYPTO_INTERNAL);
+2
crypto/cfb.c
··· 20 20 */ 21 21 22 22 #include <crypto/algapi.h> 23 + #include <crypto/internal/cipher.h> 23 24 #include <crypto/internal/skcipher.h> 24 25 #include <linux/err.h> 25 26 #include <linux/init.h> ··· 251 250 MODULE_LICENSE("GPL"); 252 251 MODULE_DESCRIPTION("CFB block cipher mode of operation"); 253 252 MODULE_ALIAS_CRYPTO("cfb"); 253 + MODULE_IMPORT_NS(CRYPTO_INTERNAL);
+4 -3
crypto/cipher.c
··· 9 9 */ 10 10 11 11 #include <crypto/algapi.h> 12 + #include <crypto/internal/cipher.h> 12 13 #include <linux/kernel.h> 13 14 #include <linux/crypto.h> 14 15 #include <linux/errno.h> ··· 54 53 55 54 return cia->cia_setkey(crypto_cipher_tfm(tfm), key, keylen); 56 55 } 57 - EXPORT_SYMBOL_GPL(crypto_cipher_setkey); 56 + EXPORT_SYMBOL_NS_GPL(crypto_cipher_setkey, CRYPTO_INTERNAL); 58 57 59 58 static inline void cipher_crypt_one(struct crypto_cipher *tfm, 60 59 u8 *dst, const u8 *src, bool enc) ··· 82 81 { 83 82 cipher_crypt_one(tfm, dst, src, true); 84 83 } 85 - EXPORT_SYMBOL_GPL(crypto_cipher_encrypt_one); 84 + EXPORT_SYMBOL_NS_GPL(crypto_cipher_encrypt_one, CRYPTO_INTERNAL); 86 85 87 86 void crypto_cipher_decrypt_one(struct crypto_cipher *tfm, 88 87 u8 *dst, const u8 *src) 89 88 { 90 89 cipher_crypt_one(tfm, dst, src, false); 91 90 } 92 - EXPORT_SYMBOL_GPL(crypto_cipher_decrypt_one); 91 + EXPORT_SYMBOL_NS_GPL(crypto_cipher_decrypt_one, CRYPTO_INTERNAL);
+2
crypto/cmac.c
··· 11 11 * Author: Kazunori Miyazawa <miyazawa@linux-ipv6.org> 12 12 */ 13 13 14 + #include <crypto/internal/cipher.h> 14 15 #include <crypto/internal/hash.h> 15 16 #include <linux/err.h> 16 17 #include <linux/kernel.h> ··· 314 313 MODULE_LICENSE("GPL"); 315 314 MODULE_DESCRIPTION("CMAC keyed hash algorithm"); 316 315 MODULE_ALIAS_CRYPTO("cmac"); 316 + MODULE_IMPORT_NS(CRYPTO_INTERNAL);
+2
crypto/ctr.c
··· 7 7 8 8 #include <crypto/algapi.h> 9 9 #include <crypto/ctr.h> 10 + #include <crypto/internal/cipher.h> 10 11 #include <crypto/internal/skcipher.h> 11 12 #include <linux/err.h> 12 13 #include <linux/init.h> ··· 359 358 MODULE_DESCRIPTION("CTR block cipher mode of operation"); 360 359 MODULE_ALIAS_CRYPTO("rfc3686"); 361 360 MODULE_ALIAS_CRYPTO("ctr"); 361 + MODULE_IMPORT_NS(CRYPTO_INTERNAL);
+2
crypto/drbg.c
··· 98 98 */ 99 99 100 100 #include <crypto/drbg.h> 101 + #include <crypto/internal/cipher.h> 101 102 #include <linux/kernel.h> 102 103 103 104 /*************************************************************** ··· 2162 2161 CRYPTO_DRBG_HMAC_STRING 2163 2162 CRYPTO_DRBG_CTR_STRING); 2164 2163 MODULE_ALIAS_CRYPTO("stdrng"); 2164 + MODULE_IMPORT_NS(CRYPTO_INTERNAL);
+1
crypto/ecb.c
··· 6 6 */ 7 7 8 8 #include <crypto/algapi.h> 9 + #include <crypto/internal/cipher.h> 9 10 #include <crypto/internal/skcipher.h> 10 11 #include <linux/err.h> 11 12 #include <linux/init.h>
+2
crypto/essiv.c
··· 30 30 31 31 #include <crypto/authenc.h> 32 32 #include <crypto/internal/aead.h> 33 + #include <crypto/internal/cipher.h> 33 34 #include <crypto/internal/hash.h> 34 35 #include <crypto/internal/skcipher.h> 35 36 #include <crypto/scatterwalk.h> ··· 644 643 MODULE_DESCRIPTION("ESSIV skcipher/aead wrapper for block encryption"); 645 644 MODULE_LICENSE("GPL v2"); 646 645 MODULE_ALIAS_CRYPTO("essiv"); 646 + MODULE_IMPORT_NS(CRYPTO_INTERNAL);
+2
crypto/keywrap.c
··· 85 85 #include <linux/crypto.h> 86 86 #include <linux/scatterlist.h> 87 87 #include <crypto/scatterwalk.h> 88 + #include <crypto/internal/cipher.h> 88 89 #include <crypto/internal/skcipher.h> 89 90 90 91 struct crypto_kw_block { ··· 317 316 MODULE_AUTHOR("Stephan Mueller <smueller@chronox.de>"); 318 317 MODULE_DESCRIPTION("Key Wrapping (RFC3394 / NIST SP800-38F)"); 319 318 MODULE_ALIAS_CRYPTO("kw"); 319 + MODULE_IMPORT_NS(CRYPTO_INTERNAL);
+2
crypto/ofb.c
··· 8 8 */ 9 9 10 10 #include <crypto/algapi.h> 11 + #include <crypto/internal/cipher.h> 11 12 #include <crypto/internal/skcipher.h> 12 13 #include <linux/err.h> 13 14 #include <linux/init.h> ··· 103 102 MODULE_LICENSE("GPL"); 104 103 MODULE_DESCRIPTION("OFB block cipher mode of operation"); 105 104 MODULE_ALIAS_CRYPTO("ofb"); 105 + MODULE_IMPORT_NS(CRYPTO_INTERNAL);
+2
crypto/pcbc.c
··· 10 10 */ 11 11 12 12 #include <crypto/algapi.h> 13 + #include <crypto/internal/cipher.h> 13 14 #include <crypto/internal/skcipher.h> 14 15 #include <linux/err.h> 15 16 #include <linux/init.h> ··· 192 191 MODULE_LICENSE("GPL"); 193 192 MODULE_DESCRIPTION("PCBC block cipher mode of operation"); 194 193 MODULE_ALIAS_CRYPTO("pcbc"); 194 + MODULE_IMPORT_NS(CRYPTO_INTERNAL);
+2
crypto/skcipher.c
··· 10 10 */ 11 11 12 12 #include <crypto/internal/aead.h> 13 + #include <crypto/internal/cipher.h> 13 14 #include <crypto/internal/skcipher.h> 14 15 #include <crypto/scatterwalk.h> 15 16 #include <linux/bug.h> ··· 987 986 988 987 MODULE_LICENSE("GPL"); 989 988 MODULE_DESCRIPTION("Symmetric key cipher type"); 989 + MODULE_IMPORT_NS(CRYPTO_INTERNAL);
+3
crypto/testmgr.c
··· 33 33 #include <crypto/akcipher.h> 34 34 #include <crypto/kpp.h> 35 35 #include <crypto/acompress.h> 36 + #include <crypto/internal/cipher.h> 36 37 #include <crypto/internal/simd.h> 37 38 38 39 #include "internal.h" 40 + 41 + MODULE_IMPORT_NS(CRYPTO_INTERNAL); 39 42 40 43 static bool notests; 41 44 module_param(notests, bool, 0644);
+2
crypto/vmac.c
··· 36 36 #include <linux/scatterlist.h> 37 37 #include <asm/byteorder.h> 38 38 #include <crypto/scatterwalk.h> 39 + #include <crypto/internal/cipher.h> 39 40 #include <crypto/internal/hash.h> 40 41 41 42 /* ··· 694 693 MODULE_LICENSE("GPL"); 695 694 MODULE_DESCRIPTION("VMAC hash algorithm"); 696 695 MODULE_ALIAS_CRYPTO("vmac64"); 696 + MODULE_IMPORT_NS(CRYPTO_INTERNAL);
+2
crypto/xcbc.c
··· 6 6 * Kazunori Miyazawa <miyazawa@linux-ipv6.org> 7 7 */ 8 8 9 + #include <crypto/internal/cipher.h> 9 10 #include <crypto/internal/hash.h> 10 11 #include <linux/err.h> 11 12 #include <linux/kernel.h> ··· 273 272 MODULE_LICENSE("GPL"); 274 273 MODULE_DESCRIPTION("XCBC keyed hash algorithm"); 275 274 MODULE_ALIAS_CRYPTO("xcbc"); 275 + MODULE_IMPORT_NS(CRYPTO_INTERNAL);
+2
crypto/xts.c
··· 7 7 * Based on ecb.c 8 8 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au> 9 9 */ 10 + #include <crypto/internal/cipher.h> 10 11 #include <crypto/internal/skcipher.h> 11 12 #include <crypto/scatterwalk.h> 12 13 #include <linux/err.h> ··· 465 464 MODULE_LICENSE("GPL"); 466 465 MODULE_DESCRIPTION("XTS block cipher mode"); 467 466 MODULE_ALIAS_CRYPTO("xts"); 467 + MODULE_IMPORT_NS(CRYPTO_INTERNAL);
+2
drivers/crypto/geode-aes.c
··· 10 10 #include <linux/spinlock.h> 11 11 #include <crypto/algapi.h> 12 12 #include <crypto/aes.h> 13 + #include <crypto/internal/cipher.h> 13 14 #include <crypto/internal/skcipher.h> 14 15 15 16 #include <linux/io.h> ··· 435 434 MODULE_AUTHOR("Advanced Micro Devices, Inc."); 436 435 MODULE_DESCRIPTION("Geode LX Hardware AES driver"); 437 436 MODULE_LICENSE("GPL"); 437 + MODULE_IMPORT_NS(CRYPTO_INTERNAL);
+1
drivers/crypto/inside-secure/safexcel.c
··· 1999 1999 MODULE_AUTHOR("Igal Liberman <igall@marvell.com>"); 2000 2000 MODULE_DESCRIPTION("Support for SafeXcel cryptographic engines: EIP97 & EIP197"); 2001 2001 MODULE_LICENSE("GPL v2"); 2002 + MODULE_IMPORT_NS(CRYPTO_INTERNAL);
+1
drivers/crypto/inside-secure/safexcel_hash.c
··· 13 13 #include <crypto/sha3.h> 14 14 #include <crypto/skcipher.h> 15 15 #include <crypto/sm3.h> 16 + #include <crypto/internal/cipher.h> 16 17 #include <linux/device.h> 17 18 #include <linux/dma-mapping.h> 18 19 #include <linux/dmapool.h>
+1
drivers/crypto/qat/qat_common/adf_ctl_drv.c
··· 464 464 MODULE_DESCRIPTION("Intel(R) QuickAssist Technology"); 465 465 MODULE_ALIAS_CRYPTO("intel_qat"); 466 466 MODULE_VERSION(ADF_DRV_VERSION); 467 + MODULE_IMPORT_NS(CRYPTO_INTERNAL);
+1
drivers/crypto/qat/qat_common/qat_algs.c
··· 4 4 #include <linux/slab.h> 5 5 #include <linux/crypto.h> 6 6 #include <crypto/internal/aead.h> 7 + #include <crypto/internal/cipher.h> 7 8 #include <crypto/internal/skcipher.h> 8 9 #include <crypto/aes.h> 9 10 #include <crypto/sha1.h>
+1
drivers/crypto/vmx/aes.c
··· 14 14 #include <asm/simd.h> 15 15 #include <asm/switch_to.h> 16 16 #include <crypto/aes.h> 17 + #include <crypto/internal/cipher.h> 17 18 #include <crypto/internal/simd.h> 18 19 19 20 #include "aesp8-ppc.h"
+1
drivers/crypto/vmx/vmx.c
··· 78 78 "support on Power 8"); 79 79 MODULE_LICENSE("GPL"); 80 80 MODULE_VERSION("1.0.0"); 81 + MODULE_IMPORT_NS(CRYPTO_INTERNAL);
-39
include/crypto/algapi.h
··· 189 189 return inst->__ctx; 190 190 } 191 191 192 - struct crypto_cipher_spawn { 193 - struct crypto_spawn base; 194 - }; 195 - 196 - static inline int crypto_grab_cipher(struct crypto_cipher_spawn *spawn, 197 - struct crypto_instance *inst, 198 - const char *name, u32 type, u32 mask) 199 - { 200 - type &= ~CRYPTO_ALG_TYPE_MASK; 201 - type |= CRYPTO_ALG_TYPE_CIPHER; 202 - mask |= CRYPTO_ALG_TYPE_MASK; 203 - return crypto_grab_spawn(&spawn->base, inst, name, type, mask); 204 - } 205 - 206 - static inline void crypto_drop_cipher(struct crypto_cipher_spawn *spawn) 207 - { 208 - crypto_drop_spawn(&spawn->base); 209 - } 210 - 211 - static inline struct crypto_alg *crypto_spawn_cipher_alg( 212 - struct crypto_cipher_spawn *spawn) 213 - { 214 - return spawn->base.alg; 215 - } 216 - 217 - static inline struct crypto_cipher *crypto_spawn_cipher( 218 - struct crypto_cipher_spawn *spawn) 219 - { 220 - u32 type = CRYPTO_ALG_TYPE_CIPHER; 221 - u32 mask = CRYPTO_ALG_TYPE_MASK; 222 - 223 - return __crypto_cipher_cast(crypto_spawn_tfm(&spawn->base, type, mask)); 224 - } 225 - 226 - static inline struct cipher_alg *crypto_cipher_alg(struct crypto_cipher *tfm) 227 - { 228 - return &crypto_cipher_tfm(tfm)->__crt_alg->cra_cipher; 229 - } 230 - 231 192 static inline struct crypto_async_request *crypto_get_backlog( 232 193 struct crypto_queue *queue) 233 194 {
+218
include/crypto/internal/cipher.h
··· 1 + /* SPDX-License-Identifier: GPL-2.0-or-later */ 2 + /* 3 + * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> 4 + * Copyright (c) 2002 David S. Miller (davem@redhat.com) 5 + * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au> 6 + * 7 + * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no> 8 + * and Nettle, by Niels Möller. 9 + */ 10 + 11 + #ifndef _CRYPTO_INTERNAL_CIPHER_H 12 + #define _CRYPTO_INTERNAL_CIPHER_H 13 + 14 + #include <crypto/algapi.h> 15 + 16 + struct crypto_cipher { 17 + struct crypto_tfm base; 18 + }; 19 + 20 + /** 21 + * DOC: Single Block Cipher API 22 + * 23 + * The single block cipher API is used with the ciphers of type 24 + * CRYPTO_ALG_TYPE_CIPHER (listed as type "cipher" in /proc/crypto). 25 + * 26 + * Using the single block cipher API calls, operations with the basic cipher 27 + * primitive can be implemented. These cipher primitives exclude any block 28 + * chaining operations including IV handling. 29 + * 30 + * The purpose of this single block cipher API is to support the implementation 31 + * of templates or other concepts that only need to perform the cipher operation 32 + * on one block at a time. Templates invoke the underlying cipher primitive 33 + * block-wise and process either the input or the output data of these cipher 34 + * operations. 35 + */ 36 + 37 + static inline struct crypto_cipher *__crypto_cipher_cast(struct crypto_tfm *tfm) 38 + { 39 + return (struct crypto_cipher *)tfm; 40 + } 41 + 42 + /** 43 + * crypto_alloc_cipher() - allocate single block cipher handle 44 + * @alg_name: is the cra_name / name or cra_driver_name / driver name of the 45 + * single block cipher 46 + * @type: specifies the type of the cipher 47 + * @mask: specifies the mask for the cipher 48 + * 49 + * Allocate a cipher handle for a single block cipher. The returned struct 50 + * crypto_cipher is the cipher handle that is required for any subsequent API 51 + * invocation for that single block cipher. 
52 + * 53 + * Return: allocated cipher handle in case of success; IS_ERR() is true in case 54 + * of an error, PTR_ERR() returns the error code. 55 + */ 56 + static inline struct crypto_cipher *crypto_alloc_cipher(const char *alg_name, 57 + u32 type, u32 mask) 58 + { 59 + type &= ~CRYPTO_ALG_TYPE_MASK; 60 + type |= CRYPTO_ALG_TYPE_CIPHER; 61 + mask |= CRYPTO_ALG_TYPE_MASK; 62 + 63 + return __crypto_cipher_cast(crypto_alloc_base(alg_name, type, mask)); 64 + } 65 + 66 + static inline struct crypto_tfm *crypto_cipher_tfm(struct crypto_cipher *tfm) 67 + { 68 + return &tfm->base; 69 + } 70 + 71 + /** 72 + * crypto_free_cipher() - zeroize and free the single block cipher handle 73 + * @tfm: cipher handle to be freed 74 + */ 75 + static inline void crypto_free_cipher(struct crypto_cipher *tfm) 76 + { 77 + crypto_free_tfm(crypto_cipher_tfm(tfm)); 78 + } 79 + 80 + /** 81 + * crypto_has_cipher() - Search for the availability of a single block cipher 82 + * @alg_name: is the cra_name / name or cra_driver_name / driver name of the 83 + * single block cipher 84 + * @type: specifies the type of the cipher 85 + * @mask: specifies the mask for the cipher 86 + * 87 + * Return: true when the single block cipher is known to the kernel crypto API; 88 + * false otherwise 89 + */ 90 + static inline int crypto_has_cipher(const char *alg_name, u32 type, u32 mask) 91 + { 92 + type &= ~CRYPTO_ALG_TYPE_MASK; 93 + type |= CRYPTO_ALG_TYPE_CIPHER; 94 + mask |= CRYPTO_ALG_TYPE_MASK; 95 + 96 + return crypto_has_alg(alg_name, type, mask); 97 + } 98 + 99 + /** 100 + * crypto_cipher_blocksize() - obtain block size for cipher 101 + * @tfm: cipher handle 102 + * 103 + * The block size for the single block cipher referenced with the cipher handle 104 + * tfm is returned. 
The caller may use that information to allocate appropriate 105 + * memory for the data returned by the encryption or decryption operation 106 + * 107 + * Return: block size of cipher 108 + */ 109 + static inline unsigned int crypto_cipher_blocksize(struct crypto_cipher *tfm) 110 + { 111 + return crypto_tfm_alg_blocksize(crypto_cipher_tfm(tfm)); 112 + } 113 + 114 + static inline unsigned int crypto_cipher_alignmask(struct crypto_cipher *tfm) 115 + { 116 + return crypto_tfm_alg_alignmask(crypto_cipher_tfm(tfm)); 117 + } 118 + 119 + static inline u32 crypto_cipher_get_flags(struct crypto_cipher *tfm) 120 + { 121 + return crypto_tfm_get_flags(crypto_cipher_tfm(tfm)); 122 + } 123 + 124 + static inline void crypto_cipher_set_flags(struct crypto_cipher *tfm, 125 + u32 flags) 126 + { 127 + crypto_tfm_set_flags(crypto_cipher_tfm(tfm), flags); 128 + } 129 + 130 + static inline void crypto_cipher_clear_flags(struct crypto_cipher *tfm, 131 + u32 flags) 132 + { 133 + crypto_tfm_clear_flags(crypto_cipher_tfm(tfm), flags); 134 + } 135 + 136 + /** 137 + * crypto_cipher_setkey() - set key for cipher 138 + * @tfm: cipher handle 139 + * @key: buffer holding the key 140 + * @keylen: length of the key in bytes 141 + * 142 + * The caller provided key is set for the single block cipher referenced by the 143 + * cipher handle. 144 + * 145 + * Note, the key length determines the cipher type. Many block ciphers implement 146 + * different cipher modes depending on the key size, such as AES-128 vs AES-192 147 + * vs. AES-256. When providing a 16 byte key for an AES cipher handle, AES-128 148 + * is performed. 
149 + * 150 + * Return: 0 if the setting of the key was successful; < 0 if an error occurred 151 + */ 152 + int crypto_cipher_setkey(struct crypto_cipher *tfm, 153 + const u8 *key, unsigned int keylen); 154 + 155 + /** 156 + * crypto_cipher_encrypt_one() - encrypt one block of plaintext 157 + * @tfm: cipher handle 158 + * @dst: points to the buffer that will be filled with the ciphertext 159 + * @src: buffer holding the plaintext to be encrypted 160 + * 161 + * Invoke the encryption operation of one block. The caller must ensure that 162 + * the plaintext and ciphertext buffers are at least one block in size. 163 + */ 164 + void crypto_cipher_encrypt_one(struct crypto_cipher *tfm, 165 + u8 *dst, const u8 *src); 166 + 167 + /** 168 + * crypto_cipher_decrypt_one() - decrypt one block of ciphertext 169 + * @tfm: cipher handle 170 + * @dst: points to the buffer that will be filled with the plaintext 171 + * @src: buffer holding the ciphertext to be decrypted 172 + * 173 + * Invoke the decryption operation of one block. The caller must ensure that 174 + * the plaintext and ciphertext buffers are at least one block in size. 
175 + */ 176 + void crypto_cipher_decrypt_one(struct crypto_cipher *tfm, 177 + u8 *dst, const u8 *src); 178 + 179 + struct crypto_cipher_spawn { 180 + struct crypto_spawn base; 181 + }; 182 + 183 + static inline int crypto_grab_cipher(struct crypto_cipher_spawn *spawn, 184 + struct crypto_instance *inst, 185 + const char *name, u32 type, u32 mask) 186 + { 187 + type &= ~CRYPTO_ALG_TYPE_MASK; 188 + type |= CRYPTO_ALG_TYPE_CIPHER; 189 + mask |= CRYPTO_ALG_TYPE_MASK; 190 + return crypto_grab_spawn(&spawn->base, inst, name, type, mask); 191 + } 192 + 193 + static inline void crypto_drop_cipher(struct crypto_cipher_spawn *spawn) 194 + { 195 + crypto_drop_spawn(&spawn->base); 196 + } 197 + 198 + static inline struct crypto_alg *crypto_spawn_cipher_alg( 199 + struct crypto_cipher_spawn *spawn) 200 + { 201 + return spawn->base.alg; 202 + } 203 + 204 + static inline struct crypto_cipher *crypto_spawn_cipher( 205 + struct crypto_cipher_spawn *spawn) 206 + { 207 + u32 type = CRYPTO_ALG_TYPE_CIPHER; 208 + u32 mask = CRYPTO_ALG_TYPE_MASK; 209 + 210 + return __crypto_cipher_cast(crypto_spawn_tfm(&spawn->base, type, mask)); 211 + } 212 + 213 + static inline struct cipher_alg *crypto_cipher_alg(struct crypto_cipher *tfm) 214 + { 215 + return &crypto_cipher_tfm(tfm)->__crt_alg->cra_cipher; 216 + } 217 + 218 + #endif
+1
include/crypto/internal/skcipher.h
··· 9 9 #define _CRYPTO_INTERNAL_SKCIPHER_H 10 10 11 11 #include <crypto/algapi.h> 12 + #include <crypto/internal/cipher.h> 12 13 #include <crypto/skcipher.h> 13 14 #include <linux/list.h> 14 15 #include <linux/types.h>
-163
include/linux/crypto.h
··· 636 636 void *__crt_ctx[] CRYPTO_MINALIGN_ATTR; 637 637 }; 638 638 639 - struct crypto_cipher { 640 - struct crypto_tfm base; 641 - }; 642 - 643 639 struct crypto_comp { 644 640 struct crypto_tfm base; 645 641 }; ··· 738 742 struct crypto_tfm *tfm; 739 743 return __alignof__(tfm->__crt_ctx); 740 744 } 741 - 742 - /** 743 - * DOC: Single Block Cipher API 744 - * 745 - * The single block cipher API is used with the ciphers of type 746 - * CRYPTO_ALG_TYPE_CIPHER (listed as type "cipher" in /proc/crypto). 747 - * 748 - * Using the single block cipher API calls, operations with the basic cipher 749 - * primitive can be implemented. These cipher primitives exclude any block 750 - * chaining operations including IV handling. 751 - * 752 - * The purpose of this single block cipher API is to support the implementation 753 - * of templates or other concepts that only need to perform the cipher operation 754 - * on one block at a time. Templates invoke the underlying cipher primitive 755 - * block-wise and process either the input or the output data of these cipher 756 - * operations. 757 - */ 758 - 759 - static inline struct crypto_cipher *__crypto_cipher_cast(struct crypto_tfm *tfm) 760 - { 761 - return (struct crypto_cipher *)tfm; 762 - } 763 - 764 - /** 765 - * crypto_alloc_cipher() - allocate single block cipher handle 766 - * @alg_name: is the cra_name / name or cra_driver_name / driver name of the 767 - * single block cipher 768 - * @type: specifies the type of the cipher 769 - * @mask: specifies the mask for the cipher 770 - * 771 - * Allocate a cipher handle for a single block cipher. The returned struct 772 - * crypto_cipher is the cipher handle that is required for any subsequent API 773 - * invocation for that single block cipher. 774 - * 775 - * Return: allocated cipher handle in case of success; IS_ERR() is true in case 776 - * of an error, PTR_ERR() returns the error code. 
777 - */ 778 - static inline struct crypto_cipher *crypto_alloc_cipher(const char *alg_name, 779 - u32 type, u32 mask) 780 - { 781 - type &= ~CRYPTO_ALG_TYPE_MASK; 782 - type |= CRYPTO_ALG_TYPE_CIPHER; 783 - mask |= CRYPTO_ALG_TYPE_MASK; 784 - 785 - return __crypto_cipher_cast(crypto_alloc_base(alg_name, type, mask)); 786 - } 787 - 788 - static inline struct crypto_tfm *crypto_cipher_tfm(struct crypto_cipher *tfm) 789 - { 790 - return &tfm->base; 791 - } 792 - 793 - /** 794 - * crypto_free_cipher() - zeroize and free the single block cipher handle 795 - * @tfm: cipher handle to be freed 796 - */ 797 - static inline void crypto_free_cipher(struct crypto_cipher *tfm) 798 - { 799 - crypto_free_tfm(crypto_cipher_tfm(tfm)); 800 - } 801 - 802 - /** 803 - * crypto_has_cipher() - Search for the availability of a single block cipher 804 - * @alg_name: is the cra_name / name or cra_driver_name / driver name of the 805 - * single block cipher 806 - * @type: specifies the type of the cipher 807 - * @mask: specifies the mask for the cipher 808 - * 809 - * Return: true when the single block cipher is known to the kernel crypto API; 810 - * false otherwise 811 - */ 812 - static inline int crypto_has_cipher(const char *alg_name, u32 type, u32 mask) 813 - { 814 - type &= ~CRYPTO_ALG_TYPE_MASK; 815 - type |= CRYPTO_ALG_TYPE_CIPHER; 816 - mask |= CRYPTO_ALG_TYPE_MASK; 817 - 818 - return crypto_has_alg(alg_name, type, mask); 819 - } 820 - 821 - /** 822 - * crypto_cipher_blocksize() - obtain block size for cipher 823 - * @tfm: cipher handle 824 - * 825 - * The block size for the single block cipher referenced with the cipher handle 826 - * tfm is returned. 
The caller may use that information to allocate appropriate 827 - * memory for the data returned by the encryption or decryption operation 828 - * 829 - * Return: block size of cipher 830 - */ 831 - static inline unsigned int crypto_cipher_blocksize(struct crypto_cipher *tfm) 832 - { 833 - return crypto_tfm_alg_blocksize(crypto_cipher_tfm(tfm)); 834 - } 835 - 836 - static inline unsigned int crypto_cipher_alignmask(struct crypto_cipher *tfm) 837 - { 838 - return crypto_tfm_alg_alignmask(crypto_cipher_tfm(tfm)); 839 - } 840 - 841 - static inline u32 crypto_cipher_get_flags(struct crypto_cipher *tfm) 842 - { 843 - return crypto_tfm_get_flags(crypto_cipher_tfm(tfm)); 844 - } 845 - 846 - static inline void crypto_cipher_set_flags(struct crypto_cipher *tfm, 847 - u32 flags) 848 - { 849 - crypto_tfm_set_flags(crypto_cipher_tfm(tfm), flags); 850 - } 851 - 852 - static inline void crypto_cipher_clear_flags(struct crypto_cipher *tfm, 853 - u32 flags) 854 - { 855 - crypto_tfm_clear_flags(crypto_cipher_tfm(tfm), flags); 856 - } 857 - 858 - /** 859 - * crypto_cipher_setkey() - set key for cipher 860 - * @tfm: cipher handle 861 - * @key: buffer holding the key 862 - * @keylen: length of the key in bytes 863 - * 864 - * The caller provided key is set for the single block cipher referenced by the 865 - * cipher handle. 866 - * 867 - * Note, the key length determines the cipher type. Many block ciphers implement 868 - * different cipher modes depending on the key size, such as AES-128 vs AES-192 869 - * vs. AES-256. When providing a 16 byte key for an AES cipher handle, AES-128 870 - * is performed. 
871 - * 872 - * Return: 0 if the setting of the key was successful; < 0 if an error occurred 873 - */ 874 - int crypto_cipher_setkey(struct crypto_cipher *tfm, 875 - const u8 *key, unsigned int keylen); 876 - 877 - /** 878 - * crypto_cipher_encrypt_one() - encrypt one block of plaintext 879 - * @tfm: cipher handle 880 - * @dst: points to the buffer that will be filled with the ciphertext 881 - * @src: buffer holding the plaintext to be encrypted 882 - * 883 - * Invoke the encryption operation of one block. The caller must ensure that 884 - * the plaintext and ciphertext buffers are at least one block in size. 885 - */ 886 - void crypto_cipher_encrypt_one(struct crypto_cipher *tfm, 887 - u8 *dst, const u8 *src); 888 - 889 - /** 890 - * crypto_cipher_decrypt_one() - decrypt one block of ciphertext 891 - * @tfm: cipher handle 892 - * @dst: points to the buffer that will be filled with the plaintext 893 - * @src: buffer holding the ciphertext to be decrypted 894 - * 895 - * Invoke the decryption operation of one block. The caller must ensure that 896 - * the plaintext and ciphertext buffers are at least one block in size. 897 - */ 898 - void crypto_cipher_decrypt_one(struct crypto_cipher *tfm, 899 - u8 *dst, const u8 *src); 900 745 901 746 static inline struct crypto_comp *__crypto_comp_cast(struct crypto_tfm *tfm) 902 747 {