Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

ima: use new crypto_shash API instead of old crypto_hash

The old crypto_hash API is internally implemented on top of the shash API.
Using the shash API directly is more efficient.

Signed-off-by: Dmitry Kasatkin <dmitry.kasatkin@intel.com>
Signed-off-by: Mimi Zohar <zohar@linux.vnet.ibm.com>

Authored by Dmitry Kasatkin and committed by Mimi Zohar
76bb28f6 85865c1f

+42 -39
+1
security/integrity/ima/ima.h
··· 89 89 int ima_calc_boot_aggregate(char *digest); 90 90 void ima_add_violation(struct inode *inode, const unsigned char *filename, 91 91 const char *op, const char *cause); 92 + int ima_init_crypto(void); 92 93 93 94 /* 94 95 * used to protect h_table and sha_table
+38 -39
security/integrity/ima/ima_crypto.c
··· 19 19 #include <linux/scatterlist.h> 20 20 #include <linux/err.h> 21 21 #include <linux/slab.h> 22 + #include <crypto/hash.h> 22 23 #include "ima.h" 23 24 24 - static int init_desc(struct hash_desc *desc) 25 - { 26 - int rc; 25 + static struct crypto_shash *ima_shash_tfm; 27 26 28 - desc->tfm = crypto_alloc_hash(ima_hash, 0, CRYPTO_ALG_ASYNC); 29 - if (IS_ERR(desc->tfm)) { 30 - pr_info("IMA: failed to load %s transform: %ld\n", 31 - ima_hash, PTR_ERR(desc->tfm)); 32 - rc = PTR_ERR(desc->tfm); 27 + int ima_init_crypto(void) 28 + { 29 + long rc; 30 + 31 + ima_shash_tfm = crypto_alloc_shash(ima_hash, 0, 0); 32 + if (IS_ERR(ima_shash_tfm)) { 33 + rc = PTR_ERR(ima_shash_tfm); 34 + pr_err("Can not allocate %s (reason: %ld)\n", ima_hash, rc); 33 35 return rc; 34 36 } 35 - desc->flags = 0; 36 - rc = crypto_hash_init(desc); 37 - if (rc) 38 - crypto_free_hash(desc->tfm); 39 - return rc; 37 + return 0; 40 38 } 41 39 42 40 /* ··· 42 44 */ 43 45 int ima_calc_hash(struct file *file, char *digest) 44 46 { 45 - struct hash_desc desc; 46 - struct scatterlist sg[1]; 47 47 loff_t i_size, offset = 0; 48 48 char *rbuf; 49 49 int rc, read = 0; 50 + struct { 51 + struct shash_desc shash; 52 + char ctx[crypto_shash_descsize(ima_shash_tfm)]; 53 + } desc; 50 54 51 - rc = init_desc(&desc); 55 + desc.shash.tfm = ima_shash_tfm; 56 + desc.shash.flags = 0; 57 + 58 + rc = crypto_shash_init(&desc.shash); 52 59 if (rc != 0) 53 60 return rc; 54 61 ··· 78 75 if (rbuf_len == 0) 79 76 break; 80 77 offset += rbuf_len; 81 - sg_init_one(sg, rbuf, rbuf_len); 82 78 83 - rc = crypto_hash_update(&desc, sg, rbuf_len); 79 + rc = crypto_shash_update(&desc.shash, rbuf, rbuf_len); 84 80 if (rc) 85 81 break; 86 82 } 87 83 kfree(rbuf); 88 84 if (!rc) 89 - rc = crypto_hash_final(&desc, digest); 85 + rc = crypto_shash_final(&desc.shash, digest); 90 86 if (read) 91 87 file->f_mode &= ~FMODE_READ; 92 88 out: 93 - crypto_free_hash(desc.tfm); 94 89 return rc; 95 90 } 96 91 ··· 97 96 */ 98 97 int 
ima_calc_template_hash(int template_len, void *template, char *digest) 99 98 { 100 - struct hash_desc desc; 101 - struct scatterlist sg[1]; 102 - int rc; 99 + struct { 100 + struct shash_desc shash; 101 + char ctx[crypto_shash_descsize(ima_shash_tfm)]; 102 + } desc; 103 103 104 - rc = init_desc(&desc); 105 - if (rc != 0) 106 - return rc; 104 + desc.shash.tfm = ima_shash_tfm; 105 + desc.shash.flags = 0; 107 106 108 - sg_init_one(sg, template, template_len); 109 - rc = crypto_hash_update(&desc, sg, template_len); 110 - if (!rc) 111 - rc = crypto_hash_final(&desc, digest); 112 - crypto_free_hash(desc.tfm); 113 - return rc; 107 + return crypto_shash_digest(&desc.shash, template, template_len, digest); 114 108 } 115 109 116 110 static void __init ima_pcrread(int idx, u8 *pcr) ··· 122 126 */ 123 127 int __init ima_calc_boot_aggregate(char *digest) 124 128 { 125 - struct hash_desc desc; 126 - struct scatterlist sg; 127 129 u8 pcr_i[IMA_DIGEST_SIZE]; 128 130 int rc, i; 131 + struct { 132 + struct shash_desc shash; 133 + char ctx[crypto_shash_descsize(ima_shash_tfm)]; 134 + } desc; 129 135 130 - rc = init_desc(&desc); 136 + desc.shash.tfm = ima_shash_tfm; 137 + desc.shash.flags = 0; 138 + 139 + rc = crypto_shash_init(&desc.shash); 131 140 if (rc != 0) 132 141 return rc; 133 142 ··· 140 139 for (i = TPM_PCR0; i < TPM_PCR8; i++) { 141 140 ima_pcrread(i, pcr_i); 142 141 /* now accumulate with current aggregate */ 143 - sg_init_one(&sg, pcr_i, IMA_DIGEST_SIZE); 144 - rc = crypto_hash_update(&desc, &sg, IMA_DIGEST_SIZE); 142 + rc = crypto_shash_update(&desc.shash, pcr_i, IMA_DIGEST_SIZE); 145 143 } 146 144 if (!rc) 147 - crypto_hash_final(&desc, digest); 148 - crypto_free_hash(desc.tfm); 145 + crypto_shash_final(&desc.shash, digest); 149 146 return rc; 150 147 }
+3
security/integrity/ima/ima_init.c
··· 85 85 if (!ima_used_chip) 86 86 pr_info("IMA: No TPM chip found, activating TPM-bypass!\n"); 87 87 88 + rc = ima_init_crypto(); 89 + if (rc) 90 + return rc; 88 91 ima_add_boot_aggregate(); /* boot aggregate must be first entry */ 89 92 ima_init_policy(); 90 93