Linux kernel mirror (for testing): https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

crypto: hctr2 - Convert to use POLYVAL library

The "hash function" in hctr2 is fixed at POLYVAL; it can never vary.
Just use the POLYVAL library, which is much easier to use than the
crypto_shash API. It's faster, uses fixed-size structs, and never fails
(all the functions return void).

Note that this eliminates the only known user of the polyval support in
crypto_shash. A later commit will remove support for polyval from
crypto_shash, given that the library API is sufficient.

Reviewed-by: Ard Biesheuvel <ardb@kernel.org>
Link: https://lore.kernel.org/r/20251109234726.638437-7-ebiggers@kernel.org
Signed-off-by: Eric Biggers <ebiggers@kernel.org>

Total diffstat: +64 -163
+1 -1
crypto/Kconfig
··· 696 696 config CRYPTO_HCTR2 697 697 tristate "HCTR2" 698 698 select CRYPTO_XCTR 699 - select CRYPTO_POLYVAL 699 + select CRYPTO_LIB_POLYVAL 700 700 select CRYPTO_MANAGER 701 701 help 702 702 HCTR2 length-preserving encryption mode
+62 -160
crypto/hctr2.c
··· 17 17 */ 18 18 19 19 #include <crypto/internal/cipher.h> 20 - #include <crypto/internal/hash.h> 21 20 #include <crypto/internal/skcipher.h> 22 21 #include <crypto/polyval.h> 23 22 #include <crypto/scatterwalk.h> ··· 36 37 struct hctr2_instance_ctx { 37 38 struct crypto_cipher_spawn blockcipher_spawn; 38 39 struct crypto_skcipher_spawn xctr_spawn; 39 - struct crypto_shash_spawn polyval_spawn; 40 40 }; 41 41 42 42 struct hctr2_tfm_ctx { 43 43 struct crypto_cipher *blockcipher; 44 44 struct crypto_skcipher *xctr; 45 - struct crypto_shash *polyval; 45 + struct polyval_key poly_key; 46 + struct polyval_elem hashed_tweaklens[2]; 46 47 u8 L[BLOCKCIPHER_BLOCK_SIZE]; 47 - int hashed_tweak_offset; 48 - /* 49 - * This struct is allocated with extra space for two exported hash 50 - * states. Since the hash state size is not known at compile-time, we 51 - * can't add these to the struct directly. 52 - * 53 - * hashed_tweaklen_divisible; 54 - * hashed_tweaklen_remainder; 55 - */ 56 48 }; 57 49 58 50 struct hctr2_request_ctx { ··· 53 63 struct scatterlist *bulk_part_src; 54 64 struct scatterlist sg_src[2]; 55 65 struct scatterlist sg_dst[2]; 66 + struct polyval_elem hashed_tweak; 56 67 /* 57 - * Sub-request sizes are unknown at compile-time, so they need to go 58 - * after the members with known sizes. 68 + * skcipher sub-request size is unknown at compile-time, so it needs to 69 + * go after the members with known sizes. 59 70 */ 60 71 union { 61 - struct shash_desc hash_desc; 72 + struct polyval_ctx poly_ctx; 62 73 struct skcipher_request xctr_req; 63 74 } u; 64 - /* 65 - * This struct is allocated with extra space for one exported hash 66 - * state. Since the hash state size is not known at compile-time, we 67 - * can't add it to the struct directly. 
68 - * 69 - * hashed_tweak; 70 - */ 71 75 }; 72 - 73 - static inline u8 *hctr2_hashed_tweaklen(const struct hctr2_tfm_ctx *tctx, 74 - bool has_remainder) 75 - { 76 - u8 *p = (u8 *)tctx + sizeof(*tctx); 77 - 78 - if (has_remainder) /* For messages not a multiple of block length */ 79 - p += crypto_shash_statesize(tctx->polyval); 80 - return p; 81 - } 82 - 83 - static inline u8 *hctr2_hashed_tweak(const struct hctr2_tfm_ctx *tctx, 84 - struct hctr2_request_ctx *rctx) 85 - { 86 - return (u8 *)rctx + tctx->hashed_tweak_offset; 87 - } 88 76 89 77 /* 90 78 * The input data for each HCTR2 hash step begins with a 16-byte block that ··· 74 106 * 75 107 * These precomputed hashes are stored in hctr2_tfm_ctx. 76 108 */ 77 - static int hctr2_hash_tweaklen(struct hctr2_tfm_ctx *tctx, bool has_remainder) 109 + static void hctr2_hash_tweaklens(struct hctr2_tfm_ctx *tctx) 78 110 { 79 - SHASH_DESC_ON_STACK(shash, tfm->polyval); 80 - __le64 tweak_length_block[2]; 81 - int err; 111 + struct polyval_ctx ctx; 82 112 83 - shash->tfm = tctx->polyval; 84 - memset(tweak_length_block, 0, sizeof(tweak_length_block)); 113 + for (int has_remainder = 0; has_remainder < 2; has_remainder++) { 114 + const __le64 tweak_length_block[2] = { 115 + cpu_to_le64(TWEAK_SIZE * 8 * 2 + 2 + has_remainder), 116 + }; 85 117 86 - tweak_length_block[0] = cpu_to_le64(TWEAK_SIZE * 8 * 2 + 2 + has_remainder); 87 - err = crypto_shash_init(shash); 88 - if (err) 89 - return err; 90 - err = crypto_shash_update(shash, (u8 *)tweak_length_block, 91 - POLYVAL_BLOCK_SIZE); 92 - if (err) 93 - return err; 94 - return crypto_shash_export(shash, hctr2_hashed_tweaklen(tctx, has_remainder)); 118 + polyval_init(&ctx, &tctx->poly_key); 119 + polyval_update(&ctx, (const u8 *)&tweak_length_block, 120 + sizeof(tweak_length_block)); 121 + static_assert(sizeof(tweak_length_block) == POLYVAL_BLOCK_SIZE); 122 + polyval_export_blkaligned( 123 + &ctx, &tctx->hashed_tweaklens[has_remainder]); 124 + } 125 + memzero_explicit(&ctx, 
sizeof(ctx)); 95 126 } 96 127 97 128 static int hctr2_setkey(struct crypto_skcipher *tfm, const u8 *key, ··· 123 156 tctx->L[0] = 0x01; 124 157 crypto_cipher_encrypt_one(tctx->blockcipher, tctx->L, tctx->L); 125 158 126 - crypto_shash_clear_flags(tctx->polyval, CRYPTO_TFM_REQ_MASK); 127 - crypto_shash_set_flags(tctx->polyval, crypto_skcipher_get_flags(tfm) & 128 - CRYPTO_TFM_REQ_MASK); 129 - err = crypto_shash_setkey(tctx->polyval, hbar, BLOCKCIPHER_BLOCK_SIZE); 130 - if (err) 131 - return err; 159 + static_assert(sizeof(hbar) == POLYVAL_BLOCK_SIZE); 160 + polyval_preparekey(&tctx->poly_key, hbar); 132 161 memzero_explicit(hbar, sizeof(hbar)); 133 162 134 - return hctr2_hash_tweaklen(tctx, true) ?: hctr2_hash_tweaklen(tctx, false); 163 + hctr2_hash_tweaklens(tctx); 164 + return 0; 135 165 } 136 166 137 - static int hctr2_hash_tweak(struct skcipher_request *req) 167 + static void hctr2_hash_tweak(struct skcipher_request *req) 138 168 { 139 169 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); 140 170 const struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm); 141 171 struct hctr2_request_ctx *rctx = skcipher_request_ctx(req); 142 - struct shash_desc *hash_desc = &rctx->u.hash_desc; 143 - int err; 172 + struct polyval_ctx *poly_ctx = &rctx->u.poly_ctx; 144 173 bool has_remainder = req->cryptlen % POLYVAL_BLOCK_SIZE; 145 174 146 - hash_desc->tfm = tctx->polyval; 147 - err = crypto_shash_import(hash_desc, hctr2_hashed_tweaklen(tctx, has_remainder)); 148 - if (err) 149 - return err; 150 - err = crypto_shash_update(hash_desc, req->iv, TWEAK_SIZE); 151 - if (err) 152 - return err; 175 + polyval_import_blkaligned(poly_ctx, &tctx->poly_key, 176 + &tctx->hashed_tweaklens[has_remainder]); 177 + polyval_update(poly_ctx, req->iv, TWEAK_SIZE); 153 178 154 179 // Store the hashed tweak, since we need it when computing both 155 180 // H(T || N) and H(T || V). 
156 - return crypto_shash_export(hash_desc, hctr2_hashed_tweak(tctx, rctx)); 181 + static_assert(TWEAK_SIZE % POLYVAL_BLOCK_SIZE == 0); 182 + polyval_export_blkaligned(poly_ctx, &rctx->hashed_tweak); 157 183 } 158 184 159 - static int hctr2_hash_message(struct skcipher_request *req, 160 - struct scatterlist *sgl, 161 - u8 digest[POLYVAL_DIGEST_SIZE]) 185 + static void hctr2_hash_message(struct skcipher_request *req, 186 + struct scatterlist *sgl, 187 + u8 digest[POLYVAL_DIGEST_SIZE]) 162 188 { 163 - static const u8 padding[BLOCKCIPHER_BLOCK_SIZE] = { 0x1 }; 189 + static const u8 padding = 0x1; 164 190 struct hctr2_request_ctx *rctx = skcipher_request_ctx(req); 165 - struct shash_desc *hash_desc = &rctx->u.hash_desc; 191 + struct polyval_ctx *poly_ctx = &rctx->u.poly_ctx; 166 192 const unsigned int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE; 167 193 struct sg_mapping_iter miter; 168 - unsigned int remainder = bulk_len % BLOCKCIPHER_BLOCK_SIZE; 169 194 int i; 170 - int err = 0; 171 195 int n = 0; 172 196 173 197 sg_miter_start(&miter, sgl, sg_nents(sgl), ··· 166 208 for (i = 0; i < bulk_len; i += n) { 167 209 sg_miter_next(&miter); 168 210 n = min_t(unsigned int, miter.length, bulk_len - i); 169 - err = crypto_shash_update(hash_desc, miter.addr, n); 170 - if (err) 171 - break; 211 + polyval_update(poly_ctx, miter.addr, n); 172 212 } 173 213 sg_miter_stop(&miter); 174 214 175 - if (err) 176 - return err; 177 - 178 - if (remainder) { 179 - err = crypto_shash_update(hash_desc, padding, 180 - BLOCKCIPHER_BLOCK_SIZE - remainder); 181 - if (err) 182 - return err; 183 - } 184 - return crypto_shash_final(hash_desc, digest); 215 + if (req->cryptlen % BLOCKCIPHER_BLOCK_SIZE) 216 + polyval_update(poly_ctx, &padding, 1); 217 + polyval_final(poly_ctx, digest); 185 218 } 186 219 187 220 static int hctr2_finish(struct skcipher_request *req) ··· 180 231 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); 181 232 const struct hctr2_tfm_ctx *tctx = 
crypto_skcipher_ctx(tfm); 182 233 struct hctr2_request_ctx *rctx = skcipher_request_ctx(req); 234 + struct polyval_ctx *poly_ctx = &rctx->u.poly_ctx; 183 235 u8 digest[POLYVAL_DIGEST_SIZE]; 184 - struct shash_desc *hash_desc = &rctx->u.hash_desc; 185 - int err; 186 236 187 237 // U = UU ^ H(T || V) 188 238 // or M = MM ^ H(T || N) 189 - hash_desc->tfm = tctx->polyval; 190 - err = crypto_shash_import(hash_desc, hctr2_hashed_tweak(tctx, rctx)); 191 - if (err) 192 - return err; 193 - err = hctr2_hash_message(req, rctx->bulk_part_dst, digest); 194 - if (err) 195 - return err; 239 + polyval_import_blkaligned(poly_ctx, &tctx->poly_key, 240 + &rctx->hashed_tweak); 241 + hctr2_hash_message(req, rctx->bulk_part_dst, digest); 196 242 crypto_xor(rctx->first_block, digest, BLOCKCIPHER_BLOCK_SIZE); 197 243 198 244 // Copy U (or M) into dst scatterlist ··· 213 269 struct hctr2_request_ctx *rctx = skcipher_request_ctx(req); 214 270 u8 digest[POLYVAL_DIGEST_SIZE]; 215 271 int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE; 216 - int err; 217 272 218 273 // Requests must be at least one block 219 274 if (req->cryptlen < BLOCKCIPHER_BLOCK_SIZE) ··· 230 287 231 288 // MM = M ^ H(T || N) 232 289 // or UU = U ^ H(T || V) 233 - err = hctr2_hash_tweak(req); 234 - if (err) 235 - return err; 236 - err = hctr2_hash_message(req, rctx->bulk_part_src, digest); 237 - if (err) 238 - return err; 290 + hctr2_hash_tweak(req); 291 + hctr2_hash_message(req, rctx->bulk_part_src, digest); 239 292 crypto_xor(digest, rctx->first_block, BLOCKCIPHER_BLOCK_SIZE); 240 293 241 294 // UU = E(MM) ··· 277 338 struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm); 278 339 struct crypto_skcipher *xctr; 279 340 struct crypto_cipher *blockcipher; 280 - struct crypto_shash *polyval; 281 - unsigned int subreq_size; 282 341 int err; 283 342 284 343 xctr = crypto_spawn_skcipher(&ictx->xctr_spawn); ··· 289 352 goto err_free_xctr; 290 353 } 291 354 292 - polyval = crypto_spawn_shash(&ictx->polyval_spawn); 293 - if 
(IS_ERR(polyval)) { 294 - err = PTR_ERR(polyval); 295 - goto err_free_blockcipher; 296 - } 297 - 298 355 tctx->xctr = xctr; 299 356 tctx->blockcipher = blockcipher; 300 - tctx->polyval = polyval; 301 357 302 358 BUILD_BUG_ON(offsetofend(struct hctr2_request_ctx, u) != 303 359 sizeof(struct hctr2_request_ctx)); 304 - subreq_size = max(sizeof_field(struct hctr2_request_ctx, u.hash_desc) + 305 - crypto_shash_descsize(polyval), 306 - sizeof_field(struct hctr2_request_ctx, u.xctr_req) + 307 - crypto_skcipher_reqsize(xctr)); 308 - 309 - tctx->hashed_tweak_offset = offsetof(struct hctr2_request_ctx, u) + 310 - subreq_size; 311 - crypto_skcipher_set_reqsize(tfm, tctx->hashed_tweak_offset + 312 - crypto_shash_statesize(polyval)); 360 + crypto_skcipher_set_reqsize( 361 + tfm, max(sizeof(struct hctr2_request_ctx), 362 + offsetofend(struct hctr2_request_ctx, u.xctr_req) + 363 + crypto_skcipher_reqsize(xctr))); 313 364 return 0; 314 365 315 - err_free_blockcipher: 316 - crypto_free_cipher(blockcipher); 317 366 err_free_xctr: 318 367 crypto_free_skcipher(xctr); 319 368 return err; ··· 311 388 312 389 crypto_free_cipher(tctx->blockcipher); 313 390 crypto_free_skcipher(tctx->xctr); 314 - crypto_free_shash(tctx->polyval); 315 391 } 316 392 317 393 static void hctr2_free_instance(struct skcipher_instance *inst) ··· 319 397 320 398 crypto_drop_cipher(&ictx->blockcipher_spawn); 321 399 crypto_drop_skcipher(&ictx->xctr_spawn); 322 - crypto_drop_shash(&ictx->polyval_spawn); 323 400 kfree(inst); 324 401 } 325 402 326 - static int hctr2_create_common(struct crypto_template *tmpl, 327 - struct rtattr **tb, 328 - const char *xctr_name, 329 - const char *polyval_name) 403 + static int hctr2_create_common(struct crypto_template *tmpl, struct rtattr **tb, 404 + const char *xctr_name) 330 405 { 331 406 struct skcipher_alg_common *xctr_alg; 332 407 u32 mask; 333 408 struct skcipher_instance *inst; 334 409 struct hctr2_instance_ctx *ictx; 335 410 struct crypto_alg *blockcipher_alg; 336 - struct 
shash_alg *polyval_alg; 337 411 char blockcipher_name[CRYPTO_MAX_ALG_NAME]; 338 412 int len; 339 413 int err; ··· 375 457 if (blockcipher_alg->cra_blocksize != BLOCKCIPHER_BLOCK_SIZE) 376 458 goto err_free_inst; 377 459 378 - /* Polyval ε-∆U hash function */ 379 - err = crypto_grab_shash(&ictx->polyval_spawn, 380 - skcipher_crypto_instance(inst), 381 - polyval_name, 0, mask); 382 - if (err) 383 - goto err_free_inst; 384 - polyval_alg = crypto_spawn_shash_alg(&ictx->polyval_spawn); 385 - 386 - /* Ensure Polyval is being used */ 387 - err = -EINVAL; 388 - if (strcmp(polyval_alg->base.cra_name, "polyval") != 0) 389 - goto err_free_inst; 390 - 391 460 /* Instance fields */ 392 461 393 462 err = -ENAMETOOLONG; ··· 382 477 blockcipher_alg->cra_name) >= CRYPTO_MAX_ALG_NAME) 383 478 goto err_free_inst; 384 479 if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME, 385 - "hctr2_base(%s,%s)", 386 - xctr_alg->base.cra_driver_name, 387 - polyval_alg->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME) 480 + "hctr2_base(%s,polyval-lib)", 481 + xctr_alg->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME) 388 482 goto err_free_inst; 389 483 390 484 inst->alg.base.cra_blocksize = BLOCKCIPHER_BLOCK_SIZE; 391 - inst->alg.base.cra_ctxsize = sizeof(struct hctr2_tfm_ctx) + 392 - polyval_alg->statesize * 2; 485 + inst->alg.base.cra_ctxsize = sizeof(struct hctr2_tfm_ctx); 393 486 inst->alg.base.cra_alignmask = xctr_alg->base.cra_alignmask; 394 - /* 395 - * The hash function is called twice, so it is weighted higher than the 396 - * xctr and blockcipher. 
397 - */ 398 487 inst->alg.base.cra_priority = (2 * xctr_alg->base.cra_priority + 399 - 4 * polyval_alg->base.cra_priority + 400 - blockcipher_alg->cra_priority) / 7; 488 + blockcipher_alg->cra_priority) / 489 + 3; 401 490 402 491 inst->alg.setkey = hctr2_setkey; 403 492 inst->alg.encrypt = hctr2_encrypt; ··· 424 525 polyval_name = crypto_attr_alg_name(tb[2]); 425 526 if (IS_ERR(polyval_name)) 426 527 return PTR_ERR(polyval_name); 528 + if (strcmp(polyval_name, "polyval") != 0 && 529 + strcmp(polyval_name, "polyval-lib") != 0) 530 + return -ENOENT; 427 531 428 - return hctr2_create_common(tmpl, tb, xctr_name, polyval_name); 532 + return hctr2_create_common(tmpl, tb, xctr_name); 429 533 } 430 534 431 535 static int hctr2_create(struct crypto_template *tmpl, struct rtattr **tb) ··· 444 542 blockcipher_name) >= CRYPTO_MAX_ALG_NAME) 445 543 return -ENAMETOOLONG; 446 544 447 - return hctr2_create_common(tmpl, tb, xctr_name, "polyval"); 545 + return hctr2_create_common(tmpl, tb, xctr_name); 448 546 } 449 547 450 548 static struct crypto_template hctr2_tmpls[] = {
+1 -2
crypto/testmgr.c
··· 5059 5059 } 5060 5060 }, { 5061 5061 .alg = "hctr2(aes)", 5062 - .generic_driver = 5063 - "hctr2_base(xctr(aes-generic),polyval-generic)", 5062 + .generic_driver = "hctr2_base(xctr(aes-generic),polyval-lib)", 5064 5063 .test = alg_test_skcipher, 5065 5064 .suite = { 5066 5065 .cipher = __VECS(aes_hctr2_tv_template)