Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

crypto: tegra - Reserve keyslots to allocate dynamically

The HW supports only storing 15 keys at a time. This limits the number
of tfms that can work without failures. Reserve keyslots to solve this
and use the reserved ones during the encryption/decryption operation.
This allows users to have the capability of hardware-protected keys
and faster operations if there is a limited number of tfms, while not
halting the operation if there are more tfms.

Fixes: 0880bb3b00c8 ("crypto: tegra - Add Tegra Security Engine driver")
Signed-off-by: Akhil R <akhilrajeev@nvidia.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

authored by

Akhil R and committed by
Herbert Xu
b157e7a2 bde55822

+164 -22
+118 -21
drivers/crypto/tegra/tegra-se-aes.c
··· 28 28 u32 ivsize; 29 29 u32 key1_id; 30 30 u32 key2_id; 31 + u32 keylen; 32 + u8 key1[AES_MAX_KEY_SIZE]; 33 + u8 key2[AES_MAX_KEY_SIZE]; 31 34 }; 32 35 33 36 struct tegra_aes_reqctx { ··· 46 43 struct tegra_se *se; 47 44 unsigned int authsize; 48 45 u32 alg; 49 - u32 keylen; 50 46 u32 key_id; 47 + u32 keylen; 48 + u8 key[AES_MAX_KEY_SIZE]; 51 49 }; 52 50 53 51 struct tegra_aead_reqctx { ··· 60 56 unsigned int cryptlen; 61 57 unsigned int authsize; 62 58 bool encrypt; 63 - u32 config; 64 59 u32 crypto_config; 60 + u32 config; 65 61 u32 key_id; 66 62 u32 iv[4]; 67 63 u8 authdata[16]; ··· 71 67 struct tegra_se *se; 72 68 unsigned int alg; 73 69 u32 key_id; 70 + u32 keylen; 71 + u8 key[AES_MAX_KEY_SIZE]; 74 72 struct crypto_shash *fallback_tfm; 75 73 }; 76 74 ··· 266 260 struct tegra_aes_ctx *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req)); 267 261 struct tegra_aes_reqctx *rctx = skcipher_request_ctx(req); 268 262 struct tegra_se *se = ctx->se; 269 - unsigned int cmdlen; 263 + unsigned int cmdlen, key1_id, key2_id; 270 264 int ret; 271 265 272 266 rctx->iv = (u32 *)req->iv; 273 267 rctx->len = req->cryptlen; 268 + key1_id = ctx->key1_id; 269 + key2_id = ctx->key2_id; 274 270 275 271 /* Pad input to AES Block size */ 276 272 if (ctx->alg != SE_ALG_XTS) { ··· 290 282 291 283 scatterwalk_map_and_copy(rctx->datbuf.buf, req->src, 0, req->cryptlen, 0); 292 284 285 + rctx->config = tegra234_aes_cfg(ctx->alg, rctx->encrypt); 286 + rctx->crypto_config = tegra234_aes_crypto_cfg(ctx->alg, rctx->encrypt); 287 + 288 + if (!key1_id) { 289 + ret = tegra_key_submit_reserved_aes(ctx->se, ctx->key1, 290 + ctx->keylen, ctx->alg, &key1_id); 291 + if (ret) 292 + goto out; 293 + } 294 + 295 + rctx->crypto_config |= SE_AES_KEY_INDEX(key1_id); 296 + 297 + if (ctx->alg == SE_ALG_XTS) { 298 + if (!key2_id) { 299 + ret = tegra_key_submit_reserved_xts(ctx->se, ctx->key2, 300 + ctx->keylen, ctx->alg, &key2_id); 301 + if (ret) 302 + goto out; 303 + } 304 + 305 + rctx->crypto_config |= 
SE_AES_KEY2_INDEX(key2_id); 306 + } 307 + 293 308 /* Prepare the command and submit for execution */ 294 309 cmdlen = tegra_aes_prep_cmd(ctx, rctx); 295 310 ret = tegra_se_host1x_submit(se, se->cmdbuf, cmdlen); ··· 321 290 tegra_aes_update_iv(req, ctx); 322 291 scatterwalk_map_and_copy(rctx->datbuf.buf, req->dst, 0, req->cryptlen, 1); 323 292 293 + out: 324 294 /* Free the buffer */ 325 295 dma_free_coherent(ctx->se->dev, rctx->datbuf.size, 326 296 rctx->datbuf.buf, rctx->datbuf.addr); 297 + 298 + if (tegra_key_is_reserved(key1_id)) 299 + tegra_key_invalidate_reserved(ctx->se, key1_id, ctx->alg); 300 + 301 + if (tegra_key_is_reserved(key2_id)) 302 + tegra_key_invalidate_reserved(ctx->se, key2_id, ctx->alg); 327 303 328 304 out_finalize: 329 305 crypto_finalize_skcipher_request(se->engine, req, ret); ··· 354 316 ctx->se = se_alg->se_dev; 355 317 ctx->key1_id = 0; 356 318 ctx->key2_id = 0; 319 + ctx->keylen = 0; 357 320 358 321 algname = crypto_tfm_alg_name(&tfm->base); 359 322 ret = se_algname_to_algid(algname); ··· 383 344 const u8 *key, u32 keylen) 384 345 { 385 346 struct tegra_aes_ctx *ctx = crypto_skcipher_ctx(tfm); 347 + int ret; 386 348 387 349 if (aes_check_keylen(keylen)) { 388 350 dev_dbg(ctx->se->dev, "invalid key length (%d)\n", keylen); 389 351 return -EINVAL; 390 352 } 391 353 392 - return tegra_key_submit(ctx->se, key, keylen, ctx->alg, &ctx->key1_id); 354 + ret = tegra_key_submit(ctx->se, key, keylen, ctx->alg, &ctx->key1_id); 355 + if (ret) { 356 + ctx->keylen = keylen; 357 + memcpy(ctx->key1, key, keylen); 358 + } 359 + 360 + return 0; 393 361 } 394 362 395 363 static int tegra_xts_setkey(struct crypto_skcipher *tfm, ··· 414 368 415 369 ret = tegra_key_submit(ctx->se, key, len, 416 370 ctx->alg, &ctx->key1_id); 417 - if (ret) 418 - return ret; 371 + if (ret) { 372 + ctx->keylen = len; 373 + memcpy(ctx->key1, key, len); 374 + } 419 375 420 - return tegra_key_submit(ctx->se, key + len, len, 376 + ret = tegra_key_submit(ctx->se, key + len, len, 421 
377 ctx->alg, &ctx->key2_id); 378 + if (ret) { 379 + ctx->keylen = len; 380 + memcpy(ctx->key2, key + len, len); 381 + } 422 382 423 383 return 0; 424 384 } ··· 502 450 req->iv = NULL; 503 451 504 452 rctx->encrypt = encrypt; 505 - rctx->config = tegra234_aes_cfg(ctx->alg, encrypt); 506 - rctx->crypto_config = tegra234_aes_crypto_cfg(ctx->alg, encrypt); 507 - rctx->crypto_config |= SE_AES_KEY_INDEX(ctx->key1_id); 508 - 509 - if (ctx->key2_id) 510 - rctx->crypto_config |= SE_AES_KEY2_INDEX(ctx->key2_id); 511 453 512 454 return crypto_transfer_skcipher_request_to_engine(ctx->se->engine, req); 513 455 } ··· 767 721 768 722 rctx->config = tegra234_aes_cfg(SE_ALG_GMAC, rctx->encrypt); 769 723 rctx->crypto_config = tegra234_aes_crypto_cfg(SE_ALG_GMAC, rctx->encrypt) | 770 - SE_AES_KEY_INDEX(ctx->key_id); 724 + SE_AES_KEY_INDEX(rctx->key_id); 771 725 772 726 cmdlen = tegra_gmac_prep_cmd(ctx, rctx); 773 727 ··· 784 738 785 739 rctx->config = tegra234_aes_cfg(SE_ALG_GCM, rctx->encrypt); 786 740 rctx->crypto_config = tegra234_aes_crypto_cfg(SE_ALG_GCM, rctx->encrypt) | 787 - SE_AES_KEY_INDEX(ctx->key_id); 741 + SE_AES_KEY_INDEX(rctx->key_id); 788 742 789 743 /* Prepare command and submit */ 790 744 cmdlen = tegra_gcm_crypt_prep_cmd(ctx, rctx); ··· 807 761 808 762 rctx->config = tegra234_aes_cfg(SE_ALG_GCM_FINAL, rctx->encrypt); 809 763 rctx->crypto_config = tegra234_aes_crypto_cfg(SE_ALG_GCM_FINAL, rctx->encrypt) | 810 - SE_AES_KEY_INDEX(ctx->key_id); 764 + SE_AES_KEY_INDEX(rctx->key_id); 811 765 812 766 /* Prepare command and submit */ 813 767 cmdlen = tegra_gcm_prep_final_cmd(se, cpuvaddr, rctx); ··· 938 892 rctx->config = tegra234_aes_cfg(SE_ALG_CBC_MAC, rctx->encrypt); 939 893 rctx->crypto_config = tegra234_aes_crypto_cfg(SE_ALG_CBC_MAC, 940 894 rctx->encrypt) | 941 - SE_AES_KEY_INDEX(ctx->key_id); 895 + SE_AES_KEY_INDEX(rctx->key_id); 942 896 943 897 /* Prepare command and submit */ 944 898 cmdlen = tegra_cbcmac_prep_cmd(ctx, rctx); ··· 1125 1079 1126 1080 rctx->config 
= tegra234_aes_cfg(SE_ALG_CTR, rctx->encrypt); 1127 1081 rctx->crypto_config = tegra234_aes_crypto_cfg(SE_ALG_CTR, rctx->encrypt) | 1128 - SE_AES_KEY_INDEX(ctx->key_id); 1082 + SE_AES_KEY_INDEX(rctx->key_id); 1129 1083 1130 1084 /* Copy authdata in the top of buffer for encryption/decryption */ 1131 1085 if (rctx->encrypt) ··· 1206 1160 if (ret) 1207 1161 goto out_finalize; 1208 1162 1163 + rctx->key_id = ctx->key_id; 1164 + 1209 1165 /* Allocate buffers required */ 1210 1166 rctx->inbuf.size = rctx->assoclen + rctx->authsize + rctx->cryptlen + 100; 1211 1167 rctx->inbuf.buf = dma_alloc_coherent(ctx->se->dev, rctx->inbuf.size, ··· 1221 1173 if (!rctx->outbuf.buf) { 1222 1174 ret = -ENOMEM; 1223 1175 goto out_free_inbuf; 1176 + } 1177 + 1178 + if (!ctx->key_id) { 1179 + ret = tegra_key_submit_reserved_aes(ctx->se, ctx->key, 1180 + ctx->keylen, ctx->alg, &rctx->key_id); 1181 + if (ret) 1182 + goto out; 1224 1183 } 1225 1184 1226 1185 if (rctx->encrypt) { ··· 1260 1205 dma_free_coherent(ctx->se->dev, rctx->outbuf.size, 1261 1206 rctx->inbuf.buf, rctx->inbuf.addr); 1262 1207 1208 + if (tegra_key_is_reserved(rctx->key_id)) 1209 + tegra_key_invalidate_reserved(ctx->se, rctx->key_id, ctx->alg); 1210 + 1263 1211 out_finalize: 1264 1212 crypto_finalize_aead_request(ctx->se->engine, req, ret); 1265 1213 ··· 1290 1232 memcpy(rctx->iv, req->iv, GCM_AES_IV_SIZE); 1291 1233 rctx->iv[3] = (1 << 24); 1292 1234 1235 + rctx->key_id = ctx->key_id; 1236 + 1293 1237 /* Allocate buffers required */ 1294 1238 rctx->inbuf.size = rctx->assoclen + rctx->authsize + rctx->cryptlen; 1295 1239 rctx->inbuf.buf = dma_alloc_coherent(ctx->se->dev, rctx->inbuf.size, ··· 1307 1247 if (!rctx->outbuf.buf) { 1308 1248 ret = -ENOMEM; 1309 1249 goto out_free_inbuf; 1250 + } 1251 + 1252 + if (!ctx->key_id) { 1253 + ret = tegra_key_submit_reserved_aes(ctx->se, ctx->key, 1254 + ctx->keylen, ctx->alg, &rctx->key_id); 1255 + if (ret) 1256 + goto out; 1310 1257 } 1311 1258 1312 1259 /* If there is associated 
data perform GMAC operation */ ··· 1346 1279 dma_free_coherent(ctx->se->dev, rctx->inbuf.size, 1347 1280 rctx->inbuf.buf, rctx->inbuf.addr); 1348 1281 1282 + if (tegra_key_is_reserved(rctx->key_id)) 1283 + tegra_key_invalidate_reserved(ctx->se, rctx->key_id, ctx->alg); 1284 + 1349 1285 out_finalize: 1350 1286 crypto_finalize_aead_request(ctx->se->engine, req, ret); 1351 1287 ··· 1371 1301 1372 1302 ctx->se = se_alg->se_dev; 1373 1303 ctx->key_id = 0; 1304 + ctx->keylen = 0; 1374 1305 1375 1306 ret = se_algname_to_algid(algname); 1376 1307 if (ret < 0) { ··· 1453 1382 const u8 *key, u32 keylen) 1454 1383 { 1455 1384 struct tegra_aead_ctx *ctx = crypto_aead_ctx(tfm); 1385 + int ret; 1456 1386 1457 1387 if (aes_check_keylen(keylen)) { 1458 1388 dev_dbg(ctx->se->dev, "invalid key length (%d)\n", keylen); 1459 1389 return -EINVAL; 1460 1390 } 1461 1391 1462 - return tegra_key_submit(ctx->se, key, keylen, ctx->alg, &ctx->key_id); 1392 + ret = tegra_key_submit(ctx->se, key, keylen, ctx->alg, &ctx->key_id); 1393 + if (ret) { 1394 + ctx->keylen = keylen; 1395 + memcpy(ctx->key, key, keylen); 1396 + } 1397 + 1398 + return 0; 1463 1399 } 1464 1400 1465 1401 static unsigned int tegra_cmac_prep_cmd(struct tegra_cmac_ctx *ctx, ··· 1551 1473 rctx->total_len = 0; 1552 1474 rctx->datbuf.size = 0; 1553 1475 rctx->residue.size = 0; 1476 + rctx->key_id = ctx->key_id; 1554 1477 rctx->task |= SHA_FIRST; 1555 1478 rctx->blk_size = crypto_ahash_blocksize(tfm); 1556 1479 ··· 1596 1517 rctx->datbuf.size = (req->nbytes + rctx->residue.size) - nresidue; 1597 1518 rctx->total_len += rctx->datbuf.size; 1598 1519 rctx->config = tegra234_aes_cfg(SE_ALG_CMAC, 0); 1599 - rctx->crypto_config = SE_AES_KEY_INDEX(ctx->key_id); 1520 + rctx->crypto_config = SE_AES_KEY_INDEX(rctx->key_id); 1600 1521 1601 1522 /* 1602 1523 * Keep one block and residue bytes in residue and ··· 1722 1643 rctx->task &= ~SHA_INIT; 1723 1644 } 1724 1645 1646 + if (!ctx->key_id) { 1647 + ret = 
tegra_key_submit_reserved_aes(ctx->se, ctx->key, 1648 + ctx->keylen, ctx->alg, &rctx->key_id); 1649 + if (ret) 1650 + goto out; 1651 + } 1652 + 1725 1653 if (rctx->task & SHA_UPDATE) { 1726 1654 ret = tegra_cmac_do_update(req); 1727 1655 if (ret) ··· 1745 1659 rctx->task &= ~SHA_FINAL; 1746 1660 } 1747 1661 out: 1662 + if (tegra_key_is_reserved(rctx->key_id)) 1663 + tegra_key_invalidate_reserved(ctx->se, rctx->key_id, ctx->alg); 1664 + 1748 1665 crypto_finalize_hash_request(se->engine, req, ret); 1749 1666 1750 1667 return 0; ··· 1788 1699 1789 1700 ctx->se = se_alg->se_dev; 1790 1701 ctx->key_id = 0; 1702 + ctx->keylen = 0; 1791 1703 1792 1704 ret = se_algname_to_algid(algname); 1793 1705 if (ret < 0) { ··· 1817 1727 unsigned int keylen) 1818 1728 { 1819 1729 struct tegra_cmac_ctx *ctx = crypto_ahash_ctx(tfm); 1730 + int ret; 1820 1731 1821 1732 if (aes_check_keylen(keylen)) { 1822 1733 dev_dbg(ctx->se->dev, "invalid key length (%d)\n", keylen); ··· 1827 1736 if (ctx->fallback_tfm) 1828 1737 crypto_shash_setkey(ctx->fallback_tfm, key, keylen); 1829 1738 1830 - return tegra_key_submit(ctx->se, key, keylen, ctx->alg, &ctx->key_id); 1739 + ret = tegra_key_submit(ctx->se, key, keylen, ctx->alg, &ctx->key_id); 1740 + if (ret) { 1741 + ctx->keylen = keylen; 1742 + memcpy(ctx->key, key, keylen); 1743 + } 1744 + 1745 + return 0; 1831 1746 } 1832 1747 1833 1748 static int tegra_cmac_init(struct ahash_request *req)
+18 -1
drivers/crypto/tegra/tegra-se-key.c
··· 141 141 tegra_keyslot_free(keyid); 142 142 } 143 143 144 + void tegra_key_invalidate_reserved(struct tegra_se *se, u32 keyid, u32 alg) 145 + { 146 + u8 zkey[AES_MAX_KEY_SIZE] = {0}; 147 + 148 + if (!keyid) 149 + return; 150 + 151 + /* Overwrite the key with 0s */ 152 + tegra_key_insert(se, zkey, AES_MAX_KEY_SIZE, keyid, alg); 153 + } 154 + 155 + inline int tegra_key_submit_reserved(struct tegra_se *se, const u8 *key, 156 + u32 keylen, u32 alg, u32 *keyid) 157 + { 158 + return tegra_key_insert(se, key, keylen, *keyid, alg); 159 + } 160 + 144 161 int tegra_key_submit(struct tegra_se *se, const u8 *key, u32 keylen, u32 alg, u32 *keyid) 145 162 { 146 163 int ret; ··· 166 149 if (!tegra_key_in_kslt(*keyid)) { 167 150 *keyid = tegra_keyslot_alloc(); 168 151 if (!(*keyid)) { 169 - dev_err(se->dev, "failed to allocate key slot\n"); 152 + dev_dbg(se->dev, "failed to allocate key slot\n"); 170 153 return -ENOMEM; 171 154 } 172 155 }
+28
drivers/crypto/tegra/tegra-se.h
··· 342 342 #define SE_MAX_KEYSLOT 15 343 343 #define SE_MAX_MEM_ALLOC SZ_4M 344 344 345 + #define TEGRA_AES_RESERVED_KSLT 14 346 + #define TEGRA_XTS_RESERVED_KSLT 15 347 + 345 348 #define SHA_FIRST BIT(0) 346 349 #define SHA_INIT BIT(1) 347 350 #define SHA_UPDATE BIT(2) ··· 505 502 void tegra_deinit_hash(struct tegra_se *se); 506 503 int tegra_key_submit(struct tegra_se *se, const u8 *key, 507 504 u32 keylen, u32 alg, u32 *keyid); 505 + 506 + int tegra_key_submit_reserved(struct tegra_se *se, const u8 *key, 507 + u32 keylen, u32 alg, u32 *keyid); 508 + 508 509 void tegra_key_invalidate(struct tegra_se *se, u32 keyid, u32 alg); 510 + void tegra_key_invalidate_reserved(struct tegra_se *se, u32 keyid, u32 alg); 509 511 int tegra_se_host1x_submit(struct tegra_se *se, struct tegra_se_cmdbuf *cmdbuf, u32 size); 512 + 513 + static inline int tegra_key_submit_reserved_aes(struct tegra_se *se, const u8 *key, 514 + u32 keylen, u32 alg, u32 *keyid) 515 + { 516 + *keyid = TEGRA_AES_RESERVED_KSLT; 517 + return tegra_key_submit_reserved(se, key, keylen, alg, keyid); 518 + } 519 + 520 + static inline int tegra_key_submit_reserved_xts(struct tegra_se *se, const u8 *key, 521 + u32 keylen, u32 alg, u32 *keyid) 522 + { 523 + *keyid = TEGRA_XTS_RESERVED_KSLT; 524 + return tegra_key_submit_reserved(se, key, keylen, alg, keyid); 525 + } 526 + 527 + static inline bool tegra_key_is_reserved(u32 keyid) 528 + { 529 + return ((keyid == TEGRA_AES_RESERVED_KSLT) || 530 + (keyid == TEGRA_XTS_RESERVED_KSLT)); 531 + } 510 532 511 533 /* HOST1x OPCODES */ 512 534 static inline u32 host1x_opcode_setpayload(unsigned int payload)