Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

crypto: ti - Add driver for DTHE V2 AES Engine (ECB, CBC)

Add support for ECB and CBC modes in the AES Engine of the DTHE V2
hardware cryptography engine.

Signed-off-by: T Pratham <t-pratham@ti.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

authored by

T Pratham and committed by
Herbert Xu
52f641bc 97d37c0a

+749
+1
MAINTAINERS
··· 25174 25174 L: linux-crypto@vger.kernel.org 25175 25175 S: Supported 25176 25176 F: Documentation/devicetree/bindings/crypto/ti,am62l-dthev2.yaml 25177 + F: drivers/crypto/ti/ 25177 25178 25178 25179 TI DAVINCI MACHINE SUPPORT 25179 25180 M: Bartosz Golaszewski <brgl@bgdev.pl>
+1
drivers/crypto/Kconfig
··· 863 863 source "drivers/crypto/aspeed/Kconfig" 864 864 source "drivers/crypto/starfive/Kconfig" 865 865 source "drivers/crypto/inside-secure/eip93/Kconfig" 866 + source "drivers/crypto/ti/Kconfig" 866 867 867 868 endif # CRYPTO_HW
+1
drivers/crypto/Makefile
··· 48 48 obj-y += intel/ 49 49 obj-y += starfive/ 50 50 obj-y += cavium/ 51 + obj-$(CONFIG_ARCH_K3) += ti/
+14
drivers/crypto/ti/Kconfig
# SPDX-License-Identifier: GPL-2.0-only
config CRYPTO_DEV_TI_DTHEV2
	tristate "Support for TI DTHE V2 cryptography engine"
	depends on CRYPTO && CRYPTO_HW && ARCH_K3
	select CRYPTO_ENGINE
	select CRYPTO_SKCIPHER
	select CRYPTO_ECB
	select CRYPTO_CBC
	help
	  This enables support for the TI DTHE V2 hw cryptography engine
	  which can be found on TI K3 SOCs. Selecting this enables use
	  of hardware offloading for cryptographic algorithms on
	  these devices, providing enhanced resistance against side-channel
	  attacks.
+3
drivers/crypto/ti/Makefile
# SPDX-License-Identifier: GPL-2.0-only
obj-$(CONFIG_CRYPTO_DEV_TI_DTHEV2) += dthev2.o
dthev2-objs := dthev2-common.o dthev2-aes.o
+411
drivers/crypto/ti/dthev2-aes.c
··· 1 + // SPDX-License-Identifier: GPL-2.0-only 2 + /* 3 + * K3 DTHE V2 crypto accelerator driver 4 + * 5 + * Copyright (C) Texas Instruments 2025 - https://www.ti.com 6 + * Author: T Pratham <t-pratham@ti.com> 7 + */ 8 + 9 + #include <crypto/aead.h> 10 + #include <crypto/aes.h> 11 + #include <crypto/algapi.h> 12 + #include <crypto/engine.h> 13 + #include <crypto/internal/aead.h> 14 + #include <crypto/internal/skcipher.h> 15 + 16 + #include "dthev2-common.h" 17 + 18 + #include <linux/delay.h> 19 + #include <linux/dmaengine.h> 20 + #include <linux/dma-mapping.h> 21 + #include <linux/io.h> 22 + #include <linux/scatterlist.h> 23 + 24 + /* Registers */ 25 + 26 + // AES Engine 27 + #define DTHE_P_AES_BASE 0x7000 28 + #define DTHE_P_AES_KEY1_0 0x0038 29 + #define DTHE_P_AES_KEY1_1 0x003C 30 + #define DTHE_P_AES_KEY1_2 0x0030 31 + #define DTHE_P_AES_KEY1_3 0x0034 32 + #define DTHE_P_AES_KEY1_4 0x0028 33 + #define DTHE_P_AES_KEY1_5 0x002C 34 + #define DTHE_P_AES_KEY1_6 0x0020 35 + #define DTHE_P_AES_KEY1_7 0x0024 36 + #define DTHE_P_AES_IV_IN_0 0x0040 37 + #define DTHE_P_AES_IV_IN_1 0x0044 38 + #define DTHE_P_AES_IV_IN_2 0x0048 39 + #define DTHE_P_AES_IV_IN_3 0x004C 40 + #define DTHE_P_AES_CTRL 0x0050 41 + #define DTHE_P_AES_C_LENGTH_0 0x0054 42 + #define DTHE_P_AES_C_LENGTH_1 0x0058 43 + #define DTHE_P_AES_AUTH_LENGTH 0x005C 44 + #define DTHE_P_AES_DATA_IN_OUT 0x0060 45 + 46 + #define DTHE_P_AES_SYSCONFIG 0x0084 47 + #define DTHE_P_AES_IRQSTATUS 0x008C 48 + #define DTHE_P_AES_IRQENABLE 0x0090 49 + 50 + /* Register write values and macros */ 51 + 52 + enum aes_ctrl_mode_masks { 53 + AES_CTRL_ECB_MASK = 0x00, 54 + AES_CTRL_CBC_MASK = BIT(5), 55 + }; 56 + 57 + #define DTHE_AES_CTRL_MODE_CLEAR_MASK ~GENMASK(28, 5) 58 + 59 + #define DTHE_AES_CTRL_DIR_ENC BIT(2) 60 + 61 + #define DTHE_AES_CTRL_KEYSIZE_16B BIT(3) 62 + #define DTHE_AES_CTRL_KEYSIZE_24B BIT(4) 63 + #define DTHE_AES_CTRL_KEYSIZE_32B (BIT(3) | BIT(4)) 64 + 65 + #define DTHE_AES_CTRL_SAVE_CTX_SET BIT(29) 66 + 67 + 
#define DTHE_AES_CTRL_OUTPUT_READY BIT_MASK(0) 68 + #define DTHE_AES_CTRL_INPUT_READY BIT_MASK(1) 69 + #define DTHE_AES_CTRL_SAVED_CTX_READY BIT_MASK(30) 70 + #define DTHE_AES_CTRL_CTX_READY BIT_MASK(31) 71 + 72 + #define DTHE_AES_SYSCONFIG_DMA_DATA_IN_OUT_EN GENMASK(6, 5) 73 + #define DTHE_AES_IRQENABLE_EN_ALL GENMASK(3, 0) 74 + 75 + /* Misc */ 76 + #define AES_IV_SIZE AES_BLOCK_SIZE 77 + #define AES_BLOCK_WORDS (AES_BLOCK_SIZE / sizeof(u32)) 78 + #define AES_IV_WORDS AES_BLOCK_WORDS 79 + 80 + static int dthe_cipher_init_tfm(struct crypto_skcipher *tfm) 81 + { 82 + struct dthe_tfm_ctx *ctx = crypto_skcipher_ctx(tfm); 83 + struct dthe_data *dev_data = dthe_get_dev(ctx); 84 + 85 + ctx->dev_data = dev_data; 86 + ctx->keylen = 0; 87 + 88 + return 0; 89 + } 90 + 91 + static int dthe_aes_setkey(struct crypto_skcipher *tfm, const u8 *key, unsigned int keylen) 92 + { 93 + struct dthe_tfm_ctx *ctx = crypto_skcipher_ctx(tfm); 94 + 95 + if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 && keylen != AES_KEYSIZE_256) 96 + return -EINVAL; 97 + 98 + ctx->keylen = keylen; 99 + memcpy(ctx->key, key, keylen); 100 + 101 + return 0; 102 + } 103 + 104 + static int dthe_aes_ecb_setkey(struct crypto_skcipher *tfm, const u8 *key, unsigned int keylen) 105 + { 106 + struct dthe_tfm_ctx *ctx = crypto_skcipher_ctx(tfm); 107 + 108 + ctx->aes_mode = DTHE_AES_ECB; 109 + 110 + return dthe_aes_setkey(tfm, key, keylen); 111 + } 112 + 113 + static int dthe_aes_cbc_setkey(struct crypto_skcipher *tfm, const u8 *key, unsigned int keylen) 114 + { 115 + struct dthe_tfm_ctx *ctx = crypto_skcipher_ctx(tfm); 116 + 117 + ctx->aes_mode = DTHE_AES_CBC; 118 + 119 + return dthe_aes_setkey(tfm, key, keylen); 120 + } 121 + 122 + static void dthe_aes_set_ctrl_key(struct dthe_tfm_ctx *ctx, 123 + struct dthe_aes_req_ctx *rctx, 124 + u32 *iv_in) 125 + { 126 + struct dthe_data *dev_data = dthe_get_dev(ctx); 127 + void __iomem *aes_base_reg = dev_data->regs + DTHE_P_AES_BASE; 128 + u32 ctrl_val = 0; 129 + 130 
+ writel_relaxed(ctx->key[0], aes_base_reg + DTHE_P_AES_KEY1_0); 131 + writel_relaxed(ctx->key[1], aes_base_reg + DTHE_P_AES_KEY1_1); 132 + writel_relaxed(ctx->key[2], aes_base_reg + DTHE_P_AES_KEY1_2); 133 + writel_relaxed(ctx->key[3], aes_base_reg + DTHE_P_AES_KEY1_3); 134 + 135 + if (ctx->keylen > AES_KEYSIZE_128) { 136 + writel_relaxed(ctx->key[4], aes_base_reg + DTHE_P_AES_KEY1_4); 137 + writel_relaxed(ctx->key[5], aes_base_reg + DTHE_P_AES_KEY1_5); 138 + } 139 + if (ctx->keylen == AES_KEYSIZE_256) { 140 + writel_relaxed(ctx->key[6], aes_base_reg + DTHE_P_AES_KEY1_6); 141 + writel_relaxed(ctx->key[7], aes_base_reg + DTHE_P_AES_KEY1_7); 142 + } 143 + 144 + if (rctx->enc) 145 + ctrl_val |= DTHE_AES_CTRL_DIR_ENC; 146 + 147 + if (ctx->keylen == AES_KEYSIZE_128) 148 + ctrl_val |= DTHE_AES_CTRL_KEYSIZE_16B; 149 + else if (ctx->keylen == AES_KEYSIZE_192) 150 + ctrl_val |= DTHE_AES_CTRL_KEYSIZE_24B; 151 + else 152 + ctrl_val |= DTHE_AES_CTRL_KEYSIZE_32B; 153 + 154 + // Write AES mode 155 + ctrl_val &= DTHE_AES_CTRL_MODE_CLEAR_MASK; 156 + switch (ctx->aes_mode) { 157 + case DTHE_AES_ECB: 158 + ctrl_val |= AES_CTRL_ECB_MASK; 159 + break; 160 + case DTHE_AES_CBC: 161 + ctrl_val |= AES_CTRL_CBC_MASK; 162 + break; 163 + } 164 + 165 + if (iv_in) { 166 + ctrl_val |= DTHE_AES_CTRL_SAVE_CTX_SET; 167 + for (int i = 0; i < AES_IV_WORDS; ++i) 168 + writel_relaxed(iv_in[i], 169 + aes_base_reg + DTHE_P_AES_IV_IN_0 + (DTHE_REG_SIZE * i)); 170 + } 171 + 172 + writel_relaxed(ctrl_val, aes_base_reg + DTHE_P_AES_CTRL); 173 + } 174 + 175 + static void dthe_aes_dma_in_callback(void *data) 176 + { 177 + struct skcipher_request *req = (struct skcipher_request *)data; 178 + struct dthe_aes_req_ctx *rctx = skcipher_request_ctx(req); 179 + 180 + complete(&rctx->aes_compl); 181 + } 182 + 183 + static int dthe_aes_run(struct crypto_engine *engine, void *areq) 184 + { 185 + struct skcipher_request *req = container_of(areq, struct skcipher_request, base); 186 + struct dthe_tfm_ctx *ctx = 
crypto_skcipher_ctx(crypto_skcipher_reqtfm(req)); 187 + struct dthe_data *dev_data = dthe_get_dev(ctx); 188 + struct dthe_aes_req_ctx *rctx = skcipher_request_ctx(req); 189 + 190 + unsigned int len = req->cryptlen; 191 + struct scatterlist *src = req->src; 192 + struct scatterlist *dst = req->dst; 193 + 194 + int src_nents = sg_nents_for_len(src, len); 195 + int dst_nents; 196 + 197 + int src_mapped_nents; 198 + int dst_mapped_nents; 199 + 200 + bool diff_dst; 201 + enum dma_data_direction src_dir, dst_dir; 202 + 203 + struct device *tx_dev, *rx_dev; 204 + struct dma_async_tx_descriptor *desc_in, *desc_out; 205 + 206 + int ret; 207 + 208 + void __iomem *aes_base_reg = dev_data->regs + DTHE_P_AES_BASE; 209 + 210 + u32 aes_irqenable_val = readl_relaxed(aes_base_reg + DTHE_P_AES_IRQENABLE); 211 + u32 aes_sysconfig_val = readl_relaxed(aes_base_reg + DTHE_P_AES_SYSCONFIG); 212 + 213 + aes_sysconfig_val |= DTHE_AES_SYSCONFIG_DMA_DATA_IN_OUT_EN; 214 + writel_relaxed(aes_sysconfig_val, aes_base_reg + DTHE_P_AES_SYSCONFIG); 215 + 216 + aes_irqenable_val |= DTHE_AES_IRQENABLE_EN_ALL; 217 + writel_relaxed(aes_irqenable_val, aes_base_reg + DTHE_P_AES_IRQENABLE); 218 + 219 + if (src == dst) { 220 + diff_dst = false; 221 + src_dir = DMA_BIDIRECTIONAL; 222 + dst_dir = DMA_BIDIRECTIONAL; 223 + } else { 224 + diff_dst = true; 225 + src_dir = DMA_TO_DEVICE; 226 + dst_dir = DMA_FROM_DEVICE; 227 + } 228 + 229 + tx_dev = dmaengine_get_dma_device(dev_data->dma_aes_tx); 230 + rx_dev = dmaengine_get_dma_device(dev_data->dma_aes_rx); 231 + 232 + src_mapped_nents = dma_map_sg(tx_dev, src, src_nents, src_dir); 233 + if (src_mapped_nents == 0) { 234 + ret = -EINVAL; 235 + goto aes_err; 236 + } 237 + 238 + if (!diff_dst) { 239 + dst_nents = src_nents; 240 + dst_mapped_nents = src_mapped_nents; 241 + } else { 242 + dst_nents = sg_nents_for_len(dst, len); 243 + dst_mapped_nents = dma_map_sg(rx_dev, dst, dst_nents, dst_dir); 244 + if (dst_mapped_nents == 0) { 245 + dma_unmap_sg(tx_dev, src, 
src_nents, src_dir); 246 + ret = -EINVAL; 247 + goto aes_err; 248 + } 249 + } 250 + 251 + desc_in = dmaengine_prep_slave_sg(dev_data->dma_aes_rx, dst, dst_mapped_nents, 252 + DMA_DEV_TO_MEM, DMA_PREP_INTERRUPT | DMA_CTRL_ACK); 253 + if (!desc_in) { 254 + dev_err(dev_data->dev, "IN prep_slave_sg() failed\n"); 255 + ret = -EINVAL; 256 + goto aes_prep_err; 257 + } 258 + 259 + desc_out = dmaengine_prep_slave_sg(dev_data->dma_aes_tx, src, src_mapped_nents, 260 + DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT | DMA_CTRL_ACK); 261 + if (!desc_out) { 262 + dev_err(dev_data->dev, "OUT prep_slave_sg() failed\n"); 263 + ret = -EINVAL; 264 + goto aes_prep_err; 265 + } 266 + 267 + desc_in->callback = dthe_aes_dma_in_callback; 268 + desc_in->callback_param = req; 269 + 270 + init_completion(&rctx->aes_compl); 271 + 272 + if (ctx->aes_mode == DTHE_AES_ECB) 273 + dthe_aes_set_ctrl_key(ctx, rctx, NULL); 274 + else 275 + dthe_aes_set_ctrl_key(ctx, rctx, (u32 *)req->iv); 276 + 277 + writel_relaxed(lower_32_bits(req->cryptlen), aes_base_reg + DTHE_P_AES_C_LENGTH_0); 278 + writel_relaxed(upper_32_bits(req->cryptlen), aes_base_reg + DTHE_P_AES_C_LENGTH_1); 279 + 280 + dmaengine_submit(desc_in); 281 + dmaengine_submit(desc_out); 282 + 283 + dma_async_issue_pending(dev_data->dma_aes_rx); 284 + dma_async_issue_pending(dev_data->dma_aes_tx); 285 + 286 + // Need to do a timeout to ensure finalise gets called if DMA callback fails for any reason 287 + ret = wait_for_completion_timeout(&rctx->aes_compl, msecs_to_jiffies(DTHE_DMA_TIMEOUT_MS)); 288 + if (!ret) { 289 + ret = -ETIMEDOUT; 290 + dmaengine_terminate_sync(dev_data->dma_aes_rx); 291 + dmaengine_terminate_sync(dev_data->dma_aes_tx); 292 + 293 + for (int i = 0; i < AES_BLOCK_WORDS; ++i) 294 + readl_relaxed(aes_base_reg + DTHE_P_AES_DATA_IN_OUT + (DTHE_REG_SIZE * i)); 295 + } else { 296 + ret = 0; 297 + } 298 + 299 + // For modes other than ECB, read IV_OUT 300 + if (ctx->aes_mode != DTHE_AES_ECB) { 301 + u32 *iv_out = (u32 *)req->iv; 302 + 303 + for 
(int i = 0; i < AES_IV_WORDS; ++i) 304 + iv_out[i] = readl_relaxed(aes_base_reg + 305 + DTHE_P_AES_IV_IN_0 + 306 + (DTHE_REG_SIZE * i)); 307 + } 308 + 309 + aes_prep_err: 310 + dma_unmap_sg(tx_dev, src, src_nents, src_dir); 311 + if (dst_dir != DMA_BIDIRECTIONAL) 312 + dma_unmap_sg(rx_dev, dst, dst_nents, dst_dir); 313 + 314 + aes_err: 315 + local_bh_disable(); 316 + crypto_finalize_skcipher_request(dev_data->engine, req, ret); 317 + local_bh_enable(); 318 + return ret; 319 + } 320 + 321 + static int dthe_aes_crypt(struct skcipher_request *req) 322 + { 323 + struct dthe_tfm_ctx *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req)); 324 + struct dthe_data *dev_data = dthe_get_dev(ctx); 325 + struct crypto_engine *engine; 326 + 327 + /* 328 + * If data is not a multiple of AES_BLOCK_SIZE, need to return -EINVAL 329 + * If data length input is zero, no need to do any operation. 330 + */ 331 + if (req->cryptlen % AES_BLOCK_SIZE) 332 + return -EINVAL; 333 + 334 + if (req->cryptlen == 0) 335 + return 0; 336 + 337 + engine = dev_data->engine; 338 + return crypto_transfer_skcipher_request_to_engine(engine, req); 339 + } 340 + 341 + static int dthe_aes_encrypt(struct skcipher_request *req) 342 + { 343 + struct dthe_aes_req_ctx *rctx = skcipher_request_ctx(req); 344 + 345 + rctx->enc = 1; 346 + return dthe_aes_crypt(req); 347 + } 348 + 349 + static int dthe_aes_decrypt(struct skcipher_request *req) 350 + { 351 + struct dthe_aes_req_ctx *rctx = skcipher_request_ctx(req); 352 + 353 + rctx->enc = 0; 354 + return dthe_aes_crypt(req); 355 + } 356 + 357 + static struct skcipher_engine_alg cipher_algs[] = { 358 + { 359 + .base.init = dthe_cipher_init_tfm, 360 + .base.setkey = dthe_aes_ecb_setkey, 361 + .base.encrypt = dthe_aes_encrypt, 362 + .base.decrypt = dthe_aes_decrypt, 363 + .base.min_keysize = AES_MIN_KEY_SIZE, 364 + .base.max_keysize = AES_MAX_KEY_SIZE, 365 + .base.base = { 366 + .cra_name = "ecb(aes)", 367 + .cra_driver_name = "ecb-aes-dthev2", 368 + .cra_priority = 299, 
369 + .cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | 370 + CRYPTO_ALG_KERN_DRIVER_ONLY, 371 + .cra_alignmask = AES_BLOCK_SIZE - 1, 372 + .cra_blocksize = AES_BLOCK_SIZE, 373 + .cra_ctxsize = sizeof(struct dthe_tfm_ctx), 374 + .cra_reqsize = sizeof(struct dthe_aes_req_ctx), 375 + .cra_module = THIS_MODULE, 376 + }, 377 + .op.do_one_request = dthe_aes_run, 378 + }, /* ECB AES */ 379 + { 380 + .base.init = dthe_cipher_init_tfm, 381 + .base.setkey = dthe_aes_cbc_setkey, 382 + .base.encrypt = dthe_aes_encrypt, 383 + .base.decrypt = dthe_aes_decrypt, 384 + .base.min_keysize = AES_MIN_KEY_SIZE, 385 + .base.max_keysize = AES_MAX_KEY_SIZE, 386 + .base.ivsize = AES_IV_SIZE, 387 + .base.base = { 388 + .cra_name = "cbc(aes)", 389 + .cra_driver_name = "cbc-aes-dthev2", 390 + .cra_priority = 299, 391 + .cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | 392 + CRYPTO_ALG_KERN_DRIVER_ONLY, 393 + .cra_alignmask = AES_BLOCK_SIZE - 1, 394 + .cra_blocksize = AES_BLOCK_SIZE, 395 + .cra_ctxsize = sizeof(struct dthe_tfm_ctx), 396 + .cra_reqsize = sizeof(struct dthe_aes_req_ctx), 397 + .cra_module = THIS_MODULE, 398 + }, 399 + .op.do_one_request = dthe_aes_run, 400 + } /* CBC AES */ 401 + }; 402 + 403 + int dthe_register_aes_algs(void) 404 + { 405 + return crypto_engine_register_skciphers(cipher_algs, ARRAY_SIZE(cipher_algs)); 406 + } 407 + 408 + void dthe_unregister_aes_algs(void) 409 + { 410 + crypto_engine_unregister_skciphers(cipher_algs, ARRAY_SIZE(cipher_algs)); 411 + }
+217
drivers/crypto/ti/dthev2-common.c
··· 1 + // SPDX-License-Identifier: GPL-2.0-only 2 + /* 3 + * K3 DTHE V2 crypto accelerator driver 4 + * 5 + * Copyright (C) Texas Instruments 2025 - https://www.ti.com 6 + * Author: T Pratham <t-pratham@ti.com> 7 + */ 8 + 9 + #include <crypto/aes.h> 10 + #include <crypto/algapi.h> 11 + #include <crypto/engine.h> 12 + #include <crypto/internal/aead.h> 13 + #include <crypto/internal/skcipher.h> 14 + 15 + #include "dthev2-common.h" 16 + 17 + #include <linux/delay.h> 18 + #include <linux/dmaengine.h> 19 + #include <linux/dmapool.h> 20 + #include <linux/dma-mapping.h> 21 + #include <linux/io.h> 22 + #include <linux/kernel.h> 23 + #include <linux/module.h> 24 + #include <linux/mod_devicetable.h> 25 + #include <linux/platform_device.h> 26 + #include <linux/scatterlist.h> 27 + 28 + #define DRIVER_NAME "dthev2" 29 + 30 + static struct dthe_list dthe_dev_list = { 31 + .dev_list = LIST_HEAD_INIT(dthe_dev_list.dev_list), 32 + .lock = __SPIN_LOCK_UNLOCKED(dthe_dev_list.lock), 33 + }; 34 + 35 + struct dthe_data *dthe_get_dev(struct dthe_tfm_ctx *ctx) 36 + { 37 + struct dthe_data *dev_data; 38 + 39 + if (ctx->dev_data) 40 + return ctx->dev_data; 41 + 42 + spin_lock_bh(&dthe_dev_list.lock); 43 + dev_data = list_first_entry(&dthe_dev_list.dev_list, struct dthe_data, list); 44 + if (dev_data) 45 + list_move_tail(&dev_data->list, &dthe_dev_list.dev_list); 46 + spin_unlock_bh(&dthe_dev_list.lock); 47 + 48 + return dev_data; 49 + } 50 + 51 + static int dthe_dma_init(struct dthe_data *dev_data) 52 + { 53 + int ret; 54 + struct dma_slave_config cfg; 55 + 56 + dev_data->dma_aes_rx = NULL; 57 + dev_data->dma_aes_tx = NULL; 58 + dev_data->dma_sha_tx = NULL; 59 + 60 + dev_data->dma_aes_rx = dma_request_chan(dev_data->dev, "rx"); 61 + if (IS_ERR(dev_data->dma_aes_rx)) { 62 + return dev_err_probe(dev_data->dev, PTR_ERR(dev_data->dma_aes_rx), 63 + "Unable to request rx DMA channel\n"); 64 + } 65 + 66 + dev_data->dma_aes_tx = dma_request_chan(dev_data->dev, "tx1"); 67 + if 
(IS_ERR(dev_data->dma_aes_tx)) { 68 + ret = dev_err_probe(dev_data->dev, PTR_ERR(dev_data->dma_aes_tx), 69 + "Unable to request tx1 DMA channel\n"); 70 + goto err_dma_aes_tx; 71 + } 72 + 73 + dev_data->dma_sha_tx = dma_request_chan(dev_data->dev, "tx2"); 74 + if (IS_ERR(dev_data->dma_sha_tx)) { 75 + ret = dev_err_probe(dev_data->dev, PTR_ERR(dev_data->dma_sha_tx), 76 + "Unable to request tx2 DMA channel\n"); 77 + goto err_dma_sha_tx; 78 + } 79 + 80 + memzero_explicit(&cfg, sizeof(cfg)); 81 + 82 + cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES; 83 + cfg.src_maxburst = 4; 84 + 85 + ret = dmaengine_slave_config(dev_data->dma_aes_rx, &cfg); 86 + if (ret) { 87 + dev_err(dev_data->dev, "Can't configure IN dmaengine slave: %d\n", ret); 88 + goto err_dma_config; 89 + } 90 + 91 + cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES; 92 + cfg.dst_maxburst = 4; 93 + 94 + ret = dmaengine_slave_config(dev_data->dma_aes_tx, &cfg); 95 + if (ret) { 96 + dev_err(dev_data->dev, "Can't configure OUT dmaengine slave: %d\n", ret); 97 + goto err_dma_config; 98 + } 99 + 100 + return 0; 101 + 102 + err_dma_config: 103 + dma_release_channel(dev_data->dma_sha_tx); 104 + err_dma_sha_tx: 105 + dma_release_channel(dev_data->dma_aes_tx); 106 + err_dma_aes_tx: 107 + dma_release_channel(dev_data->dma_aes_rx); 108 + 109 + return ret; 110 + } 111 + 112 + static int dthe_register_algs(void) 113 + { 114 + return dthe_register_aes_algs(); 115 + } 116 + 117 + static void dthe_unregister_algs(void) 118 + { 119 + dthe_unregister_aes_algs(); 120 + } 121 + 122 + static int dthe_probe(struct platform_device *pdev) 123 + { 124 + struct device *dev = &pdev->dev; 125 + struct dthe_data *dev_data; 126 + int ret; 127 + 128 + dev_data = devm_kzalloc(dev, sizeof(*dev_data), GFP_KERNEL); 129 + if (!dev_data) 130 + return -ENOMEM; 131 + 132 + dev_data->dev = dev; 133 + dev_data->regs = devm_platform_ioremap_resource(pdev, 0); 134 + if (IS_ERR(dev_data->regs)) 135 + return PTR_ERR(dev_data->regs); 136 + 137 + 
platform_set_drvdata(pdev, dev_data); 138 + 139 + spin_lock(&dthe_dev_list.lock); 140 + list_add(&dev_data->list, &dthe_dev_list.dev_list); 141 + spin_unlock(&dthe_dev_list.lock); 142 + 143 + ret = dthe_dma_init(dev_data); 144 + if (ret) 145 + goto probe_dma_err; 146 + 147 + dev_data->engine = crypto_engine_alloc_init(dev, 1); 148 + if (!dev_data->engine) { 149 + ret = -ENOMEM; 150 + goto probe_engine_err; 151 + } 152 + 153 + ret = crypto_engine_start(dev_data->engine); 154 + if (ret) { 155 + dev_err(dev, "Failed to start crypto engine\n"); 156 + goto probe_engine_start_err; 157 + } 158 + 159 + ret = dthe_register_algs(); 160 + if (ret) { 161 + dev_err(dev, "Failed to register algs\n"); 162 + goto probe_engine_start_err; 163 + } 164 + 165 + return 0; 166 + 167 + probe_engine_start_err: 168 + crypto_engine_exit(dev_data->engine); 169 + probe_engine_err: 170 + dma_release_channel(dev_data->dma_aes_rx); 171 + dma_release_channel(dev_data->dma_aes_tx); 172 + dma_release_channel(dev_data->dma_sha_tx); 173 + probe_dma_err: 174 + spin_lock(&dthe_dev_list.lock); 175 + list_del(&dev_data->list); 176 + spin_unlock(&dthe_dev_list.lock); 177 + 178 + return ret; 179 + } 180 + 181 + static void dthe_remove(struct platform_device *pdev) 182 + { 183 + struct dthe_data *dev_data = platform_get_drvdata(pdev); 184 + 185 + spin_lock(&dthe_dev_list.lock); 186 + list_del(&dev_data->list); 187 + spin_unlock(&dthe_dev_list.lock); 188 + 189 + dthe_unregister_algs(); 190 + 191 + crypto_engine_exit(dev_data->engine); 192 + 193 + dma_release_channel(dev_data->dma_aes_rx); 194 + dma_release_channel(dev_data->dma_aes_tx); 195 + dma_release_channel(dev_data->dma_sha_tx); 196 + } 197 + 198 + static const struct of_device_id dthe_of_match[] = { 199 + { .compatible = "ti,am62l-dthev2", }, 200 + {}, 201 + }; 202 + MODULE_DEVICE_TABLE(of, dthe_of_match); 203 + 204 + static struct platform_driver dthe_driver = { 205 + .probe = dthe_probe, 206 + .remove = dthe_remove, 207 + .driver = { 208 + .name = 
DRIVER_NAME, 209 + .of_match_table = dthe_of_match, 210 + }, 211 + }; 212 + 213 + module_platform_driver(dthe_driver); 214 + 215 + MODULE_AUTHOR("T Pratham <t-pratham@ti.com>"); 216 + MODULE_DESCRIPTION("Texas Instruments DTHE V2 driver"); 217 + MODULE_LICENSE("GPL");
+101
drivers/crypto/ti/dthev2-common.h
··· 1 + /* SPDX-License-Identifier: GPL-2.0-only */ 2 + /* 3 + * K3 DTHE V2 crypto accelerator driver 4 + * 5 + * Copyright (C) Texas Instruments 2025 - https://www.ti.com 6 + * Author: T Pratham <t-pratham@ti.com> 7 + */ 8 + 9 + #ifndef __TI_DTHEV2_H__ 10 + #define __TI_DTHEV2_H__ 11 + 12 + #include <crypto/aead.h> 13 + #include <crypto/aes.h> 14 + #include <crypto/algapi.h> 15 + #include <crypto/engine.h> 16 + #include <crypto/hash.h> 17 + #include <crypto/internal/aead.h> 18 + #include <crypto/internal/hash.h> 19 + #include <crypto/internal/skcipher.h> 20 + 21 + #include <linux/delay.h> 22 + #include <linux/dmaengine.h> 23 + #include <linux/dmapool.h> 24 + #include <linux/dma-mapping.h> 25 + #include <linux/io.h> 26 + #include <linux/scatterlist.h> 27 + 28 + #define DTHE_REG_SIZE 4 29 + #define DTHE_DMA_TIMEOUT_MS 2000 30 + 31 + enum dthe_aes_mode { 32 + DTHE_AES_ECB = 0, 33 + DTHE_AES_CBC, 34 + }; 35 + 36 + /* Driver specific struct definitions */ 37 + 38 + /** 39 + * struct dthe_data - DTHE_V2 driver instance data 40 + * @dev: Device pointer 41 + * @regs: Base address of the register space 42 + * @list: list node for dev 43 + * @engine: Crypto engine instance 44 + * @dma_aes_rx: AES Rx DMA Channel 45 + * @dma_aes_tx: AES Tx DMA Channel 46 + * @dma_sha_tx: SHA Tx DMA Channel 47 + */ 48 + struct dthe_data { 49 + struct device *dev; 50 + void __iomem *regs; 51 + struct list_head list; 52 + struct crypto_engine *engine; 53 + 54 + struct dma_chan *dma_aes_rx; 55 + struct dma_chan *dma_aes_tx; 56 + 57 + struct dma_chan *dma_sha_tx; 58 + }; 59 + 60 + /** 61 + * struct dthe_list - device data list head 62 + * @dev_list: linked list head 63 + * @lock: Spinlock protecting accesses to the list 64 + */ 65 + struct dthe_list { 66 + struct list_head dev_list; 67 + spinlock_t lock; 68 + }; 69 + 70 + /** 71 + * struct dthe_tfm_ctx - Transform ctx struct containing ctx for all sub-components of DTHE V2 72 + * @dev_data: Device data struct pointer 73 + * @keylen: AES key length 
74 + * @key: AES key 75 + * @aes_mode: AES mode 76 + */ 77 + struct dthe_tfm_ctx { 78 + struct dthe_data *dev_data; 79 + unsigned int keylen; 80 + u32 key[AES_KEYSIZE_256 / sizeof(u32)]; 81 + enum dthe_aes_mode aes_mode; 82 + }; 83 + 84 + /** 85 + * struct dthe_aes_req_ctx - AES engine req ctx struct 86 + * @enc: flag indicating encryption or decryption operation 87 + * @aes_compl: Completion variable for use in manual completion in case of DMA callback failure 88 + */ 89 + struct dthe_aes_req_ctx { 90 + int enc; 91 + struct completion aes_compl; 92 + }; 93 + 94 + /* Struct definitions end */ 95 + 96 + struct dthe_data *dthe_get_dev(struct dthe_tfm_ctx *ctx); 97 + 98 + int dthe_register_aes_algs(void); 99 + void dthe_unregister_aes_algs(void); 100 + 101 + #endif