// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (C) STMicroelectronics SA 2017
 * Author: Fabien Dessenne <fabien.dessenne@st.com>
 * Ux500 support taken from snippets in the old Ux500 cryp driver
 */

#include <crypto/aes.h>
#include <crypto/engine.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/des.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/bottom_half.h>
#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/err.h>
#include <linux/iopoll.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>
#include <linux/string.h>

#define DRIVER_NAME "stm32-cryp"

/* Bit [0] encrypt / decrypt */
#define FLG_ENCRYPT BIT(0)
/* Bit [8..1] algo & operation mode */
#define FLG_AES BIT(1)
#define FLG_DES BIT(2)
#define FLG_TDES BIT(3)
#define FLG_ECB BIT(4)
#define FLG_CBC BIT(5)
#define FLG_CTR BIT(6)
#define FLG_GCM BIT(7)
#define FLG_CCM BIT(8)
/* Mode mask = bits [15..0] */
#define FLG_MODE_MASK GENMASK(15, 0)
/* Bit [31..16] status */
#define FLG_IN_OUT_DMA BIT(16)
#define FLG_HEADER_DMA BIT(17)

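/*
 * Example: an AES-GCM encryption request reaches the driver with
 * mode = FLG_AES | FLG_GCM | FLG_ENCRYPT (see stm32_cryp_aes_gcm_encrypt()),
 * while the upper status bits (FLG_IN_OUT_DMA, FLG_HEADER_DMA) are only set
 * once the driver has decided whether the transfer can use DMA.
 */
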
/* Registers */
#define CRYP_CR 0x00000000
#define CRYP_SR 0x00000004
#define CRYP_DIN 0x00000008
#define CRYP_DOUT 0x0000000C
#define CRYP_DMACR 0x00000010
#define CRYP_IMSCR 0x00000014
#define CRYP_RISR 0x00000018
#define CRYP_MISR 0x0000001C
#define CRYP_K0LR 0x00000020
#define CRYP_K0RR 0x00000024
#define CRYP_K1LR 0x00000028
#define CRYP_K1RR 0x0000002C
#define CRYP_K2LR 0x00000030
#define CRYP_K2RR 0x00000034
#define CRYP_K3LR 0x00000038
#define CRYP_K3RR 0x0000003C
#define CRYP_IV0LR 0x00000040
#define CRYP_IV0RR 0x00000044
#define CRYP_IV1LR 0x00000048
#define CRYP_IV1RR 0x0000004C
#define CRYP_CSGCMCCM0R 0x00000050
#define CRYP_CSGCM0R 0x00000070

#define UX500_CRYP_CR 0x00000000
#define UX500_CRYP_SR 0x00000004
#define UX500_CRYP_DIN 0x00000008
#define UX500_CRYP_DINSIZE 0x0000000C
#define UX500_CRYP_DOUT 0x00000010
#define UX500_CRYP_DOUSIZE 0x00000014
#define UX500_CRYP_DMACR 0x00000018
#define UX500_CRYP_IMSC 0x0000001C
#define UX500_CRYP_RIS 0x00000020
#define UX500_CRYP_MIS 0x00000024
#define UX500_CRYP_K1L 0x00000028
#define UX500_CRYP_K1R 0x0000002C
#define UX500_CRYP_K2L 0x00000030
#define UX500_CRYP_K2R 0x00000034
#define UX500_CRYP_K3L 0x00000038
#define UX500_CRYP_K3R 0x0000003C
#define UX500_CRYP_K4L 0x00000040
#define UX500_CRYP_K4R 0x00000044
#define UX500_CRYP_IV0L 0x00000048
#define UX500_CRYP_IV0R 0x0000004C
#define UX500_CRYP_IV1L 0x00000050
#define UX500_CRYP_IV1R 0x00000054

/* Register values */
#define CR_DEC_NOT_ENC 0x00000004
#define CR_TDES_ECB 0x00000000
#define CR_TDES_CBC 0x00000008
#define CR_DES_ECB 0x00000010
#define CR_DES_CBC 0x00000018
#define CR_AES_ECB 0x00000020
#define CR_AES_CBC 0x00000028
#define CR_AES_CTR 0x00000030
#define CR_AES_KP 0x00000038 /* Not on Ux500 */
#define CR_AES_XTS 0x00000038 /* Only on Ux500 */
#define CR_AES_GCM 0x00080000
#define CR_AES_CCM 0x00080008
#define CR_AES_UNKNOWN 0xFFFFFFFF
#define CR_ALGO_MASK 0x00080038
#define CR_DATA32 0x00000000
#define CR_DATA16 0x00000040
#define CR_DATA8 0x00000080
#define CR_DATA1 0x000000C0
#define CR_KEY128 0x00000000
#define CR_KEY192 0x00000100
#define CR_KEY256 0x00000200
#define CR_KEYRDEN 0x00000400 /* Only on Ux500 */
#define CR_KSE 0x00000800 /* Only on Ux500 */
#define CR_FFLUSH 0x00004000
#define CR_CRYPEN 0x00008000
#define CR_PH_INIT 0x00000000
#define CR_PH_HEADER 0x00010000
#define CR_PH_PAYLOAD 0x00020000
#define CR_PH_FINAL 0x00030000
#define CR_PH_MASK 0x00030000
#define CR_NBPBL_SHIFT 20

#define SR_IFNF BIT(1)
#define SR_OFNE BIT(2)
#define SR_BUSY BIT(8)

#define DMACR_DIEN BIT(0)
#define DMACR_DOEN BIT(1)

#define IMSCR_IN BIT(0)
#define IMSCR_OUT BIT(1)

#define MISR_IN BIT(0)
#define MISR_OUT BIT(1)

/* Misc */
#define AES_BLOCK_32 (AES_BLOCK_SIZE / sizeof(u32))
#define GCM_CTR_INIT 2
#define CRYP_AUTOSUSPEND_DELAY 50

#define CRYP_DMA_BURST_REG 4

enum stm32_dma_mode {
	NO_DMA,
	DMA_PLAIN_SG,
	DMA_NEED_SG_TRUNC
};

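/*
 * Per-variant capabilities and register layout: the u32 members hold the
 * offsets of the CRYP registers, which differ between the STM32 flavours
 * (CRYP_xxx above) and the Ux500 (UX500_CRYP_xxx above).
 */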
struct stm32_cryp_caps {
	bool aeads_support;
	bool linear_aes_key;
	bool kp_mode;
	bool iv_protection;
	bool swap_final;
	bool padding_wa;
	u32 cr;
	u32 sr;
	u32 din;
	u32 dout;
	u32 dmacr;
	u32 imsc;
	u32 mis;
	u32 k1l;
	u32 k1r;
	u32 k3r;
	u32 iv0l;
	u32 iv0r;
	u32 iv1l;
	u32 iv1r;
};

struct stm32_cryp_ctx {
	struct stm32_cryp *cryp;
	int keylen;
	__be32 key[AES_KEYSIZE_256 / sizeof(u32)];
	unsigned long flags;
};

struct stm32_cryp_reqctx {
	unsigned long mode;
};

struct stm32_cryp {
	struct list_head list;
	struct device *dev;
	void __iomem *regs;
	phys_addr_t phys_base;
	struct clk *clk;
	unsigned long flags;
	u32 irq_status;
	const struct stm32_cryp_caps *caps;
	struct stm32_cryp_ctx *ctx;

	struct crypto_engine *engine;

	struct skcipher_request *req;
	struct aead_request *areq;

	size_t authsize;
	size_t hw_blocksize;

	size_t payload_in;
	size_t header_in;
	size_t payload_out;

	/* DMA process fields */
	struct scatterlist *in_sg;
	struct scatterlist *header_sg;
	struct scatterlist *out_sg;
	size_t in_sg_len;
	size_t header_sg_len;
	size_t out_sg_len;
	struct completion dma_completion;

	struct dma_chan *dma_lch_in;
	struct dma_chan *dma_lch_out;
	enum stm32_dma_mode dma_mode;

	/* IT process fields */
	struct scatter_walk in_walk;
	struct scatter_walk out_walk;

	__be32 last_ctr[4];
	u32 gcm_ctr;
};

struct stm32_cryp_list {
	struct list_head dev_list;
	spinlock_t lock; /* protect dev_list */
};

static struct stm32_cryp_list cryp_list = {
	.dev_list = LIST_HEAD_INIT(cryp_list.dev_list),
	.lock = __SPIN_LOCK_UNLOCKED(cryp_list.lock),
};

static inline bool is_aes(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_AES;
}

static inline bool is_des(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_DES;
}

static inline bool is_tdes(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_TDES;
}

static inline bool is_ecb(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_ECB;
}

static inline bool is_cbc(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CBC;
}

static inline bool is_ctr(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CTR;
}

static inline bool is_gcm(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_GCM;
}

static inline bool is_ccm(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CCM;
}

static inline bool is_encrypt(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_ENCRYPT;
}

static inline bool is_decrypt(struct stm32_cryp *cryp)
{
	return !is_encrypt(cryp);
}

static inline u32 stm32_cryp_read(struct stm32_cryp *cryp, u32 ofst)
{
	return readl_relaxed(cryp->regs + ofst);
}

static inline void stm32_cryp_write(struct stm32_cryp *cryp, u32 ofst, u32 val)
{
	writel_relaxed(val, cryp->regs + ofst);
}

static inline int stm32_cryp_wait_busy(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + cryp->caps->sr, status,
					  !(status & SR_BUSY), 10, 100000);
}

static inline void stm32_cryp_enable(struct stm32_cryp *cryp)
{
	writel_relaxed(readl_relaxed(cryp->regs + cryp->caps->cr) | CR_CRYPEN,
		       cryp->regs + cryp->caps->cr);
}

static inline int stm32_cryp_wait_enable(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + cryp->caps->cr, status,
					  !(status & CR_CRYPEN), 10, 100000);
}

static inline int stm32_cryp_wait_input(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout_atomic(cryp->regs + cryp->caps->sr, status,
						 status & SR_IFNF, 1, 10);
}

static inline int stm32_cryp_wait_output(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout_atomic(cryp->regs + cryp->caps->sr, status,
						 status & SR_OFNE, 1, 10);
}

static inline void stm32_cryp_key_read_enable(struct stm32_cryp *cryp)
{
	writel_relaxed(readl_relaxed(cryp->regs + cryp->caps->cr) | CR_KEYRDEN,
		       cryp->regs + cryp->caps->cr);
}

static inline void stm32_cryp_key_read_disable(struct stm32_cryp *cryp)
{
	writel_relaxed(readl_relaxed(cryp->regs + cryp->caps->cr) & ~CR_KEYRDEN,
		       cryp->regs + cryp->caps->cr);
}

static void stm32_cryp_irq_read_data(struct stm32_cryp *cryp);
static void stm32_cryp_irq_write_data(struct stm32_cryp *cryp);
static void stm32_cryp_irq_write_gcmccm_header(struct stm32_cryp *cryp);
static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp);
static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err);
static int stm32_cryp_dma_start(struct stm32_cryp *cryp);
static int stm32_cryp_it_start(struct stm32_cryp *cryp);

static struct stm32_cryp *stm32_cryp_find_dev(struct stm32_cryp_ctx *ctx)
{
	struct stm32_cryp *tmp, *cryp = NULL;

	spin_lock_bh(&cryp_list.lock);
	if (!ctx->cryp) {
		list_for_each_entry(tmp, &cryp_list.dev_list, list) {
			cryp = tmp;
			break;
		}
		ctx->cryp = cryp;
	} else {
		cryp = ctx->cryp;
	}

	spin_unlock_bh(&cryp_list.lock);

	return cryp;
}

static void stm32_cryp_hw_write_iv(struct stm32_cryp *cryp, __be32 *iv)
{
	if (!iv)
		return;

	stm32_cryp_write(cryp, cryp->caps->iv0l, be32_to_cpu(*iv++));
	stm32_cryp_write(cryp, cryp->caps->iv0r, be32_to_cpu(*iv++));

	if (is_aes(cryp)) {
		stm32_cryp_write(cryp, cryp->caps->iv1l, be32_to_cpu(*iv++));
		stm32_cryp_write(cryp, cryp->caps->iv1r, be32_to_cpu(*iv++));
	}
}

static void stm32_cryp_get_iv(struct stm32_cryp *cryp)
{
	struct skcipher_request *req = cryp->req;
	__be32 *tmp = (void *)req->iv;

	if (!tmp)
		return;

	if (cryp->caps->iv_protection)
		stm32_cryp_key_read_enable(cryp);

	*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0l));
	*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0r));

	if (is_aes(cryp)) {
		*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1l));
		*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1r));
	}

	if (cryp->caps->iv_protection)
		stm32_cryp_key_read_disable(cryp);
}

/**
 * ux500_swap_bits_in_byte() - mirror the bits in a byte
 * @b: the byte to be mirrored
 *
 * The bits are swapped the following way:
 * Byte b includes bits 0-7; nibble 1 (n1) includes bits 0-3 and
 * nibble 2 (n2) bits 4-7.
 *
 * Nibble 1 (n1):
 * (The "old" (moved) bit is replaced with a zero)
 * 1. Move bits 6 and 7, 4 positions to the right.
 * 2. Move bits 3 and 5, 2 positions to the right.
 * 3. Move bits 1-4, 1 position to the right.
 *
 * Nibble 2 (n2):
 * 1. Move bits 0 and 1, 4 positions to the left.
 * 2. Move bits 2 and 4, 2 positions to the left.
 * 3. Move bits 3-6, 1 position to the left.
 *
 * Combine the two nibbles into a complete, mirrored byte.
 */
static inline u8 ux500_swap_bits_in_byte(u8 b)
{
#define R_SHIFT_4_MASK 0xc0 /* Bits 6 and 7, right shift 4 */
#define R_SHIFT_2_MASK 0x28 /* (After right shift 4) Bits 3 and 5,
			       right shift 2 */
#define R_SHIFT_1_MASK 0x1e /* (After right shift 2) Bits 1-4,
			       right shift 1 */
#define L_SHIFT_4_MASK 0x03 /* Bits 0 and 1, left shift 4 */
#define L_SHIFT_2_MASK 0x14 /* (After left shift 4) Bits 2 and 4,
			       left shift 2 */
#define L_SHIFT_1_MASK 0x78 /* (After left shift 2) Bits 3-6,
			       left shift 1 */

	u8 n1;
	u8 n2;

	/* Swap most significant nibble */
	/* Right shift 4, bits 6 and 7 */
	n1 = ((b & R_SHIFT_4_MASK) >> 4) | (b & ~(R_SHIFT_4_MASK >> 4));
	/* Right shift 2, bits 3 and 5 */
	n1 = ((n1 & R_SHIFT_2_MASK) >> 2) | (n1 & ~(R_SHIFT_2_MASK >> 2));
	/* Right shift 1, bits 1-4 */
	n1 = (n1 & R_SHIFT_1_MASK) >> 1;

	/* Swap least significant nibble */
	/* Left shift 4, bits 0 and 1 */
	n2 = ((b & L_SHIFT_4_MASK) << 4) | (b & ~(L_SHIFT_4_MASK << 4));
	/* Left shift 2, bits 2 and 4 */
	n2 = ((n2 & L_SHIFT_2_MASK) << 2) | (n2 & ~(L_SHIFT_2_MASK << 2));
	/* Left shift 1, bits 3-6 */
	n2 = (n2 & L_SHIFT_1_MASK) << 1;

	return n1 | n2;
}
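
/*
 * Worked example: ux500_swap_bits_in_byte(0x01) returns 0x80, i.e. bit 0
 * moves to bit 7. The transform is an involution, so applying it twice
 * gives back the original byte.
 */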

/**
 * ux500_swizzle_key() - Shuffle around words and bits in the AES key
 * @in: key to swizzle
 * @out: swizzled key
 * @len: length of key, in bytes
 *
 * This "key swizzling procedure" is described in the examples in the
 * DB8500 design specification. There is no real description of why
 * the bits have been arranged like this in the hardware.
 */
static inline void ux500_swizzle_key(const u8 *in, u8 *out, u32 len)
{
	int i = 0;
	int bpw = sizeof(u32);
	int j;
	int index = 0;

	j = len - bpw;
	while (j >= 0) {
		for (i = 0; i < bpw; i++) {
			index = len - j - bpw + i;
			out[j + i] =
				ux500_swap_bits_in_byte(in[index]);
		}
		j -= bpw;
	}
}
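
/*
 * For a 16-byte key the net effect is: output word k is built from input
 * word (3 - k), keeping the byte order within the word but passing every
 * byte through ux500_swap_bits_in_byte(). In other words the 32-bit words
 * are reversed and each byte is bit-mirrored.
 */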

static void stm32_cryp_hw_write_key(struct stm32_cryp *c)
{
	unsigned int i;
	int r_id;

	if (is_des(c)) {
		stm32_cryp_write(c, c->caps->k1l, be32_to_cpu(c->ctx->key[0]));
		stm32_cryp_write(c, c->caps->k1r, be32_to_cpu(c->ctx->key[1]));
		return;
	}

	/*
	 * On the Ux500 the AES key is treated as a single bit sequence
	 * of 128, 192 or 256 bits. It is written linearly into the
	 * registers from K1L and down, and needs to be processed to become
	 * a proper big-endian bit sequence.
	 */
	if (is_aes(c) && c->caps->linear_aes_key) {
		u32 tmpkey[8];

		ux500_swizzle_key((u8 *)c->ctx->key,
				  (u8 *)tmpkey, c->ctx->keylen);

		r_id = c->caps->k1l;
		for (i = 0; i < c->ctx->keylen / sizeof(u32); i++, r_id += 4)
			stm32_cryp_write(c, r_id, tmpkey[i]);

		return;
	}

	r_id = c->caps->k3r;
	for (i = c->ctx->keylen / sizeof(u32); i > 0; i--, r_id -= 4)
		stm32_cryp_write(c, r_id, be32_to_cpu(c->ctx->key[i - 1]));
}

static u32 stm32_cryp_get_hw_mode(struct stm32_cryp *cryp)
{
	if (is_aes(cryp) && is_ecb(cryp))
		return CR_AES_ECB;

	if (is_aes(cryp) && is_cbc(cryp))
		return CR_AES_CBC;

	if (is_aes(cryp) && is_ctr(cryp))
		return CR_AES_CTR;

	if (is_aes(cryp) && is_gcm(cryp))
		return CR_AES_GCM;

	if (is_aes(cryp) && is_ccm(cryp))
		return CR_AES_CCM;

	if (is_des(cryp) && is_ecb(cryp))
		return CR_DES_ECB;

	if (is_des(cryp) && is_cbc(cryp))
		return CR_DES_CBC;

	if (is_tdes(cryp) && is_ecb(cryp))
		return CR_TDES_ECB;

	if (is_tdes(cryp) && is_cbc(cryp))
		return CR_TDES_CBC;

	dev_err(cryp->dev, "Unknown mode\n");
	return CR_AES_UNKNOWN;
}

static unsigned int stm32_cryp_get_input_text_len(struct stm32_cryp *cryp)
{
	return is_encrypt(cryp) ? cryp->areq->cryptlen :
				  cryp->areq->cryptlen - cryp->authsize;
}

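/*
 * GCM and CCM run through up to four hardware phases selected via
 * CR_PH_MASK: INIT (derive the hash key / initial counter), HEADER (hash
 * the additional authenticated data), PAYLOAD (en/decrypt the text) and
 * FINAL (produce the authentication tag). Phases with no data are skipped.
 */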
static int stm32_cryp_gcm_init(struct stm32_cryp *cryp, u32 cfg)
{
	int ret;
	__be32 iv[4];

	/* Phase 1 : init */
	memcpy(iv, cryp->areq->iv, 12);
	iv[3] = cpu_to_be32(GCM_CTR_INIT);
	cryp->gcm_ctr = GCM_CTR_INIT;
	stm32_cryp_hw_write_iv(cryp, iv);

	stm32_cryp_write(cryp, cryp->caps->cr, cfg | CR_PH_INIT | CR_CRYPEN);

	/* Wait for end of processing */
	ret = stm32_cryp_wait_enable(cryp);
	if (ret) {
		dev_err(cryp->dev, "Timeout (gcm init)\n");
		return ret;
	}

	/* Prepare next phase */
	if (cryp->areq->assoclen) {
		cfg |= CR_PH_HEADER;
		stm32_cryp_write(cryp, cryp->caps->cr, cfg);
	} else if (stm32_cryp_get_input_text_len(cryp)) {
		cfg |= CR_PH_PAYLOAD;
		stm32_cryp_write(cryp, cryp->caps->cr, cfg);
	}

	return 0;
}

static void stm32_crypt_gcmccm_end_header(struct stm32_cryp *cryp)
{
	u32 cfg;
	int err;

	/* Check if whole header written */
	if (!cryp->header_in) {
		/* Wait for completion */
		err = stm32_cryp_wait_busy(cryp);
		if (err) {
			dev_err(cryp->dev, "Timeout (gcm/ccm header)\n");
			stm32_cryp_write(cryp, cryp->caps->imsc, 0);
			stm32_cryp_finish_req(cryp, err);
			return;
		}

		if (stm32_cryp_get_input_text_len(cryp)) {
			/* Phase 3 : payload */
			cfg = stm32_cryp_read(cryp, cryp->caps->cr);
			cfg &= ~CR_CRYPEN;
			stm32_cryp_write(cryp, cryp->caps->cr, cfg);

			cfg &= ~CR_PH_MASK;
			cfg |= CR_PH_PAYLOAD | CR_CRYPEN;
			stm32_cryp_write(cryp, cryp->caps->cr, cfg);
		} else {
			/*
			 * Phase 4 : tag.
			 * Nothing to read, nothing to write; the caller has
			 * to end the request.
			 */
		}
	}
}

static void stm32_cryp_write_ccm_first_header(struct stm32_cryp *cryp)
{
	size_t written;
	size_t len;
	u32 alen = cryp->areq->assoclen;
	u32 block[AES_BLOCK_32] = {0};
	u8 *b8 = (u8 *)block;

	if (alen <= 65280) {
		/* Write first u32 of B1 */
		b8[0] = (alen >> 8) & 0xFF;
		b8[1] = alen & 0xFF;
		len = 2;
	} else {
		/* Build the first two u32 of B1 */
		b8[0] = 0xFF;
		b8[1] = 0xFE;
		b8[2] = (alen & 0xFF000000) >> 24;
		b8[3] = (alen & 0x00FF0000) >> 16;
		b8[4] = (alen & 0x0000FF00) >> 8;
		b8[5] = alen & 0x000000FF;
		len = 6;
	}

	written = min_t(size_t, AES_BLOCK_SIZE - len, alen);

	memcpy_from_scatterwalk((char *)block + len, &cryp->in_walk, written);

	writesl(cryp->regs + cryp->caps->din, block, AES_BLOCK_32);

	cryp->header_in -= written;

	stm32_crypt_gcmccm_end_header(cryp);
}

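/*
 * The first CCM block B0 follows RFC 3610: the flags byte combines the
 * length-field size already present in iv[0], the encoded tag length and,
 * when there is associated data, the Adata bit. For example, with a
 * 16-byte tag and a non-empty header the code below ORs in
 * 0x40 | 8 * ((16 - 2) / 2) = 0x78. The last two bytes carry the text
 * length.
 */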
static int stm32_cryp_ccm_init(struct stm32_cryp *cryp, u32 cfg)
{
	int ret;
	u32 iv_32[AES_BLOCK_32], b0_32[AES_BLOCK_32];
	u8 *iv = (u8 *)iv_32, *b0 = (u8 *)b0_32;
	__be32 *bd;
	u32 *d;
	unsigned int i, textlen;

	/* Phase 1 : init. First, set the CTR value to 1 (not 0) */
	memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
	memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);
	iv[AES_BLOCK_SIZE - 1] = 1;
	stm32_cryp_hw_write_iv(cryp, (__be32 *)iv);

	/* Build B0 */
	memcpy(b0, iv, AES_BLOCK_SIZE);

	b0[0] |= (8 * ((cryp->authsize - 2) / 2));

	if (cryp->areq->assoclen)
		b0[0] |= 0x40;

	textlen = stm32_cryp_get_input_text_len(cryp);

	b0[AES_BLOCK_SIZE - 2] = textlen >> 8;
	b0[AES_BLOCK_SIZE - 1] = textlen & 0xFF;

	/* Enable HW */
	stm32_cryp_write(cryp, cryp->caps->cr, cfg | CR_PH_INIT | CR_CRYPEN);

	/* Write B0 */
	d = (u32 *)b0;
	bd = (__be32 *)b0;

	for (i = 0; i < AES_BLOCK_32; i++) {
		u32 xd = d[i];

		if (!cryp->caps->padding_wa)
			xd = be32_to_cpu(bd[i]);
		stm32_cryp_write(cryp, cryp->caps->din, xd);
	}

	/* Wait for end of processing */
	ret = stm32_cryp_wait_enable(cryp);
	if (ret) {
		dev_err(cryp->dev, "Timeout (ccm init)\n");
		return ret;
	}

	/* Prepare next phase */
	if (cryp->areq->assoclen) {
		cfg |= CR_PH_HEADER | CR_CRYPEN;
		stm32_cryp_write(cryp, cryp->caps->cr, cfg);

		/* Write first (special) block (may move to next phase [payload]) */
		stm32_cryp_write_ccm_first_header(cryp);
	} else if (stm32_cryp_get_input_text_len(cryp)) {
		cfg |= CR_PH_PAYLOAD;
		stm32_cryp_write(cryp, cryp->caps->cr, cfg);
	}

	return 0;
}

static int stm32_cryp_hw_init(struct stm32_cryp *cryp)
{
	int ret;
	u32 cfg, hw_mode;

	pm_runtime_get_sync(cryp->dev);

	/* Disable interrupt */
	stm32_cryp_write(cryp, cryp->caps->imsc, 0);

	/* Set configuration */
	cfg = CR_DATA8 | CR_FFLUSH;

	switch (cryp->ctx->keylen) {
	case AES_KEYSIZE_128:
		cfg |= CR_KEY128;
		break;

	case AES_KEYSIZE_192:
		cfg |= CR_KEY192;
		break;

	default:
	case AES_KEYSIZE_256:
		cfg |= CR_KEY256;
		break;
	}

	hw_mode = stm32_cryp_get_hw_mode(cryp);
	if (hw_mode == CR_AES_UNKNOWN)
		return -EINVAL;

	/* AES ECB/CBC decrypt: run key preparation first */
	if (is_decrypt(cryp) &&
	    ((hw_mode == CR_AES_ECB) || (hw_mode == CR_AES_CBC))) {
		/* Configure in key preparation mode */
		if (cryp->caps->kp_mode)
			stm32_cryp_write(cryp, cryp->caps->cr,
					 cfg | CR_AES_KP);
		else
			stm32_cryp_write(cryp,
					 cryp->caps->cr, cfg | CR_AES_ECB | CR_KSE);

		/* Set key only after full configuration done */
		stm32_cryp_hw_write_key(cryp);

		/* Start prepare key */
		stm32_cryp_enable(cryp);
		/* Wait for end of processing */
		ret = stm32_cryp_wait_busy(cryp);
		if (ret) {
			dev_err(cryp->dev, "Timeout (key preparation)\n");
			return ret;
		}

		cfg |= hw_mode | CR_DEC_NOT_ENC;

		/* Apply updated config (Decrypt + algo) and flush */
		stm32_cryp_write(cryp, cryp->caps->cr, cfg);
	} else {
		cfg |= hw_mode;
		if (is_decrypt(cryp))
			cfg |= CR_DEC_NOT_ENC;

		/* Apply config and flush */
		stm32_cryp_write(cryp, cryp->caps->cr, cfg);

		/* Set key only after configuration done */
		stm32_cryp_hw_write_key(cryp);
	}

	switch (hw_mode) {
	case CR_AES_GCM:
	case CR_AES_CCM:
		/* Phase 1 : init */
		if (hw_mode == CR_AES_CCM)
			ret = stm32_cryp_ccm_init(cryp, cfg);
		else
			ret = stm32_cryp_gcm_init(cryp, cfg);

		if (ret)
			return ret;

		break;

	case CR_DES_CBC:
	case CR_TDES_CBC:
	case CR_AES_CBC:
	case CR_AES_CTR:
		stm32_cryp_hw_write_iv(cryp, (__be32 *)cryp->req->iv);
		break;

	default:
		break;
	}

	/* Enable now */
	stm32_cryp_enable(cryp);

	return 0;
}

static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err)
{
	if (!err && (is_gcm(cryp) || is_ccm(cryp)))
		/* Phase 4 : output tag */
		err = stm32_cryp_read_auth_tag(cryp);

	if (!err && (!(is_gcm(cryp) || is_ccm(cryp) || is_ecb(cryp))))
		stm32_cryp_get_iv(cryp);

	pm_runtime_put_autosuspend(cryp->dev);

	if (is_gcm(cryp) || is_ccm(cryp))
		crypto_finalize_aead_request(cryp->engine, cryp->areq, err);
	else
		crypto_finalize_skcipher_request(cryp->engine, cryp->req, err);
}

static void stm32_cryp_header_dma_callback(void *param)
{
	struct stm32_cryp *cryp = (struct stm32_cryp *)param;
	int ret;
	u32 reg;

	dma_unmap_sg(cryp->dev, cryp->header_sg, cryp->header_sg_len, DMA_TO_DEVICE);

	reg = stm32_cryp_read(cryp, cryp->caps->dmacr);
	stm32_cryp_write(cryp, cryp->caps->dmacr, reg & ~(DMACR_DOEN | DMACR_DIEN));

	kfree(cryp->header_sg);

	reg = stm32_cryp_read(cryp, cryp->caps->cr);

	if (cryp->header_in) {
		stm32_cryp_write(cryp, cryp->caps->cr, reg | CR_CRYPEN);

		ret = stm32_cryp_wait_input(cryp);
		if (ret) {
			dev_err(cryp->dev, "input header ready timeout after dma\n");
			stm32_cryp_finish_req(cryp, ret);
			return;
		}
		stm32_cryp_irq_write_gcmccm_header(cryp);
		WARN_ON(cryp->header_in);
	}

	if (stm32_cryp_get_input_text_len(cryp)) {
		/* Phase 3 : payload */
		reg = stm32_cryp_read(cryp, cryp->caps->cr);
		stm32_cryp_write(cryp, cryp->caps->cr, reg & ~CR_CRYPEN);

		reg &= ~CR_PH_MASK;
		reg |= CR_PH_PAYLOAD | CR_CRYPEN;
		stm32_cryp_write(cryp, cryp->caps->cr, reg);

		if (cryp->flags & FLG_IN_OUT_DMA) {
			ret = stm32_cryp_dma_start(cryp);
			if (ret)
				stm32_cryp_finish_req(cryp, ret);
		} else {
			stm32_cryp_it_start(cryp);
		}
	} else {
		/*
		 * Phase 4 : tag.
		 * Nothing to read, nothing to write => end request
		 */
		stm32_cryp_finish_req(cryp, 0);
	}
}

static void stm32_cryp_dma_callback(void *param)
{
	struct stm32_cryp *cryp = (struct stm32_cryp *)param;
	int ret;
	u32 reg;

	complete(&cryp->dma_completion); /* completion to indicate no timeout */

	dma_sync_sg_for_device(cryp->dev, cryp->out_sg, cryp->out_sg_len, DMA_FROM_DEVICE);

	if (cryp->in_sg != cryp->out_sg)
		dma_unmap_sg(cryp->dev, cryp->in_sg, cryp->in_sg_len, DMA_TO_DEVICE);

	dma_unmap_sg(cryp->dev, cryp->out_sg, cryp->out_sg_len, DMA_FROM_DEVICE);

	reg = stm32_cryp_read(cryp, cryp->caps->dmacr);
	stm32_cryp_write(cryp, cryp->caps->dmacr, reg & ~(DMACR_DOEN | DMACR_DIEN));

	reg = stm32_cryp_read(cryp, cryp->caps->cr);

	if (is_gcm(cryp) || is_ccm(cryp)) {
		kfree(cryp->in_sg);
		kfree(cryp->out_sg);
	} else {
		if (cryp->in_sg != cryp->req->src)
			kfree(cryp->in_sg);
		if (cryp->out_sg != cryp->req->dst)
			kfree(cryp->out_sg);
	}

	if (cryp->payload_in) {
		stm32_cryp_write(cryp, cryp->caps->cr, reg | CR_CRYPEN);

		ret = stm32_cryp_wait_input(cryp);
		if (ret) {
			dev_err(cryp->dev, "input ready timeout after dma\n");
			stm32_cryp_finish_req(cryp, ret);
			return;
		}
		stm32_cryp_irq_write_data(cryp);

		ret = stm32_cryp_wait_output(cryp);
		if (ret) {
			dev_err(cryp->dev, "output ready timeout after dma\n");
			stm32_cryp_finish_req(cryp, ret);
			return;
		}
		stm32_cryp_irq_read_data(cryp);
	}

	stm32_cryp_finish_req(cryp, 0);
}

static int stm32_cryp_header_dma_start(struct stm32_cryp *cryp)
{
	int ret;
	struct dma_async_tx_descriptor *tx_in;
	u32 reg;
	size_t align_size;

	ret = dma_map_sg(cryp->dev, cryp->header_sg, cryp->header_sg_len, DMA_TO_DEVICE);
	if (!ret) {
		dev_err(cryp->dev, "dma_map_sg() error\n");
		return -ENOMEM;
	}

	dma_sync_sg_for_device(cryp->dev, cryp->header_sg, cryp->header_sg_len, DMA_TO_DEVICE);

	tx_in = dmaengine_prep_slave_sg(cryp->dma_lch_in, cryp->header_sg, cryp->header_sg_len,
					DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!tx_in) {
		dev_err(cryp->dev, "IN prep_slave_sg() failed\n");
		return -EINVAL;
	}

	tx_in->callback_param = cryp;
	tx_in->callback = stm32_cryp_header_dma_callback;

	/* Advance the scatterwalk past the data handled by DMA */
	align_size = ALIGN_DOWN(cryp->header_in, cryp->hw_blocksize);
	scatterwalk_skip(&cryp->in_walk, align_size);
	cryp->header_in -= align_size;

	ret = dma_submit_error(dmaengine_submit(tx_in));
	if (ret < 0) {
		dev_err(cryp->dev, "DMA in submit failed\n");
		return ret;
	}
	dma_async_issue_pending(cryp->dma_lch_in);

	reg = stm32_cryp_read(cryp, cryp->caps->dmacr);
	stm32_cryp_write(cryp, cryp->caps->dmacr, reg | DMACR_DIEN);

	return 0;
}

static int stm32_cryp_dma_start(struct stm32_cryp *cryp)
{
	int ret;
	size_t align_size;
	struct dma_async_tx_descriptor *tx_in, *tx_out;
	u32 reg;

	if (cryp->in_sg != cryp->out_sg) {
		ret = dma_map_sg(cryp->dev, cryp->in_sg, cryp->in_sg_len, DMA_TO_DEVICE);
		if (!ret) {
			dev_err(cryp->dev, "dma_map_sg() error\n");
			return -ENOMEM;
		}
	}

	ret = dma_map_sg(cryp->dev, cryp->out_sg, cryp->out_sg_len, DMA_FROM_DEVICE);
	if (!ret) {
		dev_err(cryp->dev, "dma_map_sg() error\n");
		return -ENOMEM;
	}

	dma_sync_sg_for_device(cryp->dev, cryp->in_sg, cryp->in_sg_len, DMA_TO_DEVICE);

	tx_in = dmaengine_prep_slave_sg(cryp->dma_lch_in, cryp->in_sg, cryp->in_sg_len,
					DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!tx_in) {
		dev_err(cryp->dev, "IN prep_slave_sg() failed\n");
		return -EINVAL;
	}

	/* No callback necessary */
	tx_in->callback_param = cryp;
	tx_in->callback = NULL;

	tx_out = dmaengine_prep_slave_sg(cryp->dma_lch_out, cryp->out_sg, cryp->out_sg_len,
					 DMA_DEV_TO_MEM, DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!tx_out) {
		dev_err(cryp->dev, "OUT prep_slave_sg() failed\n");
		return -EINVAL;
	}

	reinit_completion(&cryp->dma_completion);
	tx_out->callback = stm32_cryp_dma_callback;
	tx_out->callback_param = cryp;

	/* Advance the scatterwalk past the data handled by DMA */
	align_size = ALIGN_DOWN(cryp->payload_in, cryp->hw_blocksize);
	scatterwalk_skip(&cryp->in_walk, align_size);
	cryp->payload_in -= align_size;

	ret = dma_submit_error(dmaengine_submit(tx_in));
	if (ret < 0) {
		dev_err(cryp->dev, "DMA in submit failed\n");
		return ret;
	}
	dma_async_issue_pending(cryp->dma_lch_in);

	/* Advance the scatterwalk past the data handled by DMA */
	scatterwalk_skip(&cryp->out_walk, align_size);
	cryp->payload_out -= align_size;
	ret = dma_submit_error(dmaengine_submit(tx_out));
	if (ret < 0) {
		dev_err(cryp->dev, "DMA out submit failed\n");
		return ret;
	}
	dma_async_issue_pending(cryp->dma_lch_out);

	reg = stm32_cryp_read(cryp, cryp->caps->dmacr);
	stm32_cryp_write(cryp, cryp->caps->dmacr, reg | DMACR_DOEN | DMACR_DIEN);

	if (!wait_for_completion_timeout(&cryp->dma_completion, msecs_to_jiffies(1000))) {
		dev_err(cryp->dev, "DMA out timed out\n");
		dmaengine_terminate_sync(cryp->dma_lch_out);
		return -ETIMEDOUT;
	}

	return 0;
}

static int stm32_cryp_it_start(struct stm32_cryp *cryp)
{
	/* Enable interrupt and let the IRQ handler do everything */
	stm32_cryp_write(cryp, cryp->caps->imsc, IMSCR_IN | IMSCR_OUT);

	return 0;
}

static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq);

static int stm32_cryp_init_tfm(struct crypto_skcipher *tfm)
{
	crypto_skcipher_set_reqsize(tfm, sizeof(struct stm32_cryp_reqctx));

	return 0;
}

static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq);

static int stm32_cryp_aes_aead_init(struct crypto_aead *tfm)
{
	crypto_aead_set_reqsize(tfm, sizeof(struct stm32_cryp_reqctx));

	return 0;
}

static int stm32_cryp_crypt(struct skcipher_request *req, unsigned long mode)
{
	struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(
			crypto_skcipher_reqtfm(req));
	struct stm32_cryp_reqctx *rctx = skcipher_request_ctx(req);
	struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);

	if (!cryp)
		return -ENODEV;

	rctx->mode = mode;

	return crypto_transfer_skcipher_request_to_engine(cryp->engine, req);
}

static int stm32_cryp_aead_crypt(struct aead_request *req, unsigned long mode)
{
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct stm32_cryp_reqctx *rctx = aead_request_ctx(req);
	struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);

	if (!cryp)
		return -ENODEV;

	rctx->mode = mode;

	return crypto_transfer_aead_request_to_engine(cryp->engine, req);
}

static int stm32_cryp_setkey(struct crypto_skcipher *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}

static int stm32_cryp_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
				 unsigned int keylen)
{
	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;
	else
		return stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_des_setkey(struct crypto_skcipher *tfm, const u8 *key,
				 unsigned int keylen)
{
	return verify_skcipher_des_key(tfm, key) ?:
	       stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_tdes_setkey(struct crypto_skcipher *tfm, const u8 *key,
				  unsigned int keylen)
{
	return verify_skcipher_des3_key(tfm, key) ?:
	       stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_aes_aead_setkey(struct crypto_aead *tfm, const u8 *key,
				      unsigned int keylen)
{
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);

	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}

static int stm32_cryp_aes_gcm_setauthsize(struct crypto_aead *tfm,
					  unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 8:
	case 12:
	case 13:
	case 14:
	case 15:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int stm32_cryp_aes_ccm_setauthsize(struct crypto_aead *tfm,
					  unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 6:
	case 8:
	case 10:
	case 12:
	case 14:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int stm32_cryp_aes_ecb_encrypt(struct skcipher_request *req)
{
	if (req->cryptlen % AES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_AES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ecb_decrypt(struct skcipher_request *req)
{
	if (req->cryptlen % AES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_AES | FLG_ECB);
}

static int stm32_cryp_aes_cbc_encrypt(struct skcipher_request *req)
{
	if (req->cryptlen % AES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_AES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_aes_cbc_decrypt(struct skcipher_request *req)
{
	if (req->cryptlen % AES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_AES | FLG_CBC);
}

static int stm32_cryp_aes_ctr_encrypt(struct skcipher_request *req)
{
	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_AES | FLG_CTR | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ctr_decrypt(struct skcipher_request *req)
{
	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_AES | FLG_CTR);
}

static int stm32_cryp_aes_gcm_encrypt(struct aead_request *req)
{
	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM | FLG_ENCRYPT);
}

static int stm32_cryp_aes_gcm_decrypt(struct aead_request *req)
{
	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM);
}

static inline int crypto_ccm_check_iv(const u8 *iv)
{
	/* 2 <= L <= 8, so 1 <= L' <= 7. */
	if (iv[0] < 1 || iv[0] > 7)
		return -EINVAL;

	return 0;
}
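
/*
 * For instance iv[0] = 3 encodes L = 4 length octets, which leaves
 * 15 - 4 = 11 bytes of nonce in the 16-byte CCM IV (per RFC 3610).
 */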

static int stm32_cryp_aes_ccm_encrypt(struct aead_request *req)
{
	int err;

	err = crypto_ccm_check_iv(req->iv);
	if (err)
		return err;

	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ccm_decrypt(struct aead_request *req)
{
	int err;

	err = crypto_ccm_check_iv(req->iv);
	if (err)
		return err;

	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM);
}

static int stm32_cryp_des_ecb_encrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_DES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_des_ecb_decrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_DES | FLG_ECB);
}

static int stm32_cryp_des_cbc_encrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_DES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_des_cbc_decrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_DES | FLG_CBC);
}

static int stm32_cryp_tdes_ecb_encrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_tdes_ecb_decrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB);
}

static int stm32_cryp_tdes_cbc_encrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_tdes_cbc_decrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC);
}

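/*
 * DMA eligibility rules for a scatterlist: buffers of at most 16 bytes go
 * through the CPU (NO_DMA, faster), every entry must be 32-bit aligned and
 * a whole number of blocks except possibly the last one, and a misaligned
 * tail means the list must first be truncated to whole AES blocks
 * (DMA_NEED_SG_TRUNC).
 */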
static enum stm32_dma_mode stm32_cryp_dma_check_sg(struct scatterlist *test_sg, size_t len,
						   size_t block_size)
{
	struct scatterlist *sg;
	int i;

	if (len <= 16)
		return NO_DMA; /* Faster */

	for_each_sg(test_sg, sg, sg_nents(test_sg), i) {
		if (!IS_ALIGNED(sg->length, block_size) && !sg_is_last(sg))
			return NO_DMA;

		if (sg->offset % sizeof(u32))
			return NO_DMA;

		if (sg_is_last(sg) && !IS_ALIGNED(sg->length, AES_BLOCK_SIZE))
			return DMA_NEED_SG_TRUNC;
	}

	return DMA_PLAIN_SG;
}

static enum stm32_dma_mode stm32_cryp_dma_check(struct stm32_cryp *cryp, struct scatterlist *in_sg,
						struct scatterlist *out_sg)
{
	enum stm32_dma_mode ret = DMA_PLAIN_SG;

	if (!is_aes(cryp))
		return NO_DMA;

	if (!cryp->dma_lch_in || !cryp->dma_lch_out)
		return NO_DMA;

	ret = stm32_cryp_dma_check_sg(in_sg, cryp->payload_in, AES_BLOCK_SIZE);
	if (ret == NO_DMA)
		return ret;

	ret = stm32_cryp_dma_check_sg(out_sg, cryp->payload_out, AES_BLOCK_SIZE);
	if (ret == NO_DMA)
		return ret;

	/* Check CTR counter overflow */
	if (is_aes(cryp) && is_ctr(cryp)) {
		u32 c;
		__be32 iv3;

		memcpy(&iv3, &cryp->req->iv[3 * sizeof(u32)], sizeof(iv3));
		c = be32_to_cpu(iv3);
		if ((c + cryp->payload_in) < cryp->payload_in)
			return NO_DMA;
	}

	/* Workaround */
	if (is_aes(cryp) && is_ctr(cryp) && ret == DMA_NEED_SG_TRUNC)
		return NO_DMA;

	return ret;
}

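/*
 * Build a new scatterlist covering [skip, skip + size) of @sg. The caller
 * owns the returned list and must kfree() it; @size is block-aligned by
 * the callers, so DMA only ever sees whole hardware blocks.
 */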
static int stm32_cryp_truncate_sg(struct scatterlist **new_sg, size_t *new_sg_len,
				  struct scatterlist *sg, off_t skip, size_t size)
{
	struct scatterlist *cur;
	int alloc_sg_len;

	*new_sg_len = 0;

	if (!sg || !size) {
		*new_sg = NULL;
		return 0;
	}

	alloc_sg_len = sg_nents_for_len(sg, skip + size);
	if (alloc_sg_len < 0)
		return alloc_sg_len;

	/* We may allocate too many sg entries, but it is simpler */
	*new_sg = kmalloc_array((size_t)alloc_sg_len, sizeof(struct scatterlist), GFP_KERNEL);
	if (!*new_sg)
		return -ENOMEM;

	sg_init_table(*new_sg, (unsigned int)alloc_sg_len);

	cur = *new_sg;
	while (sg && size) {
		unsigned int len = sg->length;
		unsigned int offset = sg->offset;

		if (skip > len) {
			skip -= len;
			sg = sg_next(sg);
			continue;
		}

		if (skip) {
			len -= skip;
			offset += skip;
			skip = 0;
		}

		if (size < len)
			len = size;

		if (len > 0) {
			(*new_sg_len)++;
			size -= len;
			sg_set_page(cur, sg_page(sg), len, offset);
			if (size == 0)
				sg_mark_end(cur);
			cur = sg_next(cur);
		}

		sg = sg_next(sg);
	}

	return 0;
}

static int stm32_cryp_cipher_prepare(struct stm32_cryp *cryp, struct scatterlist *in_sg,
				     struct scatterlist *out_sg)
{
	size_t align_size;
	int ret;

	cryp->dma_mode = stm32_cryp_dma_check(cryp, in_sg, out_sg);

	scatterwalk_start(&cryp->in_walk, in_sg);
	scatterwalk_start(&cryp->out_walk, out_sg);

	if (cryp->dma_mode == NO_DMA) {
		cryp->flags &= ~FLG_IN_OUT_DMA;

		if (is_ctr(cryp))
			memset(cryp->last_ctr, 0, sizeof(cryp->last_ctr));

	} else if (cryp->dma_mode == DMA_NEED_SG_TRUNC) {

		cryp->flags |= FLG_IN_OUT_DMA;

		align_size = ALIGN_DOWN(cryp->payload_in, cryp->hw_blocksize);
		ret = stm32_cryp_truncate_sg(&cryp->in_sg, &cryp->in_sg_len, in_sg, 0, align_size);
		if (ret)
			return ret;

		ret = stm32_cryp_truncate_sg(&cryp->out_sg, &cryp->out_sg_len, out_sg, 0,
					     align_size);
		if (ret) {
			kfree(cryp->in_sg);
			return ret;
		}
	} else {
		cryp->flags |= FLG_IN_OUT_DMA;

		cryp->in_sg = in_sg;
		cryp->out_sg = out_sg;

		ret = sg_nents_for_len(cryp->in_sg, cryp->payload_in);
		if (ret < 0)
			return ret;
		cryp->in_sg_len = (size_t)ret;

		ret = sg_nents_for_len(out_sg, cryp->payload_out);
		if (ret < 0)
			return ret;
		cryp->out_sg_len = (size_t)ret;
	}

	return 0;
}

static int stm32_cryp_aead_prepare(struct stm32_cryp *cryp, struct scatterlist *in_sg,
				   struct scatterlist *out_sg)
{
	size_t align_size;
	off_t skip;
	int ret, ret2;

	cryp->header_sg = NULL;
	cryp->in_sg = NULL;
	cryp->out_sg = NULL;

	if (!cryp->dma_lch_in || !cryp->dma_lch_out) {
		cryp->dma_mode = NO_DMA;
		cryp->flags &= ~(FLG_IN_OUT_DMA | FLG_HEADER_DMA);

		return 0;
	}

	/* CCM hw_init may already have consumed part of the header */
	skip = cryp->areq->assoclen - cryp->header_in;

	align_size = ALIGN_DOWN(cryp->header_in, cryp->hw_blocksize);
	ret = stm32_cryp_truncate_sg(&cryp->header_sg, &cryp->header_sg_len, in_sg, skip,
				     align_size);
	if (ret)
		return ret;

	ret = stm32_cryp_dma_check_sg(cryp->header_sg, align_size, AES_BLOCK_SIZE);
	if (ret == NO_DMA) {
		/* We cannot DMA the header */
		kfree(cryp->header_sg);
		cryp->header_sg = NULL;

		cryp->flags &= ~FLG_HEADER_DMA;
	} else {
		cryp->flags |= FLG_HEADER_DMA;
	}

	/* Now skip the whole header to land at the payload start */
	skip = cryp->areq->assoclen;
	align_size = ALIGN_DOWN(cryp->payload_in, cryp->hw_blocksize);
	ret = stm32_cryp_truncate_sg(&cryp->in_sg, &cryp->in_sg_len, in_sg, skip, align_size);
	if (ret) {
		kfree(cryp->header_sg);
		return ret;
	}

	/* For the out buffer, align_size is the same as for the in buffer */
	ret = stm32_cryp_truncate_sg(&cryp->out_sg, &cryp->out_sg_len, out_sg, skip, align_size);
	if (ret) {
		kfree(cryp->header_sg);
		kfree(cryp->in_sg);
		return ret;
	}

	ret = stm32_cryp_dma_check_sg(cryp->in_sg, align_size, AES_BLOCK_SIZE);
	ret2 = stm32_cryp_dma_check_sg(cryp->out_sg, align_size, AES_BLOCK_SIZE);
	if (ret == NO_DMA || ret2 == NO_DMA) {
		kfree(cryp->in_sg);
		cryp->in_sg = NULL;

		kfree(cryp->out_sg);
		cryp->out_sg = NULL;

		cryp->flags &= ~FLG_IN_OUT_DMA;
	} else {
		cryp->flags |= FLG_IN_OUT_DMA;
	}

	return 0;
}

static int stm32_cryp_prepare_req(struct skcipher_request *req,
				  struct aead_request *areq)
{
	struct stm32_cryp_ctx *ctx;
	struct stm32_cryp *cryp;
	struct stm32_cryp_reqctx *rctx;
	struct scatterlist *in_sg, *out_sg;
	int ret;

	if (!req && !areq)
		return -EINVAL;

	ctx = req ? crypto_skcipher_ctx(crypto_skcipher_reqtfm(req)) :
		    crypto_aead_ctx(crypto_aead_reqtfm(areq));

	cryp = ctx->cryp;

	rctx = req ? skcipher_request_ctx(req) : aead_request_ctx(areq);
	rctx->mode &= FLG_MODE_MASK;

	cryp->flags = (cryp->flags & ~FLG_MODE_MASK) | rctx->mode;
	cryp->hw_blocksize = is_aes(cryp) ? AES_BLOCK_SIZE : DES_BLOCK_SIZE;
	cryp->ctx = ctx;

	if (req) {
		cryp->req = req;
		cryp->areq = NULL;
		cryp->header_in = 0;
		cryp->payload_in = req->cryptlen;
		cryp->payload_out = req->cryptlen;
		cryp->authsize = 0;

		in_sg = req->src;
		out_sg = req->dst;

		ret = stm32_cryp_cipher_prepare(cryp, in_sg, out_sg);
		if (ret)
			return ret;

		ret = stm32_cryp_hw_init(cryp);
	} else {
		/*
		 * Length of input and output data:
		 * Encryption case:
		 * INPUT = AssocData      ||     PlainText
		 *         <- assoclen ->       <- cryptlen ->
		 *
		 * OUTPUT = AssocData     ||    CipherText    ||    AuthTag
		 *          <- assoclen ->     <- cryptlen ->     <- authsize ->
		 *
		 * Decryption case:
		 * INPUT = AssocData      ||    CipherText    ||    AuthTag
		 *         <- assoclen ->     <------- cryptlen ------->
		 *
		 * OUTPUT = AssocData     ||    PlainText
		 *          <- assoclen ->     <- cryptlen - authsize ->
		 */
		cryp->areq = areq;
		cryp->req = NULL;
		cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(areq));
		if (is_encrypt(cryp)) {
			cryp->payload_in = areq->cryptlen;
			cryp->header_in = areq->assoclen;
			cryp->payload_out = areq->cryptlen;
		} else {
			cryp->payload_in = areq->cryptlen - cryp->authsize;
			cryp->header_in = areq->assoclen;
			cryp->payload_out = cryp->payload_in;
		}

		in_sg = areq->src;
		out_sg = areq->dst;

		scatterwalk_start(&cryp->in_walk, in_sg);
		/* In output, jump after assoc data */
		scatterwalk_start_at_pos(&cryp->out_walk, out_sg,
					 areq->assoclen);

		ret = stm32_cryp_hw_init(cryp);
		if (ret)
			return ret;

		ret = stm32_cryp_aead_prepare(cryp, in_sg, out_sg);
	}

	return ret;
}

static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq)
{
	struct skcipher_request *req = container_of(areq,
						    struct skcipher_request,
						    base);
	struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(
			crypto_skcipher_reqtfm(req));
	struct stm32_cryp *cryp = ctx->cryp;
	int ret;

	if (!cryp)
		return -ENODEV;

	ret = stm32_cryp_prepare_req(req, NULL);
	if (ret)
		return ret;

	if (cryp->flags & FLG_IN_OUT_DMA)
		ret = stm32_cryp_dma_start(cryp);
	else
		ret = stm32_cryp_it_start(cryp);

	if (ret == -ETIMEDOUT)
		stm32_cryp_finish_req(cryp, ret);

	return ret;
}

static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq)
{
	struct aead_request *req = container_of(areq, struct aead_request,
						base);
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct stm32_cryp *cryp = ctx->cryp;
	int err;

	if (!cryp)
		return -ENODEV;

	err = stm32_cryp_prepare_req(NULL, req);
	if (err)
		return err;

	if (!stm32_cryp_get_input_text_len(cryp) && !cryp->header_in &&
	    !(cryp->flags & FLG_HEADER_DMA)) {
		/* No input data to process: get tag and finish */
		stm32_cryp_finish_req(cryp, 0);
		return 0;
	}

	if (cryp->flags & FLG_HEADER_DMA)
		return stm32_cryp_header_dma_start(cryp);

	if (!cryp->header_in && cryp->flags & FLG_IN_OUT_DMA)
		return stm32_cryp_dma_start(cryp);

	return stm32_cryp_it_start(cryp);
}

static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp)
{
	u32 cfg, size_bit;
	unsigned int i;
	int ret = 0;

	/* Update Config */
	cfg = stm32_cryp_read(cryp, cryp->caps->cr);

	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_FINAL;
	cfg &= ~CR_DEC_NOT_ENC;
	cfg |= CR_CRYPEN;

	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	if (is_gcm(cryp)) {
		/* GCM: write aad and payload size (in bits) */
		size_bit = cryp->areq->assoclen * 8;
		if (cryp->caps->swap_final)
			size_bit = (__force u32)cpu_to_be32(size_bit);

		stm32_cryp_write(cryp, cryp->caps->din, 0);
		stm32_cryp_write(cryp, cryp->caps->din, size_bit);

		size_bit = is_encrypt(cryp) ? cryp->areq->cryptlen :
				cryp->areq->cryptlen - cryp->authsize;
		size_bit *= 8;
		if (cryp->caps->swap_final)
			size_bit = (__force u32)cpu_to_be32(size_bit);

		stm32_cryp_write(cryp, cryp->caps->din, 0);
		stm32_cryp_write(cryp, cryp->caps->din, size_bit);
	} else {
		/* CCM: write CTR0 */
		u32 iv32[AES_BLOCK_32];
		u8 *iv = (u8 *)iv32;
		__be32 *biv = (__be32 *)iv32;

		memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
		memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);

		for (i = 0; i < AES_BLOCK_32; i++) {
			u32 xiv = iv32[i];

			if (!cryp->caps->padding_wa)
				xiv = be32_to_cpu(biv[i]);
			stm32_cryp_write(cryp, cryp->caps->din, xiv);
		}
	}

	/* Wait for output data */
	ret = stm32_cryp_wait_output(cryp);
	if (ret) {
		dev_err(cryp->dev, "Timeout (read tag)\n");
		return ret;
	}

	if (is_encrypt(cryp)) {
		u32 out_tag[AES_BLOCK_32];

		/* Get and write tag */
		readsl(cryp->regs + cryp->caps->dout, out_tag, AES_BLOCK_32);
		memcpy_to_scatterwalk(&cryp->out_walk, out_tag, cryp->authsize);
	} else {
		/* Get and check tag */
		u32 in_tag[AES_BLOCK_32], out_tag[AES_BLOCK_32];

		memcpy_from_scatterwalk(in_tag, &cryp->in_walk, cryp->authsize);
		readsl(cryp->regs + cryp->caps->dout, out_tag, AES_BLOCK_32);

		if (crypto_memneq(in_tag, out_tag, cryp->authsize))
			ret = -EBADMSG;
	}

	/* Disable cryp */
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	return ret;
}

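/*
 * The hardware only increments the 32 LSB of the CTR counter held in
 * IV1R; when that word is about to wrap (0xFFFFFFFF), propagate the
 * carry into the full 128-bit counter in software and rewrite the IV
 * registers before the next block.
 */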
static void stm32_cryp_check_ctr_counter(struct stm32_cryp *cryp)
{
	u32 cr;

	if (unlikely(cryp->last_ctr[3] == cpu_to_be32(0xFFFFFFFF))) {
		/*
		 * In this case we need to manually increment the ctr
		 * counter, as the HW doesn't handle the u32 carry.
		 */
		crypto_inc((u8 *)cryp->last_ctr, sizeof(cryp->last_ctr));

		cr = stm32_cryp_read(cryp, cryp->caps->cr);
		stm32_cryp_write(cryp, cryp->caps->cr, cr & ~CR_CRYPEN);

		stm32_cryp_hw_write_iv(cryp, cryp->last_ctr);

		stm32_cryp_write(cryp, cryp->caps->cr, cr);
	}

	/* The IV registers are BE */
	cryp->last_ctr[0] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0l));
	cryp->last_ctr[1] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0r));
	cryp->last_ctr[2] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1l));
	cryp->last_ctr[3] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1r));
}

static void stm32_cryp_irq_read_data(struct stm32_cryp *cryp)
{
	u32 block[AES_BLOCK_32];

	readsl(cryp->regs + cryp->caps->dout, block, cryp->hw_blocksize / sizeof(u32));
	memcpy_to_scatterwalk(&cryp->out_walk, block, min_t(size_t, cryp->hw_blocksize,
							    cryp->payload_out));
	cryp->payload_out -= min_t(size_t, cryp->hw_blocksize,
				   cryp->payload_out);
}

static void stm32_cryp_irq_write_block(struct stm32_cryp *cryp)
{
	u32 block[AES_BLOCK_32] = {0};

	memcpy_from_scatterwalk(block, &cryp->in_walk, min_t(size_t, cryp->hw_blocksize,
							     cryp->payload_in));
	writesl(cryp->regs + cryp->caps->din, block, cryp->hw_blocksize / sizeof(u32));
	cryp->payload_in -= min_t(size_t, cryp->hw_blocksize, cryp->payload_in);
}

static void stm32_cryp_irq_write_gcm_padded_data(struct stm32_cryp *cryp)
{
	int err;
	u32 cfg, block[AES_BLOCK_32] = {0};
	unsigned int i;

	/* 'Special workaround' procedure described in the datasheet */

	/* a) disable ip */
	stm32_cryp_write(cryp, cryp->caps->imsc, 0);
	cfg = stm32_cryp_read(cryp, cryp->caps->cr);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* b) Update IV1R */
	stm32_cryp_write(cryp, cryp->caps->iv1r, cryp->gcm_ctr - 2);

	/* c) change mode to CTR */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CTR;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* a) enable IP */
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* b) pad and write the last block */
	stm32_cryp_irq_write_block(cryp);
	/* wait end of process */
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write gcm last data)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	/* c) get and store encrypted data */
	/*
	 * Same code as stm32_cryp_irq_read_data(), but we want to store
	 * block value
	 */
	readsl(cryp->regs + cryp->caps->dout, block, cryp->hw_blocksize / sizeof(u32));

	memcpy_to_scatterwalk(&cryp->out_walk, block, min_t(size_t, cryp->hw_blocksize,
							    cryp->payload_out));
	cryp->payload_out -= min_t(size_t, cryp->hw_blocksize,
				   cryp->payload_out);

	/* d) change mode back to AES GCM */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_GCM;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* e) change phase to Final */
	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_FINAL;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* f) write padded data */
	writesl(cryp->regs + cryp->caps->din, block, AES_BLOCK_32);

	/* g) Empty fifo out */
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write gcm padded data)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	for (i = 0; i < AES_BLOCK_32; i++)
		stm32_cryp_read(cryp, cryp->caps->dout);

	/* h) run the normal Final phase */
	stm32_cryp_finish_req(cryp, 0);
}

static void stm32_cryp_irq_set_npblb(struct stm32_cryp *cryp)
{
	u32 cfg;

	/* disable ip, set NPBLB and re-enable ip */
	cfg = stm32_cryp_read(cryp, cryp->caps->cr);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	cfg |= (cryp->hw_blocksize - cryp->payload_in) << CR_NBPBL_SHIFT;
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);
}
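
/*
 * Example: with payload_in = 12 on an AES block, NPBLB is set to
 * 16 - 12 = 4, telling the engine to discard the last 4 padding bytes of
 * the final block.
 */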

static void stm32_cryp_irq_write_ccm_padded_data(struct stm32_cryp *cryp)
{
	int err = 0;
	u32 cfg, iv1tmp;
	u32 cstmp1[AES_BLOCK_32], cstmp2[AES_BLOCK_32];
	u32 block[AES_BLOCK_32] = {0};
	unsigned int i;

	/* 'Special workaround' procedure described in the datasheet */

	/* a) disable ip */
	stm32_cryp_write(cryp, cryp->caps->imsc, 0);

	cfg = stm32_cryp_read(cryp, cryp->caps->cr);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* b) get IV1 from CRYP_CSGCMCCM7 */
	iv1tmp = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + 7 * 4);

	/* c) Load CRYP_CSGCMCCMxR */
	for (i = 0; i < ARRAY_SIZE(cstmp1); i++)
		cstmp1[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);

	/* d) Write IV1R */
	stm32_cryp_write(cryp, cryp->caps->iv1r, iv1tmp);

	/* e) change mode to CTR */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CTR;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* a) enable IP */
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* b) pad and write the last block */
	stm32_cryp_irq_write_block(cryp);
	/* wait end of process */
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write ccm padded data)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	/* c) get and store decrypted data */
	/*
	 * Same code as stm32_cryp_irq_read_data(), but we want to store
	 * block value
	 */
	readsl(cryp->regs + cryp->caps->dout, block, cryp->hw_blocksize / sizeof(u32));

	memcpy_to_scatterwalk(&cryp->out_walk, block, min_t(size_t, cryp->hw_blocksize,
							    cryp->payload_out));
	cryp->payload_out -= min_t(size_t, cryp->hw_blocksize, cryp->payload_out);

	/* d) Load again CRYP_CSGCMCCMxR */
	for (i = 0; i < ARRAY_SIZE(cstmp2); i++)
		cstmp2[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);

	/* e) change mode back to AES CCM */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CCM;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* f) change phase to header */
	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_HEADER;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* g) XOR and write padded data */
	for (i = 0; i < ARRAY_SIZE(block); i++) {
		block[i] ^= cstmp1[i];
		block[i] ^= cstmp2[i];
		stm32_cryp_write(cryp, cryp->caps->din, block[i]);
	}

	/* h) wait for completion */
	err = stm32_cryp_wait_busy(cryp);
	if (err)
		dev_err(cryp->dev, "Timeout (write ccm padded data)\n");

	/* i) run the normal Final phase */
	stm32_cryp_finish_req(cryp, err);
}
2114
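/*
 * Feed the input FIFO with payload data, selecting the padding strategy
 * (datasheet workaround or NPBLB) when fewer than AES_BLOCK_SIZE bytes
 * remain for GCM encryption or CCM decryption.
 */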
static void stm32_cryp_irq_write_data(struct stm32_cryp *cryp)
{
	if (unlikely(!cryp->payload_in)) {
		dev_warn(cryp->dev, "No more data to process\n");
		return;
	}

	if (unlikely(cryp->payload_in < AES_BLOCK_SIZE &&
		     (stm32_cryp_get_hw_mode(cryp) == CR_AES_GCM) &&
		     is_encrypt(cryp))) {
		/* Padding for AES GCM encryption */
		if (cryp->caps->padding_wa) {
			/* Special case 1 */
			stm32_cryp_irq_write_gcm_padded_data(cryp);
			return;
		}

		/* Setting padding bytes (NPBLB) */
		stm32_cryp_irq_set_npblb(cryp);
	}

	if (unlikely((cryp->payload_in < AES_BLOCK_SIZE) &&
		     (stm32_cryp_get_hw_mode(cryp) == CR_AES_CCM) &&
		     is_decrypt(cryp))) {
		/* Padding for AES CCM decryption */
		if (cryp->caps->padding_wa) {
			/* Special case 2 */
			stm32_cryp_irq_write_ccm_padded_data(cryp);
			return;
		}

		/* Setting padding bytes (NPBLB) */
		stm32_cryp_irq_set_npblb(cryp);
	}

	if (is_aes(cryp) && is_ctr(cryp))
		stm32_cryp_check_ctr_counter(cryp);

	stm32_cryp_irq_write_block(cryp);
}

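/*
 * Write one block of additional authenticated data (GCM/CCM header) to the
 * input FIFO; a final partial block is zero-padded via the local buffer.
 */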
static void stm32_cryp_irq_write_gcmccm_header(struct stm32_cryp *cryp)
{
	u32 block[AES_BLOCK_32] = {0};
	size_t written;

	written = min_t(size_t, AES_BLOCK_SIZE, cryp->header_in);

	memcpy_from_scatterwalk(block, &cryp->in_walk, written);

	writesl(cryp->regs + cryp->caps->din, block, AES_BLOCK_32);

	cryp->header_in -= written;

	stm32_crypt_gcmccm_end_header(cryp);
}

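/*
 * Threaded IRQ handler: drain the output FIFO, feed the input FIFO (header
 * or payload depending on the current GCM/CCM phase), mask interrupts that
 * are no longer needed, and finish the request once all data has moved.
 */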
static irqreturn_t stm32_cryp_irq_thread(int irq, void *arg)
{
	struct stm32_cryp *cryp = arg;
	u32 ph;
	u32 it_mask = stm32_cryp_read(cryp, cryp->caps->imsc);

	if (cryp->irq_status & MISR_OUT)
		/* Output FIFO IRQ: read data */
		stm32_cryp_irq_read_data(cryp);

	if (cryp->irq_status & MISR_IN) {
		if (is_gcm(cryp) || is_ccm(cryp)) {
			ph = stm32_cryp_read(cryp, cryp->caps->cr) & CR_PH_MASK;
			if (unlikely(ph == CR_PH_HEADER))
				/* Write Header */
				stm32_cryp_irq_write_gcmccm_header(cryp);
			else
				/* Input FIFO IRQ: write data */
				stm32_cryp_irq_write_data(cryp);
			if (is_gcm(cryp))
				cryp->gcm_ctr++;
		} else {
			/* Input FIFO IRQ: write data */
			stm32_cryp_irq_write_data(cryp);
		}
	}

	/* Mask useless interrupts */
	if (!cryp->payload_in && !cryp->header_in)
		it_mask &= ~IMSCR_IN;
	if (!cryp->payload_out)
		it_mask &= ~IMSCR_OUT;
	stm32_cryp_write(cryp, cryp->caps->imsc, it_mask);

	if (!cryp->payload_in && !cryp->header_in && !cryp->payload_out) {
		local_bh_disable();
		stm32_cryp_finish_req(cryp, 0);
		local_bh_enable();
	}

	return IRQ_HANDLED;
}

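/* Hard IRQ handler: latch the masked interrupt status and defer to the thread. */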
static irqreturn_t stm32_cryp_irq(int irq, void *arg)
{
	struct stm32_cryp *cryp = arg;

	cryp->irq_status = stm32_cryp_read(cryp, cryp->caps->mis);

	return IRQ_WAKE_THREAD;
}

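/*
 * Request and configure the "in" (memory-to-device) and "out"
 * (device-to-memory) DMA channels. A -ENODEV return is tolerated by the
 * probe, which then continues without DMA.
 */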
static int stm32_cryp_dma_init(struct stm32_cryp *cryp)
{
	struct dma_slave_config dma_conf;
	struct dma_chan *chan;
	int ret;

	memset(&dma_conf, 0, sizeof(dma_conf));

	dma_conf.direction = DMA_MEM_TO_DEV;
	dma_conf.dst_addr = cryp->phys_base + cryp->caps->din;
	dma_conf.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
	dma_conf.dst_maxburst = CRYP_DMA_BURST_REG;
	dma_conf.device_fc = false;

	chan = dma_request_chan(cryp->dev, "in");
	if (IS_ERR(chan))
		return PTR_ERR(chan);

	cryp->dma_lch_in = chan;
	ret = dmaengine_slave_config(cryp->dma_lch_in, &dma_conf);
	if (ret) {
		dma_release_channel(cryp->dma_lch_in);
		cryp->dma_lch_in = NULL;
		dev_err(cryp->dev, "Couldn't configure DMA in slave.\n");
		return ret;
	}

	memset(&dma_conf, 0, sizeof(dma_conf));

	dma_conf.direction = DMA_DEV_TO_MEM;
	dma_conf.src_addr = cryp->phys_base + cryp->caps->dout;
	dma_conf.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
	dma_conf.src_maxburst = CRYP_DMA_BURST_REG;
	dma_conf.device_fc = false;

	chan = dma_request_chan(cryp->dev, "out");
	if (IS_ERR(chan)) {
		dma_release_channel(cryp->dma_lch_in);
		cryp->dma_lch_in = NULL;
		return PTR_ERR(chan);
	}

	cryp->dma_lch_out = chan;

	ret = dmaengine_slave_config(cryp->dma_lch_out, &dma_conf);
	if (ret) {
		dma_release_channel(cryp->dma_lch_out);
		cryp->dma_lch_out = NULL;
		dev_err(cryp->dev, "Couldn't configure DMA out slave.\n");
		dma_release_channel(cryp->dma_lch_in);
		cryp->dma_lch_in = NULL;
		return ret;
	}

	init_completion(&cryp->dma_completion);

	return 0;
}

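/* Symmetric cipher algorithms: AES (ECB/CBC/CTR), DES and 3DES (ECB/CBC). */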
static struct skcipher_engine_alg crypto_algs[] = {
{
	.base = {
		.base.cra_name = "ecb(aes)",
		.base.cra_driver_name = "stm32-ecb-aes",
		.base.cra_priority = 300,
		.base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY,
		.base.cra_blocksize = AES_BLOCK_SIZE,
		.base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
		.base.cra_alignmask = 0,
		.base.cra_module = THIS_MODULE,

		.init = stm32_cryp_init_tfm,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.setkey = stm32_cryp_aes_setkey,
		.encrypt = stm32_cryp_aes_ecb_encrypt,
		.decrypt = stm32_cryp_aes_ecb_decrypt,
	},
	.op = {
		.do_one_request = stm32_cryp_cipher_one_req,
	},
},
{
	.base = {
		.base.cra_name = "cbc(aes)",
		.base.cra_driver_name = "stm32-cbc-aes",
		.base.cra_priority = 300,
		.base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY,
		.base.cra_blocksize = AES_BLOCK_SIZE,
		.base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
		.base.cra_alignmask = 0,
		.base.cra_module = THIS_MODULE,

		.init = stm32_cryp_init_tfm,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.setkey = stm32_cryp_aes_setkey,
		.encrypt = stm32_cryp_aes_cbc_encrypt,
		.decrypt = stm32_cryp_aes_cbc_decrypt,
	},
	.op = {
		.do_one_request = stm32_cryp_cipher_one_req,
	},
},
{
	.base = {
		.base.cra_name = "ctr(aes)",
		.base.cra_driver_name = "stm32-ctr-aes",
		.base.cra_priority = 300,
		.base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY,
		.base.cra_blocksize = 1,
		.base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
		.base.cra_alignmask = 0,
		.base.cra_module = THIS_MODULE,

		.init = stm32_cryp_init_tfm,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.setkey = stm32_cryp_aes_setkey,
		.encrypt = stm32_cryp_aes_ctr_encrypt,
		.decrypt = stm32_cryp_aes_ctr_decrypt,
	},
	.op = {
		.do_one_request = stm32_cryp_cipher_one_req,
	},
},
{
	.base = {
		.base.cra_name = "ecb(des)",
		.base.cra_driver_name = "stm32-ecb-des",
		.base.cra_priority = 300,
		.base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY,
		.base.cra_blocksize = DES_BLOCK_SIZE,
		.base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
		.base.cra_alignmask = 0,
		.base.cra_module = THIS_MODULE,

		.init = stm32_cryp_init_tfm,
		.min_keysize = DES_BLOCK_SIZE,
		.max_keysize = DES_BLOCK_SIZE,
		.setkey = stm32_cryp_des_setkey,
		.encrypt = stm32_cryp_des_ecb_encrypt,
		.decrypt = stm32_cryp_des_ecb_decrypt,
	},
	.op = {
		.do_one_request = stm32_cryp_cipher_one_req,
	},
},
{
	.base = {
		.base.cra_name = "cbc(des)",
		.base.cra_driver_name = "stm32-cbc-des",
		.base.cra_priority = 300,
		.base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY,
		.base.cra_blocksize = DES_BLOCK_SIZE,
		.base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
		.base.cra_alignmask = 0,
		.base.cra_module = THIS_MODULE,

		.init = stm32_cryp_init_tfm,
		.min_keysize = DES_BLOCK_SIZE,
		.max_keysize = DES_BLOCK_SIZE,
		.ivsize = DES_BLOCK_SIZE,
		.setkey = stm32_cryp_des_setkey,
		.encrypt = stm32_cryp_des_cbc_encrypt,
		.decrypt = stm32_cryp_des_cbc_decrypt,
	},
	.op = {
		.do_one_request = stm32_cryp_cipher_one_req,
	},
},
{
	.base = {
		.base.cra_name = "ecb(des3_ede)",
		.base.cra_driver_name = "stm32-ecb-des3",
		.base.cra_priority = 300,
		.base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY,
		.base.cra_blocksize = DES_BLOCK_SIZE,
		.base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
		.base.cra_alignmask = 0,
		.base.cra_module = THIS_MODULE,

		.init = stm32_cryp_init_tfm,
		.min_keysize = 3 * DES_BLOCK_SIZE,
		.max_keysize = 3 * DES_BLOCK_SIZE,
		.setkey = stm32_cryp_tdes_setkey,
		.encrypt = stm32_cryp_tdes_ecb_encrypt,
		.decrypt = stm32_cryp_tdes_ecb_decrypt,
	},
	.op = {
		.do_one_request = stm32_cryp_cipher_one_req,
	},
},
{
	.base = {
		.base.cra_name = "cbc(des3_ede)",
		.base.cra_driver_name = "stm32-cbc-des3",
		.base.cra_priority = 300,
		.base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY,
		.base.cra_blocksize = DES_BLOCK_SIZE,
		.base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
		.base.cra_alignmask = 0,
		.base.cra_module = THIS_MODULE,

		.init = stm32_cryp_init_tfm,
		.min_keysize = 3 * DES_BLOCK_SIZE,
		.max_keysize = 3 * DES_BLOCK_SIZE,
		.ivsize = DES_BLOCK_SIZE,
		.setkey = stm32_cryp_tdes_setkey,
		.encrypt = stm32_cryp_tdes_cbc_encrypt,
		.decrypt = stm32_cryp_tdes_cbc_decrypt,
	},
	.op = {
		.do_one_request = stm32_cryp_cipher_one_req,
	},
},
};

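/*
 * AEAD algorithms (AES-GCM and AES-CCM); only registered from probe when
 * the hardware advertises AEAD support (caps->aeads_support).
 */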
static struct aead_engine_alg aead_algs[] = {
{
	.base.setkey = stm32_cryp_aes_aead_setkey,
	.base.setauthsize = stm32_cryp_aes_gcm_setauthsize,
	.base.encrypt = stm32_cryp_aes_gcm_encrypt,
	.base.decrypt = stm32_cryp_aes_gcm_decrypt,
	.base.init = stm32_cryp_aes_aead_init,
	.base.ivsize = 12,
	.base.maxauthsize = AES_BLOCK_SIZE,

	.base.base = {
		.cra_name = "gcm(aes)",
		.cra_driver_name = "stm32-gcm-aes",
		.cra_priority = 300,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY,
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.op = {
		.do_one_request = stm32_cryp_aead_one_req,
	},
},
{
	.base.setkey = stm32_cryp_aes_aead_setkey,
	.base.setauthsize = stm32_cryp_aes_ccm_setauthsize,
	.base.encrypt = stm32_cryp_aes_ccm_encrypt,
	.base.decrypt = stm32_cryp_aes_ccm_decrypt,
	.base.init = stm32_cryp_aes_aead_init,
	.base.ivsize = AES_BLOCK_SIZE,
	.base.maxauthsize = AES_BLOCK_SIZE,

	.base.base = {
		.cra_name = "ccm(aes)",
		.cra_driver_name = "stm32-ccm-aes",
		.cra_priority = 300,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY,
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.op = {
		.do_one_request = stm32_cryp_aead_one_req,
	},
},
};

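/*
 * Per-SoC capabilities and register layout: the Ux500, STM32F7 and STM32MP1
 * variants differ in AEAD support, key/IV handling quirks and whether the
 * padding workaround is required.
 */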
static const struct stm32_cryp_caps ux500_data = {
	.aeads_support = false,
	.linear_aes_key = true,
	.kp_mode = false,
	.iv_protection = true,
	.swap_final = true,
	.padding_wa = true,
	.cr = UX500_CRYP_CR,
	.sr = UX500_CRYP_SR,
	.din = UX500_CRYP_DIN,
	.dout = UX500_CRYP_DOUT,
	.dmacr = UX500_CRYP_DMACR,
	.imsc = UX500_CRYP_IMSC,
	.mis = UX500_CRYP_MIS,
	.k1l = UX500_CRYP_K1L,
	.k1r = UX500_CRYP_K1R,
	.k3r = UX500_CRYP_K3R,
	.iv0l = UX500_CRYP_IV0L,
	.iv0r = UX500_CRYP_IV0R,
	.iv1l = UX500_CRYP_IV1L,
	.iv1r = UX500_CRYP_IV1R,
};

static const struct stm32_cryp_caps f7_data = {
	.aeads_support = true,
	.linear_aes_key = false,
	.kp_mode = true,
	.iv_protection = false,
	.swap_final = true,
	.padding_wa = true,
	.cr = CRYP_CR,
	.sr = CRYP_SR,
	.din = CRYP_DIN,
	.dout = CRYP_DOUT,
	.dmacr = CRYP_DMACR,
	.imsc = CRYP_IMSCR,
	.mis = CRYP_MISR,
	.k1l = CRYP_K1LR,
	.k1r = CRYP_K1RR,
	.k3r = CRYP_K3RR,
	.iv0l = CRYP_IV0LR,
	.iv0r = CRYP_IV0RR,
	.iv1l = CRYP_IV1LR,
	.iv1r = CRYP_IV1RR,
};

static const struct stm32_cryp_caps mp1_data = {
	.aeads_support = true,
	.linear_aes_key = false,
	.kp_mode = true,
	.iv_protection = false,
	.swap_final = false,
	.padding_wa = false,
	.cr = CRYP_CR,
	.sr = CRYP_SR,
	.din = CRYP_DIN,
	.dout = CRYP_DOUT,
	.dmacr = CRYP_DMACR,
	.imsc = CRYP_IMSCR,
	.mis = CRYP_MISR,
	.k1l = CRYP_K1LR,
	.k1r = CRYP_K1RR,
	.k3r = CRYP_K3RR,
	.iv0l = CRYP_IV0LR,
	.iv0r = CRYP_IV0RR,
	.iv1l = CRYP_IV1LR,
	.iv1r = CRYP_IV1RR,
};

static const struct of_device_id stm32_dt_ids[] = {
	{ .compatible = "stericsson,ux500-cryp", .data = &ux500_data},
	{ .compatible = "st,stm32f756-cryp", .data = &f7_data},
	{ .compatible = "st,stm32mp1-cryp", .data = &mp1_data},
	{},
};
MODULE_DEVICE_TABLE(of, stm32_dt_ids);

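/*
 * Probe: map registers, request the threaded IRQ, enable the clock and
 * runtime PM, reset the block, set up optional DMA, then start a crypto
 * engine and register the supported algorithms.
 */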
static int stm32_cryp_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct stm32_cryp *cryp;
	struct reset_control *rst;
	int irq, ret;

	cryp = devm_kzalloc(dev, sizeof(*cryp), GFP_KERNEL);
	if (!cryp)
		return -ENOMEM;

	cryp->caps = of_device_get_match_data(dev);
	if (!cryp->caps)
		return -ENODEV;

	cryp->dev = dev;

	cryp->regs = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(cryp->regs))
		return PTR_ERR(cryp->regs);

	cryp->phys_base = platform_get_resource(pdev, IORESOURCE_MEM, 0)->start;

	irq = platform_get_irq(pdev, 0);
	if (irq < 0)
		return irq;

	ret = devm_request_threaded_irq(dev, irq, stm32_cryp_irq,
					stm32_cryp_irq_thread, IRQF_ONESHOT,
					dev_name(dev), cryp);
	if (ret) {
		dev_err(dev, "Cannot grab IRQ\n");
		return ret;
	}

	cryp->clk = devm_clk_get(dev, NULL);
	if (IS_ERR(cryp->clk)) {
		dev_err_probe(dev, PTR_ERR(cryp->clk), "Could not get clock\n");

		return PTR_ERR(cryp->clk);
	}

	ret = clk_prepare_enable(cryp->clk);
	if (ret) {
		dev_err(cryp->dev, "Failed to enable clock\n");
		return ret;
	}

	pm_runtime_set_autosuspend_delay(dev, CRYP_AUTOSUSPEND_DELAY);
	pm_runtime_use_autosuspend(dev);

	pm_runtime_get_noresume(dev);
	pm_runtime_set_active(dev);
	pm_runtime_enable(dev);

	rst = devm_reset_control_get(dev, NULL);
	if (IS_ERR(rst)) {
		ret = PTR_ERR(rst);
		if (ret == -EPROBE_DEFER)
			goto err_rst;
	} else {
		reset_control_assert(rst);
		udelay(2);
		reset_control_deassert(rst);
	}

	platform_set_drvdata(pdev, cryp);

	ret = stm32_cryp_dma_init(cryp);
	switch (ret) {
	case 0:
		break;
	case -ENODEV:
		dev_dbg(dev, "DMA mode not available\n");
		break;
	default:
		goto err_dma;
	}

	spin_lock(&cryp_list.lock);
	list_add(&cryp->list, &cryp_list.dev_list);
	spin_unlock(&cryp_list.lock);

	/* Initialize crypto engine */
	cryp->engine = crypto_engine_alloc_init(dev, 1);
	if (!cryp->engine) {
		dev_err(dev, "Could not init crypto engine\n");
		ret = -ENOMEM;
		goto err_engine1;
	}

	ret = crypto_engine_start(cryp->engine);
	if (ret) {
		dev_err(dev, "Could not start crypto engine\n");
		goto err_engine2;
	}

	ret = crypto_engine_register_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));
	if (ret) {
		dev_err(dev, "Could not register algs\n");
		goto err_algs;
	}

	if (cryp->caps->aeads_support) {
		ret = crypto_engine_register_aeads(aead_algs, ARRAY_SIZE(aead_algs));
		if (ret)
			goto err_aead_algs;
	}

	dev_info(dev, "Initialized\n");

	pm_runtime_put_sync(dev);

	return 0;

err_aead_algs:
	crypto_engine_unregister_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));
err_algs:
err_engine2:
	crypto_engine_exit(cryp->engine);
err_engine1:
	spin_lock(&cryp_list.lock);
	list_del(&cryp->list);
	spin_unlock(&cryp_list.lock);

	if (cryp->dma_lch_in)
		dma_release_channel(cryp->dma_lch_in);
	if (cryp->dma_lch_out)
		dma_release_channel(cryp->dma_lch_out);
err_dma:
err_rst:
	pm_runtime_disable(dev);
	pm_runtime_put_noidle(dev);

	clk_disable_unprepare(cryp->clk);

	return ret;
}

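/* Unregister the algorithms and release all resources acquired in probe. */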
static void stm32_cryp_remove(struct platform_device *pdev)
{
	struct stm32_cryp *cryp = platform_get_drvdata(pdev);
	int ret;

	ret = pm_runtime_get_sync(cryp->dev);

	if (cryp->caps->aeads_support)
		crypto_engine_unregister_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	crypto_engine_unregister_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));

	crypto_engine_exit(cryp->engine);

	spin_lock(&cryp_list.lock);
	list_del(&cryp->list);
	spin_unlock(&cryp_list.lock);

	if (cryp->dma_lch_in)
		dma_release_channel(cryp->dma_lch_in);

	if (cryp->dma_lch_out)
		dma_release_channel(cryp->dma_lch_out);

	pm_runtime_disable(cryp->dev);
	pm_runtime_put_noidle(cryp->dev);

	if (ret >= 0)
		clk_disable_unprepare(cryp->clk);
}

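/* Runtime PM: the peripheral clock is the only resource gated here. */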
#ifdef CONFIG_PM
static int stm32_cryp_runtime_suspend(struct device *dev)
{
	struct stm32_cryp *cryp = dev_get_drvdata(dev);

	clk_disable_unprepare(cryp->clk);

	return 0;
}

static int stm32_cryp_runtime_resume(struct device *dev)
{
	struct stm32_cryp *cryp = dev_get_drvdata(dev);
	int ret;

	ret = clk_prepare_enable(cryp->clk);
	if (ret) {
		dev_err(cryp->dev, "Failed to prepare_enable clock\n");
		return ret;
	}

	return 0;
}
#endif

static const struct dev_pm_ops stm32_cryp_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
				pm_runtime_force_resume)
	SET_RUNTIME_PM_OPS(stm32_cryp_runtime_suspend,
			   stm32_cryp_runtime_resume, NULL)
};

static struct platform_driver stm32_cryp_driver = {
	.probe = stm32_cryp_probe,
	.remove = stm32_cryp_remove,
	.driver = {
		.name = DRIVER_NAME,
		.pm = &stm32_cryp_pm_ops,
		.of_match_table = stm32_dt_ids,
	},
};

module_platform_driver(stm32_cryp_driver);

MODULE_AUTHOR("Fabien Dessenne <fabien.dessenne@st.com>");
MODULE_DESCRIPTION("STMicroelectronics STM32 CRYP hardware driver");
MODULE_LICENSE("GPL");