// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Asynchronous Cryptographic Hash operations.
 *
 * This is the implementation of the ahash (asynchronous hash) API. It differs
 * from shash (synchronous hash) in that ahash supports asynchronous operations,
 * and it hashes data from scatterlists instead of virtually addressed buffers.
 *
 * The ahash API provides access to both ahash and shash algorithms. The shash
 * API only provides access to shash algorithms.
 *
 * Copyright (c) 2008 Loc Ho <lho@amcc.com>
 */

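/*
 * A minimal usage sketch of the interface implemented below: one-shot
 * hashing of a buffer through an asynchronous tfm.  The algorithm name and
 * the data/len/digest variables are illustrative assumptions; the calls
 * themselves are the standard ahash API.
 *
 *	struct crypto_ahash *tfm;
 *	struct ahash_request *req;
 *	struct scatterlist sg;
 *	DECLARE_CRYPTO_WAIT(wait);
 *	u8 digest[SHA256_DIGEST_SIZE];
 *	int err;
 *
 *	tfm = crypto_alloc_ahash("sha256", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	req = ahash_request_alloc(tfm, GFP_KERNEL);
 *	if (!req) {
 *		crypto_free_ahash(tfm);
 *		return -ENOMEM;
 *	}
 *
 *	sg_init_one(&sg, data, len);
 *	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				   crypto_req_done, &wait);
 *	ahash_request_set_crypt(req, &sg, digest, len);
 *
 *	err = crypto_wait_req(crypto_ahash_digest(req), &wait);
 *
 *	ahash_request_free(req);
 *	crypto_free_ahash(tfm);
 */
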
#include <crypto/scatterwalk.h>
#include <linux/cryptouser.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/mm.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/string.h>
#include <linux/string_choices.h>
#include <net/netlink.h>

#include "hash.h"

#define CRYPTO_ALG_TYPE_AHASH_MASK	0x0000000e

static int ahash_def_finup(struct ahash_request *req);

static inline bool crypto_ahash_block_only(struct crypto_ahash *tfm)
{
	return crypto_ahash_alg(tfm)->halg.base.cra_flags &
	       CRYPTO_AHASH_ALG_BLOCK_ONLY;
}

static inline bool crypto_ahash_final_nonzero(struct crypto_ahash *tfm)
{
	return crypto_ahash_alg(tfm)->halg.base.cra_flags &
	       CRYPTO_AHASH_ALG_FINAL_NONZERO;
}

static inline bool crypto_ahash_need_fallback(struct crypto_ahash *tfm)
{
	return crypto_ahash_alg(tfm)->halg.base.cra_flags &
	       CRYPTO_ALG_NEED_FALLBACK;
}

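/*
 * Common completion glue for asynchronous operations that need a finishing
 * step: restore the caller's completion callback and context, run @finish
 * unless the notification is merely -EINPROGRESS, and suppress the final
 * completion while @finish itself is still in flight (-EINPROGRESS/-EBUSY).
 */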
static inline void ahash_op_done(void *data, int err,
				 int (*finish)(struct ahash_request *, int))
{
	struct ahash_request *areq = data;
	crypto_completion_t compl;

	compl = areq->saved_complete;
	data = areq->saved_data;
	if (err == -EINPROGRESS)
		goto out;

	areq->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	err = finish(areq, err);
	if (err == -EINPROGRESS || err == -EBUSY)
		return;

out:
	compl(data, err);
}

static int hash_walk_next(struct crypto_hash_walk *walk)
{
	unsigned int offset = walk->offset;
	unsigned int nbytes = min(walk->entrylen,
				  ((unsigned int)(PAGE_SIZE)) - offset);

	walk->data = kmap_local_page(walk->pg);
	walk->data += offset;
	walk->entrylen -= nbytes;
	return nbytes;
}

static int hash_walk_new_entry(struct crypto_hash_walk *walk)
{
	struct scatterlist *sg;

	sg = walk->sg;
	walk->offset = sg->offset;
	walk->pg = sg_page(walk->sg) + (walk->offset >> PAGE_SHIFT);
	walk->offset = offset_in_page(walk->offset);
	walk->entrylen = sg->length;

	if (walk->entrylen > walk->total)
		walk->entrylen = walk->total;
	walk->total -= walk->entrylen;

	return hash_walk_next(walk);
}

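/*
 * crypto_hash_walk_first()/crypto_hash_walk_done() expose a request's data
 * one mapped chunk at a time.  The canonical caller loop, as used by
 * shash_ahash_update() below, is (sketch; process() is a placeholder for
 * the per-chunk operation and returns zero or a negative errno, which is
 * fed back into crypto_hash_walk_done()):
 *
 *	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
 *	     nbytes = crypto_hash_walk_done(&walk, nbytes))
 *		nbytes = process(walk.data, nbytes);
 */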
int crypto_hash_walk_first(struct ahash_request *req,
			   struct crypto_hash_walk *walk)
{
	walk->total = req->nbytes;
	walk->entrylen = 0;

	if (!walk->total)
		return 0;

	walk->flags = req->base.flags;

	if (ahash_request_isvirt(req)) {
		walk->data = req->svirt;
		walk->total = 0;
		return req->nbytes;
	}

	walk->sg = req->src;

	return hash_walk_new_entry(walk);
}
EXPORT_SYMBOL_GPL(crypto_hash_walk_first);

int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err)
{
	if ((walk->flags & CRYPTO_AHASH_REQ_VIRT))
		return err;

	walk->data -= walk->offset;

	kunmap_local(walk->data);
	crypto_yield(walk->flags);

	if (err)
		return err;

	if (walk->entrylen) {
		walk->offset = 0;
		walk->pg++;
		return hash_walk_next(walk);
	}

	if (!walk->total)
		return 0;

	walk->sg = sg_next(walk->sg);

	return hash_walk_new_entry(walk);
}
EXPORT_SYMBOL_GPL(crypto_hash_walk_done);

/*
 * For an ahash tfm that is using an shash algorithm (instead of an ahash
 * algorithm), this returns the underlying shash tfm.
 */
static inline struct crypto_shash *ahash_to_shash(struct crypto_ahash *tfm)
{
	return *(struct crypto_shash **)crypto_ahash_ctx(tfm);
}

static inline struct shash_desc *prepare_shash_desc(struct ahash_request *req,
						    struct crypto_ahash *tfm)
{
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = ahash_to_shash(tfm);
	return desc;
}

int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	struct page *page;
	const u8 *data;
	int err;

	data = req->svirt;
	if (!nbytes || ahash_request_isvirt(req))
		return crypto_shash_digest(desc, data, nbytes, req->result);

	sg = req->src;
	if (nbytes > sg->length)
		return crypto_shash_init(desc) ?:
		       shash_ahash_finup(req, desc);

	page = sg_page(sg);
	offset = sg->offset;
	data = lowmem_page_address(page) + offset;
	if (!IS_ENABLED(CONFIG_HIGHMEM))
		return crypto_shash_digest(desc, data, nbytes, req->result);

	page += offset >> PAGE_SHIFT;
	offset = offset_in_page(offset);

	if (nbytes > (unsigned int)PAGE_SIZE - offset)
		return crypto_shash_init(desc) ?:
		       shash_ahash_finup(req, desc);

	data = kmap_local_page(page);
	err = crypto_shash_digest(desc, data + offset, nbytes,
				  req->result);
	kunmap_local(data);
	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static void crypto_exit_ahash_using_shash(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

static int crypto_init_ahash_using_shash(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	crt->using_shash = true;
	*ctx = shash;
	tfm->exit = crypto_exit_ahash_using_shash;

	crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
				    CRYPTO_TFM_NEED_KEY);

	return 0;
}

static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
			  unsigned int keylen)
{
	return -ENOSYS;
}

static void ahash_set_needkey(struct crypto_ahash *tfm, struct ahash_alg *alg)
{
	if (alg->setkey != ahash_nosetkey &&
	    !(alg->halg.base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
		crypto_ahash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}

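/*
 * Keyed algorithms (e.g. "hmac(sha256)") must be keyed before use: while
 * CRYPTO_TFM_NEED_KEY is set, crypto_ahash_init() and crypto_ahash_digest()
 * fail with -ENOKEY.  A minimal sketch, with the key buffer and error
 * handling as illustrative assumptions:
 *
 *	tfm = crypto_alloc_ahash("hmac(sha256)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_ahash_setkey(tfm, key, keylen);
 *	if (err)
 *		goto out_free_tfm;
 */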
int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
			unsigned int keylen)
{
	if (likely(tfm->using_shash)) {
		struct crypto_shash *shash = ahash_to_shash(tfm);
		int err;

		err = crypto_shash_setkey(shash, key, keylen);
		if (unlikely(err)) {
			crypto_ahash_set_flags(tfm,
					       crypto_shash_get_flags(shash) &
					       CRYPTO_TFM_NEED_KEY);
			return err;
		}
	} else {
		struct ahash_alg *alg = crypto_ahash_alg(tfm);
		int err;

		err = alg->setkey(tfm, key, keylen);
		if (!err && crypto_ahash_need_fallback(tfm))
			err = crypto_ahash_setkey(crypto_ahash_fb(tfm),
						  key, keylen);
		if (unlikely(err)) {
			ahash_set_needkey(tfm, alg);
			return err;
		}
	}
	crypto_ahash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_ahash_setkey);

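/*
 * Run @op for a virtual-address request on a transform whose driver only
 * accepts scatterlists, by bouncing the operation to the fallback tfm:
 * digest and finup complete entirely on the fallback, while update exports
 * the partial state from the driver, runs on the fallback, and transfers
 * the updated state back afterwards.
 */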
static int ahash_do_req_chain(struct ahash_request *req,
			      int (*const *op)(struct ahash_request *req))
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	int err;

	if (crypto_ahash_req_virt(tfm) || !ahash_request_isvirt(req))
		return (*op)(req);

	if (crypto_ahash_statesize(tfm) > HASH_MAX_STATESIZE)
		return -ENOSYS;

	if (!crypto_ahash_need_fallback(tfm))
		return -ENOSYS;

	if (crypto_hash_no_export_core(tfm))
		return -ENOSYS;

	{
		u8 state[HASH_MAX_STATESIZE];

		if (op == &crypto_ahash_alg(tfm)->digest) {
			ahash_request_set_tfm(req, crypto_ahash_fb(tfm));
			err = crypto_ahash_digest(req);
			goto out_no_state;
		}

		err = crypto_ahash_export(req, state);
		ahash_request_set_tfm(req, crypto_ahash_fb(tfm));
		err = err ?: crypto_ahash_import(req, state);

		if (op == &crypto_ahash_alg(tfm)->finup) {
			err = err ?: crypto_ahash_finup(req);
			goto out_no_state;
		}

		err = err ?:
		      crypto_ahash_update(req) ?:
		      crypto_ahash_export(req, state);

		ahash_request_set_tfm(req, tfm);
		return err ?: crypto_ahash_import(req, state);

out_no_state:
		ahash_request_set_tfm(req, tfm);
		return err;
	}
}

int crypto_ahash_init(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_init(prepare_shash_desc(req, tfm));
	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;
	if (ahash_req_on_stack(req) && ahash_is_async(tfm))
		return -EAGAIN;
	if (crypto_ahash_block_only(tfm)) {
		u8 *buf = ahash_request_ctx(req);

		buf += crypto_ahash_reqsize(tfm) - 1;
		*buf = 0;
	}
	return crypto_ahash_alg(tfm)->init(req);
}
EXPORT_SYMBOL_GPL(crypto_ahash_init);

static void ahash_save_req(struct ahash_request *req, crypto_completion_t cplt)
{
	req->saved_complete = req->base.complete;
	req->saved_data = req->base.data;
	req->base.complete = cplt;
	req->base.data = req;
}

static void ahash_restore_req(struct ahash_request *req)
{
	req->base.complete = req->saved_complete;
	req->base.data = req->saved_data;
}

static int ahash_update_finish(struct ahash_request *req, int err)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	bool nonzero = crypto_ahash_final_nonzero(tfm);
	int bs = crypto_ahash_blocksize(tfm);
	u8 *blenp = ahash_request_ctx(req);
	int blen;
	u8 *buf;

	blenp += crypto_ahash_reqsize(tfm) - 1;
	blen = *blenp;
	buf = blenp - bs;

	if (blen) {
		req->src = req->sg_head + 1;
		if (sg_is_chain(req->src))
			req->src = sg_chain_ptr(req->src);
	}

	req->nbytes += nonzero - blen;

	blen = 0;
	if (err >= 0) {
		blen = err + nonzero;
		err = 0;
	}
	if (ahash_request_isvirt(req))
		memcpy(buf, req->svirt + req->nbytes - blen, blen);
	else
		memcpy_from_sglist(buf, req->src, req->nbytes - blen, blen);
	*blenp = blen;

	ahash_restore_req(req);

	return err;
}

static void ahash_update_done(void *data, int err)
{
	ahash_op_done(data, err, ahash_update_finish);
}

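/*
 * For CRYPTO_AHASH_ALG_BLOCK_ONLY algorithms, the last byte of the request
 * context counts how many partial-block bytes are buffered at the end of
 * that context.  Short updates only accumulate into this buffer; once a
 * full block (plus one byte if the algorithm requires a non-empty final
 * block) is available, the buffer is prepended to the source data via
 * req->sg_head and the new tail is re-buffered by ahash_update_finish().
 */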
int crypto_ahash_update(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	bool nonzero = crypto_ahash_final_nonzero(tfm);
	int bs = crypto_ahash_blocksize(tfm);
	u8 *blenp = ahash_request_ctx(req);
	int blen, err;
	u8 *buf;

	if (likely(tfm->using_shash))
		return shash_ahash_update(req, ahash_request_ctx(req));
	if (ahash_req_on_stack(req) && ahash_is_async(tfm))
		return -EAGAIN;
	if (!crypto_ahash_block_only(tfm))
		return ahash_do_req_chain(req, &crypto_ahash_alg(tfm)->update);

	blenp += crypto_ahash_reqsize(tfm) - 1;
	blen = *blenp;
	buf = blenp - bs;

	if (blen + req->nbytes < bs + nonzero) {
		if (ahash_request_isvirt(req))
			memcpy(buf + blen, req->svirt, req->nbytes);
		else
			memcpy_from_sglist(buf + blen, req->src, 0,
					   req->nbytes);

		*blenp += req->nbytes;
		return 0;
	}

	if (blen) {
		memset(req->sg_head, 0, sizeof(req->sg_head[0]));
		sg_set_buf(req->sg_head, buf, blen);
		if (req->src != req->sg_head + 1)
			sg_chain(req->sg_head, 2, req->src);
		req->src = req->sg_head;
		req->nbytes += blen;
	}
	req->nbytes -= nonzero;

	ahash_save_req(req, ahash_update_done);

	err = ahash_do_req_chain(req, &crypto_ahash_alg(tfm)->update);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

	return ahash_update_finish(req, err);
}
EXPORT_SYMBOL_GPL(crypto_ahash_update);

static int ahash_finup_finish(struct ahash_request *req, int err)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	u8 *blenp = ahash_request_ctx(req);
	int blen;

	blenp += crypto_ahash_reqsize(tfm) - 1;
	blen = *blenp;

	if (blen) {
		if (sg_is_last(req->src))
			req->src = NULL;
		else {
			req->src = req->sg_head + 1;
			if (sg_is_chain(req->src))
				req->src = sg_chain_ptr(req->src);
		}
		req->nbytes -= blen;
	}

	ahash_restore_req(req);

	return err;
}

static void ahash_finup_done(void *data, int err)
{
	ahash_op_done(data, err, ahash_finup_finish);
}

int crypto_ahash_finup(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	int bs = crypto_ahash_blocksize(tfm);
	u8 *blenp = ahash_request_ctx(req);
	int blen, err;
	u8 *buf;

	if (likely(tfm->using_shash))
		return shash_ahash_finup(req, ahash_request_ctx(req));
	if (ahash_req_on_stack(req) && ahash_is_async(tfm))
		return -EAGAIN;
	if (!crypto_ahash_alg(tfm)->finup)
		return ahash_def_finup(req);
	if (!crypto_ahash_block_only(tfm))
		return ahash_do_req_chain(req, &crypto_ahash_alg(tfm)->finup);

	blenp += crypto_ahash_reqsize(tfm) - 1;
	blen = *blenp;
	buf = blenp - bs;

	if (blen) {
		memset(req->sg_head, 0, sizeof(req->sg_head[0]));
		sg_set_buf(req->sg_head, buf, blen);
		if (!req->src)
			sg_mark_end(req->sg_head);
		else if (req->src != req->sg_head + 1)
			sg_chain(req->sg_head, 2, req->src);
		req->src = req->sg_head;
		req->nbytes += blen;
	}

	ahash_save_req(req, ahash_finup_done);

	err = ahash_do_req_chain(req, &crypto_ahash_alg(tfm)->finup);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

	return ahash_finup_finish(req, err);
}
EXPORT_SYMBOL_GPL(crypto_ahash_finup);

int crypto_ahash_digest(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return shash_ahash_digest(req, prepare_shash_desc(req, tfm));
	if (ahash_req_on_stack(req) && ahash_is_async(tfm))
		return -EAGAIN;
	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;
	return ahash_do_req_chain(req, &crypto_ahash_alg(tfm)->digest);
}
EXPORT_SYMBOL_GPL(crypto_ahash_digest);
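
/*
 * Besides one-shot digests, the init/update/finup entry points above
 * support incremental hashing.  A minimal sketch (each call may also
 * return -EINPROGRESS or -EBUSY on an asynchronous tfm, in which case the
 * caller must wait for the completion callback before proceeding):
 *
 *	err = crypto_ahash_init(req);
 *	...
 *	ahash_request_set_crypt(req, sg1, NULL, len1);
 *	err = crypto_ahash_update(req);
 *	...
 *	ahash_request_set_crypt(req, sg2, digest, len2);
 *	err = crypto_ahash_finup(req);
 */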

static void ahash_def_finup_done2(void *data, int err)
{
	struct ahash_request *areq = data;

	if (err == -EINPROGRESS)
		return;

	ahash_restore_req(areq);
	ahash_request_complete(areq, err);
}

static int ahash_def_finup_finish1(struct ahash_request *req, int err)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (err)
		goto out;

	req->base.complete = ahash_def_finup_done2;

	err = crypto_ahash_alg(tfm)->final(req);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

out:
	ahash_restore_req(req);
	return err;
}

static void ahash_def_finup_done1(void *data, int err)
{
	ahash_op_done(data, err, ahash_def_finup_finish1);
}

static int ahash_def_finup(struct ahash_request *req)
{
	int err;

	ahash_save_req(req, ahash_def_finup_done1);

	err = crypto_ahash_update(req);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

	return ahash_def_finup_finish1(req, err);
}

int crypto_ahash_export_core(struct ahash_request *req, void *out)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_export_core(ahash_request_ctx(req), out);
	return crypto_ahash_alg(tfm)->export_core(req, out);
}
EXPORT_SYMBOL_GPL(crypto_ahash_export_core);

int crypto_ahash_export(struct ahash_request *req, void *out)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_export(ahash_request_ctx(req), out);
	if (crypto_ahash_block_only(tfm)) {
		unsigned int plen = crypto_ahash_blocksize(tfm) + 1;
		unsigned int reqsize = crypto_ahash_reqsize(tfm);
		unsigned int ss = crypto_ahash_statesize(tfm);
		u8 *buf = ahash_request_ctx(req);

		memcpy(out + ss - plen, buf + reqsize - plen, plen);
	}
	return crypto_ahash_alg(tfm)->export(req, out);
}
EXPORT_SYMBOL_GPL(crypto_ahash_export);

int crypto_ahash_import_core(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_import_core(prepare_shash_desc(req, tfm),
						in);
	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;
	if (crypto_ahash_block_only(tfm)) {
		unsigned int reqsize = crypto_ahash_reqsize(tfm);
		u8 *buf = ahash_request_ctx(req);

		buf[reqsize - 1] = 0;
	}
	return crypto_ahash_alg(tfm)->import_core(req, in);
}
EXPORT_SYMBOL_GPL(crypto_ahash_import_core);

int crypto_ahash_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_import(prepare_shash_desc(req, tfm), in);
	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;
	if (crypto_ahash_block_only(tfm)) {
		unsigned int plen = crypto_ahash_blocksize(tfm) + 1;
		unsigned int reqsize = crypto_ahash_reqsize(tfm);
		unsigned int ss = crypto_ahash_statesize(tfm);
		u8 *buf = ahash_request_ctx(req);

		memcpy(buf + reqsize - plen, in + ss - plen, plen);
		if (buf[reqsize - 1] >= plen)
			return -EOVERFLOW;
	}
	return crypto_ahash_alg(tfm)->import(req, in);
}
EXPORT_SYMBOL_GPL(crypto_ahash_import);
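
/*
 * The export/import calls above allow a partially hashed state to be saved
 * and resumed later, possibly on a different request for the same tfm.
 * Sketch, with the state buffer and the second request as illustrative
 * assumptions:
 *
 *	u8 state[HASH_MAX_STATESIZE];
 *
 *	err = crypto_ahash_export(req, state);
 *	...
 *	err = crypto_ahash_import(req2, state);
 */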

static void crypto_ahash_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = crypto_ahash_alg(hash);

	if (alg->exit_tfm)
		alg->exit_tfm(hash);
	else if (tfm->__crt_alg->cra_exit)
		tfm->__crt_alg->cra_exit(tfm);

	if (crypto_ahash_need_fallback(hash))
		crypto_free_ahash(crypto_ahash_fb(hash));
}

static int crypto_ahash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = crypto_ahash_alg(hash);
	struct crypto_ahash *fb = NULL;
	int err;

	crypto_ahash_set_statesize(hash, alg->halg.statesize);
	crypto_ahash_set_reqsize(hash, crypto_tfm_alg_reqsize(tfm));

	if (tfm->__crt_alg->cra_type == &crypto_shash_type)
		return crypto_init_ahash_using_shash(tfm);

	if (crypto_ahash_need_fallback(hash)) {
		fb = crypto_alloc_ahash(crypto_ahash_alg_name(hash),
					CRYPTO_ALG_REQ_VIRT,
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_REQ_VIRT |
					CRYPTO_AHASH_ALG_NO_EXPORT_CORE);
		if (IS_ERR(fb))
			return PTR_ERR(fb);

		tfm->fb = crypto_ahash_tfm(fb);
	}

	ahash_set_needkey(hash, alg);

	tfm->exit = crypto_ahash_exit_tfm;

	if (alg->init_tfm)
		err = alg->init_tfm(hash);
	else if (tfm->__crt_alg->cra_init)
		err = tfm->__crt_alg->cra_init(tfm);
	else
		return 0;

	if (err)
		goto out_free_sync_hash;

	if (!ahash_is_async(hash) && crypto_ahash_reqsize(hash) >
				     MAX_SYNC_HASH_REQSIZE)
		goto out_exit_tfm;

	BUILD_BUG_ON(HASH_MAX_DESCSIZE > MAX_SYNC_HASH_REQSIZE);
	if (crypto_ahash_reqsize(hash) < HASH_MAX_DESCSIZE)
		crypto_ahash_set_reqsize(hash, HASH_MAX_DESCSIZE);

	return 0;

out_exit_tfm:
	if (alg->exit_tfm)
		alg->exit_tfm(hash);
	else if (tfm->__crt_alg->cra_exit)
		tfm->__crt_alg->cra_exit(tfm);
	err = -EINVAL;
out_free_sync_hash:
	crypto_free_ahash(fb);
	return err;
}

static unsigned int crypto_ahash_extsize(struct crypto_alg *alg)
{
	if (alg->cra_type == &crypto_shash_type)
		return sizeof(struct crypto_shash *);

	return crypto_alg_extsize(alg);
}

static void crypto_ahash_free_instance(struct crypto_instance *inst)
{
	struct ahash_instance *ahash = ahash_instance(inst);

	ahash->free(ahash);
}

static int __maybe_unused crypto_ahash_report(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;

	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "ahash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = __crypto_hash_alg_common(alg)->digestsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}

static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
{
	seq_printf(m, "type         : ahash\n");
	seq_printf(m, "async        : %s\n",
		   str_yes_no(alg->cra_flags & CRYPTO_ALG_ASYNC));
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n",
		   __crypto_hash_alg_common(alg)->digestsize);
}

static const struct crypto_type crypto_ahash_type = {
	.extsize = crypto_ahash_extsize,
	.init_tfm = crypto_ahash_init_tfm,
	.free = crypto_ahash_free_instance,
#ifdef CONFIG_PROC_FS
	.show = crypto_ahash_show,
#endif
#if IS_ENABLED(CONFIG_CRYPTO_USER)
	.report = crypto_ahash_report,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_AHASH_MASK,
	.type = CRYPTO_ALG_TYPE_AHASH,
	.tfmsize = offsetof(struct crypto_ahash, base),
	.algsize = offsetof(struct ahash_alg, halg.base),
};

int crypto_grab_ahash(struct crypto_ahash_spawn *spawn,
		      struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_ahash_type;
	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_ahash);

struct crypto_ahash *crypto_alloc_ahash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_ahash);

int crypto_has_ahash(const char *alg_name, u32 type, u32 mask)
{
	return crypto_type_has_alg(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_has_ahash);

bool crypto_hash_alg_has_setkey(struct hash_alg_common *halg)
{
	struct crypto_alg *alg = &halg->base;

	if (alg->cra_type == &crypto_shash_type)
		return crypto_shash_alg_has_setkey(__crypto_shash_alg(alg));

	return __crypto_ahash_alg(alg)->setkey != ahash_nosetkey;
}
EXPORT_SYMBOL_GPL(crypto_hash_alg_has_setkey);

struct crypto_ahash *crypto_clone_ahash(struct crypto_ahash *hash)
{
	struct hash_alg_common *halg = crypto_hash_alg_common(hash);
	struct crypto_tfm *tfm = crypto_ahash_tfm(hash);
	struct crypto_ahash *fb = NULL;
	struct crypto_ahash *nhash;
	struct ahash_alg *alg;
	int err;

	if (!crypto_hash_alg_has_setkey(halg)) {
		tfm = crypto_tfm_get(tfm);
		if (IS_ERR(tfm))
			return ERR_CAST(tfm);

		return hash;
	}

	nhash = crypto_clone_tfm(&crypto_ahash_type, tfm);

	if (IS_ERR(nhash))
		return nhash;

	nhash->reqsize = hash->reqsize;
	nhash->statesize = hash->statesize;

	if (likely(hash->using_shash)) {
		struct crypto_shash **nctx = crypto_ahash_ctx(nhash);
		struct crypto_shash *shash;

		shash = crypto_clone_shash(ahash_to_shash(hash));
		if (IS_ERR(shash)) {
			err = PTR_ERR(shash);
			goto out_free_nhash;
		}
		crypto_ahash_tfm(nhash)->exit = crypto_exit_ahash_using_shash;
		nhash->using_shash = true;
		*nctx = shash;
		return nhash;
	}

	if (crypto_ahash_need_fallback(hash)) {
		fb = crypto_clone_ahash(crypto_ahash_fb(hash));
		err = PTR_ERR(fb);
		if (IS_ERR(fb))
			goto out_free_nhash;

		crypto_ahash_tfm(nhash)->fb = crypto_ahash_tfm(fb);
	}

	err = -ENOSYS;
	alg = crypto_ahash_alg(hash);
	if (!alg->clone_tfm)
		goto out_free_fb;

	err = alg->clone_tfm(nhash, hash);
	if (err)
		goto out_free_fb;

	crypto_ahash_tfm(nhash)->exit = crypto_ahash_exit_tfm;

	return nhash;

out_free_fb:
	crypto_free_ahash(fb);
out_free_nhash:
	crypto_free_ahash(nhash);
	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_clone_ahash);

static int ahash_default_export_core(struct ahash_request *req, void *out)
{
	return -ENOSYS;
}

static int ahash_default_import_core(struct ahash_request *req, const void *in)
{
	return -ENOSYS;
}

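/*
 * Validate and massage an ahash algorithm before registration: the state
 * size must be non-zero, a non-zero request size must be at least the state
 * size, synchronous algorithms must fit the on-stack request limit, and an
 * algorithm may not both require and forbid a fallback.  Block-only
 * algorithms additionally get room for one partial block (plus the byte
 * counting it) appended to their request size and export state size.
 */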
static int ahash_prepare_alg(struct ahash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	if (alg->halg.statesize == 0)
		return -EINVAL;

	if (base->cra_reqsize && base->cra_reqsize < alg->halg.statesize)
		return -EINVAL;

	if (!(base->cra_flags & CRYPTO_ALG_ASYNC) &&
	    base->cra_reqsize > MAX_SYNC_HASH_REQSIZE)
		return -EINVAL;

	if (base->cra_flags & CRYPTO_ALG_NEED_FALLBACK &&
	    base->cra_flags & CRYPTO_ALG_NO_FALLBACK)
		return -EINVAL;

	err = hash_prepare_alg(&alg->halg);
	if (err)
		return err;

	base->cra_type = &crypto_ahash_type;
	base->cra_flags |= CRYPTO_ALG_TYPE_AHASH;

	if ((base->cra_flags ^ CRYPTO_ALG_REQ_VIRT) &
	    (CRYPTO_ALG_ASYNC | CRYPTO_ALG_REQ_VIRT) &&
	    !(base->cra_flags & CRYPTO_ALG_NO_FALLBACK))
		base->cra_flags |= CRYPTO_ALG_NEED_FALLBACK;

	if (!alg->setkey)
		alg->setkey = ahash_nosetkey;

	if (base->cra_flags & CRYPTO_AHASH_ALG_BLOCK_ONLY) {
		BUILD_BUG_ON(MAX_ALGAPI_BLOCKSIZE >= 256);
		if (!alg->finup)
			return -EINVAL;

		base->cra_reqsize += base->cra_blocksize + 1;
		alg->halg.statesize += base->cra_blocksize + 1;
		alg->export_core = alg->export;
		alg->import_core = alg->import;
	} else if (!alg->export_core || !alg->import_core) {
		alg->export_core = ahash_default_export_core;
		alg->import_core = ahash_default_import_core;
		base->cra_flags |= CRYPTO_AHASH_ALG_NO_EXPORT_CORE;
	}

	return 0;
}

int crypto_register_ahash(struct ahash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	err = ahash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_ahash);

void crypto_unregister_ahash(struct ahash_alg *alg)
{
	crypto_unregister_alg(&alg->halg.base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_ahash);

int crypto_register_ahashes(struct ahash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_ahash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_ahash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_ahashes);

void crypto_unregister_ahashes(struct ahash_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_ahash(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_ahashes);

int ahash_register_instance(struct crypto_template *tmpl,
			    struct ahash_instance *inst)
{
	int err;

	if (WARN_ON(!inst->free))
		return -EINVAL;

	err = ahash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, ahash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(ahash_register_instance);

void ahash_request_free(struct ahash_request *req)
{
	if (unlikely(!req))
		return;

	if (!ahash_req_on_stack(req)) {
		kfree(req);
		return;
	}

	ahash_request_zero(req);
}
EXPORT_SYMBOL_GPL(ahash_request_free);

int crypto_hash_digest(struct crypto_ahash *tfm, const u8 *data,
		       unsigned int len, u8 *out)
{
	HASH_REQUEST_ON_STACK(req, crypto_ahash_fb(tfm));
	int err;

	ahash_request_set_callback(req, 0, NULL, NULL);
	ahash_request_set_virt(req, data, out, len);
	err = crypto_ahash_digest(req);

	ahash_request_zero(req);

	return err;
}
EXPORT_SYMBOL_GPL(crypto_hash_digest);
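
/*
 * crypto_hash_digest() above is the convenience form for virtually
 * addressed data, using an on-stack request on the tfm's fallback, e.g.:
 *
 *	err = crypto_hash_digest(tfm, data, len, out);
 */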

void ahash_free_singlespawn_instance(struct ahash_instance *inst)
{
	crypto_drop_spawn(ahash_instance_ctx(inst));
	kfree(inst);
}
EXPORT_SYMBOL_GPL(ahash_free_singlespawn_instance);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Asynchronous cryptographic hash type");