// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Asynchronous Cryptographic Hash operations.
 *
 * This is the implementation of the ahash (asynchronous hash) API. It differs
 * from shash (synchronous hash) in that ahash supports asynchronous operations,
 * and it hashes data from scatterlists instead of virtually addressed buffers.
 *
 * The ahash API provides access to both ahash and shash algorithms. The shash
 * API only provides access to shash algorithms.
 *
 * Copyright (c) 2008 Loc Ho <lho@amcc.com>
 */
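
/*
 * Example (not part of this file): a minimal sketch of how a caller might
 * compute a digest through the ahash API. The buffer name, length and the
 * availability of "sha256" are assumptions; error handling is abbreviated.
 *
 *	struct crypto_ahash *tfm;
 *	struct ahash_request *req;
 *	struct scatterlist sg;
 *	DECLARE_CRYPTO_WAIT(wait);
 *	u8 digest[SHA256_DIGEST_SIZE];
 *	int err;
 *
 *	tfm = crypto_alloc_ahash("sha256", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	req = ahash_request_alloc(tfm, GFP_KERNEL);
 *	if (!req) {
 *		crypto_free_ahash(tfm);
 *		return -ENOMEM;
 *	}
 *
 *	sg_init_one(&sg, buf, len);
 *	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				   crypto_req_done, &wait);
 *	ahash_request_set_crypt(req, &sg, digest, len);
 *
 *	// crypto_wait_req() turns an asynchronous completion
 *	// (-EINPROGRESS/-EBUSY) into a synchronous return value.
 *	err = crypto_wait_req(crypto_ahash_digest(req), &wait);
 *
 *	ahash_request_free(req);
 *	crypto_free_ahash(tfm);
 */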

#include <crypto/scatterwalk.h>
#include <linux/cryptouser.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/string.h>
#include <net/netlink.h>

#include "hash.h"

#define CRYPTO_ALG_TYPE_AHASH_MASK	0x0000000e

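/*
 * State for walking an ahash request's source scatterlist one mapped page
 * at a time. ->data points into the currently kmapped page, ->entrylen is
 * what is left of the current scatterlist entry, and ->total is what is
 * left of the whole request.
 */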
struct crypto_hash_walk {
	char *data;

	unsigned int offset;
	unsigned int flags;

	struct page *pg;
	unsigned int entrylen;

	unsigned int total;
	struct scatterlist *sg;
};

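/*
 * Map the current page and return the number of bytes that can be hashed
 * from it in this step: the rest of the page or the rest of the current
 * scatterlist entry, whichever is smaller.
 */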
static int hash_walk_next(struct crypto_hash_walk *walk)
{
	unsigned int offset = walk->offset;
	unsigned int nbytes = min(walk->entrylen,
				  ((unsigned int)(PAGE_SIZE)) - offset);

	walk->data = kmap_local_page(walk->pg);
	walk->data += offset;
	walk->entrylen -= nbytes;
	return nbytes;
}

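/*
 * Advance to the next scatterlist entry. The entry is clamped to the
 * number of bytes still owed to the request, and may start at a non-zero
 * page offset within the entry's buffer.
 */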
static int hash_walk_new_entry(struct crypto_hash_walk *walk)
{
	struct scatterlist *sg;

	sg = walk->sg;
	walk->offset = sg->offset;
	walk->pg = sg_page(walk->sg) + (walk->offset >> PAGE_SHIFT);
	walk->offset = offset_in_page(walk->offset);
	walk->entrylen = sg->length;

	if (walk->entrylen > walk->total)
		walk->entrylen = walk->total;
	walk->total -= walk->entrylen;

	return hash_walk_next(walk);
}

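/*
 * Begin walking the request's source data. Returns the number of bytes
 * mapped for the first step, or 0 for an empty request.
 */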
static int crypto_hash_walk_first(struct ahash_request *req,
				  struct crypto_hash_walk *walk)
{
	walk->total = req->nbytes;

	if (!walk->total) {
		walk->entrylen = 0;
		return 0;
	}

	walk->sg = req->src;
	walk->flags = req->base.flags;

	return hash_walk_new_entry(walk);
}

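/*
 * Finish one step of the walk: unmap the current page and, unless an error
 * was passed in or the walk is complete, map the next page (or the next
 * scatterlist entry) and return the number of bytes available from it.
 */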
static int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err)
{
	walk->data -= walk->offset;

	kunmap_local(walk->data);
	crypto_yield(walk->flags);

	if (err)
		return err;

	if (walk->entrylen) {
		walk->offset = 0;
		walk->pg++;
		return hash_walk_next(walk);
	}

	if (!walk->total)
		return 0;

	walk->sg = sg_next(walk->sg);

	return hash_walk_new_entry(walk);
}

static inline int crypto_hash_walk_last(struct crypto_hash_walk *walk)
{
	return !(walk->entrylen | walk->total);
}
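
/*
 * The walk helpers above are consumed in a simple loop; a sketch of the
 * intended calling pattern (shash_ahash_update() below is the canonical
 * in-tree user; process() here is a hypothetical consumer):
 *
 *	int nbytes;
 *
 *	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
 *	     nbytes = crypto_hash_walk_done(&walk, nbytes))
 *		nbytes = process(walk.data, nbytes);
 */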

/*
 * For an ahash tfm that is using an shash algorithm (instead of an ahash
 * algorithm), this returns the underlying shash tfm.
 */
static inline struct crypto_shash *ahash_to_shash(struct crypto_ahash *tfm)
{
	return *(struct crypto_shash **)crypto_ahash_ctx(tfm);
}

static inline struct shash_desc *prepare_shash_desc(struct ahash_request *req,
						    struct crypto_ahash *tfm)
{
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = ahash_to_shash(tfm);
	return desc;
}

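/*
 * Feed the request's scatterlist data into an shash update, one mapped
 * page at a time.
 */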
int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

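/*
 * Like shash_ahash_update(), but the final chunk is passed to
 * crypto_shash_finup() so the digest lands in req->result without a
 * separate final step.
 */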
int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

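/*
 * Digest in one call. If the data is contiguous within a single page it
 * is hashed directly; otherwise fall back to init + finup over the
 * scatterlist walk.
 */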
int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	int err;

	if (nbytes &&
	    (sg = req->src, offset = sg->offset,
	     nbytes <= min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
		void *data;

		data = kmap_local_page(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		kunmap_local(data);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

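/*
 * When an ahash tfm is instantiated on top of an shash algorithm, the tfm
 * context holds a single pointer to the underlying shash tfm, and the
 * request context is sized to hold its shash_desc.
 */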
static void crypto_exit_ahash_using_shash(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

static int crypto_init_ahash_using_shash(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	crt->using_shash = true;
	*ctx = shash;
	tfm->exit = crypto_exit_ahash_using_shash;

	crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
				    CRYPTO_TFM_NEED_KEY);
	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}

static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
			  unsigned int keylen)
{
	return -ENOSYS;
}

static void ahash_set_needkey(struct crypto_ahash *tfm, struct ahash_alg *alg)
{
	if (alg->setkey != ahash_nosetkey &&
	    !(alg->halg.base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
		crypto_ahash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}

int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
			unsigned int keylen)
{
	if (likely(tfm->using_shash)) {
		struct crypto_shash *shash = ahash_to_shash(tfm);
		int err;

		err = crypto_shash_setkey(shash, key, keylen);
		if (unlikely(err)) {
			crypto_ahash_set_flags(tfm,
					       crypto_shash_get_flags(shash) &
					       CRYPTO_TFM_NEED_KEY);
			return err;
		}
	} else {
		struct ahash_alg *alg = crypto_ahash_alg(tfm);
		int err;

		err = alg->setkey(tfm, key, keylen);
		if (unlikely(err)) {
			ahash_set_needkey(tfm, alg);
			return err;
		}
	}
	crypto_ahash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_ahash_setkey);

int crypto_ahash_init(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_init(prepare_shash_desc(req, tfm));
	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;
	return crypto_ahash_alg(tfm)->init(req);
}
EXPORT_SYMBOL_GPL(crypto_ahash_init);

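/*
 * Allocate a subrequest used by the default finup path. The subrequest,
 * its request context and a buffer for the digest are carved out of one
 * allocation:
 *
 *	[ struct ahash_request | reqsize (ctx-aligned) | digestsize ]
 *
 * If has_state is set, the hash state of @req is transplanted into the
 * subrequest via export/import.
 */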
static int ahash_save_req(struct ahash_request *req, crypto_completion_t cplt,
			  bool has_state)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	unsigned int ds = crypto_ahash_digestsize(tfm);
	struct ahash_request *subreq;
	unsigned int subreq_size;
	unsigned int reqsize;
	u8 *result;
	gfp_t gfp;
	u32 flags;

	subreq_size = sizeof(*subreq);
	reqsize = crypto_ahash_reqsize(tfm);
	reqsize = ALIGN(reqsize, crypto_tfm_ctx_alignment());
	subreq_size += reqsize;
	subreq_size += ds;

	flags = ahash_request_flags(req);
	gfp = (flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? GFP_KERNEL : GFP_ATOMIC;
	subreq = kmalloc(subreq_size, gfp);
	if (!subreq)
		return -ENOMEM;

	ahash_request_set_tfm(subreq, tfm);
	ahash_request_set_callback(subreq, flags, cplt, req);

	result = (u8 *)(subreq + 1) + reqsize;

	ahash_request_set_crypt(subreq, req->src, result, req->nbytes);

	if (has_state) {
		void *state;

		state = kmalloc(crypto_ahash_statesize(tfm), gfp);
		if (!state) {
			kfree(subreq);
			return -ENOMEM;
		}

		crypto_ahash_export(req, state);
		crypto_ahash_import(subreq, state);
		kfree_sensitive(state);
	}

	req->priv = subreq;

	return 0;
}

static void ahash_restore_req(struct ahash_request *req, int err)
{
	struct ahash_request *subreq = req->priv;

	if (!err)
		memcpy(req->result, subreq->result,
		       crypto_ahash_digestsize(crypto_ahash_reqtfm(req)));

	req->priv = NULL;

	kfree_sensitive(subreq);
}

int crypto_ahash_update(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return shash_ahash_update(req, ahash_request_ctx(req));

	return crypto_ahash_alg(tfm)->update(req);
}
EXPORT_SYMBOL_GPL(crypto_ahash_update);

int crypto_ahash_final(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_final(ahash_request_ctx(req), req->result);

	return crypto_ahash_alg(tfm)->final(req);
}
EXPORT_SYMBOL_GPL(crypto_ahash_final);

int crypto_ahash_finup(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return shash_ahash_finup(req, ahash_request_ctx(req));

	return crypto_ahash_alg(tfm)->finup(req);
}
EXPORT_SYMBOL_GPL(crypto_ahash_finup);

int crypto_ahash_digest(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return shash_ahash_digest(req, prepare_shash_desc(req, tfm));

	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;

	return crypto_ahash_alg(tfm)->digest(req);
}
EXPORT_SYMBOL_GPL(crypto_ahash_digest);

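/*
 * Default finup implementation for drivers that only provide update() and
 * final(): run update on a saved subrequest, then final, copying the
 * digest back into the original request. The done1/finish1/done2 chain
 * exists so that each of the two steps can complete asynchronously.
 */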
static void ahash_def_finup_done2(void *data, int err)
{
	struct ahash_request *areq = data;

	if (err == -EINPROGRESS)
		return;

	ahash_restore_req(areq, err);

	ahash_request_complete(areq, err);
}

static int ahash_def_finup_finish1(struct ahash_request *req, int err)
{
	struct ahash_request *subreq = req->priv;

	if (err)
		goto out;

	subreq->base.complete = ahash_def_finup_done2;

	err = crypto_ahash_alg(crypto_ahash_reqtfm(req))->final(subreq);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

out:
	ahash_restore_req(req, err);
	return err;
}

static void ahash_def_finup_done1(void *data, int err)
{
	struct ahash_request *areq = data;
	struct ahash_request *subreq;

	if (err == -EINPROGRESS)
		goto out;

	subreq = areq->priv;
	subreq->base.flags &= CRYPTO_TFM_REQ_MAY_BACKLOG;

	err = ahash_def_finup_finish1(areq, err);
	if (err == -EINPROGRESS || err == -EBUSY)
		return;

out:
	ahash_request_complete(areq, err);
}

static int ahash_def_finup(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	int err;

	err = ahash_save_req(req, ahash_def_finup_done1, true);
	if (err)
		return err;

	err = crypto_ahash_alg(tfm)->update(req->priv);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

	return ahash_def_finup_finish1(req, err);
}

int crypto_ahash_export(struct ahash_request *req, void *out)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_export(ahash_request_ctx(req), out);
	return crypto_ahash_alg(tfm)->export(req, out);
}
EXPORT_SYMBOL_GPL(crypto_ahash_export);

int crypto_ahash_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_import(prepare_shash_desc(req, tfm), in);
	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;
	return crypto_ahash_alg(tfm)->import(req, in);
}
EXPORT_SYMBOL_GPL(crypto_ahash_import);

static void crypto_ahash_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = crypto_ahash_alg(hash);

	alg->exit_tfm(hash);
}

static int crypto_ahash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = crypto_ahash_alg(hash);

	crypto_ahash_set_statesize(hash, alg->halg.statesize);

	if (tfm->__crt_alg->cra_type == &crypto_shash_type)
		return crypto_init_ahash_using_shash(tfm);

	ahash_set_needkey(hash, alg);

	if (alg->exit_tfm)
		tfm->exit = crypto_ahash_exit_tfm;

	return alg->init_tfm ? alg->init_tfm(hash) : 0;
}

static unsigned int crypto_ahash_extsize(struct crypto_alg *alg)
{
	if (alg->cra_type == &crypto_shash_type)
		return sizeof(struct crypto_shash *);

	return crypto_alg_extsize(alg);
}

static void crypto_ahash_free_instance(struct crypto_instance *inst)
{
	struct ahash_instance *ahash = ahash_instance(inst);

	ahash->free(ahash);
}

static int __maybe_unused crypto_ahash_report(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;

	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "ahash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = __crypto_hash_alg_common(alg)->digestsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}

static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
{
	seq_printf(m, "type         : ahash\n");
	seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
					     "yes" : "no");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n",
		   __crypto_hash_alg_common(alg)->digestsize);
}

static const struct crypto_type crypto_ahash_type = {
	.extsize = crypto_ahash_extsize,
	.init_tfm = crypto_ahash_init_tfm,
	.free = crypto_ahash_free_instance,
#ifdef CONFIG_PROC_FS
	.show = crypto_ahash_show,
#endif
#if IS_ENABLED(CONFIG_CRYPTO_USER)
	.report = crypto_ahash_report,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_AHASH_MASK,
	.type = CRYPTO_ALG_TYPE_AHASH,
	.tfmsize = offsetof(struct crypto_ahash, base),
};

int crypto_grab_ahash(struct crypto_ahash_spawn *spawn,
		      struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_ahash_type;
	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_ahash);

struct crypto_ahash *crypto_alloc_ahash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_ahash);

int crypto_has_ahash(const char *alg_name, u32 type, u32 mask)
{
	return crypto_type_has_alg(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_has_ahash);

static bool crypto_hash_alg_has_setkey(struct hash_alg_common *halg)
{
	struct crypto_alg *alg = &halg->base;

	if (alg->cra_type == &crypto_shash_type)
		return crypto_shash_alg_has_setkey(__crypto_shash_alg(alg));

	return __crypto_ahash_alg(alg)->setkey != ahash_nosetkey;
}

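/*
 * Clone a hash transform. Keyless tfms carry no per-tfm secret state and
 * can simply be shared by taking a reference; keyed tfms need a real copy,
 * either of the wrapped shash or via the algorithm's clone_tfm() hook.
 */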
struct crypto_ahash *crypto_clone_ahash(struct crypto_ahash *hash)
{
	struct hash_alg_common *halg = crypto_hash_alg_common(hash);
	struct crypto_tfm *tfm = crypto_ahash_tfm(hash);
	struct crypto_ahash *nhash;
	struct ahash_alg *alg;
	int err;

	if (!crypto_hash_alg_has_setkey(halg)) {
		tfm = crypto_tfm_get(tfm);
		if (IS_ERR(tfm))
			return ERR_CAST(tfm);

		return hash;
	}

	nhash = crypto_clone_tfm(&crypto_ahash_type, tfm);

	if (IS_ERR(nhash))
		return nhash;

	nhash->reqsize = hash->reqsize;
	nhash->statesize = hash->statesize;

	if (likely(hash->using_shash)) {
		struct crypto_shash **nctx = crypto_ahash_ctx(nhash);
		struct crypto_shash *shash;

		shash = crypto_clone_shash(ahash_to_shash(hash));
		if (IS_ERR(shash)) {
			err = PTR_ERR(shash);
			goto out_free_nhash;
		}
		nhash->using_shash = true;
		*nctx = shash;
		return nhash;
	}

	err = -ENOSYS;
	alg = crypto_ahash_alg(hash);
	if (!alg->clone_tfm)
		goto out_free_nhash;

	err = alg->clone_tfm(nhash, hash);
	if (err)
		goto out_free_nhash;

	return nhash;

out_free_nhash:
	crypto_free_ahash(nhash);
	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_clone_ahash);

static int ahash_prepare_alg(struct ahash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	if (alg->halg.statesize == 0)
		return -EINVAL;

	err = hash_prepare_alg(&alg->halg);
	if (err)
		return err;

	base->cra_type = &crypto_ahash_type;
	base->cra_flags |= CRYPTO_ALG_TYPE_AHASH;

	if (!alg->finup)
		alg->finup = ahash_def_finup;
	if (!alg->setkey)
		alg->setkey = ahash_nosetkey;

	return 0;
}

int crypto_register_ahash(struct ahash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	err = ahash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_ahash);
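
/*
 * Example (not part of this file): a sketch of how a driver might fill in
 * a struct ahash_alg and register it. The myhash_* callbacks, context and
 * state types, and the sizes are placeholders, not a real driver.
 *
 *	static struct ahash_alg myhash_alg = {
 *		.init	= myhash_init,
 *		.update	= myhash_update,
 *		.final	= myhash_final,
 *		.export	= myhash_export,
 *		.import	= myhash_import,
 *		.halg	= {
 *			.digestsize	= 32,
 *			.statesize	= sizeof(struct myhash_state),
 *			.base		= {
 *				.cra_name	= "myhash",
 *				.cra_driver_name = "myhash-driver",
 *				.cra_priority	= 300,
 *				.cra_flags	= CRYPTO_ALG_ASYNC,
 *				.cra_blocksize	= 64,
 *				.cra_ctxsize	= sizeof(struct myhash_ctx),
 *				.cra_module	= THIS_MODULE,
 *			},
 *		},
 *	};
 *
 *	// .finup and .setkey may be left NULL; ahash_prepare_alg() fills
 *	// in ahash_def_finup() and ahash_nosetkey() as defaults.
 *	err = crypto_register_ahash(&myhash_alg);
 */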

void crypto_unregister_ahash(struct ahash_alg *alg)
{
	crypto_unregister_alg(&alg->halg.base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_ahash);

int crypto_register_ahashes(struct ahash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_ahash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_ahash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_ahashes);

void crypto_unregister_ahashes(struct ahash_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_ahash(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_ahashes);

int ahash_register_instance(struct crypto_template *tmpl,
			    struct ahash_instance *inst)
{
	int err;

	if (WARN_ON(!inst->free))
		return -EINVAL;

	err = ahash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, ahash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(ahash_register_instance);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Asynchronous cryptographic hash type");