// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/algapi.h>
#include <crypto/internal/simd.h>
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/fips.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/string.h>

#include "internal.h"

static LIST_HEAD(crypto_template_list);

#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
DEFINE_PER_CPU(bool, crypto_simd_disabled_for_test);
EXPORT_PER_CPU_SYMBOL_GPL(crypto_simd_disabled_for_test);
#endif

static inline void crypto_check_module_sig(struct module *mod)
{
	if (fips_enabled && mod && !module_sig_ok(mod))
		panic("Module %s signature verification failed in FIPS mode\n",
		      module_name(mod));
}

static int crypto_check_alg(struct crypto_alg *alg)
{
	crypto_check_module_sig(alg->cra_module);

	if (!alg->cra_name[0] || !alg->cra_driver_name[0])
		return -EINVAL;

	if (alg->cra_alignmask & (alg->cra_alignmask + 1))
		return -EINVAL;

	/* General maximums for all algs. */
	if (alg->cra_alignmask > MAX_ALGAPI_ALIGNMASK)
		return -EINVAL;

	if (alg->cra_blocksize > MAX_ALGAPI_BLOCKSIZE)
		return -EINVAL;

	/* Lower maximums for specific alg types. */
	if (!alg->cra_type && (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
			       CRYPTO_ALG_TYPE_CIPHER) {
		if (alg->cra_alignmask > MAX_CIPHER_ALIGNMASK)
			return -EINVAL;

		if (alg->cra_blocksize > MAX_CIPHER_BLOCKSIZE)
			return -EINVAL;
	}

	if (alg->cra_priority < 0)
		return -EINVAL;

	refcount_set(&alg->cra_refcnt, 1);

	return 0;
}
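
/*
 * Illustrative sketch (not part of the kernel source): a minimal
 * crypto_alg that would pass crypto_check_alg() above.  All names and
 * values below are hypothetical; a real driver supplies its own and
 * fills in the type-specific fields as well.
 *
 *	static struct crypto_alg example_alg = {
 *		.cra_name		= "aes",	   // generic name, non-empty
 *		.cra_driver_name	= "aes-example",   // unique driver name, non-empty
 *		.cra_priority		= 100,		   // must not be negative
 *		.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
 *		.cra_blocksize		= 16,		   // <= MAX_CIPHER_BLOCKSIZE for ciphers
 *		.cra_alignmask		= 3,		   // must be of the form 2^n - 1
 *		.cra_module		= THIS_MODULE,
 *	};
 */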

static void crypto_free_instance(struct crypto_instance *inst)
{
	inst->alg.cra_type->free(inst);
}

static void crypto_destroy_instance(struct crypto_alg *alg)
{
	struct crypto_instance *inst = (void *)alg;
	struct crypto_template *tmpl = inst->tmpl;

	crypto_free_instance(inst);
	crypto_tmpl_put(tmpl);
}

/*
 * This function adds a spawn to the list secondary_spawns which
 * will be used at the end of crypto_remove_spawns to unregister
 * instances, unless the spawn happens to be one that is depended
 * on by the new algorithm (nalg in crypto_remove_spawns).
 *
 * This function is also responsible for resurrecting any algorithms
 * in the dependency chain of nalg by unsetting n->dead.
 */
static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
					    struct list_head *stack,
					    struct list_head *top,
					    struct list_head *secondary_spawns)
{
	struct crypto_spawn *spawn, *n;

	spawn = list_first_entry_or_null(stack, struct crypto_spawn, list);
	if (!spawn)
		return NULL;

	n = list_prev_entry(spawn, list);
	list_move(&spawn->list, secondary_spawns);

	if (list_is_last(&n->list, stack))
		return top;

	n = list_next_entry(n, list);
	if (!spawn->dead)
		n->dead = false;

	return &n->inst->alg.cra_users;
}

static void crypto_remove_instance(struct crypto_instance *inst,
				   struct list_head *list)
{
	struct crypto_template *tmpl = inst->tmpl;

	if (crypto_is_dead(&inst->alg))
		return;

	inst->alg.cra_flags |= CRYPTO_ALG_DEAD;

	if (!tmpl || !crypto_tmpl_get(tmpl))
		return;

	list_move(&inst->alg.cra_list, list);
	hlist_del(&inst->list);
	inst->alg.cra_destroy = crypto_destroy_instance;

	BUG_ON(!list_empty(&inst->alg.cra_users));
}

/*
 * Given an algorithm alg, remove all algorithms that depend on it
 * through spawns. If nalg is not null, then exempt any algorithms
 * that are depended on by nalg. This is useful when nalg itself
 * depends on alg.
 */
void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
			  struct crypto_alg *nalg)
{
	u32 new_type = (nalg ?: alg)->cra_flags;
	struct crypto_spawn *spawn, *n;
	LIST_HEAD(secondary_spawns);
	struct list_head *spawns;
	LIST_HEAD(stack);
	LIST_HEAD(top);

	spawns = &alg->cra_users;
	list_for_each_entry_safe(spawn, n, spawns, list) {
		if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
			continue;

		list_move(&spawn->list, &top);
	}

	/*
	 * Perform a depth-first walk starting from alg through
	 * the cra_users tree. The list stack records the path
	 * from alg to the current spawn.
	 */
	spawns = &top;
	do {
		while (!list_empty(spawns)) {
			struct crypto_instance *inst;

			spawn = list_first_entry(spawns, struct crypto_spawn,
						 list);
			inst = spawn->inst;

			list_move(&spawn->list, &stack);
			spawn->dead = !spawn->registered || &inst->alg != nalg;

			if (!spawn->registered)
				break;

			BUG_ON(&inst->alg == alg);

			if (&inst->alg == nalg)
				break;

			spawns = &inst->alg.cra_users;

			/*
			 * We may encounter an unregistered instance here, even
			 * when spawn->registered is true, since an instance's
			 * spawns are set up prior to the instance being
			 * registered and that registration may then fail.  An
			 * unregistered instance will have NULL
			 * ->cra_users.next, since ->cra_users isn't properly
			 * initialized until registration.  But an unregistered
			 * instance cannot have any users, so treat it the same
			 * as ->cra_users being empty.
			 */
			if (spawns->next == NULL)
				break;
		}
	} while ((spawns = crypto_more_spawns(alg, &stack, &top,
					      &secondary_spawns)));

	/*
	 * Remove all instances that are marked as dead. Also
	 * complete the resurrection of the others by moving them
	 * back to the cra_users list.
	 */
	list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
		if (!spawn->dead)
			list_move(&spawn->list, &spawn->alg->cra_users);
		else if (spawn->registered)
			crypto_remove_instance(spawn->inst, list);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_spawns);

static struct crypto_larval *crypto_alloc_test_larval(struct crypto_alg *alg)
{
	struct crypto_larval *larval;

	if (!IS_ENABLED(CONFIG_CRYPTO_MANAGER))
		return NULL;

	larval = crypto_larval_alloc(alg->cra_name,
				     alg->cra_flags | CRYPTO_ALG_TESTED, 0);
	if (IS_ERR(larval))
		return larval;

	larval->adult = crypto_mod_get(alg);
	if (!larval->adult) {
		kfree(larval);
		return ERR_PTR(-ENOENT);
	}

	refcount_set(&larval->alg.cra_refcnt, 1);
	memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
	       CRYPTO_MAX_ALG_NAME);
	larval->alg.cra_priority = alg->cra_priority;

	return larval;
}

static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_alg *q;
	struct crypto_larval *larval;
	int ret = -EAGAIN;

	if (crypto_is_dead(alg))
		goto err;

	INIT_LIST_HEAD(&alg->cra_users);

	/* No cheating! */
	alg->cra_flags &= ~CRYPTO_ALG_TESTED;

	ret = -EEXIST;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			goto err;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
				goto err;
			continue;
		}

		if (!strcmp(q->cra_driver_name, alg->cra_name) ||
		    !strcmp(q->cra_name, alg->cra_driver_name))
			goto err;
	}

	larval = crypto_alloc_test_larval(alg);
	if (IS_ERR(larval))
		goto out;

	list_add(&alg->cra_list, &crypto_alg_list);

	if (larval)
		list_add(&larval->alg.cra_list, &crypto_alg_list);
	else
		alg->cra_flags |= CRYPTO_ALG_TESTED;

	crypto_stats_init(alg);

out:
	return larval;

err:
	larval = ERR_PTR(ret);
	goto out;
}

void crypto_alg_tested(const char *name, int err)
{
	struct crypto_larval *test;
	struct crypto_alg *alg;
	struct crypto_alg *q;
	LIST_HEAD(list);
	bool best;

	down_write(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		test = (struct crypto_larval *)q;

		if (!strcmp(q->cra_driver_name, name))
			goto found;
	}

	pr_err("alg: Unexpected test result for %s: %d\n", name, err);
	goto unlock;

found:
	q->cra_flags |= CRYPTO_ALG_DEAD;
	alg = test->adult;

	if (list_empty(&alg->cra_list))
		goto complete;

	if (err == -ECANCELED)
		alg->cra_flags |= CRYPTO_ALG_FIPS_INTERNAL;
	else if (err)
		goto complete;
	else
		alg->cra_flags &= ~CRYPTO_ALG_FIPS_INTERNAL;

	alg->cra_flags |= CRYPTO_ALG_TESTED;

	/* Only satisfy larval waiters if we are the best. */
	best = true;
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (q->cra_priority > alg->cra_priority) {
			best = false;
			break;
		}
	}

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			continue;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			struct crypto_larval *larval = (void *)q;

			/*
			 * Check to see if either our generic name or
			 * specific name can satisfy the name requested
			 * by the larval entry q.
			 */
			if (strcmp(alg->cra_name, q->cra_name) &&
			    strcmp(alg->cra_driver_name, q->cra_name))
				continue;

			if (larval->adult)
				continue;
			if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
				continue;

			if (best && crypto_mod_get(alg))
				larval->adult = alg;
			else
				larval->adult = ERR_PTR(-EAGAIN);

			continue;
		}

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
		    q->cra_priority > alg->cra_priority)
			continue;

		crypto_remove_spawns(q, &list, alg);
	}

complete:
	complete_all(&test->completion);

unlock:
	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_alg_tested);

void crypto_remove_final(struct list_head *list)
{
	struct crypto_alg *alg;
	struct crypto_alg *n;

	list_for_each_entry_safe(alg, n, list, cra_list) {
		list_del_init(&alg->cra_list);
		crypto_alg_put(alg);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_final);

int crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_larval *larval;
	bool test_started;
	int err;

	alg->cra_flags &= ~CRYPTO_ALG_DEAD;
	err = crypto_check_alg(alg);
	if (err)
		return err;

	down_write(&crypto_alg_sem);
	larval = __crypto_register_alg(alg);
	test_started = static_key_enabled(&crypto_boot_test_finished);
	if (!IS_ERR_OR_NULL(larval))
		larval->test_started = test_started;
	up_write(&crypto_alg_sem);

	if (IS_ERR_OR_NULL(larval))
		return PTR_ERR(larval);

	if (test_started)
		crypto_wait_for_test(larval);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_register_alg);

static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
{
	if (unlikely(list_empty(&alg->cra_list)))
		return -ENOENT;

	alg->cra_flags |= CRYPTO_ALG_DEAD;

	list_del_init(&alg->cra_list);
	crypto_remove_spawns(alg, list, NULL);

	return 0;
}

void crypto_unregister_alg(struct crypto_alg *alg)
{
	int ret;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	ret = crypto_remove_alg(alg, &list);
	up_write(&crypto_alg_sem);

	if (WARN(ret, "Algorithm %s is not registered", alg->cra_driver_name))
		return;

	BUG_ON(refcount_read(&alg->cra_refcnt) != 1);
	if (alg->cra_destroy)
		alg->cra_destroy(alg);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_alg);

int crypto_register_algs(struct crypto_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_alg(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_alg(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_algs);

void crypto_unregister_algs(struct crypto_alg *algs, int count)
{
	int i;

	for (i = 0; i < count; i++)
		crypto_unregister_alg(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_algs);
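
/*
 * Typical usage sketch (illustrative; "my_algs" and the init/exit
 * functions are hypothetical): drivers usually register a static array
 * of algorithms on module init and unregister it on exit, relying on
 * the unwind-on-failure behaviour of crypto_register_algs() above.
 *
 *	static struct crypto_alg my_algs[] = { ... };
 *
 *	static int __init my_module_init(void)
 *	{
 *		return crypto_register_algs(my_algs, ARRAY_SIZE(my_algs));
 *	}
 *
 *	static void __exit my_module_exit(void)
 *	{
 *		crypto_unregister_algs(my_algs, ARRAY_SIZE(my_algs));
 *	}
 */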

int crypto_register_template(struct crypto_template *tmpl)
{
	struct crypto_template *q;
	int err = -EEXIST;

	down_write(&crypto_alg_sem);

	crypto_check_module_sig(tmpl->module);

	list_for_each_entry(q, &crypto_template_list, list) {
		if (q == tmpl)
			goto out;
	}

	list_add(&tmpl->list, &crypto_template_list);
	err = 0;
out:
	up_write(&crypto_alg_sem);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_template);

int crypto_register_templates(struct crypto_template *tmpls, int count)
{
	int i, err;

	for (i = 0; i < count; i++) {
		err = crypto_register_template(&tmpls[i]);
		if (err)
			goto out;
	}
	return 0;

out:
	for (--i; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_templates);

void crypto_unregister_template(struct crypto_template *tmpl)
{
	struct crypto_instance *inst;
	struct hlist_node *n;
	struct hlist_head *list;
	LIST_HEAD(users);

	down_write(&crypto_alg_sem);

	BUG_ON(list_empty(&tmpl->list));
	list_del_init(&tmpl->list);

	list = &tmpl->instances;
	hlist_for_each_entry(inst, list, list) {
		int err = crypto_remove_alg(&inst->alg, &users);

		BUG_ON(err);
	}

	up_write(&crypto_alg_sem);

	hlist_for_each_entry_safe(inst, n, list, list) {
		BUG_ON(refcount_read(&inst->alg.cra_refcnt) != 1);
		crypto_free_instance(inst);
	}
	crypto_remove_final(&users);
}
EXPORT_SYMBOL_GPL(crypto_unregister_template);

void crypto_unregister_templates(struct crypto_template *tmpls, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_templates);
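
/*
 * Illustrative sketch (hypothetical names): a template provides a
 * ->create() hook that builds instances when e.g. "example(aes)" is
 * requested, and is registered much like a plain algorithm:
 *
 *	static int example_create(struct crypto_template *tmpl,
 *				  struct rtattr **tb);
 *
 *	static struct crypto_template example_tmpl = {
 *		.name	= "example",
 *		.create	= example_create,
 *		.module	= THIS_MODULE,
 *	};
 *
 *	// module init: crypto_register_template(&example_tmpl);
 *	// module exit: crypto_unregister_template(&example_tmpl);
 */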

static struct crypto_template *__crypto_lookup_template(const char *name)
{
	struct crypto_template *q, *tmpl = NULL;

	down_read(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_template_list, list) {
		if (strcmp(q->name, name))
			continue;
		if (unlikely(!crypto_tmpl_get(q)))
			continue;

		tmpl = q;
		break;
	}
	up_read(&crypto_alg_sem);

	return tmpl;
}

struct crypto_template *crypto_lookup_template(const char *name)
{
	return try_then_request_module(__crypto_lookup_template(name),
				       "crypto-%s", name);
}
EXPORT_SYMBOL_GPL(crypto_lookup_template);

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst)
{
	struct crypto_larval *larval;
	struct crypto_spawn *spawn;
	u32 fips_internal = 0;
	int err;

	err = crypto_check_alg(&inst->alg);
	if (err)
		return err;

	inst->alg.cra_module = tmpl->module;
	inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;

	down_write(&crypto_alg_sem);

	larval = ERR_PTR(-EAGAIN);
	for (spawn = inst->spawns; spawn;) {
		struct crypto_spawn *next;

		if (spawn->dead)
			goto unlock;

		next = spawn->next;
		spawn->inst = inst;
		spawn->registered = true;

		fips_internal |= spawn->alg->cra_flags;

		crypto_mod_put(spawn->alg);

		spawn = next;
	}

	inst->alg.cra_flags |= (fips_internal & CRYPTO_ALG_FIPS_INTERNAL);

	larval = __crypto_register_alg(&inst->alg);
	if (IS_ERR(larval))
		goto unlock;
	else if (larval)
		larval->test_started = true;

	hlist_add_head(&inst->list, &tmpl->instances);
	inst->tmpl = tmpl;

unlock:
	up_write(&crypto_alg_sem);

	err = PTR_ERR(larval);
	if (IS_ERR_OR_NULL(larval))
		goto err;

	crypto_wait_for_test(larval);
	err = 0;

err:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_instance);

void crypto_unregister_instance(struct crypto_instance *inst)
{
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);

	crypto_remove_spawns(&inst->alg, &list, NULL);
	crypto_remove_instance(inst, &list);

	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_instance);

int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	int err = -EAGAIN;

	if (WARN_ON_ONCE(inst == NULL))
		return -EINVAL;

	/* Allow the result of crypto_attr_alg_name() to be passed directly */
	if (IS_ERR(name))
		return PTR_ERR(name);

	alg = crypto_find_alg(name, spawn->frontend,
			      type | CRYPTO_ALG_FIPS_INTERNAL, mask);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	down_write(&crypto_alg_sem);
	if (!crypto_is_moribund(alg)) {
		list_add(&spawn->list, &alg->cra_users);
		spawn->alg = alg;
		spawn->mask = mask;
		spawn->next = inst->spawns;
		inst->spawns = spawn;
		inst->alg.cra_flags |=
			(alg->cra_flags & CRYPTO_ALG_INHERITED_FLAGS);
		err = 0;
	}
	up_write(&crypto_alg_sem);
	if (err)
		crypto_mod_put(alg);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_grab_spawn);
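
/*
 * Hedged usage sketch: a template's ->create() hook typically grabs its
 * inner algorithm with the name taken straight from the template
 * parameters (in practice usually via a type-specific wrapper such as
 * crypto_grab_cipher()).  "ctx" and "err_free_inst" are hypothetical;
 * note that an ERR_PTR from crypto_attr_alg_name() may be passed
 * directly (see above).
 *
 *	err = crypto_grab_spawn(&ctx->spawn, inst,
 *				crypto_attr_alg_name(tb[1]), 0, mask);
 *	if (err)
 *		goto err_free_inst;
 */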

void crypto_drop_spawn(struct crypto_spawn *spawn)
{
	if (!spawn->alg) /* not yet initialized? */
		return;

	down_write(&crypto_alg_sem);
	if (!spawn->dead)
		list_del(&spawn->list);
	up_write(&crypto_alg_sem);

	if (!spawn->registered)
		crypto_mod_put(spawn->alg);
}
EXPORT_SYMBOL_GPL(crypto_drop_spawn);

static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg = ERR_PTR(-EAGAIN);
	struct crypto_alg *target;
	bool shoot = false;

	down_read(&crypto_alg_sem);
	if (!spawn->dead) {
		alg = spawn->alg;
		if (!crypto_mod_get(alg)) {
			target = crypto_alg_get(alg);
			shoot = true;
			alg = ERR_PTR(-EAGAIN);
		}
	}
	up_read(&crypto_alg_sem);

	if (shoot) {
		crypto_shoot_alg(target);
		crypto_alg_put(target);
	}

	return alg;
}

struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = ERR_PTR(-EINVAL);
	if (unlikely((alg->cra_flags ^ type) & mask))
		goto out_put_alg;

	tfm = __crypto_alloc_tfm(alg, type, mask);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm);

void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = crypto_create_tfm(alg, spawn->frontend);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);

int crypto_register_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_register(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_register_notifier);

int crypto_unregister_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_unregister(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_unregister_notifier);

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
{
	struct rtattr *rta = tb[0];
	struct crypto_attr_type *algt;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*algt))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_TYPE)
		return ERR_PTR(-EINVAL);

	algt = RTA_DATA(rta);

	return algt;
}
EXPORT_SYMBOL_GPL(crypto_get_attr_type);

/**
 * crypto_check_attr_type() - check algorithm type and compute inherited mask
 * @tb: the template parameters
 * @type: the algorithm type the template would be instantiated as
 * @mask_ret: (output) the mask that should be passed to crypto_grab_*()
 *	      to restrict the flags of any inner algorithms
 *
 * Validate that the algorithm type the user requested is compatible with the
 * one the template would actually be instantiated as.  E.g., if the user is
 * doing crypto_alloc_shash("cbc(aes)", ...), this would return an error because
 * the "cbc" template creates an "skcipher" algorithm, not an "shash" algorithm.
 *
 * Also compute the mask to use to restrict the flags of any inner algorithms.
 *
 * Return: 0 on success; -errno on failure
 */
int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ type) & algt->mask)
		return -EINVAL;

	*mask_ret = crypto_algt_inherited_mask(algt);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_check_attr_type);
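
/*
 * Illustrative call site (assumed, not taken from a specific template):
 * a template instantiating an skcipher would start its ->create() hook
 * roughly like this, then pass "mask" on to crypto_grab_*():
 *
 *	u32 mask;
 *	int err;
 *
 *	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
 *	if (err)
 *		return err;
 */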

const char *crypto_attr_alg_name(struct rtattr *rta)
{
	struct crypto_attr_alg *alga;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*alga))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_ALG)
		return ERR_PTR(-EINVAL);

	alga = RTA_DATA(rta);
	alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;

	return alga->name;
}
EXPORT_SYMBOL_GPL(crypto_attr_alg_name);

int crypto_inst_setname(struct crypto_instance *inst, const char *name,
			struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
		     alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
		     name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_inst_setname);
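
/*
 * Worked example: for name "xts" and an alg with cra_name "aes" and
 * cra_driver_name "aes-generic", this sets the instance's cra_name to
 * "xts(aes)" and its cra_driver_name to "xts(aes-generic)", returning
 * -ENAMETOOLONG if either result would not fit in CRYPTO_MAX_ALG_NAME.
 */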

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
{
	INIT_LIST_HEAD(&queue->list);
	queue->backlog = &queue->list;
	queue->qlen = 0;
	queue->max_qlen = max_qlen;
}
EXPORT_SYMBOL_GPL(crypto_init_queue);

int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request)
{
	int err = -EINPROGRESS;

	if (unlikely(queue->qlen >= queue->max_qlen)) {
		if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) {
			err = -ENOSPC;
			goto out;
		}
		err = -EBUSY;
		if (queue->backlog == &queue->list)
			queue->backlog = &request->list;
	}

	queue->qlen++;
	list_add_tail(&request->list, &queue->list);

out:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request);

void crypto_enqueue_request_head(struct crypto_queue *queue,
				 struct crypto_async_request *request)
{
	queue->qlen++;
	list_add(&request->list, &queue->list);
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request_head);

struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
{
	struct list_head *request;

	if (unlikely(!queue->qlen))
		return NULL;

	queue->qlen--;

	if (queue->backlog != &queue->list)
		queue->backlog = queue->backlog->next;

	request = queue->list.next;
	list_del(request);

	return list_entry(request, struct crypto_async_request, list);
}
EXPORT_SYMBOL_GPL(crypto_dequeue_request);
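
/*
 * Usage sketch (illustrative; "drv" and its lock are hypothetical):
 * requests are queued from the submission path and pulled off by a
 * worker.  -EBUSY means the request was accepted but backlogged;
 * -ENOSPC means it was rejected because the queue is full and
 * CRYPTO_TFM_REQ_MAY_BACKLOG was not set.
 *
 *	crypto_init_queue(&drv->queue, 64);
 *
 *	// submission path, under drv->lock:
 *	//	err = crypto_enqueue_request(&drv->queue, req);
 *
 *	// worker, under drv->lock:
 *	//	backlog = crypto_get_backlog(&drv->queue);
 *	//	req = crypto_dequeue_request(&drv->queue);
 */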

static inline void crypto_inc_byte(u8 *a, unsigned int size)
{
	u8 *b = (a + size);
	u8 c;

	for (; size; size--) {
		c = *--b + 1;
		*b = c;
		if (c)
			break;
	}
}

void crypto_inc(u8 *a, unsigned int size)
{
	__be32 *b = (__be32 *)(a + size);
	u32 c;

	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
	    IS_ALIGNED((unsigned long)b, __alignof__(*b)))
		for (; size >= 4; size -= 4) {
			c = be32_to_cpu(*--b) + 1;
			*b = cpu_to_be32(c);
			if (likely(c))
				return;
		}

	crypto_inc_byte(a, size);
}
EXPORT_SYMBOL_GPL(crypto_inc);
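
/*
 * Worked example: crypto_inc() treats the buffer as one big-endian
 * counter.  A 4-byte counter { 0x00, 0x00, 0x00, 0xff } becomes
 * { 0x00, 0x00, 0x01, 0x00 } after one call, and an all-0xff counter
 * wraps around to all zeroes.
 */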

void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int len)
{
	int relalign = 0;

	if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) {
		int size = sizeof(unsigned long);
		int d = (((unsigned long)dst ^ (unsigned long)src1) |
			 ((unsigned long)dst ^ (unsigned long)src2)) &
			(size - 1);

		relalign = d ? 1 << __ffs(d) : size;

		/*
		 * If we care about alignment, process as many bytes as
		 * needed to advance dst and src to values whose alignments
		 * equal their relative alignment. This will allow us to
		 * process the remainder of the input using optimal strides.
		 */
		while (((unsigned long)dst & (relalign - 1)) && len > 0) {
			*dst++ = *src1++ ^ *src2++;
			len--;
		}
	}

	while (IS_ENABLED(CONFIG_64BIT) && len >= 8 && !(relalign & 7)) {
		if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) {
			u64 l = get_unaligned((u64 *)src1) ^
				get_unaligned((u64 *)src2);
			put_unaligned(l, (u64 *)dst);
		} else {
			*(u64 *)dst = *(u64 *)src1 ^ *(u64 *)src2;
		}
		dst += 8;
		src1 += 8;
		src2 += 8;
		len -= 8;
	}

	while (len >= 4 && !(relalign & 3)) {
		if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) {
			u32 l = get_unaligned((u32 *)src1) ^
				get_unaligned((u32 *)src2);
			put_unaligned(l, (u32 *)dst);
		} else {
			*(u32 *)dst = *(u32 *)src1 ^ *(u32 *)src2;
		}
		dst += 4;
		src1 += 4;
		src2 += 4;
		len -= 4;
	}

	while (len >= 2 && !(relalign & 1)) {
		if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) {
			u16 l = get_unaligned((u16 *)src1) ^
				get_unaligned((u16 *)src2);
			put_unaligned(l, (u16 *)dst);
		} else {
			*(u16 *)dst = *(u16 *)src1 ^ *(u16 *)src2;
		}
		dst += 2;
		src1 += 2;
		src2 += 2;
		len -= 2;
	}

	while (len--)
		*dst++ = *src1++ ^ *src2++;
}
EXPORT_SYMBOL_GPL(__crypto_xor);
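
/*
 * Callers normally go through the crypto_xor()/crypto_xor_cpy() inline
 * wrappers in <crypto/algapi.h>, which fall back to this function for
 * sizes and alignments they cannot handle directly.  Illustrative use,
 * XORing a keystream into a buffer in place ("buf", "keystream" and
 * "nbytes" are hypothetical):
 *
 *	crypto_xor(buf, keystream, nbytes);
 */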

unsigned int crypto_alg_extsize(struct crypto_alg *alg)
{
	return alg->cra_ctxsize +
	       (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
}
EXPORT_SYMBOL_GPL(crypto_alg_extsize);
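
/*
 * Worked example (assumed figures): with cra_ctxsize = 100,
 * cra_alignmask = 15 and crypto_tfm_ctx_alignment() = 8, this returns
 * 100 + (15 & ~7) = 108, i.e. enough slack to realign the context
 * pointer by hand.
 */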

int crypto_type_has_alg(const char *name, const struct crypto_type *frontend,
			u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_find_alg(name, frontend, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_type_has_alg);

#ifdef CONFIG_CRYPTO_STATS
void crypto_stats_init(struct crypto_alg *alg)
{
	memset(&alg->stats, 0, sizeof(alg->stats));
}
EXPORT_SYMBOL_GPL(crypto_stats_init);

void crypto_stats_get(struct crypto_alg *alg)
{
	crypto_alg_get(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_get);

void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.aead.err_cnt);
	} else {
		atomic64_inc(&alg->stats.aead.encrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.aead.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_encrypt);

void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.aead.err_cnt);
	} else {
		atomic64_inc(&alg->stats.aead.decrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.aead.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_decrypt);

void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.akcipher.encrypt_cnt);
		atomic64_add(src_len, &alg->stats.akcipher.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_encrypt);

void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.akcipher.decrypt_cnt);
		atomic64_add(src_len, &alg->stats.akcipher.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_decrypt);

void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	else
		atomic64_inc(&alg->stats.akcipher.sign_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_sign);

void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	else
		atomic64_inc(&alg->stats.akcipher.verify_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_verify);

void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.compress.err_cnt);
	} else {
		atomic64_inc(&alg->stats.compress.compress_cnt);
		atomic64_add(slen, &alg->stats.compress.compress_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_compress);

void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.compress.err_cnt);
	} else {
		atomic64_inc(&alg->stats.compress.decompress_cnt);
		atomic64_add(slen, &alg->stats.compress.decompress_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_decompress);

void crypto_stats_ahash_update(unsigned int nbytes, int ret,
			       struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.hash.err_cnt);
	else
		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_update);

void crypto_stats_ahash_final(unsigned int nbytes, int ret,
			      struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.hash.err_cnt);
	} else {
		atomic64_inc(&alg->stats.hash.hash_cnt);
		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_final);

void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.setsecret_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_set_secret);

void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.generate_public_key_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_generate_public_key);

void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.compute_shared_secret_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_compute_shared_secret);

void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.rng.err_cnt);
	else
		atomic64_inc(&alg->stats.rng.seed_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_seed);

void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.rng.err_cnt);
	} else {
		atomic64_inc(&alg->stats.rng.generate_cnt);
		atomic64_add(dlen, &alg->stats.rng.generate_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_generate);

void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.encrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.cipher.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_encrypt);

void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.decrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.cipher.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_decrypt);
#endif

static void __init crypto_start_tests(void)
{
	for (;;) {
		struct crypto_larval *larval = NULL;
		struct crypto_alg *q;

		down_write(&crypto_alg_sem);

		list_for_each_entry(q, &crypto_alg_list, cra_list) {
			struct crypto_larval *l;

			if (!crypto_is_larval(q))
				continue;

			l = (void *)q;

			if (!crypto_is_test_larval(l))
				continue;

			if (l->test_started)
				continue;

			l->test_started = true;
			larval = l;
			break;
		}

		up_write(&crypto_alg_sem);

		if (!larval)
			break;

		crypto_wait_for_test(larval);
	}

	static_branch_enable(&crypto_boot_test_finished);
}

static int __init crypto_algapi_init(void)
{
	crypto_init_proc();
	crypto_start_tests();
	return 0;
}

static void __exit crypto_algapi_exit(void)
{
	crypto_exit_proc();
}

/*
 * We run this at late_initcall so that all the built-in algorithms
 * have had a chance to register themselves first.
 */
late_initcall(crypto_algapi_init);
module_exit(crypto_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Cryptographic algorithms API");
MODULE_SOFTDEP("pre: cryptomgr");