Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

crypto: cryptd - Use refcount_t for refcount

Reference counters should use refcount_t instead of atomic_t,
because the implementation of refcount_t can prevent
overflows and detect possible use-after-free.
So convert the atomic_t ref counters to refcount_t.

Signed-off-by: Chuhong Yuan <hslester96@gmail.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

Authored by Chuhong Yuan, committed by Herbert Xu
43b970fa 03a3bb7a

+22 -22
+22 -22
crypto/cryptd.c
··· 16 16 #include <crypto/internal/aead.h> 17 17 #include <crypto/internal/skcipher.h> 18 18 #include <crypto/cryptd.h> 19 - #include <linux/atomic.h> 19 + #include <linux/refcount.h> 20 20 #include <linux/err.h> 21 21 #include <linux/init.h> 22 22 #include <linux/kernel.h> ··· 63 63 }; 64 64 65 65 struct cryptd_skcipher_ctx { 66 - atomic_t refcnt; 66 + refcount_t refcnt; 67 67 struct crypto_sync_skcipher *child; 68 68 }; 69 69 ··· 72 72 }; 73 73 74 74 struct cryptd_hash_ctx { 75 - atomic_t refcnt; 75 + refcount_t refcnt; 76 76 struct crypto_shash *child; 77 77 }; 78 78 ··· 82 82 }; 83 83 84 84 struct cryptd_aead_ctx { 85 - atomic_t refcnt; 85 + refcount_t refcnt; 86 86 struct crypto_aead *child; 87 87 }; 88 88 ··· 127 127 { 128 128 int cpu, err; 129 129 struct cryptd_cpu_queue *cpu_queue; 130 - atomic_t *refcnt; 130 + refcount_t *refcnt; 131 131 132 132 cpu = get_cpu(); 133 133 cpu_queue = this_cpu_ptr(queue->cpu_queue); ··· 140 140 141 141 queue_work_on(cpu, cryptd_wq, &cpu_queue->work); 142 142 143 - if (!atomic_read(refcnt)) 143 + if (!refcount_read(refcnt)) 144 144 goto out_put_cpu; 145 145 146 - atomic_inc(refcnt); 146 + refcount_inc(refcnt); 147 147 148 148 out_put_cpu: 149 149 put_cpu(); ··· 270 270 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); 271 271 struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm); 272 272 struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req); 273 - int refcnt = atomic_read(&ctx->refcnt); 273 + int refcnt = refcount_read(&ctx->refcnt); 274 274 275 275 local_bh_disable(); 276 276 rctx->complete(&req->base, err); 277 277 local_bh_enable(); 278 278 279 - if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt)) 279 + if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt)) 280 280 crypto_free_skcipher(tfm); 281 281 } 282 282 ··· 521 521 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); 522 522 struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm); 523 523 struct 
cryptd_hash_request_ctx *rctx = ahash_request_ctx(req); 524 - int refcnt = atomic_read(&ctx->refcnt); 524 + int refcnt = refcount_read(&ctx->refcnt); 525 525 526 526 local_bh_disable(); 527 527 rctx->complete(&req->base, err); 528 528 local_bh_enable(); 529 529 530 - if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt)) 530 + if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt)) 531 531 crypto_free_ahash(tfm); 532 532 } 533 533 ··· 772 772 773 773 out: 774 774 ctx = crypto_aead_ctx(tfm); 775 - refcnt = atomic_read(&ctx->refcnt); 775 + refcnt = refcount_read(&ctx->refcnt); 776 776 777 777 local_bh_disable(); 778 778 compl(&req->base, err); 779 779 local_bh_enable(); 780 780 781 - if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt)) 781 + if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt)) 782 782 crypto_free_aead(tfm); 783 783 } 784 784 ··· 979 979 } 980 980 981 981 ctx = crypto_skcipher_ctx(tfm); 982 - atomic_set(&ctx->refcnt, 1); 982 + refcount_set(&ctx->refcnt, 1); 983 983 984 984 return container_of(tfm, struct cryptd_skcipher, base); 985 985 } ··· 997 997 { 998 998 struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base); 999 999 1000 - return atomic_read(&ctx->refcnt) - 1; 1000 + return refcount_read(&ctx->refcnt) - 1; 1001 1001 } 1002 1002 EXPORT_SYMBOL_GPL(cryptd_skcipher_queued); 1003 1003 ··· 1005 1005 { 1006 1006 struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base); 1007 1007 1008 - if (atomic_dec_and_test(&ctx->refcnt)) 1008 + if (refcount_dec_and_test(&ctx->refcnt)) 1009 1009 crypto_free_skcipher(&tfm->base); 1010 1010 } 1011 1011 EXPORT_SYMBOL_GPL(cryptd_free_skcipher); ··· 1029 1029 } 1030 1030 1031 1031 ctx = crypto_ahash_ctx(tfm); 1032 - atomic_set(&ctx->refcnt, 1); 1032 + refcount_set(&ctx->refcnt, 1); 1033 1033 1034 1034 return __cryptd_ahash_cast(tfm); 1035 1035 } ··· 1054 1054 { 1055 1055 struct cryptd_hash_ctx *ctx = 
crypto_ahash_ctx(&tfm->base); 1056 1056 1057 - return atomic_read(&ctx->refcnt) - 1; 1057 + return refcount_read(&ctx->refcnt) - 1; 1058 1058 } 1059 1059 EXPORT_SYMBOL_GPL(cryptd_ahash_queued); 1060 1060 ··· 1062 1062 { 1063 1063 struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base); 1064 1064 1065 - if (atomic_dec_and_test(&ctx->refcnt)) 1065 + if (refcount_dec_and_test(&ctx->refcnt)) 1066 1066 crypto_free_ahash(&tfm->base); 1067 1067 } 1068 1068 EXPORT_SYMBOL_GPL(cryptd_free_ahash); ··· 1086 1086 } 1087 1087 1088 1088 ctx = crypto_aead_ctx(tfm); 1089 - atomic_set(&ctx->refcnt, 1); 1089 + refcount_set(&ctx->refcnt, 1); 1090 1090 1091 1091 return __cryptd_aead_cast(tfm); 1092 1092 } ··· 1104 1104 { 1105 1105 struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base); 1106 1106 1107 - return atomic_read(&ctx->refcnt) - 1; 1107 + return refcount_read(&ctx->refcnt) - 1; 1108 1108 } 1109 1109 EXPORT_SYMBOL_GPL(cryptd_aead_queued); 1110 1110 ··· 1112 1112 { 1113 1113 struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base); 1114 1114 1115 - if (atomic_dec_and_test(&ctx->refcnt)) 1115 + if (refcount_dec_and_test(&ctx->refcnt)) 1116 1116 crypto_free_aead(&tfm->base); 1117 1117 } 1118 1118 EXPORT_SYMBOL_GPL(cryptd_free_aead);