Linux kernel mirror (for testing): git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
Tags: kernel, os, linux

Merge git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6

Merge the crypto tree to pull in fixes for the next merge window.

+47 -43
+1 -1
crypto/asymmetric_keys/public_key.c
··· 141 141 * signature and returns that to us. 142 142 */ 143 143 ret = crypto_akcipher_verify(req); 144 - if (ret == -EINPROGRESS) { 144 + if ((ret == -EINPROGRESS) || (ret == -EBUSY)) { 145 145 wait_for_completion(&compl.completion); 146 146 ret = compl.err; 147 147 }
+2 -3
crypto/drbg.c
··· 1768 1768 break; 1769 1769 case -EINPROGRESS: 1770 1770 case -EBUSY: 1771 - ret = wait_for_completion_interruptible( 1772 - &drbg->ctr_completion); 1773 - if (!ret && !drbg->ctr_async_err) { 1771 + wait_for_completion(&drbg->ctr_completion); 1772 + if (!drbg->ctr_async_err) { 1774 1773 reinit_completion(&drbg->ctr_completion); 1775 1774 break; 1776 1775 }
+2 -4
crypto/gcm.c
··· 152 152 153 153 err = crypto_skcipher_encrypt(&data->req); 154 154 if (err == -EINPROGRESS || err == -EBUSY) { 155 - err = wait_for_completion_interruptible( 156 - &data->result.completion); 157 - if (!err) 158 - err = data->result.err; 155 + wait_for_completion(&data->result.completion); 156 + err = data->result.err; 159 157 } 160 158 161 159 if (err)
+4 -6
drivers/crypto/caam/caamalg.c
··· 1187 1187 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1188 1188 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1189 1189 struct device *jrdev = ctx->jrdev; 1190 - gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG | 1191 - CRYPTO_TFM_REQ_MAY_SLEEP)) ? GFP_KERNEL : GFP_ATOMIC; 1190 + gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? 1191 + GFP_KERNEL : GFP_ATOMIC; 1192 1192 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; 1193 1193 struct aead_edesc *edesc; 1194 1194 int sec4_sg_index, sec4_sg_len, sec4_sg_bytes; ··· 1475 1475 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); 1476 1476 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher); 1477 1477 struct device *jrdev = ctx->jrdev; 1478 - gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG | 1479 - CRYPTO_TFM_REQ_MAY_SLEEP)) ? 1478 + gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? 1480 1479 GFP_KERNEL : GFP_ATOMIC; 1481 1480 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; 1482 1481 struct ablkcipher_edesc *edesc; ··· 1680 1681 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); 1681 1682 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher); 1682 1683 struct device *jrdev = ctx->jrdev; 1683 - gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG | 1684 - CRYPTO_TFM_REQ_MAY_SLEEP)) ? 1684 + gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? 1685 1685 GFP_KERNEL : GFP_ATOMIC; 1686 1686 int src_nents, mapped_src_nents, dst_nents, mapped_dst_nents; 1687 1687 struct ablkcipher_edesc *edesc;
+4 -6
drivers/crypto/caam/caamalg_qi.c
··· 555 555 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead), 556 556 typeof(*alg), aead); 557 557 struct device *qidev = ctx->qidev; 558 - gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG | 559 - CRYPTO_TFM_REQ_MAY_SLEEP)) ? GFP_KERNEL : GFP_ATOMIC; 558 + gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? 559 + GFP_KERNEL : GFP_ATOMIC; 560 560 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; 561 561 struct aead_edesc *edesc; 562 562 dma_addr_t qm_sg_dma, iv_dma = 0; ··· 808 808 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); 809 809 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher); 810 810 struct device *qidev = ctx->qidev; 811 - gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG | 812 - CRYPTO_TFM_REQ_MAY_SLEEP)) ? 811 + gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? 813 812 GFP_KERNEL : GFP_ATOMIC; 814 813 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; 815 814 struct ablkcipher_edesc *edesc; ··· 952 953 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); 953 954 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher); 954 955 struct device *qidev = ctx->qidev; 955 - gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG | 956 - CRYPTO_TFM_REQ_MAY_SLEEP)) ? 956 + gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? 957 957 GFP_KERNEL : GFP_ATOMIC; 958 958 int src_nents, mapped_src_nents, dst_nents, mapped_dst_nents; 959 959 struct ablkcipher_edesc *edesc;
+16 -16
drivers/crypto/caam/caamhash.c
··· 719 719 struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash); 720 720 struct caam_hash_state *state = ahash_request_ctx(req); 721 721 struct device *jrdev = ctx->jrdev; 722 - gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG | 723 - CRYPTO_TFM_REQ_MAY_SLEEP)) ? GFP_KERNEL : GFP_ATOMIC; 722 + gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? 723 + GFP_KERNEL : GFP_ATOMIC; 724 724 u8 *buf = current_buf(state); 725 725 int *buflen = current_buflen(state); 726 726 u8 *next_buf = alt_buf(state); ··· 849 849 struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash); 850 850 struct caam_hash_state *state = ahash_request_ctx(req); 851 851 struct device *jrdev = ctx->jrdev; 852 - gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG | 853 - CRYPTO_TFM_REQ_MAY_SLEEP)) ? GFP_KERNEL : GFP_ATOMIC; 852 + gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? 853 + GFP_KERNEL : GFP_ATOMIC; 854 854 int buflen = *current_buflen(state); 855 855 u32 *desc; 856 856 int sec4_sg_bytes, sec4_sg_src_index; ··· 926 926 struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash); 927 927 struct caam_hash_state *state = ahash_request_ctx(req); 928 928 struct device *jrdev = ctx->jrdev; 929 - gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG | 930 - CRYPTO_TFM_REQ_MAY_SLEEP)) ? GFP_KERNEL : GFP_ATOMIC; 929 + gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? 930 + GFP_KERNEL : GFP_ATOMIC; 931 931 int buflen = *current_buflen(state); 932 932 u32 *desc; 933 933 int sec4_sg_src_index; ··· 1013 1013 struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash); 1014 1014 struct caam_hash_state *state = ahash_request_ctx(req); 1015 1015 struct device *jrdev = ctx->jrdev; 1016 - gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG | 1017 - CRYPTO_TFM_REQ_MAY_SLEEP)) ? GFP_KERNEL : GFP_ATOMIC; 1016 + gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? 
1017 + GFP_KERNEL : GFP_ATOMIC; 1018 1018 u32 *desc; 1019 1019 int digestsize = crypto_ahash_digestsize(ahash); 1020 1020 int src_nents, mapped_nents; ··· 1093 1093 struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash); 1094 1094 struct caam_hash_state *state = ahash_request_ctx(req); 1095 1095 struct device *jrdev = ctx->jrdev; 1096 - gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG | 1097 - CRYPTO_TFM_REQ_MAY_SLEEP)) ? GFP_KERNEL : GFP_ATOMIC; 1096 + gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? 1097 + GFP_KERNEL : GFP_ATOMIC; 1098 1098 u8 *buf = current_buf(state); 1099 1099 int buflen = *current_buflen(state); 1100 1100 u32 *desc; ··· 1154 1154 struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash); 1155 1155 struct caam_hash_state *state = ahash_request_ctx(req); 1156 1156 struct device *jrdev = ctx->jrdev; 1157 - gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG | 1158 - CRYPTO_TFM_REQ_MAY_SLEEP)) ? GFP_KERNEL : GFP_ATOMIC; 1157 + gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? 1158 + GFP_KERNEL : GFP_ATOMIC; 1159 1159 u8 *buf = current_buf(state); 1160 1160 int *buflen = current_buflen(state); 1161 1161 u8 *next_buf = alt_buf(state); ··· 1280 1280 struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash); 1281 1281 struct caam_hash_state *state = ahash_request_ctx(req); 1282 1282 struct device *jrdev = ctx->jrdev; 1283 - gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG | 1284 - CRYPTO_TFM_REQ_MAY_SLEEP)) ? GFP_KERNEL : GFP_ATOMIC; 1283 + gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? 
1284 + GFP_KERNEL : GFP_ATOMIC; 1285 1285 int buflen = *current_buflen(state); 1286 1286 u32 *desc; 1287 1287 int sec4_sg_bytes, sec4_sg_src_index, src_nents, mapped_nents; ··· 1370 1370 struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash); 1371 1371 struct caam_hash_state *state = ahash_request_ctx(req); 1372 1372 struct device *jrdev = ctx->jrdev; 1373 - gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG | 1374 - CRYPTO_TFM_REQ_MAY_SLEEP)) ? GFP_KERNEL : GFP_ATOMIC; 1373 + gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? 1374 + GFP_KERNEL : GFP_ATOMIC; 1375 1375 u8 *next_buf = alt_buf(state); 1376 1376 int *next_buflen = alt_buflen(state); 1377 1377 int to_hash;
+2 -2
drivers/crypto/caam/caampkc.c
··· 173 173 struct caam_rsa_ctx *ctx = akcipher_tfm_ctx(tfm); 174 174 struct device *dev = ctx->dev; 175 175 struct rsa_edesc *edesc; 176 - gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG | 177 - CRYPTO_TFM_REQ_MAY_SLEEP)) ? GFP_KERNEL : GFP_ATOMIC; 176 + gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? 177 + GFP_KERNEL : GFP_ATOMIC; 178 178 int sgc; 179 179 int sec4_sg_index, sec4_sg_len = 0, sec4_sg_bytes; 180 180 int src_nents, dst_nents;
+4 -1
drivers/infiniband/sw/rxe/rxe.h
··· 68 68 static inline u32 rxe_crc32(struct rxe_dev *rxe, 69 69 u32 crc, void *next, size_t len) 70 70 { 71 + u32 retval; 71 72 int err; 72 73 73 74 SHASH_DESC_ON_STACK(shash, rxe->tfm); ··· 82 81 return crc32_le(crc, next, len); 83 82 } 84 83 85 - return *(u32 *)shash_desc_ctx(shash); 84 + retval = *(u32 *)shash_desc_ctx(shash); 85 + barrier_data(shash_desc_ctx(shash)); 86 + return retval; 86 87 } 87 88 88 89 int rxe_set_mtu(struct rxe_dev *rxe, unsigned int dev_mtu);
+4 -1
fs/btrfs/hash.c
··· 38 38 { 39 39 SHASH_DESC_ON_STACK(shash, tfm); 40 40 u32 *ctx = (u32 *)shash_desc_ctx(shash); 41 + u32 retval; 41 42 int err; 42 43 43 44 shash->tfm = tfm; ··· 48 47 err = crypto_shash_update(shash, address, length); 49 48 BUG_ON(err); 50 49 51 - return *ctx; 50 + retval = *ctx; 51 + barrier_data(ctx); 52 + return retval; 52 53 }
+4 -1
fs/f2fs/f2fs.h
··· 1078 1078 { 1079 1079 SHASH_DESC_ON_STACK(shash, sbi->s_chksum_driver); 1080 1080 u32 *ctx = (u32 *)shash_desc_ctx(shash); 1081 + u32 retval; 1081 1082 int err; 1082 1083 1083 1084 shash->tfm = sbi->s_chksum_driver; ··· 1088 1087 err = crypto_shash_update(shash, address, length); 1089 1088 BUG_ON(err); 1090 1089 1091 - return *ctx; 1090 + retval = *ctx; 1091 + barrier_data(ctx); 1092 + return retval; 1092 1093 } 1093 1094 1094 1095 static inline bool f2fs_crc_valid(struct f2fs_sb_info *sbi, __u32 blk_crc,
+4 -2
lib/libcrc32c.c
··· 43 43 u32 crc32c(u32 crc, const void *address, unsigned int length) 44 44 { 45 45 SHASH_DESC_ON_STACK(shash, tfm); 46 - u32 *ctx = (u32 *)shash_desc_ctx(shash); 46 + u32 ret, *ctx = (u32 *)shash_desc_ctx(shash); 47 47 int err; 48 48 49 49 shash->tfm = tfm; ··· 53 53 err = crypto_shash_update(shash, address, length); 54 54 BUG_ON(err); 55 55 56 - return *ctx; 56 + ret = *ctx; 57 + barrier_data(ctx); 58 + return ret; 57 59 } 58 60 59 61 EXPORT_SYMBOL(crc32c);