Linux kernel mirror (for testing): https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

libceph: Use skcipher

This patch replaces uses of blkcipher with skcipher.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

+56 -41
net/ceph/crypto.c
··· 4 4 #include <linux/err.h> 5 5 #include <linux/scatterlist.h> 6 6 #include <linux/slab.h> 7 - #include <crypto/hash.h> 7 + #include <crypto/aes.h> 8 + #include <crypto/skcipher.h> 8 9 #include <linux/key-type.h> 9 10 10 11 #include <keys/ceph-type.h> ··· 80 79 return 0; 81 80 } 82 81 83 - static struct crypto_blkcipher *ceph_crypto_alloc_cipher(void) 82 + static struct crypto_skcipher *ceph_crypto_alloc_cipher(void) 84 83 { 85 - return crypto_alloc_blkcipher("cbc(aes)", 0, CRYPTO_ALG_ASYNC); 84 + return crypto_alloc_skcipher("cbc(aes)", 0, CRYPTO_ALG_ASYNC); 86 85 } 87 86 88 87 static const u8 *aes_iv = (u8 *)CEPH_AES_IV; ··· 163 162 { 164 163 struct scatterlist sg_in[2], prealloc_sg; 165 164 struct sg_table sg_out; 166 - struct crypto_blkcipher *tfm = ceph_crypto_alloc_cipher(); 167 - struct blkcipher_desc desc = { .tfm = tfm, .flags = 0 }; 165 + struct crypto_skcipher *tfm = ceph_crypto_alloc_cipher(); 166 + SKCIPHER_REQUEST_ON_STACK(req, tfm); 168 167 int ret; 169 - void *iv; 170 - int ivsize; 168 + int ivsize = AES_BLOCK_SIZE; 169 + char iv[ivsize]; 171 170 size_t zero_padding = (0x10 - (src_len & 0x0f)); 172 171 char pad[16]; 173 172 ··· 185 184 if (ret) 186 185 goto out_tfm; 187 186 188 - crypto_blkcipher_setkey((void *)tfm, key, key_len); 189 - iv = crypto_blkcipher_crt(tfm)->iv; 190 - ivsize = crypto_blkcipher_ivsize(tfm); 187 + crypto_skcipher_setkey((void *)tfm, key, key_len); 191 188 memcpy(iv, aes_iv, ivsize); 189 + 190 + skcipher_request_set_tfm(req, tfm); 191 + skcipher_request_set_callback(req, 0, NULL, NULL); 192 + skcipher_request_set_crypt(req, sg_in, sg_out.sgl, 193 + src_len + zero_padding, iv); 192 194 193 195 /* 194 196 print_hex_dump(KERN_ERR, "enc key: ", DUMP_PREFIX_NONE, 16, 1, ··· 201 197 print_hex_dump(KERN_ERR, "enc pad: ", DUMP_PREFIX_NONE, 16, 1, 202 198 pad, zero_padding, 1); 203 199 */ 204 - ret = crypto_blkcipher_encrypt(&desc, sg_out.sgl, sg_in, 205 - src_len + zero_padding); 200 + ret = crypto_skcipher_encrypt(req); 201 + 
skcipher_request_zero(req); 206 202 if (ret < 0) { 207 203 pr_err("ceph_aes_crypt failed %d\n", ret); 208 204 goto out_sg; ··· 215 211 out_sg: 216 212 teardown_sgtable(&sg_out); 217 213 out_tfm: 218 - crypto_free_blkcipher(tfm); 214 + crypto_free_skcipher(tfm); 219 215 return ret; 220 216 } 221 217 ··· 226 222 { 227 223 struct scatterlist sg_in[3], prealloc_sg; 228 224 struct sg_table sg_out; 229 - struct crypto_blkcipher *tfm = ceph_crypto_alloc_cipher(); 230 - struct blkcipher_desc desc = { .tfm = tfm, .flags = 0 }; 225 + struct crypto_skcipher *tfm = ceph_crypto_alloc_cipher(); 226 + SKCIPHER_REQUEST_ON_STACK(req, tfm); 231 227 int ret; 232 - void *iv; 233 - int ivsize; 228 + int ivsize = AES_BLOCK_SIZE; 229 + char iv[ivsize]; 234 230 size_t zero_padding = (0x10 - ((src1_len + src2_len) & 0x0f)); 235 231 char pad[16]; 236 232 ··· 249 245 if (ret) 250 246 goto out_tfm; 251 247 252 - crypto_blkcipher_setkey((void *)tfm, key, key_len); 253 - iv = crypto_blkcipher_crt(tfm)->iv; 254 - ivsize = crypto_blkcipher_ivsize(tfm); 248 + crypto_skcipher_setkey((void *)tfm, key, key_len); 255 249 memcpy(iv, aes_iv, ivsize); 250 + 251 + skcipher_request_set_tfm(req, tfm); 252 + skcipher_request_set_callback(req, 0, NULL, NULL); 253 + skcipher_request_set_crypt(req, sg_in, sg_out.sgl, 254 + src1_len + src2_len + zero_padding, iv); 256 255 257 256 /* 258 257 print_hex_dump(KERN_ERR, "enc key: ", DUMP_PREFIX_NONE, 16, 1, ··· 267 260 print_hex_dump(KERN_ERR, "enc pad: ", DUMP_PREFIX_NONE, 16, 1, 268 261 pad, zero_padding, 1); 269 262 */ 270 - ret = crypto_blkcipher_encrypt(&desc, sg_out.sgl, sg_in, 271 - src1_len + src2_len + zero_padding); 263 + ret = crypto_skcipher_encrypt(req); 264 + skcipher_request_zero(req); 272 265 if (ret < 0) { 273 266 pr_err("ceph_aes_crypt2 failed %d\n", ret); 274 267 goto out_sg; ··· 281 274 out_sg: 282 275 teardown_sgtable(&sg_out); 283 276 out_tfm: 284 - crypto_free_blkcipher(tfm); 277 + crypto_free_skcipher(tfm); 285 278 return ret; 286 279 } 287 
280 ··· 291 284 { 292 285 struct sg_table sg_in; 293 286 struct scatterlist sg_out[2], prealloc_sg; 294 - struct crypto_blkcipher *tfm = ceph_crypto_alloc_cipher(); 295 - struct blkcipher_desc desc = { .tfm = tfm }; 287 + struct crypto_skcipher *tfm = ceph_crypto_alloc_cipher(); 288 + SKCIPHER_REQUEST_ON_STACK(req, tfm); 296 289 char pad[16]; 297 - void *iv; 298 - int ivsize; 290 + int ivsize = AES_BLOCK_SIZE; 291 + char iv[16]; 299 292 int ret; 300 293 int last_byte; 301 294 ··· 309 302 if (ret) 310 303 goto out_tfm; 311 304 312 - crypto_blkcipher_setkey((void *)tfm, key, key_len); 313 - iv = crypto_blkcipher_crt(tfm)->iv; 314 - ivsize = crypto_blkcipher_ivsize(tfm); 305 + crypto_skcipher_setkey((void *)tfm, key, key_len); 315 306 memcpy(iv, aes_iv, ivsize); 307 + 308 + skcipher_request_set_tfm(req, tfm); 309 + skcipher_request_set_callback(req, 0, NULL, NULL); 310 + skcipher_request_set_crypt(req, sg_in.sgl, sg_out, 311 + src_len, iv); 316 312 317 313 /* 318 314 print_hex_dump(KERN_ERR, "dec key: ", DUMP_PREFIX_NONE, 16, 1, ··· 323 313 print_hex_dump(KERN_ERR, "dec in: ", DUMP_PREFIX_NONE, 16, 1, 324 314 src, src_len, 1); 325 315 */ 326 - ret = crypto_blkcipher_decrypt(&desc, sg_out, sg_in.sgl, src_len); 316 + ret = crypto_skcipher_decrypt(req); 317 + skcipher_request_zero(req); 327 318 if (ret < 0) { 328 319 pr_err("ceph_aes_decrypt failed %d\n", ret); 329 320 goto out_sg; ··· 349 338 out_sg: 350 339 teardown_sgtable(&sg_in); 351 340 out_tfm: 352 - crypto_free_blkcipher(tfm); 341 + crypto_free_skcipher(tfm); 353 342 return ret; 354 343 } 355 344 ··· 360 349 { 361 350 struct sg_table sg_in; 362 351 struct scatterlist sg_out[3], prealloc_sg; 363 - struct crypto_blkcipher *tfm = ceph_crypto_alloc_cipher(); 364 - struct blkcipher_desc desc = { .tfm = tfm }; 352 + struct crypto_skcipher *tfm = ceph_crypto_alloc_cipher(); 353 + SKCIPHER_REQUEST_ON_STACK(req, tfm); 365 354 char pad[16]; 366 - void *iv; 367 - int ivsize; 355 + int ivsize = AES_BLOCK_SIZE; 356 + char 
iv[ivsize]; 368 357 int ret; 369 358 int last_byte; 370 359 ··· 379 368 if (ret) 380 369 goto out_tfm; 381 370 382 - crypto_blkcipher_setkey((void *)tfm, key, key_len); 383 - iv = crypto_blkcipher_crt(tfm)->iv; 384 - ivsize = crypto_blkcipher_ivsize(tfm); 371 + crypto_skcipher_setkey((void *)tfm, key, key_len); 385 372 memcpy(iv, aes_iv, ivsize); 373 + 374 + skcipher_request_set_tfm(req, tfm); 375 + skcipher_request_set_callback(req, 0, NULL, NULL); 376 + skcipher_request_set_crypt(req, sg_in.sgl, sg_out, 377 + src_len, iv); 386 378 387 379 /* 388 380 print_hex_dump(KERN_ERR, "dec key: ", DUMP_PREFIX_NONE, 16, 1, ··· 393 379 print_hex_dump(KERN_ERR, "dec in: ", DUMP_PREFIX_NONE, 16, 1, 394 380 src, src_len, 1); 395 381 */ 396 - ret = crypto_blkcipher_decrypt(&desc, sg_out, sg_in.sgl, src_len); 382 + ret = crypto_skcipher_decrypt(req); 383 + skcipher_request_zero(req); 397 384 if (ret < 0) { 398 385 pr_err("ceph_aes_decrypt failed %d\n", ret); 399 386 goto out_sg; ··· 430 415 out_sg: 431 416 teardown_sgtable(&sg_in); 432 417 out_tfm: 433 - crypto_free_blkcipher(tfm); 418 + crypto_free_skcipher(tfm); 434 419 return ret; 435 420 } 436 421