/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>
#include <linux/compiler.h>

#include "internal.h"

static const struct crypto_type crypto_shash_type;

int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
		    unsigned int keylen)
{
	return -ENOSYS;
}
EXPORT_SYMBOL_GPL(shash_no_setkey);

static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	kzfree(buffer);
	return err;
}

int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)key & alignmask)
		return shash_setkey_unaligned(tfm, key, keylen);

	return shash->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);

static inline unsigned int shash_align_buffer_size(unsigned len,
						   unsigned long mask)
{
	typedef u8 __aligned_largest u8_aligned;
	return len + (mask & ~(__alignof__(u8_aligned) - 1));
}

static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)]
		__aligned_largest;
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (unaligned_len > len)
		unaligned_len = len;

	memcpy(buf, data, unaligned_len);
	err = shash->update(desc, buf, unaligned_len);
	memset(buf, 0, unaligned_len);

	return err ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)data & alignmask)
		return shash_update_unaligned(desc, data, len);

	return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);
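/*
 * Example (illustrative sketch, not part of the original file): a typical
 * caller drives the synchronous API above through a stack-allocated
 * descriptor.  The "sha256" name and the one-shot crypto_shash_digest()
 * call are only for illustration; an incremental crypto_shash_init() /
 * crypto_shash_update() / crypto_shash_final() sequence works the same
 * way, and misaligned buffers are handled transparently by the
 * *_unaligned() wrappers above.
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
 *	u8 out[32];
 *	int err;
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	{
 *		SHASH_DESC_ON_STACK(desc, tfm);
 *
 *		desc->tfm = tfm;
 *		desc->flags = 0;
 *		err = crypto_shash_digest(desc, data, len, out);
 *	}
 *	crypto_free_shash(tfm);
 */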
static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	u8 ubuf[shash_align_buffer_size(ds, alignmask)]
		__aligned_largest;
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	err = shash->final(desc, buf);
	if (err)
		goto out;

	memcpy(out, buf, ds);

out:
	memset(buf, 0, ds);
	return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)out & alignmask)
		return shash_final_unaligned(desc, out);

	return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);

static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_finup_unaligned(desc, data, len, out);

	return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       crypto_shash_finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

static int shash_default_export(struct shash_desc *desc, void *out)
{
	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
	return 0;
}
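/*
 * Example (illustrative sketch): crypto_shash_export() and
 * crypto_shash_import() checkpoint and resume a partially-computed hash.
 * The state buffer must hold at least crypto_shash_statesize(tfm) bytes;
 * for algorithms relying on the default handlers above that is simply
 * descsize bytes of raw descriptor context (see shash_prepare_alg()).
 *
 *	err = crypto_shash_init(desc) ?:
 *	      crypto_shash_update(desc, part1, len1) ?:
 *	      crypto_shash_export(desc, state);
 *
 *	... later, possibly on another descriptor of the same tfm ...
 *
 *	err = crypto_shash_import(desc2, state) ?:
 *	      crypto_shash_finup(desc2, part2, len2, out);
 */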
static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_init(desc);
}

int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

static int shash_async_update(struct ahash_request *req)
{
	return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}

int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

static int shash_async_finup(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return shash_ahash_finup(req, desc);
}

int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	int err;

	if (nbytes &&
	    (sg = req->src, offset = sg->offset,
	     nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
		void *data;

		data = kmap_atomic(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		kunmap_atomic(data);
		crypto_yield(desc->flags);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static int shash_async_digest(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return shash_ahash_digest(req, desc);
}

static int shash_async_export(struct ahash_request *req, void *out)
{
	return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_import(desc, in);
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}
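/*
 * The shash_async_*() helpers above let the crypto core expose any shash
 * algorithm through the asynchronous ahash interface: the ahash tfm
 * context holds a pointer to the underlying shash tfm (set up in
 * crypto_init_shash_ops_async() below), and each request is completed
 * synchronously by walking req->src.  A caller therefore need not know
 * which kind of implementation backs the name it requests; for example
 * (illustrative sketch):
 *
 *	struct crypto_ahash *tfm = crypto_alloc_ahash("sha256", 0, 0);
 *	struct ahash_request *req = ahash_request_alloc(tfm, GFP_KERNEL);
 *
 *	ahash_request_set_callback(req, 0, NULL, NULL);
 *	ahash_request_set_crypt(req, sg, out, len);
 *	err = crypto_ahash_digest(req);
 *
 *	ahash_request_free(req);
 *	crypto_free_ahash(tfm);
 */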
int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->finup = shash_async_finup;
	crt->digest = shash_async_digest;
	crt->setkey = shash_async_setkey;

	crt->has_setkey = alg->setkey != shash_no_setkey;

	if (alg->export)
		crt->export = shash_async_export;
	if (alg->import)
		crt->import = shash_async_import;

	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);

	hash->descsize = crypto_shash_alg(hash)->descsize;
	return 0;
}

#ifdef CONFIG_NET
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;
	struct shash_alg *salg = __crypto_shash_alg(alg);

	strncpy(rhash.type, "shash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = salg->digestsize;

	if (nla_put(skb, CRYPTOCFGA_REPORT_HASH,
		    sizeof(struct crypto_report_hash), &rhash))
		goto nla_put_failure;
	return 0;

nla_put_failure:
	return -EMSGSIZE;
}
#else
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	return -ENOSYS;
}
#endif

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

static const struct crypto_type crypto_shash_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.report = crypto_shash_report,
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > PAGE_SIZE / 8 ||
	    alg->descsize > PAGE_SIZE / 8 ||
	    alg->statesize > PAGE_SIZE / 8)
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	if (!alg->finup)
		alg->finup = shash_finup_unaligned;
	if (!alg->digest)
		alg->digest = shash_digest_unaligned;
	if (!alg->export) {
		alg->export = shash_default_export;
		alg->import = shash_default_import;
		alg->statesize = alg->descsize;
	}
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

	return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

int crypto_unregister_shash(struct shash_alg *alg)
{
	return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);
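/*
 * Example (illustrative sketch, all "foo" names hypothetical): a driver
 * registers a synchronous hash by filling in a struct shash_alg and
 * calling crypto_register_shash() above.  Only .init, .update, .final,
 * .digestsize and .descsize (plus the base fields) need to be supplied;
 * shash_prepare_alg() fills in defaults for .finup, .digest,
 * .export/.import and .setkey.
 *
 *	static struct shash_alg foo_alg = {
 *		.digestsize	= 32,
 *		.init		= foo_init,
 *		.update		= foo_update,
 *		.final		= foo_final,
 *		.descsize	= sizeof(struct foo_desc_ctx),
 *		.base		= {
 *			.cra_name	 = "foo",
 *			.cra_driver_name = "foo-generic",
 *			.cra_priority	 = 100,
 *			.cra_blocksize	 = 64,
 *			.cra_module	 = THIS_MODULE,
 *		},
 *	};
 *
 * module_init() would then call crypto_register_shash(&foo_alg) and
 * module_exit() crypto_unregister_shash(&foo_alg); arrays of algorithms
 * can use crypto_register_shashes() below instead.
 */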
int crypto_register_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_shash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);

int crypto_unregister_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = count - 1; i >= 0; --i) {
		ret = crypto_unregister_shash(&algs[i]);
		if (ret)
			pr_err("Failed to unregister %s %s: %d\n",
			       algs[i].base.cra_driver_name,
			       algs[i].base.cra_name, ret);
	}

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);

int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_instance(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(shash_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_free_instance);

int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
			    struct shash_alg *alg,
			    struct crypto_instance *inst)
{
	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
				  &crypto_shash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);

struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
	return IS_ERR(alg) ? ERR_CAST(alg) :
	       container_of(alg, struct shash_alg, base);
}
EXPORT_SYMBOL_GPL(shash_attr_alg);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");